diff --git a/.gitattributes b/.gitattributes index 4cd50e4810..7c2f752d69 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,5 +1,5 @@ * text=auto eol=lf -crates/ra_syntax/test_data/** -text eof=LF +crates/syntax/test_data/** -text eof=LF # Older git versions try to fix line endings on images, this prevents it. *.png binary *.jpg binary diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 2acd440122..2deb009cef 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -16,20 +16,6 @@ env: RUSTUP_MAX_RETRIES: 10 jobs: - # rust-audit: - # name: Audit Rust vulnerabilities - # runs-on: ubuntu-latest - # steps: - # - name: Checkout repository - # uses: actions/checkout@v2 - - # - uses: actions-rs/install@v0.1 - # with: - # crate: cargo-audit - # use-tool-cache: true - - # - run: cargo audit - rust: name: Rust runs-on: ${{ matrix.os }} @@ -39,7 +25,7 @@ jobs: strategy: fail-fast: false matrix: - os: [ubuntu-latest, windows-latest, macos-latest] + os: [ubuntu-latest, windows-latest] #, macos-latest] steps: - name: Checkout repository @@ -88,11 +74,14 @@ jobs: if: matrix.os == 'windows-latest' run: Remove-Item ./target/debug/xtask.exe, ./target/debug/deps/xtask.exe - # Weird target to catch non-portable code - rust-power: - name: Rust Power + # Weird targets to catch non-portable code + rust-cross: + name: Rust Cross runs-on: ubuntu-latest + env: + targets: "powerpc-unknown-linux-gnu x86_64-unknown-linux-musl" + steps: - name: Checkout repository uses: actions/checkout@v2 @@ -103,7 +92,9 @@ jobs: toolchain: stable profile: minimal override: true - target: 'powerpc-unknown-linux-gnu' + + - name: Install Rust targets + run: rustup target add ${{ env.targets }} - name: Cache cargo directories uses: actions/cache@v2 @@ -114,14 +105,17 @@ jobs: key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} - name: Check - run: cargo check --target=powerpc-unknown-linux-gnu --all-targets + run: | + for target in ${{ env.targets }}; do + 
cargo check --target=$target --all-targets + done typescript: name: TypeScript strategy: fail-fast: false matrix: - os: [ubuntu-latest, windows-latest, macos-latest] + os: [ubuntu-latest, windows-latest] runs-on: ${{ matrix.os }} diff --git a/Cargo.lock b/Cargo.lock index dc49fc4bdc..2386c8f3a5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -15,15 +15,6 @@ version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ee2a4ec343196209d6594e19543ae87a39f96d5534d7174822a3ad825dd6ed7e" -[[package]] -name = "aho-corasick" -version = "0.7.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "043164d8ba5c4c3035fec9bbee8647c0261d788f3474306f93bb65901cae0e86" -dependencies = [ - "memchr", -] - [[package]] name = "ansi_term" version = "0.12.1" @@ -45,12 +36,33 @@ version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33954243bd79057c2de7338850b85983a44588021f8a5fee574a8888c6de4344" +[[package]] +name = "arena" +version = "0.0.0" + [[package]] name = "arrayvec" version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cff77d8686867eceff3105329d4698d96c2391c176d5d03adc90c7389162b5b8" +[[package]] +name = "assists" +version = "0.0.0" +dependencies = [ + "base_db", + "either", + "hir", + "ide_db", + "itertools", + "profile", + "rustc-hash", + "stdx", + "syntax", + "test_utils", + "text_edit", +] + [[package]] name = "atty" version = "0.2.14" @@ -88,6 +100,21 @@ version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3441f0f7b02788e948e47f457ca01f1d7e6d92c693bc132c22b087d3141c03ff" +[[package]] +name = "base_db" +version = "0.0.0" +dependencies = [ + "cfg", + "profile", + "rustc-hash", + "salsa", + "stdx", + "syntax", + "test_utils", + "tt", + "vfs", +] + [[package]] name = "bitflags" version = "1.2.1" @@ -117,6 +144,16 @@ version = "1.0.58" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "f9a06fb2e53271d7c279ec1efea6ab691c35a2ae67ec0d91d7acec0caf13b518" +[[package]] +name = "cfg" +version = "0.0.0" +dependencies = [ + "mbe", + "rustc-hash", + "syntax", + "tt", +] + [[package]] name = "cfg-if" version = "0.1.10" @@ -285,9 +322,9 @@ checksum = "9bda8e21c04aca2ae33ffc2fd8c23134f3cac46db123ba97bd9d3f3b8a4a85e1" [[package]] name = "either" -version = "1.5.3" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb1f6b1ce1c140482ea30ddd3335fc0024ac7ee112895426e0a629a6c20adfe3" +checksum = "cd56b59865bce947ac5958779cfa508f6c3b9497cc762b7e24a12d11ccde2c4f" [[package]] name = "ena" @@ -309,7 +346,7 @@ dependencies = [ [[package]] name = "expect" -version = "0.1.0" +version = "0.0.0" dependencies = [ "difference", "once_cell", @@ -348,14 +385,14 @@ dependencies = [ [[package]] name = "flycheck" -version = "0.1.0" +version = "0.0.0" dependencies = [ "cargo_metadata", "crossbeam-channel", "jod-thread", "log", - "ra_toolchain", "serde_json", + "toolchain", ] [[package]] @@ -418,9 +455,9 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34f595585f103464d8d2f6e9864682d74c1601fed5e07d62b1c9058dba8246fb" +checksum = "e91b62f79061a0bc2e046024cb7ba44b08419ed238ecbd9adbd787434b9e8c25" dependencies = [ "autocfg", ] @@ -443,6 +480,96 @@ dependencies = [ "libc", ] +[[package]] +name = "hir" +version = "0.0.0" +dependencies = [ + "arrayvec", + "base_db", + "either", + "hir_def", + "hir_expand", + "hir_ty", + "itertools", + "log", + "profile", + "rustc-hash", + "stdx", + "syntax", +] + +[[package]] +name = "hir_def" +version = "0.0.0" +dependencies = [ + "anymap", + "arena", + "base_db", + "cfg", + "drop_bomb", + "either", + "expect", + "fst", + "hir_expand", + "indexmap", + "itertools", + "log", + "mbe", + "once_cell", + "profile", + "rustc-hash", + "smallvec", + "stdx", + "syntax", + "test_utils", + 
"tt", +] + +[[package]] +name = "hir_expand" +version = "0.0.0" +dependencies = [ + "arena", + "base_db", + "either", + "log", + "mbe", + "parser", + "profile", + "rustc-hash", + "syntax", + "test_utils", + "tt", +] + +[[package]] +name = "hir_ty" +version = "0.0.0" +dependencies = [ + "arena", + "arrayvec", + "base_db", + "chalk-ir", + "chalk-recursive", + "chalk-solve", + "ena", + "expect", + "hir_def", + "hir_expand", + "itertools", + "log", + "profile", + "rustc-hash", + "scoped-tls", + "smallvec", + "stdx", + "syntax", + "test_utils", + "tracing", + "tracing-subscriber", + "tracing-tree", +] + [[package]] name = "home" version = "0.5.3" @@ -452,6 +579,49 @@ dependencies = [ "winapi 0.3.9", ] +[[package]] +name = "ide" +version = "0.0.0" +dependencies = [ + "assists", + "base_db", + "cfg", + "either", + "expect", + "hir", + "ide_db", + "indexmap", + "itertools", + "log", + "oorandom", + "profile", + "rustc-hash", + "ssr", + "stdx", + "syntax", + "test_utils", + "text_edit", +] + +[[package]] +name = "ide_db" +version = "0.0.0" +dependencies = [ + "base_db", + "either", + "fst", + "hir", + "log", + "once_cell", + "profile", + "rayon", + "rustc-hash", + "stdx", + "syntax", + "test_utils", + "text_edit", +] + [[package]] name = "idna" version = "0.2.0" @@ -465,9 +635,9 @@ dependencies = [ [[package]] name = "indexmap" -version = "1.5.0" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b88cd59ee5f71fea89a62248fc8f387d44400cefe05ef548466d61ced9029a7" +checksum = "86b45e59b16c76b11bf9738fd5d38879d3bd28ad292d7b313608becb17ae2df9" dependencies = [ "autocfg", "hashbrown", @@ -547,9 +717,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "lazycell" -version = "1.2.1" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b294d6fa9ee409a054354afc4352b0b9ef7ca222c69b8812cbea9e7d2bf3783f" +checksum = 
"830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" [[package]] name = "libc" @@ -641,10 +811,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00" [[package]] -name = "memchr" -version = "2.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3728d817d99e5ac407411fa471ff9800a778d88a24685968b36824eaf4bee400" +name = "mbe" +version = "0.0.0" +dependencies = [ + "log", + "parser", + "rustc-hash", + "smallvec", + "syntax", + "test_utils", + "tt", +] [[package]] name = "memmap" @@ -830,9 +1007,16 @@ dependencies = [ "winapi 0.3.9", ] +[[package]] +name = "parser" +version = "0.0.0" +dependencies = [ + "drop_bomb", +] + [[package]] name = "paths" -version = "0.1.0" +version = "0.0.0" [[package]] name = "percent-encoding" @@ -852,9 +1036,9 @@ dependencies = [ [[package]] name = "perf-event-open-sys" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95db63e37862bc1b842135d2234ef9418f222cc660c6752f45e7cf9ddfb97f96" +checksum = "83e7183862f36d10263d0a1ccaef50fef734ade948bf026afd1bd97355c78273" dependencies = [ "libc", ] @@ -871,9 +1055,9 @@ dependencies = [ [[package]] name = "pico-args" -version = "0.3.3" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b1eee8b1f4966c8343d7ca0f5a8452cd35d5610a2e0efbe2a68cae44bef2046" +checksum = "28b9b4df73455c861d7cbf8be42f01d3b373ed7f02e378d55fa84eafc6f638b1" [[package]] name = "plain" @@ -890,6 +1074,66 @@ dependencies = [ "unicode-xid", ] +[[package]] +name = "proc_macro_api" +version = "0.0.0" +dependencies = [ + "crossbeam-channel", + "jod-thread", + "log", + "serde", + "serde_json", + "tt", +] + +[[package]] +name = "proc_macro_srv" +version = "0.0.0" +dependencies = [ + "cargo_metadata", + "difference", + "goblin", + "libloading", + "mbe", + "memmap", + "proc_macro_api", + 
"serde_derive", + "test_utils", + "toolchain", + "tt", +] + +[[package]] +name = "profile" +version = "0.0.0" +dependencies = [ + "arena", + "backtrace", + "cfg-if", + "libc", + "once_cell", + "perf-event", +] + +[[package]] +name = "project_model" +version = "0.0.0" +dependencies = [ + "anyhow", + "arena", + "base_db", + "cargo_metadata", + "cfg", + "log", + "paths", + "proc_macro_api", + "rustc-hash", + "serde", + "serde_json", + "stdx", + "toolchain", +] + [[package]] name = "quote" version = "1.0.7" @@ -899,332 +1143,6 @@ dependencies = [ "proc-macro2", ] -[[package]] -name = "ra_arena" -version = "0.1.0" - -[[package]] -name = "ra_assists" -version = "0.1.0" -dependencies = [ - "either", - "itertools", - "ra_db", - "ra_fmt", - "ra_hir", - "ra_ide_db", - "ra_prof", - "ra_syntax", - "ra_text_edit", - "rustc-hash", - "stdx", - "test_utils", -] - -[[package]] -name = "ra_cfg" -version = "0.1.0" -dependencies = [ - "ra_mbe", - "ra_syntax", - "ra_tt", - "rustc-hash", -] - -[[package]] -name = "ra_db" -version = "0.1.0" -dependencies = [ - "ra_cfg", - "ra_prof", - "ra_syntax", - "ra_tt", - "rustc-hash", - "salsa", - "stdx", - "test_utils", - "vfs", -] - -[[package]] -name = "ra_fmt" -version = "0.1.0" -dependencies = [ - "itertools", - "ra_syntax", -] - -[[package]] -name = "ra_hir" -version = "0.1.0" -dependencies = [ - "arrayvec", - "either", - "itertools", - "log", - "ra_db", - "ra_hir_def", - "ra_hir_expand", - "ra_hir_ty", - "ra_prof", - "ra_syntax", - "rustc-hash", - "stdx", -] - -[[package]] -name = "ra_hir_def" -version = "0.1.0" -dependencies = [ - "anymap", - "drop_bomb", - "either", - "expect", - "fst", - "indexmap", - "itertools", - "log", - "once_cell", - "ra_arena", - "ra_cfg", - "ra_db", - "ra_hir_expand", - "ra_mbe", - "ra_prof", - "ra_syntax", - "ra_tt", - "rustc-hash", - "smallvec", - "stdx", - "test_utils", -] - -[[package]] -name = "ra_hir_expand" -version = "0.1.0" -dependencies = [ - "either", - "log", - "ra_arena", - "ra_db", - "ra_mbe", - 
"ra_parser", - "ra_prof", - "ra_syntax", - "ra_tt", - "rustc-hash", - "test_utils", -] - -[[package]] -name = "ra_hir_ty" -version = "0.1.0" -dependencies = [ - "arrayvec", - "chalk-ir", - "chalk-recursive", - "chalk-solve", - "ena", - "expect", - "itertools", - "log", - "ra_arena", - "ra_db", - "ra_hir_def", - "ra_hir_expand", - "ra_prof", - "ra_syntax", - "rustc-hash", - "scoped-tls", - "smallvec", - "stdx", - "test_utils", - "tracing", - "tracing-subscriber", - "tracing-tree", -] - -[[package]] -name = "ra_ide" -version = "0.1.0" -dependencies = [ - "either", - "expect", - "indexmap", - "itertools", - "log", - "oorandom", - "ra_assists", - "ra_cfg", - "ra_db", - "ra_fmt", - "ra_hir", - "ra_ide_db", - "ra_prof", - "ra_ssr", - "ra_syntax", - "ra_text_edit", - "rustc-hash", - "stdx", - "test_utils", -] - -[[package]] -name = "ra_ide_db" -version = "0.1.0" -dependencies = [ - "either", - "fst", - "log", - "once_cell", - "ra_db", - "ra_hir", - "ra_prof", - "ra_syntax", - "ra_text_edit", - "rayon", - "rustc-hash", - "stdx", - "test_utils", -] - -[[package]] -name = "ra_mbe" -version = "0.1.0" -dependencies = [ - "log", - "ra_parser", - "ra_syntax", - "ra_tt", - "rustc-hash", - "smallvec", - "test_utils", -] - -[[package]] -name = "ra_parser" -version = "0.1.0" -dependencies = [ - "drop_bomb", -] - -[[package]] -name = "ra_proc_macro" -version = "0.1.0" -dependencies = [ - "crossbeam-channel", - "jod-thread", - "log", - "ra_tt", - "serde", - "serde_json", -] - -[[package]] -name = "ra_proc_macro_srv" -version = "0.1.0" -dependencies = [ - "cargo_metadata", - "difference", - "goblin", - "libloading", - "memmap", - "ra_mbe", - "ra_proc_macro", - "ra_toolchain", - "ra_tt", - "serde_derive", - "test_utils", -] - -[[package]] -name = "ra_prof" -version = "0.1.0" -dependencies = [ - "backtrace", - "cfg-if", - "libc", - "once_cell", - "perf-event", - "ra_arena", -] - -[[package]] -name = "ra_project_model" -version = "0.1.0" -dependencies = [ - "anyhow", - "cargo_metadata", - 
"log", - "paths", - "ra_arena", - "ra_cfg", - "ra_db", - "ra_proc_macro", - "ra_toolchain", - "rustc-hash", - "serde", - "serde_json", - "stdx", -] - -[[package]] -name = "ra_ssr" -version = "0.1.0" -dependencies = [ - "expect", - "ra_db", - "ra_hir", - "ra_ide_db", - "ra_syntax", - "ra_text_edit", - "rustc-hash", - "test_utils", -] - -[[package]] -name = "ra_syntax" -version = "0.1.0" -dependencies = [ - "arrayvec", - "expect", - "itertools", - "once_cell", - "ra_parser", - "ra_text_edit", - "rayon", - "rowan", - "rustc-ap-rustc_lexer", - "rustc-hash", - "serde", - "smol_str", - "stdx", - "test_utils", - "walkdir", -] - -[[package]] -name = "ra_text_edit" -version = "0.1.0" -dependencies = [ - "text-size", -] - -[[package]] -name = "ra_toolchain" -version = "0.1.0" -dependencies = [ - "home", -] - -[[package]] -name = "ra_tt" -version = "0.1.0" -dependencies = [ - "smol_str", - "stdx", -] - [[package]] name = "rayon" version = "1.3.1" @@ -1262,10 +1180,7 @@ version = "1.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c3780fcf44b193bc4d09f36d2a3c87b251da4a046c87795a0d35f4f927ad8e6" dependencies = [ - "aho-corasick", - "memchr", "regex-syntax", - "thread_local", ] [[package]] @@ -1298,45 +1213,45 @@ dependencies = [ [[package]] name = "rust-analyzer" -version = "0.1.0" +version = "0.0.0" dependencies = [ "anyhow", + "base_db", + "cfg", "crossbeam-channel", "env_logger", "expect", "flycheck", + "hir", + "hir_def", + "hir_ty", + "ide", + "ide_db", "itertools", "jod-thread", "log", "lsp-server", "lsp-types", + "mbe", "mimalloc", "oorandom", "parking_lot", "pico-args", - "ra_cfg", - "ra_db", - "ra_hir", - "ra_hir_def", - "ra_hir_ty", - "ra_ide", - "ra_ide_db", - "ra_mbe", - "ra_proc_macro_srv", - "ra_prof", - "ra_project_model", - "ra_ssr", - "ra_syntax", - "ra_text_edit", - "ra_toolchain", - "ra_tt", + "proc_macro_srv", + "profile", + "project_model", "rayon", "rustc-hash", "serde", "serde_json", + "ssr", "stdx", + "syntax", 
"test_utils", + "text_edit", "threadpool", + "toolchain", + "tt", "vfs", "vfs-notify", "winapi 0.3.9", @@ -1457,18 +1372,18 @@ checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" [[package]] name = "serde" -version = "1.0.114" +version = "1.0.115" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5317f7588f0a5078ee60ef675ef96735a1442132dc645eb1d12c018620ed8cd3" +checksum = "e54c9a88f2da7238af84b5101443f0c0d0a3bbdc455e34a5c9497b1903ed55d5" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.114" +version = "1.0.115" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a0be94b04690fbaed37cddffc5c134bf537c8e3329d53e982fe04c374978f8e" +checksum = "609feed1d0a73cc36a0182a840a9b37b4a82f0b1150369f0536a9e3f2a31dc48" dependencies = [ "proc-macro2", "quote", @@ -1514,9 +1429,9 @@ checksum = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8" [[package]] name = "smallvec" -version = "1.4.1" +version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3757cb9d89161a2f24e1cf78efa0c1fcff485d18e3f55e0aa3480824ddaa0f3f" +checksum = "fbee7696b84bbf3d89a1c2eccff0850e3047ed46bfcd2e92c29a2d074d57e252" [[package]] name = "smol_str" @@ -1527,9 +1442,23 @@ dependencies = [ "serde", ] +[[package]] +name = "ssr" +version = "0.0.0" +dependencies = [ + "base_db", + "expect", + "hir", + "ide_db", + "rustc-hash", + "syntax", + "test_utils", + "text_edit", +] + [[package]] name = "stdx" -version = "0.1.0" +version = "0.0.0" [[package]] name = "syn" @@ -1554,6 +1483,27 @@ dependencies = [ "unicode-xid", ] +[[package]] +name = "syntax" +version = "0.0.0" +dependencies = [ + "arrayvec", + "expect", + "itertools", + "once_cell", + "parser", + "rayon", + "rowan", + "rustc-ap-rustc_lexer", + "rustc-hash", + "serde", + "smol_str", + "stdx", + "test_utils", + "text_edit", + "walkdir", +] + [[package]] name = "termcolor" version = "1.1.0" @@ 
-1565,7 +1515,7 @@ dependencies = [ [[package]] name = "test_utils" -version = "0.1.0" +version = "0.0.0" dependencies = [ "difference", "rustc-hash", @@ -1580,6 +1530,13 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f03e7efdedc3bc78cb2337f1e2785c39e45f5ef762d9e4ebb137fff7380a6d8a" +[[package]] +name = "text_edit" +version = "0.0.0" +dependencies = [ + "text-size", +] + [[package]] name = "thin-dst" version = "1.1.0" @@ -1620,11 +1577,18 @@ version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "53953d2d3a5ad81d9f844a32f14ebb121f50b650cd59d0ee2a07cf13c617efed" +[[package]] +name = "toolchain" +version = "0.0.0" +dependencies = [ + "home", +] + [[package]] name = "tracing" -version = "0.1.18" +version = "0.1.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0aae59226cf195d8e74d4b34beae1859257efb4e5fed3f147d2dc2c7d372178" +checksum = "6d79ca061b032d6ce30c660fded31189ca0b9922bf483cd70759f13a2d86786c" dependencies = [ "cfg-if", "tracing-attributes", @@ -1633,9 +1597,9 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.9" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0693bf8d6f2bf22c690fc61a9d21ac69efdbb894a17ed596b9af0f01e64b84b" +checksum = "1fe233f4227389ab7df5b32649239da7ebe0b281824b4e84b342d04d3fd8c25e" dependencies = [ "proc-macro2", "quote", @@ -1644,9 +1608,9 @@ dependencies = [ [[package]] name = "tracing-core" -version = "0.1.13" +version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d593f98af59ebc017c0648f0117525db358745a8894a8d684e185ba3f45954f9" +checksum = "db63662723c316b43ca36d833707cc93dff82a02ba3d7e354f342682cc8b3545" dependencies = [ "lazy_static", ] @@ -1674,9 +1638,9 @@ dependencies = [ [[package]] name = "tracing-subscriber" -version = "0.2.10" +version = "0.2.11" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7b33f8b2ef2ab0c3778c12646d9c42a24f7772bee4cdafc72199644a9f58fdc" +checksum = "abd165311cc4d7a555ad11cc77a37756df836182db0d81aac908c8184c584f40" dependencies = [ "ansi_term", "chrono", @@ -1687,6 +1651,7 @@ dependencies = [ "serde_json", "sharded-slab", "smallvec", + "thread_local", "tracing-core", "tracing-log", "tracing-serde", @@ -1694,9 +1659,9 @@ dependencies = [ [[package]] name = "tracing-tree" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37ee7f0f53ed2093971a698db799ef56a2dfd89b32e3aeb5165f0e637a02be04" +checksum = "e1a3dc4774db3a6b2d66a4f8d8de670e874ec3ed55615860c994927419b32c5f" dependencies = [ "ansi_term", "atty", @@ -1706,6 +1671,14 @@ dependencies = [ "tracing-subscriber", ] +[[package]] +name = "tt" +version = "0.0.0" +dependencies = [ + "smol_str", + "stdx", +] + [[package]] name = "ungrammar" version = "1.1.1" @@ -1756,7 +1729,7 @@ dependencies = [ [[package]] name = "vfs" -version = "0.1.0" +version = "0.0.0" dependencies = [ "fst", "paths", @@ -1765,7 +1738,7 @@ dependencies = [ [[package]] name = "vfs-notify" -version = "0.1.0" +version = "0.0.0" dependencies = [ "crossbeam-channel", "jod-thread", diff --git a/README.md b/README.md index 16c980400c..264e4da707 100644 --- a/README.md +++ b/README.md @@ -39,7 +39,7 @@ https://rust-lang.zulipchat.com/#narrow/stream/185405-t-compiler.2Frls-2.2E0 * Website: https://rust-analyzer.github.io/ * Metrics: https://rust-analyzer.github.io/metrics/ -* API docs: https://rust-analyzer.github.io/rust-analyzer/ra_ide/ +* API docs: https://rust-analyzer.github.io/rust-analyzer/ide/ ## License diff --git a/bors.toml b/bors.toml index 13ce236df5..4c980a24d3 100644 --- a/bors.toml +++ b/bors.toml @@ -1,9 +1,8 @@ status = [ "Rust (ubuntu-latest)", "Rust (windows-latest)", - "Rust (macos-latest)", + # "Rust (macos-latest)", "TypeScript (ubuntu-latest)", "TypeScript 
(windows-latest)", - "TypeScript (macos-latest)", ] delete_merged_branches = true diff --git a/crates/arena/Cargo.toml b/crates/arena/Cargo.toml new file mode 100644 index 0000000000..f2bb5cc456 --- /dev/null +++ b/crates/arena/Cargo.toml @@ -0,0 +1,9 @@ +[package] +name = "arena" +version = "0.0.0" +license = "MIT OR Apache-2.0" +authors = ["rust-analyzer developers"] +edition = "2018" + +[lib] +doctest = false diff --git a/crates/ra_arena/src/lib.rs b/crates/arena/src/lib.rs similarity index 100% rename from crates/ra_arena/src/lib.rs rename to crates/arena/src/lib.rs diff --git a/crates/ra_arena/src/map.rs b/crates/arena/src/map.rs similarity index 100% rename from crates/ra_arena/src/map.rs rename to crates/arena/src/map.rs diff --git a/crates/assists/Cargo.toml b/crates/assists/Cargo.toml new file mode 100644 index 0000000000..a560a35c7e --- /dev/null +++ b/crates/assists/Cargo.toml @@ -0,0 +1,23 @@ +[package] +name = "assists" +version = "0.0.0" +license = "MIT OR Apache-2.0" +authors = ["rust-analyzer developers"] +edition = "2018" + +[lib] +doctest = false + +[dependencies] +rustc-hash = "1.1.0" +itertools = "0.9.0" +either = "1.5.3" + +stdx = { path = "../stdx" } +syntax = { path = "../syntax" } +text_edit = { path = "../text_edit" } +profile = { path = "../profile" } +base_db = { path = "../base_db" } +ide_db = { path = "../ide_db" } +hir = { path = "../hir" } +test_utils = { path = "../test_utils" } diff --git a/crates/ra_assists/src/assist_config.rs b/crates/assists/src/assist_config.rs similarity index 100% rename from crates/ra_assists/src/assist_config.rs rename to crates/assists/src/assist_config.rs diff --git a/crates/assists/src/assist_context.rs b/crates/assists/src/assist_context.rs new file mode 100644 index 0000000000..79574b9ac8 --- /dev/null +++ b/crates/assists/src/assist_context.rs @@ -0,0 +1,291 @@ +//! 
See `AssistContext` + +use std::mem; + +use algo::find_covering_element; +use base_db::{FileId, FileRange}; +use hir::Semantics; +use ide_db::{ + source_change::{SourceChange, SourceFileEdit}, + RootDatabase, +}; +use syntax::{ + algo::{self, find_node_at_offset, SyntaxRewriter}, + AstNode, SourceFile, SyntaxElement, SyntaxKind, SyntaxToken, TextRange, TextSize, + TokenAtOffset, +}; +use text_edit::{TextEdit, TextEditBuilder}; + +use crate::{ + assist_config::{AssistConfig, SnippetCap}, + Assist, AssistId, AssistKind, GroupLabel, ResolvedAssist, +}; + +/// `AssistContext` allows to apply an assist or check if it could be applied. +/// +/// Assists use a somewhat over-engineered approach, given the current needs. +/// The assists workflow consists of two phases. In the first phase, a user asks +/// for the list of available assists. In the second phase, the user picks a +/// particular assist and it gets applied. +/// +/// There are two peculiarities here: +/// +/// * first, we ideally avoid computing more things then necessary to answer "is +/// assist applicable" in the first phase. +/// * second, when we are applying assist, we don't have a guarantee that there +/// weren't any changes between the point when user asked for assists and when +/// they applied a particular assist. So, when applying assist, we need to do +/// all the checks from scratch. +/// +/// To avoid repeating the same code twice for both "check" and "apply" +/// functions, we use an approach reminiscent of that of Django's function based +/// views dealing with forms. Each assist receives a runtime parameter, +/// `resolve`. It first check if an edit is applicable (potentially computing +/// info required to compute the actual edit). If it is applicable, and +/// `resolve` is `true`, it then computes the actual edit. 
+/// +/// So, to implement the original assists workflow, we can first apply each edit +/// with `resolve = false`, and then applying the selected edit again, with +/// `resolve = true` this time. +/// +/// Note, however, that we don't actually use such two-phase logic at the +/// moment, because the LSP API is pretty awkward in this place, and it's much +/// easier to just compute the edit eagerly :-) +pub(crate) struct AssistContext<'a> { + pub(crate) config: &'a AssistConfig, + pub(crate) sema: Semantics<'a, RootDatabase>, + pub(crate) frange: FileRange, + source_file: SourceFile, +} + +impl<'a> AssistContext<'a> { + pub(crate) fn new( + sema: Semantics<'a, RootDatabase>, + config: &'a AssistConfig, + frange: FileRange, + ) -> AssistContext<'a> { + let source_file = sema.parse(frange.file_id); + AssistContext { config, sema, frange, source_file } + } + + pub(crate) fn db(&self) -> &RootDatabase { + self.sema.db + } + + pub(crate) fn source_file(&self) -> &SourceFile { + &self.source_file + } + + // NB, this ignores active selection. 
+ pub(crate) fn offset(&self) -> TextSize { + self.frange.range.start() + } + + pub(crate) fn token_at_offset(&self) -> TokenAtOffset { + self.source_file.syntax().token_at_offset(self.offset()) + } + pub(crate) fn find_token_at_offset(&self, kind: SyntaxKind) -> Option { + self.token_at_offset().find(|it| it.kind() == kind) + } + pub(crate) fn find_node_at_offset(&self) -> Option { + find_node_at_offset(self.source_file.syntax(), self.offset()) + } + pub(crate) fn find_node_at_offset_with_descend(&self) -> Option { + self.sema.find_node_at_offset_with_descend(self.source_file.syntax(), self.offset()) + } + pub(crate) fn covering_element(&self) -> SyntaxElement { + find_covering_element(self.source_file.syntax(), self.frange.range) + } + // FIXME: remove + pub(crate) fn covering_node_for_range(&self, range: TextRange) -> SyntaxElement { + find_covering_element(self.source_file.syntax(), range) + } +} + +pub(crate) struct Assists { + resolve: bool, + file: FileId, + buf: Vec<(Assist, Option)>, + allowed: Option>, +} + +impl Assists { + pub(crate) fn new_resolved(ctx: &AssistContext) -> Assists { + Assists { + resolve: true, + file: ctx.frange.file_id, + buf: Vec::new(), + allowed: ctx.config.allowed.clone(), + } + } + + pub(crate) fn new_unresolved(ctx: &AssistContext) -> Assists { + Assists { + resolve: false, + file: ctx.frange.file_id, + buf: Vec::new(), + allowed: ctx.config.allowed.clone(), + } + } + + pub(crate) fn finish_unresolved(self) -> Vec { + assert!(!self.resolve); + self.finish() + .into_iter() + .map(|(label, edit)| { + assert!(edit.is_none()); + label + }) + .collect() + } + + pub(crate) fn finish_resolved(self) -> Vec { + assert!(self.resolve); + self.finish() + .into_iter() + .map(|(label, edit)| ResolvedAssist { assist: label, source_change: edit.unwrap() }) + .collect() + } + + pub(crate) fn add( + &mut self, + id: AssistId, + label: impl Into, + target: TextRange, + f: impl FnOnce(&mut AssistBuilder), + ) -> Option<()> { + if 
!self.is_allowed(&id) { + return None; + } + let label = Assist::new(id, label.into(), None, target); + self.add_impl(label, f) + } + + pub(crate) fn add_group( + &mut self, + group: &GroupLabel, + id: AssistId, + label: impl Into, + target: TextRange, + f: impl FnOnce(&mut AssistBuilder), + ) -> Option<()> { + if !self.is_allowed(&id) { + return None; + } + + let label = Assist::new(id, label.into(), Some(group.clone()), target); + self.add_impl(label, f) + } + + fn add_impl(&mut self, label: Assist, f: impl FnOnce(&mut AssistBuilder)) -> Option<()> { + let source_change = if self.resolve { + let mut builder = AssistBuilder::new(self.file); + f(&mut builder); + Some(builder.finish()) + } else { + None + }; + + self.buf.push((label, source_change)); + Some(()) + } + + fn finish(mut self) -> Vec<(Assist, Option)> { + self.buf.sort_by_key(|(label, _edit)| label.target.len()); + self.buf + } + + fn is_allowed(&self, id: &AssistId) -> bool { + match &self.allowed { + Some(allowed) => allowed.iter().any(|kind| kind.contains(id.1)), + None => true, + } + } +} + +pub(crate) struct AssistBuilder { + edit: TextEditBuilder, + file_id: FileId, + is_snippet: bool, + change: SourceChange, +} + +impl AssistBuilder { + pub(crate) fn new(file_id: FileId) -> AssistBuilder { + AssistBuilder { + edit: TextEdit::builder(), + file_id, + is_snippet: false, + change: SourceChange::default(), + } + } + + pub(crate) fn edit_file(&mut self, file_id: FileId) { + self.file_id = file_id; + } + + fn commit(&mut self) { + let edit = mem::take(&mut self.edit).finish(); + if !edit.is_empty() { + let new_edit = SourceFileEdit { file_id: self.file_id, edit }; + assert!(!self.change.source_file_edits.iter().any(|it| it.file_id == new_edit.file_id)); + self.change.source_file_edits.push(new_edit); + } + } + + /// Remove specified `range` of text. 
+ pub(crate) fn delete(&mut self, range: TextRange) { + self.edit.delete(range) + } + /// Append specified `text` at the given `offset` + pub(crate) fn insert(&mut self, offset: TextSize, text: impl Into) { + self.edit.insert(offset, text.into()) + } + /// Append specified `snippet` at the given `offset` + pub(crate) fn insert_snippet( + &mut self, + _cap: SnippetCap, + offset: TextSize, + snippet: impl Into, + ) { + self.is_snippet = true; + self.insert(offset, snippet); + } + /// Replaces specified `range` of text with a given string. + pub(crate) fn replace(&mut self, range: TextRange, replace_with: impl Into) { + self.edit.replace(range, replace_with.into()) + } + /// Replaces specified `range` of text with a given `snippet`. + pub(crate) fn replace_snippet( + &mut self, + _cap: SnippetCap, + range: TextRange, + snippet: impl Into, + ) { + self.is_snippet = true; + self.replace(range, snippet); + } + pub(crate) fn replace_ast(&mut self, old: N, new: N) { + algo::diff(old.syntax(), new.syntax()).into_text_edit(&mut self.edit) + } + pub(crate) fn rewrite(&mut self, rewriter: SyntaxRewriter) { + let node = rewriter.rewrite_root().unwrap(); + let new = rewriter.rewrite(&node); + algo::diff(&node, &new).into_text_edit(&mut self.edit); + } + + // FIXME: kill this API + /// Get access to the raw `TextEditBuilder`. + pub(crate) fn text_edit_builder(&mut self) -> &mut TextEditBuilder { + &mut self.edit + } + + fn finish(mut self) -> SourceChange { + self.commit(); + let mut change = mem::take(&mut self.change); + if self.is_snippet { + change.is_snippet = true; + } + change + } +} diff --git a/crates/assists/src/ast_transform.rs b/crates/assists/src/ast_transform.rs new file mode 100644 index 0000000000..4c41c16d86 --- /dev/null +++ b/crates/assists/src/ast_transform.rs @@ -0,0 +1,206 @@ +//! `AstTransformer`s are functions that replace nodes in an AST and can be easily combined. 
+use rustc_hash::FxHashMap; + +use hir::{HirDisplay, PathResolution, SemanticsScope}; +use syntax::{ + algo::SyntaxRewriter, + ast::{self, AstNode}, +}; + +pub trait AstTransform<'a> { + fn get_substitution(&self, node: &syntax::SyntaxNode) -> Option; + + fn chain_before(self, other: Box + 'a>) -> Box + 'a>; + fn or + 'a>(self, other: T) -> Box + 'a> + where + Self: Sized + 'a, + { + self.chain_before(Box::new(other)) + } +} + +struct NullTransformer; + +impl<'a> AstTransform<'a> for NullTransformer { + fn get_substitution(&self, _node: &syntax::SyntaxNode) -> Option { + None + } + fn chain_before(self, other: Box + 'a>) -> Box + 'a> { + other + } +} + +pub struct SubstituteTypeParams<'a> { + source_scope: &'a SemanticsScope<'a>, + substs: FxHashMap, + previous: Box + 'a>, +} + +impl<'a> SubstituteTypeParams<'a> { + pub fn for_trait_impl( + source_scope: &'a SemanticsScope<'a>, + // FIXME: there's implicit invariant that `trait_` and `source_scope` match... + trait_: hir::Trait, + impl_def: ast::Impl, + ) -> SubstituteTypeParams<'a> { + let substs = get_syntactic_substs(impl_def).unwrap_or_default(); + let generic_def: hir::GenericDef = trait_.into(); + let substs_by_param: FxHashMap<_, _> = generic_def + .params(source_scope.db) + .into_iter() + // this is a trait impl, so we need to skip the first type parameter -- this is a bit hacky + .skip(1) + // The actual list of trait type parameters may be longer than the one + // used in the `impl` block due to trailing default type parameters. + // For that case we extend the `substs` with an empty iterator so we + // can still hit those trailing values and check if they actually have + // a default type. If they do, go for that type from `hir` to `ast` so + // the resulting change can be applied correctly. 
+ .zip(substs.into_iter().map(Some).chain(std::iter::repeat(None))) + .filter_map(|(k, v)| match v { + Some(v) => Some((k, v)), + None => { + let default = k.default(source_scope.db)?; + Some(( + k, + ast::make::ty( + &default + .display_source_code(source_scope.db, source_scope.module()?.into()) + .ok()?, + ), + )) + } + }) + .collect(); + return SubstituteTypeParams { + source_scope, + substs: substs_by_param, + previous: Box::new(NullTransformer), + }; + + // FIXME: It would probably be nicer if we could get this via HIR (i.e. get the + // trait ref, and then go from the types in the substs back to the syntax). + fn get_syntactic_substs(impl_def: ast::Impl) -> Option> { + let target_trait = impl_def.trait_()?; + let path_type = match target_trait { + ast::Type::PathType(path) => path, + _ => return None, + }; + let generic_arg_list = path_type.path()?.segment()?.generic_arg_list()?; + + let mut result = Vec::new(); + for generic_arg in generic_arg_list.generic_args() { + match generic_arg { + ast::GenericArg::TypeArg(type_arg) => result.push(type_arg.ty()?), + ast::GenericArg::AssocTypeArg(_) + | ast::GenericArg::LifetimeArg(_) + | ast::GenericArg::ConstArg(_) => (), + } + } + + Some(result) + } + } + fn get_substitution_inner(&self, node: &syntax::SyntaxNode) -> Option { + let type_ref = ast::Type::cast(node.clone())?; + let path = match &type_ref { + ast::Type::PathType(path_type) => path_type.path()?, + _ => return None, + }; + // FIXME: use `hir::Path::from_src` instead. 
+ #[allow(deprecated)] + let path = hir::Path::from_ast(path)?; + let resolution = self.source_scope.resolve_hir_path(&path)?; + match resolution { + hir::PathResolution::TypeParam(tp) => Some(self.substs.get(&tp)?.syntax().clone()), + _ => None, + } + } +} + +impl<'a> AstTransform<'a> for SubstituteTypeParams<'a> { + fn get_substitution(&self, node: &syntax::SyntaxNode) -> Option { + self.get_substitution_inner(node).or_else(|| self.previous.get_substitution(node)) + } + fn chain_before(self, other: Box + 'a>) -> Box + 'a> { + Box::new(SubstituteTypeParams { previous: other, ..self }) + } +} + +pub struct QualifyPaths<'a> { + target_scope: &'a SemanticsScope<'a>, + source_scope: &'a SemanticsScope<'a>, + previous: Box + 'a>, +} + +impl<'a> QualifyPaths<'a> { + pub fn new(target_scope: &'a SemanticsScope<'a>, source_scope: &'a SemanticsScope<'a>) -> Self { + Self { target_scope, source_scope, previous: Box::new(NullTransformer) } + } + + fn get_substitution_inner(&self, node: &syntax::SyntaxNode) -> Option { + // FIXME handle value ns? + let from = self.target_scope.module()?; + let p = ast::Path::cast(node.clone())?; + if p.segment().and_then(|s| s.param_list()).is_some() { + // don't try to qualify `Fn(Foo) -> Bar` paths, they are in prelude anyway + return None; + } + // FIXME: use `hir::Path::from_src` instead. 
+ #[allow(deprecated)] + let hir_path = hir::Path::from_ast(p.clone()); + let resolution = self.source_scope.resolve_hir_path(&hir_path?)?; + match resolution { + PathResolution::Def(def) => { + let found_path = from.find_use_path(self.source_scope.db.upcast(), def)?; + let mut path = path_to_ast(found_path); + + let type_args = p + .segment() + .and_then(|s| s.generic_arg_list()) + .map(|arg_list| apply(self, arg_list)); + if let Some(type_args) = type_args { + let last_segment = path.segment().unwrap(); + path = path.with_segment(last_segment.with_type_args(type_args)) + } + + Some(path.syntax().clone()) + } + PathResolution::Local(_) + | PathResolution::TypeParam(_) + | PathResolution::SelfType(_) => None, + PathResolution::Macro(_) => None, + PathResolution::AssocItem(_) => None, + } + } +} + +pub fn apply<'a, N: AstNode>(transformer: &dyn AstTransform<'a>, node: N) -> N { + SyntaxRewriter::from_fn(|element| match element { + syntax::SyntaxElement::Node(n) => { + let replacement = transformer.get_substitution(&n)?; + Some(replacement.into()) + } + _ => None, + }) + .rewrite_ast(&node) +} + +impl<'a> AstTransform<'a> for QualifyPaths<'a> { + fn get_substitution(&self, node: &syntax::SyntaxNode) -> Option { + self.get_substitution_inner(node).or_else(|| self.previous.get_substitution(node)) + } + fn chain_before(self, other: Box + 'a>) -> Box + 'a> { + Box::new(QualifyPaths { previous: other, ..self }) + } +} + +pub(crate) fn path_to_ast(path: hir::ModPath) -> ast::Path { + let parse = ast::SourceFile::parse(&path.to_string()); + parse + .tree() + .syntax() + .descendants() + .find_map(ast::Path::cast) + .unwrap_or_else(|| panic!("failed to parse path {:?}, `{}`", path, path)) +} diff --git a/crates/assists/src/handlers/add_custom_impl.rs b/crates/assists/src/handlers/add_custom_impl.rs new file mode 100644 index 0000000000..8757fa33f2 --- /dev/null +++ b/crates/assists/src/handlers/add_custom_impl.rs @@ -0,0 +1,208 @@ +use itertools::Itertools; +use syntax::{ + 
ast::{self, AstNode}, + Direction, SmolStr, + SyntaxKind::{IDENT, WHITESPACE}, + TextRange, TextSize, +}; + +use crate::{ + assist_context::{AssistContext, Assists}, + AssistId, AssistKind, +}; + +// Assist: add_custom_impl +// +// Adds impl block for derived trait. +// +// ``` +// #[derive(Deb<|>ug, Display)] +// struct S; +// ``` +// -> +// ``` +// #[derive(Display)] +// struct S; +// +// impl Debug for S { +// $0 +// } +// ``` +pub(crate) fn add_custom_impl(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + let attr = ctx.find_node_at_offset::()?; + let input = attr.token_tree()?; + + let attr_name = attr + .syntax() + .descendants_with_tokens() + .filter(|t| t.kind() == IDENT) + .find_map(|i| i.into_token()) + .filter(|t| *t.text() == "derive")? + .text() + .clone(); + + let trait_token = + ctx.token_at_offset().find(|t| t.kind() == IDENT && *t.text() != attr_name)?; + + let annotated = attr.syntax().siblings(Direction::Next).find_map(ast::Name::cast)?; + let annotated_name = annotated.syntax().text().to_string(); + let start_offset = annotated.syntax().parent()?.text_range().end(); + + let label = + format!("Add custom impl `{}` for `{}`", trait_token.text().as_str(), annotated_name); + + let target = attr.syntax().text_range(); + acc.add(AssistId("add_custom_impl", AssistKind::Refactor), label, target, |builder| { + let new_attr_input = input + .syntax() + .descendants_with_tokens() + .filter(|t| t.kind() == IDENT) + .filter_map(|t| t.into_token().map(|t| t.text().clone())) + .filter(|t| t != trait_token.text()) + .collect::>(); + let has_more_derives = !new_attr_input.is_empty(); + + if has_more_derives { + let new_attr_input = format!("({})", new_attr_input.iter().format(", ")); + builder.replace(input.syntax().text_range(), new_attr_input); + } else { + let attr_range = attr.syntax().text_range(); + builder.delete(attr_range); + + let line_break_range = attr + .syntax() + .next_sibling_or_token() + .filter(|t| t.kind() == WHITESPACE) + .map(|t| 
t.text_range()) + .unwrap_or_else(|| TextRange::new(TextSize::from(0), TextSize::from(0))); + builder.delete(line_break_range); + } + + match ctx.config.snippet_cap { + Some(cap) => { + builder.insert_snippet( + cap, + start_offset, + format!("\n\nimpl {} for {} {{\n $0\n}}", trait_token, annotated_name), + ); + } + None => { + builder.insert( + start_offset, + format!("\n\nimpl {} for {} {{\n\n}}", trait_token, annotated_name), + ); + } + } + }) +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_not_applicable}; + + use super::*; + + #[test] + fn add_custom_impl_for_unique_input() { + check_assist( + add_custom_impl, + " +#[derive(Debu<|>g)] +struct Foo { + bar: String, +} + ", + " +struct Foo { + bar: String, +} + +impl Debug for Foo { + $0 +} + ", + ) + } + + #[test] + fn add_custom_impl_for_with_visibility_modifier() { + check_assist( + add_custom_impl, + " +#[derive(Debug<|>)] +pub struct Foo { + bar: String, +} + ", + " +pub struct Foo { + bar: String, +} + +impl Debug for Foo { + $0 +} + ", + ) + } + + #[test] + fn add_custom_impl_when_multiple_inputs() { + check_assist( + add_custom_impl, + " +#[derive(Display, Debug<|>, Serialize)] +struct Foo {} + ", + " +#[derive(Display, Serialize)] +struct Foo {} + +impl Debug for Foo { + $0 +} + ", + ) + } + + #[test] + fn test_ignore_derive_macro_without_input() { + check_assist_not_applicable( + add_custom_impl, + " +#[derive(<|>)] +struct Foo {} + ", + ) + } + + #[test] + fn test_ignore_if_cursor_on_param() { + check_assist_not_applicable( + add_custom_impl, + " +#[derive<|>(Debug)] +struct Foo {} + ", + ); + + check_assist_not_applicable( + add_custom_impl, + " +#[derive(Debug)<|>] +struct Foo {} + ", + ) + } + + #[test] + fn test_ignore_if_not_derive() { + check_assist_not_applicable( + add_custom_impl, + " +#[allow(non_camel_<|>case_types)] +struct Foo {} + ", + ) + } +} diff --git a/crates/assists/src/handlers/add_explicit_type.rs b/crates/assists/src/handlers/add_explicit_type.rs 
new file mode 100644 index 0000000000..563cbf505f --- /dev/null +++ b/crates/assists/src/handlers/add_explicit_type.rs @@ -0,0 +1,211 @@ +use hir::HirDisplay; +use syntax::{ + ast::{self, AstNode, LetStmt, NameOwner}, + TextRange, +}; + +use crate::{AssistContext, AssistId, AssistKind, Assists}; + +// Assist: add_explicit_type +// +// Specify type for a let binding. +// +// ``` +// fn main() { +// let x<|> = 92; +// } +// ``` +// -> +// ``` +// fn main() { +// let x: i32 = 92; +// } +// ``` +pub(crate) fn add_explicit_type(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + let let_stmt = ctx.find_node_at_offset::()?; + let module = ctx.sema.scope(let_stmt.syntax()).module()?; + let expr = let_stmt.initializer()?; + // Must be a binding + let pat = match let_stmt.pat()? { + ast::Pat::IdentPat(bind_pat) => bind_pat, + _ => return None, + }; + let pat_range = pat.syntax().text_range(); + // The binding must have a name + let name = pat.name()?; + let name_range = name.syntax().text_range(); + let stmt_range = let_stmt.syntax().text_range(); + let eq_range = let_stmt.eq_token()?.text_range(); + // Assist should only be applicable if cursor is between 'let' and '=' + let let_range = TextRange::new(stmt_range.start(), eq_range.start()); + let cursor_in_range = let_range.contains_range(ctx.frange.range); + if !cursor_in_range { + return None; + } + // Assist not applicable if the type has already been specified + // and it has no placeholders + let ascribed_ty = let_stmt.ty(); + if let Some(ty) = &ascribed_ty { + if ty.syntax().descendants().find_map(ast::InferType::cast).is_none() { + return None; + } + } + // Infer type + let ty = ctx.sema.type_of_expr(&expr)?; + + if ty.contains_unknown() || ty.is_closure() { + return None; + } + + let inferred_type = ty.display_source_code(ctx.db(), module.into()).ok()?; + acc.add( + AssistId("add_explicit_type", AssistKind::RefactorRewrite), + format!("Insert explicit type `{}`", inferred_type), + pat_range, + |builder| match 
ascribed_ty { + Some(ascribed_ty) => { + builder.replace(ascribed_ty.syntax().text_range(), inferred_type); + } + None => { + builder.insert(name_range.end(), format!(": {}", inferred_type)); + } + }, + ) +} + +#[cfg(test)] +mod tests { + use super::*; + + use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target}; + + #[test] + fn add_explicit_type_target() { + check_assist_target(add_explicit_type, "fn f() { let a<|> = 1; }", "a"); + } + + #[test] + fn add_explicit_type_works_for_simple_expr() { + check_assist(add_explicit_type, "fn f() { let a<|> = 1; }", "fn f() { let a: i32 = 1; }"); + } + + #[test] + fn add_explicit_type_works_for_underscore() { + check_assist( + add_explicit_type, + "fn f() { let a<|>: _ = 1; }", + "fn f() { let a: i32 = 1; }", + ); + } + + #[test] + fn add_explicit_type_works_for_nested_underscore() { + check_assist( + add_explicit_type, + r#" + enum Option { + Some(T), + None + } + + fn f() { + let a<|>: Option<_> = Option::Some(1); + }"#, + r#" + enum Option { + Some(T), + None + } + + fn f() { + let a: Option = Option::Some(1); + }"#, + ); + } + + #[test] + fn add_explicit_type_works_for_macro_call() { + check_assist( + add_explicit_type, + r"macro_rules! v { () => {0u64} } fn f() { let a<|> = v!(); }", + r"macro_rules! v { () => {0u64} } fn f() { let a: u64 = v!(); }", + ); + } + + #[test] + fn add_explicit_type_works_for_macro_call_recursive() { + check_assist( + add_explicit_type, + r#"macro_rules! u { () => {0u64} } macro_rules! v { () => {u!()} } fn f() { let a<|> = v!(); }"#, + r#"macro_rules! u { () => {0u64} } macro_rules! 
v { () => {u!()} } fn f() { let a: u64 = v!(); }"#, + ); + } + + #[test] + fn add_explicit_type_not_applicable_if_ty_not_inferred() { + check_assist_not_applicable(add_explicit_type, "fn f() { let a<|> = None; }"); + } + + #[test] + fn add_explicit_type_not_applicable_if_ty_already_specified() { + check_assist_not_applicable(add_explicit_type, "fn f() { let a<|>: i32 = 1; }"); + } + + #[test] + fn add_explicit_type_not_applicable_if_specified_ty_is_tuple() { + check_assist_not_applicable(add_explicit_type, "fn f() { let a<|>: (i32, i32) = (3, 4); }"); + } + + #[test] + fn add_explicit_type_not_applicable_if_cursor_after_equals() { + check_assist_not_applicable( + add_explicit_type, + "fn f() {let a =<|> match 1 {2 => 3, 3 => 5};}", + ) + } + + #[test] + fn add_explicit_type_not_applicable_if_cursor_before_let() { + check_assist_not_applicable( + add_explicit_type, + "fn f() <|>{let a = match 1 {2 => 3, 3 => 5};}", + ) + } + + #[test] + fn closure_parameters_are_not_added() { + check_assist_not_applicable( + add_explicit_type, + r#" +fn main() { + let multiply_by_two<|> = |i| i * 3; + let six = multiply_by_two(2); +}"#, + ) + } + + #[test] + fn default_generics_should_not_be_added() { + check_assist( + add_explicit_type, + r#" +struct Test { + k: K, + t: T, +} + +fn main() { + let test<|> = Test { t: 23u8, k: 33 }; +}"#, + r#" +struct Test { + k: K, + t: T, +} + +fn main() { + let test: Test = Test { t: 23u8, k: 33 }; +}"#, + ); + } +} diff --git a/crates/assists/src/handlers/add_missing_impl_members.rs b/crates/assists/src/handlers/add_missing_impl_members.rs new file mode 100644 index 0000000000..81b61ebf8e --- /dev/null +++ b/crates/assists/src/handlers/add_missing_impl_members.rs @@ -0,0 +1,711 @@ +use hir::HasSource; +use syntax::{ + ast::{ + self, + edit::{self, AstNodeEdit, IndentLevel}, + make, AstNode, NameOwner, + }, + SmolStr, +}; + +use crate::{ + assist_context::{AssistContext, Assists}, + ast_transform::{self, AstTransform, QualifyPaths, 
SubstituteTypeParams}, + utils::{get_missing_assoc_items, render_snippet, resolve_target_trait, Cursor}, + AssistId, AssistKind, +}; + +#[derive(PartialEq)] +enum AddMissingImplMembersMode { + DefaultMethodsOnly, + NoDefaultMethods, +} + +// Assist: add_impl_missing_members +// +// Adds scaffold for required impl members. +// +// ``` +// trait Trait { +// Type X; +// fn foo(&self) -> T; +// fn bar(&self) {} +// } +// +// impl Trait for () {<|> +// +// } +// ``` +// -> +// ``` +// trait Trait { +// Type X; +// fn foo(&self) -> T; +// fn bar(&self) {} +// } +// +// impl Trait for () { +// fn foo(&self) -> u32 { +// ${0:todo!()} +// } +// +// } +// ``` +pub(crate) fn add_missing_impl_members(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + add_missing_impl_members_inner( + acc, + ctx, + AddMissingImplMembersMode::NoDefaultMethods, + "add_impl_missing_members", + "Implement missing members", + ) +} + +// Assist: add_impl_default_members +// +// Adds scaffold for overriding default impl members. 
+// +// ``` +// trait Trait { +// Type X; +// fn foo(&self); +// fn bar(&self) {} +// } +// +// impl Trait for () { +// Type X = (); +// fn foo(&self) {}<|> +// +// } +// ``` +// -> +// ``` +// trait Trait { +// Type X; +// fn foo(&self); +// fn bar(&self) {} +// } +// +// impl Trait for () { +// Type X = (); +// fn foo(&self) {} +// $0fn bar(&self) {} +// +// } +// ``` +pub(crate) fn add_missing_default_members(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + add_missing_impl_members_inner( + acc, + ctx, + AddMissingImplMembersMode::DefaultMethodsOnly, + "add_impl_default_members", + "Implement default members", + ) +} + +fn add_missing_impl_members_inner( + acc: &mut Assists, + ctx: &AssistContext, + mode: AddMissingImplMembersMode, + assist_id: &'static str, + label: &'static str, +) -> Option<()> { + let _p = profile::span("add_missing_impl_members_inner"); + let impl_def = ctx.find_node_at_offset::()?; + let impl_item_list = impl_def.assoc_item_list()?; + + let trait_ = resolve_target_trait(&ctx.sema, &impl_def)?; + + let def_name = |item: &ast::AssocItem| -> Option { + match item { + ast::AssocItem::Fn(def) => def.name(), + ast::AssocItem::TypeAlias(def) => def.name(), + ast::AssocItem::Const(def) => def.name(), + ast::AssocItem::MacroCall(_) => None, + } + .map(|it| it.text().clone()) + }; + + let missing_items = get_missing_assoc_items(&ctx.sema, &impl_def) + .iter() + .map(|i| match i { + hir::AssocItem::Function(i) => ast::AssocItem::Fn(i.source(ctx.db()).value), + hir::AssocItem::TypeAlias(i) => ast::AssocItem::TypeAlias(i.source(ctx.db()).value), + hir::AssocItem::Const(i) => ast::AssocItem::Const(i.source(ctx.db()).value), + }) + .filter(|t| def_name(&t).is_some()) + .filter(|t| match t { + ast::AssocItem::Fn(def) => match mode { + AddMissingImplMembersMode::DefaultMethodsOnly => def.body().is_some(), + AddMissingImplMembersMode::NoDefaultMethods => def.body().is_none(), + }, + _ => mode == AddMissingImplMembersMode::NoDefaultMethods, + }) + 
.collect::>(); + + if missing_items.is_empty() { + return None; + } + + let target = impl_def.syntax().text_range(); + acc.add(AssistId(assist_id, AssistKind::QuickFix), label, target, |builder| { + let n_existing_items = impl_item_list.assoc_items().count(); + let source_scope = ctx.sema.scope_for_def(trait_); + let target_scope = ctx.sema.scope(impl_item_list.syntax()); + let ast_transform = QualifyPaths::new(&target_scope, &source_scope) + .or(SubstituteTypeParams::for_trait_impl(&source_scope, trait_, impl_def)); + let items = missing_items + .into_iter() + .map(|it| ast_transform::apply(&*ast_transform, it)) + .map(|it| match it { + ast::AssocItem::Fn(def) => ast::AssocItem::Fn(add_body(def)), + ast::AssocItem::TypeAlias(def) => ast::AssocItem::TypeAlias(def.remove_bounds()), + _ => it, + }) + .map(|it| edit::remove_attrs_and_docs(&it)); + let new_impl_item_list = impl_item_list.append_items(items); + let first_new_item = new_impl_item_list.assoc_items().nth(n_existing_items).unwrap(); + + let original_range = impl_item_list.syntax().text_range(); + match ctx.config.snippet_cap { + None => builder.replace(original_range, new_impl_item_list.to_string()), + Some(cap) => { + let mut cursor = Cursor::Before(first_new_item.syntax()); + let placeholder; + if let ast::AssocItem::Fn(func) = &first_new_item { + if let Some(m) = func.syntax().descendants().find_map(ast::MacroCall::cast) { + if m.syntax().text() == "todo!()" { + placeholder = m; + cursor = Cursor::Replace(placeholder.syntax()); + } + } + } + builder.replace_snippet( + cap, + original_range, + render_snippet(cap, new_impl_item_list.syntax(), cursor), + ) + } + }; + }) +} + +fn add_body(fn_def: ast::Fn) -> ast::Fn { + if fn_def.body().is_some() { + return fn_def; + } + let body = make::block_expr(None, Some(make::expr_todo())).indent(IndentLevel(1)); + fn_def.with_body(body) +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_not_applicable}; + + use super::*; + + #[test] + fn 
test_add_missing_impl_members() { + check_assist( + add_missing_impl_members, + r#" +trait Foo { + type Output; + + const CONST: usize = 42; + + fn foo(&self); + fn bar(&self); + fn baz(&self); +} + +struct S; + +impl Foo for S { + fn bar(&self) {} +<|> +}"#, + r#" +trait Foo { + type Output; + + const CONST: usize = 42; + + fn foo(&self); + fn bar(&self); + fn baz(&self); +} + +struct S; + +impl Foo for S { + fn bar(&self) {} + $0type Output; + const CONST: usize = 42; + fn foo(&self) { + todo!() + } + fn baz(&self) { + todo!() + } + +}"#, + ); + } + + #[test] + fn test_copied_overriden_members() { + check_assist( + add_missing_impl_members, + r#" +trait Foo { + fn foo(&self); + fn bar(&self) -> bool { true } + fn baz(&self) -> u32 { 42 } +} + +struct S; + +impl Foo for S { + fn bar(&self) {} +<|> +}"#, + r#" +trait Foo { + fn foo(&self); + fn bar(&self) -> bool { true } + fn baz(&self) -> u32 { 42 } +} + +struct S; + +impl Foo for S { + fn bar(&self) {} + fn foo(&self) { + ${0:todo!()} + } + +}"#, + ); + } + + #[test] + fn test_empty_impl_def() { + check_assist( + add_missing_impl_members, + r#" +trait Foo { fn foo(&self); } +struct S; +impl Foo for S { <|> }"#, + r#" +trait Foo { fn foo(&self); } +struct S; +impl Foo for S { + fn foo(&self) { + ${0:todo!()} + } +}"#, + ); + } + + #[test] + fn fill_in_type_params_1() { + check_assist( + add_missing_impl_members, + r#" +trait Foo { fn foo(&self, t: T) -> &T; } +struct S; +impl Foo for S { <|> }"#, + r#" +trait Foo { fn foo(&self, t: T) -> &T; } +struct S; +impl Foo for S { + fn foo(&self, t: u32) -> &u32 { + ${0:todo!()} + } +}"#, + ); + } + + #[test] + fn fill_in_type_params_2() { + check_assist( + add_missing_impl_members, + r#" +trait Foo { fn foo(&self, t: T) -> &T; } +struct S; +impl Foo for S { <|> }"#, + r#" +trait Foo { fn foo(&self, t: T) -> &T; } +struct S; +impl Foo for S { + fn foo(&self, t: U) -> &U { + ${0:todo!()} + } +}"#, + ); + } + + #[test] + fn test_cursor_after_empty_impl_def() { + 
check_assist( + add_missing_impl_members, + r#" +trait Foo { fn foo(&self); } +struct S; +impl Foo for S {}<|>"#, + r#" +trait Foo { fn foo(&self); } +struct S; +impl Foo for S { + fn foo(&self) { + ${0:todo!()} + } +}"#, + ) + } + + #[test] + fn test_qualify_path_1() { + check_assist( + add_missing_impl_members, + r#" +mod foo { + pub struct Bar; + trait Foo { fn foo(&self, bar: Bar); } +} +struct S; +impl foo::Foo for S { <|> }"#, + r#" +mod foo { + pub struct Bar; + trait Foo { fn foo(&self, bar: Bar); } +} +struct S; +impl foo::Foo for S { + fn foo(&self, bar: foo::Bar) { + ${0:todo!()} + } +}"#, + ); + } + + #[test] + fn test_qualify_path_generic() { + check_assist( + add_missing_impl_members, + r#" +mod foo { + pub struct Bar; + trait Foo { fn foo(&self, bar: Bar); } +} +struct S; +impl foo::Foo for S { <|> }"#, + r#" +mod foo { + pub struct Bar; + trait Foo { fn foo(&self, bar: Bar); } +} +struct S; +impl foo::Foo for S { + fn foo(&self, bar: foo::Bar) { + ${0:todo!()} + } +}"#, + ); + } + + #[test] + fn test_qualify_path_and_substitute_param() { + check_assist( + add_missing_impl_members, + r#" +mod foo { + pub struct Bar; + trait Foo { fn foo(&self, bar: Bar); } +} +struct S; +impl foo::Foo for S { <|> }"#, + r#" +mod foo { + pub struct Bar; + trait Foo { fn foo(&self, bar: Bar); } +} +struct S; +impl foo::Foo for S { + fn foo(&self, bar: foo::Bar) { + ${0:todo!()} + } +}"#, + ); + } + + #[test] + fn test_substitute_param_no_qualify() { + // when substituting params, the substituted param should not be qualified! 
+ check_assist( + add_missing_impl_members, + r#" +mod foo { + trait Foo { fn foo(&self, bar: T); } + pub struct Param; +} +struct Param; +struct S; +impl foo::Foo for S { <|> }"#, + r#" +mod foo { + trait Foo { fn foo(&self, bar: T); } + pub struct Param; +} +struct Param; +struct S; +impl foo::Foo for S { + fn foo(&self, bar: Param) { + ${0:todo!()} + } +}"#, + ); + } + + #[test] + fn test_qualify_path_associated_item() { + check_assist( + add_missing_impl_members, + r#" +mod foo { + pub struct Bar; + impl Bar { type Assoc = u32; } + trait Foo { fn foo(&self, bar: Bar::Assoc); } +} +struct S; +impl foo::Foo for S { <|> }"#, + r#" +mod foo { + pub struct Bar; + impl Bar { type Assoc = u32; } + trait Foo { fn foo(&self, bar: Bar::Assoc); } +} +struct S; +impl foo::Foo for S { + fn foo(&self, bar: foo::Bar::Assoc) { + ${0:todo!()} + } +}"#, + ); + } + + #[test] + fn test_qualify_path_nested() { + check_assist( + add_missing_impl_members, + r#" +mod foo { + pub struct Bar; + pub struct Baz; + trait Foo { fn foo(&self, bar: Bar); } +} +struct S; +impl foo::Foo for S { <|> }"#, + r#" +mod foo { + pub struct Bar; + pub struct Baz; + trait Foo { fn foo(&self, bar: Bar); } +} +struct S; +impl foo::Foo for S { + fn foo(&self, bar: foo::Bar) { + ${0:todo!()} + } +}"#, + ); + } + + #[test] + fn test_qualify_path_fn_trait_notation() { + check_assist( + add_missing_impl_members, + r#" +mod foo { + pub trait Fn { type Output; } + trait Foo { fn foo(&self, bar: dyn Fn(u32) -> i32); } +} +struct S; +impl foo::Foo for S { <|> }"#, + r#" +mod foo { + pub trait Fn { type Output; } + trait Foo { fn foo(&self, bar: dyn Fn(u32) -> i32); } +} +struct S; +impl foo::Foo for S { + fn foo(&self, bar: dyn Fn(u32) -> i32) { + ${0:todo!()} + } +}"#, + ); + } + + #[test] + fn test_empty_trait() { + check_assist_not_applicable( + add_missing_impl_members, + r#" +trait Foo; +struct S; +impl Foo for S { <|> }"#, + ) + } + + #[test] + fn test_ignore_unnamed_trait_members_and_default_methods() { + 
check_assist_not_applicable( + add_missing_impl_members, + r#" +trait Foo { + fn (arg: u32); + fn valid(some: u32) -> bool { false } +} +struct S; +impl Foo for S { <|> }"#, + ) + } + + #[test] + fn test_with_docstring_and_attrs() { + check_assist( + add_missing_impl_members, + r#" +#[doc(alias = "test alias")] +trait Foo { + /// doc string + type Output; + + #[must_use] + fn foo(&self); +} +struct S; +impl Foo for S {}<|>"#, + r#" +#[doc(alias = "test alias")] +trait Foo { + /// doc string + type Output; + + #[must_use] + fn foo(&self); +} +struct S; +impl Foo for S { + $0type Output; + fn foo(&self) { + todo!() + } +}"#, + ) + } + + #[test] + fn test_default_methods() { + check_assist( + add_missing_default_members, + r#" +trait Foo { + type Output; + + const CONST: usize = 42; + + fn valid(some: u32) -> bool { false } + fn foo(some: u32) -> bool; +} +struct S; +impl Foo for S { <|> }"#, + r#" +trait Foo { + type Output; + + const CONST: usize = 42; + + fn valid(some: u32) -> bool { false } + fn foo(some: u32) -> bool; +} +struct S; +impl Foo for S { + $0fn valid(some: u32) -> bool { false } +}"#, + ) + } + + #[test] + fn test_generic_single_default_parameter() { + check_assist( + add_missing_impl_members, + r#" +trait Foo { + fn bar(&self, other: &T); +} + +struct S; +impl Foo for S { <|> }"#, + r#" +trait Foo { + fn bar(&self, other: &T); +} + +struct S; +impl Foo for S { + fn bar(&self, other: &Self) { + ${0:todo!()} + } +}"#, + ) + } + + #[test] + fn test_generic_default_parameter_is_second() { + check_assist( + add_missing_impl_members, + r#" +trait Foo { + fn bar(&self, this: &T1, that: &T2); +} + +struct S; +impl Foo for S { <|> }"#, + r#" +trait Foo { + fn bar(&self, this: &T1, that: &T2); +} + +struct S; +impl Foo for S { + fn bar(&self, this: &T, that: &Self) { + ${0:todo!()} + } +}"#, + ) + } + + #[test] + fn test_assoc_type_bounds_are_removed() { + check_assist( + add_missing_impl_members, + r#" +trait Tr { + type Ty: Copy + 'static; +} + +impl Tr for 
()<|> { +}"#, + r#" +trait Tr { + type Ty: Copy + 'static; +} + +impl Tr for () { + $0type Ty; +}"#, + ) + } +} diff --git a/crates/assists/src/handlers/add_turbo_fish.rs b/crates/assists/src/handlers/add_turbo_fish.rs new file mode 100644 index 0000000000..f4f997d8e1 --- /dev/null +++ b/crates/assists/src/handlers/add_turbo_fish.rs @@ -0,0 +1,164 @@ +use ide_db::defs::{classify_name_ref, Definition, NameRefClass}; +use syntax::{ast, AstNode, SyntaxKind, T}; +use test_utils::mark; + +use crate::{ + assist_context::{AssistContext, Assists}, + AssistId, AssistKind, +}; + +// Assist: add_turbo_fish +// +// Adds `::<_>` to a call of a generic method or function. +// +// ``` +// fn make() -> T { todo!() } +// fn main() { +// let x = make<|>(); +// } +// ``` +// -> +// ``` +// fn make() -> T { todo!() } +// fn main() { +// let x = make::<${0:_}>(); +// } +// ``` +pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + let ident = ctx.find_token_at_offset(SyntaxKind::IDENT).or_else(|| { + let arg_list = ctx.find_node_at_offset::()?; + if arg_list.args().count() > 0 { + return None; + } + mark::hit!(add_turbo_fish_after_call); + arg_list.l_paren_token()?.prev_token().filter(|it| it.kind() == SyntaxKind::IDENT) + })?; + let next_token = ident.next_token()?; + if next_token.kind() == T![::] { + mark::hit!(add_turbo_fish_one_fish_is_enough); + return None; + } + let name_ref = ast::NameRef::cast(ident.parent())?; + let def = match classify_name_ref(&ctx.sema, &name_ref)? { + NameRefClass::Definition(def) => def, + NameRefClass::ExternCrate(_) | NameRefClass::FieldShorthand { .. 
} => return None, + }; + let fun = match def { + Definition::ModuleDef(hir::ModuleDef::Function(it)) => it, + _ => return None, + }; + let generics = hir::GenericDef::Function(fun).params(ctx.sema.db); + if generics.is_empty() { + mark::hit!(add_turbo_fish_non_generic); + return None; + } + acc.add( + AssistId("add_turbo_fish", AssistKind::RefactorRewrite), + "Add `::<>`", + ident.text_range(), + |builder| match ctx.config.snippet_cap { + Some(cap) => builder.insert_snippet(cap, ident.text_range().end(), "::<${0:_}>"), + None => builder.insert(ident.text_range().end(), "::<_>"), + }, + ) +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_not_applicable}; + + use super::*; + use test_utils::mark; + + #[test] + fn add_turbo_fish_function() { + check_assist( + add_turbo_fish, + r#" +fn make() -> T {} +fn main() { + make<|>(); +} +"#, + r#" +fn make() -> T {} +fn main() { + make::<${0:_}>(); +} +"#, + ); + } + + #[test] + fn add_turbo_fish_after_call() { + mark::check!(add_turbo_fish_after_call); + check_assist( + add_turbo_fish, + r#" +fn make() -> T {} +fn main() { + make()<|>; +} +"#, + r#" +fn make() -> T {} +fn main() { + make::<${0:_}>(); +} +"#, + ); + } + + #[test] + fn add_turbo_fish_method() { + check_assist( + add_turbo_fish, + r#" +struct S; +impl S { + fn make(&self) -> T {} +} +fn main() { + S.make<|>(); +} +"#, + r#" +struct S; +impl S { + fn make(&self) -> T {} +} +fn main() { + S.make::<${0:_}>(); +} +"#, + ); + } + + #[test] + fn add_turbo_fish_one_fish_is_enough() { + mark::check!(add_turbo_fish_one_fish_is_enough); + check_assist_not_applicable( + add_turbo_fish, + r#" +fn make() -> T {} +fn main() { + make<|>::<()>(); +} +"#, + ); + } + + #[test] + fn add_turbo_fish_non_generic() { + mark::check!(add_turbo_fish_non_generic); + check_assist_not_applicable( + add_turbo_fish, + r#" +fn make() -> () {} +fn main() { + make<|>(); +} +"#, + ); + } +} diff --git a/crates/assists/src/handlers/apply_demorgan.rs 
b/crates/assists/src/handlers/apply_demorgan.rs new file mode 100644 index 0000000000..1a6fdafda2 --- /dev/null +++ b/crates/assists/src/handlers/apply_demorgan.rs @@ -0,0 +1,93 @@ +use syntax::ast::{self, AstNode}; + +use crate::{utils::invert_boolean_expression, AssistContext, AssistId, AssistKind, Assists}; + +// Assist: apply_demorgan +// +// Apply https://en.wikipedia.org/wiki/De_Morgan%27s_laws[De Morgan's law]. +// This transforms expressions of the form `!l || !r` into `!(l && r)`. +// This also works with `&&`. This assist can only be applied with the cursor +// on either `||` or `&&`, with both operands being a negation of some kind. +// This means something of the form `!x` or `x != y`. +// +// ``` +// fn main() { +// if x != 4 ||<|> !y {} +// } +// ``` +// -> +// ``` +// fn main() { +// if !(x == 4 && y) {} +// } +// ``` +pub(crate) fn apply_demorgan(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + let expr = ctx.find_node_at_offset::()?; + let op = expr.op_kind()?; + let op_range = expr.op_token()?.text_range(); + let opposite_op = opposite_logic_op(op)?; + let cursor_in_range = op_range.contains_range(ctx.frange.range); + if !cursor_in_range { + return None; + } + + let lhs = expr.lhs()?; + let lhs_range = lhs.syntax().text_range(); + let not_lhs = invert_boolean_expression(lhs); + + let rhs = expr.rhs()?; + let rhs_range = rhs.syntax().text_range(); + let not_rhs = invert_boolean_expression(rhs); + + acc.add( + AssistId("apply_demorgan", AssistKind::RefactorRewrite), + "Apply De Morgan's law", + op_range, + |edit| { + edit.replace(op_range, opposite_op); + edit.replace(lhs_range, format!("!({}", not_lhs.syntax().text())); + edit.replace(rhs_range, format!("{})", not_rhs.syntax().text())); + }, + ) +} + +// Return the opposite text for a given logical operator, if it makes sense +fn opposite_logic_op(kind: ast::BinOp) -> Option<&'static str> { + match kind { + ast::BinOp::BooleanOr => Some("&&"), + ast::BinOp::BooleanAnd => Some("||"), + _ => 
None, + } +} + +#[cfg(test)] +mod tests { + use super::*; + + use crate::tests::{check_assist, check_assist_not_applicable}; + + #[test] + fn demorgan_turns_and_into_or() { + check_assist(apply_demorgan, "fn f() { !x &&<|> !x }", "fn f() { !(x || x) }") + } + + #[test] + fn demorgan_turns_or_into_and() { + check_assist(apply_demorgan, "fn f() { !x ||<|> !x }", "fn f() { !(x && x) }") + } + + #[test] + fn demorgan_removes_inequality() { + check_assist(apply_demorgan, "fn f() { x != x ||<|> !x }", "fn f() { !(x == x && x) }") + } + + #[test] + fn demorgan_general_case() { + check_assist(apply_demorgan, "fn f() { x ||<|> x }", "fn f() { !(!x && !x) }") + } + + #[test] + fn demorgan_doesnt_apply_with_cursor_not_on_op() { + check_assist_not_applicable(apply_demorgan, "fn f() { <|> !x || !x }") + } +} diff --git a/crates/assists/src/handlers/auto_import.rs b/crates/assists/src/handlers/auto_import.rs new file mode 100644 index 0000000000..cce789972e --- /dev/null +++ b/crates/assists/src/handlers/auto_import.rs @@ -0,0 +1,1088 @@ +use std::collections::BTreeSet; + +use either::Either; +use hir::{ + AsAssocItem, AssocItemContainer, ModPath, Module, ModuleDef, PathResolution, Semantics, Trait, + Type, +}; +use ide_db::{imports_locator, RootDatabase}; +use rustc_hash::FxHashSet; +use syntax::{ + ast::{self, AstNode}, + SyntaxNode, +}; + +use crate::{ + utils::insert_use_statement, AssistContext, AssistId, AssistKind, Assists, GroupLabel, +}; + +// Assist: auto_import +// +// If the name is unresolved, provides all possible imports for it. 
+// +// ``` +// fn main() { +// let map = HashMap<|>::new(); +// } +// # pub mod std { pub mod collections { pub struct HashMap { } } } +// ``` +// -> +// ``` +// use std::collections::HashMap; +// +// fn main() { +// let map = HashMap::new(); +// } +// # pub mod std { pub mod collections { pub struct HashMap { } } } +// ``` +pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + let auto_import_assets = AutoImportAssets::new(ctx)?; + let proposed_imports = auto_import_assets.search_for_imports(ctx); + if proposed_imports.is_empty() { + return None; + } + + let range = ctx.sema.original_range(&auto_import_assets.syntax_under_caret).range; + let group = auto_import_assets.get_import_group_message(); + for import in proposed_imports { + acc.add_group( + &group, + AssistId("auto_import", AssistKind::QuickFix), + format!("Import `{}`", &import), + range, + |builder| { + insert_use_statement( + &auto_import_assets.syntax_under_caret, + &import, + ctx, + builder.text_edit_builder(), + ); + }, + ); + } + Some(()) +} + +#[derive(Debug)] +struct AutoImportAssets { + import_candidate: ImportCandidate, + module_with_name_to_import: Module, + syntax_under_caret: SyntaxNode, +} + +impl AutoImportAssets { + fn new(ctx: &AssistContext) -> Option { + if let Some(path_under_caret) = ctx.find_node_at_offset_with_descend::() { + Self::for_regular_path(path_under_caret, &ctx) + } else { + Self::for_method_call(ctx.find_node_at_offset_with_descend()?, &ctx) + } + } + + fn for_method_call(method_call: ast::MethodCallExpr, ctx: &AssistContext) -> Option { + let syntax_under_caret = method_call.syntax().to_owned(); + let module_with_name_to_import = ctx.sema.scope(&syntax_under_caret).module()?; + Some(Self { + import_candidate: ImportCandidate::for_method_call(&ctx.sema, &method_call)?, + module_with_name_to_import, + syntax_under_caret, + }) + } + + fn for_regular_path(path_under_caret: ast::Path, ctx: &AssistContext) -> Option { + let syntax_under_caret = 
path_under_caret.syntax().to_owned(); + if syntax_under_caret.ancestors().find_map(ast::Use::cast).is_some() { + return None; + } + + let module_with_name_to_import = ctx.sema.scope(&syntax_under_caret).module()?; + Some(Self { + import_candidate: ImportCandidate::for_regular_path(&ctx.sema, &path_under_caret)?, + module_with_name_to_import, + syntax_under_caret, + }) + } + + fn get_search_query(&self) -> &str { + match &self.import_candidate { + ImportCandidate::UnqualifiedName(name) => name, + ImportCandidate::QualifierStart(qualifier_start) => qualifier_start, + ImportCandidate::TraitAssocItem(_, trait_assoc_item_name) => trait_assoc_item_name, + ImportCandidate::TraitMethod(_, trait_method_name) => trait_method_name, + } + } + + fn get_import_group_message(&self) -> GroupLabel { + let name = match &self.import_candidate { + ImportCandidate::UnqualifiedName(name) => format!("Import {}", name), + ImportCandidate::QualifierStart(qualifier_start) => { + format!("Import {}", qualifier_start) + } + ImportCandidate::TraitAssocItem(_, trait_assoc_item_name) => { + format!("Import a trait for item {}", trait_assoc_item_name) + } + ImportCandidate::TraitMethod(_, trait_method_name) => { + format!("Import a trait for method {}", trait_method_name) + } + }; + GroupLabel(name) + } + + fn search_for_imports(&self, ctx: &AssistContext) -> BTreeSet { + let _p = profile::span("auto_import::search_for_imports"); + let db = ctx.db(); + let current_crate = self.module_with_name_to_import.krate(); + imports_locator::find_imports(&ctx.sema, current_crate, &self.get_search_query()) + .into_iter() + .filter_map(|candidate| match &self.import_candidate { + ImportCandidate::TraitAssocItem(assoc_item_type, _) => { + let located_assoc_item = match candidate { + Either::Left(ModuleDef::Function(located_function)) => located_function + .as_assoc_item(db) + .map(|assoc| assoc.container(db)) + .and_then(Self::assoc_to_trait), + Either::Left(ModuleDef::Const(located_const)) => located_const + 
.as_assoc_item(db) + .map(|assoc| assoc.container(db)) + .and_then(Self::assoc_to_trait), + _ => None, + }?; + + let mut trait_candidates = FxHashSet::default(); + trait_candidates.insert(located_assoc_item.into()); + + assoc_item_type + .iterate_path_candidates( + db, + current_crate, + &trait_candidates, + None, + |_, assoc| Self::assoc_to_trait(assoc.container(db)), + ) + .map(ModuleDef::from) + .map(Either::Left) + } + ImportCandidate::TraitMethod(function_callee, _) => { + let located_assoc_item = + if let Either::Left(ModuleDef::Function(located_function)) = candidate { + located_function + .as_assoc_item(db) + .map(|assoc| assoc.container(db)) + .and_then(Self::assoc_to_trait) + } else { + None + }?; + + let mut trait_candidates = FxHashSet::default(); + trait_candidates.insert(located_assoc_item.into()); + + function_callee + .iterate_method_candidates( + db, + current_crate, + &trait_candidates, + None, + |_, function| { + Self::assoc_to_trait(function.as_assoc_item(db)?.container(db)) + }, + ) + .map(ModuleDef::from) + .map(Either::Left) + } + _ => Some(candidate), + }) + .filter_map(|candidate| match candidate { + Either::Left(module_def) => { + self.module_with_name_to_import.find_use_path(db, module_def) + } + Either::Right(macro_def) => { + self.module_with_name_to_import.find_use_path(db, macro_def) + } + }) + .filter(|use_path| !use_path.segments.is_empty()) + .take(20) + .collect::>() + } + + fn assoc_to_trait(assoc: AssocItemContainer) -> Option { + if let AssocItemContainer::Trait(extracted_trait) = assoc { + Some(extracted_trait) + } else { + None + } + } +} + +#[derive(Debug)] +enum ImportCandidate { + /// Simple name like 'HashMap' + UnqualifiedName(String), + /// First part of the qualified name. + /// For 'std::collections::HashMap', that will be 'std'. + QualifierStart(String), + /// A trait associated function (with no self parameter) or associated constant. 
+ /// For 'test_mod::TestEnum::test_function', `Type` is the `test_mod::TestEnum` expression type + /// and `String` is the `test_function` + TraitAssocItem(Type, String), + /// A trait method with self parameter. + /// For 'test_enum.test_method()', `Type` is the `test_enum` expression type + /// and `String` is the `test_method` + TraitMethod(Type, String), +} + +impl ImportCandidate { + fn for_method_call( + sema: &Semantics, + method_call: &ast::MethodCallExpr, + ) -> Option { + if sema.resolve_method_call(method_call).is_some() { + return None; + } + Some(Self::TraitMethod( + sema.type_of_expr(&method_call.expr()?)?, + method_call.name_ref()?.syntax().to_string(), + )) + } + + fn for_regular_path( + sema: &Semantics, + path_under_caret: &ast::Path, + ) -> Option { + if sema.resolve_path(path_under_caret).is_some() { + return None; + } + + let segment = path_under_caret.segment()?; + if let Some(qualifier) = path_under_caret.qualifier() { + let qualifier_start = qualifier.syntax().descendants().find_map(ast::NameRef::cast)?; + let qualifier_start_path = + qualifier_start.syntax().ancestors().find_map(ast::Path::cast)?; + if let Some(qualifier_start_resolution) = sema.resolve_path(&qualifier_start_path) { + let qualifier_resolution = if qualifier_start_path == qualifier { + qualifier_start_resolution + } else { + sema.resolve_path(&qualifier)? 
+ }; + if let PathResolution::Def(ModuleDef::Adt(assoc_item_path)) = qualifier_resolution { + Some(ImportCandidate::TraitAssocItem( + assoc_item_path.ty(sema.db), + segment.syntax().to_string(), + )) + } else { + None + } + } else { + Some(ImportCandidate::QualifierStart(qualifier_start.syntax().to_string())) + } + } else { + Some(ImportCandidate::UnqualifiedName( + segment.syntax().descendants().find_map(ast::NameRef::cast)?.syntax().to_string(), + )) + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target}; + + #[test] + fn applicable_when_found_an_import() { + check_assist( + auto_import, + r" + <|>PubStruct + + pub mod PubMod { + pub struct PubStruct; + } + ", + r" + use PubMod::PubStruct; + + PubStruct + + pub mod PubMod { + pub struct PubStruct; + } + ", + ); + } + + #[test] + fn applicable_when_found_an_import_in_macros() { + check_assist( + auto_import, + r" + macro_rules! foo { + ($i:ident) => { fn foo(a: $i) {} } + } + foo!(Pub<|>Struct); + + pub mod PubMod { + pub struct PubStruct; + } + ", + r" + use PubMod::PubStruct; + + macro_rules! 
foo { + ($i:ident) => { fn foo(a: $i) {} } + } + foo!(PubStruct); + + pub mod PubMod { + pub struct PubStruct; + } + ", + ); + } + + #[test] + fn auto_imports_are_merged() { + check_assist( + auto_import, + r" + use PubMod::PubStruct1; + + struct Test { + test: Pub<|>Struct2, + } + + pub mod PubMod { + pub struct PubStruct1; + pub struct PubStruct2 { + _t: T, + } + } + ", + r" + use PubMod::{PubStruct2, PubStruct1}; + + struct Test { + test: PubStruct2, + } + + pub mod PubMod { + pub struct PubStruct1; + pub struct PubStruct2 { + _t: T, + } + } + ", + ); + } + + #[test] + fn applicable_when_found_multiple_imports() { + check_assist( + auto_import, + r" + PubSt<|>ruct + + pub mod PubMod1 { + pub struct PubStruct; + } + pub mod PubMod2 { + pub struct PubStruct; + } + pub mod PubMod3 { + pub struct PubStruct; + } + ", + r" + use PubMod3::PubStruct; + + PubStruct + + pub mod PubMod1 { + pub struct PubStruct; + } + pub mod PubMod2 { + pub struct PubStruct; + } + pub mod PubMod3 { + pub struct PubStruct; + } + ", + ); + } + + #[test] + fn not_applicable_for_already_imported_types() { + check_assist_not_applicable( + auto_import, + r" + use PubMod::PubStruct; + + PubStruct<|> + + pub mod PubMod { + pub struct PubStruct; + } + ", + ); + } + + #[test] + fn not_applicable_for_types_with_private_paths() { + check_assist_not_applicable( + auto_import, + r" + PrivateStruct<|> + + pub mod PubMod { + struct PrivateStruct; + } + ", + ); + } + + #[test] + fn not_applicable_when_no_imports_found() { + check_assist_not_applicable( + auto_import, + " + PubStruct<|>", + ); + } + + #[test] + fn not_applicable_in_import_statements() { + check_assist_not_applicable( + auto_import, + r" + use PubStruct<|>; + + pub mod PubMod { + pub struct PubStruct; + }", + ); + } + + #[test] + fn function_import() { + check_assist( + auto_import, + r" + test_function<|> + + pub mod PubMod { + pub fn test_function() {}; + } + ", + r" + use PubMod::test_function; + + test_function + + pub mod PubMod { + 
pub fn test_function() {}; + } + ", + ); + } + + #[test] + fn macro_import() { + check_assist( + auto_import, + r" +//- /lib.rs crate:crate_with_macro +#[macro_export] +macro_rules! foo { + () => () +} + +//- /main.rs crate:main deps:crate_with_macro +fn main() { + foo<|> +} +", + r"use crate_with_macro::foo; + +fn main() { + foo +} +", + ); + } + + #[test] + fn auto_import_target() { + check_assist_target( + auto_import, + r" + struct AssistInfo { + group_label: Option<<|>GroupLabel>, + } + + mod m { pub struct GroupLabel; } + ", + "GroupLabel", + ) + } + + #[test] + fn not_applicable_when_path_start_is_imported() { + check_assist_not_applicable( + auto_import, + r" + pub mod mod1 { + pub mod mod2 { + pub mod mod3 { + pub struct TestStruct; + } + } + } + + use mod1::mod2; + fn main() { + mod2::mod3::TestStruct<|> + } + ", + ); + } + + #[test] + fn not_applicable_for_imported_function() { + check_assist_not_applicable( + auto_import, + r" + pub mod test_mod { + pub fn test_function() {} + } + + use test_mod::test_function; + fn main() { + test_function<|> + } + ", + ); + } + + #[test] + fn associated_struct_function() { + check_assist( + auto_import, + r" + mod test_mod { + pub struct TestStruct {} + impl TestStruct { + pub fn test_function() {} + } + } + + fn main() { + TestStruct::test_function<|> + } + ", + r" + use test_mod::TestStruct; + + mod test_mod { + pub struct TestStruct {} + impl TestStruct { + pub fn test_function() {} + } + } + + fn main() { + TestStruct::test_function + } + ", + ); + } + + #[test] + fn associated_struct_const() { + check_assist( + auto_import, + r" + mod test_mod { + pub struct TestStruct {} + impl TestStruct { + const TEST_CONST: u8 = 42; + } + } + + fn main() { + TestStruct::TEST_CONST<|> + } + ", + r" + use test_mod::TestStruct; + + mod test_mod { + pub struct TestStruct {} + impl TestStruct { + const TEST_CONST: u8 = 42; + } + } + + fn main() { + TestStruct::TEST_CONST + } + ", + ); + } + + #[test] + fn 
associated_trait_function() { + check_assist( + auto_import, + r" + mod test_mod { + pub trait TestTrait { + fn test_function(); + } + pub struct TestStruct {} + impl TestTrait for TestStruct { + fn test_function() {} + } + } + + fn main() { + test_mod::TestStruct::test_function<|> + } + ", + r" + use test_mod::TestTrait; + + mod test_mod { + pub trait TestTrait { + fn test_function(); + } + pub struct TestStruct {} + impl TestTrait for TestStruct { + fn test_function() {} + } + } + + fn main() { + test_mod::TestStruct::test_function + } + ", + ); + } + + #[test] + fn not_applicable_for_imported_trait_for_function() { + check_assist_not_applicable( + auto_import, + r" + mod test_mod { + pub trait TestTrait { + fn test_function(); + } + pub trait TestTrait2 { + fn test_function(); + } + pub enum TestEnum { + One, + Two, + } + impl TestTrait2 for TestEnum { + fn test_function() {} + } + impl TestTrait for TestEnum { + fn test_function() {} + } + } + + use test_mod::TestTrait2; + fn main() { + test_mod::TestEnum::test_function<|>; + } + ", + ) + } + + #[test] + fn associated_trait_const() { + check_assist( + auto_import, + r" + mod test_mod { + pub trait TestTrait { + const TEST_CONST: u8; + } + pub struct TestStruct {} + impl TestTrait for TestStruct { + const TEST_CONST: u8 = 42; + } + } + + fn main() { + test_mod::TestStruct::TEST_CONST<|> + } + ", + r" + use test_mod::TestTrait; + + mod test_mod { + pub trait TestTrait { + const TEST_CONST: u8; + } + pub struct TestStruct {} + impl TestTrait for TestStruct { + const TEST_CONST: u8 = 42; + } + } + + fn main() { + test_mod::TestStruct::TEST_CONST + } + ", + ); + } + + #[test] + fn not_applicable_for_imported_trait_for_const() { + check_assist_not_applicable( + auto_import, + r" + mod test_mod { + pub trait TestTrait { + const TEST_CONST: u8; + } + pub trait TestTrait2 { + const TEST_CONST: f64; + } + pub enum TestEnum { + One, + Two, + } + impl TestTrait2 for TestEnum { + const TEST_CONST: f64 = 42.0; + } + impl 
TestTrait for TestEnum { + const TEST_CONST: u8 = 42; + } + } + + use test_mod::TestTrait2; + fn main() { + test_mod::TestEnum::TEST_CONST<|>; + } + ", + ) + } + + #[test] + fn trait_method() { + check_assist( + auto_import, + r" + mod test_mod { + pub trait TestTrait { + fn test_method(&self); + } + pub struct TestStruct {} + impl TestTrait for TestStruct { + fn test_method(&self) {} + } + } + + fn main() { + let test_struct = test_mod::TestStruct {}; + test_struct.test_meth<|>od() + } + ", + r" + use test_mod::TestTrait; + + mod test_mod { + pub trait TestTrait { + fn test_method(&self); + } + pub struct TestStruct {} + impl TestTrait for TestStruct { + fn test_method(&self) {} + } + } + + fn main() { + let test_struct = test_mod::TestStruct {}; + test_struct.test_method() + } + ", + ); + } + + #[test] + fn trait_method_cross_crate() { + check_assist( + auto_import, + r" + //- /main.rs crate:main deps:dep + fn main() { + let test_struct = dep::test_mod::TestStruct {}; + test_struct.test_meth<|>od() + } + //- /dep.rs crate:dep + pub mod test_mod { + pub trait TestTrait { + fn test_method(&self); + } + pub struct TestStruct {} + impl TestTrait for TestStruct { + fn test_method(&self) {} + } + } + ", + r" + use dep::test_mod::TestTrait; + + fn main() { + let test_struct = dep::test_mod::TestStruct {}; + test_struct.test_method() + } + ", + ); + } + + #[test] + fn assoc_fn_cross_crate() { + check_assist( + auto_import, + r" + //- /main.rs crate:main deps:dep + fn main() { + dep::test_mod::TestStruct::test_func<|>tion + } + //- /dep.rs crate:dep + pub mod test_mod { + pub trait TestTrait { + fn test_function(); + } + pub struct TestStruct {} + impl TestTrait for TestStruct { + fn test_function() {} + } + } + ", + r" + use dep::test_mod::TestTrait; + + fn main() { + dep::test_mod::TestStruct::test_function + } + ", + ); + } + + #[test] + fn assoc_const_cross_crate() { + check_assist( + auto_import, + r" + //- /main.rs crate:main deps:dep + fn main() { + 
dep::test_mod::TestStruct::CONST<|> + } + //- /dep.rs crate:dep + pub mod test_mod { + pub trait TestTrait { + const CONST: bool; + } + pub struct TestStruct {} + impl TestTrait for TestStruct { + const CONST: bool = true; + } + } + ", + r" + use dep::test_mod::TestTrait; + + fn main() { + dep::test_mod::TestStruct::CONST + } + ", + ); + } + + #[test] + fn assoc_fn_as_method_cross_crate() { + check_assist_not_applicable( + auto_import, + r" + //- /main.rs crate:main deps:dep + fn main() { + let test_struct = dep::test_mod::TestStruct {}; + test_struct.test_func<|>tion() + } + //- /dep.rs crate:dep + pub mod test_mod { + pub trait TestTrait { + fn test_function(); + } + pub struct TestStruct {} + impl TestTrait for TestStruct { + fn test_function() {} + } + } + ", + ); + } + + #[test] + fn private_trait_cross_crate() { + check_assist_not_applicable( + auto_import, + r" + //- /main.rs crate:main deps:dep + fn main() { + let test_struct = dep::test_mod::TestStruct {}; + test_struct.test_meth<|>od() + } + //- /dep.rs crate:dep + pub mod test_mod { + trait TestTrait { + fn test_method(&self); + } + pub struct TestStruct {} + impl TestTrait for TestStruct { + fn test_method(&self) {} + } + } + ", + ); + } + + #[test] + fn not_applicable_for_imported_trait_for_method() { + check_assist_not_applicable( + auto_import, + r" + mod test_mod { + pub trait TestTrait { + fn test_method(&self); + } + pub trait TestTrait2 { + fn test_method(&self); + } + pub enum TestEnum { + One, + Two, + } + impl TestTrait2 for TestEnum { + fn test_method(&self) {} + } + impl TestTrait for TestEnum { + fn test_method(&self) {} + } + } + + use test_mod::TestTrait2; + fn main() { + let one = test_mod::TestEnum::One; + one.test<|>_method(); + } + ", + ) + } + + #[test] + fn dep_import() { + check_assist( + auto_import, + r" +//- /lib.rs crate:dep +pub struct Struct; + +//- /main.rs crate:main deps:dep +fn main() { + Struct<|> +} +", + r"use dep::Struct; + +fn main() { + Struct +} +", + ); + } + + 
#[test] + fn whole_segment() { + // Tests that only imports whose last segment matches the identifier get suggested. + check_assist( + auto_import, + r" +//- /lib.rs crate:dep +pub mod fmt { + pub trait Display {} +} + +pub fn panic_fmt() {} + +//- /main.rs crate:main deps:dep +struct S; + +impl f<|>mt::Display for S {} +", + r"use dep::fmt; + +struct S; + +impl fmt::Display for S {} +", + ); + } + + #[test] + fn macro_generated() { + // Tests that macro-generated items are suggested from external crates. + check_assist( + auto_import, + r" +//- /lib.rs crate:dep +macro_rules! mac { + () => { + pub struct Cheese; + }; +} + +mac!(); + +//- /main.rs crate:main deps:dep +fn main() { + Cheese<|>; +} +", + r"use dep::Cheese; + +fn main() { + Cheese; +} +", + ); + } + + #[test] + fn casing() { + // Tests that differently cased names don't interfere and we only suggest the matching one. + check_assist( + auto_import, + r" +//- /lib.rs crate:dep +pub struct FMT; +pub struct fmt; + +//- /main.rs crate:main deps:dep +fn main() { + FMT<|>; +} +", + r"use dep::FMT; + +fn main() { + FMT; +} +", + ); + } +} diff --git a/crates/assists/src/handlers/change_return_type_to_result.rs b/crates/assists/src/handlers/change_return_type_to_result.rs new file mode 100644 index 0000000000..be480943c5 --- /dev/null +++ b/crates/assists/src/handlers/change_return_type_to_result.rs @@ -0,0 +1,998 @@ +use std::iter; + +use syntax::{ + ast::{self, make, BlockExpr, Expr, LoopBodyOwner}, + AstNode, SyntaxNode, +}; +use test_utils::mark; + +use crate::{AssistContext, AssistId, AssistKind, Assists}; + +// Assist: change_return_type_to_result +// +// Change the function's return type to Result. 
+// +// ``` +// fn foo() -> i32<|> { 42i32 } +// ``` +// -> +// ``` +// fn foo() -> Result { Ok(42i32) } +// ``` +pub(crate) fn change_return_type_to_result(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + let ret_type = ctx.find_node_at_offset::()?; + // FIXME: extend to lambdas as well + let fn_def = ret_type.syntax().parent().and_then(ast::Fn::cast)?; + + let type_ref = &ret_type.ty()?; + let ret_type_str = type_ref.syntax().text().to_string(); + let first_part_ret_type = ret_type_str.splitn(2, '<').next(); + if let Some(ret_type_first_part) = first_part_ret_type { + if ret_type_first_part.ends_with("Result") { + mark::hit!(change_return_type_to_result_simple_return_type_already_result); + return None; + } + } + + let block_expr = &fn_def.body()?; + + acc.add( + AssistId("change_return_type_to_result", AssistKind::RefactorRewrite), + "Wrap return type in Result", + type_ref.syntax().text_range(), + |builder| { + let mut tail_return_expr_collector = TailReturnCollector::new(); + tail_return_expr_collector.collect_jump_exprs(block_expr, false); + tail_return_expr_collector.collect_tail_exprs(block_expr); + + for ret_expr_arg in tail_return_expr_collector.exprs_to_wrap { + let ok_wrapped = make::expr_call( + make::expr_path(make::path_unqualified(make::path_segment(make::name_ref( + "Ok", + )))), + make::arg_list(iter::once(ret_expr_arg.clone())), + ); + builder.replace_ast(ret_expr_arg, ok_wrapped); + } + + match ctx.config.snippet_cap { + Some(cap) => { + let snippet = format!("Result<{}, ${{0:_}}>", type_ref); + builder.replace_snippet(cap, type_ref.syntax().text_range(), snippet) + } + None => builder + .replace(type_ref.syntax().text_range(), format!("Result<{}, _>", type_ref)), + } + }, + ) +} + +struct TailReturnCollector { + exprs_to_wrap: Vec, +} + +impl TailReturnCollector { + fn new() -> Self { + Self { exprs_to_wrap: vec![] } + } + /// Collect all`return` expression + fn collect_jump_exprs(&mut self, block_expr: &BlockExpr, collect_break: bool) 
{ + let statements = block_expr.statements(); + for stmt in statements { + let expr = match &stmt { + ast::Stmt::ExprStmt(stmt) => stmt.expr(), + ast::Stmt::LetStmt(stmt) => stmt.initializer(), + ast::Stmt::Item(_) => continue, + }; + if let Some(expr) = &expr { + self.handle_exprs(expr, collect_break); + } + } + + // Browse tail expressions for each block + if let Some(expr) = block_expr.expr() { + if let Some(last_exprs) = get_tail_expr_from_block(&expr) { + for last_expr in last_exprs { + let last_expr = match last_expr { + NodeType::Node(expr) => expr, + NodeType::Leaf(expr) => expr.syntax().clone(), + }; + + if let Some(last_expr) = Expr::cast(last_expr.clone()) { + self.handle_exprs(&last_expr, collect_break); + } else if let Some(expr_stmt) = ast::Stmt::cast(last_expr) { + let expr_stmt = match &expr_stmt { + ast::Stmt::ExprStmt(stmt) => stmt.expr(), + ast::Stmt::LetStmt(stmt) => stmt.initializer(), + ast::Stmt::Item(_) => None, + }; + if let Some(expr) = &expr_stmt { + self.handle_exprs(expr, collect_break); + } + } + } + } + } + } + + fn handle_exprs(&mut self, expr: &Expr, collect_break: bool) { + match expr { + Expr::BlockExpr(block_expr) => { + self.collect_jump_exprs(&block_expr, collect_break); + } + Expr::ReturnExpr(ret_expr) => { + if let Some(ret_expr_arg) = &ret_expr.expr() { + self.exprs_to_wrap.push(ret_expr_arg.clone()); + } + } + Expr::BreakExpr(break_expr) if collect_break => { + if let Some(break_expr_arg) = &break_expr.expr() { + self.exprs_to_wrap.push(break_expr_arg.clone()); + } + } + Expr::IfExpr(if_expr) => { + for block in if_expr.blocks() { + self.collect_jump_exprs(&block, collect_break); + } + } + Expr::LoopExpr(loop_expr) => { + if let Some(block_expr) = loop_expr.loop_body() { + self.collect_jump_exprs(&block_expr, collect_break); + } + } + Expr::ForExpr(for_expr) => { + if let Some(block_expr) = for_expr.loop_body() { + self.collect_jump_exprs(&block_expr, collect_break); + } + } + Expr::WhileExpr(while_expr) => { + if let 
Some(block_expr) = while_expr.loop_body() { + self.collect_jump_exprs(&block_expr, collect_break); + } + } + Expr::MatchExpr(match_expr) => { + if let Some(arm_list) = match_expr.match_arm_list() { + arm_list.arms().filter_map(|match_arm| match_arm.expr()).for_each(|expr| { + self.handle_exprs(&expr, collect_break); + }); + } + } + _ => {} + } + } + + fn collect_tail_exprs(&mut self, block: &BlockExpr) { + if let Some(expr) = block.expr() { + self.handle_exprs(&expr, true); + self.fetch_tail_exprs(&expr); + } + } + + fn fetch_tail_exprs(&mut self, expr: &Expr) { + if let Some(exprs) = get_tail_expr_from_block(expr) { + for node_type in &exprs { + match node_type { + NodeType::Leaf(expr) => { + self.exprs_to_wrap.push(expr.clone()); + } + NodeType::Node(expr) => { + if let Some(last_expr) = Expr::cast(expr.clone()) { + self.fetch_tail_exprs(&last_expr); + } + } + } + } + } + } +} + +#[derive(Debug)] +enum NodeType { + Leaf(ast::Expr), + Node(SyntaxNode), +} + +/// Get a tail expression inside a block +fn get_tail_expr_from_block(expr: &Expr) -> Option> { + match expr { + Expr::IfExpr(if_expr) => { + let mut nodes = vec![]; + for block in if_expr.blocks() { + if let Some(block_expr) = block.expr() { + if let Some(tail_exprs) = get_tail_expr_from_block(&block_expr) { + nodes.extend(tail_exprs); + } + } else if let Some(last_expr) = block.syntax().last_child() { + nodes.push(NodeType::Node(last_expr)); + } else { + nodes.push(NodeType::Node(block.syntax().clone())); + } + } + Some(nodes) + } + Expr::LoopExpr(loop_expr) => { + loop_expr.syntax().last_child().map(|lc| vec![NodeType::Node(lc)]) + } + Expr::ForExpr(for_expr) => { + for_expr.syntax().last_child().map(|lc| vec![NodeType::Node(lc)]) + } + Expr::WhileExpr(while_expr) => { + while_expr.syntax().last_child().map(|lc| vec![NodeType::Node(lc)]) + } + Expr::BlockExpr(block_expr) => { + block_expr.expr().map(|lc| vec![NodeType::Node(lc.syntax().clone())]) + } + Expr::MatchExpr(match_expr) => { + let arm_list = 
match_expr.match_arm_list()?; + let arms: Vec = arm_list + .arms() + .filter_map(|match_arm| match_arm.expr()) + .map(|expr| match expr { + Expr::ReturnExpr(ret_expr) => NodeType::Node(ret_expr.syntax().clone()), + Expr::BreakExpr(break_expr) => NodeType::Node(break_expr.syntax().clone()), + _ => match expr.syntax().last_child() { + Some(last_expr) => NodeType::Node(last_expr), + None => NodeType::Node(expr.syntax().clone()), + }, + }) + .collect(); + + Some(arms) + } + Expr::BreakExpr(expr) => expr.expr().map(|e| vec![NodeType::Leaf(e)]), + Expr::ReturnExpr(ret_expr) => Some(vec![NodeType::Node(ret_expr.syntax().clone())]), + + Expr::CallExpr(_) + | Expr::Literal(_) + | Expr::TupleExpr(_) + | Expr::ArrayExpr(_) + | Expr::ParenExpr(_) + | Expr::PathExpr(_) + | Expr::RecordExpr(_) + | Expr::IndexExpr(_) + | Expr::MethodCallExpr(_) + | Expr::AwaitExpr(_) + | Expr::CastExpr(_) + | Expr::RefExpr(_) + | Expr::PrefixExpr(_) + | Expr::RangeExpr(_) + | Expr::BinExpr(_) + | Expr::MacroCall(_) + | Expr::BoxExpr(_) => Some(vec![NodeType::Leaf(expr.clone())]), + _ => None, + } +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_not_applicable}; + + use super::*; + + #[test] + fn change_return_type_to_result_simple() { + check_assist( + change_return_type_to_result, + r#"fn foo() -> i3<|>2 { + let test = "test"; + return 42i32; + }"#, + r#"fn foo() -> Result { + let test = "test"; + return Ok(42i32); + }"#, + ); + } + + #[test] + fn change_return_type_to_result_simple_return_type() { + check_assist( + change_return_type_to_result, + r#"fn foo() -> i32<|> { + let test = "test"; + return 42i32; + }"#, + r#"fn foo() -> Result { + let test = "test"; + return Ok(42i32); + }"#, + ); + } + + #[test] + fn change_return_type_to_result_simple_return_type_bad_cursor() { + check_assist_not_applicable( + change_return_type_to_result, + r#"fn foo() -> i32 { + let test = "test";<|> + return 42i32; + }"#, + ); + } + + #[test] + fn 
change_return_type_to_result_simple_return_type_already_result_std() { + check_assist_not_applicable( + change_return_type_to_result, + r#"fn foo() -> std::result::Result, String> { + let test = "test"; + return 42i32; + }"#, + ); + } + + #[test] + fn change_return_type_to_result_simple_return_type_already_result() { + mark::check!(change_return_type_to_result_simple_return_type_already_result); + check_assist_not_applicable( + change_return_type_to_result, + r#"fn foo() -> Result, String> { + let test = "test"; + return 42i32; + }"#, + ); + } + + #[test] + fn change_return_type_to_result_simple_with_cursor() { + check_assist( + change_return_type_to_result, + r#"fn foo() -> <|>i32 { + let test = "test"; + return 42i32; + }"#, + r#"fn foo() -> Result { + let test = "test"; + return Ok(42i32); + }"#, + ); + } + + #[test] + fn change_return_type_to_result_simple_with_tail() { + check_assist( + change_return_type_to_result, + r#"fn foo() -><|> i32 { + let test = "test"; + 42i32 + }"#, + r#"fn foo() -> Result { + let test = "test"; + Ok(42i32) + }"#, + ); + } + + #[test] + fn change_return_type_to_result_simple_with_tail_only() { + check_assist( + change_return_type_to_result, + r#"fn foo() -> i32<|> { + 42i32 + }"#, + r#"fn foo() -> Result { + Ok(42i32) + }"#, + ); + } + #[test] + fn change_return_type_to_result_simple_with_tail_block_like() { + check_assist( + change_return_type_to_result, + r#"fn foo() -> i32<|> { + if true { + 42i32 + } else { + 24i32 + } + }"#, + r#"fn foo() -> Result { + if true { + Ok(42i32) + } else { + Ok(24i32) + } + }"#, + ); + } + + #[test] + fn change_return_type_to_result_simple_with_nested_if() { + check_assist( + change_return_type_to_result, + r#"fn foo() -> i32<|> { + if true { + if false { + 1 + } else { + 2 + } + } else { + 24i32 + } + }"#, + r#"fn foo() -> Result { + if true { + if false { + Ok(1) + } else { + Ok(2) + } + } else { + Ok(24i32) + } + }"#, + ); + } + + #[test] + fn change_return_type_to_result_simple_with_await() { + 
check_assist( + change_return_type_to_result, + r#"async fn foo() -> i<|>32 { + if true { + if false { + 1.await + } else { + 2.await + } + } else { + 24i32.await + } + }"#, + r#"async fn foo() -> Result { + if true { + if false { + Ok(1.await) + } else { + Ok(2.await) + } + } else { + Ok(24i32.await) + } + }"#, + ); + } + + #[test] + fn change_return_type_to_result_simple_with_array() { + check_assist( + change_return_type_to_result, + r#"fn foo() -> [i32;<|> 3] { + [1, 2, 3] + }"#, + r#"fn foo() -> Result<[i32; 3], ${0:_}> { + Ok([1, 2, 3]) + }"#, + ); + } + + #[test] + fn change_return_type_to_result_simple_with_cast() { + check_assist( + change_return_type_to_result, + r#"fn foo() -<|>> i32 { + if true { + if false { + 1 as i32 + } else { + 2 as i32 + } + } else { + 24 as i32 + } + }"#, + r#"fn foo() -> Result { + if true { + if false { + Ok(1 as i32) + } else { + Ok(2 as i32) + } + } else { + Ok(24 as i32) + } + }"#, + ); + } + + #[test] + fn change_return_type_to_result_simple_with_tail_block_like_match() { + check_assist( + change_return_type_to_result, + r#"fn foo() -> i32<|> { + let my_var = 5; + match my_var { + 5 => 42i32, + _ => 24i32, + } + }"#, + r#"fn foo() -> Result { + let my_var = 5; + match my_var { + 5 => Ok(42i32), + _ => Ok(24i32), + } + }"#, + ); + } + + #[test] + fn change_return_type_to_result_simple_with_loop_with_tail() { + check_assist( + change_return_type_to_result, + r#"fn foo() -> i32<|> { + let my_var = 5; + loop { + println!("test"); + 5 + } + + my_var + }"#, + r#"fn foo() -> Result { + let my_var = 5; + loop { + println!("test"); + 5 + } + + Ok(my_var) + }"#, + ); + } + + #[test] + fn change_return_type_to_result_simple_with_loop_in_let_stmt() { + check_assist( + change_return_type_to_result, + r#"fn foo() -> i32<|> { + let my_var = let x = loop { + break 1; + }; + + my_var + }"#, + r#"fn foo() -> Result { + let my_var = let x = loop { + break 1; + }; + + Ok(my_var) + }"#, + ); + } + + #[test] + fn 
change_return_type_to_result_simple_with_tail_block_like_match_return_expr() { + check_assist( + change_return_type_to_result, + r#"fn foo() -> i32<|> { + let my_var = 5; + let res = match my_var { + 5 => 42i32, + _ => return 24i32, + }; + + res + }"#, + r#"fn foo() -> Result { + let my_var = 5; + let res = match my_var { + 5 => 42i32, + _ => return Ok(24i32), + }; + + Ok(res) + }"#, + ); + + check_assist( + change_return_type_to_result, + r#"fn foo() -> i32<|> { + let my_var = 5; + let res = if my_var == 5 { + 42i32 + } else { + return 24i32; + }; + + res + }"#, + r#"fn foo() -> Result { + let my_var = 5; + let res = if my_var == 5 { + 42i32 + } else { + return Ok(24i32); + }; + + Ok(res) + }"#, + ); + } + + #[test] + fn change_return_type_to_result_simple_with_tail_block_like_match_deeper() { + check_assist( + change_return_type_to_result, + r#"fn foo() -> i32<|> { + let my_var = 5; + match my_var { + 5 => { + if true { + 42i32 + } else { + 25i32 + } + }, + _ => { + let test = "test"; + if test == "test" { + return bar(); + } + 53i32 + }, + } + }"#, + r#"fn foo() -> Result { + let my_var = 5; + match my_var { + 5 => { + if true { + Ok(42i32) + } else { + Ok(25i32) + } + }, + _ => { + let test = "test"; + if test == "test" { + return Ok(bar()); + } + Ok(53i32) + }, + } + }"#, + ); + } + + #[test] + fn change_return_type_to_result_simple_with_tail_block_like_early_return() { + check_assist( + change_return_type_to_result, + r#"fn foo() -> i<|>32 { + let test = "test"; + if test == "test" { + return 24i32; + } + 53i32 + }"#, + r#"fn foo() -> Result { + let test = "test"; + if test == "test" { + return Ok(24i32); + } + Ok(53i32) + }"#, + ); + } + + #[test] + fn change_return_type_to_result_simple_with_closure() { + check_assist( + change_return_type_to_result, + r#"fn foo(the_field: u32) -><|> u32 { + let true_closure = || { + return true; + }; + if the_field < 5 { + let mut i = 0; + + + if true_closure() { + return 99; + } else { + return 0; + } + } + + the_field + 
}"#, + r#"fn foo(the_field: u32) -> Result { + let true_closure = || { + return true; + }; + if the_field < 5 { + let mut i = 0; + + + if true_closure() { + return Ok(99); + } else { + return Ok(0); + } + } + + Ok(the_field) + }"#, + ); + + check_assist( + change_return_type_to_result, + r#"fn foo(the_field: u32) -> u32<|> { + let true_closure = || { + return true; + }; + if the_field < 5 { + let mut i = 0; + + + if true_closure() { + return 99; + } else { + return 0; + } + } + let t = None; + + t.unwrap_or_else(|| the_field) + }"#, + r#"fn foo(the_field: u32) -> Result { + let true_closure = || { + return true; + }; + if the_field < 5 { + let mut i = 0; + + + if true_closure() { + return Ok(99); + } else { + return Ok(0); + } + } + let t = None; + + Ok(t.unwrap_or_else(|| the_field)) + }"#, + ); + } + + #[test] + fn change_return_type_to_result_simple_with_weird_forms() { + check_assist( + change_return_type_to_result, + r#"fn foo() -> i32<|> { + let test = "test"; + if test == "test" { + return 24i32; + } + let mut i = 0; + loop { + if i == 1 { + break 55; + } + i += 1; + } + }"#, + r#"fn foo() -> Result { + let test = "test"; + if test == "test" { + return Ok(24i32); + } + let mut i = 0; + loop { + if i == 1 { + break Ok(55); + } + i += 1; + } + }"#, + ); + + check_assist( + change_return_type_to_result, + r#"fn foo() -> i32<|> { + let test = "test"; + if test == "test" { + return 24i32; + } + let mut i = 0; + loop { + loop { + if i == 1 { + break 55; + } + i += 1; + } + } + }"#, + r#"fn foo() -> Result { + let test = "test"; + if test == "test" { + return Ok(24i32); + } + let mut i = 0; + loop { + loop { + if i == 1 { + break Ok(55); + } + i += 1; + } + } + }"#, + ); + + check_assist( + change_return_type_to_result, + r#"fn foo() -> i3<|>2 { + let test = "test"; + let other = 5; + if test == "test" { + let res = match other { + 5 => 43, + _ => return 56, + }; + } + let mut i = 0; + loop { + loop { + if i == 1 { + break 55; + } + i += 1; + } + } + }"#, + r#"fn 
foo() -> Result { + let test = "test"; + let other = 5; + if test == "test" { + let res = match other { + 5 => 43, + _ => return Ok(56), + }; + } + let mut i = 0; + loop { + loop { + if i == 1 { + break Ok(55); + } + i += 1; + } + } + }"#, + ); + + check_assist( + change_return_type_to_result, + r#"fn foo(the_field: u32) -> u32<|> { + if the_field < 5 { + let mut i = 0; + loop { + if i > 5 { + return 55u32; + } + i += 3; + } + + match i { + 5 => return 99, + _ => return 0, + }; + } + + the_field + }"#, + r#"fn foo(the_field: u32) -> Result { + if the_field < 5 { + let mut i = 0; + loop { + if i > 5 { + return Ok(55u32); + } + i += 3; + } + + match i { + 5 => return Ok(99), + _ => return Ok(0), + }; + } + + Ok(the_field) + }"#, + ); + + check_assist( + change_return_type_to_result, + r#"fn foo(the_field: u32) -> u3<|>2 { + if the_field < 5 { + let mut i = 0; + + match i { + 5 => return 99, + _ => return 0, + } + } + + the_field + }"#, + r#"fn foo(the_field: u32) -> Result { + if the_field < 5 { + let mut i = 0; + + match i { + 5 => return Ok(99), + _ => return Ok(0), + } + } + + Ok(the_field) + }"#, + ); + + check_assist( + change_return_type_to_result, + r#"fn foo(the_field: u32) -> u32<|> { + if the_field < 5 { + let mut i = 0; + + if i == 5 { + return 99 + } else { + return 0 + } + } + + the_field + }"#, + r#"fn foo(the_field: u32) -> Result { + if the_field < 5 { + let mut i = 0; + + if i == 5 { + return Ok(99) + } else { + return Ok(0) + } + } + + Ok(the_field) + }"#, + ); + + check_assist( + change_return_type_to_result, + r#"fn foo(the_field: u32) -> <|>u32 { + if the_field < 5 { + let mut i = 0; + + if i == 5 { + return 99; + } else { + return 0; + } + } + + the_field + }"#, + r#"fn foo(the_field: u32) -> Result { + if the_field < 5 { + let mut i = 0; + + if i == 5 { + return Ok(99); + } else { + return Ok(0); + } + } + + Ok(the_field) + }"#, + ); + } +} diff --git a/crates/assists/src/handlers/change_visibility.rs 
b/crates/assists/src/handlers/change_visibility.rs new file mode 100644 index 0000000000..32dc053789 --- /dev/null +++ b/crates/assists/src/handlers/change_visibility.rs @@ -0,0 +1,200 @@ +use syntax::{ + ast::{self, NameOwner, VisibilityOwner}, + AstNode, + SyntaxKind::{CONST, ENUM, FN, MODULE, STATIC, STRUCT, TRAIT, VISIBILITY}, + T, +}; +use test_utils::mark; + +use crate::{utils::vis_offset, AssistContext, AssistId, AssistKind, Assists}; + +// Assist: change_visibility +// +// Adds or changes existing visibility specifier. +// +// ``` +// <|>fn frobnicate() {} +// ``` +// -> +// ``` +// pub(crate) fn frobnicate() {} +// ``` +pub(crate) fn change_visibility(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + if let Some(vis) = ctx.find_node_at_offset::() { + return change_vis(acc, vis); + } + add_vis(acc, ctx) +} + +fn add_vis(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + let item_keyword = ctx.token_at_offset().find(|leaf| { + matches!( + leaf.kind(), + T![const] | T![static] | T![fn] | T![mod] | T![struct] | T![enum] | T![trait] + ) + }); + + let (offset, target) = if let Some(keyword) = item_keyword { + let parent = keyword.parent(); + let def_kws = vec![CONST, STATIC, FN, MODULE, STRUCT, ENUM, TRAIT]; + // Parent is not a definition, can't add visibility + if !def_kws.iter().any(|&def_kw| def_kw == parent.kind()) { + return None; + } + // Already have visibility, do nothing + if parent.children().any(|child| child.kind() == VISIBILITY) { + return None; + } + (vis_offset(&parent), keyword.text_range()) + } else if let Some(field_name) = ctx.find_node_at_offset::() { + let field = field_name.syntax().ancestors().find_map(ast::RecordField::cast)?; + if field.name()? 
!= field_name { + mark::hit!(change_visibility_field_false_positive); + return None; + } + if field.visibility().is_some() { + return None; + } + (vis_offset(field.syntax()), field_name.syntax().text_range()) + } else if let Some(field) = ctx.find_node_at_offset::() { + if field.visibility().is_some() { + return None; + } + (vis_offset(field.syntax()), field.syntax().text_range()) + } else { + return None; + }; + + acc.add( + AssistId("change_visibility", AssistKind::RefactorRewrite), + "Change visibility to pub(crate)", + target, + |edit| { + edit.insert(offset, "pub(crate) "); + }, + ) +} + +fn change_vis(acc: &mut Assists, vis: ast::Visibility) -> Option<()> { + if vis.syntax().text() == "pub" { + let target = vis.syntax().text_range(); + return acc.add( + AssistId("change_visibility", AssistKind::RefactorRewrite), + "Change Visibility to pub(crate)", + target, + |edit| { + edit.replace(vis.syntax().text_range(), "pub(crate)"); + }, + ); + } + if vis.syntax().text() == "pub(crate)" { + let target = vis.syntax().text_range(); + return acc.add( + AssistId("change_visibility", AssistKind::RefactorRewrite), + "Change visibility to pub", + target, + |edit| { + edit.replace(vis.syntax().text_range(), "pub"); + }, + ); + } + None +} + +#[cfg(test)] +mod tests { + use test_utils::mark; + + use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target}; + + use super::*; + + #[test] + fn change_visibility_adds_pub_crate_to_items() { + check_assist(change_visibility, "<|>fn foo() {}", "pub(crate) fn foo() {}"); + check_assist(change_visibility, "f<|>n foo() {}", "pub(crate) fn foo() {}"); + check_assist(change_visibility, "<|>struct Foo {}", "pub(crate) struct Foo {}"); + check_assist(change_visibility, "<|>mod foo {}", "pub(crate) mod foo {}"); + check_assist(change_visibility, "<|>trait Foo {}", "pub(crate) trait Foo {}"); + check_assist(change_visibility, "m<|>od {}", "pub(crate) mod {}"); + check_assist(change_visibility, "unsafe f<|>n foo() {}", 
"pub(crate) unsafe fn foo() {}"); + } + + #[test] + fn change_visibility_works_with_struct_fields() { + check_assist( + change_visibility, + r"struct S { <|>field: u32 }", + r"struct S { pub(crate) field: u32 }", + ); + check_assist(change_visibility, r"struct S ( <|>u32 )", r"struct S ( pub(crate) u32 )"); + } + + #[test] + fn change_visibility_field_false_positive() { + mark::check!(change_visibility_field_false_positive); + check_assist_not_applicable( + change_visibility, + r"struct S { field: [(); { let <|>x = ();}] }", + ) + } + + #[test] + fn change_visibility_pub_to_pub_crate() { + check_assist(change_visibility, "<|>pub fn foo() {}", "pub(crate) fn foo() {}") + } + + #[test] + fn change_visibility_pub_crate_to_pub() { + check_assist(change_visibility, "<|>pub(crate) fn foo() {}", "pub fn foo() {}") + } + + #[test] + fn change_visibility_const() { + check_assist(change_visibility, "<|>const FOO = 3u8;", "pub(crate) const FOO = 3u8;"); + } + + #[test] + fn change_visibility_static() { + check_assist(change_visibility, "<|>static FOO = 3u8;", "pub(crate) static FOO = 3u8;"); + } + + #[test] + fn change_visibility_handles_comment_attrs() { + check_assist( + change_visibility, + r" + /// docs + + // comments + + #[derive(Debug)] + <|>struct Foo; + ", + r" + /// docs + + // comments + + #[derive(Debug)] + pub(crate) struct Foo; + ", + ) + } + + #[test] + fn not_applicable_for_enum_variants() { + check_assist_not_applicable( + change_visibility, + r"mod foo { pub enum Foo {Foo1} } + fn main() { foo::Foo::Foo1<|> } ", + ); + } + + #[test] + fn change_visibility_target() { + check_assist_target(change_visibility, "<|>fn foo() {}", "fn"); + check_assist_target(change_visibility, "pub(crate)<|> fn foo() {}", "pub(crate)"); + check_assist_target(change_visibility, "struct S { <|>field: u32 }", "field"); + } +} diff --git a/crates/assists/src/handlers/early_return.rs b/crates/assists/src/handlers/early_return.rs new file mode 100644 index 0000000000..7fd78e9d47 --- 
/dev/null +++ b/crates/assists/src/handlers/early_return.rs @@ -0,0 +1,515 @@ +use std::{iter::once, ops::RangeInclusive}; + +use syntax::{ + algo::replace_children, + ast::{ + self, + edit::{AstNodeEdit, IndentLevel}, + make, + }, + AstNode, + SyntaxKind::{FN, LOOP_EXPR, L_CURLY, R_CURLY, WHILE_EXPR, WHITESPACE}, + SyntaxNode, +}; + +use crate::{ + assist_context::{AssistContext, Assists}, + utils::invert_boolean_expression, + AssistId, AssistKind, +}; + +// Assist: convert_to_guarded_return +// +// Replace a large conditional with a guarded return. +// +// ``` +// fn main() { +// <|>if cond { +// foo(); +// bar(); +// } +// } +// ``` +// -> +// ``` +// fn main() { +// if !cond { +// return; +// } +// foo(); +// bar(); +// } +// ``` +pub(crate) fn convert_to_guarded_return(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + let if_expr: ast::IfExpr = ctx.find_node_at_offset()?; + if if_expr.else_branch().is_some() { + return None; + } + + let cond = if_expr.condition()?; + + // Check if there is an IfLet that we can handle. + let if_let_pat = match cond.pat() { + None => None, // No IfLet, supported. + Some(ast::Pat::TupleStructPat(pat)) if pat.fields().count() == 1 => { + let path = pat.path()?; + match path.qualifier() { + None => { + let bound_ident = pat.fields().next().unwrap(); + Some((path, bound_ident)) + } + Some(_) => return None, + } + } + Some(_) => return None, // Unsupported IfLet. + }; + + let cond_expr = cond.expr()?; + let then_block = if_expr.then_branch()?; + + let parent_block = if_expr.syntax().parent()?.ancestors().find_map(ast::BlockExpr::cast)?; + + if parent_block.expr()? 
!= if_expr.clone().into() { + return None; + } + + // check for early return and continue + let first_in_then_block = then_block.syntax().first_child()?; + if ast::ReturnExpr::can_cast(first_in_then_block.kind()) + || ast::ContinueExpr::can_cast(first_in_then_block.kind()) + || first_in_then_block + .children() + .any(|x| ast::ReturnExpr::can_cast(x.kind()) || ast::ContinueExpr::can_cast(x.kind())) + { + return None; + } + + let parent_container = parent_block.syntax().parent()?; + + let early_expression: ast::Expr = match parent_container.kind() { + WHILE_EXPR | LOOP_EXPR => make::expr_continue(), + FN => make::expr_return(), + _ => return None, + }; + + if then_block.syntax().first_child_or_token().map(|t| t.kind() == L_CURLY).is_none() { + return None; + } + + then_block.syntax().last_child_or_token().filter(|t| t.kind() == R_CURLY)?; + + let target = if_expr.syntax().text_range(); + acc.add( + AssistId("convert_to_guarded_return", AssistKind::RefactorRewrite), + "Convert to guarded return", + target, + |edit| { + let if_indent_level = IndentLevel::from_node(&if_expr.syntax()); + let new_block = match if_let_pat { + None => { + // If. + let new_expr = { + let then_branch = + make::block_expr(once(make::expr_stmt(early_expression).into()), None); + let cond = invert_boolean_expression(cond_expr); + make::expr_if(make::condition(cond, None), then_branch) + .indent(if_indent_level) + }; + replace(new_expr.syntax(), &then_block, &parent_block, &if_expr) + } + Some((path, bound_ident)) => { + // If-let. 
+ let match_expr = { + let happy_arm = { + let pat = make::tuple_struct_pat( + path, + once(make::ident_pat(make::name("it")).into()), + ); + let expr = { + let name_ref = make::name_ref("it"); + let segment = make::path_segment(name_ref); + let path = make::path_unqualified(segment); + make::expr_path(path) + }; + make::match_arm(once(pat.into()), expr) + }; + + let sad_arm = make::match_arm( + // FIXME: would be cool to use `None` or `Err(_)` if appropriate + once(make::wildcard_pat().into()), + early_expression, + ); + + make::expr_match(cond_expr, make::match_arm_list(vec![happy_arm, sad_arm])) + }; + + let let_stmt = make::let_stmt( + make::ident_pat(make::name(&bound_ident.syntax().to_string())).into(), + Some(match_expr), + ); + let let_stmt = let_stmt.indent(if_indent_level); + replace(let_stmt.syntax(), &then_block, &parent_block, &if_expr) + } + }; + edit.replace_ast(parent_block, ast::BlockExpr::cast(new_block).unwrap()); + + fn replace( + new_expr: &SyntaxNode, + then_block: &ast::BlockExpr, + parent_block: &ast::BlockExpr, + if_expr: &ast::IfExpr, + ) -> SyntaxNode { + let then_block_items = then_block.dedent(IndentLevel(1)); + let end_of_then = then_block_items.syntax().last_child_or_token().unwrap(); + let end_of_then = + if end_of_then.prev_sibling_or_token().map(|n| n.kind()) == Some(WHITESPACE) { + end_of_then.prev_sibling_or_token().unwrap() + } else { + end_of_then + }; + let mut then_statements = new_expr.children_with_tokens().chain( + then_block_items + .syntax() + .children_with_tokens() + .skip(1) + .take_while(|i| *i != end_of_then), + ); + replace_children( + &parent_block.syntax(), + RangeInclusive::new( + if_expr.clone().syntax().clone().into(), + if_expr.syntax().clone().into(), + ), + &mut then_statements, + ) + } + }, + ) +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_not_applicable}; + + use super::*; + + #[test] + fn convert_inside_fn() { + check_assist( + convert_to_guarded_return, + r#" + fn 
main() { + bar(); + if<|> true { + foo(); + + //comment + bar(); + } + } + "#, + r#" + fn main() { + bar(); + if !true { + return; + } + foo(); + + //comment + bar(); + } + "#, + ); + } + + #[test] + fn convert_let_inside_fn() { + check_assist( + convert_to_guarded_return, + r#" + fn main(n: Option) { + bar(); + if<|> let Some(n) = n { + foo(n); + + //comment + bar(); + } + } + "#, + r#" + fn main(n: Option) { + bar(); + let n = match n { + Some(it) => it, + _ => return, + }; + foo(n); + + //comment + bar(); + } + "#, + ); + } + + #[test] + fn convert_if_let_result() { + check_assist( + convert_to_guarded_return, + r#" + fn main() { + if<|> let Ok(x) = Err(92) { + foo(x); + } + } + "#, + r#" + fn main() { + let x = match Err(92) { + Ok(it) => it, + _ => return, + }; + foo(x); + } + "#, + ); + } + + #[test] + fn convert_let_ok_inside_fn() { + check_assist( + convert_to_guarded_return, + r#" + fn main(n: Option) { + bar(); + if<|> let Ok(n) = n { + foo(n); + + //comment + bar(); + } + } + "#, + r#" + fn main(n: Option) { + bar(); + let n = match n { + Ok(it) => it, + _ => return, + }; + foo(n); + + //comment + bar(); + } + "#, + ); + } + + #[test] + fn convert_inside_while() { + check_assist( + convert_to_guarded_return, + r#" + fn main() { + while true { + if<|> true { + foo(); + bar(); + } + } + } + "#, + r#" + fn main() { + while true { + if !true { + continue; + } + foo(); + bar(); + } + } + "#, + ); + } + + #[test] + fn convert_let_inside_while() { + check_assist( + convert_to_guarded_return, + r#" + fn main() { + while true { + if<|> let Some(n) = n { + foo(n); + bar(); + } + } + } + "#, + r#" + fn main() { + while true { + let n = match n { + Some(it) => it, + _ => continue, + }; + foo(n); + bar(); + } + } + "#, + ); + } + + #[test] + fn convert_inside_loop() { + check_assist( + convert_to_guarded_return, + r#" + fn main() { + loop { + if<|> true { + foo(); + bar(); + } + } + } + "#, + r#" + fn main() { + loop { + if !true { + continue; + } + foo(); + bar(); + 
} + } + "#, + ); + } + + #[test] + fn convert_let_inside_loop() { + check_assist( + convert_to_guarded_return, + r#" + fn main() { + loop { + if<|> let Some(n) = n { + foo(n); + bar(); + } + } + } + "#, + r#" + fn main() { + loop { + let n = match n { + Some(it) => it, + _ => continue, + }; + foo(n); + bar(); + } + } + "#, + ); + } + + #[test] + fn ignore_already_converted_if() { + check_assist_not_applicable( + convert_to_guarded_return, + r#" + fn main() { + if<|> true { + return; + } + } + "#, + ); + } + + #[test] + fn ignore_already_converted_loop() { + check_assist_not_applicable( + convert_to_guarded_return, + r#" + fn main() { + loop { + if<|> true { + continue; + } + } + } + "#, + ); + } + + #[test] + fn ignore_return() { + check_assist_not_applicable( + convert_to_guarded_return, + r#" + fn main() { + if<|> true { + return + } + } + "#, + ); + } + + #[test] + fn ignore_else_branch() { + check_assist_not_applicable( + convert_to_guarded_return, + r#" + fn main() { + if<|> true { + foo(); + } else { + bar() + } + } + "#, + ); + } + + #[test] + fn ignore_statements_aftert_if() { + check_assist_not_applicable( + convert_to_guarded_return, + r#" + fn main() { + if<|> true { + foo(); + } + bar(); + } + "#, + ); + } + + #[test] + fn ignore_statements_inside_if() { + check_assist_not_applicable( + convert_to_guarded_return, + r#" + fn main() { + if false { + if<|> true { + foo(); + } + } + } + "#, + ); + } +} diff --git a/crates/assists/src/handlers/expand_glob_import.rs b/crates/assists/src/handlers/expand_glob_import.rs new file mode 100644 index 0000000000..f690ec343b --- /dev/null +++ b/crates/assists/src/handlers/expand_glob_import.rs @@ -0,0 +1,391 @@ +use hir::{AssocItem, MacroDef, ModuleDef, Name, PathResolution, ScopeDef, SemanticsScope}; +use ide_db::{ + defs::{classify_name_ref, Definition, NameRefClass}, + RootDatabase, +}; +use syntax::{algo, ast, match_ast, AstNode, SyntaxNode, SyntaxToken, T}; + +use crate::{ + assist_context::{AssistBuilder, 
AssistContext, Assists}, + AssistId, AssistKind, +}; + +use either::Either; + +// Assist: expand_glob_import +// +// Expands glob imports. +// +// ``` +// mod foo { +// pub struct Bar; +// pub struct Baz; +// } +// +// use foo::*<|>; +// +// fn qux(bar: Bar, baz: Baz) {} +// ``` +// -> +// ``` +// mod foo { +// pub struct Bar; +// pub struct Baz; +// } +// +// use foo::{Baz, Bar}; +// +// fn qux(bar: Bar, baz: Baz) {} +// ``` +pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + let star = ctx.find_token_at_offset(T![*])?; + let mod_path = find_mod_path(&star)?; + + let source_file = ctx.source_file(); + let scope = ctx.sema.scope_at_offset(source_file.syntax(), ctx.offset()); + + let defs_in_mod = find_defs_in_mod(ctx, scope, &mod_path)?; + let name_refs_in_source_file = + source_file.syntax().descendants().filter_map(ast::NameRef::cast).collect(); + let used_names = find_used_names(ctx, defs_in_mod, name_refs_in_source_file); + + let parent = star.parent().parent()?; + acc.add( + AssistId("expand_glob_import", AssistKind::RefactorRewrite), + "Expand glob import", + parent.text_range(), + |builder| { + replace_ast(builder, &parent, mod_path, used_names); + }, + ) +} + +fn find_mod_path(star: &SyntaxToken) -> Option { + star.ancestors().find_map(|n| ast::UseTree::cast(n).and_then(|u| u.path())) +} + +#[derive(PartialEq)] +enum Def { + ModuleDef(ModuleDef), + MacroDef(MacroDef), +} + +impl Def { + fn name(&self, db: &RootDatabase) -> Option { + match self { + Def::ModuleDef(def) => def.name(db), + Def::MacroDef(def) => def.name(db), + } + } +} + +fn find_defs_in_mod( + ctx: &AssistContext, + from: SemanticsScope<'_>, + path: &ast::Path, +) -> Option> { + let hir_path = ctx.sema.lower_path(&path)?; + let module = if let Some(PathResolution::Def(ModuleDef::Module(module))) = + from.resolve_hir_path_qualifier(&hir_path) + { + module + } else { + return None; + }; + + let module_scope = module.scope(ctx.db(), from.module()); + + let 
mut defs = vec![]; + for (_, def) in module_scope { + match def { + ScopeDef::ModuleDef(def) => defs.push(Def::ModuleDef(def)), + ScopeDef::MacroDef(def) => defs.push(Def::MacroDef(def)), + _ => continue, + } + } + + Some(defs) +} + +fn find_used_names( + ctx: &AssistContext, + defs_in_mod: Vec, + name_refs_in_source_file: Vec, +) -> Vec { + let defs_in_source_file = name_refs_in_source_file + .iter() + .filter_map(|r| classify_name_ref(&ctx.sema, r)) + .filter_map(|rc| match rc { + NameRefClass::Definition(Definition::ModuleDef(def)) => Some(Def::ModuleDef(def)), + NameRefClass::Definition(Definition::Macro(def)) => Some(Def::MacroDef(def)), + _ => None, + }) + .collect::>(); + + defs_in_mod + .iter() + .filter(|def| { + if let Def::ModuleDef(ModuleDef::Trait(tr)) = def { + for item in tr.items(ctx.db()) { + if let AssocItem::Function(f) = item { + if defs_in_source_file.contains(&Def::ModuleDef(ModuleDef::Function(f))) { + return true; + } + } + } + } + + defs_in_source_file.contains(def) + }) + .filter_map(|d| d.name(ctx.db())) + .collect() +} + +fn replace_ast( + builder: &mut AssistBuilder, + node: &SyntaxNode, + path: ast::Path, + used_names: Vec, +) { + let replacement: Either = match used_names.as_slice() { + [name] => Either::Left(ast::make::use_tree( + ast::make::path_from_text(&format!("{}::{}", path, name)), + None, + None, + false, + )), + names => Either::Right(ast::make::use_tree_list(names.iter().map(|n| { + ast::make::use_tree(ast::make::path_from_text(&n.to_string()), None, None, false) + }))), + }; + + let mut replace_node = |replacement: Either| { + algo::diff(node, &replacement.either(|u| u.syntax().clone(), |ut| ut.syntax().clone())) + .into_text_edit(builder.text_edit_builder()); + }; + + match_ast! 
{ + match node { + ast::UseTree(use_tree) => { + replace_node(replacement); + }, + ast::UseTreeList(use_tree_list) => { + replace_node(replacement); + }, + ast::Use(use_item) => { + builder.replace_ast(use_item, ast::make::use_(replacement.left_or_else(|ut| ast::make::use_tree(path, Some(ut), None, false)))); + }, + _ => {}, + } + } +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_not_applicable}; + + use super::*; + + #[test] + fn expanding_glob_import() { + check_assist( + expand_glob_import, + r" +mod foo { + pub struct Bar; + pub struct Baz; + pub struct Qux; + + pub fn f() {} +} + +use foo::*<|>; + +fn qux(bar: Bar, baz: Baz) { + f(); +} +", + r" +mod foo { + pub struct Bar; + pub struct Baz; + pub struct Qux; + + pub fn f() {} +} + +use foo::{Baz, Bar, f}; + +fn qux(bar: Bar, baz: Baz) { + f(); +} +", + ) + } + + #[test] + fn expanding_glob_import_with_existing_explicit_names() { + check_assist( + expand_glob_import, + r" +mod foo { + pub struct Bar; + pub struct Baz; + pub struct Qux; + + pub fn f() {} +} + +use foo::{*<|>, f}; + +fn qux(bar: Bar, baz: Baz) { + f(); +} +", + r" +mod foo { + pub struct Bar; + pub struct Baz; + pub struct Qux; + + pub fn f() {} +} + +use foo::{Baz, Bar, f}; + +fn qux(bar: Bar, baz: Baz) { + f(); +} +", + ) + } + + #[test] + fn expanding_nested_glob_import() { + check_assist( + expand_glob_import, + r" +mod foo { + mod bar { + pub struct Bar; + pub struct Baz; + pub struct Qux; + + pub fn f() {} + } + + mod baz { + pub fn g() {} + } +} + +use foo::{bar::{*<|>, f}, baz::*}; + +fn qux(bar: Bar, baz: Baz) { + f(); + g(); +} +", + r" +mod foo { + mod bar { + pub struct Bar; + pub struct Baz; + pub struct Qux; + + pub fn f() {} + } + + mod baz { + pub fn g() {} + } +} + +use foo::{bar::{Baz, Bar, f}, baz::*}; + +fn qux(bar: Bar, baz: Baz) { + f(); + g(); +} +", + ) + } + + #[test] + fn expanding_glob_import_with_macro_defs() { + check_assist( + expand_glob_import, + r" +//- /lib.rs crate:foo 
+#[macro_export] +macro_rules! bar { + () => () +} + +pub fn baz() {} + +//- /main.rs crate:main deps:foo +use foo::*<|>; + +fn main() { + bar!(); + baz(); +} +", + r" +use foo::{bar, baz}; + +fn main() { + bar!(); + baz(); +} +", + ) + } + + #[test] + fn expanding_glob_import_with_trait_method_uses() { + check_assist( + expand_glob_import, + r" +//- /lib.rs crate:foo +pub trait Tr { + fn method(&self) {} +} +impl Tr for () {} + +//- /main.rs crate:main deps:foo +use foo::*<|>; + +fn main() { + ().method(); +} +", + r" +use foo::Tr; + +fn main() { + ().method(); +} +", + ) + } + + #[test] + fn expanding_is_not_applicable_if_cursor_is_not_in_star_token() { + check_assist_not_applicable( + expand_glob_import, + r" + mod foo { + pub struct Bar; + pub struct Baz; + pub struct Qux; + } + + use foo::Bar<|>; + + fn qux(bar: Bar, baz: Baz) {} + ", + ) + } +} diff --git a/crates/assists/src/handlers/extract_struct_from_enum_variant.rs b/crates/assists/src/handlers/extract_struct_from_enum_variant.rs new file mode 100644 index 0000000000..4bcdae7ba0 --- /dev/null +++ b/crates/assists/src/handlers/extract_struct_from_enum_variant.rs @@ -0,0 +1,317 @@ +use base_db::FileId; +use hir::{EnumVariant, Module, ModuleDef, Name}; +use ide_db::{defs::Definition, search::Reference, RootDatabase}; +use rustc_hash::FxHashSet; +use syntax::{ + algo::find_node_at_offset, + ast::{self, edit::IndentLevel, ArgListOwner, AstNode, NameOwner, VisibilityOwner}, + SourceFile, TextRange, TextSize, +}; + +use crate::{ + assist_context::AssistBuilder, utils::insert_use_statement, AssistContext, AssistId, + AssistKind, Assists, +}; + +// Assist: extract_struct_from_enum_variant +// +// Extracts a struct from enum variant. 
+// +// ``` +// enum A { <|>One(u32, u32) } +// ``` +// -> +// ``` +// struct One(pub u32, pub u32); +// +// enum A { One(One) } +// ``` +pub(crate) fn extract_struct_from_enum_variant( + acc: &mut Assists, + ctx: &AssistContext, +) -> Option<()> { + let variant = ctx.find_node_at_offset::()?; + let field_list = match variant.kind() { + ast::StructKind::Tuple(field_list) => field_list, + _ => return None, + }; + let variant_name = variant.name()?.to_string(); + let variant_hir = ctx.sema.to_def(&variant)?; + if existing_struct_def(ctx.db(), &variant_name, &variant_hir) { + return None; + } + let enum_ast = variant.parent_enum(); + let visibility = enum_ast.visibility(); + let enum_hir = ctx.sema.to_def(&enum_ast)?; + let variant_hir_name = variant_hir.name(ctx.db()); + let enum_module_def = ModuleDef::from(enum_hir); + let current_module = enum_hir.module(ctx.db()); + let target = variant.syntax().text_range(); + acc.add( + AssistId("extract_struct_from_enum_variant", AssistKind::RefactorRewrite), + "Extract struct from enum variant", + target, + |builder| { + let definition = Definition::ModuleDef(ModuleDef::EnumVariant(variant_hir)); + let res = definition.find_usages(&ctx.sema, None); + let start_offset = variant.parent_enum().syntax().text_range().start(); + let mut visited_modules_set = FxHashSet::default(); + visited_modules_set.insert(current_module); + for reference in res { + let source_file = ctx.sema.parse(reference.file_range.file_id); + update_reference( + ctx, + builder, + reference, + &source_file, + &enum_module_def, + &variant_hir_name, + &mut visited_modules_set, + ); + } + extract_struct_def( + builder, + &enum_ast, + &variant_name, + &field_list.to_string(), + start_offset, + ctx.frange.file_id, + &visibility, + ); + let list_range = field_list.syntax().text_range(); + update_variant(builder, &variant_name, ctx.frange.file_id, list_range); + }, + ) +} + +fn existing_struct_def(db: &RootDatabase, variant_name: &str, variant: &EnumVariant) -> bool 
{ + variant + .parent_enum(db) + .module(db) + .scope(db, None) + .into_iter() + .any(|(name, _)| name.to_string() == variant_name.to_string()) +} + +fn insert_import( + ctx: &AssistContext, + builder: &mut AssistBuilder, + path: &ast::PathExpr, + module: &Module, + enum_module_def: &ModuleDef, + variant_hir_name: &Name, +) -> Option<()> { + let db = ctx.db(); + let mod_path = module.find_use_path(db, enum_module_def.clone()); + if let Some(mut mod_path) = mod_path { + mod_path.segments.pop(); + mod_path.segments.push(variant_hir_name.clone()); + insert_use_statement(path.syntax(), &mod_path, ctx, builder.text_edit_builder()); + } + Some(()) +} + +// FIXME: this should use strongly-typed `make`, rather than string manipulation. +fn extract_struct_def( + builder: &mut AssistBuilder, + enum_: &ast::Enum, + variant_name: &str, + variant_list: &str, + start_offset: TextSize, + file_id: FileId, + visibility: &Option, +) -> Option<()> { + let visibility_string = if let Some(visibility) = visibility { + format!("{} ", visibility.to_string()) + } else { + "".to_string() + }; + let indent = IndentLevel::from_node(enum_.syntax()); + let struct_def = format!( + r#"{}struct {}{}; + +{}"#, + visibility_string, + variant_name, + list_with_visibility(variant_list), + indent + ); + builder.edit_file(file_id); + builder.insert(start_offset, struct_def); + Some(()) +} + +fn update_variant( + builder: &mut AssistBuilder, + variant_name: &str, + file_id: FileId, + list_range: TextRange, +) -> Option<()> { + let inside_variant_range = TextRange::new( + list_range.start().checked_add(TextSize::from(1))?, + list_range.end().checked_sub(TextSize::from(1))?, + ); + builder.edit_file(file_id); + builder.replace(inside_variant_range, variant_name); + Some(()) +} + +fn update_reference( + ctx: &AssistContext, + builder: &mut AssistBuilder, + reference: Reference, + source_file: &SourceFile, + enum_module_def: &ModuleDef, + variant_hir_name: &Name, + visited_modules_set: &mut FxHashSet, +) -> 
Option<()> { + let path_expr: ast::PathExpr = find_node_at_offset::( + source_file.syntax(), + reference.file_range.range.start(), + )?; + let call = path_expr.syntax().parent().and_then(ast::CallExpr::cast)?; + let list = call.arg_list()?; + let segment = path_expr.path()?.segment()?; + let module = ctx.sema.scope(&path_expr.syntax()).module()?; + let list_range = list.syntax().text_range(); + let inside_list_range = TextRange::new( + list_range.start().checked_add(TextSize::from(1))?, + list_range.end().checked_sub(TextSize::from(1))?, + ); + builder.edit_file(reference.file_range.file_id); + if !visited_modules_set.contains(&module) { + if insert_import(ctx, builder, &path_expr, &module, enum_module_def, variant_hir_name) + .is_some() + { + visited_modules_set.insert(module); + } + } + builder.replace(inside_list_range, format!("{}{}", segment, list)); + Some(()) +} + +fn list_with_visibility(list: &str) -> String { + list.split(',') + .map(|part| { + let index = if part.chars().next().unwrap() == '(' { 1usize } else { 0 }; + let mut mod_part = part.trim().to_string(); + mod_part.insert_str(index, "pub "); + mod_part + }) + .collect::>() + .join(", ") +} + +#[cfg(test)] +mod tests { + + use crate::{ + tests::{check_assist, check_assist_not_applicable}, + utils::FamousDefs, + }; + + use super::*; + + #[test] + fn test_extract_struct_several_fields() { + check_assist( + extract_struct_from_enum_variant, + "enum A { <|>One(u32, u32) }", + r#"struct One(pub u32, pub u32); + +enum A { One(One) }"#, + ); + } + + #[test] + fn test_extract_struct_one_field() { + check_assist( + extract_struct_from_enum_variant, + "enum A { <|>One(u32) }", + r#"struct One(pub u32); + +enum A { One(One) }"#, + ); + } + + #[test] + fn test_extract_struct_pub_visibility() { + check_assist( + extract_struct_from_enum_variant, + "pub enum A { <|>One(u32, u32) }", + r#"pub struct One(pub u32, pub u32); + +pub enum A { One(One) }"#, + ); + } + + #[test] + fn 
test_extract_struct_with_complex_imports() { + check_assist( + extract_struct_from_enum_variant, + r#"mod my_mod { + fn another_fn() { + let m = my_other_mod::MyEnum::MyField(1, 1); + } + + pub mod my_other_mod { + fn another_fn() { + let m = MyEnum::MyField(1, 1); + } + + pub enum MyEnum { + <|>MyField(u8, u8), + } + } +} + +fn another_fn() { + let m = my_mod::my_other_mod::MyEnum::MyField(1, 1); +}"#, + r#"use my_mod::my_other_mod::MyField; + +mod my_mod { + use my_other_mod::MyField; + + fn another_fn() { + let m = my_other_mod::MyEnum::MyField(MyField(1, 1)); + } + + pub mod my_other_mod { + fn another_fn() { + let m = MyEnum::MyField(MyField(1, 1)); + } + + pub struct MyField(pub u8, pub u8); + + pub enum MyEnum { + MyField(MyField), + } + } +} + +fn another_fn() { + let m = my_mod::my_other_mod::MyEnum::MyField(MyField(1, 1)); +}"#, + ); + } + + fn check_not_applicable(ra_fixture: &str) { + let fixture = + format!("//- /main.rs crate:main deps:core\n{}\n{}", ra_fixture, FamousDefs::FIXTURE); + check_assist_not_applicable(extract_struct_from_enum_variant, &fixture) + } + + #[test] + fn test_extract_enum_not_applicable_for_element_with_no_fields() { + check_not_applicable("enum A { <|>One }"); + } + + #[test] + fn test_extract_enum_not_applicable_if_struct_exists() { + check_not_applicable( + r#"struct One; + enum A { <|>One(u8) }"#, + ); + } +} diff --git a/crates/assists/src/handlers/extract_variable.rs b/crates/assists/src/handlers/extract_variable.rs new file mode 100644 index 0000000000..d2ae137cdd --- /dev/null +++ b/crates/assists/src/handlers/extract_variable.rs @@ -0,0 +1,588 @@ +use stdx::format_to; +use syntax::{ + ast::{self, AstNode}, + SyntaxKind::{ + BLOCK_EXPR, BREAK_EXPR, CLOSURE_EXPR, COMMENT, LOOP_EXPR, MATCH_ARM, PATH_EXPR, RETURN_EXPR, + }, + SyntaxNode, +}; +use test_utils::mark; + +use crate::{AssistContext, AssistId, AssistKind, Assists}; + +// Assist: extract_variable +// +// Extracts subexpression into a variable. 
+// +// ``` +// fn main() { +// <|>(1 + 2)<|> * 4; +// } +// ``` +// -> +// ``` +// fn main() { +// let $0var_name = (1 + 2); +// var_name * 4; +// } +// ``` +pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + if ctx.frange.range.is_empty() { + return None; + } + let node = ctx.covering_element(); + if node.kind() == COMMENT { + mark::hit!(extract_var_in_comment_is_not_applicable); + return None; + } + let to_extract = node.ancestors().find_map(valid_target_expr)?; + let anchor = Anchor::from(&to_extract)?; + let indent = anchor.syntax().prev_sibling_or_token()?.as_token()?.clone(); + let target = to_extract.syntax().text_range(); + acc.add( + AssistId("extract_variable", AssistKind::RefactorExtract), + "Extract into variable", + target, + move |edit| { + let field_shorthand = + match to_extract.syntax().parent().and_then(ast::RecordExprField::cast) { + Some(field) => field.name_ref(), + None => None, + }; + + let mut buf = String::new(); + + let var_name = match &field_shorthand { + Some(it) => it.to_string(), + None => "var_name".to_string(), + }; + let expr_range = match &field_shorthand { + Some(it) => it.syntax().text_range().cover(to_extract.syntax().text_range()), + None => to_extract.syntax().text_range(), + }; + + if let Anchor::WrapInBlock(_) = anchor { + format_to!(buf, "{{ let {} = ", var_name); + } else { + format_to!(buf, "let {} = ", var_name); + }; + format_to!(buf, "{}", to_extract.syntax()); + + if let Anchor::Replace(stmt) = anchor { + mark::hit!(test_extract_var_expr_stmt); + if stmt.semicolon_token().is_none() { + buf.push_str(";"); + } + match ctx.config.snippet_cap { + Some(cap) => { + let snip = buf + .replace(&format!("let {}", var_name), &format!("let $0{}", var_name)); + edit.replace_snippet(cap, expr_range, snip) + } + None => edit.replace(expr_range, buf), + } + return; + } + + buf.push_str(";"); + + // We want to maintain the indent level, + // but we do not want to duplicate possible + // extra 
newlines in the indent block + let text = indent.text(); + if text.starts_with('\n') { + buf.push_str("\n"); + buf.push_str(text.trim_start_matches('\n')); + } else { + buf.push_str(text); + } + + edit.replace(expr_range, var_name.clone()); + let offset = anchor.syntax().text_range().start(); + match ctx.config.snippet_cap { + Some(cap) => { + let snip = + buf.replace(&format!("let {}", var_name), &format!("let $0{}", var_name)); + edit.insert_snippet(cap, offset, snip) + } + None => edit.insert(offset, buf), + } + + if let Anchor::WrapInBlock(_) = anchor { + edit.insert(anchor.syntax().text_range().end(), " }"); + } + }, + ) +} + +/// Check whether the node is a valid expression which can be extracted to a variable. +/// In general that's true for any expression, but in some cases that would produce invalid code. +fn valid_target_expr(node: SyntaxNode) -> Option { + match node.kind() { + PATH_EXPR | LOOP_EXPR => None, + BREAK_EXPR => ast::BreakExpr::cast(node).and_then(|e| e.expr()), + RETURN_EXPR => ast::ReturnExpr::cast(node).and_then(|e| e.expr()), + BLOCK_EXPR => { + ast::BlockExpr::cast(node).filter(|it| it.is_standalone()).map(ast::Expr::from) + } + _ => ast::Expr::cast(node), + } +} + +enum Anchor { + Before(SyntaxNode), + Replace(ast::ExprStmt), + WrapInBlock(SyntaxNode), +} + +impl Anchor { + fn from(to_extract: &ast::Expr) -> Option { + to_extract.syntax().ancestors().find_map(|node| { + if let Some(expr) = + node.parent().and_then(ast::BlockExpr::cast).and_then(|it| it.expr()) + { + if expr.syntax() == &node { + mark::hit!(test_extract_var_last_expr); + return Some(Anchor::Before(node)); + } + } + + if let Some(parent) = node.parent() { + if parent.kind() == MATCH_ARM || parent.kind() == CLOSURE_EXPR { + return Some(Anchor::WrapInBlock(node)); + } + } + + if let Some(stmt) = ast::Stmt::cast(node.clone()) { + if let ast::Stmt::ExprStmt(stmt) = stmt { + if stmt.expr().as_ref() == Some(to_extract) { + return Some(Anchor::Replace(stmt)); + } + } + return 
Some(Anchor::Before(node)); + } + None + }) + } + + fn syntax(&self) -> &SyntaxNode { + match self { + Anchor::Before(it) | Anchor::WrapInBlock(it) => it, + Anchor::Replace(stmt) => stmt.syntax(), + } + } +} + +#[cfg(test)] +mod tests { + use test_utils::mark; + + use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target}; + + use super::*; + + #[test] + fn test_extract_var_simple() { + check_assist( + extract_variable, + r#" +fn foo() { + foo(<|>1 + 1<|>); +}"#, + r#" +fn foo() { + let $0var_name = 1 + 1; + foo(var_name); +}"#, + ); + } + + #[test] + fn extract_var_in_comment_is_not_applicable() { + mark::check!(extract_var_in_comment_is_not_applicable); + check_assist_not_applicable(extract_variable, "fn main() { 1 + /* <|>comment<|> */ 1; }"); + } + + #[test] + fn test_extract_var_expr_stmt() { + mark::check!(test_extract_var_expr_stmt); + check_assist( + extract_variable, + r#" +fn foo() { + <|>1 + 1<|>; +}"#, + r#" +fn foo() { + let $0var_name = 1 + 1; +}"#, + ); + check_assist( + extract_variable, + " +fn foo() { + <|>{ let x = 0; x }<|> + something_else(); +}", + " +fn foo() { + let $0var_name = { let x = 0; x }; + something_else(); +}", + ); + } + + #[test] + fn test_extract_var_part_of_expr_stmt() { + check_assist( + extract_variable, + " +fn foo() { + <|>1<|> + 1; +}", + " +fn foo() { + let $0var_name = 1; + var_name + 1; +}", + ); + } + + #[test] + fn test_extract_var_last_expr() { + mark::check!(test_extract_var_last_expr); + check_assist( + extract_variable, + r#" +fn foo() { + bar(<|>1 + 1<|>) +} +"#, + r#" +fn foo() { + let $0var_name = 1 + 1; + bar(var_name) +} +"#, + ); + check_assist( + extract_variable, + r#" +fn foo() { + <|>bar(1 + 1)<|> +} +"#, + r#" +fn foo() { + let $0var_name = bar(1 + 1); + var_name +} +"#, + ) + } + + #[test] + fn test_extract_var_in_match_arm_no_block() { + check_assist( + extract_variable, + " +fn main() { + let x = true; + let tuple = match x { + true => (<|>2 + 2<|>, true) + _ => (0, false) + 
}; +} +", + " +fn main() { + let x = true; + let tuple = match x { + true => { let $0var_name = 2 + 2; (var_name, true) } + _ => (0, false) + }; +} +", + ); + } + + #[test] + fn test_extract_var_in_match_arm_with_block() { + check_assist( + extract_variable, + " +fn main() { + let x = true; + let tuple = match x { + true => { + let y = 1; + (<|>2 + y<|>, true) + } + _ => (0, false) + }; +} +", + " +fn main() { + let x = true; + let tuple = match x { + true => { + let y = 1; + let $0var_name = 2 + y; + (var_name, true) + } + _ => (0, false) + }; +} +", + ); + } + + #[test] + fn test_extract_var_in_closure_no_block() { + check_assist( + extract_variable, + " +fn main() { + let lambda = |x: u32| <|>x * 2<|>; +} +", + " +fn main() { + let lambda = |x: u32| { let $0var_name = x * 2; var_name }; +} +", + ); + } + + #[test] + fn test_extract_var_in_closure_with_block() { + check_assist( + extract_variable, + " +fn main() { + let lambda = |x: u32| { <|>x * 2<|> }; +} +", + " +fn main() { + let lambda = |x: u32| { let $0var_name = x * 2; var_name }; +} +", + ); + } + + #[test] + fn test_extract_var_path_simple() { + check_assist( + extract_variable, + " +fn main() { + let o = <|>Some(true)<|>; +} +", + " +fn main() { + let $0var_name = Some(true); + let o = var_name; +} +", + ); + } + + #[test] + fn test_extract_var_path_method() { + check_assist( + extract_variable, + " +fn main() { + let v = <|>bar.foo()<|>; +} +", + " +fn main() { + let $0var_name = bar.foo(); + let v = var_name; +} +", + ); + } + + #[test] + fn test_extract_var_return() { + check_assist( + extract_variable, + " +fn foo() -> u32 { + <|>return 2 + 2<|>; +} +", + " +fn foo() -> u32 { + let $0var_name = 2 + 2; + return var_name; +} +", + ); + } + + #[test] + fn test_extract_var_does_not_add_extra_whitespace() { + check_assist( + extract_variable, + " +fn foo() -> u32 { + + + <|>return 2 + 2<|>; +} +", + " +fn foo() -> u32 { + + + let $0var_name = 2 + 2; + return var_name; +} +", + ); + + check_assist( + 
extract_variable, + " +fn foo() -> u32 { + + <|>return 2 + 2<|>; +} +", + " +fn foo() -> u32 { + + let $0var_name = 2 + 2; + return var_name; +} +", + ); + + check_assist( + extract_variable, + " +fn foo() -> u32 { + let foo = 1; + + // bar + + + <|>return 2 + 2<|>; +} +", + " +fn foo() -> u32 { + let foo = 1; + + // bar + + + let $0var_name = 2 + 2; + return var_name; +} +", + ); + } + + #[test] + fn test_extract_var_break() { + check_assist( + extract_variable, + " +fn main() { + let result = loop { + <|>break 2 + 2<|>; + }; +} +", + " +fn main() { + let result = loop { + let $0var_name = 2 + 2; + break var_name; + }; +} +", + ); + } + + #[test] + fn test_extract_var_for_cast() { + check_assist( + extract_variable, + " +fn main() { + let v = <|>0f32 as u32<|>; +} +", + " +fn main() { + let $0var_name = 0f32 as u32; + let v = var_name; +} +", + ); + } + + #[test] + fn extract_var_field_shorthand() { + check_assist( + extract_variable, + r#" +struct S { + foo: i32 +} + +fn main() { + S { foo: <|>1 + 1<|> } +} +"#, + r#" +struct S { + foo: i32 +} + +fn main() { + let $0foo = 1 + 1; + S { foo } +} +"#, + ) + } + + #[test] + fn test_extract_var_for_return_not_applicable() { + check_assist_not_applicable(extract_variable, "fn foo() { <|>return<|>; } "); + } + + #[test] + fn test_extract_var_for_break_not_applicable() { + check_assist_not_applicable(extract_variable, "fn main() { loop { <|>break<|>; }; }"); + } + + // FIXME: This is not quite correct, but good enough(tm) for the sorting heuristic + #[test] + fn extract_var_target() { + check_assist_target(extract_variable, "fn foo() -> u32 { <|>return 2 + 2<|>; }", "2 + 2"); + + check_assist_target( + extract_variable, + " +fn main() { + let x = true; + let tuple = match x { + true => (<|>2 + 2<|>, true) + _ => (0, false) + }; +} +", + "2 + 2", + ); + } +} diff --git a/crates/assists/src/handlers/fill_match_arms.rs b/crates/assists/src/handlers/fill_match_arms.rs new file mode 100644 index 0000000000..3d9bdb2bf7 --- 
/dev/null +++ b/crates/assists/src/handlers/fill_match_arms.rs @@ -0,0 +1,747 @@ +use std::iter; + +use hir::{Adt, HasSource, ModuleDef, Semantics}; +use ide_db::RootDatabase; +use itertools::Itertools; +use syntax::ast::{self, make, AstNode, MatchArm, NameOwner, Pat}; +use test_utils::mark; + +use crate::{ + utils::{render_snippet, Cursor, FamousDefs}, + AssistContext, AssistId, AssistKind, Assists, +}; + +// Assist: fill_match_arms +// +// Adds missing clauses to a `match` expression. +// +// ``` +// enum Action { Move { distance: u32 }, Stop } +// +// fn handle(action: Action) { +// match action { +// <|> +// } +// } +// ``` +// -> +// ``` +// enum Action { Move { distance: u32 }, Stop } +// +// fn handle(action: Action) { +// match action { +// $0Action::Move { distance } => {} +// Action::Stop => {} +// } +// } +// ``` +pub(crate) fn fill_match_arms(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + let match_expr = ctx.find_node_at_offset::()?; + let match_arm_list = match_expr.match_arm_list()?; + + let expr = match_expr.expr()?; + + let mut arms: Vec = match_arm_list.arms().collect(); + if arms.len() == 1 { + if let Some(Pat::WildcardPat(..)) = arms[0].pat() { + arms.clear(); + } + } + + let module = ctx.sema.scope(expr.syntax()).module()?; + + let missing_arms: Vec = if let Some(enum_def) = resolve_enum_def(&ctx.sema, &expr) { + let variants = enum_def.variants(ctx.db()); + + let mut variants = variants + .into_iter() + .filter_map(|variant| build_pat(ctx.db(), module, variant)) + .filter(|variant_pat| is_variant_missing(&mut arms, variant_pat)) + .map(|pat| make::match_arm(iter::once(pat), make::expr_empty_block())) + .collect::>(); + if Some(enum_def) == FamousDefs(&ctx.sema, module.krate()).core_option_Option() { + // Match `Some` variant first. 
+ mark::hit!(option_order); + variants.reverse() + } + variants + } else if let Some(enum_defs) = resolve_tuple_of_enum_def(&ctx.sema, &expr) { + // Partial fill not currently supported for tuple of enums. + if !arms.is_empty() { + return None; + } + + // We do not currently support filling match arms for a tuple + // containing a single enum. + if enum_defs.len() < 2 { + return None; + } + + // When calculating the match arms for a tuple of enums, we want + // to create a match arm for each possible combination of enum + // values. The `multi_cartesian_product` method transforms + // Vec> into Vec<(EnumVariant, .., EnumVariant)> + // where each tuple represents a proposed match arm. + enum_defs + .into_iter() + .map(|enum_def| enum_def.variants(ctx.db())) + .multi_cartesian_product() + .map(|variants| { + let patterns = + variants.into_iter().filter_map(|variant| build_pat(ctx.db(), module, variant)); + ast::Pat::from(make::tuple_pat(patterns)) + }) + .filter(|variant_pat| is_variant_missing(&mut arms, variant_pat)) + .map(|pat| make::match_arm(iter::once(pat), make::expr_empty_block())) + .collect() + } else { + return None; + }; + + if missing_arms.is_empty() { + return None; + } + + let target = match_expr.syntax().text_range(); + acc.add( + AssistId("fill_match_arms", AssistKind::QuickFix), + "Fill match arms", + target, + |builder| { + let new_arm_list = match_arm_list.remove_placeholder(); + let n_old_arms = new_arm_list.arms().count(); + let new_arm_list = new_arm_list.append_arms(missing_arms); + let first_new_arm = new_arm_list.arms().nth(n_old_arms); + let old_range = match_arm_list.syntax().text_range(); + match (first_new_arm, ctx.config.snippet_cap) { + (Some(first_new_arm), Some(cap)) => { + let extend_lifetime; + let cursor = + match first_new_arm.syntax().descendants().find_map(ast::WildcardPat::cast) + { + Some(it) => { + extend_lifetime = it.syntax().clone(); + Cursor::Replace(&extend_lifetime) + } + None => 
Cursor::Before(first_new_arm.syntax()), + }; + let snippet = render_snippet(cap, new_arm_list.syntax(), cursor); + builder.replace_snippet(cap, old_range, snippet); + } + _ => builder.replace(old_range, new_arm_list.to_string()), + } + }, + ) +} + +fn is_variant_missing(existing_arms: &mut Vec, var: &Pat) -> bool { + existing_arms.iter().filter_map(|arm| arm.pat()).all(|pat| { + // Special casee OrPat as separate top-level pats + let top_level_pats: Vec = match pat { + Pat::OrPat(pats) => pats.pats().collect::>(), + _ => vec![pat], + }; + + !top_level_pats.iter().any(|pat| does_pat_match_variant(pat, var)) + }) +} + +fn does_pat_match_variant(pat: &Pat, var: &Pat) -> bool { + let first_node_text = |pat: &Pat| pat.syntax().first_child().map(|node| node.text()); + + let pat_head = match pat { + Pat::IdentPat(bind_pat) => { + if let Some(p) = bind_pat.pat() { + first_node_text(&p) + } else { + return false; + } + } + pat => first_node_text(pat), + }; + + let var_head = first_node_text(var); + + pat_head == var_head +} + +fn resolve_enum_def(sema: &Semantics, expr: &ast::Expr) -> Option { + sema.type_of_expr(&expr)?.autoderef(sema.db).find_map(|ty| match ty.as_adt() { + Some(Adt::Enum(e)) => Some(e), + _ => None, + }) +} + +fn resolve_tuple_of_enum_def( + sema: &Semantics, + expr: &ast::Expr, +) -> Option> { + sema.type_of_expr(&expr)? + .tuple_fields(sema.db) + .iter() + .map(|ty| { + ty.autoderef(sema.db).find_map(|ty| match ty.as_adt() { + Some(Adt::Enum(e)) => Some(e), + // For now we only handle expansion for a tuple of enums. Here + // we map non-enum items to None and rely on `collect` to + // convert Vec> into Option>. + _ => None, + }) + }) + .collect() +} + +fn build_pat(db: &RootDatabase, module: hir::Module, var: hir::EnumVariant) -> Option { + let path = crate::ast_transform::path_to_ast(module.find_use_path(db, ModuleDef::from(var))?); + + // FIXME: use HIR for this; it doesn't currently expose struct vs. tuple vs. 
unit variants though + let pat: ast::Pat = match var.source(db).value.kind() { + ast::StructKind::Tuple(field_list) => { + let pats = iter::repeat(make::wildcard_pat().into()).take(field_list.fields().count()); + make::tuple_struct_pat(path, pats).into() + } + ast::StructKind::Record(field_list) => { + let pats = field_list.fields().map(|f| make::ident_pat(f.name().unwrap()).into()); + make::record_pat(path, pats).into() + } + ast::StructKind::Unit => make::path_pat(path), + }; + + Some(pat) +} + +#[cfg(test)] +mod tests { + use test_utils::mark; + + use crate::{ + tests::{check_assist, check_assist_not_applicable, check_assist_target}, + utils::FamousDefs, + }; + + use super::fill_match_arms; + + #[test] + fn all_match_arms_provided() { + check_assist_not_applicable( + fill_match_arms, + r#" + enum A { + As, + Bs{x:i32, y:Option}, + Cs(i32, Option), + } + fn main() { + match A::As<|> { + A::As, + A::Bs{x,y:Some(_)} => {} + A::Cs(_, Some(_)) => {} + } + } + "#, + ); + } + + #[test] + fn tuple_of_non_enum() { + // for now this case is not handled, although it potentially could be + // in the future + check_assist_not_applicable( + fill_match_arms, + r#" + fn main() { + match (0, false)<|> { + } + } + "#, + ); + } + + #[test] + fn partial_fill_record_tuple() { + check_assist( + fill_match_arms, + r#" + enum A { + As, + Bs { x: i32, y: Option }, + Cs(i32, Option), + } + fn main() { + match A::As<|> { + A::Bs { x, y: Some(_) } => {} + A::Cs(_, Some(_)) => {} + } + } + "#, + r#" + enum A { + As, + Bs { x: i32, y: Option }, + Cs(i32, Option), + } + fn main() { + match A::As { + A::Bs { x, y: Some(_) } => {} + A::Cs(_, Some(_)) => {} + $0A::As => {} + } + } + "#, + ); + } + + #[test] + fn partial_fill_or_pat() { + check_assist( + fill_match_arms, + r#" +enum A { As, Bs, Cs(Option) } +fn main() { + match A::As<|> { + A::Cs(_) | A::Bs => {} + } +} +"#, + r#" +enum A { As, Bs, Cs(Option) } +fn main() { + match A::As { + A::Cs(_) | A::Bs => {} + $0A::As => {} + } +} +"#, + ); 
+ } + + #[test] + fn partial_fill() { + check_assist( + fill_match_arms, + r#" +enum A { As, Bs, Cs, Ds(String), Es(B) } +enum B { Xs, Ys } +fn main() { + match A::As<|> { + A::Bs if 0 < 1 => {} + A::Ds(_value) => { let x = 1; } + A::Es(B::Xs) => (), + } +} +"#, + r#" +enum A { As, Bs, Cs, Ds(String), Es(B) } +enum B { Xs, Ys } +fn main() { + match A::As { + A::Bs if 0 < 1 => {} + A::Ds(_value) => { let x = 1; } + A::Es(B::Xs) => (), + $0A::As => {} + A::Cs => {} + } +} +"#, + ); + } + + #[test] + fn partial_fill_bind_pat() { + check_assist( + fill_match_arms, + r#" +enum A { As, Bs, Cs(Option) } +fn main() { + match A::As<|> { + A::As(_) => {} + a @ A::Bs(_) => {} + } +} +"#, + r#" +enum A { As, Bs, Cs(Option) } +fn main() { + match A::As { + A::As(_) => {} + a @ A::Bs(_) => {} + A::Cs(${0:_}) => {} + } +} +"#, + ); + } + + #[test] + fn fill_match_arms_empty_body() { + check_assist( + fill_match_arms, + r#" +enum A { As, Bs, Cs(String), Ds(String, String), Es { x: usize, y: usize } } + +fn main() { + let a = A::As; + match a<|> {} +} +"#, + r#" +enum A { As, Bs, Cs(String), Ds(String, String), Es { x: usize, y: usize } } + +fn main() { + let a = A::As; + match a { + $0A::As => {} + A::Bs => {} + A::Cs(_) => {} + A::Ds(_, _) => {} + A::Es { x, y } => {} + } +} +"#, + ); + } + + #[test] + fn fill_match_arms_tuple_of_enum() { + check_assist( + fill_match_arms, + r#" + enum A { One, Two } + enum B { One, Two } + + fn main() { + let a = A::One; + let b = B::One; + match (a<|>, b) {} + } + "#, + r#" + enum A { One, Two } + enum B { One, Two } + + fn main() { + let a = A::One; + let b = B::One; + match (a, b) { + $0(A::One, B::One) => {} + (A::One, B::Two) => {} + (A::Two, B::One) => {} + (A::Two, B::Two) => {} + } + } + "#, + ); + } + + #[test] + fn fill_match_arms_tuple_of_enum_ref() { + check_assist( + fill_match_arms, + r#" + enum A { One, Two } + enum B { One, Two } + + fn main() { + let a = A::One; + let b = B::One; + match (&a<|>, &b) {} + } + "#, + r#" + enum A { 
One, Two } + enum B { One, Two } + + fn main() { + let a = A::One; + let b = B::One; + match (&a, &b) { + $0(A::One, B::One) => {} + (A::One, B::Two) => {} + (A::Two, B::One) => {} + (A::Two, B::Two) => {} + } + } + "#, + ); + } + + #[test] + fn fill_match_arms_tuple_of_enum_partial() { + check_assist_not_applicable( + fill_match_arms, + r#" + enum A { One, Two } + enum B { One, Two } + + fn main() { + let a = A::One; + let b = B::One; + match (a<|>, b) { + (A::Two, B::One) => {} + } + } + "#, + ); + } + + #[test] + fn fill_match_arms_tuple_of_enum_not_applicable() { + check_assist_not_applicable( + fill_match_arms, + r#" + enum A { One, Two } + enum B { One, Two } + + fn main() { + let a = A::One; + let b = B::One; + match (a<|>, b) { + (A::Two, B::One) => {} + (A::One, B::One) => {} + (A::One, B::Two) => {} + (A::Two, B::Two) => {} + } + } + "#, + ); + } + + #[test] + fn fill_match_arms_single_element_tuple_of_enum() { + // For now we don't hande the case of a single element tuple, but + // we could handle this in the future if `make::tuple_pat` allowed + // creating a tuple with a single pattern. 
+ check_assist_not_applicable( + fill_match_arms, + r#" + enum A { One, Two } + + fn main() { + let a = A::One; + match (a<|>, ) { + } + } + "#, + ); + } + + #[test] + fn test_fill_match_arm_refs() { + check_assist( + fill_match_arms, + r#" + enum A { As } + + fn foo(a: &A) { + match a<|> { + } + } + "#, + r#" + enum A { As } + + fn foo(a: &A) { + match a { + $0A::As => {} + } + } + "#, + ); + + check_assist( + fill_match_arms, + r#" + enum A { + Es { x: usize, y: usize } + } + + fn foo(a: &mut A) { + match a<|> { + } + } + "#, + r#" + enum A { + Es { x: usize, y: usize } + } + + fn foo(a: &mut A) { + match a { + $0A::Es { x, y } => {} + } + } + "#, + ); + } + + #[test] + fn fill_match_arms_target() { + check_assist_target( + fill_match_arms, + r#" + enum E { X, Y } + + fn main() { + match E::X<|> {} + } + "#, + "match E::X {}", + ); + } + + #[test] + fn fill_match_arms_trivial_arm() { + check_assist( + fill_match_arms, + r#" + enum E { X, Y } + + fn main() { + match E::X { + <|>_ => {} + } + } + "#, + r#" + enum E { X, Y } + + fn main() { + match E::X { + $0E::X => {} + E::Y => {} + } + } + "#, + ); + } + + #[test] + fn fill_match_arms_qualifies_path() { + check_assist( + fill_match_arms, + r#" + mod foo { pub enum E { X, Y } } + use foo::E::X; + + fn main() { + match X { + <|> + } + } + "#, + r#" + mod foo { pub enum E { X, Y } } + use foo::E::X; + + fn main() { + match X { + $0X => {} + foo::E::Y => {} + } + } + "#, + ); + } + + #[test] + fn fill_match_arms_preserves_comments() { + check_assist( + fill_match_arms, + r#" + enum A { One, Two } + fn foo(a: A) { + match a { + // foo bar baz<|> + A::One => {} + // This is where the rest should be + } + } + "#, + r#" + enum A { One, Two } + fn foo(a: A) { + match a { + // foo bar baz + A::One => {} + // This is where the rest should be + $0A::Two => {} + } + } + "#, + ); + } + + #[test] + fn fill_match_arms_preserves_comments_empty() { + check_assist( + fill_match_arms, + r#" + enum A { One, Two } + fn foo(a: A) { + 
match a { + // foo bar baz<|> + } + } + "#, + r#" + enum A { One, Two } + fn foo(a: A) { + match a { + // foo bar baz + $0A::One => {} + A::Two => {} + } + } + "#, + ); + } + + #[test] + fn fill_match_arms_placeholder() { + check_assist( + fill_match_arms, + r#" + enum A { One, Two, } + fn foo(a: A) { + match a<|> { + _ => (), + } + } + "#, + r#" + enum A { One, Two, } + fn foo(a: A) { + match a { + $0A::One => {} + A::Two => {} + } + } + "#, + ); + } + + #[test] + fn option_order() { + mark::check!(option_order); + let before = r#" +fn foo(opt: Option) { + match opt<|> { + } +} +"#; + let before = &format!("//- /main.rs crate:main deps:core{}{}", before, FamousDefs::FIXTURE); + + check_assist( + fill_match_arms, + before, + r#" +fn foo(opt: Option) { + match opt { + Some(${0:_}) => {} + None => {} + } +} +"#, + ); + } +} diff --git a/crates/assists/src/handlers/fix_visibility.rs b/crates/assists/src/handlers/fix_visibility.rs new file mode 100644 index 0000000000..7cd76ea065 --- /dev/null +++ b/crates/assists/src/handlers/fix_visibility.rs @@ -0,0 +1,607 @@ +use base_db::FileId; +use hir::{db::HirDatabase, HasSource, HasVisibility, PathResolution}; +use syntax::{ast, AstNode, TextRange, TextSize}; + +use crate::{utils::vis_offset, AssistContext, AssistId, AssistKind, Assists}; +use ast::VisibilityOwner; + +// FIXME: this really should be a fix for diagnostic, rather than an assist. + +// Assist: fix_visibility +// +// Makes inaccessible item public. 
+// +// ``` +// mod m { +// fn frobnicate() {} +// } +// fn main() { +// m::frobnicate<|>() {} +// } +// ``` +// -> +// ``` +// mod m { +// $0pub(crate) fn frobnicate() {} +// } +// fn main() { +// m::frobnicate() {} +// } +// ``` +pub(crate) fn fix_visibility(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + add_vis_to_referenced_module_def(acc, ctx) + .or_else(|| add_vis_to_referenced_record_field(acc, ctx)) +} + +fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + let path: ast::Path = ctx.find_node_at_offset()?; + let path_res = ctx.sema.resolve_path(&path)?; + let def = match path_res { + PathResolution::Def(def) => def, + _ => return None, + }; + + let current_module = ctx.sema.scope(&path.syntax()).module()?; + let target_module = def.module(ctx.db())?; + + let vis = target_module.visibility_of(ctx.db(), &def)?; + if vis.is_visible_from(ctx.db(), current_module.into()) { + return None; + }; + + let (offset, current_visibility, target, target_file, target_name) = + target_data_for_def(ctx.db(), def)?; + + let missing_visibility = + if current_module.krate() == target_module.krate() { "pub(crate)" } else { "pub" }; + + let assist_label = match target_name { + None => format!("Change visibility to {}", missing_visibility), + Some(name) => format!("Change visibility of {} to {}", name, missing_visibility), + }; + + acc.add(AssistId("fix_visibility", AssistKind::QuickFix), assist_label, target, |builder| { + builder.edit_file(target_file); + match ctx.config.snippet_cap { + Some(cap) => match current_visibility { + Some(current_visibility) => builder.replace_snippet( + cap, + current_visibility.syntax().text_range(), + format!("$0{}", missing_visibility), + ), + None => builder.insert_snippet(cap, offset, format!("$0{} ", missing_visibility)), + }, + None => match current_visibility { + Some(current_visibility) => { + builder.replace(current_visibility.syntax().text_range(), missing_visibility) + } + None => 
builder.insert(offset, format!("{} ", missing_visibility)), + }, + } + }) +} + +fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + let record_field: ast::RecordExprField = ctx.find_node_at_offset()?; + let (record_field_def, _) = ctx.sema.resolve_record_field(&record_field)?; + + let current_module = ctx.sema.scope(record_field.syntax()).module()?; + let visibility = record_field_def.visibility(ctx.db()); + if visibility.is_visible_from(ctx.db(), current_module.into()) { + return None; + } + + let parent = record_field_def.parent_def(ctx.db()); + let parent_name = parent.name(ctx.db()); + let target_module = parent.module(ctx.db()); + + let in_file_source = record_field_def.source(ctx.db()); + let (offset, current_visibility, target) = match in_file_source.value { + hir::FieldSource::Named(it) => { + let s = it.syntax(); + (vis_offset(s), it.visibility(), s.text_range()) + } + hir::FieldSource::Pos(it) => { + let s = it.syntax(); + (vis_offset(s), it.visibility(), s.text_range()) + } + }; + + let missing_visibility = + if current_module.krate() == target_module.krate() { "pub(crate)" } else { "pub" }; + let target_file = in_file_source.file_id.original_file(ctx.db()); + + let target_name = record_field_def.name(ctx.db()); + let assist_label = + format!("Change visibility of {}.{} to {}", parent_name, target_name, missing_visibility); + + acc.add(AssistId("fix_visibility", AssistKind::QuickFix), assist_label, target, |builder| { + builder.edit_file(target_file); + match ctx.config.snippet_cap { + Some(cap) => match current_visibility { + Some(current_visibility) => builder.replace_snippet( + cap, + current_visibility.syntax().text_range(), + format!("$0{}", missing_visibility), + ), + None => builder.insert_snippet(cap, offset, format!("$0{} ", missing_visibility)), + }, + None => match current_visibility { + Some(current_visibility) => { + builder.replace(current_visibility.syntax().text_range(), missing_visibility) + } 
+ None => builder.insert(offset, format!("{} ", missing_visibility)), + }, + } + }) +} + +fn target_data_for_def( + db: &dyn HirDatabase, + def: hir::ModuleDef, +) -> Option<(TextSize, Option, TextRange, FileId, Option)> { + fn offset_target_and_file_id( + db: &dyn HirDatabase, + x: S, + ) -> (TextSize, Option, TextRange, FileId) + where + S: HasSource, + Ast: AstNode + ast::VisibilityOwner, + { + let source = x.source(db); + let in_file_syntax = source.syntax(); + let file_id = in_file_syntax.file_id; + let syntax = in_file_syntax.value; + let current_visibility = source.value.visibility(); + ( + vis_offset(syntax), + current_visibility, + syntax.text_range(), + file_id.original_file(db.upcast()), + ) + } + + let target_name; + let (offset, current_visibility, target, target_file) = match def { + hir::ModuleDef::Function(f) => { + target_name = Some(f.name(db)); + offset_target_and_file_id(db, f) + } + hir::ModuleDef::Adt(adt) => { + target_name = Some(adt.name(db)); + match adt { + hir::Adt::Struct(s) => offset_target_and_file_id(db, s), + hir::Adt::Union(u) => offset_target_and_file_id(db, u), + hir::Adt::Enum(e) => offset_target_and_file_id(db, e), + } + } + hir::ModuleDef::Const(c) => { + target_name = c.name(db); + offset_target_and_file_id(db, c) + } + hir::ModuleDef::Static(s) => { + target_name = s.name(db); + offset_target_and_file_id(db, s) + } + hir::ModuleDef::Trait(t) => { + target_name = Some(t.name(db)); + offset_target_and_file_id(db, t) + } + hir::ModuleDef::TypeAlias(t) => { + target_name = Some(t.name(db)); + offset_target_and_file_id(db, t) + } + hir::ModuleDef::Module(m) => { + target_name = m.name(db); + let in_file_source = m.declaration_source(db)?; + let file_id = in_file_source.file_id.original_file(db.upcast()); + let syntax = in_file_source.value.syntax(); + (vis_offset(syntax), in_file_source.value.visibility(), syntax.text_range(), file_id) + } + // Enum variants can't be private, we can't modify builtin types + 
hir::ModuleDef::EnumVariant(_) | hir::ModuleDef::BuiltinType(_) => return None, + }; + + Some((offset, current_visibility, target, target_file, target_name)) +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_not_applicable}; + + use super::*; + + #[test] + fn fix_visibility_of_fn() { + check_assist( + fix_visibility, + r"mod foo { fn foo() {} } + fn main() { foo::foo<|>() } ", + r"mod foo { $0pub(crate) fn foo() {} } + fn main() { foo::foo() } ", + ); + check_assist_not_applicable( + fix_visibility, + r"mod foo { pub fn foo() {} } + fn main() { foo::foo<|>() } ", + ) + } + + #[test] + fn fix_visibility_of_adt_in_submodule() { + check_assist( + fix_visibility, + r"mod foo { struct Foo; } + fn main() { foo::Foo<|> } ", + r"mod foo { $0pub(crate) struct Foo; } + fn main() { foo::Foo } ", + ); + check_assist_not_applicable( + fix_visibility, + r"mod foo { pub struct Foo; } + fn main() { foo::Foo<|> } ", + ); + check_assist( + fix_visibility, + r"mod foo { enum Foo; } + fn main() { foo::Foo<|> } ", + r"mod foo { $0pub(crate) enum Foo; } + fn main() { foo::Foo } ", + ); + check_assist_not_applicable( + fix_visibility, + r"mod foo { pub enum Foo; } + fn main() { foo::Foo<|> } ", + ); + check_assist( + fix_visibility, + r"mod foo { union Foo; } + fn main() { foo::Foo<|> } ", + r"mod foo { $0pub(crate) union Foo; } + fn main() { foo::Foo } ", + ); + check_assist_not_applicable( + fix_visibility, + r"mod foo { pub union Foo; } + fn main() { foo::Foo<|> } ", + ); + } + + #[test] + fn fix_visibility_of_adt_in_other_file() { + check_assist( + fix_visibility, + r" +//- /main.rs +mod foo; +fn main() { foo::Foo<|> } + +//- /foo.rs +struct Foo; +", + r"$0pub(crate) struct Foo; +", + ); + } + + #[test] + fn fix_visibility_of_struct_field() { + check_assist( + fix_visibility, + r"mod foo { pub struct Foo { bar: (), } } + fn main() { foo::Foo { <|>bar: () }; } ", + r"mod foo { pub struct Foo { $0pub(crate) bar: (), } } + fn main() { foo::Foo { bar: () }; 
} ", + ); + check_assist( + fix_visibility, + r" +//- /lib.rs +mod foo; +fn main() { foo::Foo { <|>bar: () }; } +//- /foo.rs +pub struct Foo { bar: () } +", + r"pub struct Foo { $0pub(crate) bar: () } +", + ); + check_assist_not_applicable( + fix_visibility, + r"mod foo { pub struct Foo { pub bar: (), } } + fn main() { foo::Foo { <|>bar: () }; } ", + ); + check_assist_not_applicable( + fix_visibility, + r" +//- /lib.rs +mod foo; +fn main() { foo::Foo { <|>bar: () }; } +//- /foo.rs +pub struct Foo { pub bar: () } +", + ); + } + + #[test] + fn fix_visibility_of_enum_variant_field() { + check_assist( + fix_visibility, + r"mod foo { pub enum Foo { Bar { bar: () } } } + fn main() { foo::Foo::Bar { <|>bar: () }; } ", + r"mod foo { pub enum Foo { Bar { $0pub(crate) bar: () } } } + fn main() { foo::Foo::Bar { bar: () }; } ", + ); + check_assist( + fix_visibility, + r" +//- /lib.rs +mod foo; +fn main() { foo::Foo::Bar { <|>bar: () }; } +//- /foo.rs +pub enum Foo { Bar { bar: () } } +", + r"pub enum Foo { Bar { $0pub(crate) bar: () } } +", + ); + check_assist_not_applicable( + fix_visibility, + r"mod foo { pub struct Foo { pub bar: (), } } + fn main() { foo::Foo { <|>bar: () }; } ", + ); + check_assist_not_applicable( + fix_visibility, + r" +//- /lib.rs +mod foo; +fn main() { foo::Foo { <|>bar: () }; } +//- /foo.rs +pub struct Foo { pub bar: () } +", + ); + } + + #[test] + #[ignore] + // FIXME reenable this test when `Semantics::resolve_record_field` works with union fields + fn fix_visibility_of_union_field() { + check_assist( + fix_visibility, + r"mod foo { pub union Foo { bar: (), } } + fn main() { foo::Foo { <|>bar: () }; } ", + r"mod foo { pub union Foo { $0pub(crate) bar: (), } } + fn main() { foo::Foo { bar: () }; } ", + ); + check_assist( + fix_visibility, + r" +//- /lib.rs +mod foo; +fn main() { foo::Foo { <|>bar: () }; } +//- /foo.rs +pub union Foo { bar: () } +", + r"pub union Foo { $0pub(crate) bar: () } +", + ); + check_assist_not_applicable( + fix_visibility, + 
r"mod foo { pub union Foo { pub bar: (), } } + fn main() { foo::Foo { <|>bar: () }; } ", + ); + check_assist_not_applicable( + fix_visibility, + r" +//- /lib.rs +mod foo; +fn main() { foo::Foo { <|>bar: () }; } +//- /foo.rs +pub union Foo { pub bar: () } +", + ); + } + + #[test] + fn fix_visibility_of_const() { + check_assist( + fix_visibility, + r"mod foo { const FOO: () = (); } + fn main() { foo::FOO<|> } ", + r"mod foo { $0pub(crate) const FOO: () = (); } + fn main() { foo::FOO } ", + ); + check_assist_not_applicable( + fix_visibility, + r"mod foo { pub const FOO: () = (); } + fn main() { foo::FOO<|> } ", + ); + } + + #[test] + fn fix_visibility_of_static() { + check_assist( + fix_visibility, + r"mod foo { static FOO: () = (); } + fn main() { foo::FOO<|> } ", + r"mod foo { $0pub(crate) static FOO: () = (); } + fn main() { foo::FOO } ", + ); + check_assist_not_applicable( + fix_visibility, + r"mod foo { pub static FOO: () = (); } + fn main() { foo::FOO<|> } ", + ); + } + + #[test] + fn fix_visibility_of_trait() { + check_assist( + fix_visibility, + r"mod foo { trait Foo { fn foo(&self) {} } } + fn main() { let x: &dyn foo::<|>Foo; } ", + r"mod foo { $0pub(crate) trait Foo { fn foo(&self) {} } } + fn main() { let x: &dyn foo::Foo; } ", + ); + check_assist_not_applicable( + fix_visibility, + r"mod foo { pub trait Foo { fn foo(&self) {} } } + fn main() { let x: &dyn foo::Foo<|>; } ", + ); + } + + #[test] + fn fix_visibility_of_type_alias() { + check_assist( + fix_visibility, + r"mod foo { type Foo = (); } + fn main() { let x: foo::Foo<|>; } ", + r"mod foo { $0pub(crate) type Foo = (); } + fn main() { let x: foo::Foo; } ", + ); + check_assist_not_applicable( + fix_visibility, + r"mod foo { pub type Foo = (); } + fn main() { let x: foo::Foo<|>; } ", + ); + } + + #[test] + fn fix_visibility_of_module() { + check_assist( + fix_visibility, + r"mod foo { mod bar { fn bar() {} } } + fn main() { foo::bar<|>::bar(); } ", + r"mod foo { $0pub(crate) mod bar { fn bar() {} } } + 
fn main() { foo::bar::bar(); } ", + ); + + check_assist( + fix_visibility, + r" +//- /main.rs +mod foo; +fn main() { foo::bar<|>::baz(); } + +//- /foo.rs +mod bar { + pub fn baz() {} +} +", + r"$0pub(crate) mod bar { + pub fn baz() {} +} +", + ); + + check_assist_not_applicable( + fix_visibility, + r"mod foo { pub mod bar { pub fn bar() {} } } + fn main() { foo::bar<|>::bar(); } ", + ); + } + + #[test] + fn fix_visibility_of_inline_module_in_other_file() { + check_assist( + fix_visibility, + r" +//- /main.rs +mod foo; +fn main() { foo::bar<|>::baz(); } + +//- /foo.rs +mod bar; +//- /foo/bar.rs +pub fn baz() {} +", + r"$0pub(crate) mod bar; +", + ); + } + + #[test] + fn fix_visibility_of_module_declaration_in_other_file() { + check_assist( + fix_visibility, + r" +//- /main.rs +mod foo; +fn main() { foo::bar<|>>::baz(); } + +//- /foo.rs +mod bar { + pub fn baz() {} +} +", + r"$0pub(crate) mod bar { + pub fn baz() {} +} +", + ); + } + + #[test] + fn adds_pub_when_target_is_in_another_crate() { + check_assist( + fix_visibility, + r" +//- /main.rs crate:a deps:foo +foo::Bar<|> +//- /lib.rs crate:foo +struct Bar; +", + r"$0pub struct Bar; +", + ) + } + + #[test] + fn replaces_pub_crate_with_pub() { + check_assist( + fix_visibility, + r" +//- /main.rs crate:a deps:foo +foo::Bar<|> +//- /lib.rs crate:foo +pub(crate) struct Bar; +", + r"$0pub struct Bar; +", + ); + check_assist( + fix_visibility, + r" +//- /main.rs crate:a deps:foo +fn main() { + foo::Foo { <|>bar: () }; +} +//- /lib.rs crate:foo +pub struct Foo { pub(crate) bar: () } +", + r"pub struct Foo { $0pub bar: () } +", + ); + } + + #[test] + #[ignore] + // FIXME handle reexports properly + fn fix_visibility_of_reexport() { + check_assist( + fix_visibility, + r" + mod foo { + use bar::Baz; + mod bar { pub(super) struct Baz; } + } + foo::Baz<|> + ", + r" + mod foo { + $0pub(crate) use bar::Baz; + mod bar { pub(super) struct Baz; } + } + foo::Baz + ", + ) + } +} diff --git 
a/crates/assists/src/handlers/flip_binexpr.rs b/crates/assists/src/handlers/flip_binexpr.rs new file mode 100644 index 0000000000..404f06133d --- /dev/null +++ b/crates/assists/src/handlers/flip_binexpr.rs @@ -0,0 +1,142 @@ +use syntax::ast::{AstNode, BinExpr, BinOp}; + +use crate::{AssistContext, AssistId, AssistKind, Assists}; + +// Assist: flip_binexpr +// +// Flips operands of a binary expression. +// +// ``` +// fn main() { +// let _ = 90 +<|> 2; +// } +// ``` +// -> +// ``` +// fn main() { +// let _ = 2 + 90; +// } +// ``` +pub(crate) fn flip_binexpr(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + let expr = ctx.find_node_at_offset::()?; + let lhs = expr.lhs()?.syntax().clone(); + let rhs = expr.rhs()?.syntax().clone(); + let op_range = expr.op_token()?.text_range(); + // The assist should be applied only if the cursor is on the operator + let cursor_in_range = op_range.contains_range(ctx.frange.range); + if !cursor_in_range { + return None; + } + let action: FlipAction = expr.op_kind()?.into(); + // The assist should not be applied for certain operators + if let FlipAction::DontFlip = action { + return None; + } + + acc.add( + AssistId("flip_binexpr", AssistKind::RefactorRewrite), + "Flip binary expression", + op_range, + |edit| { + if let FlipAction::FlipAndReplaceOp(new_op) = action { + edit.replace(op_range, new_op); + } + edit.replace(lhs.text_range(), rhs.text()); + edit.replace(rhs.text_range(), lhs.text()); + }, + ) +} + +enum FlipAction { + // Flip the expression + Flip, + // Flip the expression and replace the operator with this string + FlipAndReplaceOp(&'static str), + // Do not flip the expression + DontFlip, +} + +impl From for FlipAction { + fn from(op_kind: BinOp) -> Self { + match op_kind { + kind if kind.is_assignment() => FlipAction::DontFlip, + BinOp::GreaterTest => FlipAction::FlipAndReplaceOp("<"), + BinOp::GreaterEqualTest => FlipAction::FlipAndReplaceOp("<="), + BinOp::LesserTest => FlipAction::FlipAndReplaceOp(">"), + 
BinOp::LesserEqualTest => FlipAction::FlipAndReplaceOp(">="), + _ => FlipAction::Flip, + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target}; + + #[test] + fn flip_binexpr_target_is_the_op() { + check_assist_target(flip_binexpr, "fn f() { let res = 1 ==<|> 2; }", "==") + } + + #[test] + fn flip_binexpr_not_applicable_for_assignment() { + check_assist_not_applicable(flip_binexpr, "fn f() { let mut _x = 1; _x +=<|> 2 }") + } + + #[test] + fn flip_binexpr_works_for_eq() { + check_assist( + flip_binexpr, + "fn f() { let res = 1 ==<|> 2; }", + "fn f() { let res = 2 == 1; }", + ) + } + + #[test] + fn flip_binexpr_works_for_gt() { + check_assist(flip_binexpr, "fn f() { let res = 1 ><|> 2; }", "fn f() { let res = 2 < 1; }") + } + + #[test] + fn flip_binexpr_works_for_lteq() { + check_assist( + flip_binexpr, + "fn f() { let res = 1 <=<|> 2; }", + "fn f() { let res = 2 >= 1; }", + ) + } + + #[test] + fn flip_binexpr_works_for_complex_expr() { + check_assist( + flip_binexpr, + "fn f() { let res = (1 + 1) ==<|> (2 + 2); }", + "fn f() { let res = (2 + 2) == (1 + 1); }", + ) + } + + #[test] + fn flip_binexpr_works_inside_match() { + check_assist( + flip_binexpr, + r#" + fn dyn_eq(&self, other: &dyn Diagnostic) -> bool { + match other.downcast_ref::() { + None => false, + Some(it) => it ==<|> self, + } + } + "#, + r#" + fn dyn_eq(&self, other: &dyn Diagnostic) -> bool { + match other.downcast_ref::() { + None => false, + Some(it) => self == it, + } + } + "#, + ) + } +} diff --git a/crates/assists/src/handlers/flip_comma.rs b/crates/assists/src/handlers/flip_comma.rs new file mode 100644 index 0000000000..5c69db53e5 --- /dev/null +++ b/crates/assists/src/handlers/flip_comma.rs @@ -0,0 +1,84 @@ +use syntax::{algo::non_trivia_sibling, Direction, T}; + +use crate::{AssistContext, AssistId, AssistKind, Assists}; + +// Assist: flip_comma +// +// Flips two comma-separated items. 
+// +// ``` +// fn main() { +// ((1, 2),<|> (3, 4)); +// } +// ``` +// -> +// ``` +// fn main() { +// ((3, 4), (1, 2)); +// } +// ``` +pub(crate) fn flip_comma(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + let comma = ctx.find_token_at_offset(T![,])?; + let prev = non_trivia_sibling(comma.clone().into(), Direction::Prev)?; + let next = non_trivia_sibling(comma.clone().into(), Direction::Next)?; + + // Don't apply a "flip" in case of a last comma + // that typically comes before punctuation + if next.kind().is_punct() { + return None; + } + + acc.add( + AssistId("flip_comma", AssistKind::RefactorRewrite), + "Flip comma", + comma.text_range(), + |edit| { + edit.replace(prev.text_range(), next.to_string()); + edit.replace(next.text_range(), prev.to_string()); + }, + ) +} + +#[cfg(test)] +mod tests { + use super::*; + + use crate::tests::{check_assist, check_assist_target}; + + #[test] + fn flip_comma_works_for_function_parameters() { + check_assist( + flip_comma, + "fn foo(x: i32,<|> y: Result<(), ()>) {}", + "fn foo(y: Result<(), ()>, x: i32) {}", + ) + } + + #[test] + fn flip_comma_target() { + check_assist_target(flip_comma, "fn foo(x: i32,<|> y: Result<(), ()>) {}", ",") + } + + #[test] + #[should_panic] + fn flip_comma_before_punct() { + // See https://github.com/rust-analyzer/rust-analyzer/issues/1619 + // "Flip comma" assist shouldn't be applicable to the last comma in enum or struct + // declaration body. 
+ check_assist_target( + flip_comma, + "pub enum Test { \ + A,<|> \ + }", + ",", + ); + + check_assist_target( + flip_comma, + "pub struct Test { \ + foo: usize,<|> \ + }", + ",", + ); + } +} diff --git a/crates/assists/src/handlers/flip_trait_bound.rs b/crates/assists/src/handlers/flip_trait_bound.rs new file mode 100644 index 0000000000..347e79b1da --- /dev/null +++ b/crates/assists/src/handlers/flip_trait_bound.rs @@ -0,0 +1,121 @@ +use syntax::{ + algo::non_trivia_sibling, + ast::{self, AstNode}, + Direction, T, +}; + +use crate::{AssistContext, AssistId, AssistKind, Assists}; + +// Assist: flip_trait_bound +// +// Flips two trait bounds. +// +// ``` +// fn foo Copy>() { } +// ``` +// -> +// ``` +// fn foo() { } +// ``` +pub(crate) fn flip_trait_bound(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + // We want to replicate the behavior of `flip_binexpr` by only suggesting + // the assist when the cursor is on a `+` + let plus = ctx.find_token_at_offset(T![+])?; + + // Make sure we're in a `TypeBoundList` + if ast::TypeBoundList::cast(plus.parent()).is_none() { + return None; + } + + let (before, after) = ( + non_trivia_sibling(plus.clone().into(), Direction::Prev)?, + non_trivia_sibling(plus.clone().into(), Direction::Next)?, + ); + + let target = plus.text_range(); + acc.add( + AssistId("flip_trait_bound", AssistKind::RefactorRewrite), + "Flip trait bounds", + target, + |edit| { + edit.replace(before.text_range(), after.to_string()); + edit.replace(after.text_range(), before.to_string()); + }, + ) +} + +#[cfg(test)] +mod tests { + use super::*; + + use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target}; + + #[test] + fn flip_trait_bound_assist_available() { + check_assist_target(flip_trait_bound, "struct S where T: A <|>+ B + C { }", "+") + } + + #[test] + fn flip_trait_bound_not_applicable_for_single_trait_bound() { + check_assist_not_applicable(flip_trait_bound, "struct S where T: <|>A { }") + } + + #[test] + fn 
flip_trait_bound_works_for_struct() { + check_assist( + flip_trait_bound, + "struct S where T: A <|>+ B { }", + "struct S where T: B + A { }", + ) + } + + #[test] + fn flip_trait_bound_works_for_trait_impl() { + check_assist( + flip_trait_bound, + "impl X for S where T: A +<|> B { }", + "impl X for S where T: B + A { }", + ) + } + + #[test] + fn flip_trait_bound_works_for_fn() { + check_assist(flip_trait_bound, "fn f+ B>(t: T) { }", "fn f(t: T) { }") + } + + #[test] + fn flip_trait_bound_works_for_fn_where_clause() { + check_assist( + flip_trait_bound, + "fn f(t: T) where T: A +<|> B { }", + "fn f(t: T) where T: B + A { }", + ) + } + + #[test] + fn flip_trait_bound_works_for_lifetime() { + check_assist( + flip_trait_bound, + "fn f(t: T) where T: A <|>+ 'static { }", + "fn f(t: T) where T: 'static + A { }", + ) + } + + #[test] + fn flip_trait_bound_works_for_complex_bounds() { + check_assist( + flip_trait_bound, + "struct S where T: A <|>+ b_mod::B + C { }", + "struct S where T: b_mod::B + A + C { }", + ) + } + + #[test] + fn flip_trait_bound_works_for_long_bounds() { + check_assist( + flip_trait_bound, + "struct S where T: A + B + C + D + E + F +<|> G + H + I + J { }", + "struct S where T: A + B + C + D + E + G + F + H + I + J { }", + ) + } +} diff --git a/crates/assists/src/handlers/generate_derive.rs b/crates/assists/src/handlers/generate_derive.rs new file mode 100644 index 0000000000..314504e15c --- /dev/null +++ b/crates/assists/src/handlers/generate_derive.rs @@ -0,0 +1,132 @@ +use syntax::{ + ast::{self, AstNode, AttrsOwner}, + SyntaxKind::{COMMENT, WHITESPACE}, + TextSize, +}; + +use crate::{AssistContext, AssistId, AssistKind, Assists}; + +// Assist: generate_derive +// +// Adds a new `#[derive()]` clause to a struct or enum. 
+// +// ``` +// struct Point { +// x: u32, +// y: u32,<|> +// } +// ``` +// -> +// ``` +// #[derive($0)] +// struct Point { +// x: u32, +// y: u32, +// } +// ``` +pub(crate) fn generate_derive(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + let cap = ctx.config.snippet_cap?; + let nominal = ctx.find_node_at_offset::()?; + let node_start = derive_insertion_offset(&nominal)?; + let target = nominal.syntax().text_range(); + acc.add( + AssistId("generate_derive", AssistKind::Generate), + "Add `#[derive]`", + target, + |builder| { + let derive_attr = nominal + .attrs() + .filter_map(|x| x.as_simple_call()) + .filter(|(name, _arg)| name == "derive") + .map(|(_name, arg)| arg) + .next(); + match derive_attr { + None => { + builder.insert_snippet(cap, node_start, "#[derive($0)]\n"); + } + Some(tt) => { + // Just move the cursor. + builder.insert_snippet( + cap, + tt.syntax().text_range().end() - TextSize::of(')'), + "$0", + ) + } + }; + }, + ) +} + +// Insert `derive` after doc comments. +fn derive_insertion_offset(nominal: &ast::AdtDef) -> Option { + let non_ws_child = nominal + .syntax() + .children_with_tokens() + .find(|it| it.kind() != COMMENT && it.kind() != WHITESPACE)?; + Some(non_ws_child.text_range().start()) +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_target}; + + use super::*; + + #[test] + fn add_derive_new() { + check_assist( + generate_derive, + "struct Foo { a: i32, <|>}", + "#[derive($0)]\nstruct Foo { a: i32, }", + ); + check_assist( + generate_derive, + "struct Foo { <|> a: i32, }", + "#[derive($0)]\nstruct Foo { a: i32, }", + ); + } + + #[test] + fn add_derive_existing() { + check_assist( + generate_derive, + "#[derive(Clone)]\nstruct Foo { a: i32<|>, }", + "#[derive(Clone$0)]\nstruct Foo { a: i32, }", + ); + } + + #[test] + fn add_derive_new_with_doc_comment() { + check_assist( + generate_derive, + " +/// `Foo` is a pretty important struct. +/// It does stuff. 
+struct Foo { a: i32<|>, } + ", + " +/// `Foo` is a pretty important struct. +/// It does stuff. +#[derive($0)] +struct Foo { a: i32, } + ", + ); + } + + #[test] + fn add_derive_target() { + check_assist_target( + generate_derive, + " +struct SomeThingIrrelevant; +/// `Foo` is a pretty important struct. +/// It does stuff. +struct Foo { a: i32<|>, } +struct EvenMoreIrrelevant; + ", + "/// `Foo` is a pretty important struct. +/// It does stuff. +struct Foo { a: i32, }", + ); + } +} diff --git a/crates/assists/src/handlers/generate_from_impl_for_enum.rs b/crates/assists/src/handlers/generate_from_impl_for_enum.rs new file mode 100644 index 0000000000..7f04b95725 --- /dev/null +++ b/crates/assists/src/handlers/generate_from_impl_for_enum.rs @@ -0,0 +1,200 @@ +use ide_db::RootDatabase; +use syntax::ast::{self, AstNode, NameOwner}; +use test_utils::mark; + +use crate::{utils::FamousDefs, AssistContext, AssistId, AssistKind, Assists}; + +// Assist: generate_from_impl_for_enum +// +// Adds a From impl for an enum variant with one tuple field. 
+// +// ``` +// enum A { <|>One(u32) } +// ``` +// -> +// ``` +// enum A { One(u32) } +// +// impl From for A { +// fn from(v: u32) -> Self { +// A::One(v) +// } +// } +// ``` +pub(crate) fn generate_from_impl_for_enum(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + let variant = ctx.find_node_at_offset::()?; + let variant_name = variant.name()?; + let enum_name = variant.parent_enum().name()?; + let field_list = match variant.kind() { + ast::StructKind::Tuple(field_list) => field_list, + _ => return None, + }; + if field_list.fields().count() != 1 { + return None; + } + let field_type = field_list.fields().next()?.ty()?; + let path = match field_type { + ast::Type::PathType(it) => it, + _ => return None, + }; + + if existing_from_impl(&ctx.sema, &variant).is_some() { + mark::hit!(test_add_from_impl_already_exists); + return None; + } + + let target = variant.syntax().text_range(); + acc.add( + AssistId("generate_from_impl_for_enum", AssistKind::Generate), + "Generate `From` impl for this enum variant", + target, + |edit| { + let start_offset = variant.parent_enum().syntax().text_range().end(); + let buf = format!( + r#" + +impl From<{0}> for {1} {{ + fn from(v: {0}) -> Self {{ + {1}::{2}(v) + }} +}}"#, + path.syntax(), + enum_name, + variant_name + ); + edit.insert(start_offset, buf); + }, + ) +} + +fn existing_from_impl( + sema: &'_ hir::Semantics<'_, RootDatabase>, + variant: &ast::Variant, +) -> Option<()> { + let variant = sema.to_def(variant)?; + let enum_ = variant.parent_enum(sema.db); + let krate = enum_.module(sema.db).krate(); + + let from_trait = FamousDefs(sema, krate).core_convert_From()?; + + let enum_type = enum_.ty(sema.db); + + let wrapped_type = variant.fields(sema.db).get(0)?.signature_ty(sema.db); + + if enum_type.impls_trait(sema.db, from_trait, &[wrapped_type]) { + Some(()) + } else { + None + } +} + +#[cfg(test)] +mod tests { + use test_utils::mark; + + use crate::tests::{check_assist, check_assist_not_applicable}; + + use 
super::*; + + #[test] + fn test_generate_from_impl_for_enum() { + check_assist( + generate_from_impl_for_enum, + "enum A { <|>One(u32) }", + r#"enum A { One(u32) } + +impl From for A { + fn from(v: u32) -> Self { + A::One(v) + } +}"#, + ); + } + + #[test] + fn test_generate_from_impl_for_enum_complicated_path() { + check_assist( + generate_from_impl_for_enum, + r#"enum A { <|>One(foo::bar::baz::Boo) }"#, + r#"enum A { One(foo::bar::baz::Boo) } + +impl From for A { + fn from(v: foo::bar::baz::Boo) -> Self { + A::One(v) + } +}"#, + ); + } + + fn check_not_applicable(ra_fixture: &str) { + let fixture = + format!("//- /main.rs crate:main deps:core\n{}\n{}", ra_fixture, FamousDefs::FIXTURE); + check_assist_not_applicable(generate_from_impl_for_enum, &fixture) + } + + #[test] + fn test_add_from_impl_no_element() { + check_not_applicable("enum A { <|>One }"); + } + + #[test] + fn test_add_from_impl_more_than_one_element_in_tuple() { + check_not_applicable("enum A { <|>One(u32, String) }"); + } + + #[test] + fn test_add_from_impl_struct_variant() { + check_not_applicable("enum A { <|>One { x: u32 } }"); + } + + #[test] + fn test_add_from_impl_already_exists() { + mark::check!(test_add_from_impl_already_exists); + check_not_applicable( + r#" +enum A { <|>One(u32), } + +impl From for A { + fn from(v: u32) -> Self { + A::One(v) + } +} +"#, + ); + } + + #[test] + fn test_add_from_impl_different_variant_impl_exists() { + check_assist( + generate_from_impl_for_enum, + r#"enum A { <|>One(u32), Two(String), } + +impl From for A { + fn from(v: String) -> Self { + A::Two(v) + } +} + +pub trait From { + fn from(T) -> Self; +}"#, + r#"enum A { One(u32), Two(String), } + +impl From for A { + fn from(v: u32) -> Self { + A::One(v) + } +} + +impl From for A { + fn from(v: String) -> Self { + A::Two(v) + } +} + +pub trait From { + fn from(T) -> Self; +}"#, + ); + } +} diff --git a/crates/assists/src/handlers/generate_function.rs b/crates/assists/src/handlers/generate_function.rs new file 
mode 100644 index 0000000000..b38d640581 --- /dev/null +++ b/crates/assists/src/handlers/generate_function.rs @@ -0,0 +1,1058 @@ +use base_db::FileId; +use hir::HirDisplay; +use rustc_hash::{FxHashMap, FxHashSet}; +use syntax::{ + ast::{ + self, + edit::{AstNodeEdit, IndentLevel}, + make, ArgListOwner, AstNode, ModuleItemOwner, + }, + SyntaxKind, SyntaxNode, TextSize, +}; + +use crate::{ + assist_config::SnippetCap, + utils::{render_snippet, Cursor}, + AssistContext, AssistId, AssistKind, Assists, +}; + +// Assist: generate_function +// +// Adds a stub function with a signature matching the function under the cursor. +// +// ``` +// struct Baz; +// fn baz() -> Baz { Baz } +// fn foo() { +// bar<|>("", baz()); +// } +// +// ``` +// -> +// ``` +// struct Baz; +// fn baz() -> Baz { Baz } +// fn foo() { +// bar("", baz()); +// } +// +// fn bar(arg: &str, baz: Baz) { +// ${0:todo!()} +// } +// +// ``` +pub(crate) fn generate_function(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + let path_expr: ast::PathExpr = ctx.find_node_at_offset()?; + let call = path_expr.syntax().parent().and_then(ast::CallExpr::cast)?; + let path = path_expr.path()?; + + if ctx.sema.resolve_path(&path).is_some() { + // The function call already resolves, no need to add a function + return None; + } + + let target_module = match path.qualifier() { + Some(qualifier) => match ctx.sema.resolve_path(&qualifier) { + Some(hir::PathResolution::Def(hir::ModuleDef::Module(module))) => Some(module), + _ => return None, + }, + None => None, + }; + + let function_builder = FunctionBuilder::from_call(&ctx, &call, &path, target_module)?; + + let target = call.syntax().text_range(); + acc.add( + AssistId("generate_function", AssistKind::Generate), + format!("Generate `{}` function", function_builder.fn_name), + target, + |builder| { + let function_template = function_builder.render(); + builder.edit_file(function_template.file); + let new_fn = function_template.to_string(ctx.config.snippet_cap); + 
match ctx.config.snippet_cap { + Some(cap) => builder.insert_snippet(cap, function_template.insert_offset, new_fn), + None => builder.insert(function_template.insert_offset, new_fn), + } + }, + ) +} + +struct FunctionTemplate { + insert_offset: TextSize, + placeholder_expr: ast::MacroCall, + leading_ws: String, + fn_def: ast::Fn, + trailing_ws: String, + file: FileId, +} + +impl FunctionTemplate { + fn to_string(&self, cap: Option) -> String { + let f = match cap { + Some(cap) => render_snippet( + cap, + self.fn_def.syntax(), + Cursor::Replace(self.placeholder_expr.syntax()), + ), + None => self.fn_def.to_string(), + }; + format!("{}{}{}", self.leading_ws, f, self.trailing_ws) + } +} + +struct FunctionBuilder { + target: GeneratedFunctionTarget, + fn_name: ast::Name, + type_params: Option, + params: ast::ParamList, + file: FileId, + needs_pub: bool, +} + +impl FunctionBuilder { + /// Prepares a generated function that matches `call`. + /// The function is generated in `target_module` or next to `call` + fn from_call( + ctx: &AssistContext, + call: &ast::CallExpr, + path: &ast::Path, + target_module: Option, + ) -> Option { + let mut file = ctx.frange.file_id; + let target = match &target_module { + Some(target_module) => { + let module_source = target_module.definition_source(ctx.db()); + let (in_file, target) = next_space_for_fn_in_module(ctx.sema.db, &module_source)?; + file = in_file; + target + } + None => next_space_for_fn_after_call_site(&call)?, + }; + let needs_pub = target_module.is_some(); + let target_module = target_module.or_else(|| ctx.sema.scope(target.syntax()).module())?; + let fn_name = fn_name(&path)?; + let (type_params, params) = fn_args(ctx, target_module, &call)?; + + Some(Self { target, fn_name, type_params, params, file, needs_pub }) + } + + fn render(self) -> FunctionTemplate { + let placeholder_expr = make::expr_todo(); + let fn_body = make::block_expr(vec![], Some(placeholder_expr)); + let visibility = if self.needs_pub { 
Some(make::visibility_pub_crate()) } else { None }; + let mut fn_def = + make::fn_(visibility, self.fn_name, self.type_params, self.params, fn_body); + let leading_ws; + let trailing_ws; + + let insert_offset = match self.target { + GeneratedFunctionTarget::BehindItem(it) => { + let indent = IndentLevel::from_node(&it); + leading_ws = format!("\n\n{}", indent); + fn_def = fn_def.indent(indent); + trailing_ws = String::new(); + it.text_range().end() + } + GeneratedFunctionTarget::InEmptyItemList(it) => { + let indent = IndentLevel::from_node(it.syntax()); + leading_ws = format!("\n{}", indent + 1); + fn_def = fn_def.indent(indent + 1); + trailing_ws = format!("\n{}", indent); + it.syntax().text_range().start() + TextSize::of('{') + } + }; + + let placeholder_expr = + fn_def.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); + FunctionTemplate { + insert_offset, + placeholder_expr, + leading_ws, + fn_def, + trailing_ws, + file: self.file, + } + } +} + +enum GeneratedFunctionTarget { + BehindItem(SyntaxNode), + InEmptyItemList(ast::ItemList), +} + +impl GeneratedFunctionTarget { + fn syntax(&self) -> &SyntaxNode { + match self { + GeneratedFunctionTarget::BehindItem(it) => it, + GeneratedFunctionTarget::InEmptyItemList(it) => it.syntax(), + } + } +} + +fn fn_name(call: &ast::Path) -> Option { + let name = call.segment()?.syntax().to_string(); + Some(make::name(&name)) +} + +/// Computes the type variables and arguments required for the generated function +fn fn_args( + ctx: &AssistContext, + target_module: hir::Module, + call: &ast::CallExpr, +) -> Option<(Option, ast::ParamList)> { + let mut arg_names = Vec::new(); + let mut arg_types = Vec::new(); + for arg in call.arg_list()?.args() { + arg_names.push(match fn_arg_name(&arg) { + Some(name) => name, + None => String::from("arg"), + }); + arg_types.push(match fn_arg_type(ctx, target_module, &arg) { + Some(ty) => ty, + None => String::from("()"), + }); + } + deduplicate_arg_names(&mut arg_names); + let 
params = arg_names.into_iter().zip(arg_types).map(|(name, ty)| make::param(name, ty)); + Some((None, make::param_list(params))) +} + +/// Makes duplicate argument names unique by appending incrementing numbers. +/// +/// ``` +/// let mut names: Vec = +/// vec!["foo".into(), "foo".into(), "bar".into(), "baz".into(), "bar".into()]; +/// deduplicate_arg_names(&mut names); +/// let expected: Vec = +/// vec!["foo_1".into(), "foo_2".into(), "bar_1".into(), "baz".into(), "bar_2".into()]; +/// assert_eq!(names, expected); +/// ``` +fn deduplicate_arg_names(arg_names: &mut Vec) { + let arg_name_counts = arg_names.iter().fold(FxHashMap::default(), |mut m, name| { + *m.entry(name).or_insert(0) += 1; + m + }); + let duplicate_arg_names: FxHashSet = arg_name_counts + .into_iter() + .filter(|(_, count)| *count >= 2) + .map(|(name, _)| name.clone()) + .collect(); + + let mut counter_per_name = FxHashMap::default(); + for arg_name in arg_names.iter_mut() { + if duplicate_arg_names.contains(arg_name) { + let counter = counter_per_name.entry(arg_name.clone()).or_insert(1); + arg_name.push('_'); + arg_name.push_str(&counter.to_string()); + *counter += 1; + } + } +} + +fn fn_arg_name(fn_arg: &ast::Expr) -> Option { + match fn_arg { + ast::Expr::CastExpr(cast_expr) => fn_arg_name(&cast_expr.expr()?), + _ => Some( + fn_arg + .syntax() + .descendants() + .filter(|d| ast::NameRef::can_cast(d.kind())) + .last()? 
+ .to_string(), + ), + } +} + +fn fn_arg_type( + ctx: &AssistContext, + target_module: hir::Module, + fn_arg: &ast::Expr, +) -> Option { + let ty = ctx.sema.type_of_expr(fn_arg)?; + if ty.is_unknown() { + return None; + } + + if let Ok(rendered) = ty.display_source_code(ctx.db(), target_module.into()) { + Some(rendered) + } else { + None + } +} + +/// Returns the position inside the current mod or file +/// directly after the current block +/// We want to write the generated function directly after +/// fns, impls or macro calls, but inside mods +fn next_space_for_fn_after_call_site(expr: &ast::CallExpr) -> Option { + let mut ancestors = expr.syntax().ancestors().peekable(); + let mut last_ancestor: Option = None; + while let Some(next_ancestor) = ancestors.next() { + match next_ancestor.kind() { + SyntaxKind::SOURCE_FILE => { + break; + } + SyntaxKind::ITEM_LIST => { + if ancestors.peek().map(|a| a.kind()) == Some(SyntaxKind::MODULE) { + break; + } + } + _ => {} + } + last_ancestor = Some(next_ancestor); + } + last_ancestor.map(GeneratedFunctionTarget::BehindItem) +} + +fn next_space_for_fn_in_module( + db: &dyn hir::db::AstDatabase, + module_source: &hir::InFile, +) -> Option<(FileId, GeneratedFunctionTarget)> { + let file = module_source.file_id.original_file(db); + let assist_item = match &module_source.value { + hir::ModuleSource::SourceFile(it) => { + if let Some(last_item) = it.items().last() { + GeneratedFunctionTarget::BehindItem(last_item.syntax().clone()) + } else { + GeneratedFunctionTarget::BehindItem(it.syntax().clone()) + } + } + hir::ModuleSource::Module(it) => { + if let Some(last_item) = it.item_list().and_then(|it| it.items().last()) { + GeneratedFunctionTarget::BehindItem(last_item.syntax().clone()) + } else { + GeneratedFunctionTarget::InEmptyItemList(it.item_list()?) 
+ } + } + }; + Some((file, assist_item)) +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_not_applicable}; + + use super::*; + + #[test] + fn add_function_with_no_args() { + check_assist( + generate_function, + r" +fn foo() { + bar<|>(); +} +", + r" +fn foo() { + bar(); +} + +fn bar() { + ${0:todo!()} +} +", + ) + } + + #[test] + fn add_function_from_method() { + // This ensures that the function is correctly generated + // in the next outer mod or file + check_assist( + generate_function, + r" +impl Foo { + fn foo() { + bar<|>(); + } +} +", + r" +impl Foo { + fn foo() { + bar(); + } +} + +fn bar() { + ${0:todo!()} +} +", + ) + } + + #[test] + fn add_function_directly_after_current_block() { + // The new fn should not be created at the end of the file or module + check_assist( + generate_function, + r" +fn foo1() { + bar<|>(); +} + +fn foo2() {} +", + r" +fn foo1() { + bar(); +} + +fn bar() { + ${0:todo!()} +} + +fn foo2() {} +", + ) + } + + #[test] + fn add_function_with_no_args_in_same_module() { + check_assist( + generate_function, + r" +mod baz { + fn foo() { + bar<|>(); + } +} +", + r" +mod baz { + fn foo() { + bar(); + } + + fn bar() { + ${0:todo!()} + } +} +", + ) + } + + #[test] + fn add_function_with_function_call_arg() { + check_assist( + generate_function, + r" +struct Baz; +fn baz() -> Baz { todo!() } +fn foo() { + bar<|>(baz()); +} +", + r" +struct Baz; +fn baz() -> Baz { todo!() } +fn foo() { + bar(baz()); +} + +fn bar(baz: Baz) { + ${0:todo!()} +} +", + ); + } + + #[test] + fn add_function_with_method_call_arg() { + check_assist( + generate_function, + r" +struct Baz; +impl Baz { + fn foo(&self) -> Baz { + ba<|>r(self.baz()) + } + fn baz(&self) -> Baz { + Baz + } +} +", + r" +struct Baz; +impl Baz { + fn foo(&self) -> Baz { + bar(self.baz()) + } + fn baz(&self) -> Baz { + Baz + } +} + +fn bar(baz: Baz) { + ${0:todo!()} +} +", + ) + } + + #[test] + fn add_function_with_string_literal_arg() { + check_assist( + 
generate_function, + r#" +fn foo() { + <|>bar("bar") +} +"#, + r#" +fn foo() { + bar("bar") +} + +fn bar(arg: &str) { + ${0:todo!()} +} +"#, + ) + } + + #[test] + fn add_function_with_char_literal_arg() { + check_assist( + generate_function, + r#" +fn foo() { + <|>bar('x') +} +"#, + r#" +fn foo() { + bar('x') +} + +fn bar(arg: char) { + ${0:todo!()} +} +"#, + ) + } + + #[test] + fn add_function_with_int_literal_arg() { + check_assist( + generate_function, + r" +fn foo() { + <|>bar(42) +} +", + r" +fn foo() { + bar(42) +} + +fn bar(arg: i32) { + ${0:todo!()} +} +", + ) + } + + #[test] + fn add_function_with_cast_int_literal_arg() { + check_assist( + generate_function, + r" +fn foo() { + <|>bar(42 as u8) +} +", + r" +fn foo() { + bar(42 as u8) +} + +fn bar(arg: u8) { + ${0:todo!()} +} +", + ) + } + + #[test] + fn name_of_cast_variable_is_used() { + // Ensures that the name of the cast type isn't used + // in the generated function signature. + check_assist( + generate_function, + r" +fn foo() { + let x = 42; + bar<|>(x as u8) +} +", + r" +fn foo() { + let x = 42; + bar(x as u8) +} + +fn bar(x: u8) { + ${0:todo!()} +} +", + ) + } + + #[test] + fn add_function_with_variable_arg() { + check_assist( + generate_function, + r" +fn foo() { + let worble = (); + <|>bar(worble) +} +", + r" +fn foo() { + let worble = (); + bar(worble) +} + +fn bar(worble: ()) { + ${0:todo!()} +} +", + ) + } + + #[test] + fn add_function_with_impl_trait_arg() { + check_assist( + generate_function, + r" +trait Foo {} +fn foo() -> impl Foo { + todo!() +} +fn baz() { + <|>bar(foo()) +} +", + r" +trait Foo {} +fn foo() -> impl Foo { + todo!() +} +fn baz() { + bar(foo()) +} + +fn bar(foo: impl Foo) { + ${0:todo!()} +} +", + ) + } + + #[test] + fn borrowed_arg() { + check_assist( + generate_function, + r" +struct Baz; +fn baz() -> Baz { todo!() } + +fn foo() { + bar<|>(&baz()) +} +", + r" +struct Baz; +fn baz() -> Baz { todo!() } + +fn foo() { + bar(&baz()) +} + +fn bar(baz: &Baz) { + ${0:todo!()} +} 
+", + ) + } + + #[test] + fn add_function_with_qualified_path_arg() { + check_assist( + generate_function, + r" +mod Baz { + pub struct Bof; + pub fn baz() -> Bof { Bof } +} +fn foo() { + <|>bar(Baz::baz()) +} +", + r" +mod Baz { + pub struct Bof; + pub fn baz() -> Bof { Bof } +} +fn foo() { + bar(Baz::baz()) +} + +fn bar(baz: Baz::Bof) { + ${0:todo!()} +} +", + ) + } + + #[test] + #[ignore] + // FIXME fix printing the generics of a `Ty` to make this test pass + fn add_function_with_generic_arg() { + check_assist( + generate_function, + r" +fn foo(t: T) { + <|>bar(t) +} +", + r" +fn foo(t: T) { + bar(t) +} + +fn bar(t: T) { + ${0:todo!()} +} +", + ) + } + + #[test] + #[ignore] + // FIXME Fix function type printing to make this test pass + fn add_function_with_fn_arg() { + check_assist( + generate_function, + r" +struct Baz; +impl Baz { + fn new() -> Self { Baz } +} +fn foo() { + <|>bar(Baz::new); +} +", + r" +struct Baz; +impl Baz { + fn new() -> Self { Baz } +} +fn foo() { + bar(Baz::new); +} + +fn bar(arg: fn() -> Baz) { + ${0:todo!()} +} +", + ) + } + + #[test] + #[ignore] + // FIXME Fix closure type printing to make this test pass + fn add_function_with_closure_arg() { + check_assist( + generate_function, + r" +fn foo() { + let closure = |x: i64| x - 1; + <|>bar(closure) +} +", + r" +fn foo() { + let closure = |x: i64| x - 1; + bar(closure) +} + +fn bar(closure: impl Fn(i64) -> i64) { + ${0:todo!()} +} +", + ) + } + + #[test] + fn unresolveable_types_default_to_unit() { + check_assist( + generate_function, + r" +fn foo() { + <|>bar(baz) +} +", + r" +fn foo() { + bar(baz) +} + +fn bar(baz: ()) { + ${0:todo!()} +} +", + ) + } + + #[test] + fn arg_names_dont_overlap() { + check_assist( + generate_function, + r" +struct Baz; +fn baz() -> Baz { Baz } +fn foo() { + <|>bar(baz(), baz()) +} +", + r" +struct Baz; +fn baz() -> Baz { Baz } +fn foo() { + bar(baz(), baz()) +} + +fn bar(baz_1: Baz, baz_2: Baz) { + ${0:todo!()} +} +", + ) + } + + #[test] + fn 
arg_name_counters_start_at_1_per_name() { + check_assist( + generate_function, + r#" +struct Baz; +fn baz() -> Baz { Baz } +fn foo() { + <|>bar(baz(), baz(), "foo", "bar") +} +"#, + r#" +struct Baz; +fn baz() -> Baz { Baz } +fn foo() { + bar(baz(), baz(), "foo", "bar") +} + +fn bar(baz_1: Baz, baz_2: Baz, arg_1: &str, arg_2: &str) { + ${0:todo!()} +} +"#, + ) + } + + #[test] + fn add_function_in_module() { + check_assist( + generate_function, + r" +mod bar {} + +fn foo() { + bar::my_fn<|>() +} +", + r" +mod bar { + pub(crate) fn my_fn() { + ${0:todo!()} + } +} + +fn foo() { + bar::my_fn() +} +", + ) + } + + #[test] + #[ignore] + // Ignored until local imports are supported. + // See https://github.com/rust-analyzer/rust-analyzer/issues/1165 + fn qualified_path_uses_correct_scope() { + check_assist( + generate_function, + " +mod foo { + pub struct Foo; +} +fn bar() { + use foo::Foo; + let foo = Foo; + baz<|>(foo) +} +", + " +mod foo { + pub struct Foo; +} +fn bar() { + use foo::Foo; + let foo = Foo; + baz(foo) +} + +fn baz(foo: foo::Foo) { + ${0:todo!()} +} +", + ) + } + + #[test] + fn add_function_in_module_containing_other_items() { + check_assist( + generate_function, + r" +mod bar { + fn something_else() {} +} + +fn foo() { + bar::my_fn<|>() +} +", + r" +mod bar { + fn something_else() {} + + pub(crate) fn my_fn() { + ${0:todo!()} + } +} + +fn foo() { + bar::my_fn() +} +", + ) + } + + #[test] + fn add_function_in_nested_module() { + check_assist( + generate_function, + r" +mod bar { + mod baz {} +} + +fn foo() { + bar::baz::my_fn<|>() +} +", + r" +mod bar { + mod baz { + pub(crate) fn my_fn() { + ${0:todo!()} + } + } +} + +fn foo() { + bar::baz::my_fn() +} +", + ) + } + + #[test] + fn add_function_in_another_file() { + check_assist( + generate_function, + r" +//- /main.rs +mod foo; + +fn main() { + foo::bar<|>() +} +//- /foo.rs +", + r" + + +pub(crate) fn bar() { + ${0:todo!()} +}", + ) + } + + #[test] + fn 
add_function_not_applicable_if_function_already_exists() { + check_assist_not_applicable( + generate_function, + r" +fn foo() { + bar<|>(); +} + +fn bar() {} +", + ) + } + + #[test] + fn add_function_not_applicable_if_unresolved_variable_in_call_is_selected() { + check_assist_not_applicable( + // bar is resolved, but baz isn't. + // The assist is only active if the cursor is on an unresolved path, + // but the assist should only be offered if the path is a function call. + generate_function, + r" +fn foo() { + bar(b<|>az); +} + +fn bar(baz: ()) {} +", + ) + } + + #[test] + #[ignore] + fn create_method_with_no_args() { + check_assist( + generate_function, + r" +struct Foo; +impl Foo { + fn foo(&self) { + self.bar()<|>; + } +} + ", + r" +struct Foo; +impl Foo { + fn foo(&self) { + self.bar(); + } + fn bar(&self) { + todo!(); + } +} + ", + ) + } +} diff --git a/crates/assists/src/handlers/generate_impl.rs b/crates/assists/src/handlers/generate_impl.rs new file mode 100644 index 0000000000..9989109b5a --- /dev/null +++ b/crates/assists/src/handlers/generate_impl.rs @@ -0,0 +1,110 @@ +use itertools::Itertools; +use stdx::format_to; +use syntax::ast::{self, AstNode, GenericParamsOwner, NameOwner}; + +use crate::{AssistContext, AssistId, AssistKind, Assists}; + +// Assist: generate_impl +// +// Adds a new inherent impl for a type. 
+// +// ``` +// struct Ctx { +// data: T,<|> +// } +// ``` +// -> +// ``` +// struct Ctx { +// data: T, +// } +// +// impl Ctx { +// $0 +// } +// ``` +pub(crate) fn generate_impl(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + let nominal = ctx.find_node_at_offset::()?; + let name = nominal.name()?; + let target = nominal.syntax().text_range(); + acc.add( + AssistId("generate_impl", AssistKind::Generate), + format!("Generate impl for `{}`", name), + target, + |edit| { + let type_params = nominal.generic_param_list(); + let start_offset = nominal.syntax().text_range().end(); + let mut buf = String::new(); + buf.push_str("\n\nimpl"); + if let Some(type_params) = &type_params { + format_to!(buf, "{}", type_params.syntax()); + } + buf.push_str(" "); + buf.push_str(name.text().as_str()); + if let Some(type_params) = type_params { + let lifetime_params = type_params + .lifetime_params() + .filter_map(|it| it.lifetime_token()) + .map(|it| it.text().clone()); + let type_params = type_params + .type_params() + .filter_map(|it| it.name()) + .map(|it| it.text().clone()); + + let generic_params = lifetime_params.chain(type_params).format(", "); + format_to!(buf, "<{}>", generic_params) + } + match ctx.config.snippet_cap { + Some(cap) => { + buf.push_str(" {\n $0\n}"); + edit.insert_snippet(cap, start_offset, buf); + } + None => { + buf.push_str(" {\n}"); + edit.insert(start_offset, buf); + } + } + }, + ) +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_target}; + + use super::*; + + #[test] + fn test_add_impl() { + check_assist( + generate_impl, + "struct Foo {<|>}\n", + "struct Foo {}\n\nimpl Foo {\n $0\n}\n", + ); + check_assist( + generate_impl, + "struct Foo {<|>}", + "struct Foo {}\n\nimpl Foo {\n $0\n}", + ); + check_assist( + generate_impl, + "struct Foo<'a, T: Foo<'a>> {<|>}", + "struct Foo<'a, T: Foo<'a>> {}\n\nimpl<'a, T: Foo<'a>> Foo<'a, T> {\n $0\n}", + ); + } + + #[test] + fn add_impl_target() { + check_assist_target( + 
generate_impl, + " +struct SomeThingIrrelevant; +/// Has a lifetime parameter +struct Foo<'a, T: Foo<'a>> {<|>} +struct EvenMoreIrrelevant; +", + "/// Has a lifetime parameter +struct Foo<'a, T: Foo<'a>> {}", + ); + } +} diff --git a/crates/assists/src/handlers/generate_new.rs b/crates/assists/src/handlers/generate_new.rs new file mode 100644 index 0000000000..7db10f2768 --- /dev/null +++ b/crates/assists/src/handlers/generate_new.rs @@ -0,0 +1,421 @@ +use hir::Adt; +use itertools::Itertools; +use stdx::format_to; +use syntax::{ + ast::{self, AstNode, GenericParamsOwner, NameOwner, StructKind, VisibilityOwner}, + T, +}; + +use crate::{AssistContext, AssistId, AssistKind, Assists}; + +// Assist: generate_new +// +// Adds a new inherent impl for a type. +// +// ``` +// struct Ctx { +// data: T,<|> +// } +// ``` +// -> +// ``` +// struct Ctx { +// data: T, +// } +// +// impl Ctx { +// fn $0new(data: T) -> Self { Self { data } } +// } +// +// ``` +pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + let strukt = ctx.find_node_at_offset::()?; + + // We want to only apply this to non-union structs with named fields + let field_list = match strukt.kind() { + StructKind::Record(named) => named, + _ => return None, + }; + + // Return early if we've found an existing new fn + let impl_def = find_struct_impl(&ctx, &strukt)?; + + let target = strukt.syntax().text_range(); + acc.add(AssistId("generate_new", AssistKind::Generate), "Generate `new`", target, |builder| { + let mut buf = String::with_capacity(512); + + if impl_def.is_some() { + buf.push('\n'); + } + + let vis = strukt.visibility().map_or(String::new(), |v| format!("{} ", v)); + + let params = field_list + .fields() + .filter_map(|f| Some(format!("{}: {}", f.name()?.syntax(), f.ty()?.syntax()))) + .format(", "); + let fields = field_list.fields().filter_map(|f| f.name()).format(", "); + + format_to!(buf, " {}fn new({}) -> Self {{ Self {{ {} }} }}", vis, params, fields); + + let 
start_offset = impl_def + .and_then(|impl_def| { + buf.push('\n'); + let start = impl_def + .syntax() + .descendants_with_tokens() + .find(|t| t.kind() == T!['{'])? + .text_range() + .end(); + + Some(start) + }) + .unwrap_or_else(|| { + buf = generate_impl_text(&strukt, &buf); + strukt.syntax().text_range().end() + }); + + match ctx.config.snippet_cap { + None => builder.insert(start_offset, buf), + Some(cap) => { + buf = buf.replace("fn new", "fn $0new"); + builder.insert_snippet(cap, start_offset, buf); + } + } + }) +} + +// Generates the surrounding `impl Type { }` including type and lifetime +// parameters +fn generate_impl_text(strukt: &ast::Struct, code: &str) -> String { + let type_params = strukt.generic_param_list(); + let mut buf = String::with_capacity(code.len()); + buf.push_str("\n\nimpl"); + if let Some(type_params) = &type_params { + format_to!(buf, "{}", type_params.syntax()); + } + buf.push_str(" "); + buf.push_str(strukt.name().unwrap().text().as_str()); + if let Some(type_params) = type_params { + let lifetime_params = type_params + .lifetime_params() + .filter_map(|it| it.lifetime_token()) + .map(|it| it.text().clone()); + let type_params = + type_params.type_params().filter_map(|it| it.name()).map(|it| it.text().clone()); + format_to!(buf, "<{}>", lifetime_params.chain(type_params).format(", ")) + } + + format_to!(buf, " {{\n{}\n}}\n", code); + + buf +} + +// Uses a syntax-driven approach to find any impl blocks for the struct that +// exist within the module/file +// +// Returns `None` if we've found an existing `new` fn +// +// FIXME: change the new fn checking to a more semantic approach when that's more +// viable (e.g. 
we process proc macros, etc) +fn find_struct_impl(ctx: &AssistContext, strukt: &ast::Struct) -> Option> { + let db = ctx.db(); + let module = strukt.syntax().ancestors().find(|node| { + ast::Module::can_cast(node.kind()) || ast::SourceFile::can_cast(node.kind()) + })?; + + let struct_def = ctx.sema.to_def(strukt)?; + + let block = module.descendants().filter_map(ast::Impl::cast).find_map(|impl_blk| { + let blk = ctx.sema.to_def(&impl_blk)?; + + // FIXME: handle e.g. `struct S; impl S {}` + // (we currently use the wrong type parameter) + // also we wouldn't want to use e.g. `impl S` + let same_ty = match blk.target_ty(db).as_adt() { + Some(def) => def == Adt::Struct(struct_def), + None => false, + }; + let not_trait_impl = blk.target_trait(db).is_none(); + + if !(same_ty && not_trait_impl) { + None + } else { + Some(impl_blk) + } + }); + + if let Some(ref impl_blk) = block { + if has_new_fn(impl_blk) { + return None; + } + } + + Some(block) +} + +fn has_new_fn(imp: &ast::Impl) -> bool { + if let Some(il) = imp.assoc_item_list() { + for item in il.assoc_items() { + if let ast::AssocItem::Fn(f) = item { + if let Some(name) = f.name() { + if name.text().eq_ignore_ascii_case("new") { + return true; + } + } + } + } + } + + false +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target}; + + use super::*; + + #[test] + #[rustfmt::skip] + fn test_generate_new() { + // Check output of generation + check_assist( + generate_new, +"struct Foo {<|>}", +"struct Foo {} + +impl Foo { + fn $0new() -> Self { Self { } } +} +", + ); + check_assist( + generate_new, +"struct Foo {<|>}", +"struct Foo {} + +impl Foo { + fn $0new() -> Self { Self { } } +} +", + ); + check_assist( + generate_new, +"struct Foo<'a, T: Foo<'a>> {<|>}", +"struct Foo<'a, T: Foo<'a>> {} + +impl<'a, T: Foo<'a>> Foo<'a, T> { + fn $0new() -> Self { Self { } } +} +", + ); + check_assist( + generate_new, +"struct Foo { baz: String <|>}", +"struct Foo { baz: 
String } + +impl Foo { + fn $0new(baz: String) -> Self { Self { baz } } +} +", + ); + check_assist( + generate_new, +"struct Foo { baz: String, qux: Vec <|>}", +"struct Foo { baz: String, qux: Vec } + +impl Foo { + fn $0new(baz: String, qux: Vec) -> Self { Self { baz, qux } } +} +", + ); + + // Check that visibility modifiers don't get brought in for fields + check_assist( + generate_new, +"struct Foo { pub baz: String, pub qux: Vec <|>}", +"struct Foo { pub baz: String, pub qux: Vec } + +impl Foo { + fn $0new(baz: String, qux: Vec) -> Self { Self { baz, qux } } +} +", + ); + + // Check that it reuses existing impls + check_assist( + generate_new, +"struct Foo {<|>} + +impl Foo {} +", +"struct Foo {} + +impl Foo { + fn $0new() -> Self { Self { } } +} +", + ); + check_assist( + generate_new, +"struct Foo {<|>} + +impl Foo { + fn qux(&self) {} +} +", +"struct Foo {} + +impl Foo { + fn $0new() -> Self { Self { } } + + fn qux(&self) {} +} +", + ); + + check_assist( + generate_new, +"struct Foo {<|>} + +impl Foo { + fn qux(&self) {} + fn baz() -> i32 { + 5 + } +} +", +"struct Foo {} + +impl Foo { + fn $0new() -> Self { Self { } } + + fn qux(&self) {} + fn baz() -> i32 { + 5 + } +} +", + ); + + // Check visibility of new fn based on struct + check_assist( + generate_new, +"pub struct Foo {<|>}", +"pub struct Foo {} + +impl Foo { + pub fn $0new() -> Self { Self { } } +} +", + ); + check_assist( + generate_new, +"pub(crate) struct Foo {<|>}", +"pub(crate) struct Foo {} + +impl Foo { + pub(crate) fn $0new() -> Self { Self { } } +} +", + ); + } + + #[test] + fn generate_new_not_applicable_if_fn_exists() { + check_assist_not_applicable( + generate_new, + " +struct Foo {<|>} + +impl Foo { + fn new() -> Self { + Self + } +}", + ); + + check_assist_not_applicable( + generate_new, + " +struct Foo {<|>} + +impl Foo { + fn New() -> Self { + Self + } +}", + ); + } + + #[test] + fn generate_new_target() { + check_assist_target( + generate_new, + " +struct SomeThingIrrelevant; +/// 
Has a lifetime parameter +struct Foo<'a, T: Foo<'a>> {<|>} +struct EvenMoreIrrelevant; +", + "/// Has a lifetime parameter +struct Foo<'a, T: Foo<'a>> {}", + ); + } + + #[test] + fn test_unrelated_new() { + check_assist( + generate_new, + r##" +pub struct AstId { + file_id: HirFileId, + file_ast_id: FileAstId, +} + +impl AstId { + pub fn new(file_id: HirFileId, file_ast_id: FileAstId) -> AstId { + AstId { file_id, file_ast_id } + } +} + +pub struct Source { + pub file_id: HirFileId,<|> + pub ast: T, +} + +impl Source { + pub fn map U, U>(self, f: F) -> Source { + Source { file_id: self.file_id, ast: f(self.ast) } + } +} +"##, + r##" +pub struct AstId { + file_id: HirFileId, + file_ast_id: FileAstId, +} + +impl AstId { + pub fn new(file_id: HirFileId, file_ast_id: FileAstId) -> AstId { + AstId { file_id, file_ast_id } + } +} + +pub struct Source { + pub file_id: HirFileId, + pub ast: T, +} + +impl Source { + pub fn $0new(file_id: HirFileId, ast: T) -> Self { Self { file_id, ast } } + + pub fn map U, U>(self, f: F) -> Source { + Source { file_id: self.file_id, ast: f(self.ast) } + } +} +"##, + ); + } +} diff --git a/crates/assists/src/handlers/inline_local_variable.rs b/crates/assists/src/handlers/inline_local_variable.rs new file mode 100644 index 0000000000..2b52b333b8 --- /dev/null +++ b/crates/assists/src/handlers/inline_local_variable.rs @@ -0,0 +1,695 @@ +use ide_db::defs::Definition; +use syntax::{ + ast::{self, AstNode, AstToken}, + TextRange, +}; +use test_utils::mark; + +use crate::{ + assist_context::{AssistContext, Assists}, + AssistId, AssistKind, +}; + +// Assist: inline_local_variable +// +// Inlines local variable. +// +// ``` +// fn main() { +// let x<|> = 1 + 2; +// x * 4; +// } +// ``` +// -> +// ``` +// fn main() { +// (1 + 2) * 4; +// } +// ``` +pub(crate) fn inline_local_variable(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + let let_stmt = ctx.find_node_at_offset::()?; + let bind_pat = match let_stmt.pat()? 
{ + ast::Pat::IdentPat(pat) => pat, + _ => return None, + }; + if bind_pat.mut_token().is_some() { + mark::hit!(test_not_inline_mut_variable); + return None; + } + if !bind_pat.syntax().text_range().contains_inclusive(ctx.offset()) { + mark::hit!(not_applicable_outside_of_bind_pat); + return None; + } + let initializer_expr = let_stmt.initializer()?; + + let def = ctx.sema.to_def(&bind_pat)?; + let def = Definition::Local(def); + let refs = def.find_usages(&ctx.sema, None); + if refs.is_empty() { + mark::hit!(test_not_applicable_if_variable_unused); + return None; + }; + + let delete_range = if let Some(whitespace) = let_stmt + .syntax() + .next_sibling_or_token() + .and_then(|it| ast::Whitespace::cast(it.as_token()?.clone())) + { + TextRange::new( + let_stmt.syntax().text_range().start(), + whitespace.syntax().text_range().end(), + ) + } else { + let_stmt.syntax().text_range() + }; + + let mut wrap_in_parens = vec![true; refs.len()]; + + for (i, desc) in refs.iter().enumerate() { + let usage_node = ctx + .covering_node_for_range(desc.file_range.range) + .ancestors() + .find_map(ast::PathExpr::cast)?; + let usage_parent_option = usage_node.syntax().parent().and_then(ast::Expr::cast); + let usage_parent = match usage_parent_option { + Some(u) => u, + None => { + wrap_in_parens[i] = false; + continue; + } + }; + + wrap_in_parens[i] = match (&initializer_expr, usage_parent) { + (ast::Expr::CallExpr(_), _) + | (ast::Expr::IndexExpr(_), _) + | (ast::Expr::MethodCallExpr(_), _) + | (ast::Expr::FieldExpr(_), _) + | (ast::Expr::TryExpr(_), _) + | (ast::Expr::RefExpr(_), _) + | (ast::Expr::Literal(_), _) + | (ast::Expr::TupleExpr(_), _) + | (ast::Expr::ArrayExpr(_), _) + | (ast::Expr::ParenExpr(_), _) + | (ast::Expr::PathExpr(_), _) + | (ast::Expr::BlockExpr(_), _) + | (ast::Expr::EffectExpr(_), _) + | (_, ast::Expr::CallExpr(_)) + | (_, ast::Expr::TupleExpr(_)) + | (_, ast::Expr::ArrayExpr(_)) + | (_, ast::Expr::ParenExpr(_)) + | (_, ast::Expr::ForExpr(_)) + | (_, 
ast::Expr::WhileExpr(_)) + | (_, ast::Expr::BreakExpr(_)) + | (_, ast::Expr::ReturnExpr(_)) + | (_, ast::Expr::MatchExpr(_)) => false, + _ => true, + }; + } + + let init_str = initializer_expr.syntax().text().to_string(); + let init_in_paren = format!("({})", &init_str); + + let target = bind_pat.syntax().text_range(); + acc.add( + AssistId("inline_local_variable", AssistKind::RefactorInline), + "Inline variable", + target, + move |builder| { + builder.delete(delete_range); + for (desc, should_wrap) in refs.iter().zip(wrap_in_parens) { + let replacement = + if should_wrap { init_in_paren.clone() } else { init_str.clone() }; + builder.replace(desc.file_range.range, replacement) + } + }, + ) +} + +#[cfg(test)] +mod tests { + use test_utils::mark; + + use crate::tests::{check_assist, check_assist_not_applicable}; + + use super::*; + + #[test] + fn test_inline_let_bind_literal_expr() { + check_assist( + inline_local_variable, + r" +fn bar(a: usize) {} +fn foo() { + let a<|> = 1; + a + 1; + if a > 10 { + } + + while a > 10 { + + } + let b = a * 10; + bar(a); +}", + r" +fn bar(a: usize) {} +fn foo() { + 1 + 1; + if 1 > 10 { + } + + while 1 > 10 { + + } + let b = 1 * 10; + bar(1); +}", + ); + } + + #[test] + fn test_inline_let_bind_bin_expr() { + check_assist( + inline_local_variable, + r" +fn bar(a: usize) {} +fn foo() { + let a<|> = 1 + 1; + a + 1; + if a > 10 { + } + + while a > 10 { + + } + let b = a * 10; + bar(a); +}", + r" +fn bar(a: usize) {} +fn foo() { + (1 + 1) + 1; + if (1 + 1) > 10 { + } + + while (1 + 1) > 10 { + + } + let b = (1 + 1) * 10; + bar(1 + 1); +}", + ); + } + + #[test] + fn test_inline_let_bind_function_call_expr() { + check_assist( + inline_local_variable, + r" +fn bar(a: usize) {} +fn foo() { + let a<|> = bar(1); + a + 1; + if a > 10 { + } + + while a > 10 { + + } + let b = a * 10; + bar(a); +}", + r" +fn bar(a: usize) {} +fn foo() { + bar(1) + 1; + if bar(1) > 10 { + } + + while bar(1) > 10 { + + } + let b = bar(1) * 10; + bar(bar(1)); +}", + 
); + } + + #[test] + fn test_inline_let_bind_cast_expr() { + check_assist( + inline_local_variable, + r" +fn bar(a: usize): usize { a } +fn foo() { + let a<|> = bar(1) as u64; + a + 1; + if a > 10 { + } + + while a > 10 { + + } + let b = a * 10; + bar(a); +}", + r" +fn bar(a: usize): usize { a } +fn foo() { + (bar(1) as u64) + 1; + if (bar(1) as u64) > 10 { + } + + while (bar(1) as u64) > 10 { + + } + let b = (bar(1) as u64) * 10; + bar(bar(1) as u64); +}", + ); + } + + #[test] + fn test_inline_let_bind_block_expr() { + check_assist( + inline_local_variable, + r" +fn foo() { + let a<|> = { 10 + 1 }; + a + 1; + if a > 10 { + } + + while a > 10 { + + } + let b = a * 10; + bar(a); +}", + r" +fn foo() { + { 10 + 1 } + 1; + if { 10 + 1 } > 10 { + } + + while { 10 + 1 } > 10 { + + } + let b = { 10 + 1 } * 10; + bar({ 10 + 1 }); +}", + ); + } + + #[test] + fn test_inline_let_bind_paren_expr() { + check_assist( + inline_local_variable, + r" +fn foo() { + let a<|> = ( 10 + 1 ); + a + 1; + if a > 10 { + } + + while a > 10 { + + } + let b = a * 10; + bar(a); +}", + r" +fn foo() { + ( 10 + 1 ) + 1; + if ( 10 + 1 ) > 10 { + } + + while ( 10 + 1 ) > 10 { + + } + let b = ( 10 + 1 ) * 10; + bar(( 10 + 1 )); +}", + ); + } + + #[test] + fn test_not_inline_mut_variable() { + mark::check!(test_not_inline_mut_variable); + check_assist_not_applicable( + inline_local_variable, + r" +fn foo() { + let mut a<|> = 1 + 1; + a + 1; +}", + ); + } + + #[test] + fn test_call_expr() { + check_assist( + inline_local_variable, + r" +fn foo() { + let a<|> = bar(10 + 1); + let b = a * 10; + let c = a as usize; +}", + r" +fn foo() { + let b = bar(10 + 1) * 10; + let c = bar(10 + 1) as usize; +}", + ); + } + + #[test] + fn test_index_expr() { + check_assist( + inline_local_variable, + r" +fn foo() { + let x = vec![1, 2, 3]; + let a<|> = x[0]; + let b = a * 10; + let c = a as usize; +}", + r" +fn foo() { + let x = vec![1, 2, 3]; + let b = x[0] * 10; + let c = x[0] as usize; +}", + ); + } + + #[test] + fn 
test_method_call_expr() { + check_assist( + inline_local_variable, + r" +fn foo() { + let bar = vec![1]; + let a<|> = bar.len(); + let b = a * 10; + let c = a as usize; +}", + r" +fn foo() { + let bar = vec![1]; + let b = bar.len() * 10; + let c = bar.len() as usize; +}", + ); + } + + #[test] + fn test_field_expr() { + check_assist( + inline_local_variable, + r" +struct Bar { + foo: usize +} + +fn foo() { + let bar = Bar { foo: 1 }; + let a<|> = bar.foo; + let b = a * 10; + let c = a as usize; +}", + r" +struct Bar { + foo: usize +} + +fn foo() { + let bar = Bar { foo: 1 }; + let b = bar.foo * 10; + let c = bar.foo as usize; +}", + ); + } + + #[test] + fn test_try_expr() { + check_assist( + inline_local_variable, + r" +fn foo() -> Option { + let bar = Some(1); + let a<|> = bar?; + let b = a * 10; + let c = a as usize; + None +}", + r" +fn foo() -> Option { + let bar = Some(1); + let b = bar? * 10; + let c = bar? as usize; + None +}", + ); + } + + #[test] + fn test_ref_expr() { + check_assist( + inline_local_variable, + r" +fn foo() { + let bar = 10; + let a<|> = &bar; + let b = a * 10; +}", + r" +fn foo() { + let bar = 10; + let b = &bar * 10; +}", + ); + } + + #[test] + fn test_tuple_expr() { + check_assist( + inline_local_variable, + r" +fn foo() { + let a<|> = (10, 20); + let b = a[0]; +}", + r" +fn foo() { + let b = (10, 20)[0]; +}", + ); + } + + #[test] + fn test_array_expr() { + check_assist( + inline_local_variable, + r" +fn foo() { + let a<|> = [1, 2, 3]; + let b = a.len(); +}", + r" +fn foo() { + let b = [1, 2, 3].len(); +}", + ); + } + + #[test] + fn test_paren() { + check_assist( + inline_local_variable, + r" +fn foo() { + let a<|> = (10 + 20); + let b = a * 10; + let c = a as usize; +}", + r" +fn foo() { + let b = (10 + 20) * 10; + let c = (10 + 20) as usize; +}", + ); + } + + #[test] + fn test_path_expr() { + check_assist( + inline_local_variable, + r" +fn foo() { + let d = 10; + let a<|> = d; + let b = a * 10; + let c = a as usize; +}", + r" +fn foo() 
{ + let d = 10; + let b = d * 10; + let c = d as usize; +}", + ); + } + + #[test] + fn test_block_expr() { + check_assist( + inline_local_variable, + r" +fn foo() { + let a<|> = { 10 }; + let b = a * 10; + let c = a as usize; +}", + r" +fn foo() { + let b = { 10 } * 10; + let c = { 10 } as usize; +}", + ); + } + + #[test] + fn test_used_in_different_expr1() { + check_assist( + inline_local_variable, + r" +fn foo() { + let a<|> = 10 + 20; + let b = a * 10; + let c = (a, 20); + let d = [a, 10]; + let e = (a); +}", + r" +fn foo() { + let b = (10 + 20) * 10; + let c = (10 + 20, 20); + let d = [10 + 20, 10]; + let e = (10 + 20); +}", + ); + } + + #[test] + fn test_used_in_for_expr() { + check_assist( + inline_local_variable, + r" +fn foo() { + let a<|> = vec![10, 20]; + for i in a {} +}", + r" +fn foo() { + for i in vec![10, 20] {} +}", + ); + } + + #[test] + fn test_used_in_while_expr() { + check_assist( + inline_local_variable, + r" +fn foo() { + let a<|> = 1 > 0; + while a {} +}", + r" +fn foo() { + while 1 > 0 {} +}", + ); + } + + #[test] + fn test_used_in_break_expr() { + check_assist( + inline_local_variable, + r" +fn foo() { + let a<|> = 1 + 1; + loop { + break a; + } +}", + r" +fn foo() { + loop { + break 1 + 1; + } +}", + ); + } + + #[test] + fn test_used_in_return_expr() { + check_assist( + inline_local_variable, + r" +fn foo() { + let a<|> = 1 > 0; + return a; +}", + r" +fn foo() { + return 1 > 0; +}", + ); + } + + #[test] + fn test_used_in_match_expr() { + check_assist( + inline_local_variable, + r" +fn foo() { + let a<|> = 1 > 0; + match a {} +}", + r" +fn foo() { + match 1 > 0 {} +}", + ); + } + + #[test] + fn test_not_applicable_if_variable_unused() { + mark::check!(test_not_applicable_if_variable_unused); + check_assist_not_applicable( + inline_local_variable, + r" +fn foo() { + let <|>a = 0; +} + ", + ) + } + + #[test] + fn not_applicable_outside_of_bind_pat() { + mark::check!(not_applicable_outside_of_bind_pat); + check_assist_not_applicable( + 
inline_local_variable, + r" +fn main() { + let x = <|>1 + 2; + x * 4; +} +", + ) + } +} diff --git a/crates/assists/src/handlers/introduce_named_lifetime.rs b/crates/assists/src/handlers/introduce_named_lifetime.rs new file mode 100644 index 0000000000..5f623e5f7a --- /dev/null +++ b/crates/assists/src/handlers/introduce_named_lifetime.rs @@ -0,0 +1,318 @@ +use rustc_hash::FxHashSet; +use syntax::{ + ast::{self, GenericParamsOwner, NameOwner}, + AstNode, SyntaxKind, TextRange, TextSize, +}; + +use crate::{assist_context::AssistBuilder, AssistContext, AssistId, AssistKind, Assists}; + +static ASSIST_NAME: &str = "introduce_named_lifetime"; +static ASSIST_LABEL: &str = "Introduce named lifetime"; + +// Assist: introduce_named_lifetime +// +// Change an anonymous lifetime to a named lifetime. +// +// ``` +// impl Cursor<'_<|>> { +// fn node(self) -> &SyntaxNode { +// match self { +// Cursor::Replace(node) | Cursor::Before(node) => node, +// } +// } +// } +// ``` +// -> +// ``` +// impl<'a> Cursor<'a> { +// fn node(self) -> &SyntaxNode { +// match self { +// Cursor::Replace(node) | Cursor::Before(node) => node, +// } +// } +// } +// ``` +// FIXME: How can we handle renaming any one of multiple anonymous lifetimes? 
+// FIXME: should also add support for the case fun(f: &Foo) -> &<|>Foo +pub(crate) fn introduce_named_lifetime(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + let lifetime_token = ctx + .find_token_at_offset(SyntaxKind::LIFETIME) + .filter(|lifetime| lifetime.text() == "'_")?; + if let Some(fn_def) = lifetime_token.ancestors().find_map(ast::Fn::cast) { + generate_fn_def_assist(acc, &fn_def, lifetime_token.text_range()) + } else if let Some(impl_def) = lifetime_token.ancestors().find_map(ast::Impl::cast) { + generate_impl_def_assist(acc, &impl_def, lifetime_token.text_range()) + } else { + None + } +} + +/// Generate the assist for the fn def case +fn generate_fn_def_assist( + acc: &mut Assists, + fn_def: &ast::Fn, + lifetime_loc: TextRange, +) -> Option<()> { + let param_list: ast::ParamList = fn_def.param_list()?; + let new_lifetime_param = generate_unique_lifetime_param_name(&fn_def.generic_param_list())?; + let end_of_fn_ident = fn_def.name()?.ident_token()?.text_range().end(); + let self_param = + // use the self if it's a reference and has no explicit lifetime + param_list.self_param().filter(|p| p.lifetime_token().is_none() && p.amp_token().is_some()); + // compute the location which implicitly has the same lifetime as the anonymous lifetime + let loc_needing_lifetime = if let Some(self_param) = self_param { + // if we have a self reference, use that + Some(self_param.self_token()?.text_range().start()) + } else { + // otherwise, if there's a single reference parameter without a named lifetime, use that + let fn_params_without_lifetime: Vec<_> = param_list + .params() + .filter_map(|param| match param.ty() { + Some(ast::Type::RefType(ascribed_type)) + if ascribed_type.lifetime_token() == None => + { + Some(ascribed_type.amp_token()?.text_range().end()) + } + _ => None, + }) + .collect(); + match fn_params_without_lifetime.len() { + 1 => Some(fn_params_without_lifetime.into_iter().nth(0)?), + 0 => None, + // multiple unnamed is invalid. 
assist is not applicable + _ => return None, + } + }; + acc.add(AssistId(ASSIST_NAME, AssistKind::Refactor), ASSIST_LABEL, lifetime_loc, |builder| { + add_lifetime_param(fn_def, builder, end_of_fn_ident, new_lifetime_param); + builder.replace(lifetime_loc, format!("'{}", new_lifetime_param)); + loc_needing_lifetime.map(|loc| builder.insert(loc, format!("'{} ", new_lifetime_param))); + }) +} + +/// Generate the assist for the impl def case +fn generate_impl_def_assist( + acc: &mut Assists, + impl_def: &ast::Impl, + lifetime_loc: TextRange, +) -> Option<()> { + let new_lifetime_param = generate_unique_lifetime_param_name(&impl_def.generic_param_list())?; + let end_of_impl_kw = impl_def.impl_token()?.text_range().end(); + acc.add(AssistId(ASSIST_NAME, AssistKind::Refactor), ASSIST_LABEL, lifetime_loc, |builder| { + add_lifetime_param(impl_def, builder, end_of_impl_kw, new_lifetime_param); + builder.replace(lifetime_loc, format!("'{}", new_lifetime_param)); + }) +} + +/// Given a type parameter list, generate a unique lifetime parameter name +/// which is not in the list +fn generate_unique_lifetime_param_name( + existing_type_param_list: &Option, +) -> Option { + match existing_type_param_list { + Some(type_params) => { + let used_lifetime_params: FxHashSet<_> = type_params + .lifetime_params() + .map(|p| p.syntax().text().to_string()[1..].to_owned()) + .collect(); + (b'a'..=b'z').map(char::from).find(|c| !used_lifetime_params.contains(&c.to_string())) + } + None => Some('a'), + } +} + +/// Add the lifetime param to `builder`. If there are type parameters in `type_params_owner`, add it to the end. Otherwise +/// add new type params brackets with the lifetime parameter at `new_type_params_loc`. 
+fn add_lifetime_param( + type_params_owner: &TypeParamsOwner, + builder: &mut AssistBuilder, + new_type_params_loc: TextSize, + new_lifetime_param: char, +) { + match type_params_owner.generic_param_list() { + // add the new lifetime parameter to an existing type param list + Some(type_params) => { + builder.insert( + (u32::from(type_params.syntax().text_range().end()) - 1).into(), + format!(", '{}", new_lifetime_param), + ); + } + // create a new type param list containing only the new lifetime parameter + None => { + builder.insert(new_type_params_loc, format!("<'{}>", new_lifetime_param)); + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::tests::{check_assist, check_assist_not_applicable}; + + #[test] + fn test_example_case() { + check_assist( + introduce_named_lifetime, + r#"impl Cursor<'_<|>> { + fn node(self) -> &SyntaxNode { + match self { + Cursor::Replace(node) | Cursor::Before(node) => node, + } + } + }"#, + r#"impl<'a> Cursor<'a> { + fn node(self) -> &SyntaxNode { + match self { + Cursor::Replace(node) | Cursor::Before(node) => node, + } + } + }"#, + ); + } + + #[test] + fn test_example_case_simplified() { + check_assist( + introduce_named_lifetime, + r#"impl Cursor<'_<|>> {"#, + r#"impl<'a> Cursor<'a> {"#, + ); + } + + #[test] + fn test_example_case_cursor_after_tick() { + check_assist( + introduce_named_lifetime, + r#"impl Cursor<'<|>_> {"#, + r#"impl<'a> Cursor<'a> {"#, + ); + } + + #[test] + fn test_impl_with_other_type_param() { + check_assist( + introduce_named_lifetime, + "impl fmt::Display for SepByBuilder<'_<|>, I> + where + I: Iterator, + I::Item: fmt::Display, + {", + "impl fmt::Display for SepByBuilder<'a, I> + where + I: Iterator, + I::Item: fmt::Display, + {", + ) + } + + #[test] + fn test_example_case_cursor_before_tick() { + check_assist( + introduce_named_lifetime, + r#"impl Cursor<<|>'_> {"#, + r#"impl<'a> Cursor<'a> {"#, + ); + } + + #[test] + fn test_not_applicable_cursor_position() { + 
check_assist_not_applicable(introduce_named_lifetime, r#"impl Cursor<'_><|> {"#); + check_assist_not_applicable(introduce_named_lifetime, r#"impl Cursor<|><'_> {"#); + } + + #[test] + fn test_not_applicable_lifetime_already_name() { + check_assist_not_applicable(introduce_named_lifetime, r#"impl Cursor<'a<|>> {"#); + check_assist_not_applicable(introduce_named_lifetime, r#"fn my_fun<'a>() -> X<'a<|>>"#); + } + + #[test] + fn test_with_type_parameter() { + check_assist( + introduce_named_lifetime, + r#"impl Cursor>"#, + r#"impl Cursor"#, + ); + } + + #[test] + fn test_with_existing_lifetime_name_conflict() { + check_assist( + introduce_named_lifetime, + r#"impl<'a, 'b> Cursor<'a, 'b, '_<|>>"#, + r#"impl<'a, 'b, 'c> Cursor<'a, 'b, 'c>"#, + ); + } + + #[test] + fn test_function_return_value_anon_lifetime_param() { + check_assist( + introduce_named_lifetime, + r#"fn my_fun() -> X<'_<|>>"#, + r#"fn my_fun<'a>() -> X<'a>"#, + ); + } + + #[test] + fn test_function_return_value_anon_reference_lifetime() { + check_assist( + introduce_named_lifetime, + r#"fn my_fun() -> &'_<|> X"#, + r#"fn my_fun<'a>() -> &'a X"#, + ); + } + + #[test] + fn test_function_param_anon_lifetime() { + check_assist( + introduce_named_lifetime, + r#"fn my_fun(x: X<'_<|>>)"#, + r#"fn my_fun<'a>(x: X<'a>)"#, + ); + } + + #[test] + fn test_function_add_lifetime_to_params() { + check_assist( + introduce_named_lifetime, + r#"fn my_fun(f: &Foo) -> X<'_<|>>"#, + r#"fn my_fun<'a>(f: &'a Foo) -> X<'a>"#, + ); + } + + #[test] + fn test_function_add_lifetime_to_params_in_presence_of_other_lifetime() { + check_assist( + introduce_named_lifetime, + r#"fn my_fun<'other>(f: &Foo, b: &'other Bar) -> X<'_<|>>"#, + r#"fn my_fun<'other, 'a>(f: &'a Foo, b: &'other Bar) -> X<'a>"#, + ); + } + + #[test] + fn test_function_not_applicable_without_self_and_multiple_unnamed_param_lifetimes() { + // this is not permitted under lifetime elision rules + check_assist_not_applicable( + introduce_named_lifetime, + r#"fn my_fun(f: 
&Foo, b: &Bar) -> X<'_<|>>"#, + ); + } + + #[test] + fn test_function_add_lifetime_to_self_ref_param() { + check_assist( + introduce_named_lifetime, + r#"fn my_fun<'other>(&self, f: &Foo, b: &'other Bar) -> X<'_<|>>"#, + r#"fn my_fun<'other, 'a>(&'a self, f: &Foo, b: &'other Bar) -> X<'a>"#, + ); + } + + #[test] + fn test_function_add_lifetime_to_param_with_non_ref_self() { + check_assist( + introduce_named_lifetime, + r#"fn my_fun<'other>(self, f: &Foo, b: &'other Bar) -> X<'_<|>>"#, + r#"fn my_fun<'other, 'a>(self, f: &'a Foo, b: &'other Bar) -> X<'a>"#, + ); + } +} diff --git a/crates/assists/src/handlers/invert_if.rs b/crates/assists/src/handlers/invert_if.rs new file mode 100644 index 0000000000..f0e0475389 --- /dev/null +++ b/crates/assists/src/handlers/invert_if.rs @@ -0,0 +1,109 @@ +use syntax::{ + ast::{self, AstNode}, + T, +}; + +use crate::{ + assist_context::{AssistContext, Assists}, + utils::invert_boolean_expression, + AssistId, AssistKind, +}; + +// Assist: invert_if +// +// Apply invert_if +// This transforms if expressions of the form `if !x {A} else {B}` into `if x {B} else {A}` +// This also works with `!=`. This assist can only be applied with the cursor +// on `if`. +// +// ``` +// fn main() { +// if<|> !y { A } else { B } +// } +// ``` +// -> +// ``` +// fn main() { +// if y { B } else { A } +// } +// ``` + +pub(crate) fn invert_if(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + let if_keyword = ctx.find_token_at_offset(T![if])?; + let expr = ast::IfExpr::cast(if_keyword.parent())?; + let if_range = if_keyword.text_range(); + let cursor_in_range = if_range.contains_range(ctx.frange.range); + if !cursor_in_range { + return None; + } + + // This assist should not apply for if-let. + if expr.condition()?.pat().is_some() { + return None; + } + + let cond = expr.condition()?.expr()?; + let then_node = expr.then_branch()?.syntax().clone(); + let else_block = match expr.else_branch()? 
{ + ast::ElseBranch::Block(it) => it, + ast::ElseBranch::IfExpr(_) => return None, + }; + + let cond_range = cond.syntax().text_range(); + let flip_cond = invert_boolean_expression(cond); + let else_node = else_block.syntax(); + let else_range = else_node.text_range(); + let then_range = then_node.text_range(); + acc.add(AssistId("invert_if", AssistKind::RefactorRewrite), "Invert if", if_range, |edit| { + edit.replace(cond_range, flip_cond.syntax().text()); + edit.replace(else_range, then_node.text()); + edit.replace(then_range, else_node.text()); + }) +} + +#[cfg(test)] +mod tests { + use super::*; + + use crate::tests::{check_assist, check_assist_not_applicable}; + + #[test] + fn invert_if_remove_inequality() { + check_assist( + invert_if, + "fn f() { i<|>f x != 3 { 1 } else { 3 + 2 } }", + "fn f() { if x == 3 { 3 + 2 } else { 1 } }", + ) + } + + #[test] + fn invert_if_remove_not() { + check_assist( + invert_if, + "fn f() { <|>if !cond { 3 * 2 } else { 1 } }", + "fn f() { if cond { 1 } else { 3 * 2 } }", + ) + } + + #[test] + fn invert_if_general_case() { + check_assist( + invert_if, + "fn f() { i<|>f cond { 3 * 2 } else { 1 } }", + "fn f() { if !cond { 1 } else { 3 * 2 } }", + ) + } + + #[test] + fn invert_if_doesnt_apply_with_cursor_not_on_if() { + check_assist_not_applicable(invert_if, "fn f() { if !<|>cond { 3 * 2 } else { 1 } }") + } + + #[test] + fn invert_if_doesnt_apply_with_if_let() { + check_assist_not_applicable( + invert_if, + "fn f() { i<|>f let Some(_) = Some(1) { 1 } else { 0 } }", + ) + } +} diff --git a/crates/assists/src/handlers/merge_imports.rs b/crates/assists/src/handlers/merge_imports.rs new file mode 100644 index 0000000000..47d4654046 --- /dev/null +++ b/crates/assists/src/handlers/merge_imports.rs @@ -0,0 +1,321 @@ +use std::iter::successors; + +use syntax::{ + algo::{neighbor, skip_trivia_token, SyntaxRewriter}, + ast::{self, edit::AstNodeEdit, make}, + AstNode, Direction, InsertPosition, SyntaxElement, T, +}; + +use crate::{ + 
assist_context::{AssistContext, Assists}, + AssistId, AssistKind, +}; + +// Assist: merge_imports +// +// Merges two imports with a common prefix. +// +// ``` +// use std::<|>fmt::Formatter; +// use std::io; +// ``` +// -> +// ``` +// use std::{fmt::Formatter, io}; +// ``` +pub(crate) fn merge_imports(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + let tree: ast::UseTree = ctx.find_node_at_offset()?; + let mut rewriter = SyntaxRewriter::default(); + let mut offset = ctx.offset(); + + if let Some(use_item) = tree.syntax().parent().and_then(ast::Use::cast) { + let (merged, to_delete) = next_prev() + .filter_map(|dir| neighbor(&use_item, dir)) + .filter_map(|it| Some((it.clone(), it.use_tree()?))) + .find_map(|(use_item, use_tree)| { + Some((try_merge_trees(&tree, &use_tree)?, use_item)) + })?; + + rewriter.replace_ast(&tree, &merged); + rewriter += to_delete.remove(); + + if to_delete.syntax().text_range().end() < offset { + offset -= to_delete.syntax().text_range().len(); + } + } else { + let (merged, to_delete) = next_prev() + .filter_map(|dir| neighbor(&tree, dir)) + .find_map(|use_tree| Some((try_merge_trees(&tree, &use_tree)?, use_tree.clone())))?; + + rewriter.replace_ast(&tree, &merged); + rewriter += to_delete.remove(); + + if to_delete.syntax().text_range().end() < offset { + offset -= to_delete.syntax().text_range().len(); + } + }; + + let target = tree.syntax().text_range(); + acc.add( + AssistId("merge_imports", AssistKind::RefactorRewrite), + "Merge imports", + target, + |builder| { + builder.rewrite(rewriter); + }, + ) +} + +fn next_prev() -> impl Iterator { + [Direction::Next, Direction::Prev].iter().copied() +} + +fn try_merge_trees(old: &ast::UseTree, new: &ast::UseTree) -> Option { + let lhs_path = old.path()?; + let rhs_path = new.path()?; + + let (lhs_prefix, rhs_prefix) = common_prefix(&lhs_path, &rhs_path)?; + + let lhs = old.split_prefix(&lhs_prefix); + let rhs = new.split_prefix(&rhs_prefix); + + let should_insert_comma = lhs + 
.use_tree_list()? + .r_curly_token() + .and_then(|it| skip_trivia_token(it.prev_token()?, Direction::Prev)) + .map(|it| it.kind() != T![,]) + .unwrap_or(true); + + let mut to_insert: Vec = Vec::new(); + if should_insert_comma { + to_insert.push(make::token(T![,]).into()); + to_insert.push(make::tokens::single_space().into()); + } + to_insert.extend( + rhs.use_tree_list()? + .syntax() + .children_with_tokens() + .filter(|it| it.kind() != T!['{'] && it.kind() != T!['}']), + ); + let use_tree_list = lhs.use_tree_list()?; + let pos = InsertPosition::Before(use_tree_list.r_curly_token()?.into()); + let use_tree_list = use_tree_list.insert_children(pos, to_insert); + Some(lhs.with_use_tree_list(use_tree_list)) +} + +fn common_prefix(lhs: &ast::Path, rhs: &ast::Path) -> Option<(ast::Path, ast::Path)> { + let mut res = None; + let mut lhs_curr = first_path(&lhs); + let mut rhs_curr = first_path(&rhs); + loop { + match (lhs_curr.segment(), rhs_curr.segment()) { + (Some(lhs), Some(rhs)) if lhs.syntax().text() == rhs.syntax().text() => (), + _ => break, + } + res = Some((lhs_curr.clone(), rhs_curr.clone())); + + match (lhs_curr.parent_path(), rhs_curr.parent_path()) { + (Some(lhs), Some(rhs)) => { + lhs_curr = lhs; + rhs_curr = rhs; + } + _ => break, + } + } + + res +} + +fn first_path(path: &ast::Path) -> ast::Path { + successors(Some(path.clone()), |it| it.qualifier()).last().unwrap() +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_not_applicable}; + + use super::*; + + #[test] + fn test_merge_first() { + check_assist( + merge_imports, + r" +use std::fmt<|>::Debug; +use std::fmt::Display; +", + r" +use std::fmt::{Debug, Display}; +", + ) + } + + #[test] + fn test_merge_second() { + check_assist( + merge_imports, + r" +use std::fmt::Debug; +use std::fmt<|>::Display; +", + r" +use std::fmt::{Display, Debug}; +", + ); + } + + #[test] + fn merge_self1() { + check_assist( + merge_imports, + r" +use std::fmt<|>; +use std::fmt::Display; +", + r" 
+use std::fmt::{self, Display}; +", + ); + } + + #[test] + fn merge_self2() { + check_assist( + merge_imports, + r" +use std::{fmt, <|>fmt::Display}; +", + r" +use std::{fmt::{Display, self}}; +", + ); + } + + #[test] + fn test_merge_nested() { + check_assist( + merge_imports, + r" +use std::{fmt<|>::Debug, fmt::Display}; +", + r" +use std::{fmt::{Debug, Display}}; +", + ); + check_assist( + merge_imports, + r" +use std::{fmt::Debug, fmt<|>::Display}; +", + r" +use std::{fmt::{Display, Debug}}; +", + ); + } + + #[test] + fn test_merge_single_wildcard_diff_prefixes() { + check_assist( + merge_imports, + r" +use std<|>::cell::*; +use std::str; +", + r" +use std::{cell::*, str}; +", + ) + } + + #[test] + fn test_merge_both_wildcard_diff_prefixes() { + check_assist( + merge_imports, + r" +use std<|>::cell::*; +use std::str::*; +", + r" +use std::{cell::*, str::*}; +", + ) + } + + #[test] + fn removes_just_enough_whitespace() { + check_assist( + merge_imports, + r" +use foo<|>::bar; +use foo::baz; + +/// Doc comment +", + r" +use foo::{bar, baz}; + +/// Doc comment +", + ); + } + + #[test] + fn works_with_trailing_comma() { + check_assist( + merge_imports, + r" +use { + foo<|>::bar, + foo::baz, +}; +", + r" +use { + foo::{bar, baz}, +}; +", + ); + check_assist( + merge_imports, + r" +use { + foo::baz, + foo<|>::bar, +}; +", + r" +use { + foo::{bar, baz}, +}; +", + ); + } + + #[test] + fn test_double_comma() { + check_assist( + merge_imports, + r" +use foo::bar::baz; +use foo::<|>{ + FooBar, +}; +", + r" +use foo::{ + FooBar, +bar::baz}; +", + ) + } + + #[test] + fn test_empty_use() { + check_assist_not_applicable( + merge_imports, + r" +use std::<|> +fn main() {}", + ); + } +} diff --git a/crates/assists/src/handlers/merge_match_arms.rs b/crates/assists/src/handlers/merge_match_arms.rs new file mode 100644 index 0000000000..c347eb40ef --- /dev/null +++ b/crates/assists/src/handlers/merge_match_arms.rs @@ -0,0 +1,248 @@ +use std::iter::successors; + +use syntax::{ + 
algo::neighbor, + ast::{self, AstNode}, + Direction, +}; + +use crate::{AssistContext, AssistId, AssistKind, Assists, TextRange}; + +// Assist: merge_match_arms +// +// Merges identical match arms. +// +// ``` +// enum Action { Move { distance: u32 }, Stop } +// +// fn handle(action: Action) { +// match action { +// <|>Action::Move(..) => foo(), +// Action::Stop => foo(), +// } +// } +// ``` +// -> +// ``` +// enum Action { Move { distance: u32 }, Stop } +// +// fn handle(action: Action) { +// match action { +// Action::Move(..) | Action::Stop => foo(), +// } +// } +// ``` +pub(crate) fn merge_match_arms(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + let current_arm = ctx.find_node_at_offset::()?; + // Don't try to handle arms with guards for now - can add support for this later + if current_arm.guard().is_some() { + return None; + } + let current_expr = current_arm.expr()?; + let current_text_range = current_arm.syntax().text_range(); + + // We check if the following match arms match this one. We could, but don't, + // compare to the previous match arm as well. 
+ let arms_to_merge = successors(Some(current_arm), |it| neighbor(it, Direction::Next)) + .take_while(|arm| { + if arm.guard().is_some() { + return false; + } + match arm.expr() { + Some(expr) => expr.syntax().text() == current_expr.syntax().text(), + None => false, + } + }) + .collect::>(); + + if arms_to_merge.len() <= 1 { + return None; + } + + acc.add( + AssistId("merge_match_arms", AssistKind::RefactorRewrite), + "Merge match arms", + current_text_range, + |edit| { + let pats = if arms_to_merge.iter().any(contains_placeholder) { + "_".into() + } else { + arms_to_merge + .iter() + .filter_map(ast::MatchArm::pat) + .map(|x| x.syntax().to_string()) + .collect::>() + .join(" | ") + }; + + let arm = format!("{} => {}", pats, current_expr.syntax().text()); + + let start = arms_to_merge.first().unwrap().syntax().text_range().start(); + let end = arms_to_merge.last().unwrap().syntax().text_range().end(); + + edit.replace(TextRange::new(start, end), arm); + }, + ) +} + +fn contains_placeholder(a: &ast::MatchArm) -> bool { + matches!(a.pat(), Some(ast::Pat::WildcardPat(..))) +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_not_applicable}; + + use super::*; + + #[test] + fn merge_match_arms_single_patterns() { + check_assist( + merge_match_arms, + r#" + #[derive(Debug)] + enum X { A, B, C } + + fn main() { + let x = X::A; + let y = match x { + X::A => { 1i32<|> } + X::B => { 1i32 } + X::C => { 2i32 } + } + } + "#, + r#" + #[derive(Debug)] + enum X { A, B, C } + + fn main() { + let x = X::A; + let y = match x { + X::A | X::B => { 1i32 } + X::C => { 2i32 } + } + } + "#, + ); + } + + #[test] + fn merge_match_arms_multiple_patterns() { + check_assist( + merge_match_arms, + r#" + #[derive(Debug)] + enum X { A, B, C, D, E } + + fn main() { + let x = X::A; + let y = match x { + X::A | X::B => {<|> 1i32 }, + X::C | X::D => { 1i32 }, + X::E => { 2i32 }, + } + } + "#, + r#" + #[derive(Debug)] + enum X { A, B, C, D, E } + + fn main() { + let x = 
X::A; + let y = match x { + X::A | X::B | X::C | X::D => { 1i32 }, + X::E => { 2i32 }, + } + } + "#, + ); + } + + #[test] + fn merge_match_arms_placeholder_pattern() { + check_assist( + merge_match_arms, + r#" + #[derive(Debug)] + enum X { A, B, C, D, E } + + fn main() { + let x = X::A; + let y = match x { + X::A => { 1i32 }, + X::B => { 2i<|>32 }, + _ => { 2i32 } + } + } + "#, + r#" + #[derive(Debug)] + enum X { A, B, C, D, E } + + fn main() { + let x = X::A; + let y = match x { + X::A => { 1i32 }, + _ => { 2i32 } + } + } + "#, + ); + } + + #[test] + fn merges_all_subsequent_arms() { + check_assist( + merge_match_arms, + r#" + enum X { A, B, C, D, E } + + fn main() { + match X::A { + X::A<|> => 92, + X::B => 92, + X::C => 92, + X::D => 62, + _ => panic!(), + } + } + "#, + r#" + enum X { A, B, C, D, E } + + fn main() { + match X::A { + X::A | X::B | X::C => 92, + X::D => 62, + _ => panic!(), + } + } + "#, + ) + } + + #[test] + fn merge_match_arms_rejects_guards() { + check_assist_not_applicable( + merge_match_arms, + r#" + #[derive(Debug)] + enum X { + A(i32), + B, + C + } + + fn main() { + let x = X::A; + let y = match x { + X::A(a) if a > 5 => { <|>1i32 }, + X::B => { 1i32 }, + X::C => { 2i32 } + } + } + "#, + ); + } +} diff --git a/crates/assists/src/handlers/move_bounds.rs b/crates/assists/src/handlers/move_bounds.rs new file mode 100644 index 0000000000..e2e461520d --- /dev/null +++ b/crates/assists/src/handlers/move_bounds.rs @@ -0,0 +1,152 @@ +use syntax::{ + ast::{self, edit::AstNodeEdit, make, AstNode, NameOwner, TypeBoundsOwner}, + match_ast, + SyntaxKind::*, + T, +}; + +use crate::{AssistContext, AssistId, AssistKind, Assists}; + +// Assist: move_bounds_to_where_clause +// +// Moves inline type bounds to a where clause. 
+// +// ``` +// fn applyF: FnOnce(T) -> U>(f: F, x: T) -> U { +// f(x) +// } +// ``` +// -> +// ``` +// fn apply(f: F, x: T) -> U where F: FnOnce(T) -> U { +// f(x) +// } +// ``` +pub(crate) fn move_bounds_to_where_clause(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + let type_param_list = ctx.find_node_at_offset::()?; + + let mut type_params = type_param_list.type_params(); + if type_params.all(|p| p.type_bound_list().is_none()) { + return None; + } + + let parent = type_param_list.syntax().parent()?; + if parent.children_with_tokens().any(|it| it.kind() == WHERE_CLAUSE) { + return None; + } + + let anchor = match_ast! { + match parent { + ast::Fn(it) => it.body()?.syntax().clone().into(), + ast::Trait(it) => it.assoc_item_list()?.syntax().clone().into(), + ast::Impl(it) => it.assoc_item_list()?.syntax().clone().into(), + ast::Enum(it) => it.variant_list()?.syntax().clone().into(), + ast::Struct(it) => { + it.syntax().children_with_tokens() + .find(|it| it.kind() == RECORD_FIELD_LIST || it.kind() == T![;])? 
+ }, + _ => return None + } + }; + + let target = type_param_list.syntax().text_range(); + acc.add( + AssistId("move_bounds_to_where_clause", AssistKind::RefactorRewrite), + "Move to where clause", + target, + |edit| { + let new_params = type_param_list + .type_params() + .filter(|it| it.type_bound_list().is_some()) + .map(|type_param| { + let without_bounds = type_param.remove_bounds(); + (type_param, without_bounds) + }); + + let new_type_param_list = type_param_list.replace_descendants(new_params); + edit.replace_ast(type_param_list.clone(), new_type_param_list); + + let where_clause = { + let predicates = type_param_list.type_params().filter_map(build_predicate); + make::where_clause(predicates) + }; + + let to_insert = match anchor.prev_sibling_or_token() { + Some(ref elem) if elem.kind() == WHITESPACE => { + format!("{} ", where_clause.syntax()) + } + _ => format!(" {}", where_clause.syntax()), + }; + edit.insert(anchor.text_range().start(), to_insert); + }, + ) +} + +fn build_predicate(param: ast::TypeParam) -> Option { + let path = { + let name_ref = make::name_ref(¶m.name()?.syntax().to_string()); + let segment = make::path_segment(name_ref); + make::path_unqualified(segment) + }; + let predicate = make::where_pred(path, param.type_bound_list()?.bounds()); + Some(predicate) +} + +#[cfg(test)] +mod tests { + use super::*; + + use crate::tests::check_assist; + + #[test] + fn move_bounds_to_where_clause_fn() { + check_assist( + move_bounds_to_where_clause, + r#" + fn fooF: FnOnce(T) -> T>() {} + "#, + r#" + fn foo() where T: u32, F: FnOnce(T) -> T {} + "#, + ); + } + + #[test] + fn move_bounds_to_where_clause_impl() { + check_assist( + move_bounds_to_where_clause, + r#" + implT> A {} + "#, + r#" + impl A where U: u32 {} + "#, + ); + } + + #[test] + fn move_bounds_to_where_clause_struct() { + check_assist( + move_bounds_to_where_clause, + r#" + struct A<<|>T: Iterator> {} + "#, + r#" + struct A where T: Iterator {} + "#, + ); + } + + #[test] + fn 
move_bounds_to_where_clause_tuple_struct() { + check_assist( + move_bounds_to_where_clause, + r#" + struct Pair<<|>T: u32>(T, T); + "#, + r#" + struct Pair(T, T) where T: u32; + "#, + ); + } +} diff --git a/crates/assists/src/handlers/move_guard.rs b/crates/assists/src/handlers/move_guard.rs new file mode 100644 index 0000000000..452115fe67 --- /dev/null +++ b/crates/assists/src/handlers/move_guard.rs @@ -0,0 +1,293 @@ +use syntax::{ + ast::{edit::AstNodeEdit, make, AstNode, IfExpr, MatchArm}, + SyntaxKind::WHITESPACE, +}; + +use crate::{AssistContext, AssistId, AssistKind, Assists}; + +// Assist: move_guard_to_arm_body +// +// Moves match guard into match arm body. +// +// ``` +// enum Action { Move { distance: u32 }, Stop } +// +// fn handle(action: Action) { +// match action { +// Action::Move { distance } <|>if distance > 10 => foo(), +// _ => (), +// } +// } +// ``` +// -> +// ``` +// enum Action { Move { distance: u32 }, Stop } +// +// fn handle(action: Action) { +// match action { +// Action::Move { distance } => if distance > 10 { +// foo() +// }, +// _ => (), +// } +// } +// ``` +pub(crate) fn move_guard_to_arm_body(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + let match_arm = ctx.find_node_at_offset::()?; + let guard = match_arm.guard()?; + let space_before_guard = guard.syntax().prev_sibling_or_token(); + + let guard_condition = guard.expr()?; + let arm_expr = match_arm.expr()?; + let if_expr = make::expr_if( + make::condition(guard_condition, None), + make::block_expr(None, Some(arm_expr.clone())), + ) + .indent(arm_expr.indent_level()); + + let target = guard.syntax().text_range(); + acc.add( + AssistId("move_guard_to_arm_body", AssistKind::RefactorRewrite), + "Move guard to arm body", + target, + |edit| { + match space_before_guard { + Some(element) if element.kind() == WHITESPACE => { + edit.delete(element.text_range()); + } + _ => (), + }; + + edit.delete(guard.syntax().text_range()); + edit.replace_ast(arm_expr, if_expr); + }, + ) +} + 
+// Assist: move_arm_cond_to_match_guard +// +// Moves if expression from match arm body into a guard. +// +// ``` +// enum Action { Move { distance: u32 }, Stop } +// +// fn handle(action: Action) { +// match action { +// Action::Move { distance } => <|>if distance > 10 { foo() }, +// _ => (), +// } +// } +// ``` +// -> +// ``` +// enum Action { Move { distance: u32 }, Stop } +// +// fn handle(action: Action) { +// match action { +// Action::Move { distance } if distance > 10 => foo(), +// _ => (), +// } +// } +// ``` +pub(crate) fn move_arm_cond_to_match_guard(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + let match_arm: MatchArm = ctx.find_node_at_offset::()?; + let match_pat = match_arm.pat()?; + + let arm_body = match_arm.expr()?; + let if_expr: IfExpr = IfExpr::cast(arm_body.syntax().clone())?; + let cond = if_expr.condition()?; + let then_block = if_expr.then_branch()?; + + // Not support if with else branch + if if_expr.else_branch().is_some() { + return None; + } + // Not support moving if let to arm guard + if cond.pat().is_some() { + return None; + } + + let buf = format!(" if {}", cond.syntax().text()); + + let target = if_expr.syntax().text_range(); + acc.add( + AssistId("move_arm_cond_to_match_guard", AssistKind::RefactorRewrite), + "Move condition to match guard", + target, + |edit| { + let then_only_expr = then_block.statements().next().is_none(); + + match &then_block.expr() { + Some(then_expr) if then_only_expr => { + edit.replace(if_expr.syntax().text_range(), then_expr.syntax().text()) + } + _ => edit.replace(if_expr.syntax().text_range(), then_block.syntax().text()), + } + + edit.insert(match_pat.syntax().text_range().end(), buf); + }, + ) +} + +#[cfg(test)] +mod tests { + use super::*; + + use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target}; + + #[test] + fn move_guard_to_arm_body_target() { + check_assist_target( + move_guard_to_arm_body, + r#" +fn main() { + match 92 { + x <|>if x > 10 => false, + _ 
=> true + } +} +"#, + r#"if x > 10"#, + ); + } + + #[test] + fn move_guard_to_arm_body_works() { + check_assist( + move_guard_to_arm_body, + r#" +fn main() { + match 92 { + x <|>if x > 10 => false, + _ => true + } +} +"#, + r#" +fn main() { + match 92 { + x => if x > 10 { + false + }, + _ => true + } +} +"#, + ); + } + + #[test] + fn move_guard_to_arm_body_works_complex_match() { + check_assist( + move_guard_to_arm_body, + r#" +fn main() { + match 92 { + <|>x @ 4 | x @ 5 if x > 5 => true, + _ => false + } +} +"#, + r#" +fn main() { + match 92 { + x @ 4 | x @ 5 => if x > 5 { + true + }, + _ => false + } +} +"#, + ); + } + + #[test] + fn move_arm_cond_to_match_guard_works() { + check_assist( + move_arm_cond_to_match_guard, + r#" +fn main() { + match 92 { + x => if x > 10 { <|>false }, + _ => true + } +} +"#, + r#" +fn main() { + match 92 { + x if x > 10 => false, + _ => true + } +} +"#, + ); + } + + #[test] + fn move_arm_cond_to_match_guard_if_let_not_works() { + check_assist_not_applicable( + move_arm_cond_to_match_guard, + r#" +fn main() { + match 92 { + x => if let 62 = x { <|>false }, + _ => true + } +} +"#, + ); + } + + #[test] + fn move_arm_cond_to_match_guard_if_empty_body_works() { + check_assist( + move_arm_cond_to_match_guard, + r#" +fn main() { + match 92 { + x => if x > 10 { <|> }, + _ => true + } +} +"#, + r#" +fn main() { + match 92 { + x if x > 10 => { }, + _ => true + } +} +"#, + ); + } + + #[test] + fn move_arm_cond_to_match_guard_if_multiline_body_works() { + check_assist( + move_arm_cond_to_match_guard, + r#" +fn main() { + match 92 { + x => if x > 10 { + 92;<|> + false + }, + _ => true + } +} +"#, + r#" +fn main() { + match 92 { + x if x > 10 => { + 92; + false + }, + _ => true + } +} +"#, + ); + } +} diff --git a/crates/assists/src/handlers/raw_string.rs b/crates/assists/src/handlers/raw_string.rs new file mode 100644 index 0000000000..9ddd116e01 --- /dev/null +++ b/crates/assists/src/handlers/raw_string.rs @@ -0,0 +1,504 @@ +use 
std::borrow::Cow; + +use syntax::{ + ast::{self, HasQuotes, HasStringValue}, + AstToken, + SyntaxKind::{RAW_STRING, STRING}, + TextRange, TextSize, +}; +use test_utils::mark; + +use crate::{AssistContext, AssistId, AssistKind, Assists}; + +// Assist: make_raw_string +// +// Adds `r#` to a plain string literal. +// +// ``` +// fn main() { +// "Hello,<|> World!"; +// } +// ``` +// -> +// ``` +// fn main() { +// r#"Hello, World!"#; +// } +// ``` +pub(crate) fn make_raw_string(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + let token = ctx.find_token_at_offset(STRING).and_then(ast::String::cast)?; + let value = token.value()?; + let target = token.syntax().text_range(); + acc.add( + AssistId("make_raw_string", AssistKind::RefactorRewrite), + "Rewrite as raw string", + target, + |edit| { + let hashes = "#".repeat(required_hashes(&value).max(1)); + if matches!(value, Cow::Borrowed(_)) { + // Avoid replacing the whole string to better position the cursor. + edit.insert(token.syntax().text_range().start(), format!("r{}", hashes)); + edit.insert(token.syntax().text_range().end(), format!("{}", hashes)); + } else { + edit.replace( + token.syntax().text_range(), + format!("r{}\"{}\"{}", hashes, value, hashes), + ); + } + }, + ) +} + +// Assist: make_usual_string +// +// Turns a raw string into a plain string. 
+// +// ``` +// fn main() { +// r#"Hello,<|> "World!""#; +// } +// ``` +// -> +// ``` +// fn main() { +// "Hello, \"World!\""; +// } +// ``` +pub(crate) fn make_usual_string(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + let token = ctx.find_token_at_offset(RAW_STRING).and_then(ast::RawString::cast)?; + let value = token.value()?; + let target = token.syntax().text_range(); + acc.add( + AssistId("make_usual_string", AssistKind::RefactorRewrite), + "Rewrite as regular string", + target, + |edit| { + // parse inside string to escape `"` + let escaped = value.escape_default().to_string(); + if let Some(offsets) = token.quote_offsets() { + if token.text()[offsets.contents - token.syntax().text_range().start()] == escaped { + edit.replace(offsets.quotes.0, "\""); + edit.replace(offsets.quotes.1, "\""); + return; + } + } + + edit.replace(token.syntax().text_range(), format!("\"{}\"", escaped)); + }, + ) +} + +// Assist: add_hash +// +// Adds a hash to a raw string literal. +// +// ``` +// fn main() { +// r#"Hello,<|> World!"#; +// } +// ``` +// -> +// ``` +// fn main() { +// r##"Hello, World!"##; +// } +// ``` +pub(crate) fn add_hash(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + let token = ctx.find_token_at_offset(RAW_STRING)?; + let target = token.text_range(); + acc.add(AssistId("add_hash", AssistKind::Refactor), "Add #", target, |edit| { + edit.insert(token.text_range().start() + TextSize::of('r'), "#"); + edit.insert(token.text_range().end(), "#"); + }) +} + +// Assist: remove_hash +// +// Removes a hash from a raw string literal. 
+// +// ``` +// fn main() { +// r#"Hello,<|> World!"#; +// } +// ``` +// -> +// ``` +// fn main() { +// r"Hello, World!"; +// } +// ``` +pub(crate) fn remove_hash(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + let token = ctx.find_token_at_offset(RAW_STRING).and_then(ast::RawString::cast)?; + + let text = token.text().as_str(); + if !text.starts_with("r#") && text.ends_with('#') { + return None; + } + + let existing_hashes = text.chars().skip(1).take_while(|&it| it == '#').count(); + + let text_range = token.syntax().text_range(); + let internal_text = &text[token.text_range_between_quotes()? - text_range.start()]; + + if existing_hashes == required_hashes(internal_text) { + mark::hit!(cant_remove_required_hash); + return None; + } + + acc.add(AssistId("remove_hash", AssistKind::RefactorRewrite), "Remove #", text_range, |edit| { + edit.delete(TextRange::at(text_range.start() + TextSize::of('r'), TextSize::of('#'))); + edit.delete(TextRange::new(text_range.end() - TextSize::of('#'), text_range.end())); + }) +} + +fn required_hashes(s: &str) -> usize { + let mut res = 0usize; + for idx in s.match_indices('"').map(|(i, _)| i) { + let (_, sub) = s.split_at(idx + 1); + let n_hashes = sub.chars().take_while(|c| *c == '#').count(); + res = res.max(n_hashes + 1) + } + res +} + +#[test] +fn test_required_hashes() { + assert_eq!(0, required_hashes("abc")); + assert_eq!(0, required_hashes("###")); + assert_eq!(1, required_hashes("\"")); + assert_eq!(2, required_hashes("\"#abc")); + assert_eq!(0, required_hashes("#abc")); + assert_eq!(3, required_hashes("#ab\"##c")); + assert_eq!(5, required_hashes("#ab\"##\"####c")); +} + +#[cfg(test)] +mod tests { + use test_utils::mark; + + use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target}; + + use super::*; + + #[test] + fn make_raw_string_target() { + check_assist_target( + make_raw_string, + r#" + fn f() { + let s = <|>"random\nstring"; + } + "#, + r#""random\nstring""#, + ); + } + + #[test] + 
fn make_raw_string_works() { + check_assist( + make_raw_string, + r#" +fn f() { + let s = <|>"random\nstring"; +} +"#, + r##" +fn f() { + let s = r#"random +string"#; +} +"##, + ) + } + + #[test] + fn make_raw_string_works_inside_macros() { + check_assist( + make_raw_string, + r#" + fn f() { + format!(<|>"x = {}", 92) + } + "#, + r##" + fn f() { + format!(r#"x = {}"#, 92) + } + "##, + ) + } + + #[test] + fn make_raw_string_hashes_inside_works() { + check_assist( + make_raw_string, + r###" +fn f() { + let s = <|>"#random##\nstring"; +} +"###, + r####" +fn f() { + let s = r#"#random## +string"#; +} +"####, + ) + } + + #[test] + fn make_raw_string_closing_hashes_inside_works() { + check_assist( + make_raw_string, + r###" +fn f() { + let s = <|>"#random\"##\nstring"; +} +"###, + r####" +fn f() { + let s = r###"#random"## +string"###; +} +"####, + ) + } + + #[test] + fn make_raw_string_nothing_to_unescape_works() { + check_assist( + make_raw_string, + r#" + fn f() { + let s = <|>"random string"; + } + "#, + r##" + fn f() { + let s = r#"random string"#; + } + "##, + ) + } + + #[test] + fn make_raw_string_not_works_on_partial_string() { + check_assist_not_applicable( + make_raw_string, + r#" + fn f() { + let s = "foo<|> + } + "#, + ) + } + + #[test] + fn make_usual_string_not_works_on_partial_string() { + check_assist_not_applicable( + make_usual_string, + r#" + fn main() { + let s = r#"bar<|> + } + "#, + ) + } + + #[test] + fn add_hash_target() { + check_assist_target( + add_hash, + r#" + fn f() { + let s = <|>r"random string"; + } + "#, + r#"r"random string""#, + ); + } + + #[test] + fn add_hash_works() { + check_assist( + add_hash, + r#" + fn f() { + let s = <|>r"random string"; + } + "#, + r##" + fn f() { + let s = r#"random string"#; + } + "##, + ) + } + + #[test] + fn add_more_hash_works() { + check_assist( + add_hash, + r##" + fn f() { + let s = <|>r#"random"string"#; + } + "##, + r###" + fn f() { + let s = r##"random"string"##; + } + "###, + ) + } + + #[test] + fn 
add_hash_not_works() { + check_assist_not_applicable( + add_hash, + r#" + fn f() { + let s = <|>"random string"; + } + "#, + ); + } + + #[test] + fn remove_hash_target() { + check_assist_target( + remove_hash, + r##" + fn f() { + let s = <|>r#"random string"#; + } + "##, + r##"r#"random string"#"##, + ); + } + + #[test] + fn remove_hash_works() { + check_assist( + remove_hash, + r##"fn f() { let s = <|>r#"random string"#; }"##, + r#"fn f() { let s = r"random string"; }"#, + ) + } + + #[test] + fn cant_remove_required_hash() { + mark::check!(cant_remove_required_hash); + check_assist_not_applicable( + remove_hash, + r##" + fn f() { + let s = <|>r#"random"str"ing"#; + } + "##, + ) + } + + #[test] + fn remove_more_hash_works() { + check_assist( + remove_hash, + r###" + fn f() { + let s = <|>r##"random string"##; + } + "###, + r##" + fn f() { + let s = r#"random string"#; + } + "##, + ) + } + + #[test] + fn remove_hash_doesnt_work() { + check_assist_not_applicable(remove_hash, r#"fn f() { let s = <|>"random string"; }"#); + } + + #[test] + fn remove_hash_no_hash_doesnt_work() { + check_assist_not_applicable(remove_hash, r#"fn f() { let s = <|>r"random string"; }"#); + } + + #[test] + fn make_usual_string_target() { + check_assist_target( + make_usual_string, + r##" + fn f() { + let s = <|>r#"random string"#; + } + "##, + r##"r#"random string"#"##, + ); + } + + #[test] + fn make_usual_string_works() { + check_assist( + make_usual_string, + r##" + fn f() { + let s = <|>r#"random string"#; + } + "##, + r#" + fn f() { + let s = "random string"; + } + "#, + ) + } + + #[test] + fn make_usual_string_with_quote_works() { + check_assist( + make_usual_string, + r##" + fn f() { + let s = <|>r#"random"str"ing"#; + } + "##, + r#" + fn f() { + let s = "random\"str\"ing"; + } + "#, + ) + } + + #[test] + fn make_usual_string_more_hash_works() { + check_assist( + make_usual_string, + r###" + fn f() { + let s = <|>r##"random string"##; + } + "###, + r##" + fn f() { + let s = "random 
string"; + } + "##, + ) + } + + #[test] + fn make_usual_string_not_works() { + check_assist_not_applicable( + make_usual_string, + r#" + fn f() { + let s = <|>"random string"; + } + "#, + ); + } +} diff --git a/crates/assists/src/handlers/remove_dbg.rs b/crates/assists/src/handlers/remove_dbg.rs new file mode 100644 index 0000000000..f3dcca5348 --- /dev/null +++ b/crates/assists/src/handlers/remove_dbg.rs @@ -0,0 +1,205 @@ +use syntax::{ + ast::{self, AstNode}, + TextRange, TextSize, T, +}; + +use crate::{AssistContext, AssistId, AssistKind, Assists}; + +// Assist: remove_dbg +// +// Removes `dbg!()` macro call. +// +// ``` +// fn main() { +// <|>dbg!(92); +// } +// ``` +// -> +// ``` +// fn main() { +// 92; +// } +// ``` +pub(crate) fn remove_dbg(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + let macro_call = ctx.find_node_at_offset::()?; + + if !is_valid_macrocall(¯o_call, "dbg")? { + return None; + } + + let is_leaf = macro_call.syntax().next_sibling().is_none(); + + let macro_end = if macro_call.semicolon_token().is_some() { + macro_call.syntax().text_range().end() - TextSize::of(';') + } else { + macro_call.syntax().text_range().end() + }; + + // macro_range determines what will be deleted and replaced with macro_content + let macro_range = TextRange::new(macro_call.syntax().text_range().start(), macro_end); + let paste_instead_of_dbg = { + let text = macro_call.token_tree()?.syntax().text(); + + // leafiness determines if we should include the parenthesis or not + let slice_index: TextRange = if is_leaf { + // leaf means - we can extract the contents of the dbg! 
in text + TextRange::new(TextSize::of('('), text.len() - TextSize::of(')')) + } else { + // not leaf - means we should keep the parens + TextRange::up_to(text.len()) + }; + text.slice(slice_index).to_string() + }; + + let target = macro_call.syntax().text_range(); + acc.add(AssistId("remove_dbg", AssistKind::Refactor), "Remove dbg!()", target, |builder| { + builder.replace(macro_range, paste_instead_of_dbg); + }) +} + +/// Verifies that the given macro_call actually matches the given name +/// and contains proper ending tokens +fn is_valid_macrocall(macro_call: &ast::MacroCall, macro_name: &str) -> Option { + let path = macro_call.path()?; + let name_ref = path.segment()?.name_ref()?; + + // Make sure it is actually a dbg-macro call, dbg followed by ! + let excl = path.syntax().next_sibling_or_token()?; + + if name_ref.text() != macro_name || excl.kind() != T![!] { + return None; + } + + let node = macro_call.token_tree()?.syntax().clone(); + let first_child = node.first_child_or_token()?; + let last_child = node.last_child_or_token()?; + + match (first_child.kind(), last_child.kind()) { + (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}']) => Some(true), + _ => Some(false), + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target}; + + #[test] + fn test_remove_dbg() { + check_assist(remove_dbg, "<|>dbg!(1 + 1)", "1 + 1"); + + check_assist(remove_dbg, "dbg!<|>((1 + 1))", "(1 + 1)"); + + check_assist(remove_dbg, "dbg!(1 <|>+ 1)", "1 + 1"); + + check_assist(remove_dbg, "let _ = <|>dbg!(1 + 1)", "let _ = 1 + 1"); + + check_assist( + remove_dbg, + " +fn foo(n: usize) { + if let Some(_) = dbg!(n.<|>checked_sub(4)) { + // ... + } +} +", + " +fn foo(n: usize) { + if let Some(_) = n.checked_sub(4) { + // ... 
+ } +} +", + ); + } + + #[test] + fn test_remove_dbg_with_brackets_and_braces() { + check_assist(remove_dbg, "dbg![<|>1 + 1]", "1 + 1"); + check_assist(remove_dbg, "dbg!{<|>1 + 1}", "1 + 1"); + } + + #[test] + fn test_remove_dbg_not_applicable() { + check_assist_not_applicable(remove_dbg, "<|>vec![1, 2, 3]"); + check_assist_not_applicable(remove_dbg, "<|>dbg(5, 6, 7)"); + check_assist_not_applicable(remove_dbg, "<|>dbg!(5, 6, 7"); + } + + #[test] + fn test_remove_dbg_target() { + check_assist_target( + remove_dbg, + " +fn foo(n: usize) { + if let Some(_) = dbg!(n.<|>checked_sub(4)) { + // ... + } +} +", + "dbg!(n.checked_sub(4))", + ); + } + + #[test] + fn test_remove_dbg_keep_semicolon() { + // https://github.com/rust-analyzer/rust-analyzer/issues/5129#issuecomment-651399779 + // not quite though + // adding a comment at the end of the line makes + // the ast::MacroCall to include the semicolon at the end + check_assist( + remove_dbg, + r#"let res = <|>dbg!(1 * 20); // needless comment"#, + r#"let res = 1 * 20; // needless comment"#, + ); + } + + #[test] + fn test_remove_dbg_keep_expression() { + check_assist( + remove_dbg, + r#"let res = <|>dbg!(a + b).foo();"#, + r#"let res = (a + b).foo();"#, + ); + } + + #[test] + fn test_remove_dbg_from_inside_fn() { + check_assist_target( + remove_dbg, + r#" +fn square(x: u32) -> u32 { + x * x +} + +fn main() { + let x = square(dbg<|>!(5 + 10)); + println!("{}", x); +}"#, + "dbg!(5 + 10)", + ); + + check_assist( + remove_dbg, + r#" +fn square(x: u32) -> u32 { + x * x +} + +fn main() { + let x = square(dbg<|>!(5 + 10)); + println!("{}", x); +}"#, + r#" +fn square(x: u32) -> u32 { + x * x +} + +fn main() { + let x = square(5 + 10); + println!("{}", x); +}"#, + ); + } +} diff --git a/crates/assists/src/handlers/remove_mut.rs b/crates/assists/src/handlers/remove_mut.rs new file mode 100644 index 0000000000..44f41daa92 --- /dev/null +++ b/crates/assists/src/handlers/remove_mut.rs @@ -0,0 +1,37 @@ +use syntax::{SyntaxKind, 
TextRange, T}; + +use crate::{AssistContext, AssistId, AssistKind, Assists}; + +// Assist: remove_mut +// +// Removes the `mut` keyword. +// +// ``` +// impl Walrus { +// fn feed(&mut<|> self, amount: u32) {} +// } +// ``` +// -> +// ``` +// impl Walrus { +// fn feed(&self, amount: u32) {} +// } +// ``` +pub(crate) fn remove_mut(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + let mut_token = ctx.find_token_at_offset(T![mut])?; + let delete_from = mut_token.text_range().start(); + let delete_to = match mut_token.next_token() { + Some(it) if it.kind() == SyntaxKind::WHITESPACE => it.text_range().end(), + _ => mut_token.text_range().end(), + }; + + let target = mut_token.text_range(); + acc.add( + AssistId("remove_mut", AssistKind::Refactor), + "Remove `mut` keyword", + target, + |builder| { + builder.delete(TextRange::new(delete_from, delete_to)); + }, + ) +} diff --git a/crates/assists/src/handlers/reorder_fields.rs b/crates/assists/src/handlers/reorder_fields.rs new file mode 100644 index 0000000000..527f457a79 --- /dev/null +++ b/crates/assists/src/handlers/reorder_fields.rs @@ -0,0 +1,220 @@ +use itertools::Itertools; +use rustc_hash::FxHashMap; + +use hir::{Adt, ModuleDef, PathResolution, Semantics, Struct}; +use ide_db::RootDatabase; +use syntax::{algo, ast, match_ast, AstNode, SyntaxKind, SyntaxKind::*, SyntaxNode}; + +use crate::{AssistContext, AssistId, AssistKind, Assists}; + +// Assist: reorder_fields +// +// Reorder the fields of record literals and record patterns in the same order as in +// the definition. 
+// +// ``` +// struct Foo {foo: i32, bar: i32}; +// const test: Foo = <|>Foo {bar: 0, foo: 1} +// ``` +// -> +// ``` +// struct Foo {foo: i32, bar: i32}; +// const test: Foo = Foo {foo: 1, bar: 0} +// ``` +// +pub(crate) fn reorder_fields(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + reorder::(acc, ctx).or_else(|| reorder::(acc, ctx)) +} + +fn reorder(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + let record = ctx.find_node_at_offset::()?; + let path = record.syntax().children().find_map(ast::Path::cast)?; + + let ranks = compute_fields_ranks(&path, &ctx)?; + + let fields = get_fields(&record.syntax()); + let sorted_fields = sorted_by_rank(&fields, |node| { + *ranks.get(&get_field_name(node)).unwrap_or(&usize::max_value()) + }); + + if sorted_fields == fields { + return None; + } + + let target = record.syntax().text_range(); + acc.add( + AssistId("reorder_fields", AssistKind::RefactorRewrite), + "Reorder record fields", + target, + |edit| { + for (old, new) in fields.iter().zip(&sorted_fields) { + algo::diff(old, new).into_text_edit(edit.text_edit_builder()); + } + }, + ) +} + +fn get_fields_kind(node: &SyntaxNode) -> Vec { + match node.kind() { + RECORD_EXPR => vec![RECORD_EXPR_FIELD], + RECORD_PAT => vec![RECORD_PAT_FIELD, IDENT_PAT], + _ => vec![], + } +} + +fn get_field_name(node: &SyntaxNode) -> String { + let res = match_ast! 
{ + match node { + ast::RecordExprField(field) => field.field_name().map(|it| it.to_string()), + ast::RecordPatField(field) => field.field_name().map(|it| it.to_string()), + _ => None, + } + }; + res.unwrap_or_default() +} + +fn get_fields(record: &SyntaxNode) -> Vec { + let kinds = get_fields_kind(record); + record.children().flat_map(|n| n.children()).filter(|n| kinds.contains(&n.kind())).collect() +} + +fn sorted_by_rank( + fields: &[SyntaxNode], + get_rank: impl Fn(&SyntaxNode) -> usize, +) -> Vec { + fields.iter().cloned().sorted_by_key(get_rank).collect() +} + +fn struct_definition(path: &ast::Path, sema: &Semantics) -> Option { + match sema.resolve_path(path) { + Some(PathResolution::Def(ModuleDef::Adt(Adt::Struct(s)))) => Some(s), + _ => None, + } +} + +fn compute_fields_ranks(path: &ast::Path, ctx: &AssistContext) -> Option> { + Some( + struct_definition(path, &ctx.sema)? + .fields(ctx.db()) + .iter() + .enumerate() + .map(|(idx, field)| (field.name(ctx.db()).to_string(), idx)) + .collect(), + ) +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_not_applicable}; + + use super::*; + + #[test] + fn not_applicable_if_sorted() { + check_assist_not_applicable( + reorder_fields, + r#" + struct Foo { + foo: i32, + bar: i32, + } + + const test: Foo = <|>Foo { foo: 0, bar: 0 }; + "#, + ) + } + + #[test] + fn trivial_empty_fields() { + check_assist_not_applicable( + reorder_fields, + r#" + struct Foo {}; + const test: Foo = <|>Foo {} + "#, + ) + } + + #[test] + fn reorder_struct_fields() { + check_assist( + reorder_fields, + r#" + struct Foo {foo: i32, bar: i32}; + const test: Foo = <|>Foo {bar: 0, foo: 1} + "#, + r#" + struct Foo {foo: i32, bar: i32}; + const test: Foo = Foo {foo: 1, bar: 0} + "#, + ) + } + + #[test] + fn reorder_struct_pattern() { + check_assist( + reorder_fields, + r#" + struct Foo { foo: i64, bar: i64, baz: i64 } + + fn f(f: Foo) -> { + match f { + <|>Foo { baz: 0, ref mut bar, .. 
} => (), + _ => () + } + } + "#, + r#" + struct Foo { foo: i64, bar: i64, baz: i64 } + + fn f(f: Foo) -> { + match f { + Foo { ref mut bar, baz: 0, .. } => (), + _ => () + } + } + "#, + ) + } + + #[test] + fn reorder_with_extra_field() { + check_assist( + reorder_fields, + r#" + struct Foo { + foo: String, + bar: String, + } + + impl Foo { + fn new() -> Foo { + let foo = String::new(); + <|>Foo { + bar: foo.clone(), + extra: "Extra field", + foo, + } + } + } + "#, + r#" + struct Foo { + foo: String, + bar: String, + } + + impl Foo { + fn new() -> Foo { + let foo = String::new(); + Foo { + foo, + bar: foo.clone(), + extra: "Extra field", + } + } + } + "#, + ) + } +} diff --git a/crates/assists/src/handlers/replace_if_let_with_match.rs b/crates/assists/src/handlers/replace_if_let_with_match.rs new file mode 100644 index 0000000000..79097621e6 --- /dev/null +++ b/crates/assists/src/handlers/replace_if_let_with_match.rs @@ -0,0 +1,257 @@ +use syntax::{ + ast::{ + self, + edit::{AstNodeEdit, IndentLevel}, + make, + }, + AstNode, +}; + +use crate::{ + utils::{unwrap_trivial_block, TryEnum}, + AssistContext, AssistId, AssistKind, Assists, +}; + +// Assist: replace_if_let_with_match +// +// Replaces `if let` with an else branch with a `match` expression. 
+// +// ``` +// enum Action { Move { distance: u32 }, Stop } +// +// fn handle(action: Action) { +// <|>if let Action::Move { distance } = action { +// foo(distance) +// } else { +// bar() +// } +// } +// ``` +// -> +// ``` +// enum Action { Move { distance: u32 }, Stop } +// +// fn handle(action: Action) { +// match action { +// Action::Move { distance } => foo(distance), +// _ => bar(), +// } +// } +// ``` +pub(crate) fn replace_if_let_with_match(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + let if_expr: ast::IfExpr = ctx.find_node_at_offset()?; + let cond = if_expr.condition()?; + let pat = cond.pat()?; + let expr = cond.expr()?; + let then_block = if_expr.then_branch()?; + let else_block = match if_expr.else_branch()? { + ast::ElseBranch::Block(it) => it, + ast::ElseBranch::IfExpr(_) => return None, + }; + + let target = if_expr.syntax().text_range(); + acc.add( + AssistId("replace_if_let_with_match", AssistKind::RefactorRewrite), + "Replace with match", + target, + move |edit| { + let match_expr = { + let then_arm = { + let then_block = then_block.reset_indent().indent(IndentLevel(1)); + let then_expr = unwrap_trivial_block(then_block); + make::match_arm(vec![pat.clone()], then_expr) + }; + let else_arm = { + let pattern = ctx + .sema + .type_of_pat(&pat) + .and_then(|ty| TryEnum::from_ty(&ctx.sema, &ty)) + .map(|it| it.sad_pattern()) + .unwrap_or_else(|| make::wildcard_pat().into()); + let else_expr = unwrap_trivial_block(else_block); + make::match_arm(vec![pattern], else_expr) + }; + let match_expr = + make::expr_match(expr, make::match_arm_list(vec![then_arm, else_arm])); + match_expr.indent(IndentLevel::from_node(if_expr.syntax())) + }; + + edit.replace_ast::(if_expr.into(), match_expr); + }, + ) +} + +#[cfg(test)] +mod tests { + use super::*; + + use crate::tests::{check_assist, check_assist_target}; + + #[test] + fn test_replace_if_let_with_match_unwraps_simple_expressions() { + check_assist( + replace_if_let_with_match, + r#" +impl 
VariantData { + pub fn is_struct(&self) -> bool { + if <|>let VariantData::Struct(..) = *self { + true + } else { + false + } + } +} "#, + r#" +impl VariantData { + pub fn is_struct(&self) -> bool { + match *self { + VariantData::Struct(..) => true, + _ => false, + } + } +} "#, + ) + } + + #[test] + fn test_replace_if_let_with_match_doesnt_unwrap_multiline_expressions() { + check_assist( + replace_if_let_with_match, + r#" +fn foo() { + if <|>let VariantData::Struct(..) = a { + bar( + 123 + ) + } else { + false + } +} "#, + r#" +fn foo() { + match a { + VariantData::Struct(..) => { + bar( + 123 + ) + } + _ => false, + } +} "#, + ) + } + + #[test] + fn replace_if_let_with_match_target() { + check_assist_target( + replace_if_let_with_match, + r#" +impl VariantData { + pub fn is_struct(&self) -> bool { + if <|>let VariantData::Struct(..) = *self { + true + } else { + false + } + } +} "#, + "if let VariantData::Struct(..) = *self { + true + } else { + false + }", + ); + } + + #[test] + fn special_case_option() { + check_assist( + replace_if_let_with_match, + r#" +enum Option { Some(T), None } +use Option::*; + +fn foo(x: Option) { + <|>if let Some(x) = x { + println!("{}", x) + } else { + println!("none") + } +} + "#, + r#" +enum Option { Some(T), None } +use Option::*; + +fn foo(x: Option) { + match x { + Some(x) => println!("{}", x), + None => println!("none"), + } +} + "#, + ); + } + + #[test] + fn special_case_result() { + check_assist( + replace_if_let_with_match, + r#" +enum Result { Ok(T), Err(E) } +use Result::*; + +fn foo(x: Result) { + <|>if let Ok(x) = x { + println!("{}", x) + } else { + println!("none") + } +} + "#, + r#" +enum Result { Ok(T), Err(E) } +use Result::*; + +fn foo(x: Result) { + match x { + Ok(x) => println!("{}", x), + Err(_) => println!("none"), + } +} + "#, + ); + } + + #[test] + fn nested_indent() { + check_assist( + replace_if_let_with_match, + r#" +fn main() { + if true { + <|>if let Ok(rel_path) = path.strip_prefix(root_path) { + let 
rel_path = RelativePathBuf::from_path(rel_path).ok()?; + Some((*id, rel_path)) + } else { + None + } + } +} +"#, + r#" +fn main() { + if true { + match path.strip_prefix(root_path) { + Ok(rel_path) => { + let rel_path = RelativePathBuf::from_path(rel_path).ok()?; + Some((*id, rel_path)) + } + _ => None, + } + } +} +"#, + ) + } +} diff --git a/crates/assists/src/handlers/replace_let_with_if_let.rs b/crates/assists/src/handlers/replace_let_with_if_let.rs new file mode 100644 index 0000000000..ed6d0c29be --- /dev/null +++ b/crates/assists/src/handlers/replace_let_with_if_let.rs @@ -0,0 +1,100 @@ +use std::iter::once; + +use syntax::{ + ast::{ + self, + edit::{AstNodeEdit, IndentLevel}, + make, + }, + AstNode, T, +}; + +use crate::{utils::TryEnum, AssistContext, AssistId, AssistKind, Assists}; + +// Assist: replace_let_with_if_let +// +// Replaces `let` with an `if-let`. +// +// ``` +// # enum Option { Some(T), None } +// +// fn main(action: Action) { +// <|>let x = compute(); +// } +// +// fn compute() -> Option { None } +// ``` +// -> +// ``` +// # enum Option { Some(T), None } +// +// fn main(action: Action) { +// if let Some(x) = compute() { +// } +// } +// +// fn compute() -> Option { None } +// ``` +pub(crate) fn replace_let_with_if_let(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + let let_kw = ctx.find_token_at_offset(T![let])?; + let let_stmt = let_kw.ancestors().find_map(ast::LetStmt::cast)?; + let init = let_stmt.initializer()?; + let original_pat = let_stmt.pat()?; + let ty = ctx.sema.type_of_expr(&init)?; + let happy_variant = TryEnum::from_ty(&ctx.sema, &ty).map(|it| it.happy_case()); + + let target = let_kw.text_range(); + acc.add( + AssistId("replace_let_with_if_let", AssistKind::RefactorRewrite), + "Replace with if-let", + target, + |edit| { + let with_placeholder: ast::Pat = match happy_variant { + None => make::wildcard_pat().into(), + Some(var_name) => make::tuple_struct_pat( + 
make::path_unqualified(make::path_segment(make::name_ref(var_name))), + once(make::wildcard_pat().into()), + ) + .into(), + }; + let block = + make::block_expr(None, None).indent(IndentLevel::from_node(let_stmt.syntax())); + let if_ = make::expr_if(make::condition(init, Some(with_placeholder)), block); + let stmt = make::expr_stmt(if_); + + let placeholder = stmt.syntax().descendants().find_map(ast::WildcardPat::cast).unwrap(); + let stmt = stmt.replace_descendant(placeholder.into(), original_pat); + + edit.replace_ast(ast::Stmt::from(let_stmt), ast::Stmt::from(stmt)); + }, + ) +} + +#[cfg(test)] +mod tests { + use crate::tests::check_assist; + + use super::*; + + #[test] + fn replace_let_unknown_enum() { + check_assist( + replace_let_with_if_let, + r" +enum E { X(T), Y(T) } + +fn main() { + <|>let x = E::X(92); +} + ", + r" +enum E { X(T), Y(T) } + +fn main() { + if let x = E::X(92) { + } +} + ", + ) + } +} diff --git a/crates/assists/src/handlers/replace_qualified_name_with_use.rs b/crates/assists/src/handlers/replace_qualified_name_with_use.rs new file mode 100644 index 0000000000..011bf1106d --- /dev/null +++ b/crates/assists/src/handlers/replace_qualified_name_with_use.rs @@ -0,0 +1,688 @@ +use hir; +use syntax::{algo::SyntaxRewriter, ast, match_ast, AstNode, SmolStr, SyntaxNode}; + +use crate::{ + utils::{find_insert_use_container, insert_use_statement}, + AssistContext, AssistId, AssistKind, Assists, +}; + +// Assist: replace_qualified_name_with_use +// +// Adds a use statement for a given fully-qualified name. 
+// +// ``` +// fn process(map: std::collections::<|>HashMap) {} +// ``` +// -> +// ``` +// use std::collections::HashMap; +// +// fn process(map: HashMap) {} +// ``` +pub(crate) fn replace_qualified_name_with_use( + acc: &mut Assists, + ctx: &AssistContext, +) -> Option<()> { + let path: ast::Path = ctx.find_node_at_offset()?; + // We don't want to mess with use statements + if path.syntax().ancestors().find_map(ast::Use::cast).is_some() { + return None; + } + + let hir_path = ctx.sema.lower_path(&path)?; + let segments = collect_hir_path_segments(&hir_path)?; + if segments.len() < 2 { + return None; + } + + let target = path.syntax().text_range(); + acc.add( + AssistId("replace_qualified_name_with_use", AssistKind::RefactorRewrite), + "Replace qualified path with use", + target, + |builder| { + let path_to_import = hir_path.mod_path().clone(); + let container = match find_insert_use_container(path.syntax(), ctx) { + Some(c) => c, + None => return, + }; + insert_use_statement(path.syntax(), &path_to_import, ctx, builder.text_edit_builder()); + + // Now that we've brought the name into scope, re-qualify all paths that could be + // affected (that is, all paths inside the node we added the `use` to). 
+ let mut rewriter = SyntaxRewriter::default(); + let syntax = container.either(|l| l.syntax().clone(), |r| r.syntax().clone()); + shorten_paths(&mut rewriter, syntax, path); + builder.rewrite(rewriter); + }, + ) +} + +fn collect_hir_path_segments(path: &hir::Path) -> Option> { + let mut ps = Vec::::with_capacity(10); + match path.kind() { + hir::PathKind::Abs => ps.push("".into()), + hir::PathKind::Crate => ps.push("crate".into()), + hir::PathKind::Plain => {} + hir::PathKind::Super(0) => ps.push("self".into()), + hir::PathKind::Super(lvl) => { + let mut chain = "super".to_string(); + for _ in 0..*lvl { + chain += "::super"; + } + ps.push(chain.into()); + } + hir::PathKind::DollarCrate(_) => return None, + } + ps.extend(path.segments().iter().map(|it| it.name.to_string().into())); + Some(ps) +} + +/// Adds replacements to `re` that shorten `path` in all descendants of `node`. +fn shorten_paths(rewriter: &mut SyntaxRewriter<'static>, node: SyntaxNode, path: ast::Path) { + for child in node.children() { + match_ast! { + match child { + // Don't modify `use` items, as this can break the `use` item when injecting a new + // import into the use tree. + ast::Use(_it) => continue, + // Don't descend into submodules, they don't have the same `use` items in scope. + ast::Module(_it) => continue, + + ast::Path(p) => { + match maybe_replace_path(rewriter, p.clone(), path.clone()) { + Some(()) => {}, + None => shorten_paths(rewriter, p.syntax().clone(), path.clone()), + } + }, + _ => shorten_paths(rewriter, child, path.clone()), + } + } + } +} + +fn maybe_replace_path( + rewriter: &mut SyntaxRewriter<'static>, + path: ast::Path, + target: ast::Path, +) -> Option<()> { + if !path_eq(path.clone(), target) { + return None; + } + + // Shorten `path`, leaving only its last segment. 
+ if let Some(parent) = path.qualifier() { + rewriter.delete(parent.syntax()); + } + if let Some(double_colon) = path.coloncolon_token() { + rewriter.delete(&double_colon); + } + + Some(()) +} + +fn path_eq(lhs: ast::Path, rhs: ast::Path) -> bool { + let mut lhs_curr = lhs; + let mut rhs_curr = rhs; + loop { + match (lhs_curr.segment(), rhs_curr.segment()) { + (Some(lhs), Some(rhs)) if lhs.syntax().text() == rhs.syntax().text() => (), + _ => return false, + } + + match (lhs_curr.qualifier(), rhs_curr.qualifier()) { + (Some(lhs), Some(rhs)) => { + lhs_curr = lhs; + rhs_curr = rhs; + } + (None, None) => return true, + _ => return false, + } + } +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_not_applicable}; + + use super::*; + + #[test] + fn test_replace_add_use_no_anchor() { + check_assist( + replace_qualified_name_with_use, + r" +std::fmt::Debug<|> + ", + r" +use std::fmt::Debug; + +Debug + ", + ); + } + #[test] + fn test_replace_add_use_no_anchor_with_item_below() { + check_assist( + replace_qualified_name_with_use, + r" +std::fmt::Debug<|> + +fn main() { +} + ", + r" +use std::fmt::Debug; + +Debug + +fn main() { +} + ", + ); + } + + #[test] + fn test_replace_add_use_no_anchor_with_item_above() { + check_assist( + replace_qualified_name_with_use, + r" +fn main() { +} + +std::fmt::Debug<|> + ", + r" +use std::fmt::Debug; + +fn main() { +} + +Debug + ", + ); + } + + #[test] + fn test_replace_add_use_no_anchor_2seg() { + check_assist( + replace_qualified_name_with_use, + r" +std::fmt<|>::Debug + ", + r" +use std::fmt; + +fmt::Debug + ", + ); + } + + #[test] + fn test_replace_add_use() { + check_assist( + replace_qualified_name_with_use, + r" +use stdx; + +impl std::fmt::Debug<|> for Foo { +} + ", + r" +use stdx; +use std::fmt::Debug; + +impl Debug for Foo { +} + ", + ); + } + + #[test] + fn test_replace_file_use_other_anchor() { + check_assist( + replace_qualified_name_with_use, + r" +impl std::fmt::Debug<|> for Foo { +} + ", + r" 
+use std::fmt::Debug; + +impl Debug for Foo { +} + ", + ); + } + + #[test] + fn test_replace_add_use_other_anchor_indent() { + check_assist( + replace_qualified_name_with_use, + r" + impl std::fmt::Debug<|> for Foo { + } + ", + r" + use std::fmt::Debug; + + impl Debug for Foo { + } + ", + ); + } + + #[test] + fn test_replace_split_different() { + check_assist( + replace_qualified_name_with_use, + r" +use std::fmt; + +impl std::io<|> for Foo { +} + ", + r" +use std::{io, fmt}; + +impl io for Foo { +} + ", + ); + } + + #[test] + fn test_replace_split_self_for_use() { + check_assist( + replace_qualified_name_with_use, + r" +use std::fmt; + +impl std::fmt::Debug<|> for Foo { +} + ", + r" +use std::fmt::{self, Debug, }; + +impl Debug for Foo { +} + ", + ); + } + + #[test] + fn test_replace_split_self_for_target() { + check_assist( + replace_qualified_name_with_use, + r" +use std::fmt::Debug; + +impl std::fmt<|> for Foo { +} + ", + r" +use std::fmt::{self, Debug}; + +impl fmt for Foo { +} + ", + ); + } + + #[test] + fn test_replace_add_to_nested_self_nested() { + check_assist( + replace_qualified_name_with_use, + r" +use std::fmt::{Debug, nested::{Display}}; + +impl std::fmt::nested<|> for Foo { +} +", + r" +use std::fmt::{Debug, nested::{Display, self}}; + +impl nested for Foo { +} +", + ); + } + + #[test] + fn test_replace_add_to_nested_self_already_included() { + check_assist( + replace_qualified_name_with_use, + r" +use std::fmt::{Debug, nested::{self, Display}}; + +impl std::fmt::nested<|> for Foo { +} +", + r" +use std::fmt::{Debug, nested::{self, Display}}; + +impl nested for Foo { +} +", + ); + } + + #[test] + fn test_replace_add_to_nested_nested() { + check_assist( + replace_qualified_name_with_use, + r" +use std::fmt::{Debug, nested::{Display}}; + +impl std::fmt::nested::Debug<|> for Foo { +} +", + r" +use std::fmt::{Debug, nested::{Display, Debug}}; + +impl Debug for Foo { +} +", + ); + } + + #[test] + fn test_replace_split_common_target_longer() { + 
check_assist( + replace_qualified_name_with_use, + r" +use std::fmt::Debug; + +impl std::fmt::nested::Display<|> for Foo { +} +", + r" +use std::fmt::{nested::Display, Debug}; + +impl Display for Foo { +} +", + ); + } + + #[test] + fn test_replace_split_common_use_longer() { + check_assist( + replace_qualified_name_with_use, + r" +use std::fmt::nested::Debug; + +impl std::fmt::Display<|> for Foo { +} +", + r" +use std::fmt::{Display, nested::Debug}; + +impl Display for Foo { +} +", + ); + } + + #[test] + fn test_replace_use_nested_import() { + check_assist( + replace_qualified_name_with_use, + r" +use crate::{ + ty::{Substs, Ty}, + AssocItem, +}; + +fn foo() { crate::ty::lower<|>::trait_env() } +", + r" +use crate::{ + ty::{Substs, Ty, lower}, + AssocItem, +}; + +fn foo() { lower::trait_env() } +", + ); + } + + #[test] + fn test_replace_alias() { + check_assist( + replace_qualified_name_with_use, + r" +use std::fmt as foo; + +impl foo::Debug<|> for Foo { +} +", + r" +use std::fmt as foo; + +impl Debug for Foo { +} +", + ); + } + + #[test] + fn test_replace_not_applicable_one_segment() { + check_assist_not_applicable( + replace_qualified_name_with_use, + r" +impl foo<|> for Foo { +} +", + ); + } + + #[test] + fn test_replace_not_applicable_in_use() { + check_assist_not_applicable( + replace_qualified_name_with_use, + r" +use std::fmt<|>; +", + ); + } + + #[test] + fn test_replace_add_use_no_anchor_in_mod_mod() { + check_assist( + replace_qualified_name_with_use, + r" +mod foo { + mod bar { + std::fmt::Debug<|> + } +} + ", + r" +mod foo { + mod bar { + use std::fmt::Debug; + + Debug + } +} + ", + ); + } + + #[test] + fn inserts_imports_after_inner_attributes() { + check_assist( + replace_qualified_name_with_use, + r" +#![allow(dead_code)] + +fn main() { + std::fmt::Debug<|> +} + ", + r" +#![allow(dead_code)] +use std::fmt::Debug; + +fn main() { + Debug +} + ", + ); + } + + #[test] + fn replaces_all_affected_paths() { + check_assist( + replace_qualified_name_with_use, 
+ r" +fn main() { + std::fmt::Debug<|>; + let x: std::fmt::Debug = std::fmt::Debug; +} + ", + r" +use std::fmt::Debug; + +fn main() { + Debug; + let x: Debug = Debug; +} + ", + ); + } + + #[test] + fn replaces_all_affected_paths_mod() { + check_assist( + replace_qualified_name_with_use, + r" +mod m { + fn f() { + std::fmt::Debug<|>; + let x: std::fmt::Debug = std::fmt::Debug; + } + fn g() { + std::fmt::Debug; + } +} + +fn f() { + std::fmt::Debug; +} + ", + r" +mod m { + use std::fmt::Debug; + + fn f() { + Debug; + let x: Debug = Debug; + } + fn g() { + Debug; + } +} + +fn f() { + std::fmt::Debug; +} + ", + ); + } + + #[test] + fn does_not_replace_in_submodules() { + check_assist( + replace_qualified_name_with_use, + r" +fn main() { + std::fmt::Debug<|>; +} + +mod sub { + fn f() { + std::fmt::Debug; + } +} + ", + r" +use std::fmt::Debug; + +fn main() { + Debug; +} + +mod sub { + fn f() { + std::fmt::Debug; + } +} + ", + ); + } + + #[test] + fn does_not_replace_in_use() { + check_assist( + replace_qualified_name_with_use, + r" +use std::fmt::Display; + +fn main() { + std::fmt<|>; +} + ", + r" +use std::fmt::{self, Display}; + +fn main() { + fmt; +} + ", + ); + } + + #[test] + fn does_not_replace_pub_use() { + check_assist( + replace_qualified_name_with_use, + r" +pub use std::fmt; + +impl std::io<|> for Foo { +} + ", + r" +use std::io; + +pub use std::fmt; + +impl io for Foo { +} + ", + ); + } + + #[test] + fn does_not_replace_pub_crate_use() { + check_assist( + replace_qualified_name_with_use, + r" +pub(crate) use std::fmt; + +impl std::io<|> for Foo { +} + ", + r" +use std::io; + +pub(crate) use std::fmt; + +impl io for Foo { +} + ", + ); + } +} diff --git a/crates/assists/src/handlers/replace_unwrap_with_match.rs b/crates/assists/src/handlers/replace_unwrap_with_match.rs new file mode 100644 index 0000000000..9705f11b76 --- /dev/null +++ b/crates/assists/src/handlers/replace_unwrap_with_match.rs @@ -0,0 +1,187 @@ +use std::iter; + +use syntax::{ + ast::{ + self, + 
edit::{AstNodeEdit, IndentLevel}, + make, + }, + AstNode, +}; + +use crate::{ + utils::{render_snippet, Cursor, TryEnum}, + AssistContext, AssistId, AssistKind, Assists, +}; + +// Assist: replace_unwrap_with_match +// +// Replaces `unwrap` a `match` expression. Works for Result and Option. +// +// ``` +// enum Result { Ok(T), Err(E) } +// fn main() { +// let x: Result = Result::Ok(92); +// let y = x.<|>unwrap(); +// } +// ``` +// -> +// ``` +// enum Result { Ok(T), Err(E) } +// fn main() { +// let x: Result = Result::Ok(92); +// let y = match x { +// Ok(a) => a, +// $0_ => unreachable!(), +// }; +// } +// ``` +pub(crate) fn replace_unwrap_with_match(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + let method_call: ast::MethodCallExpr = ctx.find_node_at_offset()?; + let name = method_call.name_ref()?; + if name.text() != "unwrap" { + return None; + } + let caller = method_call.expr()?; + let ty = ctx.sema.type_of_expr(&caller)?; + let happy_variant = TryEnum::from_ty(&ctx.sema, &ty)?.happy_case(); + let target = method_call.syntax().text_range(); + acc.add( + AssistId("replace_unwrap_with_match", AssistKind::RefactorRewrite), + "Replace unwrap with match", + target, + |builder| { + let ok_path = make::path_unqualified(make::path_segment(make::name_ref(happy_variant))); + let it = make::ident_pat(make::name("a")).into(); + let ok_tuple = make::tuple_struct_pat(ok_path, iter::once(it)).into(); + + let bind_path = make::path_unqualified(make::path_segment(make::name_ref("a"))); + let ok_arm = make::match_arm(iter::once(ok_tuple), make::expr_path(bind_path)); + + let unreachable_call = make::expr_unreachable(); + let err_arm = + make::match_arm(iter::once(make::wildcard_pat().into()), unreachable_call); + + let match_arm_list = make::match_arm_list(vec![ok_arm, err_arm]); + let match_expr = make::expr_match(caller.clone(), match_arm_list) + .indent(IndentLevel::from_node(method_call.syntax())); + + let range = method_call.syntax().text_range(); + match 
ctx.config.snippet_cap { + Some(cap) => { + let err_arm = match_expr + .syntax() + .descendants() + .filter_map(ast::MatchArm::cast) + .last() + .unwrap(); + let snippet = + render_snippet(cap, match_expr.syntax(), Cursor::Before(err_arm.syntax())); + builder.replace_snippet(cap, range, snippet) + } + None => builder.replace(range, match_expr.to_string()), + } + }, + ) +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_target}; + + use super::*; + + #[test] + fn test_replace_result_unwrap_with_match() { + check_assist( + replace_unwrap_with_match, + r" +enum Result { Ok(T), Err(E) } +fn i(a: T) -> T { a } +fn main() { + let x: Result = Result::Ok(92); + let y = i(x).<|>unwrap(); +} + ", + r" +enum Result { Ok(T), Err(E) } +fn i(a: T) -> T { a } +fn main() { + let x: Result = Result::Ok(92); + let y = match i(x) { + Ok(a) => a, + $0_ => unreachable!(), + }; +} + ", + ) + } + + #[test] + fn test_replace_option_unwrap_with_match() { + check_assist( + replace_unwrap_with_match, + r" +enum Option { Some(T), None } +fn i(a: T) -> T { a } +fn main() { + let x = Option::Some(92); + let y = i(x).<|>unwrap(); +} + ", + r" +enum Option { Some(T), None } +fn i(a: T) -> T { a } +fn main() { + let x = Option::Some(92); + let y = match i(x) { + Some(a) => a, + $0_ => unreachable!(), + }; +} + ", + ); + } + + #[test] + fn test_replace_result_unwrap_with_match_chaining() { + check_assist( + replace_unwrap_with_match, + r" +enum Result { Ok(T), Err(E) } +fn i(a: T) -> T { a } +fn main() { + let x: Result = Result::Ok(92); + let y = i(x).<|>unwrap().count_zeroes(); +} + ", + r" +enum Result { Ok(T), Err(E) } +fn i(a: T) -> T { a } +fn main() { + let x: Result = Result::Ok(92); + let y = match i(x) { + Ok(a) => a, + $0_ => unreachable!(), + }.count_zeroes(); +} + ", + ) + } + + #[test] + fn replace_unwrap_with_match_target() { + check_assist_target( + replace_unwrap_with_match, + r" +enum Option { Some(T), None } +fn i(a: T) -> T { a } +fn main() { + let x 
= Option::Some(92); + let y = i(x).<|>unwrap(); +} + ", + r"i(x).unwrap()", + ); + } +} diff --git a/crates/assists/src/handlers/split_import.rs b/crates/assists/src/handlers/split_import.rs new file mode 100644 index 0000000000..15e67eaa19 --- /dev/null +++ b/crates/assists/src/handlers/split_import.rs @@ -0,0 +1,79 @@ +use std::iter::successors; + +use syntax::{ast, AstNode, T}; + +use crate::{AssistContext, AssistId, AssistKind, Assists}; + +// Assist: split_import +// +// Wraps the tail of import into braces. +// +// ``` +// use std::<|>collections::HashMap; +// ``` +// -> +// ``` +// use std::{collections::HashMap}; +// ``` +pub(crate) fn split_import(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + let colon_colon = ctx.find_token_at_offset(T![::])?; + let path = ast::Path::cast(colon_colon.parent())?.qualifier()?; + let top_path = successors(Some(path.clone()), |it| it.parent_path()).last()?; + + let use_tree = top_path.syntax().ancestors().find_map(ast::UseTree::cast)?; + + let new_tree = use_tree.split_prefix(&path); + if new_tree == use_tree { + return None; + } + + let target = colon_colon.text_range(); + acc.add(AssistId("split_import", AssistKind::RefactorRewrite), "Split import", target, |edit| { + edit.replace_ast(use_tree, new_tree); + }) +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target}; + + use super::*; + + #[test] + fn test_split_import() { + check_assist( + split_import, + "use crate::<|>db::RootDatabase;", + "use crate::{db::RootDatabase};", + ) + } + + #[test] + fn split_import_works_with_trees() { + check_assist( + split_import, + "use crate:<|>:db::{RootDatabase, FileSymbol}", + "use crate::{db::{RootDatabase, FileSymbol}}", + ) + } + + #[test] + fn split_import_target() { + check_assist_target(split_import, "use crate::<|>db::{RootDatabase, FileSymbol}", "::"); + } + + #[test] + fn issue4044() { + check_assist_not_applicable(split_import, "use crate::<|>:::self;") 
+ } + + #[test] + fn test_empty_use() { + check_assist_not_applicable( + split_import, + r" +use std::<|> +fn main() {}", + ); + } +} diff --git a/crates/assists/src/handlers/unwrap_block.rs b/crates/assists/src/handlers/unwrap_block.rs new file mode 100644 index 0000000000..3851aeb3e7 --- /dev/null +++ b/crates/assists/src/handlers/unwrap_block.rs @@ -0,0 +1,517 @@ +use syntax::{ + ast::{ + self, + edit::{AstNodeEdit, IndentLevel}, + }, + AstNode, TextRange, T, +}; + +use crate::{utils::unwrap_trivial_block, AssistContext, AssistId, AssistKind, Assists}; + +// Assist: unwrap_block +// +// This assist removes if...else, for, while and loop control statements to just keep the body. +// +// ``` +// fn foo() { +// if true {<|> +// println!("foo"); +// } +// } +// ``` +// -> +// ``` +// fn foo() { +// println!("foo"); +// } +// ``` +pub(crate) fn unwrap_block(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { + let assist_id = AssistId("unwrap_block", AssistKind::RefactorRewrite); + let assist_label = "Unwrap block"; + + let l_curly_token = ctx.find_token_at_offset(T!['{'])?; + let mut block = ast::BlockExpr::cast(l_curly_token.parent())?; + let mut parent = block.syntax().parent()?; + if ast::MatchArm::can_cast(parent.kind()) { + parent = parent.ancestors().find(|it| ast::MatchExpr::can_cast(it.kind()))? 
+ } + + let parent = ast::Expr::cast(parent)?; + + match parent.clone() { + ast::Expr::ForExpr(_) | ast::Expr::WhileExpr(_) | ast::Expr::LoopExpr(_) => (), + ast::Expr::MatchExpr(_) => block = block.dedent(IndentLevel(1)), + ast::Expr::IfExpr(if_expr) => { + let then_branch = if_expr.then_branch()?; + if then_branch == block { + if let Some(ancestor) = if_expr.syntax().parent().and_then(ast::IfExpr::cast) { + // For `else if` blocks + let ancestor_then_branch = ancestor.then_branch()?; + + let target = then_branch.syntax().text_range(); + return acc.add(assist_id, assist_label, target, |edit| { + let range_to_del_else_if = TextRange::new( + ancestor_then_branch.syntax().text_range().end(), + l_curly_token.text_range().start(), + ); + let range_to_del_rest = TextRange::new( + then_branch.syntax().text_range().end(), + if_expr.syntax().text_range().end(), + ); + + edit.delete(range_to_del_rest); + edit.delete(range_to_del_else_if); + edit.replace( + target, + update_expr_string(then_branch.to_string(), &[' ', '{']), + ); + }); + } + } else { + let target = block.syntax().text_range(); + return acc.add(assist_id, assist_label, target, |edit| { + let range_to_del = TextRange::new( + then_branch.syntax().text_range().end(), + l_curly_token.text_range().start(), + ); + + edit.delete(range_to_del); + edit.replace(target, update_expr_string(block.to_string(), &[' ', '{'])); + }); + } + } + _ => return None, + }; + + let unwrapped = unwrap_trivial_block(block); + let target = unwrapped.syntax().text_range(); + acc.add(assist_id, assist_label, target, |builder| { + builder.replace( + parent.syntax().text_range(), + update_expr_string(unwrapped.to_string(), &[' ', '{', '\n']), + ); + }) +} + +fn update_expr_string(expr_str: String, trim_start_pat: &[char]) -> String { + let expr_string = expr_str.trim_start_matches(trim_start_pat); + let mut expr_string_lines: Vec<&str> = expr_string.lines().collect(); + expr_string_lines.pop(); // Delete last line + + expr_string_lines + 
.into_iter() + .map(|line| line.replacen(" ", "", 1)) // Delete indentation + .collect::>() + .join("\n") +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_not_applicable}; + + use super::*; + + #[test] + fn simple_if() { + check_assist( + unwrap_block, + r#" + fn main() { + bar(); + if true {<|> + foo(); + + //comment + bar(); + } else { + println!("bar"); + } + } + "#, + r#" + fn main() { + bar(); + foo(); + + //comment + bar(); + } + "#, + ); + } + + #[test] + fn simple_if_else() { + check_assist( + unwrap_block, + r#" + fn main() { + bar(); + if true { + foo(); + + //comment + bar(); + } else {<|> + println!("bar"); + } + } + "#, + r#" + fn main() { + bar(); + if true { + foo(); + + //comment + bar(); + } + println!("bar"); + } + "#, + ); + } + + #[test] + fn simple_if_else_if() { + check_assist( + unwrap_block, + r#" + fn main() { + //bar(); + if true { + println!("true"); + + //comment + //bar(); + } else if false {<|> + println!("bar"); + } else { + println!("foo"); + } + } + "#, + r#" + fn main() { + //bar(); + if true { + println!("true"); + + //comment + //bar(); + } + println!("bar"); + } + "#, + ); + } + + #[test] + fn simple_if_else_if_nested() { + check_assist( + unwrap_block, + r#" + fn main() { + //bar(); + if true { + println!("true"); + + //comment + //bar(); + } else if false { + println!("bar"); + } else if true {<|> + println!("foo"); + } + } + "#, + r#" + fn main() { + //bar(); + if true { + println!("true"); + + //comment + //bar(); + } else if false { + println!("bar"); + } + println!("foo"); + } + "#, + ); + } + + #[test] + fn simple_if_else_if_nested_else() { + check_assist( + unwrap_block, + r#" + fn main() { + //bar(); + if true { + println!("true"); + + //comment + //bar(); + } else if false { + println!("bar"); + } else if true { + println!("foo"); + } else {<|> + println!("else"); + } + } + "#, + r#" + fn main() { + //bar(); + if true { + println!("true"); + + //comment + //bar(); + } else if false { + 
println!("bar"); + } else if true { + println!("foo"); + } + println!("else"); + } + "#, + ); + } + + #[test] + fn simple_if_else_if_nested_middle() { + check_assist( + unwrap_block, + r#" + fn main() { + //bar(); + if true { + println!("true"); + + //comment + //bar(); + } else if false { + println!("bar"); + } else if true {<|> + println!("foo"); + } else { + println!("else"); + } + } + "#, + r#" + fn main() { + //bar(); + if true { + println!("true"); + + //comment + //bar(); + } else if false { + println!("bar"); + } + println!("foo"); + } + "#, + ); + } + + #[test] + fn simple_if_bad_cursor_position() { + check_assist_not_applicable( + unwrap_block, + r#" + fn main() { + bar();<|> + if true { + foo(); + + //comment + bar(); + } else { + println!("bar"); + } + } + "#, + ); + } + + #[test] + fn simple_for() { + check_assist( + unwrap_block, + r#" + fn main() { + for i in 0..5 {<|> + if true { + foo(); + + //comment + bar(); + } else { + println!("bar"); + } + } + } + "#, + r#" + fn main() { + if true { + foo(); + + //comment + bar(); + } else { + println!("bar"); + } + } + "#, + ); + } + + #[test] + fn simple_if_in_for() { + check_assist( + unwrap_block, + r#" + fn main() { + for i in 0..5 { + if true {<|> + foo(); + + //comment + bar(); + } else { + println!("bar"); + } + } + } + "#, + r#" + fn main() { + for i in 0..5 { + foo(); + + //comment + bar(); + } + } + "#, + ); + } + + #[test] + fn simple_loop() { + check_assist( + unwrap_block, + r#" + fn main() { + loop {<|> + if true { + foo(); + + //comment + bar(); + } else { + println!("bar"); + } + } + } + "#, + r#" + fn main() { + if true { + foo(); + + //comment + bar(); + } else { + println!("bar"); + } + } + "#, + ); + } + + #[test] + fn simple_while() { + check_assist( + unwrap_block, + r#" + fn main() { + while true {<|> + if true { + foo(); + + //comment + bar(); + } else { + println!("bar"); + } + } + } + "#, + r#" + fn main() { + if true { + foo(); + + //comment + bar(); + } else { + println!("bar"); + 
} + } + "#, + ); + } + + #[test] + fn unwrap_match_arm() { + check_assist( + unwrap_block, + r#" +fn main() { + match rel_path { + Ok(rel_path) => {<|> + let rel_path = RelativePathBuf::from_path(rel_path).ok()?; + Some((*id, rel_path)) + } + Err(_) => None, + } +} +"#, + r#" +fn main() { + let rel_path = RelativePathBuf::from_path(rel_path).ok()?; + Some((*id, rel_path)) +} +"#, + ); + } + + #[test] + fn simple_if_in_while_bad_cursor_position() { + check_assist_not_applicable( + unwrap_block, + r#" + fn main() { + while true { + if true { + foo();<|> + + //comment + bar(); + } else { + println!("bar"); + } + } + } + "#, + ); + } +} diff --git a/crates/assists/src/lib.rs b/crates/assists/src/lib.rs new file mode 100644 index 0000000000..ae90d68a35 --- /dev/null +++ b/crates/assists/src/lib.rs @@ -0,0 +1,241 @@ +//! `assists` crate provides a bunch of code assists, also known as code +//! actions (in LSP) or intentions (in IntelliJ). +//! +//! An assist is a micro-refactoring, which is automatically activated in +//! certain context. For example, if the cursor is over `,`, a "swap `,`" assist +//! becomes available. + +#[allow(unused)] +macro_rules! 
eprintln { + ($($tt:tt)*) => { stdx::eprintln!($($tt)*) }; +} + +mod assist_config; +mod assist_context; +#[cfg(test)] +mod tests; +pub mod utils; +pub mod ast_transform; + +use base_db::FileRange; +use hir::Semantics; +use ide_db::{source_change::SourceChange, RootDatabase}; +use syntax::TextRange; + +pub(crate) use crate::assist_context::{AssistContext, Assists}; + +pub use assist_config::AssistConfig; + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum AssistKind { + None, + QuickFix, + Generate, + Refactor, + RefactorExtract, + RefactorInline, + RefactorRewrite, +} + +impl AssistKind { + pub fn contains(self, other: AssistKind) -> bool { + if self == other { + return true; + } + + match self { + AssistKind::None | AssistKind::Generate => return true, + AssistKind::Refactor => match other { + AssistKind::RefactorExtract + | AssistKind::RefactorInline + | AssistKind::RefactorRewrite => return true, + _ => return false, + }, + _ => return false, + } + } +} + +/// Unique identifier of the assist, should not be shown to the user +/// directly. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub struct AssistId(pub &'static str, pub AssistKind); + +#[derive(Clone, Debug)] +pub struct GroupLabel(pub String); + +#[derive(Debug, Clone)] +pub struct Assist { + id: AssistId, + /// Short description of the assist, as shown in the UI. + label: String, + group: Option, + /// Target ranges are used to sort assists: the smaller the target range, + /// the more specific assist is, and so it should be sorted first. + target: TextRange, +} + +#[derive(Debug, Clone)] +pub struct ResolvedAssist { + pub assist: Assist, + pub source_change: SourceChange, +} + +impl Assist { + /// Return all the assists applicable at the given position. + /// + /// Assists are returned in the "unresolved" state, that is only labels are + /// returned, without actual edits. 
+ pub fn unresolved(db: &RootDatabase, config: &AssistConfig, range: FileRange) -> Vec { + let sema = Semantics::new(db); + let ctx = AssistContext::new(sema, config, range); + let mut acc = Assists::new_unresolved(&ctx); + handlers::all().iter().for_each(|handler| { + handler(&mut acc, &ctx); + }); + acc.finish_unresolved() + } + + /// Return all the assists applicable at the given position. + /// + /// Assists are returned in the "resolved" state, that is with edit fully + /// computed. + pub fn resolved( + db: &RootDatabase, + config: &AssistConfig, + range: FileRange, + ) -> Vec { + let sema = Semantics::new(db); + let ctx = AssistContext::new(sema, config, range); + let mut acc = Assists::new_resolved(&ctx); + handlers::all().iter().for_each(|handler| { + handler(&mut acc, &ctx); + }); + acc.finish_resolved() + } + + pub(crate) fn new( + id: AssistId, + label: String, + group: Option, + target: TextRange, + ) -> Assist { + assert!(label.starts_with(|c: char| c.is_uppercase())); + Assist { id, label, group, target } + } + + pub fn id(&self) -> AssistId { + self.id + } + + pub fn label(&self) -> String { + self.label.clone() + } + + pub fn group(&self) -> Option { + self.group.clone() + } + + pub fn target(&self) -> TextRange { + self.target + } +} + +mod handlers { + use crate::{AssistContext, Assists}; + + pub(crate) type Handler = fn(&mut Assists, &AssistContext) -> Option<()>; + + mod add_custom_impl; + mod add_explicit_type; + mod add_missing_impl_members; + mod add_turbo_fish; + mod apply_demorgan; + mod auto_import; + mod change_return_type_to_result; + mod change_visibility; + mod early_return; + mod expand_glob_import; + mod extract_struct_from_enum_variant; + mod extract_variable; + mod fill_match_arms; + mod fix_visibility; + mod flip_binexpr; + mod flip_comma; + mod flip_trait_bound; + mod generate_derive; + mod generate_from_impl_for_enum; + mod generate_function; + mod generate_impl; + mod generate_new; + mod inline_local_variable; + mod 
introduce_named_lifetime; + mod invert_if; + mod merge_imports; + mod merge_match_arms; + mod move_bounds; + mod move_guard; + mod raw_string; + mod remove_dbg; + mod remove_mut; + mod reorder_fields; + mod replace_if_let_with_match; + mod replace_let_with_if_let; + mod replace_qualified_name_with_use; + mod replace_unwrap_with_match; + mod split_import; + mod unwrap_block; + + pub(crate) fn all() -> &'static [Handler] { + &[ + // These are alphabetic for the foolish consistency + add_custom_impl::add_custom_impl, + add_explicit_type::add_explicit_type, + add_turbo_fish::add_turbo_fish, + apply_demorgan::apply_demorgan, + auto_import::auto_import, + change_return_type_to_result::change_return_type_to_result, + change_visibility::change_visibility, + early_return::convert_to_guarded_return, + expand_glob_import::expand_glob_import, + extract_struct_from_enum_variant::extract_struct_from_enum_variant, + extract_variable::extract_variable, + fill_match_arms::fill_match_arms, + fix_visibility::fix_visibility, + flip_binexpr::flip_binexpr, + flip_comma::flip_comma, + flip_trait_bound::flip_trait_bound, + generate_derive::generate_derive, + generate_from_impl_for_enum::generate_from_impl_for_enum, + generate_function::generate_function, + generate_impl::generate_impl, + generate_new::generate_new, + inline_local_variable::inline_local_variable, + introduce_named_lifetime::introduce_named_lifetime, + invert_if::invert_if, + merge_imports::merge_imports, + merge_match_arms::merge_match_arms, + move_bounds::move_bounds_to_where_clause, + move_guard::move_arm_cond_to_match_guard, + move_guard::move_guard_to_arm_body, + raw_string::add_hash, + raw_string::make_raw_string, + raw_string::make_usual_string, + raw_string::remove_hash, + remove_dbg::remove_dbg, + remove_mut::remove_mut, + reorder_fields::reorder_fields, + replace_if_let_with_match::replace_if_let_with_match, + replace_let_with_if_let::replace_let_with_if_let, + 
replace_qualified_name_with_use::replace_qualified_name_with_use, + replace_unwrap_with_match::replace_unwrap_with_match, + split_import::split_import, + unwrap_block::unwrap_block, + // These are manually sorted for better priorities + add_missing_impl_members::add_missing_impl_members, + add_missing_impl_members::add_missing_default_members, + // Are you sure you want to add new assist here, and not to the + // sorted list above? + ] + } +} diff --git a/crates/assists/src/tests.rs b/crates/assists/src/tests.rs new file mode 100644 index 0000000000..ba1fb543b8 --- /dev/null +++ b/crates/assists/src/tests.rs @@ -0,0 +1,179 @@ +mod generated; + +use base_db::{fixture::WithFixture, FileId, FileRange, SourceDatabaseExt}; +use hir::Semantics; +use ide_db::RootDatabase; +use syntax::TextRange; +use test_utils::{assert_eq_text, extract_offset, extract_range}; + +use crate::{handlers::Handler, Assist, AssistConfig, AssistContext, AssistKind, Assists}; +use stdx::trim_indent; + +pub(crate) fn with_single_file(text: &str) -> (RootDatabase, FileId) { + RootDatabase::with_single_file(text) +} + +pub(crate) fn check_assist(assist: Handler, ra_fixture_before: &str, ra_fixture_after: &str) { + let ra_fixture_after = trim_indent(ra_fixture_after); + check(assist, ra_fixture_before, ExpectedResult::After(&ra_fixture_after)); +} + +// FIXME: instead of having a separate function here, maybe use +// `extract_ranges` and mark the target as ` ` in the +// fixture? 
+pub(crate) fn check_assist_target(assist: Handler, ra_fixture: &str, target: &str) { + check(assist, ra_fixture, ExpectedResult::Target(target)); +} + +pub(crate) fn check_assist_not_applicable(assist: Handler, ra_fixture: &str) { + check(assist, ra_fixture, ExpectedResult::NotApplicable); +} + +fn check_doc_test(assist_id: &str, before: &str, after: &str) { + let after = trim_indent(after); + let (db, file_id, selection) = RootDatabase::with_range_or_offset(&before); + let before = db.file_text(file_id).to_string(); + let frange = FileRange { file_id, range: selection.into() }; + + let mut assist = Assist::resolved(&db, &AssistConfig::default(), frange) + .into_iter() + .find(|assist| assist.assist.id.0 == assist_id) + .unwrap_or_else(|| { + panic!( + "\n\nAssist is not applicable: {}\nAvailable assists: {}", + assist_id, + Assist::resolved(&db, &AssistConfig::default(), frange) + .into_iter() + .map(|assist| assist.assist.id.0) + .collect::>() + .join(", ") + ) + }); + + let actual = { + let change = assist.source_change.source_file_edits.pop().unwrap(); + let mut actual = before; + change.edit.apply(&mut actual); + actual + }; + assert_eq_text!(&after, &actual); +} + +enum ExpectedResult<'a> { + NotApplicable, + After(&'a str), + Target(&'a str), +} + +fn check(handler: Handler, before: &str, expected: ExpectedResult) { + let (db, file_with_caret_id, range_or_offset) = RootDatabase::with_range_or_offset(before); + let text_without_caret = db.file_text(file_with_caret_id).to_string(); + + let frange = FileRange { file_id: file_with_caret_id, range: range_or_offset.into() }; + + let sema = Semantics::new(&db); + let config = AssistConfig::default(); + let ctx = AssistContext::new(sema, &config, frange); + let mut acc = Assists::new_resolved(&ctx); + handler(&mut acc, &ctx); + let mut res = acc.finish_resolved(); + let assist = res.pop(); + match (assist, expected) { + (Some(assist), ExpectedResult::After(after)) => { + let mut source_change = 
assist.source_change; + let change = source_change.source_file_edits.pop().unwrap(); + + let mut actual = db.file_text(change.file_id).as_ref().to_owned(); + change.edit.apply(&mut actual); + assert_eq_text!(after, &actual); + } + (Some(assist), ExpectedResult::Target(target)) => { + let range = assist.assist.target; + assert_eq_text!(&text_without_caret[range], target); + } + (Some(_), ExpectedResult::NotApplicable) => panic!("assist should not be applicable!"), + (None, ExpectedResult::After(_)) | (None, ExpectedResult::Target(_)) => { + panic!("code action is not applicable") + } + (None, ExpectedResult::NotApplicable) => (), + }; +} + +#[test] +fn assist_order_field_struct() { + let before = "struct Foo { <|>bar: u32 }"; + let (before_cursor_pos, before) = extract_offset(before); + let (db, file_id) = with_single_file(&before); + let frange = FileRange { file_id, range: TextRange::empty(before_cursor_pos) }; + let assists = Assist::resolved(&db, &AssistConfig::default(), frange); + let mut assists = assists.iter(); + + assert_eq!( + assists.next().expect("expected assist").assist.label, + "Change visibility to pub(crate)" + ); + assert_eq!(assists.next().expect("expected assist").assist.label, "Add `#[derive]`"); +} + +#[test] +fn assist_order_if_expr() { + let before = " + pub fn test_some_range(a: int) -> bool { + if let 2..6 = <|>5<|> { + true + } else { + false + } + }"; + let (range, before) = extract_range(before); + let (db, file_id) = with_single_file(&before); + let frange = FileRange { file_id, range }; + let assists = Assist::resolved(&db, &AssistConfig::default(), frange); + let mut assists = assists.iter(); + + assert_eq!(assists.next().expect("expected assist").assist.label, "Extract into variable"); + assert_eq!(assists.next().expect("expected assist").assist.label, "Replace with match"); +} + +#[test] +fn assist_filter_works() { + let before = " + pub fn test_some_range(a: int) -> bool { + if let 2..6 = <|>5<|> { + true + } else { + false + } + 
}"; + let (range, before) = extract_range(before); + let (db, file_id) = with_single_file(&before); + let frange = FileRange { file_id, range }; + + { + let mut cfg = AssistConfig::default(); + cfg.allowed = Some(vec![AssistKind::Refactor]); + + let assists = Assist::resolved(&db, &cfg, frange); + let mut assists = assists.iter(); + + assert_eq!(assists.next().expect("expected assist").assist.label, "Extract into variable"); + assert_eq!(assists.next().expect("expected assist").assist.label, "Replace with match"); + } + + { + let mut cfg = AssistConfig::default(); + cfg.allowed = Some(vec![AssistKind::RefactorExtract]); + let assists = Assist::resolved(&db, &cfg, frange); + assert_eq!(assists.len(), 1); + + let mut assists = assists.iter(); + assert_eq!(assists.next().expect("expected assist").assist.label, "Extract into variable"); + } + + { + let mut cfg = AssistConfig::default(); + cfg.allowed = Some(vec![AssistKind::QuickFix]); + let assists = Assist::resolved(&db, &cfg, frange); + assert!(assists.is_empty(), "All asserts but quickfixes should be filtered out"); + } +} diff --git a/crates/assists/src/tests/generated.rs b/crates/assists/src/tests/generated.rs new file mode 100644 index 0000000000..d16e6fb0a6 --- /dev/null +++ b/crates/assists/src/tests/generated.rs @@ -0,0 +1,892 @@ +//! 
Generated file, do not edit by hand, see `xtask/src/codegen` + +use super::check_doc_test; + +#[test] +fn doctest_add_custom_impl() { + check_doc_test( + "add_custom_impl", + r#####" +#[derive(Deb<|>ug, Display)] +struct S; +"#####, + r#####" +#[derive(Display)] +struct S; + +impl Debug for S { + $0 +} +"#####, + ) +} + +#[test] +fn doctest_add_explicit_type() { + check_doc_test( + "add_explicit_type", + r#####" +fn main() { + let x<|> = 92; +} +"#####, + r#####" +fn main() { + let x: i32 = 92; +} +"#####, + ) +} + +#[test] +fn doctest_add_hash() { + check_doc_test( + "add_hash", + r#####" +fn main() { + r#"Hello,<|> World!"#; +} +"#####, + r#####" +fn main() { + r##"Hello, World!"##; +} +"#####, + ) +} + +#[test] +fn doctest_add_impl_default_members() { + check_doc_test( + "add_impl_default_members", + r#####" +trait Trait { + Type X; + fn foo(&self); + fn bar(&self) {} +} + +impl Trait for () { + Type X = (); + fn foo(&self) {}<|> + +} +"#####, + r#####" +trait Trait { + Type X; + fn foo(&self); + fn bar(&self) {} +} + +impl Trait for () { + Type X = (); + fn foo(&self) {} + $0fn bar(&self) {} + +} +"#####, + ) +} + +#[test] +fn doctest_add_impl_missing_members() { + check_doc_test( + "add_impl_missing_members", + r#####" +trait Trait { + Type X; + fn foo(&self) -> T; + fn bar(&self) {} +} + +impl Trait for () {<|> + +} +"#####, + r#####" +trait Trait { + Type X; + fn foo(&self) -> T; + fn bar(&self) {} +} + +impl Trait for () { + fn foo(&self) -> u32 { + ${0:todo!()} + } + +} +"#####, + ) +} + +#[test] +fn doctest_add_turbo_fish() { + check_doc_test( + "add_turbo_fish", + r#####" +fn make() -> T { todo!() } +fn main() { + let x = make<|>(); +} +"#####, + r#####" +fn make() -> T { todo!() } +fn main() { + let x = make::<${0:_}>(); +} +"#####, + ) +} + +#[test] +fn doctest_apply_demorgan() { + check_doc_test( + "apply_demorgan", + r#####" +fn main() { + if x != 4 ||<|> !y {} +} +"#####, + r#####" +fn main() { + if !(x == 4 && y) {} +} +"#####, + ) +} + +#[test] 
+fn doctest_auto_import() { + check_doc_test( + "auto_import", + r#####" +fn main() { + let map = HashMap<|>::new(); +} +pub mod std { pub mod collections { pub struct HashMap { } } } +"#####, + r#####" +use std::collections::HashMap; + +fn main() { + let map = HashMap::new(); +} +pub mod std { pub mod collections { pub struct HashMap { } } } +"#####, + ) +} + +#[test] +fn doctest_change_return_type_to_result() { + check_doc_test( + "change_return_type_to_result", + r#####" +fn foo() -> i32<|> { 42i32 } +"#####, + r#####" +fn foo() -> Result { Ok(42i32) } +"#####, + ) +} + +#[test] +fn doctest_change_visibility() { + check_doc_test( + "change_visibility", + r#####" +<|>fn frobnicate() {} +"#####, + r#####" +pub(crate) fn frobnicate() {} +"#####, + ) +} + +#[test] +fn doctest_convert_to_guarded_return() { + check_doc_test( + "convert_to_guarded_return", + r#####" +fn main() { + <|>if cond { + foo(); + bar(); + } +} +"#####, + r#####" +fn main() { + if !cond { + return; + } + foo(); + bar(); +} +"#####, + ) +} + +#[test] +fn doctest_expand_glob_import() { + check_doc_test( + "expand_glob_import", + r#####" +mod foo { + pub struct Bar; + pub struct Baz; +} + +use foo::*<|>; + +fn qux(bar: Bar, baz: Baz) {} +"#####, + r#####" +mod foo { + pub struct Bar; + pub struct Baz; +} + +use foo::{Baz, Bar}; + +fn qux(bar: Bar, baz: Baz) {} +"#####, + ) +} + +#[test] +fn doctest_extract_struct_from_enum_variant() { + check_doc_test( + "extract_struct_from_enum_variant", + r#####" +enum A { <|>One(u32, u32) } +"#####, + r#####" +struct One(pub u32, pub u32); + +enum A { One(One) } +"#####, + ) +} + +#[test] +fn doctest_extract_variable() { + check_doc_test( + "extract_variable", + r#####" +fn main() { + <|>(1 + 2)<|> * 4; +} +"#####, + r#####" +fn main() { + let $0var_name = (1 + 2); + var_name * 4; +} +"#####, + ) +} + +#[test] +fn doctest_fill_match_arms() { + check_doc_test( + "fill_match_arms", + r#####" +enum Action { Move { distance: u32 }, Stop } + +fn handle(action: 
Action) { + match action { + <|> + } +} +"#####, + r#####" +enum Action { Move { distance: u32 }, Stop } + +fn handle(action: Action) { + match action { + $0Action::Move { distance } => {} + Action::Stop => {} + } +} +"#####, + ) +} + +#[test] +fn doctest_fix_visibility() { + check_doc_test( + "fix_visibility", + r#####" +mod m { + fn frobnicate() {} +} +fn main() { + m::frobnicate<|>() {} +} +"#####, + r#####" +mod m { + $0pub(crate) fn frobnicate() {} +} +fn main() { + m::frobnicate() {} +} +"#####, + ) +} + +#[test] +fn doctest_flip_binexpr() { + check_doc_test( + "flip_binexpr", + r#####" +fn main() { + let _ = 90 +<|> 2; +} +"#####, + r#####" +fn main() { + let _ = 2 + 90; +} +"#####, + ) +} + +#[test] +fn doctest_flip_comma() { + check_doc_test( + "flip_comma", + r#####" +fn main() { + ((1, 2),<|> (3, 4)); +} +"#####, + r#####" +fn main() { + ((3, 4), (1, 2)); +} +"#####, + ) +} + +#[test] +fn doctest_flip_trait_bound() { + check_doc_test( + "flip_trait_bound", + r#####" +fn foo Copy>() { } +"#####, + r#####" +fn foo() { } +"#####, + ) +} + +#[test] +fn doctest_generate_derive() { + check_doc_test( + "generate_derive", + r#####" +struct Point { + x: u32, + y: u32,<|> +} +"#####, + r#####" +#[derive($0)] +struct Point { + x: u32, + y: u32, +} +"#####, + ) +} + +#[test] +fn doctest_generate_from_impl_for_enum() { + check_doc_test( + "generate_from_impl_for_enum", + r#####" +enum A { <|>One(u32) } +"#####, + r#####" +enum A { One(u32) } + +impl From for A { + fn from(v: u32) -> Self { + A::One(v) + } +} +"#####, + ) +} + +#[test] +fn doctest_generate_function() { + check_doc_test( + "generate_function", + r#####" +struct Baz; +fn baz() -> Baz { Baz } +fn foo() { + bar<|>("", baz()); +} + +"#####, + r#####" +struct Baz; +fn baz() -> Baz { Baz } +fn foo() { + bar("", baz()); +} + +fn bar(arg: &str, baz: Baz) { + ${0:todo!()} +} + +"#####, + ) +} + +#[test] +fn doctest_generate_impl() { + check_doc_test( + "generate_impl", + r#####" +struct Ctx { + data: T,<|> +} 
+"#####, + r#####" +struct Ctx { + data: T, +} + +impl Ctx { + $0 +} +"#####, + ) +} + +#[test] +fn doctest_generate_new() { + check_doc_test( + "generate_new", + r#####" +struct Ctx { + data: T,<|> +} +"#####, + r#####" +struct Ctx { + data: T, +} + +impl Ctx { + fn $0new(data: T) -> Self { Self { data } } +} + +"#####, + ) +} + +#[test] +fn doctest_inline_local_variable() { + check_doc_test( + "inline_local_variable", + r#####" +fn main() { + let x<|> = 1 + 2; + x * 4; +} +"#####, + r#####" +fn main() { + (1 + 2) * 4; +} +"#####, + ) +} + +#[test] +fn doctest_introduce_named_lifetime() { + check_doc_test( + "introduce_named_lifetime", + r#####" +impl Cursor<'_<|>> { + fn node(self) -> &SyntaxNode { + match self { + Cursor::Replace(node) | Cursor::Before(node) => node, + } + } +} +"#####, + r#####" +impl<'a> Cursor<'a> { + fn node(self) -> &SyntaxNode { + match self { + Cursor::Replace(node) | Cursor::Before(node) => node, + } + } +} +"#####, + ) +} + +#[test] +fn doctest_invert_if() { + check_doc_test( + "invert_if", + r#####" +fn main() { + if<|> !y { A } else { B } +} +"#####, + r#####" +fn main() { + if y { B } else { A } +} +"#####, + ) +} + +#[test] +fn doctest_make_raw_string() { + check_doc_test( + "make_raw_string", + r#####" +fn main() { + "Hello,<|> World!"; +} +"#####, + r#####" +fn main() { + r#"Hello, World!"#; +} +"#####, + ) +} + +#[test] +fn doctest_make_usual_string() { + check_doc_test( + "make_usual_string", + r#####" +fn main() { + r#"Hello,<|> "World!""#; +} +"#####, + r#####" +fn main() { + "Hello, \"World!\""; +} +"#####, + ) +} + +#[test] +fn doctest_merge_imports() { + check_doc_test( + "merge_imports", + r#####" +use std::<|>fmt::Formatter; +use std::io; +"#####, + r#####" +use std::{fmt::Formatter, io}; +"#####, + ) +} + +#[test] +fn doctest_merge_match_arms() { + check_doc_test( + "merge_match_arms", + r#####" +enum Action { Move { distance: u32 }, Stop } + +fn handle(action: Action) { + match action { + <|>Action::Move(..) 
=> foo(), + Action::Stop => foo(), + } +} +"#####, + r#####" +enum Action { Move { distance: u32 }, Stop } + +fn handle(action: Action) { + match action { + Action::Move(..) | Action::Stop => foo(), + } +} +"#####, + ) +} + +#[test] +fn doctest_move_arm_cond_to_match_guard() { + check_doc_test( + "move_arm_cond_to_match_guard", + r#####" +enum Action { Move { distance: u32 }, Stop } + +fn handle(action: Action) { + match action { + Action::Move { distance } => <|>if distance > 10 { foo() }, + _ => (), + } +} +"#####, + r#####" +enum Action { Move { distance: u32 }, Stop } + +fn handle(action: Action) { + match action { + Action::Move { distance } if distance > 10 => foo(), + _ => (), + } +} +"#####, + ) +} + +#[test] +fn doctest_move_bounds_to_where_clause() { + check_doc_test( + "move_bounds_to_where_clause", + r#####" +fn applyF: FnOnce(T) -> U>(f: F, x: T) -> U { + f(x) +} +"#####, + r#####" +fn apply(f: F, x: T) -> U where F: FnOnce(T) -> U { + f(x) +} +"#####, + ) +} + +#[test] +fn doctest_move_guard_to_arm_body() { + check_doc_test( + "move_guard_to_arm_body", + r#####" +enum Action { Move { distance: u32 }, Stop } + +fn handle(action: Action) { + match action { + Action::Move { distance } <|>if distance > 10 => foo(), + _ => (), + } +} +"#####, + r#####" +enum Action { Move { distance: u32 }, Stop } + +fn handle(action: Action) { + match action { + Action::Move { distance } => if distance > 10 { + foo() + }, + _ => (), + } +} +"#####, + ) +} + +#[test] +fn doctest_remove_dbg() { + check_doc_test( + "remove_dbg", + r#####" +fn main() { + <|>dbg!(92); +} +"#####, + r#####" +fn main() { + 92; +} +"#####, + ) +} + +#[test] +fn doctest_remove_hash() { + check_doc_test( + "remove_hash", + r#####" +fn main() { + r#"Hello,<|> World!"#; +} +"#####, + r#####" +fn main() { + r"Hello, World!"; +} +"#####, + ) +} + +#[test] +fn doctest_remove_mut() { + check_doc_test( + "remove_mut", + r#####" +impl Walrus { + fn feed(&mut<|> self, amount: u32) {} +} +"#####, + r#####" 
+impl Walrus { + fn feed(&self, amount: u32) {} +} +"#####, + ) +} + +#[test] +fn doctest_reorder_fields() { + check_doc_test( + "reorder_fields", + r#####" +struct Foo {foo: i32, bar: i32}; +const test: Foo = <|>Foo {bar: 0, foo: 1} +"#####, + r#####" +struct Foo {foo: i32, bar: i32}; +const test: Foo = Foo {foo: 1, bar: 0} +"#####, + ) +} + +#[test] +fn doctest_replace_if_let_with_match() { + check_doc_test( + "replace_if_let_with_match", + r#####" +enum Action { Move { distance: u32 }, Stop } + +fn handle(action: Action) { + <|>if let Action::Move { distance } = action { + foo(distance) + } else { + bar() + } +} +"#####, + r#####" +enum Action { Move { distance: u32 }, Stop } + +fn handle(action: Action) { + match action { + Action::Move { distance } => foo(distance), + _ => bar(), + } +} +"#####, + ) +} + +#[test] +fn doctest_replace_let_with_if_let() { + check_doc_test( + "replace_let_with_if_let", + r#####" +enum Option { Some(T), None } + +fn main(action: Action) { + <|>let x = compute(); +} + +fn compute() -> Option { None } +"#####, + r#####" +enum Option { Some(T), None } + +fn main(action: Action) { + if let Some(x) = compute() { + } +} + +fn compute() -> Option { None } +"#####, + ) +} + +#[test] +fn doctest_replace_qualified_name_with_use() { + check_doc_test( + "replace_qualified_name_with_use", + r#####" +fn process(map: std::collections::<|>HashMap) {} +"#####, + r#####" +use std::collections::HashMap; + +fn process(map: HashMap) {} +"#####, + ) +} + +#[test] +fn doctest_replace_unwrap_with_match() { + check_doc_test( + "replace_unwrap_with_match", + r#####" +enum Result { Ok(T), Err(E) } +fn main() { + let x: Result = Result::Ok(92); + let y = x.<|>unwrap(); +} +"#####, + r#####" +enum Result { Ok(T), Err(E) } +fn main() { + let x: Result = Result::Ok(92); + let y = match x { + Ok(a) => a, + $0_ => unreachable!(), + }; +} +"#####, + ) +} + +#[test] +fn doctest_split_import() { + check_doc_test( + "split_import", + r#####" +use 
std::<|>collections::HashMap; +"#####, + r#####" +use std::{collections::HashMap}; +"#####, + ) +} + +#[test] +fn doctest_unwrap_block() { + check_doc_test( + "unwrap_block", + r#####" +fn foo() { + if true {<|> + println!("foo"); + } +} +"#####, + r#####" +fn foo() { + println!("foo"); +} +"#####, + ) +} diff --git a/crates/assists/src/utils.rs b/crates/assists/src/utils.rs new file mode 100644 index 0000000000..84ccacafe3 --- /dev/null +++ b/crates/assists/src/utils.rs @@ -0,0 +1,313 @@ +//! Assorted functions shared by several assists. +pub(crate) mod insert_use; + +use std::{iter, ops}; + +use hir::{Adt, Crate, Enum, ScopeDef, Semantics, Trait, Type}; +use ide_db::RootDatabase; +use itertools::Itertools; +use rustc_hash::FxHashSet; +use syntax::{ + ast::{self, make, NameOwner}, + AstNode, + SyntaxKind::*, + SyntaxNode, TextSize, T, +}; + +use crate::assist_config::SnippetCap; + +pub(crate) use insert_use::{find_insert_use_container, insert_use_statement}; + +pub(crate) fn unwrap_trivial_block(block: ast::BlockExpr) -> ast::Expr { + extract_trivial_expression(&block) + .filter(|expr| !expr.syntax().text().contains_char('\n')) + .unwrap_or_else(|| block.into()) +} + +pub fn extract_trivial_expression(block: &ast::BlockExpr) -> Option { + let has_anything_else = |thing: &SyntaxNode| -> bool { + let mut non_trivial_children = + block.syntax().children_with_tokens().filter(|it| match it.kind() { + WHITESPACE | T!['{'] | T!['}'] => false, + _ => it.as_node() != Some(thing), + }); + non_trivial_children.next().is_some() + }; + + if let Some(expr) = block.expr() { + if has_anything_else(expr.syntax()) { + return None; + } + return Some(expr); + } + // Unwrap `{ continue; }` + let (stmt,) = block.statements().next_tuple()?; + if let ast::Stmt::ExprStmt(expr_stmt) = stmt { + if has_anything_else(expr_stmt.syntax()) { + return None; + } + let expr = expr_stmt.expr()?; + match expr.syntax().kind() { + CONTINUE_EXPR | BREAK_EXPR | RETURN_EXPR => return Some(expr), + _ => 
(), + } + } + None +} + +#[derive(Clone, Copy, Debug)] +pub(crate) enum Cursor<'a> { + Replace(&'a SyntaxNode), + Before(&'a SyntaxNode), +} + +impl<'a> Cursor<'a> { + fn node(self) -> &'a SyntaxNode { + match self { + Cursor::Replace(node) | Cursor::Before(node) => node, + } + } +} + +pub(crate) fn render_snippet(_cap: SnippetCap, node: &SyntaxNode, cursor: Cursor) -> String { + assert!(cursor.node().ancestors().any(|it| it == *node)); + let range = cursor.node().text_range() - node.text_range().start(); + let range: ops::Range = range.into(); + + let mut placeholder = cursor.node().to_string(); + escape(&mut placeholder); + let tab_stop = match cursor { + Cursor::Replace(placeholder) => format!("${{0:{}}}", placeholder), + Cursor::Before(placeholder) => format!("$0{}", placeholder), + }; + + let mut buf = node.to_string(); + buf.replace_range(range, &tab_stop); + return buf; + + fn escape(buf: &mut String) { + stdx::replace(buf, '{', r"\{"); + stdx::replace(buf, '}', r"\}"); + stdx::replace(buf, '$', r"\$"); + } +} + +pub fn get_missing_assoc_items( + sema: &Semantics, + impl_def: &ast::Impl, +) -> Vec { + // Names must be unique between constants and functions. However, type aliases + // may share the same name as a function or constant. 
+ let mut impl_fns_consts = FxHashSet::default(); + let mut impl_type = FxHashSet::default(); + + if let Some(item_list) = impl_def.assoc_item_list() { + for item in item_list.assoc_items() { + match item { + ast::AssocItem::Fn(f) => { + if let Some(n) = f.name() { + impl_fns_consts.insert(n.syntax().to_string()); + } + } + + ast::AssocItem::TypeAlias(t) => { + if let Some(n) = t.name() { + impl_type.insert(n.syntax().to_string()); + } + } + + ast::AssocItem::Const(c) => { + if let Some(n) = c.name() { + impl_fns_consts.insert(n.syntax().to_string()); + } + } + ast::AssocItem::MacroCall(_) => (), + } + } + } + + resolve_target_trait(sema, impl_def).map_or(vec![], |target_trait| { + target_trait + .items(sema.db) + .iter() + .filter(|i| match i { + hir::AssocItem::Function(f) => { + !impl_fns_consts.contains(&f.name(sema.db).to_string()) + } + hir::AssocItem::TypeAlias(t) => !impl_type.contains(&t.name(sema.db).to_string()), + hir::AssocItem::Const(c) => c + .name(sema.db) + .map(|n| !impl_fns_consts.contains(&n.to_string())) + .unwrap_or_default(), + }) + .cloned() + .collect() + }) +} + +pub(crate) fn resolve_target_trait( + sema: &Semantics, + impl_def: &ast::Impl, +) -> Option { + let ast_path = + impl_def.trait_().map(|it| it.syntax().clone()).and_then(ast::PathType::cast)?.path()?; + + match sema.resolve_path(&ast_path) { + Some(hir::PathResolution::Def(hir::ModuleDef::Trait(def))) => Some(def), + _ => None, + } +} + +pub(crate) fn vis_offset(node: &SyntaxNode) -> TextSize { + node.children_with_tokens() + .find(|it| !matches!(it.kind(), WHITESPACE | COMMENT | ATTR)) + .map(|it| it.text_range().start()) + .unwrap_or_else(|| node.text_range().start()) +} + +pub(crate) fn invert_boolean_expression(expr: ast::Expr) -> ast::Expr { + if let Some(expr) = invert_special_case(&expr) { + return expr; + } + make::expr_prefix(T![!], expr) +} + +fn invert_special_case(expr: &ast::Expr) -> Option { + match expr { + ast::Expr::BinExpr(bin) => match bin.op_kind()? 
{ + ast::BinOp::NegatedEqualityTest => bin.replace_op(T![==]).map(|it| it.into()), + ast::BinOp::EqualityTest => bin.replace_op(T![!=]).map(|it| it.into()), + _ => None, + }, + ast::Expr::PrefixExpr(pe) if pe.op_kind()? == ast::PrefixOp::Not => pe.expr(), + // FIXME: + // ast::Expr::Literal(true | false ) + _ => None, + } +} + +#[derive(Clone, Copy)] +pub enum TryEnum { + Result, + Option, +} + +impl TryEnum { + const ALL: [TryEnum; 2] = [TryEnum::Option, TryEnum::Result]; + + pub fn from_ty(sema: &Semantics, ty: &Type) -> Option { + let enum_ = match ty.as_adt() { + Some(Adt::Enum(it)) => it, + _ => return None, + }; + TryEnum::ALL.iter().find_map(|&var| { + if &enum_.name(sema.db).to_string() == var.type_name() { + return Some(var); + } + None + }) + } + + pub(crate) fn happy_case(self) -> &'static str { + match self { + TryEnum::Result => "Ok", + TryEnum::Option => "Some", + } + } + + pub(crate) fn sad_pattern(self) -> ast::Pat { + match self { + TryEnum::Result => make::tuple_struct_pat( + make::path_unqualified(make::path_segment(make::name_ref("Err"))), + iter::once(make::wildcard_pat().into()), + ) + .into(), + TryEnum::Option => make::ident_pat(make::name("None")).into(), + } + } + + fn type_name(self) -> &'static str { + match self { + TryEnum::Result => "Result", + TryEnum::Option => "Option", + } + } +} + +/// Helps with finding well-know things inside the standard library. This is +/// somewhat similar to the known paths infra inside hir, but it different; We +/// want to make sure that IDE specific paths don't become interesting inside +/// the compiler itself as well. 
+pub(crate) struct FamousDefs<'a, 'b>(pub(crate) &'a Semantics<'b, RootDatabase>, pub(crate) Crate); + +#[allow(non_snake_case)] +impl FamousDefs<'_, '_> { + #[cfg(test)] + pub(crate) const FIXTURE: &'static str = r#"//- /libcore.rs crate:core +pub mod convert { + pub trait From { + fn from(T) -> Self; + } +} + +pub mod option { + pub enum Option { None, Some(T)} +} + +pub mod prelude { + pub use crate::{convert::From, option::Option::{self, *}}; +} +#[prelude_import] +pub use prelude::*; +"#; + + pub(crate) fn core_convert_From(&self) -> Option { + self.find_trait("core:convert:From") + } + + pub(crate) fn core_option_Option(&self) -> Option { + self.find_enum("core:option:Option") + } + + fn find_trait(&self, path: &str) -> Option { + match self.find_def(path)? { + hir::ScopeDef::ModuleDef(hir::ModuleDef::Trait(it)) => Some(it), + _ => None, + } + } + + fn find_enum(&self, path: &str) -> Option { + match self.find_def(path)? { + hir::ScopeDef::ModuleDef(hir::ModuleDef::Adt(hir::Adt::Enum(it))) => Some(it), + _ => None, + } + } + + fn find_def(&self, path: &str) -> Option { + let db = self.0.db; + let mut path = path.split(':'); + let trait_ = path.next_back()?; + let std_crate = path.next()?; + let std_crate = self + .1 + .dependencies(db) + .into_iter() + .find(|dep| &dep.name.to_string() == std_crate)? + .krate; + + let mut module = std_crate.root_module(db); + for segment in path { + module = module.children(db).find_map(|child| { + let name = child.name(db)?; + if &name.to_string() == segment { + Some(child) + } else { + None + } + })?; + } + let def = + module.scope(db, None).into_iter().find(|(name, _def)| &name.to_string() == trait_)?.1; + Some(def) + } +} diff --git a/crates/assists/src/utils/insert_use.rs b/crates/assists/src/utils/insert_use.rs new file mode 100644 index 0000000000..50a62ee829 --- /dev/null +++ b/crates/assists/src/utils/insert_use.rs @@ -0,0 +1,547 @@ +//! Handle syntactic aspects of inserting a new `use`. 
+// FIXME: rewrite according to the plan, outlined in +// https://github.com/rust-analyzer/rust-analyzer/issues/3301#issuecomment-592931553 + +use std::iter::successors; + +use either::Either; +use hir::{self, ModPath}; +use syntax::{ + ast::{self, NameOwner, VisibilityOwner}, + AstNode, AstToken, Direction, SmolStr, + SyntaxKind::{PATH, PATH_SEGMENT}, + SyntaxNode, SyntaxToken, T, +}; +use text_edit::TextEditBuilder; + +use crate::assist_context::AssistContext; + +/// Determines the containing syntax node in which to insert a `use` statement affecting `position`. +pub(crate) fn find_insert_use_container( + position: &SyntaxNode, + ctx: &AssistContext, +) -> Option> { + ctx.sema.ancestors_with_macros(position.clone()).find_map(|n| { + if let Some(module) = ast::Module::cast(n.clone()) { + return module.item_list().map(|it| Either::Left(it)); + } + Some(Either::Right(ast::SourceFile::cast(n)?)) + }) +} + +/// Creates and inserts a use statement for the given path to import. +/// The use statement is inserted in the scope most appropriate to the +/// the cursor position given, additionally merged with the existing use imports. 
+pub(crate) fn insert_use_statement( + // Ideally the position of the cursor, used to + position: &SyntaxNode, + path_to_import: &ModPath, + ctx: &AssistContext, + builder: &mut TextEditBuilder, +) { + let target = path_to_import.to_string().split("::").map(SmolStr::new).collect::>(); + let container = find_insert_use_container(position, ctx); + + if let Some(container) = container { + let syntax = container.either(|l| l.syntax().clone(), |r| r.syntax().clone()); + let action = best_action_for_target(syntax, position.clone(), &target); + make_assist(&action, &target, builder); + } +} + +fn collect_path_segments_raw( + segments: &mut Vec, + mut path: ast::Path, +) -> Option { + let oldlen = segments.len(); + loop { + let mut children = path.syntax().children_with_tokens(); + let (first, second, third) = ( + children.next().map(|n| (n.clone(), n.kind())), + children.next().map(|n| (n.clone(), n.kind())), + children.next().map(|n| (n.clone(), n.kind())), + ); + match (first, second, third) { + (Some((subpath, PATH)), Some((_, T![::])), Some((segment, PATH_SEGMENT))) => { + path = ast::Path::cast(subpath.as_node()?.clone())?; + segments.push(ast::PathSegment::cast(segment.as_node()?.clone())?); + } + (Some((segment, PATH_SEGMENT)), _, _) => { + segments.push(ast::PathSegment::cast(segment.as_node()?.clone())?); + break; + } + (_, _, _) => return None, + } + } + // We need to reverse only the new added segments + let only_new_segments = segments.split_at_mut(oldlen).1; + only_new_segments.reverse(); + Some(segments.len() - oldlen) +} + +fn fmt_segments_raw(segments: &[SmolStr], buf: &mut String) { + let mut iter = segments.iter(); + if let Some(s) = iter.next() { + buf.push_str(s); + } + for s in iter { + buf.push_str("::"); + buf.push_str(s); + } +} + +/// Returns the number of common segments. 
+fn compare_path_segments(left: &[SmolStr], right: &[ast::PathSegment]) -> usize { + left.iter().zip(right).take_while(|(l, r)| compare_path_segment(l, r)).count() +} + +fn compare_path_segment(a: &SmolStr, b: &ast::PathSegment) -> bool { + if let Some(kb) = b.kind() { + match kb { + ast::PathSegmentKind::Name(nameref_b) => a == nameref_b.text(), + ast::PathSegmentKind::SelfKw => a == "self", + ast::PathSegmentKind::SuperKw => a == "super", + ast::PathSegmentKind::CrateKw => a == "crate", + ast::PathSegmentKind::Type { .. } => false, // not allowed in imports + } + } else { + false + } +} + +fn compare_path_segment_with_name(a: &SmolStr, b: &ast::Name) -> bool { + a == b.text() +} + +#[derive(Clone, Debug)] +enum ImportAction { + Nothing, + // Add a brand new use statement. + AddNewUse { + anchor: Option, // anchor node + add_after_anchor: bool, + }, + + // To split an existing use statement creating a nested import. + AddNestedImport { + // how may segments matched with the target path + common_segments: usize, + path_to_split: ast::Path, + // the first segment of path_to_split we want to add into the new nested list + first_segment_to_split: Option, + // Wether to add 'self' in addition to the target path + add_self: bool, + }, + // To add the target path to an existing nested import tree list. 
+ AddInTreeList { + common_segments: usize, + // The UseTreeList where to add the target path + tree_list: ast::UseTreeList, + add_self: bool, + }, +} + +impl ImportAction { + fn add_new_use(anchor: Option, add_after_anchor: bool) -> Self { + ImportAction::AddNewUse { anchor, add_after_anchor } + } + + fn add_nested_import( + common_segments: usize, + path_to_split: ast::Path, + first_segment_to_split: Option, + add_self: bool, + ) -> Self { + ImportAction::AddNestedImport { + common_segments, + path_to_split, + first_segment_to_split, + add_self, + } + } + + fn add_in_tree_list( + common_segments: usize, + tree_list: ast::UseTreeList, + add_self: bool, + ) -> Self { + ImportAction::AddInTreeList { common_segments, tree_list, add_self } + } + + fn better(left: ImportAction, right: ImportAction) -> ImportAction { + if left.is_better(&right) { + left + } else { + right + } + } + + fn is_better(&self, other: &ImportAction) -> bool { + match (self, other) { + (ImportAction::Nothing, _) => true, + (ImportAction::AddInTreeList { .. }, ImportAction::Nothing) => false, + ( + ImportAction::AddNestedImport { common_segments: n, .. }, + ImportAction::AddInTreeList { common_segments: m, .. }, + ) + | ( + ImportAction::AddInTreeList { common_segments: n, .. }, + ImportAction::AddNestedImport { common_segments: m, .. }, + ) + | ( + ImportAction::AddInTreeList { common_segments: n, .. }, + ImportAction::AddInTreeList { common_segments: m, .. }, + ) + | ( + ImportAction::AddNestedImport { common_segments: n, .. }, + ImportAction::AddNestedImport { common_segments: m, .. }, + ) => n > m, + (ImportAction::AddInTreeList { .. }, _) => true, + (ImportAction::AddNestedImport { .. }, ImportAction::Nothing) => false, + (ImportAction::AddNestedImport { .. }, _) => true, + (ImportAction::AddNewUse { .. }, _) => false, + } + } +} + +// Find out the best ImportAction to import target path against current_use_tree. 
+// If current_use_tree has a nested import the function gets called recursively on every UseTree inside a UseTreeList. +fn walk_use_tree_for_best_action( + current_path_segments: &mut Vec, // buffer containing path segments + current_parent_use_tree_list: Option, // will be Some value if we are in a nested import + current_use_tree: ast::UseTree, // the use tree we are currently examinating + target: &[SmolStr], // the path we want to import +) -> ImportAction { + // We save the number of segments in the buffer so we can restore the correct segments + // before returning. Recursive call will add segments so we need to delete them. + let prev_len = current_path_segments.len(); + + let tree_list = current_use_tree.use_tree_list(); + let alias = current_use_tree.rename(); + + let path = match current_use_tree.path() { + Some(path) => path, + None => { + // If the use item don't have a path, it means it's broken (syntax error) + return ImportAction::add_new_use( + current_use_tree + .syntax() + .ancestors() + .find_map(ast::Use::cast) + .map(|it| it.syntax().clone()), + true, + ); + } + }; + + // This can happen only if current_use_tree is a direct child of a UseItem + if let Some(name) = alias.and_then(|it| it.name()) { + if compare_path_segment_with_name(&target[0], &name) { + return ImportAction::Nothing; + } + } + + collect_path_segments_raw(current_path_segments, path.clone()); + + // We compare only the new segments added in the line just above. + // The first prev_len segments were already compared in 'parent' recursive calls. 
+ let left = target.split_at(prev_len).1; + let right = current_path_segments.split_at(prev_len).1; + let common = compare_path_segments(left, &right); + let mut action = match common { + 0 => ImportAction::add_new_use( + // e.g: target is std::fmt and we can have + // use foo::bar + // We add a brand new use statement + current_use_tree + .syntax() + .ancestors() + .find_map(ast::Use::cast) + .map(|it| it.syntax().clone()), + true, + ), + common if common == left.len() && left.len() == right.len() => { + // e.g: target is std::fmt and we can have + // 1- use std::fmt; + // 2- use std::fmt::{ ... } + if let Some(list) = tree_list { + // In case 2 we need to add self to the nested list + // unless it's already there + let has_self = list.use_trees().map(|it| it.path()).any(|p| { + p.and_then(|it| it.segment()) + .and_then(|it| it.kind()) + .filter(|k| *k == ast::PathSegmentKind::SelfKw) + .is_some() + }); + + if has_self { + ImportAction::Nothing + } else { + ImportAction::add_in_tree_list(current_path_segments.len(), list, true) + } + } else { + // Case 1 + ImportAction::Nothing + } + } + common if common != left.len() && left.len() == right.len() => { + // e.g: target is std::fmt and we have + // use std::io; + // We need to split. + let segments_to_split = current_path_segments.split_at(prev_len + common).1; + ImportAction::add_nested_import( + prev_len + common, + path, + Some(segments_to_split[0].clone()), + false, + ) + } + common if common == right.len() && left.len() > right.len() => { + // e.g: target is std::fmt and we can have + // 1- use std; + // 2- use std::{ ... 
}; + + // fallback action + let mut better_action = ImportAction::add_new_use( + current_use_tree + .syntax() + .ancestors() + .find_map(ast::Use::cast) + .map(|it| it.syntax().clone()), + true, + ); + if let Some(list) = tree_list { + // Case 2, check recursively if the path is already imported in the nested list + for u in list.use_trees() { + let child_action = walk_use_tree_for_best_action( + current_path_segments, + Some(list.clone()), + u, + target, + ); + if child_action.is_better(&better_action) { + better_action = child_action; + if let ImportAction::Nothing = better_action { + return better_action; + } + } + } + } else { + // Case 1, split adding self + better_action = ImportAction::add_nested_import(prev_len + common, path, None, true) + } + better_action + } + common if common == left.len() && left.len() < right.len() => { + // e.g: target is std::fmt and we can have + // use std::fmt::Debug; + let segments_to_split = current_path_segments.split_at(prev_len + common).1; + ImportAction::add_nested_import( + prev_len + common, + path, + Some(segments_to_split[0].clone()), + true, + ) + } + common if common < left.len() && common < right.len() => { + // e.g: target is std::fmt::nested::Debug + // use std::fmt::Display + let segments_to_split = current_path_segments.split_at(prev_len + common).1; + ImportAction::add_nested_import( + prev_len + common, + path, + Some(segments_to_split[0].clone()), + false, + ) + } + _ => unreachable!(), + }; + + // If we are inside a UseTreeList adding a use statement become adding to the existing + // tree list. + action = match (current_parent_use_tree_list, action.clone()) { + (Some(use_tree_list), ImportAction::AddNewUse { .. 
}) => { + ImportAction::add_in_tree_list(prev_len, use_tree_list, false) + } + (_, _) => action, + }; + + // We remove the segments added + current_path_segments.truncate(prev_len); + action +} + +fn best_action_for_target( + container: SyntaxNode, + anchor: SyntaxNode, + target: &[SmolStr], +) -> ImportAction { + let mut storage = Vec::with_capacity(16); // this should be the only allocation + let best_action = container + .children() + .filter_map(ast::Use::cast) + .filter(|u| u.visibility().is_none()) + .filter_map(|it| it.use_tree()) + .map(|u| walk_use_tree_for_best_action(&mut storage, None, u, target)) + .fold(None, |best, a| match best { + Some(best) => Some(ImportAction::better(best, a)), + None => Some(a), + }); + + match best_action { + Some(action) => action, + None => { + // We have no action and no UseItem was found in container so we find + // another item and we use it as anchor. + // If there are no items above, we choose the target path itself as anchor. + // todo: we should include even whitespace blocks as anchor candidates + let anchor = container.children().next().or_else(|| Some(anchor)); + + let add_after_anchor = anchor + .clone() + .and_then(ast::Attr::cast) + .map(|attr| attr.kind() == ast::AttrKind::Inner) + .unwrap_or(false); + ImportAction::add_new_use(anchor, add_after_anchor) + } + } +} + +fn make_assist(action: &ImportAction, target: &[SmolStr], edit: &mut TextEditBuilder) { + match action { + ImportAction::AddNewUse { anchor, add_after_anchor } => { + make_assist_add_new_use(anchor, *add_after_anchor, target, edit) + } + ImportAction::AddInTreeList { common_segments, tree_list, add_self } => { + // We know that the first n segments already exist in the use statement we want + // to modify, so we want to add only the last target.len() - n segments.
+ let segments_to_add = target.split_at(*common_segments).1; + make_assist_add_in_tree_list(tree_list, segments_to_add, *add_self, edit) + } + ImportAction::AddNestedImport { + common_segments, + path_to_split, + first_segment_to_split, + add_self, + } => { + let segments_to_add = target.split_at(*common_segments).1; + make_assist_add_nested_import( + path_to_split, + first_segment_to_split, + segments_to_add, + *add_self, + edit, + ) + } + _ => {} + } +} + +fn make_assist_add_new_use( + anchor: &Option, + after: bool, + target: &[SmolStr], + edit: &mut TextEditBuilder, +) { + if let Some(anchor) = anchor { + let indent = leading_indent(anchor); + let mut buf = String::new(); + if after { + buf.push_str("\n"); + if let Some(spaces) = &indent { + buf.push_str(spaces); + } + } + buf.push_str("use "); + fmt_segments_raw(target, &mut buf); + buf.push_str(";"); + if !after { + buf.push_str("\n\n"); + if let Some(spaces) = &indent { + buf.push_str(&spaces); + } + } + let position = if after { anchor.text_range().end() } else { anchor.text_range().start() }; + edit.insert(position, buf); + } +} + +fn make_assist_add_in_tree_list( + tree_list: &ast::UseTreeList, + target: &[SmolStr], + add_self: bool, + edit: &mut TextEditBuilder, +) { + let last = tree_list.use_trees().last(); + if let Some(last) = last { + let mut buf = String::new(); + let comma = last.syntax().siblings(Direction::Next).find(|n| n.kind() == T![,]); + let offset = if let Some(comma) = comma { + comma.text_range().end() + } else { + buf.push_str(","); + last.syntax().text_range().end() + }; + if add_self { + buf.push_str(" self") + } else { + buf.push_str(" "); + } + fmt_segments_raw(target, &mut buf); + edit.insert(offset, buf); + } else { + } +} + +fn make_assist_add_nested_import( + path: &ast::Path, + first_segment_to_split: &Option, + target: &[SmolStr], + add_self: bool, + edit: &mut TextEditBuilder, +) { + let use_tree = path.syntax().ancestors().find_map(ast::UseTree::cast); + if let 
Some(use_tree) = use_tree { + let (start, add_colon_colon) = if let Some(first_segment_to_split) = first_segment_to_split + { + (first_segment_to_split.syntax().text_range().start(), false) + } else { + (use_tree.syntax().text_range().end(), true) + }; + let end = use_tree.syntax().text_range().end(); + + let mut buf = String::new(); + if add_colon_colon { + buf.push_str("::"); + } + buf.push_str("{"); + if add_self { + buf.push_str("self, "); + } + fmt_segments_raw(target, &mut buf); + if !target.is_empty() { + buf.push_str(", "); + } + edit.insert(start, buf); + edit.insert(end, "}".to_string()); + } +} + +/// If the node is on the beginning of the line, calculate indent. +fn leading_indent(node: &SyntaxNode) -> Option { + for token in prev_tokens(node.first_token()?) { + if let Some(ws) = ast::Whitespace::cast(token.clone()) { + let ws_text = ws.text(); + if let Some(pos) = ws_text.rfind('\n') { + return Some(ws_text[pos + 1..].into()); + } + } + if token.text().contains('\n') { + break; + } + } + return None; + fn prev_tokens(token: SyntaxToken) -> impl Iterator { + successors(token.prev_token(), |token| token.prev_token()) + } +} diff --git a/crates/base_db/Cargo.toml b/crates/base_db/Cargo.toml new file mode 100644 index 0000000000..7347d7528c --- /dev/null +++ b/crates/base_db/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "base_db" +version = "0.0.0" +license = "MIT OR Apache-2.0" +authors = ["rust-analyzer developers"] +edition = "2018" + +[lib] +doctest = false + +[dependencies] +salsa = "0.15.2" +rustc-hash = "1.1.0" + +syntax = { path = "../syntax" } +cfg = { path = "../cfg" } +profile = { path = "../profile" } +tt = { path = "../tt" } +test_utils = { path = "../test_utils" } +vfs = { path = "../vfs" } +stdx = { path = "../stdx" } diff --git a/crates/ra_db/src/cancellation.rs b/crates/base_db/src/cancellation.rs similarity index 100% rename from crates/ra_db/src/cancellation.rs rename to crates/base_db/src/cancellation.rs diff --git 
a/crates/base_db/src/fixture.rs b/crates/base_db/src/fixture.rs new file mode 100644 index 0000000000..5ff8ead0e5 --- /dev/null +++ b/crates/base_db/src/fixture.rs @@ -0,0 +1,228 @@ +//! Fixtures are strings containing rust source code with optional metadata. +//! A fixture without metadata is parsed into a single source file. +//! Use this to test functionality local to one file. +//! +//! Simple Example: +//! ``` +//! r#" +//! fn main() { +//! println!("Hello World") +//! } +//! "# +//! ``` +//! +//! Metadata can be added to a fixture after a `//-` comment. +//! The basic form is specifying filenames, +//! which is also how to define multiple files in a single test fixture +//! +//! Example using two files in the same crate: +//! ``` +//! " +//! //- /main.rs +//! mod foo; +//! fn main() { +//! foo::bar(); +//! } +//! +//! //- /foo.rs +//! pub fn bar() {} +//! " +//! ``` +//! +//! Example using two crates with one file each, with one crate depending on the other: +//! ``` +//! r#" +//! //- /main.rs crate:a deps:b +//! fn main() { +//! b::foo(); +//! } +//! //- /lib.rs crate:b +//! pub fn b() { +//! println!("Hello World") +//! } +//! "# +//! ``` +//! +//! Metadata allows specifying all settings and variables +//! that are available in a real rust project: +//! - crate names via `crate:cratename` +//! - dependencies via `deps:dep1,dep2` +//! - configuration settings via `cfg:dbg=false,opt_level=2` +//! - environment variables via `env:PATH=/bin,RUST_LOG=debug` +//! +//! Example using all available metadata: +//! ``` +//! " +//! //- /lib.rs crate:foo deps:bar,baz cfg:foo=a,bar=b env:OUTDIR=path/to,OTHER=foo +//! fn insert_source_code_here() {} +//! " +//! 
``` +use std::{str::FromStr, sync::Arc}; + +use cfg::CfgOptions; +use rustc_hash::FxHashMap; +use test_utils::{extract_range_or_offset, Fixture, RangeOrOffset, CURSOR_MARKER}; +use vfs::{file_set::FileSet, VfsPath}; + +use crate::{ + input::CrateName, CrateGraph, CrateId, Edition, Env, FileId, FilePosition, SourceDatabaseExt, + SourceRoot, SourceRootId, +}; + +pub const WORKSPACE: SourceRootId = SourceRootId(0); + +pub trait WithFixture: Default + SourceDatabaseExt + 'static { + fn with_single_file(text: &str) -> (Self, FileId) { + let mut db = Self::default(); + let (_, files) = with_files(&mut db, text); + assert_eq!(files.len(), 1); + (db, files[0]) + } + + fn with_files(ra_fixture: &str) -> Self { + let mut db = Self::default(); + let (pos, _) = with_files(&mut db, ra_fixture); + assert!(pos.is_none()); + db + } + + fn with_position(ra_fixture: &str) -> (Self, FilePosition) { + let (db, file_id, range_or_offset) = Self::with_range_or_offset(ra_fixture); + let offset = match range_or_offset { + RangeOrOffset::Range(_) => panic!(), + RangeOrOffset::Offset(it) => it, + }; + (db, FilePosition { file_id, offset }) + } + + fn with_range_or_offset(ra_fixture: &str) -> (Self, FileId, RangeOrOffset) { + let mut db = Self::default(); + let (pos, _) = with_files(&mut db, ra_fixture); + let (file_id, range_or_offset) = pos.unwrap(); + (db, file_id, range_or_offset) + } + + fn test_crate(&self) -> CrateId { + let crate_graph = self.crate_graph(); + let mut it = crate_graph.iter(); + let res = it.next().unwrap(); + assert!(it.next().is_none()); + res + } +} + +impl WithFixture for DB {} + +fn with_files( + db: &mut dyn SourceDatabaseExt, + fixture: &str, +) -> (Option<(FileId, RangeOrOffset)>, Vec) { + let fixture = Fixture::parse(fixture); + + let mut files = Vec::new(); + let mut crate_graph = CrateGraph::default(); + let mut crates = FxHashMap::default(); + let mut crate_deps = Vec::new(); + let mut default_crate_root: Option = None; + + let mut file_set = 
FileSet::default(); + let source_root_id = WORKSPACE; + let source_root_prefix = "/".to_string(); + let mut file_id = FileId(0); + + let mut file_position = None; + + for entry in fixture { + let text = if entry.text.contains(CURSOR_MARKER) { + let (range_or_offset, text) = extract_range_or_offset(&entry.text); + assert!(file_position.is_none()); + file_position = Some((file_id, range_or_offset)); + text.to_string() + } else { + entry.text.clone() + }; + + let meta = FileMeta::from(entry); + assert!(meta.path.starts_with(&source_root_prefix)); + + if let Some(krate) = meta.krate { + let crate_id = crate_graph.add_crate_root( + file_id, + meta.edition, + Some(krate.clone()), + meta.cfg, + meta.env, + Default::default(), + ); + let crate_name = CrateName::new(&krate).unwrap(); + let prev = crates.insert(crate_name.clone(), crate_id); + assert!(prev.is_none()); + for dep in meta.deps { + let dep = CrateName::new(&dep).unwrap(); + crate_deps.push((crate_name.clone(), dep)) + } + } else if meta.path == "/main.rs" || meta.path == "/lib.rs" { + assert!(default_crate_root.is_none()); + default_crate_root = Some(file_id); + } + + db.set_file_text(file_id, Arc::new(text)); + db.set_file_source_root(file_id, source_root_id); + let path = VfsPath::new_virtual_path(meta.path); + file_set.insert(file_id, path.into()); + files.push(file_id); + file_id.0 += 1; + } + + if crates.is_empty() { + let crate_root = default_crate_root.unwrap(); + crate_graph.add_crate_root( + crate_root, + Edition::Edition2018, + None, + CfgOptions::default(), + Env::default(), + Default::default(), + ); + } else { + for (from, to) in crate_deps { + let from_id = crates[&from]; + let to_id = crates[&to]; + crate_graph.add_dep(from_id, CrateName::new(&to).unwrap(), to_id).unwrap(); + } + } + + db.set_source_root(source_root_id, Arc::new(SourceRoot::new_local(file_set))); + db.set_crate_graph(Arc::new(crate_graph)); + + (file_position, files) +} + +struct FileMeta { + path: String, + krate: Option, + deps: 
Vec, + cfg: CfgOptions, + edition: Edition, + env: Env, +} + +impl From for FileMeta { + fn from(f: Fixture) -> FileMeta { + let mut cfg = CfgOptions::default(); + f.cfg_atoms.iter().for_each(|it| cfg.insert_atom(it.into())); + f.cfg_key_values.iter().for_each(|(k, v)| cfg.insert_key_value(k.into(), v.into())); + + FileMeta { + path: f.path, + krate: f.krate, + deps: f.deps, + cfg, + edition: f + .edition + .as_ref() + .map_or(Edition::Edition2018, |v| Edition::from_str(&v).unwrap()), + env: f.env.into_iter().collect(), + } + } +} diff --git a/crates/base_db/src/input.rs b/crates/base_db/src/input.rs new file mode 100644 index 0000000000..f3d65cdf02 --- /dev/null +++ b/crates/base_db/src/input.rs @@ -0,0 +1,453 @@ +//! This module specifies the input to rust-analyzer. In some sense, this is +//! **the** most important module, because all other fancy stuff is strictly +//! derived from this input. +//! +//! Note that neither this module, nor any other part of the analyzer's core do +//! actual IO. See `vfs` and `project_model` in the `rust-analyzer` crate for how +//! actual IO is done and lowered to input. + +use std::{fmt, iter::FromIterator, ops, str::FromStr, sync::Arc}; + +use cfg::CfgOptions; +use rustc_hash::{FxHashMap, FxHashSet}; +use syntax::SmolStr; +use tt::TokenExpander; +use vfs::file_set::FileSet; + +pub use vfs::FileId; + +/// Files are grouped into source roots. A source root is a directory on the +/// file systems which is watched for changes. Typically it corresponds to a +/// Rust crate. Source roots *might* be nested: in this case, a file belongs to +/// the nearest enclosing source root. Paths to files are always relative to a +/// source root, and the analyzer does not know the root path of the source root at +/// all. So, a file from one source root can't refer to a file in another source +/// root by path. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub struct SourceRootId(pub u32); + +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct SourceRoot { + /// Sysroot or crates.io library. + /// + /// Libraries are considered mostly immutable, this assumption is used to + /// optimize salsa's query structure + pub is_library: bool, + pub(crate) file_set: FileSet, +} + +impl SourceRoot { + pub fn new_local(file_set: FileSet) -> SourceRoot { + SourceRoot { is_library: false, file_set } + } + pub fn new_library(file_set: FileSet) -> SourceRoot { + SourceRoot { is_library: true, file_set } + } + pub fn iter(&self) -> impl Iterator + '_ { + self.file_set.iter() + } +} + +/// `CrateGraph` is a bit of information which turns a set of text files into a +/// number of Rust crates. Each crate is defined by the `FileId` of its root module, +/// the set of cfg flags (not yet implemented) and the set of dependencies. Note +/// that, due to cfg's, there might be several crates for a single `FileId`! As +/// in the rust-lang proper, a crate does not have a name. Instead, names are +/// specified on dependency edges. That is, a crate might be known under +/// different names in different dependent crates. +/// +/// Note that `CrateGraph` is build-system agnostic: it's a concept of the Rust +/// language proper, not a concept of the build system. In practice, we get +/// `CrateGraph` by lowering `cargo metadata` output. +#[derive(Debug, Clone, Default, PartialEq, Eq)] +pub struct CrateGraph { + arena: FxHashMap, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct CrateId(pub u32); + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct CrateName(SmolStr); + +impl CrateName { + /// Creates a crate name, checking for dashes in the string provided. + /// Dashes are not allowed in the crate names, + /// hence the input string is returned as `Err` for those cases. 
+ pub fn new(name: &str) -> Result { + if name.contains('-') { + Err(name) + } else { + Ok(Self(SmolStr::new(name))) + } + } + + /// Creates a crate name, unconditionally replacing the dashes with underscores. + pub fn normalize_dashes(name: &str) -> CrateName { + Self(SmolStr::new(name.replace('-', "_"))) + } +} + +impl fmt::Display for CrateName { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}", self.0) + } +} + +impl ops::Deref for CrateName { + type Target = str; + fn deref(&self) -> &Self::Target { + &*self.0 + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct ProcMacroId(pub u32); + +#[derive(Debug, Clone)] +pub struct ProcMacro { + pub name: SmolStr, + pub expander: Arc, +} + +impl Eq for ProcMacro {} +impl PartialEq for ProcMacro { + fn eq(&self, other: &ProcMacro) -> bool { + self.name == other.name && Arc::ptr_eq(&self.expander, &other.expander) + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct CrateData { + pub root_file_id: FileId, + pub edition: Edition, + /// The name to display to the end user. + /// This actual crate name can be different in a particular dependent crate + /// or may even be missing for some cases, such as a dummy crate for the code snippet. 
+ pub display_name: Option, + pub cfg_options: CfgOptions, + pub env: Env, + pub dependencies: Vec, + pub proc_macro: Vec, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum Edition { + Edition2018, + Edition2015, +} + +#[derive(Default, Debug, Clone, PartialEq, Eq)] +pub struct Env { + entries: FxHashMap, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct Dependency { + pub crate_id: CrateId, + pub name: CrateName, +} + +impl CrateGraph { + pub fn add_crate_root( + &mut self, + file_id: FileId, + edition: Edition, + display_name: Option, + cfg_options: CfgOptions, + env: Env, + proc_macro: Vec<(SmolStr, Arc)>, + ) -> CrateId { + let proc_macro = + proc_macro.into_iter().map(|(name, it)| ProcMacro { name, expander: it }).collect(); + + let data = CrateData { + root_file_id: file_id, + edition, + display_name, + cfg_options, + env, + proc_macro, + dependencies: Vec::new(), + }; + let crate_id = CrateId(self.arena.len() as u32); + let prev = self.arena.insert(crate_id, data); + assert!(prev.is_none()); + crate_id + } + + pub fn add_dep( + &mut self, + from: CrateId, + name: CrateName, + to: CrateId, + ) -> Result<(), CyclicDependenciesError> { + if self.dfs_find(from, to, &mut FxHashSet::default()) { + return Err(CyclicDependenciesError); + } + self.arena.get_mut(&from).unwrap().add_dep(name, to); + Ok(()) + } + + pub fn is_empty(&self) -> bool { + self.arena.is_empty() + } + + pub fn iter(&self) -> impl Iterator + '_ { + self.arena.keys().copied() + } + + /// Returns an iterator over all transitive dependencies of the given crate. 
+ pub fn transitive_deps(&self, of: CrateId) -> impl Iterator + '_ { + let mut worklist = vec![of]; + let mut deps = FxHashSet::default(); + + while let Some(krate) = worklist.pop() { + if !deps.insert(krate) { + continue; + } + + worklist.extend(self[krate].dependencies.iter().map(|dep| dep.crate_id)); + } + + deps.remove(&of); + deps.into_iter() + } + + // FIXME: this only finds one crate with the given root; we could have multiple + pub fn crate_id_for_crate_root(&self, file_id: FileId) -> Option { + let (&crate_id, _) = + self.arena.iter().find(|(_crate_id, data)| data.root_file_id == file_id)?; + Some(crate_id) + } + + /// Extends this crate graph by adding a complete disjoint second crate + /// graph. + /// + /// The ids of the crates in the `other` graph are shifted by the return + /// amount. + pub fn extend(&mut self, other: CrateGraph) -> u32 { + let start = self.arena.len() as u32; + self.arena.extend(other.arena.into_iter().map(|(id, mut data)| { + let new_id = id.shift(start); + for dep in &mut data.dependencies { + dep.crate_id = dep.crate_id.shift(start); + } + (new_id, data) + })); + start + } + + fn dfs_find(&self, target: CrateId, from: CrateId, visited: &mut FxHashSet) -> bool { + if !visited.insert(from) { + return false; + } + + if target == from { + return true; + } + + for dep in &self[from].dependencies { + let crate_id = dep.crate_id; + if self.dfs_find(target, crate_id, visited) { + return true; + } + } + false + } +} + +impl ops::Index for CrateGraph { + type Output = CrateData; + fn index(&self, crate_id: CrateId) -> &CrateData { + &self.arena[&crate_id] + } +} + +impl CrateId { + pub fn shift(self, amount: u32) -> CrateId { + CrateId(self.0 + amount) + } +} + +impl CrateData { + fn add_dep(&mut self, name: CrateName, crate_id: CrateId) { + self.dependencies.push(Dependency { name, crate_id }) + } +} + +impl FromStr for Edition { + type Err = ParseEditionError; + + fn from_str(s: &str) -> Result { + let res = match s { + "2015" => 
Edition::Edition2015, + "2018" => Edition::Edition2018, + _ => return Err(ParseEditionError { invalid_input: s.to_string() }), + }; + Ok(res) + } +} + +impl fmt::Display for Edition { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(match self { + Edition::Edition2015 => "2015", + Edition::Edition2018 => "2018", + }) + } +} + +impl FromIterator<(String, String)> for Env { + fn from_iter>(iter: T) -> Self { + Env { entries: FromIterator::from_iter(iter) } + } +} + +impl Env { + pub fn set(&mut self, env: &str, value: String) { + self.entries.insert(env.to_owned(), value); + } + + pub fn get(&self, env: &str) -> Option { + self.entries.get(env).cloned() + } +} + +#[derive(Debug)] +pub struct ParseEditionError { + invalid_input: String, +} + +impl fmt::Display for ParseEditionError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "invalid edition: {:?}", self.invalid_input) + } +} + +impl std::error::Error for ParseEditionError {} + +#[derive(Debug)] +pub struct CyclicDependenciesError; + +#[cfg(test)] +mod tests { + use super::{CfgOptions, CrateGraph, CrateName, Dependency, Edition::Edition2018, Env, FileId}; + + #[test] + fn detect_cyclic_dependency_indirect() { + let mut graph = CrateGraph::default(); + let crate1 = graph.add_crate_root( + FileId(1u32), + Edition2018, + None, + CfgOptions::default(), + Env::default(), + Default::default(), + ); + let crate2 = graph.add_crate_root( + FileId(2u32), + Edition2018, + None, + CfgOptions::default(), + Env::default(), + Default::default(), + ); + let crate3 = graph.add_crate_root( + FileId(3u32), + Edition2018, + None, + CfgOptions::default(), + Env::default(), + Default::default(), + ); + assert!(graph.add_dep(crate1, CrateName::new("crate2").unwrap(), crate2).is_ok()); + assert!(graph.add_dep(crate2, CrateName::new("crate3").unwrap(), crate3).is_ok()); + assert!(graph.add_dep(crate3, CrateName::new("crate1").unwrap(), crate1).is_err()); + } + + #[test] + fn 
detect_cyclic_dependency_direct() { + let mut graph = CrateGraph::default(); + let crate1 = graph.add_crate_root( + FileId(1u32), + Edition2018, + None, + CfgOptions::default(), + Env::default(), + Default::default(), + ); + let crate2 = graph.add_crate_root( + FileId(2u32), + Edition2018, + None, + CfgOptions::default(), + Env::default(), + Default::default(), + ); + assert!(graph.add_dep(crate1, CrateName::new("crate2").unwrap(), crate2).is_ok()); + assert!(graph.add_dep(crate2, CrateName::new("crate2").unwrap(), crate2).is_err()); + } + + #[test] + fn it_works() { + let mut graph = CrateGraph::default(); + let crate1 = graph.add_crate_root( + FileId(1u32), + Edition2018, + None, + CfgOptions::default(), + Env::default(), + Default::default(), + ); + let crate2 = graph.add_crate_root( + FileId(2u32), + Edition2018, + None, + CfgOptions::default(), + Env::default(), + Default::default(), + ); + let crate3 = graph.add_crate_root( + FileId(3u32), + Edition2018, + None, + CfgOptions::default(), + Env::default(), + Default::default(), + ); + assert!(graph.add_dep(crate1, CrateName::new("crate2").unwrap(), crate2).is_ok()); + assert!(graph.add_dep(crate2, CrateName::new("crate3").unwrap(), crate3).is_ok()); + } + + #[test] + fn dashes_are_normalized() { + let mut graph = CrateGraph::default(); + let crate1 = graph.add_crate_root( + FileId(1u32), + Edition2018, + None, + CfgOptions::default(), + Env::default(), + Default::default(), + ); + let crate2 = graph.add_crate_root( + FileId(2u32), + Edition2018, + None, + CfgOptions::default(), + Env::default(), + Default::default(), + ); + assert!(graph + .add_dep(crate1, CrateName::normalize_dashes("crate-name-with-dashes"), crate2) + .is_ok()); + assert_eq!( + graph[crate1].dependencies, + vec![Dependency { + crate_id: crate2, + name: CrateName::new("crate_name_with_dashes").unwrap() + }] + ); + } +} diff --git a/crates/base_db/src/lib.rs b/crates/base_db/src/lib.rs new file mode 100644 index 0000000000..ee34158506 --- 
/dev/null +++ b/crates/base_db/src/lib.rs @@ -0,0 +1,167 @@ +//! base_db defines basic database traits. The concrete DB is defined by ide. +mod cancellation; +mod input; +pub mod fixture; + +use std::{panic, sync::Arc}; + +use rustc_hash::FxHashSet; +use syntax::{ast, Parse, SourceFile, TextRange, TextSize}; + +pub use crate::{ + cancellation::Canceled, + input::{ + CrateData, CrateGraph, CrateId, CrateName, Dependency, Edition, Env, FileId, ProcMacroId, + SourceRoot, SourceRootId, + }, +}; +pub use salsa; +pub use vfs::{file_set::FileSet, VfsPath}; + +#[macro_export] +macro_rules! impl_intern_key { + ($name:ident) => { + impl $crate::salsa::InternKey for $name { + fn from_intern_id(v: $crate::salsa::InternId) -> Self { + $name(v) + } + fn as_intern_id(&self) -> $crate::salsa::InternId { + self.0 + } + } + }; +} + +pub trait Upcast { + fn upcast(&self) -> &T; +} + +pub trait CheckCanceled { + /// Aborts current query if there are pending changes. + /// + /// rust-analyzer needs to be able to answer semantic questions about the + /// code while the code is being modified. A common problem is that a + /// long-running query is being calculated when a new change arrives. + /// + /// We can't just apply the change immediately: this will cause the pending + /// query to see inconsistent state (it will observe an absence of + /// repeatable read). So what we do is we **cancel** all pending queries + /// before applying the change. + /// + /// We implement cancellation by panicking with a special value and catching + /// it on the API boundary. Salsa explicitly supports this use-case. 
+ fn check_canceled(&self); + + fn catch_canceled(&self, f: F) -> Result + where + Self: Sized + panic::RefUnwindSafe, + F: FnOnce(&Self) -> T + panic::UnwindSafe, + { + panic::catch_unwind(|| f(self)).map_err(|err| match err.downcast::() { + Ok(canceled) => *canceled, + Err(payload) => panic::resume_unwind(payload), + }) + } +} + +impl CheckCanceled for T { + fn check_canceled(&self) { + if self.salsa_runtime().is_current_revision_canceled() { + Canceled::throw() + } + } +} + +#[derive(Clone, Copy, Debug)] +pub struct FilePosition { + pub file_id: FileId, + pub offset: TextSize, +} + +#[derive(Clone, Copy, Debug, Eq, PartialEq)] +pub struct FileRange { + pub file_id: FileId, + pub range: TextRange, +} + +pub const DEFAULT_LRU_CAP: usize = 128; + +pub trait FileLoader { + /// Text of the file. + fn file_text(&self, file_id: FileId) -> Arc; + /// Note that we intentionally accept a `&str` and not a `&Path` here. This + /// method exists to handle `#[path = "/some/path.rs"] mod foo;` and such, + /// so the input is guaranteed to be utf-8 string. One might be tempted to + /// introduce some kind of "utf-8 path with / separators", but that's a bad idea. Behold + /// `#[path = "C://no/way"]` + fn resolve_path(&self, anchor: FileId, path: &str) -> Option; + fn relevant_crates(&self, file_id: FileId) -> Arc>; +} + +/// Database which stores all significant input facts: source code and project +/// model. Everything else in rust-analyzer is derived from these queries. +#[salsa::query_group(SourceDatabaseStorage)] +pub trait SourceDatabase: CheckCanceled + FileLoader + std::fmt::Debug { + // Parses the file into the syntax tree. + #[salsa::invoke(parse_query)] + fn parse(&self, file_id: FileId) -> Parse; + + /// The crate graph. 
+ #[salsa::input] + fn crate_graph(&self) -> Arc; +} + +fn parse_query(db: &dyn SourceDatabase, file_id: FileId) -> Parse { + let _p = profile::span("parse_query").detail(|| format!("{:?}", file_id)); + let text = db.file_text(file_id); + SourceFile::parse(&*text) +} + +/// We don't want to give HIR knowledge of source roots, hence we extract these +/// methods into a separate DB. +#[salsa::query_group(SourceDatabaseExtStorage)] +pub trait SourceDatabaseExt: SourceDatabase { + #[salsa::input] + fn file_text(&self, file_id: FileId) -> Arc; + /// Path to a file, relative to the root of its source root. + /// Source root of the file. + #[salsa::input] + fn file_source_root(&self, file_id: FileId) -> SourceRootId; + /// Contents of the source root. + #[salsa::input] + fn source_root(&self, id: SourceRootId) -> Arc; + + fn source_root_crates(&self, id: SourceRootId) -> Arc>; +} + +fn source_root_crates(db: &dyn SourceDatabaseExt, id: SourceRootId) -> Arc> { + let graph = db.crate_graph(); + let res = graph + .iter() + .filter(|&krate| { + let root_file = graph[krate].root_file_id; + db.file_source_root(root_file) == id + }) + .collect::>(); + Arc::new(res) +} + +/// Silly workaround for cyclic deps between the traits +pub struct FileLoaderDelegate(pub T); + +impl FileLoader for FileLoaderDelegate<&'_ T> { + fn file_text(&self, file_id: FileId) -> Arc { + SourceDatabaseExt::file_text(self.0, file_id) + } + fn resolve_path(&self, anchor: FileId, path: &str) -> Option { + // FIXME: this *somehow* should be platform agnostic... 
+ let source_root = self.0.file_source_root(anchor); + let source_root = self.0.source_root(source_root); + source_root.file_set.resolve_path(anchor, path) + } + + fn relevant_crates(&self, file_id: FileId) -> Arc> { + let source_root = self.0.file_source_root(file_id); + self.0.source_root_crates(source_root) + } +} diff --git a/crates/cfg/Cargo.toml b/crates/cfg/Cargo.toml new file mode 100644 index 0000000000..d2ea551d18 --- /dev/null +++ b/crates/cfg/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "cfg" +version = "0.0.0" +license = "MIT OR Apache-2.0" +authors = ["rust-analyzer developers"] +edition = "2018" + +[lib] +doctest = false + +[dependencies] +rustc-hash = "1.1.0" + +tt = { path = "../tt" } + +[dev-dependencies] +mbe = { path = "../mbe" } +syntax = { path = "../syntax" } diff --git a/crates/cfg/src/cfg_expr.rs b/crates/cfg/src/cfg_expr.rs new file mode 100644 index 0000000000..336fe25bc9 --- /dev/null +++ b/crates/cfg/src/cfg_expr.rs @@ -0,0 +1,133 @@ +//! The condition expression used in `#[cfg(..)]` attributes. +//! +//! See: https://doc.rust-lang.org/reference/conditional-compilation.html#conditional-compilation + +use std::slice::Iter as SliceIter; + +use tt::SmolStr; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum CfgExpr { + Invalid, + Atom(SmolStr), + KeyValue { key: SmolStr, value: SmolStr }, + All(Vec), + Any(Vec), + Not(Box), +} + +impl CfgExpr { + pub fn parse(tt: &tt::Subtree) -> CfgExpr { + next_cfg_expr(&mut tt.token_trees.iter()).unwrap_or(CfgExpr::Invalid) + } + /// Fold the cfg by querying all basic `Atom` and `KeyValue` predicates. 
+ pub fn fold(&self, query: &dyn Fn(&SmolStr, Option<&SmolStr>) -> bool) -> Option { + match self { + CfgExpr::Invalid => None, + CfgExpr::Atom(name) => Some(query(name, None)), + CfgExpr::KeyValue { key, value } => Some(query(key, Some(value))), + CfgExpr::All(preds) => { + preds.iter().try_fold(true, |s, pred| Some(s && pred.fold(query)?)) + } + CfgExpr::Any(preds) => { + preds.iter().try_fold(false, |s, pred| Some(s || pred.fold(query)?)) + } + CfgExpr::Not(pred) => pred.fold(query).map(|s| !s), + } + } +} + +fn next_cfg_expr(it: &mut SliceIter) -> Option { + let name = match it.next() { + None => return None, + Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) => ident.text.clone(), + Some(_) => return Some(CfgExpr::Invalid), + }; + + // Peek + let ret = match it.as_slice().first() { + Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) if punct.char == '=' => { + match it.as_slice().get(1) { + Some(tt::TokenTree::Leaf(tt::Leaf::Literal(literal))) => { + it.next(); + it.next(); + // FIXME: escape? raw string? 
+ let value = + SmolStr::new(literal.text.trim_start_matches('"').trim_end_matches('"')); + CfgExpr::KeyValue { key: name, value } + } + _ => return Some(CfgExpr::Invalid), + } + } + Some(tt::TokenTree::Subtree(subtree)) => { + it.next(); + let mut sub_it = subtree.token_trees.iter(); + let mut subs = std::iter::from_fn(|| next_cfg_expr(&mut sub_it)).collect(); + match name.as_str() { + "all" => CfgExpr::All(subs), + "any" => CfgExpr::Any(subs), + "not" => CfgExpr::Not(Box::new(subs.pop().unwrap_or(CfgExpr::Invalid))), + _ => CfgExpr::Invalid, + } + } + _ => CfgExpr::Atom(name), + }; + + // Eat comma separator + if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = it.as_slice().first() { + if punct.char == ',' { + it.next(); + } + } + Some(ret) +} + +#[cfg(test)] +mod tests { + use super::*; + + use mbe::ast_to_token_tree; + use syntax::ast::{self, AstNode}; + + fn assert_parse_result(input: &str, expected: CfgExpr) { + let (tt, _) = { + let source_file = ast::SourceFile::parse(input).ok().unwrap(); + let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); + ast_to_token_tree(&tt).unwrap() + }; + let cfg = CfgExpr::parse(&tt); + assert_eq!(cfg, expected); + } + + #[test] + fn test_cfg_expr_parser() { + assert_parse_result("#![cfg(foo)]", CfgExpr::Atom("foo".into())); + assert_parse_result("#![cfg(foo,)]", CfgExpr::Atom("foo".into())); + assert_parse_result( + "#![cfg(not(foo))]", + CfgExpr::Not(Box::new(CfgExpr::Atom("foo".into()))), + ); + assert_parse_result("#![cfg(foo(bar))]", CfgExpr::Invalid); + + // Only take the first + assert_parse_result(r#"#![cfg(foo, bar = "baz")]"#, CfgExpr::Atom("foo".into())); + + assert_parse_result( + r#"#![cfg(all(foo, bar = "baz"))]"#, + CfgExpr::All(vec![ + CfgExpr::Atom("foo".into()), + CfgExpr::KeyValue { key: "bar".into(), value: "baz".into() }, + ]), + ); + + assert_parse_result( + r#"#![cfg(any(not(), all(), , bar = "baz",))]"#, + CfgExpr::Any(vec![ + 
CfgExpr::Not(Box::new(CfgExpr::Invalid)), + CfgExpr::All(vec![]), + CfgExpr::Invalid, + CfgExpr::KeyValue { key: "bar".into(), value: "baz".into() }, + ]), + ); + } +} diff --git a/crates/cfg/src/lib.rs b/crates/cfg/src/lib.rs new file mode 100644 index 0000000000..a9d50e698a --- /dev/null +++ b/crates/cfg/src/lib.rs @@ -0,0 +1,51 @@ +//! cfg defines conditional compiling options, `cfg` attibute parser and evaluator + +mod cfg_expr; + +use rustc_hash::FxHashSet; +use tt::SmolStr; + +pub use cfg_expr::CfgExpr; + +/// Configuration options used for conditional compilition on items with `cfg` attributes. +/// We have two kind of options in different namespaces: atomic options like `unix`, and +/// key-value options like `target_arch="x86"`. +/// +/// Note that for key-value options, one key can have multiple values (but not none). +/// `feature` is an example. We have both `feature="foo"` and `feature="bar"` if features +/// `foo` and `bar` are both enabled. And here, we store key-value options as a set of tuple +/// of key and value in `key_values`. 
+/// +/// See: https://doc.rust-lang.org/reference/conditional-compilation.html#set-configuration-options +#[derive(Debug, Clone, PartialEq, Eq, Default)] +pub struct CfgOptions { + atoms: FxHashSet, + key_values: FxHashSet<(SmolStr, SmolStr)>, +} + +impl CfgOptions { + pub fn check(&self, cfg: &CfgExpr) -> Option { + cfg.fold(&|key, value| match value { + None => self.atoms.contains(key), + Some(value) => self.key_values.contains(&(key.clone(), value.clone())), + }) + } + + pub fn insert_atom(&mut self, key: SmolStr) { + self.atoms.insert(key); + } + + pub fn insert_key_value(&mut self, key: SmolStr, value: SmolStr) { + self.key_values.insert((key, value)); + } + + pub fn append(&mut self, other: &CfgOptions) { + for atom in &other.atoms { + self.atoms.insert(atom.clone()); + } + + for (key, value) in &other.key_values { + self.key_values.insert((key.clone(), value.clone())); + } + } +} diff --git a/crates/expect/Cargo.toml b/crates/expect/Cargo.toml index 77775630dc..b54d3a60e3 100644 --- a/crates/expect/Cargo.toml +++ b/crates/expect/Cargo.toml @@ -1,9 +1,9 @@ [package] name = "expect" -version = "0.1.0" +version = "0.0.0" +license = "MIT OR Apache-2.0" authors = ["rust-analyzer developers"] edition = "2018" -license = "MIT OR Apache-2.0" [lib] doctest = false @@ -11,4 +11,5 @@ doctest = false [dependencies] once_cell = "1" difference = "2" + stdx = { path = "../stdx" } diff --git a/crates/expect/src/lib.rs b/crates/expect/src/lib.rs index 21a458d477..bd83895f78 100644 --- a/crates/expect/src/lib.rs +++ b/crates/expect/src/lib.rs @@ -74,7 +74,7 @@ impl fmt::Display for Position { impl Expect { pub fn assert_eq(&self, actual: &str) { let trimmed = self.trimmed(); - if &trimmed == actual { + if trimmed == actual { return; } Runtime::fail_expect(self, &trimmed, actual); diff --git a/crates/flycheck/Cargo.toml b/crates/flycheck/Cargo.toml index ff8a1e5689..262a66e4e3 100644 --- a/crates/flycheck/Cargo.toml +++ b/crates/flycheck/Cargo.toml @@ -1,9 +1,9 @@ [package] 
-edition = "2018" name = "flycheck" -version = "0.1.0" -authors = ["rust-analyzer developers"] +version = "0.0.0" license = "MIT OR Apache-2.0" +authors = ["rust-analyzer developers"] +edition = "2018" [lib] doctest = false @@ -14,4 +14,5 @@ log = "0.4.8" cargo_metadata = "0.11.1" serde_json = "1.0.48" jod-thread = "0.1.1" -ra_toolchain = { path = "../ra_toolchain" } + +toolchain = { path = "../toolchain" } diff --git a/crates/flycheck/src/lib.rs b/crates/flycheck/src/lib.rs index 7c38f5ef9d..16078d1043 100644 --- a/crates/flycheck/src/lib.rs +++ b/crates/flycheck/src/lib.rs @@ -1,4 +1,4 @@ -//! cargo_check provides the functionality needed to run `cargo check` or +//! Flycheck provides the functionality needed to run `cargo check` or //! another compatible command (f.x. clippy) in a background thread and provide //! LSP diagnostics based on the output of the command. @@ -147,6 +147,12 @@ impl FlycheckActor { // avoid busy-waiting. let cargo_handle = self.cargo_handle.take().unwrap(); let res = cargo_handle.join(); + if res.is_err() { + log::error!( + "Flycheck failed to run the following command: {:?}", + self.check_command() + ) + } self.send(Message::Progress(Progress::DidFinish(res))); } Event::CheckEvent(Some(message)) => match message { @@ -187,7 +193,7 @@ impl FlycheckActor { extra_args, features, } => { - let mut cmd = Command::new(ra_toolchain::cargo()); + let mut cmd = Command::new(toolchain::cargo()); cmd.arg(command); cmd.args(&["--workspace", "--message-format=json", "--manifest-path"]) .arg(self.workspace_root.join("Cargo.toml")); @@ -253,7 +259,7 @@ impl CargoHandle { return Err(io::Error::new( io::ErrorKind::Other, format!( - "Cargo watcher failed,the command produced no valid metadata (exit code: {:?})", + "Cargo watcher failed, the command produced no valid metadata (exit code: {:?})", exit_status ), )); diff --git a/crates/hir/Cargo.toml b/crates/hir/Cargo.toml new file mode 100644 index 0000000000..dbb2986b60 --- /dev/null +++ 
b/crates/hir/Cargo.toml @@ -0,0 +1,24 @@ +[package] +name = "hir" +version = "0.0.0" +license = "MIT OR Apache-2.0" +authors = ["rust-analyzer developers"] +edition = "2018" + +[lib] +doctest = false + +[dependencies] +log = "0.4.8" +rustc-hash = "1.1.0" +either = "1.5.3" +arrayvec = "0.5.1" +itertools = "0.9.0" + +stdx = { path = "../stdx" } +syntax = { path = "../syntax" } +base_db = { path = "../base_db" } +profile = { path = "../profile" } +hir_expand = { path = "../hir_expand" } +hir_def = { path = "../hir_def" } +hir_ty = { path = "../hir_ty" } diff --git a/crates/hir/src/code_model.rs b/crates/hir/src/code_model.rs new file mode 100644 index 0000000000..5dc3ae3b19 --- /dev/null +++ b/crates/hir/src/code_model.rs @@ -0,0 +1,1719 @@ +//! FIXME: write short doc here +use std::{iter, sync::Arc}; + +use arrayvec::ArrayVec; +use base_db::{CrateId, Edition, FileId}; +use either::Either; +use hir_def::{ + adt::ReprKind, + adt::StructKind, + adt::VariantData, + builtin_type::BuiltinType, + docs::Documentation, + expr::{BindingAnnotation, Pat, PatId}, + import_map, + per_ns::PerNs, + resolver::{HasResolver, Resolver}, + src::HasSource as _, + type_ref::{Mutability, TypeRef}, + AdtId, AssocContainerId, ConstId, DefWithBodyId, EnumId, FunctionId, GenericDefId, HasModule, + ImplId, LocalEnumVariantId, LocalFieldId, LocalModuleId, Lookup, ModuleId, StaticId, StructId, + TraitId, TypeAliasId, TypeParamId, UnionId, +}; +use hir_expand::{ + diagnostics::DiagnosticSink, + name::{name, AsName}, + MacroDefId, MacroDefKind, +}; +use hir_ty::{ + autoderef, + display::{HirDisplayError, HirFormatter}, + method_resolution, ApplicationTy, CallableDefId, Canonical, FnSig, GenericPredicate, + InEnvironment, Substs, TraitEnvironment, Ty, TyDefId, TypeCtor, +}; +use rustc_hash::FxHashSet; +use stdx::impl_from; +use syntax::{ + ast::{self, AttrsOwner, NameOwner}, + AstNode, +}; + +use crate::{ + db::{DefDatabase, HirDatabase}, + has_source::HasSource, + HirDisplay, InFile, Name, +}; + 
+/// hir::Crate describes a single crate. It's the main interface with which +/// a crate's dependencies interact. Mostly, it should be just a proxy for the +/// root module. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct Crate { + pub(crate) id: CrateId, +} + +#[derive(Debug)] +pub struct CrateDependency { + pub krate: Crate, + pub name: Name, +} + +impl Crate { + pub fn dependencies(self, db: &dyn HirDatabase) -> Vec { + db.crate_graph()[self.id] + .dependencies + .iter() + .map(|dep| { + let krate = Crate { id: dep.crate_id }; + let name = dep.as_name(); + CrateDependency { krate, name } + }) + .collect() + } + + // FIXME: add `transitive_reverse_dependencies`. + pub fn reverse_dependencies(self, db: &dyn HirDatabase) -> Vec { + let crate_graph = db.crate_graph(); + crate_graph + .iter() + .filter(|&krate| { + crate_graph[krate].dependencies.iter().any(|it| it.crate_id == self.id) + }) + .map(|id| Crate { id }) + .collect() + } + + pub fn root_module(self, db: &dyn HirDatabase) -> Module { + let module_id = db.crate_def_map(self.id).root; + Module::new(self, module_id) + } + + pub fn root_file(self, db: &dyn HirDatabase) -> FileId { + db.crate_graph()[self.id].root_file_id + } + + pub fn edition(self, db: &dyn HirDatabase) -> Edition { + db.crate_graph()[self.id].edition + } + + pub fn display_name(self, db: &dyn HirDatabase) -> Option { + db.crate_graph()[self.id].display_name.clone() + } + + pub fn query_external_importables( + self, + db: &dyn DefDatabase, + query: &str, + ) -> impl Iterator> { + import_map::search_dependencies( + db, + self.into(), + import_map::Query::new(query).anchor_end().case_sensitive().limit(40), + ) + .into_iter() + .map(|item| match item { + ItemInNs::Types(mod_id) | ItemInNs::Values(mod_id) => Either::Left(mod_id.into()), + ItemInNs::Macros(mac_id) => Either::Right(mac_id.into()), + }) + } + + pub fn all(db: &dyn HirDatabase) -> Vec { + db.crate_graph().iter().map(|id| Crate { id }).collect() + } +} + 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct Module { + pub(crate) id: ModuleId, +} + +/// The defs which can be visible in the module. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum ModuleDef { + Module(Module), + Function(Function), + Adt(Adt), + // Can't be directly declared, but can be imported. + EnumVariant(EnumVariant), + Const(Const), + Static(Static), + Trait(Trait), + TypeAlias(TypeAlias), + BuiltinType(BuiltinType), +} +impl_from!( + Module, + Function, + Adt(Struct, Enum, Union), + EnumVariant, + Const, + Static, + Trait, + TypeAlias, + BuiltinType + for ModuleDef +); + +impl ModuleDef { + pub fn module(self, db: &dyn HirDatabase) -> Option { + match self { + ModuleDef::Module(it) => it.parent(db), + ModuleDef::Function(it) => Some(it.module(db)), + ModuleDef::Adt(it) => Some(it.module(db)), + ModuleDef::EnumVariant(it) => Some(it.module(db)), + ModuleDef::Const(it) => Some(it.module(db)), + ModuleDef::Static(it) => Some(it.module(db)), + ModuleDef::Trait(it) => Some(it.module(db)), + ModuleDef::TypeAlias(it) => Some(it.module(db)), + ModuleDef::BuiltinType(_) => None, + } + } + + pub fn definition_visibility(&self, db: &dyn HirDatabase) -> Option { + let module = match self { + ModuleDef::Module(it) => it.parent(db)?, + ModuleDef::Function(it) => return Some(it.visibility(db)), + ModuleDef::Adt(it) => it.module(db), + ModuleDef::EnumVariant(it) => { + let parent = it.parent_enum(db); + let module = it.module(db); + return module.visibility_of(db, &ModuleDef::Adt(Adt::Enum(parent))); + } + ModuleDef::Const(it) => return Some(it.visibility(db)), + ModuleDef::Static(it) => it.module(db), + ModuleDef::Trait(it) => it.module(db), + ModuleDef::TypeAlias(it) => return Some(it.visibility(db)), + ModuleDef::BuiltinType(_) => return None, + }; + + module.visibility_of(db, self) + } + + pub fn name(self, db: &dyn HirDatabase) -> Option { + match self { + ModuleDef::Adt(it) => Some(it.name(db)), + ModuleDef::Trait(it) => 
Some(it.name(db)), + ModuleDef::Function(it) => Some(it.name(db)), + ModuleDef::EnumVariant(it) => Some(it.name(db)), + ModuleDef::TypeAlias(it) => Some(it.name(db)), + + ModuleDef::Module(it) => it.name(db), + ModuleDef::Const(it) => it.name(db), + ModuleDef::Static(it) => it.name(db), + + ModuleDef::BuiltinType(it) => Some(it.as_name()), + } + } +} + +pub use hir_def::{ + attr::Attrs, item_scope::ItemInNs, item_tree::ItemTreeNode, visibility::Visibility, + AssocItemId, AssocItemLoc, +}; + +impl Module { + pub(crate) fn new(krate: Crate, crate_module_id: LocalModuleId) -> Module { + Module { id: ModuleId { krate: krate.id, local_id: crate_module_id } } + } + + /// Name of this module. + pub fn name(self, db: &dyn HirDatabase) -> Option { + let def_map = db.crate_def_map(self.id.krate); + let parent = def_map[self.id.local_id].parent?; + def_map[parent].children.iter().find_map(|(name, module_id)| { + if *module_id == self.id.local_id { + Some(name.clone()) + } else { + None + } + }) + } + + /// Returns the crate this module is part of. + pub fn krate(self) -> Crate { + Crate { id: self.id.krate } + } + + /// Topmost parent of this module. Every module has a `crate_root`, but some + /// might be missing `krate`. This can happen if a module's file is not included + /// in the module tree of any target in `Cargo.toml`. + pub fn crate_root(self, db: &dyn HirDatabase) -> Module { + let def_map = db.crate_def_map(self.id.krate); + self.with_module_id(def_map.root) + } + + /// Iterates over all child modules. + pub fn children(self, db: &dyn HirDatabase) -> impl Iterator { + let def_map = db.crate_def_map(self.id.krate); + let children = def_map[self.id.local_id] + .children + .iter() + .map(|(_, module_id)| self.with_module_id(*module_id)) + .collect::>(); + children.into_iter() + } + + /// Finds a parent module. 
+ pub fn parent(self, db: &dyn HirDatabase) -> Option { + let def_map = db.crate_def_map(self.id.krate); + let parent_id = def_map[self.id.local_id].parent?; + Some(self.with_module_id(parent_id)) + } + + pub fn path_to_root(self, db: &dyn HirDatabase) -> Vec { + let mut res = vec![self]; + let mut curr = self; + while let Some(next) = curr.parent(db) { + res.push(next); + curr = next + } + res + } + + /// Returns a `ModuleScope`: a set of items, visible in this module. + pub fn scope( + self, + db: &dyn HirDatabase, + visible_from: Option, + ) -> Vec<(Name, ScopeDef)> { + db.crate_def_map(self.id.krate)[self.id.local_id] + .scope + .entries() + .filter_map(|(name, def)| { + if let Some(m) = visible_from { + let filtered = + def.filter_visibility(|vis| vis.is_visible_from(db.upcast(), m.id)); + if filtered.is_none() && !def.is_none() { + None + } else { + Some((name, filtered)) + } + } else { + Some((name, def)) + } + }) + .flat_map(|(name, def)| { + ScopeDef::all_items(def).into_iter().map(move |item| (name.clone(), item)) + }) + .collect() + } + + pub fn visibility_of(self, db: &dyn HirDatabase, def: &ModuleDef) -> Option { + db.crate_def_map(self.id.krate)[self.id.local_id].scope.visibility_of(def.clone().into()) + } + + pub fn diagnostics(self, db: &dyn HirDatabase, sink: &mut DiagnosticSink) { + let _p = profile::span("Module::diagnostics"); + let crate_def_map = db.crate_def_map(self.id.krate); + crate_def_map.add_diagnostics(db.upcast(), self.id.local_id, sink); + for decl in self.declarations(db) { + match decl { + crate::ModuleDef::Function(f) => f.diagnostics(db, sink), + crate::ModuleDef::Module(m) => { + // Only add diagnostics from inline modules + if crate_def_map[m.id.local_id].origin.is_inline() { + m.diagnostics(db, sink) + } + } + _ => (), + } + } + + for impl_def in self.impl_defs(db) { + for item in impl_def.items(db) { + if let AssocItem::Function(f) = item { + f.diagnostics(db, sink); + } + } + } + } + + pub fn declarations(self, db: &dyn 
HirDatabase) -> Vec { + let def_map = db.crate_def_map(self.id.krate); + def_map[self.id.local_id].scope.declarations().map(ModuleDef::from).collect() + } + + pub fn impl_defs(self, db: &dyn HirDatabase) -> Vec { + let def_map = db.crate_def_map(self.id.krate); + def_map[self.id.local_id].scope.impls().map(ImplDef::from).collect() + } + + pub(crate) fn with_module_id(self, module_id: LocalModuleId) -> Module { + Module::new(self.krate(), module_id) + } + + /// Finds a path that can be used to refer to the given item from within + /// this module, if possible. + pub fn find_use_path( + self, + db: &dyn DefDatabase, + item: impl Into, + ) -> Option { + hir_def::find_path::find_path(db, item.into(), self.into()) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct Field { + pub(crate) parent: VariantDef, + pub(crate) id: LocalFieldId, +} + +#[derive(Debug, PartialEq, Eq)] +pub enum FieldSource { + Named(ast::RecordField), + Pos(ast::TupleField), +} + +impl Field { + pub fn name(&self, db: &dyn HirDatabase) -> Name { + self.parent.variant_data(db).fields()[self.id].name.clone() + } + + /// Returns the type as in the signature of the struct (i.e., with + /// placeholder types for type parameters). This is good for showing + /// signature help, but not so good to actually get the type of the field + /// when you actually have a variable of the struct. 
+ pub fn signature_ty(&self, db: &dyn HirDatabase) -> Type { + let var_id = self.parent.into(); + let generic_def_id: GenericDefId = match self.parent { + VariantDef::Struct(it) => it.id.into(), + VariantDef::Union(it) => it.id.into(), + VariantDef::EnumVariant(it) => it.parent.id.into(), + }; + let substs = Substs::type_params(db, generic_def_id); + let ty = db.field_types(var_id)[self.id].clone().subst(&substs); + Type::new(db, self.parent.module(db).id.krate, var_id, ty) + } + + pub fn parent_def(&self, _db: &dyn HirDatabase) -> VariantDef { + self.parent + } +} + +impl HasVisibility for Field { + fn visibility(&self, db: &dyn HirDatabase) -> Visibility { + let variant_data = self.parent.variant_data(db); + let visibility = &variant_data.fields()[self.id].visibility; + let parent_id: hir_def::VariantId = self.parent.into(); + visibility.resolve(db.upcast(), &parent_id.resolver(db.upcast())) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct Struct { + pub(crate) id: StructId, +} + +impl Struct { + pub fn module(self, db: &dyn HirDatabase) -> Module { + Module { id: self.id.lookup(db.upcast()).container.module(db.upcast()) } + } + + pub fn krate(self, db: &dyn HirDatabase) -> Option { + Some(self.module(db).krate()) + } + + pub fn name(self, db: &dyn HirDatabase) -> Name { + db.struct_data(self.id).name.clone() + } + + pub fn fields(self, db: &dyn HirDatabase) -> Vec { + db.struct_data(self.id) + .variant_data + .fields() + .iter() + .map(|(id, _)| Field { parent: self.into(), id }) + .collect() + } + + pub fn ty(self, db: &dyn HirDatabase) -> Type { + Type::from_def(db, self.id.lookup(db.upcast()).container.module(db.upcast()).krate, self.id) + } + + pub fn repr(self, db: &dyn HirDatabase) -> Option { + db.struct_data(self.id).repr.clone() + } + + fn variant_data(self, db: &dyn HirDatabase) -> Arc { + db.struct_data(self.id).variant_data.clone() + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct Union { + pub(crate) 
id: UnionId, +} + +impl Union { + pub fn name(self, db: &dyn HirDatabase) -> Name { + db.union_data(self.id).name.clone() + } + + pub fn module(self, db: &dyn HirDatabase) -> Module { + Module { id: self.id.lookup(db.upcast()).container.module(db.upcast()) } + } + + pub fn ty(self, db: &dyn HirDatabase) -> Type { + Type::from_def(db, self.id.lookup(db.upcast()).container.module(db.upcast()).krate, self.id) + } + + pub fn fields(self, db: &dyn HirDatabase) -> Vec { + db.union_data(self.id) + .variant_data + .fields() + .iter() + .map(|(id, _)| Field { parent: self.into(), id }) + .collect() + } + + fn variant_data(self, db: &dyn HirDatabase) -> Arc { + db.union_data(self.id).variant_data.clone() + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct Enum { + pub(crate) id: EnumId, +} + +impl Enum { + pub fn module(self, db: &dyn HirDatabase) -> Module { + Module { id: self.id.lookup(db.upcast()).container.module(db.upcast()) } + } + + pub fn krate(self, db: &dyn HirDatabase) -> Option { + Some(self.module(db).krate()) + } + + pub fn name(self, db: &dyn HirDatabase) -> Name { + db.enum_data(self.id).name.clone() + } + + pub fn variants(self, db: &dyn HirDatabase) -> Vec { + db.enum_data(self.id) + .variants + .iter() + .map(|(id, _)| EnumVariant { parent: self, id }) + .collect() + } + + pub fn ty(self, db: &dyn HirDatabase) -> Type { + Type::from_def(db, self.id.lookup(db.upcast()).container.module(db.upcast()).krate, self.id) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct EnumVariant { + pub(crate) parent: Enum, + pub(crate) id: LocalEnumVariantId, +} + +impl EnumVariant { + pub fn module(self, db: &dyn HirDatabase) -> Module { + self.parent.module(db) + } + pub fn parent_enum(self, _db: &dyn HirDatabase) -> Enum { + self.parent + } + + pub fn name(self, db: &dyn HirDatabase) -> Name { + db.enum_data(self.parent.id).variants[self.id].name.clone() + } + + pub fn fields(self, db: &dyn HirDatabase) -> Vec { + 
self.variant_data(db) + .fields() + .iter() + .map(|(id, _)| Field { parent: self.into(), id }) + .collect() + } + + pub fn kind(self, db: &dyn HirDatabase) -> StructKind { + self.variant_data(db).kind() + } + + pub(crate) fn variant_data(self, db: &dyn HirDatabase) -> Arc { + db.enum_data(self.parent.id).variants[self.id].variant_data.clone() + } +} + +/// A Data Type +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub enum Adt { + Struct(Struct), + Union(Union), + Enum(Enum), +} +impl_from!(Struct, Union, Enum for Adt); + +impl Adt { + pub fn has_non_default_type_params(self, db: &dyn HirDatabase) -> bool { + let subst = db.generic_defaults(self.into()); + subst.iter().any(|ty| &ty.value == &Ty::Unknown) + } + + /// Turns this ADT into a type. Any type parameters of the ADT will be + /// turned into unknown types, which is good for e.g. finding the most + /// general set of completions, but will not look very nice when printed. + pub fn ty(self, db: &dyn HirDatabase) -> Type { + let id = AdtId::from(self); + Type::from_def(db, id.module(db.upcast()).krate, id) + } + + pub fn module(self, db: &dyn HirDatabase) -> Module { + match self { + Adt::Struct(s) => s.module(db), + Adt::Union(s) => s.module(db), + Adt::Enum(e) => e.module(db), + } + } + + pub fn krate(self, db: &dyn HirDatabase) -> Option { + Some(self.module(db).krate()) + } + + pub fn name(self, db: &dyn HirDatabase) -> Name { + match self { + Adt::Struct(s) => s.name(db), + Adt::Union(u) => u.name(db), + Adt::Enum(e) => e.name(db), + } + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub enum VariantDef { + Struct(Struct), + Union(Union), + EnumVariant(EnumVariant), +} +impl_from!(Struct, Union, EnumVariant for VariantDef); + +impl VariantDef { + pub fn fields(self, db: &dyn HirDatabase) -> Vec { + match self { + VariantDef::Struct(it) => it.fields(db), + VariantDef::Union(it) => it.fields(db), + VariantDef::EnumVariant(it) => it.fields(db), + } + } + + pub fn module(self, db: &dyn 
HirDatabase) -> Module { + match self { + VariantDef::Struct(it) => it.module(db), + VariantDef::Union(it) => it.module(db), + VariantDef::EnumVariant(it) => it.module(db), + } + } + + pub fn name(&self, db: &dyn HirDatabase) -> Name { + match self { + VariantDef::Struct(s) => s.name(db), + VariantDef::Union(u) => u.name(db), + VariantDef::EnumVariant(e) => e.name(db), + } + } + + pub(crate) fn variant_data(self, db: &dyn HirDatabase) -> Arc { + match self { + VariantDef::Struct(it) => it.variant_data(db), + VariantDef::Union(it) => it.variant_data(db), + VariantDef::EnumVariant(it) => it.variant_data(db), + } + } +} + +/// The defs which have a body. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum DefWithBody { + Function(Function), + Static(Static), + Const(Const), +} +impl_from!(Function, Const, Static for DefWithBody); + +impl DefWithBody { + pub fn module(self, db: &dyn HirDatabase) -> Module { + match self { + DefWithBody::Const(c) => c.module(db), + DefWithBody::Function(f) => f.module(db), + DefWithBody::Static(s) => s.module(db), + } + } + + pub fn name(self, db: &dyn HirDatabase) -> Option { + match self { + DefWithBody::Function(f) => Some(f.name(db)), + DefWithBody::Static(s) => s.name(db), + DefWithBody::Const(c) => c.name(db), + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct Function { + pub(crate) id: FunctionId, +} + +impl Function { + pub fn module(self, db: &dyn HirDatabase) -> Module { + self.id.lookup(db.upcast()).module(db.upcast()).into() + } + + pub fn krate(self, db: &dyn HirDatabase) -> Option { + Some(self.module(db).krate()) + } + + pub fn name(self, db: &dyn HirDatabase) -> Name { + db.function_data(self.id).name.clone() + } + + pub fn has_self_param(self, db: &dyn HirDatabase) -> bool { + db.function_data(self.id).has_self_param + } + + pub fn params(self, db: &dyn HirDatabase) -> Vec { + db.function_data(self.id).params.clone() + } + + pub fn is_unsafe(self, db: &dyn HirDatabase) -> bool { + 
db.function_data(self.id).is_unsafe + } + + pub fn diagnostics(self, db: &dyn HirDatabase, sink: &mut DiagnosticSink) { + hir_ty::diagnostics::validate_body(db, self.id.into(), sink) + } +} + +impl HasVisibility for Function { + fn visibility(&self, db: &dyn HirDatabase) -> Visibility { + let function_data = db.function_data(self.id); + let visibility = &function_data.visibility; + visibility.resolve(db.upcast(), &self.id.resolver(db.upcast())) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct Const { + pub(crate) id: ConstId, +} + +impl Const { + pub fn module(self, db: &dyn HirDatabase) -> Module { + Module { id: self.id.lookup(db.upcast()).module(db.upcast()) } + } + + pub fn krate(self, db: &dyn HirDatabase) -> Option { + Some(self.module(db).krate()) + } + + pub fn name(self, db: &dyn HirDatabase) -> Option { + db.const_data(self.id).name.clone() + } +} + +impl HasVisibility for Const { + fn visibility(&self, db: &dyn HirDatabase) -> Visibility { + let function_data = db.const_data(self.id); + let visibility = &function_data.visibility; + visibility.resolve(db.upcast(), &self.id.resolver(db.upcast())) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct Static { + pub(crate) id: StaticId, +} + +impl Static { + pub fn module(self, db: &dyn HirDatabase) -> Module { + Module { id: self.id.lookup(db.upcast()).module(db.upcast()) } + } + + pub fn krate(self, db: &dyn HirDatabase) -> Option { + Some(self.module(db).krate()) + } + + pub fn name(self, db: &dyn HirDatabase) -> Option { + db.static_data(self.id).name.clone() + } + + pub fn is_mut(self, db: &dyn HirDatabase) -> bool { + db.static_data(self.id).mutable + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct Trait { + pub(crate) id: TraitId, +} + +impl Trait { + pub fn module(self, db: &dyn HirDatabase) -> Module { + Module { id: self.id.lookup(db.upcast()).container.module(db.upcast()) } + } + + pub fn name(self, db: &dyn HirDatabase) -> Name { + 
db.trait_data(self.id).name.clone() + } + + pub fn items(self, db: &dyn HirDatabase) -> Vec { + db.trait_data(self.id).items.iter().map(|(_name, it)| (*it).into()).collect() + } + + pub fn is_auto(self, db: &dyn HirDatabase) -> bool { + db.trait_data(self.id).auto + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct TypeAlias { + pub(crate) id: TypeAliasId, +} + +impl TypeAlias { + pub fn has_non_default_type_params(self, db: &dyn HirDatabase) -> bool { + let subst = db.generic_defaults(self.id.into()); + subst.iter().any(|ty| &ty.value == &Ty::Unknown) + } + + pub fn module(self, db: &dyn HirDatabase) -> Module { + Module { id: self.id.lookup(db.upcast()).module(db.upcast()) } + } + + pub fn krate(self, db: &dyn HirDatabase) -> Option { + Some(self.module(db).krate()) + } + + pub fn type_ref(self, db: &dyn HirDatabase) -> Option { + db.type_alias_data(self.id).type_ref.clone() + } + + pub fn ty(self, db: &dyn HirDatabase) -> Type { + Type::from_def(db, self.id.lookup(db.upcast()).module(db.upcast()).krate, self.id) + } + + pub fn name(self, db: &dyn HirDatabase) -> Name { + db.type_alias_data(self.id).name.clone() + } +} + +impl HasVisibility for TypeAlias { + fn visibility(&self, db: &dyn HirDatabase) -> Visibility { + let function_data = db.type_alias_data(self.id); + let visibility = &function_data.visibility; + visibility.resolve(db.upcast(), &self.id.resolver(db.upcast())) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct MacroDef { + pub(crate) id: MacroDefId, +} + +impl MacroDef { + /// FIXME: right now, this just returns the root module of the crate that + /// defines this macro. The reasons for this is that macros are expanded + /// early, in `hir_expand`, where modules simply do not exist yet. 
+ pub fn module(self, db: &dyn HirDatabase) -> Option { + let krate = self.id.krate?; + let module_id = db.crate_def_map(krate).root; + Some(Module::new(Crate { id: krate }, module_id)) + } + + /// XXX: this parses the file + pub fn name(self, db: &dyn HirDatabase) -> Option { + self.source(db).value.name().map(|it| it.as_name()) + } + + /// Indicate it is a proc-macro + pub fn is_proc_macro(&self) -> bool { + matches!(self.id.kind, MacroDefKind::CustomDerive(_)) + } + + /// Indicate it is a derive macro + pub fn is_derive_macro(&self) -> bool { + matches!(self.id.kind, MacroDefKind::CustomDerive(_) | MacroDefKind::BuiltInDerive(_)) + } +} + +/// Invariant: `inner.as_assoc_item(db).is_some()` +/// We do not actively enforce this invariant. +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub enum AssocItem { + Function(Function), + Const(Const), + TypeAlias(TypeAlias), +} +pub enum AssocItemContainer { + Trait(Trait), + ImplDef(ImplDef), +} +pub trait AsAssocItem { + fn as_assoc_item(self, db: &dyn HirDatabase) -> Option; +} + +impl AsAssocItem for Function { + fn as_assoc_item(self, db: &dyn HirDatabase) -> Option { + as_assoc_item(db, AssocItem::Function, self.id) + } +} +impl AsAssocItem for Const { + fn as_assoc_item(self, db: &dyn HirDatabase) -> Option { + as_assoc_item(db, AssocItem::Const, self.id) + } +} +impl AsAssocItem for TypeAlias { + fn as_assoc_item(self, db: &dyn HirDatabase) -> Option { + as_assoc_item(db, AssocItem::TypeAlias, self.id) + } +} +fn as_assoc_item(db: &dyn HirDatabase, ctor: CTOR, id: ID) -> Option +where + ID: Lookup>, + DEF: From, + CTOR: FnOnce(DEF) -> AssocItem, + AST: ItemTreeNode, +{ + match id.lookup(db.upcast()).container { + AssocContainerId::TraitId(_) | AssocContainerId::ImplId(_) => Some(ctor(DEF::from(id))), + AssocContainerId::ContainerId(_) => None, + } +} + +impl AssocItem { + pub fn name(self, db: &dyn HirDatabase) -> Option { + match self { + AssocItem::Function(it) => Some(it.name(db)), + AssocItem::Const(it) 
=> it.name(db), + AssocItem::TypeAlias(it) => Some(it.name(db)), + } + } + pub fn module(self, db: &dyn HirDatabase) -> Module { + match self { + AssocItem::Function(f) => f.module(db), + AssocItem::Const(c) => c.module(db), + AssocItem::TypeAlias(t) => t.module(db), + } + } + pub fn container(self, db: &dyn HirDatabase) -> AssocItemContainer { + let container = match self { + AssocItem::Function(it) => it.id.lookup(db.upcast()).container, + AssocItem::Const(it) => it.id.lookup(db.upcast()).container, + AssocItem::TypeAlias(it) => it.id.lookup(db.upcast()).container, + }; + match container { + AssocContainerId::TraitId(id) => AssocItemContainer::Trait(id.into()), + AssocContainerId::ImplId(id) => AssocItemContainer::ImplDef(id.into()), + AssocContainerId::ContainerId(_) => panic!("invalid AssocItem"), + } + } +} + +impl HasVisibility for AssocItem { + fn visibility(&self, db: &dyn HirDatabase) -> Visibility { + match self { + AssocItem::Function(f) => f.visibility(db), + AssocItem::Const(c) => c.visibility(db), + AssocItem::TypeAlias(t) => t.visibility(db), + } + } +} + +#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)] +pub enum GenericDef { + Function(Function), + Adt(Adt), + Trait(Trait), + TypeAlias(TypeAlias), + ImplDef(ImplDef), + // enum variants cannot have generics themselves, but their parent enums + // can, and this makes some code easier to write + EnumVariant(EnumVariant), + // consts can have type parameters from their parents (i.e. 
associated consts of traits) + Const(Const), +} +impl_from!( + Function, + Adt(Struct, Enum, Union), + Trait, + TypeAlias, + ImplDef, + EnumVariant, + Const + for GenericDef +); + +impl GenericDef { + pub fn params(self, db: &dyn HirDatabase) -> Vec { + let generics: Arc = db.generic_params(self.into()); + generics + .types + .iter() + .map(|(local_id, _)| TypeParam { id: TypeParamId { parent: self.into(), local_id } }) + .collect() + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub struct Local { + pub(crate) parent: DefWithBodyId, + pub(crate) pat_id: PatId, +} + +impl Local { + pub fn is_param(self, db: &dyn HirDatabase) -> bool { + let src = self.source(db); + match src.value { + Either::Left(bind_pat) => { + bind_pat.syntax().ancestors().any(|it| ast::Param::can_cast(it.kind())) + } + Either::Right(_self_param) => true, + } + } + + // FIXME: why is this an option? It shouldn't be? + pub fn name(self, db: &dyn HirDatabase) -> Option { + let body = db.body(self.parent.into()); + match &body[self.pat_id] { + Pat::Bind { name, .. } => Some(name.clone()), + _ => None, + } + } + + pub fn is_self(self, db: &dyn HirDatabase) -> bool { + self.name(db) == Some(name![self]) + } + + pub fn is_mut(self, db: &dyn HirDatabase) -> bool { + let body = db.body(self.parent.into()); + match &body[self.pat_id] { + Pat::Bind { mode, .. 
} => match mode { + BindingAnnotation::Mutable | BindingAnnotation::RefMut => true, + _ => false, + }, + _ => false, + } + } + + pub fn parent(self, _db: &dyn HirDatabase) -> DefWithBody { + self.parent.into() + } + + pub fn module(self, db: &dyn HirDatabase) -> Module { + self.parent(db).module(db) + } + + pub fn ty(self, db: &dyn HirDatabase) -> Type { + let def = DefWithBodyId::from(self.parent); + let infer = db.infer(def); + let ty = infer[self.pat_id].clone(); + let krate = def.module(db.upcast()).krate; + Type::new(db, krate, def, ty) + } + + pub fn source(self, db: &dyn HirDatabase) -> InFile> { + let (_body, source_map) = db.body_with_source_map(self.parent.into()); + let src = source_map.pat_syntax(self.pat_id).unwrap(); // Hmm... + let root = src.file_syntax(db.upcast()); + src.map(|ast| { + ast.map_left(|it| it.cast().unwrap().to_node(&root)).map_right(|it| it.to_node(&root)) + }) + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub struct TypeParam { + pub(crate) id: TypeParamId, +} + +impl TypeParam { + pub fn name(self, db: &dyn HirDatabase) -> Name { + let params = db.generic_params(self.id.parent); + params.types[self.id.local_id].name.clone().unwrap_or_else(Name::missing) + } + + pub fn module(self, db: &dyn HirDatabase) -> Module { + self.id.parent.module(db.upcast()).into() + } + + pub fn ty(self, db: &dyn HirDatabase) -> Type { + let resolver = self.id.parent.resolver(db.upcast()); + let environment = TraitEnvironment::lower(db, &resolver); + let ty = Ty::Placeholder(self.id); + Type { + krate: self.id.parent.module(db.upcast()).krate, + ty: InEnvironment { value: ty, environment }, + } + } + + pub fn default(self, db: &dyn HirDatabase) -> Option { + let params = db.generic_defaults(self.id.parent); + let local_idx = hir_ty::param_idx(db, self.id)?; + let resolver = self.id.parent.resolver(db.upcast()); + let environment = TraitEnvironment::lower(db, &resolver); + let ty = params.get(local_idx)?.clone(); + let subst = 
Substs::type_params(db, self.id.parent); + let ty = ty.subst(&subst.prefix(local_idx)); + Some(Type { + krate: self.id.parent.module(db.upcast()).krate, + ty: InEnvironment { value: ty, environment }, + }) + } +} + +// FIXME: rename from `ImplDef` to `Impl` +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct ImplDef { + pub(crate) id: ImplId, +} + +impl ImplDef { + pub fn all_in_crate(db: &dyn HirDatabase, krate: Crate) -> Vec { + let inherent = db.inherent_impls_in_crate(krate.id); + let trait_ = db.trait_impls_in_crate(krate.id); + + inherent.all_impls().chain(trait_.all_impls()).map(Self::from).collect() + } + pub fn for_trait(db: &dyn HirDatabase, krate: Crate, trait_: Trait) -> Vec { + let impls = db.trait_impls_in_crate(krate.id); + impls.for_trait(trait_.id).map(Self::from).collect() + } + + pub fn target_trait(self, db: &dyn HirDatabase) -> Option { + db.impl_data(self.id).target_trait.clone() + } + + pub fn target_type(self, db: &dyn HirDatabase) -> TypeRef { + db.impl_data(self.id).target_type.clone() + } + + pub fn target_ty(self, db: &dyn HirDatabase) -> Type { + let impl_data = db.impl_data(self.id); + let resolver = self.id.resolver(db.upcast()); + let ctx = hir_ty::TyLoweringContext::new(db, &resolver); + let environment = TraitEnvironment::lower(db, &resolver); + let ty = Ty::from_hir(&ctx, &impl_data.target_type); + Type { + krate: self.id.lookup(db.upcast()).container.module(db.upcast()).krate, + ty: InEnvironment { value: ty, environment }, + } + } + + pub fn items(self, db: &dyn HirDatabase) -> Vec { + db.impl_data(self.id).items.iter().map(|it| (*it).into()).collect() + } + + pub fn is_negative(self, db: &dyn HirDatabase) -> bool { + db.impl_data(self.id).is_negative + } + + pub fn module(self, db: &dyn HirDatabase) -> Module { + self.id.lookup(db.upcast()).container.module(db.upcast()).into() + } + + pub fn krate(self, db: &dyn HirDatabase) -> Crate { + Crate { id: self.module(db).id.krate } + } + + pub fn is_builtin_derive(self, 
db: &dyn HirDatabase) -> Option> { + let src = self.source(db); + let item = src.file_id.is_builtin_derive(db.upcast())?; + let hygenic = hir_expand::hygiene::Hygiene::new(db.upcast(), item.file_id); + + let attr = item + .value + .attrs() + .filter_map(|it| { + let path = hir_def::path::ModPath::from_src(it.path()?, &hygenic)?; + if path.as_ident()?.to_string() == "derive" { + Some(it) + } else { + None + } + }) + .last()?; + + Some(item.with_value(attr)) + } +} + +#[derive(Clone, PartialEq, Eq, Debug)] +pub struct Type { + krate: CrateId, + ty: InEnvironment, +} + +impl Type { + pub(crate) fn new_with_resolver( + db: &dyn HirDatabase, + resolver: &Resolver, + ty: Ty, + ) -> Option { + let krate = resolver.krate()?; + Some(Type::new_with_resolver_inner(db, krate, resolver, ty)) + } + pub(crate) fn new_with_resolver_inner( + db: &dyn HirDatabase, + krate: CrateId, + resolver: &Resolver, + ty: Ty, + ) -> Type { + let environment = TraitEnvironment::lower(db, &resolver); + Type { krate, ty: InEnvironment { value: ty, environment } } + } + + fn new(db: &dyn HirDatabase, krate: CrateId, lexical_env: impl HasResolver, ty: Ty) -> Type { + let resolver = lexical_env.resolver(db.upcast()); + let environment = TraitEnvironment::lower(db, &resolver); + Type { krate, ty: InEnvironment { value: ty, environment } } + } + + fn from_def( + db: &dyn HirDatabase, + krate: CrateId, + def: impl HasResolver + Into + Into, + ) -> Type { + let substs = Substs::build_for_def(db, def).fill_with_unknown().build(); + let ty = db.ty(def.into()).subst(&substs); + Type::new(db, krate, def, ty) + } + + pub fn is_unit(&self) -> bool { + matches!( + self.ty.value, + Ty::Apply(ApplicationTy { ctor: TypeCtor::Tuple { cardinality: 0 }, .. }) + ) + } + pub fn is_bool(&self) -> bool { + matches!(self.ty.value, Ty::Apply(ApplicationTy { ctor: TypeCtor::Bool, .. 
})) + } + + pub fn is_mutable_reference(&self) -> bool { + matches!( + self.ty.value, + Ty::Apply(ApplicationTy { ctor: TypeCtor::Ref(Mutability::Mut), .. }) + ) + } + + pub fn is_unknown(&self) -> bool { + matches!(self.ty.value, Ty::Unknown) + } + + /// Checks that particular type `ty` implements `std::future::Future`. + /// This function is used in `.await` syntax completion. + pub fn impls_future(&self, db: &dyn HirDatabase) -> bool { + let krate = self.krate; + + let std_future_trait = + db.lang_item(krate, "future_trait".into()).and_then(|it| it.as_trait()); + let std_future_trait = match std_future_trait { + Some(it) => it, + None => return false, + }; + + let canonical_ty = Canonical { value: self.ty.value.clone(), kinds: Arc::new([]) }; + method_resolution::implements_trait( + &canonical_ty, + db, + self.ty.environment.clone(), + krate, + std_future_trait, + ) + } + + pub fn impls_trait(&self, db: &dyn HirDatabase, trait_: Trait, args: &[Type]) -> bool { + let trait_ref = hir_ty::TraitRef { + trait_: trait_.id, + substs: Substs::build_for_def(db, trait_.id) + .push(self.ty.value.clone()) + .fill(args.iter().map(|t| t.ty.value.clone())) + .build(), + }; + + let goal = Canonical { + value: hir_ty::InEnvironment::new( + self.ty.environment.clone(), + hir_ty::Obligation::Trait(trait_ref), + ), + kinds: Arc::new([]), + }; + + db.trait_solve(self.krate, goal).is_some() + } + + pub fn as_callable(&self, db: &dyn HirDatabase) -> Option { + let def = match self.ty.value { + Ty::Apply(ApplicationTy { ctor: TypeCtor::FnDef(def), parameters: _ }) => Some(def), + _ => None, + }; + + let sig = self.ty.value.callable_sig(db)?; + Some(Callable { ty: self.clone(), sig, def, is_bound_method: false }) + } + + pub fn is_closure(&self) -> bool { + matches!(&self.ty.value, Ty::Apply(ApplicationTy { ctor: TypeCtor::Closure { .. }, .. })) + } + + pub fn is_fn(&self) -> bool { + matches!(&self.ty.value, + Ty::Apply(ApplicationTy { ctor: TypeCtor::FnDef(..), .. 
}) | + Ty::Apply(ApplicationTy { ctor: TypeCtor::FnPtr { .. }, .. }) + ) + } + + pub fn is_packed(&self, db: &dyn HirDatabase) -> bool { + let adt_id = match self.ty.value { + Ty::Apply(ApplicationTy { ctor: TypeCtor::Adt(adt_id), .. }) => adt_id, + _ => return false, + }; + + let adt = adt_id.into(); + match adt { + Adt::Struct(s) => matches!(s.repr(db), Some(ReprKind::Packed)), + _ => false, + } + } + + pub fn is_raw_ptr(&self) -> bool { + matches!(&self.ty.value, Ty::Apply(ApplicationTy { ctor: TypeCtor::RawPtr(..), .. })) + } + + pub fn contains_unknown(&self) -> bool { + return go(&self.ty.value); + + fn go(ty: &Ty) -> bool { + match ty { + Ty::Unknown => true, + Ty::Apply(a_ty) => a_ty.parameters.iter().any(go), + _ => false, + } + } + } + + pub fn fields(&self, db: &dyn HirDatabase) -> Vec<(Field, Type)> { + if let Ty::Apply(a_ty) = &self.ty.value { + let variant_id = match a_ty.ctor { + TypeCtor::Adt(AdtId::StructId(s)) => s.into(), + TypeCtor::Adt(AdtId::UnionId(u)) => u.into(), + _ => return Vec::new(), + }; + + return db + .field_types(variant_id) + .iter() + .map(|(local_id, ty)| { + let def = Field { parent: variant_id.into(), id: local_id }; + let ty = ty.clone().subst(&a_ty.parameters); + (def, self.derived(ty)) + }) + .collect(); + }; + Vec::new() + } + + pub fn tuple_fields(&self, _db: &dyn HirDatabase) -> Vec { + let mut res = Vec::new(); + if let Ty::Apply(a_ty) = &self.ty.value { + if let TypeCtor::Tuple { .. } = a_ty.ctor { + for ty in a_ty.parameters.iter() { + let ty = ty.clone(); + res.push(self.derived(ty)); + } + } + }; + res + } + + pub fn autoderef<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator + 'a { + // There should be no inference vars in types passed here + // FIXME check that? 
+ let canonical = Canonical { value: self.ty.value.clone(), kinds: Arc::new([]) }; + let environment = self.ty.environment.clone(); + let ty = InEnvironment { value: canonical, environment }; + autoderef(db, Some(self.krate), ty) + .map(|canonical| canonical.value) + .map(move |ty| self.derived(ty)) + } + + // This would be nicer if it just returned an iterator, but that runs into + // lifetime problems, because we need to borrow temp `CrateImplDefs`. + pub fn iterate_assoc_items( + self, + db: &dyn HirDatabase, + krate: Crate, + mut callback: impl FnMut(AssocItem) -> Option, + ) -> Option { + for krate in self.ty.value.def_crates(db, krate.id)? { + let impls = db.inherent_impls_in_crate(krate); + + for impl_def in impls.for_self_ty(&self.ty.value) { + for &item in db.impl_data(*impl_def).items.iter() { + if let Some(result) = callback(item.into()) { + return Some(result); + } + } + } + } + None + } + + pub fn iterate_method_candidates( + &self, + db: &dyn HirDatabase, + krate: Crate, + traits_in_scope: &FxHashSet, + name: Option<&Name>, + mut callback: impl FnMut(&Ty, Function) -> Option, + ) -> Option { + // There should be no inference vars in types passed here + // FIXME check that? + // FIXME replace Unknown by bound vars here + let canonical = Canonical { value: self.ty.value.clone(), kinds: Arc::new([]) }; + + let env = self.ty.environment.clone(); + let krate = krate.id; + + method_resolution::iterate_method_candidates( + &canonical, + db, + env, + krate, + traits_in_scope, + name, + method_resolution::LookupMode::MethodCall, + |ty, it| match it { + AssocItemId::FunctionId(f) => callback(ty, f.into()), + _ => None, + }, + ) + } + + pub fn iterate_path_candidates( + &self, + db: &dyn HirDatabase, + krate: Crate, + traits_in_scope: &FxHashSet, + name: Option<&Name>, + mut callback: impl FnMut(&Ty, AssocItem) -> Option, + ) -> Option { + // There should be no inference vars in types passed here + // FIXME check that? 
+ // FIXME replace Unknown by bound vars here + let canonical = Canonical { value: self.ty.value.clone(), kinds: Arc::new([]) }; + + let env = self.ty.environment.clone(); + let krate = krate.id; + + method_resolution::iterate_method_candidates( + &canonical, + db, + env, + krate, + traits_in_scope, + name, + method_resolution::LookupMode::Path, + |ty, it| callback(ty, it.into()), + ) + } + + pub fn as_adt(&self) -> Option { + let (adt, _subst) = self.ty.value.as_adt()?; + Some(adt.into()) + } + + pub fn as_dyn_trait(&self) -> Option { + self.ty.value.dyn_trait().map(Into::into) + } + + pub fn as_impl_traits(&self, db: &dyn HirDatabase) -> Option> { + self.ty.value.impl_trait_bounds(db).map(|it| { + it.into_iter() + .filter_map(|pred| match pred { + hir_ty::GenericPredicate::Implemented(trait_ref) => { + Some(Trait::from(trait_ref.trait_)) + } + _ => None, + }) + .collect() + }) + } + + pub fn as_associated_type_parent_trait(&self, db: &dyn HirDatabase) -> Option { + self.ty.value.associated_type_parent_trait(db).map(Into::into) + } + + // FIXME: provide required accessors such that it becomes implementable from outside. + pub fn is_equal_for_find_impls(&self, other: &Type) -> bool { + match (&self.ty.value, &other.ty.value) { + (Ty::Apply(a_original_ty), Ty::Apply(ApplicationTy { ctor, parameters })) => match ctor + { + TypeCtor::Ref(..) => match parameters.as_single() { + Ty::Apply(a_ty) => a_original_ty.ctor == a_ty.ctor, + _ => false, + }, + _ => a_original_ty.ctor == *ctor, + }, + _ => false, + } + } + + fn derived(&self, ty: Ty) -> Type { + Type { + krate: self.krate, + ty: InEnvironment { value: ty, environment: self.ty.environment.clone() }, + } + } + + pub fn walk(&self, db: &dyn HirDatabase, mut cb: impl FnMut(Type)) { + // TypeWalk::walk for a Ty at first visits parameters and only after that the Ty itself. + // We need a different order here. 
+ + fn walk_substs( + db: &dyn HirDatabase, + type_: &Type, + substs: &Substs, + cb: &mut impl FnMut(Type), + ) { + for ty in substs.iter() { + walk_type(db, &type_.derived(ty.clone()), cb); + } + } + + fn walk_bounds( + db: &dyn HirDatabase, + type_: &Type, + bounds: &[GenericPredicate], + cb: &mut impl FnMut(Type), + ) { + for pred in bounds { + match pred { + GenericPredicate::Implemented(trait_ref) => { + cb(type_.clone()); + walk_substs(db, type_, &trait_ref.substs, cb); + } + _ => (), + } + } + } + + fn walk_type(db: &dyn HirDatabase, type_: &Type, cb: &mut impl FnMut(Type)) { + let ty = type_.ty.value.strip_references(); + match ty { + Ty::Apply(ApplicationTy { ctor, parameters }) => { + match ctor { + TypeCtor::Adt(_) => { + cb(type_.derived(ty.clone())); + } + TypeCtor::AssociatedType(_) => { + if let Some(_) = ty.associated_type_parent_trait(db) { + cb(type_.derived(ty.clone())); + } + } + _ => (), + } + + // adt params, tuples, etc... + walk_substs(db, type_, parameters, cb); + } + Ty::Opaque(opaque_ty) => { + if let Some(bounds) = ty.impl_trait_bounds(db) { + walk_bounds(db, &type_.derived(ty.clone()), &bounds, cb); + } + + walk_substs(db, type_, &opaque_ty.parameters, cb); + } + Ty::Placeholder(_) => { + if let Some(bounds) = ty.impl_trait_bounds(db) { + walk_bounds(db, &type_.derived(ty.clone()), &bounds, cb); + } + } + Ty::Dyn(bounds) => { + walk_bounds(db, &type_.derived(ty.clone()), bounds.as_ref(), cb); + } + + _ => (), + } + } + + walk_type(db, self, &mut cb); + } +} + +impl HirDisplay for Type { + fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> { + self.ty.value.hir_fmt(f) + } +} + +// FIXME: closures +#[derive(Debug)] +pub struct Callable { + ty: Type, + sig: FnSig, + def: Option, + pub(crate) is_bound_method: bool, +} + +pub enum CallableKind { + Function(Function), + TupleStruct(Struct), + TupleEnumVariant(EnumVariant), + Closure, +} + +impl Callable { + pub fn kind(&self) -> CallableKind { + match self.def { + 
Some(CallableDefId::FunctionId(it)) => CallableKind::Function(it.into()), + Some(CallableDefId::StructId(it)) => CallableKind::TupleStruct(it.into()), + Some(CallableDefId::EnumVariantId(it)) => CallableKind::TupleEnumVariant(it.into()), + None => CallableKind::Closure, + } + } + pub fn receiver_param(&self, db: &dyn HirDatabase) -> Option { + let func = match self.def { + Some(CallableDefId::FunctionId(it)) if self.is_bound_method => it, + _ => return None, + }; + let src = func.lookup(db.upcast()).source(db.upcast()); + let param_list = src.value.param_list()?; + param_list.self_param() + } + pub fn n_params(&self) -> usize { + self.sig.params().len() - if self.is_bound_method { 1 } else { 0 } + } + pub fn params( + &self, + db: &dyn HirDatabase, + ) -> Vec<(Option>, Type)> { + let types = self + .sig + .params() + .iter() + .skip(if self.is_bound_method { 1 } else { 0 }) + .map(|ty| self.ty.derived(ty.clone())); + let patterns = match self.def { + Some(CallableDefId::FunctionId(func)) => { + let src = func.lookup(db.upcast()).source(db.upcast()); + src.value.param_list().map(|param_list| { + param_list + .self_param() + .map(|it| Some(Either::Left(it))) + .filter(|_| !self.is_bound_method) + .into_iter() + .chain(param_list.params().map(|it| it.pat().map(Either::Right))) + }) + } + _ => None, + }; + patterns.into_iter().flatten().chain(iter::repeat(None)).zip(types).collect() + } + pub fn return_type(&self) -> Type { + self.ty.derived(self.sig.ret().clone()) + } +} + +/// For IDE only +#[derive(Debug)] +pub enum ScopeDef { + ModuleDef(ModuleDef), + MacroDef(MacroDef), + GenericParam(TypeParam), + ImplSelfType(ImplDef), + AdtSelfType(Adt), + Local(Local), + Unknown, +} + +impl ScopeDef { + pub fn all_items(def: PerNs) -> ArrayVec<[Self; 3]> { + let mut items = ArrayVec::new(); + + match (def.take_types(), def.take_values()) { + (Some(m1), None) => items.push(ScopeDef::ModuleDef(m1.into())), + (None, Some(m2)) => items.push(ScopeDef::ModuleDef(m2.into())), + 
(Some(m1), Some(m2)) => { + // Some items, like unit structs and enum variants, are + // returned as both a type and a value. Here we want + // to de-duplicate them. + if m1 != m2 { + items.push(ScopeDef::ModuleDef(m1.into())); + items.push(ScopeDef::ModuleDef(m2.into())); + } else { + items.push(ScopeDef::ModuleDef(m1.into())); + } + } + (None, None) => {} + }; + + if let Some(macro_def_id) = def.take_macros() { + items.push(ScopeDef::MacroDef(macro_def_id.into())); + } + + if items.is_empty() { + items.push(ScopeDef::Unknown); + } + + items + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub enum AttrDef { + Module(Module), + Field(Field), + Adt(Adt), + Function(Function), + EnumVariant(EnumVariant), + Static(Static), + Const(Const), + Trait(Trait), + TypeAlias(TypeAlias), + MacroDef(MacroDef), +} + +impl_from!( + Module, + Field, + Adt(Struct, Enum, Union), + EnumVariant, + Static, + Const, + Function, + Trait, + TypeAlias, + MacroDef + for AttrDef +); + +pub trait HasAttrs { + fn attrs(self, db: &dyn HirDatabase) -> Attrs; +} + +impl> HasAttrs for T { + fn attrs(self, db: &dyn HirDatabase) -> Attrs { + let def: AttrDef = self.into(); + db.attrs(def.into()) + } +} + +pub trait Docs { + fn docs(&self, db: &dyn HirDatabase) -> Option; +} +impl + Copy> Docs for T { + fn docs(&self, db: &dyn HirDatabase) -> Option { + let def: AttrDef = (*self).into(); + db.documentation(def.into()) + } +} + +pub trait HasVisibility { + fn visibility(&self, db: &dyn HirDatabase) -> Visibility; + fn is_visible_from(&self, db: &dyn HirDatabase, module: Module) -> bool { + let vis = self.visibility(db); + vis.is_visible_from(db.upcast(), module.id) + } +} diff --git a/crates/ra_hir/src/db.rs b/crates/hir/src/db.rs similarity index 100% rename from crates/ra_hir/src/db.rs rename to crates/hir/src/db.rs diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs new file mode 100644 index 0000000000..363164b9b4 --- /dev/null +++ 
b/crates/hir/src/diagnostics.rs @@ -0,0 +1,6 @@ +//! FIXME: write short doc here +pub use hir_def::diagnostics::UnresolvedModule; +pub use hir_expand::diagnostics::{Diagnostic, DiagnosticSink, DiagnosticSinkBuilder}; +pub use hir_ty::diagnostics::{ + MismatchedArgCount, MissingFields, MissingMatchArms, MissingOkInTailExpr, NoSuchField, +}; diff --git a/crates/hir/src/from_id.rs b/crates/hir/src/from_id.rs new file mode 100644 index 0000000000..a53ac1e080 --- /dev/null +++ b/crates/hir/src/from_id.rs @@ -0,0 +1,247 @@ +//! Utility module for converting between hir_def ids and code_model wrappers. +//! +//! It's unclear if we need this long-term, but it's definitelly useful while we +//! are splitting the hir. + +use hir_def::{ + expr::PatId, AdtId, AssocItemId, AttrDefId, DefWithBodyId, EnumVariantId, FieldId, + GenericDefId, ModuleDefId, VariantId, +}; + +use crate::{ + code_model::ItemInNs, Adt, AssocItem, AttrDef, DefWithBody, EnumVariant, Field, GenericDef, + Local, MacroDef, ModuleDef, VariantDef, +}; + +macro_rules! 
from_id { + ($(($id:path, $ty:path)),*) => {$( + impl From<$id> for $ty { + fn from(id: $id) -> $ty { + $ty { id } + } + } + impl From<$ty> for $id { + fn from(ty: $ty) -> $id { + ty.id + } + } + )*} +} + +from_id![ + (base_db::CrateId, crate::Crate), + (hir_def::ModuleId, crate::Module), + (hir_def::StructId, crate::Struct), + (hir_def::UnionId, crate::Union), + (hir_def::EnumId, crate::Enum), + (hir_def::TypeAliasId, crate::TypeAlias), + (hir_def::TraitId, crate::Trait), + (hir_def::StaticId, crate::Static), + (hir_def::ConstId, crate::Const), + (hir_def::FunctionId, crate::Function), + (hir_def::ImplId, crate::ImplDef), + (hir_def::TypeParamId, crate::TypeParam), + (hir_expand::MacroDefId, crate::MacroDef) +]; + +impl From for Adt { + fn from(id: AdtId) -> Self { + match id { + AdtId::StructId(it) => Adt::Struct(it.into()), + AdtId::UnionId(it) => Adt::Union(it.into()), + AdtId::EnumId(it) => Adt::Enum(it.into()), + } + } +} + +impl From for AdtId { + fn from(id: Adt) -> Self { + match id { + Adt::Struct(it) => AdtId::StructId(it.id), + Adt::Union(it) => AdtId::UnionId(it.id), + Adt::Enum(it) => AdtId::EnumId(it.id), + } + } +} + +impl From for EnumVariant { + fn from(id: EnumVariantId) -> Self { + EnumVariant { parent: id.parent.into(), id: id.local_id } + } +} + +impl From for EnumVariantId { + fn from(def: EnumVariant) -> Self { + EnumVariantId { parent: def.parent.id, local_id: def.id } + } +} + +impl From for ModuleDef { + fn from(id: ModuleDefId) -> Self { + match id { + ModuleDefId::ModuleId(it) => ModuleDef::Module(it.into()), + ModuleDefId::FunctionId(it) => ModuleDef::Function(it.into()), + ModuleDefId::AdtId(it) => ModuleDef::Adt(it.into()), + ModuleDefId::EnumVariantId(it) => ModuleDef::EnumVariant(it.into()), + ModuleDefId::ConstId(it) => ModuleDef::Const(it.into()), + ModuleDefId::StaticId(it) => ModuleDef::Static(it.into()), + ModuleDefId::TraitId(it) => ModuleDef::Trait(it.into()), + ModuleDefId::TypeAliasId(it) => 
ModuleDef::TypeAlias(it.into()), + ModuleDefId::BuiltinType(it) => ModuleDef::BuiltinType(it), + } + } +} + +impl From for ModuleDefId { + fn from(id: ModuleDef) -> Self { + match id { + ModuleDef::Module(it) => ModuleDefId::ModuleId(it.into()), + ModuleDef::Function(it) => ModuleDefId::FunctionId(it.into()), + ModuleDef::Adt(it) => ModuleDefId::AdtId(it.into()), + ModuleDef::EnumVariant(it) => ModuleDefId::EnumVariantId(it.into()), + ModuleDef::Const(it) => ModuleDefId::ConstId(it.into()), + ModuleDef::Static(it) => ModuleDefId::StaticId(it.into()), + ModuleDef::Trait(it) => ModuleDefId::TraitId(it.into()), + ModuleDef::TypeAlias(it) => ModuleDefId::TypeAliasId(it.into()), + ModuleDef::BuiltinType(it) => ModuleDefId::BuiltinType(it), + } + } +} + +impl From for DefWithBodyId { + fn from(def: DefWithBody) -> Self { + match def { + DefWithBody::Function(it) => DefWithBodyId::FunctionId(it.id), + DefWithBody::Static(it) => DefWithBodyId::StaticId(it.id), + DefWithBody::Const(it) => DefWithBodyId::ConstId(it.id), + } + } +} + +impl From for DefWithBody { + fn from(def: DefWithBodyId) -> Self { + match def { + DefWithBodyId::FunctionId(it) => DefWithBody::Function(it.into()), + DefWithBodyId::StaticId(it) => DefWithBody::Static(it.into()), + DefWithBodyId::ConstId(it) => DefWithBody::Const(it.into()), + } + } +} + +impl From for AssocItem { + fn from(def: AssocItemId) -> Self { + match def { + AssocItemId::FunctionId(it) => AssocItem::Function(it.into()), + AssocItemId::TypeAliasId(it) => AssocItem::TypeAlias(it.into()), + AssocItemId::ConstId(it) => AssocItem::Const(it.into()), + } + } +} + +impl From for GenericDefId { + fn from(def: GenericDef) -> Self { + match def { + GenericDef::Function(it) => GenericDefId::FunctionId(it.id), + GenericDef::Adt(it) => GenericDefId::AdtId(it.into()), + GenericDef::Trait(it) => GenericDefId::TraitId(it.id), + GenericDef::TypeAlias(it) => GenericDefId::TypeAliasId(it.id), + GenericDef::ImplDef(it) => GenericDefId::ImplId(it.id), + 
GenericDef::EnumVariant(it) => { + GenericDefId::EnumVariantId(EnumVariantId { parent: it.parent.id, local_id: it.id }) + } + GenericDef::Const(it) => GenericDefId::ConstId(it.id), + } + } +} + +impl From for GenericDefId { + fn from(id: Adt) -> Self { + match id { + Adt::Struct(it) => it.id.into(), + Adt::Union(it) => it.id.into(), + Adt::Enum(it) => it.id.into(), + } + } +} + +impl From for VariantDef { + fn from(def: VariantId) -> Self { + match def { + VariantId::StructId(it) => VariantDef::Struct(it.into()), + VariantId::EnumVariantId(it) => VariantDef::EnumVariant(it.into()), + VariantId::UnionId(it) => VariantDef::Union(it.into()), + } + } +} + +impl From for VariantId { + fn from(def: VariantDef) -> Self { + match def { + VariantDef::Struct(it) => VariantId::StructId(it.id), + VariantDef::EnumVariant(it) => VariantId::EnumVariantId(it.into()), + VariantDef::Union(it) => VariantId::UnionId(it.id), + } + } +} + +impl From for FieldId { + fn from(def: Field) -> Self { + FieldId { parent: def.parent.into(), local_id: def.id } + } +} + +impl From for Field { + fn from(def: FieldId) -> Self { + Field { parent: def.parent.into(), id: def.local_id } + } +} + +impl From for AttrDefId { + fn from(def: AttrDef) -> Self { + match def { + AttrDef::Module(it) => AttrDefId::ModuleId(it.id), + AttrDef::Field(it) => AttrDefId::FieldId(it.into()), + AttrDef::Adt(it) => AttrDefId::AdtId(it.into()), + AttrDef::Function(it) => AttrDefId::FunctionId(it.id), + AttrDef::EnumVariant(it) => AttrDefId::EnumVariantId(it.into()), + AttrDef::Static(it) => AttrDefId::StaticId(it.id), + AttrDef::Const(it) => AttrDefId::ConstId(it.id), + AttrDef::Trait(it) => AttrDefId::TraitId(it.id), + AttrDef::TypeAlias(it) => AttrDefId::TypeAliasId(it.id), + AttrDef::MacroDef(it) => AttrDefId::MacroDefId(it.id), + } + } +} + +impl From for GenericDefId { + fn from(item: AssocItem) -> Self { + match item { + AssocItem::Function(f) => f.id.into(), + AssocItem::Const(c) => c.id.into(), + 
AssocItem::TypeAlias(t) => t.id.into(), + } + } +} + +impl From<(DefWithBodyId, PatId)> for Local { + fn from((parent, pat_id): (DefWithBodyId, PatId)) -> Self { + Local { parent, pat_id } + } +} + +impl From for ItemInNs { + fn from(macro_def: MacroDef) -> Self { + ItemInNs::Macros(macro_def.into()) + } +} + +impl From for ItemInNs { + fn from(module_def: ModuleDef) -> Self { + match module_def { + ModuleDef::Static(_) | ModuleDef::Const(_) | ModuleDef::Function(_) => { + ItemInNs::Values(module_def.into()) + } + _ => ItemInNs::Types(module_def.into()), + } + } +} diff --git a/crates/hir/src/has_source.rs b/crates/hir/src/has_source.rs new file mode 100644 index 0000000000..a50d4ff029 --- /dev/null +++ b/crates/hir/src/has_source.rs @@ -0,0 +1,135 @@ +//! FIXME: write short doc here + +use either::Either; +use hir_def::{ + nameres::{ModuleOrigin, ModuleSource}, + src::{HasChildSource, HasSource as _}, + Lookup, VariantId, +}; +use syntax::ast; + +use crate::{ + db::HirDatabase, Const, Enum, EnumVariant, Field, FieldSource, Function, ImplDef, MacroDef, + Module, Static, Struct, Trait, TypeAlias, TypeParam, Union, +}; + +pub use hir_expand::InFile; + +pub trait HasSource { + type Ast; + fn source(self, db: &dyn HirDatabase) -> InFile; +} + +/// NB: Module is !HasSource, because it has two source nodes at the same time: +/// definition and declaration. +impl Module { + /// Returns a node which defines this module. That is, a file or a `mod foo {}` with items. + pub fn definition_source(self, db: &dyn HirDatabase) -> InFile { + let def_map = db.crate_def_map(self.id.krate); + def_map[self.id.local_id].definition_source(db.upcast()) + } + + pub fn is_mod_rs(self, db: &dyn HirDatabase) -> bool { + let def_map = db.crate_def_map(self.id.krate); + match def_map[self.id.local_id].origin { + ModuleOrigin::File { is_mod_rs, .. } => is_mod_rs, + _ => false, + } + } + + /// Returns a node which declares this module, either a `mod foo;` or a `mod foo {}`. 
+ /// `None` for the crate root. + pub fn declaration_source(self, db: &dyn HirDatabase) -> Option> { + let def_map = db.crate_def_map(self.id.krate); + def_map[self.id.local_id].declaration_source(db.upcast()) + } +} + +impl HasSource for Field { + type Ast = FieldSource; + fn source(self, db: &dyn HirDatabase) -> InFile { + let var = VariantId::from(self.parent); + let src = var.child_source(db.upcast()); + src.map(|it| match it[self.id].clone() { + Either::Left(it) => FieldSource::Pos(it), + Either::Right(it) => FieldSource::Named(it), + }) + } +} +impl HasSource for Struct { + type Ast = ast::Struct; + fn source(self, db: &dyn HirDatabase) -> InFile { + self.id.lookup(db.upcast()).source(db.upcast()) + } +} +impl HasSource for Union { + type Ast = ast::Union; + fn source(self, db: &dyn HirDatabase) -> InFile { + self.id.lookup(db.upcast()).source(db.upcast()) + } +} +impl HasSource for Enum { + type Ast = ast::Enum; + fn source(self, db: &dyn HirDatabase) -> InFile { + self.id.lookup(db.upcast()).source(db.upcast()) + } +} +impl HasSource for EnumVariant { + type Ast = ast::Variant; + fn source(self, db: &dyn HirDatabase) -> InFile { + self.parent.id.child_source(db.upcast()).map(|map| map[self.id].clone()) + } +} +impl HasSource for Function { + type Ast = ast::Fn; + fn source(self, db: &dyn HirDatabase) -> InFile { + self.id.lookup(db.upcast()).source(db.upcast()) + } +} +impl HasSource for Const { + type Ast = ast::Const; + fn source(self, db: &dyn HirDatabase) -> InFile { + self.id.lookup(db.upcast()).source(db.upcast()) + } +} +impl HasSource for Static { + type Ast = ast::Static; + fn source(self, db: &dyn HirDatabase) -> InFile { + self.id.lookup(db.upcast()).source(db.upcast()) + } +} +impl HasSource for Trait { + type Ast = ast::Trait; + fn source(self, db: &dyn HirDatabase) -> InFile { + self.id.lookup(db.upcast()).source(db.upcast()) + } +} +impl HasSource for TypeAlias { + type Ast = ast::TypeAlias; + fn source(self, db: &dyn HirDatabase) -> InFile 
{ + self.id.lookup(db.upcast()).source(db.upcast()) + } +} +impl HasSource for MacroDef { + type Ast = ast::MacroCall; + fn source(self, db: &dyn HirDatabase) -> InFile { + InFile { + file_id: self.id.ast_id.expect("MacroDef without ast_id").file_id, + value: self.id.ast_id.expect("MacroDef without ast_id").to_node(db.upcast()), + } + } +} +impl HasSource for ImplDef { + type Ast = ast::Impl; + fn source(self, db: &dyn HirDatabase) -> InFile { + self.id.lookup(db.upcast()).source(db.upcast()) + } +} + +impl HasSource for TypeParam { + type Ast = Either; + fn source(self, db: &dyn HirDatabase) -> InFile { + let child_source = self.id.parent.child_source(db.upcast()); + child_source.map(|it| it[self.id.local_id].clone()) + } +} diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs new file mode 100644 index 0000000000..4ae2bd0855 --- /dev/null +++ b/crates/hir/src/lib.rs @@ -0,0 +1,63 @@ +//! HIR (previously known as descriptors) provides a high-level object oriented +//! access to Rust code. +//! +//! The principal difference between HIR and syntax trees is that HIR is bound +//! to a particular crate instance. That is, it has cfg flags and features +//! applied. So, the relation between syntax and HIR is many-to-one. +//! +//! HIR is the public API of the all of the compiler logic above syntax trees. +//! It is written in "OO" style. Each type is self contained (as in, it knows it's +//! parents and full context). It should be "clean code". +//! +//! `hir_*` crates are the implementation of the compiler logic. +//! They are written in "ECS" style, with relatively little abstractions. +//! Many types are not self-contained, and explicitly use local indexes, arenas, etc. +//! +//! `hir` is what insulates the "we don't know how to actually write an incremental compiler" +//! from the ide with completions, hovers, etc. It is a (soft, internal) boundary: +//! https://www.tedinski.com/2018/02/06/system-boundaries.html. 
+ +#![recursion_limit = "512"] + +mod semantics; +pub mod db; +mod source_analyzer; + +pub mod diagnostics; + +mod from_id; +mod code_model; + +mod has_source; + +pub use crate::{ + code_model::{ + Adt, AsAssocItem, AssocItem, AssocItemContainer, AttrDef, Callable, CallableKind, Const, + Crate, CrateDependency, DefWithBody, Docs, Enum, EnumVariant, Field, FieldSource, Function, + GenericDef, HasAttrs, HasVisibility, ImplDef, Local, MacroDef, Module, ModuleDef, ScopeDef, + Static, Struct, Trait, Type, TypeAlias, TypeParam, Union, VariantDef, Visibility, + }, + has_source::HasSource, + semantics::{original_range, PathResolution, Semantics, SemanticsScope}, +}; + +pub use hir_def::{ + adt::StructKind, + attr::Attrs, + body::scope::ExprScopes, + builtin_type::BuiltinType, + docs::Documentation, + nameres::ModuleSource, + path::{ModPath, Path, PathKind}, + type_ref::{Mutability, TypeRef}, +}; +pub use hir_expand::{ + name::Name, HirFileId, InFile, MacroCallId, MacroCallLoc, /* FIXME */ MacroDefId, + MacroFile, Origin, +}; +pub use hir_ty::display::HirDisplay; + +// These are negative re-exports: pub using these names is forbidden, they +// should remain private to hir internals. +#[allow(unused)] +use hir_expand::hygiene::Hygiene; diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs new file mode 100644 index 0000000000..3953017c3b --- /dev/null +++ b/crates/hir/src/semantics.rs @@ -0,0 +1,849 @@ +//! See `Semantics`. 
+ +mod source_to_def; + +use std::{cell::RefCell, fmt, iter::successors}; + +use base_db::{FileId, FileRange}; +use hir_def::{ + resolver::{self, HasResolver, Resolver}, + AsMacroCall, FunctionId, TraitId, VariantId, +}; +use hir_expand::{hygiene::Hygiene, name::AsName, ExpansionInfo}; +use hir_ty::associated_type_shorthand_candidates; +use itertools::Itertools; +use rustc_hash::{FxHashMap, FxHashSet}; +use syntax::{ + algo::{find_node_at_offset, skip_trivia_token}, + ast, AstNode, Direction, SyntaxNode, SyntaxToken, TextRange, TextSize, +}; + +use crate::{ + db::HirDatabase, + diagnostics::Diagnostic, + semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx}, + source_analyzer::{resolve_hir_path, resolve_hir_path_qualifier, SourceAnalyzer}, + AssocItem, Callable, Crate, Field, Function, HirFileId, ImplDef, InFile, Local, MacroDef, + Module, ModuleDef, Name, Origin, Path, ScopeDef, Trait, Type, TypeAlias, TypeParam, TypeRef, + VariantDef, +}; +use resolver::TypeNs; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum PathResolution { + /// An item + Def(ModuleDef), + /// A local binding (only value namespace) + Local(Local), + /// A generic parameter + TypeParam(TypeParam), + SelfType(ImplDef), + Macro(MacroDef), + AssocItem(AssocItem), +} + +impl PathResolution { + fn in_type_ns(&self) -> Option { + match self { + PathResolution::Def(ModuleDef::Adt(adt)) => Some(TypeNs::AdtId((*adt).into())), + PathResolution::Def(ModuleDef::BuiltinType(builtin)) => { + Some(TypeNs::BuiltinType(*builtin)) + } + PathResolution::Def(ModuleDef::Const(_)) + | PathResolution::Def(ModuleDef::EnumVariant(_)) + | PathResolution::Def(ModuleDef::Function(_)) + | PathResolution::Def(ModuleDef::Module(_)) + | PathResolution::Def(ModuleDef::Static(_)) + | PathResolution::Def(ModuleDef::Trait(_)) => None, + PathResolution::Def(ModuleDef::TypeAlias(alias)) => { + Some(TypeNs::TypeAliasId((*alias).into())) + } + PathResolution::Local(_) | PathResolution::Macro(_) => None, + 
PathResolution::TypeParam(param) => Some(TypeNs::GenericParam((*param).into())), + PathResolution::SelfType(impl_def) => Some(TypeNs::SelfType((*impl_def).into())), + PathResolution::AssocItem(AssocItem::Const(_)) + | PathResolution::AssocItem(AssocItem::Function(_)) => None, + PathResolution::AssocItem(AssocItem::TypeAlias(alias)) => { + Some(TypeNs::TypeAliasId((*alias).into())) + } + } + } + + /// Returns an iterator over associated types that may be specified after this path (using + /// `Ty::Assoc` syntax). + pub fn assoc_type_shorthand_candidates( + &self, + db: &dyn HirDatabase, + mut cb: impl FnMut(TypeAlias) -> Option, + ) -> Option { + associated_type_shorthand_candidates(db, self.in_type_ns()?, |_, _, id| cb(id.into())) + } +} + +/// Primary API to get semantic information, like types, from syntax trees. +pub struct Semantics<'db, DB> { + pub db: &'db DB, + imp: SemanticsImpl<'db>, +} + +pub struct SemanticsImpl<'db> { + pub db: &'db dyn HirDatabase, + s2d_cache: RefCell, + expansion_info_cache: RefCell>>, + cache: RefCell>, +} + +impl fmt::Debug for Semantics<'_, DB> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "Semantics {{ ... 
}}") + } +} + +impl<'db, DB: HirDatabase> Semantics<'db, DB> { + pub fn new(db: &DB) -> Semantics { + let impl_ = SemanticsImpl::new(db); + Semantics { db, imp: impl_ } + } + + pub fn parse(&self, file_id: FileId) -> ast::SourceFile { + self.imp.parse(file_id) + } + + pub fn expand(&self, macro_call: &ast::MacroCall) -> Option { + self.imp.expand(macro_call) + } + pub fn speculative_expand( + &self, + actual_macro_call: &ast::MacroCall, + hypothetical_args: &ast::TokenTree, + token_to_map: SyntaxToken, + ) -> Option<(SyntaxNode, SyntaxToken)> { + self.imp.speculative_expand(actual_macro_call, hypothetical_args, token_to_map) + } + + pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken { + self.imp.descend_into_macros(token) + } + + pub fn descend_node_at_offset( + &self, + node: &SyntaxNode, + offset: TextSize, + ) -> Option { + self.imp.descend_node_at_offset(node, offset).find_map(N::cast) + } + + pub fn original_range(&self, node: &SyntaxNode) -> FileRange { + self.imp.original_range(node) + } + + pub fn diagnostics_display_range(&self, diagnostics: &dyn Diagnostic) -> FileRange { + self.imp.diagnostics_display_range(diagnostics) + } + + pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator + '_ { + self.imp.ancestors_with_macros(node) + } + + pub fn ancestors_at_offset_with_macros( + &self, + node: &SyntaxNode, + offset: TextSize, + ) -> impl Iterator + '_ { + self.imp.ancestors_at_offset_with_macros(node, offset) + } + + /// Find a AstNode by offset inside SyntaxNode, if it is inside *Macrofile*, + /// search up until it is of the target AstNode type + pub fn find_node_at_offset_with_macros( + &self, + node: &SyntaxNode, + offset: TextSize, + ) -> Option { + self.imp.ancestors_at_offset_with_macros(node, offset).find_map(N::cast) + } + + /// Find a AstNode by offset inside SyntaxNode, if it is inside *MacroCall*, + /// descend it and find again + pub fn find_node_at_offset_with_descend( + &self, + node: &SyntaxNode, + offset: 
TextSize, + ) -> Option { + if let Some(it) = find_node_at_offset(&node, offset) { + return Some(it); + } + + self.imp.descend_node_at_offset(node, offset).find_map(N::cast) + } + + pub fn type_of_expr(&self, expr: &ast::Expr) -> Option { + self.imp.type_of_expr(expr) + } + + pub fn type_of_pat(&self, pat: &ast::Pat) -> Option { + self.imp.type_of_pat(pat) + } + + pub fn type_of_self(&self, param: &ast::SelfParam) -> Option { + self.imp.type_of_self(param) + } + + pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option { + self.imp.resolve_method_call(call).map(Function::from) + } + + pub fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option { + self.imp.resolve_method_call_as_callable(call) + } + + pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option { + self.imp.resolve_field(field) + } + + pub fn resolve_record_field( + &self, + field: &ast::RecordExprField, + ) -> Option<(Field, Option)> { + self.imp.resolve_record_field(field) + } + + pub fn resolve_record_field_pat(&self, field: &ast::RecordPatField) -> Option { + self.imp.resolve_record_field_pat(field) + } + + pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option { + self.imp.resolve_macro_call(macro_call) + } + + pub fn resolve_path(&self, path: &ast::Path) -> Option { + self.imp.resolve_path(path) + } + + pub fn resolve_extern_crate(&self, extern_crate: &ast::ExternCrate) -> Option { + self.imp.resolve_extern_crate(extern_crate) + } + + pub fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option { + self.imp.resolve_variant(record_lit).map(VariantDef::from) + } + + pub fn lower_path(&self, path: &ast::Path) -> Option { + self.imp.lower_path(path) + } + + pub fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option { + self.imp.resolve_bind_pat_to_const(pat) + } + + // FIXME: use this instead? 
+ // pub fn resolve_name_ref(&self, name_ref: &ast::NameRef) -> Option; + + pub fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> { + self.imp.record_literal_missing_fields(literal) + } + + pub fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> { + self.imp.record_pattern_missing_fields(pattern) + } + + pub fn to_def(&self, src: &T) -> Option { + let src = self.imp.find_file(src.syntax().clone()).with_value(src).cloned(); + T::to_def(&self.imp, src) + } + + pub fn to_module_def(&self, file: FileId) -> Option { + self.imp.to_module_def(file) + } + + pub fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db> { + self.imp.scope(node) + } + + pub fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> { + self.imp.scope_at_offset(node, offset) + } + + pub fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> { + self.imp.scope_for_def(def) + } + + pub fn assert_contains_node(&self, node: &SyntaxNode) { + self.imp.assert_contains_node(node) + } + + pub fn is_unsafe_method_call(&self, method_call_expr: ast::MethodCallExpr) -> bool { + self.imp.is_unsafe_method_call(method_call_expr) + } + + pub fn is_unsafe_ref_expr(&self, ref_expr: &ast::RefExpr) -> bool { + self.imp.is_unsafe_ref_expr(ref_expr) + } + + pub fn is_unsafe_ident_pat(&self, ident_pat: &ast::IdentPat) -> bool { + self.imp.is_unsafe_ident_pat(ident_pat) + } +} + +impl<'db> SemanticsImpl<'db> { + fn new(db: &'db dyn HirDatabase) -> Self { + SemanticsImpl { + db, + s2d_cache: Default::default(), + cache: Default::default(), + expansion_info_cache: Default::default(), + } + } + + fn parse(&self, file_id: FileId) -> ast::SourceFile { + let tree = self.db.parse(file_id).tree(); + self.cache(tree.syntax().clone(), file_id.into()); + tree + } + + fn expand(&self, macro_call: &ast::MacroCall) -> Option { + let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call); + let sa = 
self.analyze2(macro_call.map(|it| it.syntax()), None); + let file_id = sa.expand(self.db, macro_call)?; + let node = self.db.parse_or_expand(file_id)?; + self.cache(node.clone(), file_id); + Some(node) + } + + fn speculative_expand( + &self, + actual_macro_call: &ast::MacroCall, + hypothetical_args: &ast::TokenTree, + token_to_map: SyntaxToken, + ) -> Option<(SyntaxNode, SyntaxToken)> { + let macro_call = + self.find_file(actual_macro_call.syntax().clone()).with_value(actual_macro_call); + let sa = self.analyze2(macro_call.map(|it| it.syntax()), None); + let krate = sa.resolver.krate()?; + let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| { + sa.resolver.resolve_path_as_macro(self.db.upcast(), &path) + })?; + hir_expand::db::expand_hypothetical( + self.db.upcast(), + macro_call_id, + hypothetical_args, + token_to_map, + ) + } + + fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken { + let _p = profile::span("descend_into_macros"); + let parent = token.parent(); + let parent = self.find_file(parent); + let sa = self.analyze2(parent.as_ref(), None); + + let token = successors(Some(parent.with_value(token)), |token| { + self.db.check_canceled(); + let macro_call = token.value.ancestors().find_map(ast::MacroCall::cast)?; + let tt = macro_call.token_tree()?; + if !tt.syntax().text_range().contains_range(token.value.text_range()) { + return None; + } + let file_id = sa.expand(self.db, token.with_value(¯o_call))?; + let token = self + .expansion_info_cache + .borrow_mut() + .entry(file_id) + .or_insert_with(|| file_id.expansion_info(self.db.upcast())) + .as_ref()? 
+ .map_token_down(token.as_ref())?; + + self.cache(find_root(&token.value.parent()), token.file_id); + + Some(token) + }) + .last() + .unwrap(); + + token.value + } + + fn descend_node_at_offset( + &self, + node: &SyntaxNode, + offset: TextSize, + ) -> impl Iterator + '_ { + // Handle macro token cases + node.token_at_offset(offset) + .map(|token| self.descend_into_macros(token)) + .map(|it| self.ancestors_with_macros(it.parent())) + .flatten() + } + + fn original_range(&self, node: &SyntaxNode) -> FileRange { + let node = self.find_file(node.clone()); + original_range(self.db, node.as_ref()) + } + + fn diagnostics_display_range(&self, diagnostics: &dyn Diagnostic) -> FileRange { + let src = diagnostics.display_source(); + let root = self.db.parse_or_expand(src.file_id).unwrap(); + let node = src.value.to_node(&root); + self.cache(root, src.file_id); + original_range(self.db, src.with_value(&node)) + } + + fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator + '_ { + let node = self.find_file(node); + node.ancestors_with_macros(self.db.upcast()).map(|it| it.value) + } + + fn ancestors_at_offset_with_macros( + &self, + node: &SyntaxNode, + offset: TextSize, + ) -> impl Iterator + '_ { + node.token_at_offset(offset) + .map(|token| self.ancestors_with_macros(token.parent())) + .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len()) + } + + fn type_of_expr(&self, expr: &ast::Expr) -> Option { + self.analyze(expr.syntax()).type_of_expr(self.db, &expr) + } + + fn type_of_pat(&self, pat: &ast::Pat) -> Option { + self.analyze(pat.syntax()).type_of_pat(self.db, &pat) + } + + fn type_of_self(&self, param: &ast::SelfParam) -> Option { + self.analyze(param.syntax()).type_of_self(self.db, ¶m) + } + + fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option { + self.analyze(call.syntax()).resolve_method_call(self.db, call) + } + + fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option { + // FIXME: this 
erases Substs + let func = self.resolve_method_call(call)?; + let ty = self.db.value_ty(func.into()); + let resolver = self.analyze(call.syntax()).resolver; + let ty = Type::new_with_resolver(self.db, &resolver, ty.value)?; + let mut res = ty.as_callable(self.db)?; + res.is_bound_method = true; + Some(res) + } + + fn resolve_field(&self, field: &ast::FieldExpr) -> Option { + self.analyze(field.syntax()).resolve_field(self.db, field) + } + + fn resolve_record_field(&self, field: &ast::RecordExprField) -> Option<(Field, Option)> { + self.analyze(field.syntax()).resolve_record_field(self.db, field) + } + + fn resolve_record_field_pat(&self, field: &ast::RecordPatField) -> Option { + self.analyze(field.syntax()).resolve_record_field_pat(self.db, field) + } + + fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option { + let sa = self.analyze(macro_call.syntax()); + let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call); + sa.resolve_macro_call(self.db, macro_call) + } + + fn resolve_path(&self, path: &ast::Path) -> Option { + self.analyze(path.syntax()).resolve_path(self.db, path) + } + + fn resolve_extern_crate(&self, extern_crate: &ast::ExternCrate) -> Option { + let krate = self.scope(extern_crate.syntax()).krate()?; + krate.dependencies(self.db).into_iter().find_map(|dep| { + if dep.name == extern_crate.name_ref()?.as_name() { + Some(dep.krate) + } else { + None + } + }) + } + + fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option { + self.analyze(record_lit.syntax()).resolve_variant(self.db, record_lit) + } + + fn lower_path(&self, path: &ast::Path) -> Option { + let src = self.find_file(path.syntax().clone()); + Path::from_src(path.clone(), &Hygiene::new(self.db.upcast(), src.file_id.into())) + } + + fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option { + self.analyze(pat.syntax()).resolve_bind_pat_to_const(self.db, pat) + } + + fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> 
Vec<(Field, Type)> { + self.analyze(literal.syntax()) + .record_literal_missing_fields(self.db, literal) + .unwrap_or_default() + } + + fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> { + self.analyze(pattern.syntax()) + .record_pattern_missing_fields(self.db, pattern) + .unwrap_or_default() + } + + fn with_ctx T, T>(&self, f: F) -> T { + let mut cache = self.s2d_cache.borrow_mut(); + let mut ctx = SourceToDefCtx { db: self.db, cache: &mut *cache }; + f(&mut ctx) + } + + fn to_module_def(&self, file: FileId) -> Option { + self.with_ctx(|ctx| ctx.file_to_def(file)).map(Module::from) + } + + fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db> { + let node = self.find_file(node.clone()); + let resolver = self.analyze2(node.as_ref(), None).resolver; + SemanticsScope { db: self.db, file_id: node.file_id, resolver } + } + + fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> { + let node = self.find_file(node.clone()); + let resolver = self.analyze2(node.as_ref(), Some(offset)).resolver; + SemanticsScope { db: self.db, file_id: node.file_id, resolver } + } + + fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> { + let file_id = self.db.lookup_intern_trait(def.id).id.file_id; + let resolver = def.id.resolver(self.db.upcast()); + SemanticsScope { db: self.db, file_id, resolver } + } + + fn analyze(&self, node: &SyntaxNode) -> SourceAnalyzer { + let src = self.find_file(node.clone()); + self.analyze2(src.as_ref(), None) + } + + fn analyze2(&self, src: InFile<&SyntaxNode>, offset: Option) -> SourceAnalyzer { + let _p = profile::span("Semantics::analyze2"); + + let container = match self.with_ctx(|ctx| ctx.find_container(src)) { + Some(it) => it, + None => return SourceAnalyzer::new_for_resolver(Resolver::default(), src), + }; + + let resolver = match container { + ChildContainer::DefWithBodyId(def) => { + return SourceAnalyzer::new_for_body(self.db, def, src, offset) + } + 
ChildContainer::TraitId(it) => it.resolver(self.db.upcast()), + ChildContainer::ImplId(it) => it.resolver(self.db.upcast()), + ChildContainer::ModuleId(it) => it.resolver(self.db.upcast()), + ChildContainer::EnumId(it) => it.resolver(self.db.upcast()), + ChildContainer::VariantId(it) => it.resolver(self.db.upcast()), + ChildContainer::TypeAliasId(it) => it.resolver(self.db.upcast()), + ChildContainer::GenericDefId(it) => it.resolver(self.db.upcast()), + }; + SourceAnalyzer::new_for_resolver(resolver, src) + } + + fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) { + assert!(root_node.parent().is_none()); + let mut cache = self.cache.borrow_mut(); + let prev = cache.insert(root_node, file_id); + assert!(prev == None || prev == Some(file_id)) + } + + fn assert_contains_node(&self, node: &SyntaxNode) { + self.find_file(node.clone()); + } + + fn lookup(&self, root_node: &SyntaxNode) -> Option { + let cache = self.cache.borrow(); + cache.get(root_node).copied() + } + + fn find_file(&self, node: SyntaxNode) -> InFile { + let root_node = find_root(&node); + let file_id = self.lookup(&root_node).unwrap_or_else(|| { + panic!( + "\n\nFailed to lookup {:?} in this Semantics.\n\ + Make sure to use only query nodes, derived from this instance of Semantics.\n\ + root node: {:?}\n\ + known nodes: {}\n\n", + node, + root_node, + self.cache + .borrow() + .keys() + .map(|it| format!("{:?}", it)) + .collect::>() + .join(", ") + ) + }); + InFile::new(file_id, node) + } + + pub fn is_unsafe_method_call(&self, method_call_expr: ast::MethodCallExpr) -> bool { + method_call_expr + .expr() + .and_then(|expr| { + let field_expr = if let ast::Expr::FieldExpr(field_expr) = expr { + field_expr + } else { + return None; + }; + let ty = self.type_of_expr(&field_expr.expr()?)?; + if !ty.is_packed(self.db) { + return None; + } + + let func = self.resolve_method_call(&method_call_expr).map(Function::from)?; + let is_unsafe = func.has_self_param(self.db) + && 
matches!(func.params(self.db).first(), Some(TypeRef::Reference(..))); + Some(is_unsafe) + }) + .unwrap_or(false) + } + + pub fn is_unsafe_ref_expr(&self, ref_expr: &ast::RefExpr) -> bool { + ref_expr + .expr() + .and_then(|expr| { + let field_expr = match expr { + ast::Expr::FieldExpr(field_expr) => field_expr, + _ => return None, + }; + let expr = field_expr.expr()?; + self.type_of_expr(&expr) + }) + // Binding a reference to a packed type is possibly unsafe. + .map(|ty| ty.is_packed(self.db)) + .unwrap_or(false) + + // FIXME This needs layout computation to be correct. It will highlight + // more than it should with the current implementation. + } + + pub fn is_unsafe_ident_pat(&self, ident_pat: &ast::IdentPat) -> bool { + if !ident_pat.ref_token().is_some() { + return false; + } + + ident_pat + .syntax() + .parent() + .and_then(|parent| { + // `IdentPat` can live under `RecordPat` directly under `RecordPatField` or + // `RecordPatFieldList`. `RecordPatField` also lives under `RecordPatFieldList`, + // so this tries to lookup the `IdentPat` anywhere along that structure to the + // `RecordPat` so we can get the containing type. + let record_pat = ast::RecordPatField::cast(parent.clone()) + .and_then(|record_pat| record_pat.syntax().parent()) + .or_else(|| Some(parent.clone())) + .and_then(|parent| { + ast::RecordPatFieldList::cast(parent)? + .syntax() + .parent() + .and_then(ast::RecordPat::cast) + }); + + // If this doesn't match a `RecordPat`, fallback to a `LetStmt` to see if + // this is initialized from a `FieldExpr`. + if let Some(record_pat) = record_pat { + self.type_of_pat(&ast::Pat::RecordPat(record_pat)) + } else if let Some(let_stmt) = ast::LetStmt::cast(parent) { + let field_expr = match let_stmt.initializer()? { + ast::Expr::FieldExpr(field_expr) => field_expr, + _ => return None, + }; + + self.type_of_expr(&field_expr.expr()?) + } else { + None + } + }) + // Binding a reference to a packed type is possibly unsafe. 
+ .map(|ty| ty.is_packed(self.db)) + .unwrap_or(false) + } +} + +pub trait ToDef: AstNode + Clone { + type Def; + + fn to_def(sema: &SemanticsImpl, src: InFile) -> Option; +} + +macro_rules! to_def_impls { + ($(($def:path, $ast:path, $meth:ident)),* ,) => {$( + impl ToDef for $ast { + type Def = $def; + fn to_def(sema: &SemanticsImpl, src: InFile) -> Option { + sema.with_ctx(|ctx| ctx.$meth(src)).map(<$def>::from) + } + } + )*} +} + +to_def_impls![ + (crate::Module, ast::Module, module_to_def), + (crate::Struct, ast::Struct, struct_to_def), + (crate::Enum, ast::Enum, enum_to_def), + (crate::Union, ast::Union, union_to_def), + (crate::Trait, ast::Trait, trait_to_def), + (crate::ImplDef, ast::Impl, impl_to_def), + (crate::TypeAlias, ast::TypeAlias, type_alias_to_def), + (crate::Const, ast::Const, const_to_def), + (crate::Static, ast::Static, static_to_def), + (crate::Function, ast::Fn, fn_to_def), + (crate::Field, ast::RecordField, record_field_to_def), + (crate::Field, ast::TupleField, tuple_field_to_def), + (crate::EnumVariant, ast::Variant, enum_variant_to_def), + (crate::TypeParam, ast::TypeParam, type_param_to_def), + (crate::MacroDef, ast::MacroCall, macro_call_to_def), // this one is dubious, not all calls are macros + (crate::Local, ast::IdentPat, bind_pat_to_def), +]; + +fn find_root(node: &SyntaxNode) -> SyntaxNode { + node.ancestors().last().unwrap() +} + +#[derive(Debug)] +pub struct SemanticsScope<'a> { + pub db: &'a dyn HirDatabase, + file_id: HirFileId, + resolver: Resolver, +} + +impl<'a> SemanticsScope<'a> { + pub fn module(&self) -> Option { + Some(Module { id: self.resolver.module()? }) + } + + pub fn krate(&self) -> Option { + Some(Crate { id: self.resolver.krate()? }) + } + + /// Note: `FxHashSet` should be treated as an opaque type, passed into `Type + // FIXME: rename to visible_traits to not repeat scope? 
+ pub fn traits_in_scope(&self) -> FxHashSet { + let resolver = &self.resolver; + resolver.traits_in_scope(self.db.upcast()) + } + + pub fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef)) { + let resolver = &self.resolver; + + resolver.process_all_names(self.db.upcast(), &mut |name, def| { + let def = match def { + resolver::ScopeDef::PerNs(it) => { + let items = ScopeDef::all_items(it); + for item in items { + f(name.clone(), item); + } + return; + } + resolver::ScopeDef::ImplSelfType(it) => ScopeDef::ImplSelfType(it.into()), + resolver::ScopeDef::AdtSelfType(it) => ScopeDef::AdtSelfType(it.into()), + resolver::ScopeDef::GenericParam(id) => ScopeDef::GenericParam(TypeParam { id }), + resolver::ScopeDef::Local(pat_id) => { + let parent = resolver.body_owner().unwrap().into(); + ScopeDef::Local(Local { parent, pat_id }) + } + }; + f(name, def) + }) + } + + /// Resolve a path as-if it was written at the given scope. This is + /// necessary a heuristic, as it doesn't take hygiene into account. + pub fn speculative_resolve(&self, path: &ast::Path) -> Option { + let hygiene = Hygiene::new(self.db.upcast(), self.file_id); + let path = Path::from_src(path.clone(), &hygiene)?; + self.resolve_hir_path(&path) + } + + pub fn resolve_hir_path(&self, path: &Path) -> Option { + resolve_hir_path(self.db, &self.resolver, path) + } + + /// Resolves a path where we know it is a qualifier of another path. + /// + /// For example, if we have: + /// ``` + /// mod my { + /// pub mod foo { + /// struct Bar; + /// } + /// + /// pub fn foo() {} + /// } + /// ``` + /// then we know that `foo` in `my::foo::Bar` refers to the module, not the function. + pub fn resolve_hir_path_qualifier(&self, path: &Path) -> Option { + resolve_hir_path_qualifier(self.db, &self.resolver, path) + } +} + +// FIXME: Change `HasSource` trait to work with `Semantics` and remove this? 
+pub fn original_range(db: &dyn HirDatabase, node: InFile<&SyntaxNode>) -> FileRange { + if let Some(range) = original_range_opt(db, node) { + let original_file = range.file_id.original_file(db.upcast()); + if range.file_id == original_file.into() { + return FileRange { file_id: original_file, range: range.value }; + } + + log::error!("Fail to mapping up more for {:?}", range); + return FileRange { file_id: range.file_id.original_file(db.upcast()), range: range.value }; + } + + // Fall back to whole macro call + if let Some(expansion) = node.file_id.expansion_info(db.upcast()) { + if let Some(call_node) = expansion.call_node() { + return FileRange { + file_id: call_node.file_id.original_file(db.upcast()), + range: call_node.value.text_range(), + }; + } + } + + FileRange { file_id: node.file_id.original_file(db.upcast()), range: node.value.text_range() } +} + +fn original_range_opt( + db: &dyn HirDatabase, + node: InFile<&SyntaxNode>, +) -> Option> { + let expansion = node.file_id.expansion_info(db.upcast())?; + + // the input node has only one token ? + let single = skip_trivia_token(node.value.first_token()?, Direction::Next)? + == skip_trivia_token(node.value.last_token()?, Direction::Prev)?; + + Some(node.value.descendants().find_map(|it| { + let first = skip_trivia_token(it.first_token()?, Direction::Next)?; + let first = ascend_call_token(db, &expansion, node.with_value(first))?; + + let last = skip_trivia_token(it.last_token()?, Direction::Prev)?; + let last = ascend_call_token(db, &expansion, node.with_value(last))?; + + if (!single && first == last) || (first.file_id != last.file_id) { + return None; + } + + Some(first.with_value(first.value.text_range().cover(last.value.text_range()))) + })?) 
+} + +fn ascend_call_token( + db: &dyn HirDatabase, + expansion: &ExpansionInfo, + token: InFile, +) -> Option> { + let (mapped, origin) = expansion.map_token_up(token.as_ref())?; + if origin != Origin::Call { + return None; + } + if let Some(info) = mapped.file_id.expansion_info(db.upcast()) { + return ascend_call_token(db, &info, mapped); + } + Some(mapped) +} diff --git a/crates/hir/src/semantics/source_to_def.rs b/crates/hir/src/semantics/source_to_def.rs new file mode 100644 index 0000000000..5918b9541b --- /dev/null +++ b/crates/hir/src/semantics/source_to_def.rs @@ -0,0 +1,275 @@ +//! Maps *syntax* of various definitions to their semantic ids. + +use base_db::FileId; +use hir_def::{ + child_by_source::ChildBySource, + dyn_map::DynMap, + expr::PatId, + keys::{self, Key}, + ConstId, DefWithBodyId, EnumId, EnumVariantId, FieldId, FunctionId, GenericDefId, ImplId, + ModuleId, StaticId, StructId, TraitId, TypeAliasId, TypeParamId, UnionId, VariantId, +}; +use hir_expand::{name::AsName, AstId, MacroDefKind}; +use rustc_hash::FxHashMap; +use stdx::impl_from; +use syntax::{ + ast::{self, NameOwner}, + match_ast, AstNode, SyntaxNode, +}; + +use crate::{db::HirDatabase, InFile, MacroDefId}; + +pub(super) type SourceToDefCache = FxHashMap; + +pub(super) struct SourceToDefCtx<'a, 'b> { + pub(super) db: &'b dyn HirDatabase, + pub(super) cache: &'a mut SourceToDefCache, +} + +impl SourceToDefCtx<'_, '_> { + pub(super) fn file_to_def(&mut self, file: FileId) -> Option { + let _p = profile::span("SourceBinder::to_module_def"); + let (krate, local_id) = self.db.relevant_crates(file).iter().find_map(|&crate_id| { + let crate_def_map = self.db.crate_def_map(crate_id); + let local_id = crate_def_map.modules_for_file(file).next()?; + Some((crate_id, local_id)) + })?; + Some(ModuleId { krate, local_id }) + } + + pub(super) fn module_to_def(&mut self, src: InFile) -> Option { + let _p = profile::span("module_to_def"); + let parent_declaration = src + .as_ref() + .map(|it| 
it.syntax()) + .cloned() + .ancestors_with_macros(self.db.upcast()) + .skip(1) + .find_map(|it| { + let m = ast::Module::cast(it.value.clone())?; + Some(it.with_value(m)) + }); + + let parent_module = match parent_declaration { + Some(parent_declaration) => self.module_to_def(parent_declaration), + None => { + let file_id = src.file_id.original_file(self.db.upcast()); + self.file_to_def(file_id) + } + }?; + + let child_name = src.value.name()?.as_name(); + let def_map = self.db.crate_def_map(parent_module.krate); + let child_id = *def_map[parent_module.local_id].children.get(&child_name)?; + Some(ModuleId { krate: parent_module.krate, local_id: child_id }) + } + + pub(super) fn trait_to_def(&mut self, src: InFile) -> Option { + self.to_def(src, keys::TRAIT) + } + pub(super) fn impl_to_def(&mut self, src: InFile) -> Option { + self.to_def(src, keys::IMPL) + } + pub(super) fn fn_to_def(&mut self, src: InFile) -> Option { + self.to_def(src, keys::FUNCTION) + } + pub(super) fn struct_to_def(&mut self, src: InFile) -> Option { + self.to_def(src, keys::STRUCT) + } + pub(super) fn enum_to_def(&mut self, src: InFile) -> Option { + self.to_def(src, keys::ENUM) + } + pub(super) fn union_to_def(&mut self, src: InFile) -> Option { + self.to_def(src, keys::UNION) + } + pub(super) fn static_to_def(&mut self, src: InFile) -> Option { + self.to_def(src, keys::STATIC) + } + pub(super) fn const_to_def(&mut self, src: InFile) -> Option { + self.to_def(src, keys::CONST) + } + pub(super) fn type_alias_to_def(&mut self, src: InFile) -> Option { + self.to_def(src, keys::TYPE_ALIAS) + } + pub(super) fn record_field_to_def(&mut self, src: InFile) -> Option { + self.to_def(src, keys::RECORD_FIELD) + } + pub(super) fn tuple_field_to_def(&mut self, src: InFile) -> Option { + self.to_def(src, keys::TUPLE_FIELD) + } + pub(super) fn enum_variant_to_def( + &mut self, + src: InFile, + ) -> Option { + self.to_def(src, keys::VARIANT) + } + pub(super) fn bind_pat_to_def( + &mut self, + src: InFile, + 
) -> Option<(DefWithBodyId, PatId)> { + let container = self.find_pat_container(src.as_ref().map(|it| it.syntax()))?; + let (_body, source_map) = self.db.body_with_source_map(container); + let src = src.map(ast::Pat::from); + let pat_id = source_map.node_pat(src.as_ref())?; + Some((container, pat_id)) + } + + fn to_def( + &mut self, + src: InFile, + key: Key, + ) -> Option { + let container = self.find_container(src.as_ref().map(|it| it.syntax()))?; + let db = self.db; + let dyn_map = + &*self.cache.entry(container).or_insert_with(|| container.child_by_source(db)); + dyn_map[key].get(&src).copied() + } + + pub(super) fn type_param_to_def(&mut self, src: InFile) -> Option { + let container: ChildContainer = + self.find_type_param_container(src.as_ref().map(|it| it.syntax()))?.into(); + let db = self.db; + let dyn_map = + &*self.cache.entry(container).or_insert_with(|| container.child_by_source(db)); + dyn_map[keys::TYPE_PARAM].get(&src).copied() + } + + // FIXME: use DynMap as well? + pub(super) fn macro_call_to_def(&mut self, src: InFile) -> Option { + let kind = MacroDefKind::Declarative; + let file_id = src.file_id.original_file(self.db.upcast()); + let krate = self.file_to_def(file_id)?.krate; + let file_ast_id = self.db.ast_id_map(src.file_id).ast_id(&src.value); + let ast_id = Some(AstId::new(src.file_id, file_ast_id)); + Some(MacroDefId { krate: Some(krate), ast_id, kind, local_inner: false }) + } + + pub(super) fn find_container(&mut self, src: InFile<&SyntaxNode>) -> Option { + for container in src.cloned().ancestors_with_macros(self.db.upcast()).skip(1) { + let res: ChildContainer = match_ast! 
{ + match (container.value) { + ast::Module(it) => { + let def = self.module_to_def(container.with_value(it))?; + def.into() + }, + ast::Trait(it) => { + let def = self.trait_to_def(container.with_value(it))?; + def.into() + }, + ast::Impl(it) => { + let def = self.impl_to_def(container.with_value(it))?; + def.into() + }, + ast::Fn(it) => { + let def = self.fn_to_def(container.with_value(it))?; + DefWithBodyId::from(def).into() + }, + ast::Struct(it) => { + let def = self.struct_to_def(container.with_value(it))?; + VariantId::from(def).into() + }, + ast::Enum(it) => { + let def = self.enum_to_def(container.with_value(it))?; + def.into() + }, + ast::Union(it) => { + let def = self.union_to_def(container.with_value(it))?; + VariantId::from(def).into() + }, + ast::Static(it) => { + let def = self.static_to_def(container.with_value(it))?; + DefWithBodyId::from(def).into() + }, + ast::Const(it) => { + let def = self.const_to_def(container.with_value(it))?; + DefWithBodyId::from(def).into() + }, + ast::TypeAlias(it) => { + let def = self.type_alias_to_def(container.with_value(it))?; + def.into() + }, + _ => continue, + } + }; + return Some(res); + } + + let def = self.file_to_def(src.file_id.original_file(self.db.upcast()))?; + Some(def.into()) + } + + fn find_type_param_container(&mut self, src: InFile<&SyntaxNode>) -> Option { + for container in src.cloned().ancestors_with_macros(self.db.upcast()).skip(1) { + let res: GenericDefId = match_ast! 
{ + match (container.value) { + ast::Fn(it) => self.fn_to_def(container.with_value(it))?.into(), + ast::Struct(it) => self.struct_to_def(container.with_value(it))?.into(), + ast::Enum(it) => self.enum_to_def(container.with_value(it))?.into(), + ast::Trait(it) => self.trait_to_def(container.with_value(it))?.into(), + ast::TypeAlias(it) => self.type_alias_to_def(container.with_value(it))?.into(), + ast::Impl(it) => self.impl_to_def(container.with_value(it))?.into(), + _ => continue, + } + }; + return Some(res); + } + None + } + + fn find_pat_container(&mut self, src: InFile<&SyntaxNode>) -> Option { + for container in src.cloned().ancestors_with_macros(self.db.upcast()).skip(1) { + let res: DefWithBodyId = match_ast! { + match (container.value) { + ast::Const(it) => self.const_to_def(container.with_value(it))?.into(), + ast::Static(it) => self.static_to_def(container.with_value(it))?.into(), + ast::Fn(it) => self.fn_to_def(container.with_value(it))?.into(), + _ => continue, + } + }; + return Some(res); + } + None + } +} + +#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] +pub(crate) enum ChildContainer { + DefWithBodyId(DefWithBodyId), + ModuleId(ModuleId), + TraitId(TraitId), + ImplId(ImplId), + EnumId(EnumId), + VariantId(VariantId), + TypeAliasId(TypeAliasId), + /// XXX: this might be the same def as, for example an `EnumId`. However, + /// here the children generic parameters, and not, eg enum variants. + GenericDefId(GenericDefId), +} +impl_from! 
{ + DefWithBodyId, + ModuleId, + TraitId, + ImplId, + EnumId, + VariantId, + TypeAliasId, + GenericDefId + for ChildContainer +} + +impl ChildContainer { + fn child_by_source(self, db: &dyn HirDatabase) -> DynMap { + let db = db.upcast(); + match self { + ChildContainer::DefWithBodyId(it) => it.child_by_source(db), + ChildContainer::ModuleId(it) => it.child_by_source(db), + ChildContainer::TraitId(it) => it.child_by_source(db), + ChildContainer::ImplId(it) => it.child_by_source(db), + ChildContainer::EnumId(it) => it.child_by_source(db), + ChildContainer::VariantId(it) => it.child_by_source(db), + ChildContainer::TypeAliasId(_) => DynMap::default(), + ChildContainer::GenericDefId(it) => it.child_by_source(db), + } + } +} diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs new file mode 100644 index 0000000000..8750584f94 --- /dev/null +++ b/crates/hir/src/source_analyzer.rs @@ -0,0 +1,534 @@ +//! Lookup hir elements using positions in the source code. This is a lossy +//! transformation: in general, a single source might correspond to several +//! modules, functions, etc, due to macros, cfgs and `#[path=]` attributes on +//! modules. +//! +//! So, this modules should not be used during hir construction, it exists +//! purely for "IDE needs". 
+use std::{iter::once, sync::Arc}; + +use hir_def::{ + body::{ + scope::{ExprScopes, ScopeId}, + Body, BodySourceMap, + }, + expr::{ExprId, Pat, PatId}, + resolver::{resolver_for_scope, Resolver, TypeNs, ValueNs}, + AsMacroCall, DefWithBodyId, FieldId, FunctionId, LocalFieldId, VariantId, +}; +use hir_expand::{hygiene::Hygiene, name::AsName, HirFileId, InFile}; +use hir_ty::{ + diagnostics::{record_literal_missing_fields, record_pattern_missing_fields}, + InferenceResult, Substs, Ty, +}; +use syntax::{ + ast::{self, AstNode}, + SyntaxNode, TextRange, TextSize, +}; + +use crate::{ + db::HirDatabase, semantics::PathResolution, Adt, Const, EnumVariant, Field, Function, Local, + MacroDef, ModPath, ModuleDef, Path, PathKind, Static, Struct, Trait, Type, TypeAlias, + TypeParam, +}; +use base_db::CrateId; + +/// `SourceAnalyzer` is a convenience wrapper which exposes HIR API in terms of +/// original source files. It should not be used inside the HIR itself. +#[derive(Debug)] +pub(crate) struct SourceAnalyzer { + file_id: HirFileId, + pub(crate) resolver: Resolver, + body: Option>, + body_source_map: Option>, + infer: Option>, + scopes: Option>, +} + +impl SourceAnalyzer { + pub(crate) fn new_for_body( + db: &dyn HirDatabase, + def: DefWithBodyId, + node: InFile<&SyntaxNode>, + offset: Option, + ) -> SourceAnalyzer { + let (body, source_map) = db.body_with_source_map(def); + let scopes = db.expr_scopes(def); + let scope = match offset { + None => scope_for(&scopes, &source_map, node), + Some(offset) => scope_for_offset(db, &scopes, &source_map, node.with_value(offset)), + }; + let resolver = resolver_for_scope(db.upcast(), def, scope); + SourceAnalyzer { + resolver, + body: Some(body), + body_source_map: Some(source_map), + infer: Some(db.infer(def)), + scopes: Some(scopes), + file_id: node.file_id, + } + } + + pub(crate) fn new_for_resolver( + resolver: Resolver, + node: InFile<&SyntaxNode>, + ) -> SourceAnalyzer { + SourceAnalyzer { + resolver, + body: None, + 
body_source_map: None, + infer: None, + scopes: None, + file_id: node.file_id, + } + } + + fn expr_id(&self, db: &dyn HirDatabase, expr: &ast::Expr) -> Option { + let src = match expr { + ast::Expr::MacroCall(call) => { + self.expand_expr(db, InFile::new(self.file_id, call.clone()))? + } + _ => InFile::new(self.file_id, expr.clone()), + }; + let sm = self.body_source_map.as_ref()?; + sm.node_expr(src.as_ref()) + } + + fn pat_id(&self, pat: &ast::Pat) -> Option { + // FIXME: macros, see `expr_id` + let src = InFile { file_id: self.file_id, value: pat }; + self.body_source_map.as_ref()?.node_pat(src) + } + + fn expand_expr( + &self, + db: &dyn HirDatabase, + expr: InFile, + ) -> Option> { + let macro_file = self.body_source_map.as_ref()?.node_macro_file(expr.as_ref())?; + let expanded = db.parse_or_expand(macro_file)?; + + let res = match ast::MacroCall::cast(expanded.clone()) { + Some(call) => self.expand_expr(db, InFile::new(macro_file, call))?, + _ => InFile::new(macro_file, ast::Expr::cast(expanded)?), + }; + Some(res) + } + + pub(crate) fn type_of_expr(&self, db: &dyn HirDatabase, expr: &ast::Expr) -> Option { + let expr_id = self.expr_id(db, expr)?; + let ty = self.infer.as_ref()?[expr_id].clone(); + Type::new_with_resolver(db, &self.resolver, ty) + } + + pub(crate) fn type_of_pat(&self, db: &dyn HirDatabase, pat: &ast::Pat) -> Option { + let pat_id = self.pat_id(pat)?; + let ty = self.infer.as_ref()?[pat_id].clone(); + Type::new_with_resolver(db, &self.resolver, ty) + } + + pub(crate) fn type_of_self( + &self, + db: &dyn HirDatabase, + param: &ast::SelfParam, + ) -> Option { + let src = InFile { file_id: self.file_id, value: param }; + let pat_id = self.body_source_map.as_ref()?.node_self_param(src)?; + let ty = self.infer.as_ref()?[pat_id].clone(); + Type::new_with_resolver(db, &self.resolver, ty) + } + + pub(crate) fn resolve_method_call( + &self, + db: &dyn HirDatabase, + call: &ast::MethodCallExpr, + ) -> Option { + let expr_id = self.expr_id(db, 
&call.clone().into())?; + self.infer.as_ref()?.method_resolution(expr_id) + } + + pub(crate) fn resolve_field( + &self, + db: &dyn HirDatabase, + field: &ast::FieldExpr, + ) -> Option { + let expr_id = self.expr_id(db, &field.clone().into())?; + self.infer.as_ref()?.field_resolution(expr_id).map(|it| it.into()) + } + + pub(crate) fn resolve_record_field( + &self, + db: &dyn HirDatabase, + field: &ast::RecordExprField, + ) -> Option<(Field, Option)> { + let expr = field.expr()?; + let expr_id = self.expr_id(db, &expr)?; + let local = if field.name_ref().is_some() { + None + } else { + let local_name = field.field_name()?.as_name(); + let path = ModPath::from_segments(PathKind::Plain, once(local_name)); + match self.resolver.resolve_path_in_value_ns_fully(db.upcast(), &path) { + Some(ValueNs::LocalBinding(pat_id)) => { + Some(Local { pat_id, parent: self.resolver.body_owner()? }) + } + _ => None, + } + }; + let struct_field = self.infer.as_ref()?.record_field_resolution(expr_id)?; + Some((struct_field.into(), local)) + } + + pub(crate) fn resolve_record_field_pat( + &self, + _db: &dyn HirDatabase, + field: &ast::RecordPatField, + ) -> Option { + let pat_id = self.pat_id(&field.pat()?)?; + let struct_field = self.infer.as_ref()?.record_field_pat_resolution(pat_id)?; + Some(struct_field.into()) + } + + pub(crate) fn resolve_macro_call( + &self, + db: &dyn HirDatabase, + macro_call: InFile<&ast::MacroCall>, + ) -> Option { + let hygiene = Hygiene::new(db.upcast(), macro_call.file_id); + let path = macro_call.value.path().and_then(|ast| Path::from_src(ast, &hygiene))?; + self.resolver.resolve_path_as_macro(db.upcast(), path.mod_path()).map(|it| it.into()) + } + + pub(crate) fn resolve_bind_pat_to_const( + &self, + db: &dyn HirDatabase, + pat: &ast::IdentPat, + ) -> Option { + let pat_id = self.pat_id(&pat.clone().into())?; + let body = self.body.as_ref()?; + let path = match &body[pat_id] { + Pat::Path(path) => path, + _ => return None, + }; + let res = 
resolve_hir_path(db, &self.resolver, &path)?; + match res { + PathResolution::Def(def) => Some(def), + _ => None, + } + } + + pub(crate) fn resolve_path( + &self, + db: &dyn HirDatabase, + path: &ast::Path, + ) -> Option { + if let Some(path_expr) = path.syntax().parent().and_then(ast::PathExpr::cast) { + let expr_id = self.expr_id(db, &path_expr.into())?; + if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_expr(expr_id) { + return Some(PathResolution::AssocItem(assoc.into())); + } + if let Some(VariantId::EnumVariantId(variant)) = + self.infer.as_ref()?.variant_resolution_for_expr(expr_id) + { + return Some(PathResolution::Def(ModuleDef::EnumVariant(variant.into()))); + } + } + + if let Some(path_pat) = path.syntax().parent().and_then(ast::PathPat::cast) { + let pat_id = self.pat_id(&path_pat.into())?; + if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_pat(pat_id) { + return Some(PathResolution::AssocItem(assoc.into())); + } + if let Some(VariantId::EnumVariantId(variant)) = + self.infer.as_ref()?.variant_resolution_for_pat(pat_id) + { + return Some(PathResolution::Def(ModuleDef::EnumVariant(variant.into()))); + } + } + + if let Some(rec_lit) = path.syntax().parent().and_then(ast::RecordExpr::cast) { + let expr_id = self.expr_id(db, &rec_lit.into())?; + if let Some(VariantId::EnumVariantId(variant)) = + self.infer.as_ref()?.variant_resolution_for_expr(expr_id) + { + return Some(PathResolution::Def(ModuleDef::EnumVariant(variant.into()))); + } + } + + if let Some(rec_pat) = path.syntax().parent().and_then(ast::RecordPat::cast) { + let pat_id = self.pat_id(&rec_pat.into())?; + if let Some(VariantId::EnumVariantId(variant)) = + self.infer.as_ref()?.variant_resolution_for_pat(pat_id) + { + return Some(PathResolution::Def(ModuleDef::EnumVariant(variant.into()))); + } + } + + // This must be a normal source file rather than macro file. 
+ let hir_path = Path::from_src(path.clone(), &Hygiene::new(db.upcast(), self.file_id))?; + + // Case where path is a qualifier of another path, e.g. foo::bar::Baz where we + // trying to resolve foo::bar. + if let Some(outer_path) = path.syntax().parent().and_then(ast::Path::cast) { + if let Some(qualifier) = outer_path.qualifier() { + if path == &qualifier { + return resolve_hir_path_qualifier(db, &self.resolver, &hir_path); + } + } + } + + resolve_hir_path(db, &self.resolver, &hir_path) + } + + pub(crate) fn record_literal_missing_fields( + &self, + db: &dyn HirDatabase, + literal: &ast::RecordExpr, + ) -> Option> { + let krate = self.resolver.krate()?; + let body = self.body.as_ref()?; + let infer = self.infer.as_ref()?; + + let expr_id = self.expr_id(db, &literal.clone().into())?; + let substs = match &infer.type_of_expr[expr_id] { + Ty::Apply(a_ty) => &a_ty.parameters, + _ => return None, + }; + + let (variant, missing_fields, _exhaustive) = + record_literal_missing_fields(db, infer, expr_id, &body[expr_id])?; + let res = self.missing_fields(db, krate, substs, variant, missing_fields); + Some(res) + } + + pub(crate) fn record_pattern_missing_fields( + &self, + db: &dyn HirDatabase, + pattern: &ast::RecordPat, + ) -> Option> { + let krate = self.resolver.krate()?; + let body = self.body.as_ref()?; + let infer = self.infer.as_ref()?; + + let pat_id = self.pat_id(&pattern.clone().into())?; + let substs = match &infer.type_of_pat[pat_id] { + Ty::Apply(a_ty) => &a_ty.parameters, + _ => return None, + }; + + let (variant, missing_fields, _exhaustive) = + record_pattern_missing_fields(db, infer, pat_id, &body[pat_id])?; + let res = self.missing_fields(db, krate, substs, variant, missing_fields); + Some(res) + } + + fn missing_fields( + &self, + db: &dyn HirDatabase, + krate: CrateId, + substs: &Substs, + variant: VariantId, + missing_fields: Vec, + ) -> Vec<(Field, Type)> { + let field_types = db.field_types(variant); + + missing_fields + .into_iter() + 
.map(|local_id| { + let field = FieldId { parent: variant, local_id }; + let ty = field_types[local_id].clone().subst(substs); + (field.into(), Type::new_with_resolver_inner(db, krate, &self.resolver, ty)) + }) + .collect() + } + + pub(crate) fn expand( + &self, + db: &dyn HirDatabase, + macro_call: InFile<&ast::MacroCall>, + ) -> Option { + let krate = self.resolver.krate()?; + let macro_call_id = macro_call.as_call_id(db.upcast(), krate, |path| { + self.resolver.resolve_path_as_macro(db.upcast(), &path) + })?; + Some(macro_call_id.as_file()).filter(|it| it.expansion_level(db.upcast()) < 64) + } + + pub(crate) fn resolve_variant( + &self, + db: &dyn HirDatabase, + record_lit: ast::RecordExpr, + ) -> Option { + let infer = self.infer.as_ref()?; + let expr_id = self.expr_id(db, &record_lit.into())?; + infer.variant_resolution_for_expr(expr_id) + } +} + +fn scope_for( + scopes: &ExprScopes, + source_map: &BodySourceMap, + node: InFile<&SyntaxNode>, +) -> Option { + node.value + .ancestors() + .filter_map(ast::Expr::cast) + .filter_map(|it| source_map.node_expr(InFile::new(node.file_id, &it))) + .find_map(|it| scopes.scope_for(it)) +} + +fn scope_for_offset( + db: &dyn HirDatabase, + scopes: &ExprScopes, + source_map: &BodySourceMap, + offset: InFile, +) -> Option { + scopes + .scope_by_expr() + .iter() + .filter_map(|(id, scope)| { + let source = source_map.expr_syntax(*id).ok()?; + // FIXME: correctly handle macro expansion + if source.file_id != offset.file_id { + return None; + } + let root = source.file_syntax(db.upcast()); + let node = source.value.to_node(&root); + Some((node.syntax().text_range(), scope)) + }) + // find containing scope + .min_by_key(|(expr_range, _scope)| { + ( + !(expr_range.start() <= offset.value && offset.value <= expr_range.end()), + expr_range.len(), + ) + }) + .map(|(expr_range, scope)| { + adjust(db, scopes, source_map, expr_range, offset).unwrap_or(*scope) + }) +} + +// XXX: during completion, cursor might be outside of any 
particular +// expression. Try to figure out the correct scope... +fn adjust( + db: &dyn HirDatabase, + scopes: &ExprScopes, + source_map: &BodySourceMap, + expr_range: TextRange, + offset: InFile, +) -> Option { + let child_scopes = scopes + .scope_by_expr() + .iter() + .filter_map(|(id, scope)| { + let source = source_map.expr_syntax(*id).ok()?; + // FIXME: correctly handle macro expansion + if source.file_id != offset.file_id { + return None; + } + let root = source.file_syntax(db.upcast()); + let node = source.value.to_node(&root); + Some((node.syntax().text_range(), scope)) + }) + .filter(|&(range, _)| { + range.start() <= offset.value && expr_range.contains_range(range) && range != expr_range + }); + + child_scopes + .max_by(|&(r1, _), &(r2, _)| { + if r1.contains_range(r2) { + std::cmp::Ordering::Greater + } else if r2.contains_range(r1) { + std::cmp::Ordering::Less + } else { + r1.start().cmp(&r2.start()) + } + }) + .map(|(_ptr, scope)| *scope) +} + +pub(crate) fn resolve_hir_path( + db: &dyn HirDatabase, + resolver: &Resolver, + path: &Path, +) -> Option { + let types = + resolver.resolve_path_in_type_ns_fully(db.upcast(), path.mod_path()).map(|ty| match ty { + TypeNs::SelfType(it) => PathResolution::SelfType(it.into()), + TypeNs::GenericParam(id) => PathResolution::TypeParam(TypeParam { id }), + TypeNs::AdtSelfType(it) | TypeNs::AdtId(it) => { + PathResolution::Def(Adt::from(it).into()) + } + TypeNs::EnumVariantId(it) => PathResolution::Def(EnumVariant::from(it).into()), + TypeNs::TypeAliasId(it) => PathResolution::Def(TypeAlias::from(it).into()), + TypeNs::BuiltinType(it) => PathResolution::Def(it.into()), + TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()), + }); + + let body_owner = resolver.body_owner(); + let values = + resolver.resolve_path_in_value_ns_fully(db.upcast(), path.mod_path()).and_then(|val| { + let res = match val { + ValueNs::LocalBinding(pat_id) => { + let var = Local { parent: body_owner?.into(), pat_id }; + 
PathResolution::Local(var) + } + ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()), + ValueNs::ConstId(it) => PathResolution::Def(Const::from(it).into()), + ValueNs::StaticId(it) => PathResolution::Def(Static::from(it).into()), + ValueNs::StructId(it) => PathResolution::Def(Struct::from(it).into()), + ValueNs::EnumVariantId(it) => PathResolution::Def(EnumVariant::from(it).into()), + ValueNs::ImplSelf(impl_id) => PathResolution::SelfType(impl_id.into()), + }; + Some(res) + }); + + let items = resolver + .resolve_module_path_in_items(db.upcast(), path.mod_path()) + .take_types() + .map(|it| PathResolution::Def(it.into())); + + types.or(values).or(items).or_else(|| { + resolver + .resolve_path_as_macro(db.upcast(), path.mod_path()) + .map(|def| PathResolution::Macro(def.into())) + }) +} + +/// Resolves a path where we know it is a qualifier of another path. +/// +/// For example, if we have: +/// ``` +/// mod my { +/// pub mod foo { +/// struct Bar; +/// } +/// +/// pub fn foo() {} +/// } +/// ``` +/// then we know that `foo` in `my::foo::Bar` refers to the module, not the function. 
+pub(crate) fn resolve_hir_path_qualifier( + db: &dyn HirDatabase, + resolver: &Resolver, + path: &Path, +) -> Option { + let items = resolver + .resolve_module_path_in_items(db.upcast(), path.mod_path()) + .take_types() + .map(|it| PathResolution::Def(it.into())); + + if items.is_some() { + return items; + } + + resolver.resolve_path_in_type_ns_fully(db.upcast(), path.mod_path()).map(|ty| match ty { + TypeNs::SelfType(it) => PathResolution::SelfType(it.into()), + TypeNs::GenericParam(id) => PathResolution::TypeParam(TypeParam { id }), + TypeNs::AdtSelfType(it) | TypeNs::AdtId(it) => PathResolution::Def(Adt::from(it).into()), + TypeNs::EnumVariantId(it) => PathResolution::Def(EnumVariant::from(it).into()), + TypeNs::TypeAliasId(it) => PathResolution::Def(TypeAlias::from(it).into()), + TypeNs::BuiltinType(it) => PathResolution::Def(it.into()), + TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()), + }) +} diff --git a/crates/hir_def/Cargo.toml b/crates/hir_def/Cargo.toml new file mode 100644 index 0000000000..403bc2aff3 --- /dev/null +++ b/crates/hir_def/Cargo.toml @@ -0,0 +1,35 @@ +[package] +name = "hir_def" +version = "0.0.0" +license = "MIT OR Apache-2.0" +authors = ["rust-analyzer developers"] +edition = "2018" + +[lib] +doctest = false + +[dependencies] +log = "0.4.8" +once_cell = "1.3.1" +rustc-hash = "1.1.0" +either = "1.5.3" +anymap = "0.12.1" +drop_bomb = "0.1.4" +fst = { version = "0.4", default-features = false } +itertools = "0.9.0" +indexmap = "1.4.0" +smallvec = "1.4.0" + +stdx = { path = "../stdx" } +arena = { path = "../arena" } +base_db = { path = "../base_db" } +syntax = { path = "../syntax" } +profile = { path = "../profile" } +hir_expand = { path = "../hir_expand" } +test_utils = { path = "../test_utils" } +mbe = { path = "../mbe" } +cfg = { path = "../cfg" } +tt = { path = "../tt" } + +[dev-dependencies] +expect = { path = "../expect" } diff --git a/crates/hir_def/src/adt.rs b/crates/hir_def/src/adt.rs new file mode 100644 index 
0000000000..d69ff2fc79 --- /dev/null +++ b/crates/hir_def/src/adt.rs @@ -0,0 +1,329 @@ +//! Defines hir-level representation of structs, enums and unions + +use std::sync::Arc; + +use arena::{map::ArenaMap, Arena}; +use either::Either; +use hir_expand::{ + name::{AsName, Name}, + InFile, +}; +use syntax::ast::{self, NameOwner, VisibilityOwner}; +use tt::{Delimiter, DelimiterKind, Leaf, Subtree, TokenTree}; + +use crate::{ + body::{CfgExpander, LowerCtx}, + db::DefDatabase, + item_tree::{AttrOwner, Field, Fields, ItemTree, ModItem}, + src::HasChildSource, + src::HasSource, + trace::Trace, + type_ref::TypeRef, + visibility::RawVisibility, + EnumId, HasModule, LocalEnumVariantId, LocalFieldId, Lookup, ModuleId, StructId, UnionId, + VariantId, +}; +use cfg::CfgOptions; + +/// Note that we use `StructData` for unions as well! +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct StructData { + pub name: Name, + pub variant_data: Arc, + pub repr: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct EnumData { + pub name: Name, + pub variants: Arena, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct EnumVariantData { + pub name: Name, + pub variant_data: Arc, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum VariantData { + Record(Arena), + Tuple(Arena), + Unit, +} + +/// A single field of an enum variant or struct +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct FieldData { + pub name: Name, + pub type_ref: TypeRef, + pub visibility: RawVisibility, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum ReprKind { + Packed, + Other, +} + +fn repr_from_value(item_tree: &ItemTree, of: AttrOwner) -> Option { + item_tree.attrs(of).by_key("repr").tt_values().find_map(parse_repr_tt) +} + +fn parse_repr_tt(tt: &Subtree) -> Option { + match tt.delimiter { + Some(Delimiter { kind: DelimiterKind::Parenthesis, .. }) => {} + _ => return None, + } + + let mut it = tt.token_trees.iter(); + match it.next()? 
{ + TokenTree::Leaf(Leaf::Ident(ident)) if ident.text == "packed" => Some(ReprKind::Packed), + _ => Some(ReprKind::Other), + } +} + +impl StructData { + pub(crate) fn struct_data_query(db: &dyn DefDatabase, id: StructId) -> Arc { + let loc = id.lookup(db); + let item_tree = db.item_tree(loc.id.file_id); + let repr = repr_from_value(&item_tree, ModItem::from(loc.id.value).into()); + let cfg_options = db.crate_graph()[loc.container.module(db).krate].cfg_options.clone(); + + let strukt = &item_tree[loc.id.value]; + let variant_data = lower_fields(&item_tree, &cfg_options, &strukt.fields); + Arc::new(StructData { + name: strukt.name.clone(), + variant_data: Arc::new(variant_data), + repr, + }) + } + pub(crate) fn union_data_query(db: &dyn DefDatabase, id: UnionId) -> Arc { + let loc = id.lookup(db); + let item_tree = db.item_tree(loc.id.file_id); + let repr = repr_from_value(&item_tree, ModItem::from(loc.id.value).into()); + let cfg_options = db.crate_graph()[loc.container.module(db).krate].cfg_options.clone(); + + let union = &item_tree[loc.id.value]; + let variant_data = lower_fields(&item_tree, &cfg_options, &union.fields); + + Arc::new(StructData { + name: union.name.clone(), + variant_data: Arc::new(variant_data), + repr, + }) + } +} + +impl EnumData { + pub(crate) fn enum_data_query(db: &dyn DefDatabase, e: EnumId) -> Arc { + let loc = e.lookup(db); + let item_tree = db.item_tree(loc.id.file_id); + let cfg_options = db.crate_graph()[loc.container.module(db).krate].cfg_options.clone(); + + let enum_ = &item_tree[loc.id.value]; + let mut variants = Arena::new(); + for var_id in enum_.variants.clone() { + if item_tree.attrs(var_id.into()).is_cfg_enabled(&cfg_options) { + let var = &item_tree[var_id]; + let var_data = lower_fields(&item_tree, &cfg_options, &var.fields); + + variants.alloc(EnumVariantData { + name: var.name.clone(), + variant_data: Arc::new(var_data), + }); + } + } + + Arc::new(EnumData { name: enum_.name.clone(), variants }) + } + + pub fn 
variant(&self, name: &Name) -> Option { + let (id, _) = self.variants.iter().find(|(_id, data)| &data.name == name)?; + Some(id) + } +} + +impl HasChildSource for EnumId { + type ChildId = LocalEnumVariantId; + type Value = ast::Variant; + fn child_source(&self, db: &dyn DefDatabase) -> InFile> { + let src = self.lookup(db).source(db); + let mut trace = Trace::new_for_map(); + lower_enum(db, &mut trace, &src, self.lookup(db).container.module(db)); + src.with_value(trace.into_map()) + } +} + +fn lower_enum( + db: &dyn DefDatabase, + trace: &mut Trace, + ast: &InFile, + module_id: ModuleId, +) { + let expander = CfgExpander::new(db, ast.file_id, module_id.krate); + let variants = ast + .value + .variant_list() + .into_iter() + .flat_map(|it| it.variants()) + .filter(|var| expander.is_cfg_enabled(var)); + for var in variants { + trace.alloc( + || var.clone(), + || EnumVariantData { + name: var.name().map_or_else(Name::missing, |it| it.as_name()), + variant_data: Arc::new(VariantData::new(db, ast.with_value(var.kind()), module_id)), + }, + ); + } +} + +impl VariantData { + fn new(db: &dyn DefDatabase, flavor: InFile, module_id: ModuleId) -> Self { + let mut expander = CfgExpander::new(db, flavor.file_id, module_id.krate); + let mut trace = Trace::new_for_arena(); + match lower_struct(db, &mut expander, &mut trace, &flavor) { + StructKind::Tuple => VariantData::Tuple(trace.into_arena()), + StructKind::Record => VariantData::Record(trace.into_arena()), + StructKind::Unit => VariantData::Unit, + } + } + + pub fn fields(&self) -> &Arena { + const EMPTY: &Arena = &Arena::new(); + match &self { + VariantData::Record(fields) | VariantData::Tuple(fields) => fields, + _ => EMPTY, + } + } + + pub fn field(&self, name: &Name) -> Option { + self.fields().iter().find_map(|(id, data)| if &data.name == name { Some(id) } else { None }) + } + + pub fn kind(&self) -> StructKind { + match self { + VariantData::Record(_) => StructKind::Record, + VariantData::Tuple(_) => StructKind::Tuple, 
+ VariantData::Unit => StructKind::Unit, + } + } +} + +impl HasChildSource for VariantId { + type ChildId = LocalFieldId; + type Value = Either; + + fn child_source(&self, db: &dyn DefDatabase) -> InFile> { + let (src, module_id) = match self { + VariantId::EnumVariantId(it) => { + // I don't really like the fact that we call into parent source + // here, this might add to more queries then necessary. + let src = it.parent.child_source(db); + (src.map(|map| map[it.local_id].kind()), it.parent.lookup(db).container.module(db)) + } + VariantId::StructId(it) => { + (it.lookup(db).source(db).map(|it| it.kind()), it.lookup(db).container.module(db)) + } + VariantId::UnionId(it) => ( + it.lookup(db).source(db).map(|it| { + it.record_field_list() + .map(ast::StructKind::Record) + .unwrap_or(ast::StructKind::Unit) + }), + it.lookup(db).container.module(db), + ), + }; + let mut expander = CfgExpander::new(db, src.file_id, module_id.krate); + let mut trace = Trace::new_for_map(); + lower_struct(db, &mut expander, &mut trace, &src); + src.with_value(trace.into_map()) + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +pub enum StructKind { + Tuple, + Record, + Unit, +} + +fn lower_struct( + db: &dyn DefDatabase, + expander: &mut CfgExpander, + trace: &mut Trace>, + ast: &InFile, +) -> StructKind { + let ctx = LowerCtx::new(db, ast.file_id); + + match &ast.value { + ast::StructKind::Tuple(fl) => { + for (i, fd) in fl.fields().enumerate() { + if !expander.is_cfg_enabled(&fd) { + continue; + } + + trace.alloc( + || Either::Left(fd.clone()), + || FieldData { + name: Name::new_tuple_field(i), + type_ref: TypeRef::from_ast_opt(&ctx, fd.ty()), + visibility: RawVisibility::from_ast(db, ast.with_value(fd.visibility())), + }, + ); + } + StructKind::Tuple + } + ast::StructKind::Record(fl) => { + for fd in fl.fields() { + if !expander.is_cfg_enabled(&fd) { + continue; + } + + trace.alloc( + || Either::Right(fd.clone()), + || FieldData { + name: fd.name().map(|n| 
n.as_name()).unwrap_or_else(Name::missing), + type_ref: TypeRef::from_ast_opt(&ctx, fd.ty()), + visibility: RawVisibility::from_ast(db, ast.with_value(fd.visibility())), + }, + ); + } + StructKind::Record + } + ast::StructKind::Unit => StructKind::Unit, + } +} + +fn lower_fields(item_tree: &ItemTree, cfg_options: &CfgOptions, fields: &Fields) -> VariantData { + match fields { + Fields::Record(flds) => { + let mut arena = Arena::new(); + for field_id in flds.clone() { + if item_tree.attrs(field_id.into()).is_cfg_enabled(cfg_options) { + arena.alloc(lower_field(item_tree, &item_tree[field_id])); + } + } + VariantData::Record(arena) + } + Fields::Tuple(flds) => { + let mut arena = Arena::new(); + for field_id in flds.clone() { + if item_tree.attrs(field_id.into()).is_cfg_enabled(cfg_options) { + arena.alloc(lower_field(item_tree, &item_tree[field_id])); + } + } + VariantData::Tuple(arena) + } + Fields::Unit => VariantData::Unit, + } +} + +fn lower_field(item_tree: &ItemTree, field: &Field) -> FieldData { + FieldData { + name: field.name.clone(), + type_ref: field.type_ref.clone(), + visibility: item_tree[field.visibility].clone(), + } +} diff --git a/crates/hir_def/src/attr.rs b/crates/hir_def/src/attr.rs new file mode 100644 index 0000000000..dea552a605 --- /dev/null +++ b/crates/hir_def/src/attr.rs @@ -0,0 +1,212 @@ +//! A higher level attributes based on TokenTree, with also some shortcuts. 
+ +use std::{ops, sync::Arc}; + +use cfg::{CfgExpr, CfgOptions}; +use either::Either; +use hir_expand::{hygiene::Hygiene, AstId, InFile}; +use mbe::ast_to_token_tree; +use syntax::{ + ast::{self, AstNode, AttrsOwner}, + SmolStr, +}; +use tt::Subtree; + +use crate::{ + db::DefDatabase, + item_tree::{ItemTreeId, ItemTreeNode}, + nameres::ModuleSource, + path::ModPath, + src::HasChildSource, + AdtId, AttrDefId, Lookup, +}; + +#[derive(Default, Debug, Clone, PartialEq, Eq)] +pub struct Attrs { + entries: Option>, +} + +impl ops::Deref for Attrs { + type Target = [Attr]; + + fn deref(&self) -> &[Attr] { + match &self.entries { + Some(it) => &*it, + None => &[], + } + } +} + +impl Attrs { + pub const EMPTY: Attrs = Attrs { entries: None }; + + pub(crate) fn attrs_query(db: &dyn DefDatabase, def: AttrDefId) -> Attrs { + match def { + AttrDefId::ModuleId(module) => { + let def_map = db.crate_def_map(module.krate); + let mod_data = &def_map[module.local_id]; + match mod_data.declaration_source(db) { + Some(it) => { + Attrs::from_attrs_owner(db, it.as_ref().map(|it| it as &dyn AttrsOwner)) + } + None => Attrs::from_attrs_owner( + db, + mod_data.definition_source(db).as_ref().map(|src| match src { + ModuleSource::SourceFile(file) => file as &dyn AttrsOwner, + ModuleSource::Module(module) => module as &dyn AttrsOwner, + }), + ), + } + } + AttrDefId::FieldId(it) => { + let src = it.parent.child_source(db); + match &src.value[it.local_id] { + Either::Left(_tuple) => Attrs::default(), + Either::Right(record) => Attrs::from_attrs_owner(db, src.with_value(record)), + } + } + AttrDefId::EnumVariantId(var_id) => { + let src = var_id.parent.child_source(db); + let src = src.as_ref().map(|it| &it[var_id.local_id]); + Attrs::from_attrs_owner(db, src.map(|it| it as &dyn AttrsOwner)) + } + AttrDefId::AdtId(it) => match it { + AdtId::StructId(it) => attrs_from_item_tree(it.lookup(db).id, db), + AdtId::EnumId(it) => attrs_from_item_tree(it.lookup(db).id, db), + AdtId::UnionId(it) => 
attrs_from_item_tree(it.lookup(db).id, db), + }, + AttrDefId::TraitId(it) => attrs_from_item_tree(it.lookup(db).id, db), + AttrDefId::MacroDefId(it) => { + it.ast_id.map_or_else(Default::default, |ast_id| attrs_from_ast(ast_id, db)) + } + AttrDefId::ImplId(it) => attrs_from_item_tree(it.lookup(db).id, db), + AttrDefId::ConstId(it) => attrs_from_item_tree(it.lookup(db).id, db), + AttrDefId::StaticId(it) => attrs_from_item_tree(it.lookup(db).id, db), + AttrDefId::FunctionId(it) => attrs_from_item_tree(it.lookup(db).id, db), + AttrDefId::TypeAliasId(it) => attrs_from_item_tree(it.lookup(db).id, db), + } + } + + pub fn from_attrs_owner(db: &dyn DefDatabase, owner: InFile<&dyn AttrsOwner>) -> Attrs { + let hygiene = Hygiene::new(db.upcast(), owner.file_id); + Attrs::new(owner.value, &hygiene) + } + + pub(crate) fn new(owner: &dyn AttrsOwner, hygiene: &Hygiene) -> Attrs { + let docs = ast::CommentIter::from_syntax_node(owner.syntax()).doc_comment_text().map( + |docs_text| Attr { + input: Some(AttrInput::Literal(SmolStr::new(docs_text))), + path: ModPath::from(hir_expand::name!(doc)), + }, + ); + let mut attrs = owner.attrs().peekable(); + let entries = if attrs.peek().is_none() { + // Avoid heap allocation + None + } else { + Some(attrs.flat_map(|ast| Attr::from_src(ast, hygiene)).chain(docs).collect()) + }; + Attrs { entries } + } + + pub fn merge(&self, other: Attrs) -> Attrs { + match (&self.entries, &other.entries) { + (None, None) => Attrs { entries: None }, + (Some(entries), None) | (None, Some(entries)) => { + Attrs { entries: Some(entries.clone()) } + } + (Some(a), Some(b)) => { + Attrs { entries: Some(a.iter().chain(b.iter()).cloned().collect()) } + } + } + } + + pub fn by_key(&self, key: &'static str) -> AttrQuery<'_> { + AttrQuery { attrs: self, key } + } + + pub fn cfg(&self) -> impl Iterator + '_ { + // FIXME: handle cfg_attr :-) + self.by_key("cfg").tt_values().map(CfgExpr::parse) + } + pub(crate) fn is_cfg_enabled(&self, cfg_options: &CfgOptions) -> bool { 
+ self.cfg().all(|cfg| cfg_options.check(&cfg) != Some(false)) + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct Attr { + pub(crate) path: ModPath, + pub(crate) input: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum AttrInput { + /// `#[attr = "string"]` + Literal(SmolStr), + /// `#[attr(subtree)]` + TokenTree(Subtree), +} + +impl Attr { + fn from_src(ast: ast::Attr, hygiene: &Hygiene) -> Option { + let path = ModPath::from_src(ast.path()?, hygiene)?; + let input = if let Some(lit) = ast.literal() { + // FIXME: escape? raw string? + let value = lit.syntax().first_token()?.text().trim_matches('"').into(); + Some(AttrInput::Literal(value)) + } else if let Some(tt) = ast.token_tree() { + Some(AttrInput::TokenTree(ast_to_token_tree(&tt)?.0)) + } else { + None + }; + Some(Attr { path, input }) + } +} + +#[derive(Debug, Clone, Copy)] +pub struct AttrQuery<'a> { + attrs: &'a Attrs, + key: &'static str, +} + +impl<'a> AttrQuery<'a> { + pub fn tt_values(self) -> impl Iterator { + self.attrs().filter_map(|attr| match attr.input.as_ref()? { + AttrInput::TokenTree(it) => Some(it), + _ => None, + }) + } + + pub fn string_value(self) -> Option<&'a SmolStr> { + self.attrs().find_map(|attr| match attr.input.as_ref()? 
{ + AttrInput::Literal(it) => Some(it), + _ => None, + }) + } + + pub fn exists(self) -> bool { + self.attrs().next().is_some() + } + + fn attrs(self) -> impl Iterator { + let key = self.key; + self.attrs + .iter() + .filter(move |attr| attr.path.as_ident().map_or(false, |s| s.to_string() == key)) + } +} + +fn attrs_from_ast(src: AstId, db: &dyn DefDatabase) -> Attrs +where + N: ast::AttrsOwner, +{ + let src = InFile::new(src.file_id, src.to_node(db.upcast())); + Attrs::from_attrs_owner(db, src.as_ref().map(|it| it as &dyn AttrsOwner)) +} + +fn attrs_from_item_tree(id: ItemTreeId, db: &dyn DefDatabase) -> Attrs { + let tree = db.item_tree(id.file_id); + let mod_item = N::id_to_mod_item(id.value); + tree.attrs(mod_item.into()).clone() +} diff --git a/crates/hir_def/src/body.rs b/crates/hir_def/src/body.rs new file mode 100644 index 0000000000..9a9a605ddb --- /dev/null +++ b/crates/hir_def/src/body.rs @@ -0,0 +1,360 @@ +//! Defines `Body`: a lowered representation of bodies of functions, statics and +//! consts. +mod lower; +pub mod scope; + +use std::{mem, ops::Index, sync::Arc}; + +use arena::{map::ArenaMap, Arena}; +use base_db::CrateId; +use cfg::CfgOptions; +use drop_bomb::DropBomb; +use either::Either; +use hir_expand::{ast_id_map::AstIdMap, hygiene::Hygiene, AstId, HirFileId, InFile, MacroDefId}; +use rustc_hash::FxHashMap; +use syntax::{ast, AstNode, AstPtr}; +use test_utils::mark; + +pub(crate) use lower::LowerCtx; + +use crate::{ + attr::Attrs, + db::DefDatabase, + expr::{Expr, ExprId, Pat, PatId}, + item_scope::BuiltinShadowMode, + item_scope::ItemScope, + nameres::CrateDefMap, + path::{ModPath, Path}, + src::HasSource, + AsMacroCall, DefWithBodyId, HasModule, Lookup, ModuleId, +}; + +/// A subset of Expander that only deals with cfg attributes. We only need it to +/// avoid cyclic queries in crate def map during enum processing. 
+pub(crate) struct CfgExpander { + cfg_options: CfgOptions, + hygiene: Hygiene, +} + +pub(crate) struct Expander { + cfg_expander: CfgExpander, + crate_def_map: Arc, + current_file_id: HirFileId, + ast_id_map: Arc, + module: ModuleId, + recursion_limit: usize, +} + +#[cfg(test)] +const EXPANSION_RECURSION_LIMIT: usize = 32; + +#[cfg(not(test))] +const EXPANSION_RECURSION_LIMIT: usize = 128; + +impl CfgExpander { + pub(crate) fn new( + db: &dyn DefDatabase, + current_file_id: HirFileId, + krate: CrateId, + ) -> CfgExpander { + let hygiene = Hygiene::new(db.upcast(), current_file_id); + let cfg_options = db.crate_graph()[krate].cfg_options.clone(); + CfgExpander { cfg_options, hygiene } + } + + pub(crate) fn parse_attrs(&self, owner: &dyn ast::AttrsOwner) -> Attrs { + Attrs::new(owner, &self.hygiene) + } + + pub(crate) fn is_cfg_enabled(&self, owner: &dyn ast::AttrsOwner) -> bool { + let attrs = self.parse_attrs(owner); + attrs.is_cfg_enabled(&self.cfg_options) + } +} + +impl Expander { + pub(crate) fn new( + db: &dyn DefDatabase, + current_file_id: HirFileId, + module: ModuleId, + ) -> Expander { + let cfg_expander = CfgExpander::new(db, current_file_id, module.krate); + let crate_def_map = db.crate_def_map(module.krate); + let ast_id_map = db.ast_id_map(current_file_id); + Expander { + cfg_expander, + crate_def_map, + current_file_id, + ast_id_map, + module, + recursion_limit: 0, + } + } + + pub(crate) fn enter_expand( + &mut self, + db: &dyn DefDatabase, + local_scope: Option<&ItemScope>, + macro_call: ast::MacroCall, + ) -> Option<(Mark, T)> { + self.recursion_limit += 1; + if self.recursion_limit > EXPANSION_RECURSION_LIMIT { + mark::hit!(your_stack_belongs_to_me); + return None; + } + + let macro_call = InFile::new(self.current_file_id, ¯o_call); + + if let Some(call_id) = macro_call.as_call_id(db, self.crate_def_map.krate, |path| { + if let Some(local_scope) = local_scope { + if let Some(def) = path.as_ident().and_then(|n| local_scope.get_legacy_macro(n)) { + 
return Some(def); + } + } + self.resolve_path_as_macro(db, &path) + }) { + let file_id = call_id.as_file(); + if let Some(node) = db.parse_or_expand(file_id) { + if let Some(expr) = T::cast(node) { + log::debug!("macro expansion {:#?}", expr.syntax()); + + let mark = Mark { + file_id: self.current_file_id, + ast_id_map: mem::take(&mut self.ast_id_map), + bomb: DropBomb::new("expansion mark dropped"), + }; + self.cfg_expander.hygiene = Hygiene::new(db.upcast(), file_id); + self.current_file_id = file_id; + self.ast_id_map = db.ast_id_map(file_id); + return Some((mark, expr)); + } + } + } + + // FIXME: Instead of just dropping the error from expansion + // report it + None + } + + pub(crate) fn exit(&mut self, db: &dyn DefDatabase, mut mark: Mark) { + self.cfg_expander.hygiene = Hygiene::new(db.upcast(), mark.file_id); + self.current_file_id = mark.file_id; + self.ast_id_map = mem::take(&mut mark.ast_id_map); + self.recursion_limit -= 1; + mark.bomb.defuse(); + } + + pub(crate) fn to_source(&self, value: T) -> InFile { + InFile { file_id: self.current_file_id, value } + } + + pub(crate) fn is_cfg_enabled(&self, owner: &dyn ast::AttrsOwner) -> bool { + self.cfg_expander.is_cfg_enabled(owner) + } + + fn parse_path(&mut self, path: ast::Path) -> Option { + Path::from_src(path, &self.cfg_expander.hygiene) + } + + fn resolve_path_as_macro(&self, db: &dyn DefDatabase, path: &ModPath) -> Option { + self.crate_def_map + .resolve_path(db, self.module.local_id, path, BuiltinShadowMode::Other) + .0 + .take_macros() + } + + fn ast_id(&self, item: &N) -> AstId { + let file_local_id = self.ast_id_map.ast_id(item); + AstId::new(self.current_file_id, file_local_id) + } +} + +pub(crate) struct Mark { + file_id: HirFileId, + ast_id_map: Arc, + bomb: DropBomb, +} + +/// The body of an item (function, const etc.). +#[derive(Debug, Eq, PartialEq)] +pub struct Body { + pub exprs: Arena, + pub pats: Arena, + /// The patterns for the function's parameters. 
While the parameter types are + /// part of the function signature, the patterns are not (they don't change + /// the external type of the function). + /// + /// If this `Body` is for the body of a constant, this will just be + /// empty. + pub params: Vec, + /// The `ExprId` of the actual body expression. + pub body_expr: ExprId, + pub item_scope: ItemScope, +} + +pub type ExprPtr = AstPtr; +pub type ExprSource = InFile; + +pub type PatPtr = Either, AstPtr>; +pub type PatSource = InFile; + +/// An item body together with the mapping from syntax nodes to HIR expression +/// IDs. This is needed to go from e.g. a position in a file to the HIR +/// expression containing it; but for type inference etc., we want to operate on +/// a structure that is agnostic to the actual positions of expressions in the +/// file, so that we don't recompute types whenever some whitespace is typed. +/// +/// One complication here is that, due to macro expansion, a single `Body` might +/// be spread across several files. So, for each ExprId and PatId, we record +/// both the HirFileId and the position inside the file. However, we only store +/// AST -> ExprId mapping for non-macro files, as it is not clear how to handle +/// this properly for macros. 
+#[derive(Default, Debug, Eq, PartialEq)] +pub struct BodySourceMap { + expr_map: FxHashMap, + expr_map_back: ArenaMap>, + pat_map: FxHashMap, + pat_map_back: ArenaMap>, + field_map: FxHashMap<(ExprId, usize), InFile>>, + expansions: FxHashMap>, HirFileId>, +} + +#[derive(Default, Debug, Eq, PartialEq, Clone, Copy)] +pub struct SyntheticSyntax; + +impl Body { + pub(crate) fn body_with_source_map_query( + db: &dyn DefDatabase, + def: DefWithBodyId, + ) -> (Arc, Arc) { + let _p = profile::span("body_with_source_map_query"); + let mut params = None; + + let (file_id, module, body) = match def { + DefWithBodyId::FunctionId(f) => { + let f = f.lookup(db); + let src = f.source(db); + params = src.value.param_list(); + (src.file_id, f.module(db), src.value.body().map(ast::Expr::from)) + } + DefWithBodyId::ConstId(c) => { + let c = c.lookup(db); + let src = c.source(db); + (src.file_id, c.module(db), src.value.body()) + } + DefWithBodyId::StaticId(s) => { + let s = s.lookup(db); + let src = s.source(db); + (src.file_id, s.module(db), src.value.body()) + } + }; + let expander = Expander::new(db, file_id, module); + let (body, source_map) = Body::new(db, def, expander, params, body); + (Arc::new(body), Arc::new(source_map)) + } + + pub(crate) fn body_query(db: &dyn DefDatabase, def: DefWithBodyId) -> Arc { + db.body_with_source_map(def).0 + } + + fn new( + db: &dyn DefDatabase, + def: DefWithBodyId, + expander: Expander, + params: Option, + body: Option, + ) -> (Body, BodySourceMap) { + lower::lower(db, def, expander, params, body) + } +} + +impl Index for Body { + type Output = Expr; + + fn index(&self, expr: ExprId) -> &Expr { + &self.exprs[expr] + } +} + +impl Index for Body { + type Output = Pat; + + fn index(&self, pat: PatId) -> &Pat { + &self.pats[pat] + } +} + +impl BodySourceMap { + pub fn expr_syntax(&self, expr: ExprId) -> Result { + self.expr_map_back[expr].clone() + } + + pub fn node_expr(&self, node: InFile<&ast::Expr>) -> Option { + let src = node.map(|it| 
AstPtr::new(it)); + self.expr_map.get(&src).cloned() + } + + pub fn node_macro_file(&self, node: InFile<&ast::MacroCall>) -> Option { + let src = node.map(|it| AstPtr::new(it)); + self.expansions.get(&src).cloned() + } + + pub fn pat_syntax(&self, pat: PatId) -> Result { + self.pat_map_back[pat].clone() + } + + pub fn node_pat(&self, node: InFile<&ast::Pat>) -> Option { + let src = node.map(|it| Either::Left(AstPtr::new(it))); + self.pat_map.get(&src).cloned() + } + + pub fn node_self_param(&self, node: InFile<&ast::SelfParam>) -> Option { + let src = node.map(|it| Either::Right(AstPtr::new(it))); + self.pat_map.get(&src).cloned() + } + + pub fn field_syntax(&self, expr: ExprId, field: usize) -> InFile> { + self.field_map[&(expr, field)].clone() + } +} + +#[cfg(test)] +mod tests { + use base_db::{fixture::WithFixture, SourceDatabase}; + use test_utils::mark; + + use crate::ModuleDefId; + + use super::*; + + fn lower(ra_fixture: &str) -> Arc { + let (db, file_id) = crate::test_db::TestDB::with_single_file(ra_fixture); + + let krate = db.crate_graph().iter().next().unwrap(); + let def_map = db.crate_def_map(krate); + let module = def_map.modules_for_file(file_id).next().unwrap(); + let module = &def_map[module]; + let fn_def = match module.scope.declarations().next().unwrap() { + ModuleDefId::FunctionId(it) => it, + _ => panic!(), + }; + + db.body(fn_def.into()) + } + + #[test] + fn your_stack_belongs_to_me() { + mark::check!(your_stack_belongs_to_me); + lower( + " +macro_rules! n_nuple { + ($e:tt) => (); + ($($rest:tt)*) => {{ + (n_nuple!($($rest)*)None,) + }}; +} +fn main() { n_nuple!(1,2,3); } +", + ); + } +} diff --git a/crates/hir_def/src/body/lower.rs b/crates/hir_def/src/body/lower.rs new file mode 100644 index 0000000000..a26251cdeb --- /dev/null +++ b/crates/hir_def/src/body/lower.rs @@ -0,0 +1,931 @@ +//! Transforms `ast::Expr` into an equivalent `hir_def::expr::Expr` +//! representation. 
+ +use std::{any::type_name, sync::Arc}; + +use arena::Arena; +use either::Either; +use hir_expand::{ + hygiene::Hygiene, + name::{name, AsName, Name}, + HirFileId, MacroDefId, MacroDefKind, +}; +use rustc_hash::FxHashMap; +use syntax::{ + ast::{ + self, ArgListOwner, ArrayExprKind, AstChildren, LiteralKind, LoopBodyOwner, NameOwner, + SlicePatComponents, + }, + AstNode, AstPtr, +}; +use test_utils::mark; + +use crate::{ + adt::StructKind, + body::{Body, BodySourceMap, Expander, PatPtr, SyntheticSyntax}, + builtin_type::{BuiltinFloat, BuiltinInt}, + db::DefDatabase, + expr::{ + dummy_expr_id, ArithOp, Array, BinaryOp, BindingAnnotation, CmpOp, Expr, ExprId, Literal, + LogicOp, MatchArm, Ordering, Pat, PatId, RecordFieldPat, RecordLitField, Statement, + }, + item_scope::BuiltinShadowMode, + item_tree::{ItemTree, ItemTreeId, ItemTreeNode}, + path::{GenericArgs, Path}, + type_ref::{Mutability, Rawness, TypeRef}, + AdtId, ConstLoc, ContainerId, DefWithBodyId, EnumLoc, FunctionLoc, Intern, ModuleDefId, + StaticLoc, StructLoc, TraitLoc, TypeAliasLoc, UnionLoc, +}; + +use super::{ExprSource, PatSource}; + +pub(crate) struct LowerCtx { + hygiene: Hygiene, +} + +impl LowerCtx { + pub fn new(db: &dyn DefDatabase, file_id: HirFileId) -> Self { + LowerCtx { hygiene: Hygiene::new(db.upcast(), file_id) } + } + pub fn with_hygiene(hygiene: &Hygiene) -> Self { + LowerCtx { hygiene: hygiene.clone() } + } + + pub fn lower_path(&self, ast: ast::Path) -> Option { + Path::from_src(ast, &self.hygiene) + } +} + +pub(super) fn lower( + db: &dyn DefDatabase, + def: DefWithBodyId, + expander: Expander, + params: Option, + body: Option, +) -> (Body, BodySourceMap) { + let item_tree = db.item_tree(expander.current_file_id); + ExprCollector { + db, + def, + source_map: BodySourceMap::default(), + body: Body { + exprs: Arena::default(), + pats: Arena::default(), + params: Vec::new(), + body_expr: dummy_expr_id(), + item_scope: Default::default(), + }, + item_trees: { + let mut map = 
FxHashMap::default(); + map.insert(expander.current_file_id, item_tree); + map + }, + expander, + } + .collect(params, body) +} + +struct ExprCollector<'a> { + db: &'a dyn DefDatabase, + def: DefWithBodyId, + expander: Expander, + body: Body, + source_map: BodySourceMap, + + item_trees: FxHashMap>, +} + +impl ExprCollector<'_> { + fn collect( + mut self, + param_list: Option, + body: Option, + ) -> (Body, BodySourceMap) { + if let Some(param_list) = param_list { + if let Some(self_param) = param_list.self_param() { + let ptr = AstPtr::new(&self_param); + let param_pat = self.alloc_pat( + Pat::Bind { + name: name![self], + mode: BindingAnnotation::Unannotated, + subpat: None, + }, + Either::Right(ptr), + ); + self.body.params.push(param_pat); + } + + for param in param_list.params() { + let pat = match param.pat() { + None => continue, + Some(pat) => pat, + }; + let param_pat = self.collect_pat(pat); + self.body.params.push(param_pat); + } + }; + + self.body.body_expr = self.collect_expr_opt(body); + (self.body, self.source_map) + } + + fn ctx(&self) -> LowerCtx { + LowerCtx::new(self.db, self.expander.current_file_id) + } + + fn alloc_expr(&mut self, expr: Expr, ptr: AstPtr) -> ExprId { + let src = self.expander.to_source(ptr); + let id = self.make_expr(expr, Ok(src.clone())); + self.source_map.expr_map.insert(src, id); + id + } + // desugared exprs don't have ptr, that's wrong and should be fixed + // somehow. 
+ fn alloc_expr_desugared(&mut self, expr: Expr) -> ExprId { + self.make_expr(expr, Err(SyntheticSyntax)) + } + fn empty_block(&mut self) -> ExprId { + self.alloc_expr_desugared(Expr::Block { statements: Vec::new(), tail: None, label: None }) + } + fn missing_expr(&mut self) -> ExprId { + self.alloc_expr_desugared(Expr::Missing) + } + fn make_expr(&mut self, expr: Expr, src: Result) -> ExprId { + let id = self.body.exprs.alloc(expr); + self.source_map.expr_map_back.insert(id, src); + id + } + + fn alloc_pat(&mut self, pat: Pat, ptr: PatPtr) -> PatId { + let src = self.expander.to_source(ptr); + let id = self.make_pat(pat, Ok(src.clone())); + self.source_map.pat_map.insert(src, id); + id + } + fn missing_pat(&mut self) -> PatId { + self.make_pat(Pat::Missing, Err(SyntheticSyntax)) + } + fn make_pat(&mut self, pat: Pat, src: Result) -> PatId { + let id = self.body.pats.alloc(pat); + self.source_map.pat_map_back.insert(id, src); + id + } + + fn collect_expr(&mut self, expr: ast::Expr) -> ExprId { + let syntax_ptr = AstPtr::new(&expr); + if !self.expander.is_cfg_enabled(&expr) { + return self.missing_expr(); + } + + match expr { + ast::Expr::IfExpr(e) => { + let then_branch = self.collect_block_opt(e.then_branch()); + + let else_branch = e.else_branch().map(|b| match b { + ast::ElseBranch::Block(it) => self.collect_block(it), + ast::ElseBranch::IfExpr(elif) => { + let expr: ast::Expr = ast::Expr::cast(elif.syntax().clone()).unwrap(); + self.collect_expr(expr) + } + }); + + let condition = match e.condition() { + None => self.missing_expr(), + Some(condition) => match condition.pat() { + None => self.collect_expr_opt(condition.expr()), + // if let -- desugar to match + Some(pat) => { + let pat = self.collect_pat(pat); + let match_expr = self.collect_expr_opt(condition.expr()); + let placeholder_pat = self.missing_pat(); + let arms = vec![ + MatchArm { pat, expr: then_branch, guard: None }, + MatchArm { + pat: placeholder_pat, + expr: else_branch.unwrap_or_else(|| 
self.empty_block()), + guard: None, + }, + ]; + return self + .alloc_expr(Expr::Match { expr: match_expr, arms }, syntax_ptr); + } + }, + }; + + self.alloc_expr(Expr::If { condition, then_branch, else_branch }, syntax_ptr) + } + ast::Expr::EffectExpr(e) => match e.effect() { + ast::Effect::Try(_) => { + let body = self.collect_block_opt(e.block_expr()); + self.alloc_expr(Expr::TryBlock { body }, syntax_ptr) + } + ast::Effect::Unsafe(_) => { + let body = self.collect_block_opt(e.block_expr()); + self.alloc_expr(Expr::Unsafe { body }, syntax_ptr) + } + // FIXME: we need to record these effects somewhere... + ast::Effect::Label(label) => match e.block_expr() { + Some(block) => { + let res = self.collect_block(block); + match &mut self.body.exprs[res] { + Expr::Block { label: block_label, .. } => { + *block_label = + label.lifetime_token().map(|t| Name::new_lifetime(&t)) + } + _ => unreachable!(), + } + res + } + None => self.missing_expr(), + }, + // FIXME: we need to record these effects somewhere... 
+ ast::Effect::Async(_) => self.collect_block_opt(e.block_expr()), + }, + ast::Expr::BlockExpr(e) => self.collect_block(e), + ast::Expr::LoopExpr(e) => { + let body = self.collect_block_opt(e.loop_body()); + self.alloc_expr( + Expr::Loop { + body, + label: e + .label() + .and_then(|l| l.lifetime_token()) + .map(|l| Name::new_lifetime(&l)), + }, + syntax_ptr, + ) + } + ast::Expr::WhileExpr(e) => { + let body = self.collect_block_opt(e.loop_body()); + + let condition = match e.condition() { + None => self.missing_expr(), + Some(condition) => match condition.pat() { + None => self.collect_expr_opt(condition.expr()), + // if let -- desugar to match + Some(pat) => { + mark::hit!(infer_resolve_while_let); + let pat = self.collect_pat(pat); + let match_expr = self.collect_expr_opt(condition.expr()); + let placeholder_pat = self.missing_pat(); + let break_ = + self.alloc_expr_desugared(Expr::Break { expr: None, label: None }); + let arms = vec![ + MatchArm { pat, expr: body, guard: None }, + MatchArm { pat: placeholder_pat, expr: break_, guard: None }, + ]; + let match_expr = + self.alloc_expr_desugared(Expr::Match { expr: match_expr, arms }); + return self.alloc_expr( + Expr::Loop { + body: match_expr, + label: e + .label() + .and_then(|l| l.lifetime_token()) + .map(|l| Name::new_lifetime(&l)), + }, + syntax_ptr, + ); + } + }, + }; + + self.alloc_expr( + Expr::While { + condition, + body, + label: e + .label() + .and_then(|l| l.lifetime_token()) + .map(|l| Name::new_lifetime(&l)), + }, + syntax_ptr, + ) + } + ast::Expr::ForExpr(e) => { + let iterable = self.collect_expr_opt(e.iterable()); + let pat = self.collect_pat_opt(e.pat()); + let body = self.collect_block_opt(e.loop_body()); + self.alloc_expr( + Expr::For { + iterable, + pat, + body, + label: e + .label() + .and_then(|l| l.lifetime_token()) + .map(|l| Name::new_lifetime(&l)), + }, + syntax_ptr, + ) + } + ast::Expr::CallExpr(e) => { + let callee = self.collect_expr_opt(e.expr()); + let args = if let Some(arg_list) = 
e.arg_list() { + arg_list.args().map(|e| self.collect_expr(e)).collect() + } else { + Vec::new() + }; + self.alloc_expr(Expr::Call { callee, args }, syntax_ptr) + } + ast::Expr::MethodCallExpr(e) => { + let receiver = self.collect_expr_opt(e.expr()); + let args = if let Some(arg_list) = e.arg_list() { + arg_list.args().map(|e| self.collect_expr(e)).collect() + } else { + Vec::new() + }; + let method_name = e.name_ref().map(|nr| nr.as_name()).unwrap_or_else(Name::missing); + let generic_args = + e.generic_arg_list().and_then(|it| GenericArgs::from_ast(&self.ctx(), it)); + self.alloc_expr( + Expr::MethodCall { receiver, method_name, args, generic_args }, + syntax_ptr, + ) + } + ast::Expr::MatchExpr(e) => { + let expr = self.collect_expr_opt(e.expr()); + let arms = if let Some(match_arm_list) = e.match_arm_list() { + match_arm_list + .arms() + .map(|arm| MatchArm { + pat: self.collect_pat_opt(arm.pat()), + expr: self.collect_expr_opt(arm.expr()), + guard: arm + .guard() + .and_then(|guard| guard.expr()) + .map(|e| self.collect_expr(e)), + }) + .collect() + } else { + Vec::new() + }; + self.alloc_expr(Expr::Match { expr, arms }, syntax_ptr) + } + ast::Expr::PathExpr(e) => { + let path = e + .path() + .and_then(|path| self.expander.parse_path(path)) + .map(Expr::Path) + .unwrap_or(Expr::Missing); + self.alloc_expr(path, syntax_ptr) + } + ast::Expr::ContinueExpr(e) => self.alloc_expr( + Expr::Continue { label: e.lifetime_token().map(|l| Name::new_lifetime(&l)) }, + syntax_ptr, + ), + ast::Expr::BreakExpr(e) => { + let expr = e.expr().map(|e| self.collect_expr(e)); + self.alloc_expr( + Expr::Break { expr, label: e.lifetime_token().map(|l| Name::new_lifetime(&l)) }, + syntax_ptr, + ) + } + ast::Expr::ParenExpr(e) => { + let inner = self.collect_expr_opt(e.expr()); + // make the paren expr point to the inner expression as well + let src = self.expander.to_source(syntax_ptr); + self.source_map.expr_map.insert(src, inner); + inner + } + ast::Expr::ReturnExpr(e) => { + let 
expr = e.expr().map(|e| self.collect_expr(e)); + self.alloc_expr(Expr::Return { expr }, syntax_ptr) + } + ast::Expr::RecordExpr(e) => { + let path = e.path().and_then(|path| self.expander.parse_path(path)); + let mut field_ptrs = Vec::new(); + let record_lit = if let Some(nfl) = e.record_expr_field_list() { + let fields = nfl + .fields() + .inspect(|field| field_ptrs.push(AstPtr::new(field))) + .filter_map(|field| { + if !self.expander.is_cfg_enabled(&field) { + return None; + } + let name = field.field_name()?.as_name(); + + Some(RecordLitField { + name, + expr: match field.expr() { + Some(e) => self.collect_expr(e), + None => self.missing_expr(), + }, + }) + }) + .collect(); + let spread = nfl.spread().map(|s| self.collect_expr(s)); + Expr::RecordLit { path, fields, spread } + } else { + Expr::RecordLit { path, fields: Vec::new(), spread: None } + }; + + let res = self.alloc_expr(record_lit, syntax_ptr); + for (i, ptr) in field_ptrs.into_iter().enumerate() { + let src = self.expander.to_source(ptr); + self.source_map.field_map.insert((res, i), src); + } + res + } + ast::Expr::FieldExpr(e) => { + let expr = self.collect_expr_opt(e.expr()); + let name = match e.field_access() { + Some(kind) => kind.as_name(), + _ => Name::missing(), + }; + self.alloc_expr(Expr::Field { expr, name }, syntax_ptr) + } + ast::Expr::AwaitExpr(e) => { + let expr = self.collect_expr_opt(e.expr()); + self.alloc_expr(Expr::Await { expr }, syntax_ptr) + } + ast::Expr::TryExpr(e) => { + let expr = self.collect_expr_opt(e.expr()); + self.alloc_expr(Expr::Try { expr }, syntax_ptr) + } + ast::Expr::CastExpr(e) => { + let expr = self.collect_expr_opt(e.expr()); + let type_ref = TypeRef::from_ast_opt(&self.ctx(), e.ty()); + self.alloc_expr(Expr::Cast { expr, type_ref }, syntax_ptr) + } + ast::Expr::RefExpr(e) => { + let expr = self.collect_expr_opt(e.expr()); + let raw_tok = e.raw_token().is_some(); + let mutability = if raw_tok { + if e.mut_token().is_some() { + Mutability::Mut + } else if 
e.const_token().is_some() { + Mutability::Shared + } else { + unreachable!("parser only remaps to raw_token() if matching mutability token follows") + } + } else { + Mutability::from_mutable(e.mut_token().is_some()) + }; + let rawness = Rawness::from_raw(raw_tok); + self.alloc_expr(Expr::Ref { expr, rawness, mutability }, syntax_ptr) + } + ast::Expr::PrefixExpr(e) => { + let expr = self.collect_expr_opt(e.expr()); + if let Some(op) = e.op_kind() { + self.alloc_expr(Expr::UnaryOp { expr, op }, syntax_ptr) + } else { + self.alloc_expr(Expr::Missing, syntax_ptr) + } + } + ast::Expr::ClosureExpr(e) => { + let mut args = Vec::new(); + let mut arg_types = Vec::new(); + if let Some(pl) = e.param_list() { + for param in pl.params() { + let pat = self.collect_pat_opt(param.pat()); + let type_ref = param.ty().map(|it| TypeRef::from_ast(&self.ctx(), it)); + args.push(pat); + arg_types.push(type_ref); + } + } + let ret_type = + e.ret_type().and_then(|r| r.ty()).map(|it| TypeRef::from_ast(&self.ctx(), it)); + let body = self.collect_expr_opt(e.body()); + self.alloc_expr(Expr::Lambda { args, arg_types, ret_type, body }, syntax_ptr) + } + ast::Expr::BinExpr(e) => { + let lhs = self.collect_expr_opt(e.lhs()); + let rhs = self.collect_expr_opt(e.rhs()); + let op = e.op_kind().map(BinaryOp::from); + self.alloc_expr(Expr::BinaryOp { lhs, rhs, op }, syntax_ptr) + } + ast::Expr::TupleExpr(e) => { + let exprs = e.fields().map(|expr| self.collect_expr(expr)).collect(); + self.alloc_expr(Expr::Tuple { exprs }, syntax_ptr) + } + ast::Expr::BoxExpr(e) => { + let expr = self.collect_expr_opt(e.expr()); + self.alloc_expr(Expr::Box { expr }, syntax_ptr) + } + + ast::Expr::ArrayExpr(e) => { + let kind = e.kind(); + + match kind { + ArrayExprKind::ElementList(e) => { + let exprs = e.map(|expr| self.collect_expr(expr)).collect(); + self.alloc_expr(Expr::Array(Array::ElementList(exprs)), syntax_ptr) + } + ArrayExprKind::Repeat { initializer, repeat } => { + let initializer = 
self.collect_expr_opt(initializer); + let repeat = self.collect_expr_opt(repeat); + self.alloc_expr( + Expr::Array(Array::Repeat { initializer, repeat }), + syntax_ptr, + ) + } + } + } + + ast::Expr::Literal(e) => self.alloc_expr(Expr::Literal(e.kind().into()), syntax_ptr), + ast::Expr::IndexExpr(e) => { + let base = self.collect_expr_opt(e.base()); + let index = self.collect_expr_opt(e.index()); + self.alloc_expr(Expr::Index { base, index }, syntax_ptr) + } + ast::Expr::RangeExpr(e) => { + let lhs = e.start().map(|lhs| self.collect_expr(lhs)); + let rhs = e.end().map(|rhs| self.collect_expr(rhs)); + match e.op_kind() { + Some(range_type) => { + self.alloc_expr(Expr::Range { lhs, rhs, range_type }, syntax_ptr) + } + None => self.alloc_expr(Expr::Missing, syntax_ptr), + } + } + ast::Expr::MacroCall(e) => { + if let Some(name) = e.is_macro_rules().map(|it| it.as_name()) { + let mac = MacroDefId { + krate: Some(self.expander.module.krate), + ast_id: Some(self.expander.ast_id(&e)), + kind: MacroDefKind::Declarative, + local_inner: false, + }; + self.body.item_scope.define_legacy_macro(name, mac); + + // FIXME: do we still need to allocate this as missing ? 
+ self.alloc_expr(Expr::Missing, syntax_ptr) + } else { + let macro_call = self.expander.to_source(AstPtr::new(&e)); + match self.expander.enter_expand(self.db, Some(&self.body.item_scope), e) { + Some((mark, expansion)) => { + self.source_map + .expansions + .insert(macro_call, self.expander.current_file_id); + + let item_tree = self.db.item_tree(self.expander.current_file_id); + self.item_trees.insert(self.expander.current_file_id, item_tree); + let id = self.collect_expr(expansion); + self.expander.exit(self.db, mark); + id + } + None => self.alloc_expr(Expr::Missing, syntax_ptr), + } + } + } + } + } + + fn find_inner_item(&self, ast: &N::Source) -> Option> { + let id = self.expander.ast_id(ast); + let tree = &self.item_trees[&id.file_id]; + + // FIXME: This probably breaks with `use` items, since they produce multiple item tree nodes + + // Root file (non-macro). + let item_tree_id = tree + .all_inner_items() + .chain(tree.top_level_items().iter().copied()) + .filter_map(|mod_item| mod_item.downcast::()) + .find(|tree_id| tree[*tree_id].ast_id().upcast() == id.value.upcast()) + .or_else(|| { + log::debug!( + "couldn't find inner {} item for {:?} (AST: `{}` - {:?})", + type_name::(), + id, + ast.syntax(), + ast.syntax(), + ); + None + })?; + + Some(ItemTreeId::new(id.file_id, item_tree_id)) + } + + fn collect_expr_opt(&mut self, expr: Option) -> ExprId { + if let Some(expr) = expr { + self.collect_expr(expr) + } else { + self.missing_expr() + } + } + + fn collect_block(&mut self, block: ast::BlockExpr) -> ExprId { + let syntax_node_ptr = AstPtr::new(&block.clone().into()); + self.collect_block_items(&block); + let statements = block + .statements() + .filter_map(|s| { + let stmt = match s { + ast::Stmt::LetStmt(stmt) => { + let pat = self.collect_pat_opt(stmt.pat()); + let type_ref = stmt.ty().map(|it| TypeRef::from_ast(&self.ctx(), it)); + let initializer = stmt.initializer().map(|e| self.collect_expr(e)); + Statement::Let { pat, type_ref, initializer } + } + 
ast::Stmt::ExprStmt(stmt) => { + Statement::Expr(self.collect_expr_opt(stmt.expr())) + } + ast::Stmt::Item(_) => return None, + }; + Some(stmt) + }) + .collect(); + let tail = block.expr().map(|e| self.collect_expr(e)); + self.alloc_expr(Expr::Block { statements, tail, label: None }, syntax_node_ptr) + } + + fn collect_block_items(&mut self, block: &ast::BlockExpr) { + let container = ContainerId::DefWithBodyId(self.def); + + let items = block + .statements() + .filter_map(|stmt| match stmt { + ast::Stmt::Item(it) => Some(it), + ast::Stmt::LetStmt(_) | ast::Stmt::ExprStmt(_) => None, + }) + .filter_map(|item| { + let (def, name): (ModuleDefId, Option) = match item { + ast::Item::Fn(def) => { + let id = self.find_inner_item(&def)?; + ( + FunctionLoc { container: container.into(), id }.intern(self.db).into(), + def.name(), + ) + } + ast::Item::TypeAlias(def) => { + let id = self.find_inner_item(&def)?; + ( + TypeAliasLoc { container: container.into(), id }.intern(self.db).into(), + def.name(), + ) + } + ast::Item::Const(def) => { + let id = self.find_inner_item(&def)?; + ( + ConstLoc { container: container.into(), id }.intern(self.db).into(), + def.name(), + ) + } + ast::Item::Static(def) => { + let id = self.find_inner_item(&def)?; + (StaticLoc { container, id }.intern(self.db).into(), def.name()) + } + ast::Item::Struct(def) => { + let id = self.find_inner_item(&def)?; + (StructLoc { container, id }.intern(self.db).into(), def.name()) + } + ast::Item::Enum(def) => { + let id = self.find_inner_item(&def)?; + (EnumLoc { container, id }.intern(self.db).into(), def.name()) + } + ast::Item::Union(def) => { + let id = self.find_inner_item(&def)?; + (UnionLoc { container, id }.intern(self.db).into(), def.name()) + } + ast::Item::Trait(def) => { + let id = self.find_inner_item(&def)?; + (TraitLoc { container, id }.intern(self.db).into(), def.name()) + } + ast::Item::ExternBlock(_) => return None, // FIXME: collect from extern blocks + ast::Item::Impl(_) + | 
ast::Item::Use(_) + | ast::Item::ExternCrate(_) + | ast::Item::Module(_) + | ast::Item::MacroCall(_) => return None, + }; + + Some((def, name)) + }) + .collect::>(); + + for (def, name) in items { + self.body.item_scope.define_def(def); + if let Some(name) = name { + let vis = crate::visibility::Visibility::Public; // FIXME determine correctly + let has_constructor = match def { + ModuleDefId::AdtId(AdtId::StructId(s)) => { + self.db.struct_data(s).variant_data.kind() != StructKind::Record + } + _ => true, + }; + self.body.item_scope.push_res( + name.as_name(), + crate::per_ns::PerNs::from_def(def, vis, has_constructor), + ); + } + } + } + + fn collect_block_opt(&mut self, expr: Option) -> ExprId { + if let Some(block) = expr { + self.collect_block(block) + } else { + self.missing_expr() + } + } + + fn collect_pat(&mut self, pat: ast::Pat) -> PatId { + let pattern = match &pat { + ast::Pat::IdentPat(bp) => { + let name = bp.name().map(|nr| nr.as_name()).unwrap_or_else(Name::missing); + let annotation = + BindingAnnotation::new(bp.mut_token().is_some(), bp.ref_token().is_some()); + let subpat = bp.pat().map(|subpat| self.collect_pat(subpat)); + if annotation == BindingAnnotation::Unannotated && subpat.is_none() { + // This could also be a single-segment path pattern. To + // decide that, we need to try resolving the name. 
+ let (resolved, _) = self.expander.crate_def_map.resolve_path( + self.db, + self.expander.module.local_id, + &name.clone().into(), + BuiltinShadowMode::Other, + ); + match resolved.take_values() { + Some(ModuleDefId::ConstId(_)) => Pat::Path(name.into()), + Some(ModuleDefId::EnumVariantId(_)) => { + // this is only really valid for unit variants, but + // shadowing other enum variants with a pattern is + // an error anyway + Pat::Path(name.into()) + } + Some(ModuleDefId::AdtId(AdtId::StructId(s))) + if self.db.struct_data(s).variant_data.kind() != StructKind::Record => + { + // Funnily enough, record structs *can* be shadowed + // by pattern bindings (but unit or tuple structs + // can't). + Pat::Path(name.into()) + } + // shadowing statics is an error as well, so we just ignore that case here + _ => Pat::Bind { name, mode: annotation, subpat }, + } + } else { + Pat::Bind { name, mode: annotation, subpat } + } + } + ast::Pat::TupleStructPat(p) => { + let path = p.path().and_then(|path| self.expander.parse_path(path)); + let (args, ellipsis) = self.collect_tuple_pat(p.fields()); + Pat::TupleStruct { path, args, ellipsis } + } + ast::Pat::RefPat(p) => { + let pat = self.collect_pat_opt(p.pat()); + let mutability = Mutability::from_mutable(p.mut_token().is_some()); + Pat::Ref { pat, mutability } + } + ast::Pat::PathPat(p) => { + let path = p.path().and_then(|path| self.expander.parse_path(path)); + path.map(Pat::Path).unwrap_or(Pat::Missing) + } + ast::Pat::OrPat(p) => { + let pats = p.pats().map(|p| self.collect_pat(p)).collect(); + Pat::Or(pats) + } + ast::Pat::ParenPat(p) => return self.collect_pat_opt(p.pat()), + ast::Pat::TuplePat(p) => { + let (args, ellipsis) = self.collect_tuple_pat(p.fields()); + Pat::Tuple { args, ellipsis } + } + ast::Pat::WildcardPat(_) => Pat::Wild, + ast::Pat::RecordPat(p) => { + let path = p.path().and_then(|path| self.expander.parse_path(path)); + let args: Vec<_> = p + .record_pat_field_list() + .expect("every struct should have a 
field list") + .fields() + .filter_map(|f| { + let ast_pat = f.pat()?; + let pat = self.collect_pat(ast_pat); + let name = f.field_name()?.as_name(); + Some(RecordFieldPat { name, pat }) + }) + .collect(); + + let ellipsis = p + .record_pat_field_list() + .expect("every struct should have a field list") + .dotdot_token() + .is_some(); + + Pat::Record { path, args, ellipsis } + } + ast::Pat::SlicePat(p) => { + let SlicePatComponents { prefix, slice, suffix } = p.components(); + + // FIXME properly handle `RestPat` + Pat::Slice { + prefix: prefix.into_iter().map(|p| self.collect_pat(p)).collect(), + slice: slice.map(|p| self.collect_pat(p)), + suffix: suffix.into_iter().map(|p| self.collect_pat(p)).collect(), + } + } + ast::Pat::LiteralPat(lit) => { + if let Some(ast_lit) = lit.literal() { + let expr = Expr::Literal(ast_lit.kind().into()); + let expr_ptr = AstPtr::new(&ast::Expr::Literal(ast_lit)); + let expr_id = self.alloc_expr(expr, expr_ptr); + Pat::Lit(expr_id) + } else { + Pat::Missing + } + } + ast::Pat::RestPat(_) => { + // `RestPat` requires special handling and should not be mapped + // to a Pat. Here we are using `Pat::Missing` as a fallback for + // when `RestPat` is mapped to `Pat`, which can easily happen + // when the source code being analyzed has a malformed pattern + // which includes `..` in a place where it isn't valid. + + Pat::Missing + } + // FIXME: implement + ast::Pat::BoxPat(_) | ast::Pat::RangePat(_) | ast::Pat::MacroPat(_) => Pat::Missing, + }; + let ptr = AstPtr::new(&pat); + self.alloc_pat(pattern, Either::Left(ptr)) + } + + fn collect_pat_opt(&mut self, pat: Option) -> PatId { + if let Some(pat) = pat { + self.collect_pat(pat) + } else { + self.missing_pat() + } + } + + fn collect_tuple_pat(&mut self, args: AstChildren) -> (Vec, Option) { + // Find the location of the `..`, if there is one. Note that we do not + // consider the possiblity of there being multiple `..` here. 
+ let ellipsis = args.clone().position(|p| matches!(p, ast::Pat::RestPat(_))); + // We want to skip the `..` pattern here, since we account for it above. + let args = args + .filter(|p| !matches!(p, ast::Pat::RestPat(_))) + .map(|p| self.collect_pat(p)) + .collect(); + + (args, ellipsis) + } +} + +impl From for BinaryOp { + fn from(ast_op: ast::BinOp) -> Self { + match ast_op { + ast::BinOp::BooleanOr => BinaryOp::LogicOp(LogicOp::Or), + ast::BinOp::BooleanAnd => BinaryOp::LogicOp(LogicOp::And), + ast::BinOp::EqualityTest => BinaryOp::CmpOp(CmpOp::Eq { negated: false }), + ast::BinOp::NegatedEqualityTest => BinaryOp::CmpOp(CmpOp::Eq { negated: true }), + ast::BinOp::LesserEqualTest => { + BinaryOp::CmpOp(CmpOp::Ord { ordering: Ordering::Less, strict: false }) + } + ast::BinOp::GreaterEqualTest => { + BinaryOp::CmpOp(CmpOp::Ord { ordering: Ordering::Greater, strict: false }) + } + ast::BinOp::LesserTest => { + BinaryOp::CmpOp(CmpOp::Ord { ordering: Ordering::Less, strict: true }) + } + ast::BinOp::GreaterTest => { + BinaryOp::CmpOp(CmpOp::Ord { ordering: Ordering::Greater, strict: true }) + } + ast::BinOp::Addition => BinaryOp::ArithOp(ArithOp::Add), + ast::BinOp::Multiplication => BinaryOp::ArithOp(ArithOp::Mul), + ast::BinOp::Subtraction => BinaryOp::ArithOp(ArithOp::Sub), + ast::BinOp::Division => BinaryOp::ArithOp(ArithOp::Div), + ast::BinOp::Remainder => BinaryOp::ArithOp(ArithOp::Rem), + ast::BinOp::LeftShift => BinaryOp::ArithOp(ArithOp::Shl), + ast::BinOp::RightShift => BinaryOp::ArithOp(ArithOp::Shr), + ast::BinOp::BitwiseXor => BinaryOp::ArithOp(ArithOp::BitXor), + ast::BinOp::BitwiseOr => BinaryOp::ArithOp(ArithOp::BitOr), + ast::BinOp::BitwiseAnd => BinaryOp::ArithOp(ArithOp::BitAnd), + ast::BinOp::Assignment => BinaryOp::Assignment { op: None }, + ast::BinOp::AddAssign => BinaryOp::Assignment { op: Some(ArithOp::Add) }, + ast::BinOp::DivAssign => BinaryOp::Assignment { op: Some(ArithOp::Div) }, + ast::BinOp::MulAssign => BinaryOp::Assignment { op: 
Some(ArithOp::Mul) }, + ast::BinOp::RemAssign => BinaryOp::Assignment { op: Some(ArithOp::Rem) }, + ast::BinOp::ShlAssign => BinaryOp::Assignment { op: Some(ArithOp::Shl) }, + ast::BinOp::ShrAssign => BinaryOp::Assignment { op: Some(ArithOp::Shr) }, + ast::BinOp::SubAssign => BinaryOp::Assignment { op: Some(ArithOp::Sub) }, + ast::BinOp::BitOrAssign => BinaryOp::Assignment { op: Some(ArithOp::BitOr) }, + ast::BinOp::BitAndAssign => BinaryOp::Assignment { op: Some(ArithOp::BitAnd) }, + ast::BinOp::BitXorAssign => BinaryOp::Assignment { op: Some(ArithOp::BitXor) }, + } + } +} + +impl From for Literal { + fn from(ast_lit_kind: ast::LiteralKind) -> Self { + match ast_lit_kind { + LiteralKind::IntNumber { suffix } => { + let known_name = suffix.and_then(|it| BuiltinInt::from_suffix(&it)); + + Literal::Int(Default::default(), known_name) + } + LiteralKind::FloatNumber { suffix } => { + let known_name = suffix.and_then(|it| BuiltinFloat::from_suffix(&it)); + + Literal::Float(Default::default(), known_name) + } + LiteralKind::ByteString => Literal::ByteString(Default::default()), + LiteralKind::String => Literal::String(Default::default()), + LiteralKind::Byte => Literal::Int(Default::default(), Some(BuiltinInt::U8)), + LiteralKind::Bool(val) => Literal::Bool(val), + LiteralKind::Char => Literal::Char(Default::default()), + } + } +} diff --git a/crates/hir_def/src/body/scope.rs b/crates/hir_def/src/body/scope.rs new file mode 100644 index 0000000000..9142bc05b8 --- /dev/null +++ b/crates/hir_def/src/body/scope.rs @@ -0,0 +1,456 @@ +//! Name resolution for expressions. 
+use std::sync::Arc; + +use arena::{Arena, Idx}; +use hir_expand::name::Name; +use rustc_hash::FxHashMap; + +use crate::{ + body::Body, + db::DefDatabase, + expr::{Expr, ExprId, Pat, PatId, Statement}, + DefWithBodyId, +}; + +pub type ScopeId = Idx; + +#[derive(Debug, PartialEq, Eq)] +pub struct ExprScopes { + scopes: Arena, + scope_by_expr: FxHashMap, +} + +#[derive(Debug, PartialEq, Eq)] +pub struct ScopeEntry { + name: Name, + pat: PatId, +} + +impl ScopeEntry { + pub fn name(&self) -> &Name { + &self.name + } + + pub fn pat(&self) -> PatId { + self.pat + } +} + +#[derive(Debug, PartialEq, Eq)] +pub struct ScopeData { + parent: Option, + entries: Vec, +} + +impl ExprScopes { + pub(crate) fn expr_scopes_query(db: &dyn DefDatabase, def: DefWithBodyId) -> Arc { + let body = db.body(def); + Arc::new(ExprScopes::new(&*body)) + } + + fn new(body: &Body) -> ExprScopes { + let mut scopes = + ExprScopes { scopes: Arena::default(), scope_by_expr: FxHashMap::default() }; + let root = scopes.root_scope(); + scopes.add_params_bindings(body, root, &body.params); + compute_expr_scopes(body.body_expr, body, &mut scopes, root); + scopes + } + + pub fn entries(&self, scope: ScopeId) -> &[ScopeEntry] { + &self.scopes[scope].entries + } + + pub fn scope_chain(&self, scope: Option) -> impl Iterator + '_ { + std::iter::successors(scope, move |&scope| self.scopes[scope].parent) + } + + pub fn resolve_name_in_scope(&self, scope: ScopeId, name: &Name) -> Option<&ScopeEntry> { + self.scope_chain(Some(scope)) + .find_map(|scope| self.entries(scope).iter().find(|it| it.name == *name)) + } + + pub fn scope_for(&self, expr: ExprId) -> Option { + self.scope_by_expr.get(&expr).copied() + } + + pub fn scope_by_expr(&self) -> &FxHashMap { + &self.scope_by_expr + } + + fn root_scope(&mut self) -> ScopeId { + self.scopes.alloc(ScopeData { parent: None, entries: vec![] }) + } + + fn new_scope(&mut self, parent: ScopeId) -> ScopeId { + self.scopes.alloc(ScopeData { parent: Some(parent), entries: 
vec![] }) + } + + fn add_bindings(&mut self, body: &Body, scope: ScopeId, pat: PatId) { + let pattern = &body[pat]; + if let Pat::Bind { name, .. } = pattern { + let entry = ScopeEntry { name: name.clone(), pat }; + self.scopes[scope].entries.push(entry); + } + + pattern.walk_child_pats(|pat| self.add_bindings(body, scope, pat)); + } + + fn add_params_bindings(&mut self, body: &Body, scope: ScopeId, params: &[PatId]) { + params.iter().for_each(|pat| self.add_bindings(body, scope, *pat)); + } + + fn set_scope(&mut self, node: ExprId, scope: ScopeId) { + self.scope_by_expr.insert(node, scope); + } +} + +fn compute_block_scopes( + statements: &[Statement], + tail: Option, + body: &Body, + scopes: &mut ExprScopes, + mut scope: ScopeId, +) { + for stmt in statements { + match stmt { + Statement::Let { pat, initializer, .. } => { + if let Some(expr) = initializer { + scopes.set_scope(*expr, scope); + compute_expr_scopes(*expr, body, scopes, scope); + } + scope = scopes.new_scope(scope); + scopes.add_bindings(body, scope, *pat); + } + Statement::Expr(expr) => { + scopes.set_scope(*expr, scope); + compute_expr_scopes(*expr, body, scopes, scope); + } + } + } + if let Some(expr) = tail { + compute_expr_scopes(expr, body, scopes, scope); + } +} + +fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope: ScopeId) { + scopes.set_scope(expr, scope); + match &body[expr] { + Expr::Block { statements, tail, .. } => { + compute_block_scopes(&statements, *tail, body, scopes, scope); + } + Expr::For { iterable, pat, body: body_expr, .. } => { + compute_expr_scopes(*iterable, body, scopes, scope); + let scope = scopes.new_scope(scope); + scopes.add_bindings(body, scope, *pat); + compute_expr_scopes(*body_expr, body, scopes, scope); + } + Expr::Lambda { args, body: body_expr, .. 
} => { + let scope = scopes.new_scope(scope); + scopes.add_params_bindings(body, scope, &args); + compute_expr_scopes(*body_expr, body, scopes, scope); + } + Expr::Match { expr, arms } => { + compute_expr_scopes(*expr, body, scopes, scope); + for arm in arms { + let scope = scopes.new_scope(scope); + scopes.add_bindings(body, scope, arm.pat); + if let Some(guard) = arm.guard { + scopes.set_scope(guard, scope); + compute_expr_scopes(guard, body, scopes, scope); + } + scopes.set_scope(arm.expr, scope); + compute_expr_scopes(arm.expr, body, scopes, scope); + } + } + e => e.walk_child_exprs(|e| compute_expr_scopes(e, body, scopes, scope)), + }; +} + +#[cfg(test)] +mod tests { + use base_db::{fixture::WithFixture, FileId, SourceDatabase}; + use hir_expand::{name::AsName, InFile}; + use syntax::{algo::find_node_at_offset, ast, AstNode}; + use test_utils::{assert_eq_text, extract_offset, mark}; + + use crate::{db::DefDatabase, test_db::TestDB, FunctionId, ModuleDefId}; + + fn find_function(db: &TestDB, file_id: FileId) -> FunctionId { + let krate = db.test_crate(); + let crate_def_map = db.crate_def_map(krate); + + let module = crate_def_map.modules_for_file(file_id).next().unwrap(); + let (_, def) = crate_def_map[module].scope.entries().next().unwrap(); + match def.take_values().unwrap() { + ModuleDefId::FunctionId(it) => it, + _ => panic!(), + } + } + + fn do_check(ra_fixture: &str, expected: &[&str]) { + let (offset, code) = extract_offset(ra_fixture); + let code = { + let mut buf = String::new(); + let off: usize = offset.into(); + buf.push_str(&code[..off]); + buf.push_str("<|>marker"); + buf.push_str(&code[off..]); + buf + }; + + let (db, position) = TestDB::with_position(&code); + let file_id = position.file_id; + let offset = position.offset; + + let file_syntax = db.parse(file_id).syntax_node(); + let marker: ast::PathExpr = find_node_at_offset(&file_syntax, offset).unwrap(); + let function = find_function(&db, file_id); + + let scopes = 
db.expr_scopes(function.into()); + let (_body, source_map) = db.body_with_source_map(function.into()); + + let expr_id = source_map + .node_expr(InFile { file_id: file_id.into(), value: &marker.into() }) + .unwrap(); + let scope = scopes.scope_for(expr_id); + + let actual = scopes + .scope_chain(scope) + .flat_map(|scope| scopes.entries(scope)) + .map(|it| it.name().to_string()) + .collect::>() + .join("\n"); + let expected = expected.join("\n"); + assert_eq_text!(&expected, &actual); + } + + #[test] + fn test_lambda_scope() { + do_check( + r" + fn quux(foo: i32) { + let f = |bar, baz: i32| { + <|> + }; + }", + &["bar", "baz", "foo"], + ); + } + + #[test] + fn test_call_scope() { + do_check( + r" + fn quux() { + f(|x| <|> ); + }", + &["x"], + ); + } + + #[test] + fn test_method_call_scope() { + do_check( + r" + fn quux() { + z.f(|x| <|> ); + }", + &["x"], + ); + } + + #[test] + fn test_loop_scope() { + do_check( + r" + fn quux() { + loop { + let x = (); + <|> + }; + }", + &["x"], + ); + } + + #[test] + fn test_match() { + do_check( + r" + fn quux() { + match () { + Some(x) => { + <|> + } + }; + }", + &["x"], + ); + } + + #[test] + fn test_shadow_variable() { + do_check( + r" + fn foo(x: String) { + let x : &str = &x<|>; + }", + &["x"], + ); + } + + #[test] + fn test_bindings_after_at() { + do_check( + r" +fn foo() { + match Some(()) { + opt @ Some(unit) => { + <|> + } + _ => {} + } +} +", + &["opt", "unit"], + ); + } + + #[test] + fn macro_inner_item() { + do_check( + r" + macro_rules! 
mac { + () => {{ + fn inner() {} + inner(); + }}; + } + + fn foo() { + mac!(); + <|> + } + ", + &[], + ); + } + + #[test] + fn broken_inner_item() { + do_check( + r" + fn foo() { + trait {} + <|> + } + ", + &[], + ); + } + + fn do_check_local_name(ra_fixture: &str, expected_offset: u32) { + let (db, position) = TestDB::with_position(ra_fixture); + let file_id = position.file_id; + let offset = position.offset; + + let file = db.parse(file_id).ok().unwrap(); + let expected_name = find_node_at_offset::(file.syntax(), expected_offset.into()) + .expect("failed to find a name at the target offset"); + let name_ref: ast::NameRef = find_node_at_offset(file.syntax(), offset).unwrap(); + + let function = find_function(&db, file_id); + + let scopes = db.expr_scopes(function.into()); + let (_body, source_map) = db.body_with_source_map(function.into()); + + let expr_scope = { + let expr_ast = name_ref.syntax().ancestors().find_map(ast::Expr::cast).unwrap(); + let expr_id = + source_map.node_expr(InFile { file_id: file_id.into(), value: &expr_ast }).unwrap(); + scopes.scope_for(expr_id).unwrap() + }; + + let resolved = scopes.resolve_name_in_scope(expr_scope, &name_ref.as_name()).unwrap(); + let pat_src = source_map.pat_syntax(resolved.pat()).unwrap(); + + let local_name = pat_src.value.either( + |it| it.syntax_node_ptr().to_node(file.syntax()), + |it| it.syntax_node_ptr().to_node(file.syntax()), + ); + assert_eq!(local_name.text_range(), expected_name.syntax().text_range()); + } + + #[test] + fn test_resolve_local_name() { + do_check_local_name( + r#" +fn foo(x: i32, y: u32) { + { + let z = x * 2; + } + { + let t = x<|> * 3; + } +} +"#, + 7, + ); + } + + #[test] + fn test_resolve_local_name_declaration() { + do_check_local_name( + r#" +fn foo(x: String) { + let x : &str = &x<|>; +} +"#, + 7, + ); + } + + #[test] + fn test_resolve_local_name_shadow() { + do_check_local_name( + r" +fn foo(x: String) { + let x : &str = &x; + x<|> +} +", + 28, + ); + } + + #[test] + fn 
ref_patterns_contribute_bindings() { + do_check_local_name( + r" +fn foo() { + if let Some(&from) = bar() { + from<|>; + } +} +", + 28, + ); + } + + #[test] + fn while_let_desugaring() { + mark::check!(infer_resolve_while_let); + do_check_local_name( + r#" +fn test() { + let foo: Option = None; + while let Option::Some(spam) = foo { + spam<|> + } +} +"#, + 75, + ); + } +} diff --git a/crates/ra_hir_def/src/builtin_type.rs b/crates/hir_def/src/builtin_type.rs similarity index 100% rename from crates/ra_hir_def/src/builtin_type.rs rename to crates/hir_def/src/builtin_type.rs diff --git a/crates/ra_hir_def/src/child_by_source.rs b/crates/hir_def/src/child_by_source.rs similarity index 100% rename from crates/ra_hir_def/src/child_by_source.rs rename to crates/hir_def/src/child_by_source.rs diff --git a/crates/hir_def/src/data.rs b/crates/hir_def/src/data.rs new file mode 100644 index 0000000000..9a8eb4edec --- /dev/null +++ b/crates/hir_def/src/data.rs @@ -0,0 +1,278 @@ +//! Contains basic data about various HIR declarations. + +use std::sync::Arc; + +use hir_expand::{name::Name, InFile}; +use syntax::ast; + +use crate::{ + attr::Attrs, + body::Expander, + db::DefDatabase, + item_tree::{AssocItem, ItemTreeId, ModItem}, + type_ref::{TypeBound, TypeRef}, + visibility::RawVisibility, + AssocContainerId, AssocItemId, ConstId, ConstLoc, FunctionId, FunctionLoc, HasModule, ImplId, + Intern, Lookup, ModuleId, StaticId, TraitId, TypeAliasId, TypeAliasLoc, +}; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct FunctionData { + pub name: Name, + pub params: Vec, + pub ret_type: TypeRef, + pub attrs: Attrs, + /// True if the first param is `self`. This is relevant to decide whether this + /// can be called as a method. 
+ pub has_self_param: bool, + pub is_unsafe: bool, + pub is_varargs: bool, + pub visibility: RawVisibility, +} + +impl FunctionData { + pub(crate) fn fn_data_query(db: &dyn DefDatabase, func: FunctionId) -> Arc { + let loc = func.lookup(db); + let item_tree = db.item_tree(loc.id.file_id); + let func = &item_tree[loc.id.value]; + + Arc::new(FunctionData { + name: func.name.clone(), + params: func.params.to_vec(), + ret_type: func.ret_type.clone(), + attrs: item_tree.attrs(ModItem::from(loc.id.value).into()).clone(), + has_self_param: func.has_self_param, + is_unsafe: func.is_unsafe, + is_varargs: func.is_varargs, + visibility: item_tree[func.visibility].clone(), + }) + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct TypeAliasData { + pub name: Name, + pub type_ref: Option, + pub visibility: RawVisibility, + /// Bounds restricting the type alias itself (eg. `type Ty: Bound;` in a trait or impl). + pub bounds: Vec, +} + +impl TypeAliasData { + pub(crate) fn type_alias_data_query( + db: &dyn DefDatabase, + typ: TypeAliasId, + ) -> Arc { + let loc = typ.lookup(db); + let item_tree = db.item_tree(loc.id.file_id); + let typ = &item_tree[loc.id.value]; + + Arc::new(TypeAliasData { + name: typ.name.clone(), + type_ref: typ.type_ref.clone(), + visibility: item_tree[typ.visibility].clone(), + bounds: typ.bounds.to_vec(), + }) + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct TraitData { + pub name: Name, + pub items: Vec<(Name, AssocItemId)>, + pub auto: bool, +} + +impl TraitData { + pub(crate) fn trait_data_query(db: &dyn DefDatabase, tr: TraitId) -> Arc { + let tr_loc = tr.lookup(db); + let item_tree = db.item_tree(tr_loc.id.file_id); + let tr_def = &item_tree[tr_loc.id.value]; + let name = tr_def.name.clone(); + let auto = tr_def.auto; + let module_id = tr_loc.container.module(db); + let container = AssocContainerId::TraitId(tr); + let mut expander = Expander::new(db, tr_loc.id.file_id, module_id); + + let items = collect_items( + db, + module_id, + 
&mut expander, + tr_def.items.iter().copied(), + tr_loc.id.file_id, + container, + 100, + ); + + Arc::new(TraitData { name, items, auto }) + } + + pub fn associated_types(&self) -> impl Iterator + '_ { + self.items.iter().filter_map(|(_name, item)| match item { + AssocItemId::TypeAliasId(t) => Some(*t), + _ => None, + }) + } + + pub fn associated_type_by_name(&self, name: &Name) -> Option { + self.items.iter().find_map(|(item_name, item)| match item { + AssocItemId::TypeAliasId(t) if item_name == name => Some(*t), + _ => None, + }) + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ImplData { + pub target_trait: Option, + pub target_type: TypeRef, + pub items: Vec, + pub is_negative: bool, +} + +impl ImplData { + pub(crate) fn impl_data_query(db: &dyn DefDatabase, id: ImplId) -> Arc { + let _p = profile::span("impl_data_query"); + let impl_loc = id.lookup(db); + + let item_tree = db.item_tree(impl_loc.id.file_id); + let impl_def = &item_tree[impl_loc.id.value]; + let target_trait = impl_def.target_trait.clone(); + let target_type = impl_def.target_type.clone(); + let is_negative = impl_def.is_negative; + let module_id = impl_loc.container.module(db); + let container = AssocContainerId::ImplId(id); + let mut expander = Expander::new(db, impl_loc.id.file_id, module_id); + + let items = collect_items( + db, + module_id, + &mut expander, + impl_def.items.iter().copied(), + impl_loc.id.file_id, + container, + 100, + ); + let items = items.into_iter().map(|(_, item)| item).collect(); + + Arc::new(ImplData { target_trait, target_type, items, is_negative }) + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ConstData { + /// const _: () = (); + pub name: Option, + pub type_ref: TypeRef, + pub visibility: RawVisibility, +} + +impl ConstData { + pub(crate) fn const_data_query(db: &dyn DefDatabase, konst: ConstId) -> Arc { + let loc = konst.lookup(db); + let item_tree = db.item_tree(loc.id.file_id); + let konst = &item_tree[loc.id.value]; + + 
Arc::new(ConstData { + name: konst.name.clone(), + type_ref: konst.type_ref.clone(), + visibility: item_tree[konst.visibility].clone(), + }) + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct StaticData { + pub name: Option, + pub type_ref: TypeRef, + pub visibility: RawVisibility, + pub mutable: bool, +} + +impl StaticData { + pub(crate) fn static_data_query(db: &dyn DefDatabase, konst: StaticId) -> Arc { + let node = konst.lookup(db); + let item_tree = db.item_tree(node.id.file_id); + let statik = &item_tree[node.id.value]; + + Arc::new(StaticData { + name: Some(statik.name.clone()), + type_ref: statik.type_ref.clone(), + visibility: item_tree[statik.visibility].clone(), + mutable: statik.mutable, + }) + } +} + +fn collect_items( + db: &dyn DefDatabase, + module: ModuleId, + expander: &mut Expander, + assoc_items: impl Iterator, + file_id: crate::HirFileId, + container: AssocContainerId, + limit: usize, +) -> Vec<(Name, AssocItemId)> { + if limit == 0 { + return Vec::new(); + } + + let item_tree = db.item_tree(file_id); + let cfg_options = db.crate_graph()[module.krate].cfg_options.clone(); + + let mut items = Vec::new(); + for item in assoc_items { + match item { + AssocItem::Function(id) => { + let item = &item_tree[id]; + let attrs = item_tree.attrs(ModItem::from(id).into()); + if !attrs.is_cfg_enabled(&cfg_options) { + continue; + } + let def = FunctionLoc { container, id: ItemTreeId::new(file_id, id) }.intern(db); + items.push((item.name.clone(), def.into())); + } + // FIXME: cfg? 
+ AssocItem::Const(id) => { + let item = &item_tree[id]; + let name = match item.name.clone() { + Some(name) => name, + None => continue, + }; + let def = ConstLoc { container, id: ItemTreeId::new(file_id, id) }.intern(db); + items.push((name, def.into())); + } + AssocItem::TypeAlias(id) => { + let item = &item_tree[id]; + let def = TypeAliasLoc { container, id: ItemTreeId::new(file_id, id) }.intern(db); + items.push((item.name.clone(), def.into())); + } + AssocItem::MacroCall(call) => { + let call = &item_tree[call]; + let ast_id_map = db.ast_id_map(file_id); + let root = db.parse_or_expand(file_id).unwrap(); + let call = ast_id_map.get(call.ast_id).to_node(&root); + + if let Some((mark, mac)) = expander.enter_expand(db, None, call) { + let src: InFile = expander.to_source(mac); + let item_tree = db.item_tree(src.file_id); + let iter = + item_tree.top_level_items().iter().filter_map(ModItem::as_assoc_item); + items.extend(collect_items( + db, + module, + expander, + iter, + src.file_id, + container, + limit - 1, + )); + + expander.exit(db, mark); + } + } + } + } + + items +} diff --git a/crates/hir_def/src/db.rs b/crates/hir_def/src/db.rs new file mode 100644 index 0000000000..6d694de115 --- /dev/null +++ b/crates/hir_def/src/db.rs @@ -0,0 +1,120 @@ +//! Defines database & queries for name resolution. 
+use std::sync::Arc; + +use base_db::{salsa, CrateId, SourceDatabase, Upcast}; +use hir_expand::{db::AstDatabase, HirFileId}; +use syntax::SmolStr; + +use crate::{ + adt::{EnumData, StructData}, + attr::Attrs, + body::{scope::ExprScopes, Body, BodySourceMap}, + data::{ConstData, FunctionData, ImplData, StaticData, TraitData, TypeAliasData}, + docs::Documentation, + generics::GenericParams, + import_map::ImportMap, + item_tree::ItemTree, + lang_item::{LangItemTarget, LangItems}, + nameres::CrateDefMap, + AttrDefId, ConstId, ConstLoc, DefWithBodyId, EnumId, EnumLoc, FunctionId, FunctionLoc, + GenericDefId, ImplId, ImplLoc, ModuleId, StaticId, StaticLoc, StructId, StructLoc, TraitId, + TraitLoc, TypeAliasId, TypeAliasLoc, UnionId, UnionLoc, +}; + +#[salsa::query_group(InternDatabaseStorage)] +pub trait InternDatabase: SourceDatabase { + #[salsa::interned] + fn intern_function(&self, loc: FunctionLoc) -> FunctionId; + #[salsa::interned] + fn intern_struct(&self, loc: StructLoc) -> StructId; + #[salsa::interned] + fn intern_union(&self, loc: UnionLoc) -> UnionId; + #[salsa::interned] + fn intern_enum(&self, loc: EnumLoc) -> EnumId; + #[salsa::interned] + fn intern_const(&self, loc: ConstLoc) -> ConstId; + #[salsa::interned] + fn intern_static(&self, loc: StaticLoc) -> StaticId; + #[salsa::interned] + fn intern_trait(&self, loc: TraitLoc) -> TraitId; + #[salsa::interned] + fn intern_type_alias(&self, loc: TypeAliasLoc) -> TypeAliasId; + #[salsa::interned] + fn intern_impl(&self, loc: ImplLoc) -> ImplId; +} + +#[salsa::query_group(DefDatabaseStorage)] +pub trait DefDatabase: InternDatabase + AstDatabase + Upcast { + #[salsa::invoke(ItemTree::item_tree_query)] + fn item_tree(&self, file_id: HirFileId) -> Arc; + + #[salsa::invoke(crate_def_map_wait)] + #[salsa::transparent] + fn crate_def_map(&self, krate: CrateId) -> Arc; + + #[salsa::invoke(CrateDefMap::crate_def_map_query)] + fn crate_def_map_query(&self, krate: CrateId) -> Arc; + + 
#[salsa::invoke(StructData::struct_data_query)] + fn struct_data(&self, id: StructId) -> Arc; + #[salsa::invoke(StructData::union_data_query)] + fn union_data(&self, id: UnionId) -> Arc; + + #[salsa::invoke(EnumData::enum_data_query)] + fn enum_data(&self, e: EnumId) -> Arc; + + #[salsa::invoke(ImplData::impl_data_query)] + fn impl_data(&self, e: ImplId) -> Arc; + + #[salsa::invoke(TraitData::trait_data_query)] + fn trait_data(&self, e: TraitId) -> Arc; + + #[salsa::invoke(TypeAliasData::type_alias_data_query)] + fn type_alias_data(&self, e: TypeAliasId) -> Arc; + + #[salsa::invoke(FunctionData::fn_data_query)] + fn function_data(&self, func: FunctionId) -> Arc; + + #[salsa::invoke(ConstData::const_data_query)] + fn const_data(&self, konst: ConstId) -> Arc; + + #[salsa::invoke(StaticData::static_data_query)] + fn static_data(&self, konst: StaticId) -> Arc; + + #[salsa::invoke(Body::body_with_source_map_query)] + fn body_with_source_map(&self, def: DefWithBodyId) -> (Arc, Arc); + + #[salsa::invoke(Body::body_query)] + fn body(&self, def: DefWithBodyId) -> Arc; + + #[salsa::invoke(ExprScopes::expr_scopes_query)] + fn expr_scopes(&self, def: DefWithBodyId) -> Arc; + + #[salsa::invoke(GenericParams::generic_params_query)] + fn generic_params(&self, def: GenericDefId) -> Arc; + + #[salsa::invoke(Attrs::attrs_query)] + fn attrs(&self, def: AttrDefId) -> Attrs; + + #[salsa::invoke(LangItems::module_lang_items_query)] + fn module_lang_items(&self, module: ModuleId) -> Option>; + + #[salsa::invoke(LangItems::crate_lang_items_query)] + fn crate_lang_items(&self, krate: CrateId) -> Arc; + + #[salsa::invoke(LangItems::lang_item_query)] + fn lang_item(&self, start_crate: CrateId, item: SmolStr) -> Option; + + // FIXME(https://github.com/rust-analyzer/rust-analyzer/issues/2148#issuecomment-550519102) + // Remove this query completely, in favor of `Attrs::docs` method + #[salsa::invoke(Documentation::documentation_query)] + fn documentation(&self, def: AttrDefId) -> Option; + + 
#[salsa::invoke(ImportMap::import_map_query)] + fn import_map(&self, krate: CrateId) -> Arc; +} + +fn crate_def_map_wait(db: &impl DefDatabase, krate: CrateId) -> Arc { + let _p = profile::span("crate_def_map:wait"); + db.crate_def_map_query(krate) +} diff --git a/crates/hir_def/src/diagnostics.rs b/crates/hir_def/src/diagnostics.rs new file mode 100644 index 0000000000..2e38a978f8 --- /dev/null +++ b/crates/hir_def/src/diagnostics.rs @@ -0,0 +1,27 @@ +//! Diagnostics produced by `hir_def`. + +use std::any::Any; + +use hir_expand::diagnostics::Diagnostic; +use syntax::{ast, AstPtr, SyntaxNodePtr}; + +use hir_expand::{HirFileId, InFile}; + +#[derive(Debug)] +pub struct UnresolvedModule { + pub file: HirFileId, + pub decl: AstPtr, + pub candidate: String, +} + +impl Diagnostic for UnresolvedModule { + fn message(&self) -> String { + "unresolved module".to_string() + } + fn display_source(&self) -> InFile { + InFile::new(self.file, self.decl.clone().into()) + } + fn as_any(&self) -> &(dyn Any + Send + 'static) { + self + } +} diff --git a/crates/hir_def/src/docs.rs b/crates/hir_def/src/docs.rs new file mode 100644 index 0000000000..e9a02b11bb --- /dev/null +++ b/crates/hir_def/src/docs.rs @@ -0,0 +1,121 @@ +//! Defines hir documentation. +//! +//! This really shouldn't exist, instead, we should deshugar doc comments into attributes, see +//! 
https://github.com/rust-analyzer/rust-analyzer/issues/2148#issuecomment-550519102 + +use std::sync::Arc; + +use either::Either; +use syntax::ast; + +use crate::{ + db::DefDatabase, + src::{HasChildSource, HasSource}, + AdtId, AttrDefId, Lookup, +}; + +/// Holds documentation +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct Documentation(Arc); + +impl Into for Documentation { + fn into(self) -> String { + self.as_str().to_owned() + } +} + +impl Documentation { + fn new(s: &str) -> Documentation { + Documentation(s.into()) + } + + pub fn from_ast(node: &N) -> Option + where + N: ast::DocCommentsOwner + ast::AttrsOwner, + { + docs_from_ast(node) + } + + pub fn as_str(&self) -> &str { + &*self.0 + } + + pub(crate) fn documentation_query( + db: &dyn DefDatabase, + def: AttrDefId, + ) -> Option { + match def { + AttrDefId::ModuleId(module) => { + let def_map = db.crate_def_map(module.krate); + let src = def_map[module.local_id].declaration_source(db)?; + docs_from_ast(&src.value) + } + AttrDefId::FieldId(it) => { + let src = it.parent.child_source(db); + match &src.value[it.local_id] { + Either::Left(_tuple) => None, + Either::Right(record) => docs_from_ast(record), + } + } + AttrDefId::AdtId(it) => match it { + AdtId::StructId(it) => docs_from_ast(&it.lookup(db).source(db).value), + AdtId::EnumId(it) => docs_from_ast(&it.lookup(db).source(db).value), + AdtId::UnionId(it) => docs_from_ast(&it.lookup(db).source(db).value), + }, + AttrDefId::EnumVariantId(it) => { + let src = it.parent.child_source(db); + docs_from_ast(&src.value[it.local_id]) + } + AttrDefId::TraitId(it) => docs_from_ast(&it.lookup(db).source(db).value), + AttrDefId::MacroDefId(it) => docs_from_ast(&it.ast_id?.to_node(db.upcast())), + AttrDefId::ConstId(it) => docs_from_ast(&it.lookup(db).source(db).value), + AttrDefId::StaticId(it) => docs_from_ast(&it.lookup(db).source(db).value), + AttrDefId::FunctionId(it) => docs_from_ast(&it.lookup(db).source(db).value), + AttrDefId::TypeAliasId(it) => 
docs_from_ast(&it.lookup(db).source(db).value), + AttrDefId::ImplId(_) => None, + } + } +} + +pub(crate) fn docs_from_ast(node: &N) -> Option +where + N: ast::DocCommentsOwner + ast::AttrsOwner, +{ + let doc_comment_text = node.doc_comment_text(); + let doc_attr_text = expand_doc_attrs(node); + let docs = merge_doc_comments_and_attrs(doc_comment_text, doc_attr_text); + docs.map(|it| Documentation::new(&it)) +} + +fn merge_doc_comments_and_attrs( + doc_comment_text: Option, + doc_attr_text: Option, +) -> Option { + match (doc_comment_text, doc_attr_text) { + (Some(mut comment_text), Some(attr_text)) => { + comment_text.push_str("\n\n"); + comment_text.push_str(&attr_text); + Some(comment_text) + } + (Some(comment_text), None) => Some(comment_text), + (None, Some(attr_text)) => Some(attr_text), + (None, None) => None, + } +} + +fn expand_doc_attrs(owner: &dyn ast::AttrsOwner) -> Option { + let mut docs = String::new(); + for attr in owner.attrs() { + if let Some(("doc", value)) = + attr.as_simple_key_value().as_ref().map(|(k, v)| (k.as_str(), v.as_str())) + { + docs.push_str(value); + docs.push_str("\n\n"); + } + } + if docs.is_empty() { + None + } else { + Some(docs.trim_end_matches("\n\n").to_owned()) + } +} diff --git a/crates/ra_hir_def/src/dyn_map.rs b/crates/hir_def/src/dyn_map.rs similarity index 100% rename from crates/ra_hir_def/src/dyn_map.rs rename to crates/hir_def/src/dyn_map.rs diff --git a/crates/hir_def/src/expr.rs b/crates/hir_def/src/expr.rs new file mode 100644 index 0000000000..c94b3a36f5 --- /dev/null +++ b/crates/hir_def/src/expr.rs @@ -0,0 +1,420 @@ +//! This module describes hir-level representation of expressions. +//! +//! This representaion is: +//! +//! 1. Identity-based. Each expression has an `id`, so we can distinguish +//! between different `1` in `1 + 1`. +//! 2. Independent of syntax. Though syntactic provenance information can be +//! attached separately via id-based side map. +//! 3. Unresolved. 
Paths are stored as sequences of names, and not as defs the +//! names refer to. +//! 4. Desugared. There's no `if let`. +//! +//! See also a neighboring `body` module. + +use arena::{Idx, RawId}; +use hir_expand::name::Name; +use syntax::ast::RangeOp; + +use crate::{ + builtin_type::{BuiltinFloat, BuiltinInt}, + path::{GenericArgs, Path}, + type_ref::{Mutability, Rawness, TypeRef}, +}; + +pub type ExprId = Idx; +pub(crate) fn dummy_expr_id() -> ExprId { + ExprId::from_raw(RawId::from(!0)) +} + +pub type PatId = Idx; + +#[derive(Debug, Clone, Eq, PartialEq)] +pub enum Literal { + String(String), + ByteString(Vec), + Char(char), + Bool(bool), + Int(u64, Option), + Float(u64, Option), // FIXME: f64 is not Eq +} + +#[derive(Debug, Clone, Eq, PartialEq)] +pub enum Expr { + /// This is produced if the syntax tree does not have a required expression piece. + Missing, + Path(Path), + If { + condition: ExprId, + then_branch: ExprId, + else_branch: Option, + }, + Block { + statements: Vec, + tail: Option, + label: Option, + }, + Loop { + body: ExprId, + label: Option, + }, + While { + condition: ExprId, + body: ExprId, + label: Option, + }, + For { + iterable: ExprId, + pat: PatId, + body: ExprId, + label: Option, + }, + Call { + callee: ExprId, + args: Vec, + }, + MethodCall { + receiver: ExprId, + method_name: Name, + args: Vec, + generic_args: Option, + }, + Match { + expr: ExprId, + arms: Vec, + }, + Continue { + label: Option, + }, + Break { + expr: Option, + label: Option, + }, + Return { + expr: Option, + }, + RecordLit { + path: Option, + fields: Vec, + spread: Option, + }, + Field { + expr: ExprId, + name: Name, + }, + Await { + expr: ExprId, + }, + Try { + expr: ExprId, + }, + TryBlock { + body: ExprId, + }, + Cast { + expr: ExprId, + type_ref: TypeRef, + }, + Ref { + expr: ExprId, + rawness: Rawness, + mutability: Mutability, + }, + Box { + expr: ExprId, + }, + UnaryOp { + expr: ExprId, + op: UnaryOp, + }, + BinaryOp { + lhs: ExprId, + rhs: ExprId, + op: Option, 
+ }, + Range { + lhs: Option, + rhs: Option, + range_type: RangeOp, + }, + Index { + base: ExprId, + index: ExprId, + }, + Lambda { + args: Vec, + arg_types: Vec>, + ret_type: Option, + body: ExprId, + }, + Tuple { + exprs: Vec, + }, + Unsafe { + body: ExprId, + }, + Array(Array), + Literal(Literal), +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +pub enum BinaryOp { + LogicOp(LogicOp), + ArithOp(ArithOp), + CmpOp(CmpOp), + Assignment { op: Option }, +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +pub enum LogicOp { + And, + Or, +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +pub enum CmpOp { + Eq { negated: bool }, + Ord { ordering: Ordering, strict: bool }, +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +pub enum Ordering { + Less, + Greater, +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +pub enum ArithOp { + Add, + Mul, + Sub, + Div, + Rem, + Shl, + Shr, + BitXor, + BitOr, + BitAnd, +} + +pub use syntax::ast::PrefixOp as UnaryOp; +#[derive(Debug, Clone, Eq, PartialEq)] +pub enum Array { + ElementList(Vec), + Repeat { initializer: ExprId, repeat: ExprId }, +} + +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct MatchArm { + pub pat: PatId, + pub guard: Option, + pub expr: ExprId, +} + +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct RecordLitField { + pub name: Name, + pub expr: ExprId, +} + +#[derive(Debug, Clone, Eq, PartialEq)] +pub enum Statement { + Let { pat: PatId, type_ref: Option, initializer: Option }, + Expr(ExprId), +} + +impl Expr { + pub fn walk_child_exprs(&self, mut f: impl FnMut(ExprId)) { + match self { + Expr::Missing => {} + Expr::Path(_) => {} + Expr::If { condition, then_branch, else_branch } => { + f(*condition); + f(*then_branch); + if let Some(else_branch) = else_branch { + f(*else_branch); + } + } + Expr::Block { statements, tail, .. } => { + for stmt in statements { + match stmt { + Statement::Let { initializer, .. 
} => { + if let Some(expr) = initializer { + f(*expr); + } + } + Statement::Expr(e) => f(*e), + } + } + if let Some(expr) = tail { + f(*expr); + } + } + Expr::TryBlock { body } | Expr::Unsafe { body } => f(*body), + Expr::Loop { body, .. } => f(*body), + Expr::While { condition, body, .. } => { + f(*condition); + f(*body); + } + Expr::For { iterable, body, .. } => { + f(*iterable); + f(*body); + } + Expr::Call { callee, args } => { + f(*callee); + for arg in args { + f(*arg); + } + } + Expr::MethodCall { receiver, args, .. } => { + f(*receiver); + for arg in args { + f(*arg); + } + } + Expr::Match { expr, arms } => { + f(*expr); + for arm in arms { + f(arm.expr); + } + } + Expr::Continue { .. } => {} + Expr::Break { expr, .. } | Expr::Return { expr } => { + if let Some(expr) = expr { + f(*expr); + } + } + Expr::RecordLit { fields, spread, .. } => { + for field in fields { + f(field.expr); + } + if let Some(expr) = spread { + f(*expr); + } + } + Expr::Lambda { body, .. } => { + f(*body); + } + Expr::BinaryOp { lhs, rhs, .. } => { + f(*lhs); + f(*rhs); + } + Expr::Range { lhs, rhs, .. } => { + if let Some(lhs) = rhs { + f(*lhs); + } + if let Some(rhs) = lhs { + f(*rhs); + } + } + Expr::Index { base, index } => { + f(*base); + f(*index); + } + Expr::Field { expr, .. } + | Expr::Await { expr } + | Expr::Try { expr } + | Expr::Cast { expr, .. } + | Expr::Ref { expr, .. } + | Expr::UnaryOp { expr, .. } + | Expr::Box { expr } => { + f(*expr); + } + Expr::Tuple { exprs } => { + for expr in exprs { + f(*expr); + } + } + Expr::Array(a) => match a { + Array::ElementList(exprs) => { + for expr in exprs { + f(*expr); + } + } + Array::Repeat { initializer, repeat } => { + f(*initializer); + f(*repeat) + } + }, + Expr::Literal(_) => {} + } + } +} + +/// Explicit binding annotations given in the HIR for a binding. Note +/// that this is not the final binding *mode* that we infer after type +/// inference. 
+#[derive(Clone, PartialEq, Eq, Debug, Copy)] +pub enum BindingAnnotation { + /// No binding annotation given: this means that the final binding mode + /// will depend on whether we have skipped through a `&` reference + /// when matching. For example, the `x` in `Some(x)` will have binding + /// mode `None`; if you do `let Some(x) = &Some(22)`, it will + /// ultimately be inferred to be by-reference. + Unannotated, + + /// Annotated with `mut x` -- could be either ref or not, similar to `None`. + Mutable, + + /// Annotated as `ref`, like `ref x` + Ref, + + /// Annotated as `ref mut x`. + RefMut, +} + +impl BindingAnnotation { + pub fn new(is_mutable: bool, is_ref: bool) -> Self { + match (is_mutable, is_ref) { + (true, true) => BindingAnnotation::RefMut, + (false, true) => BindingAnnotation::Ref, + (true, false) => BindingAnnotation::Mutable, + (false, false) => BindingAnnotation::Unannotated, + } + } +} + +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct RecordFieldPat { + pub name: Name, + pub pat: PatId, +} + +/// Close relative to rustc's hir::PatKind +#[derive(Debug, Clone, Eq, PartialEq)] +pub enum Pat { + Missing, + Wild, + Tuple { args: Vec, ellipsis: Option }, + Or(Vec), + Record { path: Option, args: Vec, ellipsis: bool }, + Range { start: ExprId, end: ExprId }, + Slice { prefix: Vec, slice: Option, suffix: Vec }, + Path(Path), + Lit(ExprId), + Bind { mode: BindingAnnotation, name: Name, subpat: Option }, + TupleStruct { path: Option, args: Vec, ellipsis: Option }, + Ref { pat: PatId, mutability: Mutability }, +} + +impl Pat { + pub fn walk_child_pats(&self, mut f: impl FnMut(PatId)) { + match self { + Pat::Range { .. } | Pat::Lit(..) | Pat::Path(..) | Pat::Wild | Pat::Missing => {} + Pat::Bind { subpat, .. } => { + subpat.iter().copied().for_each(f); + } + Pat::Or(args) | Pat::Tuple { args, .. } | Pat::TupleStruct { args, .. } => { + args.iter().copied().for_each(f); + } + Pat::Ref { pat, .. 
} => f(*pat), + Pat::Slice { prefix, slice, suffix } => { + let total_iter = prefix.iter().chain(slice.iter()).chain(suffix.iter()); + total_iter.copied().for_each(f); + } + Pat::Record { args, .. } => { + args.iter().map(|f| f.pat).for_each(f); + } + } + } +} diff --git a/crates/hir_def/src/find_path.rs b/crates/hir_def/src/find_path.rs new file mode 100644 index 0000000000..ac2c54ac53 --- /dev/null +++ b/crates/hir_def/src/find_path.rs @@ -0,0 +1,687 @@ +//! An algorithm to find a path to refer to a certain item. + +use hir_expand::name::{known, AsName, Name}; +use rustc_hash::FxHashSet; +use test_utils::mark; + +use crate::{ + db::DefDatabase, + item_scope::ItemInNs, + path::{ModPath, PathKind}, + visibility::Visibility, + ModuleDefId, ModuleId, +}; + +// FIXME: handle local items + +/// Find a path that can be used to refer to a certain item. This can depend on +/// *from where* you're referring to the item, hence the `from` parameter. +pub fn find_path(db: &dyn DefDatabase, item: ItemInNs, from: ModuleId) -> Option { + let _p = profile::span("find_path"); + find_path_inner(db, item, from, MAX_PATH_LEN) +} + +const MAX_PATH_LEN: usize = 15; + +impl ModPath { + fn starts_with_std(&self) -> bool { + self.segments.first() == Some(&known::std) + } + + // When std library is present, paths starting with `std::` + // should be preferred over paths starting with `core::` and `alloc::` + fn can_start_with_std(&self) -> bool { + let first_segment = self.segments.first(); + first_segment == Some(&known::alloc) || first_segment == Some(&known::core) + } +} + +fn find_path_inner( + db: &dyn DefDatabase, + item: ItemInNs, + from: ModuleId, + max_len: usize, +) -> Option { + if max_len == 0 { + return None; + } + + // Base cases: + + // - if the item is already in scope, return the name under which it is + let def_map = db.crate_def_map(from.krate); + let from_scope: &crate::item_scope::ItemScope = &def_map.modules[from.local_id].scope; + if let Some((name, _)) = 
from_scope.name_of(item) { + return Some(ModPath::from_segments(PathKind::Plain, vec![name.clone()])); + } + + // - if the item is the crate root, return `crate` + if item + == ItemInNs::Types(ModuleDefId::ModuleId(ModuleId { + krate: from.krate, + local_id: def_map.root, + })) + { + return Some(ModPath::from_segments(PathKind::Crate, Vec::new())); + } + + // - if the item is the module we're in, use `self` + if item == ItemInNs::Types(from.into()) { + return Some(ModPath::from_segments(PathKind::Super(0), Vec::new())); + } + + // - if the item is the parent module, use `super` (this is not used recursively, since `super::super` is ugly) + if let Some(parent_id) = def_map.modules[from.local_id].parent { + if item + == ItemInNs::Types(ModuleDefId::ModuleId(ModuleId { + krate: from.krate, + local_id: parent_id, + })) + { + return Some(ModPath::from_segments(PathKind::Super(1), Vec::new())); + } + } + + // - if the item is the crate root of a dependency crate, return the name from the extern prelude + for (name, def_id) in &def_map.extern_prelude { + if item == ItemInNs::Types(*def_id) { + return Some(ModPath::from_segments(PathKind::Plain, vec![name.clone()])); + } + } + + // - if the item is in the prelude, return the name from there + if let Some(prelude_module) = def_map.prelude { + let prelude_def_map = db.crate_def_map(prelude_module.krate); + let prelude_scope: &crate::item_scope::ItemScope = + &prelude_def_map.modules[prelude_module.local_id].scope; + if let Some((name, vis)) = prelude_scope.name_of(item) { + if vis.is_visible_from(db, from) { + return Some(ModPath::from_segments(PathKind::Plain, vec![name.clone()])); + } + } + } + + // - if the item is a builtin, it's in scope + if let ItemInNs::Types(ModuleDefId::BuiltinType(builtin)) = item { + return Some(ModPath::from_segments(PathKind::Plain, vec![builtin.as_name()])); + } + + // Recursive case: + // - if the item is an enum variant, refer to it via the enum + if let 
Some(ModuleDefId::EnumVariantId(variant)) = item.as_module_def_id() { + if let Some(mut path) = find_path(db, ItemInNs::Types(variant.parent.into()), from) { + let data = db.enum_data(variant.parent); + path.segments.push(data.variants[variant.local_id].name.clone()); + return Some(path); + } + // If this doesn't work, it seems we have no way of referring to the + // enum; that's very weird, but there might still be a reexport of the + // variant somewhere + } + + // - otherwise, look for modules containing (reexporting) it and import it from one of those + + let crate_root = ModuleId { local_id: def_map.root, krate: from.krate }; + let crate_attrs = db.attrs(crate_root.into()); + let prefer_no_std = crate_attrs.by_key("no_std").exists(); + let mut best_path = None; + let mut best_path_len = max_len; + + if item.krate(db) == Some(from.krate) { + // Item was defined in the same crate that wants to import it. It cannot be found in any + // dependency in this case. + + let local_imports = find_local_import_locations(db, item, from); + for (module_id, name) in local_imports { + if let Some(mut path) = find_path_inner( + db, + ItemInNs::Types(ModuleDefId::ModuleId(module_id)), + from, + best_path_len - 1, + ) { + path.segments.push(name); + + let new_path = if let Some(best_path) = best_path { + select_best_path(best_path, path, prefer_no_std) + } else { + path + }; + best_path_len = new_path.len(); + best_path = Some(new_path); + } + } + } else { + // Item was defined in some upstream crate. This means that it must be exported from one, + // too (unless we can't name it at all). It could *also* be (re)exported by the same crate + // that wants to import it here, but we always prefer to use the external path here. 
+ + let crate_graph = db.crate_graph(); + let extern_paths = crate_graph[from.krate].dependencies.iter().filter_map(|dep| { + let import_map = db.import_map(dep.crate_id); + import_map.import_info_for(item).and_then(|info| { + // Determine best path for containing module and append last segment from `info`. + let mut path = find_path_inner( + db, + ItemInNs::Types(ModuleDefId::ModuleId(info.container)), + from, + best_path_len - 1, + )?; + path.segments.push(info.path.segments.last().unwrap().clone()); + Some(path) + }) + }); + + for path in extern_paths { + let new_path = if let Some(best_path) = best_path { + select_best_path(best_path, path, prefer_no_std) + } else { + path + }; + best_path = Some(new_path); + } + } + + best_path +} + +fn select_best_path(old_path: ModPath, new_path: ModPath, prefer_no_std: bool) -> ModPath { + if old_path.starts_with_std() && new_path.can_start_with_std() { + if prefer_no_std { + mark::hit!(prefer_no_std_paths); + new_path + } else { + mark::hit!(prefer_std_paths); + old_path + } + } else if new_path.starts_with_std() && old_path.can_start_with_std() { + if prefer_no_std { + mark::hit!(prefer_no_std_paths); + old_path + } else { + mark::hit!(prefer_std_paths); + new_path + } + } else if new_path.len() < old_path.len() { + new_path + } else { + old_path + } +} + +/// Finds locations in `from.krate` from which `item` can be imported by `from`. +fn find_local_import_locations( + db: &dyn DefDatabase, + item: ItemInNs, + from: ModuleId, +) -> Vec<(ModuleId, Name)> { + let _p = profile::span("find_local_import_locations"); + + // `from` can import anything below `from` with visibility of at least `from`, and anything + // above `from` with any visibility. That means we do not need to descend into private siblings + // of `from` (and similar). + + let def_map = db.crate_def_map(from.krate); + + // Compute the initial worklist. We start with all direct child modules of `from` as well as all + // of its (recursive) parent modules. 
+ let data = &def_map.modules[from.local_id]; + let mut worklist = data + .children + .values() + .map(|child| ModuleId { krate: from.krate, local_id: *child }) + .collect::>(); + let mut parent = data.parent; + while let Some(p) = parent { + worklist.push(ModuleId { krate: from.krate, local_id: p }); + parent = def_map.modules[p].parent; + } + + let mut seen: FxHashSet<_> = FxHashSet::default(); + + let mut locations = Vec::new(); + while let Some(module) = worklist.pop() { + if !seen.insert(module) { + continue; // already processed this module + } + + let ext_def_map; + let data = if module.krate == from.krate { + &def_map[module.local_id] + } else { + // The crate might reexport a module defined in another crate. + ext_def_map = db.crate_def_map(module.krate); + &ext_def_map[module.local_id] + }; + + if let Some((name, vis)) = data.scope.name_of(item) { + if vis.is_visible_from(db, from) { + let is_private = if let Visibility::Module(private_to) = vis { + private_to.local_id == module.local_id + } else { + false + }; + let is_original_def = if let Some(module_def_id) = item.as_module_def_id() { + data.scope.declarations().any(|it| it == module_def_id) + } else { + false + }; + + // Ignore private imports. these could be used if we are + // in a submodule of this module, but that's usually not + // what the user wants; and if this module can import + // the item and we're a submodule of it, so can we. + // Also this keeps the cached data smaller. + if !is_private || is_original_def { + locations.push((module, name.clone())); + } + } + } + + // Descend into all modules visible from `from`. 
+ for (_, per_ns) in data.scope.entries() { + if let Some((ModuleDefId::ModuleId(module), vis)) = per_ns.take_types_vis() { + if vis.is_visible_from(db, from) { + worklist.push(module); + } + } + } + } + + locations +} + +#[cfg(test)] +mod tests { + use base_db::fixture::WithFixture; + use hir_expand::hygiene::Hygiene; + use syntax::ast::AstNode; + use test_utils::mark; + + use crate::test_db::TestDB; + + use super::*; + + /// `code` needs to contain a cursor marker; checks that `find_path` for the + /// item the `path` refers to returns that same path when called from the + /// module the cursor is in. + fn check_found_path(ra_fixture: &str, path: &str) { + let (db, pos) = TestDB::with_position(ra_fixture); + let module = db.module_for_file(pos.file_id); + let parsed_path_file = syntax::SourceFile::parse(&format!("use {};", path)); + let ast_path = + parsed_path_file.syntax_node().descendants().find_map(syntax::ast::Path::cast).unwrap(); + let mod_path = ModPath::from_src(ast_path, &Hygiene::new_unhygienic()).unwrap(); + + let crate_def_map = db.crate_def_map(module.krate); + let resolved = crate_def_map + .resolve_path( + &db, + module.local_id, + &mod_path, + crate::item_scope::BuiltinShadowMode::Module, + ) + .0 + .take_types() + .unwrap(); + + let found_path = find_path(&db, ItemInNs::Types(resolved), module); + + assert_eq!(found_path, Some(mod_path)); + } + + #[test] + fn same_module() { + let code = r#" + //- /main.rs + struct S; + <|> + "#; + check_found_path(code, "S"); + } + + #[test] + fn enum_variant() { + let code = r#" + //- /main.rs + enum E { A } + <|> + "#; + check_found_path(code, "E::A"); + } + + #[test] + fn sub_module() { + let code = r#" + //- /main.rs + mod foo { + pub struct S; + } + <|> + "#; + check_found_path(code, "foo::S"); + } + + #[test] + fn super_module() { + let code = r#" + //- /main.rs + mod foo; + //- /foo.rs + mod bar; + struct S; + //- /foo/bar.rs + <|> + "#; + check_found_path(code, "super::S"); + } + + #[test] + fn 
self_module() { + let code = r#" + //- /main.rs + mod foo; + //- /foo.rs + <|> + "#; + check_found_path(code, "self"); + } + + #[test] + fn crate_root() { + let code = r#" + //- /main.rs + mod foo; + //- /foo.rs + <|> + "#; + check_found_path(code, "crate"); + } + + #[test] + fn same_crate() { + let code = r#" + //- /main.rs + mod foo; + struct S; + //- /foo.rs + <|> + "#; + check_found_path(code, "crate::S"); + } + + #[test] + fn different_crate() { + let code = r#" + //- /main.rs crate:main deps:std + <|> + //- /std.rs crate:std + pub struct S; + "#; + check_found_path(code, "std::S"); + } + + #[test] + fn different_crate_renamed() { + let code = r#" + //- /main.rs crate:main deps:std + extern crate std as std_renamed; + <|> + //- /std.rs crate:std + pub struct S; + "#; + check_found_path(code, "std_renamed::S"); + } + + #[test] + fn partially_imported() { + // Tests that short paths are used even for external items, when parts of the path are + // already in scope. + check_found_path( + r#" + //- /main.rs crate:main deps:syntax + + use syntax::ast; + <|> + + //- /lib.rs crate:syntax + pub mod ast { + pub enum ModuleItem { + A, B, C, + } + } + "#, + "ast::ModuleItem", + ); + + check_found_path( + r#" + //- /main.rs crate:main deps:syntax + + <|> + + //- /lib.rs crate:syntax + pub mod ast { + pub enum ModuleItem { + A, B, C, + } + } + "#, + "syntax::ast::ModuleItem", + ); + } + + #[test] + fn same_crate_reexport() { + let code = r#" + //- /main.rs + mod bar { + mod foo { pub(super) struct S; } + pub(crate) use foo::*; + } + <|> + "#; + check_found_path(code, "bar::S"); + } + + #[test] + fn same_crate_reexport_rename() { + let code = r#" + //- /main.rs + mod bar { + mod foo { pub(super) struct S; } + pub(crate) use foo::S as U; + } + <|> + "#; + check_found_path(code, "bar::U"); + } + + #[test] + fn different_crate_reexport() { + let code = r#" + //- /main.rs crate:main deps:std + <|> + //- /std.rs crate:std deps:core + pub use core::S; + //- /core.rs crate:core + 
pub struct S; + "#; + check_found_path(code, "std::S"); + } + + #[test] + fn prelude() { + let code = r#" + //- /main.rs crate:main deps:std + <|> + //- /std.rs crate:std + pub mod prelude { pub struct S; } + #[prelude_import] + pub use prelude::*; + "#; + check_found_path(code, "S"); + } + + #[test] + fn enum_variant_from_prelude() { + let code = r#" + //- /main.rs crate:main deps:std + <|> + //- /std.rs crate:std + pub mod prelude { + pub enum Option { Some(T), None } + pub use Option::*; + } + #[prelude_import] + pub use prelude::*; + "#; + check_found_path(code, "None"); + check_found_path(code, "Some"); + } + + #[test] + fn shortest_path() { + let code = r#" + //- /main.rs + pub mod foo; + pub mod baz; + struct S; + <|> + //- /foo.rs + pub mod bar { pub struct S; } + //- /baz.rs + pub use crate::foo::bar::S; + "#; + check_found_path(code, "baz::S"); + } + + #[test] + fn discount_private_imports() { + let code = r#" + //- /main.rs + mod foo; + pub mod bar { pub struct S; } + use bar::S; + //- /foo.rs + <|> + "#; + // crate::S would be shorter, but using private imports seems wrong + check_found_path(code, "crate::bar::S"); + } + + #[test] + fn import_cycle() { + let code = r#" + //- /main.rs + pub mod foo; + pub mod bar; + pub mod baz; + //- /bar.rs + <|> + //- /foo.rs + pub use super::baz; + pub struct S; + //- /baz.rs + pub use super::foo; + "#; + check_found_path(code, "crate::foo::S"); + } + + #[test] + fn prefer_std_paths_over_alloc() { + mark::check!(prefer_std_paths); + let code = r#" + //- /main.rs crate:main deps:alloc,std + <|> + + //- /std.rs crate:std deps:alloc + pub mod sync { + pub use alloc::sync::Arc; + } + + //- /zzz.rs crate:alloc + pub mod sync { + pub struct Arc; + } + "#; + check_found_path(code, "std::sync::Arc"); + } + + #[test] + fn prefer_core_paths_over_std() { + mark::check!(prefer_no_std_paths); + let code = r#" + //- /main.rs crate:main deps:core,std + #![no_std] + + <|> + + //- /std.rs crate:std deps:core + + pub mod fmt { + pub 
use core::fmt::Error; + } + + //- /zzz.rs crate:core + + pub mod fmt { + pub struct Error; + } + "#; + check_found_path(code, "core::fmt::Error"); + } + + #[test] + fn prefer_alloc_paths_over_std() { + let code = r#" + //- /main.rs crate:main deps:alloc,std + #![no_std] + + <|> + + //- /std.rs crate:std deps:alloc + + pub mod sync { + pub use alloc::sync::Arc; + } + + //- /zzz.rs crate:alloc + + pub mod sync { + pub struct Arc; + } + "#; + check_found_path(code, "alloc::sync::Arc"); + } + + #[test] + fn prefer_shorter_paths_if_not_alloc() { + let code = r#" + //- /main.rs crate:main deps:megaalloc,std + <|> + + //- /std.rs crate:std deps:megaalloc + pub mod sync { + pub use megaalloc::sync::Arc; + } + + //- /zzz.rs crate:megaalloc + pub struct Arc; + "#; + check_found_path(code, "megaalloc::Arc"); + } + + #[test] + fn builtins_are_in_scope() { + let code = r#" + //- /main.rs + <|> + + pub mod primitive { + pub use u8; + } + "#; + check_found_path(code, "u8"); + check_found_path(code, "u16"); + } +} diff --git a/crates/hir_def/src/generics.rs b/crates/hir_def/src/generics.rs new file mode 100644 index 0000000000..835fe3fbdc --- /dev/null +++ b/crates/hir_def/src/generics.rs @@ -0,0 +1,339 @@ +//! Many kinds of items or constructs can have generic parameters: functions, +//! structs, impls, traits, etc. This module provides a common HIR for these +//! generic parameters. See also the `Generics` type and the `generics_of` query +//! in rustc. 
+use std::sync::Arc; + +use arena::{map::ArenaMap, Arena}; +use base_db::FileId; +use either::Either; +use hir_expand::{ + name::{name, AsName, Name}, + InFile, +}; +use syntax::ast::{self, GenericParamsOwner, NameOwner, TypeBoundsOwner}; + +use crate::{ + body::LowerCtx, + child_by_source::ChildBySource, + db::DefDatabase, + dyn_map::DynMap, + keys, + src::HasChildSource, + src::HasSource, + type_ref::{TypeBound, TypeRef}, + AdtId, GenericDefId, LocalTypeParamId, Lookup, TypeParamId, +}; + +/// Data about a generic parameter (to a function, struct, impl, ...). +#[derive(Clone, PartialEq, Eq, Debug)] +pub struct TypeParamData { + pub name: Option, + pub default: Option, + pub provenance: TypeParamProvenance, +} + +#[derive(Copy, Clone, PartialEq, Eq, Debug)] +pub enum TypeParamProvenance { + TypeParamList, + TraitSelf, + ArgumentImplTrait, +} + +/// Data about the generic parameters of a function, struct, impl, etc. +#[derive(Clone, PartialEq, Eq, Debug, Default)] +pub struct GenericParams { + pub types: Arena, + // lifetimes: Arena, + pub where_predicates: Vec, +} + +/// A single predicate from a where clause, i.e. `where Type: Trait`. Combined +/// where clauses like `where T: Foo + Bar` are turned into multiple of these. +/// It might still result in multiple actual predicates though, because of +/// associated type bindings like `Iterator`. +#[derive(Clone, PartialEq, Eq, Debug)] +pub struct WherePredicate { + pub target: WherePredicateTarget, + pub bound: TypeBound, +} + +#[derive(Clone, PartialEq, Eq, Debug)] +pub enum WherePredicateTarget { + TypeRef(TypeRef), + /// For desugared where predicates that can directly refer to a type param. 
+ TypeParam(LocalTypeParamId), +} + +type SourceMap = ArenaMap>; + +impl GenericParams { + pub(crate) fn generic_params_query( + db: &dyn DefDatabase, + def: GenericDefId, + ) -> Arc { + let _p = profile::span("generic_params_query"); + + let generics = match def { + GenericDefId::FunctionId(id) => { + let id = id.lookup(db).id; + let tree = db.item_tree(id.file_id); + let item = &tree[id.value]; + tree[item.generic_params].clone() + } + GenericDefId::AdtId(AdtId::StructId(id)) => { + let id = id.lookup(db).id; + let tree = db.item_tree(id.file_id); + let item = &tree[id.value]; + tree[item.generic_params].clone() + } + GenericDefId::AdtId(AdtId::EnumId(id)) => { + let id = id.lookup(db).id; + let tree = db.item_tree(id.file_id); + let item = &tree[id.value]; + tree[item.generic_params].clone() + } + GenericDefId::AdtId(AdtId::UnionId(id)) => { + let id = id.lookup(db).id; + let tree = db.item_tree(id.file_id); + let item = &tree[id.value]; + tree[item.generic_params].clone() + } + GenericDefId::TraitId(id) => { + let id = id.lookup(db).id; + let tree = db.item_tree(id.file_id); + let item = &tree[id.value]; + tree[item.generic_params].clone() + } + GenericDefId::TypeAliasId(id) => { + let id = id.lookup(db).id; + let tree = db.item_tree(id.file_id); + let item = &tree[id.value]; + tree[item.generic_params].clone() + } + GenericDefId::ImplId(id) => { + let id = id.lookup(db).id; + let tree = db.item_tree(id.file_id); + let item = &tree[id.value]; + tree[item.generic_params].clone() + } + GenericDefId::EnumVariantId(_) | GenericDefId::ConstId(_) => GenericParams::default(), + }; + Arc::new(generics) + } + + fn new(db: &dyn DefDatabase, def: GenericDefId) -> (GenericParams, InFile) { + let mut generics = GenericParams { types: Arena::default(), where_predicates: Vec::new() }; + let mut sm = ArenaMap::default(); + + // FIXME: add `: Sized` bound for everything except for `Self` in traits + let file_id = match def { + GenericDefId::FunctionId(it) => { + let src = 
it.lookup(db).source(db); + let lower_ctx = LowerCtx::new(db, src.file_id); + generics.fill(&lower_ctx, &mut sm, &src.value); + // lower `impl Trait` in arguments + let data = db.function_data(it); + for param in &data.params { + generics.fill_implicit_impl_trait_args(param); + } + src.file_id + } + GenericDefId::AdtId(AdtId::StructId(it)) => { + let src = it.lookup(db).source(db); + let lower_ctx = LowerCtx::new(db, src.file_id); + generics.fill(&lower_ctx, &mut sm, &src.value); + src.file_id + } + GenericDefId::AdtId(AdtId::UnionId(it)) => { + let src = it.lookup(db).source(db); + let lower_ctx = LowerCtx::new(db, src.file_id); + generics.fill(&lower_ctx, &mut sm, &src.value); + src.file_id + } + GenericDefId::AdtId(AdtId::EnumId(it)) => { + let src = it.lookup(db).source(db); + let lower_ctx = LowerCtx::new(db, src.file_id); + generics.fill(&lower_ctx, &mut sm, &src.value); + src.file_id + } + GenericDefId::TraitId(it) => { + let src = it.lookup(db).source(db); + let lower_ctx = LowerCtx::new(db, src.file_id); + + // traits get the Self type as an implicit first type parameter + let self_param_id = generics.types.alloc(TypeParamData { + name: Some(name![Self]), + default: None, + provenance: TypeParamProvenance::TraitSelf, + }); + sm.insert(self_param_id, Either::Left(src.value.clone())); + // add super traits as bounds on Self + // i.e., trait Foo: Bar is equivalent to trait Foo where Self: Bar + let self_param = TypeRef::Path(name![Self].into()); + generics.fill_bounds(&lower_ctx, &src.value, self_param); + + generics.fill(&lower_ctx, &mut sm, &src.value); + src.file_id + } + GenericDefId::TypeAliasId(it) => { + let src = it.lookup(db).source(db); + let lower_ctx = LowerCtx::new(db, src.file_id); + + generics.fill(&lower_ctx, &mut sm, &src.value); + src.file_id + } + // Note that we don't add `Self` here: in `impl`s, `Self` is not a + // type-parameter, but rather is a type-alias for impl's target + // type, so this is handled by the resolver. 
+ GenericDefId::ImplId(it) => { + let src = it.lookup(db).source(db); + let lower_ctx = LowerCtx::new(db, src.file_id); + + generics.fill(&lower_ctx, &mut sm, &src.value); + src.file_id + } + // We won't be using this ID anyway + GenericDefId::EnumVariantId(_) | GenericDefId::ConstId(_) => FileId(!0).into(), + }; + + (generics, InFile::new(file_id, sm)) + } + + pub(crate) fn fill( + &mut self, + lower_ctx: &LowerCtx, + sm: &mut SourceMap, + node: &dyn GenericParamsOwner, + ) { + if let Some(params) = node.generic_param_list() { + self.fill_params(lower_ctx, sm, params) + } + if let Some(where_clause) = node.where_clause() { + self.fill_where_predicates(lower_ctx, where_clause); + } + } + + pub(crate) fn fill_bounds( + &mut self, + lower_ctx: &LowerCtx, + node: &dyn ast::TypeBoundsOwner, + type_ref: TypeRef, + ) { + for bound in + node.type_bound_list().iter().flat_map(|type_bound_list| type_bound_list.bounds()) + { + self.add_where_predicate_from_bound(lower_ctx, bound, type_ref.clone()); + } + } + + fn fill_params( + &mut self, + lower_ctx: &LowerCtx, + sm: &mut SourceMap, + params: ast::GenericParamList, + ) { + for type_param in params.type_params() { + let name = type_param.name().map_or_else(Name::missing, |it| it.as_name()); + // FIXME: Use `Path::from_src` + let default = type_param.default_type().map(|it| TypeRef::from_ast(lower_ctx, it)); + let param = TypeParamData { + name: Some(name.clone()), + default, + provenance: TypeParamProvenance::TypeParamList, + }; + let param_id = self.types.alloc(param); + sm.insert(param_id, Either::Right(type_param.clone())); + + let type_ref = TypeRef::Path(name.into()); + self.fill_bounds(&lower_ctx, &type_param, type_ref); + } + } + + fn fill_where_predicates(&mut self, lower_ctx: &LowerCtx, where_clause: ast::WhereClause) { + for pred in where_clause.predicates() { + let type_ref = match pred.ty() { + Some(type_ref) => type_ref, + None => continue, + }; + let type_ref = TypeRef::from_ast(lower_ctx, type_ref); + for 
bound in pred.type_bound_list().iter().flat_map(|l| l.bounds()) { + self.add_where_predicate_from_bound(lower_ctx, bound, type_ref.clone()); + } + } + } + + fn add_where_predicate_from_bound( + &mut self, + lower_ctx: &LowerCtx, + bound: ast::TypeBound, + type_ref: TypeRef, + ) { + if bound.question_mark_token().is_some() { + // FIXME: remove this bound + return; + } + let bound = TypeBound::from_ast(lower_ctx, bound); + self.where_predicates + .push(WherePredicate { target: WherePredicateTarget::TypeRef(type_ref), bound }); + } + + pub(crate) fn fill_implicit_impl_trait_args(&mut self, type_ref: &TypeRef) { + type_ref.walk(&mut |type_ref| { + if let TypeRef::ImplTrait(bounds) = type_ref { + let param = TypeParamData { + name: None, + default: None, + provenance: TypeParamProvenance::ArgumentImplTrait, + }; + let param_id = self.types.alloc(param); + for bound in bounds { + self.where_predicates.push(WherePredicate { + target: WherePredicateTarget::TypeParam(param_id), + bound: bound.clone(), + }); + } + } + }); + } + + pub fn find_by_name(&self, name: &Name) -> Option { + self.types + .iter() + .find_map(|(id, p)| if p.name.as_ref() == Some(name) { Some(id) } else { None }) + } + + pub fn find_trait_self_param(&self) -> Option { + self.types.iter().find_map(|(id, p)| { + if p.provenance == TypeParamProvenance::TraitSelf { + Some(id) + } else { + None + } + }) + } +} + +impl HasChildSource for GenericDefId { + type ChildId = LocalTypeParamId; + type Value = Either; + fn child_source(&self, db: &dyn DefDatabase) -> InFile { + let (_, sm) = GenericParams::new(db, *self); + sm + } +} + +impl ChildBySource for GenericDefId { + fn child_by_source(&self, db: &dyn DefDatabase) -> DynMap { + let mut res = DynMap::default(); + let arena_map = self.child_source(db); + let arena_map = arena_map.as_ref(); + for (local_id, src) in arena_map.value.iter() { + let id = TypeParamId { parent: *self, local_id }; + if let Either::Right(type_param) = src { + 
res[keys::TYPE_PARAM].insert(arena_map.with_value(type_param.clone()), id) + } + } + res + } +} diff --git a/crates/hir_def/src/import_map.rs b/crates/hir_def/src/import_map.rs new file mode 100644 index 0000000000..d32a0bdaf2 --- /dev/null +++ b/crates/hir_def/src/import_map.rs @@ -0,0 +1,745 @@ +//! A map of all publicly exported items in a crate. + +use std::{cmp::Ordering, fmt, hash::BuildHasherDefault, sync::Arc}; + +use base_db::CrateId; +use fst::{self, Streamer}; +use indexmap::{map::Entry, IndexMap}; +use rustc_hash::{FxHashMap, FxHasher}; +use smallvec::SmallVec; +use syntax::SmolStr; + +use crate::{ + db::DefDatabase, + item_scope::ItemInNs, + path::{ModPath, PathKind}, + visibility::Visibility, + AssocItemId, ModuleDefId, ModuleId, TraitId, +}; + +type FxIndexMap = IndexMap>; + +/// Item import details stored in the `ImportMap`. +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct ImportInfo { + /// A path that can be used to import the item, relative to the crate's root. + pub path: ModPath, + /// The module containing this item. + pub container: ModuleId, +} + +/// A map from publicly exported items to the path needed to import/name them from a downstream +/// crate. +/// +/// Reexports of items are taken into account, ie. if something is exported under multiple +/// names, the one with the shortest import path will be used. +/// +/// Note that all paths are relative to the containing crate's root, so the crate name still needs +/// to be prepended to the `ModPath` before the path is valid. +#[derive(Default)] +pub struct ImportMap { + map: FxIndexMap, + + /// List of keys stored in `map`, sorted lexicographically by their `ModPath`. Indexed by the + /// values returned by running `fst`. + /// + /// Since a path can refer to multiple items due to namespacing, we store all items with the + /// same path right after each other. This allows us to find all items after the FST gives us + /// the index of the first one. 
+ importables: Vec, + fst: fst::Map>, + + /// Maps names of associated items to the item's ID. Only includes items whose defining trait is + /// exported. + assoc_map: FxHashMap>, +} + +impl ImportMap { + pub fn import_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc { + let _p = profile::span("import_map_query"); + let def_map = db.crate_def_map(krate); + let mut import_map = Self::default(); + + // We look only into modules that are public(ly reexported), starting with the crate root. + let empty = ModPath { kind: PathKind::Plain, segments: vec![] }; + let root = ModuleId { krate, local_id: def_map.root }; + let mut worklist = vec![(root, empty)]; + while let Some((module, mod_path)) = worklist.pop() { + let ext_def_map; + let mod_data = if module.krate == krate { + &def_map[module.local_id] + } else { + // The crate might reexport a module defined in another crate. + ext_def_map = db.crate_def_map(module.krate); + &ext_def_map[module.local_id] + }; + + let visible_items = mod_data.scope.entries().filter_map(|(name, per_ns)| { + let per_ns = per_ns.filter_visibility(|vis| vis == Visibility::Public); + if per_ns.is_none() { + None + } else { + Some((name, per_ns)) + } + }); + + for (name, per_ns) in visible_items { + let mk_path = || { + let mut path = mod_path.clone(); + path.segments.push(name.clone()); + path + }; + + for item in per_ns.iter_items() { + let path = mk_path(); + match import_map.map.entry(item) { + Entry::Vacant(entry) => { + entry.insert(ImportInfo { path, container: module }); + } + Entry::Occupied(mut entry) => { + // If the new path is shorter, prefer that one. + if path.len() < entry.get().path.len() { + *entry.get_mut() = ImportInfo { path, container: module }; + } else { + continue; + } + } + } + + // If we've just added a path to a module, descend into it. We might traverse + // modules multiple times, but only if the new path to it is shorter than the + // first (else we `continue` above). 
+ if let Some(ModuleDefId::ModuleId(mod_id)) = item.as_module_def_id() { + worklist.push((mod_id, mk_path())); + } + + // If we've added a path to a trait, add the trait's methods to the method map. + if let Some(ModuleDefId::TraitId(tr)) = item.as_module_def_id() { + import_map.collect_trait_methods(db, tr); + } + } + } + } + + let mut importables = import_map.map.iter().collect::>(); + + importables.sort_by(cmp); + + // Build the FST, taking care not to insert duplicate values. + + let mut builder = fst::MapBuilder::memory(); + let mut last_batch_start = 0; + + for idx in 0..importables.len() { + if let Some(next_item) = importables.get(idx + 1) { + if cmp(&importables[last_batch_start], next_item) == Ordering::Equal { + continue; + } + } + + let start = last_batch_start; + last_batch_start = idx + 1; + + let key = fst_path(&importables[start].1.path); + + builder.insert(key, start as u64).unwrap(); + } + + import_map.fst = fst::Map::new(builder.into_inner().unwrap()).unwrap(); + import_map.importables = importables.iter().map(|(item, _)| **item).collect(); + + Arc::new(import_map) + } + + /// Returns the `ModPath` needed to import/mention `item`, relative to this crate's root. + pub fn path_of(&self, item: ItemInNs) -> Option<&ModPath> { + Some(&self.map.get(&item)?.path) + } + + pub fn import_info_for(&self, item: ItemInNs) -> Option<&ImportInfo> { + self.map.get(&item) + } + + fn collect_trait_methods(&mut self, db: &dyn DefDatabase, tr: TraitId) { + let data = db.trait_data(tr); + for (name, item) in data.items.iter() { + self.assoc_map.entry(name.to_string().into()).or_default().push(*item); + } + } +} + +impl PartialEq for ImportMap { + fn eq(&self, other: &Self) -> bool { + // `fst` and `importables` are built from `map`, so we don't need to compare them. 
+ self.map == other.map + } +} + +impl Eq for ImportMap {} + +impl fmt::Debug for ImportMap { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let mut importable_paths: Vec<_> = self + .map + .iter() + .map(|(item, info)| { + let ns = match item { + ItemInNs::Types(_) => "t", + ItemInNs::Values(_) => "v", + ItemInNs::Macros(_) => "m", + }; + format!("- {} ({})", info.path, ns) + }) + .collect(); + + importable_paths.sort(); + f.write_str(&importable_paths.join("\n")) + } +} + +fn fst_path(path: &ModPath) -> String { + let mut s = path.to_string(); + s.make_ascii_lowercase(); + s +} + +fn cmp((_, lhs): &(&ItemInNs, &ImportInfo), (_, rhs): &(&ItemInNs, &ImportInfo)) -> Ordering { + let lhs_str = fst_path(&lhs.path); + let rhs_str = fst_path(&rhs.path); + lhs_str.cmp(&rhs_str) +} + +#[derive(Debug)] +pub struct Query { + query: String, + lowercased: String, + anchor_end: bool, + case_sensitive: bool, + limit: usize, +} + +impl Query { + pub fn new(query: &str) -> Self { + Self { + lowercased: query.to_lowercase(), + query: query.to_string(), + anchor_end: false, + case_sensitive: false, + limit: usize::max_value(), + } + } + + /// Only returns items whose paths end with the (case-insensitive) query string as their last + /// segment. + pub fn anchor_end(self) -> Self { + Self { anchor_end: true, ..self } + } + + /// Limits the returned number of items to `limit`. + pub fn limit(self, limit: usize) -> Self { + Self { limit, ..self } + } + + /// Respect casing of the query string when matching. + pub fn case_sensitive(self) -> Self { + Self { case_sensitive: true, ..self } + } +} + +/// Searches dependencies of `krate` for an importable path matching `query`. +/// +/// This returns a list of items that could be imported from dependencies of `krate`. 
+pub fn search_dependencies<'a>( + db: &'a dyn DefDatabase, + krate: CrateId, + query: Query, +) -> Vec { + let _p = profile::span("search_dependencies").detail(|| format!("{:?}", query)); + + let graph = db.crate_graph(); + let import_maps: Vec<_> = + graph[krate].dependencies.iter().map(|dep| db.import_map(dep.crate_id)).collect(); + + let automaton = fst::automaton::Subsequence::new(&query.lowercased); + + let mut op = fst::map::OpBuilder::new(); + for map in &import_maps { + op = op.add(map.fst.search(&automaton)); + } + + let mut stream = op.union(); + let mut res = Vec::new(); + while let Some((_, indexed_values)) = stream.next() { + for indexed_value in indexed_values { + let import_map = &import_maps[indexed_value.index]; + let importables = &import_map.importables[indexed_value.value as usize..]; + + // Path shared by the importable items in this group. + let path = &import_map.map[&importables[0]].path; + + if query.anchor_end { + // Last segment must match query. + let last = path.segments.last().unwrap().to_string(); + if last.to_lowercase() != query.lowercased { + continue; + } + } + + // Add the items from this `ModPath` group. Those are all subsequent items in + // `importables` whose paths match `path`. + let iter = importables.iter().copied().take_while(|item| { + let item_path = &import_map.map[item].path; + fst_path(item_path) == fst_path(path) + }); + + if query.case_sensitive { + // FIXME: This does not do a subsequence match. + res.extend(iter.filter(|item| { + let item_path = &import_map.map[item].path; + item_path.to_string().contains(&query.query) + })); + } else { + res.extend(iter); + } + + if res.len() >= query.limit { + res.truncate(query.limit); + return res; + } + } + } + + // Add all exported associated items whose names match the query (exactly). 
+ for map in &import_maps { + if let Some(v) = map.assoc_map.get(&*query.query) { + res.extend(v.iter().map(|&assoc| { + ItemInNs::Types(match assoc { + AssocItemId::FunctionId(it) => it.into(), + AssocItemId::ConstId(it) => it.into(), + AssocItemId::TypeAliasId(it) => it.into(), + }) + })); + } + } + + res +} + +#[cfg(test)] +mod tests { + use base_db::{fixture::WithFixture, SourceDatabase, Upcast}; + use expect::{expect, Expect}; + + use crate::{test_db::TestDB, AssocContainerId, Lookup}; + + use super::*; + + fn check_search(ra_fixture: &str, krate_name: &str, query: Query, expect: Expect) { + let db = TestDB::with_files(ra_fixture); + let crate_graph = db.crate_graph(); + let krate = crate_graph + .iter() + .find(|krate| { + crate_graph[*krate].display_name.as_ref().map(|n| n.to_string()) + == Some(krate_name.to_string()) + }) + .unwrap(); + + let actual = search_dependencies(db.upcast(), krate, query) + .into_iter() + .filter_map(|item| { + let mark = match item { + ItemInNs::Types(_) => "t", + ItemInNs::Values(_) => "v", + ItemInNs::Macros(_) => "m", + }; + let item = assoc_to_trait(&db, item); + item.krate(db.upcast()).map(|krate| { + let map = db.import_map(krate); + let path = map.path_of(item).unwrap(); + format!( + "{}::{} ({})\n", + crate_graph[krate].display_name.as_ref().unwrap(), + path, + mark + ) + }) + }) + .collect::(); + expect.assert_eq(&actual) + } + + fn assoc_to_trait(db: &dyn DefDatabase, item: ItemInNs) -> ItemInNs { + let assoc: AssocItemId = match item { + ItemInNs::Types(it) | ItemInNs::Values(it) => match it { + ModuleDefId::TypeAliasId(it) => it.into(), + ModuleDefId::FunctionId(it) => it.into(), + ModuleDefId::ConstId(it) => it.into(), + _ => return item, + }, + _ => return item, + }; + + let container = match assoc { + AssocItemId::FunctionId(it) => it.lookup(db).container, + AssocItemId::ConstId(it) => it.lookup(db).container, + AssocItemId::TypeAliasId(it) => it.lookup(db).container, + }; + + match container { + 
AssocContainerId::TraitId(it) => ItemInNs::Types(it.into()), + _ => item, + } + } + + fn check(ra_fixture: &str, expect: Expect) { + let db = TestDB::with_files(ra_fixture); + let crate_graph = db.crate_graph(); + + let actual = crate_graph + .iter() + .filter_map(|krate| { + let cdata = &crate_graph[krate]; + let name = cdata.display_name.as_ref()?; + + let map = db.import_map(krate); + + Some(format!("{}:\n{:?}\n", name, map)) + }) + .collect::(); + + expect.assert_eq(&actual) + } + + #[test] + fn smoke() { + check( + r" + //- /main.rs crate:main deps:lib + + mod private { + pub use lib::Pub; + pub struct InPrivateModule; + } + + pub mod publ1 { + use lib::Pub; + } + + pub mod real_pub { + pub use lib::Pub; + } + pub mod real_pu2 { // same path length as above + pub use lib::Pub; + } + + //- /lib.rs crate:lib + pub struct Pub {} + pub struct Pub2; // t + v + struct Priv; + ", + expect![[r#" + main: + - publ1 (t) + - real_pu2 (t) + - real_pub (t) + - real_pub::Pub (t) + lib: + - Pub (t) + - Pub2 (t) + - Pub2 (v) + "#]], + ); + } + + #[test] + fn prefers_shortest_path() { + check( + r" + //- /main.rs crate:main + + pub mod sub { + pub mod subsub { + pub struct Def {} + } + + pub use super::sub::subsub::Def; + } + ", + expect![[r#" + main: + - sub (t) + - sub::Def (t) + - sub::subsub (t) + "#]], + ); + } + + #[test] + fn type_reexport_cross_crate() { + // Reexports need to be visible from a crate, even if the original crate exports the item + // at a shorter path. + check( + r" + //- /main.rs crate:main deps:lib + pub mod m { + pub use lib::S; + } + //- /lib.rs crate:lib + pub struct S; + ", + expect![[r#" + main: + - m (t) + - m::S (t) + - m::S (v) + lib: + - S (t) + - S (v) + "#]], + ); + } + + #[test] + fn macro_reexport() { + check( + r" + //- /main.rs crate:main deps:lib + pub mod m { + pub use lib::pub_macro; + } + //- /lib.rs crate:lib + #[macro_export] + macro_rules! 
pub_macro { + () => {}; + } + ", + expect![[r#" + main: + - m (t) + - m::pub_macro (m) + lib: + - pub_macro (m) + "#]], + ); + } + + #[test] + fn module_reexport() { + // Reexporting modules from a dependency adds all contents to the import map. + check( + r" + //- /main.rs crate:main deps:lib + pub use lib::module as reexported_module; + //- /lib.rs crate:lib + pub mod module { + pub struct S; + } + ", + expect![[r#" + main: + - reexported_module (t) + - reexported_module::S (t) + - reexported_module::S (v) + lib: + - module (t) + - module::S (t) + - module::S (v) + "#]], + ); + } + + #[test] + fn cyclic_module_reexport() { + // A cyclic reexport does not hang. + check( + r" + //- /lib.rs crate:lib + pub mod module { + pub struct S; + pub use super::sub::*; + } + + pub mod sub { + pub use super::module; + } + ", + expect![[r#" + lib: + - module (t) + - module::S (t) + - module::S (v) + - sub (t) + "#]], + ); + } + + #[test] + fn private_macro() { + check( + r" + //- /lib.rs crate:lib + macro_rules! private_macro { + () => {}; + } + ", + expect![[r#" + lib: + + "#]], + ); + } + + #[test] + fn namespacing() { + check( + r" + //- /lib.rs crate:lib + pub struct Thing; // t + v + #[macro_export] + macro_rules! Thing { // m + () => {}; + } + ", + expect![[r#" + lib: + - Thing (m) + - Thing (t) + - Thing (v) + "#]], + ); + + check( + r" + //- /lib.rs crate:lib + pub mod Thing {} // t + #[macro_export] + macro_rules! Thing { // m + () => {}; + } + ", + expect![[r#" + lib: + - Thing (m) + - Thing (t) + "#]], + ); + } + + #[test] + fn search() { + let ra_fixture = r#" + //- /main.rs crate:main deps:dep + //- /dep.rs crate:dep deps:tdep + use tdep::fmt as fmt_dep; + pub mod fmt { + pub trait Display { + fn fmt(); + } + } + #[macro_export] + macro_rules! 
Fmt { + () => {}; + } + pub struct Fmt; + + pub fn format() {} + pub fn no() {} + + //- /tdep.rs crate:tdep + pub mod fmt { + pub struct NotImportableFromMain; + } + "#; + + check_search( + ra_fixture, + "main", + Query::new("fmt"), + expect![[r#" + dep::fmt (t) + dep::Fmt (t) + dep::Fmt (v) + dep::Fmt (m) + dep::fmt::Display (t) + dep::format (v) + dep::fmt::Display (t) + "#]], + ); + + check_search( + ra_fixture, + "main", + Query::new("fmt").anchor_end(), + expect![[r#" + dep::fmt (t) + dep::Fmt (t) + dep::Fmt (v) + dep::Fmt (m) + dep::fmt::Display (t) + "#]], + ); + } + + #[test] + fn search_casing() { + let ra_fixture = r#" + //- /main.rs crate:main deps:dep + //- /dep.rs crate:dep + + pub struct fmt; + pub struct FMT; + "#; + + check_search( + ra_fixture, + "main", + Query::new("FMT"), + expect![[r#" + dep::fmt (t) + dep::fmt (v) + dep::FMT (t) + dep::FMT (v) + "#]], + ); + + check_search( + ra_fixture, + "main", + Query::new("FMT").case_sensitive(), + expect![[r#" + dep::FMT (t) + dep::FMT (v) + "#]], + ); + } + + #[test] + fn search_limit() { + check_search( + r#" + //- /main.rs crate:main deps:dep + //- /dep.rs crate:dep + pub mod fmt { + pub trait Display { + fn fmt(); + } + } + #[macro_export] + macro_rules! Fmt { + () => {}; + } + pub struct Fmt; + + pub fn format() {} + pub fn no() {} + "#, + "main", + Query::new("").limit(2), + expect![[r#" + dep::fmt (t) + dep::Fmt (t) + "#]], + ); + } +} diff --git a/crates/hir_def/src/item_scope.rs b/crates/hir_def/src/item_scope.rs new file mode 100644 index 0000000000..f1e9dfd5b1 --- /dev/null +++ b/crates/hir_def/src/item_scope.rs @@ -0,0 +1,341 @@ +//! Describes items defined or visible (ie, imported) in a certain scope. +//! This is shared between modules and blocks. 
+ +use std::collections::hash_map::Entry; + +use base_db::CrateId; +use hir_expand::name::Name; +use once_cell::sync::Lazy; +use rustc_hash::{FxHashMap, FxHashSet}; +use test_utils::mark; + +use crate::{ + db::DefDatabase, per_ns::PerNs, visibility::Visibility, AdtId, BuiltinType, HasModule, ImplId, + LocalModuleId, Lookup, MacroDefId, ModuleDefId, TraitId, +}; + +#[derive(Copy, Clone)] +pub(crate) enum ImportType { + Glob, + Named, +} + +#[derive(Debug, Default)] +pub struct PerNsGlobImports { + types: FxHashSet<(LocalModuleId, Name)>, + values: FxHashSet<(LocalModuleId, Name)>, + macros: FxHashSet<(LocalModuleId, Name)>, +} + +#[derive(Debug, Default, PartialEq, Eq)] +pub struct ItemScope { + types: FxHashMap, + values: FxHashMap, + macros: FxHashMap, + unresolved: FxHashSet, + + defs: Vec, + impls: Vec, + /// Traits imported via `use Trait as _;`. + unnamed_trait_imports: FxHashMap, + /// Macros visible in current module in legacy textual scope + /// + /// For macros invoked by an unqualified identifier like `bar!()`, `legacy_macros` will be searched in first. + /// If it yields no result, then it turns to module scoped `macros`. + /// It macros with name qualified with a path like `crate::foo::bar!()`, `legacy_macros` will be skipped, + /// and only normal scoped `macros` will be searched in. + /// + /// Note that this automatically inherit macros defined textually before the definition of module itself. + /// + /// Module scoped macros will be inserted into `items` instead of here. + // FIXME: Macro shadowing in one module is not properly handled. Non-item place macros will + // be all resolved to the last one defined if shadowing happens. + legacy_macros: FxHashMap, +} + +pub(crate) static BUILTIN_SCOPE: Lazy> = Lazy::new(|| { + BuiltinType::ALL + .iter() + .map(|(name, ty)| (name.clone(), PerNs::types(ty.clone().into(), Visibility::Public))) + .collect() +}); + +/// Shadow mode for builtin type which can be shadowed by module. 
+#[derive(Debug, Copy, Clone, PartialEq, Eq)] +pub(crate) enum BuiltinShadowMode { + /// Prefer user-defined modules (or other types) over builtins. + Module, + /// Prefer builtins over user-defined modules (but not other types). + Other, +} + +/// Legacy macros can only be accessed through special methods like `get_legacy_macros`. +/// Other methods will only resolve values, types and module scoped macros only. +impl ItemScope { + pub fn entries<'a>(&'a self) -> impl Iterator + 'a { + // FIXME: shadowing + let keys: FxHashSet<_> = self + .types + .keys() + .chain(self.values.keys()) + .chain(self.macros.keys()) + .chain(self.unresolved.iter()) + .collect(); + + keys.into_iter().map(move |name| (name, self.get(name))) + } + + pub fn declarations(&self) -> impl Iterator + '_ { + self.defs.iter().copied() + } + + pub fn impls(&self) -> impl Iterator + ExactSizeIterator + '_ { + self.impls.iter().copied() + } + + pub fn visibility_of(&self, def: ModuleDefId) -> Option { + self.name_of(ItemInNs::Types(def)) + .or_else(|| self.name_of(ItemInNs::Values(def))) + .map(|(_, v)| v) + } + + /// Iterate over all module scoped macros + pub(crate) fn macros<'a>(&'a self) -> impl Iterator + 'a { + self.entries().filter_map(|(name, def)| def.take_macros().map(|macro_| (name, macro_))) + } + + /// Iterate over all legacy textual scoped macros visible at the end of the module + pub(crate) fn legacy_macros<'a>(&'a self) -> impl Iterator + 'a { + self.legacy_macros.iter().map(|(name, def)| (name, *def)) + } + + /// Get a name from current module scope, legacy macros are not included + pub(crate) fn get(&self, name: &Name) -> PerNs { + PerNs { + types: self.types.get(name).copied(), + values: self.values.get(name).copied(), + macros: self.macros.get(name).copied(), + } + } + + pub(crate) fn name_of(&self, item: ItemInNs) -> Option<(&Name, Visibility)> { + for (name, per_ns) in self.entries() { + if let Some(vis) = item.match_with(per_ns) { + return Some((name, vis)); + } + } + None + } 
+ + pub(crate) fn traits<'a>(&'a self) -> impl Iterator + 'a { + self.types + .values() + .filter_map(|(def, _)| match def { + ModuleDefId::TraitId(t) => Some(*t), + _ => None, + }) + .chain(self.unnamed_trait_imports.keys().copied()) + } + + pub(crate) fn define_def(&mut self, def: ModuleDefId) { + self.defs.push(def) + } + + pub(crate) fn get_legacy_macro(&self, name: &Name) -> Option { + self.legacy_macros.get(name).copied() + } + + pub(crate) fn define_impl(&mut self, imp: ImplId) { + self.impls.push(imp) + } + + pub(crate) fn define_legacy_macro(&mut self, name: Name, mac: MacroDefId) { + self.legacy_macros.insert(name, mac); + } + + pub(crate) fn unnamed_trait_vis(&self, tr: TraitId) -> Option { + self.unnamed_trait_imports.get(&tr).copied() + } + + pub(crate) fn push_unnamed_trait(&mut self, tr: TraitId, vis: Visibility) { + self.unnamed_trait_imports.insert(tr, vis); + } + + pub(crate) fn push_res(&mut self, name: Name, def: PerNs) -> bool { + let mut changed = false; + + if let Some(types) = def.types { + self.types.entry(name.clone()).or_insert_with(|| { + changed = true; + types + }); + } + if let Some(values) = def.values { + self.values.entry(name.clone()).or_insert_with(|| { + changed = true; + values + }); + } + if let Some(macros) = def.macros { + self.macros.entry(name.clone()).or_insert_with(|| { + changed = true; + macros + }); + } + + if def.is_none() { + if self.unresolved.insert(name) { + changed = true; + } + } + + changed + } + + pub(crate) fn push_res_with_import( + &mut self, + glob_imports: &mut PerNsGlobImports, + lookup: (LocalModuleId, Name), + def: PerNs, + def_import_type: ImportType, + ) -> bool { + let mut changed = false; + + macro_rules! check_changed { + ( + $changed:ident, + ( $this:ident / $def:ident ) . 
$field:ident, + $glob_imports:ident [ $lookup:ident ], + $def_import_type:ident + ) => {{ + let existing = $this.$field.entry($lookup.1.clone()); + match (existing, $def.$field) { + (Entry::Vacant(entry), Some(_)) => { + match $def_import_type { + ImportType::Glob => { + $glob_imports.$field.insert($lookup.clone()); + } + ImportType::Named => { + $glob_imports.$field.remove(&$lookup); + } + } + + if let Some(fld) = $def.$field { + entry.insert(fld); + } + $changed = true; + } + (Entry::Occupied(mut entry), Some(_)) + if $glob_imports.$field.contains(&$lookup) + && matches!($def_import_type, ImportType::Named) => + { + mark::hit!(import_shadowed); + $glob_imports.$field.remove(&$lookup); + if let Some(fld) = $def.$field { + entry.insert(fld); + } + $changed = true; + } + _ => {} + } + }}; + } + + check_changed!(changed, (self / def).types, glob_imports[lookup], def_import_type); + check_changed!(changed, (self / def).values, glob_imports[lookup], def_import_type); + check_changed!(changed, (self / def).macros, glob_imports[lookup], def_import_type); + + if def.is_none() { + if self.unresolved.insert(lookup.1) { + changed = true; + } + } + + changed + } + + pub(crate) fn resolutions<'a>(&'a self) -> impl Iterator, PerNs)> + 'a { + self.entries().map(|(name, res)| (Some(name.clone()), res)).chain( + self.unnamed_trait_imports + .iter() + .map(|(tr, vis)| (None, PerNs::types(ModuleDefId::TraitId(*tr), *vis))), + ) + } + + pub(crate) fn collect_legacy_macros(&self) -> FxHashMap { + self.legacy_macros.clone() + } +} + +impl PerNs { + pub(crate) fn from_def(def: ModuleDefId, v: Visibility, has_constructor: bool) -> PerNs { + match def { + ModuleDefId::ModuleId(_) => PerNs::types(def, v), + ModuleDefId::FunctionId(_) => PerNs::values(def, v), + ModuleDefId::AdtId(adt) => match adt { + AdtId::UnionId(_) => PerNs::types(def, v), + AdtId::EnumId(_) => PerNs::types(def, v), + AdtId::StructId(_) => { + if has_constructor { + PerNs::both(def, def, v) + } else { + 
PerNs::types(def, v) + } + } + }, + ModuleDefId::EnumVariantId(_) => PerNs::both(def, def, v), + ModuleDefId::ConstId(_) | ModuleDefId::StaticId(_) => PerNs::values(def, v), + ModuleDefId::TraitId(_) => PerNs::types(def, v), + ModuleDefId::TypeAliasId(_) => PerNs::types(def, v), + ModuleDefId::BuiltinType(_) => PerNs::types(def, v), + } + } +} + +#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)] +pub enum ItemInNs { + Types(ModuleDefId), + Values(ModuleDefId), + Macros(MacroDefId), +} + +impl ItemInNs { + fn match_with(self, per_ns: PerNs) -> Option { + match self { + ItemInNs::Types(def) => { + per_ns.types.filter(|(other_def, _)| *other_def == def).map(|(_, vis)| vis) + } + ItemInNs::Values(def) => { + per_ns.values.filter(|(other_def, _)| *other_def == def).map(|(_, vis)| vis) + } + ItemInNs::Macros(def) => { + per_ns.macros.filter(|(other_def, _)| *other_def == def).map(|(_, vis)| vis) + } + } + } + + pub fn as_module_def_id(self) -> Option { + match self { + ItemInNs::Types(id) | ItemInNs::Values(id) => Some(id), + ItemInNs::Macros(_) => None, + } + } + + /// Returns the crate defining this item (or `None` if `self` is built-in). 
+ pub fn krate(&self, db: &dyn DefDatabase) -> Option { + Some(match self { + ItemInNs::Types(did) | ItemInNs::Values(did) => match did { + ModuleDefId::ModuleId(id) => id.krate, + ModuleDefId::FunctionId(id) => id.lookup(db).module(db).krate, + ModuleDefId::AdtId(id) => id.module(db).krate, + ModuleDefId::EnumVariantId(id) => id.parent.lookup(db).container.module(db).krate, + ModuleDefId::ConstId(id) => id.lookup(db).container.module(db).krate, + ModuleDefId::StaticId(id) => id.lookup(db).container.module(db).krate, + ModuleDefId::TraitId(id) => id.lookup(db).container.module(db).krate, + ModuleDefId::TypeAliasId(id) => id.lookup(db).module(db).krate, + ModuleDefId::BuiltinType(_) => return None, + }, + ItemInNs::Macros(id) => return id.krate, + }) + } +} diff --git a/crates/hir_def/src/item_tree.rs b/crates/hir_def/src/item_tree.rs new file mode 100644 index 0000000000..e14722caeb --- /dev/null +++ b/crates/hir_def/src/item_tree.rs @@ -0,0 +1,754 @@ +//! A simplified AST that only contains items. 
+ +mod lower; +#[cfg(test)] +mod tests; + +use std::{ + any::type_name, + fmt::{self, Debug}, + hash::{Hash, Hasher}, + marker::PhantomData, + ops::{Index, Range}, + sync::Arc, +}; + +use arena::{Arena, Idx, RawId}; +use ast::{AstNode, AttrsOwner, NameOwner, StructKind}; +use either::Either; +use hir_expand::{ + ast_id_map::FileAstId, + hygiene::Hygiene, + name::{name, AsName, Name}, + HirFileId, InFile, +}; +use rustc_hash::FxHashMap; +use smallvec::SmallVec; +use syntax::{ast, match_ast}; +use test_utils::mark; + +use crate::{ + attr::Attrs, + db::DefDatabase, + generics::GenericParams, + path::{path, AssociatedTypeBinding, GenericArgs, ImportAlias, ModPath, Path, PathKind}, + type_ref::{Mutability, TypeBound, TypeRef}, + visibility::RawVisibility, +}; + +#[derive(Copy, Clone, Eq, PartialEq)] +pub struct RawVisibilityId(u32); + +impl RawVisibilityId { + pub const PUB: Self = RawVisibilityId(u32::max_value()); + pub const PRIV: Self = RawVisibilityId(u32::max_value() - 1); + pub const PUB_CRATE: Self = RawVisibilityId(u32::max_value() - 2); +} + +impl fmt::Debug for RawVisibilityId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let mut f = f.debug_tuple("RawVisibilityId"); + match *self { + Self::PUB => f.field(&"pub"), + Self::PRIV => f.field(&"pub(self)"), + Self::PUB_CRATE => f.field(&"pub(crate)"), + _ => f.field(&self.0), + }; + f.finish() + } +} + +#[derive(Debug, Copy, Clone, Eq, PartialEq)] +pub struct GenericParamsId(u32); + +impl GenericParamsId { + pub const EMPTY: Self = GenericParamsId(u32::max_value()); +} + +/// The item tree of a source file. 
+#[derive(Debug, Eq, PartialEq)] +pub struct ItemTree { + top_level: SmallVec<[ModItem; 1]>, + attrs: FxHashMap, + inner_items: FxHashMap, SmallVec<[ModItem; 1]>>, + + data: Option>, +} + +impl ItemTree { + pub fn item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc { + let _p = profile::span("item_tree_query").detail(|| format!("{:?}", file_id)); + let syntax = if let Some(node) = db.parse_or_expand(file_id) { + node + } else { + return Arc::new(Self::empty()); + }; + + let hygiene = Hygiene::new(db.upcast(), file_id); + let ctx = lower::Ctx::new(db, hygiene.clone(), file_id); + let mut top_attrs = None; + let mut item_tree = match_ast! { + match syntax { + ast::SourceFile(file) => { + top_attrs = Some(Attrs::new(&file, &hygiene)); + ctx.lower_module_items(&file) + }, + ast::MacroItems(items) => { + ctx.lower_module_items(&items) + }, + // Macros can expand to expressions. We return an empty item tree in this case, but + // still need to collect inner items. + ast::Expr(e) => { + ctx.lower_inner_items(e.syntax()) + }, + _ => { + panic!("cannot create item tree from {:?}", syntax); + }, + } + }; + + if let Some(attrs) = top_attrs { + item_tree.attrs.insert(AttrOwner::TopLevel, attrs); + } + item_tree.shrink_to_fit(); + Arc::new(item_tree) + } + + fn empty() -> Self { + Self { + top_level: Default::default(), + attrs: Default::default(), + inner_items: Default::default(), + data: Default::default(), + } + } + + fn shrink_to_fit(&mut self) { + if let Some(data) = &mut self.data { + let ItemTreeData { + imports, + extern_crates, + functions, + structs, + fields, + unions, + enums, + variants, + consts, + statics, + traits, + impls, + type_aliases, + mods, + macro_calls, + exprs, + vis, + generics, + } = &mut **data; + + imports.shrink_to_fit(); + extern_crates.shrink_to_fit(); + functions.shrink_to_fit(); + structs.shrink_to_fit(); + fields.shrink_to_fit(); + unions.shrink_to_fit(); + enums.shrink_to_fit(); + variants.shrink_to_fit(); + 
consts.shrink_to_fit(); + statics.shrink_to_fit(); + traits.shrink_to_fit(); + impls.shrink_to_fit(); + type_aliases.shrink_to_fit(); + mods.shrink_to_fit(); + macro_calls.shrink_to_fit(); + exprs.shrink_to_fit(); + + vis.arena.shrink_to_fit(); + generics.arena.shrink_to_fit(); + } + } + + /// Returns an iterator over all items located at the top level of the `HirFileId` this + /// `ItemTree` was created from. + pub fn top_level_items(&self) -> &[ModItem] { + &self.top_level + } + + /// Returns the inner attributes of the source file. + pub fn top_level_attrs(&self) -> &Attrs { + self.attrs.get(&AttrOwner::TopLevel).unwrap_or(&Attrs::EMPTY) + } + + pub fn attrs(&self, of: AttrOwner) -> &Attrs { + self.attrs.get(&of).unwrap_or(&Attrs::EMPTY) + } + + /// Returns the lowered inner items that `ast` corresponds to. + /// + /// Most AST items are lowered to a single `ModItem`, but some (eg. `use` items) may be lowered + /// to multiple items in the `ItemTree`. + pub fn inner_items(&self, ast: FileAstId) -> &[ModItem] { + &self.inner_items[&ast] + } + + pub fn all_inner_items(&self) -> impl Iterator + '_ { + self.inner_items.values().flatten().copied() + } + + pub fn source(&self, db: &dyn DefDatabase, of: ItemTreeId) -> S::Source { + // This unwrap cannot fail, since it has either succeeded above, or resulted in an empty + // ItemTree (in which case there is no valid `FileItemTreeId` to call this method with). 
+ let root = + db.parse_or_expand(of.file_id).expect("parse_or_expand failed on constructed ItemTree"); + + let id = self[of.value].ast_id(); + let map = db.ast_id_map(of.file_id); + let ptr = map.get(id); + ptr.to_node(&root) + } + + fn data(&self) -> &ItemTreeData { + self.data.as_ref().expect("attempted to access data of empty ItemTree") + } + + fn data_mut(&mut self) -> &mut ItemTreeData { + self.data.get_or_insert_with(Box::default) + } +} + +#[derive(Default, Debug, Eq, PartialEq)] +struct ItemVisibilities { + arena: Arena, +} + +impl ItemVisibilities { + fn alloc(&mut self, vis: RawVisibility) -> RawVisibilityId { + match &vis { + RawVisibility::Public => RawVisibilityId::PUB, + RawVisibility::Module(path) if path.segments.is_empty() => match &path.kind { + PathKind::Super(0) => RawVisibilityId::PRIV, + PathKind::Crate => RawVisibilityId::PUB_CRATE, + _ => RawVisibilityId(self.arena.alloc(vis).into_raw().into()), + }, + _ => RawVisibilityId(self.arena.alloc(vis).into_raw().into()), + } + } +} + +static VIS_PUB: RawVisibility = RawVisibility::Public; +static VIS_PRIV: RawVisibility = + RawVisibility::Module(ModPath { kind: PathKind::Super(0), segments: Vec::new() }); +static VIS_PUB_CRATE: RawVisibility = + RawVisibility::Module(ModPath { kind: PathKind::Crate, segments: Vec::new() }); + +#[derive(Default, Debug, Eq, PartialEq)] +struct GenericParamsStorage { + arena: Arena, +} + +impl GenericParamsStorage { + fn alloc(&mut self, params: GenericParams) -> GenericParamsId { + if params.types.is_empty() && params.where_predicates.is_empty() { + return GenericParamsId::EMPTY; + } + + GenericParamsId(self.arena.alloc(params).into_raw().into()) + } +} + +static EMPTY_GENERICS: GenericParams = + GenericParams { types: Arena::new(), where_predicates: Vec::new() }; + +#[derive(Default, Debug, Eq, PartialEq)] +struct ItemTreeData { + imports: Arena, + extern_crates: Arena, + functions: Arena, + structs: Arena, + fields: Arena, + unions: Arena, + enums: Arena, + 
variants: Arena, + consts: Arena, + statics: Arena, + traits: Arena, + impls: Arena, + type_aliases: Arena, + mods: Arena, + macro_calls: Arena, + exprs: Arena, + + vis: ItemVisibilities, + generics: GenericParamsStorage, +} + +#[derive(Debug, Eq, PartialEq, Hash)] +pub enum AttrOwner { + /// Attributes on an item. + ModItem(ModItem), + /// Inner attributes of the source file. + TopLevel, + + Variant(Idx), + Field(Idx), + // FIXME: Store variant and field attrs, and stop reparsing them in `attrs_query`. +} + +macro_rules! from_attrs { + ( $( $var:ident($t:ty) ),+ ) => { + $( + impl From<$t> for AttrOwner { + fn from(t: $t) -> AttrOwner { + AttrOwner::$var(t) + } + } + )+ + }; +} + +from_attrs!(ModItem(ModItem), Variant(Idx), Field(Idx)); + +/// Trait implemented by all item nodes in the item tree. +pub trait ItemTreeNode: Clone { + type Source: AstNode + Into; + + fn ast_id(&self) -> FileAstId; + + /// Looks up an instance of `Self` in an item tree. + fn lookup(tree: &ItemTree, index: Idx) -> &Self; + + /// Downcasts a `ModItem` to a `FileItemTreeId` specific to this type. + fn id_from_mod_item(mod_item: ModItem) -> Option>; + + /// Upcasts a `FileItemTreeId` to a generic `ModItem`. + fn id_to_mod_item(id: FileItemTreeId) -> ModItem; +} + +pub struct FileItemTreeId { + index: Idx, + _p: PhantomData, +} + +impl Clone for FileItemTreeId { + fn clone(&self) -> Self { + Self { index: self.index, _p: PhantomData } + } +} +impl Copy for FileItemTreeId {} + +impl PartialEq for FileItemTreeId { + fn eq(&self, other: &FileItemTreeId) -> bool { + self.index == other.index + } +} +impl Eq for FileItemTreeId {} + +impl Hash for FileItemTreeId { + fn hash(&self, state: &mut H) { + self.index.hash(state) + } +} + +impl fmt::Debug for FileItemTreeId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.index.fmt(f) + } +} + +pub type ItemTreeId = InFile>; + +macro_rules! mod_items { + ( $( $typ:ident in $fld:ident -> $ast:ty ),+ $(,)? 
) => { + #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)] + pub enum ModItem { + $( + $typ(FileItemTreeId<$typ>), + )+ + } + + $( + impl From> for ModItem { + fn from(id: FileItemTreeId<$typ>) -> ModItem { + ModItem::$typ(id) + } + } + )+ + + $( + impl ItemTreeNode for $typ { + type Source = $ast; + + fn ast_id(&self) -> FileAstId { + self.ast_id + } + + fn lookup(tree: &ItemTree, index: Idx) -> &Self { + &tree.data().$fld[index] + } + + fn id_from_mod_item(mod_item: ModItem) -> Option> { + if let ModItem::$typ(id) = mod_item { + Some(id) + } else { + None + } + } + + fn id_to_mod_item(id: FileItemTreeId) -> ModItem { + ModItem::$typ(id) + } + } + + impl Index> for ItemTree { + type Output = $typ; + + fn index(&self, index: Idx<$typ>) -> &Self::Output { + &self.data().$fld[index] + } + } + )+ + }; +} + +mod_items! { + Import in imports -> ast::Use, + ExternCrate in extern_crates -> ast::ExternCrate, + Function in functions -> ast::Fn, + Struct in structs -> ast::Struct, + Union in unions -> ast::Union, + Enum in enums -> ast::Enum, + Const in consts -> ast::Const, + Static in statics -> ast::Static, + Trait in traits -> ast::Trait, + Impl in impls -> ast::Impl, + TypeAlias in type_aliases -> ast::TypeAlias, + Mod in mods -> ast::Module, + MacroCall in macro_calls -> ast::MacroCall, +} + +macro_rules! impl_index { + ( $($fld:ident: $t:ty),+ $(,)? 
) => { + $( + impl Index> for ItemTree { + type Output = $t; + + fn index(&self, index: Idx<$t>) -> &Self::Output { + &self.data().$fld[index] + } + } + )+ + }; +} + +impl_index!(fields: Field, variants: Variant, exprs: Expr); + +impl Index for ItemTree { + type Output = RawVisibility; + fn index(&self, index: RawVisibilityId) -> &Self::Output { + match index { + RawVisibilityId::PRIV => &VIS_PRIV, + RawVisibilityId::PUB => &VIS_PUB, + RawVisibilityId::PUB_CRATE => &VIS_PUB_CRATE, + _ => &self.data().vis.arena[Idx::from_raw(index.0.into())], + } + } +} + +impl Index for ItemTree { + type Output = GenericParams; + + fn index(&self, index: GenericParamsId) -> &Self::Output { + match index { + GenericParamsId::EMPTY => &EMPTY_GENERICS, + _ => &self.data().generics.arena[Idx::from_raw(index.0.into())], + } + } +} + +impl Index> for ItemTree { + type Output = N; + fn index(&self, id: FileItemTreeId) -> &N { + N::lookup(self, id.index) + } +} + +/// A desugared `use` import. +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct Import { + pub path: ModPath, + pub alias: Option, + pub visibility: RawVisibilityId, + pub is_glob: bool, + pub is_prelude: bool, + /// AST ID of the `use` or `extern crate` item this import was derived from. Note that many + /// `Import`s can map to the same `use` item. + pub ast_id: FileAstId, +} + +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct ExternCrate { + pub path: ModPath, + pub alias: Option, + pub visibility: RawVisibilityId, + /// Whether this is a `#[macro_use] extern crate ...`. 
+ pub is_macro_use: bool, + pub ast_id: FileAstId, +} + +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct Function { + pub name: Name, + pub visibility: RawVisibilityId, + pub generic_params: GenericParamsId, + pub has_self_param: bool, + pub is_unsafe: bool, + pub params: Box<[TypeRef]>, + pub is_varargs: bool, + pub ret_type: TypeRef, + pub ast_id: FileAstId, +} + +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct Struct { + pub name: Name, + pub visibility: RawVisibilityId, + pub generic_params: GenericParamsId, + pub fields: Fields, + pub ast_id: FileAstId, + pub kind: StructDefKind, +} + +#[derive(Debug, Clone, Eq, PartialEq)] +pub enum StructDefKind { + /// `struct S { ... }` - type namespace only. + Record, + /// `struct S(...);` + Tuple, + /// `struct S;` + Unit, +} + +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct Union { + pub name: Name, + pub visibility: RawVisibilityId, + pub generic_params: GenericParamsId, + pub fields: Fields, + pub ast_id: FileAstId, +} + +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct Enum { + pub name: Name, + pub visibility: RawVisibilityId, + pub generic_params: GenericParamsId, + pub variants: IdRange, + pub ast_id: FileAstId, +} + +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct Const { + /// const _: () = (); + pub name: Option, + pub visibility: RawVisibilityId, + pub type_ref: TypeRef, + pub ast_id: FileAstId, +} + +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct Static { + pub name: Name, + pub visibility: RawVisibilityId, + pub mutable: bool, + pub type_ref: TypeRef, + pub ast_id: FileAstId, +} + +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct Trait { + pub name: Name, + pub visibility: RawVisibilityId, + pub generic_params: GenericParamsId, + pub auto: bool, + pub items: Box<[AssocItem]>, + pub ast_id: FileAstId, +} + +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct Impl { + pub generic_params: GenericParamsId, + pub target_trait: Option, + pub target_type: TypeRef, + pub is_negative: bool, + pub 
items: Box<[AssocItem]>, + pub ast_id: FileAstId, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct TypeAlias { + pub name: Name, + pub visibility: RawVisibilityId, + /// Bounds on the type alias itself. Only valid in trait declarations, eg. `type Assoc: Copy;`. + pub bounds: Box<[TypeBound]>, + pub generic_params: GenericParamsId, + pub type_ref: Option, + pub ast_id: FileAstId, +} + +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct Mod { + pub name: Name, + pub visibility: RawVisibilityId, + pub kind: ModKind, + pub ast_id: FileAstId, +} + +#[derive(Debug, Clone, Eq, PartialEq)] +pub enum ModKind { + /// `mod m { ... }` + Inline { items: Box<[ModItem]> }, + + /// `mod m;` + Outline {}, +} + +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct MacroCall { + /// For `macro_rules!` declarations, this is the name of the declared macro. + pub name: Option, + /// Path to the called macro. + pub path: ModPath, + /// Has `#[macro_export]`. + pub is_export: bool, + /// Has `#[macro_export(local_inner_macros)]`. + pub is_local_inner: bool, + /// Has `#[rustc_builtin_macro]`. + pub is_builtin: bool, + pub ast_id: FileAstId, +} + +// NB: There's no `FileAstId` for `Expr`. The only case where this would be useful is for array +// lengths, but we don't do much with them yet. +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct Expr; + +macro_rules! impl_froms { + ($e:ident { $($v:ident ($t:ty)),* $(,)? 
}) => { + $( + impl From<$t> for $e { + fn from(it: $t) -> $e { + $e::$v(it) + } + } + )* + } +} + +impl ModItem { + pub fn as_assoc_item(&self) -> Option { + match self { + ModItem::Import(_) + | ModItem::ExternCrate(_) + | ModItem::Struct(_) + | ModItem::Union(_) + | ModItem::Enum(_) + | ModItem::Static(_) + | ModItem::Trait(_) + | ModItem::Impl(_) + | ModItem::Mod(_) => None, + ModItem::MacroCall(call) => Some(AssocItem::MacroCall(*call)), + ModItem::Const(konst) => Some(AssocItem::Const(*konst)), + ModItem::TypeAlias(alias) => Some(AssocItem::TypeAlias(*alias)), + ModItem::Function(func) => Some(AssocItem::Function(*func)), + } + } + + pub fn downcast(self) -> Option> { + N::id_from_mod_item(self) + } +} + +#[derive(Debug, Copy, Clone, Eq, PartialEq)] +pub enum AssocItem { + Function(FileItemTreeId), + TypeAlias(FileItemTreeId), + Const(FileItemTreeId), + MacroCall(FileItemTreeId), +} + +impl_froms!(AssocItem { + Function(FileItemTreeId), + TypeAlias(FileItemTreeId), + Const(FileItemTreeId), + MacroCall(FileItemTreeId), +}); + +impl From for ModItem { + fn from(item: AssocItem) -> Self { + match item { + AssocItem::Function(it) => it.into(), + AssocItem::TypeAlias(it) => it.into(), + AssocItem::Const(it) => it.into(), + AssocItem::MacroCall(it) => it.into(), + } + } +} + +#[derive(Debug, Eq, PartialEq)] +pub struct Variant { + pub name: Name, + pub fields: Fields, +} + +pub struct IdRange { + range: Range, + _p: PhantomData, +} + +impl IdRange { + fn new(range: Range>) -> Self { + Self { range: range.start.into_raw().into()..range.end.into_raw().into(), _p: PhantomData } + } +} + +impl Iterator for IdRange { + type Item = Idx; + fn next(&mut self) -> Option { + self.range.next().map(|raw| Idx::from_raw(raw.into())) + } +} + +impl fmt::Debug for IdRange { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_tuple(&format!("IdRange::<{}>", type_name::())).field(&self.range).finish() + } +} + +impl Clone for IdRange { + fn clone(&self) -> Self { 
+ Self { range: self.range.clone(), _p: PhantomData } + } +} + +impl PartialEq for IdRange { + fn eq(&self, other: &Self) -> bool { + self.range == other.range + } +} + +impl Eq for IdRange {} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum Fields { + Record(IdRange), + Tuple(IdRange), + Unit, +} + +/// A single field of an enum variant or struct +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct Field { + pub name: Name, + pub type_ref: TypeRef, + pub visibility: RawVisibilityId, +} diff --git a/crates/hir_def/src/item_tree/lower.rs b/crates/hir_def/src/item_tree/lower.rs new file mode 100644 index 0000000000..391ab5d392 --- /dev/null +++ b/crates/hir_def/src/item_tree/lower.rs @@ -0,0 +1,705 @@ +//! AST -> `ItemTree` lowering code. + +use std::{collections::hash_map::Entry, mem, sync::Arc}; + +use arena::map::ArenaMap; +use hir_expand::{ast_id_map::AstIdMap, hygiene::Hygiene, HirFileId}; +use smallvec::SmallVec; +use syntax::{ + ast::{self, ModuleItemOwner}, + SyntaxNode, +}; + +use crate::{ + attr::Attrs, + generics::{GenericParams, TypeParamData, TypeParamProvenance}, +}; + +use super::*; + +fn id(index: Idx) -> FileItemTreeId { + FileItemTreeId { index, _p: PhantomData } +} + +struct ModItems(SmallVec<[ModItem; 1]>); + +impl From for ModItems +where + T: Into, +{ + fn from(t: T) -> Self { + ModItems(SmallVec::from_buf([t.into(); 1])) + } +} + +pub(super) struct Ctx { + tree: ItemTree, + hygiene: Hygiene, + file: HirFileId, + source_ast_id_map: Arc, + body_ctx: crate::body::LowerCtx, + inner_items: Vec, + forced_visibility: Option, +} + +impl Ctx { + pub(super) fn new(db: &dyn DefDatabase, hygiene: Hygiene, file: HirFileId) -> Self { + Self { + tree: ItemTree::empty(), + hygiene, + file, + source_ast_id_map: db.ast_id_map(file), + body_ctx: crate::body::LowerCtx::new(db, file), + inner_items: Vec::new(), + forced_visibility: None, + } + } + + pub(super) fn lower_module_items(mut self, item_owner: &dyn ModuleItemOwner) -> ItemTree { + self.tree.top_level = 
item_owner + .items() + .flat_map(|item| self.lower_mod_item(&item, false)) + .flat_map(|items| items.0) + .collect(); + self.tree + } + + pub(super) fn lower_inner_items(mut self, within: &SyntaxNode) -> ItemTree { + self.collect_inner_items(within); + self.tree + } + + fn data(&mut self) -> &mut ItemTreeData { + self.tree.data_mut() + } + + fn lower_mod_item(&mut self, item: &ast::Item, inner: bool) -> Option { + assert!(inner || self.inner_items.is_empty()); + + // Collect inner items for 1-to-1-lowered items. + match item { + ast::Item::Struct(_) + | ast::Item::Union(_) + | ast::Item::Enum(_) + | ast::Item::Fn(_) + | ast::Item::TypeAlias(_) + | ast::Item::Const(_) + | ast::Item::Static(_) + | ast::Item::MacroCall(_) => { + // Skip this if we're already collecting inner items. We'll descend into all nodes + // already. + if !inner { + self.collect_inner_items(item.syntax()); + } + } + + // These are handled in their respective `lower_X` method (since we can't just blindly + // walk them). + ast::Item::Trait(_) | ast::Item::Impl(_) | ast::Item::ExternBlock(_) => {} + + // These don't have inner items. 
+ ast::Item::Module(_) | ast::Item::ExternCrate(_) | ast::Item::Use(_) => {} + }; + + let attrs = Attrs::new(item, &self.hygiene); + let items = match item { + ast::Item::Struct(ast) => self.lower_struct(ast).map(Into::into), + ast::Item::Union(ast) => self.lower_union(ast).map(Into::into), + ast::Item::Enum(ast) => self.lower_enum(ast).map(Into::into), + ast::Item::Fn(ast) => self.lower_function(ast).map(Into::into), + ast::Item::TypeAlias(ast) => self.lower_type_alias(ast).map(Into::into), + ast::Item::Static(ast) => self.lower_static(ast).map(Into::into), + ast::Item::Const(ast) => Some(self.lower_const(ast).into()), + ast::Item::Module(ast) => self.lower_module(ast).map(Into::into), + ast::Item::Trait(ast) => self.lower_trait(ast).map(Into::into), + ast::Item::Impl(ast) => self.lower_impl(ast).map(Into::into), + ast::Item::Use(ast) => Some(ModItems( + self.lower_use(ast).into_iter().map(Into::into).collect::>(), + )), + ast::Item::ExternCrate(ast) => self.lower_extern_crate(ast).map(Into::into), + ast::Item::MacroCall(ast) => self.lower_macro_call(ast).map(Into::into), + ast::Item::ExternBlock(ast) => { + Some(ModItems(self.lower_extern_block(ast).into_iter().collect::>())) + } + }; + + if !attrs.is_empty() { + for item in items.iter().flat_map(|items| &items.0) { + self.add_attrs((*item).into(), attrs.clone()); + } + } + + items + } + + fn add_attrs(&mut self, item: AttrOwner, attrs: Attrs) { + match self.tree.attrs.entry(item) { + Entry::Occupied(mut entry) => { + *entry.get_mut() = entry.get().merge(attrs); + } + Entry::Vacant(entry) => { + entry.insert(attrs); + } + } + } + + fn collect_inner_items(&mut self, container: &SyntaxNode) { + let forced_vis = self.forced_visibility.take(); + let mut inner_items = mem::take(&mut self.tree.inner_items); + inner_items.extend(container.descendants().skip(1).filter_map(ast::Item::cast).filter_map( + |item| { + let ast_id = self.source_ast_id_map.ast_id(&item); + Some((ast_id, self.lower_mod_item(&item, true)?.0)) + }, 
+ )); + self.tree.inner_items = inner_items; + self.forced_visibility = forced_vis; + } + + fn lower_assoc_item(&mut self, item: &ast::AssocItem) -> Option { + match item { + ast::AssocItem::Fn(ast) => self.lower_function(ast).map(Into::into), + ast::AssocItem::TypeAlias(ast) => self.lower_type_alias(ast).map(Into::into), + ast::AssocItem::Const(ast) => Some(self.lower_const(ast).into()), + ast::AssocItem::MacroCall(ast) => self.lower_macro_call(ast).map(Into::into), + } + } + + fn lower_struct(&mut self, strukt: &ast::Struct) -> Option> { + let visibility = self.lower_visibility(strukt); + let name = strukt.name()?.as_name(); + let generic_params = self.lower_generic_params(GenericsOwner::Struct, strukt); + let fields = self.lower_fields(&strukt.kind()); + let ast_id = self.source_ast_id_map.ast_id(strukt); + let kind = match strukt.kind() { + ast::StructKind::Record(_) => StructDefKind::Record, + ast::StructKind::Tuple(_) => StructDefKind::Tuple, + ast::StructKind::Unit => StructDefKind::Unit, + }; + let res = Struct { name, visibility, generic_params, fields, ast_id, kind }; + Some(id(self.data().structs.alloc(res))) + } + + fn lower_fields(&mut self, strukt_kind: &ast::StructKind) -> Fields { + match strukt_kind { + ast::StructKind::Record(it) => { + let range = self.lower_record_fields(it); + Fields::Record(range) + } + ast::StructKind::Tuple(it) => { + let range = self.lower_tuple_fields(it); + Fields::Tuple(range) + } + ast::StructKind::Unit => Fields::Unit, + } + } + + fn lower_record_fields(&mut self, fields: &ast::RecordFieldList) -> IdRange { + let start = self.next_field_idx(); + for field in fields.fields() { + if let Some(data) = self.lower_record_field(&field) { + let idx = self.data().fields.alloc(data); + self.add_attrs(idx.into(), Attrs::new(&field, &self.hygiene)); + } + } + let end = self.next_field_idx(); + IdRange::new(start..end) + } + + fn lower_record_field(&mut self, field: &ast::RecordField) -> Option { + let name = 
field.name()?.as_name(); + let visibility = self.lower_visibility(field); + let type_ref = self.lower_type_ref_opt(field.ty()); + let res = Field { name, type_ref, visibility }; + Some(res) + } + + fn lower_tuple_fields(&mut self, fields: &ast::TupleFieldList) -> IdRange { + let start = self.next_field_idx(); + for (i, field) in fields.fields().enumerate() { + let data = self.lower_tuple_field(i, &field); + let idx = self.data().fields.alloc(data); + self.add_attrs(idx.into(), Attrs::new(&field, &self.hygiene)); + } + let end = self.next_field_idx(); + IdRange::new(start..end) + } + + fn lower_tuple_field(&mut self, idx: usize, field: &ast::TupleField) -> Field { + let name = Name::new_tuple_field(idx); + let visibility = self.lower_visibility(field); + let type_ref = self.lower_type_ref_opt(field.ty()); + let res = Field { name, type_ref, visibility }; + res + } + + fn lower_union(&mut self, union: &ast::Union) -> Option> { + let visibility = self.lower_visibility(union); + let name = union.name()?.as_name(); + let generic_params = self.lower_generic_params(GenericsOwner::Union, union); + let fields = match union.record_field_list() { + Some(record_field_list) => self.lower_fields(&StructKind::Record(record_field_list)), + None => Fields::Record(IdRange::new(self.next_field_idx()..self.next_field_idx())), + }; + let ast_id = self.source_ast_id_map.ast_id(union); + let res = Union { name, visibility, generic_params, fields, ast_id }; + Some(id(self.data().unions.alloc(res))) + } + + fn lower_enum(&mut self, enum_: &ast::Enum) -> Option> { + let visibility = self.lower_visibility(enum_); + let name = enum_.name()?.as_name(); + let generic_params = self.lower_generic_params(GenericsOwner::Enum, enum_); + let variants = match &enum_.variant_list() { + Some(variant_list) => self.lower_variants(variant_list), + None => IdRange::new(self.next_variant_idx()..self.next_variant_idx()), + }; + let ast_id = self.source_ast_id_map.ast_id(enum_); + let res = Enum { name, 
visibility, generic_params, variants, ast_id }; + Some(id(self.data().enums.alloc(res))) + } + + fn lower_variants(&mut self, variants: &ast::VariantList) -> IdRange { + let start = self.next_variant_idx(); + for variant in variants.variants() { + if let Some(data) = self.lower_variant(&variant) { + let idx = self.data().variants.alloc(data); + self.add_attrs(idx.into(), Attrs::new(&variant, &self.hygiene)); + } + } + let end = self.next_variant_idx(); + IdRange::new(start..end) + } + + fn lower_variant(&mut self, variant: &ast::Variant) -> Option { + let name = variant.name()?.as_name(); + let fields = self.lower_fields(&variant.kind()); + let res = Variant { name, fields }; + Some(res) + } + + fn lower_function(&mut self, func: &ast::Fn) -> Option> { + let visibility = self.lower_visibility(func); + let name = func.name()?.as_name(); + + let mut params = Vec::new(); + let mut has_self_param = false; + if let Some(param_list) = func.param_list() { + if let Some(self_param) = param_list.self_param() { + let self_type = match self_param.ty() { + Some(type_ref) => TypeRef::from_ast(&self.body_ctx, type_ref), + None => { + let self_type = TypeRef::Path(name![Self].into()); + match self_param.kind() { + ast::SelfParamKind::Owned => self_type, + ast::SelfParamKind::Ref => { + TypeRef::Reference(Box::new(self_type), Mutability::Shared) + } + ast::SelfParamKind::MutRef => { + TypeRef::Reference(Box::new(self_type), Mutability::Mut) + } + } + } + }; + params.push(self_type); + has_self_param = true; + } + for param in param_list.params() { + let type_ref = TypeRef::from_ast_opt(&self.body_ctx, param.ty()); + params.push(type_ref); + } + } + + let mut is_varargs = false; + if let Some(params) = func.param_list() { + if let Some(last) = params.params().last() { + is_varargs = last.dotdotdot_token().is_some(); + } + } + + let ret_type = match func.ret_type().and_then(|rt| rt.ty()) { + Some(type_ref) => TypeRef::from_ast(&self.body_ctx, type_ref), + _ => TypeRef::unit(), + }; 
+ + let ret_type = if func.async_token().is_some() { + let future_impl = desugar_future_path(ret_type); + let ty_bound = TypeBound::Path(future_impl); + TypeRef::ImplTrait(vec![ty_bound]) + } else { + ret_type + }; + + let ast_id = self.source_ast_id_map.ast_id(func); + let mut res = Function { + name, + visibility, + generic_params: GenericParamsId::EMPTY, + has_self_param, + is_unsafe: func.unsafe_token().is_some(), + params: params.into_boxed_slice(), + is_varargs, + ret_type, + ast_id, + }; + res.generic_params = self.lower_generic_params(GenericsOwner::Function(&res), func); + + Some(id(self.data().functions.alloc(res))) + } + + fn lower_type_alias( + &mut self, + type_alias: &ast::TypeAlias, + ) -> Option> { + let name = type_alias.name()?.as_name(); + let type_ref = type_alias.ty().map(|it| self.lower_type_ref(&it)); + let visibility = self.lower_visibility(type_alias); + let bounds = self.lower_type_bounds(type_alias); + let generic_params = self.lower_generic_params(GenericsOwner::TypeAlias, type_alias); + let ast_id = self.source_ast_id_map.ast_id(type_alias); + let res = TypeAlias { + name, + visibility, + bounds: bounds.into_boxed_slice(), + generic_params, + type_ref, + ast_id, + }; + Some(id(self.data().type_aliases.alloc(res))) + } + + fn lower_static(&mut self, static_: &ast::Static) -> Option> { + let name = static_.name()?.as_name(); + let type_ref = self.lower_type_ref_opt(static_.ty()); + let visibility = self.lower_visibility(static_); + let mutable = static_.mut_token().is_some(); + let ast_id = self.source_ast_id_map.ast_id(static_); + let res = Static { name, visibility, mutable, type_ref, ast_id }; + Some(id(self.data().statics.alloc(res))) + } + + fn lower_const(&mut self, konst: &ast::Const) -> FileItemTreeId { + let name = konst.name().map(|it| it.as_name()); + let type_ref = self.lower_type_ref_opt(konst.ty()); + let visibility = self.lower_visibility(konst); + let ast_id = self.source_ast_id_map.ast_id(konst); + let res = Const { name, 
visibility, type_ref, ast_id }; + id(self.data().consts.alloc(res)) + } + + fn lower_module(&mut self, module: &ast::Module) -> Option> { + let name = module.name()?.as_name(); + let visibility = self.lower_visibility(module); + let kind = if module.semicolon_token().is_some() { + ModKind::Outline {} + } else { + ModKind::Inline { + items: module + .item_list() + .map(|list| { + list.items() + .flat_map(|item| self.lower_mod_item(&item, false)) + .flat_map(|items| items.0) + .collect() + }) + .unwrap_or_else(|| { + mark::hit!(name_res_works_for_broken_modules); + Box::new([]) as Box<[_]> + }), + } + }; + let ast_id = self.source_ast_id_map.ast_id(module); + let res = Mod { name, visibility, kind, ast_id }; + Some(id(self.data().mods.alloc(res))) + } + + fn lower_trait(&mut self, trait_def: &ast::Trait) -> Option> { + let name = trait_def.name()?.as_name(); + let visibility = self.lower_visibility(trait_def); + let generic_params = + self.lower_generic_params_and_inner_items(GenericsOwner::Trait(trait_def), trait_def); + let auto = trait_def.auto_token().is_some(); + let items = trait_def.assoc_item_list().map(|list| { + self.with_inherited_visibility(visibility, |this| { + list.assoc_items() + .filter_map(|item| { + let attrs = Attrs::new(&item, &this.hygiene); + this.collect_inner_items(item.syntax()); + this.lower_assoc_item(&item).map(|item| { + this.add_attrs(ModItem::from(item).into(), attrs); + item + }) + }) + .collect() + }) + }); + let ast_id = self.source_ast_id_map.ast_id(trait_def); + let res = Trait { + name, + visibility, + generic_params, + auto, + items: items.unwrap_or_default(), + ast_id, + }; + Some(id(self.data().traits.alloc(res))) + } + + fn lower_impl(&mut self, impl_def: &ast::Impl) -> Option> { + let generic_params = + self.lower_generic_params_and_inner_items(GenericsOwner::Impl, impl_def); + let target_trait = impl_def.trait_().map(|tr| self.lower_type_ref(&tr)); + let target_type = self.lower_type_ref(&impl_def.self_ty()?); + let 
is_negative = impl_def.excl_token().is_some(); + + // We cannot use `assoc_items()` here as that does not include macro calls. + let items = impl_def + .assoc_item_list() + .into_iter() + .flat_map(|it| it.assoc_items()) + .filter_map(|item| { + self.collect_inner_items(item.syntax()); + let assoc = self.lower_assoc_item(&item)?; + let attrs = Attrs::new(&item, &self.hygiene); + self.add_attrs(ModItem::from(assoc).into(), attrs); + Some(assoc) + }) + .collect(); + let ast_id = self.source_ast_id_map.ast_id(impl_def); + let res = Impl { generic_params, target_trait, target_type, is_negative, items, ast_id }; + Some(id(self.data().impls.alloc(res))) + } + + fn lower_use(&mut self, use_item: &ast::Use) -> Vec> { + // FIXME: cfg_attr + let is_prelude = use_item.has_atom_attr("prelude_import"); + let visibility = self.lower_visibility(use_item); + let ast_id = self.source_ast_id_map.ast_id(use_item); + + // Every use item can expand to many `Import`s. + let mut imports = Vec::new(); + let tree = self.tree.data_mut(); + ModPath::expand_use_item( + InFile::new(self.file, use_item.clone()), + &self.hygiene, + |path, _tree, is_glob, alias| { + imports.push(id(tree.imports.alloc(Import { + path, + alias, + visibility, + is_glob, + is_prelude, + ast_id, + }))); + }, + ); + + imports + } + + fn lower_extern_crate( + &mut self, + extern_crate: &ast::ExternCrate, + ) -> Option> { + let path = ModPath::from_name_ref(&extern_crate.name_ref()?); + let alias = extern_crate.rename().map(|a| { + a.name().map(|it| it.as_name()).map_or(ImportAlias::Underscore, ImportAlias::Alias) + }); + let visibility = self.lower_visibility(extern_crate); + let ast_id = self.source_ast_id_map.ast_id(extern_crate); + // FIXME: cfg_attr + let is_macro_use = extern_crate.has_atom_attr("macro_use"); + + let res = ExternCrate { path, alias, visibility, is_macro_use, ast_id }; + Some(id(self.data().extern_crates.alloc(res))) + } + + fn lower_macro_call(&mut self, m: &ast::MacroCall) -> Option> { + let name 
= m.name().map(|it| it.as_name()); + let attrs = Attrs::new(m, &self.hygiene); + let path = ModPath::from_src(m.path()?, &self.hygiene)?; + + let ast_id = self.source_ast_id_map.ast_id(m); + + // FIXME: cfg_attr + let export_attr = attrs.by_key("macro_export"); + + let is_export = export_attr.exists(); + let is_local_inner = if is_export { + export_attr.tt_values().map(|it| &it.token_trees).flatten().any(|it| match it { + tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => { + ident.text.contains("local_inner_macros") + } + _ => false, + }) + } else { + false + }; + + let is_builtin = attrs.by_key("rustc_builtin_macro").exists(); + let res = MacroCall { name, path, is_export, is_builtin, is_local_inner, ast_id }; + Some(id(self.data().macro_calls.alloc(res))) + } + + fn lower_extern_block(&mut self, block: &ast::ExternBlock) -> Vec { + block.extern_item_list().map_or(Vec::new(), |list| { + list.extern_items() + .filter_map(|item| { + self.collect_inner_items(item.syntax()); + let attrs = Attrs::new(&item, &self.hygiene); + let id: ModItem = match item { + ast::ExternItem::Fn(ast) => { + let func = self.lower_function(&ast)?; + self.data().functions[func.index].is_unsafe = true; + func.into() + } + ast::ExternItem::Static(ast) => { + let statik = self.lower_static(&ast)?; + statik.into() + } + ast::ExternItem::MacroCall(_) => return None, + }; + self.add_attrs(id.into(), attrs); + Some(id) + }) + .collect() + }) + } + + /// Lowers generics defined on `node` and collects inner items defined within. + fn lower_generic_params_and_inner_items( + &mut self, + owner: GenericsOwner<'_>, + node: &impl ast::GenericParamsOwner, + ) -> GenericParamsId { + // Generics are part of item headers and may contain inner items we need to collect. 
+ if let Some(params) = node.generic_param_list() { + self.collect_inner_items(params.syntax()); + } + if let Some(clause) = node.where_clause() { + self.collect_inner_items(clause.syntax()); + } + + self.lower_generic_params(owner, node) + } + + fn lower_generic_params( + &mut self, + owner: GenericsOwner<'_>, + node: &impl ast::GenericParamsOwner, + ) -> GenericParamsId { + let mut sm = &mut ArenaMap::default(); + let mut generics = GenericParams::default(); + match owner { + GenericsOwner::Function(func) => { + generics.fill(&self.body_ctx, sm, node); + // lower `impl Trait` in arguments + for param in &*func.params { + generics.fill_implicit_impl_trait_args(param); + } + } + GenericsOwner::Struct + | GenericsOwner::Enum + | GenericsOwner::Union + | GenericsOwner::TypeAlias => { + generics.fill(&self.body_ctx, sm, node); + } + GenericsOwner::Trait(trait_def) => { + // traits get the Self type as an implicit first type parameter + let self_param_id = generics.types.alloc(TypeParamData { + name: Some(name![Self]), + default: None, + provenance: TypeParamProvenance::TraitSelf, + }); + sm.insert(self_param_id, Either::Left(trait_def.clone())); + // add super traits as bounds on Self + // i.e., trait Foo: Bar is equivalent to trait Foo where Self: Bar + let self_param = TypeRef::Path(name![Self].into()); + generics.fill_bounds(&self.body_ctx, trait_def, self_param); + + generics.fill(&self.body_ctx, &mut sm, node); + } + GenericsOwner::Impl => { + // Note that we don't add `Self` here: in `impl`s, `Self` is not a + // type-parameter, but rather is a type-alias for impl's target + // type, so this is handled by the resolver. 
+ generics.fill(&self.body_ctx, &mut sm, node); + } + } + + self.data().generics.alloc(generics) + } + + fn lower_type_bounds(&mut self, node: &impl ast::TypeBoundsOwner) -> Vec { + match node.type_bound_list() { + Some(bound_list) => { + bound_list.bounds().map(|it| TypeBound::from_ast(&self.body_ctx, it)).collect() + } + None => Vec::new(), + } + } + + fn lower_visibility(&mut self, item: &impl ast::VisibilityOwner) -> RawVisibilityId { + let vis = match self.forced_visibility { + Some(vis) => return vis, + None => RawVisibility::from_ast_with_hygiene(item.visibility(), &self.hygiene), + }; + + self.data().vis.alloc(vis) + } + + fn lower_type_ref(&self, type_ref: &ast::Type) -> TypeRef { + TypeRef::from_ast(&self.body_ctx, type_ref.clone()) + } + fn lower_type_ref_opt(&self, type_ref: Option) -> TypeRef { + type_ref.map(|ty| self.lower_type_ref(&ty)).unwrap_or(TypeRef::Error) + } + + /// Forces the visibility `vis` to be used for all items lowered during execution of `f`. + fn with_inherited_visibility( + &mut self, + vis: RawVisibilityId, + f: impl FnOnce(&mut Self) -> R, + ) -> R { + let old = mem::replace(&mut self.forced_visibility, Some(vis)); + let res = f(self); + self.forced_visibility = old; + res + } + + fn next_field_idx(&self) -> Idx { + Idx::from_raw(RawId::from( + self.tree.data.as_ref().map_or(0, |data| data.fields.len() as u32), + )) + } + fn next_variant_idx(&self) -> Idx { + Idx::from_raw(RawId::from( + self.tree.data.as_ref().map_or(0, |data| data.variants.len() as u32), + )) + } +} + +fn desugar_future_path(orig: TypeRef) -> Path { + let path = path![core::future::Future]; + let mut generic_args: Vec<_> = std::iter::repeat(None).take(path.segments.len() - 1).collect(); + let mut last = GenericArgs::empty(); + let binding = + AssociatedTypeBinding { name: name![Output], type_ref: Some(orig), bounds: Vec::new() }; + last.bindings.push(binding); + generic_args.push(Some(Arc::new(last))); + + Path::from_known_path(path, generic_args) +} + +enum 
GenericsOwner<'a> { + /// We need access to the partially-lowered `Function` for lowering `impl Trait` in argument + /// position. + Function(&'a Function), + Struct, + Enum, + Union, + /// The `TraitDef` is needed to fill the source map for the implicit `Self` parameter. + Trait(&'a ast::Trait), + TypeAlias, + Impl, +} diff --git a/crates/hir_def/src/item_tree/tests.rs b/crates/hir_def/src/item_tree/tests.rs new file mode 100644 index 0000000000..9c5bf72bd5 --- /dev/null +++ b/crates/hir_def/src/item_tree/tests.rs @@ -0,0 +1,439 @@ +use base_db::fixture::WithFixture; +use expect::{expect, Expect}; +use hir_expand::{db::AstDatabase, HirFileId, InFile}; +use rustc_hash::FxHashSet; +use std::sync::Arc; +use stdx::format_to; +use syntax::{ast, AstNode}; + +use crate::{db::DefDatabase, test_db::TestDB}; + +use super::{ItemTree, ModItem, ModKind}; + +fn test_inner_items(ra_fixture: &str) { + let (db, file_id) = TestDB::with_single_file(ra_fixture); + let file_id = HirFileId::from(file_id); + let tree = db.item_tree(file_id); + let root = db.parse_or_expand(file_id).unwrap(); + let ast_id_map = db.ast_id_map(file_id); + + // Traverse the item tree and collect all module/impl/trait-level items as AST nodes. 
+ let mut outer_items = FxHashSet::default(); + let mut worklist = tree.top_level_items().to_vec(); + while let Some(item) = worklist.pop() { + let node: ast::Item = match item { + ModItem::Import(it) => tree.source(&db, InFile::new(file_id, it)).into(), + ModItem::ExternCrate(it) => tree.source(&db, InFile::new(file_id, it)).into(), + ModItem::Function(it) => tree.source(&db, InFile::new(file_id, it)).into(), + ModItem::Struct(it) => tree.source(&db, InFile::new(file_id, it)).into(), + ModItem::Union(it) => tree.source(&db, InFile::new(file_id, it)).into(), + ModItem::Enum(it) => tree.source(&db, InFile::new(file_id, it)).into(), + ModItem::Const(it) => tree.source(&db, InFile::new(file_id, it)).into(), + ModItem::Static(it) => tree.source(&db, InFile::new(file_id, it)).into(), + ModItem::TypeAlias(it) => tree.source(&db, InFile::new(file_id, it)).into(), + ModItem::Mod(it) => { + if let ModKind::Inline { items } = &tree[it].kind { + worklist.extend(&**items); + } + tree.source(&db, InFile::new(file_id, it)).into() + } + ModItem::Trait(it) => { + worklist.extend(tree[it].items.iter().map(|item| ModItem::from(*item))); + tree.source(&db, InFile::new(file_id, it)).into() + } + ModItem::Impl(it) => { + worklist.extend(tree[it].items.iter().map(|item| ModItem::from(*item))); + tree.source(&db, InFile::new(file_id, it)).into() + } + ModItem::MacroCall(_) => continue, + }; + + outer_items.insert(node); + } + + // Now descend the root node and check that all `ast::ModuleItem`s are either recorded above, or + // registered as inner items. 
+ for item in root.descendants().skip(1).filter_map(ast::Item::cast) { + if outer_items.contains(&item) { + continue; + } + + let ast_id = ast_id_map.ast_id(&item); + assert!(!tree.inner_items(ast_id).is_empty()); + } +} + +fn item_tree(ra_fixture: &str) -> Arc { + let (db, file_id) = TestDB::with_single_file(ra_fixture); + db.item_tree(file_id.into()) +} + +fn print_item_tree(ra_fixture: &str) -> String { + let tree = item_tree(ra_fixture); + let mut out = String::new(); + + format_to!(out, "inner attrs: {:?}\n\n", tree.top_level_attrs()); + format_to!(out, "top-level items:\n"); + for item in tree.top_level_items() { + fmt_mod_item(&mut out, &tree, *item); + format_to!(out, "\n"); + } + + if !tree.inner_items.is_empty() { + format_to!(out, "\ninner items:\n\n"); + for (ast_id, items) in &tree.inner_items { + format_to!(out, "for AST {:?}:\n", ast_id); + for inner in items { + fmt_mod_item(&mut out, &tree, *inner); + format_to!(out, "\n\n"); + } + } + } + + out +} + +fn fmt_mod_item(out: &mut String, tree: &ItemTree, item: ModItem) { + let attrs = tree.attrs(item.into()); + if !attrs.is_empty() { + format_to!(out, "#[{:?}]\n", attrs); + } + + let mut children = String::new(); + match item { + ModItem::ExternCrate(it) => { + format_to!(out, "{:?}", tree[it]); + } + ModItem::Import(it) => { + format_to!(out, "{:?}", tree[it]); + } + ModItem::Function(it) => { + format_to!(out, "{:?}", tree[it]); + } + ModItem::Struct(it) => { + format_to!(out, "{:?}", tree[it]); + } + ModItem::Union(it) => { + format_to!(out, "{:?}", tree[it]); + } + ModItem::Enum(it) => { + format_to!(out, "{:?}", tree[it]); + } + ModItem::Const(it) => { + format_to!(out, "{:?}", tree[it]); + } + ModItem::Static(it) => { + format_to!(out, "{:?}", tree[it]); + } + ModItem::Trait(it) => { + format_to!(out, "{:?}", tree[it]); + for item in &*tree[it].items { + fmt_mod_item(&mut children, tree, ModItem::from(*item)); + format_to!(children, "\n"); + } + } + ModItem::Impl(it) => { + format_to!(out, 
"{:?}", tree[it]); + for item in &*tree[it].items { + fmt_mod_item(&mut children, tree, ModItem::from(*item)); + format_to!(children, "\n"); + } + } + ModItem::TypeAlias(it) => { + format_to!(out, "{:?}", tree[it]); + } + ModItem::Mod(it) => { + format_to!(out, "{:?}", tree[it]); + match &tree[it].kind { + ModKind::Inline { items } => { + for item in &**items { + fmt_mod_item(&mut children, tree, *item); + format_to!(children, "\n"); + } + } + ModKind::Outline {} => {} + } + } + ModItem::MacroCall(it) => { + format_to!(out, "{:?}", tree[it]); + } + } + + for line in children.lines() { + format_to!(out, "\n> {}", line); + } +} + +fn check(ra_fixture: &str, expect: Expect) { + let actual = print_item_tree(ra_fixture); + expect.assert_eq(&actual); +} + +#[test] +fn smoke() { + check( + r" + #![attr] + + #[attr_on_use] + use {a, b::*}; + + #[ext_crate] + extern crate krate; + + #[on_trait] + trait Tr { + #[assoc_ty] + type AssocTy: Tr<()>; + + #[assoc_const] + const CONST: u8; + + #[assoc_method] + fn method(&self); + + #[assoc_dfl_method] + fn dfl_method(&mut self) {} + } + + #[struct0] + struct Struct0; + + #[struct1] + struct Struct1(#[struct1fld] u8); + + #[struct2] + struct Struct2 { + #[struct2fld] + fld: (T, ), + } + + #[en] + enum En { + #[enum_variant] + Variant { + #[enum_field] + field: u8, + }, + } + + #[un] + union Un { + #[union_fld] + fld: u16, + } + ", + expect![[r##" + inner attrs: Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr"))] }, input: None }]) } + + top-level items: + #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr_on_use"))] }, input: None }]) }] + Import { path: ModPath { kind: Plain, segments: [Name(Text("a"))] }, alias: None, visibility: RawVisibilityId("pub(self)"), is_glob: false, is_prelude: false, ast_id: FileAstId::(0) } + #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr_on_use"))] }, input: None }]) }] + Import { path: 
ModPath { kind: Plain, segments: [Name(Text("b"))] }, alias: None, visibility: RawVisibilityId("pub(self)"), is_glob: true, is_prelude: false, ast_id: FileAstId::(0) } + #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("ext_crate"))] }, input: None }]) }] + ExternCrate { path: ModPath { kind: Plain, segments: [Name(Text("krate"))] }, alias: None, visibility: RawVisibilityId("pub(self)"), is_macro_use: false, ast_id: FileAstId::(1) } + #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("on_trait"))] }, input: None }]) }] + Trait { name: Name(Text("Tr")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(0), auto: false, items: [TypeAlias(Idx::(0)), Const(Idx::(0)), Function(Idx::(0)), Function(Idx::(1))], ast_id: FileAstId::(2) } + > #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("assoc_ty"))] }, input: None }]) }] + > TypeAlias { name: Name(Text("AssocTy")), visibility: RawVisibilityId("pub(self)"), bounds: [Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("Tr"))] }, generic_args: [Some(GenericArgs { args: [Type(Tuple([]))], has_self_type: false, bindings: [] })] })], generic_params: GenericParamsId(4294967295), type_ref: None, ast_id: FileAstId::(8) } + > #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("assoc_const"))] }, input: None }]) }] + > Const { name: Some(Name(Text("CONST"))), visibility: RawVisibilityId("pub(self)"), type_ref: Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("u8"))] }, generic_args: [None] }), ast_id: FileAstId::(9) } + > #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("assoc_method"))] }, input: None }]) }] + > Function { name: Name(Text("method")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: true, is_unsafe: false, 
params: [Reference(Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("Self"))] }, generic_args: [None] }), Shared)], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::(10) } + > #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("assoc_dfl_method"))] }, input: None }]) }] + > Function { name: Name(Text("dfl_method")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: true, is_unsafe: false, params: [Reference(Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("Self"))] }, generic_args: [None] }), Mut)], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::(11) } + #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("struct0"))] }, input: None }]) }] + Struct { name: Name(Text("Struct0")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(1), fields: Unit, ast_id: FileAstId::(3), kind: Unit } + #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("struct1"))] }, input: None }]) }] + Struct { name: Name(Text("Struct1")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(2), fields: Tuple(IdRange::(0..1)), ast_id: FileAstId::(4), kind: Tuple } + #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("struct2"))] }, input: None }]) }] + Struct { name: Name(Text("Struct2")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(3), fields: Record(IdRange::(1..2)), ast_id: FileAstId::(5), kind: Record } + #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("en"))] }, input: None }]) }] + Enum { name: Name(Text("En")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), variants: IdRange::(0..1), ast_id: FileAstId::(6) } + #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: 
[Name(Text("un"))] }, input: None }]) }] + Union { name: Name(Text("Un")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), fields: Record(IdRange::(3..4)), ast_id: FileAstId::(7) } + "##]], + ); +} + +#[test] +fn simple_inner_items() { + check( + r" + impl D for Response { + fn foo() { + end(); + fn end() { + let _x: T = loop {}; + } + } + } + ", + expect![[r#" + inner attrs: Attrs { entries: None } + + top-level items: + Impl { generic_params: GenericParamsId(0), target_trait: Some(Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("D"))] }, generic_args: [None] })), target_type: Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("Response"))] }, generic_args: [Some(GenericArgs { args: [Type(Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("T"))] }, generic_args: [None] }))], has_self_type: false, bindings: [] })] }), is_negative: false, items: [Function(Idx::(1))], ast_id: FileAstId::(0) } + > Function { name: Name(Text("foo")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::(1) } + + inner items: + + for AST FileAstId::(2): + Function { name: Name(Text("end")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(1), has_self_param: false, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::(2) } + + "#]], + ); +} + +#[test] +fn extern_attrs() { + check( + r#" + #[block_attr] + extern "C" { + #[attr_a] + fn a() {} + #[attr_b] + fn b() {} + } + "#, + expect![[r##" + inner attrs: Attrs { entries: None } + + top-level items: + #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr_a"))] }, input: None }, Attr { path: ModPath { kind: Plain, segments: [Name(Text("block_attr"))] }, input: 
None }]) }] + Function { name: Name(Text("a")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: true, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::(1) } + #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr_b"))] }, input: None }, Attr { path: ModPath { kind: Plain, segments: [Name(Text("block_attr"))] }, input: None }]) }] + Function { name: Name(Text("b")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: true, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::(2) } + "##]], + ); +} + +#[test] +fn trait_attrs() { + check( + r#" + #[trait_attr] + trait Tr { + #[attr_a] + fn a() {} + #[attr_b] + fn b() {} + } + "#, + expect![[r##" + inner attrs: Attrs { entries: None } + + top-level items: + #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("trait_attr"))] }, input: None }]) }] + Trait { name: Name(Text("Tr")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(0), auto: false, items: [Function(Idx::(0)), Function(Idx::(1))], ast_id: FileAstId::(0) } + > #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr_a"))] }, input: None }]) }] + > Function { name: Name(Text("a")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::(1) } + > #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr_b"))] }, input: None }]) }] + > Function { name: Name(Text("b")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::(2) } + "##]], + ); +} + 
+#[test] +fn impl_attrs() { + check( + r#" + #[impl_attr] + impl Ty { + #[attr_a] + fn a() {} + #[attr_b] + fn b() {} + } + "#, + expect![[r##" + inner attrs: Attrs { entries: None } + + top-level items: + #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("impl_attr"))] }, input: None }]) }] + Impl { generic_params: GenericParamsId(4294967295), target_trait: None, target_type: Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("Ty"))] }, generic_args: [None] }), is_negative: false, items: [Function(Idx::(0)), Function(Idx::(1))], ast_id: FileAstId::(0) } + > #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr_a"))] }, input: None }]) }] + > Function { name: Name(Text("a")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::(1) } + > #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr_b"))] }, input: None }]) }] + > Function { name: Name(Text("b")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::(2) } + "##]], + ); +} + +#[test] +fn cursed_inner_items() { + test_inner_items( + r" + struct S(T); + + enum En { + Var1 { + t: [(); { trait Inner {} 0 }], + }, + + Var2([u16; { enum Inner {} 0 }]), + } + + type Ty = [En; { struct Inner; 0 }]; + + impl En { + fn assoc() { + trait InnerTrait {} + struct InnerStruct {} + impl InnerTrait for InnerStruct {} + } + } + + trait Tr { + type AssocTy = [u8; { fn f() {} }]; + + const AssocConst: [u8; { fn f() {} }]; + } + ", + ); +} + +#[test] +fn inner_item_attrs() { + check( + r" + fn foo() { + #[on_inner] + fn inner() {} + } + ", + expect![[r##" + inner attrs: Attrs { entries: None } + + top-level 
items: + Function { name: Name(Text("foo")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::(0) } + + inner items: + + for AST FileAstId::(1): + #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("on_inner"))] }, input: None }]) }] + Function { name: Name(Text("inner")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::(1) } + + "##]], + ); +} + +#[test] +fn assoc_item_macros() { + check( + r" + impl S { + items!(); + } + ", + expect![[r#" + inner attrs: Attrs { entries: None } + + top-level items: + Impl { generic_params: GenericParamsId(4294967295), target_trait: None, target_type: Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("S"))] }, generic_args: [None] }), is_negative: false, items: [MacroCall(Idx::(0))], ast_id: FileAstId::(0) } + > MacroCall { name: None, path: ModPath { kind: Plain, segments: [Name(Text("items"))] }, is_export: false, is_local_inner: false, is_builtin: false, ast_id: FileAstId::(1) } + "#]], + ); +} diff --git a/crates/hir_def/src/keys.rs b/crates/hir_def/src/keys.rs new file mode 100644 index 0000000000..40a5d92b50 --- /dev/null +++ b/crates/hir_def/src/keys.rs @@ -0,0 +1,58 @@ +//! 
keys to be used with `DynMap` + +use std::marker::PhantomData; + +use hir_expand::{InFile, MacroDefId}; +use rustc_hash::FxHashMap; +use syntax::{ast, AstNode, AstPtr}; + +use crate::{ + dyn_map::{DynMap, Policy}, + ConstId, EnumId, EnumVariantId, FieldId, FunctionId, ImplId, StaticId, StructId, TraitId, + TypeAliasId, TypeParamId, UnionId, +}; + +pub type Key = crate::dyn_map::Key, V, AstPtrPolicy>; + +pub const FUNCTION: Key = Key::new(); +pub const CONST: Key = Key::new(); +pub const STATIC: Key = Key::new(); +pub const TYPE_ALIAS: Key = Key::new(); +pub const IMPL: Key = Key::new(); +pub const TRAIT: Key = Key::new(); +pub const STRUCT: Key = Key::new(); +pub const UNION: Key = Key::new(); +pub const ENUM: Key = Key::new(); + +pub const VARIANT: Key = Key::new(); +pub const TUPLE_FIELD: Key = Key::new(); +pub const RECORD_FIELD: Key = Key::new(); +pub const TYPE_PARAM: Key = Key::new(); + +pub const MACRO: Key = Key::new(); + +/// XXX: AST Nodes and SyntaxNodes have identity equality semantics: nodes are +/// equal if they point to exactly the same object. +/// +/// In general, we do not guarantee that we have exactly one instance of a +/// syntax tree for each file. We probably should add such guarantee, but, for +/// the time being, we will use identity-less AstPtr comparison. +pub struct AstPtrPolicy { + _phantom: PhantomData<(AST, ID)>, +} + +impl Policy for AstPtrPolicy { + type K = InFile; + type V = ID; + fn insert(map: &mut DynMap, key: InFile, value: ID) { + let key = key.as_ref().map(AstPtr::new); + map.map + .entry::>, ID>>() + .or_insert_with(Default::default) + .insert(key, value); + } + fn get<'a>(map: &'a DynMap, key: &InFile) -> Option<&'a ID> { + let key = key.as_ref().map(AstPtr::new); + map.map.get::>, ID>>()?.get(&key) + } +} diff --git a/crates/hir_def/src/lang_item.rs b/crates/hir_def/src/lang_item.rs new file mode 100644 index 0000000000..063eadccb2 --- /dev/null +++ b/crates/hir_def/src/lang_item.rs @@ -0,0 +1,174 @@ +//! 
Collects lang items: items marked with `#[lang = "..."]` attribute. +//! +//! This attribute to tell the compiler about semi built-in std library +//! features, such as Fn family of traits. +use std::sync::Arc; + +use rustc_hash::FxHashMap; +use syntax::SmolStr; + +use crate::{ + db::DefDatabase, AdtId, AttrDefId, CrateId, EnumId, FunctionId, ImplId, ModuleDefId, ModuleId, + StaticId, StructId, TraitId, +}; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum LangItemTarget { + EnumId(EnumId), + FunctionId(FunctionId), + ImplDefId(ImplId), + StaticId(StaticId), + StructId(StructId), + TraitId(TraitId), +} + +impl LangItemTarget { + pub fn as_enum(self) -> Option { + match self { + LangItemTarget::EnumId(id) => Some(id), + _ => None, + } + } + + pub fn as_function(self) -> Option { + match self { + LangItemTarget::FunctionId(id) => Some(id), + _ => None, + } + } + + pub fn as_impl_def(self) -> Option { + match self { + LangItemTarget::ImplDefId(id) => Some(id), + _ => None, + } + } + + pub fn as_static(self) -> Option { + match self { + LangItemTarget::StaticId(id) => Some(id), + _ => None, + } + } + + pub fn as_struct(self) -> Option { + match self { + LangItemTarget::StructId(id) => Some(id), + _ => None, + } + } + + pub fn as_trait(self) -> Option { + match self { + LangItemTarget::TraitId(id) => Some(id), + _ => None, + } + } +} + +#[derive(Default, Debug, Clone, PartialEq, Eq)] +pub struct LangItems { + items: FxHashMap, +} + +impl LangItems { + pub fn target(&self, item: &str) -> Option { + self.items.get(item).copied() + } + + /// Salsa query. This will look for lang items in a specific crate. 
+ pub(crate) fn crate_lang_items_query(db: &dyn DefDatabase, krate: CrateId) -> Arc { + let _p = profile::span("crate_lang_items_query"); + + let mut lang_items = LangItems::default(); + + let crate_def_map = db.crate_def_map(krate); + + crate_def_map + .modules + .iter() + .filter_map(|(local_id, _)| db.module_lang_items(ModuleId { krate, local_id })) + .for_each(|it| lang_items.items.extend(it.items.iter().map(|(k, v)| (k.clone(), *v)))); + + Arc::new(lang_items) + } + + pub(crate) fn module_lang_items_query( + db: &dyn DefDatabase, + module: ModuleId, + ) -> Option> { + let _p = profile::span("module_lang_items_query"); + let mut lang_items = LangItems::default(); + lang_items.collect_lang_items(db, module); + if lang_items.items.is_empty() { + None + } else { + Some(Arc::new(lang_items)) + } + } + + /// Salsa query. Look for a lang item, starting from the specified crate and recursively + /// traversing its dependencies. + pub(crate) fn lang_item_query( + db: &dyn DefDatabase, + start_crate: CrateId, + item: SmolStr, + ) -> Option { + let _p = profile::span("lang_item_query"); + let lang_items = db.crate_lang_items(start_crate); + let start_crate_target = lang_items.items.get(&item); + if let Some(target) = start_crate_target { + return Some(*target); + } + db.crate_graph()[start_crate] + .dependencies + .iter() + .find_map(|dep| db.lang_item(dep.crate_id, item.clone())) + } + + fn collect_lang_items(&mut self, db: &dyn DefDatabase, module: ModuleId) { + // Look for impl targets + let def_map = db.crate_def_map(module.krate); + let module_data = &def_map[module.local_id]; + for impl_def in module_data.scope.impls() { + self.collect_lang_item(db, impl_def, LangItemTarget::ImplDefId) + } + + for def in module_data.scope.declarations() { + match def { + ModuleDefId::TraitId(trait_) => { + self.collect_lang_item(db, trait_, LangItemTarget::TraitId) + } + ModuleDefId::AdtId(AdtId::EnumId(e)) => { + self.collect_lang_item(db, e, LangItemTarget::EnumId) + } + 
ModuleDefId::AdtId(AdtId::StructId(s)) => { + self.collect_lang_item(db, s, LangItemTarget::StructId) + } + ModuleDefId::FunctionId(f) => { + self.collect_lang_item(db, f, LangItemTarget::FunctionId) + } + ModuleDefId::StaticId(s) => self.collect_lang_item(db, s, LangItemTarget::StaticId), + _ => {} + } + } + } + + fn collect_lang_item( + &mut self, + db: &dyn DefDatabase, + item: T, + constructor: fn(T) -> LangItemTarget, + ) where + T: Into + Copy, + { + if let Some(lang_item_name) = lang_attr(db, item) { + self.items.entry(lang_item_name).or_insert_with(|| constructor(item)); + } + } +} + +pub fn lang_attr(db: &dyn DefDatabase, item: impl Into + Copy) -> Option { + let attrs = db.attrs(item.into()); + attrs.by_key("lang").string_value().cloned() +} diff --git a/crates/hir_def/src/lib.rs b/crates/hir_def/src/lib.rs new file mode 100644 index 0000000000..f24a1dd77c --- /dev/null +++ b/crates/hir_def/src/lib.rs @@ -0,0 +1,541 @@ +//! `hir_def` crate contains everything between macro expansion and type +//! inference. +//! +//! It defines various items (structs, enums, traits) which comprises Rust code, +//! as well as an algorithm for resolving paths to such entities. +//! +//! Note that `hir_def` is a work in progress, so not all of the above is +//! actually true. + +#[allow(unused)] +macro_rules! 
eprintln { + ($($tt:tt)*) => { stdx::eprintln!($($tt)*) }; +} + +pub mod db; + +pub mod attr; +pub mod path; +pub mod type_ref; +pub mod builtin_type; +pub mod diagnostics; +pub mod per_ns; +pub mod item_scope; + +pub mod dyn_map; +pub mod keys; + +pub mod item_tree; + +pub mod adt; +pub mod data; +pub mod generics; +pub mod lang_item; +pub mod docs; + +pub mod expr; +pub mod body; +pub mod resolver; + +mod trace; +pub mod nameres; + +pub mod src; +pub mod child_by_source; + +pub mod visibility; +pub mod find_path; +pub mod import_map; + +#[cfg(test)] +mod test_db; + +use std::hash::{Hash, Hasher}; + +use arena::Idx; +use base_db::{impl_intern_key, salsa, CrateId}; +use hir_expand::{ + ast_id_map::FileAstId, eager::expand_eager_macro, hygiene::Hygiene, AstId, HirFileId, InFile, + MacroCallId, MacroCallKind, MacroDefId, MacroDefKind, +}; +use syntax::ast; + +use crate::builtin_type::BuiltinType; +use item_tree::{ + Const, Enum, Function, Impl, ItemTreeId, ItemTreeNode, ModItem, Static, Struct, Trait, + TypeAlias, Union, +}; +use stdx::impl_from; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct ModuleId { + pub krate: CrateId, + pub local_id: LocalModuleId, +} + +/// An ID of a module, **local** to a specific crate +pub type LocalModuleId = Idx; + +#[derive(Debug)] +pub struct ItemLoc { + pub container: ContainerId, + pub id: ItemTreeId, +} + +impl Clone for ItemLoc { + fn clone(&self) -> Self { + Self { container: self.container, id: self.id } + } +} + +impl Copy for ItemLoc {} + +impl PartialEq for ItemLoc { + fn eq(&self, other: &Self) -> bool { + self.container == other.container && self.id == other.id + } +} + +impl Eq for ItemLoc {} + +impl Hash for ItemLoc { + fn hash(&self, state: &mut H) { + self.container.hash(state); + self.id.hash(state); + } +} + +#[derive(Debug)] +pub struct AssocItemLoc { + pub container: AssocContainerId, + pub id: ItemTreeId, +} + +impl Clone for AssocItemLoc { + fn clone(&self) -> Self { + Self { container: 
self.container, id: self.id } + } +} + +impl Copy for AssocItemLoc {} + +impl PartialEq for AssocItemLoc { + fn eq(&self, other: &Self) -> bool { + self.container == other.container && self.id == other.id + } +} + +impl Eq for AssocItemLoc {} + +impl Hash for AssocItemLoc { + fn hash(&self, state: &mut H) { + self.container.hash(state); + self.id.hash(state); + } +} + +macro_rules! impl_intern { + ($id:ident, $loc:ident, $intern:ident, $lookup:ident) => { + impl_intern_key!($id); + + impl Intern for $loc { + type ID = $id; + fn intern(self, db: &dyn db::DefDatabase) -> $id { + db.$intern(self) + } + } + + impl Lookup for $id { + type Data = $loc; + fn lookup(&self, db: &dyn db::DefDatabase) -> $loc { + db.$lookup(*self) + } + } + }; +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct FunctionId(salsa::InternId); +type FunctionLoc = AssocItemLoc; +impl_intern!(FunctionId, FunctionLoc, intern_function, lookup_intern_function); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub struct StructId(salsa::InternId); +type StructLoc = ItemLoc; +impl_intern!(StructId, StructLoc, intern_struct, lookup_intern_struct); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub struct UnionId(salsa::InternId); +pub type UnionLoc = ItemLoc; +impl_intern!(UnionId, UnionLoc, intern_union, lookup_intern_union); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub struct EnumId(salsa::InternId); +pub type EnumLoc = ItemLoc; +impl_intern!(EnumId, EnumLoc, intern_enum, lookup_intern_enum); + +// FIXME: rename to `VariantId`, only enums can ave variants +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct EnumVariantId { + pub parent: EnumId, + pub local_id: LocalEnumVariantId, +} + +pub type LocalEnumVariantId = Idx; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct FieldId { + pub parent: VariantId, + pub local_id: LocalFieldId, +} + +pub type LocalFieldId = Idx; + 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct ConstId(salsa::InternId); +type ConstLoc = AssocItemLoc; +impl_intern!(ConstId, ConstLoc, intern_const, lookup_intern_const); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct StaticId(salsa::InternId); +pub type StaticLoc = ItemLoc; +impl_intern!(StaticId, StaticLoc, intern_static, lookup_intern_static); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct TraitId(salsa::InternId); +pub type TraitLoc = ItemLoc; +impl_intern!(TraitId, TraitLoc, intern_trait, lookup_intern_trait); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct TypeAliasId(salsa::InternId); +type TypeAliasLoc = AssocItemLoc; +impl_intern!(TypeAliasId, TypeAliasLoc, intern_type_alias, lookup_intern_type_alias); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] +pub struct ImplId(salsa::InternId); +type ImplLoc = ItemLoc; +impl_intern!(ImplId, ImplLoc, intern_impl, lookup_intern_impl); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct TypeParamId { + pub parent: GenericDefId, + pub local_id: LocalTypeParamId, +} + +pub type LocalTypeParamId = Idx; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum ContainerId { + ModuleId(ModuleId), + DefWithBodyId(DefWithBodyId), +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum AssocContainerId { + ContainerId(ContainerId), + ImplId(ImplId), + TraitId(TraitId), +} +impl_from!(ContainerId for AssocContainerId); + +/// A Data Type +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub enum AdtId { + StructId(StructId), + UnionId(UnionId), + EnumId(EnumId), +} +impl_from!(StructId, UnionId, EnumId for AdtId); + +/// The defs which can be visible in the module. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum ModuleDefId { + ModuleId(ModuleId), + FunctionId(FunctionId), + AdtId(AdtId), + // Can't be directly declared, but can be imported. 
+ EnumVariantId(EnumVariantId), + ConstId(ConstId), + StaticId(StaticId), + TraitId(TraitId), + TypeAliasId(TypeAliasId), + BuiltinType(BuiltinType), +} +impl_from!( + ModuleId, + FunctionId, + AdtId(StructId, EnumId, UnionId), + EnumVariantId, + ConstId, + StaticId, + TraitId, + TypeAliasId, + BuiltinType + for ModuleDefId +); + +/// The defs which have a body. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum DefWithBodyId { + FunctionId(FunctionId), + StaticId(StaticId), + ConstId(ConstId), +} + +impl_from!(FunctionId, ConstId, StaticId for DefWithBodyId); + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub enum AssocItemId { + FunctionId(FunctionId), + ConstId(ConstId), + TypeAliasId(TypeAliasId), +} +// FIXME: not every function, ... is actually an assoc item. maybe we should make +// sure that you can only turn actual assoc items into AssocItemIds. This would +// require not implementing From, and instead having some checked way of +// casting them, and somehow making the constructors private, which would be annoying. +impl_from!(FunctionId, ConstId, TypeAliasId for AssocItemId); + +#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)] +pub enum GenericDefId { + FunctionId(FunctionId), + AdtId(AdtId), + TraitId(TraitId), + TypeAliasId(TypeAliasId), + ImplId(ImplId), + // enum variants cannot have generics themselves, but their parent enums + // can, and this makes some code easier to write + EnumVariantId(EnumVariantId), + // consts can have type parameters from their parents (i.e. 
associated consts of traits) + ConstId(ConstId), +} +impl_from!( + FunctionId, + AdtId(StructId, EnumId, UnionId), + TraitId, + TypeAliasId, + ImplId, + EnumVariantId, + ConstId + for GenericDefId +); + +impl From for GenericDefId { + fn from(item: AssocItemId) -> Self { + match item { + AssocItemId::FunctionId(f) => f.into(), + AssocItemId::ConstId(c) => c.into(), + AssocItemId::TypeAliasId(t) => t.into(), + } + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub enum AttrDefId { + ModuleId(ModuleId), + FieldId(FieldId), + AdtId(AdtId), + FunctionId(FunctionId), + EnumVariantId(EnumVariantId), + StaticId(StaticId), + ConstId(ConstId), + TraitId(TraitId), + TypeAliasId(TypeAliasId), + MacroDefId(MacroDefId), + ImplId(ImplId), +} + +impl_from!( + ModuleId, + FieldId, + AdtId(StructId, EnumId, UnionId), + EnumVariantId, + StaticId, + ConstId, + FunctionId, + TraitId, + TypeAliasId, + MacroDefId, + ImplId + for AttrDefId +); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum VariantId { + EnumVariantId(EnumVariantId), + StructId(StructId), + UnionId(UnionId), +} +impl_from!(EnumVariantId, StructId, UnionId for VariantId); + +trait Intern { + type ID; + fn intern(self, db: &dyn db::DefDatabase) -> Self::ID; +} + +pub trait Lookup { + type Data; + fn lookup(&self, db: &dyn db::DefDatabase) -> Self::Data; +} + +pub trait HasModule { + fn module(&self, db: &dyn db::DefDatabase) -> ModuleId; +} + +impl HasModule for ContainerId { + fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { + match *self { + ContainerId::ModuleId(it) => it, + ContainerId::DefWithBodyId(it) => it.module(db), + } + } +} + +impl HasModule for AssocContainerId { + fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { + match *self { + AssocContainerId::ContainerId(it) => it.module(db), + AssocContainerId::ImplId(it) => it.lookup(db).container.module(db), + AssocContainerId::TraitId(it) => it.lookup(db).container.module(db), + } + } +} + +impl HasModule for AssocItemLoc 
{ + fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { + self.container.module(db) + } +} + +impl HasModule for AdtId { + fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { + match self { + AdtId::StructId(it) => it.lookup(db).container, + AdtId::UnionId(it) => it.lookup(db).container, + AdtId::EnumId(it) => it.lookup(db).container, + } + .module(db) + } +} + +impl HasModule for DefWithBodyId { + fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { + match self { + DefWithBodyId::FunctionId(it) => it.lookup(db).module(db), + DefWithBodyId::StaticId(it) => it.lookup(db).module(db), + DefWithBodyId::ConstId(it) => it.lookup(db).module(db), + } + } +} + +impl DefWithBodyId { + pub fn as_mod_item(self, db: &dyn db::DefDatabase) -> ModItem { + match self { + DefWithBodyId::FunctionId(it) => it.lookup(db).id.value.into(), + DefWithBodyId::StaticId(it) => it.lookup(db).id.value.into(), + DefWithBodyId::ConstId(it) => it.lookup(db).id.value.into(), + } + } +} + +impl HasModule for GenericDefId { + fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { + match self { + GenericDefId::FunctionId(it) => it.lookup(db).module(db), + GenericDefId::AdtId(it) => it.module(db), + GenericDefId::TraitId(it) => it.lookup(db).container.module(db), + GenericDefId::TypeAliasId(it) => it.lookup(db).module(db), + GenericDefId::ImplId(it) => it.lookup(db).container.module(db), + GenericDefId::EnumVariantId(it) => it.parent.lookup(db).container.module(db), + GenericDefId::ConstId(it) => it.lookup(db).module(db), + } + } +} + +impl HasModule for StaticLoc { + fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { + self.container.module(db) + } +} + +/// A helper trait for converting to MacroCallId +pub trait AsMacroCall { + fn as_call_id( + &self, + db: &dyn db::DefDatabase, + krate: CrateId, + resolver: impl Fn(path::ModPath) -> Option, + ) -> Option; +} + +impl AsMacroCall for InFile<&ast::MacroCall> { + fn as_call_id( + &self, + db: &dyn db::DefDatabase, + krate: 
CrateId, + resolver: impl Fn(path::ModPath) -> Option, + ) -> Option { + let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(self.value)); + let h = Hygiene::new(db.upcast(), self.file_id); + let path = path::ModPath::from_src(self.value.path()?, &h)?; + + AstIdWithPath::new(ast_id.file_id, ast_id.value, path).as_call_id(db, krate, resolver) + } +} + +/// Helper wrapper for `AstId` with `ModPath` +#[derive(Clone, Debug, Eq, PartialEq)] +struct AstIdWithPath { + pub ast_id: AstId, + pub path: path::ModPath, +} + +impl AstIdWithPath { + pub fn new(file_id: HirFileId, ast_id: FileAstId, path: path::ModPath) -> AstIdWithPath { + AstIdWithPath { ast_id: AstId::new(file_id, ast_id), path } + } +} + +impl AsMacroCall for AstIdWithPath { + fn as_call_id( + &self, + db: &dyn db::DefDatabase, + krate: CrateId, + resolver: impl Fn(path::ModPath) -> Option, + ) -> Option { + let def: MacroDefId = resolver(self.path.clone())?; + + if let MacroDefKind::BuiltInEager(_) = def.kind { + let macro_call = InFile::new(self.ast_id.file_id, self.ast_id.to_node(db.upcast())); + let hygiene = Hygiene::new(db.upcast(), self.ast_id.file_id); + + Some( + expand_eager_macro(db.upcast(), krate, macro_call, def, &|path: ast::Path| { + resolver(path::ModPath::from_src(path, &hygiene)?) + })? + .into(), + ) + } else { + Some(def.as_lazy_macro(db.upcast(), krate, MacroCallKind::FnLike(self.ast_id)).into()) + } + } +} + +impl AsMacroCall for AstIdWithPath { + fn as_call_id( + &self, + db: &dyn db::DefDatabase, + krate: CrateId, + resolver: impl Fn(path::ModPath) -> Option, + ) -> Option { + let def = resolver(self.path.clone())?; + Some( + def.as_lazy_macro( + db.upcast(), + krate, + MacroCallKind::Attr(self.ast_id, self.path.segments.last()?.to_string()), + ) + .into(), + ) + } +} diff --git a/crates/hir_def/src/nameres.rs b/crates/hir_def/src/nameres.rs new file mode 100644 index 0000000000..bf302172d0 --- /dev/null +++ b/crates/hir_def/src/nameres.rs @@ -0,0 +1,326 @@ +//! 
This module implements import-resolution/macro expansion algorithm. +//! +//! The result of this module is `CrateDefMap`: a data structure which contains: +//! +//! * a tree of modules for the crate +//! * for each module, a set of items visible in the module (directly declared +//! or imported) +//! +//! Note that `CrateDefMap` contains fully macro expanded code. +//! +//! Computing `CrateDefMap` can be partitioned into several logically +//! independent "phases". The phases are mutually recursive though, there's no +//! strict ordering. +//! +//! ## Collecting RawItems +//! +//! This happens in the `raw` module, which parses a single source file into a +//! set of top-level items. Nested imports are desugared to flat imports in this +//! phase. Macro calls are represented as a triple of (Path, Option, +//! TokenTree). +//! +//! ## Collecting Modules +//! +//! This happens in the `collector` module. In this phase, we recursively walk +//! tree of modules, collect raw items from submodules, populate module scopes +//! with defined items (so, we assign item ids in this phase) and record the set +//! of unresolved imports and macros. +//! +//! While we walk tree of modules, we also record macro_rules definitions and +//! expand calls to macro_rules defined macros. +//! +//! ## Resolving Imports +//! +//! We maintain a list of currently unresolved imports. On every iteration, we +//! try to resolve some imports from this list. If the import is resolved, we +//! record it, by adding an item to current module scope and, if necessary, by +//! recursively populating glob imports. +//! +//! ## Resolving Macros +//! +//! macro_rules from the same crate use a global mutable namespace. We expand +//! them immediately, when we collect modules. +//! +//! Macros from other crates (including proc-macros) can be used with +//! `foo::bar!` syntax. We handle them similarly to imports. There's a list of +//! unexpanded macros. 
On every iteration, we try to resolve each macro call +//! path and, upon success, we run macro expansion and "collect module" phase on +//! the result + +mod collector; +mod mod_resolution; +mod path_resolution; + +#[cfg(test)] +mod tests; + +use std::sync::Arc; + +use arena::Arena; +use base_db::{CrateId, Edition, FileId}; +use hir_expand::{diagnostics::DiagnosticSink, name::Name, InFile}; +use rustc_hash::FxHashMap; +use stdx::format_to; +use syntax::ast; + +use crate::{ + db::DefDatabase, + item_scope::{BuiltinShadowMode, ItemScope}, + nameres::{diagnostics::DefDiagnostic, path_resolution::ResolveMode}, + path::ModPath, + per_ns::PerNs, + AstId, LocalModuleId, ModuleDefId, ModuleId, +}; + +/// Contains all top-level defs from a macro-expanded crate +#[derive(Debug, PartialEq, Eq)] +pub struct CrateDefMap { + pub root: LocalModuleId, + pub modules: Arena, + pub(crate) krate: CrateId, + /// The prelude module for this crate. This either comes from an import + /// marked with the `prelude_import` attribute, or (in the normal case) from + /// a dependency (`std` or `core`). + pub(crate) prelude: Option, + pub(crate) extern_prelude: FxHashMap, + + edition: Edition, + diagnostics: Vec, +} + +impl std::ops::Index for CrateDefMap { + type Output = ModuleData; + fn index(&self, id: LocalModuleId) -> &ModuleData { + &self.modules[id] + } +} + +#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] +pub enum ModuleOrigin { + CrateRoot { + definition: FileId, + }, + /// Note that non-inline modules, by definition, live inside non-macro file. + File { + is_mod_rs: bool, + declaration: AstId, + definition: FileId, + }, + Inline { + definition: AstId, + }, +} + +impl Default for ModuleOrigin { + fn default() -> Self { + ModuleOrigin::CrateRoot { definition: FileId(0) } + } +} + +impl ModuleOrigin { + fn declaration(&self) -> Option> { + match self { + ModuleOrigin::File { declaration: module, .. } + | ModuleOrigin::Inline { definition: module, .. 
} => Some(*module), + ModuleOrigin::CrateRoot { .. } => None, + } + } + + pub fn file_id(&self) -> Option { + match self { + ModuleOrigin::File { definition, .. } | ModuleOrigin::CrateRoot { definition } => { + Some(*definition) + } + _ => None, + } + } + + pub fn is_inline(&self) -> bool { + match self { + ModuleOrigin::Inline { .. } => true, + ModuleOrigin::CrateRoot { .. } | ModuleOrigin::File { .. } => false, + } + } + + /// Returns a node which defines this module. + /// That is, a file or a `mod foo {}` with items. + fn definition_source(&self, db: &dyn DefDatabase) -> InFile { + match self { + ModuleOrigin::File { definition, .. } | ModuleOrigin::CrateRoot { definition } => { + let file_id = *definition; + let sf = db.parse(file_id).tree(); + InFile::new(file_id.into(), ModuleSource::SourceFile(sf)) + } + ModuleOrigin::Inline { definition } => InFile::new( + definition.file_id, + ModuleSource::Module(definition.to_node(db.upcast())), + ), + } + } +} + +#[derive(Default, Debug, PartialEq, Eq)] +pub struct ModuleData { + pub parent: Option, + pub children: FxHashMap, + pub scope: ItemScope, + + /// Where does this module come from? 
+ pub origin: ModuleOrigin, +} + +impl CrateDefMap { + pub(crate) fn crate_def_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc { + let _p = profile::span("crate_def_map_query").detail(|| { + db.crate_graph()[krate] + .display_name + .as_ref() + .map(ToString::to_string) + .unwrap_or_default() + }); + let def_map = { + let edition = db.crate_graph()[krate].edition; + let mut modules: Arena = Arena::default(); + let root = modules.alloc(ModuleData::default()); + CrateDefMap { + krate, + edition, + extern_prelude: FxHashMap::default(), + prelude: None, + root, + modules, + diagnostics: Vec::new(), + } + }; + let def_map = collector::collect_defs(db, def_map); + Arc::new(def_map) + } + + pub fn add_diagnostics( + &self, + db: &dyn DefDatabase, + module: LocalModuleId, + sink: &mut DiagnosticSink, + ) { + self.diagnostics.iter().for_each(|it| it.add_to(db, module, sink)) + } + + pub fn modules_for_file(&self, file_id: FileId) -> impl Iterator + '_ { + self.modules + .iter() + .filter(move |(_id, data)| data.origin.file_id() == Some(file_id)) + .map(|(id, _data)| id) + } + + pub(crate) fn resolve_path( + &self, + db: &dyn DefDatabase, + original_module: LocalModuleId, + path: &ModPath, + shadow: BuiltinShadowMode, + ) -> (PerNs, Option) { + let res = + self.resolve_path_fp_with_macro(db, ResolveMode::Other, original_module, path, shadow); + (res.resolved_def, res.segment_index) + } + + // FIXME: this can use some more human-readable format (ideally, an IR + // even), as this should be a great debugging aid. 
+ pub fn dump(&self) -> String { + let mut buf = String::new(); + go(&mut buf, self, "crate", self.root); + return buf; + + fn go(buf: &mut String, map: &CrateDefMap, path: &str, module: LocalModuleId) { + format_to!(buf, "{}\n", path); + + let mut entries: Vec<_> = map.modules[module].scope.resolutions().collect(); + entries.sort_by_key(|(name, _)| name.clone()); + + for (name, def) in entries { + format_to!(buf, "{}:", name.map_or("_".to_string(), |name| name.to_string())); + + if def.types.is_some() { + buf.push_str(" t"); + } + if def.values.is_some() { + buf.push_str(" v"); + } + if def.macros.is_some() { + buf.push_str(" m"); + } + if def.is_none() { + buf.push_str(" _"); + } + + buf.push_str("\n"); + } + + for (name, child) in map.modules[module].children.iter() { + let path = format!("{}::{}", path, name); + buf.push('\n'); + go(buf, map, &path, *child); + } + } + } +} + +impl ModuleData { + /// Returns a node which defines this module. That is, a file or a `mod foo {}` with items. + pub fn definition_source(&self, db: &dyn DefDatabase) -> InFile { + self.origin.definition_source(db) + } + + /// Returns a node which declares this module, either a `mod foo;` or a `mod foo {}`. + /// `None` for the crate root or block. 
+ pub fn declaration_source(&self, db: &dyn DefDatabase) -> Option> { + let decl = self.origin.declaration()?; + let value = decl.to_node(db.upcast()); + Some(InFile { file_id: decl.file_id, value }) + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum ModuleSource { + SourceFile(ast::SourceFile), + Module(ast::Module), +} + +mod diagnostics { + use hir_expand::diagnostics::DiagnosticSink; + use syntax::{ast, AstPtr}; + + use crate::{db::DefDatabase, diagnostics::UnresolvedModule, nameres::LocalModuleId, AstId}; + + #[derive(Debug, PartialEq, Eq)] + pub(super) enum DefDiagnostic { + UnresolvedModule { + module: LocalModuleId, + declaration: AstId, + candidate: String, + }, + } + + impl DefDiagnostic { + pub(super) fn add_to( + &self, + db: &dyn DefDatabase, + target_module: LocalModuleId, + sink: &mut DiagnosticSink, + ) { + match self { + DefDiagnostic::UnresolvedModule { module, declaration, candidate } => { + if *module != target_module { + return; + } + let decl = declaration.to_node(db.upcast()); + sink.push(UnresolvedModule { + file: declaration.file_id, + decl: AstPtr::new(&decl), + candidate: candidate.clone(), + }) + } + } + } + } +} diff --git a/crates/hir_def/src/nameres/collector.rs b/crates/hir_def/src/nameres/collector.rs new file mode 100644 index 0000000000..3e99c87737 --- /dev/null +++ b/crates/hir_def/src/nameres/collector.rs @@ -0,0 +1,1279 @@ +//! The core of the module-level name resolution algorithm. +//! +//! `DefCollector::collect` contains the fixed-point iteration loop which +//! resolves imports and expands macros. 
+ +use base_db::{CrateId, FileId, ProcMacroId}; +use cfg::CfgOptions; +use hir_expand::{ + ast_id_map::FileAstId, + builtin_derive::find_builtin_derive, + builtin_macro::find_builtin_macro, + name::{name, AsName, Name}, + proc_macro::ProcMacroExpander, + HirFileId, MacroCallId, MacroDefId, MacroDefKind, +}; +use rustc_hash::FxHashMap; +use syntax::ast; +use test_utils::mark; + +use crate::{ + attr::Attrs, + db::DefDatabase, + item_scope::{ImportType, PerNsGlobImports}, + item_tree::{ + self, FileItemTreeId, ItemTree, ItemTreeId, MacroCall, Mod, ModItem, ModKind, StructDefKind, + }, + nameres::{ + diagnostics::DefDiagnostic, mod_resolution::ModDir, path_resolution::ReachedFixedPoint, + BuiltinShadowMode, CrateDefMap, ModuleData, ModuleOrigin, ResolveMode, + }, + path::{ImportAlias, ModPath, PathKind}, + per_ns::PerNs, + visibility::{RawVisibility, Visibility}, + AdtId, AsMacroCall, AstId, AstIdWithPath, ConstLoc, ContainerId, EnumLoc, EnumVariantId, + FunctionLoc, ImplLoc, Intern, LocalModuleId, ModuleDefId, ModuleId, StaticLoc, StructLoc, + TraitLoc, TypeAliasLoc, UnionLoc, +}; + +const GLOB_RECURSION_LIMIT: usize = 100; +const EXPANSION_DEPTH_LIMIT: usize = 128; +const FIXED_POINT_LIMIT: usize = 8192; + +pub(super) fn collect_defs(db: &dyn DefDatabase, mut def_map: CrateDefMap) -> CrateDefMap { + let crate_graph = db.crate_graph(); + + // populate external prelude + for dep in &crate_graph[def_map.krate].dependencies { + log::debug!("crate dep {:?} -> {:?}", dep.name, dep.crate_id); + let dep_def_map = db.crate_def_map(dep.crate_id); + def_map.extern_prelude.insert( + dep.as_name(), + ModuleId { krate: dep.crate_id, local_id: dep_def_map.root }.into(), + ); + + // look for the prelude + // If the dependency defines a prelude, we overwrite an already defined + // prelude. This is necessary to import the "std" prelude if a crate + // depends on both "core" and "std". 
+ if dep_def_map.prelude.is_some() { + def_map.prelude = dep_def_map.prelude; + } + } + + let cfg_options = &crate_graph[def_map.krate].cfg_options; + let proc_macros = &crate_graph[def_map.krate].proc_macro; + let proc_macros = proc_macros + .iter() + .enumerate() + .map(|(idx, it)| { + // FIXME: a hacky way to create a Name from string. + let name = tt::Ident { text: it.name.clone(), id: tt::TokenId::unspecified() }; + (name.as_name(), ProcMacroExpander::new(def_map.krate, ProcMacroId(idx as u32))) + }) + .collect(); + + let mut collector = DefCollector { + db, + def_map, + glob_imports: FxHashMap::default(), + unresolved_imports: Vec::new(), + resolved_imports: Vec::new(), + + unexpanded_macros: Vec::new(), + unexpanded_attribute_macros: Vec::new(), + mod_dirs: FxHashMap::default(), + cfg_options, + proc_macros, + from_glob_import: Default::default(), + }; + collector.collect(); + collector.finish() +} + +#[derive(Copy, Clone, Debug, Eq, PartialEq)] +enum PartialResolvedImport { + /// None of any namespaces is resolved + Unresolved, + /// One of namespaces is resolved + Indeterminate(PerNs), + /// All namespaces are resolved, OR it is came from other crate + Resolved(PerNs), +} + +impl PartialResolvedImport { + fn namespaces(&self) -> PerNs { + match self { + PartialResolvedImport::Unresolved => PerNs::none(), + PartialResolvedImport::Indeterminate(ns) => *ns, + PartialResolvedImport::Resolved(ns) => *ns, + } + } +} + +#[derive(Clone, Debug, Eq, PartialEq)] +struct Import { + pub path: ModPath, + pub alias: Option, + pub visibility: RawVisibility, + pub is_glob: bool, + pub is_prelude: bool, + pub is_extern_crate: bool, + pub is_macro_use: bool, +} + +impl Import { + fn from_use(tree: &ItemTree, id: FileItemTreeId) -> Self { + let it = &tree[id]; + let visibility = &tree[it.visibility]; + Self { + path: it.path.clone(), + alias: it.alias.clone(), + visibility: visibility.clone(), + is_glob: it.is_glob, + is_prelude: it.is_prelude, + is_extern_crate: false, + 
is_macro_use: false, + } + } + + fn from_extern_crate(tree: &ItemTree, id: FileItemTreeId) -> Self { + let it = &tree[id]; + let visibility = &tree[it.visibility]; + Self { + path: it.path.clone(), + alias: it.alias.clone(), + visibility: visibility.clone(), + is_glob: false, + is_prelude: false, + is_extern_crate: true, + is_macro_use: it.is_macro_use, + } + } +} + +#[derive(Clone, Debug, Eq, PartialEq)] +struct ImportDirective { + module_id: LocalModuleId, + import: Import, + status: PartialResolvedImport, +} + +#[derive(Clone, Debug, Eq, PartialEq)] +struct MacroDirective { + module_id: LocalModuleId, + ast_id: AstIdWithPath, + legacy: Option, + depth: usize, +} + +#[derive(Clone, Debug, Eq, PartialEq)] +struct DeriveDirective { + module_id: LocalModuleId, + ast_id: AstIdWithPath, +} + +struct DefData<'a> { + id: ModuleDefId, + name: &'a Name, + visibility: &'a RawVisibility, + has_constructor: bool, +} + +/// Walks the tree of module recursively +struct DefCollector<'a> { + db: &'a dyn DefDatabase, + def_map: CrateDefMap, + glob_imports: FxHashMap>, + unresolved_imports: Vec, + resolved_imports: Vec, + unexpanded_macros: Vec, + unexpanded_attribute_macros: Vec, + mod_dirs: FxHashMap, + cfg_options: &'a CfgOptions, + proc_macros: Vec<(Name, ProcMacroExpander)>, + from_glob_import: PerNsGlobImports, +} + +impl DefCollector<'_> { + fn collect(&mut self) { + let file_id = self.db.crate_graph()[self.def_map.krate].root_file_id; + let item_tree = self.db.item_tree(file_id.into()); + let module_id = self.def_map.root; + self.def_map.modules[module_id].origin = ModuleOrigin::CrateRoot { definition: file_id }; + ModCollector { + def_collector: &mut *self, + macro_depth: 0, + module_id, + file_id: file_id.into(), + item_tree: &item_tree, + mod_dir: ModDir::root(), + } + .collect(item_tree.top_level_items()); + + // main name resolution fixed-point loop. 
+ let mut i = 0; + loop { + self.db.check_canceled(); + self.resolve_imports(); + + match self.resolve_macros() { + ReachedFixedPoint::Yes => break, + ReachedFixedPoint::No => i += 1, + } + if i == FIXED_POINT_LIMIT { + log::error!("name resolution is stuck"); + break; + } + } + + // Resolve all indeterminate resolved imports again + // As some of the macros will expand newly import shadowing partial resolved imports + // FIXME: We maybe could skip this, if we handle the Indetermine imports in `resolve_imports` + // correctly + let partial_resolved = self.resolved_imports.iter().filter_map(|directive| { + if let PartialResolvedImport::Indeterminate(_) = directive.status { + let mut directive = directive.clone(); + directive.status = PartialResolvedImport::Unresolved; + Some(directive) + } else { + None + } + }); + self.unresolved_imports.extend(partial_resolved); + self.resolve_imports(); + + let unresolved_imports = std::mem::replace(&mut self.unresolved_imports, Vec::new()); + // show unresolved imports in completion, etc + for directive in unresolved_imports { + self.record_resolved_import(&directive) + } + + // Record proc-macros + self.collect_proc_macro(); + } + + fn collect_proc_macro(&mut self) { + let proc_macros = std::mem::take(&mut self.proc_macros); + for (name, expander) in proc_macros { + let krate = self.def_map.krate; + + let macro_id = MacroDefId { + ast_id: None, + krate: Some(krate), + kind: MacroDefKind::CustomDerive(expander), + local_inner: false, + }; + + self.define_proc_macro(name.clone(), macro_id); + } + } + + /// Define a macro with `macro_rules`. + /// + /// It will define the macro in legacy textual scope, and if it has `#[macro_export]`, + /// then it is also defined in the root module scope. + /// You can `use` or invoke it by `crate::macro_name` anywhere, before or after the definition. + /// + /// It is surprising that the macro will never be in the current module scope. 
+ /// These code fails with "unresolved import/macro", + /// ```rust,compile_fail + /// mod m { macro_rules! foo { () => {} } } + /// use m::foo as bar; + /// ``` + /// + /// ```rust,compile_fail + /// macro_rules! foo { () => {} } + /// self::foo!(); + /// crate::foo!(); + /// ``` + /// + /// Well, this code compiles, because the plain path `foo` in `use` is searched + /// in the legacy textual scope only. + /// ```rust + /// macro_rules! foo { () => {} } + /// use foo as bar; + /// ``` + fn define_macro( + &mut self, + module_id: LocalModuleId, + name: Name, + macro_: MacroDefId, + export: bool, + ) { + // Textual scoping + self.define_legacy_macro(module_id, name.clone(), macro_); + + // Module scoping + // In Rust, `#[macro_export]` macros are unconditionally visible at the + // crate root, even if the parent modules is **not** visible. + if export { + self.update( + self.def_map.root, + &[(Some(name), PerNs::macros(macro_, Visibility::Public))], + Visibility::Public, + ImportType::Named, + ); + } + } + + /// Define a legacy textual scoped macro in module + /// + /// We use a map `legacy_macros` to store all legacy textual scoped macros visible per module. + /// It will clone all macros from parent legacy scope, whose definition is prior to + /// the definition of current module. + /// And also, `macro_use` on a module will import all legacy macros visible inside to + /// current legacy scope, with possible shadowing. + fn define_legacy_macro(&mut self, module_id: LocalModuleId, name: Name, mac: MacroDefId) { + // Always shadowing + self.def_map.modules[module_id].scope.define_legacy_macro(name, mac); + } + + /// Define a proc macro + /// + /// A proc macro is similar to normal macro scope, but it would not visiable in legacy textual scoped. + /// And unconditionally exported. 
+ fn define_proc_macro(&mut self, name: Name, macro_: MacroDefId) { + self.update( + self.def_map.root, + &[(Some(name), PerNs::macros(macro_, Visibility::Public))], + Visibility::Public, + ImportType::Named, + ); + } + + /// Import macros from `#[macro_use] extern crate`. + fn import_macros_from_extern_crate( + &mut self, + current_module_id: LocalModuleId, + import: &item_tree::ExternCrate, + ) { + log::debug!( + "importing macros from extern crate: {:?} ({:?})", + import, + self.def_map.edition, + ); + + let res = self.def_map.resolve_name_in_extern_prelude( + &import + .path + .as_ident() + .expect("extern crate should have been desugared to one-element path"), + ); + + if let Some(ModuleDefId::ModuleId(m)) = res.take_types() { + mark::hit!(macro_rules_from_other_crates_are_visible_with_macro_use); + self.import_all_macros_exported(current_module_id, m.krate); + } + } + + /// Import all exported macros from another crate + /// + /// Exported macros are just all macros in the root module scope. + /// Note that it contains not only all `#[macro_export]` macros, but also all aliases + /// created by `use` in the root module, ignoring the visibility of `use`. + fn import_all_macros_exported(&mut self, current_module_id: LocalModuleId, krate: CrateId) { + let def_map = self.db.crate_def_map(krate); + for (name, def) in def_map[def_map.root].scope.macros() { + // `macro_use` only bring things into legacy scope. + self.define_legacy_macro(current_module_id, name.clone(), def); + } + } + + /// Import resolution + /// + /// This is a fix point algorithm. 
We resolve imports until no forward + /// progress in resolving imports is made + fn resolve_imports(&mut self) { + let mut n_previous_unresolved = self.unresolved_imports.len() + 1; + + while self.unresolved_imports.len() < n_previous_unresolved { + n_previous_unresolved = self.unresolved_imports.len(); + let imports = std::mem::replace(&mut self.unresolved_imports, Vec::new()); + for mut directive in imports { + directive.status = self.resolve_import(directive.module_id, &directive.import); + match directive.status { + PartialResolvedImport::Indeterminate(_) => { + self.record_resolved_import(&directive); + // FIXME: For avoid performance regression, + // we consider an imported resolved if it is indeterminate (i.e not all namespace resolved) + self.resolved_imports.push(directive) + } + PartialResolvedImport::Resolved(_) => { + self.record_resolved_import(&directive); + self.resolved_imports.push(directive) + } + PartialResolvedImport::Unresolved => { + self.unresolved_imports.push(directive); + } + } + } + } + } + + fn resolve_import(&self, module_id: LocalModuleId, import: &Import) -> PartialResolvedImport { + log::debug!("resolving import: {:?} ({:?})", import, self.def_map.edition); + if import.is_extern_crate { + let res = self.def_map.resolve_name_in_extern_prelude( + &import + .path + .as_ident() + .expect("extern crate should have been desugared to one-element path"), + ); + PartialResolvedImport::Resolved(res) + } else { + let res = self.def_map.resolve_path_fp_with_macro( + self.db, + ResolveMode::Import, + module_id, + &import.path, + BuiltinShadowMode::Module, + ); + + let def = res.resolved_def; + if res.reached_fixedpoint == ReachedFixedPoint::No || def.is_none() { + return PartialResolvedImport::Unresolved; + } + + if let Some(krate) = res.krate { + if krate != self.def_map.krate { + return PartialResolvedImport::Resolved(def); + } + } + + // Check whether all namespace is resolved + if def.take_types().is_some() + && def.take_values().is_some() + 
&& def.take_macros().is_some() + { + PartialResolvedImport::Resolved(def) + } else { + PartialResolvedImport::Indeterminate(def) + } + } + } + + fn record_resolved_import(&mut self, directive: &ImportDirective) { + let module_id = directive.module_id; + let import = &directive.import; + let def = directive.status.namespaces(); + let vis = self + .def_map + .resolve_visibility(self.db, module_id, &directive.import.visibility) + .unwrap_or(Visibility::Public); + + if import.is_glob { + log::debug!("glob import: {:?}", import); + match def.take_types() { + Some(ModuleDefId::ModuleId(m)) => { + if import.is_prelude { + mark::hit!(std_prelude); + self.def_map.prelude = Some(m); + } else if m.krate != self.def_map.krate { + mark::hit!(glob_across_crates); + // glob import from other crate => we can just import everything once + let item_map = self.db.crate_def_map(m.krate); + let scope = &item_map[m.local_id].scope; + + // Module scoped macros is included + let items = scope + .resolutions() + // only keep visible names... + .map(|(n, res)| { + (n, res.filter_visibility(|v| v.is_visible_from_other_crate())) + }) + .filter(|(_, res)| !res.is_none()) + .collect::>(); + + self.update(module_id, &items, vis, ImportType::Glob); + } else { + // glob import from same crate => we do an initial + // import, and then need to propagate any further + // additions + let scope = &self.def_map[m.local_id].scope; + + // Module scoped macros is included + let items = scope + .resolutions() + // only keep visible names... 
+ .map(|(n, res)| { + ( + n, + res.filter_visibility(|v| { + v.is_visible_from_def_map(&self.def_map, module_id) + }), + ) + }) + .filter(|(_, res)| !res.is_none()) + .collect::>(); + + self.update(module_id, &items, vis, ImportType::Glob); + // record the glob import in case we add further items + let glob = self.glob_imports.entry(m.local_id).or_default(); + if !glob.iter().any(|(mid, _)| *mid == module_id) { + glob.push((module_id, vis)); + } + } + } + Some(ModuleDefId::AdtId(AdtId::EnumId(e))) => { + mark::hit!(glob_enum); + // glob import from enum => just import all the variants + + // XXX: urgh, so this works by accident! Here, we look at + // the enum data, and, in theory, this might require us to + // look back at the crate_def_map, creating a cycle. For + // example, `enum E { crate::some_macro!(); }`. Luckely, the + // only kind of macro that is allowed inside enum is a + // `cfg_macro`, and we don't need to run name resolution for + // it, but this is sheer luck! + let enum_data = self.db.enum_data(e); + let resolutions = enum_data + .variants + .iter() + .map(|(local_id, variant_data)| { + let name = variant_data.name.clone(); + let variant = EnumVariantId { parent: e, local_id }; + let res = PerNs::both(variant.into(), variant.into(), vis); + (Some(name), res) + }) + .collect::>(); + self.update(module_id, &resolutions, vis, ImportType::Glob); + } + Some(d) => { + log::debug!("glob import {:?} from non-module/enum {:?}", import, d); + } + None => { + log::debug!("glob import {:?} didn't resolve as type", import); + } + } + } else { + match import.path.segments.last() { + Some(last_segment) => { + let name = match &import.alias { + Some(ImportAlias::Alias(name)) => Some(name.clone()), + Some(ImportAlias::Underscore) => None, + None => Some(last_segment.clone()), + }; + log::debug!("resolved import {:?} ({:?}) to {:?}", name, import, def); + + // extern crates in the crate root are special-cased to insert entries into the extern prelude: 
rust-lang/rust#54658 + if import.is_extern_crate && module_id == self.def_map.root { + if let (Some(def), Some(name)) = (def.take_types(), name.as_ref()) { + self.def_map.extern_prelude.insert(name.clone(), def); + } + } + + self.update(module_id, &[(name, def)], vis, ImportType::Named); + } + None => mark::hit!(bogus_paths), + } + } + } + + fn update( + &mut self, + module_id: LocalModuleId, + resolutions: &[(Option, PerNs)], + vis: Visibility, + import_type: ImportType, + ) { + self.db.check_canceled(); + self.update_recursive(module_id, resolutions, vis, import_type, 0) + } + + fn update_recursive( + &mut self, + module_id: LocalModuleId, + resolutions: &[(Option, PerNs)], + // All resolutions are imported with this visibility; the visibilies in + // the `PerNs` values are ignored and overwritten + vis: Visibility, + import_type: ImportType, + depth: usize, + ) { + if depth > GLOB_RECURSION_LIMIT { + // prevent stack overflows (but this shouldn't be possible) + panic!("infinite recursion in glob imports!"); + } + let mut changed = false; + + for (name, res) in resolutions { + match name { + Some(name) => { + let scope = &mut self.def_map.modules[module_id].scope; + changed |= scope.push_res_with_import( + &mut self.from_glob_import, + (module_id, name.clone()), + res.with_visibility(vis), + import_type, + ); + } + None => { + let tr = match res.take_types() { + Some(ModuleDefId::TraitId(tr)) => tr, + Some(other) => { + log::debug!("non-trait `_` import of {:?}", other); + continue; + } + None => continue, + }; + let old_vis = self.def_map.modules[module_id].scope.unnamed_trait_vis(tr); + let should_update = match old_vis { + None => true, + Some(old_vis) => { + let max_vis = old_vis.max(vis, &self.def_map).unwrap_or_else(|| { + panic!("`Tr as _` imports with unrelated visibilities {:?} and {:?} (trait {:?})", old_vis, vis, tr); + }); + + if max_vis == old_vis { + false + } else { + mark::hit!(upgrade_underscore_visibility); + true + } + } + }; + + if 
should_update { + changed = true; + self.def_map.modules[module_id].scope.push_unnamed_trait(tr, vis); + } + } + } + } + + if !changed { + return; + } + let glob_imports = self + .glob_imports + .get(&module_id) + .into_iter() + .flat_map(|v| v.iter()) + .filter(|(glob_importing_module, _)| { + // we know all resolutions have the same visibility (`vis`), so we + // just need to check that once + vis.is_visible_from_def_map(&self.def_map, *glob_importing_module) + }) + .cloned() + .collect::>(); + + for (glob_importing_module, glob_import_vis) in glob_imports { + self.update_recursive( + glob_importing_module, + resolutions, + glob_import_vis, + ImportType::Glob, + depth + 1, + ); + } + } + + fn resolve_macros(&mut self) -> ReachedFixedPoint { + let mut macros = std::mem::replace(&mut self.unexpanded_macros, Vec::new()); + let mut attribute_macros = + std::mem::replace(&mut self.unexpanded_attribute_macros, Vec::new()); + let mut resolved = Vec::new(); + let mut res = ReachedFixedPoint::Yes; + macros.retain(|directive| { + if let Some(call_id) = directive.legacy { + res = ReachedFixedPoint::No; + resolved.push((directive.module_id, call_id, directive.depth)); + return false; + } + + if let Some(call_id) = + directive.ast_id.as_call_id(self.db, self.def_map.krate, |path| { + let resolved_res = self.def_map.resolve_path_fp_with_macro( + self.db, + ResolveMode::Other, + directive.module_id, + &path, + BuiltinShadowMode::Module, + ); + resolved_res.resolved_def.take_macros() + }) + { + resolved.push((directive.module_id, call_id, directive.depth)); + res = ReachedFixedPoint::No; + return false; + } + + true + }); + attribute_macros.retain(|directive| { + if let Some(call_id) = + directive.ast_id.as_call_id(self.db, self.def_map.krate, |path| { + self.resolve_attribute_macro(&directive, &path) + }) + { + resolved.push((directive.module_id, call_id, 0)); + res = ReachedFixedPoint::No; + return false; + } + + true + }); + + self.unexpanded_macros = macros; + 
self.unexpanded_attribute_macros = attribute_macros; + + for (module_id, macro_call_id, depth) in resolved { + self.collect_macro_expansion(module_id, macro_call_id, depth); + } + + res + } + + fn resolve_attribute_macro( + &self, + directive: &DeriveDirective, + path: &ModPath, + ) -> Option { + if let Some(name) = path.as_ident() { + // FIXME this should actually be handled with the normal name + // resolution; the std lib defines built-in stubs for the derives, + // but these are new-style `macro`s, which we don't support yet + if let Some(def_id) = find_builtin_derive(name) { + return Some(def_id); + } + } + let resolved_res = self.def_map.resolve_path_fp_with_macro( + self.db, + ResolveMode::Other, + directive.module_id, + &path, + BuiltinShadowMode::Module, + ); + + resolved_res.resolved_def.take_macros() + } + + fn collect_macro_expansion( + &mut self, + module_id: LocalModuleId, + macro_call_id: MacroCallId, + depth: usize, + ) { + if depth > EXPANSION_DEPTH_LIMIT { + mark::hit!(macro_expansion_overflow); + log::warn!("macro expansion is too deep"); + return; + } + let file_id: HirFileId = macro_call_id.as_file(); + let item_tree = self.db.item_tree(file_id); + let mod_dir = self.mod_dirs[&module_id].clone(); + ModCollector { + def_collector: &mut *self, + macro_depth: depth, + file_id, + module_id, + item_tree: &item_tree, + mod_dir, + } + .collect(item_tree.top_level_items()); + } + + fn finish(self) -> CrateDefMap { + self.def_map + } +} + +/// Walks a single module, populating defs, imports and macros +struct ModCollector<'a, 'b> { + def_collector: &'a mut DefCollector<'b>, + macro_depth: usize, + module_id: LocalModuleId, + file_id: HirFileId, + item_tree: &'a ItemTree, + mod_dir: ModDir, +} + +impl ModCollector<'_, '_> { + fn collect(&mut self, items: &[ModItem]) { + // Note: don't assert that inserted value is fresh: it's simply not true + // for macros. 
+ self.def_collector.mod_dirs.insert(self.module_id, self.mod_dir.clone()); + + // Prelude module is always considered to be `#[macro_use]`. + if let Some(prelude_module) = self.def_collector.def_map.prelude { + if prelude_module.krate != self.def_collector.def_map.krate { + mark::hit!(prelude_is_macro_use); + self.def_collector.import_all_macros_exported(self.module_id, prelude_module.krate); + } + } + + // This should be processed eagerly instead of deferred to resolving. + // `#[macro_use] extern crate` is hoisted to imports macros before collecting + // any other items. + for item in items { + if self.is_cfg_enabled(self.item_tree.attrs((*item).into())) { + if let ModItem::ExternCrate(id) = item { + let import = self.item_tree[*id].clone(); + if import.is_macro_use { + self.def_collector.import_macros_from_extern_crate(self.module_id, &import); + } + } + } + } + + for &item in items { + let attrs = self.item_tree.attrs(item.into()); + if self.is_cfg_enabled(attrs) { + let module = + ModuleId { krate: self.def_collector.def_map.krate, local_id: self.module_id }; + let container = ContainerId::ModuleId(module); + + let mut def = None; + match item { + ModItem::Mod(m) => self.collect_module(&self.item_tree[m], attrs), + ModItem::Import(import_id) => { + self.def_collector.unresolved_imports.push(ImportDirective { + module_id: self.module_id, + import: Import::from_use(&self.item_tree, import_id), + status: PartialResolvedImport::Unresolved, + }) + } + ModItem::ExternCrate(import_id) => { + self.def_collector.unresolved_imports.push(ImportDirective { + module_id: self.module_id, + import: Import::from_extern_crate(&self.item_tree, import_id), + status: PartialResolvedImport::Unresolved, + }) + } + ModItem::MacroCall(mac) => self.collect_macro(&self.item_tree[mac]), + ModItem::Impl(imp) => { + let module = ModuleId { + krate: self.def_collector.def_map.krate, + local_id: self.module_id, + }; + let container = ContainerId::ModuleId(module); + let impl_id = ImplLoc { 
container, id: ItemTreeId::new(self.file_id, imp) } + .intern(self.def_collector.db); + self.def_collector.def_map.modules[self.module_id] + .scope + .define_impl(impl_id) + } + ModItem::Function(id) => { + let func = &self.item_tree[id]; + def = Some(DefData { + id: FunctionLoc { + container: container.into(), + id: ItemTreeId::new(self.file_id, id), + } + .intern(self.def_collector.db) + .into(), + name: &func.name, + visibility: &self.item_tree[func.visibility], + has_constructor: false, + }); + } + ModItem::Struct(id) => { + let it = &self.item_tree[id]; + + // FIXME: check attrs to see if this is an attribute macro invocation; + // in which case we don't add the invocation, just a single attribute + // macro invocation + self.collect_derives(attrs, it.ast_id.upcast()); + + def = Some(DefData { + id: StructLoc { container, id: ItemTreeId::new(self.file_id, id) } + .intern(self.def_collector.db) + .into(), + name: &it.name, + visibility: &self.item_tree[it.visibility], + has_constructor: it.kind != StructDefKind::Record, + }); + } + ModItem::Union(id) => { + let it = &self.item_tree[id]; + + // FIXME: check attrs to see if this is an attribute macro invocation; + // in which case we don't add the invocation, just a single attribute + // macro invocation + self.collect_derives(attrs, it.ast_id.upcast()); + + def = Some(DefData { + id: UnionLoc { container, id: ItemTreeId::new(self.file_id, id) } + .intern(self.def_collector.db) + .into(), + name: &it.name, + visibility: &self.item_tree[it.visibility], + has_constructor: false, + }); + } + ModItem::Enum(id) => { + let it = &self.item_tree[id]; + + // FIXME: check attrs to see if this is an attribute macro invocation; + // in which case we don't add the invocation, just a single attribute + // macro invocation + self.collect_derives(attrs, it.ast_id.upcast()); + + def = Some(DefData { + id: EnumLoc { container, id: ItemTreeId::new(self.file_id, id) } + .intern(self.def_collector.db) + .into(), + name: &it.name, + 
visibility: &self.item_tree[it.visibility], + has_constructor: false, + }); + } + ModItem::Const(id) => { + let it = &self.item_tree[id]; + + if let Some(name) = &it.name { + def = Some(DefData { + id: ConstLoc { + container: container.into(), + id: ItemTreeId::new(self.file_id, id), + } + .intern(self.def_collector.db) + .into(), + name, + visibility: &self.item_tree[it.visibility], + has_constructor: false, + }); + } + } + ModItem::Static(id) => { + let it = &self.item_tree[id]; + + def = Some(DefData { + id: StaticLoc { container, id: ItemTreeId::new(self.file_id, id) } + .intern(self.def_collector.db) + .into(), + name: &it.name, + visibility: &self.item_tree[it.visibility], + has_constructor: false, + }); + } + ModItem::Trait(id) => { + let it = &self.item_tree[id]; + + def = Some(DefData { + id: TraitLoc { container, id: ItemTreeId::new(self.file_id, id) } + .intern(self.def_collector.db) + .into(), + name: &it.name, + visibility: &self.item_tree[it.visibility], + has_constructor: false, + }); + } + ModItem::TypeAlias(id) => { + let it = &self.item_tree[id]; + + def = Some(DefData { + id: TypeAliasLoc { + container: container.into(), + id: ItemTreeId::new(self.file_id, id), + } + .intern(self.def_collector.db) + .into(), + name: &it.name, + visibility: &self.item_tree[it.visibility], + has_constructor: false, + }); + } + } + + if let Some(DefData { id, name, visibility, has_constructor }) = def { + self.def_collector.def_map.modules[self.module_id].scope.define_def(id); + let vis = self + .def_collector + .def_map + .resolve_visibility(self.def_collector.db, self.module_id, visibility) + .unwrap_or(Visibility::Public); + self.def_collector.update( + self.module_id, + &[(Some(name.clone()), PerNs::from_def(id, vis, has_constructor))], + vis, + ImportType::Named, + ) + } + } + } + } + + fn collect_module(&mut self, module: &Mod, attrs: &Attrs) { + let path_attr = attrs.by_key("path").string_value(); + let is_macro_use = attrs.by_key("macro_use").exists(); + 
match &module.kind { + // inline module, just recurse + ModKind::Inline { items } => { + let module_id = self.push_child_module( + module.name.clone(), + AstId::new(self.file_id, module.ast_id), + None, + &self.item_tree[module.visibility], + ); + + ModCollector { + def_collector: &mut *self.def_collector, + macro_depth: self.macro_depth, + module_id, + file_id: self.file_id, + item_tree: self.item_tree, + mod_dir: self.mod_dir.descend_into_definition(&module.name, path_attr), + } + .collect(&*items); + if is_macro_use { + self.import_all_legacy_macros(module_id); + } + } + // out of line module, resolve, parse and recurse + ModKind::Outline {} => { + let ast_id = AstId::new(self.file_id, module.ast_id); + match self.mod_dir.resolve_declaration( + self.def_collector.db, + self.file_id, + &module.name, + path_attr, + ) { + Ok((file_id, is_mod_rs, mod_dir)) => { + let module_id = self.push_child_module( + module.name.clone(), + ast_id, + Some((file_id, is_mod_rs)), + &self.item_tree[module.visibility], + ); + let item_tree = self.def_collector.db.item_tree(file_id.into()); + ModCollector { + def_collector: &mut *self.def_collector, + macro_depth: self.macro_depth, + module_id, + file_id: file_id.into(), + item_tree: &item_tree, + mod_dir, + } + .collect(item_tree.top_level_items()); + if is_macro_use { + self.import_all_legacy_macros(module_id); + } + } + Err(candidate) => self.def_collector.def_map.diagnostics.push( + DefDiagnostic::UnresolvedModule { + module: self.module_id, + declaration: ast_id, + candidate, + }, + ), + }; + } + } + } + + fn push_child_module( + &mut self, + name: Name, + declaration: AstId, + definition: Option<(FileId, bool)>, + visibility: &crate::visibility::RawVisibility, + ) -> LocalModuleId { + let vis = self + .def_collector + .def_map + .resolve_visibility(self.def_collector.db, self.module_id, visibility) + .unwrap_or(Visibility::Public); + let modules = &mut self.def_collector.def_map.modules; + let res = 
modules.alloc(ModuleData::default()); + modules[res].parent = Some(self.module_id); + modules[res].origin = match definition { + None => ModuleOrigin::Inline { definition: declaration }, + Some((definition, is_mod_rs)) => { + ModuleOrigin::File { declaration, definition, is_mod_rs } + } + }; + for (name, mac) in modules[self.module_id].scope.collect_legacy_macros() { + modules[res].scope.define_legacy_macro(name, mac) + } + modules[self.module_id].children.insert(name.clone(), res); + let module = ModuleId { krate: self.def_collector.def_map.krate, local_id: res }; + let def: ModuleDefId = module.into(); + self.def_collector.def_map.modules[self.module_id].scope.define_def(def); + self.def_collector.update( + self.module_id, + &[(Some(name), PerNs::from_def(def, vis, false))], + vis, + ImportType::Named, + ); + res + } + + fn collect_derives(&mut self, attrs: &Attrs, ast_id: FileAstId) { + for derive_subtree in attrs.by_key("derive").tt_values() { + // for #[derive(Copy, Clone)], `derive_subtree` is the `(Copy, Clone)` subtree + for tt in &derive_subtree.token_trees { + let ident = match &tt { + tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => ident, + tt::TokenTree::Leaf(tt::Leaf::Punct(_)) => continue, // , is ok + _ => continue, // anything else would be an error (which we currently ignore) + }; + let path = ModPath::from_tt_ident(ident); + + let ast_id = AstIdWithPath::new(self.file_id, ast_id, path); + self.def_collector + .unexpanded_attribute_macros + .push(DeriveDirective { module_id: self.module_id, ast_id }); + } + } + } + + fn collect_macro(&mut self, mac: &MacroCall) { + let mut ast_id = AstIdWithPath::new(self.file_id, mac.ast_id, mac.path.clone()); + + // Case 0: builtin macros + if mac.is_builtin { + if let Some(name) = &mac.name { + let krate = self.def_collector.def_map.krate; + if let Some(macro_id) = find_builtin_macro(name, krate, ast_id.ast_id) { + self.def_collector.define_macro( + self.module_id, + name.clone(), + macro_id, + mac.is_export, + 
); + return; + } + } + } + + // Case 1: macro rules, define a macro in crate-global mutable scope + if is_macro_rules(&mac.path) { + if let Some(name) = &mac.name { + let macro_id = MacroDefId { + ast_id: Some(ast_id.ast_id), + krate: Some(self.def_collector.def_map.krate), + kind: MacroDefKind::Declarative, + local_inner: mac.is_local_inner, + }; + self.def_collector.define_macro( + self.module_id, + name.clone(), + macro_id, + mac.is_export, + ); + } + return; + } + + // Case 2: try to resolve in legacy scope and expand macro_rules + if let Some(macro_call_id) = + ast_id.as_call_id(self.def_collector.db, self.def_collector.def_map.krate, |path| { + path.as_ident().and_then(|name| { + self.def_collector.def_map[self.module_id].scope.get_legacy_macro(&name) + }) + }) + { + self.def_collector.unexpanded_macros.push(MacroDirective { + module_id: self.module_id, + ast_id, + legacy: Some(macro_call_id), + depth: self.macro_depth + 1, + }); + + return; + } + + // Case 3: resolve in module scope, expand during name resolution. + // We rewrite simple path `macro_name` to `self::macro_name` to force resolve in module scope only. 
+ if ast_id.path.is_ident() { + ast_id.path.kind = PathKind::Super(0); + } + + self.def_collector.unexpanded_macros.push(MacroDirective { + module_id: self.module_id, + ast_id, + legacy: None, + depth: self.macro_depth + 1, + }); + } + + fn import_all_legacy_macros(&mut self, module_id: LocalModuleId) { + let macros = self.def_collector.def_map[module_id].scope.collect_legacy_macros(); + for (name, macro_) in macros { + self.def_collector.define_legacy_macro(self.module_id, name.clone(), macro_); + } + } + + fn is_cfg_enabled(&self, attrs: &Attrs) -> bool { + attrs.is_cfg_enabled(self.def_collector.cfg_options) + } +} + +fn is_macro_rules(path: &ModPath) -> bool { + path.as_ident() == Some(&name![macro_rules]) +} + +#[cfg(test)] +mod tests { + use crate::{db::DefDatabase, test_db::TestDB}; + use arena::Arena; + use base_db::{fixture::WithFixture, SourceDatabase}; + + use super::*; + + fn do_collect_defs(db: &dyn DefDatabase, def_map: CrateDefMap) -> CrateDefMap { + let mut collector = DefCollector { + db, + def_map, + glob_imports: FxHashMap::default(), + unresolved_imports: Vec::new(), + resolved_imports: Vec::new(), + unexpanded_macros: Vec::new(), + unexpanded_attribute_macros: Vec::new(), + mod_dirs: FxHashMap::default(), + cfg_options: &CfgOptions::default(), + proc_macros: Default::default(), + from_glob_import: Default::default(), + }; + collector.collect(); + collector.def_map + } + + fn do_resolve(code: &str) -> CrateDefMap { + let (db, _file_id) = TestDB::with_single_file(&code); + let krate = db.test_crate(); + + let def_map = { + let edition = db.crate_graph()[krate].edition; + let mut modules: Arena = Arena::default(); + let root = modules.alloc(ModuleData::default()); + CrateDefMap { + krate, + edition, + extern_prelude: FxHashMap::default(), + prelude: None, + root, + modules, + diagnostics: Vec::new(), + } + }; + do_collect_defs(&db, def_map) + } + + #[test] + fn test_macro_expand_will_stop_1() { + do_resolve( + r#" + macro_rules! 
foo { + ($($ty:ty)*) => { foo!($($ty)*); } + } + foo!(KABOOM); + "#, + ); + } + + #[ignore] // this test does succeed, but takes quite a while :/ + #[test] + fn test_macro_expand_will_stop_2() { + do_resolve( + r#" + macro_rules! foo { + ($($ty:ty)*) => { foo!($($ty)* $($ty)*); } + } + foo!(KABOOM); + "#, + ); + } +} diff --git a/crates/hir_def/src/nameres/mod_resolution.rs b/crates/hir_def/src/nameres/mod_resolution.rs new file mode 100644 index 0000000000..e8389b4846 --- /dev/null +++ b/crates/hir_def/src/nameres/mod_resolution.rs @@ -0,0 +1,139 @@ +//! This module resolves `mod foo;` declaration to file. +use base_db::FileId; +use hir_expand::name::Name; +use syntax::SmolStr; + +use crate::{db::DefDatabase, HirFileId}; + +#[derive(Clone, Debug)] +pub(super) struct ModDir { + /// `` for `mod.rs`, `lib.rs` + /// `foo/` for `foo.rs` + /// `foo/bar/` for `mod bar { mod x; }` nested in `foo.rs` + /// Invariant: path.is_empty() || path.ends_with('/') + dir_path: DirPath, + /// inside `./foo.rs`, mods with `#[path]` should *not* be relative to `./foo/` + root_non_dir_owner: bool, +} + +impl ModDir { + pub(super) fn root() -> ModDir { + ModDir { dir_path: DirPath::empty(), root_non_dir_owner: false } + } + + pub(super) fn descend_into_definition( + &self, + name: &Name, + attr_path: Option<&SmolStr>, + ) -> ModDir { + let path = match attr_path.map(|it| it.as_str()) { + None => { + let mut path = self.dir_path.clone(); + path.push(&name.to_string()); + path + } + Some(attr_path) => { + let mut path = self.dir_path.join_attr(attr_path, self.root_non_dir_owner); + if !(path.is_empty() || path.ends_with('/')) { + path.push('/') + } + DirPath::new(path) + } + }; + ModDir { dir_path: path, root_non_dir_owner: false } + } + + pub(super) fn resolve_declaration( + &self, + db: &dyn DefDatabase, + file_id: HirFileId, + name: &Name, + attr_path: Option<&SmolStr>, + ) -> Result<(FileId, bool, ModDir), String> { + let file_id = file_id.original_file(db.upcast()); + + let mut 
candidate_files = Vec::new(); + match attr_path { + Some(attr_path) => { + candidate_files.push(self.dir_path.join_attr(attr_path, self.root_non_dir_owner)) + } + None => { + candidate_files.push(format!("{}{}.rs", self.dir_path.0, name)); + candidate_files.push(format!("{}{}/mod.rs", self.dir_path.0, name)); + } + }; + + for candidate in candidate_files.iter() { + if let Some(file_id) = db.resolve_path(file_id, candidate.as_str()) { + let is_mod_rs = candidate.ends_with("mod.rs"); + + let (dir_path, root_non_dir_owner) = if is_mod_rs || attr_path.is_some() { + (DirPath::empty(), false) + } else { + (DirPath::new(format!("{}/", name)), true) + }; + return Ok((file_id, is_mod_rs, ModDir { dir_path, root_non_dir_owner })); + } + } + Err(candidate_files.remove(0)) + } +} + +#[derive(Clone, Debug)] +struct DirPath(String); + +impl DirPath { + fn assert_invariant(&self) { + assert!(self.0.is_empty() || self.0.ends_with('/')); + } + fn new(repr: String) -> DirPath { + let res = DirPath(repr); + res.assert_invariant(); + res + } + fn empty() -> DirPath { + DirPath::new(String::new()) + } + fn push(&mut self, name: &str) { + self.0.push_str(name); + self.0.push('/'); + self.assert_invariant(); + } + fn parent(&self) -> Option<&str> { + if self.0.is_empty() { + return None; + }; + let idx = + self.0[..self.0.len() - '/'.len_utf8()].rfind('/').map_or(0, |it| it + '/'.len_utf8()); + Some(&self.0[..idx]) + } + /// So this is the case which doesn't really work I think if we try to be + /// 100% platform agnostic: + /// + /// ``` + /// mod a { + /// #[path="C://sad/face"] + /// mod b { mod c; } + /// } + /// ``` + /// + /// Here, we need to join logical dir path to a string path from an + /// attribute. Ideally, we should somehow losslessly communicate the whole + /// construction to `FileLoader`. 
+ fn join_attr(&self, mut attr: &str, relative_to_parent: bool) -> String { + let base = if relative_to_parent { self.parent().unwrap() } else { &self.0 }; + + if attr.starts_with("./") { + attr = &attr["./".len()..]; + } + let tmp; + let attr = if attr.contains('\\') { + tmp = attr.replace('\\', "/"); + &tmp + } else { + attr + }; + let res = format!("{}{}", base, attr); + res + } +} diff --git a/crates/hir_def/src/nameres/path_resolution.rs b/crates/hir_def/src/nameres/path_resolution.rs new file mode 100644 index 0000000000..88e10574ef --- /dev/null +++ b/crates/hir_def/src/nameres/path_resolution.rs @@ -0,0 +1,330 @@ +//! This modules implements a function to resolve a path `foo::bar::baz` to a +//! def, which is used within the name resolution. +//! +//! When name resolution is finished, the result of resolving a path is either +//! `Some(def)` or `None`. However, when we are in process of resolving imports +//! or macros, there's a third possibility: +//! +//! I can't resolve this path right now, but I might be resolve this path +//! later, when more macros are expanded. +//! +//! `ReachedFixedPoint` signals about this. 
+ +use std::iter::successors; + +use base_db::Edition; +use hir_expand::name::Name; +use test_utils::mark; + +use crate::{ + db::DefDatabase, + item_scope::BUILTIN_SCOPE, + nameres::{BuiltinShadowMode, CrateDefMap}, + path::{ModPath, PathKind}, + per_ns::PerNs, + visibility::{RawVisibility, Visibility}, + AdtId, CrateId, EnumVariantId, LocalModuleId, ModuleDefId, ModuleId, +}; + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub(super) enum ResolveMode { + Import, + Other, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub(super) enum ReachedFixedPoint { + Yes, + No, +} + +#[derive(Debug, Clone)] +pub(super) struct ResolvePathResult { + pub(super) resolved_def: PerNs, + pub(super) segment_index: Option, + pub(super) reached_fixedpoint: ReachedFixedPoint, + pub(super) krate: Option, +} + +impl ResolvePathResult { + fn empty(reached_fixedpoint: ReachedFixedPoint) -> ResolvePathResult { + ResolvePathResult::with(PerNs::none(), reached_fixedpoint, None, None) + } + + fn with( + resolved_def: PerNs, + reached_fixedpoint: ReachedFixedPoint, + segment_index: Option, + krate: Option, + ) -> ResolvePathResult { + ResolvePathResult { resolved_def, reached_fixedpoint, segment_index, krate } + } +} + +impl CrateDefMap { + pub(super) fn resolve_name_in_extern_prelude(&self, name: &Name) -> PerNs { + self.extern_prelude + .get(name) + .map_or(PerNs::none(), |&it| PerNs::types(it, Visibility::Public)) + } + + pub(crate) fn resolve_visibility( + &self, + db: &dyn DefDatabase, + original_module: LocalModuleId, + visibility: &RawVisibility, + ) -> Option { + match visibility { + RawVisibility::Module(path) => { + let (result, remaining) = + self.resolve_path(db, original_module, &path, BuiltinShadowMode::Module); + if remaining.is_some() { + return None; + } + let types = result.take_types()?; + match types { + ModuleDefId::ModuleId(m) => Some(Visibility::Module(m)), + _ => { + // error: visibility needs to refer to module + None + } + } + } + RawVisibility::Public => 
Some(Visibility::Public), + } + } + + // Returns Yes if we are sure that additions to `ItemMap` wouldn't change + // the result. + pub(super) fn resolve_path_fp_with_macro( + &self, + db: &dyn DefDatabase, + mode: ResolveMode, + original_module: LocalModuleId, + path: &ModPath, + shadow: BuiltinShadowMode, + ) -> ResolvePathResult { + let mut segments = path.segments.iter().enumerate(); + let mut curr_per_ns: PerNs = match path.kind { + PathKind::DollarCrate(krate) => { + if krate == self.krate { + mark::hit!(macro_dollar_crate_self); + PerNs::types( + ModuleId { krate: self.krate, local_id: self.root }.into(), + Visibility::Public, + ) + } else { + let def_map = db.crate_def_map(krate); + let module = ModuleId { krate, local_id: def_map.root }; + mark::hit!(macro_dollar_crate_other); + PerNs::types(module.into(), Visibility::Public) + } + } + PathKind::Crate => PerNs::types( + ModuleId { krate: self.krate, local_id: self.root }.into(), + Visibility::Public, + ), + // plain import or absolute path in 2015: crate-relative with + // fallback to extern prelude (with the simplification in + // rust-lang/rust#57745) + // FIXME there must be a nicer way to write this condition + PathKind::Plain | PathKind::Abs + if self.edition == Edition::Edition2015 + && (path.kind == PathKind::Abs || mode == ResolveMode::Import) => + { + let (_, segment) = match segments.next() { + Some((idx, segment)) => (idx, segment), + None => return ResolvePathResult::empty(ReachedFixedPoint::Yes), + }; + log::debug!("resolving {:?} in crate root (+ extern prelude)", segment); + self.resolve_name_in_crate_root_or_extern_prelude(&segment) + } + PathKind::Plain => { + let (_, segment) = match segments.next() { + Some((idx, segment)) => (idx, segment), + None => return ResolvePathResult::empty(ReachedFixedPoint::Yes), + }; + // The first segment may be a builtin type. If the path has more + // than one segment, we first try resolving it as a module + // anyway. 
+ // FIXME: If the next segment doesn't resolve in the module and + // BuiltinShadowMode wasn't Module, then we need to try + // resolving it as a builtin. + let prefer_module = + if path.segments.len() == 1 { shadow } else { BuiltinShadowMode::Module }; + + log::debug!("resolving {:?} in module", segment); + self.resolve_name_in_module(db, original_module, &segment, prefer_module) + } + PathKind::Super(lvl) => { + let m = successors(Some(original_module), |m| self.modules[*m].parent) + .nth(lvl as usize); + if let Some(local_id) = m { + PerNs::types( + ModuleId { krate: self.krate, local_id }.into(), + Visibility::Public, + ) + } else { + log::debug!("super path in root module"); + return ResolvePathResult::empty(ReachedFixedPoint::Yes); + } + } + PathKind::Abs => { + // 2018-style absolute path -- only extern prelude + let segment = match segments.next() { + Some((_, segment)) => segment, + None => return ResolvePathResult::empty(ReachedFixedPoint::Yes), + }; + if let Some(def) = self.extern_prelude.get(&segment) { + log::debug!("absolute path {:?} resolved to crate {:?}", path, def); + PerNs::types(*def, Visibility::Public) + } else { + return ResolvePathResult::empty(ReachedFixedPoint::No); // extern crate declarations can add to the extern prelude + } + } + }; + + for (i, segment) in segments { + let (curr, vis) = match curr_per_ns.take_types_vis() { + Some(r) => r, + None => { + // we still have path segments left, but the path so far + // didn't resolve in the types namespace => no resolution + // (don't break here because `curr_per_ns` might contain + // something in the value namespace, and it would be wrong + // to return that) + return ResolvePathResult::empty(ReachedFixedPoint::No); + } + }; + // resolve segment in curr + + curr_per_ns = match curr { + ModuleDefId::ModuleId(module) => { + if module.krate != self.krate { + let path = ModPath { + segments: path.segments[i..].to_vec(), + kind: PathKind::Super(0), + }; + log::debug!("resolving {:?} in other 
crate", path); + let defp_map = db.crate_def_map(module.krate); + let (def, s) = defp_map.resolve_path(db, module.local_id, &path, shadow); + return ResolvePathResult::with( + def, + ReachedFixedPoint::Yes, + s.map(|s| s + i), + Some(module.krate), + ); + } + + // Since it is a qualified path here, it should not contains legacy macros + self[module.local_id].scope.get(&segment) + } + ModuleDefId::AdtId(AdtId::EnumId(e)) => { + // enum variant + mark::hit!(can_import_enum_variant); + let enum_data = db.enum_data(e); + match enum_data.variant(&segment) { + Some(local_id) => { + let variant = EnumVariantId { parent: e, local_id }; + match &*enum_data.variants[local_id].variant_data { + crate::adt::VariantData::Record(_) => { + PerNs::types(variant.into(), Visibility::Public) + } + crate::adt::VariantData::Tuple(_) + | crate::adt::VariantData::Unit => { + PerNs::both(variant.into(), variant.into(), Visibility::Public) + } + } + } + None => { + return ResolvePathResult::with( + PerNs::types(e.into(), vis), + ReachedFixedPoint::Yes, + Some(i), + Some(self.krate), + ); + } + } + } + s => { + // could be an inherent method call in UFCS form + // (`Struct::method`), or some other kind of associated item + log::debug!( + "path segment {:?} resolved to non-module {:?}, but is not last", + segment, + curr, + ); + + return ResolvePathResult::with( + PerNs::types(s, vis), + ReachedFixedPoint::Yes, + Some(i), + Some(self.krate), + ); + } + }; + } + + ResolvePathResult::with(curr_per_ns, ReachedFixedPoint::Yes, None, Some(self.krate)) + } + + fn resolve_name_in_module( + &self, + db: &dyn DefDatabase, + module: LocalModuleId, + name: &Name, + shadow: BuiltinShadowMode, + ) -> PerNs { + // Resolve in: + // - legacy scope of macro + // - current module / scope + // - extern prelude + // - std prelude + let from_legacy_macro = self[module] + .scope + .get_legacy_macro(name) + .map_or_else(PerNs::none, |m| PerNs::macros(m, Visibility::Public)); + let from_scope = 
self[module].scope.get(name); + let from_builtin = BUILTIN_SCOPE.get(name).copied().unwrap_or_else(PerNs::none); + let from_scope_or_builtin = match shadow { + BuiltinShadowMode::Module => from_scope.or(from_builtin), + BuiltinShadowMode::Other => { + if let Some(ModuleDefId::ModuleId(_)) = from_scope.take_types() { + from_builtin.or(from_scope) + } else { + from_scope.or(from_builtin) + } + } + }; + let from_extern_prelude = self + .extern_prelude + .get(name) + .map_or(PerNs::none(), |&it| PerNs::types(it, Visibility::Public)); + let from_prelude = self.resolve_in_prelude(db, name); + + from_legacy_macro.or(from_scope_or_builtin).or(from_extern_prelude).or(from_prelude) + } + + fn resolve_name_in_crate_root_or_extern_prelude(&self, name: &Name) -> PerNs { + let from_crate_root = self[self.root].scope.get(name); + let from_extern_prelude = self.resolve_name_in_extern_prelude(name); + + from_crate_root.or(from_extern_prelude) + } + + fn resolve_in_prelude(&self, db: &dyn DefDatabase, name: &Name) -> PerNs { + if let Some(prelude) = self.prelude { + let keep; + let def_map = if prelude.krate == self.krate { + self + } else { + // Extend lifetime + keep = db.crate_def_map(prelude.krate); + &keep + }; + def_map[prelude.local_id].scope.get(name) + } else { + PerNs::none() + } + } +} diff --git a/crates/hir_def/src/nameres/tests.rs b/crates/hir_def/src/nameres/tests.rs new file mode 100644 index 0000000000..b105d56b24 --- /dev/null +++ b/crates/hir_def/src/nameres/tests.rs @@ -0,0 +1,690 @@ +mod globs; +mod incremental; +mod macros; +mod mod_resolution; +mod primitives; + +use std::sync::Arc; + +use base_db::{fixture::WithFixture, SourceDatabase}; +use expect::{expect, Expect}; +use test_utils::mark; + +use crate::{db::DefDatabase, nameres::*, test_db::TestDB}; + +fn compute_crate_def_map(fixture: &str) -> Arc { + let db = TestDB::with_files(fixture); + let krate = db.crate_graph().iter().next().unwrap(); + db.crate_def_map(krate) +} + +fn check(ra_fixture: &str, 
expect: Expect) { + let db = TestDB::with_files(ra_fixture); + let krate = db.crate_graph().iter().next().unwrap(); + let actual = db.crate_def_map(krate).dump(); + expect.assert_eq(&actual); +} + +#[test] +fn crate_def_map_smoke_test() { + check( + r#" +//- /lib.rs +mod foo; +struct S; +use crate::foo::bar::E; +use self::E::V; + +//- /foo/mod.rs +pub mod bar; +fn f() {} + +//- /foo/bar.rs +pub struct Baz; + +union U { to_be: bool, not_to_be: u8 } +enum E { V } + +extern { + static EXT: u8; + fn ext(); +} +"#, + expect![[r#" + crate + E: t + S: t v + V: t v + foo: t + + crate::foo + bar: t + f: v + + crate::foo::bar + Baz: t v + E: t + EXT: v + U: t + ext: v + "#]], + ); +} + +#[test] +fn crate_def_map_super_super() { + check( + r#" +mod a { + const A: usize = 0; + mod b { + const B: usize = 0; + mod c { + use super::super::*; + } + } +} +"#, + expect![[r#" + crate + a: t + + crate::a + A: v + b: t + + crate::a::b + B: v + c: t + + crate::a::b::c + A: v + b: t + "#]], + ); +} + +#[test] +fn crate_def_map_fn_mod_same_name() { + check( + r#" +mod m { + pub mod z {} + pub fn z() {} +} +"#, + expect![[r#" + crate + m: t + + crate::m + z: t v + + crate::m::z + "#]], + ); +} + +#[test] +fn bogus_paths() { + mark::check!(bogus_paths); + check( + r#" +//- /lib.rs +mod foo; +struct S; +use self; + +//- /foo/mod.rs +use super; +use crate; +"#, + expect![[r#" + crate + S: t v + foo: t + + crate::foo + "#]], + ); +} + +#[test] +fn use_as() { + check( + r#" +//- /lib.rs +mod foo; +use crate::foo::Baz as Foo; + +//- /foo/mod.rs +pub struct Baz; +"#, + expect![[r#" + crate + Foo: t v + foo: t + + crate::foo + Baz: t v + "#]], + ); +} + +#[test] +fn use_trees() { + check( + r#" +//- /lib.rs +mod foo; +use crate::foo::bar::{Baz, Quux}; + +//- /foo/mod.rs +pub mod bar; + +//- /foo/bar.rs +pub struct Baz; +pub enum Quux {}; +"#, + expect![[r#" + crate + Baz: t v + Quux: t + foo: t + + crate::foo + bar: t + + crate::foo::bar + Baz: t v + Quux: t + "#]], + ); +} + +#[test] +fn 
re_exports() { + check( + r#" +//- /lib.rs +mod foo; +use self::foo::Baz; + +//- /foo/mod.rs +pub mod bar; +pub use self::bar::Baz; + +//- /foo/bar.rs +pub struct Baz; +"#, + expect![[r#" + crate + Baz: t v + foo: t + + crate::foo + Baz: t v + bar: t + + crate::foo::bar + Baz: t v + "#]], + ); +} + +#[test] +fn std_prelude() { + mark::check!(std_prelude); + check( + r#" +//- /main.rs crate:main deps:test_crate +use Foo::*; + +//- /lib.rs crate:test_crate +mod prelude; +#[prelude_import] +use prelude::*; + +//- /prelude.rs +pub enum Foo { Bar, Baz }; +"#, + expect![[r#" + crate + Bar: t v + Baz: t v + "#]], + ); +} + +#[test] +fn can_import_enum_variant() { + mark::check!(can_import_enum_variant); + check( + r#" +enum E { V } +use self::E::V; +"#, + expect![[r#" + crate + E: t + V: t v + "#]], + ); +} + +#[test] +fn edition_2015_imports() { + check( + r#" +//- /main.rs crate:main deps:other_crate edition:2015 +mod foo; +mod bar; + +//- /bar.rs +struct Bar; + +//- /foo.rs +use bar::Bar; +use other_crate::FromLib; + +//- /lib.rs crate:other_crate edition:2018 +struct FromLib; +"#, + expect![[r#" + crate + bar: t + foo: t + + crate::bar + Bar: t v + + crate::foo + Bar: t v + FromLib: t v + "#]], + ); +} + +#[test] +fn item_map_using_self() { + check( + r#" +//- /lib.rs +mod foo; +use crate::foo::bar::Baz::{self}; + +//- /foo/mod.rs +pub mod bar; + +//- /foo/bar.rs +pub struct Baz; +"#, + expect![[r#" + crate + Baz: t v + foo: t + + crate::foo + bar: t + + crate::foo::bar + Baz: t v + "#]], + ); +} + +#[test] +fn item_map_across_crates() { + check( + r#" +//- /main.rs crate:main deps:test_crate +use test_crate::Baz; + +//- /lib.rs crate:test_crate +pub struct Baz; +"#, + expect![[r#" + crate + Baz: t v + "#]], + ); +} + +#[test] +fn extern_crate_rename() { + check( + r#" +//- /main.rs crate:main deps:alloc +extern crate alloc as alloc_crate; +mod alloc; +mod sync; + +//- /sync.rs +use alloc_crate::Arc; + +//- /lib.rs crate:alloc +struct Arc; +"#, + expect![[r#" + crate 
+ alloc_crate: t + sync: t + + crate::sync + Arc: t v + "#]], + ); +} + +#[test] +fn extern_crate_rename_2015_edition() { + check( + r#" +//- /main.rs crate:main deps:alloc edition:2015 +extern crate alloc as alloc_crate; +mod alloc; +mod sync; + +//- /sync.rs +use alloc_crate::Arc; + +//- /lib.rs crate:alloc +struct Arc; +"#, + expect![[r#" + crate + alloc_crate: t + sync: t + + crate::sync + Arc: t v + "#]], + ); +} + +#[test] +fn reexport_across_crates() { + check( + r#" +//- /main.rs crate:main deps:test_crate +use test_crate::Baz; + +//- /lib.rs crate:test_crate +pub use foo::Baz; +mod foo; + +//- /foo.rs +pub struct Baz; +"#, + expect![[r#" + crate + Baz: t v + "#]], + ); +} + +#[test] +fn values_dont_shadow_extern_crates() { + check( + r#" +//- /main.rs crate:main deps:foo +fn foo() {} +use foo::Bar; + +//- /foo/lib.rs crate:foo +pub struct Bar; +"#, + expect![[r#" + crate + Bar: t v + foo: v + "#]], + ); +} + +#[test] +fn std_prelude_takes_precedence_above_core_prelude() { + check( + r#" +//- /main.rs crate:main deps:core,std +use {Foo, Bar}; + +//- /std.rs crate:std deps:core +#[prelude_import] +pub use self::prelude::*; +mod prelude { + pub struct Foo; + pub use core::prelude::Bar; +} + +//- /core.rs crate:core +#[prelude_import] +pub use self::prelude::*; +mod prelude { + pub struct Bar; +} +"#, + expect![[r#" + crate + Bar: t v + Foo: t v + "#]], + ); +} + +#[test] +fn cfg_not_test() { + check( + r#" +//- /main.rs crate:main deps:std +use {Foo, Bar, Baz}; + +//- /lib.rs crate:std +#[prelude_import] +pub use self::prelude::*; +mod prelude { + #[cfg(test)] + pub struct Foo; + #[cfg(not(test))] + pub struct Bar; + #[cfg(all(not(any()), feature = "foo", feature = "bar", opt = "42"))] + pub struct Baz; +} +"#, + expect![[r#" + crate + Bar: t v + Baz: _ + Foo: _ + "#]], + ); +} + +#[test] +fn cfg_test() { + check( + r#" +//- /main.rs crate:main deps:std +use {Foo, Bar, Baz}; + +//- /lib.rs crate:std cfg:test,feature=foo,feature=bar,opt=42 +#[prelude_import] 
+pub use self::prelude::*; +mod prelude { + #[cfg(test)] + pub struct Foo; + #[cfg(not(test))] + pub struct Bar; + #[cfg(all(not(any()), feature = "foo", feature = "bar", opt = "42"))] + pub struct Baz; +} +"#, + expect![[r#" + crate + Bar: _ + Baz: t v + Foo: t v + "#]], + ); +} + +#[test] +fn infer_multiple_namespace() { + check( + r#" +//- /main.rs +mod a { + pub type T = (); + pub use crate::b::*; +} + +use crate::a::T; + +mod b { + pub const T: () = (); +} +"#, + expect![[r#" + crate + T: t v + a: t + b: t + + crate::b + T: v + + crate::a + T: t v + "#]], + ); +} + +#[test] +fn underscore_import() { + check( + r#" +//- /main.rs +use tr::Tr as _; +use tr::Tr2 as _; + +mod tr { + pub trait Tr {} + pub trait Tr2 {} +} + "#, + expect![[r#" + crate + _: t + _: t + tr: t + + crate::tr + Tr: t + Tr2: t + "#]], + ); +} + +#[test] +fn underscore_reexport() { + check( + r#" +//- /main.rs +mod tr { + pub trait PubTr {} + pub trait PrivTr {} +} +mod reex { + use crate::tr::PrivTr as _; + pub use crate::tr::PubTr as _; +} +use crate::reex::*; + "#, + expect![[r#" + crate + _: t + reex: t + tr: t + + crate::tr + PrivTr: t + PubTr: t + + crate::reex + _: t + _: t + "#]], + ); +} + +#[test] +fn underscore_pub_crate_reexport() { + mark::check!(upgrade_underscore_visibility); + check( + r#" +//- /main.rs crate:main deps:lib +use lib::*; + +//- /lib.rs crate:lib +use tr::Tr as _; +pub use tr::Tr as _; + +mod tr { + pub trait Tr {} +} + "#, + expect![[r#" + crate + _: t + "#]], + ); +} + +#[test] +fn underscore_nontrait() { + check( + r#" +//- /main.rs +mod m { + pub struct Struct; + pub enum Enum {} + pub const CONST: () = (); +} +use crate::m::{Struct as _, Enum as _, CONST as _}; + "#, + expect![[r#" + crate + m: t + + crate::m + CONST: v + Enum: t + Struct: t v + "#]], + ); +} + +#[test] +fn underscore_name_conflict() { + check( + r#" +//- /main.rs +struct Tr; + +use tr::Tr as _; + +mod tr { + pub trait Tr {} +} + "#, + expect![[r#" + crate + _: t + Tr: t v + tr: t + + 
crate::tr + Tr: t + "#]], + ); +} diff --git a/crates/ra_hir_def/src/nameres/tests/globs.rs b/crates/hir_def/src/nameres/tests/globs.rs similarity index 100% rename from crates/ra_hir_def/src/nameres/tests/globs.rs rename to crates/hir_def/src/nameres/tests/globs.rs diff --git a/crates/hir_def/src/nameres/tests/incremental.rs b/crates/hir_def/src/nameres/tests/incremental.rs new file mode 100644 index 0000000000..cfbc62cc43 --- /dev/null +++ b/crates/hir_def/src/nameres/tests/incremental.rs @@ -0,0 +1,101 @@ +use std::sync::Arc; + +use base_db::SourceDatabaseExt; + +use super::*; + +fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change: &str) { + let (mut db, pos) = TestDB::with_position(ra_fixture_initial); + let krate = db.test_crate(); + { + let events = db.log_executed(|| { + db.crate_def_map(krate); + }); + assert!(format!("{:?}", events).contains("crate_def_map"), "{:#?}", events) + } + db.set_file_text(pos.file_id, Arc::new(ra_fixture_change.to_string())); + + { + let events = db.log_executed(|| { + db.crate_def_map(krate); + }); + assert!(!format!("{:?}", events).contains("crate_def_map"), "{:#?}", events) + } +} + +#[test] +fn typing_inside_a_function_should_not_invalidate_def_map() { + check_def_map_is_not_recomputed( + r" + //- /lib.rs + mod foo;<|> + + use crate::foo::bar::Baz; + + enum E { A, B } + use E::*; + + fn foo() -> i32 { + 1 + 1 + } + //- /foo/mod.rs + pub mod bar; + + //- /foo/bar.rs + pub struct Baz; + ", + r" + mod foo; + + use crate::foo::bar::Baz; + + enum E { A, B } + use E::*; + + fn foo() -> i32 { 92 } + ", + ); +} + +#[test] +fn typing_inside_a_macro_should_not_invalidate_def_map() { + let (mut db, pos) = TestDB::with_position( + r" + //- /lib.rs + macro_rules! 
m { + ($ident:ident) => { + fn f() { + $ident + $ident; + }; + } + } + mod foo; + + //- /foo/mod.rs + pub mod bar; + + //- /foo/bar.rs + <|> + m!(X); + ", + ); + let krate = db.test_crate(); + { + let events = db.log_executed(|| { + let crate_def_map = db.crate_def_map(krate); + let (_, module_data) = crate_def_map.modules.iter().last().unwrap(); + assert_eq!(module_data.scope.resolutions().count(), 1); + }); + assert!(format!("{:?}", events).contains("crate_def_map"), "{:#?}", events) + } + db.set_file_text(pos.file_id, Arc::new("m!(Y);".to_string())); + + { + let events = db.log_executed(|| { + let crate_def_map = db.crate_def_map(krate); + let (_, module_data) = crate_def_map.modules.iter().last().unwrap(); + assert_eq!(module_data.scope.resolutions().count(), 1); + }); + assert!(!format!("{:?}", events).contains("crate_def_map"), "{:#?}", events) + } +} diff --git a/crates/ra_hir_def/src/nameres/tests/macros.rs b/crates/hir_def/src/nameres/tests/macros.rs similarity index 100% rename from crates/ra_hir_def/src/nameres/tests/macros.rs rename to crates/hir_def/src/nameres/tests/macros.rs diff --git a/crates/hir_def/src/nameres/tests/mod_resolution.rs b/crates/hir_def/src/nameres/tests/mod_resolution.rs new file mode 100644 index 0000000000..1f619787e1 --- /dev/null +++ b/crates/hir_def/src/nameres/tests/mod_resolution.rs @@ -0,0 +1,796 @@ +use super::*; + +#[test] +fn name_res_works_for_broken_modules() { + mark::check!(name_res_works_for_broken_modules); + check( + r" +//- /lib.rs +mod foo // no `;`, no body +use self::foo::Baz; + +//- /foo/mod.rs +pub mod bar; +pub use self::bar::Baz; + +//- /foo/bar.rs +pub struct Baz; +", + expect![[r#" + crate + Baz: _ + foo: t + + crate::foo + "#]], + ); +} + +#[test] +fn nested_module_resolution() { + check( + r#" +//- /lib.rs +mod n1; + +//- /n1.rs +mod n2; + +//- /n1/n2.rs +struct X; +"#, + expect![[r#" + crate + n1: t + + crate::n1 + n2: t + + crate::n1::n2 + X: t v + "#]], + ); +} + +#[test] +fn 
nested_module_resolution_2() { + check( + r#" +//- /lib.rs +mod prelude; +mod iter; + +//- /prelude.rs +pub use crate::iter::Iterator; + +//- /iter.rs +pub use self::traits::Iterator; +mod traits; + +//- /iter/traits.rs +pub use self::iterator::Iterator; +mod iterator; + +//- /iter/traits/iterator.rs +pub trait Iterator; +"#, + expect![[r#" + crate + iter: t + prelude: t + + crate::iter + Iterator: t + traits: t + + crate::iter::traits + Iterator: t + iterator: t + + crate::iter::traits::iterator + Iterator: t + + crate::prelude + Iterator: t + "#]], + ); +} + +#[test] +fn module_resolution_works_for_non_standard_filenames() { + check( + r#" +//- /my_library.rs crate:my_library +mod foo; +use self::foo::Bar; + +//- /foo/mod.rs +pub struct Bar; +"#, + expect![[r#" + crate + Bar: t v + foo: t + + crate::foo + Bar: t v + "#]], + ); +} + +#[test] +fn module_resolution_works_for_raw_modules() { + check( + r#" +//- /lib.rs +mod r#async; +use self::r#async::Bar; + +//- /async.rs +pub struct Bar; +"#, + expect![[r#" + crate + Bar: t v + async: t + + crate::async + Bar: t v + "#]], + ); +} + +#[test] +fn module_resolution_decl_path() { + check( + r#" +//- /lib.rs +#[path = "bar/baz/foo.rs"] +mod foo; +use self::foo::Bar; + +//- /bar/baz/foo.rs +pub struct Bar; +"#, + expect![[r#" + crate + Bar: t v + foo: t + + crate::foo + Bar: t v + "#]], + ); +} + +#[test] +fn module_resolution_module_with_path_in_mod_rs() { + check( + r#" +//- /main.rs +mod foo; + +//- /foo/mod.rs +#[path = "baz.rs"] +pub mod bar; +use self::bar::Baz; + +//- /foo/baz.rs +pub struct Baz; +"#, + expect![[r#" + crate + foo: t + + crate::foo + Baz: t v + bar: t + + crate::foo::bar + Baz: t v + "#]], + ); +} + +#[test] +fn module_resolution_module_with_path_non_crate_root() { + check( + r#" +//- /main.rs +mod foo; + +//- /foo.rs +#[path = "baz.rs"] +pub mod bar; +use self::bar::Baz; + +//- /baz.rs +pub struct Baz; +"#, + expect![[r#" + crate + foo: t + + crate::foo + Baz: t v + bar: t + + crate::foo::bar + 
Baz: t v + "#]], + ); +} + +#[test] +fn module_resolution_module_decl_path_super() { + check( + r#" +//- /main.rs +#[path = "bar/baz/module.rs"] +mod foo; +pub struct Baz; + +//- /bar/baz/module.rs +use super::Baz; +"#, + expect![[r#" + crate + Baz: t v + foo: t + + crate::foo + Baz: t v + "#]], + ); +} + +#[test] +fn module_resolution_explicit_path_mod_rs() { + check( + r#" +//- /main.rs +#[path = "module/mod.rs"] +mod foo; + +//- /module/mod.rs +pub struct Baz; +"#, + expect![[r#" + crate + foo: t + + crate::foo + Baz: t v + "#]], + ); +} + +#[test] +fn module_resolution_relative_path() { + check( + r#" +//- /main.rs +mod foo; + +//- /foo.rs +#[path = "./sub.rs"] +pub mod foo_bar; + +//- /sub.rs +pub struct Baz; +"#, + expect![[r#" + crate + foo: t + + crate::foo + foo_bar: t + + crate::foo::foo_bar + Baz: t v + "#]], + ); +} + +#[test] +fn module_resolution_relative_path_2() { + check( + r#" +//- /main.rs +mod foo; + +//- /foo/mod.rs +#[path="../sub.rs"] +pub mod foo_bar; + +//- /sub.rs +pub struct Baz; +"#, + expect![[r#" + crate + foo: t + + crate::foo + foo_bar: t + + crate::foo::foo_bar + Baz: t v + "#]], + ); +} + +#[test] +fn module_resolution_relative_path_outside_root() { + check( + r#" +//- /main.rs +#[path="../../../../../outside.rs"] +mod foo; +"#, + expect![[r#" + crate + "#]], + ); +} + +#[test] +fn module_resolution_explicit_path_mod_rs_2() { + check( + r#" +//- /main.rs +#[path = "module/bar/mod.rs"] +mod foo; + +//- /module/bar/mod.rs +pub struct Baz; +"#, + expect![[r#" + crate + foo: t + + crate::foo + Baz: t v + "#]], + ); +} + +#[test] +fn module_resolution_explicit_path_mod_rs_with_win_separator() { + check( + r#" +//- /main.rs +#[path = "module\bar\mod.rs"] +mod foo; + +//- /module/bar/mod.rs +pub struct Baz; +"#, + expect![[r#" + crate + foo: t + + crate::foo + Baz: t v + "#]], + ); +} + +#[test] +fn module_resolution_decl_inside_inline_module_with_path_attribute() { + check( + r#" +//- /main.rs +#[path = "models"] +mod foo { mod bar; } + 
+//- /models/bar.rs +pub struct Baz; +"#, + expect![[r#" + crate + foo: t + + crate::foo + bar: t + + crate::foo::bar + Baz: t v + "#]], + ); +} + +#[test] +fn module_resolution_decl_inside_inline_module() { + check( + r#" +//- /main.rs +mod foo { mod bar; } + +//- /foo/bar.rs +pub struct Baz; +"#, + expect![[r#" + crate + foo: t + + crate::foo + bar: t + + crate::foo::bar + Baz: t v + "#]], + ); +} + +#[test] +fn module_resolution_decl_inside_inline_module_2_with_path_attribute() { + check( + r#" +//- /main.rs +#[path = "models/db"] +mod foo { mod bar; } + +//- /models/db/bar.rs +pub struct Baz; +"#, + expect![[r#" + crate + foo: t + + crate::foo + bar: t + + crate::foo::bar + Baz: t v + "#]], + ); +} + +#[test] +fn module_resolution_decl_inside_inline_module_3() { + check( + r#" +//- /main.rs +#[path = "models/db"] +mod foo { + #[path = "users.rs"] + mod bar; +} + +//- /models/db/users.rs +pub struct Baz; +"#, + expect![[r#" + crate + foo: t + + crate::foo + bar: t + + crate::foo::bar + Baz: t v + "#]], + ); +} + +#[test] +fn module_resolution_decl_inside_inline_module_empty_path() { + check( + r#" +//- /main.rs +#[path = ""] +mod foo { + #[path = "users.rs"] + mod bar; +} + +//- /users.rs +pub struct Baz; +"#, + expect![[r#" + crate + foo: t + + crate::foo + bar: t + + crate::foo::bar + Baz: t v + "#]], + ); +} + +#[test] +fn module_resolution_decl_empty_path() { + check( + r#" +//- /main.rs +#[path = ""] // Should try to read `/` (a directory) +mod foo; + +//- /foo.rs +pub struct Baz; +"#, + expect![[r#" + crate + "#]], + ); +} + +#[test] +fn module_resolution_decl_inside_inline_module_relative_path() { + check( + r#" +//- /main.rs +#[path = "./models"] +mod foo { mod bar; } + +//- /models/bar.rs +pub struct Baz; +"#, + expect![[r#" + crate + foo: t + + crate::foo + bar: t + + crate::foo::bar + Baz: t v + "#]], + ); +} + +#[test] +fn module_resolution_decl_inside_inline_module_in_crate_root() { + check( + r#" +//- /main.rs +mod foo { + #[path = "baz.rs"] + mod 
bar; +} +use self::foo::bar::Baz; + +//- /foo/baz.rs +pub struct Baz; +"#, + expect![[r#" + crate + Baz: t v + foo: t + + crate::foo + bar: t + + crate::foo::bar + Baz: t v + "#]], + ); +} + +#[test] +fn module_resolution_decl_inside_inline_module_in_mod_rs() { + check( + r#" +//- /main.rs +mod foo; + +//- /foo/mod.rs +mod bar { + #[path = "qwe.rs"] + pub mod baz; +} +use self::bar::baz::Baz; + +//- /foo/bar/qwe.rs +pub struct Baz; +"#, + expect![[r#" + crate + foo: t + + crate::foo + Baz: t v + bar: t + + crate::foo::bar + baz: t + + crate::foo::bar::baz + Baz: t v + "#]], + ); +} + +#[test] +fn module_resolution_decl_inside_inline_module_in_non_crate_root() { + check( + r#" +//- /main.rs +mod foo; + +//- /foo.rs +mod bar { + #[path = "qwe.rs"] + pub mod baz; +} +use self::bar::baz::Baz; + +//- /foo/bar/qwe.rs +pub struct Baz; +"#, + expect![[r#" + crate + foo: t + + crate::foo + Baz: t v + bar: t + + crate::foo::bar + baz: t + + crate::foo::bar::baz + Baz: t v + "#]], + ); +} + +#[test] +fn module_resolution_decl_inside_inline_module_in_non_crate_root_2() { + check( + r#" +//- /main.rs +mod foo; + +//- /foo.rs +#[path = "bar"] +mod bar { + pub mod baz; +} +use self::bar::baz::Baz; + +//- /bar/baz.rs +pub struct Baz; +"#, + expect![[r#" + crate + foo: t + + crate::foo + Baz: t v + bar: t + + crate::foo::bar + baz: t + + crate::foo::bar::baz + Baz: t v + "#]], + ); +} + +#[test] +fn unresolved_module_diagnostics() { + let db = TestDB::with_files( + r" + //- /lib.rs + mod foo; + mod bar; + mod baz {} + //- /foo.rs + ", + ); + let krate = db.test_crate(); + + let crate_def_map = db.crate_def_map(krate); + + expect![[r#" + [ + UnresolvedModule { + module: Idx::(0), + declaration: InFile { + file_id: HirFileId( + FileId( + FileId( + 0, + ), + ), + ), + value: FileAstId::(1), + }, + candidate: "bar.rs", + }, + ] + "#]] + .assert_debug_eq(&crate_def_map.diagnostics); +} + +#[test] +fn module_resolution_decl_inside_module_in_non_crate_root_2() { + check( + r#" +//- 
/main.rs +#[path="module/m2.rs"] +mod module; + +//- /module/m2.rs +pub mod submod; + +//- /module/submod.rs +pub struct Baz; +"#, + expect![[r#" + crate + module: t + + crate::module + submod: t + + crate::module::submod + Baz: t v + "#]], + ); +} + +#[test] +fn nested_out_of_line_module() { + check( + r#" +//- /lib.rs +mod a { + mod b { + mod c; + } +} + +//- /a/b/c.rs +struct X; +"#, + expect![[r#" + crate + a: t + + crate::a + b: t + + crate::a::b + c: t + + crate::a::b::c + X: t v + "#]], + ); +} + +#[test] +fn nested_out_of_line_module_with_path() { + check( + r#" +//- /lib.rs +mod a { + #[path = "d/e"] + mod b { + mod c; + } +} + +//- /a/d/e/c.rs +struct X; +"#, + expect![[r#" + crate + a: t + + crate::a + b: t + + crate::a::b + c: t + + crate::a::b::c + X: t v + "#]], + ); +} diff --git a/crates/ra_hir_def/src/nameres/tests/primitives.rs b/crates/hir_def/src/nameres/tests/primitives.rs similarity index 100% rename from crates/ra_hir_def/src/nameres/tests/primitives.rs rename to crates/hir_def/src/nameres/tests/primitives.rs diff --git a/crates/hir_def/src/path.rs b/crates/hir_def/src/path.rs new file mode 100644 index 0000000000..74d26f08b3 --- /dev/null +++ b/crates/hir_def/src/path.rs @@ -0,0 +1,351 @@ +//! A desugared representation of paths like `crate::foo` or `::bar`. 
+mod lower; + +use std::{ + fmt::{self, Display}, + iter, + sync::Arc, +}; + +use crate::body::LowerCtx; +use base_db::CrateId; +use hir_expand::{ + hygiene::Hygiene, + name::{AsName, Name}, +}; +use syntax::ast; + +use crate::{ + type_ref::{TypeBound, TypeRef}, + InFile, +}; + +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct ModPath { + pub kind: PathKind, + pub segments: Vec, +} + +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub enum PathKind { + Plain, + /// `self::` is `Super(0)` + Super(u8), + Crate, + /// Absolute path (::foo) + Abs, + /// `$crate` from macro expansion + DollarCrate(CrateId), +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum ImportAlias { + /// Unnamed alias, as in `use Foo as _;` + Underscore, + /// Named alias + Alias(Name), +} + +impl ModPath { + pub fn from_src(path: ast::Path, hygiene: &Hygiene) -> Option { + lower::lower_path(path, hygiene).map(|it| it.mod_path) + } + + pub fn from_segments(kind: PathKind, segments: impl IntoIterator) -> ModPath { + let segments = segments.into_iter().collect::>(); + ModPath { kind, segments } + } + + pub(crate) fn from_name_ref(name_ref: &ast::NameRef) -> ModPath { + name_ref.as_name().into() + } + + /// Converts an `tt::Ident` into a single-identifier `Path`. + pub(crate) fn from_tt_ident(ident: &tt::Ident) -> ModPath { + ident.as_name().into() + } + + /// Calls `cb` with all paths, represented by this use item. + pub(crate) fn expand_use_item( + item_src: InFile, + hygiene: &Hygiene, + mut cb: impl FnMut(ModPath, &ast::UseTree, /* is_glob */ bool, Option), + ) { + if let Some(tree) = item_src.value.use_tree() { + lower::lower_use_tree(None, tree, hygiene, &mut cb); + } + } + + /// Returns the number of segments in the path (counting special segments like `$crate` and + /// `super`). 
+ pub fn len(&self) -> usize { + self.segments.len() + + match self.kind { + PathKind::Plain => 0, + PathKind::Super(i) => i as usize, + PathKind::Crate => 1, + PathKind::Abs => 0, + PathKind::DollarCrate(_) => 1, + } + } + + pub fn is_ident(&self) -> bool { + self.kind == PathKind::Plain && self.segments.len() == 1 + } + + pub fn is_self(&self) -> bool { + self.kind == PathKind::Super(0) && self.segments.is_empty() + } + + /// If this path is a single identifier, like `foo`, return its name. + pub fn as_ident(&self) -> Option<&Name> { + if self.kind != PathKind::Plain || self.segments.len() > 1 { + return None; + } + self.segments.first() + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Path { + /// Type based path like `::foo`. + /// Note that paths like `::foo` are desugard to `Trait::::foo`. + type_anchor: Option>, + mod_path: ModPath, + /// Invariant: the same len as `self.mod_path.segments` + generic_args: Vec>>, +} + +/// Generic arguments to a path segment (e.g. the `i32` in `Option`). This +/// also includes bindings of associated types, like in `Iterator`. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct GenericArgs { + pub args: Vec, + /// This specifies whether the args contain a Self type as the first + /// element. This is the case for path segments like ``, where + /// `T` is actually a type parameter for the path `Trait` specifying the + /// Self type. Otherwise, when we have a path `Trait`, the Self type + /// is left out. + pub has_self_type: bool, + /// Associated type bindings like in `Iterator`. + pub bindings: Vec, +} + +/// An associated type binding like in `Iterator`. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct AssociatedTypeBinding { + /// The name of the associated type. + pub name: Name, + /// The type bound to this associated type (in `Item = T`, this would be the + /// `T`). This can be `None` if there are bounds instead. 
+ pub type_ref: Option, + /// Bounds for the associated type, like in `Iterator`. (This is the unstable `associated_type_bounds` + /// feature.) + pub bounds: Vec, +} + +/// A single generic argument. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum GenericArg { + Type(TypeRef), + // or lifetime... +} + +impl Path { + /// Converts an `ast::Path` to `Path`. Works with use trees. + #[deprecated = "Doesn't handle hygiene, don't add new calls, remove old ones"] + pub fn from_ast(path: ast::Path) -> Option { + lower::lower_path(path, &Hygiene::new_unhygienic()) + } + + /// Converts an `ast::Path` to `Path`. Works with use trees. + /// It correctly handles `$crate` based path from macro call. + pub fn from_src(path: ast::Path, hygiene: &Hygiene) -> Option { + lower::lower_path(path, hygiene) + } + + /// Converts a known mod path to `Path`. + pub(crate) fn from_known_path( + path: ModPath, + generic_args: Vec>>, + ) -> Path { + Path { type_anchor: None, mod_path: path, generic_args } + } + + pub fn kind(&self) -> &PathKind { + &self.mod_path.kind + } + + pub fn type_anchor(&self) -> Option<&TypeRef> { + self.type_anchor.as_deref() + } + + pub fn segments(&self) -> PathSegments<'_> { + PathSegments { + segments: self.mod_path.segments.as_slice(), + generic_args: self.generic_args.as_slice(), + } + } + + pub fn mod_path(&self) -> &ModPath { + &self.mod_path + } + + pub fn qualifier(&self) -> Option { + if self.mod_path.is_ident() { + return None; + } + let res = Path { + type_anchor: self.type_anchor.clone(), + mod_path: ModPath { + kind: self.mod_path.kind.clone(), + segments: self.mod_path.segments[..self.mod_path.segments.len() - 1].to_vec(), + }, + generic_args: self.generic_args[..self.generic_args.len() - 1].to_vec(), + }; + Some(res) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct PathSegment<'a> { + pub name: &'a Name, + pub args_and_bindings: Option<&'a GenericArgs>, +} + +pub struct PathSegments<'a> { + segments: &'a [Name], + 
generic_args: &'a [Option>], +} + +impl<'a> PathSegments<'a> { + pub const EMPTY: PathSegments<'static> = PathSegments { segments: &[], generic_args: &[] }; + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + pub fn len(&self) -> usize { + self.segments.len() + } + pub fn first(&self) -> Option> { + self.get(0) + } + pub fn last(&self) -> Option> { + self.get(self.len().checked_sub(1)?) + } + pub fn get(&self, idx: usize) -> Option> { + assert_eq!(self.segments.len(), self.generic_args.len()); + let res = PathSegment { + name: self.segments.get(idx)?, + args_and_bindings: self.generic_args.get(idx).unwrap().as_ref().map(|it| &**it), + }; + Some(res) + } + pub fn skip(&self, len: usize) -> PathSegments<'a> { + assert_eq!(self.segments.len(), self.generic_args.len()); + PathSegments { segments: &self.segments[len..], generic_args: &self.generic_args[len..] } + } + pub fn take(&self, len: usize) -> PathSegments<'a> { + assert_eq!(self.segments.len(), self.generic_args.len()); + PathSegments { segments: &self.segments[..len], generic_args: &self.generic_args[..len] } + } + pub fn iter(&self) -> impl Iterator> { + self.segments.iter().zip(self.generic_args.iter()).map(|(name, args)| PathSegment { + name, + args_and_bindings: args.as_ref().map(|it| &**it), + }) + } +} + +impl GenericArgs { + pub(crate) fn from_ast(lower_ctx: &LowerCtx, node: ast::GenericArgList) -> Option { + lower::lower_generic_args(lower_ctx, node) + } + + pub(crate) fn empty() -> GenericArgs { + GenericArgs { args: Vec::new(), has_self_type: false, bindings: Vec::new() } + } +} + +impl From for Path { + fn from(name: Name) -> Path { + Path { + type_anchor: None, + mod_path: ModPath::from_segments(PathKind::Plain, iter::once(name)), + generic_args: vec![None], + } + } +} + +impl From for ModPath { + fn from(name: Name) -> ModPath { + ModPath::from_segments(PathKind::Plain, iter::once(name)) + } +} + +impl Display for ModPath { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let 
mut first_segment = true; + let mut add_segment = |s| -> fmt::Result { + if !first_segment { + f.write_str("::")?; + } + first_segment = false; + f.write_str(s)?; + Ok(()) + }; + match self.kind { + PathKind::Plain => {} + PathKind::Super(n) => { + if n == 0 { + add_segment("self")?; + } + for _ in 0..n { + add_segment("super")?; + } + } + PathKind::Crate => add_segment("crate")?, + PathKind::Abs => add_segment("")?, + PathKind::DollarCrate(_) => add_segment("$crate")?, + } + for segment in &self.segments { + if !first_segment { + f.write_str("::")?; + } + first_segment = false; + write!(f, "{}", segment)?; + } + Ok(()) + } +} + +pub use hir_expand::name as __name; + +#[macro_export] +macro_rules! __known_path { + (core::iter::IntoIterator) => {}; + (core::result::Result) => {}; + (core::ops::Range) => {}; + (core::ops::RangeFrom) => {}; + (core::ops::RangeFull) => {}; + (core::ops::RangeTo) => {}; + (core::ops::RangeToInclusive) => {}; + (core::ops::RangeInclusive) => {}; + (core::future::Future) => {}; + (core::ops::Try) => {}; + ($path:path) => { + compile_error!("Please register your known path in the path module") + }; +} + +#[macro_export] +macro_rules! __path { + ($start:ident $(:: $seg:ident)*) => ({ + $crate::__known_path!($start $(:: $seg)*); + $crate::path::ModPath::from_segments($crate::path::PathKind::Abs, vec![ + $crate::path::__name![$start], $($crate::path::__name![$seg],)* + ]) + }); +} + +pub use crate::__path as path; diff --git a/crates/hir_def/src/path/lower.rs b/crates/hir_def/src/path/lower.rs new file mode 100644 index 0000000000..07b9723ce0 --- /dev/null +++ b/crates/hir_def/src/path/lower.rs @@ -0,0 +1,215 @@ +//! 
Transforms syntax into `Path` objects, ideally with accounting for hygiene + +mod lower_use; + +use std::sync::Arc; + +use either::Either; +use hir_expand::{ + hygiene::Hygiene, + name::{name, AsName}, +}; +use syntax::ast::{self, AstNode, TypeBoundsOwner}; + +use super::AssociatedTypeBinding; +use crate::{ + body::LowerCtx, + path::{GenericArg, GenericArgs, ModPath, Path, PathKind}, + type_ref::{TypeBound, TypeRef}, +}; + +pub(super) use lower_use::lower_use_tree; + +/// Converts an `ast::Path` to `Path`. Works with use trees. +/// It correctly handles `$crate` based path from macro call. +pub(super) fn lower_path(mut path: ast::Path, hygiene: &Hygiene) -> Option { + let mut kind = PathKind::Plain; + let mut type_anchor = None; + let mut segments = Vec::new(); + let mut generic_args = Vec::new(); + let ctx = LowerCtx::with_hygiene(hygiene); + loop { + let segment = path.segment()?; + + if segment.coloncolon_token().is_some() { + kind = PathKind::Abs; + } + + match segment.kind()? { + ast::PathSegmentKind::Name(name_ref) => { + // FIXME: this should just return name + match hygiene.name_ref_to_name(name_ref) { + Either::Left(name) => { + let args = segment + .generic_arg_list() + .and_then(|it| lower_generic_args(&ctx, it)) + .or_else(|| { + lower_generic_args_from_fn_path( + &ctx, + segment.param_list(), + segment.ret_type(), + ) + }) + .map(Arc::new); + segments.push(name); + generic_args.push(args) + } + Either::Right(crate_id) => { + kind = PathKind::DollarCrate(crate_id); + break; + } + } + } + ast::PathSegmentKind::Type { type_ref, trait_ref } => { + assert!(path.qualifier().is_none()); // this can only occur at the first segment + + let self_type = TypeRef::from_ast(&ctx, type_ref?); + + match trait_ref { + // ::foo + None => { + type_anchor = Some(Box::new(self_type)); + kind = PathKind::Plain; + } + // >::Foo desugars to Trait::Foo + Some(trait_ref) => { + let path = Path::from_src(trait_ref.path()?, hygiene)?; + kind = path.mod_path.kind; + + let mut 
prefix_segments = path.mod_path.segments; + prefix_segments.reverse(); + segments.extend(prefix_segments); + + let mut prefix_args = path.generic_args; + prefix_args.reverse(); + generic_args.extend(prefix_args); + + // Insert the type reference (T in the above example) as Self parameter for the trait + let last_segment = generic_args.last_mut()?; + if last_segment.is_none() { + *last_segment = Some(Arc::new(GenericArgs::empty())); + }; + let args = last_segment.as_mut().unwrap(); + let mut args_inner = Arc::make_mut(args); + args_inner.has_self_type = true; + args_inner.args.insert(0, GenericArg::Type(self_type)); + } + } + } + ast::PathSegmentKind::CrateKw => { + kind = PathKind::Crate; + break; + } + ast::PathSegmentKind::SelfKw => { + kind = PathKind::Super(0); + break; + } + ast::PathSegmentKind::SuperKw => { + let nested_super_count = if let PathKind::Super(n) = kind { n } else { 0 }; + kind = PathKind::Super(nested_super_count + 1); + } + } + path = match qualifier(&path) { + Some(it) => it, + None => break, + }; + } + segments.reverse(); + generic_args.reverse(); + + // handle local_inner_macros : + // Basically, even in rustc it is quite hacky: + // https://github.com/rust-lang/rust/blob/614f273e9388ddd7804d5cbc80b8865068a3744e/src/librustc_resolve/macros.rs#L456 + // We follow what it did anyway :) + if segments.len() == 1 && kind == PathKind::Plain { + if let Some(macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) { + if macro_call.is_bang() { + if let Some(crate_id) = hygiene.local_inner_macros() { + kind = PathKind::DollarCrate(crate_id); + } + } + } + } + + let mod_path = ModPath { kind, segments }; + return Some(Path { type_anchor, mod_path, generic_args }); + + fn qualifier(path: &ast::Path) -> Option { + if let Some(q) = path.qualifier() { + return Some(q); + } + // FIXME: this bottom up traversal is not too precise. + // Should we handle do a top-down analysis, recording results? 
+ let use_tree_list = path.syntax().ancestors().find_map(ast::UseTreeList::cast)?; + let use_tree = use_tree_list.parent_use_tree(); + use_tree.path() + } +} + +pub(super) fn lower_generic_args( + lower_ctx: &LowerCtx, + node: ast::GenericArgList, +) -> Option { + let mut args = Vec::new(); + let mut bindings = Vec::new(); + for generic_arg in node.generic_args() { + match generic_arg { + ast::GenericArg::TypeArg(type_arg) => { + let type_ref = TypeRef::from_ast_opt(lower_ctx, type_arg.ty()); + args.push(GenericArg::Type(type_ref)); + } + ast::GenericArg::AssocTypeArg(assoc_type_arg) => { + if let Some(name_ref) = assoc_type_arg.name_ref() { + let name = name_ref.as_name(); + let type_ref = assoc_type_arg.ty().map(|it| TypeRef::from_ast(lower_ctx, it)); + let bounds = if let Some(l) = assoc_type_arg.type_bound_list() { + l.bounds().map(|it| TypeBound::from_ast(lower_ctx, it)).collect() + } else { + Vec::new() + }; + bindings.push(AssociatedTypeBinding { name, type_ref, bounds }); + } + } + // Lifetimes and constants are ignored for now. + ast::GenericArg::LifetimeArg(_) | ast::GenericArg::ConstArg(_) => (), + } + } + + if args.is_empty() && bindings.is_empty() { + return None; + } + Some(GenericArgs { args, has_self_type: false, bindings }) +} + +/// Collect `GenericArgs` from the parts of a fn-like path, i.e. `Fn(X, Y) +/// -> Z` (which desugars to `Fn<(X, Y), Output=Z>`). 
+fn lower_generic_args_from_fn_path( + ctx: &LowerCtx, + params: Option, + ret_type: Option, +) -> Option { + let mut args = Vec::new(); + let mut bindings = Vec::new(); + if let Some(params) = params { + let mut param_types = Vec::new(); + for param in params.params() { + let type_ref = TypeRef::from_ast_opt(&ctx, param.ty()); + param_types.push(type_ref); + } + let arg = GenericArg::Type(TypeRef::Tuple(param_types)); + args.push(arg); + } + if let Some(ret_type) = ret_type { + let type_ref = TypeRef::from_ast_opt(&ctx, ret_type.ty()); + bindings.push(AssociatedTypeBinding { + name: name![Output], + type_ref: Some(type_ref), + bounds: Vec::new(), + }); + } + if args.is_empty() && bindings.is_empty() { + None + } else { + Some(GenericArgs { args, has_self_type: false, bindings }) + } +} diff --git a/crates/hir_def/src/path/lower/lower_use.rs b/crates/hir_def/src/path/lower/lower_use.rs new file mode 100644 index 0000000000..53cecb05fb --- /dev/null +++ b/crates/hir_def/src/path/lower/lower_use.rs @@ -0,0 +1,120 @@ +//! Lowers a single complex use like `use foo::{bar, baz};` into a list of paths like +//! `foo::bar`, `foo::baz`; + +use std::iter; + +use either::Either; +use hir_expand::{hygiene::Hygiene, name::AsName}; +use syntax::ast::{self, NameOwner}; +use test_utils::mark; + +use crate::path::{ImportAlias, ModPath, PathKind}; + +pub(crate) fn lower_use_tree( + prefix: Option, + tree: ast::UseTree, + hygiene: &Hygiene, + cb: &mut dyn FnMut(ModPath, &ast::UseTree, bool, Option), +) { + if let Some(use_tree_list) = tree.use_tree_list() { + let prefix = match tree.path() { + // E.g. use something::{{{inner}}}; + None => prefix, + // E.g. 
`use something::{inner}` (prefix is `None`, path is `something`) + // or `use something::{path::{inner::{innerer}}}` (prefix is `something::path`, path is `inner`) + Some(path) => match convert_path(prefix, path, hygiene) { + Some(it) => Some(it), + None => return, // FIXME: report errors somewhere + }, + }; + for child_tree in use_tree_list.use_trees() { + lower_use_tree(prefix.clone(), child_tree, hygiene, cb); + } + } else { + let alias = tree.rename().map(|a| { + a.name().map(|it| it.as_name()).map_or(ImportAlias::Underscore, ImportAlias::Alias) + }); + let is_glob = tree.star_token().is_some(); + if let Some(ast_path) = tree.path() { + // Handle self in a path. + // E.g. `use something::{self, <...>}` + if ast_path.qualifier().is_none() { + if let Some(segment) = ast_path.segment() { + if segment.kind() == Some(ast::PathSegmentKind::SelfKw) { + if let Some(prefix) = prefix { + cb(prefix, &tree, false, alias); + return; + } + } + } + } + if let Some(path) = convert_path(prefix, ast_path, hygiene) { + cb(path, &tree, is_glob, alias) + } + // FIXME: report errors somewhere + // We get here if we do + } else if is_glob { + mark::hit!(glob_enum_group); + if let Some(prefix) = prefix { + cb(prefix, &tree, is_glob, None) + } + } + } +} + +fn convert_path(prefix: Option, path: ast::Path, hygiene: &Hygiene) -> Option { + let prefix = if let Some(qual) = path.qualifier() { + Some(convert_path(prefix, qual, hygiene)?) + } else { + prefix + }; + + let segment = path.segment()?; + let res = match segment.kind()? 
{ + ast::PathSegmentKind::Name(name_ref) => { + match hygiene.name_ref_to_name(name_ref) { + Either::Left(name) => { + // no type args in use + let mut res = prefix.unwrap_or_else(|| ModPath { + kind: PathKind::Plain, + segments: Vec::with_capacity(1), + }); + res.segments.push(name); + res + } + Either::Right(crate_id) => { + return Some(ModPath::from_segments( + PathKind::DollarCrate(crate_id), + iter::empty(), + )) + } + } + } + ast::PathSegmentKind::CrateKw => { + if prefix.is_some() { + return None; + } + ModPath::from_segments(PathKind::Crate, iter::empty()) + } + ast::PathSegmentKind::SelfKw => { + if prefix.is_some() { + return None; + } + ModPath::from_segments(PathKind::Super(0), iter::empty()) + } + ast::PathSegmentKind::SuperKw => { + let nested_super_count = match prefix.map(|p| p.kind) { + Some(PathKind::Super(n)) => n, + Some(_) => return None, + None => 0, + }; + + ModPath::from_segments(PathKind::Super(nested_super_count + 1), iter::empty()) + } + ast::PathSegmentKind::Type { .. } => { + // not allowed in imports + return None; + } + }; + Some(res) +} diff --git a/crates/ra_hir_def/src/per_ns.rs b/crates/hir_def/src/per_ns.rs similarity index 100% rename from crates/ra_hir_def/src/per_ns.rs rename to crates/hir_def/src/per_ns.rs diff --git a/crates/hir_def/src/resolver.rs b/crates/hir_def/src/resolver.rs new file mode 100644 index 0000000000..f8cc5e075e --- /dev/null +++ b/crates/hir_def/src/resolver.rs @@ -0,0 +1,713 @@ +//! Name resolution façade. 
+use std::sync::Arc; + +use base_db::CrateId; +use hir_expand::{ + name::{name, Name}, + MacroDefId, +}; +use rustc_hash::FxHashSet; + +use crate::{ + body::scope::{ExprScopes, ScopeId}, + body::Body, + builtin_type::BuiltinType, + db::DefDatabase, + expr::{ExprId, PatId}, + generics::GenericParams, + item_scope::{BuiltinShadowMode, BUILTIN_SCOPE}, + nameres::CrateDefMap, + path::{ModPath, PathKind}, + per_ns::PerNs, + visibility::{RawVisibility, Visibility}, + AdtId, AssocContainerId, ConstId, ContainerId, DefWithBodyId, EnumId, EnumVariantId, + FunctionId, GenericDefId, HasModule, ImplId, LocalModuleId, Lookup, ModuleDefId, ModuleId, + StaticId, StructId, TraitId, TypeAliasId, TypeParamId, VariantId, +}; + +#[derive(Debug, Clone, Default)] +pub struct Resolver { + // FIXME: all usages generally call `.rev`, so maybe reverse once in consturciton? + scopes: Vec, +} + +// FIXME how to store these best +#[derive(Debug, Clone)] +struct ModuleItemMap { + crate_def_map: Arc, + module_id: LocalModuleId, +} + +#[derive(Debug, Clone)] +struct ExprScope { + owner: DefWithBodyId, + expr_scopes: Arc, + scope_id: ScopeId, +} + +#[derive(Debug, Clone)] +enum Scope { + /// All the items and imported names of a module + ModuleScope(ModuleItemMap), + /// Brings the generic parameters of an item into scope + GenericParams { def: GenericDefId, params: Arc }, + /// Brings `Self` in `impl` block into scope + ImplDefScope(ImplId), + /// Brings `Self` in enum, struct and union definitions into scope + AdtScope(AdtId), + /// Local bindings + ExprScope(ExprScope), + /// Temporary hack to support local items. + LocalItemsScope(Arc), +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum TypeNs { + SelfType(ImplId), + GenericParam(TypeParamId), + AdtId(AdtId), + AdtSelfType(AdtId), + // Yup, enum variants are added to the types ns, but any usage of variant as + // type is an error. 
+ EnumVariantId(EnumVariantId), + TypeAliasId(TypeAliasId), + BuiltinType(BuiltinType), + TraitId(TraitId), + // Module belong to type ns, but the resolver is used when all module paths + // are fully resolved. + // ModuleId(ModuleId) +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum ResolveValueResult { + ValueNs(ValueNs), + Partial(TypeNs, usize), +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum ValueNs { + ImplSelf(ImplId), + LocalBinding(PatId), + FunctionId(FunctionId), + ConstId(ConstId), + StaticId(StaticId), + StructId(StructId), + EnumVariantId(EnumVariantId), +} + +impl Resolver { + /// Resolve known trait from std, like `std::futures::Future` + pub fn resolve_known_trait(&self, db: &dyn DefDatabase, path: &ModPath) -> Option { + let res = self.resolve_module_path(db, path, BuiltinShadowMode::Other).take_types()?; + match res { + ModuleDefId::TraitId(it) => Some(it), + _ => None, + } + } + + /// Resolve known struct from std, like `std::boxed::Box` + pub fn resolve_known_struct(&self, db: &dyn DefDatabase, path: &ModPath) -> Option { + let res = self.resolve_module_path(db, path, BuiltinShadowMode::Other).take_types()?; + match res { + ModuleDefId::AdtId(AdtId::StructId(it)) => Some(it), + _ => None, + } + } + + /// Resolve known enum from std, like `std::result::Result` + pub fn resolve_known_enum(&self, db: &dyn DefDatabase, path: &ModPath) -> Option { + let res = self.resolve_module_path(db, path, BuiltinShadowMode::Other).take_types()?; + match res { + ModuleDefId::AdtId(AdtId::EnumId(it)) => Some(it), + _ => None, + } + } + + fn resolve_module_path( + &self, + db: &dyn DefDatabase, + path: &ModPath, + shadow: BuiltinShadowMode, + ) -> PerNs { + let (item_map, module) = match self.module_scope() { + Some(it) => it, + None => return PerNs::none(), + }; + let (module_res, segment_index) = item_map.resolve_path(db, module, &path, shadow); + if segment_index.is_some() { + return PerNs::none(); + } + module_res + } + + pub fn 
resolve_module_path_in_items(&self, db: &dyn DefDatabase, path: &ModPath) -> PerNs { + self.resolve_module_path(db, path, BuiltinShadowMode::Module) + } + + pub fn resolve_path_in_type_ns( + &self, + db: &dyn DefDatabase, + path: &ModPath, + ) -> Option<(TypeNs, Option)> { + let first_name = path.segments.first()?; + let skip_to_mod = path.kind != PathKind::Plain; + for scope in self.scopes.iter().rev() { + match scope { + Scope::ExprScope(_) => continue, + Scope::GenericParams { .. } + | Scope::ImplDefScope(_) + | Scope::LocalItemsScope(_) + if skip_to_mod => + { + continue + } + + Scope::GenericParams { params, def } => { + if let Some(local_id) = params.find_by_name(first_name) { + let idx = if path.segments.len() == 1 { None } else { Some(1) }; + return Some(( + TypeNs::GenericParam(TypeParamId { local_id, parent: *def }), + idx, + )); + } + } + Scope::ImplDefScope(impl_) => { + if first_name == &name![Self] { + let idx = if path.segments.len() == 1 { None } else { Some(1) }; + return Some((TypeNs::SelfType(*impl_), idx)); + } + } + Scope::AdtScope(adt) => { + if first_name == &name![Self] { + let idx = if path.segments.len() == 1 { None } else { Some(1) }; + return Some((TypeNs::AdtSelfType(*adt), idx)); + } + } + Scope::ModuleScope(m) => { + let (module_def, idx) = m.crate_def_map.resolve_path( + db, + m.module_id, + &path, + BuiltinShadowMode::Other, + ); + let res = to_type_ns(module_def)?; + return Some((res, idx)); + } + Scope::LocalItemsScope(body) => { + let def = body.item_scope.get(first_name); + if let Some(res) = to_type_ns(def) { + return Some((res, None)); + } + } + } + } + return None; + fn to_type_ns(per_ns: PerNs) -> Option { + let res = match per_ns.take_types()? 
{ + ModuleDefId::AdtId(it) => TypeNs::AdtId(it), + ModuleDefId::EnumVariantId(it) => TypeNs::EnumVariantId(it), + + ModuleDefId::TypeAliasId(it) => TypeNs::TypeAliasId(it), + ModuleDefId::BuiltinType(it) => TypeNs::BuiltinType(it), + + ModuleDefId::TraitId(it) => TypeNs::TraitId(it), + + ModuleDefId::FunctionId(_) + | ModuleDefId::ConstId(_) + | ModuleDefId::StaticId(_) + | ModuleDefId::ModuleId(_) => return None, + }; + Some(res) + } + } + + pub fn resolve_path_in_type_ns_fully( + &self, + db: &dyn DefDatabase, + path: &ModPath, + ) -> Option { + let (res, unresolved) = self.resolve_path_in_type_ns(db, path)?; + if unresolved.is_some() { + return None; + } + Some(res) + } + + pub fn resolve_visibility( + &self, + db: &dyn DefDatabase, + visibility: &RawVisibility, + ) -> Option { + match visibility { + RawVisibility::Module(_) => { + let (item_map, module) = match self.module_scope() { + Some(it) => it, + None => return None, + }; + item_map.resolve_visibility(db, module, visibility) + } + RawVisibility::Public => Some(Visibility::Public), + } + } + + pub fn resolve_path_in_value_ns( + &self, + db: &dyn DefDatabase, + path: &ModPath, + ) -> Option { + let n_segments = path.segments.len(); + let tmp = name![self]; + let first_name = if path.is_self() { &tmp } else { &path.segments.first()? }; + let skip_to_mod = path.kind != PathKind::Plain && !path.is_self(); + for scope in self.scopes.iter().rev() { + match scope { + Scope::AdtScope(_) + | Scope::ExprScope(_) + | Scope::GenericParams { .. 
} + | Scope::ImplDefScope(_) + | Scope::LocalItemsScope(_) + if skip_to_mod => + { + continue + } + + Scope::ExprScope(scope) if n_segments <= 1 => { + let entry = scope + .expr_scopes + .entries(scope.scope_id) + .iter() + .find(|entry| entry.name() == first_name); + + if let Some(e) = entry { + return Some(ResolveValueResult::ValueNs(ValueNs::LocalBinding(e.pat()))); + } + } + Scope::ExprScope(_) => continue, + + Scope::GenericParams { params, def } if n_segments > 1 => { + if let Some(local_id) = params.find_by_name(first_name) { + let ty = TypeNs::GenericParam(TypeParamId { local_id, parent: *def }); + return Some(ResolveValueResult::Partial(ty, 1)); + } + } + Scope::GenericParams { .. } => continue, + + Scope::ImplDefScope(impl_) => { + if first_name == &name![Self] { + if n_segments > 1 { + let ty = TypeNs::SelfType(*impl_); + return Some(ResolveValueResult::Partial(ty, 1)); + } else { + return Some(ResolveValueResult::ValueNs(ValueNs::ImplSelf(*impl_))); + } + } + } + Scope::AdtScope(adt) => { + if n_segments == 1 { + // bare `Self` doesn't work in the value namespace in a struct/enum definition + continue; + } + if first_name == &name![Self] { + let ty = TypeNs::AdtSelfType(*adt); + return Some(ResolveValueResult::Partial(ty, 1)); + } + } + + Scope::ModuleScope(m) => { + let (module_def, idx) = m.crate_def_map.resolve_path( + db, + m.module_id, + &path, + BuiltinShadowMode::Other, + ); + return match idx { + None => { + let value = to_value_ns(module_def)?; + Some(ResolveValueResult::ValueNs(value)) + } + Some(idx) => { + let ty = match module_def.take_types()? 
{ + ModuleDefId::AdtId(it) => TypeNs::AdtId(it), + ModuleDefId::TraitId(it) => TypeNs::TraitId(it), + ModuleDefId::TypeAliasId(it) => TypeNs::TypeAliasId(it), + ModuleDefId::BuiltinType(it) => TypeNs::BuiltinType(it), + + ModuleDefId::ModuleId(_) + | ModuleDefId::FunctionId(_) + | ModuleDefId::EnumVariantId(_) + | ModuleDefId::ConstId(_) + | ModuleDefId::StaticId(_) => return None, + }; + Some(ResolveValueResult::Partial(ty, idx)) + } + }; + } + Scope::LocalItemsScope(body) => { + // we don't bother looking in the builtin scope here because there are no builtin values + let def = to_value_ns(body.item_scope.get(first_name)); + + if let Some(res) = def { + return Some(ResolveValueResult::ValueNs(res)); + } + } + } + } + return None; + + fn to_value_ns(per_ns: PerNs) -> Option { + let res = match per_ns.take_values()? { + ModuleDefId::FunctionId(it) => ValueNs::FunctionId(it), + ModuleDefId::AdtId(AdtId::StructId(it)) => ValueNs::StructId(it), + ModuleDefId::EnumVariantId(it) => ValueNs::EnumVariantId(it), + ModuleDefId::ConstId(it) => ValueNs::ConstId(it), + ModuleDefId::StaticId(it) => ValueNs::StaticId(it), + + ModuleDefId::AdtId(AdtId::EnumId(_)) + | ModuleDefId::AdtId(AdtId::UnionId(_)) + | ModuleDefId::TraitId(_) + | ModuleDefId::TypeAliasId(_) + | ModuleDefId::BuiltinType(_) + | ModuleDefId::ModuleId(_) => return None, + }; + Some(res) + } + } + + pub fn resolve_path_in_value_ns_fully( + &self, + db: &dyn DefDatabase, + path: &ModPath, + ) -> Option { + match self.resolve_path_in_value_ns(db, path)? { + ResolveValueResult::ValueNs(it) => Some(it), + ResolveValueResult::Partial(..) 
=> None, + } + } + + pub fn resolve_path_as_macro( + &self, + db: &dyn DefDatabase, + path: &ModPath, + ) -> Option { + // Search item scope legacy macro first + if let Some(def) = self.resolve_local_macro_def(path) { + return Some(def); + } + + let (item_map, module) = self.module_scope()?; + item_map.resolve_path(db, module, &path, BuiltinShadowMode::Other).0.take_macros() + } + + pub fn process_all_names(&self, db: &dyn DefDatabase, f: &mut dyn FnMut(Name, ScopeDef)) { + for scope in self.scopes.iter().rev() { + scope.process_names(db, f); + } + } + + pub fn traits_in_scope(&self, db: &dyn DefDatabase) -> FxHashSet { + let mut traits = FxHashSet::default(); + for scope in &self.scopes { + if let Scope::ModuleScope(m) = scope { + if let Some(prelude) = m.crate_def_map.prelude { + let prelude_def_map = db.crate_def_map(prelude.krate); + traits.extend(prelude_def_map[prelude.local_id].scope.traits()); + } + traits.extend(m.crate_def_map[m.module_id].scope.traits()); + } + } + traits + } + + fn module_scope(&self) -> Option<(&CrateDefMap, LocalModuleId)> { + self.scopes.iter().rev().find_map(|scope| match scope { + Scope::ModuleScope(m) => Some((&*m.crate_def_map, m.module_id)), + + _ => None, + }) + } + + fn resolve_local_macro_def(&self, path: &ModPath) -> Option { + let name = path.as_ident()?; + self.scopes.iter().rev().find_map(|scope| { + if let Scope::LocalItemsScope(body) = scope { + return body.item_scope.get_legacy_macro(name); + } + None + }) + } + + pub fn module(&self) -> Option { + let (def_map, local_id) = self.module_scope()?; + Some(ModuleId { krate: def_map.krate, local_id }) + } + + pub fn krate(&self) -> Option { + self.module_scope().map(|t| t.0.krate) + } + + pub fn where_predicates_in_scope<'a>( + &'a self, + ) -> impl Iterator + 'a { + self.scopes + .iter() + .rev() + .filter_map(|scope| match scope { + Scope::GenericParams { params, .. 
} => Some(params), + _ => None, + }) + .flat_map(|params| params.where_predicates.iter()) + } + + pub fn generic_def(&self) -> Option { + self.scopes.iter().rev().find_map(|scope| match scope { + Scope::GenericParams { def, .. } => Some(*def), + _ => None, + }) + } + + pub fn body_owner(&self) -> Option { + self.scopes.iter().rev().find_map(|scope| match scope { + Scope::ExprScope(it) => Some(it.owner), + _ => None, + }) + } +} + +pub enum ScopeDef { + PerNs(PerNs), + ImplSelfType(ImplId), + AdtSelfType(AdtId), + GenericParam(TypeParamId), + Local(PatId), +} + +impl Scope { + fn process_names(&self, db: &dyn DefDatabase, f: &mut dyn FnMut(Name, ScopeDef)) { + match self { + Scope::ModuleScope(m) => { + // FIXME: should we provide `self` here? + // f( + // Name::self_param(), + // PerNs::types(Resolution::Def { + // def: m.module.into(), + // }), + // ); + m.crate_def_map[m.module_id].scope.entries().for_each(|(name, def)| { + f(name.clone(), ScopeDef::PerNs(def)); + }); + m.crate_def_map[m.module_id].scope.legacy_macros().for_each(|(name, macro_)| { + f(name.clone(), ScopeDef::PerNs(PerNs::macros(macro_, Visibility::Public))); + }); + m.crate_def_map.extern_prelude.iter().for_each(|(name, &def)| { + f(name.clone(), ScopeDef::PerNs(PerNs::types(def, Visibility::Public))); + }); + BUILTIN_SCOPE.iter().for_each(|(name, &def)| { + f(name.clone(), ScopeDef::PerNs(def)); + }); + if let Some(prelude) = m.crate_def_map.prelude { + let prelude_def_map = db.crate_def_map(prelude.krate); + prelude_def_map[prelude.local_id].scope.entries().for_each(|(name, def)| { + f(name.clone(), ScopeDef::PerNs(def)); + }); + } + } + Scope::LocalItemsScope(body) => body.item_scope.entries().for_each(|(name, def)| { + f(name.clone(), ScopeDef::PerNs(def)); + }), + Scope::GenericParams { params, def } => { + for (local_id, param) in params.types.iter() { + if let Some(name) = ¶m.name { + f( + name.clone(), + ScopeDef::GenericParam(TypeParamId { local_id, parent: *def }), + ) + } + } + } + 
Scope::ImplDefScope(i) => { + f(name![Self], ScopeDef::ImplSelfType(*i)); + } + Scope::AdtScope(i) => { + f(name![Self], ScopeDef::AdtSelfType(*i)); + } + Scope::ExprScope(scope) => { + scope.expr_scopes.entries(scope.scope_id).iter().for_each(|e| { + f(e.name().clone(), ScopeDef::Local(e.pat())); + }); + } + } + } +} + +// needs arbitrary_self_types to be a method... or maybe move to the def? +pub fn resolver_for_expr(db: &dyn DefDatabase, owner: DefWithBodyId, expr_id: ExprId) -> Resolver { + let scopes = db.expr_scopes(owner); + resolver_for_scope(db, owner, scopes.scope_for(expr_id)) +} + +pub fn resolver_for_scope( + db: &dyn DefDatabase, + owner: DefWithBodyId, + scope_id: Option, +) -> Resolver { + let mut r = owner.resolver(db); + r = r.push_local_items_scope(db.body(owner)); + let scopes = db.expr_scopes(owner); + let scope_chain = scopes.scope_chain(scope_id).collect::>(); + for scope in scope_chain.into_iter().rev() { + r = r.push_expr_scope(owner, Arc::clone(&scopes), scope); + } + r +} + +impl Resolver { + fn push_scope(mut self, scope: Scope) -> Resolver { + self.scopes.push(scope); + self + } + + fn push_generic_params_scope(self, db: &dyn DefDatabase, def: GenericDefId) -> Resolver { + let params = db.generic_params(def); + self.push_scope(Scope::GenericParams { def, params }) + } + + fn push_impl_def_scope(self, impl_def: ImplId) -> Resolver { + self.push_scope(Scope::ImplDefScope(impl_def)) + } + + fn push_module_scope( + self, + crate_def_map: Arc, + module_id: LocalModuleId, + ) -> Resolver { + self.push_scope(Scope::ModuleScope(ModuleItemMap { crate_def_map, module_id })) + } + + fn push_local_items_scope(self, body: Arc) -> Resolver { + self.push_scope(Scope::LocalItemsScope(body)) + } + + fn push_expr_scope( + self, + owner: DefWithBodyId, + expr_scopes: Arc, + scope_id: ScopeId, + ) -> Resolver { + self.push_scope(Scope::ExprScope(ExprScope { owner, expr_scopes, scope_id })) + } +} + +pub trait HasResolver: Copy { + /// Builds a resolver for 
type references inside this def. + fn resolver(self, db: &dyn DefDatabase) -> Resolver; +} + +impl HasResolver for ModuleId { + fn resolver(self, db: &dyn DefDatabase) -> Resolver { + let def_map = db.crate_def_map(self.krate); + Resolver::default().push_module_scope(def_map, self.local_id) + } +} + +impl HasResolver for TraitId { + fn resolver(self, db: &dyn DefDatabase) -> Resolver { + self.lookup(db).container.resolver(db).push_generic_params_scope(db, self.into()) + } +} + +impl + Copy> HasResolver for T { + fn resolver(self, db: &dyn DefDatabase) -> Resolver { + let def = self.into(); + def.module(db) + .resolver(db) + .push_generic_params_scope(db, def.into()) + .push_scope(Scope::AdtScope(def)) + } +} + +impl HasResolver for FunctionId { + fn resolver(self, db: &dyn DefDatabase) -> Resolver { + self.lookup(db).container.resolver(db).push_generic_params_scope(db, self.into()) + } +} + +impl HasResolver for ConstId { + fn resolver(self, db: &dyn DefDatabase) -> Resolver { + self.lookup(db).container.resolver(db) + } +} + +impl HasResolver for StaticId { + fn resolver(self, db: &dyn DefDatabase) -> Resolver { + self.lookup(db).container.resolver(db) + } +} + +impl HasResolver for TypeAliasId { + fn resolver(self, db: &dyn DefDatabase) -> Resolver { + self.lookup(db).container.resolver(db).push_generic_params_scope(db, self.into()) + } +} + +impl HasResolver for ImplId { + fn resolver(self, db: &dyn DefDatabase) -> Resolver { + self.lookup(db) + .container + .resolver(db) + .push_generic_params_scope(db, self.into()) + .push_impl_def_scope(self) + } +} + +impl HasResolver for DefWithBodyId { + fn resolver(self, db: &dyn DefDatabase) -> Resolver { + match self { + DefWithBodyId::ConstId(c) => c.resolver(db), + DefWithBodyId::FunctionId(f) => f.resolver(db), + DefWithBodyId::StaticId(s) => s.resolver(db), + } + } +} + +impl HasResolver for ContainerId { + fn resolver(self, db: &dyn DefDatabase) -> Resolver { + match self { + ContainerId::ModuleId(it) => 
it.resolver(db), + ContainerId::DefWithBodyId(it) => it.module(db).resolver(db), + } + } +} + +impl HasResolver for AssocContainerId { + fn resolver(self, db: &dyn DefDatabase) -> Resolver { + match self { + AssocContainerId::ContainerId(it) => it.resolver(db), + AssocContainerId::TraitId(it) => it.resolver(db), + AssocContainerId::ImplId(it) => it.resolver(db), + } + } +} + +impl HasResolver for GenericDefId { + fn resolver(self, db: &dyn DefDatabase) -> Resolver { + match self { + GenericDefId::FunctionId(inner) => inner.resolver(db), + GenericDefId::AdtId(adt) => adt.resolver(db), + GenericDefId::TraitId(inner) => inner.resolver(db), + GenericDefId::TypeAliasId(inner) => inner.resolver(db), + GenericDefId::ImplId(inner) => inner.resolver(db), + GenericDefId::EnumVariantId(inner) => inner.parent.resolver(db), + GenericDefId::ConstId(inner) => inner.resolver(db), + } + } +} + +impl HasResolver for VariantId { + fn resolver(self, db: &dyn DefDatabase) -> Resolver { + match self { + VariantId::EnumVariantId(it) => it.parent.resolver(db), + VariantId::StructId(it) => it.resolver(db), + VariantId::UnionId(it) => it.resolver(db), + } + } +} diff --git a/crates/hir_def/src/src.rs b/crates/hir_def/src/src.rs new file mode 100644 index 0000000000..7a79b03149 --- /dev/null +++ b/crates/hir_def/src/src.rs @@ -0,0 +1,43 @@ +//! Utilities for mapping between hir IDs and the surface syntax. 
+ +use arena::map::ArenaMap; +use hir_expand::InFile; + +use crate::{db::DefDatabase, item_tree::ItemTreeNode, AssocItemLoc, ItemLoc}; + +pub trait HasSource { + type Value; + fn source(&self, db: &dyn DefDatabase) -> InFile; +} + +impl HasSource for AssocItemLoc { + type Value = N::Source; + + fn source(&self, db: &dyn DefDatabase) -> InFile { + let tree = db.item_tree(self.id.file_id); + let ast_id_map = db.ast_id_map(self.id.file_id); + let root = db.parse_or_expand(self.id.file_id).unwrap(); + let node = &tree[self.id.value]; + + InFile::new(self.id.file_id, ast_id_map.get(node.ast_id()).to_node(&root)) + } +} + +impl HasSource for ItemLoc { + type Value = N::Source; + + fn source(&self, db: &dyn DefDatabase) -> InFile { + let tree = db.item_tree(self.id.file_id); + let ast_id_map = db.ast_id_map(self.id.file_id); + let root = db.parse_or_expand(self.id.file_id).unwrap(); + let node = &tree[self.id.value]; + + InFile::new(self.id.file_id, ast_id_map.get(node.ast_id()).to_node(&root)) + } +} + +pub trait HasChildSource { + type ChildId; + type Value; + fn child_source(&self, db: &dyn DefDatabase) -> InFile>; +} diff --git a/crates/hir_def/src/test_db.rs b/crates/hir_def/src/test_db.rs new file mode 100644 index 0000000000..42a762936d --- /dev/null +++ b/crates/hir_def/src/test_db.rs @@ -0,0 +1,101 @@ +//! Database used for testing `hir_def`. 
+ +use std::{ + fmt, panic, + sync::{Arc, Mutex}, +}; + +use base_db::{salsa, CrateId, FileId, FileLoader, FileLoaderDelegate, Upcast}; +use hir_expand::db::AstDatabase; +use rustc_hash::FxHashSet; + +use crate::db::DefDatabase; + +#[salsa::database( + base_db::SourceDatabaseExtStorage, + base_db::SourceDatabaseStorage, + hir_expand::db::AstDatabaseStorage, + crate::db::InternDatabaseStorage, + crate::db::DefDatabaseStorage +)] +#[derive(Default)] +pub struct TestDB { + storage: salsa::Storage, + events: Mutex>>, +} + +impl Upcast for TestDB { + fn upcast(&self) -> &(dyn AstDatabase + 'static) { + &*self + } +} + +impl Upcast for TestDB { + fn upcast(&self) -> &(dyn DefDatabase + 'static) { + &*self + } +} + +impl salsa::Database for TestDB { + fn salsa_event(&self, event: salsa::Event) { + let mut events = self.events.lock().unwrap(); + if let Some(events) = &mut *events { + events.push(event); + } + } +} + +impl fmt::Debug for TestDB { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("TestDB").finish() + } +} + +impl panic::RefUnwindSafe for TestDB {} + +impl FileLoader for TestDB { + fn file_text(&self, file_id: FileId) -> Arc { + FileLoaderDelegate(self).file_text(file_id) + } + fn resolve_path(&self, anchor: FileId, path: &str) -> Option { + FileLoaderDelegate(self).resolve_path(anchor, path) + } + fn relevant_crates(&self, file_id: FileId) -> Arc> { + FileLoaderDelegate(self).relevant_crates(file_id) + } +} + +impl TestDB { + pub fn module_for_file(&self, file_id: FileId) -> crate::ModuleId { + for &krate in self.relevant_crates(file_id).iter() { + let crate_def_map = self.crate_def_map(krate); + for (local_id, data) in crate_def_map.modules.iter() { + if data.origin.file_id() == Some(file_id) { + return crate::ModuleId { krate, local_id }; + } + } + } + panic!("Can't find module for file") + } + + pub fn log(&self, f: impl FnOnce()) -> Vec { + *self.events.lock().unwrap() = Some(Vec::new()); + f(); + 
self.events.lock().unwrap().take().unwrap() + } + + pub fn log_executed(&self, f: impl FnOnce()) -> Vec { + let events = self.log(f); + events + .into_iter() + .filter_map(|e| match e.kind { + // This pretty horrible, but `Debug` is the only way to inspect + // QueryDescriptor at the moment. + salsa::EventKind::WillExecute { database_key } => { + Some(format!("{:?}", database_key.debug(self))) + } + _ => None, + }) + .collect() + } +} diff --git a/crates/hir_def/src/trace.rs b/crates/hir_def/src/trace.rs new file mode 100644 index 0000000000..fd64e70188 --- /dev/null +++ b/crates/hir_def/src/trace.rs @@ -0,0 +1,51 @@ +//! Trace is a pretty niche data structure which is used when lowering a CST +//! into HIR. +//! +//! Lowering process calculates two bits of information: +//! * the lowered syntax itself +//! * a mapping between lowered syntax and original syntax +//! +//! Due to the way salsa works, the mapping is usually hot lava, as it contains +//! absolute offsets. The `Trace` structure (inspired, at least in name, by +//! Kotlin's `BindingTrace`) allows use the same code to compute both +//! projections. 
+use arena::{map::ArenaMap, Arena, Idx, RawId}; + +pub(crate) struct Trace { + arena: Option>, + map: Option, V>>, + len: u32, +} + +impl Trace { + pub(crate) fn new_for_arena() -> Trace { + Trace { arena: Some(Arena::default()), map: None, len: 0 } + } + + pub(crate) fn new_for_map() -> Trace { + Trace { arena: None, map: Some(ArenaMap::default()), len: 0 } + } + + pub(crate) fn alloc(&mut self, value: impl FnOnce() -> V, data: impl FnOnce() -> T) -> Idx { + let id = if let Some(arena) = &mut self.arena { + arena.alloc(data()) + } else { + let id = Idx::::from_raw(RawId::from(self.len)); + self.len += 1; + id + }; + + if let Some(map) = &mut self.map { + map.insert(id, value()); + } + id + } + + pub(crate) fn into_arena(mut self) -> Arena { + self.arena.take().unwrap() + } + + pub(crate) fn into_map(mut self) -> ArenaMap, V> { + self.map.take().unwrap() + } +} diff --git a/crates/hir_def/src/type_ref.rs b/crates/hir_def/src/type_ref.rs new file mode 100644 index 0000000000..1a78c1444c --- /dev/null +++ b/crates/hir_def/src/type_ref.rs @@ -0,0 +1,245 @@ +//! HIR for references to types. Paths in these are not yet resolved. They can +//! be directly created from an ast::TypeRef, without further queries. 
+use syntax::ast::{self}; + +use crate::{body::LowerCtx, path::Path}; + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +pub enum Mutability { + Shared, + Mut, +} + +impl Mutability { + pub fn from_mutable(mutable: bool) -> Mutability { + if mutable { + Mutability::Mut + } else { + Mutability::Shared + } + } + + pub fn as_keyword_for_ref(self) -> &'static str { + match self { + Mutability::Shared => "", + Mutability::Mut => "mut ", + } + } + + pub fn as_keyword_for_ptr(self) -> &'static str { + match self { + Mutability::Shared => "const ", + Mutability::Mut => "mut ", + } + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +pub enum Rawness { + RawPtr, + Ref, +} + +impl Rawness { + pub fn from_raw(is_raw: bool) -> Rawness { + if is_raw { + Rawness::RawPtr + } else { + Rawness::Ref + } + } +} + +/// Compare ty::Ty +#[derive(Clone, PartialEq, Eq, Hash, Debug)] +pub enum TypeRef { + Never, + Placeholder, + Tuple(Vec), + Path(Path), + RawPtr(Box, Mutability), + Reference(Box, Mutability), + Array(Box /*, Expr*/), + Slice(Box), + /// A fn pointer. Last element of the vector is the return type. + Fn(Vec, bool /*varargs*/), + // For + ImplTrait(Vec), + DynTrait(Vec), + Error, +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug)] +pub enum TypeBound { + Path(Path), + // also for<> bounds + // also Lifetimes + Error, +} + +impl TypeRef { + /// Converts an `ast::TypeRef` to a `hir::TypeRef`. + pub(crate) fn from_ast(ctx: &LowerCtx, node: ast::Type) -> Self { + match node { + ast::Type::ParenType(inner) => TypeRef::from_ast_opt(&ctx, inner.ty()), + ast::Type::TupleType(inner) => { + TypeRef::Tuple(inner.fields().map(|it| TypeRef::from_ast(ctx, it)).collect()) + } + ast::Type::NeverType(..) 
=> TypeRef::Never, + ast::Type::PathType(inner) => { + // FIXME: Use `Path::from_src` + inner + .path() + .and_then(|it| ctx.lower_path(it)) + .map(TypeRef::Path) + .unwrap_or(TypeRef::Error) + } + ast::Type::PtrType(inner) => { + let inner_ty = TypeRef::from_ast_opt(&ctx, inner.ty()); + let mutability = Mutability::from_mutable(inner.mut_token().is_some()); + TypeRef::RawPtr(Box::new(inner_ty), mutability) + } + ast::Type::ArrayType(inner) => { + TypeRef::Array(Box::new(TypeRef::from_ast_opt(&ctx, inner.ty()))) + } + ast::Type::SliceType(inner) => { + TypeRef::Slice(Box::new(TypeRef::from_ast_opt(&ctx, inner.ty()))) + } + ast::Type::RefType(inner) => { + let inner_ty = TypeRef::from_ast_opt(&ctx, inner.ty()); + let mutability = Mutability::from_mutable(inner.mut_token().is_some()); + TypeRef::Reference(Box::new(inner_ty), mutability) + } + ast::Type::InferType(_inner) => TypeRef::Placeholder, + ast::Type::FnPtrType(inner) => { + let ret_ty = inner + .ret_type() + .and_then(|rt| rt.ty()) + .map(|it| TypeRef::from_ast(ctx, it)) + .unwrap_or_else(|| TypeRef::Tuple(Vec::new())); + let mut is_varargs = false; + let mut params = if let Some(pl) = inner.param_list() { + if let Some(param) = pl.params().last() { + is_varargs = param.dotdotdot_token().is_some(); + } + + pl.params().map(|p| p.ty()).map(|it| TypeRef::from_ast_opt(&ctx, it)).collect() + } else { + Vec::new() + }; + params.push(ret_ty); + TypeRef::Fn(params, is_varargs) + } + // for types are close enough for our purposes to the inner type for now... 
+ ast::Type::ForType(inner) => TypeRef::from_ast_opt(&ctx, inner.ty()), + ast::Type::ImplTraitType(inner) => { + TypeRef::ImplTrait(type_bounds_from_ast(ctx, inner.type_bound_list())) + } + ast::Type::DynTraitType(inner) => { + TypeRef::DynTrait(type_bounds_from_ast(ctx, inner.type_bound_list())) + } + } + } + + pub(crate) fn from_ast_opt(ctx: &LowerCtx, node: Option) -> Self { + if let Some(node) = node { + TypeRef::from_ast(ctx, node) + } else { + TypeRef::Error + } + } + + pub(crate) fn unit() -> TypeRef { + TypeRef::Tuple(Vec::new()) + } + + pub fn walk(&self, f: &mut impl FnMut(&TypeRef)) { + go(self, f); + + fn go(type_ref: &TypeRef, f: &mut impl FnMut(&TypeRef)) { + f(type_ref); + match type_ref { + TypeRef::Fn(types, _) | TypeRef::Tuple(types) => { + types.iter().for_each(|t| go(t, f)) + } + TypeRef::RawPtr(type_ref, _) + | TypeRef::Reference(type_ref, _) + | TypeRef::Array(type_ref) + | TypeRef::Slice(type_ref) => go(&type_ref, f), + TypeRef::ImplTrait(bounds) | TypeRef::DynTrait(bounds) => { + for bound in bounds { + match bound { + TypeBound::Path(path) => go_path(path, f), + TypeBound::Error => (), + } + } + } + TypeRef::Path(path) => go_path(path, f), + TypeRef::Never | TypeRef::Placeholder | TypeRef::Error => {} + }; + } + + fn go_path(path: &Path, f: &mut impl FnMut(&TypeRef)) { + if let Some(type_ref) = path.type_anchor() { + go(type_ref, f); + } + for segment in path.segments().iter() { + if let Some(args_and_bindings) = segment.args_and_bindings { + for arg in &args_and_bindings.args { + let crate::path::GenericArg::Type(type_ref) = arg; + go(type_ref, f); + } + for binding in &args_and_bindings.bindings { + if let Some(type_ref) = &binding.type_ref { + go(type_ref, f); + } + for bound in &binding.bounds { + match bound { + TypeBound::Path(path) => go_path(path, f), + TypeBound::Error => (), + } + } + } + } + } + } + } +} + +pub(crate) fn type_bounds_from_ast( + lower_ctx: &LowerCtx, + type_bounds_opt: Option, +) -> Vec { + if let 
Some(type_bounds) = type_bounds_opt { + type_bounds.bounds().map(|it| TypeBound::from_ast(lower_ctx, it)).collect() + } else { + vec![] + } +} + +impl TypeBound { + pub(crate) fn from_ast(ctx: &LowerCtx, node: ast::TypeBound) -> Self { + match node.kind() { + ast::TypeBoundKind::PathType(path_type) => { + let path = match path_type.path() { + Some(p) => p, + None => return TypeBound::Error, + }; + + let path = match ctx.lower_path(path) { + Some(p) => p, + None => return TypeBound::Error, + }; + TypeBound::Path(path) + } + ast::TypeBoundKind::ForType(_) | ast::TypeBoundKind::Lifetime(_) => TypeBound::Error, + } + } + + pub fn as_path(&self) -> Option<&Path> { + match self { + TypeBound::Path(p) => Some(p), + _ => None, + } + } +} diff --git a/crates/hir_def/src/visibility.rs b/crates/hir_def/src/visibility.rs new file mode 100644 index 0000000000..e6e0853a30 --- /dev/null +++ b/crates/hir_def/src/visibility.rs @@ -0,0 +1,171 @@ +//! Defines hir-level representation of visibility (e.g. `pub` and `pub(crate)`). + +use hir_expand::{hygiene::Hygiene, InFile}; +use syntax::ast; + +use crate::{ + db::DefDatabase, + nameres::CrateDefMap, + path::{ModPath, PathKind}, + ModuleId, +}; + +/// Visibility of an item, not yet resolved. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum RawVisibility { + /// `pub(in module)`, `pub(crate)` or `pub(super)`. Also private, which is + /// equivalent to `pub(self)`. + Module(ModPath), + /// `pub`. 
+ Public, +} + +impl RawVisibility { + pub(crate) const fn private() -> RawVisibility { + let path = ModPath { kind: PathKind::Super(0), segments: Vec::new() }; + RawVisibility::Module(path) + } + + pub(crate) fn from_ast( + db: &dyn DefDatabase, + node: InFile>, + ) -> RawVisibility { + Self::from_ast_with_hygiene(node.value, &Hygiene::new(db.upcast(), node.file_id)) + } + + pub(crate) fn from_ast_with_hygiene( + node: Option, + hygiene: &Hygiene, + ) -> RawVisibility { + Self::from_ast_with_hygiene_and_default(node, RawVisibility::private(), hygiene) + } + + pub(crate) fn from_ast_with_hygiene_and_default( + node: Option, + default: RawVisibility, + hygiene: &Hygiene, + ) -> RawVisibility { + let node = match node { + None => return default, + Some(node) => node, + }; + match node.kind() { + ast::VisibilityKind::In(path) => { + let path = ModPath::from_src(path, hygiene); + let path = match path { + None => return RawVisibility::private(), + Some(path) => path, + }; + RawVisibility::Module(path) + } + ast::VisibilityKind::PubCrate => { + let path = ModPath { kind: PathKind::Crate, segments: Vec::new() }; + RawVisibility::Module(path) + } + ast::VisibilityKind::PubSuper => { + let path = ModPath { kind: PathKind::Super(1), segments: Vec::new() }; + RawVisibility::Module(path) + } + ast::VisibilityKind::PubSelf => { + let path = ModPath { kind: PathKind::Plain, segments: Vec::new() }; + RawVisibility::Module(path) + } + ast::VisibilityKind::Pub => RawVisibility::Public, + } + } + + pub fn resolve( + &self, + db: &dyn DefDatabase, + resolver: &crate::resolver::Resolver, + ) -> Visibility { + // we fall back to public visibility (i.e. fail open) if the path can't be resolved + resolver.resolve_visibility(db, self).unwrap_or(Visibility::Public) + } +} + +/// Visibility of an item, with the path resolved. +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +pub enum Visibility { + /// Visibility is restricted to a certain module. 
+ Module(ModuleId), + /// Visibility is unrestricted. + Public, +} + +impl Visibility { + pub fn is_visible_from(self, db: &dyn DefDatabase, from_module: ModuleId) -> bool { + let to_module = match self { + Visibility::Module(m) => m, + Visibility::Public => return true, + }; + // if they're not in the same crate, it can't be visible + if from_module.krate != to_module.krate { + return false; + } + let def_map = db.crate_def_map(from_module.krate); + self.is_visible_from_def_map(&def_map, from_module.local_id) + } + + pub(crate) fn is_visible_from_other_crate(self) -> bool { + match self { + Visibility::Module(_) => false, + Visibility::Public => true, + } + } + + pub(crate) fn is_visible_from_def_map( + self, + def_map: &CrateDefMap, + from_module: crate::LocalModuleId, + ) -> bool { + let to_module = match self { + Visibility::Module(m) => m, + Visibility::Public => return true, + }; + // from_module needs to be a descendant of to_module + let mut ancestors = std::iter::successors(Some(from_module), |m| { + let parent_id = def_map[*m].parent?; + Some(parent_id) + }); + ancestors.any(|m| m == to_module.local_id) + } + + /// Returns the most permissive visibility of `self` and `other`. + /// + /// If there is no subset relation between `self` and `other`, returns `None` (ie. they're only + /// visible in unrelated modules). 
+ pub(crate) fn max(self, other: Visibility, def_map: &CrateDefMap) -> Option { + match (self, other) { + (Visibility::Module(_), Visibility::Public) + | (Visibility::Public, Visibility::Module(_)) + | (Visibility::Public, Visibility::Public) => Some(Visibility::Public), + (Visibility::Module(mod_a), Visibility::Module(mod_b)) => { + if mod_a.krate != mod_b.krate { + return None; + } + + let mut a_ancestors = std::iter::successors(Some(mod_a.local_id), |m| { + let parent_id = def_map[*m].parent?; + Some(parent_id) + }); + let mut b_ancestors = std::iter::successors(Some(mod_b.local_id), |m| { + let parent_id = def_map[*m].parent?; + Some(parent_id) + }); + + if a_ancestors.any(|m| m == mod_b.local_id) { + // B is above A + return Some(Visibility::Module(mod_b)); + } + + if b_ancestors.any(|m| m == mod_a.local_id) { + // A is above B + return Some(Visibility::Module(mod_a)); + } + + None + } + } + } +} diff --git a/crates/hir_expand/Cargo.toml b/crates/hir_expand/Cargo.toml new file mode 100644 index 0000000000..1c46992915 --- /dev/null +++ b/crates/hir_expand/Cargo.toml @@ -0,0 +1,23 @@ +[package] +name = "hir_expand" +version = "0.0.0" +license = "MIT OR Apache-2.0" +authors = ["rust-analyzer developers"] +edition = "2018" + +[lib] +doctest = false + +[dependencies] +log = "0.4.8" +either = "1.5.3" +rustc-hash = "1.0.0" + +arena = { path = "../arena" } +base_db = { path = "../base_db" } +syntax = { path = "../syntax" } +parser = { path = "../parser" } +profile = { path = "../profile" } +tt = { path = "../tt" } +mbe = { path = "../mbe" } +test_utils = { path = "../test_utils"} diff --git a/crates/hir_expand/src/ast_id_map.rs b/crates/hir_expand/src/ast_id_map.rs new file mode 100644 index 0000000000..f63629b305 --- /dev/null +++ b/crates/hir_expand/src/ast_id_map.rs @@ -0,0 +1,119 @@ +//! `AstIdMap` allows to create stable IDs for "large" syntax nodes like items +//! and macro calls. +//! +//! 
Specifically, it enumerates all items in a file and uses position of a an +//! item as an ID. That way, id's don't change unless the set of items itself +//! changes. + +use std::{ + any::type_name, + fmt, + hash::{Hash, Hasher}, + marker::PhantomData, +}; + +use arena::{Arena, Idx}; +use syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr}; + +/// `AstId` points to an AST node in a specific file. +pub struct FileAstId { + raw: ErasedFileAstId, + _ty: PhantomData N>, +} + +impl Clone for FileAstId { + fn clone(&self) -> FileAstId { + *self + } +} +impl Copy for FileAstId {} + +impl PartialEq for FileAstId { + fn eq(&self, other: &Self) -> bool { + self.raw == other.raw + } +} +impl Eq for FileAstId {} +impl Hash for FileAstId { + fn hash(&self, hasher: &mut H) { + self.raw.hash(hasher); + } +} + +impl fmt::Debug for FileAstId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "FileAstId::<{}>({})", type_name::(), self.raw.into_raw()) + } +} + +impl FileAstId { + // Can't make this a From implementation because of coherence + pub fn upcast(self) -> FileAstId + where + N: Into, + { + FileAstId { raw: self.raw, _ty: PhantomData } + } +} + +type ErasedFileAstId = Idx; + +/// Maps items' `SyntaxNode`s to `ErasedFileAstId`s and back. +#[derive(Debug, PartialEq, Eq, Default)] +pub struct AstIdMap { + arena: Arena, +} + +impl AstIdMap { + pub(crate) fn from_source(node: &SyntaxNode) -> AstIdMap { + assert!(node.parent().is_none()); + let mut res = AstIdMap { arena: Arena::default() }; + // By walking the tree in breadth-first order we make sure that parents + // get lower ids then children. That is, adding a new child does not + // change parent's id. This means that, say, adding a new function to a + // trait does not change ids of top-level items, which helps caching. 
+ bfs(node, |it| { + if let Some(module_item) = ast::Item::cast(it) { + res.alloc(module_item.syntax()); + } + }); + res + } + + pub fn ast_id(&self, item: &N) -> FileAstId { + let raw = self.erased_ast_id(item.syntax()); + FileAstId { raw, _ty: PhantomData } + } + fn erased_ast_id(&self, item: &SyntaxNode) -> ErasedFileAstId { + let ptr = SyntaxNodePtr::new(item); + match self.arena.iter().find(|(_id, i)| **i == ptr) { + Some((it, _)) => it, + None => panic!( + "Can't find {:?} in AstIdMap:\n{:?}", + item, + self.arena.iter().map(|(_id, i)| i).collect::>(), + ), + } + } + + pub fn get(&self, id: FileAstId) -> AstPtr { + self.arena[id.raw].clone().cast::().unwrap() + } + + fn alloc(&mut self, item: &SyntaxNode) -> ErasedFileAstId { + self.arena.alloc(SyntaxNodePtr::new(item)) + } +} + +/// Walks the subtree in bfs order, calling `f` for each node. +fn bfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode)) { + let mut curr_layer = vec![node.clone()]; + let mut next_layer = vec![]; + while !curr_layer.is_empty() { + curr_layer.drain(..).for_each(|node| { + next_layer.extend(node.children()); + f(node); + }); + std::mem::swap(&mut curr_layer, &mut next_layer); + } +} diff --git a/crates/hir_expand/src/builtin_derive.rs b/crates/hir_expand/src/builtin_derive.rs new file mode 100644 index 0000000000..988a60d56a --- /dev/null +++ b/crates/hir_expand/src/builtin_derive.rs @@ -0,0 +1,361 @@ +//! Builtin derives. + +use log::debug; + +use parser::FragmentKind; +use syntax::{ + ast::{self, AstNode, GenericParamsOwner, ModuleItemOwner, NameOwner}, + match_ast, +}; + +use crate::{db::AstDatabase, name, quote, LazyMacroId, MacroDefId, MacroDefKind}; + +macro_rules! 
register_builtin { + ( $($trait:ident => $expand:ident),* ) => { + #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] + pub enum BuiltinDeriveExpander { + $($trait),* + } + + impl BuiltinDeriveExpander { + pub fn expand( + &self, + db: &dyn AstDatabase, + id: LazyMacroId, + tt: &tt::Subtree, + ) -> Result { + let expander = match *self { + $( BuiltinDeriveExpander::$trait => $expand, )* + }; + expander(db, id, tt) + } + } + + pub fn find_builtin_derive(ident: &name::Name) -> Option { + let kind = match ident { + $( id if id == &name::name![$trait] => BuiltinDeriveExpander::$trait, )* + _ => return None, + }; + + Some(MacroDefId { krate: None, ast_id: None, kind: MacroDefKind::BuiltInDerive(kind), local_inner: false }) + } + }; +} + +register_builtin! { + Copy => copy_expand, + Clone => clone_expand, + Default => default_expand, + Debug => debug_expand, + Hash => hash_expand, + Ord => ord_expand, + PartialOrd => partial_ord_expand, + Eq => eq_expand, + PartialEq => partial_eq_expand +} + +struct BasicAdtInfo { + name: tt::Ident, + type_params: usize, +} + +fn parse_adt(tt: &tt::Subtree) -> Result { + let (parsed, token_map) = mbe::token_tree_to_syntax_node(tt, FragmentKind::Items)?; // FragmentKind::Items doesn't parse attrs? + let macro_items = ast::MacroItems::cast(parsed.syntax_node()).ok_or_else(|| { + debug!("derive node didn't parse"); + mbe::ExpandError::UnexpectedToken + })?; + let item = macro_items.items().next().ok_or_else(|| { + debug!("no module item parsed"); + mbe::ExpandError::NoMatchingRule + })?; + let node = item.syntax(); + let (name, params) = match_ast! 
{ + match node { + ast::Struct(it) => (it.name(), it.generic_param_list()), + ast::Enum(it) => (it.name(), it.generic_param_list()), + ast::Union(it) => (it.name(), it.generic_param_list()), + _ => { + debug!("unexpected node is {:?}", node); + return Err(mbe::ExpandError::ConversionError) + }, + } + }; + let name = name.ok_or_else(|| { + debug!("parsed item has no name"); + mbe::ExpandError::NoMatchingRule + })?; + let name_token_id = token_map.token_by_range(name.syntax().text_range()).ok_or_else(|| { + debug!("name token not found"); + mbe::ExpandError::ConversionError + })?; + let name_token = tt::Ident { id: name_token_id, text: name.text().clone() }; + let type_params = params.map_or(0, |type_param_list| type_param_list.type_params().count()); + Ok(BasicAdtInfo { name: name_token, type_params }) +} + +fn make_type_args(n: usize, bound: Vec) -> Vec { + let mut result = Vec::::new(); + result.push( + tt::Leaf::Punct(tt::Punct { + char: '<', + spacing: tt::Spacing::Alone, + id: tt::TokenId::unspecified(), + }) + .into(), + ); + for i in 0..n { + if i > 0 { + result.push( + tt::Leaf::Punct(tt::Punct { + char: ',', + spacing: tt::Spacing::Alone, + id: tt::TokenId::unspecified(), + }) + .into(), + ); + } + result.push( + tt::Leaf::Ident(tt::Ident { + id: tt::TokenId::unspecified(), + text: format!("T{}", i).into(), + }) + .into(), + ); + result.extend(bound.iter().cloned()); + } + result.push( + tt::Leaf::Punct(tt::Punct { + char: '>', + spacing: tt::Spacing::Alone, + id: tt::TokenId::unspecified(), + }) + .into(), + ); + result +} + +fn expand_simple_derive( + tt: &tt::Subtree, + trait_path: tt::Subtree, +) -> Result { + let info = parse_adt(tt)?; + let name = info.name; + let trait_path_clone = trait_path.token_trees.clone(); + let bound = (quote! 
{ : ##trait_path_clone }).token_trees; + let type_params = make_type_args(info.type_params, bound); + let type_args = make_type_args(info.type_params, Vec::new()); + let trait_path = trait_path.token_trees; + let expanded = quote! { + impl ##type_params ##trait_path for #name ##type_args {} + }; + Ok(expanded) +} + +fn find_builtin_crate(db: &dyn AstDatabase, id: LazyMacroId) -> tt::TokenTree { + // FIXME: make hygiene works for builtin derive macro + // such that $crate can be used here. + let cg = db.crate_graph(); + let krate = db.lookup_intern_macro(id).krate; + + // XXX + // All crates except core itself should have a dependency on core, + // We detect `core` by seeing whether it doesn't have such a dependency. + let tt = if cg[krate].dependencies.iter().any(|dep| &*dep.name == "core") { + quote! { core } + } else { + quote! { crate } + }; + + tt.token_trees[0].clone() +} + +fn copy_expand( + db: &dyn AstDatabase, + id: LazyMacroId, + tt: &tt::Subtree, +) -> Result { + let krate = find_builtin_crate(db, id); + expand_simple_derive(tt, quote! { #krate::marker::Copy }) +} + +fn clone_expand( + db: &dyn AstDatabase, + id: LazyMacroId, + tt: &tt::Subtree, +) -> Result { + let krate = find_builtin_crate(db, id); + expand_simple_derive(tt, quote! { #krate::clone::Clone }) +} + +fn default_expand( + db: &dyn AstDatabase, + id: LazyMacroId, + tt: &tt::Subtree, +) -> Result { + let krate = find_builtin_crate(db, id); + expand_simple_derive(tt, quote! { #krate::default::Default }) +} + +fn debug_expand( + db: &dyn AstDatabase, + id: LazyMacroId, + tt: &tt::Subtree, +) -> Result { + let krate = find_builtin_crate(db, id); + expand_simple_derive(tt, quote! { #krate::fmt::Debug }) +} + +fn hash_expand( + db: &dyn AstDatabase, + id: LazyMacroId, + tt: &tt::Subtree, +) -> Result { + let krate = find_builtin_crate(db, id); + expand_simple_derive(tt, quote! 
{ #krate::hash::Hash }) +} + +fn eq_expand( + db: &dyn AstDatabase, + id: LazyMacroId, + tt: &tt::Subtree, +) -> Result { + let krate = find_builtin_crate(db, id); + expand_simple_derive(tt, quote! { #krate::cmp::Eq }) +} + +fn partial_eq_expand( + db: &dyn AstDatabase, + id: LazyMacroId, + tt: &tt::Subtree, +) -> Result { + let krate = find_builtin_crate(db, id); + expand_simple_derive(tt, quote! { #krate::cmp::PartialEq }) +} + +fn ord_expand( + db: &dyn AstDatabase, + id: LazyMacroId, + tt: &tt::Subtree, +) -> Result { + let krate = find_builtin_crate(db, id); + expand_simple_derive(tt, quote! { #krate::cmp::Ord }) +} + +fn partial_ord_expand( + db: &dyn AstDatabase, + id: LazyMacroId, + tt: &tt::Subtree, +) -> Result { + let krate = find_builtin_crate(db, id); + expand_simple_derive(tt, quote! { #krate::cmp::PartialOrd }) +} + +#[cfg(test)] +mod tests { + use base_db::{fixture::WithFixture, CrateId, SourceDatabase}; + use name::{known, Name}; + + use crate::{test_db::TestDB, AstId, MacroCallId, MacroCallKind, MacroCallLoc}; + + use super::*; + + fn expand_builtin_derive(s: &str, name: Name) -> String { + let def = find_builtin_derive(&name).unwrap(); + let fixture = format!( + r#"//- /main.rs crate:main deps:core +<|> +{} +//- /lib.rs crate:core +// empty +"#, + s + ); + + let (db, file_pos) = TestDB::with_position(&fixture); + let file_id = file_pos.file_id; + let parsed = db.parse(file_id); + let items: Vec<_> = + parsed.syntax_node().descendants().filter_map(ast::Item::cast).collect(); + + let ast_id_map = db.ast_id_map(file_id.into()); + + let attr_id = AstId::new(file_id.into(), ast_id_map.ast_id(&items[0])); + + let loc = MacroCallLoc { + def, + krate: CrateId(0), + kind: MacroCallKind::Attr(attr_id, name.to_string()), + }; + + let id: MacroCallId = db.intern_macro(loc).into(); + let parsed = db.parse_or_expand(id.as_file()).unwrap(); + + // FIXME text() for syntax nodes parsed from token tree looks weird + // because there's no whitespace, see below + 
parsed.text().to_string() + } + + #[test] + fn test_copy_expand_simple() { + let expanded = expand_builtin_derive( + r#" + #[derive(Copy)] + struct Foo; +"#, + known::Copy, + ); + + assert_eq!(expanded, "impl< >core::marker::CopyforFoo< >{}"); + } + + #[test] + fn test_copy_expand_with_type_params() { + let expanded = expand_builtin_derive( + r#" + #[derive(Copy)] + struct Foo; +"#, + known::Copy, + ); + + assert_eq!( + expanded, + "implcore::marker::CopyforFoo{}" + ); + } + + #[test] + fn test_copy_expand_with_lifetimes() { + let expanded = expand_builtin_derive( + r#" + #[derive(Copy)] + struct Foo; +"#, + known::Copy, + ); + + // We currently just ignore lifetimes + + assert_eq!( + expanded, + "implcore::marker::CopyforFoo{}" + ); + } + + #[test] + fn test_clone_expand() { + let expanded = expand_builtin_derive( + r#" + #[derive(Clone)] + struct Foo; +"#, + known::Clone, + ); + + assert_eq!( + expanded, + "implcore::clone::CloneforFoo{}" + ); + } +} diff --git a/crates/hir_expand/src/builtin_macro.rs b/crates/hir_expand/src/builtin_macro.rs new file mode 100644 index 0000000000..86918b6269 --- /dev/null +++ b/crates/hir_expand/src/builtin_macro.rs @@ -0,0 +1,649 @@ +//! Builtin macro +use crate::{ + db::AstDatabase, name, quote, AstId, CrateId, EagerMacroId, LazyMacroId, MacroCallId, + MacroDefId, MacroDefKind, TextSize, +}; + +use base_db::FileId; +use either::Either; +use mbe::parse_to_token_tree; +use parser::FragmentKind; +use syntax::ast::{self, AstToken, HasStringValue}; + +macro_rules! 
register_builtin { + ( LAZY: $(($name:ident, $kind: ident) => $expand:ident),* , EAGER: $(($e_name:ident, $e_kind: ident) => $e_expand:ident),* ) => { + #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] + pub enum BuiltinFnLikeExpander { + $($kind),* + } + + #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] + pub enum EagerExpander { + $($e_kind),* + } + + impl BuiltinFnLikeExpander { + pub fn expand( + &self, + db: &dyn AstDatabase, + id: LazyMacroId, + tt: &tt::Subtree, + ) -> Result { + let expander = match *self { + $( BuiltinFnLikeExpander::$kind => $expand, )* + }; + expander(db, id, tt) + } + } + + impl EagerExpander { + pub fn expand( + &self, + db: &dyn AstDatabase, + arg_id: EagerMacroId, + tt: &tt::Subtree, + ) -> Result<(tt::Subtree, FragmentKind), mbe::ExpandError> { + let expander = match *self { + $( EagerExpander::$e_kind => $e_expand, )* + }; + expander(db,arg_id,tt) + } + } + + fn find_by_name(ident: &name::Name) -> Option> { + match ident { + $( id if id == &name::name![$name] => Some(Either::Left(BuiltinFnLikeExpander::$kind)), )* + $( id if id == &name::name![$e_name] => Some(Either::Right(EagerExpander::$e_kind)), )* + _ => return None, + } + } + }; +} + +pub fn find_builtin_macro( + ident: &name::Name, + krate: CrateId, + ast_id: AstId, +) -> Option { + let kind = find_by_name(ident)?; + + match kind { + Either::Left(kind) => Some(MacroDefId { + krate: Some(krate), + ast_id: Some(ast_id), + kind: MacroDefKind::BuiltIn(kind), + local_inner: false, + }), + Either::Right(kind) => Some(MacroDefId { + krate: Some(krate), + ast_id: Some(ast_id), + kind: MacroDefKind::BuiltInEager(kind), + local_inner: false, + }), + } +} + +register_builtin! 
{ + LAZY: + (column, Column) => column_expand, + (compile_error, CompileError) => compile_error_expand, + (file, File) => file_expand, + (line, Line) => line_expand, + (assert, Assert) => assert_expand, + (stringify, Stringify) => stringify_expand, + (format_args, FormatArgs) => format_args_expand, + // format_args_nl only differs in that it adds a newline in the end, + // so we use the same stub expansion for now + (format_args_nl, FormatArgsNl) => format_args_expand, + + EAGER: + (concat, Concat) => concat_expand, + (include, Include) => include_expand, + (include_bytes, IncludeBytes) => include_bytes_expand, + (include_str, IncludeStr) => include_str_expand, + (env, Env) => env_expand, + (option_env, OptionEnv) => option_env_expand +} + +fn line_expand( + _db: &dyn AstDatabase, + _id: LazyMacroId, + _tt: &tt::Subtree, +) -> Result { + // dummy implementation for type-checking purposes + let line_num = 0; + let expanded = quote! { + #line_num + }; + + Ok(expanded) +} + +fn stringify_expand( + db: &dyn AstDatabase, + id: LazyMacroId, + _tt: &tt::Subtree, +) -> Result { + let loc = db.lookup_intern_macro(id); + + let macro_content = { + let arg = loc.kind.arg(db).ok_or_else(|| mbe::ExpandError::UnexpectedToken)?; + let macro_args = arg; + let text = macro_args.text(); + let without_parens = TextSize::of('(')..text.len() - TextSize::of(')'); + text.slice(without_parens).to_string() + }; + + let expanded = quote! { + #macro_content + }; + + Ok(expanded) +} + +fn column_expand( + _db: &dyn AstDatabase, + _id: LazyMacroId, + _tt: &tt::Subtree, +) -> Result { + // dummy implementation for type-checking purposes + let col_num = 0; + let expanded = quote! { + #col_num + }; + + Ok(expanded) +} + +fn assert_expand( + _db: &dyn AstDatabase, + _id: LazyMacroId, + tt: &tt::Subtree, +) -> Result { + // A hacky implementation for goto def and hover + // We expand `assert!(cond, arg1, arg2)` to + // ``` + // {(cond, &(arg1), &(arg2));} + // ```, + // which is wrong but useful. 
+ + let mut args = Vec::new(); + let mut current = Vec::new(); + for tt in tt.token_trees.iter().cloned() { + match tt { + tt::TokenTree::Leaf(tt::Leaf::Punct(p)) if p.char == ',' => { + args.push(current); + current = Vec::new(); + } + _ => { + current.push(tt); + } + } + } + if !current.is_empty() { + args.push(current); + } + + let arg_tts = args.into_iter().flat_map(|arg| { + quote! { &(##arg), } + }.token_trees).collect::>(); + + let expanded = quote! { + { { (##arg_tts); } } + }; + Ok(expanded) +} + +fn file_expand( + _db: &dyn AstDatabase, + _id: LazyMacroId, + _tt: &tt::Subtree, +) -> Result { + // FIXME: RA purposefully lacks knowledge of absolute file names + // so just return "". + let file_name = ""; + + let expanded = quote! { + #file_name + }; + + Ok(expanded) +} + +fn compile_error_expand( + _db: &dyn AstDatabase, + _id: LazyMacroId, + tt: &tt::Subtree, +) -> Result { + if tt.count() == 1 { + if let tt::TokenTree::Leaf(tt::Leaf::Literal(it)) = &tt.token_trees[0] { + let s = it.text.as_str(); + if s.contains('"') { + return Ok(quote! 
{ loop { #it }}); + } + }; + } + + Err(mbe::ExpandError::BindingError("Must be a string".into())) +} + +fn format_args_expand( + _db: &dyn AstDatabase, + _id: LazyMacroId, + tt: &tt::Subtree, +) -> Result { + // We expand `format_args!("", a1, a2)` to + // ``` + // std::fmt::Arguments::new_v1(&[], &[ + // std::fmt::ArgumentV1::new(&arg1,std::fmt::Display::fmt), + // std::fmt::ArgumentV1::new(&arg2,std::fmt::Display::fmt), + // ]) + // ```, + // which is still not really correct, but close enough for now + let mut args = Vec::new(); + let mut current = Vec::new(); + for tt in tt.token_trees.iter().cloned() { + match tt { + tt::TokenTree::Leaf(tt::Leaf::Punct(p)) if p.char == ',' => { + args.push(current); + current = Vec::new(); + } + _ => { + current.push(tt); + } + } + } + if !current.is_empty() { + args.push(current); + } + if args.is_empty() { + return Err(mbe::ExpandError::NoMatchingRule); + } + let _format_string = args.remove(0); + let arg_tts = args.into_iter().flat_map(|arg| { + quote! { std::fmt::ArgumentV1::new(&(##arg), std::fmt::Display::fmt), } + }.token_trees).collect::>(); + let expanded = quote! 
{ + std::fmt::Arguments::new_v1(&[], &[##arg_tts]) + }; + Ok(expanded) +} + +fn unquote_str(lit: &tt::Literal) -> Option { + let lit = ast::make::tokens::literal(&lit.to_string()); + let token = ast::String::cast(lit)?; + token.value().map(|it| it.into_owned()) +} + +fn concat_expand( + _db: &dyn AstDatabase, + _arg_id: EagerMacroId, + tt: &tt::Subtree, +) -> Result<(tt::Subtree, FragmentKind), mbe::ExpandError> { + let mut text = String::new(); + for (i, t) in tt.token_trees.iter().enumerate() { + match t { + tt::TokenTree::Leaf(tt::Leaf::Literal(it)) if i % 2 == 0 => { + text += &unquote_str(&it).ok_or_else(|| mbe::ExpandError::ConversionError)?; + } + tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (), + _ => return Err(mbe::ExpandError::UnexpectedToken), + } + } + + Ok((quote!(#text), FragmentKind::Expr)) +} + +fn relative_file( + db: &dyn AstDatabase, + call_id: MacroCallId, + path: &str, + allow_recursion: bool, +) -> Option { + let call_site = call_id.as_file().original_file(db); + let res = db.resolve_path(call_site, path)?; + // Prevent include itself + if res == call_site && !allow_recursion { + None + } else { + Some(res) + } +} + +fn parse_string(tt: &tt::Subtree) -> Result { + tt.token_trees + .get(0) + .and_then(|tt| match tt { + tt::TokenTree::Leaf(tt::Leaf::Literal(it)) => unquote_str(&it), + _ => None, + }) + .ok_or_else(|| mbe::ExpandError::ConversionError) +} + +fn include_expand( + db: &dyn AstDatabase, + arg_id: EagerMacroId, + tt: &tt::Subtree, +) -> Result<(tt::Subtree, FragmentKind), mbe::ExpandError> { + let path = parse_string(tt)?; + let file_id = relative_file(db, arg_id.into(), &path, false) + .ok_or_else(|| mbe::ExpandError::ConversionError)?; + + // FIXME: + // Handle include as expression + let res = parse_to_token_tree(&db.file_text(file_id)) + .ok_or_else(|| mbe::ExpandError::ConversionError)? 
+ .0; + + Ok((res, FragmentKind::Items)) +} + +fn include_bytes_expand( + _db: &dyn AstDatabase, + _arg_id: EagerMacroId, + tt: &tt::Subtree, +) -> Result<(tt::Subtree, FragmentKind), mbe::ExpandError> { + let _path = parse_string(tt)?; + + // FIXME: actually read the file here if the user asked for macro expansion + let res = tt::Subtree { + delimiter: None, + token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { + text: r#"b"""#.into(), + id: tt::TokenId::unspecified(), + }))], + }; + Ok((res, FragmentKind::Expr)) +} + +fn include_str_expand( + db: &dyn AstDatabase, + arg_id: EagerMacroId, + tt: &tt::Subtree, +) -> Result<(tt::Subtree, FragmentKind), mbe::ExpandError> { + let path = parse_string(tt)?; + + // FIXME: we're not able to read excluded files (which is most of them because + // it's unusual to `include_str!` a Rust file), but we can return an empty string. + // Ideally, we'd be able to offer a precise expansion if the user asks for macro + // expansion. + let file_id = match relative_file(db, arg_id.into(), &path, true) { + Some(file_id) => file_id, + None => { + return Ok((quote!(""), FragmentKind::Expr)); + } + }; + + let text = db.file_text(file_id); + let text = &*text; + + Ok((quote!(#text), FragmentKind::Expr)) +} + +fn get_env_inner(db: &dyn AstDatabase, arg_id: EagerMacroId, key: &str) -> Option { + let krate = db.lookup_intern_eager_expansion(arg_id).krate; + db.crate_graph()[krate].env.get(key) +} + +fn env_expand( + db: &dyn AstDatabase, + arg_id: EagerMacroId, + tt: &tt::Subtree, +) -> Result<(tt::Subtree, FragmentKind), mbe::ExpandError> { + let key = parse_string(tt)?; + + // FIXME: + // If the environment variable is not defined int rustc, then a compilation error will be emitted. + // We might do the same if we fully support all other stuffs. + // But for now on, we should return some dummy string for better type infer purpose. 
+ // However, we cannot use an empty string here, because for + // `include!(concat!(env!("OUT_DIR"), "/foo.rs"))` will become + // `include!("foo.rs"), which might go to infinite loop + let s = get_env_inner(db, arg_id, &key).unwrap_or_else(|| "__RA_UNIMPLEMENTED__".to_string()); + let expanded = quote! { #s }; + + Ok((expanded, FragmentKind::Expr)) +} + +fn option_env_expand( + db: &dyn AstDatabase, + arg_id: EagerMacroId, + tt: &tt::Subtree, +) -> Result<(tt::Subtree, FragmentKind), mbe::ExpandError> { + let key = parse_string(tt)?; + let expanded = match get_env_inner(db, arg_id, &key) { + None => quote! { std::option::Option::None::<&str> }, + Some(s) => quote! { std::option::Some(#s) }, + }; + + Ok((expanded, FragmentKind::Expr)) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::{ + name::AsName, test_db::TestDB, AstNode, EagerCallLoc, MacroCallId, MacroCallKind, + MacroCallLoc, + }; + use base_db::{fixture::WithFixture, SourceDatabase}; + use std::sync::Arc; + use syntax::ast::NameOwner; + + fn expand_builtin_macro(ra_fixture: &str) -> String { + let (db, file_id) = TestDB::with_single_file(&ra_fixture); + let parsed = db.parse(file_id); + let macro_calls: Vec<_> = + parsed.syntax_node().descendants().filter_map(ast::MacroCall::cast).collect(); + + let ast_id_map = db.ast_id_map(file_id.into()); + + let expander = find_by_name(¯o_calls[0].name().unwrap().as_name()).unwrap(); + + let krate = CrateId(0); + let file_id = match expander { + Either::Left(expander) => { + // the first one should be a macro_rules + let def = MacroDefId { + krate: Some(CrateId(0)), + ast_id: Some(AstId::new(file_id.into(), ast_id_map.ast_id(¯o_calls[0]))), + kind: MacroDefKind::BuiltIn(expander), + local_inner: false, + }; + + let loc = MacroCallLoc { + def, + krate, + kind: MacroCallKind::FnLike(AstId::new( + file_id.into(), + ast_id_map.ast_id(¯o_calls[1]), + )), + }; + + let id: MacroCallId = db.intern_macro(loc).into(); + id.as_file() + } + Either::Right(expander) => 
{ + // the first one should be a macro_rules + let def = MacroDefId { + krate: Some(krate), + ast_id: Some(AstId::new(file_id.into(), ast_id_map.ast_id(¯o_calls[0]))), + kind: MacroDefKind::BuiltInEager(expander), + local_inner: false, + }; + + let args = macro_calls[1].token_tree().unwrap(); + let parsed_args = mbe::ast_to_token_tree(&args).unwrap().0; + + let arg_id = db.intern_eager_expansion({ + EagerCallLoc { + def, + fragment: FragmentKind::Expr, + subtree: Arc::new(parsed_args.clone()), + krate, + file_id: file_id.into(), + } + }); + + let (subtree, fragment) = expander.expand(&db, arg_id, &parsed_args).unwrap(); + let eager = EagerCallLoc { + def, + fragment, + subtree: Arc::new(subtree), + krate, + file_id: file_id.into(), + }; + + let id: MacroCallId = db.intern_eager_expansion(eager).into(); + id.as_file() + } + }; + + db.parse_or_expand(file_id).unwrap().to_string() + } + + #[test] + fn test_column_expand() { + let expanded = expand_builtin_macro( + r#" + #[rustc_builtin_macro] + macro_rules! column {() => {}} + column!() + "#, + ); + + assert_eq!(expanded, "0"); + } + + #[test] + fn test_line_expand() { + let expanded = expand_builtin_macro( + r#" + #[rustc_builtin_macro] + macro_rules! line {() => {}} + line!() + "#, + ); + + assert_eq!(expanded, "0"); + } + + #[test] + fn test_stringify_expand() { + let expanded = expand_builtin_macro( + r#" + #[rustc_builtin_macro] + macro_rules! stringify {() => {}} + stringify!(a b c) + "#, + ); + + assert_eq!(expanded, "\"a b c\""); + } + + #[test] + fn test_env_expand() { + let expanded = expand_builtin_macro( + r#" + #[rustc_builtin_macro] + macro_rules! env {() => {}} + env!("TEST_ENV_VAR") + "#, + ); + + assert_eq!(expanded, "\"__RA_UNIMPLEMENTED__\""); + } + + #[test] + fn test_option_env_expand() { + let expanded = expand_builtin_macro( + r#" + #[rustc_builtin_macro] + macro_rules! 
option_env {() => {}} + option_env!("TEST_ENV_VAR") + "#, + ); + + assert_eq!(expanded, "std::option::Option::None:: < &str>"); + } + + #[test] + fn test_file_expand() { + let expanded = expand_builtin_macro( + r#" + #[rustc_builtin_macro] + macro_rules! file {() => {}} + file!() + "#, + ); + + assert_eq!(expanded, "\"\""); + } + + #[test] + fn test_assert_expand() { + let expanded = expand_builtin_macro( + r#" + #[rustc_builtin_macro] + macro_rules! assert { + ($cond:expr) => ({ /* compiler built-in */ }); + ($cond:expr, $($args:tt)*) => ({ /* compiler built-in */ }) + } + assert!(true, "{} {:?}", arg1(a, b, c), arg2); + "#, + ); + + assert_eq!(expanded, "{{(&(true), &(\"{} {:?}\"), &(arg1(a,b,c)), &(arg2),);}}"); + } + + #[test] + fn test_compile_error_expand() { + let expanded = expand_builtin_macro( + r#" + #[rustc_builtin_macro] + macro_rules! compile_error { + ($msg:expr) => ({ /* compiler built-in */ }); + ($msg:expr,) => ({ /* compiler built-in */ }) + } + compile_error!("error!"); + "#, + ); + + assert_eq!(expanded, r#"loop{"error!"}"#); + } + + #[test] + fn test_format_args_expand() { + let expanded = expand_builtin_macro( + r#" + #[rustc_builtin_macro] + macro_rules! format_args { + ($fmt:expr) => ({ /* compiler built-in */ }); + ($fmt:expr, $($args:tt)*) => ({ /* compiler built-in */ }) + } + format_args!("{} {:?}", arg1(a, b, c), arg2); + "#, + ); + + assert_eq!( + expanded, + r#"std::fmt::Arguments::new_v1(&[], &[std::fmt::ArgumentV1::new(&(arg1(a,b,c)),std::fmt::Display::fmt),std::fmt::ArgumentV1::new(&(arg2),std::fmt::Display::fmt),])"# + ); + } + + #[test] + fn test_include_bytes_expand() { + let expanded = expand_builtin_macro( + r#" + #[rustc_builtin_macro] + macro_rules! 
include_bytes { + ($file:expr) => {{ /* compiler built-in */ }}; + ($file:expr,) => {{ /* compiler built-in */ }}; + } + include_bytes("foo"); + "#, + ); + + assert_eq!(expanded, r#"b"""#); + } +} diff --git a/crates/hir_expand/src/db.rs b/crates/hir_expand/src/db.rs new file mode 100644 index 0000000000..dcc038bcd8 --- /dev/null +++ b/crates/hir_expand/src/db.rs @@ -0,0 +1,403 @@ +//! Defines database & queries for macro expansion. + +use std::sync::Arc; + +use base_db::{salsa, SourceDatabase}; +use mbe::{ExpandResult, MacroRules}; +use parser::FragmentKind; +use syntax::{algo::diff, AstNode, GreenNode, Parse, SyntaxKind::*, SyntaxNode}; + +use crate::{ + ast_id_map::AstIdMap, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallLoc, EagerMacroId, + HirFileId, HirFileIdRepr, LazyMacroId, MacroCallId, MacroCallLoc, MacroDefId, MacroDefKind, + MacroFile, ProcMacroExpander, +}; + +#[derive(Debug, Clone, Eq, PartialEq)] +pub enum TokenExpander { + MacroRules(mbe::MacroRules), + Builtin(BuiltinFnLikeExpander), + BuiltinDerive(BuiltinDeriveExpander), + ProcMacro(ProcMacroExpander), +} + +impl TokenExpander { + pub fn expand( + &self, + db: &dyn AstDatabase, + id: LazyMacroId, + tt: &tt::Subtree, + ) -> mbe::ExpandResult { + match self { + TokenExpander::MacroRules(it) => it.expand(tt), + // FIXME switch these to ExpandResult as well + TokenExpander::Builtin(it) => it.expand(db, id, tt).into(), + TokenExpander::BuiltinDerive(it) => it.expand(db, id, tt).into(), + TokenExpander::ProcMacro(_) => { + // We store the result in salsa db to prevent non-determinisc behavior in + // some proc-macro implementation + // See #4315 for details + db.expand_proc_macro(id.into()).into() + } + } + } + + pub fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId { + match self { + TokenExpander::MacroRules(it) => it.map_id_down(id), + TokenExpander::Builtin(..) => id, + TokenExpander::BuiltinDerive(..) => id, + TokenExpander::ProcMacro(..) 
=> id, + } + } + + pub fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) { + match self { + TokenExpander::MacroRules(it) => it.map_id_up(id), + TokenExpander::Builtin(..) => (id, mbe::Origin::Call), + TokenExpander::BuiltinDerive(..) => (id, mbe::Origin::Call), + TokenExpander::ProcMacro(..) => (id, mbe::Origin::Call), + } + } +} + +// FIXME: rename to ExpandDatabase +#[salsa::query_group(AstDatabaseStorage)] +pub trait AstDatabase: SourceDatabase { + fn ast_id_map(&self, file_id: HirFileId) -> Arc; + + #[salsa::transparent] + fn parse_or_expand(&self, file_id: HirFileId) -> Option; + + #[salsa::interned] + fn intern_macro(&self, macro_call: MacroCallLoc) -> LazyMacroId; + fn macro_arg_text(&self, id: MacroCallId) -> Option; + #[salsa::transparent] + fn macro_arg(&self, id: MacroCallId) -> Option>; + fn macro_def(&self, id: MacroDefId) -> Option>; + fn parse_macro(&self, macro_file: MacroFile) + -> Option<(Parse, Arc)>; + fn macro_expand(&self, macro_call: MacroCallId) -> (Option>, Option); + + #[salsa::interned] + fn intern_eager_expansion(&self, eager: EagerCallLoc) -> EagerMacroId; + + fn expand_proc_macro(&self, call: MacroCallId) -> Result; +} + +/// This expands the given macro call, but with different arguments. This is +/// used for completion, where we want to see what 'would happen' if we insert a +/// token. The `token_to_map` mapped down into the expansion, with the mapped +/// token returned. 
+pub fn expand_hypothetical( + db: &dyn AstDatabase, + actual_macro_call: MacroCallId, + hypothetical_args: &syntax::ast::TokenTree, + token_to_map: syntax::SyntaxToken, +) -> Option<(SyntaxNode, syntax::SyntaxToken)> { + let macro_file = MacroFile { macro_call_id: actual_macro_call }; + let (tt, tmap_1) = mbe::syntax_node_to_token_tree(hypothetical_args.syntax()).unwrap(); + let range = + token_to_map.text_range().checked_sub(hypothetical_args.syntax().text_range().start())?; + let token_id = tmap_1.token_by_range(range)?; + let macro_def = expander(db, actual_macro_call)?; + let (node, tmap_2) = + parse_macro_with_arg(db, macro_file, Some(std::sync::Arc::new((tt, tmap_1))))?; + let token_id = macro_def.0.map_id_down(token_id); + let range = tmap_2.range_by_token(token_id)?.by_kind(token_to_map.kind())?; + let token = syntax::algo::find_covering_element(&node.syntax_node(), range).into_token()?; + Some((node.syntax_node(), token)) +} + +pub(crate) fn ast_id_map(db: &dyn AstDatabase, file_id: HirFileId) -> Arc { + let map = + db.parse_or_expand(file_id).map_or_else(AstIdMap::default, |it| AstIdMap::from_source(&it)); + Arc::new(map) +} + +pub(crate) fn macro_def( + db: &dyn AstDatabase, + id: MacroDefId, +) -> Option> { + match id.kind { + MacroDefKind::Declarative => { + let macro_call = id.ast_id?.to_node(db); + let arg = macro_call.token_tree()?; + let (tt, tmap) = mbe::ast_to_token_tree(&arg).or_else(|| { + log::warn!("fail on macro_def to token tree: {:#?}", arg); + None + })?; + let rules = match MacroRules::parse(&tt) { + Ok(it) => it, + Err(err) => { + log::warn!("fail on macro_def parse: error: {:#?} {:#?}", err, tt); + return None; + } + }; + Some(Arc::new((TokenExpander::MacroRules(rules), tmap))) + } + MacroDefKind::BuiltIn(expander) => { + Some(Arc::new((TokenExpander::Builtin(expander), mbe::TokenMap::default()))) + } + MacroDefKind::BuiltInDerive(expander) => { + Some(Arc::new((TokenExpander::BuiltinDerive(expander), mbe::TokenMap::default()))) + } + 
MacroDefKind::BuiltInEager(_) => None, + MacroDefKind::CustomDerive(expander) => { + Some(Arc::new((TokenExpander::ProcMacro(expander), mbe::TokenMap::default()))) + } + } +} + +pub(crate) fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option { + let id = match id { + MacroCallId::LazyMacro(id) => id, + MacroCallId::EagerMacro(_id) => { + // FIXME: support macro_arg for eager macro + return None; + } + }; + let loc = db.lookup_intern_macro(id); + let arg = loc.kind.arg(db)?; + Some(arg.green().clone()) +} + +pub(crate) fn macro_arg( + db: &dyn AstDatabase, + id: MacroCallId, +) -> Option> { + let arg = db.macro_arg_text(id)?; + let (tt, tmap) = mbe::syntax_node_to_token_tree(&SyntaxNode::new_root(arg))?; + Some(Arc::new((tt, tmap))) +} + +pub(crate) fn macro_expand( + db: &dyn AstDatabase, + id: MacroCallId, +) -> (Option>, Option) { + macro_expand_with_arg(db, id, None) +} + +fn expander(db: &dyn AstDatabase, id: MacroCallId) -> Option> { + let lazy_id = match id { + MacroCallId::LazyMacro(id) => id, + MacroCallId::EagerMacro(_id) => { + return None; + } + }; + + let loc = db.lookup_intern_macro(lazy_id); + let macro_rules = db.macro_def(loc.def)?; + Some(macro_rules) +} + +fn macro_expand_with_arg( + db: &dyn AstDatabase, + id: MacroCallId, + arg: Option>, +) -> (Option>, Option) { + let lazy_id = match id { + MacroCallId::LazyMacro(id) => id, + MacroCallId::EagerMacro(id) => { + if arg.is_some() { + return ( + None, + Some("hypothetical macro expansion not implemented for eager macro".to_owned()), + ); + } else { + return (Some(db.lookup_intern_eager_expansion(id).subtree), None); + } + } + }; + + let loc = db.lookup_intern_macro(lazy_id); + let macro_arg = match arg.or_else(|| db.macro_arg(id)) { + Some(it) => it, + None => return (None, Some("Fail to args in to tt::TokenTree".into())), + }; + + let macro_rules = match db.macro_def(loc.def) { + Some(it) => it, + None => return (None, Some("Fail to find macro definition".into())), + }; + let 
ExpandResult(tt, err) = macro_rules.0.expand(db, lazy_id, ¯o_arg.0); + // Set a hard limit for the expanded tt + let count = tt.count(); + if count > 65536 { + return (None, Some(format!("Total tokens count exceed limit : count = {}", count))); + } + (Some(Arc::new(tt)), err.map(|e| format!("{:?}", e))) +} + +pub(crate) fn expand_proc_macro( + db: &dyn AstDatabase, + id: MacroCallId, +) -> Result { + let lazy_id = match id { + MacroCallId::LazyMacro(id) => id, + MacroCallId::EagerMacro(_) => unreachable!(), + }; + + let loc = db.lookup_intern_macro(lazy_id); + let macro_arg = match db.macro_arg(id) { + Some(it) => it, + None => { + return Err( + tt::ExpansionError::Unknown("No arguments for proc-macro".to_string()).into() + ) + } + }; + + let expander = match loc.def.kind { + MacroDefKind::CustomDerive(expander) => expander, + _ => unreachable!(), + }; + + expander.expand(db, lazy_id, ¯o_arg.0) +} + +pub(crate) fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Option { + match file_id.0 { + HirFileIdRepr::FileId(file_id) => Some(db.parse(file_id).tree().syntax().clone()), + HirFileIdRepr::MacroFile(macro_file) => { + db.parse_macro(macro_file).map(|(it, _)| it.syntax_node()) + } + } +} + +pub(crate) fn parse_macro( + db: &dyn AstDatabase, + macro_file: MacroFile, +) -> Option<(Parse, Arc)> { + parse_macro_with_arg(db, macro_file, None) +} + +pub fn parse_macro_with_arg( + db: &dyn AstDatabase, + macro_file: MacroFile, + arg: Option>, +) -> Option<(Parse, Arc)> { + let _p = profile::span("parse_macro_query"); + + let macro_call_id = macro_file.macro_call_id; + let (tt, err) = if let Some(arg) = arg { + macro_expand_with_arg(db, macro_call_id, Some(arg)) + } else { + db.macro_expand(macro_call_id) + }; + if let Some(err) = &err { + // Note: + // The final goal we would like to make all parse_macro success, + // such that the following log will not call anyway. 
+ match macro_call_id { + MacroCallId::LazyMacro(id) => { + let loc: MacroCallLoc = db.lookup_intern_macro(id); + let node = loc.kind.node(db); + + // collect parent information for warning log + let parents = std::iter::successors(loc.kind.file_id().call_node(db), |it| { + it.file_id.call_node(db) + }) + .map(|n| format!("{:#}", n.value)) + .collect::>() + .join("\n"); + + log::warn!( + "fail on macro_parse: (reason: {} macro_call: {:#}) parents: {}", + err, + node.value, + parents + ); + } + _ => { + log::warn!("fail on macro_parse: (reason: {})", err); + } + } + }; + let tt = tt?; + + let fragment_kind = to_fragment_kind(db, macro_call_id); + + let (parse, rev_token_map) = mbe::token_tree_to_syntax_node(&tt, fragment_kind).ok()?; + + if err.is_none() { + Some((parse, Arc::new(rev_token_map))) + } else { + // FIXME: + // In future, we should propagate the actual error with recovery information + // instead of ignore the error here. + + // Safe check for recurisve identity macro + let node = parse.syntax_node(); + let file: HirFileId = macro_file.into(); + let call_node = file.call_node(db)?; + + if !diff(&node, &call_node.value).is_empty() { + Some((parse, Arc::new(rev_token_map))) + } else { + None + } + } +} + +/// Given a `MacroCallId`, return what `FragmentKind` it belongs to. +/// FIXME: Not completed +fn to_fragment_kind(db: &dyn AstDatabase, id: MacroCallId) -> FragmentKind { + let lazy_id = match id { + MacroCallId::LazyMacro(id) => id, + MacroCallId::EagerMacro(id) => { + return db.lookup_intern_eager_expansion(id).fragment; + } + }; + let syn = db.lookup_intern_macro(lazy_id).kind.node(db).value; + + let parent = match syn.parent() { + Some(it) => it, + None => { + // FIXME: + // If it is root, which means the parent HirFile + // MacroKindFile must be non-items + // return expr now. 
+ return FragmentKind::Expr; + } + }; + + match parent.kind() { + MACRO_ITEMS | SOURCE_FILE => FragmentKind::Items, + ITEM_LIST => FragmentKind::Items, + LET_STMT => { + // FIXME: Handle Pattern + FragmentKind::Expr + } + // FIXME: Expand to statements in appropriate positions; HIR lowering needs to handle that + EXPR_STMT | BLOCK_EXPR => FragmentKind::Expr, + ARG_LIST => FragmentKind::Expr, + TRY_EXPR => FragmentKind::Expr, + TUPLE_EXPR => FragmentKind::Expr, + PAREN_EXPR => FragmentKind::Expr, + + FOR_EXPR => FragmentKind::Expr, + PATH_EXPR => FragmentKind::Expr, + CLOSURE_EXPR => FragmentKind::Expr, + CONDITION => FragmentKind::Expr, + BREAK_EXPR => FragmentKind::Expr, + RETURN_EXPR => FragmentKind::Expr, + MATCH_EXPR => FragmentKind::Expr, + MATCH_ARM => FragmentKind::Expr, + MATCH_GUARD => FragmentKind::Expr, + RECORD_EXPR_FIELD => FragmentKind::Expr, + CALL_EXPR => FragmentKind::Expr, + INDEX_EXPR => FragmentKind::Expr, + METHOD_CALL_EXPR => FragmentKind::Expr, + AWAIT_EXPR => FragmentKind::Expr, + CAST_EXPR => FragmentKind::Expr, + REF_EXPR => FragmentKind::Expr, + PREFIX_EXPR => FragmentKind::Expr, + RANGE_EXPR => FragmentKind::Expr, + BIN_EXPR => FragmentKind::Expr, + _ => { + // Unknown , Just guess it is `Items` + FragmentKind::Items + } + } +} diff --git a/crates/hir_expand/src/diagnostics.rs b/crates/hir_expand/src/diagnostics.rs new file mode 100644 index 0000000000..59d35debe3 --- /dev/null +++ b/crates/hir_expand/src/diagnostics.rs @@ -0,0 +1,95 @@ +//! Semantic errors and warnings. +//! +//! The `Diagnostic` trait defines a trait object which can represent any +//! diagnostic. +//! +//! `DiagnosticSink` struct is used as an emitter for diagnostic. When creating +//! a `DiagnosticSink`, you supply a callback which can react to a `dyn +//! Diagnostic` or to any concrete diagnostic (downcasting is sued internally). +//! +//! Because diagnostics store file offsets, it's a bad idea to store them +//! directly in salsa. 
For this reason, every hir subsytem defines it's own +//! strongly-typed closed set of diagnostics which use hir ids internally, are +//! stored in salsa and do *not* implement the `Diagnostic` trait. Instead, a +//! subsystem provides a separate, non-query-based API which can walk all stored +//! values and transform them into instances of `Diagnostic`. + +use std::{any::Any, fmt}; + +use syntax::SyntaxNodePtr; + +use crate::InFile; + +pub trait Diagnostic: Any + Send + Sync + fmt::Debug + 'static { + fn message(&self) -> String; + /// Used in highlighting and related purposes + fn display_source(&self) -> InFile; + fn as_any(&self) -> &(dyn Any + Send + 'static); + fn is_experimental(&self) -> bool { + false + } +} + +pub struct DiagnosticSink<'a> { + callbacks: Vec Result<(), ()> + 'a>>, + filters: Vec bool + 'a>>, + default_callback: Box, +} + +impl<'a> DiagnosticSink<'a> { + pub fn push(&mut self, d: impl Diagnostic) { + let d: &dyn Diagnostic = &d; + self._push(d); + } + + fn _push(&mut self, d: &dyn Diagnostic) { + for filter in &mut self.filters { + if !filter(d) { + return; + } + } + for cb in &mut self.callbacks { + match cb(d) { + Ok(()) => return, + Err(()) => (), + } + } + (self.default_callback)(d) + } +} + +pub struct DiagnosticSinkBuilder<'a> { + callbacks: Vec Result<(), ()> + 'a>>, + filters: Vec bool + 'a>>, +} + +impl<'a> DiagnosticSinkBuilder<'a> { + pub fn new() -> Self { + Self { callbacks: Vec::new(), filters: Vec::new() } + } + + pub fn filter bool + 'a>(mut self, cb: F) -> Self { + self.filters.push(Box::new(cb)); + self + } + + pub fn on(mut self, mut cb: F) -> Self { + let cb = move |diag: &dyn Diagnostic| match diag.as_any().downcast_ref::() { + Some(d) => { + cb(d); + Ok(()) + } + None => Err(()), + }; + self.callbacks.push(Box::new(cb)); + self + } + + pub fn build(self, default_callback: F) -> DiagnosticSink<'a> { + DiagnosticSink { + callbacks: self.callbacks, + filters: self.filters, + default_callback: Box::new(default_callback), 
+ } + } +} diff --git a/crates/hir_expand/src/eager.rs b/crates/hir_expand/src/eager.rs new file mode 100644 index 0000000000..10c45646f0 --- /dev/null +++ b/crates/hir_expand/src/eager.rs @@ -0,0 +1,144 @@ +//! Eager expansion related utils +//! +//! Here is a dump of a discussion from Vadim Petrochenkov about Eager Expansion and +//! Its name resolution : +//! +//! > Eagerly expanded macros (and also macros eagerly expanded by eagerly expanded macros, +//! > which actually happens in practice too!) are resolved at the location of the "root" macro +//! > that performs the eager expansion on its arguments. +//! > If some name cannot be resolved at the eager expansion time it's considered unresolved, +//! > even if becomes available later (e.g. from a glob import or other macro). +//! +//! > Eagerly expanded macros don't add anything to the module structure of the crate and +//! > don't build any speculative module structures, i.e. they are expanded in a "flat" +//! > way even if tokens in them look like modules. +//! +//! > In other words, it kinda works for simple cases for which it was originally intended, +//! > and we need to live with it because it's available on stable and widely relied upon. +//! +//! +//! 
See the full discussion : https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros + +use crate::{ + ast::{self, AstNode}, + db::AstDatabase, + EagerCallLoc, EagerMacroId, InFile, MacroCallId, MacroCallKind, MacroDefId, MacroDefKind, +}; + +use base_db::CrateId; +use parser::FragmentKind; +use std::sync::Arc; +use syntax::{algo::SyntaxRewriter, SyntaxNode}; + +pub fn expand_eager_macro( + db: &dyn AstDatabase, + krate: CrateId, + macro_call: InFile, + def: MacroDefId, + resolver: &dyn Fn(ast::Path) -> Option, +) -> Option { + let args = macro_call.value.token_tree()?; + let parsed_args = mbe::ast_to_token_tree(&args)?.0; + + // Note: + // When `lazy_expand` is called, its *parent* file must be already exists. + // Here we store an eager macro id for the argument expanded subtree here + // for that purpose. + let arg_id = db.intern_eager_expansion({ + EagerCallLoc { + def, + fragment: FragmentKind::Expr, + subtree: Arc::new(parsed_args.clone()), + krate, + file_id: macro_call.file_id, + } + }); + let arg_file_id: MacroCallId = arg_id.into(); + + let parsed_args = mbe::token_tree_to_syntax_node(&parsed_args, FragmentKind::Expr).ok()?.0; + let result = eager_macro_recur( + db, + InFile::new(arg_file_id.as_file(), parsed_args.syntax_node()), + krate, + resolver, + )?; + let subtree = to_subtree(&result)?; + + if let MacroDefKind::BuiltInEager(eager) = def.kind { + let (subtree, fragment) = eager.expand(db, arg_id, &subtree).ok()?; + let eager = EagerCallLoc { + def, + fragment, + subtree: Arc::new(subtree), + krate, + file_id: macro_call.file_id, + }; + + Some(db.intern_eager_expansion(eager)) + } else { + None + } +} + +fn to_subtree(node: &SyntaxNode) -> Option { + let mut subtree = mbe::syntax_node_to_token_tree(node)?.0; + subtree.delimiter = None; + Some(subtree) +} + +fn lazy_expand( + db: &dyn AstDatabase, + def: &MacroDefId, + macro_call: InFile, + krate: CrateId, +) -> Option> { + let ast_id = 
db.ast_id_map(macro_call.file_id).ast_id(¯o_call.value); + + let id: MacroCallId = + def.as_lazy_macro(db, krate, MacroCallKind::FnLike(macro_call.with_value(ast_id))).into(); + + db.parse_or_expand(id.as_file()).map(|node| InFile::new(id.as_file(), node)) +} + +fn eager_macro_recur( + db: &dyn AstDatabase, + curr: InFile, + krate: CrateId, + macro_resolver: &dyn Fn(ast::Path) -> Option, +) -> Option { + let original = curr.value.clone(); + + let children = curr.value.descendants().filter_map(ast::MacroCall::cast); + let mut rewriter = SyntaxRewriter::default(); + + // Collect replacement + for child in children { + let def: MacroDefId = macro_resolver(child.path()?)?; + let insert = match def.kind { + MacroDefKind::BuiltInEager(_) => { + let id: MacroCallId = expand_eager_macro( + db, + krate, + curr.with_value(child.clone()), + def, + macro_resolver, + )? + .into(); + db.parse_or_expand(id.as_file())? + } + MacroDefKind::Declarative + | MacroDefKind::BuiltIn(_) + | MacroDefKind::BuiltInDerive(_) + | MacroDefKind::CustomDerive(_) => { + let expanded = lazy_expand(db, &def, curr.with_value(child.clone()), krate)?; + // replace macro inside + eager_macro_recur(db, expanded, krate, macro_resolver)? + } + }; + + rewriter.replace(child.syntax(), &insert); + } + + let res = rewriter.rewrite(&original); + Some(res) +} diff --git a/crates/hir_expand/src/hygiene.rs b/crates/hir_expand/src/hygiene.rs new file mode 100644 index 0000000000..845e9cbc19 --- /dev/null +++ b/crates/hir_expand/src/hygiene.rs @@ -0,0 +1,66 @@ +//! This modules handles hygiene information. +//! +//! Specifically, `ast` + `Hygiene` allows you to create a `Name`. Note that, at +//! this moment, this is horribly incomplete and handles only `$crate`. 
+use base_db::CrateId; +use either::Either; +use syntax::ast; + +use crate::{ + db::AstDatabase, + name::{AsName, Name}, + HirFileId, HirFileIdRepr, MacroCallId, MacroDefKind, +}; + +#[derive(Clone, Debug)] +pub struct Hygiene { + // This is what `$crate` expands to + def_crate: Option, + + // Indicate this is a local inner macro + local_inner: bool, +} + +impl Hygiene { + pub fn new(db: &dyn AstDatabase, file_id: HirFileId) -> Hygiene { + let (def_crate, local_inner) = match file_id.0 { + HirFileIdRepr::FileId(_) => (None, false), + HirFileIdRepr::MacroFile(macro_file) => match macro_file.macro_call_id { + MacroCallId::LazyMacro(id) => { + let loc = db.lookup_intern_macro(id); + match loc.def.kind { + MacroDefKind::Declarative => (loc.def.krate, loc.def.local_inner), + MacroDefKind::BuiltIn(_) => (None, false), + MacroDefKind::BuiltInDerive(_) => (None, false), + MacroDefKind::BuiltInEager(_) => (None, false), + MacroDefKind::CustomDerive(_) => (None, false), + } + } + MacroCallId::EagerMacro(_id) => (None, false), + }, + }; + Hygiene { def_crate, local_inner } + } + + pub fn new_unhygienic() -> Hygiene { + Hygiene { def_crate: None, local_inner: false } + } + + // FIXME: this should just return name + pub fn name_ref_to_name(&self, name_ref: ast::NameRef) -> Either { + if let Some(def_crate) = self.def_crate { + if name_ref.text() == "$crate" { + return Either::Right(def_crate); + } + } + Either::Left(name_ref.as_name()) + } + + pub fn local_inner_macros(&self) -> Option { + if self.local_inner { + self.def_crate + } else { + None + } + } +} diff --git a/crates/hir_expand/src/lib.rs b/crates/hir_expand/src/lib.rs new file mode 100644 index 0000000000..2be15e8413 --- /dev/null +++ b/crates/hir_expand/src/lib.rs @@ -0,0 +1,453 @@ +//! `hir_expand` deals with macro expansion. +//! +//! Specifically, it implements a concept of `MacroFile` -- a file whose syntax +//! tree originates not from the text of some `FileId`, but from some macro +//! expansion. 
+ +pub mod db; +pub mod ast_id_map; +pub mod name; +pub mod hygiene; +pub mod diagnostics; +pub mod builtin_derive; +pub mod builtin_macro; +pub mod proc_macro; +pub mod quote; +pub mod eager; + +use std::hash::Hash; +use std::sync::Arc; + +use base_db::{impl_intern_key, salsa, CrateId, FileId}; +use syntax::{ + algo, + ast::{self, AstNode}, + SyntaxNode, SyntaxToken, TextSize, +}; + +use crate::ast_id_map::FileAstId; +use crate::builtin_derive::BuiltinDeriveExpander; +use crate::builtin_macro::{BuiltinFnLikeExpander, EagerExpander}; +use crate::proc_macro::ProcMacroExpander; + +#[cfg(test)] +mod test_db; + +/// Input to the analyzer is a set of files, where each file is identified by +/// `FileId` and contains source code. However, another source of source code in +/// Rust are macros: each macro can be thought of as producing a "temporary +/// file". To assign an id to such a file, we use the id of the macro call that +/// produced the file. So, a `HirFileId` is either a `FileId` (source code +/// written by user), or a `MacroCallId` (source code produced by macro). +/// +/// What is a `MacroCallId`? Simplifying, it's a `HirFileId` of a file +/// containing the call plus the offset of the macro call in the file. Note that +/// this is a recursive definition! However, the size_of of `HirFileId` is +/// finite (because everything bottoms out at the real `FileId`) and small +/// (`MacroCallId` uses the location interning. You can check details here: +/// https://en.wikipedia.org/wiki/String_interning). 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct HirFileId(HirFileIdRepr); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +enum HirFileIdRepr { + FileId(FileId), + MacroFile(MacroFile), +} + +impl From for HirFileId { + fn from(id: FileId) -> Self { + HirFileId(HirFileIdRepr::FileId(id)) + } +} + +impl From for HirFileId { + fn from(id: MacroFile) -> Self { + HirFileId(HirFileIdRepr::MacroFile(id)) + } +} + +impl HirFileId { + /// For macro-expansion files, returns the file original source file the + /// expansion originated from. + pub fn original_file(self, db: &dyn db::AstDatabase) -> FileId { + match self.0 { + HirFileIdRepr::FileId(file_id) => file_id, + HirFileIdRepr::MacroFile(macro_file) => { + let file_id = match macro_file.macro_call_id { + MacroCallId::LazyMacro(id) => { + let loc = db.lookup_intern_macro(id); + loc.kind.file_id() + } + MacroCallId::EagerMacro(id) => { + let loc = db.lookup_intern_eager_expansion(id); + loc.file_id + } + }; + file_id.original_file(db) + } + } + } + + pub fn expansion_level(self, db: &dyn db::AstDatabase) -> u32 { + let mut level = 0; + let mut curr = self; + while let HirFileIdRepr::MacroFile(macro_file) = curr.0 { + level += 1; + curr = match macro_file.macro_call_id { + MacroCallId::LazyMacro(id) => { + let loc = db.lookup_intern_macro(id); + loc.kind.file_id() + } + MacroCallId::EagerMacro(id) => { + let loc = db.lookup_intern_eager_expansion(id); + loc.file_id + } + }; + } + level + } + + /// If this is a macro call, returns the syntax node of the call. 
+ pub fn call_node(self, db: &dyn db::AstDatabase) -> Option> { + match self.0 { + HirFileIdRepr::FileId(_) => None, + HirFileIdRepr::MacroFile(macro_file) => { + let lazy_id = match macro_file.macro_call_id { + MacroCallId::LazyMacro(id) => id, + MacroCallId::EagerMacro(_id) => { + // FIXME: handle call node for eager macro + return None; + } + }; + let loc = db.lookup_intern_macro(lazy_id); + Some(loc.kind.node(db)) + } + } + } + + /// Return expansion information if it is a macro-expansion file + pub fn expansion_info(self, db: &dyn db::AstDatabase) -> Option { + match self.0 { + HirFileIdRepr::FileId(_) => None, + HirFileIdRepr::MacroFile(macro_file) => { + let lazy_id = match macro_file.macro_call_id { + MacroCallId::LazyMacro(id) => id, + MacroCallId::EagerMacro(_id) => { + // FIXME: handle expansion_info for eager macro + return None; + } + }; + let loc: MacroCallLoc = db.lookup_intern_macro(lazy_id); + + let arg_tt = loc.kind.arg(db)?; + let def_tt = loc.def.ast_id?.to_node(db).token_tree()?; + + let macro_def = db.macro_def(loc.def)?; + let (parse, exp_map) = db.parse_macro(macro_file)?; + let macro_arg = db.macro_arg(macro_file.macro_call_id)?; + + Some(ExpansionInfo { + expanded: InFile::new(self, parse.syntax_node()), + arg: InFile::new(loc.kind.file_id(), arg_tt), + def: InFile::new(loc.def.ast_id?.file_id, def_tt), + macro_arg, + macro_def, + exp_map, + }) + } + } + } + + /// Indicate it is macro file generated for builtin derive + pub fn is_builtin_derive(&self, db: &dyn db::AstDatabase) -> Option> { + match self.0 { + HirFileIdRepr::FileId(_) => None, + HirFileIdRepr::MacroFile(macro_file) => { + let lazy_id = match macro_file.macro_call_id { + MacroCallId::LazyMacro(id) => id, + MacroCallId::EagerMacro(_id) => { + return None; + } + }; + let loc: MacroCallLoc = db.lookup_intern_macro(lazy_id); + let item = match loc.def.kind { + MacroDefKind::BuiltInDerive(_) => loc.kind.node(db), + _ => return None, + }; + 
Some(item.with_value(ast::Item::cast(item.value.clone())?)) + } + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct MacroFile { + macro_call_id: MacroCallId, +} + +/// `MacroCallId` identifies a particular macro invocation, like +/// `println!("Hello, {}", world)`. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum MacroCallId { + LazyMacro(LazyMacroId), + EagerMacro(EagerMacroId), +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct LazyMacroId(salsa::InternId); +impl_intern_key!(LazyMacroId); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct EagerMacroId(salsa::InternId); +impl_intern_key!(EagerMacroId); + +impl From for MacroCallId { + fn from(it: LazyMacroId) -> Self { + MacroCallId::LazyMacro(it) + } +} +impl From for MacroCallId { + fn from(it: EagerMacroId) -> Self { + MacroCallId::EagerMacro(it) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct MacroDefId { + // FIXME: krate and ast_id are currently optional because we don't have a + // definition location for built-in derives. There is one, though: the + // standard library defines them. The problem is that it uses the new + // `macro` syntax for this, which we don't support yet. As soon as we do + // (which will probably require touching this code), we can instead use + // that (and also remove the hacks for resolving built-in derives). 
+ pub krate: Option, + pub ast_id: Option>, + pub kind: MacroDefKind, + + pub local_inner: bool, +} + +impl MacroDefId { + pub fn as_lazy_macro( + self, + db: &dyn db::AstDatabase, + krate: CrateId, + kind: MacroCallKind, + ) -> LazyMacroId { + db.intern_macro(MacroCallLoc { def: self, krate, kind }) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum MacroDefKind { + Declarative, + BuiltIn(BuiltinFnLikeExpander), + // FIXME: maybe just Builtin and rename BuiltinFnLikeExpander to BuiltinExpander + BuiltInDerive(BuiltinDeriveExpander), + BuiltInEager(EagerExpander), + CustomDerive(ProcMacroExpander), +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct MacroCallLoc { + pub(crate) def: MacroDefId, + pub(crate) krate: CrateId, + pub(crate) kind: MacroCallKind, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum MacroCallKind { + FnLike(AstId), + Attr(AstId, String), +} + +impl MacroCallKind { + fn file_id(&self) -> HirFileId { + match self { + MacroCallKind::FnLike(ast_id) => ast_id.file_id, + MacroCallKind::Attr(ast_id, _) => ast_id.file_id, + } + } + + fn node(&self, db: &dyn db::AstDatabase) -> InFile { + match self { + MacroCallKind::FnLike(ast_id) => ast_id.with_value(ast_id.to_node(db).syntax().clone()), + MacroCallKind::Attr(ast_id, _) => { + ast_id.with_value(ast_id.to_node(db).syntax().clone()) + } + } + } + + fn arg(&self, db: &dyn db::AstDatabase) -> Option { + match self { + MacroCallKind::FnLike(ast_id) => { + Some(ast_id.to_node(db).token_tree()?.syntax().clone()) + } + MacroCallKind::Attr(ast_id, _) => Some(ast_id.to_node(db).syntax().clone()), + } + } +} + +impl MacroCallId { + pub fn as_file(self) -> HirFileId { + MacroFile { macro_call_id: self }.into() + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct EagerCallLoc { + pub(crate) def: MacroDefId, + pub(crate) fragment: FragmentKind, + pub(crate) subtree: Arc, + pub(crate) krate: CrateId, + pub(crate) file_id: HirFileId, +} + +/// ExpansionInfo 
mainly describes how to map text range between src and expanded macro +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ExpansionInfo { + expanded: InFile, + arg: InFile, + def: InFile, + + macro_def: Arc<(db::TokenExpander, mbe::TokenMap)>, + macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>, + exp_map: Arc, +} + +pub use mbe::Origin; +use parser::FragmentKind; + +impl ExpansionInfo { + pub fn call_node(&self) -> Option> { + Some(self.arg.with_value(self.arg.value.parent()?)) + } + + pub fn map_token_down(&self, token: InFile<&SyntaxToken>) -> Option> { + assert_eq!(token.file_id, self.arg.file_id); + let range = token.value.text_range().checked_sub(self.arg.value.text_range().start())?; + let token_id = self.macro_arg.1.token_by_range(range)?; + let token_id = self.macro_def.0.map_id_down(token_id); + + let range = self.exp_map.range_by_token(token_id)?.by_kind(token.value.kind())?; + + let token = algo::find_covering_element(&self.expanded.value, range).into_token()?; + + Some(self.expanded.with_value(token)) + } + + pub fn map_token_up( + &self, + token: InFile<&SyntaxToken>, + ) -> Option<(InFile, Origin)> { + let token_id = self.exp_map.token_by_range(token.value.text_range())?; + + let (token_id, origin) = self.macro_def.0.map_id_up(token_id); + let (token_map, tt) = match origin { + mbe::Origin::Call => (&self.macro_arg.1, self.arg.clone()), + mbe::Origin::Def => { + (&self.macro_def.1, self.def.as_ref().map(|tt| tt.syntax().clone())) + } + }; + + let range = token_map.range_by_token(token_id)?.by_kind(token.value.kind())?; + let token = algo::find_covering_element(&tt.value, range + tt.value.text_range().start()) + .into_token()?; + Some((tt.with_value(token), origin)) + } +} + +/// `AstId` points to an AST node in any file. +/// +/// It is stable across reparses, and can be used as salsa key/value. +// FIXME: isn't this just a `Source>` ? 
+pub type AstId = InFile>; + +impl AstId { + pub fn to_node(&self, db: &dyn db::AstDatabase) -> N { + let root = db.parse_or_expand(self.file_id).unwrap(); + db.ast_id_map(self.file_id).get(self.value).to_node(&root) + } +} + +/// `InFile` stores a value of `T` inside a particular file/syntax tree. +/// +/// Typical usages are: +/// +/// * `InFile` -- syntax node in a file +/// * `InFile` -- ast node in a file +/// * `InFile` -- offset in a file +#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] +pub struct InFile { + pub file_id: HirFileId, + pub value: T, +} + +impl InFile { + pub fn new(file_id: HirFileId, value: T) -> InFile { + InFile { file_id, value } + } + + // Similarly, naming here is stupid... + pub fn with_value(&self, value: U) -> InFile { + InFile::new(self.file_id, value) + } + + pub fn map U, U>(self, f: F) -> InFile { + InFile::new(self.file_id, f(self.value)) + } + pub fn as_ref(&self) -> InFile<&T> { + self.with_value(&self.value) + } + pub fn file_syntax(&self, db: &dyn db::AstDatabase) -> SyntaxNode { + db.parse_or_expand(self.file_id).expect("source created from invalid file") + } +} + +impl InFile<&T> { + pub fn cloned(&self) -> InFile { + self.with_value(self.value.clone()) + } +} + +impl InFile> { + pub fn transpose(self) -> Option> { + let value = self.value?; + Some(InFile::new(self.file_id, value)) + } +} + +impl InFile { + pub fn ancestors_with_macros( + self, + db: &dyn db::AstDatabase, + ) -> impl Iterator> + '_ { + std::iter::successors(Some(self), move |node| match node.value.parent() { + Some(parent) => Some(node.with_value(parent)), + None => { + let parent_node = node.file_id.call_node(db)?; + Some(parent_node) + } + }) + } +} + +impl InFile { + pub fn ancestors_with_macros( + self, + db: &dyn db::AstDatabase, + ) -> impl Iterator> + '_ { + self.map(|it| it.parent()).ancestors_with_macros(db) + } +} + +impl InFile { + pub fn descendants(self) -> impl Iterator> { + self.value.syntax().descendants().filter_map(T::cast).map(move 
|n| self.with_value(n)) + } + + pub fn syntax(&self) -> InFile<&SyntaxNode> { + self.with_value(self.value.syntax()) + } +} diff --git a/crates/hir_expand/src/name.rs b/crates/hir_expand/src/name.rs new file mode 100644 index 0000000000..49841c7a12 --- /dev/null +++ b/crates/hir_expand/src/name.rs @@ -0,0 +1,230 @@ +//! FIXME: write short doc here + +use std::fmt; + +use syntax::{ast, SmolStr}; + +/// `Name` is a wrapper around string, which is used in hir for both references +/// and declarations. In theory, names should also carry hygiene info, but we are +/// not there yet! +#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub struct Name(Repr); + +#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] +enum Repr { + Text(SmolStr), + TupleField(usize), +} + +impl fmt::Display for Name { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match &self.0 { + Repr::Text(text) => fmt::Display::fmt(&text, f), + Repr::TupleField(idx) => fmt::Display::fmt(&idx, f), + } + } +} + +impl Name { + /// Note: this is private to make creating name from random string hard. + /// Hopefully, this should allow us to integrate hygiene cleaner in the + /// future, and to switch to interned representation of names. + const fn new_text(text: SmolStr) -> Name { + Name(Repr::Text(text)) + } + + pub fn new_tuple_field(idx: usize) -> Name { + Name(Repr::TupleField(idx)) + } + + pub fn new_lifetime(lt: &syntax::SyntaxToken) -> Name { + assert!(lt.kind() == syntax::SyntaxKind::LIFETIME); + Name(Repr::Text(lt.text().clone())) + } + + /// Shortcut to create inline plain text name + const fn new_inline_ascii(text: &[u8]) -> Name { + Name::new_text(SmolStr::new_inline_from_ascii(text.len(), text)) + } + + /// Resolve a name from the text of token. 
+ fn resolve(raw_text: &SmolStr) -> Name { + let raw_start = "r#"; + if raw_text.as_str().starts_with(raw_start) { + Name::new_text(SmolStr::new(&raw_text[raw_start.len()..])) + } else { + Name::new_text(raw_text.clone()) + } + } + + pub fn missing() -> Name { + Name::new_text("[missing name]".into()) + } + + pub fn as_tuple_index(&self) -> Option { + match self.0 { + Repr::TupleField(idx) => Some(idx), + _ => None, + } + } +} + +pub trait AsName { + fn as_name(&self) -> Name; +} + +impl AsName for ast::NameRef { + fn as_name(&self) -> Name { + match self.as_tuple_field() { + Some(idx) => Name::new_tuple_field(idx), + None => Name::resolve(self.text()), + } + } +} + +impl AsName for ast::Name { + fn as_name(&self) -> Name { + Name::resolve(self.text()) + } +} + +impl AsName for ast::NameOrNameRef { + fn as_name(&self) -> Name { + match self { + ast::NameOrNameRef::Name(it) => it.as_name(), + ast::NameOrNameRef::NameRef(it) => it.as_name(), + } + } +} + +impl AsName for tt::Ident { + fn as_name(&self) -> Name { + Name::resolve(&self.text) + } +} + +impl AsName for ast::FieldKind { + fn as_name(&self) -> Name { + match self { + ast::FieldKind::Name(nr) => nr.as_name(), + ast::FieldKind::Index(idx) => { + let idx = idx.text().parse::().unwrap_or(0); + Name::new_tuple_field(idx) + } + } + } +} + +impl AsName for base_db::Dependency { + fn as_name(&self) -> Name { + Name::new_text(SmolStr::new(&*self.name)) + } +} + +pub mod known { + macro_rules! known_names { + ($($ident:ident),* $(,)?) 
=> { + $( + #[allow(bad_style)] + pub const $ident: super::Name = + super::Name::new_inline_ascii(stringify!($ident).as_bytes()); + )* + }; + } + + known_names!( + // Primitives + isize, + i8, + i16, + i32, + i64, + i128, + usize, + u8, + u16, + u32, + u64, + u128, + f32, + f64, + bool, + char, + str, + // Special names + macro_rules, + doc, + // Components of known path (value or mod name) + std, + core, + alloc, + iter, + ops, + future, + result, + boxed, + // Components of known path (type name) + IntoIterator, + Item, + Try, + Ok, + Future, + Result, + Output, + Target, + Box, + RangeFrom, + RangeFull, + RangeInclusive, + RangeToInclusive, + RangeTo, + Range, + Neg, + Not, + Index, + // Builtin macros + file, + column, + compile_error, + line, + assert, + stringify, + concat, + include, + include_bytes, + include_str, + format_args, + format_args_nl, + env, + option_env, + // Builtin derives + Copy, + Clone, + Default, + Debug, + Hash, + Ord, + PartialOrd, + Eq, + PartialEq, + ); + + // self/Self cannot be used as an identifier + pub const SELF_PARAM: super::Name = super::Name::new_inline_ascii(b"self"); + pub const SELF_TYPE: super::Name = super::Name::new_inline_ascii(b"Self"); + + #[macro_export] + macro_rules! name { + (self) => { + $crate::name::known::SELF_PARAM + }; + (Self) => { + $crate::name::known::SELF_TYPE + }; + ($ident:ident) => { + $crate::name::known::$ident + }; + } +} + +pub use crate::name; diff --git a/crates/hir_expand/src/proc_macro.rs b/crates/hir_expand/src/proc_macro.rs new file mode 100644 index 0000000000..80255ea327 --- /dev/null +++ b/crates/hir_expand/src/proc_macro.rs @@ -0,0 +1,143 @@ +//! Proc Macro Expander stub + +use crate::{db::AstDatabase, LazyMacroId}; +use base_db::{CrateId, ProcMacroId}; +use tt::buffer::{Cursor, TokenBuffer}; + +#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] +pub struct ProcMacroExpander { + krate: CrateId, + proc_macro_id: ProcMacroId, +} + +macro_rules! 
err { + ($fmt:literal, $($tt:tt),*) => { + mbe::ExpandError::ProcMacroError(tt::ExpansionError::Unknown(format!($fmt, $($tt),*))) + }; + ($fmt:literal) => { + mbe::ExpandError::ProcMacroError(tt::ExpansionError::Unknown($fmt.to_string())) + } +} + +impl ProcMacroExpander { + pub fn new(krate: CrateId, proc_macro_id: ProcMacroId) -> ProcMacroExpander { + ProcMacroExpander { krate, proc_macro_id } + } + + pub fn expand( + self, + db: &dyn AstDatabase, + _id: LazyMacroId, + tt: &tt::Subtree, + ) -> Result { + let krate_graph = db.crate_graph(); + let proc_macro = krate_graph[self.krate] + .proc_macro + .get(self.proc_macro_id.0 as usize) + .clone() + .ok_or_else(|| err!("No derive macro found."))?; + + let tt = remove_derive_attrs(tt) + .ok_or_else(|| err!("Fail to remove derive for custom derive"))?; + + proc_macro.expander.expand(&tt, None).map_err(mbe::ExpandError::from) + } +} + +fn eat_punct(cursor: &mut Cursor, c: char) -> bool { + if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = cursor.token_tree() { + if punct.char == c { + *cursor = cursor.bump(); + return true; + } + } + false +} + +fn eat_subtree(cursor: &mut Cursor, kind: tt::DelimiterKind) -> bool { + if let Some(tt::TokenTree::Subtree(subtree)) = cursor.token_tree() { + if Some(kind) == subtree.delimiter_kind() { + *cursor = cursor.bump_subtree(); + return true; + } + } + false +} + +fn eat_ident(cursor: &mut Cursor, t: &str) -> bool { + if let Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) = cursor.token_tree() { + if t == ident.text.as_str() { + *cursor = cursor.bump(); + return true; + } + } + false +} + +fn remove_derive_attrs(tt: &tt::Subtree) -> Option { + let buffer = TokenBuffer::new(&tt.token_trees); + let mut p = buffer.begin(); + let mut result = tt::Subtree::default(); + + while !p.eof() { + let curr = p; + + if eat_punct(&mut p, '#') { + eat_punct(&mut p, '!'); + let parent = p; + if eat_subtree(&mut p, tt::DelimiterKind::Bracket) { + if eat_ident(&mut p, "derive") { + p = 
parent.bump(); + continue; + } + } + } + + result.token_trees.push(curr.token_tree()?.clone()); + p = curr.bump(); + } + + Some(result) +} + +#[cfg(test)] +mod tests { + use super::*; + use test_utils::assert_eq_text; + + #[test] + fn test_remove_derive_attrs() { + let tt = mbe::parse_to_token_tree( + r#" + #[allow(unused)] + #[derive(Copy)] + #[derive(Hello)] + struct A { + bar: u32 + } +"#, + ) + .unwrap() + .0; + let result = format!("{:#?}", remove_derive_attrs(&tt).unwrap()); + + assert_eq_text!( + &result, + r#" +SUBTREE $ + PUNCH # [alone] 0 + SUBTREE [] 1 + IDENT allow 2 + SUBTREE () 3 + IDENT unused 4 + IDENT struct 15 + IDENT A 16 + SUBTREE {} 17 + IDENT bar 18 + PUNCH : [alone] 19 + IDENT u32 20 +"# + .trim() + ); + } +} diff --git a/crates/ra_hir_expand/src/quote.rs b/crates/hir_expand/src/quote.rs similarity index 100% rename from crates/ra_hir_expand/src/quote.rs rename to crates/hir_expand/src/quote.rs diff --git a/crates/hir_expand/src/test_db.rs b/crates/hir_expand/src/test_db.rs new file mode 100644 index 0000000000..86a5d867e6 --- /dev/null +++ b/crates/hir_expand/src/test_db.rs @@ -0,0 +1,49 @@ +//! Database used for testing `hir_expand`. 
+ +use std::{ + fmt, panic, + sync::{Arc, Mutex}, +}; + +use base_db::{salsa, CrateId, FileId, FileLoader, FileLoaderDelegate}; +use rustc_hash::FxHashSet; + +#[salsa::database( + base_db::SourceDatabaseExtStorage, + base_db::SourceDatabaseStorage, + crate::db::AstDatabaseStorage +)] +#[derive(Default)] +pub struct TestDB { + storage: salsa::Storage, + events: Mutex>>, +} + +impl fmt::Debug for TestDB { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("TestDB").finish() + } +} + +impl salsa::Database for TestDB { + fn salsa_event(&self, event: salsa::Event) { + let mut events = self.events.lock().unwrap(); + if let Some(events) = &mut *events { + events.push(event); + } + } +} + +impl panic::RefUnwindSafe for TestDB {} + +impl FileLoader for TestDB { + fn file_text(&self, file_id: FileId) -> Arc { + FileLoaderDelegate(self).file_text(file_id) + } + fn resolve_path(&self, anchor: FileId, path: &str) -> Option { + FileLoaderDelegate(self).resolve_path(anchor, path) + } + fn relevant_crates(&self, file_id: FileId) -> Arc> { + FileLoaderDelegate(self).relevant_crates(file_id) + } +} diff --git a/crates/hir_ty/Cargo.toml b/crates/hir_ty/Cargo.toml new file mode 100644 index 0000000000..83b5013a90 --- /dev/null +++ b/crates/hir_ty/Cargo.toml @@ -0,0 +1,37 @@ +[package] +name = "hir_ty" +version = "0.0.0" +license = "MIT OR Apache-2.0" +authors = ["rust-analyzer developers"] +edition = "2018" + +[lib] +doctest = false + +[dependencies] +itertools = "0.9.0" +arrayvec = "0.5.1" +smallvec = "1.2.0" +ena = "0.14.0" +log = "0.4.8" +rustc-hash = "1.1.0" +scoped-tls = "1" +chalk-solve = { version = "0.21.0" } +chalk-ir = { version = "0.21.0" } +chalk-recursive = { version = "0.21.0" } + +stdx = { path = "../stdx" } +hir_def = { path = "../hir_def" } +hir_expand = { path = "../hir_expand" } +arena = { path = "../arena" } +base_db = { path = "../base_db" } +profile = { path = "../profile" } +syntax = { path = "../syntax" } +test_utils = { path = 
"../test_utils" } + +[dev-dependencies] +tracing = "0.1" +tracing-subscriber = { version = "0.2", default-features = false, features = ["env-filter", "registry"] } +tracing-tree = { version = "0.1.4" } + +expect = { path = "../expect" } diff --git a/crates/hir_ty/src/autoderef.rs b/crates/hir_ty/src/autoderef.rs new file mode 100644 index 0000000000..ece68183e7 --- /dev/null +++ b/crates/hir_ty/src/autoderef.rs @@ -0,0 +1,131 @@ +//! In certain situations, rust automatically inserts derefs as necessary: for +//! example, field accesses `foo.bar` still work when `foo` is actually a +//! reference to a type with the field `bar`. This is an approximation of the +//! logic in rustc (which lives in librustc_typeck/check/autoderef.rs). + +use std::iter::successors; + +use base_db::CrateId; +use hir_def::lang_item::LangItemTarget; +use hir_expand::name::name; +use log::{info, warn}; + +use crate::{ + db::HirDatabase, + traits::{InEnvironment, Solution}, + utils::generics, + BoundVar, Canonical, DebruijnIndex, Obligation, Substs, TraitRef, Ty, +}; + +const AUTODEREF_RECURSION_LIMIT: usize = 10; + +pub fn autoderef<'a>( + db: &'a dyn HirDatabase, + krate: Option, + ty: InEnvironment>, +) -> impl Iterator> + 'a { + let InEnvironment { value: ty, environment } = ty; + successors(Some(ty), move |ty| { + deref(db, krate?, InEnvironment { value: ty, environment: environment.clone() }) + }) + .take(AUTODEREF_RECURSION_LIMIT) +} + +pub(crate) fn deref( + db: &dyn HirDatabase, + krate: CrateId, + ty: InEnvironment<&Canonical>, +) -> Option> { + if let Some(derefed) = ty.value.value.builtin_deref() { + Some(Canonical { value: derefed, kinds: ty.value.kinds.clone() }) + } else { + deref_by_trait(db, krate, ty) + } +} + +fn deref_by_trait( + db: &dyn HirDatabase, + krate: CrateId, + ty: InEnvironment<&Canonical>, +) -> Option> { + let deref_trait = match db.lang_item(krate, "deref".into())? 
{ + LangItemTarget::TraitId(it) => it, + _ => return None, + }; + let target = db.trait_data(deref_trait).associated_type_by_name(&name![Target])?; + + let generic_params = generics(db.upcast(), target.into()); + if generic_params.len() != 1 { + // the Target type + Deref trait should only have one generic parameter, + // namely Deref's Self type + return None; + } + + // FIXME make the Canonical / bound var handling nicer + + let parameters = + Substs::build_for_generics(&generic_params).push(ty.value.value.clone()).build(); + + // Check that the type implements Deref at all + let trait_ref = TraitRef { trait_: deref_trait, substs: parameters.clone() }; + let implements_goal = Canonical { + kinds: ty.value.kinds.clone(), + value: InEnvironment { + value: Obligation::Trait(trait_ref), + environment: ty.environment.clone(), + }, + }; + if db.trait_solve(krate, implements_goal).is_none() { + return None; + } + + // Now do the assoc type projection + let projection = super::traits::ProjectionPredicate { + ty: Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, ty.value.kinds.len())), + projection_ty: super::ProjectionTy { associated_ty: target, parameters }, + }; + + let obligation = super::Obligation::Projection(projection); + + let in_env = InEnvironment { value: obligation, environment: ty.environment }; + + let canonical = + Canonical::new(in_env, ty.value.kinds.iter().copied().chain(Some(super::TyKind::General))); + + let solution = db.trait_solve(krate, canonical)?; + + match &solution { + Solution::Unique(vars) => { + // FIXME: vars may contain solutions for any inference variables + // that happened to be inside ty. To correctly handle these, we + // would have to pass the solution up to the inference context, but + // that requires a larger refactoring (especially if the deref + // happens during method resolution). 
So for the moment, we just + // check that we're not in the situation we're we would actually + // need to handle the values of the additional variables, i.e. + // they're just being 'passed through'. In the 'standard' case where + // we have `impl Deref for Foo { Target = T }`, that should be + // the case. + + // FIXME: if the trait solver decides to truncate the type, these + // assumptions will be broken. We would need to properly introduce + // new variables in that case + + for i in 1..vars.0.kinds.len() { + if vars.0.value[i - 1] != Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, i - 1)) + { + warn!("complex solution for derefing {:?}: {:?}, ignoring", ty.value, solution); + return None; + } + } + Some(Canonical { + value: vars.0.value[vars.0.value.len() - 1].clone(), + kinds: vars.0.kinds.clone(), + }) + } + Solution::Ambig(_) => { + info!("Ambiguous solution for derefing {:?}: {:?}", ty.value, solution); + None + } + } +} diff --git a/crates/hir_ty/src/db.rs b/crates/hir_ty/src/db.rs new file mode 100644 index 0000000000..25cf9eb7f1 --- /dev/null +++ b/crates/hir_ty/src/db.rs @@ -0,0 +1,158 @@ +//! 
FIXME: write short doc here + +use std::sync::Arc; + +use arena::map::ArenaMap; +use base_db::{impl_intern_key, salsa, CrateId, Upcast}; +use hir_def::{ + db::DefDatabase, expr::ExprId, DefWithBodyId, FunctionId, GenericDefId, ImplId, LocalFieldId, + TypeParamId, VariantId, +}; + +use crate::{ + method_resolution::{InherentImpls, TraitImpls}, + traits::chalk, + Binders, CallableDefId, GenericPredicate, InferenceResult, OpaqueTyId, PolyFnSig, + ReturnTypeImplTraits, TraitRef, Ty, TyDefId, ValueTyDefId, +}; +use hir_expand::name::Name; + +#[salsa::query_group(HirDatabaseStorage)] +pub trait HirDatabase: DefDatabase + Upcast { + #[salsa::invoke(infer_wait)] + #[salsa::transparent] + fn infer(&self, def: DefWithBodyId) -> Arc; + + #[salsa::invoke(crate::infer::infer_query)] + fn infer_query(&self, def: DefWithBodyId) -> Arc; + + #[salsa::invoke(crate::lower::ty_query)] + #[salsa::cycle(crate::lower::ty_recover)] + fn ty(&self, def: TyDefId) -> Binders; + + #[salsa::invoke(crate::lower::value_ty_query)] + fn value_ty(&self, def: ValueTyDefId) -> Binders; + + #[salsa::invoke(crate::lower::impl_self_ty_query)] + #[salsa::cycle(crate::lower::impl_self_ty_recover)] + fn impl_self_ty(&self, def: ImplId) -> Binders; + + #[salsa::invoke(crate::lower::impl_trait_query)] + fn impl_trait(&self, def: ImplId) -> Option>; + + #[salsa::invoke(crate::lower::field_types_query)] + fn field_types(&self, var: VariantId) -> Arc>>; + + #[salsa::invoke(crate::callable_item_sig)] + fn callable_item_signature(&self, def: CallableDefId) -> PolyFnSig; + + #[salsa::invoke(crate::lower::return_type_impl_traits)] + fn return_type_impl_traits( + &self, + def: FunctionId, + ) -> Option>>; + + #[salsa::invoke(crate::lower::generic_predicates_for_param_query)] + #[salsa::cycle(crate::lower::generic_predicates_for_param_recover)] + fn generic_predicates_for_param( + &self, + param_id: TypeParamId, + ) -> Arc<[Binders]>; + + #[salsa::invoke(crate::lower::generic_predicates_query)] + fn 
generic_predicates(&self, def: GenericDefId) -> Arc<[Binders]>; + + #[salsa::invoke(crate::lower::generic_defaults_query)] + fn generic_defaults(&self, def: GenericDefId) -> Arc<[Binders]>; + + #[salsa::invoke(InherentImpls::inherent_impls_in_crate_query)] + fn inherent_impls_in_crate(&self, krate: CrateId) -> Arc; + + #[salsa::invoke(TraitImpls::trait_impls_in_crate_query)] + fn trait_impls_in_crate(&self, krate: CrateId) -> Arc; + + #[salsa::invoke(TraitImpls::trait_impls_in_deps_query)] + fn trait_impls_in_deps(&self, krate: CrateId) -> Arc; + + // Interned IDs for Chalk integration + #[salsa::interned] + fn intern_callable_def(&self, callable_def: CallableDefId) -> InternedCallableDefId; + #[salsa::interned] + fn intern_type_param_id(&self, param_id: TypeParamId) -> GlobalTypeParamId; + #[salsa::interned] + fn intern_impl_trait_id(&self, id: OpaqueTyId) -> InternedOpaqueTyId; + #[salsa::interned] + fn intern_closure(&self, id: (DefWithBodyId, ExprId)) -> ClosureId; + + #[salsa::invoke(chalk::associated_ty_data_query)] + fn associated_ty_data(&self, id: chalk::AssocTypeId) -> Arc; + + #[salsa::invoke(chalk::trait_datum_query)] + fn trait_datum(&self, krate: CrateId, trait_id: chalk::TraitId) -> Arc; + + #[salsa::invoke(chalk::struct_datum_query)] + fn struct_datum(&self, krate: CrateId, struct_id: chalk::AdtId) -> Arc; + + #[salsa::invoke(crate::traits::chalk::impl_datum_query)] + fn impl_datum(&self, krate: CrateId, impl_id: chalk::ImplId) -> Arc; + + #[salsa::invoke(crate::traits::chalk::fn_def_datum_query)] + fn fn_def_datum(&self, krate: CrateId, fn_def_id: chalk::FnDefId) -> Arc; + + #[salsa::invoke(crate::traits::chalk::associated_ty_value_query)] + fn associated_ty_value( + &self, + krate: CrateId, + id: chalk::AssociatedTyValueId, + ) -> Arc; + + #[salsa::invoke(crate::traits::trait_solve_query)] + fn trait_solve( + &self, + krate: CrateId, + goal: crate::Canonical>, + ) -> Option; + + 
#[salsa::invoke(crate::traits::chalk::program_clauses_for_chalk_env_query)] + fn program_clauses_for_chalk_env( + &self, + krate: CrateId, + env: chalk_ir::Environment, + ) -> chalk_ir::ProgramClauses; +} + +fn infer_wait(db: &impl HirDatabase, def: DefWithBodyId) -> Arc { + let _p = profile::span("infer:wait").detail(|| match def { + DefWithBodyId::FunctionId(it) => db.function_data(it).name.to_string(), + DefWithBodyId::StaticId(it) => { + db.static_data(it).name.clone().unwrap_or_else(Name::missing).to_string() + } + DefWithBodyId::ConstId(it) => { + db.const_data(it).name.clone().unwrap_or_else(Name::missing).to_string() + } + }); + db.infer_query(def) +} + +#[test] +fn hir_database_is_object_safe() { + fn _assert_object_safe(_: &dyn HirDatabase) {} +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct GlobalTypeParamId(salsa::InternId); +impl_intern_key!(GlobalTypeParamId); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct InternedOpaqueTyId(salsa::InternId); +impl_intern_key!(InternedOpaqueTyId); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct ClosureId(salsa::InternId); +impl_intern_key!(ClosureId); + +/// This exists just for Chalk, because Chalk just has a single `FnDefId` where +/// we have different IDs for struct and enum variant constructors. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] +pub struct InternedCallableDefId(salsa::InternId); +impl_intern_key!(InternedCallableDefId); diff --git a/crates/hir_ty/src/diagnostics.rs b/crates/hir_ty/src/diagnostics.rs new file mode 100644 index 0000000000..ae0cf8d09b --- /dev/null +++ b/crates/hir_ty/src/diagnostics.rs @@ -0,0 +1,444 @@ +//! 
FIXME: write short doc here +mod expr; +mod match_check; +mod unsafe_check; + +use std::any::Any; + +use hir_def::DefWithBodyId; +use hir_expand::diagnostics::{Diagnostic, DiagnosticSink}; +use hir_expand::{name::Name, HirFileId, InFile}; +use stdx::format_to; +use syntax::{ast, AstPtr, SyntaxNodePtr}; + +use crate::db::HirDatabase; + +pub use crate::diagnostics::expr::{record_literal_missing_fields, record_pattern_missing_fields}; + +pub fn validate_body(db: &dyn HirDatabase, owner: DefWithBodyId, sink: &mut DiagnosticSink<'_>) { + let _p = profile::span("validate_body"); + let infer = db.infer(owner); + infer.add_diagnostics(db, owner, sink); + let mut validator = expr::ExprValidator::new(owner, infer.clone(), sink); + validator.validate_body(db); + let mut validator = unsafe_check::UnsafeValidator::new(owner, infer, sink); + validator.validate_body(db); +} + +#[derive(Debug)] +pub struct NoSuchField { + pub file: HirFileId, + pub field: AstPtr, +} + +impl Diagnostic for NoSuchField { + fn message(&self) -> String { + "no such field".to_string() + } + + fn display_source(&self) -> InFile { + InFile::new(self.file, self.field.clone().into()) + } + + fn as_any(&self) -> &(dyn Any + Send + 'static) { + self + } +} + +#[derive(Debug)] +pub struct MissingFields { + pub file: HirFileId, + pub field_list_parent: AstPtr, + pub field_list_parent_path: Option>, + pub missed_fields: Vec, +} + +impl Diagnostic for MissingFields { + fn message(&self) -> String { + let mut buf = String::from("Missing structure fields:\n"); + for field in &self.missed_fields { + format_to!(buf, "- {}\n", field); + } + buf + } + + fn display_source(&self) -> InFile { + InFile { + file_id: self.file, + value: self + .field_list_parent_path + .clone() + .map(SyntaxNodePtr::from) + .unwrap_or_else(|| self.field_list_parent.clone().into()), + } + } + + fn as_any(&self) -> &(dyn Any + Send + 'static) { + self + } +} + +#[derive(Debug)] +pub struct MissingPatFields { + pub file: HirFileId, + pub 
field_list_parent: AstPtr, + pub field_list_parent_path: Option>, + pub missed_fields: Vec, +} + +impl Diagnostic for MissingPatFields { + fn message(&self) -> String { + let mut buf = String::from("Missing structure fields:\n"); + for field in &self.missed_fields { + format_to!(buf, "- {}\n", field); + } + buf + } + fn display_source(&self) -> InFile { + InFile { + file_id: self.file, + value: self + .field_list_parent_path + .clone() + .map(SyntaxNodePtr::from) + .unwrap_or_else(|| self.field_list_parent.clone().into()), + } + } + fn as_any(&self) -> &(dyn Any + Send + 'static) { + self + } +} + +#[derive(Debug)] +pub struct MissingMatchArms { + pub file: HirFileId, + pub match_expr: AstPtr, + pub arms: AstPtr, +} + +impl Diagnostic for MissingMatchArms { + fn message(&self) -> String { + String::from("Missing match arm") + } + fn display_source(&self) -> InFile { + InFile { file_id: self.file, value: self.match_expr.clone().into() } + } + fn as_any(&self) -> &(dyn Any + Send + 'static) { + self + } +} + +#[derive(Debug)] +pub struct MissingOkInTailExpr { + pub file: HirFileId, + pub expr: AstPtr, +} + +impl Diagnostic for MissingOkInTailExpr { + fn message(&self) -> String { + "wrap return expression in Ok".to_string() + } + fn display_source(&self) -> InFile { + InFile { file_id: self.file, value: self.expr.clone().into() } + } + fn as_any(&self) -> &(dyn Any + Send + 'static) { + self + } +} + +#[derive(Debug)] +pub struct BreakOutsideOfLoop { + pub file: HirFileId, + pub expr: AstPtr, +} + +impl Diagnostic for BreakOutsideOfLoop { + fn message(&self) -> String { + "break outside of loop".to_string() + } + fn display_source(&self) -> InFile { + InFile { file_id: self.file, value: self.expr.clone().into() } + } + fn as_any(&self) -> &(dyn Any + Send + 'static) { + self + } +} + +#[derive(Debug)] +pub struct MissingUnsafe { + pub file: HirFileId, + pub expr: AstPtr, +} + +impl Diagnostic for MissingUnsafe { + fn message(&self) -> String { + format!("This 
operation is unsafe and requires an unsafe function or block") + } + fn display_source(&self) -> InFile { + InFile { file_id: self.file, value: self.expr.clone().into() } + } + fn as_any(&self) -> &(dyn Any + Send + 'static) { + self + } +} + +#[derive(Debug)] +pub struct MismatchedArgCount { + pub file: HirFileId, + pub call_expr: AstPtr, + pub expected: usize, + pub found: usize, +} + +impl Diagnostic for MismatchedArgCount { + fn message(&self) -> String { + let s = if self.expected == 1 { "" } else { "s" }; + format!("Expected {} argument{}, found {}", self.expected, s, self.found) + } + fn display_source(&self) -> InFile { + InFile { file_id: self.file, value: self.call_expr.clone().into() } + } + fn as_any(&self) -> &(dyn Any + Send + 'static) { + self + } + fn is_experimental(&self) -> bool { + true + } +} + +#[cfg(test)] +mod tests { + use base_db::{fixture::WithFixture, FileId, SourceDatabase, SourceDatabaseExt}; + use hir_def::{db::DefDatabase, AssocItemId, ModuleDefId}; + use hir_expand::{ + db::AstDatabase, + diagnostics::{Diagnostic, DiagnosticSinkBuilder}, + }; + use rustc_hash::FxHashMap; + use syntax::{TextRange, TextSize}; + + use crate::{diagnostics::validate_body, test_db::TestDB}; + + impl TestDB { + fn diagnostics(&self, mut cb: F) { + let crate_graph = self.crate_graph(); + for krate in crate_graph.iter() { + let crate_def_map = self.crate_def_map(krate); + + let mut fns = Vec::new(); + for (module_id, _) in crate_def_map.modules.iter() { + for decl in crate_def_map[module_id].scope.declarations() { + if let ModuleDefId::FunctionId(f) = decl { + fns.push(f) + } + } + + for impl_id in crate_def_map[module_id].scope.impls() { + let impl_data = self.impl_data(impl_id); + for item in impl_data.items.iter() { + if let AssocItemId::FunctionId(f) = item { + fns.push(*f) + } + } + } + } + + for f in fns { + let mut sink = DiagnosticSinkBuilder::new().build(&mut cb); + validate_body(self, f.into(), &mut sink); + } + } + } + } + + pub(crate) fn 
check_diagnostics(ra_fixture: &str) { + let db = TestDB::with_files(ra_fixture); + let annotations = db.extract_annotations(); + + let mut actual: FxHashMap> = FxHashMap::default(); + db.diagnostics(|d| { + let src = d.display_source(); + let root = db.parse_or_expand(src.file_id).unwrap(); + // FIXME: macros... + let file_id = src.file_id.original_file(&db); + let range = src.value.to_node(&root).text_range(); + let message = d.message().to_owned(); + actual.entry(file_id).or_default().push((range, message)); + }); + + for (file_id, diags) in actual.iter_mut() { + diags.sort_by_key(|it| it.0.start()); + let text = db.file_text(*file_id); + // For multiline spans, place them on line start + for (range, content) in diags { + if text[*range].contains('\n') { + *range = TextRange::new(range.start(), range.start() + TextSize::from(1)); + *content = format!("... {}", content); + } + } + } + + assert_eq!(annotations, actual); + } + + #[test] + fn no_such_field_diagnostics() { + check_diagnostics( + r#" +struct S { foo: i32, bar: () } +impl S { + fn new() -> S { + S { + //^ Missing structure fields: + //| - bar + foo: 92, + baz: 62, + //^^^^^^^ no such field + } + } +} +"#, + ); + } + #[test] + fn no_such_field_with_feature_flag_diagnostics() { + check_diagnostics( + r#" +//- /lib.rs crate:foo cfg:feature=foo +struct MyStruct { + my_val: usize, + #[cfg(feature = "foo")] + bar: bool, +} + +impl MyStruct { + #[cfg(feature = "foo")] + pub(crate) fn new(my_val: usize, bar: bool) -> Self { + Self { my_val, bar } + } + #[cfg(not(feature = "foo"))] + pub(crate) fn new(my_val: usize, _bar: bool) -> Self { + Self { my_val } + } +} +"#, + ); + } + + #[test] + fn no_such_field_enum_with_feature_flag_diagnostics() { + check_diagnostics( + r#" +//- /lib.rs crate:foo cfg:feature=foo +enum Foo { + #[cfg(not(feature = "foo"))] + Buz, + #[cfg(feature = "foo")] + Bar, + Baz +} + +fn test_fn(f: Foo) { + match f { + Foo::Bar => {}, + Foo::Baz => {}, + } +} +"#, + ); + } + + #[test] + fn 
no_such_field_with_feature_flag_diagnostics_on_struct_lit() { + check_diagnostics( + r#" +//- /lib.rs crate:foo cfg:feature=foo +struct S { + #[cfg(feature = "foo")] + foo: u32, + #[cfg(not(feature = "foo"))] + bar: u32, +} + +impl S { + #[cfg(feature = "foo")] + fn new(foo: u32) -> Self { + Self { foo } + } + #[cfg(not(feature = "foo"))] + fn new(bar: u32) -> Self { + Self { bar } + } + fn new2(bar: u32) -> Self { + #[cfg(feature = "foo")] + { Self { foo: bar } } + #[cfg(not(feature = "foo"))] + { Self { bar } } + } + fn new2(val: u32) -> Self { + Self { + #[cfg(feature = "foo")] + foo: val, + #[cfg(not(feature = "foo"))] + bar: val, + } + } +} +"#, + ); + } + + #[test] + fn no_such_field_with_type_macro() { + check_diagnostics( + r#" +macro_rules! Type { () => { u32 }; } +struct Foo { bar: Type![] } + +impl Foo { + fn new() -> Self { + Foo { bar: 0 } + } +} +"#, + ); + } + + #[test] + fn missing_record_pat_field_diagnostic() { + check_diagnostics( + r#" +struct S { foo: i32, bar: () } +fn baz(s: S) { + let S { foo: _ } = s; + //^ Missing structure fields: + //| - bar +} +"#, + ); + } + + #[test] + fn missing_record_pat_field_no_diagnostic_if_not_exhaustive() { + check_diagnostics( + r" +struct S { foo: i32, bar: () } +fn baz(s: S) -> i32 { + match s { + S { foo, .. } => foo, + } +} +", + ) + } + + #[test] + fn break_outside_of_loop() { + check_diagnostics( + r#" +fn foo() { break; } + //^^^^^ break outside of loop +"#, + ); + } +} diff --git a/crates/hir_ty/src/diagnostics/expr.rs b/crates/hir_ty/src/diagnostics/expr.rs new file mode 100644 index 0000000000..fb76e2e4ec --- /dev/null +++ b/crates/hir_ty/src/diagnostics/expr.rs @@ -0,0 +1,569 @@ +//! 
FIXME: write short doc here + +use std::sync::Arc; + +use hir_def::{path::path, resolver::HasResolver, AdtId, DefWithBodyId}; +use hir_expand::diagnostics::DiagnosticSink; +use rustc_hash::FxHashSet; +use syntax::{ast, AstPtr}; + +use crate::{ + db::HirDatabase, + diagnostics::{ + match_check::{is_useful, MatchCheckCtx, Matrix, PatStack, Usefulness}, + MismatchedArgCount, MissingFields, MissingMatchArms, MissingOkInTailExpr, MissingPatFields, + }, + utils::variant_data, + ApplicationTy, InferenceResult, Ty, TypeCtor, +}; + +pub use hir_def::{ + body::{ + scope::{ExprScopes, ScopeEntry, ScopeId}, + Body, BodySourceMap, ExprPtr, ExprSource, PatPtr, PatSource, + }, + expr::{ + ArithOp, Array, BinaryOp, BindingAnnotation, CmpOp, Expr, ExprId, Literal, LogicOp, + MatchArm, Ordering, Pat, PatId, RecordFieldPat, RecordLitField, Statement, UnaryOp, + }, + src::HasSource, + LocalFieldId, Lookup, VariantId, +}; + +pub(super) struct ExprValidator<'a, 'b: 'a> { + owner: DefWithBodyId, + infer: Arc, + sink: &'a mut DiagnosticSink<'b>, +} + +impl<'a, 'b> ExprValidator<'a, 'b> { + pub(super) fn new( + owner: DefWithBodyId, + infer: Arc, + sink: &'a mut DiagnosticSink<'b>, + ) -> ExprValidator<'a, 'b> { + ExprValidator { owner, infer, sink } + } + + pub(super) fn validate_body(&mut self, db: &dyn HirDatabase) { + let body = db.body(self.owner.into()); + + for (id, expr) in body.exprs.iter() { + if let Some((variant_def, missed_fields, true)) = + record_literal_missing_fields(db, &self.infer, id, expr) + { + self.create_record_literal_missing_fields_diagnostic( + id, + db, + variant_def, + missed_fields, + ); + } + + match expr { + Expr::Match { expr, arms } => { + self.validate_match(id, *expr, arms, db, self.infer.clone()); + } + Expr::Call { .. } | Expr::MethodCall { .. 
} => { + self.validate_call(db, id, expr); + } + _ => {} + } + } + for (id, pat) in body.pats.iter() { + if let Some((variant_def, missed_fields, true)) = + record_pattern_missing_fields(db, &self.infer, id, pat) + { + self.create_record_pattern_missing_fields_diagnostic( + id, + db, + variant_def, + missed_fields, + ); + } + } + let body_expr = &body[body.body_expr]; + if let Expr::Block { tail: Some(t), .. } = body_expr { + self.validate_results_in_tail_expr(body.body_expr, *t, db); + } + } + + fn create_record_literal_missing_fields_diagnostic( + &mut self, + id: ExprId, + db: &dyn HirDatabase, + variant_def: VariantId, + missed_fields: Vec, + ) { + // XXX: only look at source_map if we do have missing fields + let (_, source_map) = db.body_with_source_map(self.owner.into()); + + if let Ok(source_ptr) = source_map.expr_syntax(id) { + let root = source_ptr.file_syntax(db.upcast()); + if let ast::Expr::RecordExpr(record_expr) = &source_ptr.value.to_node(&root) { + if let Some(_) = record_expr.record_expr_field_list() { + let variant_data = variant_data(db.upcast(), variant_def); + let missed_fields = missed_fields + .into_iter() + .map(|idx| variant_data.fields()[idx].name.clone()) + .collect(); + self.sink.push(MissingFields { + file: source_ptr.file_id, + field_list_parent: AstPtr::new(&record_expr), + field_list_parent_path: record_expr.path().map(|path| AstPtr::new(&path)), + missed_fields, + }) + } + } + } + } + + fn create_record_pattern_missing_fields_diagnostic( + &mut self, + id: PatId, + db: &dyn HirDatabase, + variant_def: VariantId, + missed_fields: Vec, + ) { + // XXX: only look at source_map if we do have missing fields + let (_, source_map) = db.body_with_source_map(self.owner.into()); + + if let Ok(source_ptr) = source_map.pat_syntax(id) { + if let Some(expr) = source_ptr.value.as_ref().left() { + let root = source_ptr.file_syntax(db.upcast()); + if let ast::Pat::RecordPat(record_pat) = expr.to_node(&root) { + if let Some(_) = 
record_pat.record_pat_field_list() { + let variant_data = variant_data(db.upcast(), variant_def); + let missed_fields = missed_fields + .into_iter() + .map(|idx| variant_data.fields()[idx].name.clone()) + .collect(); + self.sink.push(MissingPatFields { + file: source_ptr.file_id, + field_list_parent: AstPtr::new(&record_pat), + field_list_parent_path: record_pat + .path() + .map(|path| AstPtr::new(&path)), + missed_fields, + }) + } + } + } + } + } + + fn validate_call(&mut self, db: &dyn HirDatabase, call_id: ExprId, expr: &Expr) -> Option<()> { + // Check that the number of arguments matches the number of parameters. + + // FIXME: Due to shortcomings in the current type system implementation, only emit this + // diagnostic if there are no type mismatches in the containing function. + if self.infer.type_mismatches.iter().next().is_some() { + return Some(()); + } + + let is_method_call = matches!(expr, Expr::MethodCall { .. }); + let (sig, args) = match expr { + Expr::Call { callee, args } => { + let callee = &self.infer.type_of_expr[*callee]; + let sig = callee.callable_sig(db)?; + (sig, args.clone()) + } + Expr::MethodCall { receiver, args, .. } => { + let mut args = args.clone(); + args.insert(0, *receiver); + + // FIXME: note that we erase information about substs here. 
This + // is not right, but, luckily, doesn't matter as we care only + // about the number of params + let callee = self.infer.method_resolution(call_id)?; + let sig = db.callable_item_signature(callee.into()).value; + + (sig, args) + } + _ => return None, + }; + + if sig.is_varargs { + return None; + } + + let params = sig.params(); + + let mut param_count = params.len(); + let mut arg_count = args.len(); + + if arg_count != param_count { + let (_, source_map) = db.body_with_source_map(self.owner.into()); + if let Ok(source_ptr) = source_map.expr_syntax(call_id) { + if is_method_call { + param_count -= 1; + arg_count -= 1; + } + self.sink.push(MismatchedArgCount { + file: source_ptr.file_id, + call_expr: source_ptr.value, + expected: param_count, + found: arg_count, + }); + } + } + + None + } + + fn validate_match( + &mut self, + id: ExprId, + match_expr: ExprId, + arms: &[MatchArm], + db: &dyn HirDatabase, + infer: Arc, + ) { + let (body, source_map): (Arc, Arc) = + db.body_with_source_map(self.owner.into()); + + let match_expr_ty = match infer.type_of_expr.get(match_expr) { + Some(ty) => ty, + // If we can't resolve the type of the match expression + // we cannot perform exhaustiveness checks. + None => return, + }; + + let cx = MatchCheckCtx { match_expr, body, infer: infer.clone(), db }; + let pats = arms.iter().map(|arm| arm.pat); + + let mut seen = Matrix::empty(); + for pat in pats { + if let Some(pat_ty) = infer.type_of_pat.get(pat) { + // We only include patterns whose type matches the type + // of the match expression. If we had a InvalidMatchArmPattern + // diagnostic or similar we could raise that in an else + // block here. + // + // When comparing the types, we also have to consider that rustc + // will automatically de-reference the match expression type if + // necessary. + // + // FIXME we should use the type checker for this. 
+ if pat_ty == match_expr_ty + || match_expr_ty + .as_reference() + .map(|(match_expr_ty, _)| match_expr_ty == pat_ty) + .unwrap_or(false) + { + // If we had a NotUsefulMatchArm diagnostic, we could + // check the usefulness of each pattern as we added it + // to the matrix here. + let v = PatStack::from_pattern(pat); + seen.push(&cx, v); + continue; + } + } + + // If we can't resolve the type of a pattern, or the pattern type doesn't + // fit the match expression, we skip this diagnostic. Skipping the entire + // diagnostic rather than just not including this match arm is preferred + // to avoid the chance of false positives. + return; + } + + match is_useful(&cx, &seen, &PatStack::from_wild()) { + Ok(Usefulness::Useful) => (), + // if a wildcard pattern is not useful, then all patterns are covered + Ok(Usefulness::NotUseful) => return, + // this path is for unimplemented checks, so we err on the side of not + // reporting any errors + _ => return, + } + + if let Ok(source_ptr) = source_map.expr_syntax(id) { + let root = source_ptr.file_syntax(db.upcast()); + if let ast::Expr::MatchExpr(match_expr) = &source_ptr.value.to_node(&root) { + if let (Some(match_expr), Some(arms)) = + (match_expr.expr(), match_expr.match_arm_list()) + { + self.sink.push(MissingMatchArms { + file: source_ptr.file_id, + match_expr: AstPtr::new(&match_expr), + arms: AstPtr::new(&arms), + }) + } + } + } + } + + fn validate_results_in_tail_expr(&mut self, body_id: ExprId, id: ExprId, db: &dyn HirDatabase) { + // the mismatch will be on the whole block currently + let mismatch = match self.infer.type_mismatch_for_expr(body_id) { + Some(m) => m, + None => return, + }; + + let core_result_path = path![core::result::Result]; + + let resolver = self.owner.resolver(db.upcast()); + let core_result_enum = match resolver.resolve_known_enum(db.upcast(), &core_result_path) { + Some(it) => it, + _ => return, + }; + + let core_result_ctor = TypeCtor::Adt(AdtId::EnumId(core_result_enum)); + let params = 
match &mismatch.expected { + Ty::Apply(ApplicationTy { ctor, parameters }) if ctor == &core_result_ctor => { + parameters + } + _ => return, + }; + + if params.len() == 2 && params[0] == mismatch.actual { + let (_, source_map) = db.body_with_source_map(self.owner.into()); + + if let Ok(source_ptr) = source_map.expr_syntax(id) { + self.sink + .push(MissingOkInTailExpr { file: source_ptr.file_id, expr: source_ptr.value }); + } + } + } +} + +pub fn record_literal_missing_fields( + db: &dyn HirDatabase, + infer: &InferenceResult, + id: ExprId, + expr: &Expr, +) -> Option<(VariantId, Vec, /*exhaustive*/ bool)> { + let (fields, exhausitve) = match expr { + Expr::RecordLit { path: _, fields, spread } => (fields, spread.is_none()), + _ => return None, + }; + + let variant_def = infer.variant_resolution_for_expr(id)?; + if let VariantId::UnionId(_) = variant_def { + return None; + } + + let variant_data = variant_data(db.upcast(), variant_def); + + let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect(); + let missed_fields: Vec = variant_data + .fields() + .iter() + .filter_map(|(f, d)| if specified_fields.contains(&d.name) { None } else { Some(f) }) + .collect(); + if missed_fields.is_empty() { + return None; + } + Some((variant_def, missed_fields, exhausitve)) +} + +pub fn record_pattern_missing_fields( + db: &dyn HirDatabase, + infer: &InferenceResult, + id: PatId, + pat: &Pat, +) -> Option<(VariantId, Vec, /*exhaustive*/ bool)> { + let (fields, exhaustive) = match pat { + Pat::Record { path: _, args, ellipsis } => (args, !ellipsis), + _ => return None, + }; + + let variant_def = infer.variant_resolution_for_pat(id)?; + if let VariantId::UnionId(_) = variant_def { + return None; + } + + let variant_data = variant_data(db.upcast(), variant_def); + + let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect(); + let missed_fields: Vec = variant_data + .fields() + .iter() + .filter_map(|(f, d)| if specified_fields.contains(&d.name) { 
None } else { Some(f) }) + .collect(); + if missed_fields.is_empty() { + return None; + } + Some((variant_def, missed_fields, exhaustive)) +} + +#[cfg(test)] +mod tests { + use crate::diagnostics::tests::check_diagnostics; + + #[test] + fn simple_free_fn_zero() { + check_diagnostics( + r#" +fn zero() {} +fn f() { zero(1); } + //^^^^^^^ Expected 0 arguments, found 1 +"#, + ); + + check_diagnostics( + r#" +fn zero() {} +fn f() { zero(); } +"#, + ); + } + + #[test] + fn simple_free_fn_one() { + check_diagnostics( + r#" +fn one(arg: u8) {} +fn f() { one(); } + //^^^^^ Expected 1 argument, found 0 +"#, + ); + + check_diagnostics( + r#" +fn one(arg: u8) {} +fn f() { one(1); } +"#, + ); + } + + #[test] + fn method_as_fn() { + check_diagnostics( + r#" +struct S; +impl S { fn method(&self) {} } + +fn f() { + S::method(); +} //^^^^^^^^^^^ Expected 1 argument, found 0 +"#, + ); + + check_diagnostics( + r#" +struct S; +impl S { fn method(&self) {} } + +fn f() { + S::method(&S); + S.method(); +} +"#, + ); + } + + #[test] + fn method_with_arg() { + check_diagnostics( + r#" +struct S; +impl S { fn method(&self, arg: u8) {} } + + fn f() { + S.method(); + } //^^^^^^^^^^ Expected 1 argument, found 0 + "#, + ); + + check_diagnostics( + r#" +struct S; +impl S { fn method(&self, arg: u8) {} } + +fn f() { + S::method(&S, 0); + S.method(1); +} +"#, + ); + } + + #[test] + fn tuple_struct() { + check_diagnostics( + r#" +struct Tup(u8, u16); +fn f() { + Tup(0); +} //^^^^^^ Expected 2 arguments, found 1 +"#, + ) + } + + #[test] + fn enum_variant() { + check_diagnostics( + r#" +enum En { Variant(u8, u16), } +fn f() { + En::Variant(0); +} //^^^^^^^^^^^^^^ Expected 2 arguments, found 1 +"#, + ) + } + + #[test] + fn enum_variant_type_macro() { + check_diagnostics( + r#" +macro_rules! 
Type { + () => { u32 }; +} +enum Foo { + Bar(Type![]) +} +impl Foo { + fn new() { + Foo::Bar(0); + Foo::Bar(0, 1); + //^^^^^^^^^^^^^^ Expected 1 argument, found 2 + Foo::Bar(); + //^^^^^^^^^^ Expected 1 argument, found 0 + } +} + "#, + ); + } + + #[test] + fn varargs() { + check_diagnostics( + r#" +extern "C" { + fn fixed(fixed: u8); + fn varargs(fixed: u8, ...); + fn varargs2(...); +} + +fn f() { + unsafe { + fixed(0); + fixed(0, 1); + //^^^^^^^^^^^ Expected 1 argument, found 2 + varargs(0); + varargs(0, 1); + varargs2(); + varargs2(0); + varargs2(0, 1); + } +} + "#, + ) + } + + #[test] + fn arg_count_lambda() { + check_diagnostics( + r#" +fn main() { + let f = |()| (); + f(); + //^^^ Expected 1 argument, found 0 + f(()); + f((), ()); + //^^^^^^^^^ Expected 1 argument, found 2 +} +"#, + ) + } +} diff --git a/crates/hir_ty/src/diagnostics/match_check.rs b/crates/hir_ty/src/diagnostics/match_check.rs new file mode 100644 index 0000000000..7f007f1d65 --- /dev/null +++ b/crates/hir_ty/src/diagnostics/match_check.rs @@ -0,0 +1,1421 @@ +//! This module implements match statement exhaustiveness checking and usefulness checking +//! for match arms. +//! +//! It is modeled on the rustc module `librustc_mir_build::hair::pattern::_match`, which +//! contains very detailed documentation about the algorithms used here. I've duplicated +//! most of that documentation below. +//! +//! This file includes the logic for exhaustiveness and usefulness checking for +//! pattern-matching. Specifically, given a list of patterns for a type, we can +//! tell whether: +//! - (a) the patterns cover every possible constructor for the type (exhaustiveness). +//! - (b) each pattern is necessary (usefulness). +//! +//! The algorithm implemented here is a modified version of the one described in +//! . +//! However, to save future implementors from reading the original paper, we +//! summarise the algorithm here to hopefully save time and be a little clearer +//! (without being so rigorous). 
+//! +//! The core of the algorithm revolves about a "usefulness" check. In particular, we +//! are trying to compute a predicate `U(P, p)` where `P` is a list of patterns (we refer to this as +//! a matrix). `U(P, p)` represents whether, given an existing list of patterns +//! `P_1 ..= P_m`, adding a new pattern `p` will be "useful" (that is, cover previously- +//! uncovered values of the type). +//! +//! If we have this predicate, then we can easily compute both exhaustiveness of an +//! entire set of patterns and the individual usefulness of each one. +//! (a) the set of patterns is exhaustive iff `U(P, _)` is false (i.e., adding a wildcard +//! match doesn't increase the number of values we're matching) +//! (b) a pattern `P_i` is not useful if `U(P[0..=(i-1), P_i)` is false (i.e., adding a +//! pattern to those that have come before it doesn't increase the number of values +//! we're matching). +//! +//! During the course of the algorithm, the rows of the matrix won't just be individual patterns, +//! but rather partially-deconstructed patterns in the form of a list of patterns. The paper +//! calls those pattern-vectors, and we will call them pattern-stacks. The same holds for the +//! new pattern `p`. +//! +//! For example, say we have the following: +//! +//! ```ignore +//! // x: (Option, Result<()>) +//! match x { +//! (Some(true), _) => (), +//! (None, Err(())) => (), +//! (None, Err(_)) => (), +//! } +//! ``` +//! +//! Here, the matrix `P` starts as: +//! +//! ```text +//! [ +//! [(Some(true), _)], +//! [(None, Err(()))], +//! [(None, Err(_))], +//! ] +//! ``` +//! +//! We can tell it's not exhaustive, because `U(P, _)` is true (we're not covering +//! `[(Some(false), _)]`, for instance). In addition, row 3 is not useful, because +//! all the values it covers are already covered by row 2. +//! +//! A list of patterns can be thought of as a stack, because we are mainly interested in the top of +//! 
the stack at any given point, and we can pop or apply constructors to get new pattern-stacks. +//! To match the paper, the top of the stack is at the beginning / on the left. +//! +//! There are two important operations on pattern-stacks necessary to understand the algorithm: +//! +//! 1. We can pop a given constructor off the top of a stack. This operation is called +//! `specialize`, and is denoted `S(c, p)` where `c` is a constructor (like `Some` or +//! `None`) and `p` a pattern-stack. +//! If the pattern on top of the stack can cover `c`, this removes the constructor and +//! pushes its arguments onto the stack. It also expands OR-patterns into distinct patterns. +//! Otherwise the pattern-stack is discarded. +//! This essentially filters those pattern-stacks whose top covers the constructor `c` and +//! discards the others. +//! +//! For example, the first pattern above initially gives a stack `[(Some(true), _)]`. If we +//! pop the tuple constructor, we are left with `[Some(true), _]`, and if we then pop the +//! `Some` constructor we get `[true, _]`. If we had popped `None` instead, we would get +//! nothing back. +//! +//! This returns zero or more new pattern-stacks, as follows. We look at the pattern `p_1` +//! on top of the stack, and we have four cases: +//! +//! * 1.1. `p_1 = c(r_1, .., r_a)`, i.e. the top of the stack has constructor `c`. We push onto +//! the stack the arguments of this constructor, and return the result: +//! +//! r_1, .., r_a, p_2, .., p_n +//! +//! * 1.2. `p_1 = c'(r_1, .., r_a')` where `c ≠ c'`. We discard the current stack and return +//! nothing. +//! * 1.3. `p_1 = _`. We push onto the stack as many wildcards as the constructor `c` has +//! arguments (its arity), and return the resulting stack: +//! +//! _, .., _, p_2, .., p_n +//! +//! * 1.4. `p_1 = r_1 | r_2`. We expand the OR-pattern and then recurse on each resulting stack: +//! +//! S(c, (r_1, p_2, .., p_n)) +//! S(c, (r_2, p_2, .., p_n)) +//! +//! 2. 
We can pop a wildcard off the top of the stack. This is called `D(p)`, where `p` is +//! a pattern-stack. +//! This is used when we know there are missing constructor cases, but there might be +//! existing wildcard patterns, so to check the usefulness of the matrix, we have to check +//! all its *other* components. +//! +//! It is computed as follows. We look at the pattern `p_1` on top of the stack, +//! and we have three cases: +//! * 1.1. `p_1 = c(r_1, .., r_a)`. We discard the current stack and return nothing. +//! * 1.2. `p_1 = _`. We return the rest of the stack: +//! +//! p_2, .., p_n +//! +//! * 1.3. `p_1 = r_1 | r_2`. We expand the OR-pattern and then recurse on each resulting stack: +//! +//! D((r_1, p_2, .., p_n)) +//! D((r_2, p_2, .., p_n)) +//! +//! Note that the OR-patterns are not always used directly in Rust, but are used to derive the +//! exhaustive integer matching rules, so they're written here for posterity. +//! +//! Both those operations extend straightforwardly to a list or pattern-stacks, i.e. a matrix, by +//! working row-by-row. Popping a constructor ends up keeping only the matrix rows that start with +//! the given constructor, and popping a wildcard keeps those rows that start with a wildcard. +//! +//! +//! The algorithm for computing `U` +//! ------------------------------- +//! The algorithm is inductive (on the number of columns: i.e., components of tuple patterns). +//! That means we're going to check the components from left-to-right, so the algorithm +//! operates principally on the first component of the matrix and new pattern-stack `p`. +//! This algorithm is realised in the `is_useful` function. +//! +//! Base case (`n = 0`, i.e., an empty tuple pattern): +//! - If `P` already contains an empty pattern (i.e., if the number of patterns `m > 0`), then +//! `U(P, p)` is false. +//! - Otherwise, `P` must be empty, so `U(P, p)` is true. +//! +//! 
Inductive step (`n > 0`, i.e., whether there's at least one column [which may then be expanded +//! into further columns later]). We're going to match on the top of the new pattern-stack, `p_1`: +//! +//! - If `p_1 == c(r_1, .., r_a)`, i.e. we have a constructor pattern. +//! Then, the usefulness of `p_1` can be reduced to whether it is useful when +//! we ignore all the patterns in the first column of `P` that involve other constructors. +//! This is where `S(c, P)` comes in: +//! +//! ```text +//! U(P, p) := U(S(c, P), S(c, p)) +//! ``` +//! +//! This special case is handled in `is_useful_specialized`. +//! +//! For example, if `P` is: +//! +//! ```text +//! [ +//! [Some(true), _], +//! [None, 0], +//! ] +//! ``` +//! +//! and `p` is `[Some(false), 0]`, then we don't care about row 2 since we know `p` only +//! matches values that row 2 doesn't. For row 1 however, we need to dig into the +//! arguments of `Some` to know whether some new value is covered. So we compute +//! `U([[true, _]], [false, 0])`. +//! +//! - If `p_1 == _`, then we look at the list of constructors that appear in the first component of +//! the rows of `P`: +//! - If there are some constructors that aren't present, then we might think that the +//! wildcard `_` is useful, since it covers those constructors that weren't covered +//! before. +//! That's almost correct, but only works if there were no wildcards in those first +//! components. So we need to check that `p` is useful with respect to the rows that +//! start with a wildcard, if there are any. This is where `D` comes in: +//! `U(P, p) := U(D(P), D(p))` +//! +//! For example, if `P` is: +//! ```text +//! [ +//! [_, true, _], +//! [None, false, 1], +//! ] +//! ``` +//! and `p` is `[_, false, _]`, the `Some` constructor doesn't appear in `P`. So if we +//! only had row 2, we'd know that `p` is useful. However row 1 starts with a +//! wildcard, so we need to check whether `U([[true, _]], [false, 1])`. +//! +//! 
- Otherwise, all possible constructors (for the relevant type) are present. In this +//! case we must check whether the wildcard pattern covers any unmatched value. For +//! that, we can think of the `_` pattern as a big OR-pattern that covers all +//! possible constructors. For `Option`, that would mean `_ = None | Some(_)` for +//! example. The wildcard pattern is useful in this case if it is useful when +//! specialized to one of the possible constructors. So we compute: +//! `U(P, p) := ∃(k ϵ constructors) U(S(k, P), S(k, p))` +//! +//! For example, if `P` is: +//! ```text +//! [ +//! [Some(true), _], +//! [None, false], +//! ] +//! ``` +//! and `p` is `[_, false]`, both `None` and `Some` constructors appear in the first +//! components of `P`. We will therefore try popping both constructors in turn: we +//! compute `U([[true, _]], [_, false])` for the `Some` constructor, and `U([[false]], +//! [false])` for the `None` constructor. The first case returns true, so we know that +//! `p` is useful for `P`. Indeed, it matches `[Some(false), _]` that wasn't matched +//! before. +//! +//! - If `p_1 == r_1 | r_2`, then the usefulness depends on each `r_i` separately: +//! +//! ```text +//! U(P, p) := U(P, (r_1, p_2, .., p_n)) +//! || U(P, (r_2, p_2, .., p_n)) +//! ``` +use std::sync::Arc; + +use arena::Idx; +use hir_def::{ + adt::VariantData, + body::Body, + expr::{Expr, Literal, Pat, PatId}, + AdtId, EnumVariantId, VariantId, +}; +use smallvec::{smallvec, SmallVec}; + +use crate::{db::HirDatabase, ApplicationTy, InferenceResult, Ty, TypeCtor}; + +#[derive(Debug, Clone, Copy)] +/// Either a pattern from the source code being analyzed, represented as +/// as `PatId`, or a `Wild` pattern which is created as an intermediate +/// step in the match checking algorithm and thus is not backed by a +/// real `PatId`. 
+/// +/// Note that it is totally valid for the `PatId` variant to contain +/// a `PatId` which resolves to a `Wild` pattern, if that wild pattern +/// exists in the source code being analyzed. +enum PatIdOrWild { + PatId(PatId), + Wild, +} + +impl PatIdOrWild { + fn as_pat(self, cx: &MatchCheckCtx) -> Pat { + match self { + PatIdOrWild::PatId(id) => cx.body.pats[id].clone(), + PatIdOrWild::Wild => Pat::Wild, + } + } + + fn as_id(self) -> Option { + match self { + PatIdOrWild::PatId(id) => Some(id), + PatIdOrWild::Wild => None, + } + } +} + +impl From for PatIdOrWild { + fn from(pat_id: PatId) -> Self { + Self::PatId(pat_id) + } +} + +impl From<&PatId> for PatIdOrWild { + fn from(pat_id: &PatId) -> Self { + Self::PatId(*pat_id) + } +} + +#[derive(Debug, Clone, Copy, PartialEq)] +pub(super) enum MatchCheckErr { + NotImplemented, + MalformedMatchArm, + /// Used when type inference cannot resolve the type of + /// a pattern or expression. + Unknown, +} + +/// The return type of `is_useful` is either an indication of usefulness +/// of the match arm, or an error in the case the match statement +/// is made up of types for which exhaustiveness checking is currently +/// not completely implemented. +/// +/// The `std::result::Result` type is used here rather than a custom enum +/// to allow the use of `?`. +pub(super) type MatchCheckResult = Result; + +#[derive(Debug)] +/// A row in a Matrix. +/// +/// This type is modeled from the struct of the same name in `rustc`. 
+pub(super) struct PatStack(PatStackInner); +type PatStackInner = SmallVec<[PatIdOrWild; 2]>; + +impl PatStack { + pub(super) fn from_pattern(pat_id: PatId) -> PatStack { + Self(smallvec!(pat_id.into())) + } + + pub(super) fn from_wild() -> PatStack { + Self(smallvec!(PatIdOrWild::Wild)) + } + + fn from_slice(slice: &[PatIdOrWild]) -> PatStack { + Self(SmallVec::from_slice(slice)) + } + + fn from_vec(v: PatStackInner) -> PatStack { + Self(v) + } + + fn get_head(&self) -> Option { + self.0.first().copied() + } + + fn tail(&self) -> &[PatIdOrWild] { + self.0.get(1..).unwrap_or(&[]) + } + + fn to_tail(&self) -> PatStack { + Self::from_slice(self.tail()) + } + + fn replace_head_with(&self, pats: I) -> PatStack + where + I: Iterator, + T: Into, + { + let mut patterns: PatStackInner = smallvec![]; + for pat in pats { + patterns.push(pat.into()); + } + for pat in &self.0[1..] { + patterns.push(*pat); + } + PatStack::from_vec(patterns) + } + + /// Computes `D(self)`. + /// + /// See the module docs and the associated documentation in rustc for details. + fn specialize_wildcard(&self, cx: &MatchCheckCtx) -> Option { + if matches!(self.get_head()?.as_pat(cx), Pat::Wild) { + Some(self.to_tail()) + } else { + None + } + } + + /// Computes `S(constructor, self)`. + /// + /// See the module docs and the associated documentation in rustc for details. + fn specialize_constructor( + &self, + cx: &MatchCheckCtx, + constructor: &Constructor, + ) -> MatchCheckResult> { + let head = match self.get_head() { + Some(head) => head, + None => return Ok(None), + }; + + let head_pat = head.as_pat(cx); + let result = match (head_pat, constructor) { + (Pat::Tuple { args: ref pat_ids, ellipsis }, Constructor::Tuple { arity: _ }) => { + if ellipsis.is_some() { + // If there are ellipsis here, we should add the correct number of + // Pat::Wild patterns to `pat_ids`. 
We should be able to use the + // constructors arity for this, but at the time of writing we aren't + // correctly calculating this arity when ellipsis are present. + return Err(MatchCheckErr::NotImplemented); + } + + Some(self.replace_head_with(pat_ids.iter())) + } + (Pat::Lit(lit_expr), Constructor::Bool(constructor_val)) => { + match cx.body.exprs[lit_expr] { + Expr::Literal(Literal::Bool(pat_val)) if *constructor_val == pat_val => { + Some(self.to_tail()) + } + // it was a bool but the value doesn't match + Expr::Literal(Literal::Bool(_)) => None, + // perhaps this is actually unreachable given we have + // already checked that these match arms have the appropriate type? + _ => return Err(MatchCheckErr::NotImplemented), + } + } + (Pat::Wild, constructor) => Some(self.expand_wildcard(cx, constructor)?), + (Pat::Path(_), Constructor::Enum(constructor)) => { + // unit enum variants become `Pat::Path` + let pat_id = head.as_id().expect("we know this isn't a wild"); + if !enum_variant_matches(cx, pat_id, *constructor) { + None + } else { + Some(self.to_tail()) + } + } + ( + Pat::TupleStruct { args: ref pat_ids, ellipsis, .. }, + Constructor::Enum(enum_constructor), + ) => { + let pat_id = head.as_id().expect("we know this isn't a wild"); + if !enum_variant_matches(cx, pat_id, *enum_constructor) { + None + } else { + let constructor_arity = constructor.arity(cx)?; + if let Some(ellipsis_position) = ellipsis { + // If there are ellipsis in the pattern, the ellipsis must take the place + // of at least one sub-pattern, so `pat_ids` should be smaller than the + // constructor arity. 
+ if pat_ids.len() < constructor_arity { + let mut new_patterns: Vec = vec![]; + + for pat_id in &pat_ids[0..ellipsis_position] { + new_patterns.push((*pat_id).into()); + } + + for _ in 0..(constructor_arity - pat_ids.len()) { + new_patterns.push(PatIdOrWild::Wild); + } + + for pat_id in &pat_ids[ellipsis_position..pat_ids.len()] { + new_patterns.push((*pat_id).into()); + } + + Some(self.replace_head_with(new_patterns.into_iter())) + } else { + return Err(MatchCheckErr::MalformedMatchArm); + } + } else { + // If there is no ellipsis in the tuple pattern, the number + // of patterns must equal the constructor arity. + if pat_ids.len() == constructor_arity { + Some(self.replace_head_with(pat_ids.into_iter())) + } else { + return Err(MatchCheckErr::MalformedMatchArm); + } + } + } + } + (Pat::Record { args: ref arg_patterns, .. }, Constructor::Enum(e)) => { + let pat_id = head.as_id().expect("we know this isn't a wild"); + if !enum_variant_matches(cx, pat_id, *e) { + None + } else { + match cx.db.enum_data(e.parent).variants[e.local_id].variant_data.as_ref() { + VariantData::Record(struct_field_arena) => { + // Here we treat any missing fields in the record as the wild pattern, as + // if the record has ellipsis. We want to do this here even if the + // record does not contain ellipsis, because it allows us to continue + // enforcing exhaustiveness for the rest of the match statement. + // + // Creating the diagnostic for the missing field in the pattern + // should be done in a different diagnostic. 
+ let patterns = struct_field_arena.iter().map(|(_, struct_field)| { + arg_patterns + .iter() + .find(|pat| pat.name == struct_field.name) + .map(|pat| PatIdOrWild::from(pat.pat)) + .unwrap_or(PatIdOrWild::Wild) + }); + + Some(self.replace_head_with(patterns)) + } + _ => return Err(MatchCheckErr::Unknown), + } + } + } + (Pat::Or(_), _) => return Err(MatchCheckErr::NotImplemented), + (_, _) => return Err(MatchCheckErr::NotImplemented), + }; + + Ok(result) + } + + /// A special case of `specialize_constructor` where the head of the pattern stack + /// is a Wild pattern. + /// + /// Replaces the Wild pattern at the head of the pattern stack with N Wild patterns + /// (N >= 0), where N is the arity of the given constructor. + fn expand_wildcard( + &self, + cx: &MatchCheckCtx, + constructor: &Constructor, + ) -> MatchCheckResult { + assert_eq!( + Pat::Wild, + self.get_head().expect("expand_wildcard called on empty PatStack").as_pat(cx), + "expand_wildcard must only be called on PatStack with wild at head", + ); + + let mut patterns: PatStackInner = smallvec![]; + + for _ in 0..constructor.arity(cx)? { + patterns.push(PatIdOrWild::Wild); + } + + for pat in &self.0[1..] { + patterns.push(*pat); + } + + Ok(PatStack::from_vec(patterns)) + } +} + +/// A collection of PatStack. +/// +/// This type is modeled from the struct of the same name in `rustc`. +pub(super) struct Matrix(Vec); + +impl Matrix { + pub(super) fn empty() -> Self { + Self(vec![]) + } + + pub(super) fn push(&mut self, cx: &MatchCheckCtx, row: PatStack) { + if let Some(Pat::Or(pat_ids)) = row.get_head().map(|pat_id| pat_id.as_pat(cx)) { + // Or patterns are expanded here + for pat_id in pat_ids { + self.0.push(PatStack::from_pattern(pat_id)); + } + } else { + self.0.push(row); + } + } + + fn is_empty(&self) -> bool { + self.0.is_empty() + } + + fn heads(&self) -> Vec { + self.0.iter().flat_map(|p| p.get_head()).collect() + } + + /// Computes `D(self)` for each contained PatStack. 
+ /// + /// See the module docs and the associated documentation in rustc for details. + fn specialize_wildcard(&self, cx: &MatchCheckCtx) -> Self { + Self::collect(cx, self.0.iter().filter_map(|r| r.specialize_wildcard(cx))) + } + + /// Computes `S(constructor, self)` for each contained PatStack. + /// + /// See the module docs and the associated documentation in rustc for details. + fn specialize_constructor( + &self, + cx: &MatchCheckCtx, + constructor: &Constructor, + ) -> MatchCheckResult { + let mut new_matrix = Matrix::empty(); + for pat in &self.0 { + if let Some(pat) = pat.specialize_constructor(cx, constructor)? { + new_matrix.push(cx, pat); + } + } + + Ok(new_matrix) + } + + fn collect>(cx: &MatchCheckCtx, iter: T) -> Self { + let mut matrix = Matrix::empty(); + + for pat in iter { + // using push ensures we expand or-patterns + matrix.push(cx, pat); + } + + matrix + } +} + +#[derive(Clone, Debug, PartialEq)] +/// An indication of the usefulness of a given match arm, where +/// usefulness is defined as matching some patterns which were +/// not matched by an prior match arms. +/// +/// We may eventually need an `Unknown` variant here. +pub(super) enum Usefulness { + Useful, + NotUseful, +} + +pub(super) struct MatchCheckCtx<'a> { + pub(super) match_expr: Idx, + pub(super) body: Arc, + pub(super) infer: Arc, + pub(super) db: &'a dyn HirDatabase, +} + +/// Given a set of patterns `matrix`, and pattern to consider `v`, determines +/// whether `v` is useful. A pattern is useful if it covers cases which were +/// not previously covered. +/// +/// When calling this function externally (that is, not the recursive calls) it +/// expected that you have already type checked the match arms. All patterns in +/// matrix should be the same type as v, as well as they should all be the same +/// type as the match expression. 
+pub(super) fn is_useful( + cx: &MatchCheckCtx, + matrix: &Matrix, + v: &PatStack, +) -> MatchCheckResult { + // Handle two special cases: + // - enum with no variants + // - `!` type + // In those cases, no match arm is useful. + match cx.infer[cx.match_expr].strip_references() { + Ty::Apply(ApplicationTy { ctor: TypeCtor::Adt(AdtId::EnumId(enum_id)), .. }) => { + if cx.db.enum_data(*enum_id).variants.is_empty() { + return Ok(Usefulness::NotUseful); + } + } + Ty::Apply(ApplicationTy { ctor: TypeCtor::Never, .. }) => { + return Ok(Usefulness::NotUseful); + } + _ => (), + } + + let head = match v.get_head() { + Some(head) => head, + None => { + let result = if matrix.is_empty() { Usefulness::Useful } else { Usefulness::NotUseful }; + + return Ok(result); + } + }; + + if let Pat::Or(pat_ids) = head.as_pat(cx) { + let mut found_unimplemented = false; + let any_useful = pat_ids.iter().any(|&pat_id| { + let v = PatStack::from_pattern(pat_id); + + match is_useful(cx, matrix, &v) { + Ok(Usefulness::Useful) => true, + Ok(Usefulness::NotUseful) => false, + _ => { + found_unimplemented = true; + false + } + } + }); + + return if any_useful { + Ok(Usefulness::Useful) + } else if found_unimplemented { + Err(MatchCheckErr::NotImplemented) + } else { + Ok(Usefulness::NotUseful) + }; + } + + if let Some(constructor) = pat_constructor(cx, head)? { + let matrix = matrix.specialize_constructor(&cx, &constructor)?; + let v = v + .specialize_constructor(&cx, &constructor)? + .expect("we know this can't fail because we get the constructor from `v.head()` above"); + + is_useful(&cx, &matrix, &v) + } else { + // expanding wildcard + let mut used_constructors: Vec = vec![]; + for pat in matrix.heads() { + if let Some(constructor) = pat_constructor(cx, pat)? { + used_constructors.push(constructor); + } + } + + // We assume here that the first constructor is the "correct" type. Since we + // only care about the "type" of the constructor (i.e. 
if it is a bool we + // don't care about the value), this assumption should be valid as long as + // the match statement is well formed. We currently uphold this invariant by + // filtering match arms before calling `is_useful`, only passing in match arms + // whose type matches the type of the match expression. + match &used_constructors.first() { + Some(constructor) if all_constructors_covered(&cx, constructor, &used_constructors) => { + // If all constructors are covered, then we need to consider whether + // any values are covered by this wildcard. + // + // For example, with matrix '[[Some(true)], [None]]', all + // constructors are covered (`Some`/`None`), so we need + // to perform specialization to see that our wildcard will cover + // the `Some(false)` case. + // + // Here we create a constructor for each variant and then check + // usefulness after specializing for that constructor. + let mut found_unimplemented = false; + for constructor in constructor.all_constructors(cx) { + let matrix = matrix.specialize_constructor(&cx, &constructor)?; + let v = v.expand_wildcard(&cx, &constructor)?; + + match is_useful(&cx, &matrix, &v) { + Ok(Usefulness::Useful) => return Ok(Usefulness::Useful), + Ok(Usefulness::NotUseful) => continue, + _ => found_unimplemented = true, + }; + } + + if found_unimplemented { + Err(MatchCheckErr::NotImplemented) + } else { + Ok(Usefulness::NotUseful) + } + } + _ => { + // Either not all constructors are covered, or the only other arms + // are wildcards. Either way, this pattern is useful if it is useful + // when compared to those arms with wildcards. + let matrix = matrix.specialize_wildcard(&cx); + let v = v.to_tail(); + + is_useful(&cx, &matrix, &v) + } + } + } +} + +#[derive(Debug, Clone, Copy)] +/// Similar to TypeCtor, but includes additional information about the specific +/// value being instantiated. For example, TypeCtor::Bool doesn't contain the +/// boolean value. 
+enum Constructor { + Bool(bool), + Tuple { arity: usize }, + Enum(EnumVariantId), +} + +impl Constructor { + fn arity(&self, cx: &MatchCheckCtx) -> MatchCheckResult { + let arity = match self { + Constructor::Bool(_) => 0, + Constructor::Tuple { arity } => *arity, + Constructor::Enum(e) => { + match cx.db.enum_data(e.parent).variants[e.local_id].variant_data.as_ref() { + VariantData::Tuple(struct_field_data) => struct_field_data.len(), + VariantData::Record(struct_field_data) => struct_field_data.len(), + VariantData::Unit => 0, + } + } + }; + + Ok(arity) + } + + fn all_constructors(&self, cx: &MatchCheckCtx) -> Vec { + match self { + Constructor::Bool(_) => vec![Constructor::Bool(true), Constructor::Bool(false)], + Constructor::Tuple { .. } => vec![*self], + Constructor::Enum(e) => cx + .db + .enum_data(e.parent) + .variants + .iter() + .map(|(local_id, _)| { + Constructor::Enum(EnumVariantId { parent: e.parent, local_id }) + }) + .collect(), + } + } +} + +/// Returns the constructor for the given pattern. Should only return None +/// in the case of a Wild pattern. +fn pat_constructor(cx: &MatchCheckCtx, pat: PatIdOrWild) -> MatchCheckResult> { + let res = match pat.as_pat(cx) { + Pat::Wild => None, + // FIXME somehow create the Tuple constructor with the proper arity. If there are + // ellipsis, the arity is not equal to the number of patterns. + Pat::Tuple { args: pats, ellipsis } if ellipsis.is_none() => { + Some(Constructor::Tuple { arity: pats.len() }) + } + Pat::Lit(lit_expr) => match cx.body.exprs[lit_expr] { + Expr::Literal(Literal::Bool(val)) => Some(Constructor::Bool(val)), + _ => return Err(MatchCheckErr::NotImplemented), + }, + Pat::TupleStruct { .. } | Pat::Path(_) | Pat::Record { .. 
} => { + let pat_id = pat.as_id().expect("we already know this pattern is not a wild"); + let variant_id = + cx.infer.variant_resolution_for_pat(pat_id).ok_or(MatchCheckErr::Unknown)?; + match variant_id { + VariantId::EnumVariantId(enum_variant_id) => { + Some(Constructor::Enum(enum_variant_id)) + } + _ => return Err(MatchCheckErr::NotImplemented), + } + } + _ => return Err(MatchCheckErr::NotImplemented), + }; + + Ok(res) +} + +fn all_constructors_covered( + cx: &MatchCheckCtx, + constructor: &Constructor, + used_constructors: &[Constructor], +) -> bool { + match constructor { + Constructor::Tuple { arity } => { + used_constructors.iter().any(|constructor| match constructor { + Constructor::Tuple { arity: used_arity } => arity == used_arity, + _ => false, + }) + } + Constructor::Bool(_) => { + if used_constructors.is_empty() { + return false; + } + + let covers_true = + used_constructors.iter().any(|c| matches!(c, Constructor::Bool(true))); + let covers_false = + used_constructors.iter().any(|c| matches!(c, Constructor::Bool(false))); + + covers_true && covers_false + } + Constructor::Enum(e) => cx.db.enum_data(e.parent).variants.iter().all(|(id, _)| { + for constructor in used_constructors { + if let Constructor::Enum(e) = constructor { + if id == e.local_id { + return true; + } + } + } + + false + }), + } +} + +fn enum_variant_matches(cx: &MatchCheckCtx, pat_id: PatId, enum_variant_id: EnumVariantId) -> bool { + Some(enum_variant_id.into()) == cx.infer.variant_resolution_for_pat(pat_id) +} + +#[cfg(test)] +mod tests { + use crate::diagnostics::tests::check_diagnostics; + + #[test] + fn empty_tuple() { + check_diagnostics( + r#" +fn main() { + match () { } + //^^ Missing match arm + match (()) { } + //^^^^ Missing match arm + + match () { _ => (), } + match () { () => (), } + match (()) { (()) => (), } +} +"#, + ); + } + + #[test] + fn tuple_of_two_empty_tuple() { + check_diagnostics( + r#" +fn main() { + match ((), ()) { } + //^^^^^^^^ Missing match arm + + 
match ((), ()) { ((), ()) => (), } +} +"#, + ); + } + + #[test] + fn boolean() { + check_diagnostics( + r#" +fn test_main() { + match false { } + //^^^^^ Missing match arm + match false { true => (), } + //^^^^^ Missing match arm + match (false, true) {} + //^^^^^^^^^^^^^ Missing match arm + match (false, true) { (true, true) => (), } + //^^^^^^^^^^^^^ Missing match arm + match (false, true) { + //^^^^^^^^^^^^^ Missing match arm + (false, true) => (), + (false, false) => (), + (true, false) => (), + } + match (false, true) { (true, _x) => (), } + //^^^^^^^^^^^^^ Missing match arm + + match false { true => (), false => (), } + match (false, true) { + (false, _) => (), + (true, false) => (), + (_, true) => (), + } + match (false, true) { + (true, true) => (), + (true, false) => (), + (false, true) => (), + (false, false) => (), + } + match (false, true) { + (true, _x) => (), + (false, true) => (), + (false, false) => (), + } + match (false, true, false) { + (false, ..) => (), + (true, ..) => (), + } + match (false, true, false) { + (.., false) => (), + (.., true) => (), + } + match (false, true, false) { (..) 
=> (), } +} +"#, + ); + } + + #[test] + fn tuple_of_tuple_and_bools() { + check_diagnostics( + r#" +fn main() { + match (false, ((), false)) {} + //^^^^^^^^^^^^^^^^^^^^ Missing match arm + match (false, ((), false)) { (true, ((), true)) => (), } + //^^^^^^^^^^^^^^^^^^^^ Missing match arm + match (false, ((), false)) { (true, _) => (), } + //^^^^^^^^^^^^^^^^^^^^ Missing match arm + + match (false, ((), false)) { + (true, ((), true)) => (), + (true, ((), false)) => (), + (false, ((), true)) => (), + (false, ((), false)) => (), + } + match (false, ((), false)) { + (true, ((), true)) => (), + (true, ((), false)) => (), + (false, _) => (), + } +} +"#, + ); + } + + #[test] + fn enums() { + check_diagnostics( + r#" +enum Either { A, B, } + +fn main() { + match Either::A { } + //^^^^^^^^^ Missing match arm + match Either::B { Either::A => (), } + //^^^^^^^^^ Missing match arm + + match &Either::B { + //^^^^^^^^^^ Missing match arm + Either::A => (), + } + + match Either::B { + Either::A => (), Either::B => (), + } + match &Either::B { + Either::A => (), Either::B => (), + } +} +"#, + ); + } + + #[test] + fn enum_containing_bool() { + check_diagnostics( + r#" +enum Either { A(bool), B } + +fn main() { + match Either::B { } + //^^^^^^^^^ Missing match arm + match Either::B { + //^^^^^^^^^ Missing match arm + Either::A(true) => (), Either::B => () + } + + match Either::B { + Either::A(true) => (), + Either::A(false) => (), + Either::B => (), + } + match Either::B { + Either::B => (), + _ => (), + } + match Either::B { + Either::A(_) => (), + Either::B => (), + } + +} + "#, + ); + } + + #[test] + fn enum_different_sizes() { + check_diagnostics( + r#" +enum Either { A(bool), B(bool, bool) } + +fn main() { + match Either::A(false) { + //^^^^^^^^^^^^^^^^ Missing match arm + Either::A(_) => (), + Either::B(false, _) => (), + } + + match Either::A(false) { + Either::A(_) => (), + Either::B(true, _) => (), + Either::B(false, _) => (), + } + match Either::A(false) { + Either::A(true) 
| Either::A(false) => (), + Either::B(true, _) => (), + Either::B(false, _) => (), + } +} +"#, + ); + } + + #[test] + fn tuple_of_enum_no_diagnostic() { + check_diagnostics( + r#" +enum Either { A(bool), B(bool, bool) } +enum Either2 { C, D } + +fn main() { + match (Either::A(false), Either2::C) { + (Either::A(true), _) | (Either::A(false), _) => (), + (Either::B(true, _), Either2::C) => (), + (Either::B(false, _), Either2::C) => (), + (Either::B(_, _), Either2::D) => (), + } +} +"#, + ); + } + + #[test] + fn mismatched_types() { + // Match statements with arms that don't match the + // expression pattern do not fire this diagnostic. + check_diagnostics( + r#" +enum Either { A, B } +enum Either2 { C, D } + +fn main() { + match Either::A { + Either2::C => (), + Either2::D => (), + } + match (true, false) { + (true, false, true) => (), + (true) => (), + } + match (0) { () => () } + match Unresolved::Bar { Unresolved::Baz => () } +} + "#, + ); + } + + #[test] + fn malformed_match_arm_tuple_enum_missing_pattern() { + // We are testing to be sure we don't panic here when the match + // arm `Either::B` is missing its pattern. 
+ check_diagnostics( + r#" +enum Either { A, B(u32) } + +fn main() { + match Either::A { + Either::A => (), + Either::B() => (), + } +} +"#, + ); + } + + #[test] + fn expr_diverges() { + check_diagnostics( + r#" +enum Either { A, B } + +fn main() { + match loop {} { + Either::A => (), + Either::B => (), + } + match loop {} { + Either::A => (), + } + match loop { break Foo::A } { + //^^^^^^^^^^^^^^^^^^^^^ Missing match arm + Either::A => (), + } + match loop { break Foo::A } { + Either::A => (), + Either::B => (), + } +} +"#, + ); + } + + #[test] + fn expr_partially_diverges() { + check_diagnostics( + r#" +enum Either { A(T), B } + +fn foo() -> Either { Either::B } +fn main() -> u32 { + match foo() { + Either::A(val) => val, + Either::B => 0, + } +} +"#, + ); + } + + #[test] + fn enum_record() { + check_diagnostics( + r#" +enum Either { A { foo: bool }, B } + +fn main() { + let a = Either::A { foo: true }; + match a { } + //^ Missing match arm + match a { Either::A { foo: true } => () } + //^ Missing match arm + match a { + Either::A { } => (), + //^^^^^^^^^ Missing structure fields: + // | - foo + Either::B => (), + } + match a { + //^ Missing match arm + Either::A { } => (), + } //^^^^^^^^^ Missing structure fields: + // | - foo + + match a { + Either::A { foo: true } => (), + Either::A { foo: false } => (), + Either::B => (), + } + match a { + Either::A { foo: _ } => (), + Either::B => (), + } +} +"#, + ); + } + + #[test] + fn enum_record_fields_out_of_order() { + check_diagnostics( + r#" +enum Either { + A { foo: bool, bar: () }, + B, +} + +fn main() { + let a = Either::A { foo: true, bar: () }; + match a { + //^ Missing match arm + Either::A { bar: (), foo: false } => (), + Either::A { foo: true, bar: () } => (), + } + + match a { + Either::A { bar: (), foo: false } => (), + Either::A { foo: true, bar: () } => (), + Either::B => (), + } +} +"#, + ); + } + + #[test] + fn enum_record_ellipsis() { + check_diagnostics( + r#" +enum Either { + A { foo: bool, bar: 
bool }, + B, +} + +fn main() { + let a = Either::B; + match a { + //^ Missing match arm + Either::A { foo: true, .. } => (), + Either::B => (), + } + match a { + //^ Missing match arm + Either::A { .. } => (), + } + + match a { + Either::A { foo: true, .. } => (), + Either::A { foo: false, .. } => (), + Either::B => (), + } + + match a { + Either::A { .. } => (), + Either::B => (), + } +} +"#, + ); + } + + #[test] + fn enum_tuple_partial_ellipsis() { + check_diagnostics( + r#" +enum Either { + A(bool, bool, bool, bool), + B, +} + +fn main() { + match Either::B { + //^^^^^^^^^ Missing match arm + Either::A(true, .., true) => (), + Either::A(true, .., false) => (), + Either::A(false, .., false) => (), + Either::B => (), + } + match Either::B { + //^^^^^^^^^ Missing match arm + Either::A(true, .., true) => (), + Either::A(true, .., false) => (), + Either::A(.., true) => (), + Either::B => (), + } + + match Either::B { + Either::A(true, .., true) => (), + Either::A(true, .., false) => (), + Either::A(false, .., true) => (), + Either::A(false, .., false) => (), + Either::B => (), + } + match Either::B { + Either::A(true, .., true) => (), + Either::A(true, .., false) => (), + Either::A(.., true) => (), + Either::A(.., false) => (), + Either::B => (), + } +} +"#, + ); + } + + #[test] + fn never() { + check_diagnostics( + r#" +enum Never {} + +fn enum_(never: Never) { + match never {} +} +fn enum_ref(never: &Never) { + match never {} +} +fn bang(never: !) { + match never {} +} +"#, + ); + } + + #[test] + fn or_pattern_panic() { + check_diagnostics( + r#" +pub enum Category { Infinity, Zero } + +fn panic(a: Category, b: Category) { + match (a, b) { + (Category::Zero | Category::Infinity, _) => (), + (_, Category::Zero | Category::Infinity) => (), + } + + // FIXME: This is a false positive, but the code used to cause a panic in the match checker, + // so this acts as a regression test for that. 
+ match (a, b) { + //^^^^^^ Missing match arm + (Category::Infinity, Category::Infinity) | (Category::Zero, Category::Zero) => (), + (Category::Infinity | Category::Zero, _) => (), + } +} +"#, + ); + } + + mod false_negatives { + //! The implementation of match checking here is a work in progress. As we roll this out, we + //! prefer false negatives to false positives (ideally there would be no false positives). This + //! test module should document known false negatives. Eventually we will have a complete + //! implementation of match checking and this module will be empty. + //! + //! The reasons for documenting known false negatives: + //! + //! 1. It acts as a backlog of work that can be done to improve the behavior of the system. + //! 2. It ensures the code doesn't panic when handling these cases. + use super::*; + + #[test] + fn integers() { + // We don't currently check integer exhaustiveness. + check_diagnostics( + r#" +fn main() { + match 5 { + 10 => (), + 11..20 => (), + } +} +"#, + ); + } + + #[test] + fn internal_or() { + // We do not currently handle patterns with internal `or`s. + check_diagnostics( + r#" +fn main() { + enum Either { A(bool), B } + match Either::B { + Either::A(true | false) => (), + } +} +"#, + ); + } + + #[test] + fn tuple_of_bools_with_ellipsis_at_end_missing_arm() { + // We don't currently handle tuple patterns with ellipsis. + check_diagnostics( + r#" +fn main() { + match (false, true, false) { + (false, ..) => (), + } +} +"#, + ); + } + + #[test] + fn tuple_of_bools_with_ellipsis_at_beginning_missing_arm() { + // We don't currently handle tuple patterns with ellipsis. + check_diagnostics( + r#" +fn main() { + match (false, true, false) { + (.., false) => (), + } +} +"#, + ); + } + + #[test] + fn struct_missing_arm() { + // We don't currently handle structs. 
+ check_diagnostics( + r#" +struct Foo { a: bool } +fn main(f: Foo) { + match f { Foo { a: true } => () } +} +"#, + ); + } + } +} diff --git a/crates/ra_hir_ty/src/diagnostics/unsafe_check.rs b/crates/hir_ty/src/diagnostics/unsafe_check.rs similarity index 100% rename from crates/ra_hir_ty/src/diagnostics/unsafe_check.rs rename to crates/hir_ty/src/diagnostics/unsafe_check.rs diff --git a/crates/hir_ty/src/display.rs b/crates/hir_ty/src/display.rs new file mode 100644 index 0000000000..64b68014d8 --- /dev/null +++ b/crates/hir_ty/src/display.rs @@ -0,0 +1,632 @@ +//! FIXME: write short doc here + +use std::fmt; + +use crate::{ + db::HirDatabase, utils::generics, ApplicationTy, CallableDefId, FnSig, GenericPredicate, + Obligation, OpaqueTyId, ProjectionTy, Substs, TraitRef, Ty, TypeCtor, +}; +use hir_def::{ + find_path, generics::TypeParamProvenance, item_scope::ItemInNs, AdtId, AssocContainerId, + Lookup, ModuleId, +}; +use hir_expand::name::Name; + +pub struct HirFormatter<'a> { + pub db: &'a dyn HirDatabase, + fmt: &'a mut dyn fmt::Write, + buf: String, + curr_size: usize, + pub(crate) max_size: Option, + omit_verbose_types: bool, + display_target: DisplayTarget, +} + +pub trait HirDisplay { + fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError>; + + /// Returns a `Display`able type that is human-readable. + /// Use this for showing types to the user (e.g. diagnostics) + fn display<'a>(&'a self, db: &'a dyn HirDatabase) -> HirDisplayWrapper<'a, Self> + where + Self: Sized, + { + HirDisplayWrapper { + db, + t: self, + max_size: None, + omit_verbose_types: false, + display_target: DisplayTarget::Diagnostics, + } + } + + /// Returns a `Display`able type that is human-readable and tries to be succinct. + /// Use this for showing types to the user where space is constrained (e.g. 
doc popups) + fn display_truncated<'a>( + &'a self, + db: &'a dyn HirDatabase, + max_size: Option, + ) -> HirDisplayWrapper<'a, Self> + where + Self: Sized, + { + HirDisplayWrapper { + db, + t: self, + max_size, + omit_verbose_types: true, + display_target: DisplayTarget::Diagnostics, + } + } + + /// Returns a String representation of `self` that can be inserted into the given module. + /// Use this when generating code (e.g. assists) + fn display_source_code<'a>( + &'a self, + db: &'a dyn HirDatabase, + module_id: ModuleId, + ) -> Result { + let mut result = String::new(); + match self.hir_fmt(&mut HirFormatter { + db, + fmt: &mut result, + buf: String::with_capacity(20), + curr_size: 0, + max_size: None, + omit_verbose_types: false, + display_target: DisplayTarget::SourceCode { module_id }, + }) { + Ok(()) => {} + Err(HirDisplayError::FmtError) => panic!("Writing to String can't fail!"), + Err(HirDisplayError::DisplaySourceCodeError(e)) => return Err(e), + }; + Ok(result) + } +} + +impl<'a> HirFormatter<'a> { + pub fn write_joined( + &mut self, + iter: impl IntoIterator, + sep: &str, + ) -> Result<(), HirDisplayError> { + let mut first = true; + for e in iter { + if !first { + write!(self, "{}", sep)?; + } + first = false; + e.hir_fmt(self)?; + } + Ok(()) + } + + /// This allows using the `write!` macro directly with a `HirFormatter`. 
+ pub fn write_fmt(&mut self, args: fmt::Arguments) -> Result<(), HirDisplayError> { + // We write to a buffer first to track output size + self.buf.clear(); + fmt::write(&mut self.buf, args)?; + self.curr_size += self.buf.len(); + + // Then we write to the internal formatter from the buffer + self.fmt.write_str(&self.buf).map_err(HirDisplayError::from) + } + + pub fn should_truncate(&self) -> bool { + if let Some(max_size) = self.max_size { + self.curr_size >= max_size + } else { + false + } + } + + pub fn omit_verbose_types(&self) -> bool { + self.omit_verbose_types + } +} + +#[derive(Clone, Copy)] +enum DisplayTarget { + /// Display types for inlays, doc popups, autocompletion, etc... + /// Showing `{unknown}` or not qualifying paths is fine here. + /// There's no reason for this to fail. + Diagnostics, + /// Display types for inserting them in source files. + /// The generated code should compile, so paths need to be qualified. + SourceCode { module_id: ModuleId }, +} + +impl DisplayTarget { + fn is_source_code(&self) -> bool { + matches!(self, Self::SourceCode {..}) + } +} + +#[derive(Debug)] +pub enum DisplaySourceCodeError { + PathNotFound, +} + +pub enum HirDisplayError { + /// Errors that can occur when generating source code + DisplaySourceCodeError(DisplaySourceCodeError), + /// `FmtError` is required to be compatible with std::fmt::Display + FmtError, +} +impl From for HirDisplayError { + fn from(_: fmt::Error) -> Self { + Self::FmtError + } +} + +pub struct HirDisplayWrapper<'a, T> { + db: &'a dyn HirDatabase, + t: &'a T, + max_size: Option, + omit_verbose_types: bool, + display_target: DisplayTarget, +} + +impl<'a, T> fmt::Display for HirDisplayWrapper<'a, T> +where + T: HirDisplay, +{ + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self.t.hir_fmt(&mut HirFormatter { + db: self.db, + fmt: f, + buf: String::with_capacity(20), + curr_size: 0, + max_size: self.max_size, + omit_verbose_types: self.omit_verbose_types, + display_target: 
self.display_target, + }) { + Ok(()) => Ok(()), + Err(HirDisplayError::FmtError) => Err(fmt::Error), + Err(HirDisplayError::DisplaySourceCodeError(_)) => { + // This should never happen + panic!("HirDisplay failed when calling Display::fmt!") + } + } + } +} + +const TYPE_HINT_TRUNCATION: &str = "…"; + +impl HirDisplay for &Ty { + fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> { + HirDisplay::hir_fmt(*self, f) + } +} + +impl HirDisplay for ApplicationTy { + fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> { + if f.should_truncate() { + return write!(f, "{}", TYPE_HINT_TRUNCATION); + } + + match self.ctor { + TypeCtor::Bool => write!(f, "bool")?, + TypeCtor::Char => write!(f, "char")?, + TypeCtor::Int(t) => write!(f, "{}", t)?, + TypeCtor::Float(t) => write!(f, "{}", t)?, + TypeCtor::Str => write!(f, "str")?, + TypeCtor::Slice => { + let t = self.parameters.as_single(); + write!(f, "[{}]", t.display(f.db))?; + } + TypeCtor::Array => { + let t = self.parameters.as_single(); + write!(f, "[{}; _]", t.display(f.db))?; + } + TypeCtor::RawPtr(m) => { + let t = self.parameters.as_single(); + write!(f, "*{}{}", m.as_keyword_for_ptr(), t.display(f.db))?; + } + TypeCtor::Ref(m) => { + let t = self.parameters.as_single(); + let ty_display = if f.omit_verbose_types() { + t.display_truncated(f.db, f.max_size) + } else { + t.display(f.db) + }; + write!(f, "&{}{}", m.as_keyword_for_ref(), ty_display)?; + } + TypeCtor::Never => write!(f, "!")?, + TypeCtor::Tuple { .. } => { + let ts = &self.parameters; + if ts.len() == 1 { + write!(f, "({},)", ts[0].display(f.db))?; + } else { + write!(f, "(")?; + f.write_joined(&*ts.0, ", ")?; + write!(f, ")")?; + } + } + TypeCtor::FnPtr { is_varargs, .. 
} => { + let sig = FnSig::from_fn_ptr_substs(&self.parameters, is_varargs); + write!(f, "fn(")?; + f.write_joined(sig.params(), ", ")?; + if is_varargs { + if sig.params().is_empty() { + write!(f, "...")?; + } else { + write!(f, ", ...")?; + } + } + write!(f, ")")?; + let ret = sig.ret(); + if *ret != Ty::unit() { + let ret_display = if f.omit_verbose_types() { + ret.display_truncated(f.db, f.max_size) + } else { + ret.display(f.db) + }; + write!(f, " -> {}", ret_display)?; + } + } + TypeCtor::FnDef(def) => { + let sig = f.db.callable_item_signature(def).subst(&self.parameters); + match def { + CallableDefId::FunctionId(ff) => { + write!(f, "fn {}", f.db.function_data(ff).name)? + } + CallableDefId::StructId(s) => write!(f, "{}", f.db.struct_data(s).name)?, + CallableDefId::EnumVariantId(e) => { + write!(f, "{}", f.db.enum_data(e.parent).variants[e.local_id].name)? + } + }; + if self.parameters.len() > 0 { + let generics = generics(f.db.upcast(), def.into()); + let (parent_params, self_param, type_params, _impl_trait_params) = + generics.provenance_split(); + let total_len = parent_params + self_param + type_params; + // We print all params except implicit impl Trait params. Still a bit weird; should we leave out parent and self? 
+ if total_len > 0 { + write!(f, "<")?; + f.write_joined(&self.parameters.0[..total_len], ", ")?; + write!(f, ">")?; + } + } + write!(f, "(")?; + f.write_joined(sig.params(), ", ")?; + write!(f, ")")?; + let ret = sig.ret(); + if *ret != Ty::unit() { + let ret_display = if f.omit_verbose_types() { + ret.display_truncated(f.db, f.max_size) + } else { + ret.display(f.db) + }; + write!(f, " -> {}", ret_display)?; + } + } + TypeCtor::Adt(def_id) => { + match f.display_target { + DisplayTarget::Diagnostics => { + let name = match def_id { + AdtId::StructId(it) => f.db.struct_data(it).name.clone(), + AdtId::UnionId(it) => f.db.union_data(it).name.clone(), + AdtId::EnumId(it) => f.db.enum_data(it).name.clone(), + }; + write!(f, "{}", name)?; + } + DisplayTarget::SourceCode { module_id } => { + if let Some(path) = find_path::find_path( + f.db.upcast(), + ItemInNs::Types(def_id.into()), + module_id, + ) { + write!(f, "{}", path)?; + } else { + return Err(HirDisplayError::DisplaySourceCodeError( + DisplaySourceCodeError::PathNotFound, + )); + } + } + } + + if self.parameters.len() > 0 { + let parameters_to_write = + if f.display_target.is_source_code() || f.omit_verbose_types() { + match self + .ctor + .as_generic_def() + .map(|generic_def_id| f.db.generic_defaults(generic_def_id)) + .filter(|defaults| !defaults.is_empty()) + { + None => self.parameters.0.as_ref(), + Some(default_parameters) => { + let mut default_from = 0; + for (i, parameter) in self.parameters.iter().enumerate() { + match (parameter, default_parameters.get(i)) { + (&Ty::Unknown, _) | (_, None) => { + default_from = i + 1; + } + (_, Some(default_parameter)) => { + let actual_default = default_parameter + .clone() + .subst(&self.parameters.prefix(i)); + if parameter != &actual_default { + default_from = i + 1; + } + } + } + } + &self.parameters.0[0..default_from] + } + } + } else { + self.parameters.0.as_ref() + }; + if !parameters_to_write.is_empty() { + write!(f, "<")?; + 
f.write_joined(parameters_to_write, ", ")?; + write!(f, ">")?; + } + } + } + TypeCtor::AssociatedType(type_alias) => { + let trait_ = match type_alias.lookup(f.db.upcast()).container { + AssocContainerId::TraitId(it) => it, + _ => panic!("not an associated type"), + }; + let trait_ = f.db.trait_data(trait_); + let type_alias = f.db.type_alias_data(type_alias); + write!(f, "{}::{}", trait_.name, type_alias.name)?; + if self.parameters.len() > 0 { + write!(f, "<")?; + f.write_joined(&*self.parameters.0, ", ")?; + write!(f, ">")?; + } + } + TypeCtor::OpaqueType(opaque_ty_id) => { + let bounds = match opaque_ty_id { + OpaqueTyId::ReturnTypeImplTrait(func, idx) => { + let datas = + f.db.return_type_impl_traits(func).expect("impl trait id without data"); + let data = (*datas) + .as_ref() + .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone()); + data.subst(&self.parameters) + } + }; + write!(f, "impl ")?; + write_bounds_like_dyn_trait(&bounds.value, f)?; + // FIXME: it would maybe be good to distinguish this from the alias type (when debug printing), and to show the substitution + } + TypeCtor::Closure { .. 
} => { + let sig = self.parameters[0].callable_sig(f.db); + if let Some(sig) = sig { + if sig.params().is_empty() { + write!(f, "||")?; + } else if f.omit_verbose_types() { + write!(f, "|{}|", TYPE_HINT_TRUNCATION)?; + } else { + write!(f, "|")?; + f.write_joined(sig.params(), ", ")?; + write!(f, "|")?; + }; + + let ret_display = if f.omit_verbose_types() { + sig.ret().display_truncated(f.db, f.max_size) + } else { + sig.ret().display(f.db) + }; + write!(f, " -> {}", ret_display)?; + } else { + write!(f, "{{closure}}")?; + } + } + } + Ok(()) + } +} + +impl HirDisplay for ProjectionTy { + fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> { + if f.should_truncate() { + return write!(f, "{}", TYPE_HINT_TRUNCATION); + } + + let trait_ = f.db.trait_data(self.trait_(f.db)); + write!(f, "<{} as {}", self.parameters[0].display(f.db), trait_.name)?; + if self.parameters.len() > 1 { + write!(f, "<")?; + f.write_joined(&self.parameters[1..], ", ")?; + write!(f, ">")?; + } + write!(f, ">::{}", f.db.type_alias_data(self.associated_ty).name)?; + Ok(()) + } +} + +impl HirDisplay for Ty { + fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> { + if f.should_truncate() { + return write!(f, "{}", TYPE_HINT_TRUNCATION); + } + + match self { + Ty::Apply(a_ty) => a_ty.hir_fmt(f)?, + Ty::Projection(p_ty) => p_ty.hir_fmt(f)?, + Ty::Placeholder(id) => { + let generics = generics(f.db.upcast(), id.parent); + let param_data = &generics.params.types[id.local_id]; + match param_data.provenance { + TypeParamProvenance::TypeParamList | TypeParamProvenance::TraitSelf => { + write!(f, "{}", param_data.name.clone().unwrap_or_else(Name::missing))? 
+ } + TypeParamProvenance::ArgumentImplTrait => { + write!(f, "impl ")?; + let bounds = f.db.generic_predicates_for_param(*id); + let substs = Substs::type_params_for_generics(&generics); + write_bounds_like_dyn_trait( + &bounds.iter().map(|b| b.clone().subst(&substs)).collect::>(), + f, + )?; + } + } + } + Ty::Bound(idx) => write!(f, "?{}.{}", idx.debruijn.depth(), idx.index)?, + Ty::Dyn(predicates) => { + write!(f, "dyn ")?; + write_bounds_like_dyn_trait(predicates, f)?; + } + Ty::Opaque(opaque_ty) => { + let bounds = match opaque_ty.opaque_ty_id { + OpaqueTyId::ReturnTypeImplTrait(func, idx) => { + let datas = + f.db.return_type_impl_traits(func).expect("impl trait id without data"); + let data = (*datas) + .as_ref() + .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone()); + data.subst(&opaque_ty.parameters) + } + }; + write!(f, "impl ")?; + write_bounds_like_dyn_trait(&bounds.value, f)?; + } + Ty::Unknown => write!(f, "{{unknown}}")?, + Ty::Infer(..) => write!(f, "_")?, + } + Ok(()) + } +} + +fn write_bounds_like_dyn_trait( + predicates: &[GenericPredicate], + f: &mut HirFormatter, +) -> Result<(), HirDisplayError> { + // Note: This code is written to produce nice results (i.e. + // corresponding to surface Rust) for types that can occur in + // actual Rust. It will have weird results if the predicates + // aren't as expected (i.e. self types = $0, projection + // predicates for a certain trait come after the Implemented + // predicate for that trait). + let mut first = true; + let mut angle_open = false; + for p in predicates.iter() { + match p { + GenericPredicate::Implemented(trait_ref) => { + if angle_open { + write!(f, ">")?; + angle_open = false; + } + if !first { + write!(f, " + ")?; + } + // We assume that the self type is $0 (i.e. 
the + // existential) here, which is the only thing that's + // possible in actual Rust, and hence don't print it + write!(f, "{}", f.db.trait_data(trait_ref.trait_).name)?; + if trait_ref.substs.len() > 1 { + write!(f, "<")?; + f.write_joined(&trait_ref.substs[1..], ", ")?; + // there might be assoc type bindings, so we leave the angle brackets open + angle_open = true; + } + } + GenericPredicate::Projection(projection_pred) => { + // in types in actual Rust, these will always come + // after the corresponding Implemented predicate + if angle_open { + write!(f, ", ")?; + } else { + write!(f, "<")?; + angle_open = true; + } + let type_alias = f.db.type_alias_data(projection_pred.projection_ty.associated_ty); + write!(f, "{} = ", type_alias.name)?; + projection_pred.ty.hir_fmt(f)?; + } + GenericPredicate::Error => { + if angle_open { + // impl Trait + write!(f, ", ")?; + } else if !first { + // impl Trait + {error} + write!(f, " + ")?; + } + p.hir_fmt(f)?; + } + } + first = false; + } + if angle_open { + write!(f, ">")?; + } + Ok(()) +} + +impl TraitRef { + fn hir_fmt_ext(&self, f: &mut HirFormatter, use_as: bool) -> Result<(), HirDisplayError> { + if f.should_truncate() { + return write!(f, "{}", TYPE_HINT_TRUNCATION); + } + + self.substs[0].hir_fmt(f)?; + if use_as { + write!(f, " as ")?; + } else { + write!(f, ": ")?; + } + write!(f, "{}", f.db.trait_data(self.trait_).name)?; + if self.substs.len() > 1 { + write!(f, "<")?; + f.write_joined(&self.substs[1..], ", ")?; + write!(f, ">")?; + } + Ok(()) + } +} + +impl HirDisplay for TraitRef { + fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> { + self.hir_fmt_ext(f, false) + } +} + +impl HirDisplay for &GenericPredicate { + fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> { + HirDisplay::hir_fmt(*self, f) + } +} + +impl HirDisplay for GenericPredicate { + fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> { + if f.should_truncate() { + return write!(f, 
"{}", TYPE_HINT_TRUNCATION); + } + + match self { + GenericPredicate::Implemented(trait_ref) => trait_ref.hir_fmt(f)?, + GenericPredicate::Projection(projection_pred) => { + write!(f, "<")?; + projection_pred.projection_ty.trait_ref(f.db).hir_fmt_ext(f, true)?; + write!( + f, + ">::{} = {}", + f.db.type_alias_data(projection_pred.projection_ty.associated_ty).name, + projection_pred.ty.display(f.db) + )?; + } + GenericPredicate::Error => write!(f, "{{error}}")?, + } + Ok(()) + } +} + +impl HirDisplay for Obligation { + fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> { + Ok(match self { + Obligation::Trait(tr) => write!(f, "Implements({})", tr.display(f.db))?, + Obligation::Projection(proj) => write!( + f, + "Normalize({} => {})", + proj.projection_ty.display(f.db), + proj.ty.display(f.db) + )?, + }) + } +} diff --git a/crates/hir_ty/src/infer.rs b/crates/hir_ty/src/infer.rs new file mode 100644 index 0000000000..03b00b101c --- /dev/null +++ b/crates/hir_ty/src/infer.rs @@ -0,0 +1,802 @@ +//! Type inference, i.e. the process of walking through the code and determining +//! the type of each expression and pattern. +//! +//! For type inference, compare the implementations in rustc (the various +//! check_* methods in librustc_typeck/check/mod.rs are a good entry point) and +//! IntelliJ-Rust (org.rust.lang.core.types.infer). Our entry point for +//! inference here is the `infer` function, which infers the types of all +//! expressions in a given function. +//! +//! During inference, types (i.e. the `Ty` struct) can contain type 'variables' +//! which represent currently unknown types; as we walk through the expressions, +//! we might determine that certain variables need to be equal to each other, or +//! to certain types. To record this, we use the union-find implementation from +//! the `ena` crate, which is extracted from rustc. 
+ +use std::borrow::Cow; +use std::mem; +use std::ops::Index; +use std::sync::Arc; + +use arena::map::ArenaMap; +use hir_def::{ + body::Body, + data::{ConstData, FunctionData, StaticData}, + expr::{BindingAnnotation, ExprId, PatId}, + lang_item::LangItemTarget, + path::{path, Path}, + resolver::{HasResolver, Resolver, TypeNs}, + type_ref::{Mutability, TypeRef}, + AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, FunctionId, Lookup, TraitId, + TypeAliasId, VariantId, +}; +use hir_expand::{diagnostics::DiagnosticSink, name::name}; +use rustc_hash::FxHashMap; +use stdx::impl_from; +use syntax::SmolStr; + +use super::{ + primitive::{FloatTy, IntTy}, + traits::{Guidance, Obligation, ProjectionPredicate, Solution}, + InEnvironment, ProjectionTy, Substs, TraitEnvironment, TraitRef, Ty, TypeCtor, TypeWalk, +}; +use crate::{ + db::HirDatabase, infer::diagnostics::InferenceDiagnostic, lower::ImplTraitLoweringMode, +}; + +pub(crate) use unify::unify; + +macro_rules! ty_app { + ($ctor:pat, $param:pat) => { + crate::Ty::Apply(crate::ApplicationTy { ctor: $ctor, parameters: $param }) + }; + ($ctor:pat) => { + ty_app!($ctor, _) + }; +} + +mod unify; +mod path; +mod expr; +mod pat; +mod coerce; + +/// The entry point of type inference. +pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc { + let _p = profile::span("infer_query"); + let resolver = def.resolver(db.upcast()); + let mut ctx = InferenceContext::new(db, def, resolver); + + match def { + DefWithBodyId::ConstId(c) => ctx.collect_const(&db.const_data(c)), + DefWithBodyId::FunctionId(f) => ctx.collect_fn(&db.function_data(f)), + DefWithBodyId::StaticId(s) => ctx.collect_static(&db.static_data(s)), + } + + ctx.infer_body(); + + Arc::new(ctx.resolve_all()) +} + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +enum ExprOrPatId { + ExprId(ExprId), + PatId(PatId), +} +impl_from!(ExprId, PatId for ExprOrPatId); + +/// Binding modes inferred for patterns. 
+/// https://doc.rust-lang.org/reference/patterns.html#binding-modes +#[derive(Copy, Clone, Debug, Eq, PartialEq)] +enum BindingMode { + Move, + Ref(Mutability), +} + +impl BindingMode { + pub fn convert(annotation: BindingAnnotation) -> BindingMode { + match annotation { + BindingAnnotation::Unannotated | BindingAnnotation::Mutable => BindingMode::Move, + BindingAnnotation::Ref => BindingMode::Ref(Mutability::Shared), + BindingAnnotation::RefMut => BindingMode::Ref(Mutability::Mut), + } + } +} + +impl Default for BindingMode { + fn default() -> Self { + BindingMode::Move + } +} + +/// A mismatch between an expected and an inferred type. +#[derive(Clone, PartialEq, Eq, Debug, Hash)] +pub struct TypeMismatch { + pub expected: Ty, + pub actual: Ty, +} + +/// The result of type inference: A mapping from expressions and patterns to types. +#[derive(Clone, PartialEq, Eq, Debug, Default)] +pub struct InferenceResult { + /// For each method call expr, records the function it resolves to. + method_resolutions: FxHashMap, + /// For each field access expr, records the field it resolves to. + field_resolutions: FxHashMap, + /// For each field in record literal, records the field it resolves to. + record_field_resolutions: FxHashMap, + record_field_pat_resolutions: FxHashMap, + /// For each struct literal, records the variant it resolves to. 
+ variant_resolutions: FxHashMap, + /// For each associated item record what it resolves to + assoc_resolutions: FxHashMap, + diagnostics: Vec, + pub type_of_expr: ArenaMap, + pub type_of_pat: ArenaMap, + pub(super) type_mismatches: ArenaMap, +} + +impl InferenceResult { + pub fn method_resolution(&self, expr: ExprId) -> Option { + self.method_resolutions.get(&expr).copied() + } + pub fn field_resolution(&self, expr: ExprId) -> Option { + self.field_resolutions.get(&expr).copied() + } + pub fn record_field_resolution(&self, expr: ExprId) -> Option { + self.record_field_resolutions.get(&expr).copied() + } + pub fn record_field_pat_resolution(&self, pat: PatId) -> Option { + self.record_field_pat_resolutions.get(&pat).copied() + } + pub fn variant_resolution_for_expr(&self, id: ExprId) -> Option { + self.variant_resolutions.get(&id.into()).copied() + } + pub fn variant_resolution_for_pat(&self, id: PatId) -> Option { + self.variant_resolutions.get(&id.into()).copied() + } + pub fn assoc_resolutions_for_expr(&self, id: ExprId) -> Option { + self.assoc_resolutions.get(&id.into()).copied() + } + pub fn assoc_resolutions_for_pat(&self, id: PatId) -> Option { + self.assoc_resolutions.get(&id.into()).copied() + } + pub fn type_mismatch_for_expr(&self, expr: ExprId) -> Option<&TypeMismatch> { + self.type_mismatches.get(expr) + } + pub fn add_diagnostics( + &self, + db: &dyn HirDatabase, + owner: DefWithBodyId, + sink: &mut DiagnosticSink, + ) { + self.diagnostics.iter().for_each(|it| it.add_to(db, owner, sink)) + } +} + +impl Index for InferenceResult { + type Output = Ty; + + fn index(&self, expr: ExprId) -> &Ty { + self.type_of_expr.get(expr).unwrap_or(&Ty::Unknown) + } +} + +impl Index for InferenceResult { + type Output = Ty; + + fn index(&self, pat: PatId) -> &Ty { + self.type_of_pat.get(pat).unwrap_or(&Ty::Unknown) + } +} + +/// The inference context contains all information needed during type inference. 
+#[derive(Clone, Debug)] +struct InferenceContext<'a> { + db: &'a dyn HirDatabase, + owner: DefWithBodyId, + body: Arc, + resolver: Resolver, + table: unify::InferenceTable, + trait_env: Arc, + obligations: Vec, + result: InferenceResult, + /// The return type of the function being inferred, or the closure if we're + /// currently within one. + /// + /// We might consider using a nested inference context for checking + /// closures, but currently this is the only field that will change there, + /// so it doesn't make sense. + return_ty: Ty, + diverges: Diverges, + breakables: Vec, +} + +#[derive(Clone, Debug)] +struct BreakableContext { + pub may_break: bool, + pub break_ty: Ty, + pub label: Option, +} + +fn find_breakable<'c>( + ctxs: &'c mut [BreakableContext], + label: Option<&name::Name>, +) -> Option<&'c mut BreakableContext> { + match label { + Some(_) => ctxs.iter_mut().rev().find(|ctx| ctx.label.as_ref() == label), + None => ctxs.last_mut(), + } +} + +impl<'a> InferenceContext<'a> { + fn new(db: &'a dyn HirDatabase, owner: DefWithBodyId, resolver: Resolver) -> Self { + InferenceContext { + result: InferenceResult::default(), + table: unify::InferenceTable::new(), + obligations: Vec::default(), + return_ty: Ty::Unknown, // set in collect_fn_signature + trait_env: TraitEnvironment::lower(db, &resolver), + db, + owner, + body: db.body(owner), + resolver, + diverges: Diverges::Maybe, + breakables: Vec::new(), + } + } + + fn resolve_all(mut self) -> InferenceResult { + // FIXME resolve obligations as well (use Guidance if necessary) + let mut result = std::mem::take(&mut self.result); + for ty in result.type_of_expr.values_mut() { + let resolved = self.table.resolve_ty_completely(mem::replace(ty, Ty::Unknown)); + *ty = resolved; + } + for ty in result.type_of_pat.values_mut() { + let resolved = self.table.resolve_ty_completely(mem::replace(ty, Ty::Unknown)); + *ty = resolved; + } + result + } + + fn write_expr_ty(&mut self, expr: ExprId, ty: Ty) { + 
self.result.type_of_expr.insert(expr, ty); + } + + fn write_method_resolution(&mut self, expr: ExprId, func: FunctionId) { + self.result.method_resolutions.insert(expr, func); + } + + fn write_field_resolution(&mut self, expr: ExprId, field: FieldId) { + self.result.field_resolutions.insert(expr, field); + } + + fn write_variant_resolution(&mut self, id: ExprOrPatId, variant: VariantId) { + self.result.variant_resolutions.insert(id, variant); + } + + fn write_assoc_resolution(&mut self, id: ExprOrPatId, item: AssocItemId) { + self.result.assoc_resolutions.insert(id, item); + } + + fn write_pat_ty(&mut self, pat: PatId, ty: Ty) { + self.result.type_of_pat.insert(pat, ty); + } + + fn push_diagnostic(&mut self, diagnostic: InferenceDiagnostic) { + self.result.diagnostics.push(diagnostic); + } + + fn make_ty_with_mode( + &mut self, + type_ref: &TypeRef, + impl_trait_mode: ImplTraitLoweringMode, + ) -> Ty { + // FIXME use right resolver for block + let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver) + .with_impl_trait_mode(impl_trait_mode); + let ty = Ty::from_hir(&ctx, type_ref); + let ty = self.insert_type_vars(ty); + self.normalize_associated_types_in(ty) + } + + fn make_ty(&mut self, type_ref: &TypeRef) -> Ty { + self.make_ty_with_mode(type_ref, ImplTraitLoweringMode::Disallowed) + } + + /// Replaces Ty::Unknown by a new type var, so we can maybe still infer it. 
+ fn insert_type_vars_shallow(&mut self, ty: Ty) -> Ty { + match ty { + Ty::Unknown => self.table.new_type_var(), + _ => ty, + } + } + + fn insert_type_vars(&mut self, ty: Ty) -> Ty { + ty.fold(&mut |ty| self.insert_type_vars_shallow(ty)) + } + + fn resolve_obligations_as_possible(&mut self) { + let obligations = mem::replace(&mut self.obligations, Vec::new()); + for obligation in obligations { + let in_env = InEnvironment::new(self.trait_env.clone(), obligation.clone()); + let canonicalized = self.canonicalizer().canonicalize_obligation(in_env); + let solution = + self.db.trait_solve(self.resolver.krate().unwrap(), canonicalized.value.clone()); + + match solution { + Some(Solution::Unique(substs)) => { + canonicalized.apply_solution(self, substs.0); + } + Some(Solution::Ambig(Guidance::Definite(substs))) => { + canonicalized.apply_solution(self, substs.0); + self.obligations.push(obligation); + } + Some(_) => { + // FIXME use this when trying to resolve everything at the end + self.obligations.push(obligation); + } + None => { + // FIXME obligation cannot be fulfilled => diagnostic + } + }; + } + } + + fn unify(&mut self, ty1: &Ty, ty2: &Ty) -> bool { + self.table.unify(ty1, ty2) + } + + /// Resolves the type as far as currently possible, replacing type variables + /// by their known types. All types returned by the infer_* functions should + /// be resolved as far as possible, i.e. contain no type variables with + /// known type. 
+ fn resolve_ty_as_possible(&mut self, ty: Ty) -> Ty { + self.resolve_obligations_as_possible(); + + self.table.resolve_ty_as_possible(ty) + } + + fn resolve_ty_shallow<'b>(&mut self, ty: &'b Ty) -> Cow<'b, Ty> { + self.table.resolve_ty_shallow(ty) + } + + fn resolve_associated_type(&mut self, inner_ty: Ty, assoc_ty: Option) -> Ty { + self.resolve_associated_type_with_params(inner_ty, assoc_ty, &[]) + } + + fn resolve_associated_type_with_params( + &mut self, + inner_ty: Ty, + assoc_ty: Option, + params: &[Ty], + ) -> Ty { + match assoc_ty { + Some(res_assoc_ty) => { + let trait_ = match res_assoc_ty.lookup(self.db.upcast()).container { + hir_def::AssocContainerId::TraitId(trait_) => trait_, + _ => panic!("resolve_associated_type called with non-associated type"), + }; + let ty = self.table.new_type_var(); + let substs = Substs::build_for_def(self.db, res_assoc_ty) + .push(inner_ty) + .fill(params.iter().cloned()) + .build(); + let trait_ref = TraitRef { trait_, substs: substs.clone() }; + let projection = ProjectionPredicate { + ty: ty.clone(), + projection_ty: ProjectionTy { associated_ty: res_assoc_ty, parameters: substs }, + }; + self.obligations.push(Obligation::Trait(trait_ref)); + self.obligations.push(Obligation::Projection(projection)); + self.resolve_ty_as_possible(ty) + } + None => Ty::Unknown, + } + } + + /// Recurses through the given type, normalizing associated types mentioned + /// in it by replacing them by type variables and registering obligations to + /// resolve later. This should be done once for every type we get from some + /// type annotation (e.g. from a let type annotation, field type or function + /// call). `make_ty` handles this already, but e.g. for field types we need + /// to do it as well. 
+ fn normalize_associated_types_in(&mut self, ty: Ty) -> Ty { + let ty = self.resolve_ty_as_possible(ty); + ty.fold(&mut |ty| match ty { + Ty::Projection(proj_ty) => self.normalize_projection_ty(proj_ty), + _ => ty, + }) + } + + fn normalize_projection_ty(&mut self, proj_ty: ProjectionTy) -> Ty { + let var = self.table.new_type_var(); + let predicate = ProjectionPredicate { projection_ty: proj_ty, ty: var.clone() }; + let obligation = Obligation::Projection(predicate); + self.obligations.push(obligation); + var + } + + fn resolve_variant(&mut self, path: Option<&Path>) -> (Ty, Option) { + let path = match path { + Some(path) => path, + None => return (Ty::Unknown, None), + }; + let resolver = &self.resolver; + let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver); + // FIXME: this should resolve assoc items as well, see this example: + // https://play.rust-lang.org/?gist=087992e9e22495446c01c0d4e2d69521 + let (resolution, unresolved) = + match resolver.resolve_path_in_type_ns(self.db.upcast(), path.mod_path()) { + Some(it) => it, + None => return (Ty::Unknown, None), + }; + return match resolution { + TypeNs::AdtId(AdtId::StructId(strukt)) => { + let substs = Ty::substs_from_path(&ctx, path, strukt.into(), true); + let ty = self.db.ty(strukt.into()); + let ty = self.insert_type_vars(ty.subst(&substs)); + forbid_unresolved_segments((ty, Some(strukt.into())), unresolved) + } + TypeNs::AdtId(AdtId::UnionId(u)) => { + let substs = Ty::substs_from_path(&ctx, path, u.into(), true); + let ty = self.db.ty(u.into()); + let ty = self.insert_type_vars(ty.subst(&substs)); + forbid_unresolved_segments((ty, Some(u.into())), unresolved) + } + TypeNs::EnumVariantId(var) => { + let substs = Ty::substs_from_path(&ctx, path, var.into(), true); + let ty = self.db.ty(var.parent.into()); + let ty = self.insert_type_vars(ty.subst(&substs)); + forbid_unresolved_segments((ty, Some(var.into())), unresolved) + } + TypeNs::SelfType(impl_id) => { + let generics = 
crate::utils::generics(self.db.upcast(), impl_id.into()); + let substs = Substs::type_params_for_generics(&generics); + let ty = self.db.impl_self_ty(impl_id).subst(&substs); + match unresolved { + None => { + let variant = ty_variant(&ty); + (ty, variant) + } + Some(1) => { + let segment = path.mod_path().segments.last().unwrap(); + // this could be an enum variant or associated type + if let Some((AdtId::EnumId(enum_id), _)) = ty.as_adt() { + let enum_data = self.db.enum_data(enum_id); + if let Some(local_id) = enum_data.variant(segment) { + let variant = EnumVariantId { parent: enum_id, local_id }; + return (ty, Some(variant.into())); + } + } + // FIXME potentially resolve assoc type + (Ty::Unknown, None) + } + Some(_) => { + // FIXME diagnostic + (Ty::Unknown, None) + } + } + } + TypeNs::TypeAliasId(it) => { + let substs = Substs::build_for_def(self.db, it) + .fill(std::iter::repeat_with(|| self.table.new_type_var())) + .build(); + let ty = self.db.ty(it.into()).subst(&substs); + let variant = ty_variant(&ty); + forbid_unresolved_segments((ty, variant), unresolved) + } + TypeNs::AdtSelfType(_) => { + // FIXME this could happen in array size expressions, once we're checking them + (Ty::Unknown, None) + } + TypeNs::GenericParam(_) => { + // FIXME potentially resolve assoc type + (Ty::Unknown, None) + } + TypeNs::AdtId(AdtId::EnumId(_)) | TypeNs::BuiltinType(_) | TypeNs::TraitId(_) => { + // FIXME diagnostic + (Ty::Unknown, None) + } + }; + + fn forbid_unresolved_segments( + result: (Ty, Option), + unresolved: Option, + ) -> (Ty, Option) { + if unresolved.is_none() { + result + } else { + // FIXME diagnostic + (Ty::Unknown, None) + } + } + + fn ty_variant(ty: &Ty) -> Option { + ty.as_adt().and_then(|(adt_id, _)| match adt_id { + AdtId::StructId(s) => Some(VariantId::StructId(s)), + AdtId::UnionId(u) => Some(VariantId::UnionId(u)), + AdtId::EnumId(_) => { + // FIXME Error E0071, expected struct, variant or union type, found enum `Foo` + None + } + }) + } + } + + fn 
collect_const(&mut self, data: &ConstData) { + self.return_ty = self.make_ty(&data.type_ref); + } + + fn collect_static(&mut self, data: &StaticData) { + self.return_ty = self.make_ty(&data.type_ref); + } + + fn collect_fn(&mut self, data: &FunctionData) { + let body = Arc::clone(&self.body); // avoid borrow checker problem + let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver) + .with_impl_trait_mode(ImplTraitLoweringMode::Param); + let param_tys = + data.params.iter().map(|type_ref| Ty::from_hir(&ctx, type_ref)).collect::>(); + for (ty, pat) in param_tys.into_iter().zip(body.params.iter()) { + let ty = self.insert_type_vars(ty); + let ty = self.normalize_associated_types_in(ty); + + self.infer_pat(*pat, &ty, BindingMode::default()); + } + let return_ty = self.make_ty_with_mode(&data.ret_type, ImplTraitLoweringMode::Disallowed); // FIXME implement RPIT + self.return_ty = return_ty; + } + + fn infer_body(&mut self) { + self.infer_expr_coerce(self.body.body_expr, &Expectation::has_type(self.return_ty.clone())); + } + + fn resolve_lang_item(&self, name: &str) -> Option { + let krate = self.resolver.krate()?; + let name = SmolStr::new_inline_from_ascii(name.len(), name.as_bytes()); + self.db.lang_item(krate, name) + } + + fn resolve_into_iter_item(&self) -> Option { + let path = path![core::iter::IntoIterator]; + let trait_ = self.resolver.resolve_known_trait(self.db.upcast(), &path)?; + self.db.trait_data(trait_).associated_type_by_name(&name![Item]) + } + + fn resolve_ops_try_ok(&self) -> Option { + let path = path![core::ops::Try]; + let trait_ = self.resolver.resolve_known_trait(self.db.upcast(), &path)?; + self.db.trait_data(trait_).associated_type_by_name(&name![Ok]) + } + + fn resolve_ops_neg_output(&self) -> Option { + let trait_ = self.resolve_lang_item("neg")?.as_trait()?; + self.db.trait_data(trait_).associated_type_by_name(&name![Output]) + } + + fn resolve_ops_not_output(&self) -> Option { + let trait_ = 
self.resolve_lang_item("not")?.as_trait()?; + self.db.trait_data(trait_).associated_type_by_name(&name![Output]) + } + + fn resolve_future_future_output(&self) -> Option { + let trait_ = self.resolve_lang_item("future_trait")?.as_trait()?; + self.db.trait_data(trait_).associated_type_by_name(&name![Output]) + } + + fn resolve_boxed_box(&self) -> Option { + let struct_ = self.resolve_lang_item("owned_box")?.as_struct()?; + Some(struct_.into()) + } + + fn resolve_range_full(&self) -> Option { + let path = path![core::ops::RangeFull]; + let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?; + Some(struct_.into()) + } + + fn resolve_range(&self) -> Option { + let path = path![core::ops::Range]; + let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?; + Some(struct_.into()) + } + + fn resolve_range_inclusive(&self) -> Option { + let path = path![core::ops::RangeInclusive]; + let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?; + Some(struct_.into()) + } + + fn resolve_range_from(&self) -> Option { + let path = path![core::ops::RangeFrom]; + let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?; + Some(struct_.into()) + } + + fn resolve_range_to(&self) -> Option { + let path = path![core::ops::RangeTo]; + let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?; + Some(struct_.into()) + } + + fn resolve_range_to_inclusive(&self) -> Option { + let path = path![core::ops::RangeToInclusive]; + let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?; + Some(struct_.into()) + } + + fn resolve_ops_index(&self) -> Option { + self.resolve_lang_item("index")?.as_trait() + } + + fn resolve_ops_index_output(&self) -> Option { + let trait_ = self.resolve_ops_index()?; + self.db.trait_data(trait_).associated_type_by_name(&name![Output]) + } +} + +/// The kinds of placeholders we need during type inference. 
There's separate +/// values for general types, and for integer and float variables. The latter +/// two are used for inference of literal values (e.g. `100` could be one of +/// several integer types). +#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] +pub enum InferTy { + TypeVar(unify::TypeVarId), + IntVar(unify::TypeVarId), + FloatVar(unify::TypeVarId), + MaybeNeverTypeVar(unify::TypeVarId), +} + +impl InferTy { + fn to_inner(self) -> unify::TypeVarId { + match self { + InferTy::TypeVar(ty) + | InferTy::IntVar(ty) + | InferTy::FloatVar(ty) + | InferTy::MaybeNeverTypeVar(ty) => ty, + } + } + + fn fallback_value(self) -> Ty { + match self { + InferTy::TypeVar(..) => Ty::Unknown, + InferTy::IntVar(..) => Ty::simple(TypeCtor::Int(IntTy::i32())), + InferTy::FloatVar(..) => Ty::simple(TypeCtor::Float(FloatTy::f64())), + InferTy::MaybeNeverTypeVar(..) => Ty::simple(TypeCtor::Never), + } + } +} + +/// When inferring an expression, we propagate downward whatever type hint we +/// are able in the form of an `Expectation`. +#[derive(Clone, PartialEq, Eq, Debug)] +struct Expectation { + ty: Ty, + /// See the `rvalue_hint` method. + rvalue_hint: bool, +} + +impl Expectation { + /// The expectation that the type of the expression needs to equal the given + /// type. + fn has_type(ty: Ty) -> Self { + Expectation { ty, rvalue_hint: false } + } + + /// The following explanation is copied straight from rustc: + /// Provides an expectation for an rvalue expression given an *optional* + /// hint, which is not required for type safety (the resulting type might + /// be checked higher up, as is the case with `&expr` and `box expr`), but + /// is useful in determining the concrete type. + /// + /// The primary use case is where the expected type is a fat pointer, + /// like `&[isize]`. For example, consider the following statement: + /// + /// let x: &[isize] = &[1, 2, 3]; + /// + /// In this case, the expected type for the `&[1, 2, 3]` expression is + /// `&[isize]`. 
If however we were to say that `[1, 2, 3]` has the + /// expectation `ExpectHasType([isize])`, that would be too strong -- + /// `[1, 2, 3]` does not have the type `[isize]` but rather `[isize; 3]`. + /// It is only the `&[1, 2, 3]` expression as a whole that can be coerced + /// to the type `&[isize]`. Therefore, we propagate this more limited hint, + /// which still is useful, because it informs integer literals and the like. + /// See the test case `test/ui/coerce-expect-unsized.rs` and #20169 + /// for examples of where this comes up,. + fn rvalue_hint(ty: Ty) -> Self { + Expectation { ty, rvalue_hint: true } + } + + /// This expresses no expectation on the type. + fn none() -> Self { + Expectation { ty: Ty::Unknown, rvalue_hint: false } + } + + fn coercion_target(&self) -> &Ty { + if self.rvalue_hint { + &Ty::Unknown + } else { + &self.ty + } + } +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] +enum Diverges { + Maybe, + Always, +} + +impl Diverges { + fn is_always(self) -> bool { + self == Diverges::Always + } +} + +impl std::ops::BitAnd for Diverges { + type Output = Self; + fn bitand(self, other: Self) -> Self { + std::cmp::min(self, other) + } +} + +impl std::ops::BitOr for Diverges { + type Output = Self; + fn bitor(self, other: Self) -> Self { + std::cmp::max(self, other) + } +} + +impl std::ops::BitAndAssign for Diverges { + fn bitand_assign(&mut self, other: Self) { + *self = *self & other; + } +} + +impl std::ops::BitOrAssign for Diverges { + fn bitor_assign(&mut self, other: Self) { + *self = *self | other; + } +} + +mod diagnostics { + use hir_def::{expr::ExprId, DefWithBodyId}; + use hir_expand::diagnostics::DiagnosticSink; + + use crate::{ + db::HirDatabase, + diagnostics::{BreakOutsideOfLoop, NoSuchField}, + }; + + #[derive(Debug, PartialEq, Eq, Clone)] + pub(super) enum InferenceDiagnostic { + NoSuchField { expr: ExprId, field: usize }, + BreakOutsideOfLoop { expr: ExprId }, + } + + impl InferenceDiagnostic { + pub(super) fn 
add_to( + &self, + db: &dyn HirDatabase, + owner: DefWithBodyId, + sink: &mut DiagnosticSink, + ) { + match self { + InferenceDiagnostic::NoSuchField { expr, field } => { + let (_, source_map) = db.body_with_source_map(owner); + let field = source_map.field_syntax(*expr, *field); + sink.push(NoSuchField { file: field.file_id, field: field.value }) + } + InferenceDiagnostic::BreakOutsideOfLoop { expr } => { + let (_, source_map) = db.body_with_source_map(owner); + let ptr = source_map + .expr_syntax(*expr) + .expect("break outside of loop in synthetic syntax"); + sink.push(BreakOutsideOfLoop { file: ptr.file_id, expr: ptr.value }) + } + } + } + } +} diff --git a/crates/ra_hir_ty/src/infer/coerce.rs b/crates/hir_ty/src/infer/coerce.rs similarity index 100% rename from crates/ra_hir_ty/src/infer/coerce.rs rename to crates/hir_ty/src/infer/coerce.rs diff --git a/crates/hir_ty/src/infer/expr.rs b/crates/hir_ty/src/infer/expr.rs new file mode 100644 index 0000000000..a2f849d021 --- /dev/null +++ b/crates/hir_ty/src/infer/expr.rs @@ -0,0 +1,873 @@ +//! Type inference for expressions. 
+ +use std::iter::{repeat, repeat_with}; +use std::{mem, sync::Arc}; + +use hir_def::{ + builtin_type::Signedness, + expr::{Array, BinaryOp, Expr, ExprId, Literal, Statement, UnaryOp}, + path::{GenericArg, GenericArgs}, + resolver::resolver_for_expr, + AdtId, AssocContainerId, FieldId, Lookup, +}; +use hir_expand::name::{name, Name}; +use syntax::ast::RangeOp; + +use crate::{ + autoderef, method_resolution, op, + traits::{FnTrait, InEnvironment}, + utils::{generics, variant_data, Generics}, + ApplicationTy, Binders, CallableDefId, InferTy, IntTy, Mutability, Obligation, Rawness, Substs, + TraitRef, Ty, TypeCtor, +}; + +use super::{ + find_breakable, BindingMode, BreakableContext, Diverges, Expectation, InferenceContext, + InferenceDiagnostic, TypeMismatch, +}; + +impl<'a> InferenceContext<'a> { + pub(super) fn infer_expr(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty { + let ty = self.infer_expr_inner(tgt_expr, expected); + if ty.is_never() { + // Any expression that produces a value of type `!` must have diverged + self.diverges = Diverges::Always; + } + let could_unify = self.unify(&ty, &expected.ty); + if !could_unify { + self.result.type_mismatches.insert( + tgt_expr, + TypeMismatch { expected: expected.ty.clone(), actual: ty.clone() }, + ); + } + self.resolve_ty_as_possible(ty) + } + + /// Infer type of expression with possibly implicit coerce to the expected type. + /// Return the type after possible coercion. + pub(super) fn infer_expr_coerce(&mut self, expr: ExprId, expected: &Expectation) -> Ty { + let ty = self.infer_expr_inner(expr, &expected); + let ty = if !self.coerce(&ty, &expected.coercion_target()) { + self.result + .type_mismatches + .insert(expr, TypeMismatch { expected: expected.ty.clone(), actual: ty.clone() }); + // Return actual type when type mismatch. + // This is needed for diagnostic when return type mismatch. 
+ ty + } else if expected.coercion_target() == &Ty::Unknown { + ty + } else { + expected.ty.clone() + }; + + self.resolve_ty_as_possible(ty) + } + + fn callable_sig_from_fn_trait(&mut self, ty: &Ty, num_args: usize) -> Option<(Vec, Ty)> { + let krate = self.resolver.krate()?; + let fn_once_trait = FnTrait::FnOnce.get_id(self.db, krate)?; + let output_assoc_type = + self.db.trait_data(fn_once_trait).associated_type_by_name(&name![Output])?; + let generic_params = generics(self.db.upcast(), fn_once_trait.into()); + if generic_params.len() != 2 { + return None; + } + + let mut param_builder = Substs::builder(num_args); + let mut arg_tys = vec![]; + for _ in 0..num_args { + let arg = self.table.new_type_var(); + param_builder = param_builder.push(arg.clone()); + arg_tys.push(arg); + } + let parameters = param_builder.build(); + let arg_ty = Ty::Apply(ApplicationTy { + ctor: TypeCtor::Tuple { cardinality: num_args as u16 }, + parameters, + }); + let substs = + Substs::build_for_generics(&generic_params).push(ty.clone()).push(arg_ty).build(); + + let trait_env = Arc::clone(&self.trait_env); + let implements_fn_trait = + Obligation::Trait(TraitRef { trait_: fn_once_trait, substs: substs.clone() }); + let goal = self.canonicalizer().canonicalize_obligation(InEnvironment { + value: implements_fn_trait.clone(), + environment: trait_env, + }); + if self.db.trait_solve(krate, goal.value).is_some() { + self.obligations.push(implements_fn_trait); + let output_proj_ty = + crate::ProjectionTy { associated_ty: output_assoc_type, parameters: substs }; + let return_ty = self.normalize_projection_ty(output_proj_ty); + Some((arg_tys, return_ty)) + } else { + None + } + } + + pub fn callable_sig(&mut self, ty: &Ty, num_args: usize) -> Option<(Vec, Ty)> { + match ty.callable_sig(self.db) { + Some(sig) => Some((sig.params().to_vec(), sig.ret().clone())), + None => self.callable_sig_from_fn_trait(ty, num_args), + } + } + + fn infer_expr_inner(&mut self, tgt_expr: ExprId, expected: 
&Expectation) -> Ty { + let body = Arc::clone(&self.body); // avoid borrow checker problem + let ty = match &body[tgt_expr] { + Expr::Missing => Ty::Unknown, + Expr::If { condition, then_branch, else_branch } => { + // if let is desugared to match, so this is always simple if + self.infer_expr(*condition, &Expectation::has_type(Ty::simple(TypeCtor::Bool))); + + let condition_diverges = mem::replace(&mut self.diverges, Diverges::Maybe); + let mut both_arms_diverge = Diverges::Always; + + let then_ty = self.infer_expr_inner(*then_branch, &expected); + both_arms_diverge &= mem::replace(&mut self.diverges, Diverges::Maybe); + let else_ty = match else_branch { + Some(else_branch) => self.infer_expr_inner(*else_branch, &expected), + None => Ty::unit(), + }; + both_arms_diverge &= self.diverges; + + self.diverges = condition_diverges | both_arms_diverge; + + self.coerce_merge_branch(&then_ty, &else_ty) + } + Expr::Block { statements, tail, .. } => { + // FIXME: Breakable block inference + self.infer_block(statements, *tail, expected) + } + Expr::Unsafe { body } => self.infer_expr(*body, expected), + Expr::TryBlock { body } => { + let _inner = self.infer_expr(*body, expected); + // FIXME should be std::result::Result<{inner}, _> + Ty::Unknown + } + Expr::Loop { body, label } => { + self.breakables.push(BreakableContext { + may_break: false, + break_ty: self.table.new_type_var(), + label: label.clone(), + }); + self.infer_expr(*body, &Expectation::has_type(Ty::unit())); + + let ctxt = self.breakables.pop().expect("breakable stack broken"); + if ctxt.may_break { + self.diverges = Diverges::Maybe; + } + + if ctxt.may_break { + ctxt.break_ty + } else { + Ty::simple(TypeCtor::Never) + } + } + Expr::While { condition, body, label } => { + self.breakables.push(BreakableContext { + may_break: false, + break_ty: Ty::Unknown, + label: label.clone(), + }); + // while let is desugared to a match loop, so this is always simple while + self.infer_expr(*condition, 
&Expectation::has_type(Ty::simple(TypeCtor::Bool))); + self.infer_expr(*body, &Expectation::has_type(Ty::unit())); + let _ctxt = self.breakables.pop().expect("breakable stack broken"); + // the body may not run, so it diverging doesn't mean we diverge + self.diverges = Diverges::Maybe; + Ty::unit() + } + Expr::For { iterable, body, pat, label } => { + let iterable_ty = self.infer_expr(*iterable, &Expectation::none()); + + self.breakables.push(BreakableContext { + may_break: false, + break_ty: Ty::Unknown, + label: label.clone(), + }); + let pat_ty = + self.resolve_associated_type(iterable_ty, self.resolve_into_iter_item()); + + self.infer_pat(*pat, &pat_ty, BindingMode::default()); + + self.infer_expr(*body, &Expectation::has_type(Ty::unit())); + let _ctxt = self.breakables.pop().expect("breakable stack broken"); + // the body may not run, so it diverging doesn't mean we diverge + self.diverges = Diverges::Maybe; + Ty::unit() + } + Expr::Lambda { body, args, ret_type, arg_types } => { + assert_eq!(args.len(), arg_types.len()); + + let mut sig_tys = Vec::new(); + + // collect explicitly written argument types + for arg_type in arg_types.iter() { + let arg_ty = if let Some(type_ref) = arg_type { + self.make_ty(type_ref) + } else { + self.table.new_type_var() + }; + sig_tys.push(arg_ty); + } + + // add return type + let ret_ty = match ret_type { + Some(type_ref) => self.make_ty(type_ref), + None => self.table.new_type_var(), + }; + sig_tys.push(ret_ty.clone()); + let sig_ty = Ty::apply( + TypeCtor::FnPtr { num_args: sig_tys.len() as u16 - 1, is_varargs: false }, + Substs(sig_tys.clone().into()), + ); + let closure_ty = + Ty::apply_one(TypeCtor::Closure { def: self.owner, expr: tgt_expr }, sig_ty); + + // Eagerly try to relate the closure type with the expected + // type, otherwise we often won't have enough information to + // infer the body. 
+ self.coerce(&closure_ty, &expected.ty); + + // Now go through the argument patterns + for (arg_pat, arg_ty) in args.iter().zip(sig_tys) { + let resolved = self.resolve_ty_as_possible(arg_ty); + self.infer_pat(*arg_pat, &resolved, BindingMode::default()); + } + + let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe); + let prev_ret_ty = mem::replace(&mut self.return_ty, ret_ty.clone()); + + self.infer_expr_coerce(*body, &Expectation::has_type(ret_ty)); + + self.diverges = prev_diverges; + self.return_ty = prev_ret_ty; + + closure_ty + } + Expr::Call { callee, args } => { + let callee_ty = self.infer_expr(*callee, &Expectation::none()); + let canonicalized = self.canonicalizer().canonicalize_ty(callee_ty.clone()); + let mut derefs = autoderef( + self.db, + self.resolver.krate(), + InEnvironment { + value: canonicalized.value.clone(), + environment: self.trait_env.clone(), + }, + ); + let (param_tys, ret_ty): (Vec, Ty) = derefs + .find_map(|callee_deref_ty| { + self.callable_sig( + &canonicalized.decanonicalize_ty(callee_deref_ty.value), + args.len(), + ) + }) + .unwrap_or((Vec::new(), Ty::Unknown)); + self.register_obligations_for_call(&callee_ty); + self.check_call_arguments(args, ¶m_tys); + self.normalize_associated_types_in(ret_ty) + } + Expr::MethodCall { receiver, args, method_name, generic_args } => self + .infer_method_call(tgt_expr, *receiver, &args, &method_name, generic_args.as_ref()), + Expr::Match { expr, arms } => { + let input_ty = self.infer_expr(*expr, &Expectation::none()); + + let mut result_ty = if arms.is_empty() { + Ty::simple(TypeCtor::Never) + } else { + self.table.new_type_var() + }; + + let matchee_diverges = self.diverges; + let mut all_arms_diverge = Diverges::Always; + + for arm in arms { + self.diverges = Diverges::Maybe; + let _pat_ty = self.infer_pat(arm.pat, &input_ty, BindingMode::default()); + if let Some(guard_expr) = arm.guard { + self.infer_expr( + guard_expr, + &Expectation::has_type(Ty::simple(TypeCtor::Bool)), 
+ ); + } + + let arm_ty = self.infer_expr_inner(arm.expr, &expected); + all_arms_diverge &= self.diverges; + result_ty = self.coerce_merge_branch(&result_ty, &arm_ty); + } + + self.diverges = matchee_diverges | all_arms_diverge; + + result_ty + } + Expr::Path(p) => { + // FIXME this could be more efficient... + let resolver = resolver_for_expr(self.db.upcast(), self.owner, tgt_expr); + self.infer_path(&resolver, p, tgt_expr.into()).unwrap_or(Ty::Unknown) + } + Expr::Continue { .. } => Ty::simple(TypeCtor::Never), + Expr::Break { expr, label } => { + let val_ty = if let Some(expr) = expr { + self.infer_expr(*expr, &Expectation::none()) + } else { + Ty::unit() + }; + + let last_ty = + if let Some(ctxt) = find_breakable(&mut self.breakables, label.as_ref()) { + ctxt.break_ty.clone() + } else { + Ty::Unknown + }; + + let merged_type = self.coerce_merge_branch(&last_ty, &val_ty); + + if let Some(ctxt) = find_breakable(&mut self.breakables, label.as_ref()) { + ctxt.break_ty = merged_type; + ctxt.may_break = true; + } else { + self.push_diagnostic(InferenceDiagnostic::BreakOutsideOfLoop { + expr: tgt_expr, + }); + } + + Ty::simple(TypeCtor::Never) + } + Expr::Return { expr } => { + if let Some(expr) = expr { + self.infer_expr_coerce(*expr, &Expectation::has_type(self.return_ty.clone())); + } else { + let unit = Ty::unit(); + self.coerce(&unit, &self.return_ty.clone()); + } + Ty::simple(TypeCtor::Never) + } + Expr::RecordLit { path, fields, spread } => { + let (ty, def_id) = self.resolve_variant(path.as_ref()); + if let Some(variant) = def_id { + self.write_variant_resolution(tgt_expr.into(), variant); + } + + self.unify(&ty, &expected.ty); + + let substs = ty.substs().unwrap_or_else(Substs::empty); + let field_types = def_id.map(|it| self.db.field_types(it)).unwrap_or_default(); + let variant_data = def_id.map(|it| variant_data(self.db.upcast(), it)); + for (field_idx, field) in fields.iter().enumerate() { + let field_def = + variant_data.as_ref().and_then(|it| match 
it.field(&field.name) { + Some(local_id) => Some(FieldId { parent: def_id.unwrap(), local_id }), + None => { + self.push_diagnostic(InferenceDiagnostic::NoSuchField { + expr: tgt_expr, + field: field_idx, + }); + None + } + }); + if let Some(field_def) = field_def { + self.result.record_field_resolutions.insert(field.expr, field_def); + } + let field_ty = field_def + .map_or(Ty::Unknown, |it| field_types[it.local_id].clone().subst(&substs)); + self.infer_expr_coerce(field.expr, &Expectation::has_type(field_ty)); + } + if let Some(expr) = spread { + self.infer_expr(*expr, &Expectation::has_type(ty.clone())); + } + ty + } + Expr::Field { expr, name } => { + let receiver_ty = self.infer_expr_inner(*expr, &Expectation::none()); + let canonicalized = self.canonicalizer().canonicalize_ty(receiver_ty); + let ty = autoderef::autoderef( + self.db, + self.resolver.krate(), + InEnvironment { + value: canonicalized.value.clone(), + environment: self.trait_env.clone(), + }, + ) + .find_map(|derefed_ty| match canonicalized.decanonicalize_ty(derefed_ty.value) { + Ty::Apply(a_ty) => match a_ty.ctor { + TypeCtor::Tuple { .. 
} => name + .as_tuple_index() + .and_then(|idx| a_ty.parameters.0.get(idx).cloned()), + TypeCtor::Adt(AdtId::StructId(s)) => { + self.db.struct_data(s).variant_data.field(name).map(|local_id| { + let field = FieldId { parent: s.into(), local_id }; + self.write_field_resolution(tgt_expr, field); + self.db.field_types(s.into())[field.local_id] + .clone() + .subst(&a_ty.parameters) + }) + } + TypeCtor::Adt(AdtId::UnionId(u)) => { + self.db.union_data(u).variant_data.field(name).map(|local_id| { + let field = FieldId { parent: u.into(), local_id }; + self.write_field_resolution(tgt_expr, field); + self.db.field_types(u.into())[field.local_id] + .clone() + .subst(&a_ty.parameters) + }) + } + _ => None, + }, + _ => None, + }) + .unwrap_or(Ty::Unknown); + let ty = self.insert_type_vars(ty); + self.normalize_associated_types_in(ty) + } + Expr::Await { expr } => { + let inner_ty = self.infer_expr_inner(*expr, &Expectation::none()); + self.resolve_associated_type(inner_ty, self.resolve_future_future_output()) + } + Expr::Try { expr } => { + let inner_ty = self.infer_expr_inner(*expr, &Expectation::none()); + self.resolve_associated_type(inner_ty, self.resolve_ops_try_ok()) + } + Expr::Cast { expr, type_ref } => { + let _inner_ty = self.infer_expr_inner(*expr, &Expectation::none()); + let cast_ty = self.make_ty(type_ref); + // FIXME check the cast... 
+ cast_ty + } + Expr::Ref { expr, rawness, mutability } => { + let expectation = if let Some((exp_inner, exp_rawness, exp_mutability)) = + &expected.ty.as_reference_or_ptr() + { + if *exp_mutability == Mutability::Mut && *mutability == Mutability::Shared { + // FIXME: throw type error - expected mut reference but found shared ref, + // which cannot be coerced + } + if *exp_rawness == Rawness::Ref && *rawness == Rawness::RawPtr { + // FIXME: throw type error - expected reference but found ptr, + // which cannot be coerced + } + Expectation::rvalue_hint(Ty::clone(exp_inner)) + } else { + Expectation::none() + }; + let inner_ty = self.infer_expr_inner(*expr, &expectation); + let ty = match rawness { + Rawness::RawPtr => TypeCtor::RawPtr(*mutability), + Rawness::Ref => TypeCtor::Ref(*mutability), + }; + Ty::apply_one(ty, inner_ty) + } + Expr::Box { expr } => { + let inner_ty = self.infer_expr_inner(*expr, &Expectation::none()); + if let Some(box_) = self.resolve_boxed_box() { + Ty::apply_one(TypeCtor::Adt(box_), inner_ty) + } else { + Ty::Unknown + } + } + Expr::UnaryOp { expr, op } => { + let inner_ty = self.infer_expr_inner(*expr, &Expectation::none()); + match op { + UnaryOp::Deref => match self.resolver.krate() { + Some(krate) => { + let canonicalized = self.canonicalizer().canonicalize_ty(inner_ty); + match autoderef::deref( + self.db, + krate, + InEnvironment { + value: &canonicalized.value, + environment: self.trait_env.clone(), + }, + ) { + Some(derefed_ty) => { + canonicalized.decanonicalize_ty(derefed_ty.value) + } + None => Ty::Unknown, + } + } + None => Ty::Unknown, + }, + UnaryOp::Neg => { + match &inner_ty { + // Fast path for builtins + Ty::Apply(ApplicationTy { + ctor: TypeCtor::Int(IntTy { signedness: Signedness::Signed, .. }), + .. + }) + | Ty::Apply(ApplicationTy { ctor: TypeCtor::Float(_), .. 
}) + | Ty::Infer(InferTy::IntVar(..)) + | Ty::Infer(InferTy::FloatVar(..)) => inner_ty, + // Otherwise we resolve via the std::ops::Neg trait + _ => self + .resolve_associated_type(inner_ty, self.resolve_ops_neg_output()), + } + } + UnaryOp::Not => { + match &inner_ty { + // Fast path for builtins + Ty::Apply(ApplicationTy { ctor: TypeCtor::Bool, .. }) + | Ty::Apply(ApplicationTy { ctor: TypeCtor::Int(_), .. }) + | Ty::Infer(InferTy::IntVar(..)) => inner_ty, + // Otherwise we resolve via the std::ops::Not trait + _ => self + .resolve_associated_type(inner_ty, self.resolve_ops_not_output()), + } + } + } + } + Expr::BinaryOp { lhs, rhs, op } => match op { + Some(op) => { + let lhs_expectation = match op { + BinaryOp::LogicOp(..) => Expectation::has_type(Ty::simple(TypeCtor::Bool)), + _ => Expectation::none(), + }; + let lhs_ty = self.infer_expr(*lhs, &lhs_expectation); + // FIXME: find implementation of trait corresponding to operation + // symbol and resolve associated `Output` type + let rhs_expectation = op::binary_op_rhs_expectation(*op, lhs_ty.clone()); + let rhs_ty = self.infer_expr(*rhs, &Expectation::has_type(rhs_expectation)); + + // FIXME: similar as above, return ty is often associated trait type + op::binary_op_return_ty(*op, lhs_ty, rhs_ty) + } + _ => Ty::Unknown, + }, + Expr::Range { lhs, rhs, range_type } => { + let lhs_ty = lhs.map(|e| self.infer_expr_inner(e, &Expectation::none())); + let rhs_expect = lhs_ty + .as_ref() + .map_or_else(Expectation::none, |ty| Expectation::has_type(ty.clone())); + let rhs_ty = rhs.map(|e| self.infer_expr(e, &rhs_expect)); + match (range_type, lhs_ty, rhs_ty) { + (RangeOp::Exclusive, None, None) => match self.resolve_range_full() { + Some(adt) => Ty::simple(TypeCtor::Adt(adt)), + None => Ty::Unknown, + }, + (RangeOp::Exclusive, None, Some(ty)) => match self.resolve_range_to() { + Some(adt) => Ty::apply_one(TypeCtor::Adt(adt), ty), + None => Ty::Unknown, + }, + (RangeOp::Inclusive, None, Some(ty)) => { + match 
self.resolve_range_to_inclusive() { + Some(adt) => Ty::apply_one(TypeCtor::Adt(adt), ty), + None => Ty::Unknown, + } + } + (RangeOp::Exclusive, Some(_), Some(ty)) => match self.resolve_range() { + Some(adt) => Ty::apply_one(TypeCtor::Adt(adt), ty), + None => Ty::Unknown, + }, + (RangeOp::Inclusive, Some(_), Some(ty)) => { + match self.resolve_range_inclusive() { + Some(adt) => Ty::apply_one(TypeCtor::Adt(adt), ty), + None => Ty::Unknown, + } + } + (RangeOp::Exclusive, Some(ty), None) => match self.resolve_range_from() { + Some(adt) => Ty::apply_one(TypeCtor::Adt(adt), ty), + None => Ty::Unknown, + }, + (RangeOp::Inclusive, _, None) => Ty::Unknown, + } + } + Expr::Index { base, index } => { + let base_ty = self.infer_expr_inner(*base, &Expectation::none()); + let index_ty = self.infer_expr(*index, &Expectation::none()); + + if let (Some(index_trait), Some(krate)) = + (self.resolve_ops_index(), self.resolver.krate()) + { + let canonicalized = self.canonicalizer().canonicalize_ty(base_ty); + let self_ty = method_resolution::resolve_indexing_op( + self.db, + &canonicalized.value, + self.trait_env.clone(), + krate, + index_trait, + ); + let self_ty = + self_ty.map_or(Ty::Unknown, |t| canonicalized.decanonicalize_ty(t.value)); + self.resolve_associated_type_with_params( + self_ty, + self.resolve_ops_index_output(), + &[index_ty], + ) + } else { + Ty::Unknown + } + } + Expr::Tuple { exprs } => { + let mut tys = match &expected.ty { + ty_app!(TypeCtor::Tuple { .. 
}, st) => st + .iter() + .cloned() + .chain(repeat_with(|| self.table.new_type_var())) + .take(exprs.len()) + .collect::>(), + _ => (0..exprs.len()).map(|_| self.table.new_type_var()).collect(), + }; + + for (expr, ty) in exprs.iter().zip(tys.iter_mut()) { + self.infer_expr_coerce(*expr, &Expectation::has_type(ty.clone())); + } + + Ty::apply(TypeCtor::Tuple { cardinality: tys.len() as u16 }, Substs(tys.into())) + } + Expr::Array(array) => { + let elem_ty = match &expected.ty { + ty_app!(TypeCtor::Array, st) | ty_app!(TypeCtor::Slice, st) => { + st.as_single().clone() + } + _ => self.table.new_type_var(), + }; + + match array { + Array::ElementList(items) => { + for expr in items.iter() { + self.infer_expr_coerce(*expr, &Expectation::has_type(elem_ty.clone())); + } + } + Array::Repeat { initializer, repeat } => { + self.infer_expr_coerce( + *initializer, + &Expectation::has_type(elem_ty.clone()), + ); + self.infer_expr( + *repeat, + &Expectation::has_type(Ty::simple(TypeCtor::Int(IntTy::usize()))), + ); + } + } + + Ty::apply_one(TypeCtor::Array, elem_ty) + } + Expr::Literal(lit) => match lit { + Literal::Bool(..) => Ty::simple(TypeCtor::Bool), + Literal::String(..) => { + Ty::apply_one(TypeCtor::Ref(Mutability::Shared), Ty::simple(TypeCtor::Str)) + } + Literal::ByteString(..) => { + let byte_type = Ty::simple(TypeCtor::Int(IntTy::u8())); + let array_type = Ty::apply_one(TypeCtor::Array, byte_type); + Ty::apply_one(TypeCtor::Ref(Mutability::Shared), array_type) + } + Literal::Char(..) 
=> Ty::simple(TypeCtor::Char), + Literal::Int(_v, ty) => match ty { + Some(int_ty) => Ty::simple(TypeCtor::Int((*int_ty).into())), + None => self.table.new_integer_var(), + }, + Literal::Float(_v, ty) => match ty { + Some(float_ty) => Ty::simple(TypeCtor::Float((*float_ty).into())), + None => self.table.new_float_var(), + }, + }, + }; + // use a new type variable if we got Ty::Unknown here + let ty = self.insert_type_vars_shallow(ty); + let ty = self.resolve_ty_as_possible(ty); + self.write_expr_ty(tgt_expr, ty.clone()); + ty + } + + fn infer_block( + &mut self, + statements: &[Statement], + tail: Option, + expected: &Expectation, + ) -> Ty { + for stmt in statements { + match stmt { + Statement::Let { pat, type_ref, initializer } => { + let decl_ty = + type_ref.as_ref().map(|tr| self.make_ty(tr)).unwrap_or(Ty::Unknown); + + // Always use the declared type when specified + let mut ty = decl_ty.clone(); + + if let Some(expr) = initializer { + let actual_ty = + self.infer_expr_coerce(*expr, &Expectation::has_type(decl_ty.clone())); + if decl_ty == Ty::Unknown { + ty = actual_ty; + } + } + + let ty = self.resolve_ty_as_possible(ty); + self.infer_pat(*pat, &ty, BindingMode::default()); + } + Statement::Expr(expr) => { + self.infer_expr(*expr, &Expectation::none()); + } + } + } + + let ty = if let Some(expr) = tail { + self.infer_expr_coerce(expr, expected) + } else { + // Citing rustc: if there is no explicit tail expression, + // that is typically equivalent to a tail expression + // of `()` -- except if the block diverges. In that + // case, there is no value supplied from the tail + // expression (assuming there are no other breaks, + // this implies that the type of the block will be + // `!`). 
+ if self.diverges.is_always() { + // we don't even make an attempt at coercion + self.table.new_maybe_never_type_var() + } else { + self.coerce(&Ty::unit(), expected.coercion_target()); + Ty::unit() + } + }; + ty + } + + fn infer_method_call( + &mut self, + tgt_expr: ExprId, + receiver: ExprId, + args: &[ExprId], + method_name: &Name, + generic_args: Option<&GenericArgs>, + ) -> Ty { + let receiver_ty = self.infer_expr(receiver, &Expectation::none()); + let canonicalized_receiver = self.canonicalizer().canonicalize_ty(receiver_ty.clone()); + + let traits_in_scope = self.resolver.traits_in_scope(self.db.upcast()); + + let resolved = self.resolver.krate().and_then(|krate| { + method_resolution::lookup_method( + &canonicalized_receiver.value, + self.db, + self.trait_env.clone(), + krate, + &traits_in_scope, + method_name, + ) + }); + let (derefed_receiver_ty, method_ty, def_generics) = match resolved { + Some((ty, func)) => { + let ty = canonicalized_receiver.decanonicalize_ty(ty); + self.write_method_resolution(tgt_expr, func); + (ty, self.db.value_ty(func.into()), Some(generics(self.db.upcast(), func.into()))) + } + None => (receiver_ty, Binders::new(0, Ty::Unknown), None), + }; + let substs = self.substs_for_method_call(def_generics, generic_args, &derefed_receiver_ty); + let method_ty = method_ty.subst(&substs); + let method_ty = self.insert_type_vars(method_ty); + self.register_obligations_for_call(&method_ty); + let (expected_receiver_ty, param_tys, ret_ty) = match method_ty.callable_sig(self.db) { + Some(sig) => { + if !sig.params().is_empty() { + (sig.params()[0].clone(), sig.params()[1..].to_vec(), sig.ret().clone()) + } else { + (Ty::Unknown, Vec::new(), sig.ret().clone()) + } + } + None => (Ty::Unknown, Vec::new(), Ty::Unknown), + }; + // Apply autoref so the below unification works correctly + // FIXME: return correct autorefs from lookup_method + let actual_receiver_ty = match expected_receiver_ty.as_reference() { + Some((_, mutability)) => 
Ty::apply_one(TypeCtor::Ref(mutability), derefed_receiver_ty), + _ => derefed_receiver_ty, + }; + self.unify(&expected_receiver_ty, &actual_receiver_ty); + + self.check_call_arguments(args, ¶m_tys); + self.normalize_associated_types_in(ret_ty) + } + + fn check_call_arguments(&mut self, args: &[ExprId], param_tys: &[Ty]) { + // Quoting https://github.com/rust-lang/rust/blob/6ef275e6c3cb1384ec78128eceeb4963ff788dca/src/librustc_typeck/check/mod.rs#L3325 -- + // We do this in a pretty awful way: first we type-check any arguments + // that are not closures, then we type-check the closures. This is so + // that we have more information about the types of arguments when we + // type-check the functions. This isn't really the right way to do this. + for &check_closures in &[false, true] { + let param_iter = param_tys.iter().cloned().chain(repeat(Ty::Unknown)); + for (&arg, param_ty) in args.iter().zip(param_iter) { + let is_closure = matches!(&self.body[arg], Expr::Lambda { .. }); + if is_closure != check_closures { + continue; + } + + let param_ty = self.normalize_associated_types_in(param_ty); + self.infer_expr_coerce(arg, &Expectation::has_type(param_ty.clone())); + } + } + } + + fn substs_for_method_call( + &mut self, + def_generics: Option, + generic_args: Option<&GenericArgs>, + receiver_ty: &Ty, + ) -> Substs { + let (parent_params, self_params, type_params, impl_trait_params) = + def_generics.as_ref().map_or((0, 0, 0, 0), |g| g.provenance_split()); + assert_eq!(self_params, 0); // method shouldn't have another Self param + let total_len = parent_params + type_params + impl_trait_params; + let mut substs = Vec::with_capacity(total_len); + // Parent arguments are unknown, except for the receiver type + if let Some(parent_generics) = def_generics.as_ref().map(|p| p.iter_parent()) { + for (_id, param) in parent_generics { + if param.provenance == hir_def::generics::TypeParamProvenance::TraitSelf { + substs.push(receiver_ty.clone()); + } else { + 
substs.push(Ty::Unknown); + } + } + } + // handle provided type arguments + if let Some(generic_args) = generic_args { + // if args are provided, it should be all of them, but we can't rely on that + for arg in generic_args.args.iter().take(type_params) { + match arg { + GenericArg::Type(type_ref) => { + let ty = self.make_ty(type_ref); + substs.push(ty); + } + } + } + }; + let supplied_params = substs.len(); + for _ in supplied_params..total_len { + substs.push(Ty::Unknown); + } + assert_eq!(substs.len(), total_len); + Substs(substs.into()) + } + + fn register_obligations_for_call(&mut self, callable_ty: &Ty) { + if let Ty::Apply(a_ty) = callable_ty { + if let TypeCtor::FnDef(def) = a_ty.ctor { + let generic_predicates = self.db.generic_predicates(def.into()); + for predicate in generic_predicates.iter() { + let predicate = predicate.clone().subst(&a_ty.parameters); + if let Some(obligation) = Obligation::from_predicate(predicate) { + self.obligations.push(obligation); + } + } + // add obligation for trait implementation, if this is a trait method + match def { + CallableDefId::FunctionId(f) => { + if let AssocContainerId::TraitId(trait_) = + f.lookup(self.db.upcast()).container + { + // construct a TraitDef + let substs = a_ty + .parameters + .prefix(generics(self.db.upcast(), trait_.into()).len()); + self.obligations.push(Obligation::Trait(TraitRef { trait_, substs })); + } + } + CallableDefId::StructId(_) | CallableDefId::EnumVariantId(_) => {} + } + } + } + } +} diff --git a/crates/ra_hir_ty/src/infer/pat.rs b/crates/hir_ty/src/infer/pat.rs similarity index 100% rename from crates/ra_hir_ty/src/infer/pat.rs rename to crates/hir_ty/src/infer/pat.rs diff --git a/crates/ra_hir_ty/src/infer/path.rs b/crates/hir_ty/src/infer/path.rs similarity index 100% rename from crates/ra_hir_ty/src/infer/path.rs rename to crates/hir_ty/src/infer/path.rs diff --git a/crates/ra_hir_ty/src/infer/unify.rs b/crates/hir_ty/src/infer/unify.rs similarity index 100% rename from 
crates/ra_hir_ty/src/infer/unify.rs rename to crates/hir_ty/src/infer/unify.rs diff --git a/crates/hir_ty/src/lib.rs b/crates/hir_ty/src/lib.rs new file mode 100644 index 0000000000..1e748476ac --- /dev/null +++ b/crates/hir_ty/src/lib.rs @@ -0,0 +1,1078 @@ +//! The type system. We currently use this to infer types for completion, hover +//! information and various assists. + +#[allow(unused)] +macro_rules! eprintln { + ($($tt:tt)*) => { stdx::eprintln!($($tt)*) }; +} + +mod autoderef; +pub mod primitive; +pub mod traits; +pub mod method_resolution; +mod op; +mod lower; +pub(crate) mod infer; +pub(crate) mod utils; + +pub mod display; +pub mod db; +pub mod diagnostics; + +#[cfg(test)] +mod tests; +#[cfg(test)] +mod test_db; + +use std::{iter, mem, ops::Deref, sync::Arc}; + +use base_db::{salsa, CrateId}; +use hir_def::{ + expr::ExprId, + type_ref::{Mutability, Rawness}, + AdtId, AssocContainerId, DefWithBodyId, GenericDefId, HasModule, Lookup, TraitId, TypeAliasId, + TypeParamId, +}; +use itertools::Itertools; + +use crate::{ + db::HirDatabase, + display::HirDisplay, + primitive::{FloatTy, IntTy}, + utils::{generics, make_mut_slice, Generics}, +}; + +pub use autoderef::autoderef; +pub use infer::{InferTy, InferenceResult}; +pub use lower::CallableDefId; +pub use lower::{ + associated_type_shorthand_candidates, callable_item_sig, ImplTraitLoweringMode, TyDefId, + TyLoweringContext, ValueTyDefId, +}; +pub use traits::{InEnvironment, Obligation, ProjectionPredicate, TraitEnvironment}; + +pub use chalk_ir::{BoundVar, DebruijnIndex}; + +/// A type constructor or type name: this might be something like the primitive +/// type `bool`, a struct like `Vec`, or things like function pointers or +/// tuples. +#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)] +pub enum TypeCtor { + /// The primitive boolean type. Written as `bool`. + Bool, + + /// The primitive character type; holds a Unicode scalar value + /// (a non-surrogate code point). Written as `char`. 
+ Char, + + /// A primitive integer type. For example, `i32`. + Int(IntTy), + + /// A primitive floating-point type. For example, `f64`. + Float(FloatTy), + + /// Structures, enumerations and unions. + Adt(AdtId), + + /// The pointee of a string slice. Written as `str`. + Str, + + /// The pointee of an array slice. Written as `[T]`. + Slice, + + /// An array with the given length. Written as `[T; n]`. + Array, + + /// A raw pointer. Written as `*mut T` or `*const T` + RawPtr(Mutability), + + /// A reference; a pointer with an associated lifetime. Written as + /// `&'a mut T` or `&'a T`. + Ref(Mutability), + + /// The anonymous type of a function declaration/definition. Each + /// function has a unique type, which is output (for a function + /// named `foo` returning an `i32`) as `fn() -> i32 {foo}`. + /// + /// This includes tuple struct / enum variant constructors as well. + /// + /// For example the type of `bar` here: + /// + /// ``` + /// fn foo() -> i32 { 1 } + /// let bar = foo; // bar: fn() -> i32 {foo} + /// ``` + FnDef(CallableDefId), + + /// A pointer to a function. Written as `fn() -> i32`. + /// + /// For example the type of `bar` here: + /// + /// ``` + /// fn foo() -> i32 { 1 } + /// let bar: fn() -> i32 = foo; + /// ``` + // FIXME make this a Ty variant like in Chalk + FnPtr { num_args: u16, is_varargs: bool }, + + /// The never type `!`. + Never, + + /// A tuple type. For example, `(i32, bool)`. + Tuple { cardinality: u16 }, + + /// Represents an associated item like `Iterator::Item`. This is used + /// when we have tried to normalize a projection like `T::Item` but + /// couldn't find a better representation. In that case, we generate + /// an **application type** like `(Iterator::Item)`. + AssociatedType(TypeAliasId), + + /// This represents a placeholder for an opaque type in situations where we + /// don't know the hidden type (i.e. currently almost always). This is + /// analogous to the `AssociatedType` type constructor. 
As with that one, + /// these are only produced by Chalk. + OpaqueType(OpaqueTyId), + + /// The type of a specific closure. + /// + /// The closure signature is stored in a `FnPtr` type in the first type + /// parameter. + Closure { def: DefWithBodyId, expr: ExprId }, +} + +impl TypeCtor { + pub fn num_ty_params(self, db: &dyn HirDatabase) -> usize { + match self { + TypeCtor::Bool + | TypeCtor::Char + | TypeCtor::Int(_) + | TypeCtor::Float(_) + | TypeCtor::Str + | TypeCtor::Never => 0, + TypeCtor::Slice + | TypeCtor::Array + | TypeCtor::RawPtr(_) + | TypeCtor::Ref(_) + | TypeCtor::Closure { .. } // 1 param representing the signature of the closure + => 1, + TypeCtor::Adt(adt) => { + let generic_params = generics(db.upcast(), adt.into()); + generic_params.len() + } + TypeCtor::FnDef(callable) => { + let generic_params = generics(db.upcast(), callable.into()); + generic_params.len() + } + TypeCtor::AssociatedType(type_alias) => { + let generic_params = generics(db.upcast(), type_alias.into()); + generic_params.len() + } + TypeCtor::OpaqueType(opaque_ty_id) => { + match opaque_ty_id { + OpaqueTyId::ReturnTypeImplTrait(func, _) => { + let generic_params = generics(db.upcast(), func.into()); + generic_params.len() + } + } + } + TypeCtor::FnPtr { num_args, is_varargs: _ } => num_args as usize + 1, + TypeCtor::Tuple { cardinality } => cardinality as usize, + } + } + + pub fn krate(self, db: &dyn HirDatabase) -> Option { + match self { + TypeCtor::Bool + | TypeCtor::Char + | TypeCtor::Int(_) + | TypeCtor::Float(_) + | TypeCtor::Str + | TypeCtor::Never + | TypeCtor::Slice + | TypeCtor::Array + | TypeCtor::RawPtr(_) + | TypeCtor::Ref(_) + | TypeCtor::FnPtr { .. } + | TypeCtor::Tuple { .. } => None, + // Closure's krate is irrelevant for coherence I would think? + TypeCtor::Closure { .. 
} => None, + TypeCtor::Adt(adt) => Some(adt.module(db.upcast()).krate), + TypeCtor::FnDef(callable) => Some(callable.krate(db)), + TypeCtor::AssociatedType(type_alias) => { + Some(type_alias.lookup(db.upcast()).module(db.upcast()).krate) + } + TypeCtor::OpaqueType(opaque_ty_id) => match opaque_ty_id { + OpaqueTyId::ReturnTypeImplTrait(func, _) => { + Some(func.lookup(db.upcast()).module(db.upcast()).krate) + } + }, + } + } + + pub fn as_generic_def(self) -> Option { + match self { + TypeCtor::Bool + | TypeCtor::Char + | TypeCtor::Int(_) + | TypeCtor::Float(_) + | TypeCtor::Str + | TypeCtor::Never + | TypeCtor::Slice + | TypeCtor::Array + | TypeCtor::RawPtr(_) + | TypeCtor::Ref(_) + | TypeCtor::FnPtr { .. } + | TypeCtor::Tuple { .. } + | TypeCtor::Closure { .. } => None, + TypeCtor::Adt(adt) => Some(adt.into()), + TypeCtor::FnDef(callable) => Some(callable.into()), + TypeCtor::AssociatedType(type_alias) => Some(type_alias.into()), + TypeCtor::OpaqueType(_impl_trait_id) => None, + } + } +} + +/// A nominal type with (maybe 0) type parameters. This might be a primitive +/// type like `bool`, a struct, tuple, function pointer, reference or +/// several other things. +#[derive(Clone, PartialEq, Eq, Debug, Hash)] +pub struct ApplicationTy { + pub ctor: TypeCtor, + pub parameters: Substs, +} + +#[derive(Clone, PartialEq, Eq, Debug, Hash)] +pub struct OpaqueTy { + pub opaque_ty_id: OpaqueTyId, + pub parameters: Substs, +} + +/// A "projection" type corresponds to an (unnormalized) +/// projection like `>::Foo`. Note that the +/// trait and all its parameters are fully known. 
+#[derive(Clone, PartialEq, Eq, Debug, Hash)] +pub struct ProjectionTy { + pub associated_ty: TypeAliasId, + pub parameters: Substs, +} + +impl ProjectionTy { + pub fn trait_ref(&self, db: &dyn HirDatabase) -> TraitRef { + TraitRef { trait_: self.trait_(db), substs: self.parameters.clone() } + } + + fn trait_(&self, db: &dyn HirDatabase) -> TraitId { + match self.associated_ty.lookup(db.upcast()).container { + AssocContainerId::TraitId(it) => it, + _ => panic!("projection ty without parent trait"), + } + } +} + +impl TypeWalk for ProjectionTy { + fn walk(&self, f: &mut impl FnMut(&Ty)) { + self.parameters.walk(f); + } + + fn walk_mut_binders( + &mut self, + f: &mut impl FnMut(&mut Ty, DebruijnIndex), + binders: DebruijnIndex, + ) { + self.parameters.walk_mut_binders(f, binders); + } +} + +/// A type. +/// +/// See also the `TyKind` enum in rustc (librustc/ty/sty.rs), which represents +/// the same thing (but in a different way). +/// +/// This should be cheap to clone. +#[derive(Clone, PartialEq, Eq, Debug, Hash)] +pub enum Ty { + /// A nominal type with (maybe 0) type parameters. This might be a primitive + /// type like `bool`, a struct, tuple, function pointer, reference or + /// several other things. + Apply(ApplicationTy), + + /// A "projection" type corresponds to an (unnormalized) + /// projection like `>::Foo`. Note that the + /// trait and all its parameters are fully known. + Projection(ProjectionTy), + + /// An opaque type (`impl Trait`). + /// + /// This is currently only used for return type impl trait; each instance of + /// `impl Trait` in a return type gets its own ID. + Opaque(OpaqueTy), + + /// A placeholder for a type parameter; for example, `T` in `fn f(x: T) + /// {}` when we're type-checking the body of that function. In this + /// situation, we know this stands for *some* type, but don't know the exact + /// type. + Placeholder(TypeParamId), + + /// A bound type variable. 
This is used in various places: when representing + /// some polymorphic type like the type of function `fn f`, the type + /// parameters get turned into variables; during trait resolution, inference + /// variables get turned into bound variables and back; and in `Dyn` the + /// `Self` type is represented with a bound variable as well. + Bound(BoundVar), + + /// A type variable used during type checking. + Infer(InferTy), + + /// A trait object (`dyn Trait` or bare `Trait` in pre-2018 Rust). + /// + /// The predicates are quantified over the `Self` type, i.e. `Ty::Bound(0)` + /// represents the `Self` type inside the bounds. This is currently + /// implicit; Chalk has the `Binders` struct to make it explicit, but it + /// didn't seem worth the overhead yet. + Dyn(Arc<[GenericPredicate]>), + + /// A placeholder for a type which could not be computed; this is propagated + /// to avoid useless error messages. Doubles as a placeholder where type + /// variables are inserted before type checking, since we want to try to + /// infer a better type here anyway -- for the IDE use case, we want to try + /// to infer as much as possible even in the presence of type errors. + Unknown, +} + +/// A list of substitutions for generic parameters. 
+#[derive(Clone, PartialEq, Eq, Debug, Hash)] +pub struct Substs(Arc<[Ty]>); + +impl TypeWalk for Substs { + fn walk(&self, f: &mut impl FnMut(&Ty)) { + for t in self.0.iter() { + t.walk(f); + } + } + + fn walk_mut_binders( + &mut self, + f: &mut impl FnMut(&mut Ty, DebruijnIndex), + binders: DebruijnIndex, + ) { + for t in make_mut_slice(&mut self.0) { + t.walk_mut_binders(f, binders); + } + } +} + +impl Substs { + pub fn empty() -> Substs { + Substs(Arc::new([])) + } + + pub fn single(ty: Ty) -> Substs { + Substs(Arc::new([ty])) + } + + pub fn prefix(&self, n: usize) -> Substs { + Substs(self.0[..std::cmp::min(self.0.len(), n)].into()) + } + + pub fn suffix(&self, n: usize) -> Substs { + Substs(self.0[self.0.len() - std::cmp::min(self.0.len(), n)..].into()) + } + + pub fn as_single(&self) -> &Ty { + if self.0.len() != 1 { + panic!("expected substs of len 1, got {:?}", self); + } + &self.0[0] + } + + /// Return Substs that replace each parameter by itself (i.e. `Ty::Param`). + pub(crate) fn type_params_for_generics(generic_params: &Generics) -> Substs { + Substs(generic_params.iter().map(|(id, _)| Ty::Placeholder(id)).collect()) + } + + /// Return Substs that replace each parameter by itself (i.e. `Ty::Param`). + pub fn type_params(db: &dyn HirDatabase, def: impl Into) -> Substs { + let params = generics(db.upcast(), def.into()); + Substs::type_params_for_generics(¶ms) + } + + /// Return Substs that replace each parameter by a bound variable. 
+ pub(crate) fn bound_vars(generic_params: &Generics, debruijn: DebruijnIndex) -> Substs { + Substs( + generic_params + .iter() + .enumerate() + .map(|(idx, _)| Ty::Bound(BoundVar::new(debruijn, idx))) + .collect(), + ) + } + + pub fn build_for_def(db: &dyn HirDatabase, def: impl Into) -> SubstsBuilder { + let def = def.into(); + let params = generics(db.upcast(), def); + let param_count = params.len(); + Substs::builder(param_count) + } + + pub(crate) fn build_for_generics(generic_params: &Generics) -> SubstsBuilder { + Substs::builder(generic_params.len()) + } + + pub fn build_for_type_ctor(db: &dyn HirDatabase, type_ctor: TypeCtor) -> SubstsBuilder { + Substs::builder(type_ctor.num_ty_params(db)) + } + + fn builder(param_count: usize) -> SubstsBuilder { + SubstsBuilder { vec: Vec::with_capacity(param_count), param_count } + } +} + +/// Return an index of a parameter in the generic type parameter list by it's id. +pub fn param_idx(db: &dyn HirDatabase, id: TypeParamId) -> Option { + generics(db.upcast(), id.parent).param_idx(id) +} + +#[derive(Debug, Clone)] +pub struct SubstsBuilder { + vec: Vec, + param_count: usize, +} + +impl SubstsBuilder { + pub fn build(self) -> Substs { + assert_eq!(self.vec.len(), self.param_count); + Substs(self.vec.into()) + } + + pub fn push(mut self, ty: Ty) -> Self { + self.vec.push(ty); + self + } + + fn remaining(&self) -> usize { + self.param_count - self.vec.len() + } + + pub fn fill_with_bound_vars(self, debruijn: DebruijnIndex, starting_from: usize) -> Self { + self.fill((starting_from..).map(|idx| Ty::Bound(BoundVar::new(debruijn, idx)))) + } + + pub fn fill_with_unknown(self) -> Self { + self.fill(iter::repeat(Ty::Unknown)) + } + + pub fn fill(mut self, filler: impl Iterator) -> Self { + self.vec.extend(filler.take(self.remaining())); + assert_eq!(self.remaining(), 0); + self + } + + pub fn use_parent_substs(mut self, parent_substs: &Substs) -> Self { + assert!(self.vec.is_empty()); + assert!(parent_substs.len() <= 
self.param_count); + self.vec.extend(parent_substs.iter().cloned()); + self + } +} + +impl Deref for Substs { + type Target = [Ty]; + + fn deref(&self) -> &[Ty] { + &self.0 + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)] +pub struct Binders { + pub num_binders: usize, + pub value: T, +} + +impl Binders { + pub fn new(num_binders: usize, value: T) -> Self { + Self { num_binders, value } + } + + pub fn as_ref(&self) -> Binders<&T> { + Binders { num_binders: self.num_binders, value: &self.value } + } + + pub fn map(self, f: impl FnOnce(T) -> U) -> Binders { + Binders { num_binders: self.num_binders, value: f(self.value) } + } + + pub fn filter_map(self, f: impl FnOnce(T) -> Option) -> Option> { + Some(Binders { num_binders: self.num_binders, value: f(self.value)? }) + } +} + +impl Binders<&T> { + pub fn cloned(&self) -> Binders { + Binders { num_binders: self.num_binders, value: self.value.clone() } + } +} + +impl Binders { + /// Substitutes all variables. + pub fn subst(self, subst: &Substs) -> T { + assert_eq!(subst.len(), self.num_binders); + self.value.subst_bound_vars(subst) + } + + /// Substitutes just a prefix of the variables (shifting the rest). + pub fn subst_prefix(self, subst: &Substs) -> Binders { + assert!(subst.len() < self.num_binders); + Binders::new(self.num_binders - subst.len(), self.value.subst_bound_vars(subst)) + } +} + +impl TypeWalk for Binders { + fn walk(&self, f: &mut impl FnMut(&Ty)) { + self.value.walk(f); + } + + fn walk_mut_binders( + &mut self, + f: &mut impl FnMut(&mut Ty, DebruijnIndex), + binders: DebruijnIndex, + ) { + self.value.walk_mut_binders(f, binders.shifted_in()) + } +} + +/// A trait with type parameters. This includes the `Self`, so this represents a concrete type implementing the trait. +/// Name to be bikeshedded: TraitBound? TraitImplements? +#[derive(Clone, PartialEq, Eq, Debug, Hash)] +pub struct TraitRef { + /// FIXME name? 
+ pub trait_: TraitId, + pub substs: Substs, +} + +impl TraitRef { + pub fn self_ty(&self) -> &Ty { + &self.substs[0] + } +} + +impl TypeWalk for TraitRef { + fn walk(&self, f: &mut impl FnMut(&Ty)) { + self.substs.walk(f); + } + + fn walk_mut_binders( + &mut self, + f: &mut impl FnMut(&mut Ty, DebruijnIndex), + binders: DebruijnIndex, + ) { + self.substs.walk_mut_binders(f, binders); + } +} + +/// Like `generics::WherePredicate`, but with resolved types: A condition on the +/// parameters of a generic item. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum GenericPredicate { + /// The given trait needs to be implemented for its type parameters. + Implemented(TraitRef), + /// An associated type bindings like in `Iterator`. + Projection(ProjectionPredicate), + /// We couldn't resolve the trait reference. (If some type parameters can't + /// be resolved, they will just be Unknown). + Error, +} + +impl GenericPredicate { + pub fn is_error(&self) -> bool { + matches!(self, GenericPredicate::Error) + } + + pub fn is_implemented(&self) -> bool { + matches!(self, GenericPredicate::Implemented(_)) + } + + pub fn trait_ref(&self, db: &dyn HirDatabase) -> Option { + match self { + GenericPredicate::Implemented(tr) => Some(tr.clone()), + GenericPredicate::Projection(proj) => Some(proj.projection_ty.trait_ref(db)), + GenericPredicate::Error => None, + } + } +} + +impl TypeWalk for GenericPredicate { + fn walk(&self, f: &mut impl FnMut(&Ty)) { + match self { + GenericPredicate::Implemented(trait_ref) => trait_ref.walk(f), + GenericPredicate::Projection(projection_pred) => projection_pred.walk(f), + GenericPredicate::Error => {} + } + } + + fn walk_mut_binders( + &mut self, + f: &mut impl FnMut(&mut Ty, DebruijnIndex), + binders: DebruijnIndex, + ) { + match self { + GenericPredicate::Implemented(trait_ref) => trait_ref.walk_mut_binders(f, binders), + GenericPredicate::Projection(projection_pred) => { + projection_pred.walk_mut_binders(f, binders) + } + 
GenericPredicate::Error => {} + } + } +} + +/// Basically a claim (currently not validated / checked) that the contained +/// type / trait ref contains no inference variables; any inference variables it +/// contained have been replaced by bound variables, and `kinds` tells us how +/// many there are and whether they were normal or float/int variables. This is +/// used to erase irrelevant differences between types before using them in +/// queries. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Canonical { + pub value: T, + pub kinds: Arc<[TyKind]>, +} + +impl Canonical { + pub fn new(value: T, kinds: impl IntoIterator) -> Self { + Self { value, kinds: kinds.into_iter().collect() } + } +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +pub enum TyKind { + General, + Integer, + Float, +} + +/// A function signature as seen by type inference: Several parameter types and +/// one return type. +#[derive(Clone, PartialEq, Eq, Debug)] +pub struct FnSig { + params_and_return: Arc<[Ty]>, + is_varargs: bool, +} + +/// A polymorphic function signature. 
+pub type PolyFnSig = Binders; + +impl FnSig { + pub fn from_params_and_return(mut params: Vec, ret: Ty, is_varargs: bool) -> FnSig { + params.push(ret); + FnSig { params_and_return: params.into(), is_varargs } + } + + pub fn from_fn_ptr_substs(substs: &Substs, is_varargs: bool) -> FnSig { + FnSig { params_and_return: Arc::clone(&substs.0), is_varargs } + } + + pub fn params(&self) -> &[Ty] { + &self.params_and_return[0..self.params_and_return.len() - 1] + } + + pub fn ret(&self) -> &Ty { + &self.params_and_return[self.params_and_return.len() - 1] + } +} + +impl TypeWalk for FnSig { + fn walk(&self, f: &mut impl FnMut(&Ty)) { + for t in self.params_and_return.iter() { + t.walk(f); + } + } + + fn walk_mut_binders( + &mut self, + f: &mut impl FnMut(&mut Ty, DebruijnIndex), + binders: DebruijnIndex, + ) { + for t in make_mut_slice(&mut self.params_and_return) { + t.walk_mut_binders(f, binders); + } + } +} + +impl Ty { + pub fn simple(ctor: TypeCtor) -> Ty { + Ty::Apply(ApplicationTy { ctor, parameters: Substs::empty() }) + } + pub fn apply_one(ctor: TypeCtor, param: Ty) -> Ty { + Ty::Apply(ApplicationTy { ctor, parameters: Substs::single(param) }) + } + pub fn apply(ctor: TypeCtor, parameters: Substs) -> Ty { + Ty::Apply(ApplicationTy { ctor, parameters }) + } + pub fn unit() -> Self { + Ty::apply(TypeCtor::Tuple { cardinality: 0 }, Substs::empty()) + } + pub fn fn_ptr(sig: FnSig) -> Self { + Ty::apply( + TypeCtor::FnPtr { num_args: sig.params().len() as u16, is_varargs: sig.is_varargs }, + Substs(sig.params_and_return), + ) + } + + pub fn as_reference(&self) -> Option<(&Ty, Mutability)> { + match self { + Ty::Apply(ApplicationTy { ctor: TypeCtor::Ref(mutability), parameters }) => { + Some((parameters.as_single(), *mutability)) + } + _ => None, + } + } + + pub fn as_reference_or_ptr(&self) -> Option<(&Ty, Rawness, Mutability)> { + match self { + Ty::Apply(ApplicationTy { ctor: TypeCtor::Ref(mutability), parameters }) => { + Some((parameters.as_single(), Rawness::Ref, 
*mutability)) + } + Ty::Apply(ApplicationTy { ctor: TypeCtor::RawPtr(mutability), parameters }) => { + Some((parameters.as_single(), Rawness::RawPtr, *mutability)) + } + _ => None, + } + } + + pub fn strip_references(&self) -> &Ty { + let mut t: &Ty = self; + + while let Ty::Apply(ApplicationTy { ctor: TypeCtor::Ref(_mutability), parameters }) = t { + t = parameters.as_single(); + } + + t + } + + pub fn as_adt(&self) -> Option<(AdtId, &Substs)> { + match self { + Ty::Apply(ApplicationTy { ctor: TypeCtor::Adt(adt_def), parameters }) => { + Some((*adt_def, parameters)) + } + _ => None, + } + } + + pub fn as_tuple(&self) -> Option<&Substs> { + match self { + Ty::Apply(ApplicationTy { ctor: TypeCtor::Tuple { .. }, parameters }) => { + Some(parameters) + } + _ => None, + } + } + + pub fn is_never(&self) -> bool { + matches!(self, Ty::Apply(ApplicationTy { ctor: TypeCtor::Never, .. })) + } + + /// If this is a `dyn Trait` type, this returns the `Trait` part. + pub fn dyn_trait_ref(&self) -> Option<&TraitRef> { + match self { + Ty::Dyn(bounds) => bounds.get(0).and_then(|b| match b { + GenericPredicate::Implemented(trait_ref) => Some(trait_ref), + _ => None, + }), + _ => None, + } + } + + /// If this is a `dyn Trait`, returns that trait. + pub fn dyn_trait(&self) -> Option { + self.dyn_trait_ref().map(|it| it.trait_) + } + + fn builtin_deref(&self) -> Option { + match self { + Ty::Apply(a_ty) => match a_ty.ctor { + TypeCtor::Ref(..) => Some(Ty::clone(a_ty.parameters.as_single())), + TypeCtor::RawPtr(..) => Some(Ty::clone(a_ty.parameters.as_single())), + _ => None, + }, + _ => None, + } + } + + pub fn callable_sig(&self, db: &dyn HirDatabase) -> Option { + match self { + Ty::Apply(a_ty) => match a_ty.ctor { + TypeCtor::FnPtr { is_varargs, .. } => { + Some(FnSig::from_fn_ptr_substs(&a_ty.parameters, is_varargs)) + } + TypeCtor::FnDef(def) => { + let sig = db.callable_item_signature(def); + Some(sig.subst(&a_ty.parameters)) + } + TypeCtor::Closure { .. 
} => { + let sig_param = &a_ty.parameters[0]; + sig_param.callable_sig(db) + } + _ => None, + }, + _ => None, + } + } + + /// If this is a type with type parameters (an ADT or function), replaces + /// the `Substs` for these type parameters with the given ones. (So e.g. if + /// `self` is `Option<_>` and the substs contain `u32`, we'll have + /// `Option` afterwards.) + pub fn apply_substs(self, substs: Substs) -> Ty { + match self { + Ty::Apply(ApplicationTy { ctor, parameters: previous_substs }) => { + assert_eq!(previous_substs.len(), substs.len()); + Ty::Apply(ApplicationTy { ctor, parameters: substs }) + } + _ => self, + } + } + + /// Returns the type parameters of this type if it has some (i.e. is an ADT + /// or function); so if `self` is `Option`, this returns the `u32`. + pub fn substs(&self) -> Option { + match self { + Ty::Apply(ApplicationTy { parameters, .. }) => Some(parameters.clone()), + _ => None, + } + } + + pub fn impl_trait_bounds(&self, db: &dyn HirDatabase) -> Option> { + match self { + Ty::Opaque(opaque_ty) => { + let predicates = match opaque_ty.opaque_ty_id { + OpaqueTyId::ReturnTypeImplTrait(func, idx) => { + db.return_type_impl_traits(func).map(|it| { + let data = (*it) + .as_ref() + .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone()); + data.subst(&opaque_ty.parameters) + }) + } + }; + + predicates.map(|it| it.value) + } + Ty::Placeholder(id) => { + let generic_params = db.generic_params(id.parent); + let param_data = &generic_params.types[id.local_id]; + match param_data.provenance { + hir_def::generics::TypeParamProvenance::ArgumentImplTrait => { + let predicates = db + .generic_predicates_for_param(*id) + .into_iter() + .map(|pred| pred.value.clone()) + .collect_vec(); + + Some(predicates) + } + _ => None, + } + } + _ => None, + } + } + + pub fn associated_type_parent_trait(&self, db: &dyn HirDatabase) -> Option { + match self { + Ty::Apply(ApplicationTy { ctor: TypeCtor::AssociatedType(type_alias_id), .. 
}) => { + match type_alias_id.lookup(db.upcast()).container { + AssocContainerId::TraitId(trait_id) => Some(trait_id), + _ => None, + } + } + Ty::Projection(projection_ty) => { + match projection_ty.associated_ty.lookup(db.upcast()).container { + AssocContainerId::TraitId(trait_id) => Some(trait_id), + _ => None, + } + } + _ => None, + } + } +} + +/// This allows walking structures that contain types to do something with those +/// types, similar to Chalk's `Fold` trait. +pub trait TypeWalk { + fn walk(&self, f: &mut impl FnMut(&Ty)); + fn walk_mut(&mut self, f: &mut impl FnMut(&mut Ty)) { + self.walk_mut_binders(&mut |ty, _binders| f(ty), DebruijnIndex::INNERMOST); + } + /// Walk the type, counting entered binders. + /// + /// `Ty::Bound` variables use DeBruijn indexing, which means that 0 refers + /// to the innermost binder, 1 to the next, etc.. So when we want to + /// substitute a certain bound variable, we can't just walk the whole type + /// and blindly replace each instance of a certain index; when we 'enter' + /// things that introduce new bound variables, we have to keep track of + /// that. Currently, the only thing that introduces bound variables on our + /// side are `Ty::Dyn` and `Ty::Opaque`, which each introduce a bound + /// variable for the self type. + fn walk_mut_binders( + &mut self, + f: &mut impl FnMut(&mut Ty, DebruijnIndex), + binders: DebruijnIndex, + ); + + fn fold_binders( + mut self, + f: &mut impl FnMut(Ty, DebruijnIndex) -> Ty, + binders: DebruijnIndex, + ) -> Self + where + Self: Sized, + { + self.walk_mut_binders( + &mut |ty_mut, binders| { + let ty = mem::replace(ty_mut, Ty::Unknown); + *ty_mut = f(ty, binders); + }, + binders, + ); + self + } + + fn fold(mut self, f: &mut impl FnMut(Ty) -> Ty) -> Self + where + Self: Sized, + { + self.walk_mut(&mut |ty_mut| { + let ty = mem::replace(ty_mut, Ty::Unknown); + *ty_mut = f(ty); + }); + self + } + + /// Substitutes `Ty::Bound` vars with the given substitution. 
+ fn subst_bound_vars(self, substs: &Substs) -> Self + where + Self: Sized, + { + self.subst_bound_vars_at_depth(substs, DebruijnIndex::INNERMOST) + } + + /// Substitutes `Ty::Bound` vars with the given substitution. + fn subst_bound_vars_at_depth(mut self, substs: &Substs, depth: DebruijnIndex) -> Self + where + Self: Sized, + { + self.walk_mut_binders( + &mut |ty, binders| { + if let &mut Ty::Bound(bound) = ty { + if bound.debruijn >= binders { + *ty = substs.0[bound.index].clone().shift_bound_vars(binders); + } + } + }, + depth, + ); + self + } + + /// Shifts up debruijn indices of `Ty::Bound` vars by `n`. + fn shift_bound_vars(self, n: DebruijnIndex) -> Self + where + Self: Sized, + { + self.fold_binders( + &mut |ty, binders| match ty { + Ty::Bound(bound) if bound.debruijn >= binders => { + Ty::Bound(bound.shifted_in_from(n)) + } + ty => ty, + }, + DebruijnIndex::INNERMOST, + ) + } +} + +impl TypeWalk for Ty { + fn walk(&self, f: &mut impl FnMut(&Ty)) { + match self { + Ty::Apply(a_ty) => { + for t in a_ty.parameters.iter() { + t.walk(f); + } + } + Ty::Projection(p_ty) => { + for t in p_ty.parameters.iter() { + t.walk(f); + } + } + Ty::Dyn(predicates) => { + for p in predicates.iter() { + p.walk(f); + } + } + Ty::Opaque(o_ty) => { + for t in o_ty.parameters.iter() { + t.walk(f); + } + } + Ty::Placeholder { .. } | Ty::Bound(_) | Ty::Infer(_) | Ty::Unknown => {} + } + f(self); + } + + fn walk_mut_binders( + &mut self, + f: &mut impl FnMut(&mut Ty, DebruijnIndex), + binders: DebruijnIndex, + ) { + match self { + Ty::Apply(a_ty) => { + a_ty.parameters.walk_mut_binders(f, binders); + } + Ty::Projection(p_ty) => { + p_ty.parameters.walk_mut_binders(f, binders); + } + Ty::Dyn(predicates) => { + for p in make_mut_slice(predicates) { + p.walk_mut_binders(f, binders.shifted_in()); + } + } + Ty::Opaque(o_ty) => { + o_ty.parameters.walk_mut_binders(f, binders); + } + Ty::Placeholder { .. 
} | Ty::Bound(_) | Ty::Infer(_) | Ty::Unknown => {} + } + f(self, binders); + } +} + +impl TypeWalk for Vec { + fn walk(&self, f: &mut impl FnMut(&Ty)) { + for t in self { + t.walk(f); + } + } + fn walk_mut_binders( + &mut self, + f: &mut impl FnMut(&mut Ty, DebruijnIndex), + binders: DebruijnIndex, + ) { + for t in self { + t.walk_mut_binders(f, binders); + } + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)] +pub enum OpaqueTyId { + ReturnTypeImplTrait(hir_def::FunctionId, u16), +} + +#[derive(Clone, PartialEq, Eq, Debug, Hash)] +pub struct ReturnTypeImplTraits { + pub(crate) impl_traits: Vec, +} + +#[derive(Clone, PartialEq, Eq, Debug, Hash)] +pub(crate) struct ReturnTypeImplTrait { + pub bounds: Binders>, +} diff --git a/crates/hir_ty/src/lower.rs b/crates/hir_ty/src/lower.rs new file mode 100644 index 0000000000..cd574e983f --- /dev/null +++ b/crates/hir_ty/src/lower.rs @@ -0,0 +1,1242 @@ +//! Methods for lowering the HIR to types. There are two main cases here: +//! +//! - Lowering a type reference like `&usize` or `Option` to a +//! type: The entry point for this is `Ty::from_hir`. +//! - Building the type for an item: This happens through the `type_for_def` query. +//! +//! This usually involves resolving names, collecting generic arguments etc. 
+use std::{iter, sync::Arc}; + +use arena::map::ArenaMap; +use base_db::CrateId; +use hir_def::{ + adt::StructKind, + builtin_type::BuiltinType, + generics::{TypeParamProvenance, WherePredicate, WherePredicateTarget}, + path::{GenericArg, Path, PathSegment, PathSegments}, + resolver::{HasResolver, Resolver, TypeNs}, + type_ref::{TypeBound, TypeRef}, + AdtId, AssocContainerId, AssocItemId, ConstId, EnumId, EnumVariantId, FunctionId, GenericDefId, + HasModule, ImplId, LocalFieldId, Lookup, StaticId, StructId, TraitId, TypeAliasId, TypeParamId, + UnionId, VariantId, +}; +use hir_expand::name::Name; +use smallvec::SmallVec; +use stdx::impl_from; +use test_utils::mark; + +use crate::{ + db::HirDatabase, + primitive::{FloatTy, IntTy}, + utils::{ + all_super_trait_refs, associated_type_by_name_including_super_traits, generics, + make_mut_slice, variant_data, + }, + Binders, BoundVar, DebruijnIndex, FnSig, GenericPredicate, OpaqueTy, OpaqueTyId, PolyFnSig, + ProjectionPredicate, ProjectionTy, ReturnTypeImplTrait, ReturnTypeImplTraits, Substs, + TraitEnvironment, TraitRef, Ty, TypeCtor, TypeWalk, +}; + +#[derive(Debug)] +pub struct TyLoweringContext<'a> { + pub db: &'a dyn HirDatabase, + pub resolver: &'a Resolver, + in_binders: DebruijnIndex, + /// Note: Conceptually, it's thinkable that we could be in a location where + /// some type params should be represented as placeholders, and others + /// should be converted to variables. I think in practice, this isn't + /// possible currently, so this should be fine for now. + pub type_param_mode: TypeParamLoweringMode, + pub impl_trait_mode: ImplTraitLoweringMode, + impl_trait_counter: std::cell::Cell, + /// When turning `impl Trait` into opaque types, we have to collect the + /// bounds at the same time to get the IDs correct (without becoming too + /// complicated). 
I don't like using interior mutability (as for the + /// counter), but I've tried and failed to make the lifetimes work for + /// passing around a `&mut TyLoweringContext`. The core problem is that + /// we're grouping the mutable data (the counter and this field) together + /// with the immutable context (the references to the DB and resolver). + /// Splitting this up would be a possible fix. + opaque_type_data: std::cell::RefCell>, +} + +impl<'a> TyLoweringContext<'a> { + pub fn new(db: &'a dyn HirDatabase, resolver: &'a Resolver) -> Self { + let impl_trait_counter = std::cell::Cell::new(0); + let impl_trait_mode = ImplTraitLoweringMode::Disallowed; + let type_param_mode = TypeParamLoweringMode::Placeholder; + let in_binders = DebruijnIndex::INNERMOST; + let opaque_type_data = std::cell::RefCell::new(Vec::new()); + Self { + db, + resolver, + in_binders, + impl_trait_mode, + impl_trait_counter, + type_param_mode, + opaque_type_data, + } + } + + pub fn with_debruijn( + &self, + debruijn: DebruijnIndex, + f: impl FnOnce(&TyLoweringContext) -> T, + ) -> T { + let opaque_ty_data_vec = self.opaque_type_data.replace(Vec::new()); + let new_ctx = Self { + in_binders: debruijn, + impl_trait_counter: std::cell::Cell::new(self.impl_trait_counter.get()), + opaque_type_data: std::cell::RefCell::new(opaque_ty_data_vec), + ..*self + }; + let result = f(&new_ctx); + self.impl_trait_counter.set(new_ctx.impl_trait_counter.get()); + self.opaque_type_data.replace(new_ctx.opaque_type_data.into_inner()); + result + } + + pub fn with_shifted_in( + &self, + debruijn: DebruijnIndex, + f: impl FnOnce(&TyLoweringContext) -> T, + ) -> T { + self.with_debruijn(self.in_binders.shifted_in_from(debruijn), f) + } + + pub fn with_impl_trait_mode(self, impl_trait_mode: ImplTraitLoweringMode) -> Self { + Self { impl_trait_mode, ..self } + } + + pub fn with_type_param_mode(self, type_param_mode: TypeParamLoweringMode) -> Self { + Self { type_param_mode, ..self } + } +} + +#[derive(Copy, Clone, Debug, 
PartialEq, Eq)] +pub enum ImplTraitLoweringMode { + /// `impl Trait` gets lowered into an opaque type that doesn't unify with + /// anything except itself. This is used in places where values flow 'out', + /// i.e. for arguments of the function we're currently checking, and return + /// types of functions we're calling. + Opaque, + /// `impl Trait` gets lowered into a type variable. Used for argument + /// position impl Trait when inside the respective function, since it allows + /// us to support that without Chalk. + Param, + /// `impl Trait` gets lowered into a variable that can unify with some + /// type. This is used in places where values flow 'in', i.e. for arguments + /// of functions we're calling, and the return type of the function we're + /// currently checking. + Variable, + /// `impl Trait` is disallowed and will be an error. + Disallowed, +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub enum TypeParamLoweringMode { + Placeholder, + Variable, +} + +impl Ty { + pub fn from_hir(ctx: &TyLoweringContext<'_>, type_ref: &TypeRef) -> Self { + Ty::from_hir_ext(ctx, type_ref).0 + } + pub fn from_hir_ext(ctx: &TyLoweringContext<'_>, type_ref: &TypeRef) -> (Self, Option) { + let mut res = None; + let ty = match type_ref { + TypeRef::Never => Ty::simple(TypeCtor::Never), + TypeRef::Tuple(inner) => { + let inner_tys: Arc<[Ty]> = inner.iter().map(|tr| Ty::from_hir(ctx, tr)).collect(); + Ty::apply( + TypeCtor::Tuple { cardinality: inner_tys.len() as u16 }, + Substs(inner_tys), + ) + } + TypeRef::Path(path) => { + let (ty, res_) = Ty::from_hir_path(ctx, path); + res = res_; + ty + } + TypeRef::RawPtr(inner, mutability) => { + let inner_ty = Ty::from_hir(ctx, inner); + Ty::apply_one(TypeCtor::RawPtr(*mutability), inner_ty) + } + TypeRef::Array(inner) => { + let inner_ty = Ty::from_hir(ctx, inner); + Ty::apply_one(TypeCtor::Array, inner_ty) + } + TypeRef::Slice(inner) => { + let inner_ty = Ty::from_hir(ctx, inner); + Ty::apply_one(TypeCtor::Slice, inner_ty) + } 
+ TypeRef::Reference(inner, mutability) => { + let inner_ty = Ty::from_hir(ctx, inner); + Ty::apply_one(TypeCtor::Ref(*mutability), inner_ty) + } + TypeRef::Placeholder => Ty::Unknown, + TypeRef::Fn(params, is_varargs) => { + let sig = Substs(params.iter().map(|tr| Ty::from_hir(ctx, tr)).collect()); + Ty::apply( + TypeCtor::FnPtr { num_args: sig.len() as u16 - 1, is_varargs: *is_varargs }, + sig, + ) + } + TypeRef::DynTrait(bounds) => { + let self_ty = Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, 0)); + let predicates = ctx.with_shifted_in(DebruijnIndex::ONE, |ctx| { + bounds + .iter() + .flat_map(|b| GenericPredicate::from_type_bound(ctx, b, self_ty.clone())) + .collect() + }); + Ty::Dyn(predicates) + } + TypeRef::ImplTrait(bounds) => { + match ctx.impl_trait_mode { + ImplTraitLoweringMode::Opaque => { + let idx = ctx.impl_trait_counter.get(); + ctx.impl_trait_counter.set(idx + 1); + + assert!(idx as usize == ctx.opaque_type_data.borrow().len()); + // this dance is to make sure the data is in the right + // place even if we encounter more opaque types while + // lowering the bounds + ctx.opaque_type_data + .borrow_mut() + .push(ReturnTypeImplTrait { bounds: Binders::new(1, Vec::new()) }); + // We don't want to lower the bounds inside the binders + // we're currently in, because they don't end up inside + // those binders. E.g. when we have `impl Trait>`, the `impl OtherTrait` can't refer + // to the self parameter from `impl Trait`, and the + // bounds aren't actually stored nested within each + // other, but separately. So if the `T` refers to a type + // parameter of the outer function, it's just one binder + // away instead of two. 
+ let actual_opaque_type_data = ctx + .with_debruijn(DebruijnIndex::INNERMOST, |ctx| { + ReturnTypeImplTrait::from_hir(ctx, &bounds) + }); + ctx.opaque_type_data.borrow_mut()[idx as usize] = actual_opaque_type_data; + + let func = match ctx.resolver.generic_def() { + Some(GenericDefId::FunctionId(f)) => f, + _ => panic!("opaque impl trait lowering in non-function"), + }; + let impl_trait_id = OpaqueTyId::ReturnTypeImplTrait(func, idx); + let generics = generics(ctx.db.upcast(), func.into()); + let parameters = Substs::bound_vars(&generics, ctx.in_binders); + Ty::Opaque(OpaqueTy { opaque_ty_id: impl_trait_id, parameters }) + } + ImplTraitLoweringMode::Param => { + let idx = ctx.impl_trait_counter.get(); + // FIXME we're probably doing something wrong here + ctx.impl_trait_counter.set(idx + count_impl_traits(type_ref) as u16); + if let Some(def) = ctx.resolver.generic_def() { + let generics = generics(ctx.db.upcast(), def); + let param = generics + .iter() + .filter(|(_, data)| { + data.provenance == TypeParamProvenance::ArgumentImplTrait + }) + .nth(idx as usize) + .map_or(Ty::Unknown, |(id, _)| Ty::Placeholder(id)); + param + } else { + Ty::Unknown + } + } + ImplTraitLoweringMode::Variable => { + let idx = ctx.impl_trait_counter.get(); + // FIXME we're probably doing something wrong here + ctx.impl_trait_counter.set(idx + count_impl_traits(type_ref) as u16); + let (parent_params, self_params, list_params, _impl_trait_params) = + if let Some(def) = ctx.resolver.generic_def() { + let generics = generics(ctx.db.upcast(), def); + generics.provenance_split() + } else { + (0, 0, 0, 0) + }; + Ty::Bound(BoundVar::new( + ctx.in_binders, + idx as usize + parent_params + self_params + list_params, + )) + } + ImplTraitLoweringMode::Disallowed => { + // FIXME: report error + Ty::Unknown + } + } + } + TypeRef::Error => Ty::Unknown, + }; + (ty, res) + } + + /// This is only for `generic_predicates_for_param`, where we can't just + /// lower the self types of the predicates since 
that could lead to cycles. + /// So we just check here if the `type_ref` resolves to a generic param, and which. + fn from_hir_only_param(ctx: &TyLoweringContext<'_>, type_ref: &TypeRef) -> Option { + let path = match type_ref { + TypeRef::Path(path) => path, + _ => return None, + }; + if path.type_anchor().is_some() { + return None; + } + if path.segments().len() > 1 { + return None; + } + let resolution = + match ctx.resolver.resolve_path_in_type_ns(ctx.db.upcast(), path.mod_path()) { + Some((it, None)) => it, + _ => return None, + }; + if let TypeNs::GenericParam(param_id) = resolution { + Some(param_id) + } else { + None + } + } + + pub(crate) fn from_type_relative_path( + ctx: &TyLoweringContext<'_>, + ty: Ty, + // We need the original resolution to lower `Self::AssocTy` correctly + res: Option, + remaining_segments: PathSegments<'_>, + ) -> (Ty, Option) { + if remaining_segments.len() == 1 { + // resolve unselected assoc types + let segment = remaining_segments.first().unwrap(); + (Ty::select_associated_type(ctx, res, segment), None) + } else if remaining_segments.len() > 1 { + // FIXME report error (ambiguous associated type) + (Ty::Unknown, None) + } else { + (ty, res) + } + } + + pub(crate) fn from_partly_resolved_hir_path( + ctx: &TyLoweringContext<'_>, + resolution: TypeNs, + resolved_segment: PathSegment<'_>, + remaining_segments: PathSegments<'_>, + infer_args: bool, + ) -> (Ty, Option) { + let ty = match resolution { + TypeNs::TraitId(trait_) => { + // if this is a bare dyn Trait, we'll directly put the required ^0 for the self type in there + let self_ty = if remaining_segments.len() == 0 { + Some(Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, 0))) + } else { + None + }; + let trait_ref = + TraitRef::from_resolved_path(ctx, trait_, resolved_segment, self_ty); + let ty = if remaining_segments.len() == 1 { + let segment = remaining_segments.first().unwrap(); + let found = associated_type_by_name_including_super_traits( + ctx.db, + trait_ref, + 
&segment.name, + ); + match found { + Some((super_trait_ref, associated_ty)) => { + // FIXME handle type parameters on the segment + Ty::Projection(ProjectionTy { + associated_ty, + parameters: super_trait_ref.substs, + }) + } + None => { + // FIXME: report error (associated type not found) + Ty::Unknown + } + } + } else if remaining_segments.len() > 1 { + // FIXME report error (ambiguous associated type) + Ty::Unknown + } else { + Ty::Dyn(Arc::new([GenericPredicate::Implemented(trait_ref)])) + }; + return (ty, None); + } + TypeNs::GenericParam(param_id) => { + let generics = generics( + ctx.db.upcast(), + ctx.resolver.generic_def().expect("generics in scope"), + ); + match ctx.type_param_mode { + TypeParamLoweringMode::Placeholder => Ty::Placeholder(param_id), + TypeParamLoweringMode::Variable => { + let idx = generics.param_idx(param_id).expect("matching generics"); + Ty::Bound(BoundVar::new(ctx.in_binders, idx)) + } + } + } + TypeNs::SelfType(impl_id) => { + let generics = generics(ctx.db.upcast(), impl_id.into()); + let substs = match ctx.type_param_mode { + TypeParamLoweringMode::Placeholder => { + Substs::type_params_for_generics(&generics) + } + TypeParamLoweringMode::Variable => { + Substs::bound_vars(&generics, ctx.in_binders) + } + }; + ctx.db.impl_self_ty(impl_id).subst(&substs) + } + TypeNs::AdtSelfType(adt) => { + let generics = generics(ctx.db.upcast(), adt.into()); + let substs = match ctx.type_param_mode { + TypeParamLoweringMode::Placeholder => { + Substs::type_params_for_generics(&generics) + } + TypeParamLoweringMode::Variable => { + Substs::bound_vars(&generics, ctx.in_binders) + } + }; + ctx.db.ty(adt.into()).subst(&substs) + } + + TypeNs::AdtId(it) => { + Ty::from_hir_path_inner(ctx, resolved_segment, it.into(), infer_args) + } + TypeNs::BuiltinType(it) => { + Ty::from_hir_path_inner(ctx, resolved_segment, it.into(), infer_args) + } + TypeNs::TypeAliasId(it) => { + Ty::from_hir_path_inner(ctx, resolved_segment, it.into(), infer_args) + } + // 
FIXME: report error + TypeNs::EnumVariantId(_) => return (Ty::Unknown, None), + }; + + Ty::from_type_relative_path(ctx, ty, Some(resolution), remaining_segments) + } + + pub(crate) fn from_hir_path(ctx: &TyLoweringContext<'_>, path: &Path) -> (Ty, Option) { + // Resolve the path (in type namespace) + if let Some(type_ref) = path.type_anchor() { + let (ty, res) = Ty::from_hir_ext(ctx, &type_ref); + return Ty::from_type_relative_path(ctx, ty, res, path.segments()); + } + let (resolution, remaining_index) = + match ctx.resolver.resolve_path_in_type_ns(ctx.db.upcast(), path.mod_path()) { + Some(it) => it, + None => return (Ty::Unknown, None), + }; + let (resolved_segment, remaining_segments) = match remaining_index { + None => ( + path.segments().last().expect("resolved path has at least one element"), + PathSegments::EMPTY, + ), + Some(i) => (path.segments().get(i - 1).unwrap(), path.segments().skip(i)), + }; + Ty::from_partly_resolved_hir_path( + ctx, + resolution, + resolved_segment, + remaining_segments, + false, + ) + } + + fn select_associated_type( + ctx: &TyLoweringContext<'_>, + res: Option, + segment: PathSegment<'_>, + ) -> Ty { + if let Some(res) = res { + let ty = + associated_type_shorthand_candidates(ctx.db, res, move |name, t, associated_ty| { + if name == segment.name { + let substs = match ctx.type_param_mode { + TypeParamLoweringMode::Placeholder => { + // if we're lowering to placeholders, we have to put + // them in now + let s = Substs::type_params( + ctx.db, + ctx.resolver.generic_def().expect( + "there should be generics if there's a generic param", + ), + ); + t.substs.clone().subst_bound_vars(&s) + } + TypeParamLoweringMode::Variable => t.substs.clone(), + }; + // We need to shift in the bound vars, since + // associated_type_shorthand_candidates does not do that + let substs = substs.shift_bound_vars(ctx.in_binders); + // FIXME handle type parameters on the segment + return Some(Ty::Projection(ProjectionTy { + associated_ty, + parameters: 
substs, + })); + } + + None + }); + + ty.unwrap_or(Ty::Unknown) + } else { + Ty::Unknown + } + } + + fn from_hir_path_inner( + ctx: &TyLoweringContext<'_>, + segment: PathSegment<'_>, + typable: TyDefId, + infer_args: bool, + ) -> Ty { + let generic_def = match typable { + TyDefId::BuiltinType(_) => None, + TyDefId::AdtId(it) => Some(it.into()), + TyDefId::TypeAliasId(it) => Some(it.into()), + }; + let substs = substs_from_path_segment(ctx, segment, generic_def, infer_args); + ctx.db.ty(typable).subst(&substs) + } + + /// Collect generic arguments from a path into a `Substs`. See also + /// `create_substs_for_ast_path` and `def_to_ty` in rustc. + pub(super) fn substs_from_path( + ctx: &TyLoweringContext<'_>, + path: &Path, + // Note that we don't call `db.value_type(resolved)` here, + // `ValueTyDefId` is just a convenient way to pass generics and + // special-case enum variants + resolved: ValueTyDefId, + infer_args: bool, + ) -> Substs { + let last = path.segments().last().expect("path should have at least one segment"); + let (segment, generic_def) = match resolved { + ValueTyDefId::FunctionId(it) => (last, Some(it.into())), + ValueTyDefId::StructId(it) => (last, Some(it.into())), + ValueTyDefId::UnionId(it) => (last, Some(it.into())), + ValueTyDefId::ConstId(it) => (last, Some(it.into())), + ValueTyDefId::StaticId(_) => (last, None), + ValueTyDefId::EnumVariantId(var) => { + // the generic args for an enum variant may be either specified + // on the segment referring to the enum, or on the segment + // referring to the variant. So `Option::::None` and + // `Option::None::` are both allowed (though the former is + // preferred). See also `def_ids_for_path_segments` in rustc. 
+ let len = path.segments().len(); + let penultimate = if len >= 2 { path.segments().get(len - 2) } else { None }; + let segment = match penultimate { + Some(segment) if segment.args_and_bindings.is_some() => segment, + _ => last, + }; + (segment, Some(var.parent.into())) + } + }; + substs_from_path_segment(ctx, segment, generic_def, infer_args) + } +} + +fn substs_from_path_segment( + ctx: &TyLoweringContext<'_>, + segment: PathSegment<'_>, + def_generic: Option, + infer_args: bool, +) -> Substs { + let mut substs = Vec::new(); + let def_generics = def_generic.map(|def| generics(ctx.db.upcast(), def)); + + let (parent_params, self_params, type_params, impl_trait_params) = + def_generics.map_or((0, 0, 0, 0), |g| g.provenance_split()); + let total_len = parent_params + self_params + type_params + impl_trait_params; + + substs.extend(iter::repeat(Ty::Unknown).take(parent_params)); + + let mut had_explicit_args = false; + + if let Some(generic_args) = &segment.args_and_bindings { + if !generic_args.has_self_type { + substs.extend(iter::repeat(Ty::Unknown).take(self_params)); + } + let expected_num = + if generic_args.has_self_type { self_params + type_params } else { type_params }; + let skip = if generic_args.has_self_type && self_params == 0 { 1 } else { 0 }; + // if args are provided, it should be all of them, but we can't rely on that + for arg in generic_args.args.iter().skip(skip).take(expected_num) { + match arg { + GenericArg::Type(type_ref) => { + had_explicit_args = true; + let ty = Ty::from_hir(ctx, type_ref); + substs.push(ty); + } + } + } + } + + // handle defaults. In expression or pattern path segments without + // explicitly specified type arguments, missing type arguments are inferred + // (i.e. defaults aren't used). 
+ if !infer_args || had_explicit_args { + if let Some(def_generic) = def_generic { + let defaults = ctx.db.generic_defaults(def_generic); + assert_eq!(total_len, defaults.len()); + + for default_ty in defaults.iter().skip(substs.len()) { + // each default can depend on the previous parameters + let substs_so_far = Substs(substs.clone().into()); + substs.push(default_ty.clone().subst(&substs_so_far)); + } + } + } + + // add placeholders for args that were not provided + // FIXME: emit diagnostics in contexts where this is not allowed + for _ in substs.len()..total_len { + substs.push(Ty::Unknown); + } + assert_eq!(substs.len(), total_len); + + Substs(substs.into()) +} + +impl TraitRef { + fn from_path( + ctx: &TyLoweringContext<'_>, + path: &Path, + explicit_self_ty: Option, + ) -> Option { + let resolved = + match ctx.resolver.resolve_path_in_type_ns_fully(ctx.db.upcast(), path.mod_path())? { + TypeNs::TraitId(tr) => tr, + _ => return None, + }; + let segment = path.segments().last().expect("path should have at least one segment"); + Some(TraitRef::from_resolved_path(ctx, resolved, segment, explicit_self_ty)) + } + + pub(crate) fn from_resolved_path( + ctx: &TyLoweringContext<'_>, + resolved: TraitId, + segment: PathSegment<'_>, + explicit_self_ty: Option, + ) -> Self { + let mut substs = TraitRef::substs_from_path(ctx, segment, resolved); + if let Some(self_ty) = explicit_self_ty { + make_mut_slice(&mut substs.0)[0] = self_ty; + } + TraitRef { trait_: resolved, substs } + } + + fn from_hir( + ctx: &TyLoweringContext<'_>, + type_ref: &TypeRef, + explicit_self_ty: Option, + ) -> Option { + let path = match type_ref { + TypeRef::Path(path) => path, + _ => return None, + }; + TraitRef::from_path(ctx, path, explicit_self_ty) + } + + fn substs_from_path( + ctx: &TyLoweringContext<'_>, + segment: PathSegment<'_>, + resolved: TraitId, + ) -> Substs { + substs_from_path_segment(ctx, segment, Some(resolved.into()), false) + } + + pub(crate) fn from_type_bound( + ctx: 
&TyLoweringContext<'_>, + bound: &TypeBound, + self_ty: Ty, + ) -> Option { + match bound { + TypeBound::Path(path) => TraitRef::from_path(ctx, path, Some(self_ty)), + TypeBound::Error => None, + } + } +} + +impl GenericPredicate { + pub(crate) fn from_where_predicate<'a>( + ctx: &'a TyLoweringContext<'a>, + where_predicate: &'a WherePredicate, + ) -> impl Iterator + 'a { + let self_ty = match &where_predicate.target { + WherePredicateTarget::TypeRef(type_ref) => Ty::from_hir(ctx, type_ref), + WherePredicateTarget::TypeParam(param_id) => { + let generic_def = ctx.resolver.generic_def().expect("generics in scope"); + let generics = generics(ctx.db.upcast(), generic_def); + let param_id = hir_def::TypeParamId { parent: generic_def, local_id: *param_id }; + match ctx.type_param_mode { + TypeParamLoweringMode::Placeholder => Ty::Placeholder(param_id), + TypeParamLoweringMode::Variable => { + let idx = generics.param_idx(param_id).expect("matching generics"); + Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, idx)) + } + } + } + }; + GenericPredicate::from_type_bound(ctx, &where_predicate.bound, self_ty) + } + + pub(crate) fn from_type_bound<'a>( + ctx: &'a TyLoweringContext<'a>, + bound: &'a TypeBound, + self_ty: Ty, + ) -> impl Iterator + 'a { + let trait_ref = TraitRef::from_type_bound(ctx, bound, self_ty); + iter::once(trait_ref.clone().map_or(GenericPredicate::Error, GenericPredicate::Implemented)) + .chain( + trait_ref + .into_iter() + .flat_map(move |tr| assoc_type_bindings_from_type_bound(ctx, bound, tr)), + ) + } +} + +fn assoc_type_bindings_from_type_bound<'a>( + ctx: &'a TyLoweringContext<'a>, + bound: &'a TypeBound, + trait_ref: TraitRef, +) -> impl Iterator + 'a { + let last_segment = match bound { + TypeBound::Path(path) => path.segments().last(), + TypeBound::Error => None, + }; + last_segment + .into_iter() + .flat_map(|segment| segment.args_and_bindings.into_iter()) + .flat_map(|args_and_bindings| args_and_bindings.bindings.iter()) + .flat_map(move 
|binding| { + let found = associated_type_by_name_including_super_traits( + ctx.db, + trait_ref.clone(), + &binding.name, + ); + let (super_trait_ref, associated_ty) = match found { + None => return SmallVec::<[GenericPredicate; 1]>::new(), + Some(t) => t, + }; + let projection_ty = ProjectionTy { associated_ty, parameters: super_trait_ref.substs }; + let mut preds = SmallVec::with_capacity( + binding.type_ref.as_ref().map_or(0, |_| 1) + binding.bounds.len(), + ); + if let Some(type_ref) = &binding.type_ref { + let ty = Ty::from_hir(ctx, type_ref); + let projection_predicate = + ProjectionPredicate { projection_ty: projection_ty.clone(), ty }; + preds.push(GenericPredicate::Projection(projection_predicate)); + } + for bound in &binding.bounds { + preds.extend(GenericPredicate::from_type_bound( + ctx, + bound, + Ty::Projection(projection_ty.clone()), + )); + } + preds + }) +} + +impl ReturnTypeImplTrait { + fn from_hir(ctx: &TyLoweringContext, bounds: &[TypeBound]) -> Self { + mark::hit!(lower_rpit); + let self_ty = Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, 0)); + let predicates = ctx.with_shifted_in(DebruijnIndex::ONE, |ctx| { + bounds + .iter() + .flat_map(|b| GenericPredicate::from_type_bound(ctx, b, self_ty.clone())) + .collect() + }); + ReturnTypeImplTrait { bounds: Binders::new(1, predicates) } + } +} + +fn count_impl_traits(type_ref: &TypeRef) -> usize { + let mut count = 0; + type_ref.walk(&mut |type_ref| { + if matches!(type_ref, TypeRef::ImplTrait(_)) { + count += 1; + } + }); + count +} + +/// Build the signature of a callable item (function, struct or enum variant). 
+pub fn callable_item_sig(db: &dyn HirDatabase, def: CallableDefId) -> PolyFnSig { + match def { + CallableDefId::FunctionId(f) => fn_sig_for_fn(db, f), + CallableDefId::StructId(s) => fn_sig_for_struct_constructor(db, s), + CallableDefId::EnumVariantId(e) => fn_sig_for_enum_variant_constructor(db, e), + } +} + +pub fn associated_type_shorthand_candidates( + db: &dyn HirDatabase, + res: TypeNs, + mut cb: impl FnMut(&Name, &TraitRef, TypeAliasId) -> Option, +) -> Option { + let traits_from_env: Vec<_> = match res { + TypeNs::SelfType(impl_id) => match db.impl_trait(impl_id) { + None => vec![], + Some(trait_ref) => vec![trait_ref.value], + }, + TypeNs::GenericParam(param_id) => { + let predicates = db.generic_predicates_for_param(param_id); + let mut traits_: Vec<_> = predicates + .iter() + .filter_map(|pred| match &pred.value { + GenericPredicate::Implemented(tr) => Some(tr.clone()), + _ => None, + }) + .collect(); + // Handle `Self::Type` referring to own associated type in trait definitions + if let GenericDefId::TraitId(trait_id) = param_id.parent { + let generics = generics(db.upcast(), trait_id.into()); + if generics.params.types[param_id.local_id].provenance + == TypeParamProvenance::TraitSelf + { + let trait_ref = TraitRef { + trait_: trait_id, + substs: Substs::bound_vars(&generics, DebruijnIndex::INNERMOST), + }; + traits_.push(trait_ref); + } + } + traits_ + } + _ => vec![], + }; + + for t in traits_from_env.into_iter().flat_map(move |t| all_super_trait_refs(db, t)) { + let data = db.trait_data(t.trait_); + + for (name, assoc_id) in &data.items { + match assoc_id { + AssocItemId::TypeAliasId(alias) => { + if let Some(result) = cb(name, &t, *alias) { + return Some(result); + } + } + AssocItemId::FunctionId(_) | AssocItemId::ConstId(_) => {} + } + } + } + + None +} + +/// Build the type of all specific fields of a struct or enum variant. 
+pub(crate) fn field_types_query( + db: &dyn HirDatabase, + variant_id: VariantId, +) -> Arc>> { + let var_data = variant_data(db.upcast(), variant_id); + let (resolver, def): (_, GenericDefId) = match variant_id { + VariantId::StructId(it) => (it.resolver(db.upcast()), it.into()), + VariantId::UnionId(it) => (it.resolver(db.upcast()), it.into()), + VariantId::EnumVariantId(it) => (it.parent.resolver(db.upcast()), it.parent.into()), + }; + let generics = generics(db.upcast(), def); + let mut res = ArenaMap::default(); + let ctx = + TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable); + for (field_id, field_data) in var_data.fields().iter() { + res.insert(field_id, Binders::new(generics.len(), Ty::from_hir(&ctx, &field_data.type_ref))) + } + Arc::new(res) +} + +/// This query exists only to be used when resolving short-hand associated types +/// like `T::Item`. +/// +/// See the analogous query in rustc and its comment: +/// https://github.com/rust-lang/rust/blob/9150f844e2624eb013ec78ca08c1d416e6644026/src/librustc_typeck/astconv.rs#L46 +/// This is a query mostly to handle cycles somewhat gracefully; e.g. the +/// following bounds are disallowed: `T: Foo, U: Foo`, but +/// these are fine: `T: Foo, U: Foo<()>`. 
+pub(crate) fn generic_predicates_for_param_query( + db: &dyn HirDatabase, + param_id: TypeParamId, +) -> Arc<[Binders]> { + let resolver = param_id.parent.resolver(db.upcast()); + let ctx = + TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable); + let generics = generics(db.upcast(), param_id.parent); + resolver + .where_predicates_in_scope() + // we have to filter out all other predicates *first*, before attempting to lower them + .filter(|pred| match &pred.target { + WherePredicateTarget::TypeRef(type_ref) => { + Ty::from_hir_only_param(&ctx, type_ref) == Some(param_id) + } + WherePredicateTarget::TypeParam(local_id) => *local_id == param_id.local_id, + }) + .flat_map(|pred| { + GenericPredicate::from_where_predicate(&ctx, pred) + .map(|p| Binders::new(generics.len(), p)) + }) + .collect() +} + +pub(crate) fn generic_predicates_for_param_recover( + _db: &dyn HirDatabase, + _cycle: &[String], + _param_id: &TypeParamId, +) -> Arc<[Binders]> { + Arc::new([]) +} + +impl TraitEnvironment { + pub fn lower(db: &dyn HirDatabase, resolver: &Resolver) -> Arc { + let ctx = TyLoweringContext::new(db, &resolver) + .with_type_param_mode(TypeParamLoweringMode::Placeholder); + let mut predicates = resolver + .where_predicates_in_scope() + .flat_map(|pred| GenericPredicate::from_where_predicate(&ctx, pred)) + .collect::>(); + + if let Some(def) = resolver.generic_def() { + let container: Option = match def { + // FIXME: is there a function for this? 
+ GenericDefId::FunctionId(f) => Some(f.lookup(db.upcast()).container), + GenericDefId::AdtId(_) => None, + GenericDefId::TraitId(_) => None, + GenericDefId::TypeAliasId(t) => Some(t.lookup(db.upcast()).container), + GenericDefId::ImplId(_) => None, + GenericDefId::EnumVariantId(_) => None, + GenericDefId::ConstId(c) => Some(c.lookup(db.upcast()).container), + }; + if let Some(AssocContainerId::TraitId(trait_id)) = container { + // add `Self: Trait` to the environment in trait + // function default implementations (and hypothetical code + // inside consts or type aliases) + test_utils::mark::hit!(trait_self_implements_self); + let substs = Substs::type_params(db, trait_id); + let trait_ref = TraitRef { trait_: trait_id, substs }; + let pred = GenericPredicate::Implemented(trait_ref); + + predicates.push(pred); + } + } + + Arc::new(TraitEnvironment { predicates }) + } +} + +/// Resolve the where clause(s) of an item with generics. +pub(crate) fn generic_predicates_query( + db: &dyn HirDatabase, + def: GenericDefId, +) -> Arc<[Binders]> { + let resolver = def.resolver(db.upcast()); + let ctx = + TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable); + let generics = generics(db.upcast(), def); + resolver + .where_predicates_in_scope() + .flat_map(|pred| { + GenericPredicate::from_where_predicate(&ctx, pred) + .map(|p| Binders::new(generics.len(), p)) + }) + .collect() +} + +/// Resolve the default type params from generics +pub(crate) fn generic_defaults_query( + db: &dyn HirDatabase, + def: GenericDefId, +) -> Arc<[Binders]> { + let resolver = def.resolver(db.upcast()); + let ctx = + TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable); + let generic_params = generics(db.upcast(), def); + + let defaults = generic_params + .iter() + .enumerate() + .map(|(idx, (_, p))| { + let mut ty = p.default.as_ref().map_or(Ty::Unknown, |t| Ty::from_hir(&ctx, t)); + + // Each default can only refer to 
previous parameters. + ty.walk_mut_binders( + &mut |ty, binders| match ty { + Ty::Bound(BoundVar { debruijn, index }) if *debruijn == binders => { + if *index >= idx { + // type variable default referring to parameter coming + // after it. This is forbidden (FIXME: report + // diagnostic) + *ty = Ty::Unknown; + } + } + _ => {} + }, + DebruijnIndex::INNERMOST, + ); + + Binders::new(idx, ty) + }) + .collect(); + + defaults +} + +fn fn_sig_for_fn(db: &dyn HirDatabase, def: FunctionId) -> PolyFnSig { + let data = db.function_data(def); + let resolver = def.resolver(db.upcast()); + let ctx_params = TyLoweringContext::new(db, &resolver) + .with_impl_trait_mode(ImplTraitLoweringMode::Variable) + .with_type_param_mode(TypeParamLoweringMode::Variable); + let params = data.params.iter().map(|tr| Ty::from_hir(&ctx_params, tr)).collect::>(); + let ctx_ret = TyLoweringContext::new(db, &resolver) + .with_impl_trait_mode(ImplTraitLoweringMode::Opaque) + .with_type_param_mode(TypeParamLoweringMode::Variable); + let ret = Ty::from_hir(&ctx_ret, &data.ret_type); + let generics = generics(db.upcast(), def.into()); + let num_binders = generics.len(); + Binders::new(num_binders, FnSig::from_params_and_return(params, ret, data.is_varargs)) +} + +/// Build the declared type of a function. This should not need to look at the +/// function body. +fn type_for_fn(db: &dyn HirDatabase, def: FunctionId) -> Binders { + let generics = generics(db.upcast(), def.into()); + let substs = Substs::bound_vars(&generics, DebruijnIndex::INNERMOST); + Binders::new(substs.len(), Ty::apply(TypeCtor::FnDef(def.into()), substs)) +} + +/// Build the declared type of a const. 
+fn type_for_const(db: &dyn HirDatabase, def: ConstId) -> Binders { + let data = db.const_data(def); + let generics = generics(db.upcast(), def.into()); + let resolver = def.resolver(db.upcast()); + let ctx = + TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable); + + Binders::new(generics.len(), Ty::from_hir(&ctx, &data.type_ref)) +} + +/// Build the declared type of a static. +fn type_for_static(db: &dyn HirDatabase, def: StaticId) -> Binders { + let data = db.static_data(def); + let resolver = def.resolver(db.upcast()); + let ctx = TyLoweringContext::new(db, &resolver); + + Binders::new(0, Ty::from_hir(&ctx, &data.type_ref)) +} + +/// Build the declared type of a static. +fn type_for_builtin(def: BuiltinType) -> Ty { + Ty::simple(match def { + BuiltinType::Char => TypeCtor::Char, + BuiltinType::Bool => TypeCtor::Bool, + BuiltinType::Str => TypeCtor::Str, + BuiltinType::Int(t) => TypeCtor::Int(IntTy::from(t).into()), + BuiltinType::Float(t) => TypeCtor::Float(FloatTy::from(t).into()), + }) +} + +fn fn_sig_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> PolyFnSig { + let struct_data = db.struct_data(def); + let fields = struct_data.variant_data.fields(); + let resolver = def.resolver(db.upcast()); + let ctx = + TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable); + let params = + fields.iter().map(|(_, field)| Ty::from_hir(&ctx, &field.type_ref)).collect::>(); + let ret = type_for_adt(db, def.into()); + Binders::new(ret.num_binders, FnSig::from_params_and_return(params, ret.value, false)) +} + +/// Build the type of a tuple struct constructor. 
+fn type_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> Binders { + let struct_data = db.struct_data(def); + if let StructKind::Unit = struct_data.variant_data.kind() { + return type_for_adt(db, def.into()); + } + let generics = generics(db.upcast(), def.into()); + let substs = Substs::bound_vars(&generics, DebruijnIndex::INNERMOST); + Binders::new(substs.len(), Ty::apply(TypeCtor::FnDef(def.into()), substs)) +} + +fn fn_sig_for_enum_variant_constructor(db: &dyn HirDatabase, def: EnumVariantId) -> PolyFnSig { + let enum_data = db.enum_data(def.parent); + let var_data = &enum_data.variants[def.local_id]; + let fields = var_data.variant_data.fields(); + let resolver = def.parent.resolver(db.upcast()); + let ctx = + TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable); + let params = + fields.iter().map(|(_, field)| Ty::from_hir(&ctx, &field.type_ref)).collect::>(); + let ret = type_for_adt(db, def.parent.into()); + Binders::new(ret.num_binders, FnSig::from_params_and_return(params, ret.value, false)) +} + +/// Build the type of a tuple enum variant constructor. 
+fn type_for_enum_variant_constructor(db: &dyn HirDatabase, def: EnumVariantId) -> Binders { + let enum_data = db.enum_data(def.parent); + let var_data = &enum_data.variants[def.local_id].variant_data; + if let StructKind::Unit = var_data.kind() { + return type_for_adt(db, def.parent.into()); + } + let generics = generics(db.upcast(), def.parent.into()); + let substs = Substs::bound_vars(&generics, DebruijnIndex::INNERMOST); + Binders::new(substs.len(), Ty::apply(TypeCtor::FnDef(def.into()), substs)) +} + +fn type_for_adt(db: &dyn HirDatabase, adt: AdtId) -> Binders { + let generics = generics(db.upcast(), adt.into()); + let substs = Substs::bound_vars(&generics, DebruijnIndex::INNERMOST); + Binders::new(substs.len(), Ty::apply(TypeCtor::Adt(adt), substs)) +} + +fn type_for_type_alias(db: &dyn HirDatabase, t: TypeAliasId) -> Binders { + let generics = generics(db.upcast(), t.into()); + let resolver = t.resolver(db.upcast()); + let ctx = + TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable); + let type_ref = &db.type_alias_data(t).type_ref; + let substs = Substs::bound_vars(&generics, DebruijnIndex::INNERMOST); + let inner = Ty::from_hir(&ctx, type_ref.as_ref().unwrap_or(&TypeRef::Error)); + Binders::new(substs.len(), inner) +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub enum CallableDefId { + FunctionId(FunctionId), + StructId(StructId), + EnumVariantId(EnumVariantId), +} +impl_from!(FunctionId, StructId, EnumVariantId for CallableDefId); + +impl CallableDefId { + pub fn krate(self, db: &dyn HirDatabase) -> CrateId { + let db = db.upcast(); + match self { + CallableDefId::FunctionId(f) => f.lookup(db).module(db), + CallableDefId::StructId(s) => s.lookup(db).container.module(db), + CallableDefId::EnumVariantId(e) => e.parent.lookup(db).container.module(db), + } + .krate + } +} + +impl From for GenericDefId { + fn from(def: CallableDefId) -> GenericDefId { + match def { + CallableDefId::FunctionId(f) => f.into(), 
+ CallableDefId::StructId(s) => s.into(), + CallableDefId::EnumVariantId(e) => e.into(), + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum TyDefId { + BuiltinType(BuiltinType), + AdtId(AdtId), + TypeAliasId(TypeAliasId), +} +impl_from!(BuiltinType, AdtId(StructId, EnumId, UnionId), TypeAliasId for TyDefId); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum ValueTyDefId { + FunctionId(FunctionId), + StructId(StructId), + UnionId(UnionId), + EnumVariantId(EnumVariantId), + ConstId(ConstId), + StaticId(StaticId), +} +impl_from!(FunctionId, StructId, UnionId, EnumVariantId, ConstId, StaticId for ValueTyDefId); + +/// Build the declared type of an item. This depends on the namespace; e.g. for +/// `struct Foo(usize)`, we have two types: The type of the struct itself, and +/// the constructor function `(usize) -> Foo` which lives in the values +/// namespace. +pub(crate) fn ty_query(db: &dyn HirDatabase, def: TyDefId) -> Binders { + match def { + TyDefId::BuiltinType(it) => Binders::new(0, type_for_builtin(it)), + TyDefId::AdtId(it) => type_for_adt(db, it), + TyDefId::TypeAliasId(it) => type_for_type_alias(db, it), + } +} + +pub(crate) fn ty_recover(db: &dyn HirDatabase, _cycle: &[String], def: &TyDefId) -> Binders { + let num_binders = match *def { + TyDefId::BuiltinType(_) => 0, + TyDefId::AdtId(it) => generics(db.upcast(), it.into()).len(), + TyDefId::TypeAliasId(it) => generics(db.upcast(), it.into()).len(), + }; + Binders::new(num_binders, Ty::Unknown) +} + +pub(crate) fn value_ty_query(db: &dyn HirDatabase, def: ValueTyDefId) -> Binders { + match def { + ValueTyDefId::FunctionId(it) => type_for_fn(db, it), + ValueTyDefId::StructId(it) => type_for_struct_constructor(db, it), + ValueTyDefId::UnionId(it) => type_for_adt(db, it.into()), + ValueTyDefId::EnumVariantId(it) => type_for_enum_variant_constructor(db, it), + ValueTyDefId::ConstId(it) => type_for_const(db, it), + ValueTyDefId::StaticId(it) => type_for_static(db, it), + } 
+} + +pub(crate) fn impl_self_ty_query(db: &dyn HirDatabase, impl_id: ImplId) -> Binders { + let impl_data = db.impl_data(impl_id); + let resolver = impl_id.resolver(db.upcast()); + let generics = generics(db.upcast(), impl_id.into()); + let ctx = + TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable); + Binders::new(generics.len(), Ty::from_hir(&ctx, &impl_data.target_type)) +} + +pub(crate) fn impl_self_ty_recover( + db: &dyn HirDatabase, + _cycle: &[String], + impl_id: &ImplId, +) -> Binders { + let generics = generics(db.upcast(), (*impl_id).into()); + Binders::new(generics.len(), Ty::Unknown) +} + +pub(crate) fn impl_trait_query(db: &dyn HirDatabase, impl_id: ImplId) -> Option> { + let impl_data = db.impl_data(impl_id); + let resolver = impl_id.resolver(db.upcast()); + let ctx = + TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable); + let self_ty = db.impl_self_ty(impl_id); + let target_trait = impl_data.target_trait.as_ref()?; + Some(Binders::new( + self_ty.num_binders, + TraitRef::from_hir(&ctx, target_trait, Some(self_ty.value))?, + )) +} + +pub(crate) fn return_type_impl_traits( + db: &dyn HirDatabase, + def: hir_def::FunctionId, +) -> Option>> { + // FIXME unify with fn_sig_for_fn instead of doing lowering twice, maybe + let data = db.function_data(def); + let resolver = def.resolver(db.upcast()); + let ctx_ret = TyLoweringContext::new(db, &resolver) + .with_impl_trait_mode(ImplTraitLoweringMode::Opaque) + .with_type_param_mode(TypeParamLoweringMode::Variable); + let _ret = Ty::from_hir(&ctx_ret, &data.ret_type); + let generics = generics(db.upcast(), def.into()); + let num_binders = generics.len(); + let return_type_impl_traits = + ReturnTypeImplTraits { impl_traits: ctx_ret.opaque_type_data.into_inner() }; + if return_type_impl_traits.impl_traits.is_empty() { + None + } else { + Some(Arc::new(Binders::new(num_binders, return_type_impl_traits))) + } +} diff --git 
a/crates/hir_ty/src/method_resolution.rs b/crates/hir_ty/src/method_resolution.rs new file mode 100644 index 0000000000..ec59145c70 --- /dev/null +++ b/crates/hir_ty/src/method_resolution.rs @@ -0,0 +1,769 @@ +//! This module is concerned with finding methods that a given type provides. +//! For details about how this works in rustc, see the method lookup page in the +//! [rustc guide](https://rust-lang.github.io/rustc-guide/method-lookup.html) +//! and the corresponding code mostly in librustc_typeck/check/method/probe.rs. +use std::{iter, sync::Arc}; + +use arrayvec::ArrayVec; +use base_db::CrateId; +use hir_def::{ + builtin_type::{IntBitness, Signedness}, + lang_item::LangItemTarget, + type_ref::Mutability, + AssocContainerId, AssocItemId, FunctionId, HasModule, ImplId, Lookup, TraitId, +}; +use hir_expand::name::Name; +use rustc_hash::{FxHashMap, FxHashSet}; + +use super::Substs; +use crate::{ + autoderef, + db::HirDatabase, + primitive::{FloatBitness, FloatTy, IntTy}, + utils::all_super_traits, + ApplicationTy, Canonical, DebruijnIndex, InEnvironment, TraitEnvironment, TraitRef, Ty, TyKind, + TypeCtor, TypeWalk, +}; + +/// This is used as a key for indexing impls. +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub enum TyFingerprint { + Apply(TypeCtor), +} + +impl TyFingerprint { + /// Creates a TyFingerprint for looking up an impl. Only certain types can + /// have impls: if we have some `struct S`, we can have an `impl S`, but not + /// `impl &S`. Hence, this will return `None` for reference types and such. 
+ pub(crate) fn for_impl(ty: &Ty) -> Option { + match ty { + Ty::Apply(a_ty) => Some(TyFingerprint::Apply(a_ty.ctor)), + _ => None, + } + } +} + +pub(crate) const ALL_INT_FPS: [TyFingerprint; 12] = [ + TyFingerprint::Apply(TypeCtor::Int(IntTy { + signedness: Signedness::Unsigned, + bitness: IntBitness::X8, + })), + TyFingerprint::Apply(TypeCtor::Int(IntTy { + signedness: Signedness::Unsigned, + bitness: IntBitness::X16, + })), + TyFingerprint::Apply(TypeCtor::Int(IntTy { + signedness: Signedness::Unsigned, + bitness: IntBitness::X32, + })), + TyFingerprint::Apply(TypeCtor::Int(IntTy { + signedness: Signedness::Unsigned, + bitness: IntBitness::X64, + })), + TyFingerprint::Apply(TypeCtor::Int(IntTy { + signedness: Signedness::Unsigned, + bitness: IntBitness::X128, + })), + TyFingerprint::Apply(TypeCtor::Int(IntTy { + signedness: Signedness::Unsigned, + bitness: IntBitness::Xsize, + })), + TyFingerprint::Apply(TypeCtor::Int(IntTy { + signedness: Signedness::Signed, + bitness: IntBitness::X8, + })), + TyFingerprint::Apply(TypeCtor::Int(IntTy { + signedness: Signedness::Signed, + bitness: IntBitness::X16, + })), + TyFingerprint::Apply(TypeCtor::Int(IntTy { + signedness: Signedness::Signed, + bitness: IntBitness::X32, + })), + TyFingerprint::Apply(TypeCtor::Int(IntTy { + signedness: Signedness::Signed, + bitness: IntBitness::X64, + })), + TyFingerprint::Apply(TypeCtor::Int(IntTy { + signedness: Signedness::Signed, + bitness: IntBitness::X128, + })), + TyFingerprint::Apply(TypeCtor::Int(IntTy { + signedness: Signedness::Signed, + bitness: IntBitness::Xsize, + })), +]; + +pub(crate) const ALL_FLOAT_FPS: [TyFingerprint; 2] = [ + TyFingerprint::Apply(TypeCtor::Float(FloatTy { bitness: FloatBitness::X32 })), + TyFingerprint::Apply(TypeCtor::Float(FloatTy { bitness: FloatBitness::X64 })), +]; + +/// Trait impls defined or available in some crate. +#[derive(Debug, Eq, PartialEq)] +pub struct TraitImpls { + // If the `Option` is `None`, the impl may apply to any self type. 
+ map: FxHashMap, Vec>>, +} + +impl TraitImpls { + pub(crate) fn trait_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc { + let _p = profile::span("trait_impls_in_crate_query"); + let mut impls = Self { map: FxHashMap::default() }; + + let crate_def_map = db.crate_def_map(krate); + for (_module_id, module_data) in crate_def_map.modules.iter() { + for impl_id in module_data.scope.impls() { + let target_trait = match db.impl_trait(impl_id) { + Some(tr) => tr.value.trait_, + None => continue, + }; + let self_ty = db.impl_self_ty(impl_id); + let self_ty_fp = TyFingerprint::for_impl(&self_ty.value); + impls + .map + .entry(target_trait) + .or_default() + .entry(self_ty_fp) + .or_default() + .push(impl_id); + } + } + + Arc::new(impls) + } + + pub(crate) fn trait_impls_in_deps_query(db: &dyn HirDatabase, krate: CrateId) -> Arc { + let _p = profile::span("trait_impls_in_deps_query"); + let crate_graph = db.crate_graph(); + let mut res = Self { map: FxHashMap::default() }; + + for krate in crate_graph.transitive_deps(krate) { + res.merge(&db.trait_impls_in_crate(krate)); + } + + Arc::new(res) + } + + fn merge(&mut self, other: &Self) { + for (trait_, other_map) in &other.map { + let map = self.map.entry(*trait_).or_default(); + for (fp, impls) in other_map { + let vec = map.entry(*fp).or_default(); + vec.extend(impls); + } + } + } + + /// Queries all impls of the given trait. + pub fn for_trait(&self, trait_: TraitId) -> impl Iterator + '_ { + self.map + .get(&trait_) + .into_iter() + .flat_map(|map| map.values().flat_map(|v| v.iter().copied())) + } + + /// Queries all impls of `trait_` that may apply to `self_ty`. 
+ pub fn for_trait_and_self_ty( + &self, + trait_: TraitId, + self_ty: TyFingerprint, + ) -> impl Iterator + '_ { + self.map + .get(&trait_) + .into_iter() + .flat_map(move |map| map.get(&None).into_iter().chain(map.get(&Some(self_ty)))) + .flat_map(|v| v.iter().copied()) + } + + pub fn all_impls(&self) -> impl Iterator + '_ { + self.map.values().flat_map(|map| map.values().flat_map(|v| v.iter().copied())) + } +} + +/// Inherent impls defined in some crate. +/// +/// Inherent impls can only be defined in the crate that also defines the self type of the impl +/// (note that some primitives are considered to be defined by both libcore and liballoc). +/// +/// This makes inherent impl lookup easier than trait impl lookup since we only have to consider a +/// single crate. +#[derive(Debug, Eq, PartialEq)] +pub struct InherentImpls { + map: FxHashMap>, +} + +impl InherentImpls { + pub(crate) fn inherent_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc { + let mut map: FxHashMap<_, Vec<_>> = FxHashMap::default(); + + let crate_def_map = db.crate_def_map(krate); + for (_module_id, module_data) in crate_def_map.modules.iter() { + for impl_id in module_data.scope.impls() { + let data = db.impl_data(impl_id); + if data.target_trait.is_some() { + continue; + } + + let self_ty = db.impl_self_ty(impl_id); + if let Some(fp) = TyFingerprint::for_impl(&self_ty.value) { + map.entry(fp).or_default().push(impl_id); + } + } + } + + Arc::new(Self { map }) + } + + pub fn for_self_ty(&self, self_ty: &Ty) -> &[ImplId] { + match TyFingerprint::for_impl(self_ty) { + Some(fp) => self.map.get(&fp).map(|vec| vec.as_ref()).unwrap_or(&[]), + None => &[], + } + } + + pub fn all_impls(&self) -> impl Iterator + '_ { + self.map.values().flat_map(|v| v.iter().copied()) + } +} + +impl Ty { + pub fn def_crates( + &self, + db: &dyn HirDatabase, + cur_crate: CrateId, + ) -> Option> { + // Types like slice can have inherent impls in several crates, (core and alloc). 
+ // The corresponding impls are marked with lang items, so we can use them to find the required crates. + macro_rules! lang_item_crate { + ($($name:expr),+ $(,)?) => {{ + let mut v = ArrayVec::<[LangItemTarget; 2]>::new(); + $( + v.extend(db.lang_item(cur_crate, $name.into())); + )+ + v + }}; + } + + let lang_item_targets = match self { + Ty::Apply(a_ty) => match a_ty.ctor { + TypeCtor::Adt(def_id) => { + return Some(std::iter::once(def_id.module(db.upcast()).krate).collect()) + } + TypeCtor::Bool => lang_item_crate!("bool"), + TypeCtor::Char => lang_item_crate!("char"), + TypeCtor::Float(f) => match f.bitness { + // There are two lang items: one in libcore (fXX) and one in libstd (fXX_runtime) + FloatBitness::X32 => lang_item_crate!("f32", "f32_runtime"), + FloatBitness::X64 => lang_item_crate!("f64", "f64_runtime"), + }, + TypeCtor::Int(i) => lang_item_crate!(i.ty_to_string()), + TypeCtor::Str => lang_item_crate!("str_alloc", "str"), + TypeCtor::Slice => lang_item_crate!("slice_alloc", "slice"), + TypeCtor::RawPtr(Mutability::Shared) => lang_item_crate!("const_ptr"), + TypeCtor::RawPtr(Mutability::Mut) => lang_item_crate!("mut_ptr"), + _ => return None, + }, + _ => return None, + }; + let res = lang_item_targets + .into_iter() + .filter_map(|it| match it { + LangItemTarget::ImplDefId(it) => Some(it), + _ => None, + }) + .map(|it| it.lookup(db.upcast()).container.module(db.upcast()).krate) + .collect(); + Some(res) + } +} +/// Look up the method with the given name, returning the actual autoderefed +/// receiver type (but without autoref applied yet). 
+pub(crate) fn lookup_method( + ty: &Canonical, + db: &dyn HirDatabase, + env: Arc, + krate: CrateId, + traits_in_scope: &FxHashSet, + name: &Name, +) -> Option<(Ty, FunctionId)> { + iterate_method_candidates( + ty, + db, + env, + krate, + &traits_in_scope, + Some(name), + LookupMode::MethodCall, + |ty, f| match f { + AssocItemId::FunctionId(f) => Some((ty.clone(), f)), + _ => None, + }, + ) +} + +/// Whether we're looking up a dotted method call (like `v.len()`) or a path +/// (like `Vec::new`). +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub enum LookupMode { + /// Looking up a method call like `v.len()`: We only consider candidates + /// that have a `self` parameter, and do autoderef. + MethodCall, + /// Looking up a path like `Vec::new` or `Vec::default`: We consider all + /// candidates including associated constants, but don't do autoderef. + Path, +} + +// This would be nicer if it just returned an iterator, but that runs into +// lifetime problems, because we need to borrow temp `CrateImplDefs`. +// FIXME add a context type here? +pub fn iterate_method_candidates( + ty: &Canonical, + db: &dyn HirDatabase, + env: Arc, + krate: CrateId, + traits_in_scope: &FxHashSet, + name: Option<&Name>, + mode: LookupMode, + mut callback: impl FnMut(&Ty, AssocItemId) -> Option, +) -> Option { + let mut slot = None; + iterate_method_candidates_impl( + ty, + db, + env, + krate, + traits_in_scope, + name, + mode, + &mut |ty, item| { + assert!(slot.is_none()); + slot = callback(ty, item); + slot.is_some() + }, + ); + slot +} + +fn iterate_method_candidates_impl( + ty: &Canonical, + db: &dyn HirDatabase, + env: Arc, + krate: CrateId, + traits_in_scope: &FxHashSet, + name: Option<&Name>, + mode: LookupMode, + callback: &mut dyn FnMut(&Ty, AssocItemId) -> bool, +) -> bool { + match mode { + LookupMode::MethodCall => { + // For method calls, rust first does any number of autoderef, and then one + // autoref (i.e. when the method takes &self or &mut self). 
We just ignore + // the autoref currently -- when we find a method matching the given name, + // we assume it fits. + + // Also note that when we've got a receiver like &S, even if the method we + // find in the end takes &self, we still do the autoderef step (just as + // rustc does an autoderef and then autoref again). + let ty = InEnvironment { value: ty.clone(), environment: env.clone() }; + + // We have to be careful about the order we're looking at candidates + // in here. Consider the case where we're resolving `x.clone()` + // where `x: &Vec<_>`. This resolves to the clone method with self + // type `Vec<_>`, *not* `&_`. I.e. we need to consider methods where + // the receiver type exactly matches before cases where we have to + // do autoref. But in the autoderef steps, the `&_` self type comes + // up *before* the `Vec<_>` self type. + // + // On the other hand, we don't want to just pick any by-value method + // before any by-autoref method; it's just that we need to consider + // the methods by autoderef order of *receiver types*, not *self + // types*. 
+ + let deref_chain = autoderef_method_receiver(db, krate, ty); + for i in 0..deref_chain.len() { + if iterate_method_candidates_with_autoref( + &deref_chain[i..], + db, + env.clone(), + krate, + traits_in_scope, + name, + callback, + ) { + return true; + } + } + false + } + LookupMode::Path => { + // No autoderef for path lookups + iterate_method_candidates_for_self_ty( + &ty, + db, + env, + krate, + traits_in_scope, + name, + callback, + ) + } + } +} + +fn iterate_method_candidates_with_autoref( + deref_chain: &[Canonical], + db: &dyn HirDatabase, + env: Arc, + krate: CrateId, + traits_in_scope: &FxHashSet, + name: Option<&Name>, + mut callback: &mut dyn FnMut(&Ty, AssocItemId) -> bool, +) -> bool { + if iterate_method_candidates_by_receiver( + &deref_chain[0], + &deref_chain[1..], + db, + env.clone(), + krate, + &traits_in_scope, + name, + &mut callback, + ) { + return true; + } + let refed = Canonical { + kinds: deref_chain[0].kinds.clone(), + value: Ty::apply_one(TypeCtor::Ref(Mutability::Shared), deref_chain[0].value.clone()), + }; + if iterate_method_candidates_by_receiver( + &refed, + deref_chain, + db, + env.clone(), + krate, + &traits_in_scope, + name, + &mut callback, + ) { + return true; + } + let ref_muted = Canonical { + kinds: deref_chain[0].kinds.clone(), + value: Ty::apply_one(TypeCtor::Ref(Mutability::Mut), deref_chain[0].value.clone()), + }; + if iterate_method_candidates_by_receiver( + &ref_muted, + deref_chain, + db, + env, + krate, + &traits_in_scope, + name, + &mut callback, + ) { + return true; + } + false +} + +fn iterate_method_candidates_by_receiver( + receiver_ty: &Canonical, + rest_of_deref_chain: &[Canonical], + db: &dyn HirDatabase, + env: Arc, + krate: CrateId, + traits_in_scope: &FxHashSet, + name: Option<&Name>, + mut callback: &mut dyn FnMut(&Ty, AssocItemId) -> bool, +) -> bool { + // We're looking for methods with *receiver* type receiver_ty. 
These could + // be found in any of the derefs of receiver_ty, so we have to go through + // that. + for self_ty in std::iter::once(receiver_ty).chain(rest_of_deref_chain) { + if iterate_inherent_methods(self_ty, db, name, Some(receiver_ty), krate, &mut callback) { + return true; + } + } + for self_ty in std::iter::once(receiver_ty).chain(rest_of_deref_chain) { + if iterate_trait_method_candidates( + self_ty, + db, + env.clone(), + krate, + &traits_in_scope, + name, + Some(receiver_ty), + &mut callback, + ) { + return true; + } + } + false +} + +fn iterate_method_candidates_for_self_ty( + self_ty: &Canonical, + db: &dyn HirDatabase, + env: Arc, + krate: CrateId, + traits_in_scope: &FxHashSet, + name: Option<&Name>, + mut callback: &mut dyn FnMut(&Ty, AssocItemId) -> bool, +) -> bool { + if iterate_inherent_methods(self_ty, db, name, None, krate, &mut callback) { + return true; + } + iterate_trait_method_candidates(self_ty, db, env, krate, traits_in_scope, name, None, callback) +} + +fn iterate_trait_method_candidates( + self_ty: &Canonical, + db: &dyn HirDatabase, + env: Arc, + krate: CrateId, + traits_in_scope: &FxHashSet, + name: Option<&Name>, + receiver_ty: Option<&Canonical>, + callback: &mut dyn FnMut(&Ty, AssocItemId) -> bool, +) -> bool { + // if ty is `dyn Trait`, the trait doesn't need to be in scope + let inherent_trait = + self_ty.value.dyn_trait().into_iter().flat_map(|t| all_super_traits(db.upcast(), t)); + let env_traits = if let Ty::Placeholder(_) = self_ty.value { + // if we have `T: Trait` in the param env, the trait doesn't need to be in scope + env.trait_predicates_for_self_ty(&self_ty.value) + .map(|tr| tr.trait_) + .flat_map(|t| all_super_traits(db.upcast(), t)) + .collect() + } else { + Vec::new() + }; + let traits = + inherent_trait.chain(env_traits.into_iter()).chain(traits_in_scope.iter().copied()); + 'traits: for t in traits { + let data = db.trait_data(t); + + // we'll be lazy about checking whether the type implements the + // trait, 
but if we find out it doesn't, we'll skip the rest of the + // iteration + let mut known_implemented = false; + for (_name, item) in data.items.iter() { + if !is_valid_candidate(db, name, receiver_ty, *item, self_ty) { + continue; + } + if !known_implemented { + let goal = generic_implements_goal(db, env.clone(), t, self_ty.clone()); + if db.trait_solve(krate, goal).is_none() { + continue 'traits; + } + } + known_implemented = true; + if callback(&self_ty.value, *item) { + return true; + } + } + } + false +} + +fn iterate_inherent_methods( + self_ty: &Canonical, + db: &dyn HirDatabase, + name: Option<&Name>, + receiver_ty: Option<&Canonical>, + krate: CrateId, + callback: &mut dyn FnMut(&Ty, AssocItemId) -> bool, +) -> bool { + let def_crates = match self_ty.value.def_crates(db, krate) { + Some(k) => k, + None => return false, + }; + for krate in def_crates { + let impls = db.inherent_impls_in_crate(krate); + + for &impl_def in impls.for_self_ty(&self_ty.value) { + for &item in db.impl_data(impl_def).items.iter() { + if !is_valid_candidate(db, name, receiver_ty, item, self_ty) { + continue; + } + // we have to check whether the self type unifies with the type + // that the impl is for. If we have a receiver type, this + // already happens in `is_valid_candidate` above; if not, we + // check it here + if receiver_ty.is_none() && inherent_impl_substs(db, impl_def, self_ty).is_none() { + test_utils::mark::hit!(impl_self_type_match_without_receiver); + continue; + } + if callback(&self_ty.value, item) { + return true; + } + } + } + } + false +} + +/// Returns the self type for the index trait call. 
+pub fn resolve_indexing_op( + db: &dyn HirDatabase, + ty: &Canonical, + env: Arc, + krate: CrateId, + index_trait: TraitId, +) -> Option> { + let ty = InEnvironment { value: ty.clone(), environment: env.clone() }; + let deref_chain = autoderef_method_receiver(db, krate, ty); + for ty in deref_chain { + let goal = generic_implements_goal(db, env.clone(), index_trait, ty.clone()); + if db.trait_solve(krate, goal).is_some() { + return Some(ty); + } + } + None +} + +fn is_valid_candidate( + db: &dyn HirDatabase, + name: Option<&Name>, + receiver_ty: Option<&Canonical>, + item: AssocItemId, + self_ty: &Canonical, +) -> bool { + match item { + AssocItemId::FunctionId(m) => { + let data = db.function_data(m); + if let Some(name) = name { + if &data.name != name { + return false; + } + } + if let Some(receiver_ty) = receiver_ty { + if !data.has_self_param { + return false; + } + let transformed_receiver_ty = match transform_receiver_ty(db, m, self_ty) { + Some(ty) => ty, + None => return false, + }; + if transformed_receiver_ty != receiver_ty.value { + return false; + } + } + true + } + AssocItemId::ConstId(c) => { + let data = db.const_data(c); + name.map_or(true, |name| data.name.as_ref() == Some(name)) && receiver_ty.is_none() + } + _ => false, + } +} + +pub(crate) fn inherent_impl_substs( + db: &dyn HirDatabase, + impl_id: ImplId, + self_ty: &Canonical, +) -> Option { + // we create a var for each type parameter of the impl; we need to keep in + // mind here that `self_ty` might have vars of its own + let vars = Substs::build_for_def(db, impl_id) + .fill_with_bound_vars(DebruijnIndex::INNERMOST, self_ty.kinds.len()) + .build(); + let self_ty_with_vars = db.impl_self_ty(impl_id).subst(&vars); + let mut kinds = self_ty.kinds.to_vec(); + kinds.extend(iter::repeat(TyKind::General).take(vars.len())); + let tys = Canonical { kinds: kinds.into(), value: (self_ty_with_vars, self_ty.value.clone()) }; + let substs = super::infer::unify(&tys); + // We only want the substs for 
the vars we added, not the ones from self_ty. + // Also, if any of the vars we added are still in there, we replace them by + // Unknown. I think this can only really happen if self_ty contained + // Unknown, and in that case we want the result to contain Unknown in those + // places again. + substs.map(|s| fallback_bound_vars(s.suffix(vars.len()), self_ty.kinds.len())) +} + +/// This replaces any 'free' Bound vars in `s` (i.e. those with indices past +/// num_vars_to_keep) by `Ty::Unknown`. +fn fallback_bound_vars(s: Substs, num_vars_to_keep: usize) -> Substs { + s.fold_binders( + &mut |ty, binders| { + if let Ty::Bound(bound) = &ty { + if bound.index >= num_vars_to_keep && bound.debruijn >= binders { + Ty::Unknown + } else { + ty + } + } else { + ty + } + }, + DebruijnIndex::INNERMOST, + ) +} + +fn transform_receiver_ty( + db: &dyn HirDatabase, + function_id: FunctionId, + self_ty: &Canonical, +) -> Option { + let substs = match function_id.lookup(db.upcast()).container { + AssocContainerId::TraitId(_) => Substs::build_for_def(db, function_id) + .push(self_ty.value.clone()) + .fill_with_unknown() + .build(), + AssocContainerId::ImplId(impl_id) => inherent_impl_substs(db, impl_id, &self_ty)?, + AssocContainerId::ContainerId(_) => unreachable!(), + }; + let sig = db.callable_item_signature(function_id.into()); + Some(sig.value.params()[0].clone().subst_bound_vars(&substs)) +} + +pub fn implements_trait( + ty: &Canonical, + db: &dyn HirDatabase, + env: Arc, + krate: CrateId, + trait_: TraitId, +) -> bool { + let goal = generic_implements_goal(db, env, trait_, ty.clone()); + let solution = db.trait_solve(krate, goal); + + solution.is_some() +} + +/// This creates Substs for a trait with the given Self type and type variables +/// for all other parameters, to query Chalk with it. 
+fn generic_implements_goal( + db: &dyn HirDatabase, + env: Arc, + trait_: TraitId, + self_ty: Canonical, +) -> Canonical> { + let mut kinds = self_ty.kinds.to_vec(); + let substs = super::Substs::build_for_def(db, trait_) + .push(self_ty.value) + .fill_with_bound_vars(DebruijnIndex::INNERMOST, kinds.len()) + .build(); + kinds.extend(iter::repeat(TyKind::General).take(substs.len() - 1)); + let trait_ref = TraitRef { trait_, substs }; + let obligation = super::Obligation::Trait(trait_ref); + Canonical { kinds: kinds.into(), value: InEnvironment::new(env, obligation) } +} + +fn autoderef_method_receiver( + db: &dyn HirDatabase, + krate: CrateId, + ty: InEnvironment>, +) -> Vec> { + let mut deref_chain: Vec<_> = autoderef::autoderef(db, Some(krate), ty).collect(); + // As a last step, we can do array unsizing (that's the only unsizing that rustc does for method receivers!) + if let Some(Ty::Apply(ApplicationTy { ctor: TypeCtor::Array, parameters })) = + deref_chain.last().map(|ty| &ty.value) + { + let kinds = deref_chain.last().unwrap().kinds.clone(); + let unsized_ty = Ty::apply(TypeCtor::Slice, parameters.clone()); + deref_chain.push(Canonical { value: unsized_ty, kinds }) + } + deref_chain +} diff --git a/crates/ra_hir_ty/src/op.rs b/crates/hir_ty/src/op.rs similarity index 100% rename from crates/ra_hir_ty/src/op.rs rename to crates/hir_ty/src/op.rs diff --git a/crates/ra_hir_ty/src/primitive.rs b/crates/hir_ty/src/primitive.rs similarity index 100% rename from crates/ra_hir_ty/src/primitive.rs rename to crates/hir_ty/src/primitive.rs diff --git a/crates/hir_ty/src/test_db.rs b/crates/hir_ty/src/test_db.rs new file mode 100644 index 0000000000..15b8435e92 --- /dev/null +++ b/crates/hir_ty/src/test_db.rs @@ -0,0 +1,136 @@ +//! Database used for testing `hir`. 
+ +use std::{ + fmt, panic, + sync::{Arc, Mutex}, +}; + +use base_db::{salsa, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast}; +use hir_def::{db::DefDatabase, ModuleId}; +use hir_expand::db::AstDatabase; +use rustc_hash::{FxHashMap, FxHashSet}; +use syntax::TextRange; +use test_utils::extract_annotations; + +#[salsa::database( + base_db::SourceDatabaseExtStorage, + base_db::SourceDatabaseStorage, + hir_expand::db::AstDatabaseStorage, + hir_def::db::InternDatabaseStorage, + hir_def::db::DefDatabaseStorage, + crate::db::HirDatabaseStorage +)] +#[derive(Default)] +pub struct TestDB { + storage: salsa::Storage, + events: Mutex>>, +} +impl fmt::Debug for TestDB { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("TestDB").finish() + } +} + +impl Upcast for TestDB { + fn upcast(&self) -> &(dyn AstDatabase + 'static) { + &*self + } +} + +impl Upcast for TestDB { + fn upcast(&self) -> &(dyn DefDatabase + 'static) { + &*self + } +} + +impl salsa::Database for TestDB { + fn salsa_event(&self, event: salsa::Event) { + let mut events = self.events.lock().unwrap(); + if let Some(events) = &mut *events { + events.push(event); + } + } +} + +impl salsa::ParallelDatabase for TestDB { + fn snapshot(&self) -> salsa::Snapshot { + salsa::Snapshot::new(TestDB { + storage: self.storage.snapshot(), + events: Default::default(), + }) + } +} + +impl panic::RefUnwindSafe for TestDB {} + +impl FileLoader for TestDB { + fn file_text(&self, file_id: FileId) -> Arc { + FileLoaderDelegate(self).file_text(file_id) + } + fn resolve_path(&self, anchor: FileId, path: &str) -> Option { + FileLoaderDelegate(self).resolve_path(anchor, path) + } + fn relevant_crates(&self, file_id: FileId) -> Arc> { + FileLoaderDelegate(self).relevant_crates(file_id) + } +} + +impl TestDB { + pub(crate) fn module_for_file(&self, file_id: FileId) -> ModuleId { + for &krate in self.relevant_crates(file_id).iter() { + let crate_def_map = self.crate_def_map(krate); + for 
(local_id, data) in crate_def_map.modules.iter() { + if data.origin.file_id() == Some(file_id) { + return ModuleId { krate, local_id }; + } + } + } + panic!("Can't find module for file") + } + + pub(crate) fn extract_annotations(&self) -> FxHashMap> { + let mut files = Vec::new(); + let crate_graph = self.crate_graph(); + for krate in crate_graph.iter() { + let crate_def_map = self.crate_def_map(krate); + for (module_id, _) in crate_def_map.modules.iter() { + let file_id = crate_def_map[module_id].origin.file_id(); + files.extend(file_id) + } + } + files + .into_iter() + .filter_map(|file_id| { + let text = self.file_text(file_id); + let annotations = extract_annotations(&text); + if annotations.is_empty() { + return None; + } + Some((file_id, annotations)) + }) + .collect() + } +} + +impl TestDB { + pub fn log(&self, f: impl FnOnce()) -> Vec { + *self.events.lock().unwrap() = Some(Vec::new()); + f(); + self.events.lock().unwrap().take().unwrap() + } + + pub fn log_executed(&self, f: impl FnOnce()) -> Vec { + let events = self.log(f); + events + .into_iter() + .filter_map(|e| match e.kind { + // This pretty horrible, but `Debug` is the only way to inspect + // QueryDescriptor at the moment. 
+ salsa::EventKind::WillExecute { database_key } => { + Some(format!("{:?}", database_key.debug(self))) + } + _ => None, + }) + .collect() + } +} diff --git a/crates/hir_ty/src/tests.rs b/crates/hir_ty/src/tests.rs new file mode 100644 index 0000000000..c953925ecb --- /dev/null +++ b/crates/hir_ty/src/tests.rs @@ -0,0 +1,359 @@ +mod never_type; +mod coercion; +mod regression; +mod simple; +mod patterns; +mod traits; +mod method_resolution; +mod macros; +mod display_source_code; + +use std::sync::Arc; + +use base_db::{fixture::WithFixture, FileRange, SourceDatabase, SourceDatabaseExt}; +use expect::Expect; +use hir_def::{ + body::{BodySourceMap, SyntheticSyntax}, + child_by_source::ChildBySource, + db::DefDatabase, + item_scope::ItemScope, + keys, + nameres::CrateDefMap, + AssocItemId, DefWithBodyId, LocalModuleId, Lookup, ModuleDefId, +}; +use hir_expand::{db::AstDatabase, InFile}; +use stdx::format_to; +use syntax::{ + algo, + ast::{self, AstNode}, + SyntaxNode, +}; + +use crate::{ + db::HirDatabase, display::HirDisplay, infer::TypeMismatch, test_db::TestDB, InferenceResult, Ty, +}; + +// These tests compare the inference results for all expressions in a file +// against snapshots of the expected results using expect. Use +// `env UPDATE_EXPECT=1 cargo test -p hir_ty` to update the snapshots. 
+ +fn setup_tracing() -> tracing::subscriber::DefaultGuard { + use tracing_subscriber::{layer::SubscriberExt, EnvFilter, Registry}; + use tracing_tree::HierarchicalLayer; + let filter = EnvFilter::from_env("CHALK_DEBUG"); + let layer = HierarchicalLayer::default() + .with_indent_lines(true) + .with_ansi(false) + .with_indent_amount(2) + .with_writer(std::io::stderr); + let subscriber = Registry::default().with(filter).with(layer); + tracing::subscriber::set_default(subscriber) +} + +fn check_types(ra_fixture: &str) { + check_types_impl(ra_fixture, false) +} + +fn check_types_source_code(ra_fixture: &str) { + check_types_impl(ra_fixture, true) +} + +fn check_types_impl(ra_fixture: &str, display_source: bool) { + let _tracing = setup_tracing(); + let db = TestDB::with_files(ra_fixture); + let mut checked_one = false; + for (file_id, annotations) in db.extract_annotations() { + for (range, expected) in annotations { + let ty = type_at_range(&db, FileRange { file_id, range }); + let actual = if display_source { + let module = db.module_for_file(file_id); + ty.display_source_code(&db, module).unwrap() + } else { + ty.display(&db).to_string() + }; + assert_eq!(expected, actual); + checked_one = true; + } + } + assert!(checked_one, "no `//^` annotations found"); +} + +fn type_at_range(db: &TestDB, pos: FileRange) -> Ty { + let file = db.parse(pos.file_id).ok().unwrap(); + let expr = algo::find_node_at_range::(file.syntax(), pos.range).unwrap(); + let fn_def = expr.syntax().ancestors().find_map(ast::Fn::cast).unwrap(); + let module = db.module_for_file(pos.file_id); + let func = *module.child_by_source(db)[keys::FUNCTION] + .get(&InFile::new(pos.file_id.into(), fn_def)) + .unwrap(); + + let (_body, source_map) = db.body_with_source_map(func.into()); + if let Some(expr_id) = source_map.node_expr(InFile::new(pos.file_id.into(), &expr)) { + let infer = db.infer(func.into()); + return infer[expr_id].clone(); + } + panic!("Can't find expression") +} + +fn infer(ra_fixture: 
&str) -> String { + infer_with_mismatches(ra_fixture, false) +} + +fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String { + let _tracing = setup_tracing(); + let (db, file_id) = TestDB::with_single_file(content); + + let mut buf = String::new(); + + let mut infer_def = |inference_result: Arc, + body_source_map: Arc| { + let mut types: Vec<(InFile, &Ty)> = Vec::new(); + let mut mismatches: Vec<(InFile, &TypeMismatch)> = Vec::new(); + + for (pat, ty) in inference_result.type_of_pat.iter() { + let syntax_ptr = match body_source_map.pat_syntax(pat) { + Ok(sp) => { + let root = db.parse_or_expand(sp.file_id).unwrap(); + sp.map(|ptr| { + ptr.either( + |it| it.to_node(&root).syntax().clone(), + |it| it.to_node(&root).syntax().clone(), + ) + }) + } + Err(SyntheticSyntax) => continue, + }; + types.push((syntax_ptr, ty)); + } + + for (expr, ty) in inference_result.type_of_expr.iter() { + let node = match body_source_map.expr_syntax(expr) { + Ok(sp) => { + let root = db.parse_or_expand(sp.file_id).unwrap(); + sp.map(|ptr| ptr.to_node(&root).syntax().clone()) + } + Err(SyntheticSyntax) => continue, + }; + types.push((node.clone(), ty)); + if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr) { + mismatches.push((node, mismatch)); + } + } + + // sort ranges for consistency + types.sort_by_key(|(node, _)| { + let range = node.value.text_range(); + (range.start(), range.end()) + }); + for (node, ty) in &types { + let (range, text) = if let Some(self_param) = ast::SelfParam::cast(node.value.clone()) { + (self_param.self_token().unwrap().text_range(), "self".to_string()) + } else { + (node.value.text_range(), node.value.text().to_string().replace("\n", " ")) + }; + let macro_prefix = if node.file_id != file_id.into() { "!" 
} else { "" }; + format_to!( + buf, + "{}{:?} '{}': {}\n", + macro_prefix, + range, + ellipsize(text, 15), + ty.display(&db) + ); + } + if include_mismatches { + mismatches.sort_by_key(|(node, _)| { + let range = node.value.text_range(); + (range.start(), range.end()) + }); + for (src_ptr, mismatch) in &mismatches { + let range = src_ptr.value.text_range(); + let macro_prefix = if src_ptr.file_id != file_id.into() { "!" } else { "" }; + format_to!( + buf, + "{}{:?}: expected {}, got {}\n", + macro_prefix, + range, + mismatch.expected.display(&db), + mismatch.actual.display(&db), + ); + } + } + }; + + let module = db.module_for_file(file_id); + let crate_def_map = db.crate_def_map(module.krate); + + let mut defs: Vec = Vec::new(); + visit_module(&db, &crate_def_map, module.local_id, &mut |it| defs.push(it)); + defs.sort_by_key(|def| match def { + DefWithBodyId::FunctionId(it) => { + let loc = it.lookup(&db); + let tree = db.item_tree(loc.id.file_id); + tree.source(&db, loc.id).syntax().text_range().start() + } + DefWithBodyId::ConstId(it) => { + let loc = it.lookup(&db); + let tree = db.item_tree(loc.id.file_id); + tree.source(&db, loc.id).syntax().text_range().start() + } + DefWithBodyId::StaticId(it) => { + let loc = it.lookup(&db); + let tree = db.item_tree(loc.id.file_id); + tree.source(&db, loc.id).syntax().text_range().start() + } + }); + for def in defs { + let (_body, source_map) = db.body_with_source_map(def); + let infer = db.infer(def); + infer_def(infer, source_map); + } + + buf.truncate(buf.trim_end().len()); + buf +} + +fn visit_module( + db: &TestDB, + crate_def_map: &CrateDefMap, + module_id: LocalModuleId, + cb: &mut dyn FnMut(DefWithBodyId), +) { + visit_scope(db, crate_def_map, &crate_def_map[module_id].scope, cb); + for impl_id in crate_def_map[module_id].scope.impls() { + let impl_data = db.impl_data(impl_id); + for &item in impl_data.items.iter() { + match item { + AssocItemId::FunctionId(it) => { + let def = it.into(); + cb(def); + let body = 
db.body(def); + visit_scope(db, crate_def_map, &body.item_scope, cb); + } + AssocItemId::ConstId(it) => { + let def = it.into(); + cb(def); + let body = db.body(def); + visit_scope(db, crate_def_map, &body.item_scope, cb); + } + AssocItemId::TypeAliasId(_) => (), + } + } + } + + fn visit_scope( + db: &TestDB, + crate_def_map: &CrateDefMap, + scope: &ItemScope, + cb: &mut dyn FnMut(DefWithBodyId), + ) { + for decl in scope.declarations() { + match decl { + ModuleDefId::FunctionId(it) => { + let def = it.into(); + cb(def); + let body = db.body(def); + visit_scope(db, crate_def_map, &body.item_scope, cb); + } + ModuleDefId::ConstId(it) => { + let def = it.into(); + cb(def); + let body = db.body(def); + visit_scope(db, crate_def_map, &body.item_scope, cb); + } + ModuleDefId::StaticId(it) => { + let def = it.into(); + cb(def); + let body = db.body(def); + visit_scope(db, crate_def_map, &body.item_scope, cb); + } + ModuleDefId::TraitId(it) => { + let trait_data = db.trait_data(it); + for &(_, item) in trait_data.items.iter() { + match item { + AssocItemId::FunctionId(it) => cb(it.into()), + AssocItemId::ConstId(it) => cb(it.into()), + AssocItemId::TypeAliasId(_) => (), + } + } + } + ModuleDefId::ModuleId(it) => visit_module(db, crate_def_map, it.local_id, cb), + _ => (), + } + } + } +} + +fn ellipsize(mut text: String, max_len: usize) -> String { + if text.len() <= max_len { + return text; + } + let ellipsis = "..."; + let e_len = ellipsis.len(); + let mut prefix_len = (max_len - e_len) / 2; + while !text.is_char_boundary(prefix_len) { + prefix_len += 1; + } + let mut suffix_len = max_len - e_len - prefix_len; + while !text.is_char_boundary(text.len() - suffix_len) { + suffix_len += 1; + } + text.replace_range(prefix_len..text.len() - suffix_len, ellipsis); + text +} + +#[test] +fn typing_whitespace_inside_a_function_should_not_invalidate_types() { + let (mut db, pos) = TestDB::with_position( + " + //- /lib.rs + fn foo() -> i32 { + <|>1 + 1 + } + ", + ); + { + let events 
= db.log_executed(|| { + let module = db.module_for_file(pos.file_id); + let crate_def_map = db.crate_def_map(module.krate); + visit_module(&db, &crate_def_map, module.local_id, &mut |def| { + db.infer(def); + }); + }); + assert!(format!("{:?}", events).contains("infer")) + } + + let new_text = " + fn foo() -> i32 { + 1 + + + 1 + } + " + .to_string(); + + db.set_file_text(pos.file_id, Arc::new(new_text)); + + { + let events = db.log_executed(|| { + let module = db.module_for_file(pos.file_id); + let crate_def_map = db.crate_def_map(module.krate); + visit_module(&db, &crate_def_map, module.local_id, &mut |def| { + db.infer(def); + }); + }); + assert!(!format!("{:?}", events).contains("infer"), "{:#?}", events) + } +} + +fn check_infer(ra_fixture: &str, expect: Expect) { + let mut actual = infer(ra_fixture); + actual.push('\n'); + expect.assert_eq(&actual); +} + +fn check_infer_with_mismatches(ra_fixture: &str, expect: Expect) { + let mut actual = infer_with_mismatches(ra_fixture, true); + actual.push('\n'); + expect.assert_eq(&actual); +} diff --git a/crates/ra_hir_ty/src/tests/coercion.rs b/crates/hir_ty/src/tests/coercion.rs similarity index 100% rename from crates/ra_hir_ty/src/tests/coercion.rs rename to crates/hir_ty/src/tests/coercion.rs diff --git a/crates/ra_hir_ty/src/tests/display_source_code.rs b/crates/hir_ty/src/tests/display_source_code.rs similarity index 100% rename from crates/ra_hir_ty/src/tests/display_source_code.rs rename to crates/hir_ty/src/tests/display_source_code.rs diff --git a/crates/hir_ty/src/tests/macros.rs b/crates/hir_ty/src/tests/macros.rs new file mode 100644 index 0000000000..d887c7a799 --- /dev/null +++ b/crates/hir_ty/src/tests/macros.rs @@ -0,0 +1,787 @@ +use std::fs; + +use expect::expect; +use test_utils::project_dir; + +use super::{check_infer, check_types}; + +#[test] +fn cfg_impl_def() { + check_types( + r#" +//- /main.rs crate:main deps:foo cfg:test +use foo::S as T; +struct S; + +#[cfg(test)] +impl S { + fn foo1(&self) 
-> i32 { 0 } +} + +#[cfg(not(test))] +impl S { + fn foo2(&self) -> i32 { 0 } +} + +fn test() { + let t = (S.foo1(), S.foo2(), T.foo3(), T.foo4()); + t; +} //^ (i32, {unknown}, i32, {unknown}) + +//- /foo.rs crate:foo +struct S; + +#[cfg(not(test))] +impl S { + fn foo3(&self) -> i32 { 0 } +} + +#[cfg(test)] +impl S { + fn foo4(&self) -> i32 { 0 } +} +"#, + ); +} + +#[test] +fn infer_macros_expanded() { + check_infer( + r#" + struct Foo(Vec); + + macro_rules! foo { + ($($item:expr),*) => { + { + Foo(vec![$($item,)*]) + } + }; + } + + fn main() { + let x = foo!(1,2); + } + "#, + expect![[r#" + !0..17 '{Foo(v...,2,])}': Foo + !1..4 'Foo': Foo({unknown}) -> Foo + !1..16 'Foo(vec![1,2,])': Foo + !5..15 'vec![1,2,]': {unknown} + 155..181 '{ ...,2); }': () + 165..166 'x': Foo + "#]], + ); +} + +#[test] +fn infer_legacy_textual_scoped_macros_expanded() { + check_infer( + r#" + struct Foo(Vec); + + #[macro_use] + mod m { + macro_rules! foo { + ($($item:expr),*) => { + { + Foo(vec![$($item,)*]) + } + }; + } + } + + fn main() { + let x = foo!(1,2); + let y = crate::foo!(1,2); + } + "#, + expect![[r#" + !0..17 '{Foo(v...,2,])}': Foo + !1..4 'Foo': Foo({unknown}) -> Foo + !1..16 'Foo(vec![1,2,])': Foo + !5..15 'vec![1,2,]': {unknown} + 194..250 '{ ...,2); }': () + 204..205 'x': Foo + 227..228 'y': {unknown} + 231..247 'crate:...!(1,2)': {unknown} + "#]], + ); +} + +#[test] +fn infer_path_qualified_macros_expanded() { + check_infer( + r#" + #[macro_export] + macro_rules! foo { + () => { 42i32 } + } + + mod m { + pub use super::foo as bar; + } + + fn main() { + let x = crate::foo!(); + let y = m::bar!(); + } + "#, + expect![[r#" + !0..5 '42i32': i32 + !0..5 '42i32': i32 + 110..163 '{ ...!(); }': () + 120..121 'x': i32 + 147..148 'y': i32 + "#]], + ); +} + +#[test] +fn expr_macro_expanded_in_various_places() { + check_infer( + r#" + macro_rules! 
spam { + () => (1isize); + } + + fn spam() { + spam!(); + (spam!()); + spam!().spam(spam!()); + for _ in spam!() {} + || spam!(); + while spam!() {} + break spam!(); + return spam!(); + match spam!() { + _ if spam!() => spam!(), + } + spam!()(spam!()); + Spam { spam: spam!() }; + spam!()[spam!()]; + await spam!(); + spam!() as usize; + &spam!(); + -spam!(); + spam!()..spam!(); + spam!() + spam!(); + } + "#, + expect![[r#" + !0..6 '1isize': isize + !0..6 '1isize': isize + !0..6 '1isize': isize + !0..6 '1isize': isize + !0..6 '1isize': isize + !0..6 '1isize': isize + !0..6 '1isize': isize + !0..6 '1isize': isize + !0..6 '1isize': isize + !0..6 '1isize': isize + !0..6 '1isize': isize + !0..6 '1isize': isize + !0..6 '1isize': isize + !0..6 '1isize': isize + !0..6 '1isize': isize + !0..6 '1isize': isize + !0..6 '1isize': isize + !0..6 '1isize': isize + !0..6 '1isize': isize + !0..6 '1isize': isize + !0..6 '1isize': isize + !0..6 '1isize': isize + !0..6 '1isize': isize + !0..6 '1isize': isize + !0..6 '1isize': isize + 53..456 '{ ...!(); }': () + 87..108 'spam!(...am!())': {unknown} + 114..133 'for _ ...!() {}': () + 118..119 '_': {unknown} + 131..133 '{}': () + 138..148 '|| spam!()': || -> isize + 154..170 'while ...!() {}': () + 168..170 '{}': () + 175..188 'break spam!()': ! + 194..208 'return spam!()': ! + 214..268 'match ... }': isize + 238..239 '_': isize + 273..289 'spam!(...am!())': {unknown} + 295..317 'Spam {...m!() }': {unknown} + 323..339 'spam!(...am!()]': {unknown} + 364..380 'spam!(... usize': usize + 386..394 '&spam!()': &isize + 400..408 '-spam!()': isize + 414..430 'spam!(...pam!()': {unknown} + 436..453 'spam!(...pam!()': isize + "#]], + ); +} + +#[test] +fn infer_type_value_macro_having_same_name() { + check_infer( + r#" + #[macro_export] + macro_rules! 
foo { + () => { + mod foo { + pub use super::foo; + } + }; + ($x:tt) => { + $x + }; + } + + foo!(); + + fn foo() { + let foo = foo::foo!(42i32); + } + "#, + expect![[r#" + !0..5 '42i32': i32 + 170..205 '{ ...32); }': () + 180..183 'foo': i32 + "#]], + ); +} + +#[test] +fn processes_impls_generated_by_macros() { + check_types( + r#" +macro_rules! m { + ($ident:ident) => (impl Trait for $ident {}) +} +trait Trait { fn foo(self) -> u128 {} } +struct S; +m!(S); +fn test() { S.foo(); } + //^ u128 +"#, + ); +} + +#[test] +fn infer_assoc_items_generated_by_macros() { + check_types( + r#" +macro_rules! m { + () => (fn foo(&self) -> u128 {0}) +} +struct S; +impl S { + m!(); +} + +fn test() { S.foo(); } + //^ u128 +"#, + ); +} + +#[test] +fn infer_assoc_items_generated_by_macros_chain() { + check_types( + r#" +macro_rules! m_inner { + () => {fn foo(&self) -> u128 {0}} +} +macro_rules! m { + () => {m_inner!();} +} + +struct S; +impl S { + m!(); +} + +fn test() { S.foo(); } + //^ u128 +"#, + ); +} + +#[test] +fn infer_macro_with_dollar_crate_is_correct_in_expr() { + check_types( + r#" +//- /main.rs crate:main deps:foo +fn test() { + let x = (foo::foo!(1), foo::foo!(2)); + x; +} //^ (i32, usize) + +//- /lib.rs crate:foo +#[macro_export] +macro_rules! foo { + (1) => { $crate::bar!() }; + (2) => { 1 + $crate::baz() }; +} + +#[macro_export] +macro_rules! bar { + () => { 42 } +} + +pub fn baz() -> usize { 31usize } +"#, + ); +} + +#[test] +fn infer_macro_with_dollar_crate_is_correct_in_trait_associate_type() { + check_types( + r#" +//- /main.rs crate:main deps:foo +use foo::Trait; + +fn test() { + let msg = foo::Message(foo::MessageRef); + let r = msg.deref(); + r; + //^ &MessageRef +} + +//- /lib.rs crate:foo +pub struct MessageRef; +pub struct Message(MessageRef); + +pub trait Trait { + type Target; + fn deref(&self) -> &Self::Target; +} + +#[macro_export] +macro_rules! 
expand { + () => { + impl Trait for Message { + type Target = $crate::MessageRef; + fn deref(&self) -> &Self::Target { + &self.0 + } + } + } +} + +expand!(); +"#, + ); +} + +#[test] +fn infer_type_value_non_legacy_macro_use_as() { + check_infer( + r#" + mod m { + macro_rules! _foo { + ($x:ident) => { type $x = u64; } + } + pub(crate) use _foo as foo; + } + + m::foo!(foo); + use foo as bar; + fn f() -> bar { 0 } + fn main() { + let _a = f(); + } + "#, + expect![[r#" + 158..163 '{ 0 }': u64 + 160..161 '0': u64 + 174..196 '{ ...f(); }': () + 184..186 '_a': u64 + 190..191 'f': fn f() -> u64 + 190..193 'f()': u64 + "#]], + ); +} + +#[test] +fn infer_local_macro() { + check_infer( + r#" + fn main() { + macro_rules! foo { + () => { 1usize } + } + let _a = foo!(); + } + "#, + expect![[r#" + !0..6 '1usize': usize + 10..89 '{ ...!(); }': () + 16..65 'macro_... }': {unknown} + 74..76 '_a': usize + "#]], + ); +} + +#[test] +fn infer_local_inner_macros() { + check_types( + r#" +//- /main.rs crate:main deps:foo +fn test() { + let x = foo::foo!(1); + x; +} //^ i32 + +//- /lib.rs crate:foo +#[macro_export(local_inner_macros)] +macro_rules! foo { + (1) => { bar!() }; +} + +#[macro_export] +macro_rules! bar { + () => { 42 } +} + +"#, + ); +} + +#[test] +fn infer_builtin_macros_line() { + check_infer( + r#" + #[rustc_builtin_macro] + macro_rules! line {() => {}} + + fn main() { + let x = line!(); + } + "#, + expect![[r#" + !0..1 '0': i32 + 63..87 '{ ...!(); }': () + 73..74 'x': i32 + "#]], + ); +} + +#[test] +fn infer_builtin_macros_file() { + check_infer( + r#" + #[rustc_builtin_macro] + macro_rules! file {() => {}} + + fn main() { + let x = file!(); + } + "#, + expect![[r#" + !0..2 '""': &str + 63..87 '{ ...!(); }': () + 73..74 'x': &str + "#]], + ); +} + +#[test] +fn infer_builtin_macros_column() { + check_infer( + r#" + #[rustc_builtin_macro] + macro_rules! 
column {() => {}} + + fn main() { + let x = column!(); + } + "#, + expect![[r#" + !0..1 '0': i32 + 65..91 '{ ...!(); }': () + 75..76 'x': i32 + "#]], + ); +} + +#[test] +fn infer_builtin_macros_concat() { + check_infer( + r#" + #[rustc_builtin_macro] + macro_rules! concat {() => {}} + + fn main() { + let x = concat!("hello", concat!("world", "!")); + } + "#, + expect![[r#" + !0..13 '"helloworld!"': &str + 65..121 '{ ...")); }': () + 75..76 'x': &str + "#]], + ); +} + +#[test] +fn infer_builtin_macros_include() { + check_types( + r#" +//- /main.rs +#[rustc_builtin_macro] +macro_rules! include {() => {}} + +include!("foo.rs"); + +fn main() { + bar(); +} //^ u32 + +//- /foo.rs +fn bar() -> u32 {0} +"#, + ); +} + +#[test] +#[ignore] +fn include_accidentally_quadratic() { + let file = project_dir().join("crates/syntax/test_data/accidentally_quadratic"); + let big_file = fs::read_to_string(file).unwrap(); + let big_file = vec![big_file; 10].join("\n"); + + let fixture = r#" +//- /main.rs +#[rustc_builtin_macro] +macro_rules! include {() => {}} + +include!("foo.rs"); + +fn main() { + RegisterBlock { }; + //^ RegisterBlock +} + "#; + let fixture = format!("{}\n//- /foo.rs\n{}", fixture, big_file); + check_types(&fixture); +} + +#[test] +fn infer_builtin_macros_include_concat() { + check_types( + r#" +//- /main.rs +#[rustc_builtin_macro] +macro_rules! include {() => {}} + +#[rustc_builtin_macro] +macro_rules! concat {() => {}} + +include!(concat!("f", "oo.rs")); + +fn main() { + bar(); +} //^ u32 + +//- /foo.rs +fn bar() -> u32 {0} +"#, + ); +} + +#[test] +fn infer_builtin_macros_include_concat_with_bad_env_should_failed() { + check_types( + r#" +//- /main.rs +#[rustc_builtin_macro] +macro_rules! include {() => {}} + +#[rustc_builtin_macro] +macro_rules! concat {() => {}} + +#[rustc_builtin_macro] +macro_rules! 
env {() => {}} + +include!(concat!(env!("OUT_DIR"), "/foo.rs")); + +fn main() { + bar(); +} //^ {unknown} + +//- /foo.rs +fn bar() -> u32 {0} +"#, + ); +} + +#[test] +fn infer_builtin_macros_include_itself_should_failed() { + check_types( + r#" +#[rustc_builtin_macro] +macro_rules! include {() => {}} + +include!("main.rs"); + +fn main() { + 0 +} //^ i32 +"#, + ); +} + +#[test] +fn infer_builtin_macros_concat_with_lazy() { + check_infer( + r#" + macro_rules! hello {() => {"hello"}} + + #[rustc_builtin_macro] + macro_rules! concat {() => {}} + + fn main() { + let x = concat!(hello!(), concat!("world", "!")); + } + "#, + expect![[r#" + !0..13 '"helloworld!"': &str + 103..160 '{ ...")); }': () + 113..114 'x': &str + "#]], + ); +} + +#[test] +fn infer_builtin_macros_env() { + check_infer( + r#" + //- /main.rs env:foo=bar + #[rustc_builtin_macro] + macro_rules! env {() => {}} + + fn main() { + let x = env!("foo"); + } + "#, + expect![[r#" + !0..22 '"__RA_...TED__"': &str + 62..90 '{ ...o"); }': () + 72..73 'x': &str + "#]], + ); +} + +#[test] +fn infer_derive_clone_simple() { + check_types( + r#" +//- /main.rs crate:main deps:core +#[derive(Clone)] +struct S; +fn test() { + S.clone(); +} //^ S + +//- /lib.rs crate:core +#[prelude_import] +use clone::*; +mod clone { + trait Clone { + fn clone(&self) -> Self; + } +} +"#, + ); +} + +#[test] +fn infer_derive_clone_in_core() { + check_types( + r#" +//- /lib.rs crate:core +#[prelude_import] +use clone::*; +mod clone { + trait Clone { + fn clone(&self) -> Self; + } +} +#[derive(Clone)] +pub struct S; + +//- /main.rs crate:main deps:core +use core::S; +fn test() { + S.clone(); +} //^ S +"#, + ); +} + +#[test] +fn infer_derive_clone_with_params() { + check_types( + r#" +//- /main.rs crate:main deps:core +#[derive(Clone)] +struct S; +#[derive(Clone)] +struct Wrapper(T); +struct NonClone; +fn test() { + (Wrapper(S).clone(), Wrapper(NonClone).clone()); + //^ (Wrapper, {unknown}) +} + +//- /lib.rs crate:core +#[prelude_import] +use 
clone::*; +mod clone { + trait Clone { + fn clone(&self) -> Self; + } +} +"#, + ); +} + +#[test] +fn infer_custom_derive_simple() { + // FIXME: this test current now do nothing + check_types( + r#" +//- /main.rs crate:main +use foo::Foo; + +#[derive(Foo)] +struct S{} + +fn test() { + S{}; +} //^ S +"#, + ); +} + +#[test] +fn macro_in_arm() { + check_infer( + r#" + macro_rules! unit { + () => { () }; + } + + fn main() { + let x = match () { + unit!() => 92u32, + }; + } + "#, + expect![[r#" + 51..110 '{ ... }; }': () + 61..62 'x': u32 + 65..107 'match ... }': u32 + 71..73 '()': () + 84..91 'unit!()': () + 95..100 '92u32': u32 + "#]], + ); +} diff --git a/crates/ra_hir_ty/src/tests/method_resolution.rs b/crates/hir_ty/src/tests/method_resolution.rs similarity index 100% rename from crates/ra_hir_ty/src/tests/method_resolution.rs rename to crates/hir_ty/src/tests/method_resolution.rs diff --git a/crates/ra_hir_ty/src/tests/never_type.rs b/crates/hir_ty/src/tests/never_type.rs similarity index 100% rename from crates/ra_hir_ty/src/tests/never_type.rs rename to crates/hir_ty/src/tests/never_type.rs diff --git a/crates/ra_hir_ty/src/tests/patterns.rs b/crates/hir_ty/src/tests/patterns.rs similarity index 100% rename from crates/ra_hir_ty/src/tests/patterns.rs rename to crates/hir_ty/src/tests/patterns.rs diff --git a/crates/ra_hir_ty/src/tests/regression.rs b/crates/hir_ty/src/tests/regression.rs similarity index 100% rename from crates/ra_hir_ty/src/tests/regression.rs rename to crates/hir_ty/src/tests/regression.rs diff --git a/crates/hir_ty/src/tests/simple.rs b/crates/hir_ty/src/tests/simple.rs new file mode 100644 index 0000000000..59eb59d5fa --- /dev/null +++ b/crates/hir_ty/src/tests/simple.rs @@ -0,0 +1,2218 @@ +use expect::expect; + +use super::{check_infer, check_types}; + +#[test] +fn infer_box() { + check_types( + r#" +//- /main.rs crate:main deps:std +fn test() { + let x = box 1; + let t = (x, box x, box &1, box [1]); + t; +} //^ (Box, Box>, Box<&i32>, 
Box<[i32; _]>) + +//- /std.rs crate:std +#[prelude_import] use prelude::*; +mod prelude {} + +mod boxed { + #[lang = "owned_box"] + pub struct Box { + inner: *mut T, + } +} +"#, + ); +} + +#[test] +fn infer_adt_self() { + check_types( + r#" +enum Nat { Succ(Self), Demo(Nat), Zero } + +fn test() { + let foo: Nat = Nat::Zero; + if let Nat::Succ(x) = foo { + x + } //^ Nat +} +"#, + ); +} + +#[test] +fn self_in_struct_lit() { + check_infer( + r#" + //- /main.rs + struct S { x: T } + + impl S { + fn foo() { + Self { x: 1 }; + } + } + "#, + expect![[r#" + 49..79 '{ ... }': () + 59..72 'Self { x: 1 }': S + 69..70 '1': u32 + "#]], + ); +} + +#[test] +fn type_alias_in_struct_lit() { + check_infer( + r#" + //- /main.rs + struct S { x: T } + + type SS = S; + + fn foo() { + SS { x: 1 }; + } + "#, + expect![[r#" + 50..70 '{ ...1 }; }': () + 56..67 'SS { x: 1 }': S + 64..65 '1': u32 + "#]], + ); +} + +#[test] +fn infer_ranges() { + check_types( + r#" +//- /main.rs crate:main deps:core +fn test() { + let a = ..; + let b = 1..; + let c = ..2u32; + let d = 1..2usize; + let e = ..=10; + let f = 'a'..='z'; + + let t = (a, b, c, d, e, f); + t; +} //^ (RangeFull, RangeFrom, RangeTo, Range, RangeToInclusive, RangeInclusive) + +//- /core.rs crate:core +#[prelude_import] use prelude::*; +mod prelude {} + +pub mod ops { + pub struct Range { + pub start: Idx, + pub end: Idx, + } + pub struct RangeFrom { + pub start: Idx, + } + struct RangeFull; + pub struct RangeInclusive { + start: Idx, + end: Idx, + is_empty: u8, + } + pub struct RangeTo { + pub end: Idx, + } + pub struct RangeToInclusive { + pub end: Idx, + } +} +"#, + ); +} + +#[test] +fn infer_while_let() { + check_types( + r#" +enum Option { Some(T), None } + +fn test() { + let foo: Option = None; + while let Option::Some(x) = foo { + x + } //^ f32 +} +"#, + ); +} + +#[test] +fn infer_basics() { + check_infer( + r#" + fn test(a: u32, b: isize, c: !, d: &str) { + a; + b; + c; + d; + 1usize; + 1isize; + "test"; + 1.0f32; + }"#, + 
expect![[r#" + 8..9 'a': u32 + 16..17 'b': isize + 26..27 'c': ! + 32..33 'd': &str + 41..120 '{ ...f32; }': () + 47..48 'a': u32 + 54..55 'b': isize + 61..62 'c': ! + 68..69 'd': &str + 75..81 '1usize': usize + 87..93 '1isize': isize + 99..105 '"test"': &str + 111..117 '1.0f32': f32 + "#]], + ); +} + +#[test] +fn infer_let() { + check_infer( + r#" + fn test() { + let a = 1isize; + let b: usize = 1; + let c = b; + let d: u32; + let e; + let f: i32 = e; + } + "#, + expect![[r#" + 10..117 '{ ...= e; }': () + 20..21 'a': isize + 24..30 '1isize': isize + 40..41 'b': usize + 51..52 '1': usize + 62..63 'c': usize + 66..67 'b': usize + 77..78 'd': u32 + 93..94 'e': i32 + 104..105 'f': i32 + 113..114 'e': i32 + "#]], + ); +} + +#[test] +fn infer_paths() { + check_infer( + r#" + fn a() -> u32 { 1 } + + mod b { + fn c() -> u32 { 1 } + } + + fn test() { + a(); + b::c(); + } + "#, + expect![[r#" + 14..19 '{ 1 }': u32 + 16..17 '1': u32 + 47..52 '{ 1 }': u32 + 49..50 '1': u32 + 66..90 '{ ...c(); }': () + 72..73 'a': fn a() -> u32 + 72..75 'a()': u32 + 81..85 'b::c': fn c() -> u32 + 81..87 'b::c()': u32 + "#]], + ); +} + +#[test] +fn infer_path_type() { + check_infer( + r#" + struct S; + + impl S { + fn foo() -> i32 { 1 } + } + + fn test() { + S::foo(); + ::foo(); + } + "#, + expect![[r#" + 40..45 '{ 1 }': i32 + 42..43 '1': i32 + 59..92 '{ ...o(); }': () + 65..71 'S::foo': fn foo() -> i32 + 65..73 'S::foo()': i32 + 79..87 '::foo': fn foo() -> i32 + 79..89 '::foo()': i32 + "#]], + ); +} + +#[test] +fn infer_struct() { + check_infer( + r#" + struct A { + b: B, + c: C, + } + struct B; + struct C(usize); + + fn test() { + let c = C(1); + B; + let a: A = A { b: B, c: C(1) }; + a.b; + a.c; + } + "#, + expect![[r#" + 71..153 '{ ...a.c; }': () + 81..82 'c': C + 85..86 'C': C(usize) -> C + 85..89 'C(1)': C + 87..88 '1': usize + 95..96 'B': B + 106..107 'a': A + 113..132 'A { b:...C(1) }': A + 120..121 'B': B + 126..127 'C': C(usize) -> C + 126..130 'C(1)': C + 128..129 '1': usize + 
138..139 'a': A + 138..141 'a.b': B + 147..148 'a': A + 147..150 'a.c': C + "#]], + ); +} + +#[test] +fn infer_enum() { + check_infer( + r#" + enum E { + V1 { field: u32 }, + V2 + } + fn test() { + E::V1 { field: 1 }; + E::V2; + }"#, + expect![[r#" + 51..89 '{ ...:V2; }': () + 57..75 'E::V1 ...d: 1 }': E + 72..73 '1': u32 + 81..86 'E::V2': E + "#]], + ); +} + +#[test] +fn infer_union() { + check_infer( + r#" + union MyUnion { + foo: u32, + bar: f32, + } + + fn test() { + let u = MyUnion { foo: 0 }; + unsafe { baz(u); } + let u = MyUnion { bar: 0.0 }; + unsafe { baz(u); } + } + + unsafe fn baz(u: MyUnion) { + let inner = u.foo; + let inner = u.bar; + } + "#, + expect![[r#" + 57..172 '{ ...); } }': () + 67..68 'u': MyUnion + 71..89 'MyUnio...o: 0 }': MyUnion + 86..87 '0': u32 + 95..113 'unsafe...(u); }': () + 102..113 '{ baz(u); }': () + 104..107 'baz': fn baz(MyUnion) + 104..110 'baz(u)': () + 108..109 'u': MyUnion + 122..123 'u': MyUnion + 126..146 'MyUnio... 0.0 }': MyUnion + 141..144 '0.0': f32 + 152..170 'unsafe...(u); }': () + 159..170 '{ baz(u); }': () + 161..164 'baz': fn baz(MyUnion) + 161..167 'baz(u)': () + 165..166 'u': MyUnion + 188..189 'u': MyUnion + 200..249 '{ ...bar; }': () + 210..215 'inner': u32 + 218..219 'u': MyUnion + 218..223 'u.foo': u32 + 233..238 'inner': f32 + 241..242 'u': MyUnion + 241..246 'u.bar': f32 + "#]], + ); +} + +#[test] +fn infer_refs() { + check_infer( + r#" + fn test(a: &u32, b: &mut u32, c: *const u32, d: *mut u32) { + a; + *a; + &a; + &mut a; + b; + *b; + &b; + c; + *c; + d; + *d; + } + "#, + expect![[r#" + 8..9 'a': &u32 + 17..18 'b': &mut u32 + 30..31 'c': *const u32 + 45..46 'd': *mut u32 + 58..149 '{ ... 
*d; }': () + 64..65 'a': &u32 + 71..73 '*a': u32 + 72..73 'a': &u32 + 79..81 '&a': &&u32 + 80..81 'a': &u32 + 87..93 '&mut a': &mut &u32 + 92..93 'a': &u32 + 99..100 'b': &mut u32 + 106..108 '*b': u32 + 107..108 'b': &mut u32 + 114..116 '&b': &&mut u32 + 115..116 'b': &mut u32 + 122..123 'c': *const u32 + 129..131 '*c': u32 + 130..131 'c': *const u32 + 137..138 'd': *mut u32 + 144..146 '*d': u32 + 145..146 'd': *mut u32 + "#]], + ); +} + +#[test] +fn infer_raw_ref() { + check_infer( + r#" + fn test(a: i32) { + &raw mut a; + &raw const a; + } + "#, + expect![[r#" + 8..9 'a': i32 + 16..53 '{ ...t a; }': () + 22..32 '&raw mut a': *mut i32 + 31..32 'a': i32 + 38..50 '&raw const a': *const i32 + 49..50 'a': i32 + "#]], + ); +} + +#[test] +fn infer_literals() { + check_infer( + r##" + fn test() { + 5i32; + 5f32; + 5f64; + "hello"; + b"bytes"; + 'c'; + b'b'; + 3.14; + 5000; + false; + true; + r#" + //! doc + // non-doc + mod foo {} + "#; + br#"yolo"#; + } + "##, + expect![[r##" + 10..216 '{ ...o"#; }': () + 16..20 '5i32': i32 + 26..30 '5f32': f32 + 36..40 '5f64': f64 + 46..53 '"hello"': &str + 59..67 'b"bytes"': &[u8; _] + 73..76 ''c'': char + 82..86 'b'b'': u8 + 92..96 '3.14': f64 + 102..106 '5000': i32 + 112..117 'false': bool + 123..127 'true': bool + 133..197 'r#" ... 
"#': &str + 203..213 'br#"yolo"#': &[u8; _] + "##]], + ); +} + +#[test] +fn infer_unary_op() { + check_infer( + r#" + enum SomeType {} + + fn test(x: SomeType) { + let b = false; + let c = !b; + let a = 100; + let d: i128 = -a; + let e = -100; + let f = !!!true; + let g = !42; + let h = !10u32; + let j = !a; + -3.14; + !3; + -x; + !x; + -"hello"; + !"hello"; + } + "#, + expect![[r#" + 26..27 'x': SomeType + 39..271 '{ ...lo"; }': () + 49..50 'b': bool + 53..58 'false': bool + 68..69 'c': bool + 72..74 '!b': bool + 73..74 'b': bool + 84..85 'a': i128 + 88..91 '100': i128 + 101..102 'd': i128 + 111..113 '-a': i128 + 112..113 'a': i128 + 123..124 'e': i32 + 127..131 '-100': i32 + 128..131 '100': i32 + 141..142 'f': bool + 145..152 '!!!true': bool + 146..152 '!!true': bool + 147..152 '!true': bool + 148..152 'true': bool + 162..163 'g': i32 + 166..169 '!42': i32 + 167..169 '42': i32 + 179..180 'h': u32 + 183..189 '!10u32': u32 + 184..189 '10u32': u32 + 199..200 'j': i128 + 203..205 '!a': i128 + 204..205 'a': i128 + 211..216 '-3.14': f64 + 212..216 '3.14': f64 + 222..224 '!3': i32 + 223..224 '3': i32 + 230..232 '-x': {unknown} + 231..232 'x': SomeType + 238..240 '!x': {unknown} + 239..240 'x': SomeType + 246..254 '-"hello"': {unknown} + 247..254 '"hello"': &str + 260..268 '!"hello"': {unknown} + 261..268 '"hello"': &str + "#]], + ); +} + +#[test] +fn infer_backwards() { + check_infer( + r#" + fn takes_u32(x: u32) {} + + struct S { i32_field: i32 } + + fn test() -> &mut &f64 { + let a = unknown_function(); + takes_u32(a); + let b = unknown_function(); + S { i32_field: b }; + let c = unknown_function(); + &mut &c + } + "#, + expect![[r#" + 13..14 'x': u32 + 21..23 '{}': () + 77..230 '{ ...t &c }': &mut &f64 + 87..88 'a': u32 + 91..107 'unknow...nction': {unknown} + 91..109 'unknow...tion()': u32 + 115..124 'takes_u32': fn takes_u32(u32) + 115..127 'takes_u32(a)': () + 125..126 'a': u32 + 137..138 'b': i32 + 141..157 'unknow...nction': {unknown} + 141..159 
'unknow...tion()': i32 + 165..183 'S { i3...d: b }': S + 180..181 'b': i32 + 193..194 'c': f64 + 197..213 'unknow...nction': {unknown} + 197..215 'unknow...tion()': f64 + 221..228 '&mut &c': &mut &f64 + 226..228 '&c': &f64 + 227..228 'c': f64 + "#]], + ); +} + +#[test] +fn infer_self() { + check_infer( + r#" + struct S; + + impl S { + fn test(&self) { + self; + } + fn test2(self: &Self) { + self; + } + fn test3() -> Self { + S {} + } + fn test4() -> Self { + Self {} + } + } + "#, + expect![[r#" + 33..37 'self': &S + 39..60 '{ ... }': () + 49..53 'self': &S + 74..78 'self': &S + 87..108 '{ ... }': () + 97..101 'self': &S + 132..152 '{ ... }': S + 142..146 'S {}': S + 176..199 '{ ... }': S + 186..193 'Self {}': S + "#]], + ); +} + +#[test] +fn infer_self_as_path() { + check_infer( + r#" + struct S1; + struct S2(isize); + enum E { + V1, + V2(u32), + } + + impl S1 { + fn test() { + Self; + } + } + impl S2 { + fn test() { + Self(1); + } + } + impl E { + fn test() { + Self::V1; + Self::V2(1); + } + } + "#, + expect![[r#" + 86..107 '{ ... }': () + 96..100 'Self': S1 + 134..158 '{ ... }': () + 144..148 'Self': S2(isize) -> S2 + 144..151 'Self(1)': S2 + 149..150 '1': isize + 184..230 '{ ... }': () + 194..202 'Self::V1': E + 212..220 'Self::V2': V2(u32) -> E + 212..223 'Self::V2(1)': E + 221..222 '1': u32 + "#]], + ); +} + +#[test] +fn infer_binary_op() { + check_infer( + r#" + fn f(x: bool) -> i32 { + 0i32 + } + + fn test() -> bool { + let x = a && b; + let y = true || false; + let z = x == y; + let t = x != y; + let minus_forty: isize = -40isize; + let h = minus_forty <= CONST_2; + let c = f(z || y) + 5; + let d = b; + let g = minus_forty ^= i; + let ten: usize = 10; + let ten_is_eleven = ten == some_num; + + ten < 3 + } + "#, + expect![[r#" + 5..6 'x': bool + 21..33 '{ 0i32 }': i32 + 27..31 '0i32': i32 + 53..369 '{ ... 
< 3 }': bool + 63..64 'x': bool + 67..68 'a': bool + 67..73 'a && b': bool + 72..73 'b': bool + 83..84 'y': bool + 87..91 'true': bool + 87..100 'true || false': bool + 95..100 'false': bool + 110..111 'z': bool + 114..115 'x': bool + 114..120 'x == y': bool + 119..120 'y': bool + 130..131 't': bool + 134..135 'x': bool + 134..140 'x != y': bool + 139..140 'y': bool + 150..161 'minus_forty': isize + 171..179 '-40isize': isize + 172..179 '40isize': isize + 189..190 'h': bool + 193..204 'minus_forty': isize + 193..215 'minus_...ONST_2': bool + 208..215 'CONST_2': isize + 225..226 'c': i32 + 229..230 'f': fn f(bool) -> i32 + 229..238 'f(z || y)': i32 + 229..242 'f(z || y) + 5': i32 + 231..232 'z': bool + 231..237 'z || y': bool + 236..237 'y': bool + 241..242 '5': i32 + 252..253 'd': {unknown} + 256..257 'b': {unknown} + 267..268 'g': () + 271..282 'minus_forty': isize + 271..287 'minus_...y ^= i': () + 286..287 'i': isize + 297..300 'ten': usize + 310..312 '10': usize + 322..335 'ten_is_eleven': bool + 338..341 'ten': usize + 338..353 'ten == some_num': bool + 345..353 'some_num': usize + 360..363 'ten': usize + 360..367 'ten < 3': bool + 366..367 '3': usize + "#]], + ); +} + +#[test] +fn infer_shift_op() { + check_infer( + r#" + fn test() { + 1u32 << 5u8; + 1u32 >> 5u8; + } + "#, + expect![[r#" + 10..47 '{ ...5u8; }': () + 16..20 '1u32': u32 + 16..27 '1u32 << 5u8': u32 + 24..27 '5u8': u8 + 33..37 '1u32': u32 + 33..44 '1u32 >> 5u8': u32 + 41..44 '5u8': u8 + "#]], + ); +} + +#[test] +fn infer_field_autoderef() { + check_infer( + r#" + struct A { + b: B, + } + struct B; + + fn test1(a: A) { + let a1 = a; + a1.b; + let a2 = &a; + a2.b; + let a3 = &mut a; + a3.b; + let a4 = &&&&&&&a; + a4.b; + let a5 = &mut &&mut &&mut a; + a5.b; + } + + fn test2(a1: *const A, a2: *mut A) { + a1.b; + a2.b; + } + "#, + expect![[r#" + 43..44 'a': A + 49..212 '{ ...5.b; }': () + 59..61 'a1': A + 64..65 'a': A + 71..73 'a1': A + 71..75 'a1.b': B + 85..87 'a2': &A + 90..92 '&a': &A + 91..92 
'a': A + 98..100 'a2': &A + 98..102 'a2.b': B + 112..114 'a3': &mut A + 117..123 '&mut a': &mut A + 122..123 'a': A + 129..131 'a3': &mut A + 129..133 'a3.b': B + 143..145 'a4': &&&&&&&A + 148..156 '&&&&&&&a': &&&&&&&A + 149..156 '&&&&&&a': &&&&&&A + 150..156 '&&&&&a': &&&&&A + 151..156 '&&&&a': &&&&A + 152..156 '&&&a': &&&A + 153..156 '&&a': &&A + 154..156 '&a': &A + 155..156 'a': A + 162..164 'a4': &&&&&&&A + 162..166 'a4.b': B + 176..178 'a5': &mut &&mut &&mut A + 181..199 '&mut &...&mut a': &mut &&mut &&mut A + 186..199 '&&mut &&mut a': &&mut &&mut A + 187..199 '&mut &&mut a': &mut &&mut A + 192..199 '&&mut a': &&mut A + 193..199 '&mut a': &mut A + 198..199 'a': A + 205..207 'a5': &mut &&mut &&mut A + 205..209 'a5.b': B + 223..225 'a1': *const A + 237..239 'a2': *mut A + 249..272 '{ ...2.b; }': () + 255..257 'a1': *const A + 255..259 'a1.b': B + 265..267 'a2': *mut A + 265..269 'a2.b': B + "#]], + ); +} + +#[test] +fn infer_argument_autoderef() { + check_infer( + r#" + #[lang = "deref"] + pub trait Deref { + type Target; + fn deref(&self) -> &Self::Target; + } + + struct A(T); + + impl A { + fn foo(&self) -> &T { + &self.0 + } + } + + struct B(T); + + impl Deref for B { + type Target = T; + fn deref(&self) -> &Self::Target { + &self.0 + } + } + + fn test() { + let t = A::foo(&&B(B(A(42)))); + } + "#, + expect![[r#" + 67..71 'self': &Self + 138..142 'self': &A + 150..173 '{ ... }': &T + 160..167 '&self.0': &T + 161..165 'self': &A + 161..167 'self.0': T + 254..258 'self': &B + 277..300 '{ ... 
}': &T + 287..294 '&self.0': &T + 288..292 'self': &B + 288..294 'self.0': T + 314..352 '{ ...))); }': () + 324..325 't': &i32 + 328..334 'A::foo': fn foo(&A) -> &i32 + 328..349 'A::foo...42))))': &i32 + 335..348 '&&B(B(A(42)))': &&B>> + 336..348 '&B(B(A(42)))': &B>> + 337..338 'B': B>>(B>) -> B>> + 337..348 'B(B(A(42)))': B>> + 339..340 'B': B>(A) -> B> + 339..347 'B(A(42))': B> + 341..342 'A': A(i32) -> A + 341..346 'A(42)': A + 343..345 '42': i32 + "#]], + ); +} + +#[test] +fn infer_method_argument_autoderef() { + check_infer( + r#" + #[lang = "deref"] + pub trait Deref { + type Target; + fn deref(&self) -> &Self::Target; + } + + struct A(*mut T); + + impl A { + fn foo(&self, x: &A) -> &T { + &*x.0 + } + } + + struct B(T); + + impl Deref for B { + type Target = T; + fn deref(&self) -> &Self::Target { + &self.0 + } + } + + fn test(a: A) { + let t = A(0 as *mut _).foo(&&B(B(a))); + } + "#, + expect![[r#" + 67..71 'self': &Self + 143..147 'self': &A + 149..150 'x': &A + 165..186 '{ ... }': &T + 175..180 '&*x.0': &T + 176..180 '*x.0': T + 177..178 'x': &A + 177..180 'x.0': *mut T + 267..271 'self': &B + 290..313 '{ ... }': &T + 300..307 '&self.0': &T + 301..305 'self': &B + 301..307 'self.0': T + 325..326 'a': A + 336..382 '{ ...))); }': () + 346..347 't': &i32 + 350..351 'A': A(*mut i32) -> A + 350..364 'A(0 as *mut _)': A + 350..379 'A(0 as...B(a)))': &i32 + 352..353 '0': i32 + 352..363 '0 as *mut _': *mut i32 + 369..378 '&&B(B(a))': &&B>> + 370..378 '&B(B(a))': &B>> + 371..372 'B': B>>(B>) -> B>> + 371..378 'B(B(a))': B>> + 373..374 'B': B>(A) -> B> + 373..377 'B(a)': B> + 375..376 'a': A + "#]], + ); +} + +#[test] +fn infer_in_elseif() { + check_infer( + r#" + struct Foo { field: i32 } + fn main(foo: Foo) { + if true { + + } else if false { + foo.field + } + } + "#, + expect![[r#" + 34..37 'foo': Foo + 44..108 '{ ... } }': () + 50..106 'if tru... }': () + 53..57 'true': bool + 58..66 '{ }': () + 72..106 'if fal... }': i32 + 75..80 'false': bool + 81..106 '{ ... 
}': i32 + 91..94 'foo': Foo + 91..100 'foo.field': i32 + "#]], + ) +} + +#[test] +fn infer_if_match_with_return() { + check_infer( + r#" + fn foo() { + let _x1 = if true { + 1 + } else { + return; + }; + let _x2 = if true { + 2 + } else { + return + }; + let _x3 = match true { + true => 3, + _ => { + return; + } + }; + let _x4 = match true { + true => 4, + _ => return + }; + }"#, + expect![[r#" + 9..322 '{ ... }; }': () + 19..22 '_x1': i32 + 25..79 'if tru... }': i32 + 28..32 'true': bool + 33..50 '{ ... }': i32 + 43..44 '1': i32 + 56..79 '{ ... }': i32 + 66..72 'return': ! + 89..92 '_x2': i32 + 95..148 'if tru... }': i32 + 98..102 'true': bool + 103..120 '{ ... }': i32 + 113..114 '2': i32 + 126..148 '{ ... }': ! + 136..142 'return': ! + 158..161 '_x3': i32 + 164..246 'match ... }': i32 + 170..174 'true': bool + 185..189 'true': bool + 185..189 'true': bool + 193..194 '3': i32 + 204..205 '_': bool + 209..240 '{ ... }': i32 + 223..229 'return': ! + 256..259 '_x4': i32 + 262..319 'match ... }': i32 + 268..272 'true': bool + 283..287 'true': bool + 283..287 'true': bool + 291..292 '4': i32 + 302..303 '_': bool + 307..313 'return': ! 
+ "#]], + ) +} + +#[test] +fn infer_inherent_method() { + check_infer( + r#" + struct A; + + impl A { + fn foo(self, x: u32) -> i32 {} + } + + mod b { + impl super::A { + fn bar(&self, x: u64) -> i64 {} + } + } + + fn test(a: A) { + a.foo(1); + (&a).bar(1); + a.bar(1); + } + "#, + expect![[r#" + 31..35 'self': A + 37..38 'x': u32 + 52..54 '{}': () + 102..106 'self': &A + 108..109 'x': u64 + 123..125 '{}': () + 143..144 'a': A + 149..197 '{ ...(1); }': () + 155..156 'a': A + 155..163 'a.foo(1)': i32 + 161..162 '1': u32 + 169..180 '(&a).bar(1)': i64 + 170..172 '&a': &A + 171..172 'a': A + 178..179 '1': u64 + 186..187 'a': A + 186..194 'a.bar(1)': i64 + 192..193 '1': u64 + "#]], + ); +} + +#[test] +fn infer_inherent_method_str() { + check_infer( + r#" + #[lang = "str"] + impl str { + fn foo(&self) -> i32 {} + } + + fn test() { + "foo".foo(); + } + "#, + expect![[r#" + 39..43 'self': &str + 52..54 '{}': () + 68..88 '{ ...o(); }': () + 74..79 '"foo"': &str + 74..85 '"foo".foo()': i32 + "#]], + ); +} + +#[test] +fn infer_tuple() { + check_infer( + r#" + fn test(x: &str, y: isize) { + let a: (u32, &str) = (1, "a"); + let b = (a, x); + let c = (y, x); + let d = (c, x); + let e = (1, "e"); + let f = (e, "d"); + } + "#, + expect![[r#" + 8..9 'x': &str + 17..18 'y': isize + 27..169 '{ ...d"); }': () + 37..38 'a': (u32, &str) + 54..62 '(1, "a")': (u32, &str) + 55..56 '1': u32 + 58..61 '"a"': &str + 72..73 'b': ((u32, &str), &str) + 76..82 '(a, x)': ((u32, &str), &str) + 77..78 'a': (u32, &str) + 80..81 'x': &str + 92..93 'c': (isize, &str) + 96..102 '(y, x)': (isize, &str) + 97..98 'y': isize + 100..101 'x': &str + 112..113 'd': ((isize, &str), &str) + 116..122 '(c, x)': ((isize, &str), &str) + 117..118 'c': (isize, &str) + 120..121 'x': &str + 132..133 'e': (i32, &str) + 136..144 '(1, "e")': (i32, &str) + 137..138 '1': i32 + 140..143 '"e"': &str + 154..155 'f': ((i32, &str), &str) + 158..166 '(e, "d")': ((i32, &str), &str) + 159..160 'e': (i32, &str) + 162..165 '"d"': &str + 
"#]], + ); +} + +#[test] +fn infer_array() { + check_infer( + r#" + fn test(x: &str, y: isize) { + let a = [x]; + let b = [a, a]; + let c = [b, b]; + + let d = [y, 1, 2, 3]; + let d = [1, y, 2, 3]; + let e = [y]; + let f = [d, d]; + let g = [e, e]; + + let h = [1, 2]; + let i = ["a", "b"]; + + let b = [a, ["b"]]; + let x: [u8; 0] = []; + } + "#, + expect![[r#" + 8..9 'x': &str + 17..18 'y': isize + 27..292 '{ ... []; }': () + 37..38 'a': [&str; _] + 41..44 '[x]': [&str; _] + 42..43 'x': &str + 54..55 'b': [[&str; _]; _] + 58..64 '[a, a]': [[&str; _]; _] + 59..60 'a': [&str; _] + 62..63 'a': [&str; _] + 74..75 'c': [[[&str; _]; _]; _] + 78..84 '[b, b]': [[[&str; _]; _]; _] + 79..80 'b': [[&str; _]; _] + 82..83 'b': [[&str; _]; _] + 95..96 'd': [isize; _] + 99..111 '[y, 1, 2, 3]': [isize; _] + 100..101 'y': isize + 103..104 '1': isize + 106..107 '2': isize + 109..110 '3': isize + 121..122 'd': [isize; _] + 125..137 '[1, y, 2, 3]': [isize; _] + 126..127 '1': isize + 129..130 'y': isize + 132..133 '2': isize + 135..136 '3': isize + 147..148 'e': [isize; _] + 151..154 '[y]': [isize; _] + 152..153 'y': isize + 164..165 'f': [[isize; _]; _] + 168..174 '[d, d]': [[isize; _]; _] + 169..170 'd': [isize; _] + 172..173 'd': [isize; _] + 184..185 'g': [[isize; _]; _] + 188..194 '[e, e]': [[isize; _]; _] + 189..190 'e': [isize; _] + 192..193 'e': [isize; _] + 205..206 'h': [i32; _] + 209..215 '[1, 2]': [i32; _] + 210..211 '1': i32 + 213..214 '2': i32 + 225..226 'i': [&str; _] + 229..239 '["a", "b"]': [&str; _] + 230..233 '"a"': &str + 235..238 '"b"': &str + 250..251 'b': [[&str; _]; _] + 254..264 '[a, ["b"]]': [[&str; _]; _] + 255..256 'a': [&str; _] + 258..263 '["b"]': [&str; _] + 259..262 '"b"': &str + 274..275 'x': [u8; _] + 287..289 '[]': [u8; _] + "#]], + ); +} + +#[test] +fn infer_struct_generics() { + check_infer( + r#" + struct A { + x: T, + } + + fn test(a1: A, i: i32) { + a1.x; + let a2 = A { x: i }; + a2.x; + let a3 = A:: { x: 1 }; + a3.x; + } + "#, + expect![[r#" + 
35..37 'a1': A + 47..48 'i': i32 + 55..146 '{ ...3.x; }': () + 61..63 'a1': A + 61..65 'a1.x': u32 + 75..77 'a2': A + 80..90 'A { x: i }': A + 87..88 'i': i32 + 96..98 'a2': A + 96..100 'a2.x': i32 + 110..112 'a3': A + 115..133 'A:: + 130..131 '1': i128 + 139..141 'a3': A + 139..143 'a3.x': i128 + "#]], + ); +} + +#[test] +fn infer_tuple_struct_generics() { + check_infer( + r#" + struct A(T); + enum Option { Some(T), None } + use Option::*; + + fn test() { + A(42); + A(42u128); + Some("x"); + Option::Some("x"); + None; + let x: Option = None; + } + "#, + expect![[r#" + 75..183 '{ ...one; }': () + 81..82 'A': A(i32) -> A + 81..86 'A(42)': A + 83..85 '42': i32 + 92..93 'A': A(u128) -> A + 92..101 'A(42u128)': A + 94..100 '42u128': u128 + 107..111 'Some': Some<&str>(&str) -> Option<&str> + 107..116 'Some("x")': Option<&str> + 112..115 '"x"': &str + 122..134 'Option::Some': Some<&str>(&str) -> Option<&str> + 122..139 'Option...e("x")': Option<&str> + 135..138 '"x"': &str + 145..149 'None': Option<{unknown}> + 159..160 'x': Option + 176..180 'None': Option + "#]], + ); +} + +#[test] +fn infer_function_generics() { + check_infer( + r#" + fn id(t: T) -> T { t } + + fn test() { + id(1u32); + id::(1); + let x: u64 = id(1); + } + "#, + expect![[r#" + 9..10 't': T + 20..25 '{ t }': T + 22..23 't': T + 37..97 '{ ...(1); }': () + 43..45 'id': fn id(u32) -> u32 + 43..51 'id(1u32)': u32 + 46..50 '1u32': u32 + 57..67 'id::': fn id(i128) -> i128 + 57..70 'id::(1)': i128 + 68..69 '1': i128 + 80..81 'x': u64 + 89..91 'id': fn id(u64) -> u64 + 89..94 'id(1)': u64 + 92..93 '1': u64 + "#]], + ); +} + +#[test] +fn infer_impl_generics_basic() { + check_infer( + r#" + struct A { + x: T1, + y: T2, + } + impl A { + fn x(self) -> X { + self.x + } + fn y(self) -> Y { + self.y + } + fn z(self, t: T) -> (X, Y, T) { + (self.x, self.y, t) + } + } + + fn test() -> i128 { + let a = A { x: 1u64, y: 1i64 }; + a.x(); + a.y(); + a.z(1i128); + a.z::(1); + } + "#, + expect![[r#" + 73..77 'self': A + 
84..106 '{ ... }': X + 94..98 'self': A + 94..100 'self.x': X + 116..120 'self': A + 127..149 '{ ... }': Y + 137..141 'self': A + 137..143 'self.y': Y + 162..166 'self': A + 168..169 't': T + 187..222 '{ ... }': (X, Y, T) + 197..216 '(self.....y, t)': (X, Y, T) + 198..202 'self': A + 198..204 'self.x': X + 206..210 'self': A + 206..212 'self.y': Y + 214..215 't': T + 244..341 '{ ...(1); }': () + 254..255 'a': A + 258..280 'A { x:...1i64 }': A + 265..269 '1u64': u64 + 274..278 '1i64': i64 + 286..287 'a': A + 286..291 'a.x()': u64 + 297..298 'a': A + 297..302 'a.y()': i64 + 308..309 'a': A + 308..318 'a.z(1i128)': (u64, i64, i128) + 312..317 '1i128': i128 + 324..325 'a': A + 324..338 'a.z::(1)': (u64, i64, u128) + 336..337 '1': u128 + "#]], + ); +} + +#[test] +fn infer_impl_generics_with_autoderef() { + check_infer( + r#" + enum Option { + Some(T), + None, + } + impl Option { + fn as_ref(&self) -> Option<&T> {} + } + fn test(o: Option) { + (&o).as_ref(); + o.as_ref(); + } + "#, + expect![[r#" + 77..81 'self': &Option + 97..99 '{}': () + 110..111 'o': Option + 126..164 '{ ...f(); }': () + 132..145 '(&o).as_ref()': Option<&u32> + 133..135 '&o': &Option + 134..135 'o': Option + 151..152 'o': Option + 151..161 'o.as_ref()': Option<&u32> + "#]], + ); +} + +#[test] +fn infer_generic_chain() { + check_infer( + r#" + struct A { + x: T, + } + impl A { + fn x(self) -> T2 { + self.x + } + } + fn id(t: T) -> T { t } + + fn test() -> i128 { + let x = 1; + let y = id(x); + let a = A { x: id(y) }; + let z = id(a.x); + let b = A { x: z }; + b.x() + } + "#, + expect![[r#" + 52..56 'self': A + 64..86 '{ ... 
}': T2 + 74..78 'self': A + 74..80 'self.x': T2 + 98..99 't': T + 109..114 '{ t }': T + 111..112 't': T + 134..254 '{ ....x() }': i128 + 144..145 'x': i128 + 148..149 '1': i128 + 159..160 'y': i128 + 163..165 'id': fn id(i128) -> i128 + 163..168 'id(x)': i128 + 166..167 'x': i128 + 178..179 'a': A + 182..196 'A { x: id(y) }': A + 189..191 'id': fn id(i128) -> i128 + 189..194 'id(y)': i128 + 192..193 'y': i128 + 206..207 'z': i128 + 210..212 'id': fn id(i128) -> i128 + 210..217 'id(a.x)': i128 + 213..214 'a': A + 213..216 'a.x': i128 + 227..228 'b': A + 231..241 'A { x: z }': A + 238..239 'z': i128 + 247..248 'b': A + 247..252 'b.x()': i128 + "#]], + ); +} + +#[test] +fn infer_associated_const() { + check_infer( + r#" + struct Struct; + + impl Struct { + const FOO: u32 = 1; + } + + enum Enum {} + + impl Enum { + const BAR: u32 = 2; + } + + trait Trait { + const ID: u32; + } + + struct TraitTest; + + impl Trait for TraitTest { + const ID: u32 = 5; + } + + fn test() { + let x = Struct::FOO; + let y = Enum::BAR; + let z = TraitTest::ID; + } + "#, + expect![[r#" + 51..52 '1': u32 + 104..105 '2': u32 + 212..213 '5': u32 + 228..306 '{ ...:ID; }': () + 238..239 'x': u32 + 242..253 'Struct::FOO': u32 + 263..264 'y': u32 + 267..276 'Enum::BAR': u32 + 286..287 'z': u32 + 290..303 'TraitTest::ID': u32 + "#]], + ); +} + +#[test] +fn infer_type_alias() { + check_infer( + r#" + struct A { x: X, y: Y } + type Foo = A; + type Bar = A; + type Baz = A; + fn test(x: Foo, y: Bar<&str>, z: Baz) { + x.x; + x.y; + y.x; + y.y; + z.x; + z.y; + } + "#, + expect![[r#" + 115..116 'x': A + 123..124 'y': A<&str, u128> + 137..138 'z': A + 153..210 '{ ...z.y; }': () + 159..160 'x': A + 159..162 'x.x': u32 + 168..169 'x': A + 168..171 'x.y': i128 + 177..178 'y': A<&str, u128> + 177..180 'y.x': &str + 186..187 'y': A<&str, u128> + 186..189 'y.y': u128 + 195..196 'z': A + 195..198 'z.x': u8 + 204..205 'z': A + 204..207 'z.y': i8 + "#]], + ) +} + +#[test] +fn recursive_type_alias() { + check_infer( + 
r#" + struct A {} + type Foo = Foo; + type Bar = A; + fn test(x: Foo) {} + "#, + expect![[r#" + 58..59 'x': {unknown} + 66..68 '{}': () + "#]], + ) +} + +#[test] +fn infer_type_param() { + check_infer( + r#" + fn id(x: T) -> T { + x + } + + fn clone(x: &T) -> T { + *x + } + + fn test() { + let y = 10u32; + id(y); + let x: bool = clone(z); + id::(1); + } + "#, + expect![[r#" + 9..10 'x': T + 20..29 '{ x }': T + 26..27 'x': T + 43..44 'x': &T + 55..65 '{ *x }': T + 61..63 '*x': T + 62..63 'x': &T + 77..157 '{ ...(1); }': () + 87..88 'y': u32 + 91..96 '10u32': u32 + 102..104 'id': fn id(u32) -> u32 + 102..107 'id(y)': u32 + 105..106 'y': u32 + 117..118 'x': bool + 127..132 'clone': fn clone(&bool) -> bool + 127..135 'clone(z)': bool + 133..134 'z': &bool + 141..151 'id::': fn id(i128) -> i128 + 141..154 'id::(1)': i128 + 152..153 '1': i128 + "#]], + ); +} + +#[test] +fn infer_const() { + check_infer( + r#" + struct Foo; + impl Foo { const ASSOC_CONST: u32 = 0; } + const GLOBAL_CONST: u32 = 101; + fn test() { + const LOCAL_CONST: u32 = 99; + let x = LOCAL_CONST; + let z = GLOBAL_CONST; + let id = Foo::ASSOC_CONST; + } + "#, + expect![[r#" + 48..49 '0': u32 + 79..82 '101': u32 + 94..212 '{ ...NST; }': () + 137..138 'x': u32 + 141..152 'LOCAL_CONST': u32 + 162..163 'z': u32 + 166..178 'GLOBAL_CONST': u32 + 188..190 'id': u32 + 193..209 'Foo::A..._CONST': u32 + 125..127 '99': u32 + "#]], + ); +} + +#[test] +fn infer_static() { + check_infer( + r#" + static GLOBAL_STATIC: u32 = 101; + static mut GLOBAL_STATIC_MUT: u32 = 101; + fn test() { + static LOCAL_STATIC: u32 = 99; + static mut LOCAL_STATIC_MUT: u32 = 99; + let x = LOCAL_STATIC; + let y = LOCAL_STATIC_MUT; + let z = GLOBAL_STATIC; + let w = GLOBAL_STATIC_MUT; + } + "#, + expect![[r#" + 28..31 '101': u32 + 69..72 '101': u32 + 84..279 '{ ...MUT; }': () + 172..173 'x': u32 + 176..188 'LOCAL_STATIC': u32 + 198..199 'y': u32 + 202..218 'LOCAL_...IC_MUT': u32 + 228..229 'z': u32 + 232..245 'GLOBAL_STATIC': u32 + 255..256 
'w': u32 + 259..276 'GLOBAL...IC_MUT': u32 + 117..119 '99': u32 + 160..162 '99': u32 + "#]], + ); +} + +#[test] +fn shadowing_primitive() { + check_types( + r#" +struct i32; +struct Foo; + +impl i32 { fn foo(&self) -> Foo { Foo } } + +fn main() { + let x: i32 = i32; + x.foo(); + //^ Foo +}"#, + ); +} + +#[test] +fn not_shadowing_primitive_by_module() { + check_types( + r#" +//- /str.rs +fn foo() {} + +//- /main.rs +mod str; +fn foo() -> &'static str { "" } + +fn main() { + foo(); + //^ &str +}"#, + ); +} + +#[test] +fn not_shadowing_module_by_primitive() { + check_types( + r#" +//- /str.rs +fn foo() -> u32 {0} + +//- /main.rs +mod str; +fn foo() -> &'static str { "" } + +fn main() { + str::foo(); + //^ u32 +}"#, + ); +} + +// This test is actually testing the shadowing behavior within hir_def. It +// lives here because the testing infrastructure in hir_def isn't currently +// capable of asserting the necessary conditions. +#[test] +fn should_be_shadowing_imports() { + check_types( + r#" +mod a { + pub fn foo() -> i8 {0} + pub struct foo { a: i8 } +} +mod b { pub fn foo () -> u8 {0} } +mod c { pub struct foo { a: u8 } } +mod d { + pub use super::a::*; + pub use super::c::foo; + pub use super::b::foo; +} + +fn main() { + d::foo(); + //^ u8 + d::foo{a:0}; + //^ u8 +}"#, + ); +} + +#[test] +fn closure_return() { + check_infer( + r#" + fn foo() -> u32 { + let x = || -> usize { return 1; }; + } + "#, + expect![[r#" + 16..58 '{ ...; }; }': () + 26..27 'x': || -> usize + 30..55 '|| -> ...n 1; }': || -> usize + 42..55 '{ return 1; }': usize + 44..52 'return 1': ! + 51..52 '1': usize + "#]], + ); +} + +#[test] +fn closure_return_unit() { + check_infer( + r#" + fn foo() -> u32 { + let x = || { return; }; + } + "#, + expect![[r#" + 16..47 '{ ...; }; }': () + 26..27 'x': || -> () + 30..44 '|| { return; }': || -> () + 33..44 '{ return; }': () + 35..41 'return': ! 
+ "#]], + ); +} + +#[test] +fn closure_return_inferred() { + check_infer( + r#" + fn foo() -> u32 { + let x = || { "test" }; + } + "#, + expect![[r#" + 16..46 '{ ..." }; }': () + 26..27 'x': || -> &str + 30..43 '|| { "test" }': || -> &str + 33..43 '{ "test" }': &str + 35..41 '"test"': &str + "#]], + ); +} + +#[test] +fn fn_pointer_return() { + check_infer( + r#" + struct Vtable { + method: fn(), + } + + fn main() { + let vtable = Vtable { method: || {} }; + let m = vtable.method; + } + "#, + expect![[r#" + 47..120 '{ ...hod; }': () + 57..63 'vtable': Vtable + 66..90 'Vtable...| {} }': Vtable + 83..88 '|| {}': || -> () + 86..88 '{}': () + 100..101 'm': fn() + 104..110 'vtable': Vtable + 104..117 'vtable.method': fn() + "#]], + ); +} + +#[test] +fn effects_smoke_test() { + check_infer( + r#" + fn main() { + let x = unsafe { 92 }; + let y = async { async { () }.await }; + let z = try { () }; + let t = 'a: { 92 }; + } + "#, + expect![[r#" + 10..130 '{ ...2 }; }': () + 20..21 'x': i32 + 24..37 'unsafe { 92 }': i32 + 31..37 '{ 92 }': i32 + 33..35 '92': i32 + 47..48 'y': {unknown} + 57..79 '{ asyn...wait }': {unknown} + 59..77 'async ....await': {unknown} + 65..71 '{ () }': () + 67..69 '()': () + 89..90 'z': {unknown} + 93..103 'try { () }': {unknown} + 97..103 '{ () }': () + 99..101 '()': () + 113..114 't': i32 + 121..127 '{ 92 }': i32 + 123..125 '92': i32 + "#]], + ) +} + +#[test] +fn infer_generic_from_later_assignment() { + check_infer( + r#" + enum Option { Some(T), None } + use Option::*; + + fn test() { + let mut end = None; + loop { + end = Some(true); + } + } + "#, + expect![[r#" + 59..129 '{ ... } }': () + 69..76 'mut end': Option + 79..83 'None': Option + 89..127 'loop {... }': ! + 94..127 '{ ... 
}': () + 104..107 'end': Option + 104..120 'end = ...(true)': () + 110..114 'Some': Some(bool) -> Option + 110..120 'Some(true)': Option + 115..119 'true': bool + "#]], + ); +} + +#[test] +fn infer_loop_break_with_val() { + check_infer( + r#" + enum Option { Some(T), None } + use Option::*; + + fn test() { + let x = loop { + if false { + break None; + } + + break Some(true); + }; + } + "#, + expect![[r#" + 59..168 '{ ... }; }': () + 69..70 'x': Option + 73..165 'loop {... }': Option + 78..165 '{ ... }': () + 88..132 'if fal... }': () + 91..96 'false': bool + 97..132 '{ ... }': () + 111..121 'break None': ! + 117..121 'None': Option + 142..158 'break ...(true)': ! + 148..152 'Some': Some(bool) -> Option + 148..158 'Some(true)': Option + 153..157 'true': bool + "#]], + ); +} + +#[test] +fn infer_loop_break_without_val() { + check_infer( + r#" + enum Option { Some(T), None } + use Option::*; + + fn test() { + let x = loop { + if false { + break; + } + }; + } + "#, + expect![[r#" + 59..136 '{ ... }; }': () + 69..70 'x': () + 73..133 'loop {... }': () + 78..133 '{ ... }': () + 88..127 'if fal... }': () + 91..96 'false': bool + 97..127 '{ ... }': () + 111..116 'break': ! + "#]], + ); +} + +#[test] +fn infer_labelled_break_with_val() { + check_infer( + r#" + fn foo() { + let _x = || 'outer: loop { + let inner = 'inner: loop { + let i = Default::default(); + if (break 'outer i) { + loop { break 'inner 5i8; }; + } else if true { + break 'inner 6; + } + break 7; + }; + break inner < 8; + }; + } + "#, + expect![[r#" + 9..335 '{ ... }; }': () + 19..21 '_x': || -> bool + 24..332 '|| 'ou... }': || -> bool + 27..332 ''outer... }': bool + 40..332 '{ ... }': () + 54..59 'inner': i8 + 62..300 ''inner... }': i8 + 75..300 '{ ... }': () + 93..94 'i': bool + 97..113 'Defaul...efault': {unknown} + 97..115 'Defaul...ault()': bool + 129..269 'if (br... }': () + 133..147 'break 'outer i': ! + 146..147 'i': bool + 149..208 '{ ... }': () + 167..193 'loop {...5i8; }': ! 
+ 172..193 '{ brea...5i8; }': () + 174..190 'break ...er 5i8': ! + 187..190 '5i8': i8 + 214..269 'if tru... }': () + 217..221 'true': bool + 222..269 '{ ... }': () + 240..254 'break 'inner 6': ! + 253..254 '6': i8 + 282..289 'break 7': ! + 288..289 '7': i8 + 310..325 'break inner < 8': ! + 316..321 'inner': i8 + 316..325 'inner < 8': bool + 324..325 '8': i8 + "#]], + ); +} + +#[test] +fn generic_default() { + check_infer( + r#" + struct Thing { t: T } + enum OtherThing { + One { t: T }, + Two(T), + } + + fn test(t1: Thing, t2: OtherThing, t3: Thing, t4: OtherThing) { + t1.t; + t3.t; + match t2 { + OtherThing::One { t } => { t; }, + OtherThing::Two(t) => { t; }, + } + match t4 { + OtherThing::One { t } => { t; }, + OtherThing::Two(t) => { t; }, + } + } + "#, + expect![[r#" + 97..99 't1': Thing<()> + 108..110 't2': OtherThing<()> + 124..126 't3': Thing + 140..142 't4': OtherThing + 161..384 '{ ... } }': () + 167..169 't1': Thing<()> + 167..171 't1.t': () + 177..179 't3': Thing + 177..181 't3.t': i32 + 187..282 'match ... }': () + 193..195 't2': OtherThing<()> + 206..227 'OtherT... { t }': OtherThing<()> + 224..225 't': () + 231..237 '{ t; }': () + 233..234 't': () + 247..265 'OtherT...Two(t)': OtherThing<()> + 263..264 't': () + 269..275 '{ t; }': () + 271..272 't': () + 287..382 'match ... }': () + 293..295 't4': OtherThing + 306..327 'OtherT... 
{ t }': OtherThing + 324..325 't': i32 + 331..337 '{ t; }': () + 333..334 't': i32 + 347..365 'OtherT...Two(t)': OtherThing + 363..364 't': i32 + 369..375 '{ t; }': () + 371..372 't': i32 + "#]], + ); +} + +#[test] +fn generic_default_in_struct_literal() { + check_infer( + r#" + struct Thing { t: T } + enum OtherThing { + One { t: T }, + Two(T), + } + + fn test() { + let x = Thing { t: loop {} }; + let y = Thing { t: () }; + let z = Thing { t: 1i32 }; + if let Thing { t } = z { + t; + } + + let a = OtherThing::One { t: 1i32 }; + let b = OtherThing::Two(1i32); + } + "#, + expect![[r#" + 99..319 '{ ...32); }': () + 109..110 'x': Thing + 113..133 'Thing ...p {} }': Thing + 124..131 'loop {}': ! + 129..131 '{}': () + 143..144 'y': Thing<()> + 147..162 'Thing { t: () }': Thing<()> + 158..160 '()': () + 172..173 'z': Thing + 176..193 'Thing ...1i32 }': Thing + 187..191 '1i32': i32 + 199..240 'if let... }': () + 206..217 'Thing { t }': Thing + 214..215 't': i32 + 220..221 'z': Thing + 222..240 '{ ... 
}': () + 232..233 't': i32 + 250..251 'a': OtherThing + 254..281 'OtherT...1i32 }': OtherThing + 275..279 '1i32': i32 + 291..292 'b': OtherThing + 295..310 'OtherThing::Two': Two(i32) -> OtherThing + 295..316 'OtherT...(1i32)': OtherThing + 311..315 '1i32': i32 + "#]], + ); +} + +#[test] +fn generic_default_depending_on_other_type_arg() { + // FIXME: the {unknown} is a bug + check_infer( + r#" + struct Thing T> { t: T } + + fn test(t1: Thing, t2: Thing) { + t1; + t2; + Thing::<_> { t: 1u32 }; + } + "#, + expect![[r#" + 56..58 't1': Thing u32> + 72..74 't2': Thing u128> + 83..130 '{ ...2 }; }': () + 89..91 't1': Thing u32> + 97..99 't2': Thing u128> + 105..127 'Thing:...1u32 }': Thing {unknown}> + 121..125 '1u32': u32 + "#]], + ); +} + +#[test] +fn generic_default_depending_on_other_type_arg_forward() { + // the {unknown} here is intentional, as defaults are not allowed to + // refer to type parameters coming later + check_infer( + r#" + struct Thing T, T = u128> { t: T } + + fn test(t1: Thing) { + t1; + } + "#, + expect![[r#" + 56..58 't1': Thing {unknown}, u128> + 67..78 '{ t1; }': () + 73..75 't1': Thing {unknown}, u128> + "#]], + ); +} diff --git a/crates/ra_hir_ty/src/tests/traits.rs b/crates/hir_ty/src/tests/traits.rs similarity index 100% rename from crates/ra_hir_ty/src/tests/traits.rs rename to crates/hir_ty/src/tests/traits.rs diff --git a/crates/hir_ty/src/traits.rs b/crates/hir_ty/src/traits.rs new file mode 100644 index 0000000000..1c3abb18f0 --- /dev/null +++ b/crates/hir_ty/src/traits.rs @@ -0,0 +1,285 @@ +//! Trait solving using Chalk. 
+use std::sync::Arc; + +use base_db::CrateId; +use chalk_ir::cast::Cast; +use chalk_solve::{logging_db::LoggingRustIrDatabase, Solver}; +use hir_def::{lang_item::LangItemTarget, TraitId}; + +use crate::{db::HirDatabase, DebruijnIndex, Substs}; + +use super::{Canonical, GenericPredicate, HirDisplay, ProjectionTy, TraitRef, Ty, TypeWalk}; + +use self::chalk::{from_chalk, Interner, ToChalk}; + +pub(crate) mod chalk; + +// This controls the maximum size of types Chalk considers. If we set this too +// high, we can run into slow edge cases; if we set it too low, Chalk won't +// find some solutions. +// FIXME this is currently hardcoded in the recursive solver +// const CHALK_SOLVER_MAX_SIZE: usize = 10; + +/// This controls how much 'time' we give the Chalk solver before giving up. +const CHALK_SOLVER_FUEL: i32 = 100; + +#[derive(Debug, Copy, Clone)] +struct ChalkContext<'a> { + db: &'a dyn HirDatabase, + krate: CrateId, +} + +fn create_chalk_solver() -> chalk_recursive::RecursiveSolver { + let overflow_depth = 100; + let caching_enabled = true; + chalk_recursive::RecursiveSolver::new(overflow_depth, caching_enabled) +} + +/// A set of clauses that we assume to be true. E.g. if we are inside this function: +/// ```rust +/// fn foo(t: T) {} +/// ``` +/// we assume that `T: Default`. +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub struct TraitEnvironment { + pub predicates: Vec, +} + +impl TraitEnvironment { + /// Returns trait refs with the given self type which are supposed to hold + /// in this trait env. E.g. if we are in `foo()`, this will + /// find that `T: SomeTrait` if we call it for `T`. + pub(crate) fn trait_predicates_for_self_ty<'a>( + &'a self, + ty: &'a Ty, + ) -> impl Iterator + 'a { + self.predicates.iter().filter_map(move |pred| match pred { + GenericPredicate::Implemented(tr) if tr.self_ty() == ty => Some(tr), + _ => None, + }) + } +} + +/// Something (usually a goal), along with an environment. 
+#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub struct InEnvironment { + pub environment: Arc, + pub value: T, +} + +impl InEnvironment { + pub fn new(environment: Arc, value: T) -> InEnvironment { + InEnvironment { environment, value } + } +} + +/// Something that needs to be proven (by Chalk) during type checking, e.g. that +/// a certain type implements a certain trait. Proving the Obligation might +/// result in additional information about inference variables. +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub enum Obligation { + /// Prove that a certain type implements a trait (the type is the `Self` type + /// parameter to the `TraitRef`). + Trait(TraitRef), + Projection(ProjectionPredicate), +} + +impl Obligation { + pub fn from_predicate(predicate: GenericPredicate) -> Option { + match predicate { + GenericPredicate::Implemented(trait_ref) => Some(Obligation::Trait(trait_ref)), + GenericPredicate::Projection(projection_pred) => { + Some(Obligation::Projection(projection_pred)) + } + GenericPredicate::Error => None, + } + } +} + +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub struct ProjectionPredicate { + pub projection_ty: ProjectionTy, + pub ty: Ty, +} + +impl TypeWalk for ProjectionPredicate { + fn walk(&self, f: &mut impl FnMut(&Ty)) { + self.projection_ty.walk(f); + self.ty.walk(f); + } + + fn walk_mut_binders( + &mut self, + f: &mut impl FnMut(&mut Ty, DebruijnIndex), + binders: DebruijnIndex, + ) { + self.projection_ty.walk_mut_binders(f, binders); + self.ty.walk_mut_binders(f, binders); + } +} + +/// Solve a trait goal using Chalk. 
+pub(crate) fn trait_solve_query( + db: &dyn HirDatabase, + krate: CrateId, + goal: Canonical>, +) -> Option { + let _p = profile::span("trait_solve_query").detail(|| match &goal.value.value { + Obligation::Trait(it) => db.trait_data(it.trait_).name.to_string(), + Obligation::Projection(_) => "projection".to_string(), + }); + log::info!("trait_solve_query({})", goal.value.value.display(db)); + + if let Obligation::Projection(pred) = &goal.value.value { + if let Ty::Bound(_) = &pred.projection_ty.parameters[0] { + // Hack: don't ask Chalk to normalize with an unknown self type, it'll say that's impossible + return Some(Solution::Ambig(Guidance::Unknown)); + } + } + + let canonical = goal.to_chalk(db).cast(&Interner); + + // We currently don't deal with universes (I think / hope they're not yet + // relevant for our use cases?) + let u_canonical = chalk_ir::UCanonical { canonical, universes: 1 }; + let solution = solve(db, krate, &u_canonical); + solution.map(|solution| solution_from_chalk(db, solution)) +} + +fn solve( + db: &dyn HirDatabase, + krate: CrateId, + goal: &chalk_ir::UCanonical>>, +) -> Option> { + let context = ChalkContext { db, krate }; + log::debug!("solve goal: {:?}", goal); + let mut solver = create_chalk_solver(); + + let fuel = std::cell::Cell::new(CHALK_SOLVER_FUEL); + + let should_continue = || { + context.db.check_canceled(); + let remaining = fuel.get(); + fuel.set(remaining - 1); + if remaining == 0 { + log::debug!("fuel exhausted"); + } + remaining > 0 + }; + + let mut solve = || { + if is_chalk_print() { + let logging_db = LoggingRustIrDatabase::new(context); + let solution = solver.solve_limited(&logging_db, goal, should_continue); + log::debug!("chalk program:\n{}", logging_db); + solution + } else { + solver.solve_limited(&context, goal, should_continue) + } + }; + + // don't set the TLS for Chalk unless Chalk debugging is active, to make + // extra sure we only use it for debugging + let solution = + if is_chalk_debug() { 
chalk::tls::set_current_program(db, solve) } else { solve() }; + + log::debug!("solve({:?}) => {:?}", goal, solution); + + solution +} + +fn is_chalk_debug() -> bool { + std::env::var("CHALK_DEBUG").is_ok() +} + +fn is_chalk_print() -> bool { + std::env::var("CHALK_PRINT").is_ok() +} + +fn solution_from_chalk( + db: &dyn HirDatabase, + solution: chalk_solve::Solution, +) -> Solution { + let convert_subst = |subst: chalk_ir::Canonical>| { + let result = from_chalk(db, subst); + SolutionVariables(result) + }; + match solution { + chalk_solve::Solution::Unique(constr_subst) => { + let subst = chalk_ir::Canonical { + value: constr_subst.value.subst, + binders: constr_subst.binders, + }; + Solution::Unique(convert_subst(subst)) + } + chalk_solve::Solution::Ambig(chalk_solve::Guidance::Definite(subst)) => { + Solution::Ambig(Guidance::Definite(convert_subst(subst))) + } + chalk_solve::Solution::Ambig(chalk_solve::Guidance::Suggested(subst)) => { + Solution::Ambig(Guidance::Suggested(convert_subst(subst))) + } + chalk_solve::Solution::Ambig(chalk_solve::Guidance::Unknown) => { + Solution::Ambig(Guidance::Unknown) + } + } +} + +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct SolutionVariables(pub Canonical); + +#[derive(Clone, Debug, PartialEq, Eq)] +/// A (possible) solution for a proposed goal. +pub enum Solution { + /// The goal indeed holds, and there is a unique value for all existential + /// variables. + Unique(SolutionVariables), + + /// The goal may be provable in multiple ways, but regardless we may have some guidance + /// for type inference. In this case, we don't return any lifetime + /// constraints, since we have not "committed" to any particular solution + /// yet. + Ambig(Guidance), +} + +#[derive(Clone, Debug, PartialEq, Eq)] +/// When a goal holds ambiguously (e.g., because there are multiple possible +/// solutions), we issue a set of *guidance* back to type inference. 
+pub enum Guidance { + /// The existential variables *must* have the given values if the goal is + /// ever to hold, but that alone isn't enough to guarantee the goal will + /// actually hold. + Definite(SolutionVariables), + + /// There are multiple plausible values for the existentials, but the ones + /// here are suggested as the preferred choice heuristically. These should + /// be used for inference fallback only. + Suggested(SolutionVariables), + + /// There's no useful information to feed back to type inference + Unknown, +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub enum FnTrait { + FnOnce, + FnMut, + Fn, +} + +impl FnTrait { + fn lang_item_name(self) -> &'static str { + match self { + FnTrait::FnOnce => "fn_once", + FnTrait::FnMut => "fn_mut", + FnTrait::Fn => "fn", + } + } + + pub fn get_id(&self, db: &dyn HirDatabase, krate: CrateId) -> Option { + let target = db.lang_item(krate, self.lang_item_name().into())?; + match target { + LangItemTarget::TraitId(t) => Some(t), + _ => None, + } + } +} diff --git a/crates/hir_ty/src/traits/chalk.rs b/crates/hir_ty/src/traits/chalk.rs new file mode 100644 index 0000000000..17c83b6a46 --- /dev/null +++ b/crates/hir_ty/src/traits/chalk.rs @@ -0,0 +1,589 @@ +//! Conversion code from/to Chalk. 
+use std::sync::Arc; + +use log::debug; + +use chalk_ir::{fold::shift::Shift, CanonicalVarKinds, GenericArg, TypeName}; +use chalk_solve::rust_ir::{self, OpaqueTyDatumBound, WellKnownTrait}; + +use base_db::{salsa::InternKey, CrateId}; +use hir_def::{ + lang_item::{lang_attr, LangItemTarget}, + AssocContainerId, AssocItemId, HasModule, Lookup, TypeAliasId, +}; + +use super::ChalkContext; +use crate::{ + db::HirDatabase, + display::HirDisplay, + method_resolution::{TyFingerprint, ALL_FLOAT_FPS, ALL_INT_FPS}, + utils::generics, + CallableDefId, DebruijnIndex, FnSig, GenericPredicate, Substs, Ty, TypeCtor, +}; +use mapping::{ + convert_where_clauses, generic_predicate_to_inline_bound, make_binders, TypeAliasAsValue, +}; + +pub use self::interner::*; + +pub(super) mod tls; +mod interner; +mod mapping; + +pub(super) trait ToChalk { + type Chalk; + fn to_chalk(self, db: &dyn HirDatabase) -> Self::Chalk; + fn from_chalk(db: &dyn HirDatabase, chalk: Self::Chalk) -> Self; +} + +pub(super) fn from_chalk(db: &dyn HirDatabase, chalk: ChalkT) -> T +where + T: ToChalk, +{ + T::from_chalk(db, chalk) +} + +impl<'a> chalk_solve::RustIrDatabase for ChalkContext<'a> { + fn associated_ty_data(&self, id: AssocTypeId) -> Arc { + self.db.associated_ty_data(id) + } + fn trait_datum(&self, trait_id: TraitId) -> Arc { + self.db.trait_datum(self.krate, trait_id) + } + fn adt_datum(&self, struct_id: AdtId) -> Arc { + self.db.struct_datum(self.krate, struct_id) + } + fn adt_repr(&self, _struct_id: AdtId) -> rust_ir::AdtRepr { + rust_ir::AdtRepr { repr_c: false, repr_packed: false } + } + fn impl_datum(&self, impl_id: ImplId) -> Arc { + self.db.impl_datum(self.krate, impl_id) + } + + fn fn_def_datum( + &self, + fn_def_id: chalk_ir::FnDefId, + ) -> Arc> { + self.db.fn_def_datum(self.krate, fn_def_id) + } + + fn impls_for_trait( + &self, + trait_id: TraitId, + parameters: &[GenericArg], + binders: &CanonicalVarKinds, + ) -> Vec { + debug!("impls_for_trait {:?}", trait_id); + let trait_: 
hir_def::TraitId = from_chalk(self.db, trait_id); + + let ty: Ty = from_chalk(self.db, parameters[0].assert_ty_ref(&Interner).clone()); + + fn binder_kind(ty: &Ty, binders: &CanonicalVarKinds) -> Option { + if let Ty::Bound(bv) = ty { + let binders = binders.as_slice(&Interner); + if bv.debruijn == DebruijnIndex::INNERMOST { + if let chalk_ir::VariableKind::Ty(tk) = binders[bv.index].kind { + return Some(tk); + } + } + } + None + } + + let self_ty_fp = TyFingerprint::for_impl(&ty); + let fps: &[TyFingerprint] = match binder_kind(&ty, binders) { + Some(chalk_ir::TyKind::Integer) => &ALL_INT_FPS, + Some(chalk_ir::TyKind::Float) => &ALL_FLOAT_FPS, + _ => self_ty_fp.as_ref().map(std::slice::from_ref).unwrap_or(&[]), + }; + + // Note: Since we're using impls_for_trait, only impls where the trait + // can be resolved should ever reach Chalk. `impl_datum` relies on that + // and will panic if the trait can't be resolved. + let in_deps = self.db.trait_impls_in_deps(self.krate); + let in_self = self.db.trait_impls_in_crate(self.krate); + let impl_maps = [in_deps, in_self]; + + let id_to_chalk = |id: hir_def::ImplId| id.to_chalk(self.db); + + let result: Vec<_> = if fps.is_empty() { + debug!("Unrestricted search for {:?} impls...", trait_); + impl_maps + .iter() + .flat_map(|crate_impl_defs| crate_impl_defs.for_trait(trait_).map(id_to_chalk)) + .collect() + } else { + impl_maps + .iter() + .flat_map(|crate_impl_defs| { + fps.iter().flat_map(move |fp| { + crate_impl_defs.for_trait_and_self_ty(trait_, *fp).map(id_to_chalk) + }) + }) + .collect() + }; + + debug!("impls_for_trait returned {} impls", result.len()); + result + } + fn impl_provided_for(&self, auto_trait_id: TraitId, struct_id: AdtId) -> bool { + debug!("impl_provided_for {:?}, {:?}", auto_trait_id, struct_id); + false // FIXME + } + fn associated_ty_value(&self, id: AssociatedTyValueId) -> Arc { + self.db.associated_ty_value(self.krate, id) + } + + fn custom_clauses(&self) -> Vec> { + vec![] + } + fn 
local_impls_to_coherence_check(&self, _trait_id: TraitId) -> Vec { + // We don't do coherence checking (yet) + unimplemented!() + } + fn interner(&self) -> &Interner { + &Interner + } + fn well_known_trait_id( + &self, + well_known_trait: rust_ir::WellKnownTrait, + ) -> Option> { + let lang_attr = lang_attr_from_well_known_trait(well_known_trait); + let trait_ = match self.db.lang_item(self.krate, lang_attr.into()) { + Some(LangItemTarget::TraitId(trait_)) => trait_, + _ => return None, + }; + Some(trait_.to_chalk(self.db)) + } + + fn program_clauses_for_env( + &self, + environment: &chalk_ir::Environment, + ) -> chalk_ir::ProgramClauses { + self.db.program_clauses_for_chalk_env(self.krate, environment.clone()) + } + + fn opaque_ty_data(&self, id: chalk_ir::OpaqueTyId) -> Arc { + let interned_id = crate::db::InternedOpaqueTyId::from(id); + let full_id = self.db.lookup_intern_impl_trait_id(interned_id); + let (func, idx) = match full_id { + crate::OpaqueTyId::ReturnTypeImplTrait(func, idx) => (func, idx), + }; + let datas = + self.db.return_type_impl_traits(func).expect("impl trait id without impl traits"); + let data = &datas.value.impl_traits[idx as usize]; + let bound = OpaqueTyDatumBound { + bounds: make_binders( + data.bounds + .value + .iter() + .cloned() + .filter(|b| !b.is_error()) + .map(|b| b.to_chalk(self.db)) + .collect(), + 1, + ), + where_clauses: make_binders(vec![], 0), + }; + let num_vars = datas.num_binders; + Arc::new(OpaqueTyDatum { opaque_ty_id: id, bound: make_binders(bound, num_vars) }) + } + + fn hidden_opaque_type(&self, _id: chalk_ir::OpaqueTyId) -> chalk_ir::Ty { + // FIXME: actually provide the hidden type; it is relevant for auto traits + Ty::Unknown.to_chalk(self.db) + } + + fn is_object_safe(&self, _trait_id: chalk_ir::TraitId) -> bool { + // FIXME: implement actual object safety + true + } + + fn closure_kind( + &self, + _closure_id: chalk_ir::ClosureId, + _substs: &chalk_ir::Substitution, + ) -> rust_ir::ClosureKind { + // Fn is the 
closure kind that implements all three traits + rust_ir::ClosureKind::Fn + } + fn closure_inputs_and_output( + &self, + _closure_id: chalk_ir::ClosureId, + substs: &chalk_ir::Substitution, + ) -> chalk_ir::Binders> { + let sig_ty: Ty = + from_chalk(self.db, substs.at(&Interner, 0).assert_ty_ref(&Interner).clone()); + let sig = FnSig::from_fn_ptr_substs( + &sig_ty.substs().expect("first closure param should be fn ptr"), + false, + ); + let io = rust_ir::FnDefInputsAndOutputDatum { + argument_types: sig.params().iter().map(|ty| ty.clone().to_chalk(self.db)).collect(), + return_type: sig.ret().clone().to_chalk(self.db), + }; + make_binders(io.shifted_in(&Interner), 0) + } + fn closure_upvars( + &self, + _closure_id: chalk_ir::ClosureId, + _substs: &chalk_ir::Substitution, + ) -> chalk_ir::Binders> { + let ty = Ty::unit().to_chalk(self.db); + make_binders(ty, 0) + } + fn closure_fn_substitution( + &self, + _closure_id: chalk_ir::ClosureId, + _substs: &chalk_ir::Substitution, + ) -> chalk_ir::Substitution { + Substs::empty().to_chalk(self.db) + } + + fn trait_name(&self, trait_id: chalk_ir::TraitId) -> String { + let id = from_chalk(self.db, trait_id); + self.db.trait_data(id).name.to_string() + } + // FIXME: lookup names + fn adt_name(&self, struct_id: chalk_ir::AdtId) -> String { + let datum = self.db.struct_datum(self.krate, struct_id); + format!("{:?}", datum.name(&Interner)) + } + fn assoc_type_name(&self, assoc_ty_id: chalk_ir::AssocTypeId) -> String { + format!("Assoc_{}", assoc_ty_id.0) + } + fn opaque_type_name(&self, opaque_ty_id: chalk_ir::OpaqueTyId) -> String { + format!("Opaque_{}", opaque_ty_id.0) + } + fn fn_def_name(&self, fn_def_id: chalk_ir::FnDefId) -> String { + format!("fn_{}", fn_def_id.0) + } +} + +pub(crate) fn program_clauses_for_chalk_env_query( + db: &dyn HirDatabase, + krate: CrateId, + environment: chalk_ir::Environment, +) -> chalk_ir::ProgramClauses { + chalk_solve::program_clauses_for_env(&ChalkContext { db, krate }, &environment) +} + 
+pub(crate) fn associated_ty_data_query( + db: &dyn HirDatabase, + id: AssocTypeId, +) -> Arc { + debug!("associated_ty_data {:?}", id); + let type_alias: TypeAliasId = from_chalk(db, id); + let trait_ = match type_alias.lookup(db.upcast()).container { + AssocContainerId::TraitId(t) => t, + _ => panic!("associated type not in trait"), + }; + + // Lower bounds -- we could/should maybe move this to a separate query in `lower` + let type_alias_data = db.type_alias_data(type_alias); + let generic_params = generics(db.upcast(), type_alias.into()); + let bound_vars = Substs::bound_vars(&generic_params, DebruijnIndex::INNERMOST); + let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db.upcast()); + let ctx = crate::TyLoweringContext::new(db, &resolver) + .with_type_param_mode(crate::lower::TypeParamLoweringMode::Variable); + let self_ty = Ty::Bound(crate::BoundVar::new(crate::DebruijnIndex::INNERMOST, 0)); + let bounds = type_alias_data + .bounds + .iter() + .flat_map(|bound| GenericPredicate::from_type_bound(&ctx, bound, self_ty.clone())) + .filter_map(|pred| generic_predicate_to_inline_bound(db, &pred, &self_ty)) + .map(|bound| make_binders(bound.shifted_in(&Interner), 0)) + .collect(); + + let where_clauses = convert_where_clauses(db, type_alias.into(), &bound_vars); + let bound_data = rust_ir::AssociatedTyDatumBound { bounds, where_clauses }; + let datum = AssociatedTyDatum { + trait_id: trait_.to_chalk(db), + id, + name: type_alias, + binders: make_binders(bound_data, generic_params.len()), + }; + Arc::new(datum) +} + +pub(crate) fn trait_datum_query( + db: &dyn HirDatabase, + krate: CrateId, + trait_id: TraitId, +) -> Arc { + debug!("trait_datum {:?}", trait_id); + let trait_: hir_def::TraitId = from_chalk(db, trait_id); + let trait_data = db.trait_data(trait_); + debug!("trait {:?} = {:?}", trait_id, trait_data.name); + let generic_params = generics(db.upcast(), trait_.into()); + let bound_vars = Substs::bound_vars(&generic_params, 
DebruijnIndex::INNERMOST); + let flags = rust_ir::TraitFlags { + auto: trait_data.auto, + upstream: trait_.lookup(db.upcast()).container.module(db.upcast()).krate != krate, + non_enumerable: true, + coinductive: false, // only relevant for Chalk testing + // FIXME: set these flags correctly + marker: false, + fundamental: false, + }; + let where_clauses = convert_where_clauses(db, trait_.into(), &bound_vars); + let associated_ty_ids = + trait_data.associated_types().map(|type_alias| type_alias.to_chalk(db)).collect(); + let trait_datum_bound = rust_ir::TraitDatumBound { where_clauses }; + let well_known = + lang_attr(db.upcast(), trait_).and_then(|name| well_known_trait_from_lang_attr(&name)); + let trait_datum = TraitDatum { + id: trait_id, + binders: make_binders(trait_datum_bound, bound_vars.len()), + flags, + associated_ty_ids, + well_known, + }; + Arc::new(trait_datum) +} + +fn well_known_trait_from_lang_attr(name: &str) -> Option { + Some(match name { + "sized" => WellKnownTrait::Sized, + "copy" => WellKnownTrait::Copy, + "clone" => WellKnownTrait::Clone, + "drop" => WellKnownTrait::Drop, + "fn_once" => WellKnownTrait::FnOnce, + "fn_mut" => WellKnownTrait::FnMut, + "fn" => WellKnownTrait::Fn, + "unsize" => WellKnownTrait::Unsize, + _ => return None, + }) +} + +fn lang_attr_from_well_known_trait(attr: WellKnownTrait) -> &'static str { + match attr { + WellKnownTrait::Sized => "sized", + WellKnownTrait::Copy => "copy", + WellKnownTrait::Clone => "clone", + WellKnownTrait::Drop => "drop", + WellKnownTrait::FnOnce => "fn_once", + WellKnownTrait::FnMut => "fn_mut", + WellKnownTrait::Fn => "fn", + WellKnownTrait::Unsize => "unsize", + } +} + +pub(crate) fn struct_datum_query( + db: &dyn HirDatabase, + krate: CrateId, + struct_id: AdtId, +) -> Arc { + debug!("struct_datum {:?}", struct_id); + let type_ctor: TypeCtor = from_chalk(db, TypeName::Adt(struct_id)); + debug!("struct {:?} = {:?}", struct_id, type_ctor); + let num_params = type_ctor.num_ty_params(db); + let 
upstream = type_ctor.krate(db) != Some(krate); + let where_clauses = type_ctor + .as_generic_def() + .map(|generic_def| { + let generic_params = generics(db.upcast(), generic_def); + let bound_vars = Substs::bound_vars(&generic_params, DebruijnIndex::INNERMOST); + convert_where_clauses(db, generic_def, &bound_vars) + }) + .unwrap_or_else(Vec::new); + let flags = rust_ir::AdtFlags { + upstream, + // FIXME set fundamental and phantom_data flags correctly + fundamental: false, + phantom_data: false, + }; + // FIXME provide enum variants properly (for auto traits) + let variant = rust_ir::AdtVariantDatum { + fields: Vec::new(), // FIXME add fields (only relevant for auto traits), + }; + let struct_datum_bound = rust_ir::AdtDatumBound { variants: vec![variant], where_clauses }; + let struct_datum = StructDatum { + // FIXME set ADT kind + kind: rust_ir::AdtKind::Struct, + id: struct_id, + binders: make_binders(struct_datum_bound, num_params), + flags, + }; + Arc::new(struct_datum) +} + +pub(crate) fn impl_datum_query( + db: &dyn HirDatabase, + krate: CrateId, + impl_id: ImplId, +) -> Arc { + let _p = profile::span("impl_datum"); + debug!("impl_datum {:?}", impl_id); + let impl_: hir_def::ImplId = from_chalk(db, impl_id); + impl_def_datum(db, krate, impl_id, impl_) +} + +fn impl_def_datum( + db: &dyn HirDatabase, + krate: CrateId, + chalk_id: ImplId, + impl_id: hir_def::ImplId, +) -> Arc { + let trait_ref = db + .impl_trait(impl_id) + // ImplIds for impls where the trait ref can't be resolved should never reach Chalk + .expect("invalid impl passed to Chalk") + .value; + let impl_data = db.impl_data(impl_id); + + let generic_params = generics(db.upcast(), impl_id.into()); + let bound_vars = Substs::bound_vars(&generic_params, DebruijnIndex::INNERMOST); + let trait_ = trait_ref.trait_; + let impl_type = if impl_id.lookup(db.upcast()).container.module(db.upcast()).krate == krate { + rust_ir::ImplType::Local + } else { + rust_ir::ImplType::External + }; + let where_clauses = 
convert_where_clauses(db, impl_id.into(), &bound_vars); + let negative = impl_data.is_negative; + debug!( + "impl {:?}: {}{} where {:?}", + chalk_id, + if negative { "!" } else { "" }, + trait_ref.display(db), + where_clauses + ); + let trait_ref = trait_ref.to_chalk(db); + + let polarity = if negative { rust_ir::Polarity::Negative } else { rust_ir::Polarity::Positive }; + + let impl_datum_bound = rust_ir::ImplDatumBound { trait_ref, where_clauses }; + let trait_data = db.trait_data(trait_); + let associated_ty_value_ids = impl_data + .items + .iter() + .filter_map(|item| match item { + AssocItemId::TypeAliasId(type_alias) => Some(*type_alias), + _ => None, + }) + .filter(|&type_alias| { + // don't include associated types that don't exist in the trait + let name = &db.type_alias_data(type_alias).name; + trait_data.associated_type_by_name(name).is_some() + }) + .map(|type_alias| TypeAliasAsValue(type_alias).to_chalk(db)) + .collect(); + debug!("impl_datum: {:?}", impl_datum_bound); + let impl_datum = ImplDatum { + binders: make_binders(impl_datum_bound, bound_vars.len()), + impl_type, + polarity, + associated_ty_value_ids, + }; + Arc::new(impl_datum) +} + +pub(crate) fn associated_ty_value_query( + db: &dyn HirDatabase, + krate: CrateId, + id: AssociatedTyValueId, +) -> Arc { + let type_alias: TypeAliasAsValue = from_chalk(db, id); + type_alias_associated_ty_value(db, krate, type_alias.0) +} + +fn type_alias_associated_ty_value( + db: &dyn HirDatabase, + _krate: CrateId, + type_alias: TypeAliasId, +) -> Arc { + let type_alias_data = db.type_alias_data(type_alias); + let impl_id = match type_alias.lookup(db.upcast()).container { + AssocContainerId::ImplId(it) => it, + _ => panic!("assoc ty value should be in impl"), + }; + + let trait_ref = db.impl_trait(impl_id).expect("assoc ty value should not exist").value; // we don't return any assoc ty values if the impl'd trait can't be resolved + + let assoc_ty = db + .trait_data(trait_ref.trait_) + 
.associated_type_by_name(&type_alias_data.name) + .expect("assoc ty value should not exist"); // validated when building the impl data as well + let ty = db.ty(type_alias.into()); + let value_bound = rust_ir::AssociatedTyValueBound { ty: ty.value.to_chalk(db) }; + let value = rust_ir::AssociatedTyValue { + impl_id: impl_id.to_chalk(db), + associated_ty_id: assoc_ty.to_chalk(db), + value: make_binders(value_bound, ty.num_binders), + }; + Arc::new(value) +} + +pub(crate) fn fn_def_datum_query( + db: &dyn HirDatabase, + _krate: CrateId, + fn_def_id: FnDefId, +) -> Arc { + let callable_def: CallableDefId = from_chalk(db, fn_def_id); + let generic_params = generics(db.upcast(), callable_def.into()); + let sig = db.callable_item_signature(callable_def); + let bound_vars = Substs::bound_vars(&generic_params, DebruijnIndex::INNERMOST); + let where_clauses = convert_where_clauses(db, callable_def.into(), &bound_vars); + let bound = rust_ir::FnDefDatumBound { + // Note: Chalk doesn't actually use this information yet as far as I am aware, but we provide it anyway + inputs_and_output: make_binders( + rust_ir::FnDefInputsAndOutputDatum { + argument_types: sig + .value + .params() + .iter() + .map(|ty| ty.clone().to_chalk(db)) + .collect(), + return_type: sig.value.ret().clone().to_chalk(db), + } + .shifted_in(&Interner), + 0, + ), + where_clauses, + }; + let datum = FnDefDatum { + id: fn_def_id, + abi: (), + safety: chalk_ir::Safety::Safe, + variadic: sig.value.is_varargs, + binders: make_binders(bound, sig.num_binders), + }; + Arc::new(datum) +} + +impl From for crate::db::InternedCallableDefId { + fn from(fn_def_id: FnDefId) -> Self { + InternKey::from_intern_id(fn_def_id.0) + } +} + +impl From for FnDefId { + fn from(callable_def_id: crate::db::InternedCallableDefId) -> Self { + chalk_ir::FnDefId(callable_def_id.as_intern_id()) + } +} + +impl From for crate::db::InternedOpaqueTyId { + fn from(id: OpaqueTyId) -> Self { + InternKey::from_intern_id(id.0) + } +} + +impl From 
for OpaqueTyId { + fn from(id: crate::db::InternedOpaqueTyId) -> Self { + chalk_ir::OpaqueTyId(id.as_intern_id()) + } +} + +impl From> for crate::db::ClosureId { + fn from(id: chalk_ir::ClosureId) -> Self { + Self::from_intern_id(id.0) + } +} + +impl From for chalk_ir::ClosureId { + fn from(id: crate::db::ClosureId) -> Self { + chalk_ir::ClosureId(id.as_intern_id()) + } +} diff --git a/crates/hir_ty/src/traits/chalk/interner.rs b/crates/hir_ty/src/traits/chalk/interner.rs new file mode 100644 index 0000000000..fc0f9c2019 --- /dev/null +++ b/crates/hir_ty/src/traits/chalk/interner.rs @@ -0,0 +1,383 @@ +//! Implementation of the Chalk `Interner` trait, which allows customizing the +//! representation of the various objects Chalk deals with (types, goals etc.). + +use super::tls; +use base_db::salsa::InternId; +use chalk_ir::{GenericArg, Goal, GoalData}; +use hir_def::TypeAliasId; +use std::{fmt, sync::Arc}; + +#[derive(Debug, Copy, Clone, Hash, PartialOrd, Ord, PartialEq, Eq)] +pub struct Interner; + +pub type AssocTypeId = chalk_ir::AssocTypeId; +pub type AssociatedTyDatum = chalk_solve::rust_ir::AssociatedTyDatum; +pub type TraitId = chalk_ir::TraitId; +pub type TraitDatum = chalk_solve::rust_ir::TraitDatum; +pub type AdtId = chalk_ir::AdtId; +pub type StructDatum = chalk_solve::rust_ir::AdtDatum; +pub type ImplId = chalk_ir::ImplId; +pub type ImplDatum = chalk_solve::rust_ir::ImplDatum; +pub type AssociatedTyValueId = chalk_solve::rust_ir::AssociatedTyValueId; +pub type AssociatedTyValue = chalk_solve::rust_ir::AssociatedTyValue; +pub type FnDefId = chalk_ir::FnDefId; +pub type FnDefDatum = chalk_solve::rust_ir::FnDefDatum; +pub type OpaqueTyId = chalk_ir::OpaqueTyId; +pub type OpaqueTyDatum = chalk_solve::rust_ir::OpaqueTyDatum; + +impl chalk_ir::interner::Interner for Interner { + type InternedType = Box>; // FIXME use Arc? 
+ type InternedLifetime = chalk_ir::LifetimeData; + type InternedConst = Arc>; + type InternedConcreteConst = (); + type InternedGenericArg = chalk_ir::GenericArgData; + type InternedGoal = Arc>; + type InternedGoals = Vec>; + type InternedSubstitution = Vec>; + type InternedProgramClause = chalk_ir::ProgramClauseData; + type InternedProgramClauses = Arc<[chalk_ir::ProgramClause]>; + type InternedQuantifiedWhereClauses = Vec>; + type InternedVariableKinds = Vec>; + type InternedCanonicalVarKinds = Vec>; + type InternedConstraints = Vec>>; + type DefId = InternId; + type InternedAdtId = hir_def::AdtId; + type Identifier = TypeAliasId; + type FnAbi = (); + + fn debug_adt_id(type_kind_id: AdtId, fmt: &mut fmt::Formatter<'_>) -> Option { + tls::with_current_program(|prog| Some(prog?.debug_struct_id(type_kind_id, fmt))) + } + + fn debug_trait_id(type_kind_id: TraitId, fmt: &mut fmt::Formatter<'_>) -> Option { + tls::with_current_program(|prog| Some(prog?.debug_trait_id(type_kind_id, fmt))) + } + + fn debug_assoc_type_id(id: AssocTypeId, fmt: &mut fmt::Formatter<'_>) -> Option { + tls::with_current_program(|prog| Some(prog?.debug_assoc_type_id(id, fmt))) + } + + fn debug_alias( + alias: &chalk_ir::AliasTy, + fmt: &mut fmt::Formatter<'_>, + ) -> Option { + tls::with_current_program(|prog| Some(prog?.debug_alias(alias, fmt))) + } + + fn debug_projection_ty( + proj: &chalk_ir::ProjectionTy, + fmt: &mut fmt::Formatter<'_>, + ) -> Option { + tls::with_current_program(|prog| Some(prog?.debug_projection_ty(proj, fmt))) + } + + fn debug_opaque_ty( + opaque_ty: &chalk_ir::OpaqueTy, + fmt: &mut fmt::Formatter<'_>, + ) -> Option { + tls::with_current_program(|prog| Some(prog?.debug_opaque_ty(opaque_ty, fmt))) + } + + fn debug_opaque_ty_id( + opaque_ty_id: chalk_ir::OpaqueTyId, + fmt: &mut fmt::Formatter<'_>, + ) -> Option { + tls::with_current_program(|prog| Some(prog?.debug_opaque_ty_id(opaque_ty_id, fmt))) + } + + fn debug_ty(ty: &chalk_ir::Ty, fmt: &mut fmt::Formatter<'_>) -> 
Option { + tls::with_current_program(|prog| Some(prog?.debug_ty(ty, fmt))) + } + + fn debug_lifetime( + lifetime: &chalk_ir::Lifetime, + fmt: &mut fmt::Formatter<'_>, + ) -> Option { + tls::with_current_program(|prog| Some(prog?.debug_lifetime(lifetime, fmt))) + } + + fn debug_generic_arg( + parameter: &GenericArg, + fmt: &mut fmt::Formatter<'_>, + ) -> Option { + tls::with_current_program(|prog| Some(prog?.debug_generic_arg(parameter, fmt))) + } + + fn debug_goal(goal: &Goal, fmt: &mut fmt::Formatter<'_>) -> Option { + tls::with_current_program(|prog| Some(prog?.debug_goal(goal, fmt))) + } + + fn debug_goals( + goals: &chalk_ir::Goals, + fmt: &mut fmt::Formatter<'_>, + ) -> Option { + tls::with_current_program(|prog| Some(prog?.debug_goals(goals, fmt))) + } + + fn debug_program_clause_implication( + pci: &chalk_ir::ProgramClauseImplication, + fmt: &mut fmt::Formatter<'_>, + ) -> Option { + tls::with_current_program(|prog| Some(prog?.debug_program_clause_implication(pci, fmt))) + } + + fn debug_application_ty( + application_ty: &chalk_ir::ApplicationTy, + fmt: &mut fmt::Formatter<'_>, + ) -> Option { + tls::with_current_program(|prog| Some(prog?.debug_application_ty(application_ty, fmt))) + } + + fn debug_substitution( + substitution: &chalk_ir::Substitution, + fmt: &mut fmt::Formatter<'_>, + ) -> Option { + tls::with_current_program(|prog| Some(prog?.debug_substitution(substitution, fmt))) + } + + fn debug_separator_trait_ref( + separator_trait_ref: &chalk_ir::SeparatorTraitRef, + fmt: &mut fmt::Formatter<'_>, + ) -> Option { + tls::with_current_program(|prog| { + Some(prog?.debug_separator_trait_ref(separator_trait_ref, fmt)) + }) + } + + fn debug_fn_def_id( + fn_def_id: chalk_ir::FnDefId, + fmt: &mut fmt::Formatter<'_>, + ) -> Option { + tls::with_current_program(|prog| Some(prog?.debug_fn_def_id(fn_def_id, fmt))) + } + fn debug_const( + constant: &chalk_ir::Const, + fmt: &mut fmt::Formatter<'_>, + ) -> Option { + tls::with_current_program(|prog| 
Some(prog?.debug_const(constant, fmt))) + } + fn debug_variable_kinds( + variable_kinds: &chalk_ir::VariableKinds, + fmt: &mut fmt::Formatter<'_>, + ) -> Option { + tls::with_current_program(|prog| Some(prog?.debug_variable_kinds(variable_kinds, fmt))) + } + fn debug_variable_kinds_with_angles( + variable_kinds: &chalk_ir::VariableKinds, + fmt: &mut fmt::Formatter<'_>, + ) -> Option { + tls::with_current_program(|prog| { + Some(prog?.debug_variable_kinds_with_angles(variable_kinds, fmt)) + }) + } + fn debug_canonical_var_kinds( + canonical_var_kinds: &chalk_ir::CanonicalVarKinds, + fmt: &mut fmt::Formatter<'_>, + ) -> Option { + tls::with_current_program(|prog| { + Some(prog?.debug_canonical_var_kinds(canonical_var_kinds, fmt)) + }) + } + fn debug_program_clause( + clause: &chalk_ir::ProgramClause, + fmt: &mut fmt::Formatter<'_>, + ) -> Option { + tls::with_current_program(|prog| Some(prog?.debug_program_clause(clause, fmt))) + } + fn debug_program_clauses( + clauses: &chalk_ir::ProgramClauses, + fmt: &mut fmt::Formatter<'_>, + ) -> Option { + tls::with_current_program(|prog| Some(prog?.debug_program_clauses(clauses, fmt))) + } + fn debug_quantified_where_clauses( + clauses: &chalk_ir::QuantifiedWhereClauses, + fmt: &mut fmt::Formatter<'_>, + ) -> Option { + tls::with_current_program(|prog| Some(prog?.debug_quantified_where_clauses(clauses, fmt))) + } + + fn intern_ty(&self, ty: chalk_ir::TyData) -> Box> { + Box::new(ty) + } + + fn ty_data<'a>(&self, ty: &'a Box>) -> &'a chalk_ir::TyData { + ty + } + + fn intern_lifetime( + &self, + lifetime: chalk_ir::LifetimeData, + ) -> chalk_ir::LifetimeData { + lifetime + } + + fn lifetime_data<'a>( + &self, + lifetime: &'a chalk_ir::LifetimeData, + ) -> &'a chalk_ir::LifetimeData { + lifetime + } + + fn intern_const(&self, constant: chalk_ir::ConstData) -> Arc> { + Arc::new(constant) + } + + fn const_data<'a>( + &self, + constant: &'a Arc>, + ) -> &'a chalk_ir::ConstData { + constant + } + + fn const_eq(&self, _ty: &Box>, 
_c1: &(), _c2: &()) -> bool { + true + } + + fn intern_generic_arg( + &self, + parameter: chalk_ir::GenericArgData, + ) -> chalk_ir::GenericArgData { + parameter + } + + fn generic_arg_data<'a>( + &self, + parameter: &'a chalk_ir::GenericArgData, + ) -> &'a chalk_ir::GenericArgData { + parameter + } + + fn intern_goal(&self, goal: GoalData) -> Arc> { + Arc::new(goal) + } + + fn intern_goals( + &self, + data: impl IntoIterator, E>>, + ) -> Result { + data.into_iter().collect() + } + + fn goal_data<'a>(&self, goal: &'a Arc>) -> &'a GoalData { + goal + } + + fn goals_data<'a>(&self, goals: &'a Vec>) -> &'a [Goal] { + goals + } + + fn intern_substitution( + &self, + data: impl IntoIterator, E>>, + ) -> Result>, E> { + data.into_iter().collect() + } + + fn substitution_data<'a>( + &self, + substitution: &'a Vec>, + ) -> &'a [GenericArg] { + substitution + } + + fn intern_program_clause( + &self, + data: chalk_ir::ProgramClauseData, + ) -> chalk_ir::ProgramClauseData { + data + } + + fn program_clause_data<'a>( + &self, + clause: &'a chalk_ir::ProgramClauseData, + ) -> &'a chalk_ir::ProgramClauseData { + clause + } + + fn intern_program_clauses( + &self, + data: impl IntoIterator, E>>, + ) -> Result]>, E> { + data.into_iter().collect() + } + + fn program_clauses_data<'a>( + &self, + clauses: &'a Arc<[chalk_ir::ProgramClause]>, + ) -> &'a [chalk_ir::ProgramClause] { + &clauses + } + + fn intern_quantified_where_clauses( + &self, + data: impl IntoIterator, E>>, + ) -> Result { + data.into_iter().collect() + } + + fn quantified_where_clauses_data<'a>( + &self, + clauses: &'a Self::InternedQuantifiedWhereClauses, + ) -> &'a [chalk_ir::QuantifiedWhereClause] { + clauses + } + + fn intern_generic_arg_kinds( + &self, + data: impl IntoIterator, E>>, + ) -> Result { + data.into_iter().collect() + } + + fn variable_kinds_data<'a>( + &self, + parameter_kinds: &'a Self::InternedVariableKinds, + ) -> &'a [chalk_ir::VariableKind] { + ¶meter_kinds + } + + fn intern_canonical_var_kinds( 
+ &self, + data: impl IntoIterator, E>>, + ) -> Result { + data.into_iter().collect() + } + + fn canonical_var_kinds_data<'a>( + &self, + canonical_var_kinds: &'a Self::InternedCanonicalVarKinds, + ) -> &'a [chalk_ir::CanonicalVarKind] { + &canonical_var_kinds + } + + fn intern_constraints( + &self, + data: impl IntoIterator>, E>>, + ) -> Result { + data.into_iter().collect() + } + + fn constraints_data<'a>( + &self, + constraints: &'a Self::InternedConstraints, + ) -> &'a [chalk_ir::InEnvironment>] { + constraints + } + fn debug_closure_id( + _fn_def_id: chalk_ir::ClosureId, + _fmt: &mut fmt::Formatter<'_>, + ) -> Option { + None + } + fn debug_constraints( + _clauses: &chalk_ir::Constraints, + _fmt: &mut fmt::Formatter<'_>, + ) -> Option { + None + } +} + +impl chalk_ir::interner::HasInterner for Interner { + type Interner = Self; +} diff --git a/crates/hir_ty/src/traits/chalk/mapping.rs b/crates/hir_ty/src/traits/chalk/mapping.rs new file mode 100644 index 0000000000..fe62f3fa7c --- /dev/null +++ b/crates/hir_ty/src/traits/chalk/mapping.rs @@ -0,0 +1,787 @@ +//! This module contains the implementations of the `ToChalk` trait, which +//! handles conversion between our data types and their corresponding types in +//! Chalk (in both directions); plus some helper functions for more specialized +//! conversions. 
+ +use chalk_ir::{ + cast::Cast, fold::shift::Shift, interner::HasInterner, PlaceholderIndex, Scalar, TypeName, + UniverseIndex, +}; +use chalk_solve::rust_ir; + +use base_db::salsa::InternKey; +use hir_def::{type_ref::Mutability, AssocContainerId, GenericDefId, Lookup, TypeAliasId}; + +use crate::{ + db::HirDatabase, + primitive::{FloatBitness, FloatTy, IntBitness, IntTy, Signedness}, + traits::{Canonical, Obligation}, + ApplicationTy, CallableDefId, GenericPredicate, InEnvironment, OpaqueTy, OpaqueTyId, + ProjectionPredicate, ProjectionTy, Substs, TraitEnvironment, TraitRef, Ty, TyKind, TypeCtor, +}; + +use super::interner::*; +use super::*; + +impl ToChalk for Ty { + type Chalk = chalk_ir::Ty; + fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::Ty { + match self { + Ty::Apply(apply_ty) => match apply_ty.ctor { + TypeCtor::Ref(m) => ref_to_chalk(db, m, apply_ty.parameters), + TypeCtor::Array => array_to_chalk(db, apply_ty.parameters), + TypeCtor::FnPtr { num_args: _, is_varargs } => { + let substitution = apply_ty.parameters.to_chalk(db).shifted_in(&Interner); + chalk_ir::TyData::Function(chalk_ir::FnPointer { + num_binders: 0, + abi: (), + safety: chalk_ir::Safety::Safe, + variadic: is_varargs, + substitution, + }) + .intern(&Interner) + } + _ => { + let name = apply_ty.ctor.to_chalk(db); + let substitution = apply_ty.parameters.to_chalk(db); + chalk_ir::ApplicationTy { name, substitution }.cast(&Interner).intern(&Interner) + } + }, + Ty::Projection(proj_ty) => { + let associated_ty_id = proj_ty.associated_ty.to_chalk(db); + let substitution = proj_ty.parameters.to_chalk(db); + chalk_ir::AliasTy::Projection(chalk_ir::ProjectionTy { + associated_ty_id, + substitution, + }) + .cast(&Interner) + .intern(&Interner) + } + Ty::Placeholder(id) => { + let interned_id = db.intern_type_param_id(id); + PlaceholderIndex { + ui: UniverseIndex::ROOT, + idx: interned_id.as_intern_id().as_usize(), + } + .to_ty::(&Interner) + } + Ty::Bound(idx) => 
chalk_ir::TyData::BoundVar(idx).intern(&Interner), + Ty::Infer(_infer_ty) => panic!("uncanonicalized infer ty"), + Ty::Dyn(predicates) => { + let where_clauses = chalk_ir::QuantifiedWhereClauses::from_iter( + &Interner, + predicates.iter().filter(|p| !p.is_error()).cloned().map(|p| p.to_chalk(db)), + ); + let bounded_ty = chalk_ir::DynTy { + bounds: make_binders(where_clauses, 1), + lifetime: FAKE_PLACEHOLDER.to_lifetime(&Interner), + }; + chalk_ir::TyData::Dyn(bounded_ty).intern(&Interner) + } + Ty::Opaque(opaque_ty) => { + let opaque_ty_id = opaque_ty.opaque_ty_id.to_chalk(db); + let substitution = opaque_ty.parameters.to_chalk(db); + chalk_ir::TyData::Alias(chalk_ir::AliasTy::Opaque(chalk_ir::OpaqueTy { + opaque_ty_id, + substitution, + })) + .intern(&Interner) + } + Ty::Unknown => { + let substitution = chalk_ir::Substitution::empty(&Interner); + let name = TypeName::Error; + chalk_ir::ApplicationTy { name, substitution }.cast(&Interner).intern(&Interner) + } + } + } + fn from_chalk(db: &dyn HirDatabase, chalk: chalk_ir::Ty) -> Self { + match chalk.data(&Interner).clone() { + chalk_ir::TyData::Apply(apply_ty) => match apply_ty.name { + TypeName::Error => Ty::Unknown, + TypeName::Ref(m) => ref_from_chalk(db, m, apply_ty.substitution), + TypeName::Array => array_from_chalk(db, apply_ty.substitution), + _ => { + let ctor = from_chalk(db, apply_ty.name); + let parameters = from_chalk(db, apply_ty.substitution); + Ty::Apply(ApplicationTy { ctor, parameters }) + } + }, + chalk_ir::TyData::Placeholder(idx) => { + assert_eq!(idx.ui, UniverseIndex::ROOT); + let interned_id = crate::db::GlobalTypeParamId::from_intern_id( + crate::salsa::InternId::from(idx.idx), + ); + Ty::Placeholder(db.lookup_intern_type_param_id(interned_id)) + } + chalk_ir::TyData::Alias(chalk_ir::AliasTy::Projection(proj)) => { + let associated_ty = from_chalk(db, proj.associated_ty_id); + let parameters = from_chalk(db, proj.substitution); + Ty::Projection(ProjectionTy { associated_ty, parameters }) 
+ } + chalk_ir::TyData::Alias(chalk_ir::AliasTy::Opaque(opaque_ty)) => { + let impl_trait_id = from_chalk(db, opaque_ty.opaque_ty_id); + let parameters = from_chalk(db, opaque_ty.substitution); + Ty::Opaque(OpaqueTy { opaque_ty_id: impl_trait_id, parameters }) + } + chalk_ir::TyData::Function(chalk_ir::FnPointer { + num_binders, + variadic, + substitution, + .. + }) => { + assert_eq!(num_binders, 0); + let parameters: Substs = from_chalk( + db, + substitution.shifted_out(&Interner).expect("fn ptr should have no binders"), + ); + Ty::Apply(ApplicationTy { + ctor: TypeCtor::FnPtr { + num_args: (parameters.len() - 1) as u16, + is_varargs: variadic, + }, + parameters, + }) + } + chalk_ir::TyData::BoundVar(idx) => Ty::Bound(idx), + chalk_ir::TyData::InferenceVar(_iv, _kind) => Ty::Unknown, + chalk_ir::TyData::Dyn(where_clauses) => { + assert_eq!(where_clauses.bounds.binders.len(&Interner), 1); + let predicates = where_clauses + .bounds + .skip_binders() + .iter(&Interner) + .map(|c| from_chalk(db, c.clone())) + .collect(); + Ty::Dyn(predicates) + } + } + } +} + +const FAKE_PLACEHOLDER: PlaceholderIndex = + PlaceholderIndex { ui: UniverseIndex::ROOT, idx: usize::MAX }; + +/// We currently don't model lifetimes, but Chalk does. So, we have to insert a +/// fake lifetime here, because Chalks built-in logic may expect it to be there. +fn ref_to_chalk( + db: &dyn HirDatabase, + mutability: Mutability, + subst: Substs, +) -> chalk_ir::Ty { + let arg = subst[0].clone().to_chalk(db); + let lifetime = FAKE_PLACEHOLDER.to_lifetime(&Interner); + chalk_ir::ApplicationTy { + name: TypeName::Ref(mutability.to_chalk(db)), + substitution: chalk_ir::Substitution::from_iter( + &Interner, + vec![lifetime.cast(&Interner), arg.cast(&Interner)], + ), + } + .intern(&Interner) +} + +/// Here we remove the lifetime from the type we got from Chalk. 
+fn ref_from_chalk( + db: &dyn HirDatabase, + mutability: chalk_ir::Mutability, + subst: chalk_ir::Substitution, +) -> Ty { + let tys = subst + .iter(&Interner) + .filter_map(|p| Some(from_chalk(db, p.ty(&Interner)?.clone()))) + .collect(); + Ty::apply(TypeCtor::Ref(from_chalk(db, mutability)), Substs(tys)) +} + +/// We currently don't model constants, but Chalk does. So, we have to insert a +/// fake constant here, because Chalks built-in logic may expect it to be there. +fn array_to_chalk(db: &dyn HirDatabase, subst: Substs) -> chalk_ir::Ty { + let arg = subst[0].clone().to_chalk(db); + let usize_ty = chalk_ir::ApplicationTy { + name: TypeName::Scalar(Scalar::Uint(chalk_ir::UintTy::Usize)), + substitution: chalk_ir::Substitution::empty(&Interner), + } + .intern(&Interner); + let const_ = FAKE_PLACEHOLDER.to_const(&Interner, usize_ty); + chalk_ir::ApplicationTy { + name: TypeName::Array, + substitution: chalk_ir::Substitution::from_iter( + &Interner, + vec![arg.cast(&Interner), const_.cast(&Interner)], + ), + } + .intern(&Interner) +} + +/// Here we remove the const from the type we got from Chalk. 
+fn array_from_chalk(db: &dyn HirDatabase, subst: chalk_ir::Substitution) -> Ty { + let tys = subst + .iter(&Interner) + .filter_map(|p| Some(from_chalk(db, p.ty(&Interner)?.clone()))) + .collect(); + Ty::apply(TypeCtor::Array, Substs(tys)) +} + +impl ToChalk for Substs { + type Chalk = chalk_ir::Substitution; + + fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::Substitution { + chalk_ir::Substitution::from_iter(&Interner, self.iter().map(|ty| ty.clone().to_chalk(db))) + } + + fn from_chalk(db: &dyn HirDatabase, parameters: chalk_ir::Substitution) -> Substs { + let tys = parameters + .iter(&Interner) + .map(|p| match p.ty(&Interner) { + Some(ty) => from_chalk(db, ty.clone()), + None => unimplemented!(), + }) + .collect(); + Substs(tys) + } +} + +impl ToChalk for TraitRef { + type Chalk = chalk_ir::TraitRef; + + fn to_chalk(self: TraitRef, db: &dyn HirDatabase) -> chalk_ir::TraitRef { + let trait_id = self.trait_.to_chalk(db); + let substitution = self.substs.to_chalk(db); + chalk_ir::TraitRef { trait_id, substitution } + } + + fn from_chalk(db: &dyn HirDatabase, trait_ref: chalk_ir::TraitRef) -> Self { + let trait_ = from_chalk(db, trait_ref.trait_id); + let substs = from_chalk(db, trait_ref.substitution); + TraitRef { trait_, substs } + } +} + +impl ToChalk for hir_def::TraitId { + type Chalk = TraitId; + + fn to_chalk(self, _db: &dyn HirDatabase) -> TraitId { + chalk_ir::TraitId(self.as_intern_id()) + } + + fn from_chalk(_db: &dyn HirDatabase, trait_id: TraitId) -> hir_def::TraitId { + InternKey::from_intern_id(trait_id.0) + } +} + +impl ToChalk for OpaqueTyId { + type Chalk = chalk_ir::OpaqueTyId; + + fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::OpaqueTyId { + db.intern_impl_trait_id(self).into() + } + + fn from_chalk( + db: &dyn HirDatabase, + opaque_ty_id: chalk_ir::OpaqueTyId, + ) -> OpaqueTyId { + db.lookup_intern_impl_trait_id(opaque_ty_id.into()) + } +} + +impl ToChalk for TypeCtor { + type Chalk = TypeName; + + fn to_chalk(self, db: &dyn 
HirDatabase) -> TypeName { + match self { + TypeCtor::AssociatedType(type_alias) => { + let type_id = type_alias.to_chalk(db); + TypeName::AssociatedType(type_id) + } + + TypeCtor::OpaqueType(impl_trait_id) => { + let id = impl_trait_id.to_chalk(db); + TypeName::OpaqueType(id) + } + + TypeCtor::Bool => TypeName::Scalar(Scalar::Bool), + TypeCtor::Char => TypeName::Scalar(Scalar::Char), + TypeCtor::Int(int_ty) => TypeName::Scalar(int_ty_to_chalk(int_ty)), + TypeCtor::Float(FloatTy { bitness: FloatBitness::X32 }) => { + TypeName::Scalar(Scalar::Float(chalk_ir::FloatTy::F32)) + } + TypeCtor::Float(FloatTy { bitness: FloatBitness::X64 }) => { + TypeName::Scalar(Scalar::Float(chalk_ir::FloatTy::F64)) + } + + TypeCtor::Tuple { cardinality } => TypeName::Tuple(cardinality.into()), + TypeCtor::RawPtr(mutability) => TypeName::Raw(mutability.to_chalk(db)), + TypeCtor::Slice => TypeName::Slice, + TypeCtor::Array => TypeName::Array, + TypeCtor::Ref(mutability) => TypeName::Ref(mutability.to_chalk(db)), + TypeCtor::Str => TypeName::Str, + TypeCtor::FnDef(callable_def) => { + let id = callable_def.to_chalk(db); + TypeName::FnDef(id) + } + TypeCtor::Never => TypeName::Never, + + TypeCtor::Closure { def, expr } => { + let closure_id = db.intern_closure((def, expr)); + TypeName::Closure(closure_id.into()) + } + + TypeCtor::Adt(adt_id) => TypeName::Adt(chalk_ir::AdtId(adt_id)), + + TypeCtor::FnPtr { .. 
} => { + // This should not be reached, since Chalk doesn't represent + // function pointers with TypeName + unreachable!() + } + } + } + + fn from_chalk(db: &dyn HirDatabase, type_name: TypeName) -> TypeCtor { + match type_name { + TypeName::Adt(struct_id) => TypeCtor::Adt(struct_id.0), + TypeName::AssociatedType(type_id) => TypeCtor::AssociatedType(from_chalk(db, type_id)), + TypeName::OpaqueType(opaque_type_id) => { + TypeCtor::OpaqueType(from_chalk(db, opaque_type_id)) + } + + TypeName::Scalar(Scalar::Bool) => TypeCtor::Bool, + TypeName::Scalar(Scalar::Char) => TypeCtor::Char, + TypeName::Scalar(Scalar::Int(int_ty)) => TypeCtor::Int(IntTy { + signedness: Signedness::Signed, + bitness: bitness_from_chalk_int(int_ty), + }), + TypeName::Scalar(Scalar::Uint(uint_ty)) => TypeCtor::Int(IntTy { + signedness: Signedness::Unsigned, + bitness: bitness_from_chalk_uint(uint_ty), + }), + TypeName::Scalar(Scalar::Float(chalk_ir::FloatTy::F32)) => { + TypeCtor::Float(FloatTy { bitness: FloatBitness::X32 }) + } + TypeName::Scalar(Scalar::Float(chalk_ir::FloatTy::F64)) => { + TypeCtor::Float(FloatTy { bitness: FloatBitness::X64 }) + } + TypeName::Tuple(cardinality) => TypeCtor::Tuple { cardinality: cardinality as u16 }, + TypeName::Raw(mutability) => TypeCtor::RawPtr(from_chalk(db, mutability)), + TypeName::Slice => TypeCtor::Slice, + TypeName::Ref(mutability) => TypeCtor::Ref(from_chalk(db, mutability)), + TypeName::Str => TypeCtor::Str, + TypeName::Never => TypeCtor::Never, + + TypeName::FnDef(fn_def_id) => { + let callable_def = from_chalk(db, fn_def_id); + TypeCtor::FnDef(callable_def) + } + TypeName::Array => TypeCtor::Array, + + TypeName::Closure(id) => { + let id: crate::db::ClosureId = id.into(); + let (def, expr) = db.lookup_intern_closure(id); + TypeCtor::Closure { def, expr } + } + + TypeName::Error => { + // this should not be reached, since we don't represent TypeName::Error with TypeCtor + unreachable!() + } + } + } +} + +fn bitness_from_chalk_uint(uint_ty: 
chalk_ir::UintTy) -> IntBitness { + use chalk_ir::UintTy; + + match uint_ty { + UintTy::Usize => IntBitness::Xsize, + UintTy::U8 => IntBitness::X8, + UintTy::U16 => IntBitness::X16, + UintTy::U32 => IntBitness::X32, + UintTy::U64 => IntBitness::X64, + UintTy::U128 => IntBitness::X128, + } +} + +fn bitness_from_chalk_int(int_ty: chalk_ir::IntTy) -> IntBitness { + use chalk_ir::IntTy; + + match int_ty { + IntTy::Isize => IntBitness::Xsize, + IntTy::I8 => IntBitness::X8, + IntTy::I16 => IntBitness::X16, + IntTy::I32 => IntBitness::X32, + IntTy::I64 => IntBitness::X64, + IntTy::I128 => IntBitness::X128, + } +} + +fn int_ty_to_chalk(int_ty: IntTy) -> Scalar { + use chalk_ir::{IntTy, UintTy}; + + match int_ty.signedness { + Signedness::Signed => Scalar::Int(match int_ty.bitness { + IntBitness::Xsize => IntTy::Isize, + IntBitness::X8 => IntTy::I8, + IntBitness::X16 => IntTy::I16, + IntBitness::X32 => IntTy::I32, + IntBitness::X64 => IntTy::I64, + IntBitness::X128 => IntTy::I128, + }), + Signedness::Unsigned => Scalar::Uint(match int_ty.bitness { + IntBitness::Xsize => UintTy::Usize, + IntBitness::X8 => UintTy::U8, + IntBitness::X16 => UintTy::U16, + IntBitness::X32 => UintTy::U32, + IntBitness::X64 => UintTy::U64, + IntBitness::X128 => UintTy::U128, + }), + } +} + +impl ToChalk for Mutability { + type Chalk = chalk_ir::Mutability; + fn to_chalk(self, _db: &dyn HirDatabase) -> Self::Chalk { + match self { + Mutability::Shared => chalk_ir::Mutability::Not, + Mutability::Mut => chalk_ir::Mutability::Mut, + } + } + fn from_chalk(_db: &dyn HirDatabase, chalk: Self::Chalk) -> Self { + match chalk { + chalk_ir::Mutability::Mut => Mutability::Mut, + chalk_ir::Mutability::Not => Mutability::Shared, + } + } +} + +impl ToChalk for hir_def::ImplId { + type Chalk = ImplId; + + fn to_chalk(self, _db: &dyn HirDatabase) -> ImplId { + chalk_ir::ImplId(self.as_intern_id()) + } + + fn from_chalk(_db: &dyn HirDatabase, impl_id: ImplId) -> hir_def::ImplId { + 
InternKey::from_intern_id(impl_id.0) + } +} + +impl ToChalk for CallableDefId { + type Chalk = FnDefId; + + fn to_chalk(self, db: &dyn HirDatabase) -> FnDefId { + db.intern_callable_def(self).into() + } + + fn from_chalk(db: &dyn HirDatabase, fn_def_id: FnDefId) -> CallableDefId { + db.lookup_intern_callable_def(fn_def_id.into()) + } +} + +impl ToChalk for TypeAliasId { + type Chalk = AssocTypeId; + + fn to_chalk(self, _db: &dyn HirDatabase) -> AssocTypeId { + chalk_ir::AssocTypeId(self.as_intern_id()) + } + + fn from_chalk(_db: &dyn HirDatabase, type_alias_id: AssocTypeId) -> TypeAliasId { + InternKey::from_intern_id(type_alias_id.0) + } +} + +pub struct TypeAliasAsValue(pub TypeAliasId); + +impl ToChalk for TypeAliasAsValue { + type Chalk = AssociatedTyValueId; + + fn to_chalk(self, _db: &dyn HirDatabase) -> AssociatedTyValueId { + rust_ir::AssociatedTyValueId(self.0.as_intern_id()) + } + + fn from_chalk( + _db: &dyn HirDatabase, + assoc_ty_value_id: AssociatedTyValueId, + ) -> TypeAliasAsValue { + TypeAliasAsValue(TypeAliasId::from_intern_id(assoc_ty_value_id.0)) + } +} + +impl ToChalk for GenericPredicate { + type Chalk = chalk_ir::QuantifiedWhereClause; + + fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::QuantifiedWhereClause { + match self { + GenericPredicate::Implemented(trait_ref) => { + let chalk_trait_ref = trait_ref.to_chalk(db); + let chalk_trait_ref = chalk_trait_ref.shifted_in(&Interner); + make_binders(chalk_ir::WhereClause::Implemented(chalk_trait_ref), 0) + } + GenericPredicate::Projection(projection_pred) => { + let ty = projection_pred.ty.to_chalk(db).shifted_in(&Interner); + let projection = projection_pred.projection_ty.to_chalk(db).shifted_in(&Interner); + let alias = chalk_ir::AliasTy::Projection(projection); + make_binders(chalk_ir::WhereClause::AliasEq(chalk_ir::AliasEq { alias, ty }), 0) + } + GenericPredicate::Error => panic!("tried passing GenericPredicate::Error to Chalk"), + } + } + + fn from_chalk( + db: &dyn HirDatabase, + 
where_clause: chalk_ir::QuantifiedWhereClause, + ) -> GenericPredicate { + // we don't produce any where clauses with binders and can't currently deal with them + match where_clause + .skip_binders() + .shifted_out(&Interner) + .expect("unexpected bound vars in where clause") + { + chalk_ir::WhereClause::Implemented(tr) => { + GenericPredicate::Implemented(from_chalk(db, tr)) + } + chalk_ir::WhereClause::AliasEq(projection_eq) => { + let projection_ty = from_chalk( + db, + match projection_eq.alias { + chalk_ir::AliasTy::Projection(p) => p, + _ => unimplemented!(), + }, + ); + let ty = from_chalk(db, projection_eq.ty); + GenericPredicate::Projection(ProjectionPredicate { projection_ty, ty }) + } + + chalk_ir::WhereClause::LifetimeOutlives(_) => { + // we shouldn't get these from Chalk + panic!("encountered LifetimeOutlives from Chalk") + } + + chalk_ir::WhereClause::TypeOutlives(_) => { + // we shouldn't get these from Chalk + panic!("encountered TypeOutlives from Chalk") + } + } + } +} + +impl ToChalk for ProjectionTy { + type Chalk = chalk_ir::ProjectionTy; + + fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::ProjectionTy { + chalk_ir::ProjectionTy { + associated_ty_id: self.associated_ty.to_chalk(db), + substitution: self.parameters.to_chalk(db), + } + } + + fn from_chalk( + db: &dyn HirDatabase, + projection_ty: chalk_ir::ProjectionTy, + ) -> ProjectionTy { + ProjectionTy { + associated_ty: from_chalk(db, projection_ty.associated_ty_id), + parameters: from_chalk(db, projection_ty.substitution), + } + } +} + +impl ToChalk for ProjectionPredicate { + type Chalk = chalk_ir::AliasEq; + + fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::AliasEq { + chalk_ir::AliasEq { + alias: chalk_ir::AliasTy::Projection(self.projection_ty.to_chalk(db)), + ty: self.ty.to_chalk(db), + } + } + + fn from_chalk(_db: &dyn HirDatabase, _normalize: chalk_ir::AliasEq) -> Self { + unimplemented!() + } +} + +impl ToChalk for Obligation { + type Chalk = chalk_ir::DomainGoal; + + fn 
to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::DomainGoal { + match self { + Obligation::Trait(tr) => tr.to_chalk(db).cast(&Interner), + Obligation::Projection(pr) => pr.to_chalk(db).cast(&Interner), + } + } + + fn from_chalk(_db: &dyn HirDatabase, _goal: chalk_ir::DomainGoal) -> Self { + unimplemented!() + } +} + +impl ToChalk for Canonical +where + T: ToChalk, + T::Chalk: HasInterner, +{ + type Chalk = chalk_ir::Canonical; + + fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::Canonical { + let kinds = self + .kinds + .iter() + .map(|k| match k { + TyKind::General => chalk_ir::TyKind::General, + TyKind::Integer => chalk_ir::TyKind::Integer, + TyKind::Float => chalk_ir::TyKind::Float, + }) + .map(|tk| { + chalk_ir::CanonicalVarKind::new( + chalk_ir::VariableKind::Ty(tk), + chalk_ir::UniverseIndex::ROOT, + ) + }); + let value = self.value.to_chalk(db); + chalk_ir::Canonical { + value, + binders: chalk_ir::CanonicalVarKinds::from_iter(&Interner, kinds), + } + } + + fn from_chalk(db: &dyn HirDatabase, canonical: chalk_ir::Canonical) -> Canonical { + let kinds = canonical + .binders + .iter(&Interner) + .map(|k| match k.kind { + chalk_ir::VariableKind::Ty(tk) => match tk { + chalk_ir::TyKind::General => TyKind::General, + chalk_ir::TyKind::Integer => TyKind::Integer, + chalk_ir::TyKind::Float => TyKind::Float, + }, + chalk_ir::VariableKind::Lifetime => panic!("unexpected lifetime from Chalk"), + chalk_ir::VariableKind::Const(_) => panic!("unexpected const from Chalk"), + }) + .collect(); + Canonical { kinds, value: from_chalk(db, canonical.value) } + } +} + +impl ToChalk for Arc { + type Chalk = chalk_ir::Environment; + + fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::Environment { + let mut clauses = Vec::new(); + for pred in &self.predicates { + if pred.is_error() { + // for env, we just ignore errors + continue; + } + let program_clause: chalk_ir::ProgramClause = + pred.clone().to_chalk(db).cast(&Interner); + 
clauses.push(program_clause.into_from_env_clause(&Interner)); + } + chalk_ir::Environment::new(&Interner).add_clauses(&Interner, clauses) + } + + fn from_chalk( + _db: &dyn HirDatabase, + _env: chalk_ir::Environment, + ) -> Arc { + unimplemented!() + } +} + +impl ToChalk for InEnvironment +where + T::Chalk: chalk_ir::interner::HasInterner, +{ + type Chalk = chalk_ir::InEnvironment; + + fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::InEnvironment { + chalk_ir::InEnvironment { + environment: self.environment.to_chalk(db), + goal: self.value.to_chalk(db), + } + } + + fn from_chalk( + db: &dyn HirDatabase, + in_env: chalk_ir::InEnvironment, + ) -> InEnvironment { + InEnvironment { + environment: from_chalk(db, in_env.environment), + value: from_chalk(db, in_env.goal), + } + } +} + +pub(super) fn make_binders(value: T, num_vars: usize) -> chalk_ir::Binders +where + T: HasInterner, +{ + chalk_ir::Binders::new( + chalk_ir::VariableKinds::from_iter( + &Interner, + std::iter::repeat(chalk_ir::VariableKind::Ty(chalk_ir::TyKind::General)).take(num_vars), + ), + value, + ) +} + +pub(super) fn convert_where_clauses( + db: &dyn HirDatabase, + def: GenericDefId, + substs: &Substs, +) -> Vec> { + let generic_predicates = db.generic_predicates(def); + let mut result = Vec::with_capacity(generic_predicates.len()); + for pred in generic_predicates.iter() { + if pred.value.is_error() { + // skip errored predicates completely + continue; + } + result.push(pred.clone().subst(substs).to_chalk(db)); + } + result +} + +pub(super) fn generic_predicate_to_inline_bound( + db: &dyn HirDatabase, + pred: &GenericPredicate, + self_ty: &Ty, +) -> Option> { + // An InlineBound is like a GenericPredicate, except the self type is left out. + // We don't have a special type for this, but Chalk does. 
+ match pred { + GenericPredicate::Implemented(trait_ref) => { + if &trait_ref.substs[0] != self_ty { + // we can only convert predicates back to type bounds if they + // have the expected self type + return None; + } + let args_no_self = trait_ref.substs[1..] + .iter() + .map(|ty| ty.clone().to_chalk(db).cast(&Interner)) + .collect(); + let trait_bound = + rust_ir::TraitBound { trait_id: trait_ref.trait_.to_chalk(db), args_no_self }; + Some(rust_ir::InlineBound::TraitBound(trait_bound)) + } + GenericPredicate::Projection(proj) => { + if &proj.projection_ty.parameters[0] != self_ty { + return None; + } + let trait_ = match proj.projection_ty.associated_ty.lookup(db.upcast()).container { + AssocContainerId::TraitId(t) => t, + _ => panic!("associated type not in trait"), + }; + let args_no_self = proj.projection_ty.parameters[1..] + .iter() + .map(|ty| ty.clone().to_chalk(db).cast(&Interner)) + .collect(); + let alias_eq_bound = rust_ir::AliasEqBound { + value: proj.ty.clone().to_chalk(db), + trait_bound: rust_ir::TraitBound { trait_id: trait_.to_chalk(db), args_no_self }, + associated_ty_id: proj.projection_ty.associated_ty.to_chalk(db), + parameters: Vec::new(), // FIXME we don't support generic associated types yet + }; + Some(rust_ir::InlineBound::AliasEqBound(alias_eq_bound)) + } + GenericPredicate::Error => None, + } +} diff --git a/crates/ra_hir_ty/src/traits/chalk/tls.rs b/crates/hir_ty/src/traits/chalk/tls.rs similarity index 100% rename from crates/ra_hir_ty/src/traits/chalk/tls.rs rename to crates/hir_ty/src/traits/chalk/tls.rs diff --git a/crates/ra_hir_ty/src/utils.rs b/crates/hir_ty/src/utils.rs similarity index 100% rename from crates/ra_hir_ty/src/utils.rs rename to crates/hir_ty/src/utils.rs diff --git a/crates/ide/Cargo.toml b/crates/ide/Cargo.toml new file mode 100644 index 0000000000..e4b970c73b --- /dev/null +++ b/crates/ide/Cargo.toml @@ -0,0 +1,35 @@ +[package] +name = "ide" +version = "0.0.0" +license = "MIT OR Apache-2.0" +authors = 
["rust-analyzer developers"] +edition = "2018" + +[lib] +doctest = false + +[dependencies] +either = "1.5.3" +indexmap = "1.3.2" +itertools = "0.9.0" +log = "0.4.8" +rustc-hash = "1.1.0" +oorandom = "11.1.2" + +stdx = { path = "../stdx" } +syntax = { path = "../syntax" } +text_edit = { path = "../text_edit" } +base_db = { path = "../base_db" } +ide_db = { path = "../ide_db" } +cfg = { path = "../cfg" } +profile = { path = "../profile" } +test_utils = { path = "../test_utils" } +assists = { path = "../assists" } +ssr = { path = "../ssr" } + +# ide should depend only on the top-level `hir` package. if you need +# something from some `hir_xxx` subpackage, reexport the API via `hir`. +hir = { path = "../hir" } + +[dev-dependencies] +expect = { path = "../expect" } diff --git a/crates/ide/src/call_hierarchy.rs b/crates/ide/src/call_hierarchy.rs new file mode 100644 index 0000000000..58e26b94ca --- /dev/null +++ b/crates/ide/src/call_hierarchy.rs @@ -0,0 +1,393 @@ +//! Entry point for call-hierarchy + +use indexmap::IndexMap; + +use hir::Semantics; +use ide_db::RootDatabase; +use syntax::{ast, match_ast, AstNode, TextRange}; + +use crate::{ + call_info::FnCallNode, display::ToNav, goto_definition, references, FilePosition, + NavigationTarget, RangeInfo, +}; + +#[derive(Debug, Clone)] +pub struct CallItem { + pub target: NavigationTarget, + pub ranges: Vec, +} + +impl CallItem { + #[cfg(test)] + pub(crate) fn assert_match(&self, expected: &str) { + let actual = self.debug_render(); + test_utils::assert_eq_text!(expected.trim(), actual.trim(),); + } + + #[cfg(test)] + pub(crate) fn debug_render(&self) -> String { + format!("{} : {:?}", self.target.debug_render(), self.ranges) + } +} + +pub(crate) fn call_hierarchy( + db: &RootDatabase, + position: FilePosition, +) -> Option>> { + goto_definition::goto_definition(db, position) +} + +pub(crate) fn incoming_calls(db: &RootDatabase, position: FilePosition) -> Option> { + let sema = Semantics::new(db); + + // 1. 
Find all refs + // 2. Loop through refs and determine unique fndef. This will become our `from: CallHierarchyItem,` in the reply. + // 3. Add ranges relative to the start of the fndef. + let refs = references::find_all_refs(&sema, position, None)?; + + let mut calls = CallLocations::default(); + + for reference in refs.info.references() { + let file_id = reference.file_range.file_id; + let file = sema.parse(file_id); + let file = file.syntax(); + let token = file.token_at_offset(reference.file_range.range.start()).next()?; + let token = sema.descend_into_macros(token); + let syntax = token.parent(); + + // This target is the containing function + if let Some(nav) = syntax.ancestors().find_map(|node| { + match_ast! { + match node { + ast::Fn(it) => { + let def = sema.to_def(&it)?; + Some(def.to_nav(sema.db)) + }, + _ => None, + } + } + }) { + let relative_range = reference.file_range.range; + calls.add(&nav, relative_range); + } + } + + Some(calls.into_items()) +} + +pub(crate) fn outgoing_calls(db: &RootDatabase, position: FilePosition) -> Option> { + let sema = Semantics::new(db); + let file_id = position.file_id; + let file = sema.parse(file_id); + let file = file.syntax(); + let token = file.token_at_offset(position.offset).next()?; + let token = sema.descend_into_macros(token); + let syntax = token.parent(); + + let mut calls = CallLocations::default(); + + syntax + .descendants() + .filter_map(|node| FnCallNode::with_node_exact(&node)) + .filter_map(|call_node| { + let name_ref = call_node.name_ref()?; + + if let Some(func_target) = match &call_node { + FnCallNode::CallExpr(expr) => { + //FIXME: Type::as_callable is broken + let callable = sema.type_of_expr(&expr.expr()?)?.as_callable(db)?; + match callable.kind() { + hir::CallableKind::Function(it) => { + let fn_def: hir::Function = it.into(); + let nav = fn_def.to_nav(db); + Some(nav) + } + _ => None, + } + } + FnCallNode::MethodCallExpr(expr) => { + let function = sema.resolve_method_call(&expr)?; + 
Some(function.to_nav(db)) + } + } { + Some((func_target, name_ref.syntax().text_range())) + } else { + None + } + }) + .for_each(|(nav, range)| calls.add(&nav, range)); + + Some(calls.into_items()) +} + +#[derive(Default)] +struct CallLocations { + funcs: IndexMap>, +} + +impl CallLocations { + fn add(&mut self, target: &NavigationTarget, range: TextRange) { + self.funcs.entry(target.clone()).or_default().push(range); + } + + fn into_items(self) -> Vec { + self.funcs.into_iter().map(|(target, ranges)| CallItem { target, ranges }).collect() + } +} + +#[cfg(test)] +mod tests { + use base_db::FilePosition; + + use crate::mock_analysis::analysis_and_position; + + fn check_hierarchy( + ra_fixture: &str, + expected: &str, + expected_incoming: &[&str], + expected_outgoing: &[&str], + ) { + let (analysis, pos) = analysis_and_position(ra_fixture); + + let mut navs = analysis.call_hierarchy(pos).unwrap().unwrap().info; + assert_eq!(navs.len(), 1); + let nav = navs.pop().unwrap(); + nav.assert_match(expected); + + let item_pos = + FilePosition { file_id: nav.file_id, offset: nav.focus_or_full_range().start() }; + let incoming_calls = analysis.incoming_calls(item_pos).unwrap().unwrap(); + assert_eq!(incoming_calls.len(), expected_incoming.len()); + + for call in 0..incoming_calls.len() { + incoming_calls[call].assert_match(expected_incoming[call]); + } + + let outgoing_calls = analysis.outgoing_calls(item_pos).unwrap().unwrap(); + assert_eq!(outgoing_calls.len(), expected_outgoing.len()); + + for call in 0..outgoing_calls.len() { + outgoing_calls[call].assert_match(expected_outgoing[call]); + } + } + + #[test] + fn test_call_hierarchy_on_ref() { + check_hierarchy( + r#" +//- /lib.rs +fn callee() {} +fn caller() { + call<|>ee(); +} +"#, + "callee FN FileId(1) 0..14 3..9", + &["caller FN FileId(1) 15..44 18..24 : [33..39]"], + &[], + ); + } + + #[test] + fn test_call_hierarchy_on_def() { + check_hierarchy( + r#" +//- /lib.rs +fn call<|>ee() {} +fn caller() { + callee(); +} +"#, 
+ "callee FN FileId(1) 0..14 3..9", + &["caller FN FileId(1) 15..44 18..24 : [33..39]"], + &[], + ); + } + + #[test] + fn test_call_hierarchy_in_same_fn() { + check_hierarchy( + r#" +//- /lib.rs +fn callee() {} +fn caller() { + call<|>ee(); + callee(); +} +"#, + "callee FN FileId(1) 0..14 3..9", + &["caller FN FileId(1) 15..58 18..24 : [33..39, 47..53]"], + &[], + ); + } + + #[test] + fn test_call_hierarchy_in_different_fn() { + check_hierarchy( + r#" +//- /lib.rs +fn callee() {} +fn caller1() { + call<|>ee(); +} + +fn caller2() { + callee(); +} +"#, + "callee FN FileId(1) 0..14 3..9", + &[ + "caller1 FN FileId(1) 15..45 18..25 : [34..40]", + "caller2 FN FileId(1) 47..77 50..57 : [66..72]", + ], + &[], + ); + } + + #[test] + fn test_call_hierarchy_in_tests_mod() { + check_hierarchy( + r#" +//- /lib.rs cfg:test +fn callee() {} +fn caller1() { + call<|>ee(); +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_caller() { + callee(); + } +} +"#, + "callee FN FileId(1) 0..14 3..9", + &[ + "caller1 FN FileId(1) 15..45 18..25 : [34..40]", + "test_caller FN FileId(1) 95..149 110..121 : [134..140]", + ], + &[], + ); + } + + #[test] + fn test_call_hierarchy_in_different_files() { + check_hierarchy( + r#" +//- /lib.rs +mod foo; +use foo::callee; + +fn caller() { + call<|>ee(); +} + +//- /foo/mod.rs +pub fn callee() {} +"#, + "callee FN FileId(2) 0..18 7..13", + &["caller FN FileId(1) 27..56 30..36 : [45..51]"], + &[], + ); + } + + #[test] + fn test_call_hierarchy_outgoing() { + check_hierarchy( + r#" +//- /lib.rs +fn callee() {} +fn call<|>er() { + callee(); + callee(); +} +"#, + "caller FN FileId(1) 15..58 18..24", + &[], + &["callee FN FileId(1) 0..14 3..9 : [33..39, 47..53]"], + ); + } + + #[test] + fn test_call_hierarchy_outgoing_in_different_files() { + check_hierarchy( + r#" +//- /lib.rs +mod foo; +use foo::callee; + +fn call<|>er() { + callee(); +} + +//- /foo/mod.rs +pub fn callee() {} +"#, + "caller FN FileId(1) 27..56 30..36", + &[], + &["callee FN 
FileId(2) 0..18 7..13 : [45..51]"], + ); + } + + #[test] + fn test_call_hierarchy_incoming_outgoing() { + check_hierarchy( + r#" +//- /lib.rs +fn caller1() { + call<|>er2(); +} + +fn caller2() { + caller3(); +} + +fn caller3() { + +} +"#, + "caller2 FN FileId(1) 33..64 36..43", + &["caller1 FN FileId(1) 0..31 3..10 : [19..26]"], + &["caller3 FN FileId(1) 66..83 69..76 : [52..59]"], + ); + } + + #[test] + fn test_call_hierarchy_issue_5103() { + check_hierarchy( + r#" +fn a() { + b() +} + +fn b() {} + +fn main() { + a<|>() +} +"#, + "a FN FileId(1) 0..18 3..4", + &["main FN FileId(1) 31..52 34..38 : [47..48]"], + &["b FN FileId(1) 20..29 23..24 : [13..14]"], + ); + + check_hierarchy( + r#" +fn a() { + b<|>() +} + +fn b() {} + +fn main() { + a() +} +"#, + "b FN FileId(1) 20..29 23..24", + &["a FN FileId(1) 0..18 3..4 : [13..14]"], + &[], + ); + } +} diff --git a/crates/ide/src/call_info.rs b/crates/ide/src/call_info.rs new file mode 100644 index 0000000000..86abd2d8ce --- /dev/null +++ b/crates/ide/src/call_info.rs @@ -0,0 +1,742 @@ +//! FIXME: write short doc here +use either::Either; +use hir::{Docs, HirDisplay, Semantics, Type}; +use ide_db::RootDatabase; +use stdx::format_to; +use syntax::{ + ast::{self, ArgListOwner}, + match_ast, AstNode, SyntaxNode, SyntaxToken, TextRange, TextSize, +}; +use test_utils::mark; + +use crate::FilePosition; + +/// Contains information about a call site. Specifically the +/// `FunctionSignature`and current parameter. 
+#[derive(Debug)] +pub struct CallInfo { + pub doc: Option, + pub signature: String, + pub active_parameter: Option, + parameters: Vec, +} + +impl CallInfo { + pub fn parameter_labels(&self) -> impl Iterator + '_ { + self.parameters.iter().map(move |&it| &self.signature[it]) + } + pub fn parameter_ranges(&self) -> &[TextRange] { + &self.parameters + } + fn push_param(&mut self, param: &str) { + if !self.signature.ends_with('(') { + self.signature.push_str(", "); + } + let start = TextSize::of(&self.signature); + self.signature.push_str(param); + let end = TextSize::of(&self.signature); + self.parameters.push(TextRange::new(start, end)) + } +} + +/// Computes parameter information for the given call expression. +pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option { + let sema = Semantics::new(db); + let file = sema.parse(position.file_id); + let file = file.syntax(); + let token = file.token_at_offset(position.offset).next()?; + let token = sema.descend_into_macros(token); + + let (callable, active_parameter) = call_info_impl(&sema, token)?; + + let mut res = + CallInfo { doc: None, signature: String::new(), parameters: vec![], active_parameter }; + + match callable.kind() { + hir::CallableKind::Function(func) => { + res.doc = func.docs(db).map(|it| it.as_str().to_string()); + format_to!(res.signature, "fn {}", func.name(db)); + } + hir::CallableKind::TupleStruct(strukt) => { + res.doc = strukt.docs(db).map(|it| it.as_str().to_string()); + format_to!(res.signature, "struct {}", strukt.name(db)); + } + hir::CallableKind::TupleEnumVariant(variant) => { + res.doc = variant.docs(db).map(|it| it.as_str().to_string()); + format_to!( + res.signature, + "enum {}::{}", + variant.parent_enum(db).name(db), + variant.name(db) + ); + } + hir::CallableKind::Closure => (), + } + + res.signature.push('('); + { + if let Some(self_param) = callable.receiver_param(db) { + format_to!(res.signature, "{}", self_param) + } + let mut buf = String::new(); + for 
(pat, ty) in callable.params(db) { + buf.clear(); + if let Some(pat) = pat { + match pat { + Either::Left(_self) => format_to!(buf, "self: "), + Either::Right(pat) => format_to!(buf, "{}: ", pat), + } + } + format_to!(buf, "{}", ty.display(db)); + res.push_param(&buf); + } + } + res.signature.push(')'); + + match callable.kind() { + hir::CallableKind::Function(_) | hir::CallableKind::Closure => { + let ret_type = callable.return_type(); + if !ret_type.is_unit() { + format_to!(res.signature, " -> {}", ret_type.display(db)); + } + } + hir::CallableKind::TupleStruct(_) | hir::CallableKind::TupleEnumVariant(_) => {} + } + Some(res) +} + +fn call_info_impl( + sema: &Semantics, + token: SyntaxToken, +) -> Option<(hir::Callable, Option)> { + // Find the calling expression and it's NameRef + let calling_node = FnCallNode::with_node(&token.parent())?; + + let callable = match &calling_node { + FnCallNode::CallExpr(call) => sema.type_of_expr(&call.expr()?)?.as_callable(sema.db)?, + FnCallNode::MethodCallExpr(call) => sema.resolve_method_call_as_callable(call)?, + }; + let active_param = if let Some(arg_list) = calling_node.arg_list() { + // Number of arguments specified at the call site + let num_args_at_callsite = arg_list.args().count(); + + let arg_list_range = arg_list.syntax().text_range(); + if !arg_list_range.contains_inclusive(token.text_range().start()) { + mark::hit!(call_info_bad_offset); + return None; + } + let param = std::cmp::min( + num_args_at_callsite, + arg_list + .args() + .take_while(|arg| arg.syntax().text_range().end() <= token.text_range().start()) + .count(), + ); + + Some(param) + } else { + None + }; + Some((callable, active_param)) +} + +#[derive(Debug)] +pub(crate) struct ActiveParameter { + pub(crate) ty: Type, + pub(crate) name: String, +} + +impl ActiveParameter { + pub(crate) fn at(db: &RootDatabase, position: FilePosition) -> Option { + let sema = Semantics::new(db); + let file = sema.parse(position.file_id); + let file = file.syntax(); + 
let token = file.token_at_offset(position.offset).next()?; + let token = sema.descend_into_macros(token); + Self::at_token(&sema, token) + } + + pub(crate) fn at_token(sema: &Semantics, token: SyntaxToken) -> Option { + let (signature, active_parameter) = call_info_impl(&sema, token)?; + + let idx = active_parameter?; + let mut params = signature.params(sema.db); + if !(idx < params.len()) { + mark::hit!(too_many_arguments); + return None; + } + let (pat, ty) = params.swap_remove(idx); + let name = pat?.to_string(); + Some(ActiveParameter { ty, name }) + } +} + +#[derive(Debug)] +pub(crate) enum FnCallNode { + CallExpr(ast::CallExpr), + MethodCallExpr(ast::MethodCallExpr), +} + +impl FnCallNode { + fn with_node(syntax: &SyntaxNode) -> Option { + syntax.ancestors().find_map(|node| { + match_ast! { + match node { + ast::CallExpr(it) => Some(FnCallNode::CallExpr(it)), + ast::MethodCallExpr(it) => { + let arg_list = it.arg_list()?; + if !arg_list.syntax().text_range().contains_range(syntax.text_range()) { + return None; + } + Some(FnCallNode::MethodCallExpr(it)) + }, + _ => None, + } + } + }) + } + + pub(crate) fn with_node_exact(node: &SyntaxNode) -> Option { + match_ast! { + match node { + ast::CallExpr(it) => Some(FnCallNode::CallExpr(it)), + ast::MethodCallExpr(it) => Some(FnCallNode::MethodCallExpr(it)), + _ => None, + } + } + } + + pub(crate) fn name_ref(&self) -> Option { + match self { + FnCallNode::CallExpr(call_expr) => Some(match call_expr.expr()? 
{ + ast::Expr::PathExpr(path_expr) => path_expr.path()?.segment()?.name_ref()?, + _ => return None, + }), + + FnCallNode::MethodCallExpr(call_expr) => { + call_expr.syntax().children().filter_map(ast::NameRef::cast).next() + } + } + } + + fn arg_list(&self) -> Option { + match self { + FnCallNode::CallExpr(expr) => expr.arg_list(), + FnCallNode::MethodCallExpr(expr) => expr.arg_list(), + } + } +} + +#[cfg(test)] +mod tests { + use expect::{expect, Expect}; + use test_utils::mark; + + use crate::mock_analysis::analysis_and_position; + + fn check(ra_fixture: &str, expect: Expect) { + let (analysis, position) = analysis_and_position(ra_fixture); + let call_info = analysis.call_info(position).unwrap(); + let actual = match call_info { + Some(call_info) => { + let docs = match &call_info.doc { + None => "".to_string(), + Some(docs) => format!("{}\n------\n", docs.as_str()), + }; + let params = call_info + .parameter_labels() + .enumerate() + .map(|(i, param)| { + if Some(i) == call_info.active_parameter { + format!("<{}>", param) + } else { + param.to_string() + } + }) + .collect::>() + .join(", "); + format!("{}{}\n({})\n", docs, call_info.signature, params) + } + None => String::new(), + }; + expect.assert_eq(&actual); + } + + #[test] + fn test_fn_signature_two_args() { + check( + r#" +fn foo(x: u32, y: u32) -> u32 {x + y} +fn bar() { foo(<|>3, ); } +"#, + expect![[r#" + fn foo(x: u32, y: u32) -> u32 + (, y: u32) + "#]], + ); + check( + r#" +fn foo(x: u32, y: u32) -> u32 {x + y} +fn bar() { foo(3<|>, ); } +"#, + expect![[r#" + fn foo(x: u32, y: u32) -> u32 + (, y: u32) + "#]], + ); + check( + r#" +fn foo(x: u32, y: u32) -> u32 {x + y} +fn bar() { foo(3,<|> ); } +"#, + expect![[r#" + fn foo(x: u32, y: u32) -> u32 + (x: u32, ) + "#]], + ); + check( + r#" +fn foo(x: u32, y: u32) -> u32 {x + y} +fn bar() { foo(3, <|>); } +"#, + expect![[r#" + fn foo(x: u32, y: u32) -> u32 + (x: u32, ) + "#]], + ); + } + + #[test] + fn test_fn_signature_two_args_empty() { + check( + r#" 
+fn foo(x: u32, y: u32) -> u32 {x + y} +fn bar() { foo(<|>); } +"#, + expect![[r#" + fn foo(x: u32, y: u32) -> u32 + (, y: u32) + "#]], + ); + } + + #[test] + fn test_fn_signature_two_args_first_generics() { + check( + r#" +fn foo(x: T, y: U) -> u32 + where T: Copy + Display, U: Debug +{ x + y } + +fn bar() { foo(<|>3, ); } +"#, + expect![[r#" + fn foo(x: i32, y: {unknown}) -> u32 + (, y: {unknown}) + "#]], + ); + } + + #[test] + fn test_fn_signature_no_params() { + check( + r#" +fn foo() -> T where T: Copy + Display {} +fn bar() { foo(<|>); } +"#, + expect![[r#" + fn foo() -> {unknown} + () + "#]], + ); + } + + #[test] + fn test_fn_signature_for_impl() { + check( + r#" +struct F; +impl F { pub fn new() { } } +fn bar() { + let _ : F = F::new(<|>); +} +"#, + expect![[r#" + fn new() + () + "#]], + ); + } + + #[test] + fn test_fn_signature_for_method_self() { + check( + r#" +struct S; +impl S { pub fn do_it(&self) {} } + +fn bar() { + let s: S = S; + s.do_it(<|>); +} +"#, + expect![[r#" + fn do_it(&self) + () + "#]], + ); + } + + #[test] + fn test_fn_signature_for_method_with_arg() { + check( + r#" +struct S; +impl S { + fn foo(&self, x: i32) {} +} + +fn main() { S.foo(<|>); } +"#, + expect![[r#" + fn foo(&self, x: i32) + () + "#]], + ); + } + + #[test] + fn test_fn_signature_for_method_with_arg_as_assoc_fn() { + check( + r#" +struct S; +impl S { + fn foo(&self, x: i32) {} +} + +fn main() { S::foo(<|>); } +"#, + expect![[r#" + fn foo(self: &S, x: i32) + (, x: i32) + "#]], + ); + } + + #[test] + fn test_fn_signature_with_docs_simple() { + check( + r#" +/// test +// non-doc-comment +fn foo(j: u32) -> u32 { + j +} + +fn bar() { + let _ = foo(<|>); +} +"#, + expect![[r#" + test + ------ + fn foo(j: u32) -> u32 + () + "#]], + ); + } + + #[test] + fn test_fn_signature_with_docs() { + check( + r#" +/// Adds one to the number given. 
+/// +/// # Examples +/// +/// ``` +/// let five = 5; +/// +/// assert_eq!(6, my_crate::add_one(5)); +/// ``` +pub fn add_one(x: i32) -> i32 { + x + 1 +} + +pub fn do() { + add_one(<|> +}"#, + expect![[r##" + Adds one to the number given. + + # Examples + + ``` + let five = 5; + + assert_eq!(6, my_crate::add_one(5)); + ``` + ------ + fn add_one(x: i32) -> i32 + () + "##]], + ); + } + + #[test] + fn test_fn_signature_with_docs_impl() { + check( + r#" +struct addr; +impl addr { + /// Adds one to the number given. + /// + /// # Examples + /// + /// ``` + /// let five = 5; + /// + /// assert_eq!(6, my_crate::add_one(5)); + /// ``` + pub fn add_one(x: i32) -> i32 { + x + 1 + } +} + +pub fn do_it() { + addr {}; + addr::add_one(<|>); +} +"#, + expect![[r##" + Adds one to the number given. + + # Examples + + ``` + let five = 5; + + assert_eq!(6, my_crate::add_one(5)); + ``` + ------ + fn add_one(x: i32) -> i32 + () + "##]], + ); + } + + #[test] + fn test_fn_signature_with_docs_from_actix() { + check( + r#" +struct WriteHandler; + +impl WriteHandler { + /// Method is called when writer emits error. + /// + /// If this method returns `ErrorAction::Continue` writer processing + /// continues otherwise stream processing stops. + fn error(&mut self, err: E, ctx: &mut Self::Context) -> Running { + Running::Stop + } + + /// Method is called when writer finishes. + /// + /// By default this method stops actor's `Context`. + fn finished(&mut self, ctx: &mut Self::Context) { + ctx.stop() + } +} + +pub fn foo(mut r: WriteHandler<()>) { + r.finished(<|>); +} +"#, + expect![[r#" + Method is called when writer finishes. + + By default this method stops actor's `Context`. 
+ ------ + fn finished(&mut self, ctx: &mut {unknown}) + () + "#]], + ); + } + + #[test] + fn call_info_bad_offset() { + mark::check!(call_info_bad_offset); + check( + r#" +fn foo(x: u32, y: u32) -> u32 {x + y} +fn bar() { foo <|> (3, ); } +"#, + expect![[""]], + ); + } + + #[test] + fn test_nested_method_in_lambda() { + check( + r#" +struct Foo; +impl Foo { fn bar(&self, _: u32) { } } + +fn bar(_: u32) { } + +fn main() { + let foo = Foo; + std::thread::spawn(move || foo.bar(<|>)); +} +"#, + expect![[r#" + fn bar(&self, _: u32) + (<_: u32>) + "#]], + ); + } + + #[test] + fn works_for_tuple_structs() { + check( + r#" +/// A cool tuple struct +struct S(u32, i32); +fn main() { + let s = S(0, <|>); +} +"#, + expect![[r#" + A cool tuple struct + ------ + struct S(u32, i32) + (u32, ) + "#]], + ); + } + + #[test] + fn generic_struct() { + check( + r#" +struct S(T); +fn main() { + let s = S(<|>); +} +"#, + expect![[r#" + struct S({unknown}) + (<{unknown}>) + "#]], + ); + } + + #[test] + fn works_for_enum_variants() { + check( + r#" +enum E { + /// A Variant + A(i32), + /// Another + B, + /// And C + C { a: i32, b: i32 } +} + +fn main() { + let a = E::A(<|>); +} +"#, + expect![[r#" + A Variant + ------ + enum E::A(i32) + () + "#]], + ); + } + + #[test] + fn cant_call_struct_record() { + check( + r#" +struct S { x: u32, y: i32 } +fn main() { + let s = S(<|>); +} +"#, + expect![[""]], + ); + } + + #[test] + fn cant_call_enum_record() { + check( + r#" +enum E { + /// A Variant + A(i32), + /// Another + B, + /// And C + C { a: i32, b: i32 } +} + +fn main() { + let a = E::C(<|>); +} +"#, + expect![[""]], + ); + } + + #[test] + fn fn_signature_for_call_in_macro() { + check( + r#" +macro_rules! id { ($($tt:tt)*) => { $($tt)* } } +fn foo() { } +id! 
{ + fn bar() { foo(<|>); } +} +"#, + expect![[r#" + fn foo() + () + "#]], + ); + } + + #[test] + fn call_info_for_lambdas() { + check( + r#" +struct S; +fn foo(s: S) -> i32 { 92 } +fn main() { + (|s| foo(s))(<|>) +} + "#, + expect![[r#" + (S) -> i32 + () + "#]], + ) + } + + #[test] + fn call_info_for_fn_ptr() { + check( + r#" +fn main(f: fn(i32, f64) -> char) { + f(0, <|>) +} + "#, + expect![[r#" + (i32, f64) -> char + (i32, ) + "#]], + ) + } +} diff --git a/crates/ide/src/completion.rs b/crates/ide/src/completion.rs new file mode 100644 index 0000000000..7fb4d687e1 --- /dev/null +++ b/crates/ide/src/completion.rs @@ -0,0 +1,206 @@ +mod completion_config; +mod completion_item; +mod completion_context; +mod presentation; +mod patterns; +#[cfg(test)] +mod test_utils; + +mod complete_attribute; +mod complete_dot; +mod complete_record; +mod complete_pattern; +mod complete_fn_param; +mod complete_keyword; +mod complete_snippet; +mod complete_qualified_path; +mod complete_unqualified_path; +mod complete_postfix; +mod complete_macro_in_item_position; +mod complete_trait_impl; + +use ide_db::RootDatabase; + +use crate::{ + completion::{ + completion_context::CompletionContext, + completion_item::{CompletionKind, Completions}, + }, + FilePosition, +}; + +pub use crate::completion::{ + completion_config::CompletionConfig, + completion_item::{CompletionItem, CompletionItemKind, CompletionScore, InsertTextFormat}, +}; + +//FIXME: split the following feature into fine-grained features. + +// Feature: Magic Completions +// +// In addition to usual reference completion, rust-analyzer provides some ✨magic✨ +// completions as well: +// +// Keywords like `if`, `else` `while`, `loop` are completed with braces, and cursor +// is placed at the appropriate position. Even though `if` is easy to type, you +// still want to complete it, to get ` { }` for free! `return` is inserted with a +// space or `;` depending on the return type of the function. 
+// +// When completing a function call, `()` are automatically inserted. If a function +// takes arguments, the cursor is positioned inside the parenthesis. +// +// There are postfix completions, which can be triggered by typing something like +// `foo().if`. The word after `.` determines postfix completion. Possible variants are: +// +// - `expr.if` -> `if expr {}` or `if let ... {}` for `Option` or `Result` +// - `expr.match` -> `match expr {}` +// - `expr.while` -> `while expr {}` or `while let ... {}` for `Option` or `Result` +// - `expr.ref` -> `&expr` +// - `expr.refm` -> `&mut expr` +// - `expr.not` -> `!expr` +// - `expr.dbg` -> `dbg!(expr)` +// +// There also snippet completions: +// +// .Expressions +// - `pd` -> `eprintln!(" = {:?}", );` +// - `ppd` -> `eprintln!(" = {:#?}", );` +// +// .Items +// - `tfn` -> `#[test] fn feature(){}` +// - `tmod` -> +// ```rust +// #[cfg(test)] +// mod tests { +// use super::*; +// +// #[test] +// fn test_name() {} +// } +// ``` + +/// Main entry point for completion. We run completion as a two-phase process. +/// +/// First, we look at the position and collect a so-called `CompletionContext. +/// This is a somewhat messy process, because, during completion, syntax tree is +/// incomplete and can look really weird. +/// +/// Once the context is collected, we run a series of completion routines which +/// look at the context and produce completion items. One subtlety about this +/// phase is that completion engine should not filter by the substring which is +/// already present, it should give all possible variants for the identifier at +/// the caret. In other words, for +/// +/// ```no-run +/// fn f() { +/// let foo = 92; +/// let _ = bar<|> +/// } +/// ``` +/// +/// `foo` *should* be present among the completion variants. Filtering by +/// identifier prefix/fuzzy match should be done higher in the stack, together +/// with ordering of completions (currently this is done by the client). 
+pub(crate) fn completions( + db: &RootDatabase, + config: &CompletionConfig, + position: FilePosition, +) -> Option { + let ctx = CompletionContext::new(db, position, config)?; + + let mut acc = Completions::default(); + complete_attribute::complete_attribute(&mut acc, &ctx); + complete_fn_param::complete_fn_param(&mut acc, &ctx); + complete_keyword::complete_expr_keyword(&mut acc, &ctx); + complete_keyword::complete_use_tree_keyword(&mut acc, &ctx); + complete_snippet::complete_expr_snippet(&mut acc, &ctx); + complete_snippet::complete_item_snippet(&mut acc, &ctx); + complete_qualified_path::complete_qualified_path(&mut acc, &ctx); + complete_unqualified_path::complete_unqualified_path(&mut acc, &ctx); + complete_dot::complete_dot(&mut acc, &ctx); + complete_record::complete_record(&mut acc, &ctx); + complete_pattern::complete_pattern(&mut acc, &ctx); + complete_postfix::complete_postfix(&mut acc, &ctx); + complete_macro_in_item_position::complete_macro_in_item_position(&mut acc, &ctx); + complete_trait_impl::complete_trait_impl(&mut acc, &ctx); + + Some(acc) +} + +#[cfg(test)] +mod tests { + use crate::completion::completion_config::CompletionConfig; + use crate::mock_analysis::analysis_and_position; + + struct DetailAndDocumentation<'a> { + detail: &'a str, + documentation: &'a str, + } + + fn check_detail_and_documentation(ra_fixture: &str, expected: DetailAndDocumentation) { + let (analysis, position) = analysis_and_position(ra_fixture); + let config = CompletionConfig::default(); + let completions = analysis.completions(&config, position).unwrap().unwrap(); + for item in completions { + if item.detail() == Some(expected.detail) { + let opt = item.documentation(); + let doc = opt.as_ref().map(|it| it.as_str()); + assert_eq!(doc, Some(expected.documentation)); + return; + } + } + panic!("completion detail not found: {}", expected.detail) + } + + #[test] + fn test_completion_detail_from_macro_generated_struct_fn_doc_attr() { + check_detail_and_documentation( + 
r#" + //- /lib.rs + macro_rules! bar { + () => { + struct Bar; + impl Bar { + #[doc = "Do the foo"] + fn foo(&self) {} + } + } + } + + bar!(); + + fn foo() { + let bar = Bar; + bar.fo<|>; + } + "#, + DetailAndDocumentation { detail: "fn foo(&self)", documentation: "Do the foo" }, + ); + } + + #[test] + fn test_completion_detail_from_macro_generated_struct_fn_doc_comment() { + check_detail_and_documentation( + r#" + //- /lib.rs + macro_rules! bar { + () => { + struct Bar; + impl Bar { + /// Do the foo + fn foo(&self) {} + } + } + } + + bar!(); + + fn foo() { + let bar = Bar; + bar.fo<|>; + } + "#, + DetailAndDocumentation { detail: "fn foo(&self)", documentation: " Do the foo" }, + ); + } +} diff --git a/crates/ide/src/completion/complete_attribute.rs b/crates/ide/src/completion/complete_attribute.rs new file mode 100644 index 0000000000..603d935deb --- /dev/null +++ b/crates/ide/src/completion/complete_attribute.rs @@ -0,0 +1,644 @@ +//! Completion for attributes +//! +//! This module uses a bit of static metadata to provide completions +//! for built-in attributes. 
+ +use rustc_hash::FxHashSet; +use syntax::{ast, AstNode, SyntaxKind}; + +use crate::completion::{ + completion_context::CompletionContext, + completion_item::{CompletionItem, CompletionItemKind, CompletionKind, Completions}, +}; + +pub(super) fn complete_attribute(acc: &mut Completions, ctx: &CompletionContext) -> Option<()> { + let attribute = ctx.attribute_under_caret.as_ref()?; + match (attribute.path(), attribute.token_tree()) { + (Some(path), Some(token_tree)) if path.to_string() == "derive" => { + complete_derive(acc, ctx, token_tree) + } + (Some(path), Some(token_tree)) + if ["allow", "warn", "deny", "forbid"] + .iter() + .any(|lint_level| lint_level == &path.to_string()) => + { + complete_lint(acc, ctx, token_tree) + } + (_, Some(_token_tree)) => {} + _ => complete_attribute_start(acc, ctx, attribute), + } + Some(()) +} + +fn complete_attribute_start(acc: &mut Completions, ctx: &CompletionContext, attribute: &ast::Attr) { + for attr_completion in ATTRIBUTES { + let mut item = CompletionItem::new( + CompletionKind::Attribute, + ctx.source_range(), + attr_completion.label, + ) + .kind(CompletionItemKind::Attribute); + + if let Some(lookup) = attr_completion.lookup { + item = item.lookup_by(lookup); + } + + match (attr_completion.snippet, ctx.config.snippet_cap) { + (Some(snippet), Some(cap)) => { + item = item.insert_snippet(cap, snippet); + } + _ => {} + } + + if attribute.kind() == ast::AttrKind::Inner || !attr_completion.prefer_inner { + acc.add(item); + } + } +} + +struct AttrCompletion { + label: &'static str, + lookup: Option<&'static str>, + snippet: Option<&'static str>, + prefer_inner: bool, +} + +impl AttrCompletion { + const fn prefer_inner(self) -> AttrCompletion { + AttrCompletion { prefer_inner: true, ..self } + } +} + +const fn attr( + label: &'static str, + lookup: Option<&'static str>, + snippet: Option<&'static str>, +) -> AttrCompletion { + AttrCompletion { label, lookup, snippet, prefer_inner: false } +} + +const ATTRIBUTES: 
&[AttrCompletion] = &[ + attr("allow(…)", Some("allow"), Some("allow(${0:lint})")), + attr("cfg_attr(…)", Some("cfg_attr"), Some("cfg_attr(${1:predicate}, ${0:attr})")), + attr("cfg(…)", Some("cfg"), Some("cfg(${0:predicate})")), + attr("deny(…)", Some("deny"), Some("deny(${0:lint})")), + attr(r#"deprecated = "…""#, Some("deprecated"), Some(r#"deprecated = "${0:reason}""#)), + attr("derive(…)", Some("derive"), Some(r#"derive(${0:Debug})"#)), + attr(r#"doc = "…""#, Some("doc"), Some(r#"doc = "${0:docs}""#)), + attr("feature(…)", Some("feature"), Some("feature(${0:flag})")).prefer_inner(), + attr("forbid(…)", Some("forbid"), Some("forbid(${0:lint})")), + // FIXME: resolve through macro resolution? + attr("global_allocator", None, None).prefer_inner(), + attr(r#"ignore = "…""#, Some("ignore"), Some(r#"ignore = "${0:reason}""#)), + attr("inline(…)", Some("inline"), Some("inline(${0:lint})")), + attr(r#"link_name = "…""#, Some("link_name"), Some(r#"link_name = "${0:symbol_name}""#)), + attr("link", None, None), + attr("macro_export", None, None), + attr("macro_use", None, None), + attr(r#"must_use = "…""#, Some("must_use"), Some(r#"must_use = "${0:reason}""#)), + attr("no_mangle", None, None), + attr("no_std", None, None).prefer_inner(), + attr("non_exhaustive", None, None), + attr("panic_handler", None, None).prefer_inner(), + attr("path = \"…\"", Some("path"), Some("path =\"${0:path}\"")), + attr("proc_macro", None, None), + attr("proc_macro_attribute", None, None), + attr("proc_macro_derive(…)", Some("proc_macro_derive"), Some("proc_macro_derive(${0:Trait})")), + attr("recursion_limit = …", Some("recursion_limit"), Some("recursion_limit = ${0:128}")) + .prefer_inner(), + attr("repr(…)", Some("repr"), Some("repr(${0:C})")), + attr( + "should_panic(…)", + Some("should_panic"), + Some(r#"should_panic(expected = "${0:reason}")"#), + ), + attr( + r#"target_feature = "…""#, + Some("target_feature"), + Some("target_feature = \"${0:feature}\""), + ), + attr("test", None, 
None), + attr("used", None, None), + attr("warn(…)", Some("warn"), Some("warn(${0:lint})")), + attr( + r#"windows_subsystem = "…""#, + Some("windows_subsystem"), + Some(r#"windows_subsystem = "${0:subsystem}""#), + ) + .prefer_inner(), +]; + +fn complete_derive(acc: &mut Completions, ctx: &CompletionContext, derive_input: ast::TokenTree) { + if let Ok(existing_derives) = parse_comma_sep_input(derive_input) { + for derive_completion in DEFAULT_DERIVE_COMPLETIONS + .into_iter() + .filter(|completion| !existing_derives.contains(completion.label)) + { + let mut label = derive_completion.label.to_owned(); + for dependency in derive_completion + .dependencies + .into_iter() + .filter(|&&dependency| !existing_derives.contains(dependency)) + { + label.push_str(", "); + label.push_str(dependency); + } + acc.add( + CompletionItem::new(CompletionKind::Attribute, ctx.source_range(), label) + .kind(CompletionItemKind::Attribute), + ); + } + + for custom_derive_name in get_derive_names_in_scope(ctx).difference(&existing_derives) { + acc.add( + CompletionItem::new( + CompletionKind::Attribute, + ctx.source_range(), + custom_derive_name, + ) + .kind(CompletionItemKind::Attribute), + ); + } + } +} + +fn complete_lint(acc: &mut Completions, ctx: &CompletionContext, derive_input: ast::TokenTree) { + if let Ok(existing_lints) = parse_comma_sep_input(derive_input) { + for lint_completion in DEFAULT_LINT_COMPLETIONS + .into_iter() + .filter(|completion| !existing_lints.contains(completion.label)) + { + acc.add( + CompletionItem::new( + CompletionKind::Attribute, + ctx.source_range(), + lint_completion.label, + ) + .kind(CompletionItemKind::Attribute) + .detail(lint_completion.description), + ); + } + } +} + +fn parse_comma_sep_input(derive_input: ast::TokenTree) -> Result, ()> { + match (derive_input.left_delimiter_token(), derive_input.right_delimiter_token()) { + (Some(left_paren), Some(right_paren)) + if left_paren.kind() == SyntaxKind::L_PAREN + && right_paren.kind() == 
SyntaxKind::R_PAREN => + { + let mut input_derives = FxHashSet::default(); + let mut current_derive = String::new(); + for token in derive_input + .syntax() + .children_with_tokens() + .filter_map(|token| token.into_token()) + .skip_while(|token| token != &left_paren) + .skip(1) + .take_while(|token| token != &right_paren) + { + if SyntaxKind::COMMA == token.kind() { + if !current_derive.is_empty() { + input_derives.insert(current_derive); + current_derive = String::new(); + } + } else { + current_derive.push_str(token.to_string().trim()); + } + } + + if !current_derive.is_empty() { + input_derives.insert(current_derive); + } + Ok(input_derives) + } + _ => Err(()), + } +} + +fn get_derive_names_in_scope(ctx: &CompletionContext) -> FxHashSet { + let mut result = FxHashSet::default(); + ctx.scope.process_all_names(&mut |name, scope_def| { + if let hir::ScopeDef::MacroDef(mac) = scope_def { + if mac.is_derive_macro() { + result.insert(name.to_string()); + } + } + }); + result +} + +struct DeriveCompletion { + label: &'static str, + dependencies: &'static [&'static str], +} + +/// Standard Rust derives and the information about their dependencies +/// (the dependencies are needed so that the main derive don't break the compilation when added) +#[rustfmt::skip] +const DEFAULT_DERIVE_COMPLETIONS: &[DeriveCompletion] = &[ + DeriveCompletion { label: "Clone", dependencies: &[] }, + DeriveCompletion { label: "Copy", dependencies: &["Clone"] }, + DeriveCompletion { label: "Debug", dependencies: &[] }, + DeriveCompletion { label: "Default", dependencies: &[] }, + DeriveCompletion { label: "Hash", dependencies: &[] }, + DeriveCompletion { label: "PartialEq", dependencies: &[] }, + DeriveCompletion { label: "Eq", dependencies: &["PartialEq"] }, + DeriveCompletion { label: "PartialOrd", dependencies: &["PartialEq"] }, + DeriveCompletion { label: "Ord", dependencies: &["PartialOrd", "Eq", "PartialEq"] }, +]; + +struct LintCompletion { + label: &'static str, + description: 
&'static str, +} + +#[rustfmt::skip] +const DEFAULT_LINT_COMPLETIONS: &[LintCompletion] = &[ + LintCompletion { label: "absolute_paths_not_starting_with_crate", description: r#"fully qualified paths that start with a module name instead of `crate`, `self`, or an extern crate name"# }, + LintCompletion { label: "anonymous_parameters", description: r#"detects anonymous parameters"# }, + LintCompletion { label: "box_pointers", description: r#"use of owned (Box type) heap memory"# }, + LintCompletion { label: "deprecated_in_future", description: r#"detects use of items that will be deprecated in a future version"# }, + LintCompletion { label: "elided_lifetimes_in_paths", description: r#"hidden lifetime parameters in types are deprecated"# }, + LintCompletion { label: "explicit_outlives_requirements", description: r#"outlives requirements can be inferred"# }, + LintCompletion { label: "indirect_structural_match", description: r#"pattern with const indirectly referencing non-structural-match type"# }, + LintCompletion { label: "keyword_idents", description: r#"detects edition keywords being used as an identifier"# }, + LintCompletion { label: "macro_use_extern_crate", description: r#"the `#[macro_use]` attribute is now deprecated in favor of using macros via the module system"# }, + LintCompletion { label: "meta_variable_misuse", description: r#"possible meta-variable misuse at macro definition"# }, + LintCompletion { label: "missing_copy_implementations", description: r#"detects potentially-forgotten implementations of `Copy`"# }, + LintCompletion { label: "missing_crate_level_docs", description: r#"detects crates with no crate-level documentation"# }, + LintCompletion { label: "missing_debug_implementations", description: r#"detects missing implementations of Debug"# }, + LintCompletion { label: "missing_docs", description: r#"detects missing documentation for public members"# }, + LintCompletion { label: "missing_doc_code_examples", description: r#"detects 
publicly-exported items without code samples in their documentation"# }, + LintCompletion { label: "non_ascii_idents", description: r#"detects non-ASCII identifiers"# }, + LintCompletion { label: "private_doc_tests", description: r#"detects code samples in docs of private items not documented by rustdoc"# }, + LintCompletion { label: "single_use_lifetimes", description: r#"detects lifetime parameters that are only used once"# }, + LintCompletion { label: "trivial_casts", description: r#"detects trivial casts which could be removed"# }, + LintCompletion { label: "trivial_numeric_casts", description: r#"detects trivial casts of numeric types which could be removed"# }, + LintCompletion { label: "unaligned_references", description: r#"detects unaligned references to fields of packed structs"# }, + LintCompletion { label: "unreachable_pub", description: r#"`pub` items not reachable from crate root"# }, + LintCompletion { label: "unsafe_code", description: r#"usage of `unsafe` code"# }, + LintCompletion { label: "unsafe_op_in_unsafe_fn", description: r#"unsafe operations in unsafe functions without an explicit unsafe block are deprecated"# }, + LintCompletion { label: "unstable_features", description: r#"enabling unstable features (deprecated. 
do not use)"# }, + LintCompletion { label: "unused_crate_dependencies", description: r#"crate dependencies that are never used"# }, + LintCompletion { label: "unused_extern_crates", description: r#"extern crates that are never used"# }, + LintCompletion { label: "unused_import_braces", description: r#"unnecessary braces around an imported item"# }, + LintCompletion { label: "unused_lifetimes", description: r#"detects lifetime parameters that are never used"# }, + LintCompletion { label: "unused_qualifications", description: r#"detects unnecessarily qualified names"# }, + LintCompletion { label: "unused_results", description: r#"unused result of an expression in a statement"# }, + LintCompletion { label: "variant_size_differences", description: r#"detects enums with widely varying variant sizes"# }, + LintCompletion { label: "array_into_iter", description: r#"detects calling `into_iter` on arrays"# }, + LintCompletion { label: "asm_sub_register", description: r#"using only a subset of a register for inline asm inputs"# }, + LintCompletion { label: "bare_trait_objects", description: r#"suggest using `dyn Trait` for trait objects"# }, + LintCompletion { label: "bindings_with_variant_name", description: r#"detects pattern bindings with the same name as one of the matched variants"# }, + LintCompletion { label: "cenum_impl_drop_cast", description: r#"a C-like enum implementing Drop is cast"# }, + LintCompletion { label: "clashing_extern_declarations", description: r#"detects when an extern fn has been declared with the same name but different types"# }, + LintCompletion { label: "coherence_leak_check", description: r#"distinct impls distinguished only by the leak-check code"# }, + LintCompletion { label: "confusable_idents", description: r#"detects visually confusable pairs between identifiers"# }, + LintCompletion { label: "dead_code", description: r#"detect unused, unexported items"# }, + LintCompletion { label: "deprecated", description: r#"detects use of deprecated 
items"# }, + LintCompletion { label: "ellipsis_inclusive_range_patterns", description: r#"`...` range patterns are deprecated"# }, + LintCompletion { label: "exported_private_dependencies", description: r#"public interface leaks type from a private dependency"# }, + LintCompletion { label: "illegal_floating_point_literal_pattern", description: r#"floating-point literals cannot be used in patterns"# }, + LintCompletion { label: "improper_ctypes", description: r#"proper use of libc types in foreign modules"# }, + LintCompletion { label: "improper_ctypes_definitions", description: r#"proper use of libc types in foreign item definitions"# }, + LintCompletion { label: "incomplete_features", description: r#"incomplete features that may function improperly in some or all cases"# }, + LintCompletion { label: "inline_no_sanitize", description: r#"detects incompatible use of `#[inline(always)]` and `#[no_sanitize(...)]`"# }, + LintCompletion { label: "intra_doc_link_resolution_failure", description: r#"failures in resolving intra-doc link targets"# }, + LintCompletion { label: "invalid_codeblock_attributes", description: r#"codeblock attribute looks a lot like a known one"# }, + LintCompletion { label: "invalid_value", description: r#"an invalid value is being created (such as a NULL reference)"# }, + LintCompletion { label: "irrefutable_let_patterns", description: r#"detects irrefutable patterns in if-let and while-let statements"# }, + LintCompletion { label: "late_bound_lifetime_arguments", description: r#"detects generic lifetime arguments in path segments with late bound lifetime parameters"# }, + LintCompletion { label: "mixed_script_confusables", description: r#"detects Unicode scripts whose mixed script confusables codepoints are solely used"# }, + LintCompletion { label: "mutable_borrow_reservation_conflict", description: r#"reservation of a two-phased borrow conflicts with other shared borrows"# }, + LintCompletion { label: "non_camel_case_types", description: 
r#"types, variants, traits and type parameters should have camel case names"# }, + LintCompletion { label: "non_shorthand_field_patterns", description: r#"using `Struct { x: x }` instead of `Struct { x }` in a pattern"# }, + LintCompletion { label: "non_snake_case", description: r#"variables, methods, functions, lifetime parameters and modules should have snake case names"# }, + LintCompletion { label: "non_upper_case_globals", description: r#"static constants should have uppercase identifiers"# }, + LintCompletion { label: "no_mangle_generic_items", description: r#"generic items must be mangled"# }, + LintCompletion { label: "overlapping_patterns", description: r#"detects overlapping patterns"# }, + LintCompletion { label: "path_statements", description: r#"path statements with no effect"# }, + LintCompletion { label: "private_in_public", description: r#"detect private items in public interfaces not caught by the old implementation"# }, + LintCompletion { label: "proc_macro_derive_resolution_fallback", description: r#"detects proc macro derives using inaccessible names from parent modules"# }, + LintCompletion { label: "redundant_semicolons", description: r#"detects unnecessary trailing semicolons"# }, + LintCompletion { label: "renamed_and_removed_lints", description: r#"lints that have been renamed or removed"# }, + LintCompletion { label: "safe_packed_borrows", description: r#"safe borrows of fields of packed structs were erroneously allowed"# }, + LintCompletion { label: "stable_features", description: r#"stable features found in `#[feature]` directive"# }, + LintCompletion { label: "trivial_bounds", description: r#"these bounds don't depend on an type parameters"# }, + LintCompletion { label: "type_alias_bounds", description: r#"bounds in type aliases are not enforced"# }, + LintCompletion { label: "tyvar_behind_raw_pointer", description: r#"raw pointer to an inference variable"# }, + LintCompletion { label: "uncommon_codepoints", description: r#"detects 
uncommon Unicode codepoints in identifiers"# }, + LintCompletion { label: "unconditional_recursion", description: r#"functions that cannot return without calling themselves"# }, + LintCompletion { label: "unknown_lints", description: r#"unrecognized lint attribute"# }, + LintCompletion { label: "unnameable_test_items", description: r#"detects an item that cannot be named being marked as `#[test_case]`"# }, + LintCompletion { label: "unreachable_code", description: r#"detects unreachable code paths"# }, + LintCompletion { label: "unreachable_patterns", description: r#"detects unreachable patterns"# }, + LintCompletion { label: "unstable_name_collisions", description: r#"detects name collision with an existing but unstable method"# }, + LintCompletion { label: "unused_allocation", description: r#"detects unnecessary allocations that can be eliminated"# }, + LintCompletion { label: "unused_assignments", description: r#"detect assignments that will never be read"# }, + LintCompletion { label: "unused_attributes", description: r#"detects attributes that were not used by the compiler"# }, + LintCompletion { label: "unused_braces", description: r#"unnecessary braces around an expression"# }, + LintCompletion { label: "unused_comparisons", description: r#"comparisons made useless by limits of the types involved"# }, + LintCompletion { label: "unused_doc_comments", description: r#"detects doc comments that aren't used by rustdoc"# }, + LintCompletion { label: "unused_features", description: r#"unused features found in crate-level `#[feature]` directives"# }, + LintCompletion { label: "unused_imports", description: r#"imports that are never used"# }, + LintCompletion { label: "unused_labels", description: r#"detects labels that are never used"# }, + LintCompletion { label: "unused_macros", description: r#"detects macros that were not used"# }, + LintCompletion { label: "unused_must_use", description: r#"unused result of a type flagged as `#[must_use]`"# }, + LintCompletion { 
label: "unused_mut", description: r#"detect mut variables which don't need to be mutable"# }, + LintCompletion { label: "unused_parens", description: r#"`if`, `match`, `while` and `return` do not need parentheses"# }, + LintCompletion { label: "unused_unsafe", description: r#"unnecessary use of an `unsafe` block"# }, + LintCompletion { label: "unused_variables", description: r#"detect variables which are not used in any way"# }, + LintCompletion { label: "warnings", description: r#"mass-change the level for lints which produce warnings"# }, + LintCompletion { label: "where_clauses_object_safety", description: r#"checks the object safety of where clauses"# }, + LintCompletion { label: "while_true", description: r#"suggest using `loop { }` instead of `while true { }`"# }, + LintCompletion { label: "ambiguous_associated_items", description: r#"ambiguous associated items"# }, + LintCompletion { label: "arithmetic_overflow", description: r#"arithmetic operation overflows"# }, + LintCompletion { label: "conflicting_repr_hints", description: r#"conflicts between `#[repr(..)]` hints that were previously accepted and used in practice"# }, + LintCompletion { label: "const_err", description: r#"constant evaluation detected erroneous expression"# }, + LintCompletion { label: "ill_formed_attribute_input", description: r#"ill-formed attribute inputs that were previously accepted and used in practice"# }, + LintCompletion { label: "incomplete_include", description: r#"trailing content in included file"# }, + LintCompletion { label: "invalid_type_param_default", description: r#"type parameter default erroneously allowed in invalid location"# }, + LintCompletion { label: "macro_expanded_macro_exports_accessed_by_absolute_paths", description: r#"macro-expanded `macro_export` macros from the current crate cannot be referred to by absolute paths"# }, + LintCompletion { label: "missing_fragment_specifier", description: r#"detects missing fragment specifiers in unused `macro_rules!` 
patterns"# }, + LintCompletion { label: "mutable_transmutes", description: r#"mutating transmuted &mut T from &T may cause undefined behavior"# }, + LintCompletion { label: "no_mangle_const_items", description: r#"const items will not have their symbols exported"# }, + LintCompletion { label: "order_dependent_trait_objects", description: r#"trait-object types were treated as different depending on marker-trait order"# }, + LintCompletion { label: "overflowing_literals", description: r#"literal out of range for its type"# }, + LintCompletion { label: "patterns_in_fns_without_body", description: r#"patterns in functions without body were erroneously allowed"# }, + LintCompletion { label: "pub_use_of_private_extern_crate", description: r#"detect public re-exports of private extern crates"# }, + LintCompletion { label: "soft_unstable", description: r#"a feature gate that doesn't break dependent crates"# }, + LintCompletion { label: "unconditional_panic", description: r#"operation will cause a panic at runtime"# }, + LintCompletion { label: "unknown_crate_types", description: r#"unknown crate type found in `#[crate_type]` directive"# }, +]; + +#[cfg(test)] +mod tests { + use expect::{expect, Expect}; + + use crate::completion::{test_utils::completion_list, CompletionKind}; + + fn check(ra_fixture: &str, expect: Expect) { + let actual = completion_list(ra_fixture, CompletionKind::Attribute); + expect.assert_eq(&actual); + } + + #[test] + fn empty_derive_completion() { + check( + r#" +#[derive(<|>)] +struct Test {} + "#, + expect![[r#" + at Clone + at Copy, Clone + at Debug + at Default + at Eq, PartialEq + at Hash + at Ord, PartialOrd, Eq, PartialEq + at PartialEq + at PartialOrd, PartialEq + "#]], + ); + } + + #[test] + fn empty_lint_completion() { + check( + r#"#[allow(<|>)]"#, + expect![[r#" + at absolute_paths_not_starting_with_crate fully qualified paths that start with a module name instead of `crate`, `self`, or an extern crate name + at ambiguous_associated_items 
ambiguous associated items + at anonymous_parameters detects anonymous parameters + at arithmetic_overflow arithmetic operation overflows + at array_into_iter detects calling `into_iter` on arrays + at asm_sub_register using only a subset of a register for inline asm inputs + at bare_trait_objects suggest using `dyn Trait` for trait objects + at bindings_with_variant_name detects pattern bindings with the same name as one of the matched variants + at box_pointers use of owned (Box type) heap memory + at cenum_impl_drop_cast a C-like enum implementing Drop is cast + at clashing_extern_declarations detects when an extern fn has been declared with the same name but different types + at coherence_leak_check distinct impls distinguished only by the leak-check code + at conflicting_repr_hints conflicts between `#[repr(..)]` hints that were previously accepted and used in practice + at confusable_idents detects visually confusable pairs between identifiers + at const_err constant evaluation detected erroneous expression + at dead_code detect unused, unexported items + at deprecated detects use of deprecated items + at deprecated_in_future detects use of items that will be deprecated in a future version + at elided_lifetimes_in_paths hidden lifetime parameters in types are deprecated + at ellipsis_inclusive_range_patterns `...` range patterns are deprecated + at explicit_outlives_requirements outlives requirements can be inferred + at exported_private_dependencies public interface leaks type from a private dependency + at ill_formed_attribute_input ill-formed attribute inputs that were previously accepted and used in practice + at illegal_floating_point_literal_pattern floating-point literals cannot be used in patterns + at improper_ctypes proper use of libc types in foreign modules + at improper_ctypes_definitions proper use of libc types in foreign item definitions + at incomplete_features incomplete features that may function improperly in some or all cases + at 
incomplete_include trailing content in included file + at indirect_structural_match pattern with const indirectly referencing non-structural-match type + at inline_no_sanitize detects incompatible use of `#[inline(always)]` and `#[no_sanitize(...)]` + at intra_doc_link_resolution_failure failures in resolving intra-doc link targets + at invalid_codeblock_attributes codeblock attribute looks a lot like a known one + at invalid_type_param_default type parameter default erroneously allowed in invalid location + at invalid_value an invalid value is being created (such as a NULL reference) + at irrefutable_let_patterns detects irrefutable patterns in if-let and while-let statements + at keyword_idents detects edition keywords being used as an identifier + at late_bound_lifetime_arguments detects generic lifetime arguments in path segments with late bound lifetime parameters + at macro_expanded_macro_exports_accessed_by_absolute_paths macro-expanded `macro_export` macros from the current crate cannot be referred to by absolute paths + at macro_use_extern_crate the `#[macro_use]` attribute is now deprecated in favor of using macros via the module system + at meta_variable_misuse possible meta-variable misuse at macro definition + at missing_copy_implementations detects potentially-forgotten implementations of `Copy` + at missing_crate_level_docs detects crates with no crate-level documentation + at missing_debug_implementations detects missing implementations of Debug + at missing_doc_code_examples detects publicly-exported items without code samples in their documentation + at missing_docs detects missing documentation for public members + at missing_fragment_specifier detects missing fragment specifiers in unused `macro_rules!` patterns + at mixed_script_confusables detects Unicode scripts whose mixed script confusables codepoints are solely used + at mutable_borrow_reservation_conflict reservation of a two-phased borrow conflicts with other shared borrows + at 
mutable_transmutes mutating transmuted &mut T from &T may cause undefined behavior + at no_mangle_const_items const items will not have their symbols exported + at no_mangle_generic_items generic items must be mangled + at non_ascii_idents detects non-ASCII identifiers + at non_camel_case_types types, variants, traits and type parameters should have camel case names + at non_shorthand_field_patterns using `Struct { x: x }` instead of `Struct { x }` in a pattern + at non_snake_case variables, methods, functions, lifetime parameters and modules should have snake case names + at non_upper_case_globals static constants should have uppercase identifiers + at order_dependent_trait_objects trait-object types were treated as different depending on marker-trait order + at overflowing_literals literal out of range for its type + at overlapping_patterns detects overlapping patterns + at path_statements path statements with no effect + at patterns_in_fns_without_body patterns in functions without body were erroneously allowed + at private_doc_tests detects code samples in docs of private items not documented by rustdoc + at private_in_public detect private items in public interfaces not caught by the old implementation + at proc_macro_derive_resolution_fallback detects proc macro derives using inaccessible names from parent modules + at pub_use_of_private_extern_crate detect public re-exports of private extern crates + at redundant_semicolons detects unnecessary trailing semicolons + at renamed_and_removed_lints lints that have been renamed or removed + at safe_packed_borrows safe borrows of fields of packed structs were erroneously allowed + at single_use_lifetimes detects lifetime parameters that are only used once + at soft_unstable a feature gate that doesn't break dependent crates + at stable_features stable features found in `#[feature]` directive + at trivial_bounds these bounds don't depend on an type parameters + at trivial_casts detects trivial casts which could be 
removed + at trivial_numeric_casts detects trivial casts of numeric types which could be removed + at type_alias_bounds bounds in type aliases are not enforced + at tyvar_behind_raw_pointer raw pointer to an inference variable + at unaligned_references detects unaligned references to fields of packed structs + at uncommon_codepoints detects uncommon Unicode codepoints in identifiers + at unconditional_panic operation will cause a panic at runtime + at unconditional_recursion functions that cannot return without calling themselves + at unknown_crate_types unknown crate type found in `#[crate_type]` directive + at unknown_lints unrecognized lint attribute + at unnameable_test_items detects an item that cannot be named being marked as `#[test_case]` + at unreachable_code detects unreachable code paths + at unreachable_patterns detects unreachable patterns + at unreachable_pub `pub` items not reachable from crate root + at unsafe_code usage of `unsafe` code + at unsafe_op_in_unsafe_fn unsafe operations in unsafe functions without an explicit unsafe block are deprecated + at unstable_features enabling unstable features (deprecated. 
do not use) + at unstable_name_collisions detects name collision with an existing but unstable method + at unused_allocation detects unnecessary allocations that can be eliminated + at unused_assignments detect assignments that will never be read + at unused_attributes detects attributes that were not used by the compiler + at unused_braces unnecessary braces around an expression + at unused_comparisons comparisons made useless by limits of the types involved + at unused_crate_dependencies crate dependencies that are never used + at unused_doc_comments detects doc comments that aren't used by rustdoc + at unused_extern_crates extern crates that are never used + at unused_features unused features found in crate-level `#[feature]` directives + at unused_import_braces unnecessary braces around an imported item + at unused_imports imports that are never used + at unused_labels detects labels that are never used + at unused_lifetimes detects lifetime parameters that are never used + at unused_macros detects macros that were not used + at unused_must_use unused result of a type flagged as `#[must_use]` + at unused_mut detect mut variables which don't need to be mutable + at unused_parens `if`, `match`, `while` and `return` do not need parentheses + at unused_qualifications detects unnecessarily qualified names + at unused_results unused result of an expression in a statement + at unused_unsafe unnecessary use of an `unsafe` block + at unused_variables detect variables which are not used in any way + at variant_size_differences detects enums with widely varying variant sizes + at warnings mass-change the level for lints which produce warnings + at where_clauses_object_safety checks the object safety of where clauses + at while_true suggest using `loop { }` instead of `while true { }` + "#]], + ) + } + + #[test] + fn no_completion_for_incorrect_derive() { + check( + r#" +#[derive{<|>)] +struct Test {} +"#, + expect![[r#""#]], + ) + } + + #[test] + fn 
derive_with_input_completion() { + check( + r#" +#[derive(serde::Serialize, PartialEq, <|>)] +struct Test {} +"#, + expect![[r#" + at Clone + at Copy, Clone + at Debug + at Default + at Eq + at Hash + at Ord, PartialOrd, Eq + at PartialOrd + "#]], + ) + } + + #[test] + fn test_attribute_completion() { + check( + r#"#[<|>]"#, + expect![[r#" + at allow(…) + at cfg(…) + at cfg_attr(…) + at deny(…) + at deprecated = "…" + at derive(…) + at doc = "…" + at forbid(…) + at ignore = "…" + at inline(…) + at link + at link_name = "…" + at macro_export + at macro_use + at must_use = "…" + at no_mangle + at non_exhaustive + at path = "…" + at proc_macro + at proc_macro_attribute + at proc_macro_derive(…) + at repr(…) + at should_panic(…) + at target_feature = "…" + at test + at used + at warn(…) + "#]], + ) + } + + #[test] + fn test_attribute_completion_inside_nested_attr() { + check(r#"#[cfg(<|>)]"#, expect![[]]) + } + + #[test] + fn test_inner_attribute_completion() { + check( + r"#![<|>]", + expect![[r#" + at allow(…) + at cfg(…) + at cfg_attr(…) + at deny(…) + at deprecated = "…" + at derive(…) + at doc = "…" + at feature(…) + at forbid(…) + at global_allocator + at ignore = "…" + at inline(…) + at link + at link_name = "…" + at macro_export + at macro_use + at must_use = "…" + at no_mangle + at no_std + at non_exhaustive + at panic_handler + at path = "…" + at proc_macro + at proc_macro_attribute + at proc_macro_derive(…) + at recursion_limit = … + at repr(…) + at should_panic(…) + at target_feature = "…" + at test + at used + at warn(…) + at windows_subsystem = "…" + "#]], + ); + } +} diff --git a/crates/ra_ide/src/completion/complete_dot.rs b/crates/ide/src/completion/complete_dot.rs similarity index 100% rename from crates/ra_ide/src/completion/complete_dot.rs rename to crates/ide/src/completion/complete_dot.rs diff --git a/crates/ide/src/completion/complete_fn_param.rs b/crates/ide/src/completion/complete_fn_param.rs new file mode 100644 index 0000000000..7c63ce58f8 
--- /dev/null +++ b/crates/ide/src/completion/complete_fn_param.rs @@ -0,0 +1,135 @@ +//! See `complete_fn_param`. + +use rustc_hash::FxHashMap; +use syntax::{ + ast::{self, ModuleItemOwner}, + match_ast, AstNode, +}; + +use crate::completion::{CompletionContext, CompletionItem, CompletionKind, Completions}; + +/// Complete repeated parameters, both name and type. For example, if all +/// functions in a file have a `spam: &mut Spam` parameter, a completion with +/// `spam: &mut Spam` insert text/label and `spam` lookup string will be +/// suggested. +pub(super) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext) { + if !ctx.is_param { + return; + } + + let mut params = FxHashMap::default(); + + let me = ctx.token.ancestors().find_map(ast::Fn::cast); + let mut process_fn = |func: ast::Fn| { + if Some(&func) == me.as_ref() { + return; + } + func.param_list().into_iter().flat_map(|it| it.params()).for_each(|param| { + let text = param.syntax().text().to_string(); + params.entry(text).or_insert(param); + }) + }; + + for node in ctx.token.parent().ancestors() { + match_ast! 
{ + match node { + ast::SourceFile(it) => it.items().filter_map(|item| match item { + ast::Item::Fn(it) => Some(it), + _ => None, + }).for_each(&mut process_fn), + ast::ItemList(it) => it.items().filter_map(|item| match item { + ast::Item::Fn(it) => Some(it), + _ => None, + }).for_each(&mut process_fn), + ast::AssocItemList(it) => it.assoc_items().filter_map(|item| match item { + ast::AssocItem::Fn(it) => Some(it), + _ => None, + }).for_each(&mut process_fn), + _ => continue, + } + }; + } + + params + .into_iter() + .filter_map(|(label, param)| { + let lookup = param.pat()?.syntax().text().to_string(); + Some((label, lookup)) + }) + .for_each(|(label, lookup)| { + CompletionItem::new(CompletionKind::Magic, ctx.source_range(), label) + .kind(crate::CompletionItemKind::Binding) + .lookup_by(lookup) + .add_to(acc) + }); +} + +#[cfg(test)] +mod tests { + use expect::{expect, Expect}; + + use crate::completion::{test_utils::completion_list, CompletionKind}; + + fn check(ra_fixture: &str, expect: Expect) { + let actual = completion_list(ra_fixture, CompletionKind::Magic); + expect.assert_eq(&actual); + } + + #[test] + fn test_param_completion_last_param() { + check( + r#" +fn foo(file_id: FileId) {} +fn bar(file_id: FileId) {} +fn baz(file<|>) {} +"#, + expect![[r#" + bn file_id: FileId + "#]], + ); + } + + #[test] + fn test_param_completion_nth_param() { + check( + r#" +fn foo(file_id: FileId) {} +fn baz(file<|>, x: i32) {} +"#, + expect![[r#" + bn file_id: FileId + "#]], + ); + } + + #[test] + fn test_param_completion_trait_param() { + check( + r#" +pub(crate) trait SourceRoot { + pub fn contains(&self, file_id: FileId) -> bool; + pub fn module_map(&self) -> &ModuleMap; + pub fn lines(&self, file_id: FileId) -> &LineIndex; + pub fn syntax(&self, file<|>) +} +"#, + expect![[r#" + bn file_id: FileId + "#]], + ); + } + + #[test] + fn completes_param_in_inner_function() { + check( + r#" +fn outer(text: String) { + fn inner(<|>) +} +"#, + expect![[r#" + bn text: String + 
"#]], + ) + } +} diff --git a/crates/ide/src/completion/complete_keyword.rs b/crates/ide/src/completion/complete_keyword.rs new file mode 100644 index 0000000000..22ada3cf29 --- /dev/null +++ b/crates/ide/src/completion/complete_keyword.rs @@ -0,0 +1,527 @@ +//! FIXME: write short doc here + +use syntax::{ast, SyntaxKind}; +use test_utils::mark; + +use crate::completion::{ + CompletionContext, CompletionItem, CompletionItemKind, CompletionKind, Completions, +}; + +pub(super) fn complete_use_tree_keyword(acc: &mut Completions, ctx: &CompletionContext) { + // complete keyword "crate" in use stmt + let source_range = ctx.source_range(); + + if ctx.use_item_syntax.is_some() { + if ctx.path_qual.is_none() { + CompletionItem::new(CompletionKind::Keyword, source_range, "crate::") + .kind(CompletionItemKind::Keyword) + .insert_text("crate::") + .add_to(acc); + } + CompletionItem::new(CompletionKind::Keyword, source_range, "self") + .kind(CompletionItemKind::Keyword) + .add_to(acc); + CompletionItem::new(CompletionKind::Keyword, source_range, "super::") + .kind(CompletionItemKind::Keyword) + .insert_text("super::") + .add_to(acc); + } + + // Suggest .await syntax for types that implement Future trait + if let Some(receiver) = &ctx.dot_receiver { + if let Some(ty) = ctx.sema.type_of_expr(receiver) { + if ty.impls_future(ctx.db) { + CompletionItem::new(CompletionKind::Keyword, ctx.source_range(), "await") + .kind(CompletionItemKind::Keyword) + .detail("expr.await") + .insert_text("await") + .add_to(acc); + } + }; + } +} + +pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionContext) { + if ctx.token.kind() == SyntaxKind::COMMENT { + mark::hit!(no_keyword_completion_in_comments); + return; + } + + let has_trait_or_impl_parent = ctx.has_impl_parent || ctx.has_trait_parent; + if ctx.trait_as_prev_sibling || ctx.impl_as_prev_sibling { + add_keyword(ctx, acc, "where", "where "); + return; + } + if ctx.unsafe_is_prev { + if 
ctx.has_item_list_or_source_file_parent || ctx.block_expr_parent { + add_keyword(ctx, acc, "fn", "fn $0() {}") + } + + if (ctx.has_item_list_or_source_file_parent) || ctx.block_expr_parent { + add_keyword(ctx, acc, "trait", "trait $0 {}"); + add_keyword(ctx, acc, "impl", "impl $0 {}"); + } + + return; + } + if ctx.has_item_list_or_source_file_parent || has_trait_or_impl_parent || ctx.block_expr_parent + { + add_keyword(ctx, acc, "fn", "fn $0() {}"); + } + if (ctx.has_item_list_or_source_file_parent) || ctx.block_expr_parent { + add_keyword(ctx, acc, "use", "use "); + add_keyword(ctx, acc, "impl", "impl $0 {}"); + add_keyword(ctx, acc, "trait", "trait $0 {}"); + } + + if ctx.has_item_list_or_source_file_parent { + add_keyword(ctx, acc, "enum", "enum $0 {}"); + add_keyword(ctx, acc, "struct", "struct $0"); + add_keyword(ctx, acc, "union", "union $0 {}"); + } + + if ctx.is_expr { + add_keyword(ctx, acc, "match", "match $0 {}"); + add_keyword(ctx, acc, "while", "while $0 {}"); + add_keyword(ctx, acc, "loop", "loop {$0}"); + add_keyword(ctx, acc, "if", "if "); + add_keyword(ctx, acc, "if let", "if let "); + } + + if ctx.if_is_prev || ctx.block_expr_parent { + add_keyword(ctx, acc, "let", "let "); + } + + if ctx.after_if { + add_keyword(ctx, acc, "else", "else {$0}"); + add_keyword(ctx, acc, "else if", "else if $0 {}"); + } + if (ctx.has_item_list_or_source_file_parent) || ctx.block_expr_parent { + add_keyword(ctx, acc, "mod", "mod $0 {}"); + } + if ctx.bind_pat_parent || ctx.ref_pat_parent { + add_keyword(ctx, acc, "mut", "mut "); + } + if ctx.has_item_list_or_source_file_parent || has_trait_or_impl_parent || ctx.block_expr_parent + { + add_keyword(ctx, acc, "const", "const "); + add_keyword(ctx, acc, "type", "type "); + } + if (ctx.has_item_list_or_source_file_parent) || ctx.block_expr_parent { + add_keyword(ctx, acc, "static", "static "); + }; + if (ctx.has_item_list_or_source_file_parent) || ctx.block_expr_parent { + add_keyword(ctx, acc, "extern", "extern "); + } + 
if ctx.has_item_list_or_source_file_parent + || has_trait_or_impl_parent + || ctx.block_expr_parent + || ctx.is_match_arm + { + add_keyword(ctx, acc, "unsafe", "unsafe "); + } + if ctx.in_loop_body { + if ctx.can_be_stmt { + add_keyword(ctx, acc, "continue", "continue;"); + add_keyword(ctx, acc, "break", "break;"); + } else { + add_keyword(ctx, acc, "continue", "continue"); + add_keyword(ctx, acc, "break", "break"); + } + } + if ctx.has_item_list_or_source_file_parent || ctx.has_impl_parent { + add_keyword(ctx, acc, "pub", "pub ") + } + + if !ctx.is_trivial_path { + return; + } + let fn_def = match &ctx.function_syntax { + Some(it) => it, + None => return, + }; + acc.add_all(complete_return(ctx, &fn_def, ctx.can_be_stmt)); +} + +fn keyword(ctx: &CompletionContext, kw: &str, snippet: &str) -> CompletionItem { + let res = CompletionItem::new(CompletionKind::Keyword, ctx.source_range(), kw) + .kind(CompletionItemKind::Keyword); + + match ctx.config.snippet_cap { + Some(cap) => res.insert_snippet(cap, snippet), + _ => res.insert_text(if snippet.contains('$') { kw } else { snippet }), + } + .build() +} + +fn add_keyword(ctx: &CompletionContext, acc: &mut Completions, kw: &str, snippet: &str) { + acc.add(keyword(ctx, kw, snippet)); +} + +fn complete_return( + ctx: &CompletionContext, + fn_def: &ast::Fn, + can_be_stmt: bool, +) -> Option { + let snip = match (can_be_stmt, fn_def.ret_type().is_some()) { + (true, true) => "return $0;", + (true, false) => "return;", + (false, true) => "return $0", + (false, false) => "return", + }; + Some(keyword(ctx, "return", snip)) +} + +#[cfg(test)] +mod tests { + use expect::{expect, Expect}; + + use crate::completion::{ + test_utils::{check_edit, completion_list}, + CompletionKind, + }; + use test_utils::mark; + + fn check(ra_fixture: &str, expect: Expect) { + let actual = completion_list(ra_fixture, CompletionKind::Keyword); + expect.assert_eq(&actual) + } + + #[test] + fn test_keywords_in_use_stmt() { + check( + r"use <|>", + 
expect![[r#" + kw crate:: + kw self + kw super:: + "#]], + ); + + check( + r"use a::<|>", + expect![[r#" + kw self + kw super:: + "#]], + ); + + check( + r"use a::{b, <|>}", + expect![[r#" + kw self + kw super:: + "#]], + ); + } + + #[test] + fn test_keywords_at_source_file_level() { + check( + r"m<|>", + expect![[r#" + kw const + kw enum + kw extern + kw fn + kw impl + kw mod + kw pub + kw static + kw struct + kw trait + kw type + kw union + kw unsafe + kw use + "#]], + ); + } + + #[test] + fn test_keywords_in_function() { + check( + r"fn quux() { <|> }", + expect![[r#" + kw const + kw extern + kw fn + kw if + kw if let + kw impl + kw let + kw loop + kw match + kw mod + kw return + kw static + kw trait + kw type + kw unsafe + kw use + kw while + "#]], + ); + } + + #[test] + fn test_keywords_inside_block() { + check( + r"fn quux() { if true { <|> } }", + expect![[r#" + kw const + kw extern + kw fn + kw if + kw if let + kw impl + kw let + kw loop + kw match + kw mod + kw return + kw static + kw trait + kw type + kw unsafe + kw use + kw while + "#]], + ); + } + + #[test] + fn test_keywords_after_if() { + check( + r#"fn quux() { if true { () } <|> }"#, + expect![[r#" + kw const + kw else + kw else if + kw extern + kw fn + kw if + kw if let + kw impl + kw let + kw loop + kw match + kw mod + kw return + kw static + kw trait + kw type + kw unsafe + kw use + kw while + "#]], + ); + check_edit( + "else", + r#"fn quux() { if true { () } <|> }"#, + r#"fn quux() { if true { () } else {$0} }"#, + ); + } + + #[test] + fn test_keywords_in_match_arm() { + check( + r#" +fn quux() -> i32 { + match () { () => <|> } +} +"#, + expect![[r#" + kw if + kw if let + kw loop + kw match + kw return + kw unsafe + kw while + "#]], + ); + } + + #[test] + fn test_keywords_in_trait_def() { + check( + r"trait My { <|> }", + expect![[r#" + kw const + kw fn + kw type + kw unsafe + "#]], + ); + } + + #[test] + fn test_keywords_in_impl_def() { + check( + r"impl My { <|> }", + expect![[r#" + kw const + 
kw fn + kw pub + kw type + kw unsafe + "#]], + ); + } + + #[test] + fn test_keywords_in_loop() { + check( + r"fn my() { loop { <|> } }", + expect![[r#" + kw break + kw const + kw continue + kw extern + kw fn + kw if + kw if let + kw impl + kw let + kw loop + kw match + kw mod + kw return + kw static + kw trait + kw type + kw unsafe + kw use + kw while + "#]], + ); + } + + #[test] + fn test_keywords_after_unsafe_in_item_list() { + check( + r"unsafe <|>", + expect![[r#" + kw fn + kw impl + kw trait + "#]], + ); + } + + #[test] + fn test_keywords_after_unsafe_in_block_expr() { + check( + r"fn my_fn() { unsafe <|> }", + expect![[r#" + kw fn + kw impl + kw trait + "#]], + ); + } + + #[test] + fn test_mut_in_ref_and_in_fn_parameters_list() { + check( + r"fn my_fn(&<|>) {}", + expect![[r#" + kw mut + "#]], + ); + check( + r"fn my_fn(<|>) {}", + expect![[r#" + kw mut + "#]], + ); + check( + r"fn my_fn() { let &<|> }", + expect![[r#" + kw mut + "#]], + ); + } + + #[test] + fn test_where_keyword() { + check( + r"trait A <|>", + expect![[r#" + kw where + "#]], + ); + check( + r"impl A <|>", + expect![[r#" + kw where + "#]], + ); + } + + #[test] + fn no_keyword_completion_in_comments() { + mark::check!(no_keyword_completion_in_comments); + check( + r#" +fn test() { + let x = 2; // A comment<|> +} +"#, + expect![[""]], + ); + check( + r#" +/* +Some multi-line comment<|> +*/ +"#, + expect![[""]], + ); + check( + r#" +/// Some doc comment +/// let test<|> = 1 +"#, + expect![[""]], + ); + } + + #[test] + fn test_completion_await_impls_future() { + check( + r#" +//- /main.rs +use std::future::*; +struct A {} +impl Future for A {} +fn foo(a: A) { a.<|> } + +//- /std/lib.rs +pub mod future { + #[lang = "future_trait"] + pub trait Future {} +} +"#, + expect![[r#" + kw await expr.await + "#]], + ) + } + + #[test] + fn after_let() { + check( + r#"fn main() { let _ = <|> }"#, + expect![[r#" + kw if + kw if let + kw loop + kw match + kw return + kw while + "#]], + ) + } +} diff --git 
a/crates/ra_ide/src/completion/complete_macro_in_item_position.rs b/crates/ide/src/completion/complete_macro_in_item_position.rs similarity index 100% rename from crates/ra_ide/src/completion/complete_macro_in_item_position.rs rename to crates/ide/src/completion/complete_macro_in_item_position.rs diff --git a/crates/ra_ide/src/completion/complete_pattern.rs b/crates/ide/src/completion/complete_pattern.rs similarity index 100% rename from crates/ra_ide/src/completion/complete_pattern.rs rename to crates/ide/src/completion/complete_pattern.rs diff --git a/crates/ide/src/completion/complete_postfix.rs b/crates/ide/src/completion/complete_postfix.rs new file mode 100644 index 0000000000..d50b13c52c --- /dev/null +++ b/crates/ide/src/completion/complete_postfix.rs @@ -0,0 +1,378 @@ +//! FIXME: write short doc here +use assists::utils::TryEnum; +use syntax::{ + ast::{self, AstNode}, + TextRange, TextSize, +}; +use text_edit::TextEdit; + +use crate::{ + completion::{ + completion_config::SnippetCap, + completion_context::CompletionContext, + completion_item::{Builder, CompletionKind, Completions}, + }, + CompletionItem, CompletionItemKind, +}; + +pub(super) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) { + if !ctx.config.enable_postfix_completions { + return; + } + + let dot_receiver = match &ctx.dot_receiver { + Some(it) => it, + None => return, + }; + + let receiver_text = + get_receiver_text(dot_receiver, ctx.dot_receiver_is_ambiguous_float_literal); + + let receiver_ty = match ctx.sema.type_of_expr(&dot_receiver) { + Some(it) => it, + None => return, + }; + + let cap = match ctx.config.snippet_cap { + Some(it) => it, + None => return, + }; + let try_enum = TryEnum::from_ty(&ctx.sema, &receiver_ty); + if let Some(try_enum) = &try_enum { + match try_enum { + TryEnum::Result => { + postfix_snippet( + ctx, + cap, + &dot_receiver, + "ifl", + "if let Ok {}", + &format!("if let Ok($1) = {} {{\n $0\n}}", receiver_text), + ) + .add_to(acc); + + 
postfix_snippet( + ctx, + cap, + &dot_receiver, + "while", + "while let Ok {}", + &format!("while let Ok($1) = {} {{\n $0\n}}", receiver_text), + ) + .add_to(acc); + } + TryEnum::Option => { + postfix_snippet( + ctx, + cap, + &dot_receiver, + "ifl", + "if let Some {}", + &format!("if let Some($1) = {} {{\n $0\n}}", receiver_text), + ) + .add_to(acc); + + postfix_snippet( + ctx, + cap, + &dot_receiver, + "while", + "while let Some {}", + &format!("while let Some($1) = {} {{\n $0\n}}", receiver_text), + ) + .add_to(acc); + } + } + } else if receiver_ty.is_bool() || receiver_ty.is_unknown() { + postfix_snippet( + ctx, + cap, + &dot_receiver, + "if", + "if expr {}", + &format!("if {} {{\n $0\n}}", receiver_text), + ) + .add_to(acc); + postfix_snippet( + ctx, + cap, + &dot_receiver, + "while", + "while expr {}", + &format!("while {} {{\n $0\n}}", receiver_text), + ) + .add_to(acc); + postfix_snippet(ctx, cap, &dot_receiver, "not", "!expr", &format!("!{}", receiver_text)) + .add_to(acc); + } + + postfix_snippet(ctx, cap, &dot_receiver, "ref", "&expr", &format!("&{}", receiver_text)) + .add_to(acc); + postfix_snippet( + ctx, + cap, + &dot_receiver, + "refm", + "&mut expr", + &format!("&mut {}", receiver_text), + ) + .add_to(acc); + + // The rest of the postfix completions create an expression that moves an argument, + // so it's better to consider references now to avoid breaking the compilation + let dot_receiver = include_references(dot_receiver); + let receiver_text = + get_receiver_text(&dot_receiver, ctx.dot_receiver_is_ambiguous_float_literal); + + match try_enum { + Some(try_enum) => match try_enum { + TryEnum::Result => { + postfix_snippet( + ctx, + cap, + &dot_receiver, + "match", + "match expr {}", + &format!("match {} {{\n Ok(${{1:_}}) => {{$2}},\n Err(${{3:_}}) => {{$0}},\n}}", receiver_text), + ) + .add_to(acc); + } + TryEnum::Option => { + postfix_snippet( + ctx, + cap, + &dot_receiver, + "match", + "match expr {}", + &format!( + "match {} {{\n 
Some(${{1:_}}) => {{$2}},\n None => {{$0}},\n}}", + receiver_text + ), + ) + .add_to(acc); + } + }, + None => { + postfix_snippet( + ctx, + cap, + &dot_receiver, + "match", + "match expr {}", + &format!("match {} {{\n ${{1:_}} => {{$0}},\n}}", receiver_text), + ) + .add_to(acc); + } + } + + postfix_snippet( + ctx, + cap, + &dot_receiver, + "box", + "Box::new(expr)", + &format!("Box::new({})", receiver_text), + ) + .add_to(acc); + + postfix_snippet( + ctx, + cap, + &dot_receiver, + "dbg", + "dbg!(expr)", + &format!("dbg!({})", receiver_text), + ) + .add_to(acc); + + postfix_snippet( + ctx, + cap, + &dot_receiver, + "call", + "function(expr)", + &format!("${{1}}({})", receiver_text), + ) + .add_to(acc); +} + +fn get_receiver_text(receiver: &ast::Expr, receiver_is_ambiguous_float_literal: bool) -> String { + if receiver_is_ambiguous_float_literal { + let text = receiver.syntax().text(); + let without_dot = ..text.len() - TextSize::of('.'); + text.slice(without_dot).to_string() + } else { + receiver.to_string() + } +} + +fn include_references(initial_element: &ast::Expr) -> ast::Expr { + let mut resulting_element = initial_element.clone(); + while let Some(parent_ref_element) = + resulting_element.syntax().parent().and_then(ast::RefExpr::cast) + { + resulting_element = ast::Expr::from(parent_ref_element); + } + resulting_element +} + +fn postfix_snippet( + ctx: &CompletionContext, + cap: SnippetCap, + receiver: &ast::Expr, + label: &str, + detail: &str, + snippet: &str, +) -> Builder { + let edit = { + let receiver_syntax = receiver.syntax(); + let receiver_range = ctx.sema.original_range(receiver_syntax).range; + let delete_range = TextRange::new(receiver_range.start(), ctx.source_range().end()); + TextEdit::replace(delete_range, snippet.to_string()) + }; + CompletionItem::new(CompletionKind::Postfix, ctx.source_range(), label) + .detail(detail) + .kind(CompletionItemKind::Snippet) + .snippet_edit(cap, edit) +} + +#[cfg(test)] +mod tests { + use expect::{expect, 
Expect}; + + use crate::completion::{ + test_utils::{check_edit, completion_list}, + CompletionKind, + }; + + fn check(ra_fixture: &str, expect: Expect) { + let actual = completion_list(ra_fixture, CompletionKind::Postfix); + expect.assert_eq(&actual) + } + + #[test] + fn postfix_completion_works_for_trivial_path_expression() { + check( + r#" +fn main() { + let bar = true; + bar.<|> +} +"#, + expect![[r#" + sn box Box::new(expr) + sn call function(expr) + sn dbg dbg!(expr) + sn if if expr {} + sn match match expr {} + sn not !expr + sn ref &expr + sn refm &mut expr + sn while while expr {} + "#]], + ); + } + + #[test] + fn postfix_type_filtering() { + check( + r#" +fn main() { + let bar: u8 = 12; + bar.<|> +} +"#, + expect![[r#" + sn box Box::new(expr) + sn call function(expr) + sn dbg dbg!(expr) + sn match match expr {} + sn ref &expr + sn refm &mut expr + "#]], + ) + } + + #[test] + fn option_iflet() { + check_edit( + "ifl", + r#" +enum Option { Some(T), None } + +fn main() { + let bar = Option::Some(true); + bar.<|> +} +"#, + r#" +enum Option { Some(T), None } + +fn main() { + let bar = Option::Some(true); + if let Some($1) = bar { + $0 +} +} +"#, + ); + } + + #[test] + fn result_match() { + check_edit( + "match", + r#" +enum Result { Ok(T), Err(E) } + +fn main() { + let bar = Result::Ok(true); + bar.<|> +} +"#, + r#" +enum Result { Ok(T), Err(E) } + +fn main() { + let bar = Result::Ok(true); + match bar { + Ok(${1:_}) => {$2}, + Err(${3:_}) => {$0}, +} +} +"#, + ); + } + + #[test] + fn postfix_completion_works_for_ambiguous_float_literal() { + check_edit("refm", r#"fn main() { 42.<|> }"#, r#"fn main() { &mut 42 }"#) + } + + #[test] + fn works_in_simple_macro() { + check_edit( + "dbg", + r#" +macro_rules! m { ($e:expr) => { $e } } +fn main() { + let bar: u8 = 12; + m!(bar.d<|>) +} +"#, + r#" +macro_rules! 
m { ($e:expr) => { $e } } +fn main() { + let bar: u8 = 12; + m!(dbg!(bar)) +} +"#, + ); + } + + #[test] + fn postfix_completion_for_references() { + check_edit("dbg", r#"fn main() { &&42.<|> }"#, r#"fn main() { dbg!(&&42) }"#); + check_edit("refm", r#"fn main() { &&42.<|> }"#, r#"fn main() { &&&mut 42 }"#); + } +} diff --git a/crates/ide/src/completion/complete_qualified_path.rs b/crates/ide/src/completion/complete_qualified_path.rs new file mode 100644 index 0000000000..74794dc88b --- /dev/null +++ b/crates/ide/src/completion/complete_qualified_path.rs @@ -0,0 +1,733 @@ +//! Completion of paths, i.e. `some::prefix::<|>`. + +use hir::{Adt, HasVisibility, PathResolution, ScopeDef}; +use rustc_hash::FxHashSet; +use syntax::AstNode; +use test_utils::mark; + +use crate::completion::{CompletionContext, Completions}; + +pub(super) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionContext) { + let path = match &ctx.path_qual { + Some(path) => path.clone(), + None => return, + }; + + if ctx.attribute_under_caret.is_some() { + return; + } + + let context_module = ctx.scope.module(); + + let resolution = match ctx.sema.resolve_path(&path) { + Some(res) => res, + None => return, + }; + + // Add associated types on type parameters and `Self`. 
+ resolution.assoc_type_shorthand_candidates(ctx.db, |alias| { + acc.add_type_alias(ctx, alias); + None::<()> + }); + + match resolution { + PathResolution::Def(hir::ModuleDef::Module(module)) => { + let module_scope = module.scope(ctx.db, context_module); + for (name, def) in module_scope { + if ctx.use_item_syntax.is_some() { + if let ScopeDef::Unknown = def { + if let Some(name_ref) = ctx.name_ref_syntax.as_ref() { + if name_ref.syntax().text() == name.to_string().as_str() { + // for `use self::foo<|>`, don't suggest `foo` as a completion + mark::hit!(dont_complete_current_use); + continue; + } + } + } + } + + acc.add_resolution(ctx, name.to_string(), &def); + } + } + PathResolution::Def(def @ hir::ModuleDef::Adt(_)) + | PathResolution::Def(def @ hir::ModuleDef::TypeAlias(_)) => { + if let hir::ModuleDef::Adt(Adt::Enum(e)) = def { + for variant in e.variants(ctx.db) { + acc.add_enum_variant(ctx, variant, None); + } + } + let ty = match def { + hir::ModuleDef::Adt(adt) => adt.ty(ctx.db), + hir::ModuleDef::TypeAlias(a) => a.ty(ctx.db), + _ => unreachable!(), + }; + + // XXX: For parity with Rust bug #22519, this does not complete Ty::AssocType. 
+ // (where AssocType is defined on a trait, not an inherent impl) + + let krate = ctx.krate; + if let Some(krate) = krate { + let traits_in_scope = ctx.scope.traits_in_scope(); + ty.iterate_path_candidates(ctx.db, krate, &traits_in_scope, None, |_ty, item| { + if context_module.map_or(false, |m| !item.is_visible_from(ctx.db, m)) { + return None; + } + match item { + hir::AssocItem::Function(func) => { + acc.add_function(ctx, func, None); + } + hir::AssocItem::Const(ct) => acc.add_const(ctx, ct), + hir::AssocItem::TypeAlias(ty) => acc.add_type_alias(ctx, ty), + } + None::<()> + }); + + // Iterate assoc types separately + ty.iterate_assoc_items(ctx.db, krate, |item| { + if context_module.map_or(false, |m| !item.is_visible_from(ctx.db, m)) { + return None; + } + match item { + hir::AssocItem::Function(_) | hir::AssocItem::Const(_) => {} + hir::AssocItem::TypeAlias(ty) => acc.add_type_alias(ctx, ty), + } + None::<()> + }); + } + } + PathResolution::Def(hir::ModuleDef::Trait(t)) => { + // Handles `Trait::assoc` as well as `::assoc`. 
+ for item in t.items(ctx.db) { + if context_module.map_or(false, |m| !item.is_visible_from(ctx.db, m)) { + continue; + } + match item { + hir::AssocItem::Function(func) => { + acc.add_function(ctx, func, None); + } + hir::AssocItem::Const(ct) => acc.add_const(ctx, ct), + hir::AssocItem::TypeAlias(ty) => acc.add_type_alias(ctx, ty), + } + } + } + PathResolution::TypeParam(_) | PathResolution::SelfType(_) => { + if let Some(krate) = ctx.krate { + let ty = match resolution { + PathResolution::TypeParam(param) => param.ty(ctx.db), + PathResolution::SelfType(impl_def) => impl_def.target_ty(ctx.db), + _ => return, + }; + + let traits_in_scope = ctx.scope.traits_in_scope(); + let mut seen = FxHashSet::default(); + ty.iterate_path_candidates(ctx.db, krate, &traits_in_scope, None, |_ty, item| { + if context_module.map_or(false, |m| !item.is_visible_from(ctx.db, m)) { + return None; + } + + // We might iterate candidates of a trait multiple times here, so deduplicate + // them. + if seen.insert(item) { + match item { + hir::AssocItem::Function(func) => { + acc.add_function(ctx, func, None); + } + hir::AssocItem::Const(ct) => acc.add_const(ctx, ct), + hir::AssocItem::TypeAlias(ty) => acc.add_type_alias(ctx, ty), + } + } + None::<()> + }); + } + } + _ => {} + } +} + +#[cfg(test)] +mod tests { + use expect::{expect, Expect}; + use test_utils::mark; + + use crate::completion::{ + test_utils::{check_edit, completion_list}, + CompletionKind, + }; + + fn check(ra_fixture: &str, expect: Expect) { + let actual = completion_list(ra_fixture, CompletionKind::Reference); + expect.assert_eq(&actual); + } + + fn check_builtin(ra_fixture: &str, expect: Expect) { + let actual = completion_list(ra_fixture, CompletionKind::BuiltinType); + expect.assert_eq(&actual); + } + + #[test] + fn dont_complete_current_use() { + mark::check!(dont_complete_current_use); + check(r#"use self::foo<|>;"#, expect![[""]]); + } + + #[test] + fn dont_complete_current_use_in_braces_with_glob() { + check( + r#" 
+mod foo { pub struct S; } +use self::{foo::*, bar<|>}; +"#, + expect![[r#" + st S + md foo + "#]], + ); + } + + #[test] + fn dont_complete_primitive_in_use() { + check_builtin(r#"use self::<|>;"#, expect![[""]]); + } + + #[test] + fn dont_complete_primitive_in_module_scope() { + check_builtin(r#"fn foo() { self::<|> }"#, expect![[""]]); + } + + #[test] + fn completes_primitives() { + check_builtin( + r#"fn main() { let _: <|> = 92; }"#, + expect![[r#" + bt bool + bt char + bt f32 + bt f64 + bt i128 + bt i16 + bt i32 + bt i64 + bt i8 + bt isize + bt str + bt u128 + bt u16 + bt u32 + bt u64 + bt u8 + bt usize + "#]], + ); + } + + #[test] + fn completes_mod_with_same_name_as_function() { + check( + r#" +use self::my::<|>; + +mod my { pub struct Bar; } +fn my() {} +"#, + expect![[r#" + st Bar + "#]], + ); + } + + #[test] + fn filters_visibility() { + check( + r#" +use self::my::<|>; + +mod my { + struct Bar; + pub struct Foo; + pub use Bar as PublicBar; +} +"#, + expect![[r#" + st Foo + st PublicBar + "#]], + ); + } + + #[test] + fn completes_use_item_starting_with_self() { + check( + r#" +use self::m::<|>; + +mod m { pub struct Bar; } +"#, + expect![[r#" + st Bar + "#]], + ); + } + + #[test] + fn completes_use_item_starting_with_crate() { + check( + r#" +//- /lib.rs +mod foo; +struct Spam; +//- /foo.rs +use crate::Sp<|> +"#, + expect![[r#" + st Spam + md foo + "#]], + ); + } + + #[test] + fn completes_nested_use_tree() { + check( + r#" +//- /lib.rs +mod foo; +struct Spam; +//- /foo.rs +use crate::{Sp<|>}; +"#, + expect![[r#" + st Spam + md foo + "#]], + ); + } + + #[test] + fn completes_deeply_nested_use_tree() { + check( + r#" +//- /lib.rs +mod foo; +pub mod bar { + pub mod baz { + pub struct Spam; + } +} +//- /foo.rs +use crate::{bar::{baz::Sp<|>}}; +"#, + expect![[r#" + st Spam + "#]], + ); + } + + #[test] + fn completes_enum_variant() { + check( + r#" +enum E { Foo, Bar(i32) } +fn foo() { let _ = E::<|> } +"#, + expect![[r#" + ev Bar(…) (i32) + ev Foo () + "#]], 
+ ); + } + + #[test] + fn completes_struct_associated_items() { + check( + r#" +//- /lib.rs +struct S; + +impl S { + fn a() {} + fn b(&self) {} + const C: i32 = 42; + type T = i32; +} + +fn foo() { let _ = S::<|> } +"#, + expect![[r#" + ct C const C: i32 = 42; + ta T type T = i32; + fn a() fn a() + me b() fn b(&self) + "#]], + ); + } + + #[test] + fn associated_item_visibility() { + check( + r#" +struct S; + +mod m { + impl super::S { + pub(super) fn public_method() { } + fn private_method() { } + pub(super) type PublicType = u32; + type PrivateType = u32; + pub(super) const PUBLIC_CONST: u32 = 1; + const PRIVATE_CONST: u32 = 1; + } +} + +fn foo() { let _ = S::<|> } +"#, + expect![[r#" + ct PUBLIC_CONST pub(super) const PUBLIC_CONST: u32 = 1; + ta PublicType pub(super) type PublicType = u32; + fn public_method() pub(super) fn public_method() + "#]], + ); + } + + #[test] + fn completes_enum_associated_method() { + check( + r#" +enum E {}; +impl E { fn m() { } } + +fn foo() { let _ = E::<|> } + "#, + expect![[r#" + fn m() fn m() + "#]], + ); + } + + #[test] + fn completes_union_associated_method() { + check( + r#" +union U {}; +impl U { fn m() { } } + +fn foo() { let _ = U::<|> } +"#, + expect![[r#" + fn m() fn m() + "#]], + ); + } + + #[test] + fn completes_use_paths_across_crates() { + check( + r#" +//- /main.rs +use foo::<|>; + +//- /foo/lib.rs +pub mod bar { pub struct S; } +"#, + expect![[r#" + md bar + "#]], + ); + } + + #[test] + fn completes_trait_associated_method_1() { + check( + r#" +trait Trait { fn m(); } + +fn foo() { let _ = Trait::<|> } +"#, + expect![[r#" + fn m() fn m() + "#]], + ); + } + + #[test] + fn completes_trait_associated_method_2() { + check( + r#" +trait Trait { fn m(); } + +struct S; +impl Trait for S {} + +fn foo() { let _ = S::<|> } +"#, + expect![[r#" + fn m() fn m() + "#]], + ); + } + + #[test] + fn completes_trait_associated_method_3() { + check( + r#" +trait Trait { fn m(); } + +struct S; +impl Trait for S {} + +fn foo() { let _ = 
::<|> } +"#, + expect![[r#" + fn m() fn m() + "#]], + ); + } + + #[test] + fn completes_ty_param_assoc_ty() { + check( + r#" +trait Super { + type Ty; + const CONST: u8; + fn func() {} + fn method(&self) {} +} + +trait Sub: Super { + type SubTy; + const C2: (); + fn subfunc() {} + fn submethod(&self) {} +} + +fn foo() { T::<|> } +"#, + expect![[r#" + ct C2 const C2: (); + ct CONST const CONST: u8; + ta SubTy type SubTy; + ta Ty type Ty; + fn func() fn func() + me method() fn method(&self) + fn subfunc() fn subfunc() + me submethod() fn submethod(&self) + "#]], + ); + } + + #[test] + fn completes_self_param_assoc_ty() { + check( + r#" +trait Super { + type Ty; + const CONST: u8 = 0; + fn func() {} + fn method(&self) {} +} + +trait Sub: Super { + type SubTy; + const C2: () = (); + fn subfunc() {} + fn submethod(&self) {} +} + +struct Wrap(T); +impl Super for Wrap {} +impl Sub for Wrap { + fn subfunc() { + // Should be able to assume `Self: Sub + Super` + Self::<|> + } +} +"#, + expect![[r#" + ct C2 const C2: () = (); + ct CONST const CONST: u8 = 0; + ta SubTy type SubTy; + ta Ty type Ty; + fn func() fn func() + me method() fn method(&self) + fn subfunc() fn subfunc() + me submethod() fn submethod(&self) + "#]], + ); + } + + #[test] + fn completes_type_alias() { + check( + r#" +struct S; +impl S { fn foo() {} } +type T = S; +impl T { fn bar() {} } + +fn main() { T::<|>; } +"#, + expect![[r#" + fn bar() fn bar() + fn foo() fn foo() + "#]], + ); + } + + #[test] + fn completes_qualified_macros() { + check( + r#" +#[macro_export] +macro_rules! foo { () => {} } + +fn main() { let _ = crate::<|> } + "#, + expect![[r##" + ma foo!(…) #[macro_export] + macro_rules! 
foo + fn main() fn main() + "##]], + ); + } + + #[test] + fn test_super_super_completion() { + check( + r#" +mod a { + const A: usize = 0; + mod b { + const B: usize = 0; + mod c { use super::super::<|> } + } +} +"#, + expect![[r#" + ct A + md b + "#]], + ); + } + + #[test] + fn completes_reexported_items_under_correct_name() { + check( + r#" +fn foo() { self::m::<|> } + +mod m { + pub use super::p::wrong_fn as right_fn; + pub use super::p::WRONG_CONST as RIGHT_CONST; + pub use super::p::WrongType as RightType; +} +mod p { + fn wrong_fn() {} + const WRONG_CONST: u32 = 1; + struct WrongType {}; +} +"#, + expect![[r#" + ct RIGHT_CONST + st RightType + fn right_fn() fn wrong_fn() + "#]], + ); + + check_edit( + "RightType", + r#" +fn foo() { self::m::<|> } + +mod m { + pub use super::p::wrong_fn as right_fn; + pub use super::p::WRONG_CONST as RIGHT_CONST; + pub use super::p::WrongType as RightType; +} +mod p { + fn wrong_fn() {} + const WRONG_CONST: u32 = 1; + struct WrongType {}; +} +"#, + r#" +fn foo() { self::m::RightType } + +mod m { + pub use super::p::wrong_fn as right_fn; + pub use super::p::WRONG_CONST as RIGHT_CONST; + pub use super::p::WrongType as RightType; +} +mod p { + fn wrong_fn() {} + const WRONG_CONST: u32 = 1; + struct WrongType {}; +} +"#, + ); + } + + #[test] + fn completes_in_simple_macro_call() { + check( + r#" +macro_rules! 
m { ($e:expr) => { $e } } +fn main() { m!(self::f<|>); } +fn foo() {} +"#, + expect![[r#" + fn foo() fn foo() + fn main() fn main() + "#]], + ); + } + + #[test] + fn function_mod_share_name() { + check( + r#" +fn foo() { self::m::<|> } + +mod m { + pub mod z {} + pub fn z() {} +} +"#, + expect![[r#" + md z + fn z() pub fn z() + "#]], + ); + } + + #[test] + fn completes_hashmap_new() { + check( + r#" +struct RandomState; +struct HashMap {} + +impl HashMap { + pub fn new() -> HashMap { } +} +fn foo() { + HashMap::<|> +} +"#, + expect![[r#" + fn new() pub fn new() -> HashMap + "#]], + ); + } + + #[test] + fn dont_complete_attr() { + check( + r#" +mod foo { pub struct Foo; } +#[foo::<|>] +fn f() {} +"#, + expect![[""]], + ); + } +} diff --git a/crates/ra_ide/src/completion/complete_record.rs b/crates/ide/src/completion/complete_record.rs similarity index 100% rename from crates/ra_ide/src/completion/complete_record.rs rename to crates/ide/src/completion/complete_record.rs diff --git a/crates/ide/src/completion/complete_snippet.rs b/crates/ide/src/completion/complete_snippet.rs new file mode 100644 index 0000000000..4368e4eec8 --- /dev/null +++ b/crates/ide/src/completion/complete_snippet.rs @@ -0,0 +1,116 @@ +//! 
FIXME: write short doc here + +use crate::completion::{ + completion_config::SnippetCap, completion_item::Builder, CompletionContext, CompletionItem, + CompletionItemKind, CompletionKind, Completions, +}; + +fn snippet(ctx: &CompletionContext, cap: SnippetCap, label: &str, snippet: &str) -> Builder { + CompletionItem::new(CompletionKind::Snippet, ctx.source_range(), label) + .insert_snippet(cap, snippet) + .kind(CompletionItemKind::Snippet) +} + +pub(super) fn complete_expr_snippet(acc: &mut Completions, ctx: &CompletionContext) { + if !(ctx.is_trivial_path && ctx.function_syntax.is_some()) { + return; + } + let cap = match ctx.config.snippet_cap { + Some(it) => it, + None => return, + }; + + snippet(ctx, cap, "pd", "eprintln!(\"$0 = {:?}\", $0);").add_to(acc); + snippet(ctx, cap, "ppd", "eprintln!(\"$0 = {:#?}\", $0);").add_to(acc); +} + +pub(super) fn complete_item_snippet(acc: &mut Completions, ctx: &CompletionContext) { + if !ctx.is_new_item { + return; + } + let cap = match ctx.config.snippet_cap { + Some(it) => it, + None => return, + }; + + snippet( + ctx, + cap, + "tmod (Test module)", + "\ +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn ${1:test_name}() { + $0 + } +}", + ) + .lookup_by("tmod") + .add_to(acc); + + snippet( + ctx, + cap, + "tfn (Test function)", + "\ +#[test] +fn ${1:feature}() { + $0 +}", + ) + .lookup_by("tfn") + .add_to(acc); + + snippet(ctx, cap, "macro_rules", "macro_rules! 
$1 {\n\t($2) => {\n\t\t$0\n\t};\n}").add_to(acc); + snippet(ctx, cap, "pub(crate)", "pub(crate) $0").add_to(acc); +} + +#[cfg(test)] +mod tests { + use expect::{expect, Expect}; + + use crate::completion::{test_utils::completion_list, CompletionKind}; + + fn check(ra_fixture: &str, expect: Expect) { + let actual = completion_list(ra_fixture, CompletionKind::Snippet); + expect.assert_eq(&actual) + } + + #[test] + fn completes_snippets_in_expressions() { + check( + r#"fn foo(x: i32) { <|> }"#, + expect![[r#" + sn pd + sn ppd + "#]], + ); + } + + #[test] + fn should_not_complete_snippets_in_path() { + check(r#"fn foo(x: i32) { ::foo<|> }"#, expect![[""]]); + check(r#"fn foo(x: i32) { ::<|> }"#, expect![[""]]); + } + + #[test] + fn completes_snippets_in_items() { + check( + r#" +#[cfg(test)] +mod tests { + <|> +} +"#, + expect![[r#" + sn macro_rules + sn pub(crate) + sn tfn (Test function) + sn tmod (Test module) + "#]], + ) + } +} diff --git a/crates/ide/src/completion/complete_trait_impl.rs b/crates/ide/src/completion/complete_trait_impl.rs new file mode 100644 index 0000000000..478e312623 --- /dev/null +++ b/crates/ide/src/completion/complete_trait_impl.rs @@ -0,0 +1,488 @@ +//! Completion for associated items in a trait implementation. +//! +//! This module adds the completion items related to implementing associated +//! items within a `impl Trait for Struct` block. The current context node +//! must be within either a `FN`, `TYPE_ALIAS`, or `CONST` node +//! and an direct child of an `IMPL`. +//! +//! # Examples +//! +//! Considering the following trait `impl`: +//! +//! ```ignore +//! trait SomeTrait { +//! fn foo(); +//! } +//! +//! impl SomeTrait for () { +//! fn f<|> +//! } +//! ``` +//! +//! may result in the completion of the following method: +//! +//! ```ignore +//! # trait SomeTrait { +//! # fn foo(); +//! # } +//! +//! impl SomeTrait for () { +//! fn foo() {}<|> +//! } +//! 
``` + +use assists::utils::get_missing_assoc_items; +use hir::{self, Docs, HasSource}; +use syntax::{ + ast::{self, edit, Impl}, + AstNode, SyntaxKind, SyntaxNode, TextRange, T, +}; +use text_edit::TextEdit; + +use crate::{ + completion::{ + CompletionContext, CompletionItem, CompletionItemKind, CompletionKind, Completions, + }, + display::function_declaration, +}; + +pub(crate) fn complete_trait_impl(acc: &mut Completions, ctx: &CompletionContext) { + if let Some((trigger, impl_def)) = completion_match(ctx) { + match trigger.kind() { + SyntaxKind::NAME_REF => get_missing_assoc_items(&ctx.sema, &impl_def) + .into_iter() + .for_each(|item| match item { + hir::AssocItem::Function(fn_item) => { + add_function_impl(&trigger, acc, ctx, fn_item) + } + hir::AssocItem::TypeAlias(type_item) => { + add_type_alias_impl(&trigger, acc, ctx, type_item) + } + hir::AssocItem::Const(const_item) => { + add_const_impl(&trigger, acc, ctx, const_item) + } + }), + + SyntaxKind::FN => { + for missing_fn in get_missing_assoc_items(&ctx.sema, &impl_def) + .into_iter() + .filter_map(|item| match item { + hir::AssocItem::Function(fn_item) => Some(fn_item), + _ => None, + }) + { + add_function_impl(&trigger, acc, ctx, missing_fn); + } + } + + SyntaxKind::TYPE_ALIAS => { + for missing_fn in get_missing_assoc_items(&ctx.sema, &impl_def) + .into_iter() + .filter_map(|item| match item { + hir::AssocItem::TypeAlias(type_item) => Some(type_item), + _ => None, + }) + { + add_type_alias_impl(&trigger, acc, ctx, missing_fn); + } + } + + SyntaxKind::CONST => { + for missing_fn in get_missing_assoc_items(&ctx.sema, &impl_def) + .into_iter() + .filter_map(|item| match item { + hir::AssocItem::Const(const_item) => Some(const_item), + _ => None, + }) + { + add_const_impl(&trigger, acc, ctx, missing_fn); + } + } + + _ => {} + } + } +} + +fn completion_match(ctx: &CompletionContext) -> Option<(SyntaxNode, Impl)> { + let (trigger, impl_def_offset) = ctx.token.ancestors().find_map(|p| match p.kind() { + 
SyntaxKind::FN | SyntaxKind::TYPE_ALIAS | SyntaxKind::CONST | SyntaxKind::BLOCK_EXPR => { + Some((p, 2)) + } + SyntaxKind::NAME_REF => Some((p, 5)), + _ => None, + })?; + let impl_def = (0..impl_def_offset - 1) + .try_fold(trigger.parent()?, |t, _| t.parent()) + .and_then(ast::Impl::cast)?; + Some((trigger, impl_def)) +} + +fn add_function_impl( + fn_def_node: &SyntaxNode, + acc: &mut Completions, + ctx: &CompletionContext, + func: hir::Function, +) { + let fn_name = func.name(ctx.db).to_string(); + + let label = if !func.params(ctx.db).is_empty() { + format!("fn {}(..)", fn_name) + } else { + format!("fn {}()", fn_name) + }; + + let builder = CompletionItem::new(CompletionKind::Magic, ctx.source_range(), label) + .lookup_by(fn_name) + .set_documentation(func.docs(ctx.db)); + + let completion_kind = if func.has_self_param(ctx.db) { + CompletionItemKind::Method + } else { + CompletionItemKind::Function + }; + let range = TextRange::new(fn_def_node.text_range().start(), ctx.source_range().end()); + + let function_decl = function_declaration(&func.source(ctx.db).value); + match ctx.config.snippet_cap { + Some(cap) => { + let snippet = format!("{} {{\n $0\n}}", function_decl); + builder.snippet_edit(cap, TextEdit::replace(range, snippet)) + } + None => { + let header = format!("{} {{", function_decl); + builder.text_edit(TextEdit::replace(range, header)) + } + } + .kind(completion_kind) + .add_to(acc); +} + +fn add_type_alias_impl( + type_def_node: &SyntaxNode, + acc: &mut Completions, + ctx: &CompletionContext, + type_alias: hir::TypeAlias, +) { + let alias_name = type_alias.name(ctx.db).to_string(); + + let snippet = format!("type {} = ", alias_name); + + let range = TextRange::new(type_def_node.text_range().start(), ctx.source_range().end()); + + CompletionItem::new(CompletionKind::Magic, ctx.source_range(), snippet.clone()) + .text_edit(TextEdit::replace(range, snippet)) + .lookup_by(alias_name) + .kind(CompletionItemKind::TypeAlias) + 
.set_documentation(type_alias.docs(ctx.db)) + .add_to(acc); +} + +fn add_const_impl( + const_def_node: &SyntaxNode, + acc: &mut Completions, + ctx: &CompletionContext, + const_: hir::Const, +) { + let const_name = const_.name(ctx.db).map(|n| n.to_string()); + + if let Some(const_name) = const_name { + let snippet = make_const_compl_syntax(&const_.source(ctx.db).value); + + let range = TextRange::new(const_def_node.text_range().start(), ctx.source_range().end()); + + CompletionItem::new(CompletionKind::Magic, ctx.source_range(), snippet.clone()) + .text_edit(TextEdit::replace(range, snippet)) + .lookup_by(const_name) + .kind(CompletionItemKind::Const) + .set_documentation(const_.docs(ctx.db)) + .add_to(acc); + } +} + +fn make_const_compl_syntax(const_: &ast::Const) -> String { + let const_ = edit::remove_attrs_and_docs(const_); + + let const_start = const_.syntax().text_range().start(); + let const_end = const_.syntax().text_range().end(); + + let start = + const_.syntax().first_child_or_token().map_or(const_start, |f| f.text_range().start()); + + let end = const_ + .syntax() + .children_with_tokens() + .find(|s| s.kind() == T![;] || s.kind() == T![=]) + .map_or(const_end, |f| f.text_range().start()); + + let len = end - start; + let range = TextRange::new(0.into(), len); + + let syntax = const_.syntax().text().slice(range).to_string(); + + format!("{} = ", syntax.trim_end()) +} + +#[cfg(test)] +mod tests { + use expect::{expect, Expect}; + + use crate::completion::{ + test_utils::{check_edit, completion_list}, + CompletionKind, + }; + + fn check(ra_fixture: &str, expect: Expect) { + let actual = completion_list(ra_fixture, CompletionKind::Magic); + expect.assert_eq(&actual) + } + + #[test] + fn name_ref_function_type_const() { + check( + r#" +trait Test { + type TestType; + const TEST_CONST: u16; + fn test(); +} +struct T; + +impl Test for T { + t<|> +} +"#, + expect![[" +ct const TEST_CONST: u16 = \n\ +fn fn test() +ta type TestType = \n\ + "]], + ); + } + + 
#[test] + fn no_nested_fn_completions() { + check( + r" +trait Test { + fn test(); + fn test2(); +} +struct T; + +impl Test for T { + fn test() { + t<|> + } +} +", + expect![[""]], + ); + } + + #[test] + fn name_ref_single_function() { + check_edit( + "test", + r#" +trait Test { + fn test(); +} +struct T; + +impl Test for T { + t<|> +} +"#, + r#" +trait Test { + fn test(); +} +struct T; + +impl Test for T { + fn test() { + $0 +} +} +"#, + ); + } + + #[test] + fn single_function() { + check_edit( + "test", + r#" +trait Test { + fn test(); +} +struct T; + +impl Test for T { + fn t<|> +} +"#, + r#" +trait Test { + fn test(); +} +struct T; + +impl Test for T { + fn test() { + $0 +} +} +"#, + ); + } + + #[test] + fn hide_implemented_fn() { + check( + r#" +trait Test { + fn foo(); + fn foo_bar(); +} +struct T; + +impl Test for T { + fn foo() {} + fn f<|> +} +"#, + expect![[r#" + fn fn foo_bar() + "#]], + ); + } + + #[test] + fn generic_fn() { + check_edit( + "foo", + r#" +trait Test { + fn foo(); +} +struct T; + +impl Test for T { + fn f<|> +} +"#, + r#" +trait Test { + fn foo(); +} +struct T; + +impl Test for T { + fn foo() { + $0 +} +} +"#, + ); + check_edit( + "foo", + r#" +trait Test { + fn foo() where T: Into; +} +struct T; + +impl Test for T { + fn f<|> +} +"#, + r#" +trait Test { + fn foo() where T: Into; +} +struct T; + +impl Test for T { + fn foo() +where T: Into { + $0 +} +} +"#, + ); + } + + #[test] + fn associated_type() { + check_edit( + "SomeType", + r#" +trait Test { + type SomeType; +} + +impl Test for () { + type S<|> +} +"#, + " +trait Test { + type SomeType; +} + +impl Test for () { + type SomeType = \n\ +} +", + ); + } + + #[test] + fn associated_const() { + check_edit( + "SOME_CONST", + r#" +trait Test { + const SOME_CONST: u16; +} + +impl Test for () { + const S<|> +} +"#, + " +trait Test { + const SOME_CONST: u16; +} + +impl Test for () { + const SOME_CONST: u16 = \n\ +} +", + ); + + check_edit( + "SOME_CONST", + r#" +trait Test { + const 
SOME_CONST: u16 = 92; +} + +impl Test for () { + const S<|> +} +"#, + " +trait Test { + const SOME_CONST: u16 = 92; +} + +impl Test for () { + const SOME_CONST: u16 = \n\ +} +", + ); + } +} diff --git a/crates/ide/src/completion/complete_unqualified_path.rs b/crates/ide/src/completion/complete_unqualified_path.rs new file mode 100644 index 0000000000..824227f310 --- /dev/null +++ b/crates/ide/src/completion/complete_unqualified_path.rs @@ -0,0 +1,658 @@ +//! Completion of names from the current scope, e.g. locals and imported items. + +use hir::{Adt, ModuleDef, ScopeDef, Type}; +use syntax::AstNode; +use test_utils::mark; + +use crate::completion::{CompletionContext, Completions}; + +pub(super) fn complete_unqualified_path(acc: &mut Completions, ctx: &CompletionContext) { + if !(ctx.is_trivial_path || ctx.is_pat_binding_or_const) { + return; + } + if ctx.record_lit_syntax.is_some() + || ctx.record_pat_syntax.is_some() + || ctx.attribute_under_caret.is_some() + { + return; + } + + if let Some(ty) = &ctx.expected_type { + complete_enum_variants(acc, ctx, ty); + } + + if ctx.is_pat_binding_or_const { + return; + } + + ctx.scope.process_all_names(&mut |name, res| { + if ctx.use_item_syntax.is_some() { + if let (ScopeDef::Unknown, Some(name_ref)) = (&res, &ctx.name_ref_syntax) { + if name_ref.syntax().text() == name.to_string().as_str() { + mark::hit!(self_fulfilling_completion); + return; + } + } + } + acc.add_resolution(ctx, name.to_string(), &res) + }); +} + +fn complete_enum_variants(acc: &mut Completions, ctx: &CompletionContext, ty: &Type) { + if let Some(Adt::Enum(enum_data)) = ty.as_adt() { + let variants = enum_data.variants(ctx.db); + + let module = if let Some(module) = ctx.scope.module() { + // Compute path from the completion site if available. + module + } else { + // Otherwise fall back to the enum's definition site. 
+ enum_data.module(ctx.db) + }; + + for variant in variants { + if let Some(path) = module.find_use_path(ctx.db, ModuleDef::from(variant)) { + // Variants with trivial paths are already added by the existing completion logic, + // so we should avoid adding these twice + if path.segments.len() > 1 { + acc.add_qualified_enum_variant(ctx, variant, path); + } + } + } + } +} + +#[cfg(test)] +mod tests { + use expect::{expect, Expect}; + use test_utils::mark; + + use crate::completion::{ + test_utils::{check_edit, completion_list}, + CompletionKind, + }; + + fn check(ra_fixture: &str, expect: Expect) { + let actual = completion_list(ra_fixture, CompletionKind::Reference); + expect.assert_eq(&actual) + } + + #[test] + fn self_fulfilling_completion() { + mark::check!(self_fulfilling_completion); + check( + r#" +use foo<|> +use std::collections; +"#, + expect![[r#" + ?? collections + "#]], + ); + } + + #[test] + fn bind_pat_and_path_ignore_at() { + check( + r#" +enum Enum { A, B } +fn quux(x: Option) { + match x { + None => (), + Some(en<|> @ Enum::A) => (), + } +} +"#, + expect![[""]], + ); + } + + #[test] + fn bind_pat_and_path_ignore_ref() { + check( + r#" +enum Enum { A, B } +fn quux(x: Option) { + match x { + None => (), + Some(ref en<|>) => (), + } +} +"#, + expect![[""]], + ); + } + + #[test] + fn bind_pat_and_path() { + check( + r#" +enum Enum { A, B } +fn quux(x: Option) { + match x { + None => (), + Some(En<|>) => (), + } +} +"#, + expect![[r#" + en Enum + "#]], + ); + } + + #[test] + fn completes_bindings_from_let() { + check( + r#" +fn quux(x: i32) { + let y = 92; + 1 + <|>; + let z = (); +} +"#, + expect![[r#" + fn quux(…) fn quux(x: i32) + bn x i32 + bn y i32 + "#]], + ); + } + + #[test] + fn completes_bindings_from_if_let() { + check( + r#" +fn quux() { + if let Some(x) = foo() { + let y = 92; + }; + if let Some(a) = bar() { + let b = 62; + 1 + <|> + } +} +"#, + expect![[r#" + bn a + bn b i32 + fn quux() fn quux() + "#]], + ); + } + + #[test] + fn 
completes_bindings_from_for() { + check( + r#" +fn quux() { + for x in &[1, 2, 3] { <|> } +} +"#, + expect![[r#" + fn quux() fn quux() + bn x + "#]], + ); + } + + #[test] + fn completes_if_prefix_is_keyword() { + mark::check!(completes_if_prefix_is_keyword); + check_edit( + "wherewolf", + r#" +fn main() { + let wherewolf = 92; + drop(where<|>) +} +"#, + r#" +fn main() { + let wherewolf = 92; + drop(wherewolf) +} +"#, + ) + } + + #[test] + fn completes_generic_params() { + check( + r#"fn quux() { <|> }"#, + expect![[r#" + tp T + fn quux() fn quux() + "#]], + ); + } + + #[test] + fn completes_generic_params_in_struct() { + check( + r#"struct S { x: <|>}"#, + expect![[r#" + st S<…> + tp Self + tp T + "#]], + ); + } + + #[test] + fn completes_self_in_enum() { + check( + r#"enum X { Y(<|>) }"#, + expect![[r#" + tp Self + en X + "#]], + ); + } + + #[test] + fn completes_module_items() { + check( + r#" +struct S; +enum E {} +fn quux() { <|> } +"#, + expect![[r#" + en E + st S + fn quux() fn quux() + "#]], + ); + } + + #[test] + fn completes_extern_prelude() { + check( + r#" +//- /lib.rs +use <|>; + +//- /other_crate/lib.rs +// nothing here +"#, + expect![[r#" + md other_crate + "#]], + ); + } + + #[test] + fn completes_module_items_in_nested_modules() { + check( + r#" +struct Foo; +mod m { + struct Bar; + fn quux() { <|> } +} +"#, + expect![[r#" + st Bar + fn quux() fn quux() + "#]], + ); + } + + #[test] + fn completes_return_type() { + check( + r#" +struct Foo; +fn x() -> <|> +"#, + expect![[r#" + st Foo + fn x() fn x() + "#]], + ); + } + + #[test] + fn dont_show_both_completions_for_shadowing() { + check( + r#" +fn foo() { + let bar = 92; + { + let bar = 62; + drop(<|>) + } +} +"#, + // FIXME: should be only one bar here + expect![[r#" + bn bar i32 + bn bar i32 + fn foo() fn foo() + "#]], + ); + } + + #[test] + fn completes_self_in_methods() { + check( + r#"impl S { fn foo(&self) { <|> } }"#, + expect![[r#" + tp Self + bn self &{unknown} + "#]], + ); + } + + #[test] + 
fn completes_prelude() { + check( + r#" +//- /main.rs +fn foo() { let x: <|> } + +//- /std/lib.rs +#[prelude_import] +use prelude::*; + +mod prelude { struct Option; } +"#, + expect![[r#" + st Option + fn foo() fn foo() + md std + "#]], + ); + } + + #[test] + fn completes_std_prelude_if_core_is_defined() { + check( + r#" +//- /main.rs +fn foo() { let x: <|> } + +//- /core/lib.rs +#[prelude_import] +use prelude::*; + +mod prelude { struct Option; } + +//- /std/lib.rs +#[prelude_import] +use prelude::*; + +mod prelude { struct String; } +"#, + expect![[r#" + st String + md core + fn foo() fn foo() + md std + "#]], + ); + } + + #[test] + fn completes_macros_as_value() { + check( + r#" +macro_rules! foo { () => {} } + +#[macro_use] +mod m1 { + macro_rules! bar { () => {} } +} + +mod m2 { + macro_rules! nope { () => {} } + + #[macro_export] + macro_rules! baz { () => {} } +} + +fn main() { let v = <|> } +"#, + expect![[r##" + ma bar!(…) macro_rules! bar + ma baz!(…) #[macro_export] + macro_rules! baz + ma foo!(…) macro_rules! foo + md m1 + md m2 + fn main() fn main() + "##]], + ); + } + + #[test] + fn completes_both_macro_and_value() { + check( + r#" +macro_rules! foo { () => {} } +fn foo() { <|> } +"#, + expect![[r#" + ma foo!(…) macro_rules! foo + fn foo() fn foo() + "#]], + ); + } + + #[test] + fn completes_macros_as_type() { + check( + r#" +macro_rules! foo { () => {} } +fn main() { let x: <|> } +"#, + expect![[r#" + ma foo!(…) macro_rules! foo + fn main() fn main() + "#]], + ); + } + + #[test] + fn completes_macros_as_stmt() { + check( + r#" +macro_rules! foo { () => {} } +fn main() { <|> } +"#, + expect![[r#" + ma foo!(…) macro_rules! foo + fn main() fn main() + "#]], + ); + } + + #[test] + fn completes_local_item() { + check( + r#" +fn main() { + return f<|>; + fn frobnicate() {} +} +"#, + expect![[r#" + fn frobnicate() fn frobnicate() + fn main() fn main() + "#]], + ); + } + + #[test] + fn completes_in_simple_macro_1() { + check( + r#" +macro_rules! 
m { ($e:expr) => { $e } } +fn quux(x: i32) { + let y = 92; + m!(<|>); +} +"#, + expect![[r#" + ma m!(…) macro_rules! m + fn quux(…) fn quux(x: i32) + bn x i32 + bn y i32 + "#]], + ); + } + + #[test] + fn completes_in_simple_macro_2() { + check( + r" +macro_rules! m { ($e:expr) => { $e } } +fn quux(x: i32) { + let y = 92; + m!(x<|>); +} +", + expect![[r#" + ma m!(…) macro_rules! m + fn quux(…) fn quux(x: i32) + bn x i32 + bn y i32 + "#]], + ); + } + + #[test] + fn completes_in_simple_macro_without_closing_parens() { + check( + r#" +macro_rules! m { ($e:expr) => { $e } } +fn quux(x: i32) { + let y = 92; + m!(x<|> +} +"#, + expect![[r#" + ma m!(…) macro_rules! m + fn quux(…) fn quux(x: i32) + bn x i32 + bn y i32 + "#]], + ); + } + + #[test] + fn completes_unresolved_uses() { + check( + r#" +use spam::Quux; + +fn main() { <|> } +"#, + expect![[r#" + ?? Quux + fn main() fn main() + "#]], + ); + } + #[test] + fn completes_enum_variant_matcharm() { + check( + r#" +enum Foo { Bar, Baz, Quux } + +fn main() { + let foo = Foo::Quux; + match foo { Qu<|> } +} +"#, + expect![[r#" + en Foo + ev Foo::Bar () + ev Foo::Baz () + ev Foo::Quux () + "#]], + ) + } + + #[test] + fn completes_enum_variant_iflet() { + check( + r#" +enum Foo { Bar, Baz, Quux } + +fn main() { + let foo = Foo::Quux; + if let Qu<|> = foo { } +} +"#, + expect![[r#" + en Foo + ev Foo::Bar () + ev Foo::Baz () + ev Foo::Quux () + "#]], + ) + } + + #[test] + fn completes_enum_variant_basic_expr() { + check( + r#" +enum Foo { Bar, Baz, Quux } +fn main() { let foo: Foo = Q<|> } +"#, + expect![[r#" + en Foo + ev Foo::Bar () + ev Foo::Baz () + ev Foo::Quux () + fn main() fn main() + "#]], + ) + } + + #[test] + fn completes_enum_variant_from_module() { + check( + r#" +mod m { pub enum E { V } } +fn f() -> m::E { V<|> } +"#, + expect![[r#" + fn f() fn f() -> m::E + md m + ev m::E::V () + "#]], + ) + } + + #[test] + fn dont_complete_attr() { + check( + r#" +struct Foo; +#[<|>] +fn f() {} +"#, + expect![[""]], + ) + } + + 
#[test] + fn completes_type_or_trait_in_impl_block() { + check( + r#" +trait MyTrait {} +struct MyStruct {} + +impl My<|> +"#, + expect![[r#" + st MyStruct + tt MyTrait + tp Self + "#]], + ) + } +} diff --git a/crates/ra_ide/src/completion/completion_config.rs b/crates/ide/src/completion/completion_config.rs similarity index 100% rename from crates/ra_ide/src/completion/completion_config.rs rename to crates/ide/src/completion/completion_config.rs diff --git a/crates/ide/src/completion/completion_context.rs b/crates/ide/src/completion/completion_context.rs new file mode 100644 index 0000000000..85456a66f5 --- /dev/null +++ b/crates/ide/src/completion/completion_context.rs @@ -0,0 +1,486 @@ +//! FIXME: write short doc here + +use base_db::SourceDatabase; +use hir::{Semantics, SemanticsScope, Type}; +use ide_db::RootDatabase; +use syntax::{ + algo::{find_covering_element, find_node_at_offset}, + ast, match_ast, AstNode, NodeOrToken, + SyntaxKind::*, + SyntaxNode, SyntaxToken, TextRange, TextSize, +}; +use test_utils::mark; +use text_edit::Indel; + +use crate::{ + call_info::ActiveParameter, + completion::{ + patterns::{ + has_bind_pat_parent, has_block_expr_parent, has_impl_as_prev_sibling, has_impl_parent, + has_item_list_or_source_file_parent, has_ref_parent, has_trait_as_prev_sibling, + has_trait_parent, if_is_prev, is_in_loop_body, is_match_arm, unsafe_is_prev, + }, + CompletionConfig, + }, + FilePosition, +}; + +/// `CompletionContext` is created early during completion to figure out, where +/// exactly is the cursor, syntax-wise. +#[derive(Debug)] +pub(crate) struct CompletionContext<'a> { + pub(super) sema: Semantics<'a, RootDatabase>, + pub(super) scope: SemanticsScope<'a>, + pub(super) db: &'a RootDatabase, + pub(super) config: &'a CompletionConfig, + pub(super) position: FilePosition, + /// The token before the cursor, in the original file. + pub(super) original_token: SyntaxToken, + /// The token before the cursor, in the macro-expanded file. 
+ pub(super) token: SyntaxToken, + pub(super) krate: Option, + pub(super) expected_type: Option, + pub(super) name_ref_syntax: Option, + pub(super) function_syntax: Option, + pub(super) use_item_syntax: Option, + pub(super) record_lit_syntax: Option, + pub(super) record_pat_syntax: Option, + pub(super) record_field_syntax: Option, + pub(super) impl_def: Option, + /// FIXME: `ActiveParameter` is string-based, which is very very wrong + pub(super) active_parameter: Option, + pub(super) is_param: bool, + /// If a name-binding or reference to a const in a pattern. + /// Irrefutable patterns (like let) are excluded. + pub(super) is_pat_binding_or_const: bool, + /// A single-indent path, like `foo`. `::foo` should not be considered a trivial path. + pub(super) is_trivial_path: bool, + /// If not a trivial path, the prefix (qualifier). + pub(super) path_qual: Option, + pub(super) after_if: bool, + /// `true` if we are a statement or a last expr in the block. + pub(super) can_be_stmt: bool, + /// `true` if we expect an expression at the cursor position. + pub(super) is_expr: bool, + /// Something is typed at the "top" level, in module or impl/trait. + pub(super) is_new_item: bool, + /// The receiver if this is a field or method access, i.e. writing something.<|> + pub(super) dot_receiver: Option, + pub(super) dot_receiver_is_ambiguous_float_literal: bool, + /// If this is a call (method or function) in particular, i.e. the () are already there. + pub(super) is_call: bool, + /// Like `is_call`, but for tuple patterns. + pub(super) is_pattern_call: bool, + /// If this is a macro call, i.e. the () are already there. 
+ pub(super) is_macro_call: bool, + pub(super) is_path_type: bool, + pub(super) has_type_args: bool, + pub(super) attribute_under_caret: Option, + pub(super) unsafe_is_prev: bool, + pub(super) if_is_prev: bool, + pub(super) block_expr_parent: bool, + pub(super) bind_pat_parent: bool, + pub(super) ref_pat_parent: bool, + pub(super) in_loop_body: bool, + pub(super) has_trait_parent: bool, + pub(super) has_impl_parent: bool, + pub(super) trait_as_prev_sibling: bool, + pub(super) impl_as_prev_sibling: bool, + pub(super) is_match_arm: bool, + pub(super) has_item_list_or_source_file_parent: bool, +} + +impl<'a> CompletionContext<'a> { + pub(super) fn new( + db: &'a RootDatabase, + position: FilePosition, + config: &'a CompletionConfig, + ) -> Option> { + let sema = Semantics::new(db); + + let original_file = sema.parse(position.file_id); + + // Insert a fake ident to get a valid parse tree. We will use this file + // to determine context, though the original_file will be used for + // actual completion. 
+ let file_with_fake_ident = { + let parse = db.parse(position.file_id); + let edit = Indel::insert(position.offset, "intellijRulezz".to_string()); + parse.reparse(&edit).tree() + }; + let fake_ident_token = + file_with_fake_ident.syntax().token_at_offset(position.offset).right_biased().unwrap(); + + let krate = sema.to_module_def(position.file_id).map(|m| m.krate()); + let original_token = + original_file.syntax().token_at_offset(position.offset).left_biased()?; + let token = sema.descend_into_macros(original_token.clone()); + let scope = sema.scope_at_offset(&token.parent(), position.offset); + let mut ctx = CompletionContext { + sema, + scope, + db, + config, + original_token, + token, + position, + krate, + expected_type: None, + name_ref_syntax: None, + function_syntax: None, + use_item_syntax: None, + record_lit_syntax: None, + record_pat_syntax: None, + record_field_syntax: None, + impl_def: None, + active_parameter: ActiveParameter::at(db, position), + is_param: false, + is_pat_binding_or_const: false, + is_trivial_path: false, + path_qual: None, + after_if: false, + can_be_stmt: false, + is_expr: false, + is_new_item: false, + dot_receiver: None, + is_call: false, + is_pattern_call: false, + is_macro_call: false, + is_path_type: false, + has_type_args: false, + dot_receiver_is_ambiguous_float_literal: false, + attribute_under_caret: None, + unsafe_is_prev: false, + in_loop_body: false, + ref_pat_parent: false, + bind_pat_parent: false, + block_expr_parent: false, + has_trait_parent: false, + has_impl_parent: false, + trait_as_prev_sibling: false, + impl_as_prev_sibling: false, + if_is_prev: false, + is_match_arm: false, + has_item_list_or_source_file_parent: false, + }; + + let mut original_file = original_file.syntax().clone(); + let mut hypothetical_file = file_with_fake_ident.syntax().clone(); + let mut offset = position.offset; + let mut fake_ident_token = fake_ident_token; + + // Are we inside a macro call? 
+ while let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = ( + find_node_at_offset::(&original_file, offset), + find_node_at_offset::(&hypothetical_file, offset), + ) { + if actual_macro_call.path().as_ref().map(|s| s.syntax().text()) + != macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text()) + { + break; + } + let hypothetical_args = match macro_call_with_fake_ident.token_tree() { + Some(tt) => tt, + None => break, + }; + if let (Some(actual_expansion), Some(hypothetical_expansion)) = ( + ctx.sema.expand(&actual_macro_call), + ctx.sema.speculative_expand( + &actual_macro_call, + &hypothetical_args, + fake_ident_token, + ), + ) { + let new_offset = hypothetical_expansion.1.text_range().start(); + if new_offset > actual_expansion.text_range().end() { + break; + } + original_file = actual_expansion; + hypothetical_file = hypothetical_expansion.0; + fake_ident_token = hypothetical_expansion.1; + offset = new_offset; + } else { + break; + } + } + ctx.fill_keyword_patterns(&hypothetical_file, offset); + ctx.fill(&original_file, hypothetical_file, offset); + Some(ctx) + } + + // The range of the identifier that is being completed. 
+ pub(crate) fn source_range(&self) -> TextRange { + // check kind of macro-expanded token, but use range of original token + if self.token.kind() == IDENT || self.token.kind().is_keyword() { + mark::hit!(completes_if_prefix_is_keyword); + self.original_token.text_range() + } else { + TextRange::empty(self.position.offset) + } + } + + fn fill_keyword_patterns(&mut self, file_with_fake_ident: &SyntaxNode, offset: TextSize) { + let fake_ident_token = file_with_fake_ident.token_at_offset(offset).right_biased().unwrap(); + let syntax_element = NodeOrToken::Token(fake_ident_token); + self.block_expr_parent = has_block_expr_parent(syntax_element.clone()); + self.unsafe_is_prev = unsafe_is_prev(syntax_element.clone()); + self.if_is_prev = if_is_prev(syntax_element.clone()); + self.bind_pat_parent = has_bind_pat_parent(syntax_element.clone()); + self.ref_pat_parent = has_ref_parent(syntax_element.clone()); + self.in_loop_body = is_in_loop_body(syntax_element.clone()); + self.has_trait_parent = has_trait_parent(syntax_element.clone()); + self.has_impl_parent = has_impl_parent(syntax_element.clone()); + self.impl_as_prev_sibling = has_impl_as_prev_sibling(syntax_element.clone()); + self.trait_as_prev_sibling = has_trait_as_prev_sibling(syntax_element.clone()); + self.is_match_arm = is_match_arm(syntax_element.clone()); + self.has_item_list_or_source_file_parent = + has_item_list_or_source_file_parent(syntax_element); + } + + fn fill( + &mut self, + original_file: &SyntaxNode, + file_with_fake_ident: SyntaxNode, + offset: TextSize, + ) { + // FIXME: this is wrong in at least two cases: + // * when there's no token `foo(<|>)` + // * when there is a token, but it happens to have type of it's own + self.expected_type = self + .token + .ancestors() + .find_map(|node| { + let ty = match_ast! 
{ + match node { + ast::Pat(it) => self.sema.type_of_pat(&it), + ast::Expr(it) => self.sema.type_of_expr(&it), + _ => return None, + } + }; + Some(ty) + }) + .flatten(); + self.attribute_under_caret = find_node_at_offset(&file_with_fake_ident, offset); + + // First, let's try to complete a reference to some declaration. + if let Some(name_ref) = find_node_at_offset::(&file_with_fake_ident, offset) { + // Special case, `trait T { fn foo(i_am_a_name_ref) {} }`. + // See RFC#1685. + if is_node::(name_ref.syntax()) { + self.is_param = true; + return; + } + // FIXME: remove this (V) duplication and make the check more precise + if name_ref.syntax().ancestors().find_map(ast::RecordPatFieldList::cast).is_some() { + self.record_pat_syntax = + self.sema.find_node_at_offset_with_macros(&original_file, offset); + } + self.classify_name_ref(original_file, name_ref, offset); + } + + // Otherwise, see if this is a declaration. We can use heuristics to + // suggest declaration names, see `CompletionKind::Magic`. 
+ if let Some(name) = find_node_at_offset::(&file_with_fake_ident, offset) { + if let Some(bind_pat) = name.syntax().ancestors().find_map(ast::IdentPat::cast) { + self.is_pat_binding_or_const = true; + if bind_pat.at_token().is_some() + || bind_pat.ref_token().is_some() + || bind_pat.mut_token().is_some() + { + self.is_pat_binding_or_const = false; + } + if bind_pat.syntax().parent().and_then(ast::RecordPatFieldList::cast).is_some() { + self.is_pat_binding_or_const = false; + } + if let Some(let_stmt) = bind_pat.syntax().ancestors().find_map(ast::LetStmt::cast) { + if let Some(pat) = let_stmt.pat() { + if pat.syntax().text_range().contains_range(bind_pat.syntax().text_range()) + { + self.is_pat_binding_or_const = false; + } + } + } + } + if is_node::(name.syntax()) { + self.is_param = true; + return; + } + // FIXME: remove this (^) duplication and make the check more precise + if name.syntax().ancestors().find_map(ast::RecordPatFieldList::cast).is_some() { + self.record_pat_syntax = + self.sema.find_node_at_offset_with_macros(&original_file, offset); + } + } + } + + fn classify_name_ref( + &mut self, + original_file: &SyntaxNode, + name_ref: ast::NameRef, + offset: TextSize, + ) { + self.name_ref_syntax = + find_node_at_offset(&original_file, name_ref.syntax().text_range().start()); + let name_range = name_ref.syntax().text_range(); + if ast::RecordExprField::for_field_name(&name_ref).is_some() { + self.record_lit_syntax = + self.sema.find_node_at_offset_with_macros(&original_file, offset); + } + + self.impl_def = self + .sema + .ancestors_with_macros(self.token.parent()) + .take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE) + .find_map(ast::Impl::cast); + + let top_node = name_ref + .syntax() + .ancestors() + .take_while(|it| it.text_range() == name_range) + .last() + .unwrap(); + + match top_node.parent().map(|it| it.kind()) { + Some(SOURCE_FILE) | Some(ITEM_LIST) => { + self.is_new_item = true; + return; + } + _ => (), + } + + self.use_item_syntax 
= + self.sema.ancestors_with_macros(self.token.parent()).find_map(ast::Use::cast); + + self.function_syntax = self + .sema + .ancestors_with_macros(self.token.parent()) + .take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE) + .find_map(ast::Fn::cast); + + self.record_field_syntax = self + .sema + .ancestors_with_macros(self.token.parent()) + .take_while(|it| { + it.kind() != SOURCE_FILE && it.kind() != MODULE && it.kind() != CALL_EXPR + }) + .find_map(ast::RecordExprField::cast); + + let parent = match name_ref.syntax().parent() { + Some(it) => it, + None => return, + }; + + if let Some(segment) = ast::PathSegment::cast(parent.clone()) { + let path = segment.parent_path(); + self.is_call = path + .syntax() + .parent() + .and_then(ast::PathExpr::cast) + .and_then(|it| it.syntax().parent().and_then(ast::CallExpr::cast)) + .is_some(); + self.is_macro_call = path.syntax().parent().and_then(ast::MacroCall::cast).is_some(); + self.is_pattern_call = + path.syntax().parent().and_then(ast::TupleStructPat::cast).is_some(); + + self.is_path_type = path.syntax().parent().and_then(ast::PathType::cast).is_some(); + self.has_type_args = segment.generic_arg_list().is_some(); + + if let Some(path) = path_or_use_tree_qualifier(&path) { + self.path_qual = path + .segment() + .and_then(|it| { + find_node_with_range::( + original_file, + it.syntax().text_range(), + ) + }) + .map(|it| it.parent_path()); + return; + } + + if let Some(segment) = path.segment() { + if segment.coloncolon_token().is_some() { + return; + } + } + + self.is_trivial_path = true; + + // Find either enclosing expr statement (thing with `;`) or a + // block. If block, check that we are the last expr. 
+ self.can_be_stmt = name_ref + .syntax() + .ancestors() + .find_map(|node| { + if let Some(stmt) = ast::ExprStmt::cast(node.clone()) { + return Some(stmt.syntax().text_range() == name_ref.syntax().text_range()); + } + if let Some(block) = ast::BlockExpr::cast(node) { + return Some( + block.expr().map(|e| e.syntax().text_range()) + == Some(name_ref.syntax().text_range()), + ); + } + None + }) + .unwrap_or(false); + self.is_expr = path.syntax().parent().and_then(ast::PathExpr::cast).is_some(); + + if let Some(off) = name_ref.syntax().text_range().start().checked_sub(2.into()) { + if let Some(if_expr) = + self.sema.find_node_at_offset_with_macros::(original_file, off) + { + if if_expr.syntax().text_range().end() < name_ref.syntax().text_range().start() + { + self.after_if = true; + } + } + } + } + if let Some(field_expr) = ast::FieldExpr::cast(parent.clone()) { + // The receiver comes before the point of insertion of the fake + // ident, so it should have the same range in the non-modified file + self.dot_receiver = field_expr + .expr() + .map(|e| e.syntax().text_range()) + .and_then(|r| find_node_with_range(original_file, r)); + self.dot_receiver_is_ambiguous_float_literal = + if let Some(ast::Expr::Literal(l)) = &self.dot_receiver { + match l.kind() { + ast::LiteralKind::FloatNumber { .. 
} => l.token().text().ends_with('.'), + _ => false, + } + } else { + false + } + } + if let Some(method_call_expr) = ast::MethodCallExpr::cast(parent) { + // As above + self.dot_receiver = method_call_expr + .expr() + .map(|e| e.syntax().text_range()) + .and_then(|r| find_node_with_range(original_file, r)); + self.is_call = true; + } + } +} + +fn find_node_with_range(syntax: &SyntaxNode, range: TextRange) -> Option { + find_covering_element(syntax, range).ancestors().find_map(N::cast) +} + +fn is_node(node: &SyntaxNode) -> bool { + match node.ancestors().find_map(N::cast) { + None => false, + Some(n) => n.syntax().text_range() == node.text_range(), + } +} + +fn path_or_use_tree_qualifier(path: &ast::Path) -> Option { + if let Some(qual) = path.qualifier() { + return Some(qual); + } + let use_tree_list = path.syntax().ancestors().find_map(ast::UseTreeList::cast)?; + let use_tree = use_tree_list.syntax().parent().and_then(ast::UseTree::cast)?; + use_tree.path() +} diff --git a/crates/ide/src/completion/completion_item.rs b/crates/ide/src/completion/completion_item.rs new file mode 100644 index 0000000000..9377cdc577 --- /dev/null +++ b/crates/ide/src/completion/completion_item.rs @@ -0,0 +1,384 @@ +//! FIXME: write short doc here + +use std::fmt; + +use hir::Documentation; +use syntax::TextRange; +use text_edit::TextEdit; + +use crate::completion::completion_config::SnippetCap; + +/// `CompletionItem` describes a single completion variant in the editor pop-up. +/// It is basically a POD with various properties. To construct a +/// `CompletionItem`, use `new` method and the `Builder` struct. +pub struct CompletionItem { + /// Used only internally in tests, to check only specific kind of + /// completion (postfix, keyword, reference, etc). + #[allow(unused)] + pub(crate) completion_kind: CompletionKind, + /// Label in the completion pop up which identifies completion. + label: String, + /// Range of identifier that is being completed. 
+ /// + /// It should be used primarily for UI, but we also use this to convert + /// genetic TextEdit into LSP's completion edit (see conv.rs). + /// + /// `source_range` must contain the completion offset. `insert_text` should + /// start with what `source_range` points to, or VSCode will filter out the + /// completion silently. + source_range: TextRange, + /// What happens when user selects this item. + /// + /// Typically, replaces `source_range` with new identifier. + text_edit: TextEdit, + insert_text_format: InsertTextFormat, + + /// What item (struct, function, etc) are we completing. + kind: Option, + + /// Lookup is used to check if completion item indeed can complete current + /// ident. + /// + /// That is, in `foo.bar<|>` lookup of `abracadabra` will be accepted (it + /// contains `bar` sub sequence), and `quux` will rejected. + lookup: Option, + + /// Additional info to show in the UI pop up. + detail: Option, + documentation: Option, + + /// Whether this item is marked as deprecated + deprecated: bool, + + /// If completing a function call, ask the editor to show parameter popup + /// after completion. + trigger_call_info: bool, + + /// Score is useful to pre select or display in better order completion items + score: Option, +} + +// We use custom debug for CompletionItem to make snapshot tests more readable. 
+impl fmt::Debug for CompletionItem { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let mut s = f.debug_struct("CompletionItem"); + s.field("label", &self.label()).field("source_range", &self.source_range()); + if self.text_edit().len() == 1 { + let atom = &self.text_edit().iter().next().unwrap(); + s.field("delete", &atom.delete); + s.field("insert", &atom.insert); + } else { + s.field("text_edit", &self.text_edit); + } + if let Some(kind) = self.kind().as_ref() { + s.field("kind", kind); + } + if self.lookup() != self.label() { + s.field("lookup", &self.lookup()); + } + if let Some(detail) = self.detail() { + s.field("detail", &detail); + } + if let Some(documentation) = self.documentation() { + s.field("documentation", &documentation); + } + if self.deprecated { + s.field("deprecated", &true); + } + if let Some(score) = &self.score { + s.field("score", score); + } + if self.trigger_call_info { + s.field("trigger_call_info", &true); + } + s.finish() + } +} + +#[derive(Debug, Clone, Copy, Ord, PartialOrd, Eq, PartialEq)] +pub enum CompletionScore { + /// If only type match + TypeMatch, + /// If type and name match + TypeAndNameMatch, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum CompletionItemKind { + Snippet, + Keyword, + Module, + Function, + BuiltinType, + Struct, + Enum, + EnumVariant, + Binding, + Field, + Static, + Const, + Trait, + TypeAlias, + Method, + TypeParam, + Macro, + Attribute, + UnresolvedReference, +} + +impl CompletionItemKind { + #[cfg(test)] + pub(crate) fn tag(&self) -> &'static str { + match self { + CompletionItemKind::Attribute => "at", + CompletionItemKind::Binding => "bn", + CompletionItemKind::BuiltinType => "bt", + CompletionItemKind::Const => "ct", + CompletionItemKind::Enum => "en", + CompletionItemKind::EnumVariant => "ev", + CompletionItemKind::Field => "fd", + CompletionItemKind::Function => "fn", + CompletionItemKind::Keyword => "kw", + CompletionItemKind::Macro => "ma", + CompletionItemKind::Method => 
"me", + CompletionItemKind::Module => "md", + CompletionItemKind::Snippet => "sn", + CompletionItemKind::Static => "sc", + CompletionItemKind::Struct => "st", + CompletionItemKind::Trait => "tt", + CompletionItemKind::TypeAlias => "ta", + CompletionItemKind::TypeParam => "tp", + CompletionItemKind::UnresolvedReference => "??", + } + } +} + +#[derive(Debug, PartialEq, Eq, Copy, Clone)] +pub(crate) enum CompletionKind { + /// Parser-based keyword completion. + Keyword, + /// Your usual "complete all valid identifiers". + Reference, + /// "Secret sauce" completions. + Magic, + Snippet, + Postfix, + BuiltinType, + Attribute, +} + +#[derive(Debug, PartialEq, Eq, Copy, Clone)] +pub enum InsertTextFormat { + PlainText, + Snippet, +} + +impl CompletionItem { + pub(crate) fn new( + completion_kind: CompletionKind, + source_range: TextRange, + label: impl Into, + ) -> Builder { + let label = label.into(); + Builder { + source_range, + completion_kind, + label, + insert_text: None, + insert_text_format: InsertTextFormat::PlainText, + detail: None, + documentation: None, + lookup: None, + kind: None, + text_edit: None, + deprecated: None, + trigger_call_info: None, + score: None, + } + } + /// What user sees in pop-up in the UI. + pub fn label(&self) -> &str { + &self.label + } + pub fn source_range(&self) -> TextRange { + self.source_range + } + + pub fn insert_text_format(&self) -> InsertTextFormat { + self.insert_text_format + } + + pub fn text_edit(&self) -> &TextEdit { + &self.text_edit + } + + /// Short one-line additional information, like a type + pub fn detail(&self) -> Option<&str> { + self.detail.as_deref() + } + /// A doc-comment + pub fn documentation(&self) -> Option { + self.documentation.clone() + } + /// What string is used for filtering. 
+ pub fn lookup(&self) -> &str { + self.lookup.as_deref().unwrap_or(&self.label) + } + + pub fn kind(&self) -> Option { + self.kind + } + + pub fn deprecated(&self) -> bool { + self.deprecated + } + + pub fn score(&self) -> Option { + self.score + } + + pub fn trigger_call_info(&self) -> bool { + self.trigger_call_info + } +} + +/// A helper to make `CompletionItem`s. +#[must_use] +pub(crate) struct Builder { + source_range: TextRange, + completion_kind: CompletionKind, + label: String, + insert_text: Option, + insert_text_format: InsertTextFormat, + detail: Option, + documentation: Option, + lookup: Option, + kind: Option, + text_edit: Option, + deprecated: Option, + trigger_call_info: Option, + score: Option, +} + +impl Builder { + pub(crate) fn add_to(self, acc: &mut Completions) { + acc.add(self.build()) + } + + pub(crate) fn build(self) -> CompletionItem { + let label = self.label; + let text_edit = match self.text_edit { + Some(it) => it, + None => TextEdit::replace( + self.source_range, + self.insert_text.unwrap_or_else(|| label.clone()), + ), + }; + + CompletionItem { + source_range: self.source_range, + label, + insert_text_format: self.insert_text_format, + text_edit, + detail: self.detail, + documentation: self.documentation, + lookup: self.lookup, + kind: self.kind, + completion_kind: self.completion_kind, + deprecated: self.deprecated.unwrap_or(false), + trigger_call_info: self.trigger_call_info.unwrap_or(false), + score: self.score, + } + } + pub(crate) fn lookup_by(mut self, lookup: impl Into) -> Builder { + self.lookup = Some(lookup.into()); + self + } + pub(crate) fn label(mut self, label: impl Into) -> Builder { + self.label = label.into(); + self + } + pub(crate) fn insert_text(mut self, insert_text: impl Into) -> Builder { + self.insert_text = Some(insert_text.into()); + self + } + pub(crate) fn insert_snippet( + mut self, + _cap: SnippetCap, + snippet: impl Into, + ) -> Builder { + self.insert_text_format = InsertTextFormat::Snippet; + 
self.insert_text(snippet) + } + pub(crate) fn kind(mut self, kind: CompletionItemKind) -> Builder { + self.kind = Some(kind); + self + } + pub(crate) fn text_edit(mut self, edit: TextEdit) -> Builder { + self.text_edit = Some(edit); + self + } + pub(crate) fn snippet_edit(mut self, _cap: SnippetCap, edit: TextEdit) -> Builder { + self.insert_text_format = InsertTextFormat::Snippet; + self.text_edit(edit) + } + #[allow(unused)] + pub(crate) fn detail(self, detail: impl Into) -> Builder { + self.set_detail(Some(detail)) + } + pub(crate) fn set_detail(mut self, detail: Option>) -> Builder { + self.detail = detail.map(Into::into); + self + } + #[allow(unused)] + pub(crate) fn documentation(self, docs: Documentation) -> Builder { + self.set_documentation(Some(docs)) + } + pub(crate) fn set_documentation(mut self, docs: Option) -> Builder { + self.documentation = docs.map(Into::into); + self + } + pub(crate) fn set_deprecated(mut self, deprecated: bool) -> Builder { + self.deprecated = Some(deprecated); + self + } + pub(crate) fn set_score(mut self, score: CompletionScore) -> Builder { + self.score = Some(score); + self + } + pub(crate) fn trigger_call_info(mut self) -> Builder { + self.trigger_call_info = Some(true); + self + } +} + +impl<'a> Into for Builder { + fn into(self) -> CompletionItem { + self.build() + } +} + +/// Represents an in-progress set of completions being built. 
+#[derive(Debug, Default)] +pub(crate) struct Completions { + buf: Vec, +} + +impl Completions { + pub(crate) fn add(&mut self, item: impl Into) { + self.buf.push(item.into()) + } + pub(crate) fn add_all(&mut self, items: I) + where + I: IntoIterator, + I::Item: Into, + { + items.into_iter().for_each(|item| self.add(item.into())) + } +} + +impl Into> for Completions { + fn into(self) -> Vec { + self.buf + } +} diff --git a/crates/ide/src/completion/patterns.rs b/crates/ide/src/completion/patterns.rs new file mode 100644 index 0000000000..ffc97c076a --- /dev/null +++ b/crates/ide/src/completion/patterns.rs @@ -0,0 +1,194 @@ +//! Patterns telling us certain facts about current syntax element, they are used in completion context + +use syntax::{ + algo::non_trivia_sibling, + ast::{self, LoopBodyOwner}, + match_ast, AstNode, Direction, NodeOrToken, SyntaxElement, + SyntaxKind::*, + SyntaxNode, SyntaxToken, +}; + +#[cfg(test)] +use crate::completion::test_utils::check_pattern_is_applicable; + +pub(crate) fn has_trait_parent(element: SyntaxElement) -> bool { + not_same_range_ancestor(element) + .filter(|it| it.kind() == ASSOC_ITEM_LIST) + .and_then(|it| it.parent()) + .filter(|it| it.kind() == TRAIT) + .is_some() +} +#[test] +fn test_has_trait_parent() { + check_pattern_is_applicable(r"trait A { f<|> }", has_trait_parent); +} + +pub(crate) fn has_impl_parent(element: SyntaxElement) -> bool { + not_same_range_ancestor(element) + .filter(|it| it.kind() == ASSOC_ITEM_LIST) + .and_then(|it| it.parent()) + .filter(|it| it.kind() == IMPL) + .is_some() +} +#[test] +fn test_has_impl_parent() { + check_pattern_is_applicable(r"impl A { f<|> }", has_impl_parent); +} + +pub(crate) fn has_block_expr_parent(element: SyntaxElement) -> bool { + not_same_range_ancestor(element).filter(|it| it.kind() == BLOCK_EXPR).is_some() +} +#[test] +fn test_has_block_expr_parent() { + check_pattern_is_applicable(r"fn my_fn() { let a = 2; f<|> }", has_block_expr_parent); +} + +pub(crate) fn 
has_bind_pat_parent(element: SyntaxElement) -> bool { + element.ancestors().find(|it| it.kind() == IDENT_PAT).is_some() +} +#[test] +fn test_has_bind_pat_parent() { + check_pattern_is_applicable(r"fn my_fn(m<|>) {}", has_bind_pat_parent); + check_pattern_is_applicable(r"fn my_fn() { let m<|> }", has_bind_pat_parent); +} + +pub(crate) fn has_ref_parent(element: SyntaxElement) -> bool { + not_same_range_ancestor(element) + .filter(|it| it.kind() == REF_PAT || it.kind() == REF_EXPR) + .is_some() +} +#[test] +fn test_has_ref_parent() { + check_pattern_is_applicable(r"fn my_fn(&m<|>) {}", has_ref_parent); + check_pattern_is_applicable(r"fn my() { let &m<|> }", has_ref_parent); +} + +pub(crate) fn has_item_list_or_source_file_parent(element: SyntaxElement) -> bool { + let ancestor = not_same_range_ancestor(element); + if !ancestor.is_some() { + return true; + } + ancestor.filter(|it| it.kind() == SOURCE_FILE || it.kind() == ITEM_LIST).is_some() +} +#[test] +fn test_has_item_list_or_source_file_parent() { + check_pattern_is_applicable(r"i<|>", has_item_list_or_source_file_parent); + check_pattern_is_applicable(r"mod foo { f<|> }", has_item_list_or_source_file_parent); +} + +pub(crate) fn is_match_arm(element: SyntaxElement) -> bool { + not_same_range_ancestor(element.clone()).filter(|it| it.kind() == MATCH_ARM).is_some() + && previous_sibling_or_ancestor_sibling(element) + .and_then(|it| it.into_token()) + .filter(|it| it.kind() == FAT_ARROW) + .is_some() +} +#[test] +fn test_is_match_arm() { + check_pattern_is_applicable(r"fn my_fn() { match () { () => m<|> } }", is_match_arm); +} + +pub(crate) fn unsafe_is_prev(element: SyntaxElement) -> bool { + element + .into_token() + .and_then(|it| previous_non_trivia_token(it)) + .filter(|it| it.kind() == UNSAFE_KW) + .is_some() +} +#[test] +fn test_unsafe_is_prev() { + check_pattern_is_applicable(r"unsafe i<|>", unsafe_is_prev); +} + +pub(crate) fn if_is_prev(element: SyntaxElement) -> bool { + element + .into_token() + 
.and_then(|it| previous_non_trivia_token(it)) + .filter(|it| it.kind() == IF_KW) + .is_some() +} +#[test] +fn test_if_is_prev() { + check_pattern_is_applicable(r"if l<|>", if_is_prev); +} + +pub(crate) fn has_trait_as_prev_sibling(element: SyntaxElement) -> bool { + previous_sibling_or_ancestor_sibling(element).filter(|it| it.kind() == TRAIT).is_some() +} +#[test] +fn test_has_trait_as_prev_sibling() { + check_pattern_is_applicable(r"trait A w<|> {}", has_trait_as_prev_sibling); +} + +pub(crate) fn has_impl_as_prev_sibling(element: SyntaxElement) -> bool { + previous_sibling_or_ancestor_sibling(element).filter(|it| it.kind() == IMPL).is_some() +} +#[test] +fn test_has_impl_as_prev_sibling() { + check_pattern_is_applicable(r"impl A w<|> {}", has_impl_as_prev_sibling); +} + +pub(crate) fn is_in_loop_body(element: SyntaxElement) -> bool { + let leaf = match element { + NodeOrToken::Node(node) => node, + NodeOrToken::Token(token) => token.parent(), + }; + for node in leaf.ancestors() { + if node.kind() == FN || node.kind() == CLOSURE_EXPR { + break; + } + let loop_body = match_ast! 
{ + match node { + ast::ForExpr(it) => it.loop_body(), + ast::WhileExpr(it) => it.loop_body(), + ast::LoopExpr(it) => it.loop_body(), + _ => None, + } + }; + if let Some(body) = loop_body { + if body.syntax().text_range().contains_range(leaf.text_range()) { + return true; + } + } + } + false +} + +fn not_same_range_ancestor(element: SyntaxElement) -> Option { + element + .ancestors() + .take_while(|it| it.text_range() == element.text_range()) + .last() + .and_then(|it| it.parent()) +} + +fn previous_non_trivia_token(token: SyntaxToken) -> Option { + let mut token = token.prev_token(); + while let Some(inner) = token.clone() { + if !inner.kind().is_trivia() { + return Some(inner); + } else { + token = inner.prev_token(); + } + } + None +} + +fn previous_sibling_or_ancestor_sibling(element: SyntaxElement) -> Option { + let token_sibling = non_trivia_sibling(element.clone(), Direction::Prev); + if let Some(sibling) = token_sibling { + Some(sibling) + } else { + // if not trying to find first ancestor which has such a sibling + let node = match element { + NodeOrToken::Node(node) => node, + NodeOrToken::Token(token) => token.parent(), + }; + let range = node.text_range(); + let top_node = node.ancestors().take_while(|it| it.text_range() == range).last()?; + let prev_sibling_node = top_node.ancestors().find(|it| { + non_trivia_sibling(NodeOrToken::Node(it.to_owned()), Direction::Prev).is_some() + })?; + non_trivia_sibling(NodeOrToken::Node(prev_sibling_node), Direction::Prev) + } +} diff --git a/crates/ide/src/completion/presentation.rs b/crates/ide/src/completion/presentation.rs new file mode 100644 index 0000000000..e1b1ea4ce4 --- /dev/null +++ b/crates/ide/src/completion/presentation.rs @@ -0,0 +1,1229 @@ +//! This modules takes care of rendering various definitions as completion items. +//! It also handles scoring (sorting) completions. 
+ +use hir::{Docs, HasAttrs, HasSource, HirDisplay, ModPath, ScopeDef, StructKind, Type}; +use itertools::Itertools; +use syntax::ast::NameOwner; +use test_utils::mark; + +use crate::{ + completion::{ + completion_item::Builder, CompletionContext, CompletionItem, CompletionItemKind, + CompletionKind, Completions, + }, + display::{const_label, function_declaration, macro_label, type_label}, + CompletionScore, RootDatabase, +}; + +impl Completions { + pub(crate) fn add_field(&mut self, ctx: &CompletionContext, field: hir::Field, ty: &Type) { + let is_deprecated = is_deprecated(field, ctx.db); + let name = field.name(ctx.db); + let mut completion_item = + CompletionItem::new(CompletionKind::Reference, ctx.source_range(), name.to_string()) + .kind(CompletionItemKind::Field) + .detail(ty.display(ctx.db).to_string()) + .set_documentation(field.docs(ctx.db)) + .set_deprecated(is_deprecated); + + if let Some(score) = compute_score(ctx, &ty, &name.to_string()) { + completion_item = completion_item.set_score(score); + } + + completion_item.add_to(self); + } + + pub(crate) fn add_tuple_field(&mut self, ctx: &CompletionContext, field: usize, ty: &Type) { + CompletionItem::new(CompletionKind::Reference, ctx.source_range(), field.to_string()) + .kind(CompletionItemKind::Field) + .detail(ty.display(ctx.db).to_string()) + .add_to(self); + } + + pub(crate) fn add_resolution( + &mut self, + ctx: &CompletionContext, + local_name: String, + resolution: &ScopeDef, + ) { + use hir::ModuleDef::*; + + let completion_kind = match resolution { + ScopeDef::ModuleDef(BuiltinType(..)) => CompletionKind::BuiltinType, + _ => CompletionKind::Reference, + }; + + let kind = match resolution { + ScopeDef::ModuleDef(Module(..)) => CompletionItemKind::Module, + ScopeDef::ModuleDef(Function(func)) => { + return self.add_function(ctx, *func, Some(local_name)); + } + ScopeDef::ModuleDef(Adt(hir::Adt::Struct(_))) => CompletionItemKind::Struct, + // FIXME: add CompletionItemKind::Union + 
ScopeDef::ModuleDef(Adt(hir::Adt::Union(_))) => CompletionItemKind::Struct, + ScopeDef::ModuleDef(Adt(hir::Adt::Enum(_))) => CompletionItemKind::Enum, + + ScopeDef::ModuleDef(EnumVariant(var)) => { + return self.add_enum_variant(ctx, *var, Some(local_name)); + } + ScopeDef::ModuleDef(Const(..)) => CompletionItemKind::Const, + ScopeDef::ModuleDef(Static(..)) => CompletionItemKind::Static, + ScopeDef::ModuleDef(Trait(..)) => CompletionItemKind::Trait, + ScopeDef::ModuleDef(TypeAlias(..)) => CompletionItemKind::TypeAlias, + ScopeDef::ModuleDef(BuiltinType(..)) => CompletionItemKind::BuiltinType, + ScopeDef::GenericParam(..) => CompletionItemKind::TypeParam, + ScopeDef::Local(..) => CompletionItemKind::Binding, + // (does this need its own kind?) + ScopeDef::AdtSelfType(..) | ScopeDef::ImplSelfType(..) => CompletionItemKind::TypeParam, + ScopeDef::MacroDef(mac) => { + return self.add_macro(ctx, Some(local_name), *mac); + } + ScopeDef::Unknown => { + return self.add( + CompletionItem::new(CompletionKind::Reference, ctx.source_range(), local_name) + .kind(CompletionItemKind::UnresolvedReference), + ); + } + }; + + let docs = match resolution { + ScopeDef::ModuleDef(Module(it)) => it.docs(ctx.db), + ScopeDef::ModuleDef(Adt(it)) => it.docs(ctx.db), + ScopeDef::ModuleDef(EnumVariant(it)) => it.docs(ctx.db), + ScopeDef::ModuleDef(Const(it)) => it.docs(ctx.db), + ScopeDef::ModuleDef(Static(it)) => it.docs(ctx.db), + ScopeDef::ModuleDef(Trait(it)) => it.docs(ctx.db), + ScopeDef::ModuleDef(TypeAlias(it)) => it.docs(ctx.db), + _ => None, + }; + + let mut completion_item = + CompletionItem::new(completion_kind, ctx.source_range(), local_name.clone()); + if let ScopeDef::Local(local) = resolution { + let ty = local.ty(ctx.db); + if !ty.is_unknown() { + completion_item = completion_item.detail(ty.display(ctx.db).to_string()); + } + }; + + if let ScopeDef::Local(local) = resolution { + if let Some(score) = compute_score(ctx, &local.ty(ctx.db), &local_name) { + completion_item = 
completion_item.set_score(score); + } + } + + // Add `<>` for generic types + if ctx.is_path_type && !ctx.has_type_args && ctx.config.add_call_parenthesis { + if let Some(cap) = ctx.config.snippet_cap { + let has_non_default_type_params = match resolution { + ScopeDef::ModuleDef(Adt(it)) => it.has_non_default_type_params(ctx.db), + ScopeDef::ModuleDef(TypeAlias(it)) => it.has_non_default_type_params(ctx.db), + _ => false, + }; + if has_non_default_type_params { + mark::hit!(inserts_angle_brackets_for_generics); + completion_item = completion_item + .lookup_by(local_name.clone()) + .label(format!("{}<…>", local_name)) + .insert_snippet(cap, format!("{}<$0>", local_name)); + } + } + } + + completion_item.kind(kind).set_documentation(docs).add_to(self) + } + + pub(crate) fn add_macro( + &mut self, + ctx: &CompletionContext, + name: Option, + macro_: hir::MacroDef, + ) { + // FIXME: Currently proc-macro do not have ast-node, + // such that it does not have source + if macro_.is_proc_macro() { + return; + } + + let name = match name { + Some(it) => it, + None => return, + }; + + let ast_node = macro_.source(ctx.db).value; + let detail = macro_label(&ast_node); + + let docs = macro_.docs(ctx.db); + + let mut builder = CompletionItem::new( + CompletionKind::Reference, + ctx.source_range(), + &format!("{}!", name), + ) + .kind(CompletionItemKind::Macro) + .set_documentation(docs.clone()) + .set_deprecated(is_deprecated(macro_, ctx.db)) + .detail(detail); + + let needs_bang = ctx.use_item_syntax.is_none() && !ctx.is_macro_call; + builder = match ctx.config.snippet_cap { + Some(cap) if needs_bang => { + let docs = docs.as_ref().map_or("", |s| s.as_str()); + let (bra, ket) = guess_macro_braces(&name, docs); + builder + .insert_snippet(cap, format!("{}!{}$0{}", name, bra, ket)) + .label(format!("{}!{}…{}", name, bra, ket)) + .lookup_by(format!("{}!", name)) + } + None if needs_bang => builder.insert_text(format!("{}!", name)), + _ => { + 
mark::hit!(dont_insert_macro_call_parens_unncessary); + builder.insert_text(name) + } + }; + + self.add(builder); + } + + pub(crate) fn add_function( + &mut self, + ctx: &CompletionContext, + func: hir::Function, + local_name: Option, + ) { + let has_self_param = func.has_self_param(ctx.db); + + let name = local_name.unwrap_or_else(|| func.name(ctx.db).to_string()); + let ast_node = func.source(ctx.db).value; + + let mut builder = + CompletionItem::new(CompletionKind::Reference, ctx.source_range(), name.clone()) + .kind(if has_self_param { + CompletionItemKind::Method + } else { + CompletionItemKind::Function + }) + .set_documentation(func.docs(ctx.db)) + .set_deprecated(is_deprecated(func, ctx.db)) + .detail(function_declaration(&ast_node)); + + let params = ast_node + .param_list() + .into_iter() + .flat_map(|it| it.params()) + .flat_map(|it| it.pat()) + .map(|pat| pat.to_string().trim_start_matches('_').into()) + .collect(); + + builder = builder.add_call_parens(ctx, name, Params::Named(params)); + + self.add(builder) + } + + pub(crate) fn add_const(&mut self, ctx: &CompletionContext, constant: hir::Const) { + let ast_node = constant.source(ctx.db).value; + let name = match ast_node.name() { + Some(name) => name, + _ => return, + }; + let detail = const_label(&ast_node); + + CompletionItem::new(CompletionKind::Reference, ctx.source_range(), name.text().to_string()) + .kind(CompletionItemKind::Const) + .set_documentation(constant.docs(ctx.db)) + .set_deprecated(is_deprecated(constant, ctx.db)) + .detail(detail) + .add_to(self); + } + + pub(crate) fn add_type_alias(&mut self, ctx: &CompletionContext, type_alias: hir::TypeAlias) { + let type_def = type_alias.source(ctx.db).value; + let name = match type_def.name() { + Some(name) => name, + _ => return, + }; + let detail = type_label(&type_def); + + CompletionItem::new(CompletionKind::Reference, ctx.source_range(), name.text().to_string()) + .kind(CompletionItemKind::TypeAlias) + 
.set_documentation(type_alias.docs(ctx.db)) + .set_deprecated(is_deprecated(type_alias, ctx.db)) + .detail(detail) + .add_to(self); + } + + pub(crate) fn add_qualified_enum_variant( + &mut self, + ctx: &CompletionContext, + variant: hir::EnumVariant, + path: ModPath, + ) { + self.add_enum_variant_impl(ctx, variant, None, Some(path)) + } + + pub(crate) fn add_enum_variant( + &mut self, + ctx: &CompletionContext, + variant: hir::EnumVariant, + local_name: Option, + ) { + self.add_enum_variant_impl(ctx, variant, local_name, None) + } + + fn add_enum_variant_impl( + &mut self, + ctx: &CompletionContext, + variant: hir::EnumVariant, + local_name: Option, + path: Option, + ) { + let is_deprecated = is_deprecated(variant, ctx.db); + let name = local_name.unwrap_or_else(|| variant.name(ctx.db).to_string()); + let qualified_name = match &path { + Some(it) => it.to_string(), + None => name.to_string(), + }; + let detail_types = variant + .fields(ctx.db) + .into_iter() + .map(|field| (field.name(ctx.db), field.signature_ty(ctx.db))); + let variant_kind = variant.kind(ctx.db); + let detail = match variant_kind { + StructKind::Tuple | StructKind::Unit => format!( + "({})", + detail_types.map(|(_, t)| t.display(ctx.db).to_string()).format(", ") + ), + StructKind::Record => format!( + "{{ {} }}", + detail_types + .map(|(n, t)| format!("{}: {}", n, t.display(ctx.db).to_string())) + .format(", ") + ), + }; + let mut res = CompletionItem::new( + CompletionKind::Reference, + ctx.source_range(), + qualified_name.clone(), + ) + .kind(CompletionItemKind::EnumVariant) + .set_documentation(variant.docs(ctx.db)) + .set_deprecated(is_deprecated) + .detail(detail); + + if path.is_some() { + res = res.lookup_by(name); + } + + if variant_kind == StructKind::Tuple { + mark::hit!(inserts_parens_for_tuple_enums); + let params = Params::Anonymous(variant.fields(ctx.db).len()); + res = res.add_call_parens(ctx, qualified_name, params) + } + + res.add_to(self); + } +} + +pub(crate) fn compute_score( 
+ ctx: &CompletionContext, + ty: &Type, + name: &str, +) -> Option { + let (active_name, active_type) = if let Some(record_field) = &ctx.record_field_syntax { + mark::hit!(record_field_type_match); + let (struct_field, _local) = ctx.sema.resolve_record_field(record_field)?; + (struct_field.name(ctx.db).to_string(), struct_field.signature_ty(ctx.db)) + } else if let Some(active_parameter) = &ctx.active_parameter { + mark::hit!(active_param_type_match); + (active_parameter.name.clone(), active_parameter.ty.clone()) + } else { + return None; + }; + + // Compute score + // For the same type + if &active_type != ty { + return None; + } + + let mut res = CompletionScore::TypeMatch; + + // If same type + same name then go top position + if active_name == name { + res = CompletionScore::TypeAndNameMatch + } + + Some(res) +} + +enum Params { + Named(Vec), + Anonymous(usize), +} + +impl Params { + fn len(&self) -> usize { + match self { + Params::Named(xs) => xs.len(), + Params::Anonymous(len) => *len, + } + } + + fn is_empty(&self) -> bool { + self.len() == 0 + } +} + +impl Builder { + fn add_call_parens(mut self, ctx: &CompletionContext, name: String, params: Params) -> Builder { + if !ctx.config.add_call_parenthesis { + return self; + } + if ctx.use_item_syntax.is_some() { + mark::hit!(no_parens_in_use_item); + return self; + } + if ctx.is_pattern_call { + mark::hit!(dont_duplicate_pattern_parens); + return self; + } + if ctx.is_call { + return self; + } + + // Don't add parentheses if the expected type is some function reference. + if let Some(ty) = &ctx.expected_type { + if ty.is_fn() { + mark::hit!(no_call_parens_if_fn_ptr_needed); + return self; + } + } + + let cap = match ctx.config.snippet_cap { + Some(it) => it, + None => return self, + }; + // If not an import, add parenthesis automatically. 
+ mark::hit!(inserts_parens_for_function_calls); + + let (snippet, label) = if params.is_empty() { + (format!("{}()$0", name), format!("{}()", name)) + } else { + self = self.trigger_call_info(); + let snippet = match (ctx.config.add_call_argument_snippets, params) { + (true, Params::Named(params)) => { + let function_params_snippet = + params.iter().enumerate().format_with(", ", |(index, param_name), f| { + f(&format_args!("${{{}:{}}}", index + 1, param_name)) + }); + format!("{}({})$0", name, function_params_snippet) + } + _ => { + mark::hit!(suppress_arg_snippets); + format!("{}($0)", name) + } + }; + + (snippet, format!("{}(…)", name)) + }; + self.lookup_by(name).label(label).insert_snippet(cap, snippet) + } +} + +fn is_deprecated(node: impl HasAttrs, db: &RootDatabase) -> bool { + node.attrs(db).by_key("deprecated").exists() +} + +fn guess_macro_braces(macro_name: &str, docs: &str) -> (&'static str, &'static str) { + let mut votes = [0, 0, 0]; + for (idx, s) in docs.match_indices(¯o_name) { + let (before, after) = (&docs[..idx], &docs[idx + s.len()..]); + // Ensure to match the full word + if after.starts_with('!') + && !before.ends_with(|c: char| c == '_' || c.is_ascii_alphanumeric()) + { + // It may have spaces before the braces like `foo! {}` + match after[1..].chars().find(|&c| !c.is_whitespace()) { + Some('{') => votes[0] += 1, + Some('[') => votes[1] += 1, + Some('(') => votes[2] += 1, + _ => {} + } + } + } + + // Insert a space before `{}`. + // We prefer the last one when some votes equal. 
+ let (_vote, (bra, ket)) = votes + .iter() + .zip(&[(" {", "}"), ("[", "]"), ("(", ")")]) + .max_by_key(|&(&vote, _)| vote) + .unwrap(); + (*bra, *ket) +} + +#[cfg(test)] +mod tests { + use std::cmp::Reverse; + + use expect::{expect, Expect}; + use test_utils::mark; + + use crate::{ + completion::{ + test_utils::{ + check_edit, check_edit_with_config, do_completion, get_all_completion_items, + }, + CompletionConfig, CompletionKind, + }, + CompletionScore, + }; + + fn check(ra_fixture: &str, expect: Expect) { + let actual = do_completion(ra_fixture, CompletionKind::Reference); + expect.assert_debug_eq(&actual); + } + + fn check_scores(ra_fixture: &str, expect: Expect) { + fn display_score(score: Option) -> &'static str { + match score { + Some(CompletionScore::TypeMatch) => "[type]", + Some(CompletionScore::TypeAndNameMatch) => "[type+name]", + None => "[]".into(), + } + } + + let mut completions = get_all_completion_items(CompletionConfig::default(), ra_fixture); + completions.sort_by_key(|it| (Reverse(it.score()), it.label().to_string())); + let actual = completions + .into_iter() + .filter(|it| it.completion_kind == CompletionKind::Reference) + .map(|it| { + let tag = it.kind().unwrap().tag(); + let score = display_score(it.score()); + format!("{} {} {}\n", tag, it.label(), score) + }) + .collect::(); + expect.assert_eq(&actual); + } + + #[test] + fn enum_detail_includes_record_fields() { + check( + r#" +enum Foo { Foo { x: i32, y: i32 } } + +fn main() { Foo::Fo<|> } +"#, + expect![[r#" + [ + CompletionItem { + label: "Foo", + source_range: 54..56, + delete: 54..56, + insert: "Foo", + kind: EnumVariant, + detail: "{ x: i32, y: i32 }", + }, + ] + "#]], + ); + } + + #[test] + fn enum_detail_doesnt_include_tuple_fields() { + check( + r#" +enum Foo { Foo (i32, i32) } + +fn main() { Foo::Fo<|> } +"#, + expect![[r#" + [ + CompletionItem { + label: "Foo(…)", + source_range: 46..48, + delete: 46..48, + insert: "Foo($0)", + kind: EnumVariant, + lookup: "Foo", + detail: 
"(i32, i32)", + trigger_call_info: true, + }, + ] + "#]], + ); + } + + #[test] + fn enum_detail_just_parentheses_for_unit() { + check( + r#" +enum Foo { Foo } + +fn main() { Foo::Fo<|> } +"#, + expect![[r#" + [ + CompletionItem { + label: "Foo", + source_range: 35..37, + delete: 35..37, + insert: "Foo", + kind: EnumVariant, + detail: "()", + }, + ] + "#]], + ); + } + + #[test] + fn sets_deprecated_flag_in_completion_items() { + check( + r#" +#[deprecated] +fn something_deprecated() {} +#[deprecated(since = "1.0.0")] +fn something_else_deprecated() {} + +fn main() { som<|> } +"#, + expect![[r#" + [ + CompletionItem { + label: "main()", + source_range: 121..124, + delete: 121..124, + insert: "main()$0", + kind: Function, + lookup: "main", + detail: "fn main()", + }, + CompletionItem { + label: "something_deprecated()", + source_range: 121..124, + delete: 121..124, + insert: "something_deprecated()$0", + kind: Function, + lookup: "something_deprecated", + detail: "fn something_deprecated()", + deprecated: true, + }, + CompletionItem { + label: "something_else_deprecated()", + source_range: 121..124, + delete: 121..124, + insert: "something_else_deprecated()$0", + kind: Function, + lookup: "something_else_deprecated", + detail: "fn something_else_deprecated()", + deprecated: true, + }, + ] + "#]], + ); + + check( + r#" +struct A { #[deprecated] the_field: u32 } +fn foo() { A { the<|> } } +"#, + expect![[r#" + [ + CompletionItem { + label: "the_field", + source_range: 57..60, + delete: 57..60, + insert: "the_field", + kind: Field, + detail: "u32", + deprecated: true, + }, + ] + "#]], + ); + } + + #[test] + fn renders_docs() { + check( + r#" +struct S { + /// Field docs + foo: +} +impl S { + /// Method docs + fn bar(self) { self.<|> } +}"#, + expect![[r#" + [ + CompletionItem { + label: "bar()", + source_range: 94..94, + delete: 94..94, + insert: "bar()$0", + kind: Method, + lookup: "bar", + detail: "fn bar(self)", + documentation: Documentation( + "Method docs", + ), + 
}, + CompletionItem { + label: "foo", + source_range: 94..94, + delete: 94..94, + insert: "foo", + kind: Field, + detail: "{unknown}", + documentation: Documentation( + "Field docs", + ), + }, + ] + "#]], + ); + + check( + r#" +use self::my<|>; + +/// mod docs +mod my { } + +/// enum docs +enum E { + /// variant docs + V +} +use self::E::*; +"#, + expect![[r#" + [ + CompletionItem { + label: "E", + source_range: 10..12, + delete: 10..12, + insert: "E", + kind: Enum, + documentation: Documentation( + "enum docs", + ), + }, + CompletionItem { + label: "V", + source_range: 10..12, + delete: 10..12, + insert: "V", + kind: EnumVariant, + detail: "()", + documentation: Documentation( + "variant docs", + ), + }, + CompletionItem { + label: "my", + source_range: 10..12, + delete: 10..12, + insert: "my", + kind: Module, + documentation: Documentation( + "mod docs", + ), + }, + ] + "#]], + ) + } + + #[test] + fn dont_render_attrs() { + check( + r#" +struct S; +impl S { + #[inline] + fn the_method(&self) { } +} +fn foo(s: S) { s.<|> } +"#, + expect![[r#" + [ + CompletionItem { + label: "the_method()", + source_range: 81..81, + delete: 81..81, + insert: "the_method()$0", + kind: Method, + lookup: "the_method", + detail: "fn the_method(&self)", + }, + ] + "#]], + ) + } + + #[test] + fn inserts_parens_for_function_calls() { + mark::check!(inserts_parens_for_function_calls); + check_edit( + "no_args", + r#" +fn no_args() {} +fn main() { no_<|> } +"#, + r#" +fn no_args() {} +fn main() { no_args()$0 } +"#, + ); + + check_edit( + "with_args", + r#" +fn with_args(x: i32, y: String) {} +fn main() { with_<|> } +"#, + r#" +fn with_args(x: i32, y: String) {} +fn main() { with_args(${1:x}, ${2:y})$0 } +"#, + ); + + check_edit( + "foo", + r#" +struct S; +impl S { + fn foo(&self) {} +} +fn bar(s: &S) { s.f<|> } +"#, + r#" +struct S; +impl S { + fn foo(&self) {} +} +fn bar(s: &S) { s.foo()$0 } +"#, + ); + + check_edit( + "foo", + r#" +struct S {} +impl S { + fn foo(&self, x: i32) {} +} +fn 
bar(s: &S) { + s.f<|> +} +"#, + r#" +struct S {} +impl S { + fn foo(&self, x: i32) {} +} +fn bar(s: &S) { + s.foo(${1:x})$0 +} +"#, + ); + } + + #[test] + fn suppress_arg_snippets() { + mark::check!(suppress_arg_snippets); + check_edit_with_config( + CompletionConfig { add_call_argument_snippets: false, ..CompletionConfig::default() }, + "with_args", + r#" +fn with_args(x: i32, y: String) {} +fn main() { with_<|> } +"#, + r#" +fn with_args(x: i32, y: String) {} +fn main() { with_args($0) } +"#, + ); + } + + #[test] + fn strips_underscores_from_args() { + check_edit( + "foo", + r#" +fn foo(_foo: i32, ___bar: bool, ho_ge_: String) {} +fn main() { f<|> } +"#, + r#" +fn foo(_foo: i32, ___bar: bool, ho_ge_: String) {} +fn main() { foo(${1:foo}, ${2:bar}, ${3:ho_ge_})$0 } +"#, + ); + } + + #[test] + fn inserts_parens_for_tuple_enums() { + mark::check!(inserts_parens_for_tuple_enums); + check_edit( + "Some", + r#" +enum Option { Some(T), None } +use Option::*; +fn main() -> Option { + Som<|> +} +"#, + r#" +enum Option { Some(T), None } +use Option::*; +fn main() -> Option { + Some($0) +} +"#, + ); + check_edit( + "Some", + r#" +enum Option { Some(T), None } +use Option::*; +fn main(value: Option) { + match value { + Som<|> + } +} +"#, + r#" +enum Option { Some(T), None } +use Option::*; +fn main(value: Option) { + match value { + Some($0) + } +} +"#, + ); + } + + #[test] + fn dont_duplicate_pattern_parens() { + mark::check!(dont_duplicate_pattern_parens); + check_edit( + "Var", + r#" +enum E { Var(i32) } +fn main() { + match E::Var(92) { + E::<|>(92) => (), + } +} +"#, + r#" +enum E { Var(i32) } +fn main() { + match E::Var(92) { + E::Var(92) => (), + } +} +"#, + ); + } + + #[test] + fn no_call_parens_if_fn_ptr_needed() { + mark::check!(no_call_parens_if_fn_ptr_needed); + check_edit( + "foo", + r#" +fn foo(foo: u8, bar: u8) {} +struct ManualVtable { f: fn(u8, u8) } + +fn main() -> ManualVtable { + ManualVtable { f: f<|> } +} +"#, + r#" +fn foo(foo: u8, bar: u8) {} +struct 
ManualVtable { f: fn(u8, u8) } + +fn main() -> ManualVtable { + ManualVtable { f: foo } +} +"#, + ); + } + + #[test] + fn no_parens_in_use_item() { + mark::check!(no_parens_in_use_item); + check_edit( + "foo", + r#" +mod m { pub fn foo() {} } +use crate::m::f<|>; +"#, + r#" +mod m { pub fn foo() {} } +use crate::m::foo; +"#, + ); + } + + #[test] + fn no_parens_in_call() { + check_edit( + "foo", + r#" +fn foo(x: i32) {} +fn main() { f<|>(); } +"#, + r#" +fn foo(x: i32) {} +fn main() { foo(); } +"#, + ); + check_edit( + "foo", + r#" +struct Foo; +impl Foo { fn foo(&self){} } +fn f(foo: &Foo) { foo.f<|>(); } +"#, + r#" +struct Foo; +impl Foo { fn foo(&self){} } +fn f(foo: &Foo) { foo.foo(); } +"#, + ); + } + + #[test] + fn inserts_angle_brackets_for_generics() { + mark::check!(inserts_angle_brackets_for_generics); + check_edit( + "Vec", + r#" +struct Vec {} +fn foo(xs: Ve<|>) +"#, + r#" +struct Vec {} +fn foo(xs: Vec<$0>) +"#, + ); + check_edit( + "Vec", + r#" +type Vec = (T,); +fn foo(xs: Ve<|>) +"#, + r#" +type Vec = (T,); +fn foo(xs: Vec<$0>) +"#, + ); + check_edit( + "Vec", + r#" +struct Vec {} +fn foo(xs: Ve<|>) +"#, + r#" +struct Vec {} +fn foo(xs: Vec) +"#, + ); + check_edit( + "Vec", + r#" +struct Vec {} +fn foo(xs: Ve<|>) +"#, + r#" +struct Vec {} +fn foo(xs: Vec) +"#, + ); + } + + #[test] + fn dont_insert_macro_call_parens_unncessary() { + mark::check!(dont_insert_macro_call_parens_unncessary); + check_edit( + "frobnicate!", + r#" +//- /main.rs +use foo::<|>; +//- /foo/lib.rs +#[macro_export] +macro_rules frobnicate { () => () } +"#, + r#" +use foo::frobnicate; +"#, + ); + + check_edit( + "frobnicate!", + r#" +macro_rules frobnicate { () => () } +fn main() { frob<|>!(); } +"#, + r#" +macro_rules frobnicate { () => () } +fn main() { frobnicate!(); } +"#, + ); + } + + #[test] + fn active_param_score() { + mark::check!(active_param_type_match); + check_scores( + r#" +struct S { foo: i64, bar: u32, baz: u32 } +fn test(bar: u32) { } +fn foo(s: S) { test(s.<|>) } 
+"#, + expect![[r#" + fd bar [type+name] + fd baz [type] + fd foo [] + "#]], + ); + } + + #[test] + fn record_field_scores() { + mark::check!(record_field_type_match); + check_scores( + r#" +struct A { foo: i64, bar: u32, baz: u32 } +struct B { x: (), y: f32, bar: u32 } +fn foo(a: A) { B { bar: a.<|> }; } +"#, + expect![[r#" + fd bar [type+name] + fd baz [type] + fd foo [] + "#]], + ) + } + + #[test] + fn record_field_and_call_scores() { + check_scores( + r#" +struct A { foo: i64, bar: u32, baz: u32 } +struct B { x: (), y: f32, bar: u32 } +fn f(foo: i64) { } +fn foo(a: A) { B { bar: f(a.<|>) }; } +"#, + expect![[r#" + fd foo [type+name] + fd bar [] + fd baz [] + "#]], + ); + check_scores( + r#" +struct A { foo: i64, bar: u32, baz: u32 } +struct B { x: (), y: f32, bar: u32 } +fn f(foo: i64) { } +fn foo(a: A) { f(B { bar: a.<|> }); } +"#, + expect![[r#" + fd bar [type+name] + fd baz [type] + fd foo [] + "#]], + ); + } + + #[test] + fn prioritize_exact_ref_match() { + check_scores( + r#" +struct WorldSnapshot { _f: () }; +fn go(world: &WorldSnapshot) { go(w<|>) } +"#, + expect![[r#" + bn world [type+name] + st WorldSnapshot [] + fn go(…) [] + "#]], + ); + } + + #[test] + fn too_many_arguments() { + mark::check!(too_many_arguments); + check_scores( + r#" +struct Foo; +fn f(foo: &Foo) { f(foo, w<|>) } +"#, + expect![[r#" + st Foo [] + fn f(…) [] + bn foo [] + "#]], + ); + } + + #[test] + fn guesses_macro_braces() { + check_edit( + "vec!", + r#" +/// Creates a [`Vec`] containing the arguments. +/// +/// ``` +/// let v = vec![1, 2, 3]; +/// assert_eq!(v[0], 1); +/// assert_eq!(v[1], 2); +/// assert_eq!(v[2], 3); +/// ``` +macro_rules! vec { () => {} } + +fn fn main() { v<|> } +"#, + r#" +/// Creates a [`Vec`] containing the arguments. +/// +/// ``` +/// let v = vec![1, 2, 3]; +/// assert_eq!(v[0], 1); +/// assert_eq!(v[1], 2); +/// assert_eq!(v[2], 3); +/// ``` +macro_rules! 
vec { () => {} } + +fn fn main() { vec![$0] } +"#, + ); + + check_edit( + "foo!", + r#" +/// Foo +/// +/// Don't call `fooo!()` `fooo!()`, or `_foo![]` `_foo![]`, +/// call as `let _=foo! { hello world };` +macro_rules! foo { () => {} } +fn main() { <|> } +"#, + r#" +/// Foo +/// +/// Don't call `fooo!()` `fooo!()`, or `_foo![]` `_foo![]`, +/// call as `let _=foo! { hello world };` +macro_rules! foo { () => {} } +fn main() { foo! {$0} } +"#, + ) + } +} diff --git a/crates/ide/src/completion/test_utils.rs b/crates/ide/src/completion/test_utils.rs new file mode 100644 index 0000000000..1452d7e9e5 --- /dev/null +++ b/crates/ide/src/completion/test_utils.rs @@ -0,0 +1,114 @@ +//! Runs completion for testing purposes. + +use hir::Semantics; +use itertools::Itertools; +use stdx::{format_to, trim_indent}; +use syntax::{AstNode, NodeOrToken, SyntaxElement}; +use test_utils::assert_eq_text; + +use crate::{ + completion::{completion_item::CompletionKind, CompletionConfig}, + mock_analysis::analysis_and_position, + CompletionItem, +}; + +pub(crate) fn do_completion(code: &str, kind: CompletionKind) -> Vec { + do_completion_with_config(CompletionConfig::default(), code, kind) +} + +pub(crate) fn do_completion_with_config( + config: CompletionConfig, + code: &str, + kind: CompletionKind, +) -> Vec { + let mut kind_completions: Vec = get_all_completion_items(config, code) + .into_iter() + .filter(|c| c.completion_kind == kind) + .collect(); + kind_completions.sort_by(|l, r| l.label().cmp(r.label())); + kind_completions +} + +pub(crate) fn completion_list(code: &str, kind: CompletionKind) -> String { + completion_list_with_config(CompletionConfig::default(), code, kind) +} + +pub(crate) fn completion_list_with_config( + config: CompletionConfig, + code: &str, + kind: CompletionKind, +) -> String { + let mut kind_completions: Vec = get_all_completion_items(config, code) + .into_iter() + .filter(|c| c.completion_kind == kind) + .collect(); + kind_completions.sort_by_key(|c| 
c.label().to_owned()); + let label_width = kind_completions + .iter() + .map(|it| monospace_width(it.label())) + .max() + .unwrap_or_default() + .min(16); + kind_completions + .into_iter() + .map(|it| { + let tag = it.kind().unwrap().tag(); + let var_name = format!("{} {}", tag, it.label()); + let mut buf = var_name; + if let Some(detail) = it.detail() { + let width = label_width.saturating_sub(monospace_width(it.label())); + format_to!(buf, "{:width$} {}", "", detail, width = width); + } + format_to!(buf, "\n"); + buf + }) + .collect() +} + +fn monospace_width(s: &str) -> usize { + s.chars().count() +} + +pub(crate) fn check_edit(what: &str, ra_fixture_before: &str, ra_fixture_after: &str) { + check_edit_with_config(CompletionConfig::default(), what, ra_fixture_before, ra_fixture_after) +} + +pub(crate) fn check_edit_with_config( + config: CompletionConfig, + what: &str, + ra_fixture_before: &str, + ra_fixture_after: &str, +) { + let ra_fixture_after = trim_indent(ra_fixture_after); + let (analysis, position) = analysis_and_position(ra_fixture_before); + let completions: Vec = + analysis.completions(&config, position).unwrap().unwrap().into(); + let (completion,) = completions + .iter() + .filter(|it| it.lookup() == what) + .collect_tuple() + .unwrap_or_else(|| panic!("can't find {:?} completion in {:#?}", what, completions)); + let mut actual = analysis.file_text(position.file_id).unwrap().to_string(); + completion.text_edit().apply(&mut actual); + assert_eq_text!(&ra_fixture_after, &actual) +} + +pub(crate) fn check_pattern_is_applicable(code: &str, check: fn(SyntaxElement) -> bool) { + let (analysis, pos) = analysis_and_position(code); + analysis + .with_db(|db| { + let sema = Semantics::new(db); + let original_file = sema.parse(pos.file_id); + let token = original_file.syntax().token_at_offset(pos.offset).left_biased().unwrap(); + assert!(check(NodeOrToken::Token(token))); + }) + .unwrap(); +} + +pub(crate) fn get_all_completion_items( + config: 
CompletionConfig, + code: &str, +) -> Vec { + let (analysis, position) = analysis_and_position(code); + analysis.completions(&config, position).unwrap().unwrap().into() +} diff --git a/crates/ide/src/diagnostics.rs b/crates/ide/src/diagnostics.rs new file mode 100644 index 0000000000..a3ec98178a --- /dev/null +++ b/crates/ide/src/diagnostics.rs @@ -0,0 +1,678 @@ +//! Collects diagnostics & fixits for a single file. +//! +//! The tricky bit here is that diagnostics are produced by hir in terms of +//! macro-expanded files, but we need to present them to the users in terms of +//! original files. So we need to map the ranges. + +use std::cell::RefCell; + +use base_db::SourceDatabase; +use hir::{diagnostics::DiagnosticSinkBuilder, Semantics}; +use ide_db::RootDatabase; +use itertools::Itertools; +use syntax::{ + ast::{self, AstNode}, + SyntaxNode, TextRange, T, +}; +use text_edit::TextEdit; + +use crate::{Diagnostic, FileId, Fix, SourceFileEdit}; + +mod diagnostics_with_fix; +use diagnostics_with_fix::DiagnosticWithFix; + +#[derive(Debug, Copy, Clone)] +pub enum Severity { + Error, + WeakWarning, +} + +pub(crate) fn diagnostics( + db: &RootDatabase, + file_id: FileId, + enable_experimental: bool, +) -> Vec { + let _p = profile::span("diagnostics"); + let sema = Semantics::new(db); + let parse = db.parse(file_id); + let mut res = Vec::new(); + + // [#34344] Only take first 128 errors to prevent slowing down editor/ide, the number 128 is chosen arbitrarily. 
+ res.extend(parse.errors().iter().take(128).map(|err| Diagnostic { + range: err.range(), + message: format!("Syntax Error: {}", err), + severity: Severity::Error, + fix: None, + })); + + for node in parse.tree().syntax().descendants() { + check_unnecessary_braces_in_use_statement(&mut res, file_id, &node); + check_struct_shorthand_initialization(&mut res, file_id, &node); + } + let res = RefCell::new(res); + let mut sink = DiagnosticSinkBuilder::new() + .on::(|d| { + res.borrow_mut().push(diagnostic_with_fix(d, &sema)); + }) + .on::(|d| { + res.borrow_mut().push(diagnostic_with_fix(d, &sema)); + }) + .on::(|d| { + res.borrow_mut().push(diagnostic_with_fix(d, &sema)); + }) + .on::(|d| { + res.borrow_mut().push(diagnostic_with_fix(d, &sema)); + }) + // Only collect experimental diagnostics when they're enabled. + .filter(|diag| !diag.is_experimental() || enable_experimental) + // Diagnostics not handled above get no fix and default treatment. + .build(|d| { + res.borrow_mut().push(Diagnostic { + message: d.message(), + range: sema.diagnostics_display_range(d).range, + severity: Severity::Error, + fix: None, + }) + }); + + if let Some(m) = sema.to_module_def(file_id) { + m.diagnostics(db, &mut sink); + }; + drop(sink); + res.into_inner() +} + +fn diagnostic_with_fix(d: &D, sema: &Semantics) -> Diagnostic { + Diagnostic { + range: sema.diagnostics_display_range(d).range, + message: d.message(), + severity: Severity::Error, + fix: d.fix(&sema), + } +} + +fn check_unnecessary_braces_in_use_statement( + acc: &mut Vec, + file_id: FileId, + node: &SyntaxNode, +) -> Option<()> { + let use_tree_list = ast::UseTreeList::cast(node.clone())?; + if let Some((single_use_tree,)) = use_tree_list.use_trees().collect_tuple() { + let use_range = use_tree_list.syntax().text_range(); + let edit = + text_edit_for_remove_unnecessary_braces_with_self_in_use_statement(&single_use_tree) + .unwrap_or_else(|| { + let to_replace = single_use_tree.syntax().text().to_string(); + let mut 
edit_builder = TextEdit::builder(); + edit_builder.delete(use_range); + edit_builder.insert(use_range.start(), to_replace); + edit_builder.finish() + }); + + acc.push(Diagnostic { + range: use_range, + message: "Unnecessary braces in use statement".to_string(), + severity: Severity::WeakWarning, + fix: Some(Fix::new( + "Remove unnecessary braces", + SourceFileEdit { file_id, edit }.into(), + use_range, + )), + }); + } + + Some(()) +} + +fn text_edit_for_remove_unnecessary_braces_with_self_in_use_statement( + single_use_tree: &ast::UseTree, +) -> Option { + let use_tree_list_node = single_use_tree.syntax().parent()?; + if single_use_tree.path()?.segment()?.syntax().first_child_or_token()?.kind() == T![self] { + let start = use_tree_list_node.prev_sibling_or_token()?.text_range().start(); + let end = use_tree_list_node.text_range().end(); + return Some(TextEdit::delete(TextRange::new(start, end))); + } + None +} + +fn check_struct_shorthand_initialization( + acc: &mut Vec, + file_id: FileId, + node: &SyntaxNode, +) -> Option<()> { + let record_lit = ast::RecordExpr::cast(node.clone())?; + let record_field_list = record_lit.record_expr_field_list()?; + for record_field in record_field_list.fields() { + if let (Some(name_ref), Some(expr)) = (record_field.name_ref(), record_field.expr()) { + let field_name = name_ref.syntax().text().to_string(); + let field_expr = expr.syntax().text().to_string(); + let field_name_is_tup_index = name_ref.as_tuple_field().is_some(); + if field_name == field_expr && !field_name_is_tup_index { + let mut edit_builder = TextEdit::builder(); + edit_builder.delete(record_field.syntax().text_range()); + edit_builder.insert(record_field.syntax().text_range().start(), field_name); + let edit = edit_builder.finish(); + + let field_range = record_field.syntax().text_range(); + acc.push(Diagnostic { + range: field_range, + message: "Shorthand struct initialization".to_string(), + severity: Severity::WeakWarning, + fix: Some(Fix::new( + "Use struct 
shorthand initialization", + SourceFileEdit { file_id, edit }.into(), + field_range, + )), + }); + } + } + } + Some(()) +} + +#[cfg(test)] +mod tests { + use stdx::trim_indent; + use test_utils::assert_eq_text; + + use crate::mock_analysis::{analysis_and_position, single_file, MockAnalysis}; + use expect::{expect, Expect}; + + /// Takes a multi-file input fixture with annotated cursor positions, + /// and checks that: + /// * a diagnostic is produced + /// * this diagnostic fix trigger range touches the input cursor position + /// * that the contents of the file containing the cursor match `after` after the diagnostic fix is applied + fn check_fix(ra_fixture_before: &str, ra_fixture_after: &str) { + let after = trim_indent(ra_fixture_after); + + let (analysis, file_position) = analysis_and_position(ra_fixture_before); + let diagnostic = analysis.diagnostics(file_position.file_id, true).unwrap().pop().unwrap(); + let mut fix = diagnostic.fix.unwrap(); + let edit = fix.source_change.source_file_edits.pop().unwrap().edit; + let target_file_contents = analysis.file_text(file_position.file_id).unwrap(); + let actual = { + let mut actual = target_file_contents.to_string(); + edit.apply(&mut actual); + actual + }; + + assert_eq_text!(&after, &actual); + assert!( + fix.fix_trigger_range.start() <= file_position.offset + && fix.fix_trigger_range.end() >= file_position.offset, + "diagnostic fix range {:?} does not touch cursor position {:?}", + fix.fix_trigger_range, + file_position.offset + ); + } + + /// Checks that a diagnostic applies to the file containing the `<|>` cursor marker + /// which has a fix that can apply to other files. 
+ fn check_apply_diagnostic_fix_in_other_file(ra_fixture_before: &str, ra_fixture_after: &str) { + let ra_fixture_after = &trim_indent(ra_fixture_after); + let (analysis, file_pos) = analysis_and_position(ra_fixture_before); + let current_file_id = file_pos.file_id; + let diagnostic = analysis.diagnostics(current_file_id, true).unwrap().pop().unwrap(); + let mut fix = diagnostic.fix.unwrap(); + let edit = fix.source_change.source_file_edits.pop().unwrap(); + let changed_file_id = edit.file_id; + let before = analysis.file_text(changed_file_id).unwrap(); + let actual = { + let mut actual = before.to_string(); + edit.edit.apply(&mut actual); + actual + }; + assert_eq_text!(ra_fixture_after, &actual); + } + + /// Takes a multi-file input fixture with annotated cursor position and checks that no diagnostics + /// apply to the file containing the cursor. + fn check_no_diagnostics(ra_fixture: &str) { + let mock = MockAnalysis::with_files(ra_fixture); + let files = mock.files().map(|(it, _)| it).collect::>(); + let analysis = mock.analysis(); + let diagnostics = files + .into_iter() + .flat_map(|file_id| analysis.diagnostics(file_id, true).unwrap()) + .collect::>(); + assert_eq!(diagnostics.len(), 0, "unexpected diagnostics:\n{:#?}", diagnostics); + } + + fn check_expect(ra_fixture: &str, expect: Expect) { + let (analysis, file_id) = single_file(ra_fixture); + let diagnostics = analysis.diagnostics(file_id, true).unwrap(); + expect.assert_debug_eq(&diagnostics) + } + + #[test] + fn test_wrap_return_type() { + check_fix( + r#" +//- /main.rs +use core::result::Result::{self, Ok, Err}; + +fn div(x: i32, y: i32) -> Result { + if y == 0 { + return Err(()); + } + x / y<|> +} +//- /core/lib.rs +pub mod result { + pub enum Result { Ok(T), Err(E) } +} +"#, + r#" +use core::result::Result::{self, Ok, Err}; + +fn div(x: i32, y: i32) -> Result { + if y == 0 { + return Err(()); + } + Ok(x / y) +} +"#, + ); + } + + #[test] + fn test_wrap_return_type_handles_generic_functions() { + 
check_fix( + r#" +//- /main.rs +use core::result::Result::{self, Ok, Err}; + +fn div(x: T) -> Result { + if x == 0 { + return Err(7); + } + <|>x +} +//- /core/lib.rs +pub mod result { + pub enum Result { Ok(T), Err(E) } +} +"#, + r#" +use core::result::Result::{self, Ok, Err}; + +fn div(x: T) -> Result { + if x == 0 { + return Err(7); + } + Ok(x) +} +"#, + ); + } + + #[test] + fn test_wrap_return_type_handles_type_aliases() { + check_fix( + r#" +//- /main.rs +use core::result::Result::{self, Ok, Err}; + +type MyResult = Result; + +fn div(x: i32, y: i32) -> MyResult { + if y == 0 { + return Err(()); + } + x <|>/ y +} +//- /core/lib.rs +pub mod result { + pub enum Result { Ok(T), Err(E) } +} +"#, + r#" +use core::result::Result::{self, Ok, Err}; + +type MyResult = Result; + +fn div(x: i32, y: i32) -> MyResult { + if y == 0 { + return Err(()); + } + Ok(x / y) +} +"#, + ); + } + + #[test] + fn test_wrap_return_type_not_applicable_when_expr_type_does_not_match_ok_type() { + check_no_diagnostics( + r#" +//- /main.rs +use core::result::Result::{self, Ok, Err}; + +fn foo() -> Result<(), i32> { 0 } + +//- /core/lib.rs +pub mod result { + pub enum Result { Ok(T), Err(E) } +} +"#, + ); + } + + #[test] + fn test_wrap_return_type_not_applicable_when_return_type_is_not_result() { + check_no_diagnostics( + r#" +//- /main.rs +use core::result::Result::{self, Ok, Err}; + +enum SomeOtherEnum { Ok(i32), Err(String) } + +fn foo() -> SomeOtherEnum { 0 } + +//- /core/lib.rs +pub mod result { + pub enum Result { Ok(T), Err(E) } +} +"#, + ); + } + + #[test] + fn test_fill_struct_fields_empty() { + check_fix( + r#" +struct TestStruct { one: i32, two: i64 } + +fn test_fn() { + let s = TestStruct {<|>}; +} +"#, + r#" +struct TestStruct { one: i32, two: i64 } + +fn test_fn() { + let s = TestStruct { one: (), two: ()}; +} +"#, + ); + } + + #[test] + fn test_fill_struct_fields_self() { + check_fix( + r#" +struct TestStruct { one: i32 } + +impl TestStruct { + fn test_fn() { let s = Self {<|>}; } 
+} +"#, + r#" +struct TestStruct { one: i32 } + +impl TestStruct { + fn test_fn() { let s = Self { one: ()}; } +} +"#, + ); + } + + #[test] + fn test_fill_struct_fields_enum() { + check_fix( + r#" +enum Expr { + Bin { lhs: Box, rhs: Box } +} + +impl Expr { + fn new_bin(lhs: Box, rhs: Box) -> Expr { + Expr::Bin {<|> } + } +} +"#, + r#" +enum Expr { + Bin { lhs: Box, rhs: Box } +} + +impl Expr { + fn new_bin(lhs: Box, rhs: Box) -> Expr { + Expr::Bin { lhs: (), rhs: () } + } +} +"#, + ); + } + + #[test] + fn test_fill_struct_fields_partial() { + check_fix( + r#" +struct TestStruct { one: i32, two: i64 } + +fn test_fn() { + let s = TestStruct{ two: 2<|> }; +} +"#, + r" +struct TestStruct { one: i32, two: i64 } + +fn test_fn() { + let s = TestStruct{ two: 2, one: () }; +} +", + ); + } + + #[test] + fn test_fill_struct_fields_no_diagnostic() { + check_no_diagnostics( + r" + struct TestStruct { one: i32, two: i64 } + + fn test_fn() { + let one = 1; + let s = TestStruct{ one, two: 2 }; + } + ", + ); + } + + #[test] + fn test_fill_struct_fields_no_diagnostic_on_spread() { + check_no_diagnostics( + r" + struct TestStruct { one: i32, two: i64 } + + fn test_fn() { + let one = 1; + let s = TestStruct{ ..a }; + } + ", + ); + } + + #[test] + fn test_unresolved_module_diagnostic() { + check_expect( + r#"mod foo;"#, + expect![[r#" + [ + Diagnostic { + message: "unresolved module", + range: 0..8, + severity: Error, + fix: Some( + Fix { + label: "Create module", + source_change: SourceChange { + source_file_edits: [], + file_system_edits: [ + CreateFile { + anchor: FileId( + 1, + ), + dst: "foo.rs", + }, + ], + is_snippet: false, + }, + fix_trigger_range: 0..8, + }, + ), + }, + ] + "#]], + ); + } + + #[test] + fn range_mapping_out_of_macros() { + // FIXME: this is very wrong, but somewhat tricky to fix. + check_fix( + r#" +fn some() {} +fn items() {} +fn here() {} + +macro_rules! 
id { ($($tt:tt)*) => { $($tt)*}; } + +fn main() { + let _x = id![Foo { a: <|>42 }]; +} + +pub struct Foo { pub a: i32, pub b: i32 } +"#, + r#" +fn {a:42, b: ()} {} +fn items() {} +fn here() {} + +macro_rules! id { ($($tt:tt)*) => { $($tt)*}; } + +fn main() { + let _x = id![Foo { a: 42 }]; +} + +pub struct Foo { pub a: i32, pub b: i32 } +"#, + ); + } + + #[test] + fn test_check_unnecessary_braces_in_use_statement() { + check_no_diagnostics( + r#" +use a; +use a::{c, d::e}; +"#, + ); + check_fix(r#"use {<|>b};"#, r#"use b;"#); + check_fix(r#"use {b<|>};"#, r#"use b;"#); + check_fix(r#"use a::{c<|>};"#, r#"use a::c;"#); + check_fix(r#"use a::{self<|>};"#, r#"use a;"#); + check_fix(r#"use a::{c, d::{e<|>}};"#, r#"use a::{c, d::e};"#); + } + + #[test] + fn test_check_struct_shorthand_initialization() { + check_no_diagnostics( + r#" +struct A { a: &'static str } +fn main() { A { a: "hello" } } +"#, + ); + check_no_diagnostics( + r#" +struct A(usize); +fn main() { A { 0: 0 } } +"#, + ); + + check_fix( + r#" +struct A { a: &'static str } +fn main() { + let a = "haha"; + A { a<|>: a } +} +"#, + r#" +struct A { a: &'static str } +fn main() { + let a = "haha"; + A { a } +} +"#, + ); + + check_fix( + r#" +struct A { a: &'static str, b: &'static str } +fn main() { + let a = "haha"; + let b = "bb"; + A { a<|>: a, b } +} +"#, + r#" +struct A { a: &'static str, b: &'static str } +fn main() { + let a = "haha"; + let b = "bb"; + A { a, b } +} +"#, + ); + } + + #[test] + fn test_add_field_from_usage() { + check_fix( + r" +fn main() { + Foo { bar: 3, baz<|>: false}; +} +struct Foo { + bar: i32 +} +", + r" +fn main() { + Foo { bar: 3, baz: false}; +} +struct Foo { + bar: i32, + baz: bool +} +", + ) + } + + #[test] + fn test_add_field_in_other_file_from_usage() { + check_apply_diagnostic_fix_in_other_file( + r" + //- /main.rs + mod foo; + + fn main() { + <|>foo::Foo { bar: 3, baz: false}; + } + //- /foo.rs + struct Foo { + bar: i32 + } + ", + r" + struct Foo { + bar: i32, + pub(crate) 
baz: bool + } + ", + ) + } +} diff --git a/crates/ide/src/diagnostics/diagnostics_with_fix.rs b/crates/ide/src/diagnostics/diagnostics_with_fix.rs new file mode 100644 index 0000000000..85b46c9958 --- /dev/null +++ b/crates/ide/src/diagnostics/diagnostics_with_fix.rs @@ -0,0 +1,171 @@ +//! Provides a way to attach fixes to the diagnostics. +//! The same module also has all curret custom fixes for the diagnostics implemented. +use crate::Fix; +use ast::{edit::IndentLevel, make}; +use base_db::FileId; +use hir::{ + db::AstDatabase, + diagnostics::{Diagnostic, MissingFields, MissingOkInTailExpr, NoSuchField, UnresolvedModule}, + HasSource, HirDisplay, Semantics, VariantDef, +}; +use ide_db::{ + source_change::{FileSystemEdit, SourceFileEdit}, + RootDatabase, +}; +use syntax::{algo, ast, AstNode}; +use text_edit::TextEdit; + +/// A [Diagnostic] that potentially has a fix available. +/// +/// [Diagnostic]: hir::diagnostics::Diagnostic +pub trait DiagnosticWithFix: Diagnostic { + fn fix(&self, sema: &Semantics) -> Option; +} + +impl DiagnosticWithFix for UnresolvedModule { + fn fix(&self, sema: &Semantics) -> Option { + let root = sema.db.parse_or_expand(self.file)?; + let unresolved_module = self.decl.to_node(&root); + Some(Fix::new( + "Create module", + FileSystemEdit::CreateFile { + anchor: self.file.original_file(sema.db), + dst: self.candidate.clone(), + } + .into(), + unresolved_module.syntax().text_range(), + )) + } +} + +impl DiagnosticWithFix for NoSuchField { + fn fix(&self, sema: &Semantics) -> Option { + let root = sema.db.parse_or_expand(self.file)?; + missing_record_expr_field_fix( + &sema, + self.file.original_file(sema.db), + &self.field.to_node(&root), + ) + } +} + +impl DiagnosticWithFix for MissingFields { + fn fix(&self, sema: &Semantics) -> Option { + // Note that although we could add a diagnostics to + // fill the missing tuple field, e.g : + // `struct A(usize);` + // `let a = A { 0: () }` + // but it is uncommon usage and it should not be 
encouraged. + if self.missed_fields.iter().any(|it| it.as_tuple_index().is_some()) { + return None; + } + + let root = sema.db.parse_or_expand(self.file)?; + let old_field_list = self.field_list_parent.to_node(&root).record_expr_field_list()?; + let mut new_field_list = old_field_list.clone(); + for f in self.missed_fields.iter() { + let field = + make::record_expr_field(make::name_ref(&f.to_string()), Some(make::expr_unit())); + new_field_list = new_field_list.append_field(&field); + } + + let edit = { + let mut builder = TextEdit::builder(); + algo::diff(&old_field_list.syntax(), &new_field_list.syntax()) + .into_text_edit(&mut builder); + builder.finish() + }; + Some(Fix::new( + "Fill struct fields", + SourceFileEdit { file_id: self.file.original_file(sema.db), edit }.into(), + sema.original_range(&old_field_list.syntax()).range, + )) + } +} + +impl DiagnosticWithFix for MissingOkInTailExpr { + fn fix(&self, sema: &Semantics) -> Option { + let root = sema.db.parse_or_expand(self.file)?; + let tail_expr = self.expr.to_node(&root); + let tail_expr_range = tail_expr.syntax().text_range(); + let edit = TextEdit::replace(tail_expr_range, format!("Ok({})", tail_expr.syntax())); + let source_change = + SourceFileEdit { file_id: self.file.original_file(sema.db), edit }.into(); + Some(Fix::new("Wrap with ok", source_change, tail_expr_range)) + } +} + +fn missing_record_expr_field_fix( + sema: &Semantics, + usage_file_id: FileId, + record_expr_field: &ast::RecordExprField, +) -> Option { + let record_lit = ast::RecordExpr::cast(record_expr_field.syntax().parent()?.parent()?)?; + let def_id = sema.resolve_variant(record_lit)?; + let module; + let def_file_id; + let record_fields = match VariantDef::from(def_id) { + VariantDef::Struct(s) => { + module = s.module(sema.db); + let source = s.source(sema.db); + def_file_id = source.file_id; + let fields = source.value.field_list()?; + record_field_list(fields)? 
+ } + VariantDef::Union(u) => { + module = u.module(sema.db); + let source = u.source(sema.db); + def_file_id = source.file_id; + source.value.record_field_list()? + } + VariantDef::EnumVariant(e) => { + module = e.module(sema.db); + let source = e.source(sema.db); + def_file_id = source.file_id; + let fields = source.value.field_list()?; + record_field_list(fields)? + } + }; + let def_file_id = def_file_id.original_file(sema.db); + + let new_field_type = sema.type_of_expr(&record_expr_field.expr()?)?; + if new_field_type.is_unknown() { + return None; + } + let new_field = make::record_field( + record_expr_field.field_name()?, + make::ty(&new_field_type.display_source_code(sema.db, module.into()).ok()?), + ); + + let last_field = record_fields.fields().last()?; + let last_field_syntax = last_field.syntax(); + let indent = IndentLevel::from_node(last_field_syntax); + + let mut new_field = new_field.to_string(); + if usage_file_id != def_file_id { + new_field = format!("pub(crate) {}", new_field); + } + new_field = format!("\n{}{}", indent, new_field); + + let needs_comma = !last_field_syntax.to_string().ends_with(','); + if needs_comma { + new_field = format!(",{}", new_field); + } + + let source_change = SourceFileEdit { + file_id: def_file_id, + edit: TextEdit::insert(last_field_syntax.text_range().end(), new_field), + }; + return Some(Fix::new( + "Create field", + source_change.into(), + record_expr_field.syntax().text_range(), + )); + + fn record_field_list(field_def_list: ast::FieldList) -> Option { + match field_def_list { + ast::FieldList::RecordFieldList(it) => Some(it), + ast::FieldList::TupleFieldList(_) => None, + } + } +} diff --git a/crates/ide/src/display.rs b/crates/ide/src/display.rs new file mode 100644 index 0000000000..41b5bdc490 --- /dev/null +++ b/crates/ide/src/display.rs @@ -0,0 +1,83 @@ +//! This module contains utilities for turning SyntaxNodes and HIR types +//! into types that may be used to render in a UI. 
+ +mod navigation_target; +mod short_label; + +use syntax::{ + ast::{self, AstNode, AttrsOwner, GenericParamsOwner, NameOwner}, + SyntaxKind::{ATTR, COMMENT}, +}; + +use ast::VisibilityOwner; +use stdx::format_to; + +pub use navigation_target::NavigationTarget; +pub(crate) use navigation_target::{ToNav, TryToNav}; +pub(crate) use short_label::ShortLabel; + +pub(crate) fn function_declaration(node: &ast::Fn) -> String { + let mut buf = String::new(); + if let Some(vis) = node.visibility() { + format_to!(buf, "{} ", vis); + } + if node.async_token().is_some() { + format_to!(buf, "async "); + } + if node.const_token().is_some() { + format_to!(buf, "const "); + } + if node.unsafe_token().is_some() { + format_to!(buf, "unsafe "); + } + if let Some(abi) = node.abi() { + // Keyword `extern` is included in the string. + format_to!(buf, "{} ", abi); + } + if let Some(name) = node.name() { + format_to!(buf, "fn {}", name) + } + if let Some(type_params) = node.generic_param_list() { + format_to!(buf, "{}", type_params); + } + if let Some(param_list) = node.param_list() { + format_to!(buf, "{}", param_list); + } + if let Some(ret_type) = node.ret_type() { + if ret_type.ty().is_some() { + format_to!(buf, " {}", ret_type); + } + } + if let Some(where_clause) = node.where_clause() { + format_to!(buf, "\n{}", where_clause); + } + buf +} + +pub(crate) fn const_label(node: &ast::Const) -> String { + let label: String = node + .syntax() + .children_with_tokens() + .filter(|child| !(child.kind() == COMMENT || child.kind() == ATTR)) + .map(|node| node.to_string()) + .collect(); + + label.trim().to_owned() +} + +pub(crate) fn type_label(node: &ast::TypeAlias) -> String { + let label: String = node + .syntax() + .children_with_tokens() + .filter(|child| !(child.kind() == COMMENT || child.kind() == ATTR)) + .map(|node| node.to_string()) + .collect(); + + label.trim().to_owned() +} + +pub(crate) fn macro_label(node: &ast::MacroCall) -> String { + let name = node.name().map(|name| 
name.syntax().text().to_string()).unwrap_or_default(); + let vis = if node.has_atom_attr("macro_export") { "#[macro_export]\n" } else { "" }; + format!("{}macro_rules! {}", vis, name) +} diff --git a/crates/ide/src/display/navigation_target.rs b/crates/ide/src/display/navigation_target.rs new file mode 100644 index 0000000000..e771061772 --- /dev/null +++ b/crates/ide/src/display/navigation_target.rs @@ -0,0 +1,491 @@ +//! FIXME: write short doc here + +use base_db::{FileId, SourceDatabase}; +use either::Either; +use hir::{original_range, AssocItem, FieldSource, HasSource, InFile, ModuleSource}; +use ide_db::{defs::Definition, RootDatabase}; +use syntax::{ + ast::{self, DocCommentsOwner, NameOwner}, + match_ast, AstNode, SmolStr, + SyntaxKind::{self, IDENT_PAT, TYPE_PARAM}, + TextRange, +}; + +use crate::FileSymbol; + +use super::short_label::ShortLabel; + +/// `NavigationTarget` represents and element in the editor's UI which you can +/// click on to navigate to a particular piece of code. +/// +/// Typically, a `NavigationTarget` corresponds to some element in the source +/// code, like a function or a struct, but this is not strictly required. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct NavigationTarget { + pub file_id: FileId, + /// Range which encompasses the whole element. + /// + /// Should include body, doc comments, attributes, etc. + /// + /// Clients should use this range to answer "is the cursor inside the + /// element?" question. + pub full_range: TextRange, + /// A "most interesting" range withing the `full_range`. + /// + /// Typically, `full_range` is the whole syntax node, including doc + /// comments, and `focus_range` is the range of the identifier. "Most + /// interesting" range within the full range, typically the range of + /// identifier. + /// + /// Clients should place the cursor on this range when navigating to this target. 
+ pub focus_range: Option, + pub name: SmolStr, + pub kind: SyntaxKind, + pub container_name: Option, + pub description: Option, + pub docs: Option, +} + +pub(crate) trait ToNav { + fn to_nav(&self, db: &RootDatabase) -> NavigationTarget; +} + +pub(crate) trait TryToNav { + fn try_to_nav(&self, db: &RootDatabase) -> Option; +} + +impl NavigationTarget { + pub fn focus_or_full_range(&self) -> TextRange { + self.focus_range.unwrap_or(self.full_range) + } + + pub(crate) fn from_module_to_decl(db: &RootDatabase, module: hir::Module) -> NavigationTarget { + let name = module.name(db).map(|it| it.to_string().into()).unwrap_or_default(); + if let Some(src) = module.declaration_source(db) { + let frange = original_range(db, src.as_ref().map(|it| it.syntax())); + let mut res = NavigationTarget::from_syntax( + frange.file_id, + name, + None, + frange.range, + src.value.syntax().kind(), + ); + res.docs = src.value.doc_comment_text(); + res.description = src.value.short_label(); + return res; + } + module.to_nav(db) + } + + #[cfg(test)] + pub(crate) fn assert_match(&self, expected: &str) { + let actual = self.debug_render(); + test_utils::assert_eq_text!(expected.trim(), actual.trim(),); + } + + #[cfg(test)] + pub(crate) fn debug_render(&self) -> String { + let mut buf = + format!("{} {:?} {:?} {:?}", self.name, self.kind, self.file_id, self.full_range); + if let Some(focus_range) = self.focus_range { + buf.push_str(&format!(" {:?}", focus_range)) + } + if let Some(container_name) = &self.container_name { + buf.push_str(&format!(" {}", container_name)) + } + buf + } + + /// Allows `NavigationTarget` to be created from a `NameOwner` + pub(crate) fn from_named( + db: &RootDatabase, + node: InFile<&dyn ast::NameOwner>, + ) -> NavigationTarget { + let name = + node.value.name().map(|it| it.text().clone()).unwrap_or_else(|| SmolStr::new("_")); + let focus_range = + node.value.name().map(|it| original_range(db, node.with_value(it.syntax())).range); + let frange = original_range(db, 
node.map(|it| it.syntax())); + + NavigationTarget::from_syntax( + frange.file_id, + name, + focus_range, + frange.range, + node.value.syntax().kind(), + ) + } + + /// Allows `NavigationTarget` to be created from a `DocCommentsOwner` and a `NameOwner` + pub(crate) fn from_doc_commented( + db: &RootDatabase, + named: InFile<&dyn ast::NameOwner>, + node: InFile<&dyn ast::DocCommentsOwner>, + ) -> NavigationTarget { + let name = + named.value.name().map(|it| it.text().clone()).unwrap_or_else(|| SmolStr::new("_")); + let frange = original_range(db, node.map(|it| it.syntax())); + + NavigationTarget::from_syntax( + frange.file_id, + name, + None, + frange.range, + node.value.syntax().kind(), + ) + } + + fn from_syntax( + file_id: FileId, + name: SmolStr, + focus_range: Option, + full_range: TextRange, + kind: SyntaxKind, + ) -> NavigationTarget { + NavigationTarget { + file_id, + name, + kind, + full_range, + focus_range, + container_name: None, + description: None, + docs: None, + } + } +} + +impl ToNav for FileSymbol { + fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { + NavigationTarget { + file_id: self.file_id, + name: self.name.clone(), + kind: self.kind, + full_range: self.range, + focus_range: self.name_range, + container_name: self.container_name.clone(), + description: description_from_symbol(db, self), + docs: docs_from_symbol(db, self), + } + } +} + +impl TryToNav for Definition { + fn try_to_nav(&self, db: &RootDatabase) -> Option { + match self { + Definition::Macro(it) => Some(it.to_nav(db)), + Definition::Field(it) => Some(it.to_nav(db)), + Definition::ModuleDef(it) => it.try_to_nav(db), + Definition::SelfType(it) => Some(it.to_nav(db)), + Definition::Local(it) => Some(it.to_nav(db)), + Definition::TypeParam(it) => Some(it.to_nav(db)), + } + } +} + +impl TryToNav for hir::ModuleDef { + fn try_to_nav(&self, db: &RootDatabase) -> Option { + let res = match self { + hir::ModuleDef::Module(it) => it.to_nav(db), + hir::ModuleDef::Function(it) => 
it.to_nav(db), + hir::ModuleDef::Adt(it) => it.to_nav(db), + hir::ModuleDef::EnumVariant(it) => it.to_nav(db), + hir::ModuleDef::Const(it) => it.to_nav(db), + hir::ModuleDef::Static(it) => it.to_nav(db), + hir::ModuleDef::Trait(it) => it.to_nav(db), + hir::ModuleDef::TypeAlias(it) => it.to_nav(db), + hir::ModuleDef::BuiltinType(_) => return None, + }; + Some(res) + } +} + +pub(crate) trait ToNavFromAst {} +impl ToNavFromAst for hir::Function {} +impl ToNavFromAst for hir::Const {} +impl ToNavFromAst for hir::Static {} +impl ToNavFromAst for hir::Struct {} +impl ToNavFromAst for hir::Enum {} +impl ToNavFromAst for hir::EnumVariant {} +impl ToNavFromAst for hir::Union {} +impl ToNavFromAst for hir::TypeAlias {} +impl ToNavFromAst for hir::Trait {} + +impl ToNav for D +where + D: HasSource + ToNavFromAst + Copy, + D::Ast: ast::DocCommentsOwner + ast::NameOwner + ShortLabel, +{ + fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { + let src = self.source(db); + let mut res = + NavigationTarget::from_named(db, src.as_ref().map(|it| it as &dyn ast::NameOwner)); + res.docs = src.value.doc_comment_text(); + res.description = src.value.short_label(); + res + } +} + +impl ToNav for hir::Module { + fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { + let src = self.definition_source(db); + let name = self.name(db).map(|it| it.to_string().into()).unwrap_or_default(); + let (syntax, focus) = match &src.value { + ModuleSource::SourceFile(node) => (node.syntax(), None), + ModuleSource::Module(node) => { + (node.syntax(), node.name().map(|it| it.syntax().text_range())) + } + }; + let frange = original_range(db, src.with_value(syntax)); + NavigationTarget::from_syntax(frange.file_id, name, focus, frange.range, syntax.kind()) + } +} + +impl ToNav for hir::ImplDef { + fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { + let src = self.source(db); + let derive_attr = self.is_builtin_derive(db); + let frange = if let Some(item) = &derive_attr { + original_range(db, 
item.syntax()) + } else { + original_range(db, src.as_ref().map(|it| it.syntax())) + }; + let focus_range = if derive_attr.is_some() { + None + } else { + src.value.self_ty().map(|ty| original_range(db, src.with_value(ty.syntax())).range) + }; + + NavigationTarget::from_syntax( + frange.file_id, + "impl".into(), + focus_range, + frange.range, + src.value.syntax().kind(), + ) + } +} + +impl ToNav for hir::Field { + fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { + let src = self.source(db); + + match &src.value { + FieldSource::Named(it) => { + let mut res = NavigationTarget::from_named(db, src.with_value(it)); + res.docs = it.doc_comment_text(); + res.description = it.short_label(); + res + } + FieldSource::Pos(it) => { + let frange = original_range(db, src.with_value(it.syntax())); + NavigationTarget::from_syntax( + frange.file_id, + "".into(), + None, + frange.range, + it.syntax().kind(), + ) + } + } + } +} + +impl ToNav for hir::MacroDef { + fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { + let src = self.source(db); + log::debug!("nav target {:#?}", src.value.syntax()); + let mut res = + NavigationTarget::from_named(db, src.as_ref().map(|it| it as &dyn ast::NameOwner)); + res.docs = src.value.doc_comment_text(); + res + } +} + +impl ToNav for hir::Adt { + fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { + match self { + hir::Adt::Struct(it) => it.to_nav(db), + hir::Adt::Union(it) => it.to_nav(db), + hir::Adt::Enum(it) => it.to_nav(db), + } + } +} + +impl ToNav for hir::AssocItem { + fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { + match self { + AssocItem::Function(it) => it.to_nav(db), + AssocItem::Const(it) => it.to_nav(db), + AssocItem::TypeAlias(it) => it.to_nav(db), + } + } +} + +impl ToNav for hir::Local { + fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { + let src = self.source(db); + let node = match &src.value { + Either::Left(bind_pat) => { + bind_pat.name().map_or_else(|| bind_pat.syntax().clone(), 
|it| it.syntax().clone()) + } + Either::Right(it) => it.syntax().clone(), + }; + let full_range = original_range(db, src.with_value(&node)); + let name = match self.name(db) { + Some(it) => it.to_string().into(), + None => "".into(), + }; + NavigationTarget { + file_id: full_range.file_id, + name, + kind: IDENT_PAT, + full_range: full_range.range, + focus_range: None, + container_name: None, + description: None, + docs: None, + } + } +} + +impl ToNav for hir::TypeParam { + fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { + let src = self.source(db); + let full_range = match &src.value { + Either::Left(it) => it.syntax().text_range(), + Either::Right(it) => it.syntax().text_range(), + }; + let focus_range = match &src.value { + Either::Left(_) => None, + Either::Right(it) => it.name().map(|it| it.syntax().text_range()), + }; + NavigationTarget { + file_id: src.file_id.original_file(db), + name: self.name(db).to_string().into(), + kind: TYPE_PARAM, + full_range, + focus_range, + container_name: None, + description: None, + docs: None, + } + } +} + +pub(crate) fn docs_from_symbol(db: &RootDatabase, symbol: &FileSymbol) -> Option { + let parse = db.parse(symbol.file_id); + let node = symbol.ptr.to_node(parse.tree().syntax()); + + match_ast! { + match node { + ast::Fn(it) => it.doc_comment_text(), + ast::Struct(it) => it.doc_comment_text(), + ast::Enum(it) => it.doc_comment_text(), + ast::Trait(it) => it.doc_comment_text(), + ast::Module(it) => it.doc_comment_text(), + ast::TypeAlias(it) => it.doc_comment_text(), + ast::Const(it) => it.doc_comment_text(), + ast::Static(it) => it.doc_comment_text(), + ast::RecordField(it) => it.doc_comment_text(), + ast::Variant(it) => it.doc_comment_text(), + ast::MacroCall(it) => it.doc_comment_text(), + _ => None, + } + } +} + +/// Get a description of a symbol. +/// +/// e.g. 
`struct Name`, `enum Name`, `fn Name` +pub(crate) fn description_from_symbol(db: &RootDatabase, symbol: &FileSymbol) -> Option { + let parse = db.parse(symbol.file_id); + let node = symbol.ptr.to_node(parse.tree().syntax()); + + match_ast! { + match node { + ast::Fn(it) => it.short_label(), + ast::Struct(it) => it.short_label(), + ast::Enum(it) => it.short_label(), + ast::Trait(it) => it.short_label(), + ast::Module(it) => it.short_label(), + ast::TypeAlias(it) => it.short_label(), + ast::Const(it) => it.short_label(), + ast::Static(it) => it.short_label(), + ast::RecordField(it) => it.short_label(), + ast::Variant(it) => it.short_label(), + _ => None, + } + } +} + +#[cfg(test)] +mod tests { + use expect::expect; + + use crate::{mock_analysis::single_file, Query}; + + #[test] + fn test_nav_for_symbol() { + let (analysis, _) = single_file( + r#" +enum FooInner { } +fn foo() { enum FooInner { } } +"#, + ); + + let navs = analysis.symbol_search(Query::new("FooInner".to_string())).unwrap(); + expect![[r#" + [ + NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 0..17, + focus_range: Some( + 5..13, + ), + name: "FooInner", + kind: ENUM, + container_name: None, + description: Some( + "enum FooInner", + ), + docs: None, + }, + NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 29..46, + focus_range: Some( + 34..42, + ), + name: "FooInner", + kind: ENUM, + container_name: Some( + "foo", + ), + description: Some( + "enum FooInner", + ), + docs: None, + }, + ] + "#]] + .assert_debug_eq(&navs); + } + + #[test] + fn test_world_symbols_are_case_sensitive() { + let (analysis, _) = single_file( + r#" +fn foo() {} +struct Foo; +"#, + ); + + let navs = analysis.symbol_search(Query::new("foo".to_string())).unwrap(); + assert_eq!(navs.len(), 2) + } +} diff --git a/crates/ide/src/display/short_label.rs b/crates/ide/src/display/short_label.rs new file mode 100644 index 0000000000..ea49d9f970 --- /dev/null +++ b/crates/ide/src/display/short_label.rs @@ -0,0 +1,111 
@@ +//! FIXME: write short doc here + +use stdx::format_to; +use syntax::ast::{self, AstNode, NameOwner, VisibilityOwner}; + +pub(crate) trait ShortLabel { + fn short_label(&self) -> Option; +} + +impl ShortLabel for ast::Fn { + fn short_label(&self) -> Option { + Some(crate::display::function_declaration(self)) + } +} + +impl ShortLabel for ast::Struct { + fn short_label(&self) -> Option { + short_label_from_node(self, "struct ") + } +} + +impl ShortLabel for ast::Union { + fn short_label(&self) -> Option { + short_label_from_node(self, "union ") + } +} + +impl ShortLabel for ast::Enum { + fn short_label(&self) -> Option { + short_label_from_node(self, "enum ") + } +} + +impl ShortLabel for ast::Trait { + fn short_label(&self) -> Option { + if self.unsafe_token().is_some() { + short_label_from_node(self, "unsafe trait ") + } else { + short_label_from_node(self, "trait ") + } + } +} + +impl ShortLabel for ast::Module { + fn short_label(&self) -> Option { + short_label_from_node(self, "mod ") + } +} + +impl ShortLabel for ast::SourceFile { + fn short_label(&self) -> Option { + None + } +} + +impl ShortLabel for ast::TypeAlias { + fn short_label(&self) -> Option { + short_label_from_node(self, "type ") + } +} + +impl ShortLabel for ast::Const { + fn short_label(&self) -> Option { + let mut new_buf = short_label_from_ty(self, self.ty(), "const ")?; + if let Some(expr) = self.body() { + format_to!(new_buf, " = {}", expr.syntax()); + } + Some(new_buf) + } +} + +impl ShortLabel for ast::Static { + fn short_label(&self) -> Option { + short_label_from_ty(self, self.ty(), "static ") + } +} + +impl ShortLabel for ast::RecordField { + fn short_label(&self) -> Option { + short_label_from_ty(self, self.ty(), "") + } +} + +impl ShortLabel for ast::Variant { + fn short_label(&self) -> Option { + Some(self.name()?.text().to_string()) + } +} + +fn short_label_from_ty(node: &T, ty: Option, prefix: &str) -> Option +where + T: NameOwner + VisibilityOwner, +{ + let mut buf = 
short_label_from_node(node, prefix)?; + + if let Some(type_ref) = ty { + format_to!(buf, ": {}", type_ref.syntax()); + } + + Some(buf) +} + +fn short_label_from_node(node: &T, label: &str) -> Option +where + T: NameOwner + VisibilityOwner, +{ + let mut buf = node.visibility().map(|v| format!("{} ", v.syntax())).unwrap_or_default(); + buf.push_str(label); + buf.push_str(node.name()?.text().as_str()); + Some(buf) +} diff --git a/crates/ide/src/expand_macro.rs b/crates/ide/src/expand_macro.rs new file mode 100644 index 0000000000..31455709d7 --- /dev/null +++ b/crates/ide/src/expand_macro.rs @@ -0,0 +1,283 @@ +use hir::Semantics; +use ide_db::RootDatabase; +use syntax::{ + algo::{find_node_at_offset, SyntaxRewriter}, + ast, AstNode, NodeOrToken, SyntaxKind, + SyntaxKind::*, + SyntaxNode, WalkEvent, T, +}; + +use crate::FilePosition; + +pub struct ExpandedMacro { + pub name: String, + pub expansion: String, +} + +// Feature: Expand Macro Recursively +// +// Shows the full macro expansion of the macro at current cursor. 
+// +// |=== +// | Editor | Action Name +// +// | VS Code | **Rust Analyzer: Expand macro recursively** +// |=== +pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option { + let sema = Semantics::new(db); + let file = sema.parse(position.file_id); + let name_ref = find_node_at_offset::(file.syntax(), position.offset)?; + let mac = name_ref.syntax().ancestors().find_map(ast::MacroCall::cast)?; + + let expanded = expand_macro_recur(&sema, &mac)?; + + // FIXME: + // macro expansion may lose all white space information + // But we hope someday we can use ra_fmt for that + let expansion = insert_whitespaces(expanded); + Some(ExpandedMacro { name: name_ref.text().to_string(), expansion }) +} + +fn expand_macro_recur( + sema: &Semantics, + macro_call: &ast::MacroCall, +) -> Option { + let mut expanded = sema.expand(macro_call)?; + + let children = expanded.descendants().filter_map(ast::MacroCall::cast); + let mut rewriter = SyntaxRewriter::default(); + + for child in children.into_iter() { + if let Some(new_node) = expand_macro_recur(sema, &child) { + // Replace the whole node if it is root + // `replace_descendants` will not replace the parent node + // but `SyntaxNode::descendants include itself + if expanded == *child.syntax() { + expanded = new_node; + } else { + rewriter.replace(child.syntax(), &new_node) + } + } + } + + let res = rewriter.rewrite(&expanded); + Some(res) +} + +// FIXME: It would also be cool to share logic here and in the mbe tests, +// which are pretty unreadable at the moment. 
+fn insert_whitespaces(syn: SyntaxNode) -> String { + let mut res = String::new(); + let mut token_iter = syn + .preorder_with_tokens() + .filter_map(|event| { + if let WalkEvent::Enter(NodeOrToken::Token(token)) = event { + Some(token) + } else { + None + } + }) + .peekable(); + + let mut indent = 0; + let mut last: Option = None; + + while let Some(token) = token_iter.next() { + let mut is_next = |f: fn(SyntaxKind) -> bool, default| -> bool { + token_iter.peek().map(|it| f(it.kind())).unwrap_or(default) + }; + let is_last = + |f: fn(SyntaxKind) -> bool, default| -> bool { last.map(f).unwrap_or(default) }; + + res += &match token.kind() { + k if is_text(k) && is_next(|it| !it.is_punct(), true) => token.text().to_string() + " ", + L_CURLY if is_next(|it| it != R_CURLY, true) => { + indent += 1; + let leading_space = if is_last(is_text, false) { " " } else { "" }; + format!("{}{{\n{}", leading_space, " ".repeat(indent)) + } + R_CURLY if is_last(|it| it != L_CURLY, true) => { + indent = indent.saturating_sub(1); + format!("\n{}}}", " ".repeat(indent)) + } + R_CURLY => format!("}}\n{}", " ".repeat(indent)), + T![;] => format!(";\n{}", " ".repeat(indent)), + T![->] => " -> ".to_string(), + T![=] => " = ".to_string(), + T![=>] => " => ".to_string(), + _ => token.text().to_string(), + }; + + last = Some(token.kind()); + } + + return res; + + fn is_text(k: SyntaxKind) -> bool { + k.is_keyword() || k.is_literal() || k == IDENT + } +} + +#[cfg(test)] +mod tests { + use expect::{expect, Expect}; + + use crate::mock_analysis::analysis_and_position; + + fn check(ra_fixture: &str, expect: Expect) { + let (analysis, pos) = analysis_and_position(ra_fixture); + let expansion = analysis.expand_macro(pos).unwrap().unwrap(); + let actual = format!("{}\n{}", expansion.name, expansion.expansion); + expect.assert_eq(&actual); + } + + #[test] + fn macro_expand_recursive_expansion() { + check( + r#" +macro_rules! bar { + () => { fn b() {} } +} +macro_rules! 
foo { + () => { bar!(); } +} +macro_rules! baz { + () => { foo!(); } +} +f<|>oo!(); +"#, + expect![[r#" + foo + fn b(){} + "#]], + ); + } + + #[test] + fn macro_expand_multiple_lines() { + check( + r#" +macro_rules! foo { + () => { + fn some_thing() -> u32 { + let a = 0; + a + 10 + } + } +} +f<|>oo!(); + "#, + expect![[r#" + foo + fn some_thing() -> u32 { + let a = 0; + a+10 + }"#]], + ); + } + + #[test] + fn macro_expand_match_ast() { + check( + r#" +macro_rules! match_ast { + (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) }; + (match ($node:expr) { + $( ast::$ast:ident($it:ident) => $res:block, )* + _ => $catch_all:expr $(,)? + }) => {{ + $( if let Some($it) = ast::$ast::cast($node.clone()) $res else )* + { $catch_all } + }}; +} + +fn main() { + mat<|>ch_ast! { + match container { + ast::TraitDef(it) => {}, + ast::ImplDef(it) => {}, + _ => { continue }, + } + } +} +"#, + expect![[r#" + match_ast + { + if let Some(it) = ast::TraitDef::cast(container.clone()){} + else if let Some(it) = ast::ImplDef::cast(container.clone()){} + else { + { + continue + } + } + }"#]], + ); + } + + #[test] + fn macro_expand_match_ast_inside_let_statement() { + check( + r#" +macro_rules! match_ast { + (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) }; + (match ($node:expr) {}) => {{}}; +} + +fn main() { + let p = f(|it| { + let res = mat<|>ch_ast! { match c {}}; + Some(res) + })?; +} +"#, + expect![[r#" + match_ast + {} + "#]], + ); + } + + #[test] + fn macro_expand_inner_macro_fail_to_expand() { + check( + r#" +macro_rules! bar { + (BAD) => {}; +} +macro_rules! foo { + () => {bar!()}; +} + +fn main() { + let res = fo<|>o!(); +} +"#, + expect![[r#" + foo + "#]], + ); + } + + #[test] + fn macro_expand_with_dollar_crate() { + check( + r#" +#[macro_export] +macro_rules! bar { + () => {0}; +} +macro_rules! 
foo { + () => {$crate::bar!()}; +} + +fn main() { + let res = fo<|>o!(); +} +"#, + expect![[r#" + foo + 0 "#]], + ); + } +} diff --git a/crates/ide/src/extend_selection.rs b/crates/ide/src/extend_selection.rs new file mode 100644 index 0000000000..34563a0267 --- /dev/null +++ b/crates/ide/src/extend_selection.rs @@ -0,0 +1,654 @@ +use std::iter::successors; + +use hir::Semantics; +use ide_db::RootDatabase; +use syntax::{ + algo::{self, find_covering_element, skip_trivia_token}, + ast::{self, AstNode, AstToken}, + Direction, NodeOrToken, + SyntaxKind::{self, *}, + SyntaxNode, SyntaxToken, TextRange, TextSize, TokenAtOffset, T, +}; + +use crate::FileRange; + +// Feature: Extend Selection +// +// Extends the current selection to the encompassing syntactic construct +// (expression, statement, item, module, etc). It works with multiple cursors. +// +// |=== +// | Editor | Shortcut +// +// | VS Code | kbd:[Ctrl+Shift+→] +// |=== +pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange { + let sema = Semantics::new(db); + let src = sema.parse(frange.file_id); + try_extend_selection(&sema, src.syntax(), frange).unwrap_or(frange.range) +} + +fn try_extend_selection( + sema: &Semantics, + root: &SyntaxNode, + frange: FileRange, +) -> Option { + let range = frange.range; + + let string_kinds = [COMMENT, STRING, RAW_STRING, BYTE_STRING, RAW_BYTE_STRING]; + let list_kinds = [ + RECORD_PAT_FIELD_LIST, + MATCH_ARM_LIST, + RECORD_FIELD_LIST, + TUPLE_FIELD_LIST, + RECORD_EXPR_FIELD_LIST, + VARIANT_LIST, + USE_TREE_LIST, + GENERIC_PARAM_LIST, + GENERIC_ARG_LIST, + TYPE_BOUND_LIST, + PARAM_LIST, + ARG_LIST, + ARRAY_EXPR, + TUPLE_EXPR, + TUPLE_TYPE, + TUPLE_PAT, + WHERE_CLAUSE, + ]; + + if range.is_empty() { + let offset = range.start(); + let mut leaves = root.token_at_offset(offset); + if leaves.clone().all(|it| it.kind() == WHITESPACE) { + return Some(extend_ws(root, leaves.next()?, offset)); + } + let leaf_range = match leaves { + TokenAtOffset::None => 
return None, + TokenAtOffset::Single(l) => { + if string_kinds.contains(&l.kind()) { + extend_single_word_in_comment_or_string(&l, offset) + .unwrap_or_else(|| l.text_range()) + } else { + l.text_range() + } + } + TokenAtOffset::Between(l, r) => pick_best(l, r).text_range(), + }; + return Some(leaf_range); + }; + let node = match find_covering_element(root, range) { + NodeOrToken::Token(token) => { + if token.text_range() != range { + return Some(token.text_range()); + } + if let Some(comment) = ast::Comment::cast(token.clone()) { + if let Some(range) = extend_comments(comment) { + return Some(range); + } + } + token.parent() + } + NodeOrToken::Node(node) => node, + }; + + // if we are in single token_tree, we maybe live in macro or attr + if node.kind() == TOKEN_TREE { + if let Some(macro_call) = node.ancestors().find_map(ast::MacroCall::cast) { + if let Some(range) = extend_tokens_from_range(sema, macro_call, range) { + return Some(range); + } + } + } + + if node.text_range() != range { + return Some(node.text_range()); + } + + let node = shallowest_node(&node); + + if node.parent().map(|n| list_kinds.contains(&n.kind())) == Some(true) { + if let Some(range) = extend_list_item(&node) { + return Some(range); + } + } + + node.parent().map(|it| it.text_range()) +} + +fn extend_tokens_from_range( + sema: &Semantics, + macro_call: ast::MacroCall, + original_range: TextRange, +) -> Option { + let src = find_covering_element(¯o_call.syntax(), original_range); + let (first_token, last_token) = match src { + NodeOrToken::Node(it) => (it.first_token()?, it.last_token()?), + NodeOrToken::Token(it) => (it.clone(), it), + }; + + let mut first_token = skip_trivia_token(first_token, Direction::Next)?; + let mut last_token = skip_trivia_token(last_token, Direction::Prev)?; + + while !original_range.contains_range(first_token.text_range()) { + first_token = skip_trivia_token(first_token.next_token()?, Direction::Next)?; + } + while 
!original_range.contains_range(last_token.text_range()) { + last_token = skip_trivia_token(last_token.prev_token()?, Direction::Prev)?; + } + + // compute original mapped token range + let extended = { + let fst_expanded = sema.descend_into_macros(first_token.clone()); + let lst_expanded = sema.descend_into_macros(last_token.clone()); + let mut lca = algo::least_common_ancestor(&fst_expanded.parent(), &lst_expanded.parent())?; + lca = shallowest_node(&lca); + if lca.first_token() == Some(fst_expanded) && lca.last_token() == Some(lst_expanded) { + lca = lca.parent()?; + } + lca + }; + + // Compute parent node range + let validate = |token: &SyntaxToken| { + let expanded = sema.descend_into_macros(token.clone()); + algo::least_common_ancestor(&extended, &expanded.parent()).as_ref() == Some(&extended) + }; + + // Find the first and last text range under expanded parent + let first = successors(Some(first_token), |token| { + let token = token.prev_token()?; + skip_trivia_token(token, Direction::Prev) + }) + .take_while(validate) + .last()?; + + let last = successors(Some(last_token), |token| { + let token = token.next_token()?; + skip_trivia_token(token, Direction::Next) + }) + .take_while(validate) + .last()?; + + let range = first.text_range().cover(last.text_range()); + if range.contains_range(original_range) && original_range != range { + Some(range) + } else { + None + } +} + +/// Find the shallowest node with same range, which allows us to traverse siblings. 
+fn shallowest_node(node: &SyntaxNode) -> SyntaxNode { + node.ancestors().take_while(|n| n.text_range() == node.text_range()).last().unwrap() +} + +fn extend_single_word_in_comment_or_string( + leaf: &SyntaxToken, + offset: TextSize, +) -> Option { + let text: &str = leaf.text(); + let cursor_position: u32 = (offset - leaf.text_range().start()).into(); + + let (before, after) = text.split_at(cursor_position as usize); + + fn non_word_char(c: char) -> bool { + !(c.is_alphanumeric() || c == '_') + } + + let start_idx = before.rfind(non_word_char)? as u32; + let end_idx = after.find(non_word_char).unwrap_or_else(|| after.len()) as u32; + + let from: TextSize = (start_idx + 1).into(); + let to: TextSize = (cursor_position + end_idx).into(); + + let range = TextRange::new(from, to); + if range.is_empty() { + None + } else { + Some(range + leaf.text_range().start()) + } +} + +fn extend_ws(root: &SyntaxNode, ws: SyntaxToken, offset: TextSize) -> TextRange { + let ws_text = ws.text(); + let suffix = TextRange::new(offset, ws.text_range().end()) - ws.text_range().start(); + let prefix = TextRange::new(ws.text_range().start(), offset) - ws.text_range().start(); + let ws_suffix = &ws_text.as_str()[suffix]; + let ws_prefix = &ws_text.as_str()[prefix]; + if ws_text.contains('\n') && !ws_suffix.contains('\n') { + if let Some(node) = ws.next_sibling_or_token() { + let start = match ws_prefix.rfind('\n') { + Some(idx) => ws.text_range().start() + TextSize::from((idx + 1) as u32), + None => node.text_range().start(), + }; + let end = if root.text().char_at(node.text_range().end()) == Some('\n') { + node.text_range().end() + TextSize::of('\n') + } else { + node.text_range().end() + }; + return TextRange::new(start, end); + } + } + ws.text_range() +} + +fn pick_best(l: SyntaxToken, r: SyntaxToken) -> SyntaxToken { + return if priority(&r) > priority(&l) { r } else { l }; + fn priority(n: &SyntaxToken) -> usize { + match n.kind() { + WHITESPACE => 0, + IDENT | T![self] | T![super] | 
T![crate] | LIFETIME => 2, + _ => 1, + } + } +} + +/// Extend list item selection to include nearby delimiter and whitespace. +fn extend_list_item(node: &SyntaxNode) -> Option { + fn is_single_line_ws(node: &SyntaxToken) -> bool { + node.kind() == WHITESPACE && !node.text().contains('\n') + } + + fn nearby_delimiter( + delimiter_kind: SyntaxKind, + node: &SyntaxNode, + dir: Direction, + ) -> Option { + node.siblings_with_tokens(dir) + .skip(1) + .skip_while(|node| match node { + NodeOrToken::Node(_) => false, + NodeOrToken::Token(it) => is_single_line_ws(it), + }) + .next() + .and_then(|it| it.into_token()) + .filter(|node| node.kind() == delimiter_kind) + } + + let delimiter = match node.kind() { + TYPE_BOUND => T![+], + _ => T![,], + }; + + if let Some(delimiter_node) = nearby_delimiter(delimiter, node, Direction::Next) { + // Include any following whitespace when delimiter is after list item. + let final_node = delimiter_node + .next_sibling_or_token() + .and_then(|it| it.into_token()) + .filter(|node| is_single_line_ws(node)) + .unwrap_or(delimiter_node); + + return Some(TextRange::new(node.text_range().start(), final_node.text_range().end())); + } + if let Some(delimiter_node) = nearby_delimiter(delimiter, node, Direction::Prev) { + return Some(TextRange::new(delimiter_node.text_range().start(), node.text_range().end())); + } + + None +} + +fn extend_comments(comment: ast::Comment) -> Option { + let prev = adj_comments(&comment, Direction::Prev); + let next = adj_comments(&comment, Direction::Next); + if prev != next { + Some(TextRange::new(prev.syntax().text_range().start(), next.syntax().text_range().end())) + } else { + None + } +} + +fn adj_comments(comment: &ast::Comment, dir: Direction) -> ast::Comment { + let mut res = comment.clone(); + for element in comment.syntax().siblings_with_tokens(dir) { + let token = match element.as_token() { + None => break, + Some(token) => token, + }; + if let Some(c) = ast::Comment::cast(token.clone()) { + res = c + } 
else if token.kind() != WHITESPACE || token.text().contains("\n\n") { + break; + } + } + res +} + +#[cfg(test)] +mod tests { + use crate::mock_analysis::analysis_and_position; + + use super::*; + + fn do_check(before: &str, afters: &[&str]) { + let (analysis, position) = analysis_and_position(&before); + let before = analysis.file_text(position.file_id).unwrap(); + let range = TextRange::empty(position.offset); + let mut frange = FileRange { file_id: position.file_id, range }; + + for &after in afters { + frange.range = analysis.extend_selection(frange).unwrap(); + let actual = &before[frange.range]; + assert_eq!(after, actual); + } + } + + #[test] + fn test_extend_selection_arith() { + do_check(r#"fn foo() { <|>1 + 1 }"#, &["1", "1 + 1", "{ 1 + 1 }"]); + } + + #[test] + fn test_extend_selection_list() { + do_check(r#"fn foo(<|>x: i32) {}"#, &["x", "x: i32"]); + do_check(r#"fn foo(<|>x: i32, y: i32) {}"#, &["x", "x: i32", "x: i32, "]); + do_check(r#"fn foo(<|>x: i32,y: i32) {}"#, &["x", "x: i32", "x: i32,", "(x: i32,y: i32)"]); + do_check(r#"fn foo(x: i32, <|>y: i32) {}"#, &["y", "y: i32", ", y: i32"]); + do_check(r#"fn foo(x: i32, <|>y: i32, ) {}"#, &["y", "y: i32", "y: i32, "]); + do_check(r#"fn foo(x: i32,<|>y: i32) {}"#, &["y", "y: i32", ",y: i32"]); + + do_check(r#"const FOO: [usize; 2] = [ 22<|> , 33];"#, &["22", "22 , "]); + do_check(r#"const FOO: [usize; 2] = [ 22 , 33<|>];"#, &["33", ", 33"]); + do_check(r#"const FOO: [usize; 2] = [ 22 , 33<|> ,];"#, &["33", "33 ,", "[ 22 , 33 ,]"]); + + do_check(r#"fn main() { (1, 2<|>) }"#, &["2", ", 2", "(1, 2)"]); + + do_check( + r#" +const FOO: [usize; 2] = [ + 22, + <|>33, +]"#, + &["33", "33,"], + ); + + do_check( + r#" +const FOO: [usize; 2] = [ + 22 + , 33<|>, +]"#, + &["33", "33,"], + ); + } + + #[test] + fn test_extend_selection_start_of_the_line() { + do_check( + r#" +impl S { +<|> fn foo() { + + } +}"#, + &[" fn foo() {\n\n }\n"], + ); + } + + #[test] + fn test_extend_selection_doc_comments() { + do_check( + 
r#" +struct A; + +/// bla +/// bla +struct B { + <|> +} + "#, + &["\n \n", "{\n \n}", "/// bla\n/// bla\nstruct B {\n \n}"], + ) + } + + #[test] + fn test_extend_selection_comments() { + do_check( + r#" +fn bar(){} + +// fn foo() { +// 1 + <|>1 +// } + +// fn foo(){} + "#, + &["1", "// 1 + 1", "// fn foo() {\n// 1 + 1\n// }"], + ); + + do_check( + r#" +// #[derive(Debug, Clone, Copy, PartialEq, Eq)] +// pub enum Direction { +// <|> Next, +// Prev +// } +"#, + &[ + "// Next,", + "// #[derive(Debug, Clone, Copy, PartialEq, Eq)]\n// pub enum Direction {\n// Next,\n// Prev\n// }", + ], + ); + + do_check( + r#" +/* +foo +_bar1<|>*/ +"#, + &["_bar1", "/*\nfoo\n_bar1*/"], + ); + + do_check(r#"//!<|>foo_2 bar"#, &["foo_2", "//!foo_2 bar"]); + + do_check(r#"/<|>/foo bar"#, &["//foo bar"]); + } + + #[test] + fn test_extend_selection_prefer_idents() { + do_check( + r#" +fn main() { foo<|>+bar;} +"#, + &["foo", "foo+bar"], + ); + do_check( + r#" +fn main() { foo+<|>bar;} +"#, + &["bar", "foo+bar"], + ); + } + + #[test] + fn test_extend_selection_prefer_lifetimes() { + do_check(r#"fn foo<<|>'a>() {}"#, &["'a", "<'a>"]); + do_check(r#"fn foo<'a<|>>() {}"#, &["'a", "<'a>"]); + } + + #[test] + fn test_extend_selection_select_first_word() { + do_check(r#"// foo bar b<|>az quxx"#, &["baz", "// foo bar baz quxx"]); + do_check( + r#" +impl S { +fn foo() { +// hel<|>lo world +} +} +"#, + &["hello", "// hello world"], + ); + } + + #[test] + fn test_extend_selection_string() { + do_check( + r#" +fn bar(){} + +" fn f<|>oo() {" +"#, + &["foo", "\" fn foo() {\""], + ); + } + + #[test] + fn test_extend_trait_bounds_list_in_where_clause() { + do_check( + r#" +fn foo() + where + R: req::Request + 'static, + R::Params: DeserializeOwned<|> + panic::UnwindSafe + 'static, + R::Result: Serialize + 'static, +"#, + &[ + "DeserializeOwned", + "DeserializeOwned + ", + "DeserializeOwned + panic::UnwindSafe + 'static", + "R::Params: DeserializeOwned + panic::UnwindSafe + 'static", + "R::Params: 
DeserializeOwned + panic::UnwindSafe + 'static,", + ], + ); + do_check(r#"fn foo() where T: <|>Copy"#, &["Copy"]); + do_check(r#"fn foo() where T: <|>Copy + Display"#, &["Copy", "Copy + "]); + do_check(r#"fn foo() where T: <|>Copy +Display"#, &["Copy", "Copy +"]); + do_check(r#"fn foo() where T: <|>Copy+Display"#, &["Copy", "Copy+"]); + do_check(r#"fn foo() where T: Copy + <|>Display"#, &["Display", "+ Display"]); + do_check(r#"fn foo() where T: Copy + <|>Display + Sync"#, &["Display", "Display + "]); + do_check(r#"fn foo() where T: Copy +<|>Display"#, &["Display", "+Display"]); + } + + #[test] + fn test_extend_trait_bounds_list_inline() { + do_check(r#"fn fooCopy>() {}"#, &["Copy"]); + do_check(r#"fn fooCopy + Display>() {}"#, &["Copy", "Copy + "]); + do_check(r#"fn fooCopy +Display>() {}"#, &["Copy", "Copy +"]); + do_check(r#"fn fooCopy+Display>() {}"#, &["Copy", "Copy+"]); + do_check(r#"fn fooDisplay>() {}"#, &["Display", "+ Display"]); + do_check(r#"fn fooDisplay + Sync>() {}"#, &["Display", "Display + "]); + do_check(r#"fn fooDisplay>() {}"#, &["Display", "+Display"]); + do_check( + r#"fn foo + Display, U: Copy>() {}"#, + &[ + "Copy", + "Copy + ", + "Copy + Display", + "T: Copy + Display", + "T: Copy + Display, ", + "", + ], + ); + } + + #[test] + fn test_extend_selection_on_tuple_in_type() { + do_check( + r#"fn main() { let _: (krate, <|>_crate_def_map, module_id) = (); }"#, + &["_crate_def_map", "_crate_def_map, ", "(krate, _crate_def_map, module_id)"], + ); + // white space variations + do_check( + r#"fn main() { let _: (krate,<|>_crate_def_map,module_id) = (); }"#, + &["_crate_def_map", "_crate_def_map,", "(krate,_crate_def_map,module_id)"], + ); + do_check( + r#" +fn main() { let _: ( + krate, + _crate<|>_def_map, + module_id +) = (); }"#, + &[ + "_crate_def_map", + "_crate_def_map,", + "(\n krate,\n _crate_def_map,\n module_id\n)", + ], + ); + } + + #[test] + fn test_extend_selection_on_tuple_in_rvalue() { + do_check( + r#"fn main() { let var = (krate, 
_crate_def_map<|>, module_id); }"#, + &["_crate_def_map", "_crate_def_map, ", "(krate, _crate_def_map, module_id)"], + ); + // white space variations + do_check( + r#"fn main() { let var = (krate,_crate<|>_def_map,module_id); }"#, + &["_crate_def_map", "_crate_def_map,", "(krate,_crate_def_map,module_id)"], + ); + do_check( + r#" +fn main() { let var = ( + krate, + _crate_def_map<|>, + module_id +); }"#, + &[ + "_crate_def_map", + "_crate_def_map,", + "(\n krate,\n _crate_def_map,\n module_id\n)", + ], + ); + } + + #[test] + fn test_extend_selection_on_tuple_pat() { + do_check( + r#"fn main() { let (krate, _crate_def_map<|>, module_id) = var; }"#, + &["_crate_def_map", "_crate_def_map, ", "(krate, _crate_def_map, module_id)"], + ); + // white space variations + do_check( + r#"fn main() { let (krate,_crate<|>_def_map,module_id) = var; }"#, + &["_crate_def_map", "_crate_def_map,", "(krate,_crate_def_map,module_id)"], + ); + do_check( + r#" +fn main() { let ( + krate, + _crate_def_map<|>, + module_id +) = var; }"#, + &[ + "_crate_def_map", + "_crate_def_map,", + "(\n krate,\n _crate_def_map,\n module_id\n)", + ], + ); + } + + #[test] + fn extend_selection_inside_macros() { + do_check( + r#"macro_rules! foo { ($item:item) => {$item} } + foo!{fn hello(na<|>me:usize){}}"#, + &[ + "name", + "name:usize", + "(name:usize)", + "fn hello(name:usize){}", + "{fn hello(name:usize){}}", + "foo!{fn hello(name:usize){}}", + ], + ); + } + + #[test] + fn extend_selection_inside_recur_macros() { + do_check( + r#" macro_rules! foo2 { ($item:item) => {$item} } + macro_rules! 
foo { ($item:item) => {foo2!($item);} } + foo!{fn hello(na<|>me:usize){}}"#, + &[ + "name", + "name:usize", + "(name:usize)", + "fn hello(name:usize){}", + "{fn hello(name:usize){}}", + "foo!{fn hello(name:usize){}}", + ], + ); + } +} diff --git a/crates/ide/src/file_structure.rs b/crates/ide/src/file_structure.rs new file mode 100644 index 0000000000..c90247ba65 --- /dev/null +++ b/crates/ide/src/file_structure.rs @@ -0,0 +1,431 @@ +use syntax::{ + ast::{self, AttrsOwner, GenericParamsOwner, NameOwner}, + match_ast, AstNode, SourceFile, SyntaxKind, SyntaxNode, TextRange, WalkEvent, +}; + +#[derive(Debug, Clone)] +pub struct StructureNode { + pub parent: Option, + pub label: String, + pub navigation_range: TextRange, + pub node_range: TextRange, + pub kind: SyntaxKind, + pub detail: Option, + pub deprecated: bool, +} + +// Feature: File Structure +// +// Provides a tree of the symbols defined in the file. Can be used to +// +// * fuzzy search symbol in a file (super useful) +// * draw breadcrumbs to describe the context around the cursor +// * draw outline of the file +// +// |=== +// | Editor | Shortcut +// +// | VS Code | kbd:[Ctrl+Shift+O] +// |=== +pub fn file_structure(file: &SourceFile) -> Vec { + let mut res = Vec::new(); + let mut stack = Vec::new(); + + for event in file.syntax().preorder() { + match event { + WalkEvent::Enter(node) => { + if let Some(mut symbol) = structure_node(&node) { + symbol.parent = stack.last().copied(); + stack.push(res.len()); + res.push(symbol); + } + } + WalkEvent::Leave(node) => { + if structure_node(&node).is_some() { + stack.pop().unwrap(); + } + } + } + } + res +} + +fn structure_node(node: &SyntaxNode) -> Option { + fn decl(node: N) -> Option { + decl_with_detail(&node, None) + } + + fn decl_with_type_ref( + node: &N, + type_ref: Option, + ) -> Option { + let detail = type_ref.map(|type_ref| { + let mut detail = String::new(); + collapse_ws(type_ref.syntax(), &mut detail); + detail + }); + decl_with_detail(node, detail) + 
} + + fn decl_with_detail( + node: &N, + detail: Option, + ) -> Option { + let name = node.name()?; + + Some(StructureNode { + parent: None, + label: name.text().to_string(), + navigation_range: name.syntax().text_range(), + node_range: node.syntax().text_range(), + kind: node.syntax().kind(), + detail, + deprecated: node.attrs().filter_map(|x| x.simple_name()).any(|x| x == "deprecated"), + }) + } + + fn collapse_ws(node: &SyntaxNode, output: &mut String) { + let mut can_insert_ws = false; + node.text().for_each_chunk(|chunk| { + for line in chunk.lines() { + let line = line.trim(); + if line.is_empty() { + if can_insert_ws { + output.push(' '); + can_insert_ws = false; + } + } else { + output.push_str(line); + can_insert_ws = true; + } + } + }) + } + + match_ast! { + match node { + ast::Fn(it) => { + let mut detail = String::from("fn"); + if let Some(type_param_list) = it.generic_param_list() { + collapse_ws(type_param_list.syntax(), &mut detail); + } + if let Some(param_list) = it.param_list() { + collapse_ws(param_list.syntax(), &mut detail); + } + if let Some(ret_type) = it.ret_type() { + detail.push_str(" "); + collapse_ws(ret_type.syntax(), &mut detail); + } + + decl_with_detail(&it, Some(detail)) + }, + ast::Struct(it) => decl(it), + ast::Union(it) => decl(it), + ast::Enum(it) => decl(it), + ast::Variant(it) => decl(it), + ast::Trait(it) => decl(it), + ast::Module(it) => decl(it), + ast::TypeAlias(it) => decl_with_type_ref(&it, it.ty()), + ast::RecordField(it) => decl_with_type_ref(&it, it.ty()), + ast::Const(it) => decl_with_type_ref(&it, it.ty()), + ast::Static(it) => decl_with_type_ref(&it, it.ty()), + ast::Impl(it) => { + let target_type = it.self_ty()?; + let target_trait = it.trait_(); + let label = match target_trait { + None => format!("impl {}", target_type.syntax().text()), + Some(t) => { + format!("impl {} for {}", t.syntax().text(), target_type.syntax().text(),) + } + }; + + let node = StructureNode { + parent: None, + label, + navigation_range: 
target_type.syntax().text_range(), + node_range: it.syntax().text_range(), + kind: it.syntax().kind(), + detail: None, + deprecated: false, + }; + Some(node) + }, + ast::MacroCall(it) => { + match it.path().and_then(|it| it.segment()).and_then(|it| it.name_ref()) { + Some(path_segment) if path_segment.text() == "macro_rules" + => decl(it), + _ => None, + } + }, + _ => None, + } + } +} + +#[cfg(test)] +mod tests { + use expect::{expect, Expect}; + + use super::*; + + fn check(ra_fixture: &str, expect: Expect) { + let file = SourceFile::parse(ra_fixture).ok().unwrap(); + let structure = file_structure(&file); + expect.assert_debug_eq(&structure) + } + + #[test] + fn test_file_structure() { + check( + r#" +struct Foo { + x: i32 +} + +mod m { + fn bar1() {} + fn bar2(t: T) -> T {} + fn bar3(a: A, + b: B) -> Vec< + u32 + > {} +} + +enum E { X, Y(i32) } +type T = (); +static S: i32 = 92; +const C: i32 = 92; + +impl E {} + +impl fmt::Debug for E {} + +macro_rules! mc { + () => {} +} + +#[macro_export] +macro_rules! mcexp { + () => {} +} + +/// Doc comment +macro_rules! 
mcexp { + () => {} +} + +#[deprecated] +fn obsolete() {} + +#[deprecated(note = "for awhile")] +fn very_obsolete() {} +"#, + expect![[r#" + [ + StructureNode { + parent: None, + label: "Foo", + navigation_range: 8..11, + node_range: 1..26, + kind: STRUCT, + detail: None, + deprecated: false, + }, + StructureNode { + parent: Some( + 0, + ), + label: "x", + navigation_range: 18..19, + node_range: 18..24, + kind: RECORD_FIELD, + detail: Some( + "i32", + ), + deprecated: false, + }, + StructureNode { + parent: None, + label: "m", + navigation_range: 32..33, + node_range: 28..158, + kind: MODULE, + detail: None, + deprecated: false, + }, + StructureNode { + parent: Some( + 2, + ), + label: "bar1", + navigation_range: 43..47, + node_range: 40..52, + kind: FN, + detail: Some( + "fn()", + ), + deprecated: false, + }, + StructureNode { + parent: Some( + 2, + ), + label: "bar2", + navigation_range: 60..64, + node_range: 57..81, + kind: FN, + detail: Some( + "fn(t: T) -> T", + ), + deprecated: false, + }, + StructureNode { + parent: Some( + 2, + ), + label: "bar3", + navigation_range: 89..93, + node_range: 86..156, + kind: FN, + detail: Some( + "fn(a: A, b: B) -> Vec< u32 >", + ), + deprecated: false, + }, + StructureNode { + parent: None, + label: "E", + navigation_range: 165..166, + node_range: 160..180, + kind: ENUM, + detail: None, + deprecated: false, + }, + StructureNode { + parent: Some( + 6, + ), + label: "X", + navigation_range: 169..170, + node_range: 169..170, + kind: VARIANT, + detail: None, + deprecated: false, + }, + StructureNode { + parent: Some( + 6, + ), + label: "Y", + navigation_range: 172..173, + node_range: 172..178, + kind: VARIANT, + detail: None, + deprecated: false, + }, + StructureNode { + parent: None, + label: "T", + navigation_range: 186..187, + node_range: 181..193, + kind: TYPE_ALIAS, + detail: Some( + "()", + ), + deprecated: false, + }, + StructureNode { + parent: None, + label: "S", + navigation_range: 201..202, + node_range: 194..213, + 
kind: STATIC, + detail: Some( + "i32", + ), + deprecated: false, + }, + StructureNode { + parent: None, + label: "C", + navigation_range: 220..221, + node_range: 214..232, + kind: CONST, + detail: Some( + "i32", + ), + deprecated: false, + }, + StructureNode { + parent: None, + label: "impl E", + navigation_range: 239..240, + node_range: 234..243, + kind: IMPL, + detail: None, + deprecated: false, + }, + StructureNode { + parent: None, + label: "impl fmt::Debug for E", + navigation_range: 265..266, + node_range: 245..269, + kind: IMPL, + detail: None, + deprecated: false, + }, + StructureNode { + parent: None, + label: "mc", + navigation_range: 284..286, + node_range: 271..303, + kind: MACRO_CALL, + detail: None, + deprecated: false, + }, + StructureNode { + parent: None, + label: "mcexp", + navigation_range: 334..339, + node_range: 305..356, + kind: MACRO_CALL, + detail: None, + deprecated: false, + }, + StructureNode { + parent: None, + label: "mcexp", + navigation_range: 387..392, + node_range: 358..409, + kind: MACRO_CALL, + detail: None, + deprecated: false, + }, + StructureNode { + parent: None, + label: "obsolete", + navigation_range: 428..436, + node_range: 411..441, + kind: FN, + detail: Some( + "fn()", + ), + deprecated: true, + }, + StructureNode { + parent: None, + label: "very_obsolete", + navigation_range: 481..494, + node_range: 443..499, + kind: FN, + detail: Some( + "fn()", + ), + deprecated: true, + }, + ] + "#]], + ); + } +} diff --git a/crates/ide/src/folding_ranges.rs b/crates/ide/src/folding_ranges.rs new file mode 100644 index 0000000000..7523aec557 --- /dev/null +++ b/crates/ide/src/folding_ranges.rs @@ -0,0 +1,422 @@ +//! 
FIXME: write short doc here + +use rustc_hash::FxHashSet; + +use syntax::{ + ast::{self, AstNode, AstToken, VisibilityOwner}, + Direction, NodeOrToken, SourceFile, + SyntaxKind::{self, *}, + SyntaxNode, TextRange, +}; + +#[derive(Debug, PartialEq, Eq)] +pub enum FoldKind { + Comment, + Imports, + Mods, + Block, + ArgList, +} + +#[derive(Debug)] +pub struct Fold { + pub range: TextRange, + pub kind: FoldKind, +} + +pub(crate) fn folding_ranges(file: &SourceFile) -> Vec { + let mut res = vec![]; + let mut visited_comments = FxHashSet::default(); + let mut visited_imports = FxHashSet::default(); + let mut visited_mods = FxHashSet::default(); + + for element in file.syntax().descendants_with_tokens() { + // Fold items that span multiple lines + if let Some(kind) = fold_kind(element.kind()) { + let is_multiline = match &element { + NodeOrToken::Node(node) => node.text().contains_char('\n'), + NodeOrToken::Token(token) => token.text().contains('\n'), + }; + if is_multiline { + res.push(Fold { range: element.text_range(), kind }); + continue; + } + } + + match element { + NodeOrToken::Token(token) => { + // Fold groups of comments + if let Some(comment) = ast::Comment::cast(token) { + if !visited_comments.contains(&comment) { + if let Some(range) = + contiguous_range_for_comment(comment, &mut visited_comments) + { + res.push(Fold { range, kind: FoldKind::Comment }) + } + } + } + } + NodeOrToken::Node(node) => { + // Fold groups of imports + if node.kind() == USE && !visited_imports.contains(&node) { + if let Some(range) = contiguous_range_for_group(&node, &mut visited_imports) { + res.push(Fold { range, kind: FoldKind::Imports }) + } + } + + // Fold groups of mods + if node.kind() == MODULE && !has_visibility(&node) && !visited_mods.contains(&node) + { + if let Some(range) = + contiguous_range_for_group_unless(&node, has_visibility, &mut visited_mods) + { + res.push(Fold { range, kind: FoldKind::Mods }) + } + } + } + } + } + + res +} + +fn fold_kind(kind: SyntaxKind) -> 
Option { + match kind { + COMMENT => Some(FoldKind::Comment), + USE => Some(FoldKind::Imports), + ARG_LIST | PARAM_LIST => Some(FoldKind::ArgList), + ASSOC_ITEM_LIST + | RECORD_FIELD_LIST + | RECORD_PAT_FIELD_LIST + | RECORD_EXPR_FIELD_LIST + | ITEM_LIST + | EXTERN_ITEM_LIST + | USE_TREE_LIST + | BLOCK_EXPR + | MATCH_ARM_LIST + | VARIANT_LIST + | TOKEN_TREE => Some(FoldKind::Block), + _ => None, + } +} + +fn has_visibility(node: &SyntaxNode) -> bool { + ast::Module::cast(node.clone()).and_then(|m| m.visibility()).is_some() +} + +fn contiguous_range_for_group( + first: &SyntaxNode, + visited: &mut FxHashSet, +) -> Option { + contiguous_range_for_group_unless(first, |_| false, visited) +} + +fn contiguous_range_for_group_unless( + first: &SyntaxNode, + unless: impl Fn(&SyntaxNode) -> bool, + visited: &mut FxHashSet, +) -> Option { + visited.insert(first.clone()); + + let mut last = first.clone(); + for element in first.siblings_with_tokens(Direction::Next) { + let node = match element { + NodeOrToken::Token(token) => { + if let Some(ws) = ast::Whitespace::cast(token) { + if !ws.spans_multiple_lines() { + // Ignore whitespace without blank lines + continue; + } + } + // There is a blank line or another token, which means that the + // group ends here + break; + } + NodeOrToken::Node(node) => node, + }; + + // Stop if we find a node that doesn't belong to the group + if node.kind() != first.kind() || unless(&node) { + break; + } + + visited.insert(node.clone()); + last = node; + } + + if first != &last { + Some(TextRange::new(first.text_range().start(), last.text_range().end())) + } else { + // The group consists of only one element, therefore it cannot be folded + None + } +} + +fn contiguous_range_for_comment( + first: ast::Comment, + visited: &mut FxHashSet, +) -> Option { + visited.insert(first.clone()); + + // Only fold comments of the same flavor + let group_kind = first.kind(); + if !group_kind.shape.is_line() { + return None; + } + + let mut last = 
first.clone(); + for element in first.syntax().siblings_with_tokens(Direction::Next) { + match element { + NodeOrToken::Token(token) => { + if let Some(ws) = ast::Whitespace::cast(token.clone()) { + if !ws.spans_multiple_lines() { + // Ignore whitespace without blank lines + continue; + } + } + if let Some(c) = ast::Comment::cast(token) { + if c.kind() == group_kind { + visited.insert(c.clone()); + last = c; + continue; + } + } + // The comment group ends because either: + // * An element of a different kind was reached + // * A comment of a different flavor was reached + break; + } + NodeOrToken::Node(_) => break, + }; + } + + if first != last { + Some(TextRange::new(first.syntax().text_range().start(), last.syntax().text_range().end())) + } else { + // The group consists of only one element, therefore it cannot be folded + None + } +} + +#[cfg(test)] +mod tests { + use test_utils::extract_tags; + + use super::*; + + fn check(ra_fixture: &str) { + let (ranges, text) = extract_tags(ra_fixture, "fold"); + + let parse = SourceFile::parse(&text); + let folds = folding_ranges(&parse.tree()); + assert_eq!( + folds.len(), + ranges.len(), + "The amount of folds is different than the expected amount" + ); + + for (fold, (range, attr)) in folds.iter().zip(ranges.into_iter()) { + assert_eq!(fold.range.start(), range.start()); + assert_eq!(fold.range.end(), range.end()); + + let kind = match fold.kind { + FoldKind::Comment => "comment", + FoldKind::Imports => "imports", + FoldKind::Mods => "mods", + FoldKind::Block => "block", + FoldKind::ArgList => "arglist", + }; + assert_eq!(kind, &attr.unwrap()); + } + } + + #[test] + fn test_fold_comments() { + check( + r#" +// Hello +// this is a multiline +// comment +// + +// But this is not + +fn main() { + // We should + // also + // fold + // this one. + //! But this one is different + //! 
because it has another flavor + /* As does this + multiline comment */ +}"#, + ); + } + + #[test] + fn test_fold_imports() { + check( + r#" +use std::{ + str, + vec, + io as iop +}; + +fn main() { +}"#, + ); + } + + #[test] + fn test_fold_mods() { + check( + r#" + +pub mod foo; +mod after_pub; +mod after_pub_next; + +mod before_pub; +mod before_pub_next; +pub mod bar; + +mod not_folding_single; +pub mod foobar; +pub not_folding_single_next; + +#[cfg(test)] +mod with_attribute; +mod with_attribute_next; + +fn main() { +}"#, + ); + } + + #[test] + fn test_fold_import_groups() { + check( + r#" +use std::str; +use std::vec; +use std::io as iop; + +use std::mem; +use std::f64; + +use std::collections::HashMap; +// Some random comment +use std::collections::VecDeque; + +fn main() { +}"#, + ); + } + + #[test] + fn test_fold_import_and_groups() { + check( + r#" +use std::str; +use std::vec; +use std::io as iop; + +use std::mem; +use std::f64; + +use std::collections::{ + HashMap, + VecDeque, +}; +// Some random comment + +fn main() { +}"#, + ); + } + + #[test] + fn test_folds_structs() { + check( + r#" +struct Foo { +} +"#, + ); + } + + #[test] + fn test_folds_traits() { + check( + r#" +trait Foo { +} +"#, + ); + } + + #[test] + fn test_folds_macros() { + check( + r#" +macro_rules! 
foo { + ($($tt:tt)*) => { $($tt)* } +} +"#, + ); + } + + #[test] + fn test_fold_match_arms() { + check( + r#" +fn main() { + match 0 { + 0 => 0, + _ => 1, + } +} +"#, + ); + } + + #[test] + fn fold_big_calls() { + check( + r#" +fn main() { + frobnicate( + 1, + 2, + 3, + ) +} +"#, + ) + } + + #[test] + fn fold_record_literals() { + check( + r#" +const _: S = S { + +}; +"#, + ) + } + + #[test] + fn fold_multiline_params() { + check( + r#" +fn foo( + x: i32, + y: String, +) {} +"#, + ) + } +} diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs new file mode 100644 index 0000000000..15e9b7fad8 --- /dev/null +++ b/crates/ide/src/goto_definition.rs @@ -0,0 +1,989 @@ +use hir::Semantics; +use ide_db::{ + defs::{classify_name, classify_name_ref}, + symbol_index, RootDatabase, +}; +use syntax::{ + ast::{self}, + match_ast, AstNode, + SyntaxKind::*, + SyntaxToken, TokenAtOffset, T, +}; + +use crate::{ + display::{ToNav, TryToNav}, + FilePosition, NavigationTarget, RangeInfo, +}; + +// Feature: Go to Definition +// +// Navigates to the definition of an identifier. +// +// |=== +// | Editor | Shortcut +// +// | VS Code | kbd:[F12] +// |=== +pub(crate) fn goto_definition( + db: &RootDatabase, + position: FilePosition, +) -> Option>> { + let sema = Semantics::new(db); + let file = sema.parse(position.file_id).syntax().clone(); + let original_token = pick_best(file.token_at_offset(position.offset))?; + let token = sema.descend_into_macros(original_token.clone()); + let parent = token.parent(); + + let nav_targets = match_ast! 
{ + match parent { + ast::NameRef(name_ref) => { + reference_definition(&sema, &name_ref).to_vec() + }, + ast::Name(name) => { + let def = classify_name(&sema, &name)?.definition(sema.db); + let nav = def.try_to_nav(sema.db)?; + vec![nav] + }, + _ => return None, + } + }; + + Some(RangeInfo::new(original_token.text_range(), nav_targets)) +} + +fn pick_best(tokens: TokenAtOffset) -> Option { + return tokens.max_by_key(priority); + fn priority(n: &SyntaxToken) -> usize { + match n.kind() { + IDENT | INT_NUMBER | T![self] => 2, + kind if kind.is_trivia() => 0, + _ => 1, + } + } +} + +#[derive(Debug)] +pub(crate) enum ReferenceResult { + Exact(NavigationTarget), + Approximate(Vec), +} + +impl ReferenceResult { + fn to_vec(self) -> Vec { + match self { + ReferenceResult::Exact(target) => vec![target], + ReferenceResult::Approximate(vec) => vec, + } + } +} + +pub(crate) fn reference_definition( + sema: &Semantics, + name_ref: &ast::NameRef, +) -> ReferenceResult { + let name_kind = classify_name_ref(sema, name_ref); + if let Some(def) = name_kind { + let def = def.definition(sema.db); + return match def.try_to_nav(sema.db) { + Some(nav) => ReferenceResult::Exact(nav), + None => ReferenceResult::Approximate(Vec::new()), + }; + } + + // Fallback index based approach: + let navs = symbol_index::index_resolve(sema.db, name_ref) + .into_iter() + .map(|s| s.to_nav(sema.db)) + .collect(); + ReferenceResult::Approximate(navs) +} + +#[cfg(test)] +mod tests { + use base_db::FileRange; + use syntax::{TextRange, TextSize}; + + use crate::mock_analysis::MockAnalysis; + + fn check(ra_fixture: &str) { + let (mock, position) = MockAnalysis::with_files_and_position(ra_fixture); + let (mut expected, data) = mock.annotation(); + let analysis = mock.analysis(); + match data.as_str() { + "" => (), + "file" => { + expected.range = + TextRange::up_to(TextSize::of(&*analysis.file_text(expected.file_id).unwrap())) + } + data => panic!("bad data: {}", data), + } + + let mut navs = + 
analysis.goto_definition(position).unwrap().expect("no definition found").info; + if navs.len() == 0 { + panic!("unresolved reference") + } + assert_eq!(navs.len(), 1); + + let nav = navs.pop().unwrap(); + assert_eq!(expected, FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() }); + } + + #[test] + fn goto_def_for_extern_crate() { + check( + r#" + //- /main.rs + extern crate std<|>; + //- /std/lib.rs + // empty + //^ file + "#, + ) + } + + #[test] + fn goto_def_for_renamed_extern_crate() { + check( + r#" + //- /main.rs + extern crate std as abc<|>; + //- /std/lib.rs + // empty + //^ file + "#, + ) + } + + #[test] + fn goto_def_in_items() { + check( + r#" +struct Foo; + //^^^ +enum E { X(Foo<|>) } +"#, + ); + } + + #[test] + fn goto_def_at_start_of_item() { + check( + r#" +struct Foo; + //^^^ +enum E { X(<|>Foo) } +"#, + ); + } + + #[test] + fn goto_definition_resolves_correct_name() { + check( + r#" +//- /lib.rs +use a::Foo; +mod a; +mod b; +enum E { X(Foo<|>) } + +//- /a.rs +struct Foo; + //^^^ +//- /b.rs +struct Foo; +"#, + ); + } + + #[test] + fn goto_def_for_module_declaration() { + check( + r#" +//- /lib.rs +mod <|>foo; + +//- /foo.rs +// empty +//^ file +"#, + ); + + check( + r#" +//- /lib.rs +mod <|>foo; + +//- /foo/mod.rs +// empty +//^ file +"#, + ); + } + + #[test] + fn goto_def_for_macros() { + check( + r#" +macro_rules! foo { () => { () } } + //^^^ +fn bar() { + <|>foo!(); +} +"#, + ); + } + + #[test] + fn goto_def_for_macros_from_other_crates() { + check( + r#" +//- /lib.rs +use foo::foo; +fn bar() { + <|>foo!(); +} + +//- /foo/lib.rs +#[macro_export] +macro_rules! foo { () => { () } } + //^^^ +"#, + ); + } + + #[test] + fn goto_def_for_macros_in_use_tree() { + check( + r#" +//- /lib.rs +use foo::foo<|>; + +//- /foo/lib.rs +#[macro_export] +macro_rules! foo { () => { () } } + //^^^ +"#, + ); + } + + #[test] + fn goto_def_for_macro_defined_fn_with_arg() { + check( + r#" +//- /lib.rs +macro_rules! 
define_fn { + ($name:ident) => (fn $name() {}) +} + +define_fn!(foo); + //^^^ + +fn bar() { + <|>foo(); +} +"#, + ); + } + + #[test] + fn goto_def_for_macro_defined_fn_no_arg() { + check( + r#" +//- /lib.rs +macro_rules! define_fn { + () => (fn foo() {}) +} + + define_fn!(); +//^^^^^^^^^^^^^ + +fn bar() { + <|>foo(); +} +"#, + ); + } + + #[test] + fn goto_definition_works_for_macro_inside_pattern() { + check( + r#" +//- /lib.rs +macro_rules! foo {() => {0}} + //^^^ + +fn bar() { + match (0,1) { + (<|>foo!(), _) => {} + } +} +"#, + ); + } + + #[test] + fn goto_definition_works_for_macro_inside_match_arm_lhs() { + check( + r#" +//- /lib.rs +macro_rules! foo {() => {0}} + //^^^ +fn bar() { + match 0 { + <|>foo!() => {} + } +} +"#, + ); + } + + #[test] + fn goto_def_for_use_alias() { + check( + r#" +//- /lib.rs +use foo as bar<|>; + +//- /foo/lib.rs +// empty +//^ file +"#, + ); + } + + #[test] + fn goto_def_for_use_alias_foo_macro() { + check( + r#" +//- /lib.rs +use foo::foo as bar<|>; + +//- /foo/lib.rs +#[macro_export] +macro_rules! foo { () => { () } } + //^^^ +"#, + ); + } + + #[test] + fn goto_def_for_methods() { + check( + r#" +//- /lib.rs +struct Foo; +impl Foo { + fn frobnicate(&self) { } + //^^^^^^^^^^ +} + +fn bar(foo: &Foo) { + foo.frobnicate<|>(); +} +"#, + ); + } + + #[test] + fn goto_def_for_fields() { + check( + r#" +struct Foo { + spam: u32, +} //^^^^ + +fn bar(foo: &Foo) { + foo.spam<|>; +} +"#, + ); + } + + #[test] + fn goto_def_for_record_fields() { + check( + r#" +//- /lib.rs +struct Foo { + spam: u32, +} //^^^^ + +fn bar() -> Foo { + Foo { + spam<|>: 0, + } +} +"#, + ); + } + + #[test] + fn goto_def_for_record_pat_fields() { + check( + r#" +//- /lib.rs +struct Foo { + spam: u32, +} //^^^^ + +fn bar(foo: Foo) -> Foo { + let Foo { spam<|>: _, } = foo +} +"#, + ); + } + + #[test] + fn goto_def_for_record_fields_macros() { + check( + r" +macro_rules! 
m { () => { 92 };} +struct Foo { spam: u32 } + //^^^^ + +fn bar() -> Foo { + Foo { spam<|>: m!() } +} +", + ); + } + + #[test] + fn goto_for_tuple_fields() { + check( + r#" +struct Foo(u32); + //^^^ + +fn bar() { + let foo = Foo(0); + foo.<|>0; +} +"#, + ); + } + + #[test] + fn goto_def_for_ufcs_inherent_methods() { + check( + r#" +struct Foo; +impl Foo { + fn frobnicate() { } +} //^^^^^^^^^^ + +fn bar(foo: &Foo) { + Foo::frobnicate<|>(); +} +"#, + ); + } + + #[test] + fn goto_def_for_ufcs_trait_methods_through_traits() { + check( + r#" +trait Foo { + fn frobnicate(); +} //^^^^^^^^^^ + +fn bar() { + Foo::frobnicate<|>(); +} +"#, + ); + } + + #[test] + fn goto_def_for_ufcs_trait_methods_through_self() { + check( + r#" +struct Foo; +trait Trait { + fn frobnicate(); +} //^^^^^^^^^^ +impl Trait for Foo {} + +fn bar() { + Foo::frobnicate<|>(); +} +"#, + ); + } + + #[test] + fn goto_definition_on_self() { + check( + r#" +struct Foo; +impl Foo { + //^^^ + pub fn new() -> Self { + Self<|> {} + } +} +"#, + ); + check( + r#" +struct Foo; +impl Foo { + //^^^ + pub fn new() -> Self<|> { + Self {} + } +} +"#, + ); + + check( + r#" +enum Foo { A } +impl Foo { + //^^^ + pub fn new() -> Self<|> { + Foo::A + } +} +"#, + ); + + check( + r#" +enum Foo { A } +impl Foo { + //^^^ + pub fn thing(a: &Self<|>) { + } +} +"#, + ); + } + + #[test] + fn goto_definition_on_self_in_trait_impl() { + check( + r#" +struct Foo; +trait Make { + fn new() -> Self; +} +impl Make for Foo { + //^^^ + fn new() -> Self { + Self<|> {} + } +} +"#, + ); + + check( + r#" +struct Foo; +trait Make { + fn new() -> Self; +} +impl Make for Foo { + //^^^ + fn new() -> Self<|> { + Self {} + } +} +"#, + ); + } + + #[test] + fn goto_def_when_used_on_definition_name_itself() { + check( + r#" +struct Foo<|> { value: u32 } + //^^^ + "#, + ); + + check( + r#" +struct Foo { + field<|>: string, +} //^^^^^ +"#, + ); + + check( + r#" +fn foo_test<|>() { } + //^^^^^^^^ +"#, + ); + + check( + r#" +enum Foo<|> { Variant } + //^^^ 
+"#, + ); + + check( + r#" +enum Foo { + Variant1, + Variant2<|>, + //^^^^^^^^ + Variant3, +} +"#, + ); + + check( + r#" +static INNER<|>: &str = ""; + //^^^^^ +"#, + ); + + check( + r#" +const INNER<|>: &str = ""; + //^^^^^ +"#, + ); + + check( + r#" +type Thing<|> = Option<()>; + //^^^^^ +"#, + ); + + check( + r#" +trait Foo<|> { } + //^^^ +"#, + ); + + check( + r#" +mod bar<|> { } + //^^^ +"#, + ); + } + + #[test] + fn goto_from_macro() { + check( + r#" +macro_rules! id { + ($($tt:tt)*) => { $($tt)* } +} +fn foo() {} + //^^^ +id! { + fn bar() { + fo<|>o(); + } +} +mod confuse_index { fn foo(); } +"#, + ); + } + + #[test] + fn goto_through_format() { + check( + r#" +#[macro_export] +macro_rules! format { + ($($arg:tt)*) => ($crate::fmt::format($crate::__export::format_args!($($arg)*))) +} +#[rustc_builtin_macro] +#[macro_export] +macro_rules! format_args { + ($fmt:expr) => ({ /* compiler built-in */ }); + ($fmt:expr, $($args:tt)*) => ({ /* compiler built-in */ }) +} +pub mod __export { + pub use crate::format_args; + fn foo() {} // for index confusion +} +fn foo() -> i8 {} + //^^^ +fn test() { + format!("{}", fo<|>o()) +} +"#, + ); + } + + #[test] + fn goto_for_type_param() { + check( + r#" +struct Foo { t: <|>T } + //^ +"#, + ); + } + + #[test] + fn goto_within_macro() { + check( + r#" +macro_rules! id { + ($($tt:tt)*) => ($($tt)*) +} + +fn foo() { + let x = 1; + //^ + id!({ + let y = <|>x; + let z = y; + }); +} +"#, + ); + + check( + r#" +macro_rules! id { + ($($tt:tt)*) => ($($tt)*) +} + +fn foo() { + let x = 1; + id!({ + let y = x; + //^ + let z = <|>y; + }); +} +"#, + ); + } + + #[test] + fn goto_def_in_local_fn() { + check( + r#" +fn main() { + fn foo() { + let x = 92; + //^ + <|>x; + } +} +"#, + ); + } + + #[test] + fn goto_def_in_local_macro() { + check( + r#" +fn bar() { + macro_rules! 
foo { () => { () } } + //^^^ + <|>foo!(); +} +"#, + ); + } + + #[test] + fn goto_def_for_field_init_shorthand() { + check( + r#" +struct Foo { x: i32 } +fn main() { + let x = 92; + //^ + Foo { x<|> }; +} +"#, + ) + } + + #[test] + fn goto_def_for_enum_variant_field() { + check( + r#" +enum Foo { + Bar { x: i32 } +} //^ +fn baz(foo: Foo) { + match foo { + Foo::Bar { x<|> } => x + }; +} +"#, + ); + } + + #[test] + fn goto_def_for_enum_variant_self_pattern_const() { + check( + r#" +enum Foo { Bar } + //^^^ +impl Foo { + fn baz(self) { + match self { Self::Bar<|> => {} } + } +} +"#, + ); + } + + #[test] + fn goto_def_for_enum_variant_self_pattern_record() { + check( + r#" +enum Foo { Bar { val: i32 } } + //^^^ +impl Foo { + fn baz(self) -> i32 { + match self { Self::Bar<|> { val } => {} } + } +} +"#, + ); + } + + #[test] + fn goto_def_for_enum_variant_self_expr_const() { + check( + r#" +enum Foo { Bar } + //^^^ +impl Foo { + fn baz(self) { Self::Bar<|>; } +} +"#, + ); + } + + #[test] + fn goto_def_for_enum_variant_self_expr_record() { + check( + r#" +enum Foo { Bar { val: i32 } } + //^^^ +impl Foo { + fn baz(self) { Self::Bar<|> {val: 4}; } +} +"#, + ); + } + + #[test] + fn goto_def_for_type_alias_generic_parameter() { + check( + r#" +type Alias = T<|>; + //^ +"#, + ) + } + + #[test] + fn goto_def_for_macro_container() { + check( + r#" +//- /lib.rs +foo::module<|>::mac!(); + +//- /foo/lib.rs +pub mod module { + //^^^^^^ + #[macro_export] + macro_rules! 
_mac { () => { () } } + pub use crate::_mac as mac; +} +"#, + ); + } + + #[test] + fn goto_def_for_assoc_ty_in_path() { + check( + r#" +trait Iterator { + type Item; + //^^^^ +} + +fn f() -> impl Iterator = u8> {} +"#, + ); + } + + #[test] + fn goto_def_for_assoc_ty_in_path_multiple() { + check( + r#" +trait Iterator { + type A; + //^ + type B; +} + +fn f() -> impl Iterator = u8, B = ()> {} +"#, + ); + check( + r#" +trait Iterator { + type A; + type B; + //^ +} + +fn f() -> impl Iterator = ()> {} +"#, + ); + } + + #[test] + fn goto_def_for_assoc_ty_ufcs() { + check( + r#" +trait Iterator { + type Item; + //^^^^ +} + +fn g() -> <() as Iterator = ()>>::Item {} +"#, + ); + } + + #[test] + fn goto_def_for_assoc_ty_ufcs_multiple() { + check( + r#" +trait Iterator { + type A; + //^ + type B; +} + +fn g() -> <() as Iterator = (), B = u8>>::B {} +"#, + ); + check( + r#" +trait Iterator { + type A; + type B; + //^ +} + +fn g() -> <() as Iterator = u8>>::A {} +"#, + ); + } +} diff --git a/crates/ide/src/goto_implementation.rs b/crates/ide/src/goto_implementation.rs new file mode 100644 index 0000000000..f503f4ec5f --- /dev/null +++ b/crates/ide/src/goto_implementation.rs @@ -0,0 +1,229 @@ +use hir::{Crate, ImplDef, Semantics}; +use ide_db::RootDatabase; +use syntax::{algo::find_node_at_offset, ast, AstNode}; + +use crate::{display::ToNav, FilePosition, NavigationTarget, RangeInfo}; + +// Feature: Go to Implementation +// +// Navigates to the impl block of structs, enums or traits. Also implemented as a code lens. 
+// +// |=== +// | Editor | Shortcut +// +// | VS Code | kbd:[Ctrl+F12] +// |=== +pub(crate) fn goto_implementation( + db: &RootDatabase, + position: FilePosition, +) -> Option>> { + let sema = Semantics::new(db); + let source_file = sema.parse(position.file_id); + let syntax = source_file.syntax().clone(); + + let krate = sema.to_module_def(position.file_id)?.krate(); + + if let Some(nominal_def) = find_node_at_offset::(&syntax, position.offset) { + return Some(RangeInfo::new( + nominal_def.syntax().text_range(), + impls_for_def(&sema, &nominal_def, krate)?, + )); + } else if let Some(trait_def) = find_node_at_offset::(&syntax, position.offset) { + return Some(RangeInfo::new( + trait_def.syntax().text_range(), + impls_for_trait(&sema, &trait_def, krate)?, + )); + } + + None +} + +fn impls_for_def( + sema: &Semantics, + node: &ast::AdtDef, + krate: Crate, +) -> Option> { + let ty = match node { + ast::AdtDef::Struct(def) => sema.to_def(def)?.ty(sema.db), + ast::AdtDef::Enum(def) => sema.to_def(def)?.ty(sema.db), + ast::AdtDef::Union(def) => sema.to_def(def)?.ty(sema.db), + }; + + let impls = ImplDef::all_in_crate(sema.db, krate); + + Some( + impls + .into_iter() + .filter(|impl_def| ty.is_equal_for_find_impls(&impl_def.target_ty(sema.db))) + .map(|imp| imp.to_nav(sema.db)) + .collect(), + ) +} + +fn impls_for_trait( + sema: &Semantics, + node: &ast::Trait, + krate: Crate, +) -> Option> { + let tr = sema.to_def(node)?; + + let impls = ImplDef::for_trait(sema.db, krate, tr); + + Some(impls.into_iter().map(|imp| imp.to_nav(sema.db)).collect()) +} + +#[cfg(test)] +mod tests { + use base_db::FileRange; + + use crate::mock_analysis::MockAnalysis; + + fn check(ra_fixture: &str) { + let (mock, position) = MockAnalysis::with_files_and_position(ra_fixture); + let annotations = mock.annotations(); + let analysis = mock.analysis(); + + let navs = analysis.goto_implementation(position).unwrap().unwrap().info; + + let key = |frange: &FileRange| (frange.file_id, 
frange.range.start()); + + let mut expected = annotations + .into_iter() + .map(|(range, data)| { + assert!(data.is_empty()); + range + }) + .collect::>(); + expected.sort_by_key(key); + + let mut actual = navs + .into_iter() + .map(|nav| FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() }) + .collect::>(); + actual.sort_by_key(key); + + assert_eq!(expected, actual); + } + + #[test] + fn goto_implementation_works() { + check( + r#" +struct Foo<|>; +impl Foo {} + //^^^ +"#, + ); + } + + #[test] + fn goto_implementation_works_multiple_blocks() { + check( + r#" +struct Foo<|>; +impl Foo {} + //^^^ +impl Foo {} + //^^^ +"#, + ); + } + + #[test] + fn goto_implementation_works_multiple_mods() { + check( + r#" +struct Foo<|>; +mod a { + impl super::Foo {} + //^^^^^^^^^^ +} +mod b { + impl super::Foo {} + //^^^^^^^^^^ +} +"#, + ); + } + + #[test] + fn goto_implementation_works_multiple_files() { + check( + r#" +//- /lib.rs +struct Foo<|>; +mod a; +mod b; +//- /a.rs +impl crate::Foo {} + //^^^^^^^^^^ +//- /b.rs +impl crate::Foo {} + //^^^^^^^^^^ +"#, + ); + } + + #[test] + fn goto_implementation_for_trait() { + check( + r#" +trait T<|> {} +struct Foo; +impl T for Foo {} + //^^^ +"#, + ); + } + + #[test] + fn goto_implementation_for_trait_multiple_files() { + check( + r#" +//- /lib.rs +trait T<|> {}; +struct Foo; +mod a; +mod b; +//- /a.rs +impl crate::T for crate::Foo {} + //^^^^^^^^^^ +//- /b.rs +impl crate::T for crate::Foo {} + //^^^^^^^^^^ + "#, + ); + } + + #[test] + fn goto_implementation_all_impls() { + check( + r#" +//- /lib.rs +trait T {} +struct Foo<|>; +impl Foo {} + //^^^ +impl T for Foo {} + //^^^ +impl T for &Foo {} + //^^^^ +"#, + ); + } + + #[test] + fn goto_implementation_to_builtin_derive() { + check( + r#" + #[derive(Copy)] +//^^^^^^^^^^^^^^^ +struct Foo<|>; + +mod marker { + trait Copy {} +} +"#, + ); + } +} diff --git a/crates/ide/src/goto_type_definition.rs b/crates/ide/src/goto_type_definition.rs new file mode 100644 index 
0000000000..4a151b1506 --- /dev/null +++ b/crates/ide/src/goto_type_definition.rs @@ -0,0 +1,151 @@ +use ide_db::RootDatabase; +use syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxToken, TokenAtOffset, T}; + +use crate::{display::ToNav, FilePosition, NavigationTarget, RangeInfo}; + +// Feature: Go to Type Definition +// +// Navigates to the type of an identifier. +// +// |=== +// | Editor | Action Name +// +// | VS Code | **Go to Type Definition* +// |=== +pub(crate) fn goto_type_definition( + db: &RootDatabase, + position: FilePosition, +) -> Option>> { + let sema = hir::Semantics::new(db); + + let file: ast::SourceFile = sema.parse(position.file_id); + let token: SyntaxToken = pick_best(file.syntax().token_at_offset(position.offset))?; + let token: SyntaxToken = sema.descend_into_macros(token); + + let (ty, node) = sema.ancestors_with_macros(token.parent()).find_map(|node| { + let ty = match_ast! { + match node { + ast::Expr(it) => sema.type_of_expr(&it)?, + ast::Pat(it) => sema.type_of_pat(&it)?, + ast::SelfParam(it) => sema.type_of_self(&it)?, + _ => return None, + } + }; + + Some((ty, node)) + })?; + + let adt_def = ty.autoderef(db).filter_map(|ty| ty.as_adt()).last()?; + + let nav = adt_def.to_nav(db); + Some(RangeInfo::new(node.text_range(), vec![nav])) +} + +fn pick_best(tokens: TokenAtOffset) -> Option { + return tokens.max_by_key(priority); + fn priority(n: &SyntaxToken) -> usize { + match n.kind() { + IDENT | INT_NUMBER | T![self] => 2, + kind if kind.is_trivia() => 0, + _ => 1, + } + } +} + +#[cfg(test)] +mod tests { + use base_db::FileRange; + + use crate::mock_analysis::MockAnalysis; + + fn check(ra_fixture: &str) { + let (mock, position) = MockAnalysis::with_files_and_position(ra_fixture); + let (expected, data) = mock.annotation(); + assert!(data.is_empty()); + let analysis = mock.analysis(); + + let mut navs = analysis.goto_type_definition(position).unwrap().unwrap().info; + assert_eq!(navs.len(), 1); + let nav = navs.pop().unwrap(); + 
assert_eq!(expected, FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() }); + } + + #[test] + fn goto_type_definition_works_simple() { + check( + r#" +struct Foo; + //^^^ +fn foo() { + let f: Foo; f<|> +} +"#, + ); + } + + #[test] + fn goto_type_definition_works_simple_ref() { + check( + r#" +struct Foo; + //^^^ +fn foo() { + let f: &Foo; f<|> +} +"#, + ); + } + + #[test] + fn goto_type_definition_works_through_macro() { + check( + r#" +macro_rules! id { ($($tt:tt)*) => { $($tt)* } } +struct Foo {} + //^^^ +id! { + fn bar() { let f<|> = Foo {}; } +} +"#, + ); + } + + #[test] + fn goto_type_definition_for_param() { + check( + r#" +struct Foo; + //^^^ +fn foo(<|>f: Foo) {} +"#, + ); + } + + #[test] + fn goto_type_definition_for_tuple_field() { + check( + r#" +struct Foo; + //^^^ +struct Bar(Foo); +fn foo() { + let bar = Bar(Foo); + bar.<|>0; +} +"#, + ); + } + + #[test] + fn goto_def_for_self_param() { + check( + r#" +struct Foo; + //^^^ +impl Foo { + fn f(&self<|>) {} +} +"#, + ) + } +} diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs new file mode 100644 index 0000000000..300c00edc8 --- /dev/null +++ b/crates/ide/src/hover.rs @@ -0,0 +1,2480 @@ +use base_db::SourceDatabase; +use hir::{ + Adt, AsAssocItem, AssocItemContainer, Documentation, FieldSource, HasSource, HirDisplay, + Module, ModuleDef, ModuleSource, Semantics, +}; +use ide_db::{ + defs::{classify_name, classify_name_ref, Definition}, + RootDatabase, +}; +use itertools::Itertools; +use stdx::format_to; +use syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxToken, TokenAtOffset, T}; +use test_utils::mark; + +use crate::{ + display::{macro_label, ShortLabel, ToNav, TryToNav}, + markup::Markup, + runnables::runnable, + FileId, FilePosition, NavigationTarget, RangeInfo, Runnable, +}; + +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct HoverConfig { + pub implementations: bool, + pub run: bool, + pub debug: bool, + pub goto_type_def: bool, +} + +impl Default for HoverConfig 
{ + fn default() -> Self { + Self { implementations: true, run: true, debug: true, goto_type_def: true } + } +} + +impl HoverConfig { + pub const NO_ACTIONS: Self = + Self { implementations: false, run: false, debug: false, goto_type_def: false }; + + pub fn any(&self) -> bool { + self.implementations || self.runnable() || self.goto_type_def + } + + pub fn none(&self) -> bool { + !self.any() + } + + pub fn runnable(&self) -> bool { + self.run || self.debug + } +} + +#[derive(Debug, Clone)] +pub enum HoverAction { + Runnable(Runnable), + Implementaion(FilePosition), + GoToType(Vec), +} + +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct HoverGotoTypeData { + pub mod_path: String, + pub nav: NavigationTarget, +} + +/// Contains the results when hovering over an item +#[derive(Debug, Default)] +pub struct HoverResult { + pub markup: Markup, + pub actions: Vec, +} + +// Feature: Hover +// +// Shows additional information, like type of an expression or documentation for definition when "focusing" code. +// Focusing is usually hovering with a mouse, but can also be triggered with a shortcut. +pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option> { + let sema = Semantics::new(db); + let file = sema.parse(position.file_id).syntax().clone(); + let token = pick_best(file.token_at_offset(position.offset))?; + let token = sema.descend_into_macros(token); + + let mut res = HoverResult::default(); + + let node = token.parent(); + let definition = match_ast! 
{ + match node { + ast::NameRef(name_ref) => classify_name_ref(&sema, &name_ref).map(|d| d.definition(sema.db)), + ast::Name(name) => classify_name(&sema, &name).map(|d| d.definition(sema.db)), + _ => None, + } + }; + if let Some(definition) = definition { + if let Some(markup) = hover_for_definition(db, definition) { + res.markup = markup; + if let Some(action) = show_implementations_action(db, definition) { + res.actions.push(action); + } + + if let Some(action) = runnable_action(&sema, definition, position.file_id) { + res.actions.push(action); + } + + if let Some(action) = goto_type_action(db, definition) { + res.actions.push(action); + } + + let range = sema.original_range(&node).range; + return Some(RangeInfo::new(range, res)); + } + } + + let node = token + .ancestors() + .find(|n| ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some())?; + + let ty = match_ast! { + match node { + ast::Expr(it) => sema.type_of_expr(&it)?, + ast::Pat(it) => sema.type_of_pat(&it)?, + // If this node is a MACRO_CALL, it means that `descend_into_macros` failed to resolve. + // (e.g expanding a builtin macro). So we give up here. 
+ ast::MacroCall(_it) => return None, + _ => return None, + } + }; + + res.markup = Markup::fenced_block(&ty.display(db)); + let range = sema.original_range(&node).range; + Some(RangeInfo::new(range, res)) +} + +fn show_implementations_action(db: &RootDatabase, def: Definition) -> Option { + fn to_action(nav_target: NavigationTarget) -> HoverAction { + HoverAction::Implementaion(FilePosition { + file_id: nav_target.file_id, + offset: nav_target.focus_or_full_range().start(), + }) + } + + match def { + Definition::ModuleDef(it) => match it { + ModuleDef::Adt(Adt::Struct(it)) => Some(to_action(it.to_nav(db))), + ModuleDef::Adt(Adt::Union(it)) => Some(to_action(it.to_nav(db))), + ModuleDef::Adt(Adt::Enum(it)) => Some(to_action(it.to_nav(db))), + ModuleDef::Trait(it) => Some(to_action(it.to_nav(db))), + _ => None, + }, + _ => None, + } +} + +fn runnable_action( + sema: &Semantics, + def: Definition, + file_id: FileId, +) -> Option { + match def { + Definition::ModuleDef(it) => match it { + ModuleDef::Module(it) => match it.definition_source(sema.db).value { + ModuleSource::Module(it) => runnable(&sema, it.syntax().clone(), file_id) + .map(|it| HoverAction::Runnable(it)), + _ => None, + }, + ModuleDef::Function(it) => { + let src = it.source(sema.db); + if src.file_id != file_id.into() { + mark::hit!(hover_macro_generated_struct_fn_doc_comment); + mark::hit!(hover_macro_generated_struct_fn_doc_attr); + + return None; + } + + runnable(&sema, src.value.syntax().clone(), file_id) + .map(|it| HoverAction::Runnable(it)) + } + _ => None, + }, + _ => None, + } +} + +fn goto_type_action(db: &RootDatabase, def: Definition) -> Option { + match def { + Definition::Local(it) => { + let mut targets: Vec = Vec::new(); + let mut push_new_def = |item: ModuleDef| { + if !targets.contains(&item) { + targets.push(item); + } + }; + + it.ty(db).walk(db, |t| { + if let Some(adt) = t.as_adt() { + push_new_def(adt.into()); + } else if let Some(trait_) = t.as_dyn_trait() { + 
push_new_def(trait_.into()); + } else if let Some(traits) = t.as_impl_traits(db) { + traits.into_iter().for_each(|it| push_new_def(it.into())); + } else if let Some(trait_) = t.as_associated_type_parent_trait(db) { + push_new_def(trait_.into()); + } + }); + + let targets = targets + .into_iter() + .filter_map(|it| { + Some(HoverGotoTypeData { + mod_path: render_path( + db, + it.module(db)?, + it.name(db).map(|name| name.to_string()), + ), + nav: it.try_to_nav(db)?, + }) + }) + .collect(); + + Some(HoverAction::GoToType(targets)) + } + _ => None, + } +} + +fn hover_markup( + docs: Option, + desc: Option, + mod_path: Option, +) -> Option { + match desc { + Some(desc) => { + let mut buf = String::new(); + + if let Some(mod_path) = mod_path { + if !mod_path.is_empty() { + format_to!(buf, "```rust\n{}\n```\n\n", mod_path); + } + } + format_to!(buf, "```rust\n{}\n```", desc); + + if let Some(doc) = docs { + format_to!(buf, "\n___\n\n{}", doc); + } + Some(buf.into()) + } + None => docs.map(Markup::from), + } +} + +fn definition_owner_name(db: &RootDatabase, def: &Definition) -> Option { + match def { + Definition::Field(f) => Some(f.parent_def(db).name(db)), + Definition::Local(l) => l.parent(db).name(db), + Definition::ModuleDef(md) => match md { + ModuleDef::Function(f) => match f.as_assoc_item(db)?.container(db) { + AssocItemContainer::Trait(t) => Some(t.name(db)), + AssocItemContainer::ImplDef(i) => i.target_ty(db).as_adt().map(|adt| adt.name(db)), + }, + ModuleDef::EnumVariant(e) => Some(e.parent_enum(db).name(db)), + _ => None, + }, + Definition::SelfType(i) => i.target_ty(db).as_adt().map(|adt| adt.name(db)), + _ => None, + } + .map(|name| name.to_string()) +} + +fn render_path(db: &RootDatabase, module: Module, item_name: Option) -> String { + let crate_name = + db.crate_graph()[module.krate().into()].display_name.as_ref().map(ToString::to_string); + let module_path = module + .path_to_root(db) + .into_iter() + .rev() + .flat_map(|it| it.name(db).map(|name| 
name.to_string())); + crate_name.into_iter().chain(module_path).chain(item_name).join("::") +} + +fn definition_mod_path(db: &RootDatabase, def: &Definition) -> Option { + def.module(db).map(|module| render_path(db, module, definition_owner_name(db, def))) +} + +fn hover_for_definition(db: &RootDatabase, def: Definition) -> Option { + let mod_path = definition_mod_path(db, &def); + return match def { + Definition::Macro(it) => { + let src = it.source(db); + let docs = Documentation::from_ast(&src.value).map(Into::into); + hover_markup(docs, Some(macro_label(&src.value)), mod_path) + } + Definition::Field(it) => { + let src = it.source(db); + match src.value { + FieldSource::Named(it) => { + let docs = Documentation::from_ast(&it).map(Into::into); + hover_markup(docs, it.short_label(), mod_path) + } + _ => None, + } + } + Definition::ModuleDef(it) => match it { + ModuleDef::Module(it) => match it.definition_source(db).value { + ModuleSource::Module(it) => { + let docs = Documentation::from_ast(&it).map(Into::into); + hover_markup(docs, it.short_label(), mod_path) + } + ModuleSource::SourceFile(it) => { + let docs = Documentation::from_ast(&it).map(Into::into); + hover_markup(docs, it.short_label(), mod_path) + } + }, + ModuleDef::Function(it) => from_def_source(db, it, mod_path), + ModuleDef::Adt(Adt::Struct(it)) => from_def_source(db, it, mod_path), + ModuleDef::Adt(Adt::Union(it)) => from_def_source(db, it, mod_path), + ModuleDef::Adt(Adt::Enum(it)) => from_def_source(db, it, mod_path), + ModuleDef::EnumVariant(it) => from_def_source(db, it, mod_path), + ModuleDef::Const(it) => from_def_source(db, it, mod_path), + ModuleDef::Static(it) => from_def_source(db, it, mod_path), + ModuleDef::Trait(it) => from_def_source(db, it, mod_path), + ModuleDef::TypeAlias(it) => from_def_source(db, it, mod_path), + ModuleDef::BuiltinType(it) => return Some(it.to_string().into()), + }, + Definition::Local(it) => return Some(Markup::fenced_block(&it.ty(db).display(db))), + 
Definition::TypeParam(_) | Definition::SelfType(_) => { + // FIXME: Hover for generic param + None + } + }; + + fn from_def_source(db: &RootDatabase, def: D, mod_path: Option) -> Option + where + D: HasSource, + A: ast::DocCommentsOwner + ast::NameOwner + ShortLabel + ast::AttrsOwner, + { + let src = def.source(db); + let docs = Documentation::from_ast(&src.value).map(Into::into); + hover_markup(docs, src.value.short_label(), mod_path) + } +} + +fn pick_best(tokens: TokenAtOffset) -> Option { + return tokens.max_by_key(priority); + fn priority(n: &SyntaxToken) -> usize { + match n.kind() { + IDENT | INT_NUMBER => 3, + T!['('] | T![')'] => 2, + kind if kind.is_trivia() => 0, + _ => 1, + } + } +} + +#[cfg(test)] +mod tests { + use base_db::FileLoader; + use expect::{expect, Expect}; + + use crate::mock_analysis::analysis_and_position; + + use super::*; + + fn check_hover_no_result(ra_fixture: &str) { + let (analysis, position) = analysis_and_position(ra_fixture); + assert!(analysis.hover(position).unwrap().is_none()); + } + + fn check(ra_fixture: &str, expect: Expect) { + let (analysis, position) = analysis_and_position(ra_fixture); + let hover = analysis.hover(position).unwrap().unwrap(); + + let content = analysis.db.file_text(position.file_id); + let hovered_element = &content[hover.range]; + + let actual = format!("*{}*\n{}\n", hovered_element, hover.info.markup); + expect.assert_eq(&actual) + } + + fn check_actions(ra_fixture: &str, expect: Expect) { + let (analysis, position) = analysis_and_position(ra_fixture); + let hover = analysis.hover(position).unwrap().unwrap(); + expect.assert_debug_eq(&hover.info.actions) + } + + #[test] + fn hover_shows_type_of_an_expression() { + check( + r#" +pub fn foo() -> u32 { 1 } + +fn main() { + let foo_test = foo()<|>; +} +"#, + expect![[r#" + *foo()* + ```rust + u32 + ``` + "#]], + ); + } + + #[test] + fn hover_shows_long_type_of_an_expression() { + check( + r#" +struct Scan { a: A, b: B, c: C } +struct Iter { inner: I } 
+enum Option { Some(T), None } + +struct OtherStruct { i: T } + +fn scan(a: A, b: B, c: C) -> Iter, B, C>> { + Iter { inner: Scan { a, b, c } } +} + +fn main() { + let num: i32 = 55; + let closure = |memo: &mut u32, value: &u32, _another: &mut u32| -> Option { + Option::Some(*memo + value) + }; + let number = 5u32; + let mut iter<|> = scan(OtherStruct { i: num }, closure, number); +} +"#, + expect![[r#" + *iter* + ```rust + Iter>, |&mut u32, &u32, &mut u32| -> Option, u32>> + ``` + "#]], + ); + } + + #[test] + fn hover_shows_fn_signature() { + // Single file with result + check( + r#" +pub fn foo() -> u32 { 1 } + +fn main() { let foo_test = fo<|>o(); } +"#, + expect![[r#" + *foo* + ```rust + pub fn foo() -> u32 + ``` + "#]], + ); + + // Multiple candidates but results are ambiguous. + check( + r#" +//- /a.rs +pub fn foo() -> u32 { 1 } + +//- /b.rs +pub fn foo() -> &str { "" } + +//- /c.rs +pub fn foo(a: u32, b: u32) {} + +//- /main.rs +mod a; +mod b; +mod c; + +fn main() { let foo_test = fo<|>o(); } + "#, + expect![[r#" + *foo* + ```rust + {unknown} + ``` + "#]], + ); + } + + #[test] + fn hover_shows_fn_signature_with_type_params() { + check( + r#" +pub fn foo<'a, T: AsRef>(b: &'a T) -> &'a str { } + +fn main() { let foo_test = fo<|>o(); } + "#, + expect![[r#" + *foo* + ```rust + pub fn foo<'a, T: AsRef>(b: &'a T) -> &'a str + ``` + "#]], + ); + } + + #[test] + fn hover_shows_fn_signature_on_fn_name() { + check( + r#" +pub fn foo<|>(a: u32, b: u32) -> u32 {} + +fn main() { } +"#, + expect![[r#" + *foo* + ```rust + pub fn foo(a: u32, b: u32) -> u32 + ``` + "#]], + ); + } + + #[test] + fn hover_shows_fn_doc() { + check( + r#" +/// # Example +/// ``` +/// # use std::path::Path; +/// # +/// foo(Path::new("hello, world!")) +/// ``` +pub fn foo<|>(_: &Path) {} + +fn main() { } +"#, + expect![[r#" + *foo* + ```rust + pub fn foo(_: &Path) + ``` + ___ + + # Example + ``` + # use std::path::Path; + # + foo(Path::new("hello, world!")) + ``` + "#]], + ); + } + + #[test] + fn 
hover_shows_struct_field_info() { + // Hovering over the field when instantiating + check( + r#" +struct Foo { field_a: u32 } + +fn main() { + let foo = Foo { field_a<|>: 0, }; +} +"#, + expect![[r#" + *field_a* + ```rust + Foo + ``` + + ```rust + field_a: u32 + ``` + "#]], + ); + + // Hovering over the field in the definition + check( + r#" +struct Foo { field_a<|>: u32 } + +fn main() { + let foo = Foo { field_a: 0 }; +} +"#, + expect![[r#" + *field_a* + ```rust + Foo + ``` + + ```rust + field_a: u32 + ``` + "#]], + ); + } + + #[test] + fn hover_const_static() { + check( + r#"const foo<|>: u32 = 123;"#, + expect![[r#" + *foo* + ```rust + const foo: u32 = 123 + ``` + "#]], + ); + check( + r#"static foo<|>: u32 = 456;"#, + expect![[r#" + *foo* + ```rust + static foo: u32 + ``` + "#]], + ); + } + + #[test] + fn hover_default_generic_types() { + check( + r#" +struct Test { k: K, t: T } + +fn main() { + let zz<|> = Test { t: 23u8, k: 33 }; +}"#, + expect![[r#" + *zz* + ```rust + Test + ``` + "#]], + ); + } + + #[test] + fn hover_some() { + check( + r#" +enum Option { Some(T) } +use Option::Some; + +fn main() { So<|>me(12); } +"#, + expect![[r#" + *Some* + ```rust + Option + ``` + + ```rust + Some + ``` + "#]], + ); + + check( + r#" +enum Option { Some(T) } +use Option::Some; + +fn main() { let b<|>ar = Some(12); } +"#, + expect![[r#" + *bar* + ```rust + Option + ``` + "#]], + ); + } + + #[test] + fn hover_enum_variant() { + check( + r#" +enum Option { + /// The None variant + Non<|>e +} +"#, + expect![[r#" + *None* + ```rust + Option + ``` + + ```rust + None + ``` + ___ + + The None variant + "#]], + ); + + check( + r#" +enum Option { + /// The Some variant + Some(T) +} +fn main() { + let s = Option::Som<|>e(12); +} +"#, + expect![[r#" + *Some* + ```rust + Option + ``` + + ```rust + Some + ``` + ___ + + The Some variant + "#]], + ); + } + + #[test] + fn hover_for_local_variable() { + check( + r#"fn func(foo: i32) { fo<|>o; }"#, + expect![[r#" + *foo* + ```rust + i32 + 
``` + "#]], + ) + } + + #[test] + fn hover_for_local_variable_pat() { + check( + r#"fn func(fo<|>o: i32) {}"#, + expect![[r#" + *foo* + ```rust + i32 + ``` + "#]], + ) + } + + #[test] + fn hover_local_var_edge() { + check( + r#"fn func(foo: i32) { if true { <|>foo; }; }"#, + expect![[r#" + *foo* + ```rust + i32 + ``` + "#]], + ) + } + + #[test] + fn hover_for_param_edge() { + check( + r#"fn func(<|>foo: i32) {}"#, + expect![[r#" + *foo* + ```rust + i32 + ``` + "#]], + ) + } + + #[test] + fn hover_for_param_with_multiple_traits() { + check( + r#"trait Deref { + type Target: ?Sized; + } + trait DerefMut { + type Target: ?Sized; + } + fn f(_x<|>: impl Deref + DerefMut) {}"#, + expect![[r#" + *_x* + ```rust + impl Deref + DerefMut + ``` + "#]], + ) + } + + #[test] + fn test_hover_infer_associated_method_result() { + check( + r#" +struct Thing { x: u32 } + +impl Thing { + fn new() -> Thing { Thing { x: 0 } } +} + +fn main() { let foo_<|>test = Thing::new(); } + "#, + expect![[r#" + *foo_test* + ```rust + Thing + ``` + "#]], + ) + } + + #[test] + fn test_hover_infer_associated_method_exact() { + check( + r#" +mod wrapper { + struct Thing { x: u32 } + + impl Thing { + fn new() -> Thing { Thing { x: 0 } } + } +} + +fn main() { let foo_test = wrapper::Thing::new<|>(); } +"#, + expect![[r#" + *new* + ```rust + wrapper::Thing + ``` + + ```rust + fn new() -> Thing + ``` + "#]], + ) + } + + #[test] + fn test_hover_infer_associated_const_in_pattern() { + check( + r#" +struct X; +impl X { + const C: u32 = 1; +} + +fn main() { + match 1 { + X::C<|> => {}, + 2 => {}, + _ => {} + }; +} +"#, + expect![[r#" + *C* + ```rust + const C: u32 = 1 + ``` + "#]], + ) + } + + #[test] + fn test_hover_self() { + check( + r#" +struct Thing { x: u32 } +impl Thing { + fn new() -> Self { Self<|> { x: 0 } } +} +"#, + expect![[r#" + *Self { x: 0 }* + ```rust + Thing + ``` + "#]], + ) + } /* FIXME: revive these tests + let (analysis, position) = analysis_and_position( + " + struct Thing { x: u32 } + 
impl Thing { + fn new() -> Self<|> { + Self { x: 0 } + } + } + ", + ); + + let hover = analysis.hover(position).unwrap().unwrap(); + assert_eq!(trim_markup(&hover.info.markup.as_str()), ("Thing")); + + let (analysis, position) = analysis_and_position( + " + enum Thing { A } + impl Thing { + pub fn new() -> Self<|> { + Thing::A + } + } + ", + ); + let hover = analysis.hover(position).unwrap().unwrap(); + assert_eq!(trim_markup(&hover.info.markup.as_str()), ("enum Thing")); + + let (analysis, position) = analysis_and_position( + " + enum Thing { A } + impl Thing { + pub fn thing(a: Self<|>) { + } + } + ", + ); + let hover = analysis.hover(position).unwrap().unwrap(); + assert_eq!(trim_markup(&hover.info.markup.as_str()), ("enum Thing")); + */ + + #[test] + fn test_hover_shadowing_pat() { + check( + r#" +fn x() {} + +fn y() { + let x = 0i32; + x<|>; +} +"#, + expect![[r#" + *x* + ```rust + i32 + ``` + "#]], + ) + } + + #[test] + fn test_hover_macro_invocation() { + check( + r#" +macro_rules! foo { () => {} } + +fn f() { fo<|>o!(); } +"#, + expect![[r#" + *foo* + ```rust + macro_rules! foo + ``` + "#]], + ) + } + + #[test] + fn test_hover_tuple_field() { + check( + r#"struct TS(String, i32<|>);"#, + expect![[r#" + *i32* + i32 + "#]], + ) + } + + #[test] + fn test_hover_through_macro() { + check( + r#" +macro_rules! id { ($($tt:tt)*) => { $($tt)* } } +fn foo() {} +id! { + fn bar() { fo<|>o(); } +} +"#, + expect![[r#" + *foo* + ```rust + fn foo() + ``` + "#]], + ); + } + + #[test] + fn test_hover_through_expr_in_macro() { + check( + r#" +macro_rules! id { ($($tt:tt)*) => { $($tt)* } } +fn foo(bar:u32) { let a = id!(ba<|>r); } +"#, + expect![[r#" + *bar* + ```rust + u32 + ``` + "#]], + ); + } + + #[test] + fn test_hover_through_expr_in_macro_recursive() { + check( + r#" +macro_rules! id_deep { ($($tt:tt)*) => { $($tt)* } } +macro_rules! 
id { ($($tt:tt)*) => { id_deep!($($tt)*) } } +fn foo(bar:u32) { let a = id!(ba<|>r); } +"#, + expect![[r#" + *bar* + ```rust + u32 + ``` + "#]], + ); + } + + #[test] + fn test_hover_through_func_in_macro_recursive() { + check( + r#" +macro_rules! id_deep { ($($tt:tt)*) => { $($tt)* } } +macro_rules! id { ($($tt:tt)*) => { id_deep!($($tt)*) } } +fn bar() -> u32 { 0 } +fn foo() { let a = id!([0u32, bar(<|>)] ); } +"#, + expect![[r#" + *bar()* + ```rust + u32 + ``` + "#]], + ); + } + + #[test] + fn test_hover_through_literal_string_in_macro() { + check( + r#" +macro_rules! arr { ($($tt:tt)*) => { [$($tt)*)] } } +fn foo() { + let mastered_for_itunes = ""; + let _ = arr!("Tr<|>acks", &mastered_for_itunes); +} +"#, + expect![[r#" + *"Tracks"* + ```rust + &str + ``` + "#]], + ); + } + + #[test] + fn test_hover_through_assert_macro() { + check( + r#" +#[rustc_builtin_macro] +macro_rules! assert {} + +fn bar() -> bool { true } +fn foo() { + assert!(ba<|>r()); +} +"#, + expect![[r#" + *bar* + ```rust + fn bar() -> bool + ``` + "#]], + ); + } + + #[test] + fn test_hover_through_literal_string_in_builtin_macro() { + check_hover_no_result( + r#" + #[rustc_builtin_macro] + macro_rules! 
format {} + + fn foo() { + format!("hel<|>lo {}", 0); + } + "#, + ); + } + + #[test] + fn test_hover_non_ascii_space_doc() { + check( + " +/// <- `\u{3000}` here +fn foo() { } + +fn bar() { fo<|>o(); } +", + expect![[r#" + *foo* + ```rust + fn foo() + ``` + ___ + + <- ` ` here + "#]], + ); + } + + #[test] + fn test_hover_function_show_qualifiers() { + check( + r#"async fn foo<|>() {}"#, + expect![[r#" + *foo* + ```rust + async fn foo() + ``` + "#]], + ); + check( + r#"pub const unsafe fn foo<|>() {}"#, + expect![[r#" + *foo* + ```rust + pub const unsafe fn foo() + ``` + "#]], + ); + check( + r#"pub(crate) async unsafe extern "C" fn foo<|>() {}"#, + expect![[r#" + *foo* + ```rust + pub(crate) async unsafe extern "C" fn foo() + ``` + "#]], + ); + } + + #[test] + fn test_hover_trait_show_qualifiers() { + check_actions( + r"unsafe trait foo<|>() {}", + expect![[r#" + [ + Implementaion( + FilePosition { + file_id: FileId( + 1, + ), + offset: 13, + }, + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_extern_crate() { + check( + r#" +//- /main.rs +extern crate st<|>d; +//- /std/lib.rs +//! Standard library for this test +//! +//! Printed? +//! abc123 + "#, + expect![[r#" + *std* + Standard library for this test + + Printed? + abc123 + "#]], + ); + check( + r#" +//- /main.rs +extern crate std as ab<|>c; +//- /std/lib.rs +//! Standard library for this test +//! +//! Printed? +//! abc123 + "#, + expect![[r#" + *abc* + Standard library for this test + + Printed? 
+ abc123 + "#]], + ); + } + + #[test] + fn test_hover_mod_with_same_name_as_function() { + check( + r#" +use self::m<|>y::Bar; +mod my { pub struct Bar; } + +fn my() {} +"#, + expect![[r#" + *my* + ```rust + mod my + ``` + "#]], + ); + } + + #[test] + fn test_hover_struct_doc_comment() { + check( + r#" +/// bar docs +struct Bar; + +fn foo() { let bar = Ba<|>r; } +"#, + expect![[r#" + *Bar* + ```rust + struct Bar + ``` + ___ + + bar docs + "#]], + ); + } + + #[test] + fn test_hover_struct_doc_attr() { + check( + r#" +#[doc = "bar docs"] +struct Bar; + +fn foo() { let bar = Ba<|>r; } +"#, + expect![[r#" + *Bar* + ```rust + struct Bar + ``` + ___ + + bar docs + "#]], + ); + } + + #[test] + fn test_hover_struct_doc_attr_multiple_and_mixed() { + check( + r#" +/// bar docs 0 +#[doc = "bar docs 1"] +#[doc = "bar docs 2"] +struct Bar; + +fn foo() { let bar = Ba<|>r; } +"#, + expect![[r#" + *Bar* + ```rust + struct Bar + ``` + ___ + + bar docs 0 + + bar docs 1 + + bar docs 2 + "#]], + ); + } + + #[test] + fn test_hover_macro_generated_struct_fn_doc_comment() { + mark::check!(hover_macro_generated_struct_fn_doc_comment); + + check( + r#" +macro_rules! bar { + () => { + struct Bar; + impl Bar { + /// Do the foo + fn foo(&self) {} + } + } +} + +bar!(); + +fn foo() { let bar = Bar; bar.fo<|>o(); } +"#, + expect![[r#" + *foo* + ```rust + Bar + ``` + + ```rust + fn foo(&self) + ``` + ___ + + Do the foo + "#]], + ); + } + + #[test] + fn test_hover_macro_generated_struct_fn_doc_attr() { + mark::check!(hover_macro_generated_struct_fn_doc_attr); + + check( + r#" +macro_rules! 
bar { + () => { + struct Bar; + impl Bar { + #[doc = "Do the foo"] + fn foo(&self) {} + } + } +} + +bar!(); + +fn foo() { let bar = Bar; bar.fo<|>o(); } +"#, + expect![[r#" + *foo* + ```rust + Bar + ``` + + ```rust + fn foo(&self) + ``` + ___ + + Do the foo + "#]], + ); + } + + #[test] + fn test_hover_trait_has_impl_action() { + check_actions( + r#"trait foo<|>() {}"#, + expect![[r#" + [ + Implementaion( + FilePosition { + file_id: FileId( + 1, + ), + offset: 6, + }, + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_struct_has_impl_action() { + check_actions( + r"struct foo<|>() {}", + expect![[r#" + [ + Implementaion( + FilePosition { + file_id: FileId( + 1, + ), + offset: 7, + }, + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_union_has_impl_action() { + check_actions( + r#"union foo<|>() {}"#, + expect![[r#" + [ + Implementaion( + FilePosition { + file_id: FileId( + 1, + ), + offset: 6, + }, + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_enum_has_impl_action() { + check_actions( + r"enum foo<|>() { A, B }", + expect![[r#" + [ + Implementaion( + FilePosition { + file_id: FileId( + 1, + ), + offset: 5, + }, + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_test_has_action() { + check_actions( + r#" +#[test] +fn foo_<|>test() {} +"#, + expect![[r#" + [ + Runnable( + Runnable { + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 0..24, + focus_range: Some( + 11..19, + ), + name: "foo_test", + kind: FN, + container_name: None, + description: None, + docs: None, + }, + kind: Test { + test_id: Path( + "foo_test", + ), + attr: TestAttr { + ignore: false, + }, + }, + cfg_exprs: [], + }, + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_test_mod_has_action() { + check_actions( + r#" +mod tests<|> { + #[test] + fn foo_test() {} +} +"#, + expect![[r#" + [ + Runnable( + Runnable { + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 0..46, + focus_range: Some( + 4..9, + ), + name: "tests", + kind: MODULE, + 
container_name: None, + description: None, + docs: None, + }, + kind: TestMod { + path: "tests", + }, + cfg_exprs: [], + }, + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_struct_has_goto_type_action() { + check_actions( + r#" +struct S{ f1: u32 } + +fn main() { let s<|>t = S{ f1:0 }; } + "#, + expect![[r#" + [ + GoToType( + [ + HoverGotoTypeData { + mod_path: "S", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 0..19, + focus_range: Some( + 7..8, + ), + name: "S", + kind: STRUCT, + container_name: None, + description: Some( + "struct S", + ), + docs: None, + }, + }, + ], + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_generic_struct_has_goto_type_actions() { + check_actions( + r#" +struct Arg(u32); +struct S{ f1: T } + +fn main() { let s<|>t = S{ f1:Arg(0) }; } +"#, + expect![[r#" + [ + GoToType( + [ + HoverGotoTypeData { + mod_path: "S", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 17..37, + focus_range: Some( + 24..25, + ), + name: "S", + kind: STRUCT, + container_name: None, + description: Some( + "struct S", + ), + docs: None, + }, + }, + HoverGotoTypeData { + mod_path: "Arg", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 0..16, + focus_range: Some( + 7..10, + ), + name: "Arg", + kind: STRUCT, + container_name: None, + description: Some( + "struct Arg", + ), + docs: None, + }, + }, + ], + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_generic_struct_has_flattened_goto_type_actions() { + check_actions( + r#" +struct Arg(u32); +struct S{ f1: T } + +fn main() { let s<|>t = S{ f1: S{ f1: Arg(0) } }; } + "#, + expect![[r#" + [ + GoToType( + [ + HoverGotoTypeData { + mod_path: "S", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 17..37, + focus_range: Some( + 24..25, + ), + name: "S", + kind: STRUCT, + container_name: None, + description: Some( + "struct S", + ), + docs: None, + }, + }, + HoverGotoTypeData { + mod_path: "Arg", + nav: NavigationTarget { + 
file_id: FileId( + 1, + ), + full_range: 0..16, + focus_range: Some( + 7..10, + ), + name: "Arg", + kind: STRUCT, + container_name: None, + description: Some( + "struct Arg", + ), + docs: None, + }, + }, + ], + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_tuple_has_goto_type_actions() { + check_actions( + r#" +struct A(u32); +struct B(u32); +mod M { + pub struct C(u32); +} + +fn main() { let s<|>t = (A(1), B(2), M::C(3) ); } +"#, + expect![[r#" + [ + GoToType( + [ + HoverGotoTypeData { + mod_path: "A", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 0..14, + focus_range: Some( + 7..8, + ), + name: "A", + kind: STRUCT, + container_name: None, + description: Some( + "struct A", + ), + docs: None, + }, + }, + HoverGotoTypeData { + mod_path: "B", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 15..29, + focus_range: Some( + 22..23, + ), + name: "B", + kind: STRUCT, + container_name: None, + description: Some( + "struct B", + ), + docs: None, + }, + }, + HoverGotoTypeData { + mod_path: "M::C", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 42..60, + focus_range: Some( + 53..54, + ), + name: "C", + kind: STRUCT, + container_name: None, + description: Some( + "pub struct C", + ), + docs: None, + }, + }, + ], + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_return_impl_trait_has_goto_type_action() { + check_actions( + r#" +trait Foo {} +fn foo() -> impl Foo {} + +fn main() { let s<|>t = foo(); } +"#, + expect![[r#" + [ + GoToType( + [ + HoverGotoTypeData { + mod_path: "Foo", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 0..12, + focus_range: Some( + 6..9, + ), + name: "Foo", + kind: TRAIT, + container_name: None, + description: Some( + "trait Foo", + ), + docs: None, + }, + }, + ], + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_generic_return_impl_trait_has_goto_type_action() { + check_actions( + r#" +trait Foo {} +struct S; +fn foo() -> impl Foo {} + +fn main() { let 
s<|>t = foo(); } +"#, + expect![[r#" + [ + GoToType( + [ + HoverGotoTypeData { + mod_path: "Foo", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 0..15, + focus_range: Some( + 6..9, + ), + name: "Foo", + kind: TRAIT, + container_name: None, + description: Some( + "trait Foo", + ), + docs: None, + }, + }, + HoverGotoTypeData { + mod_path: "S", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 16..25, + focus_range: Some( + 23..24, + ), + name: "S", + kind: STRUCT, + container_name: None, + description: Some( + "struct S", + ), + docs: None, + }, + }, + ], + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_return_impl_traits_has_goto_type_action() { + check_actions( + r#" +trait Foo {} +trait Bar {} +fn foo() -> impl Foo + Bar {} + +fn main() { let s<|>t = foo(); } + "#, + expect![[r#" + [ + GoToType( + [ + HoverGotoTypeData { + mod_path: "Foo", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 0..12, + focus_range: Some( + 6..9, + ), + name: "Foo", + kind: TRAIT, + container_name: None, + description: Some( + "trait Foo", + ), + docs: None, + }, + }, + HoverGotoTypeData { + mod_path: "Bar", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 13..25, + focus_range: Some( + 19..22, + ), + name: "Bar", + kind: TRAIT, + container_name: None, + description: Some( + "trait Bar", + ), + docs: None, + }, + }, + ], + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_generic_return_impl_traits_has_goto_type_action() { + check_actions( + r#" +trait Foo {} +trait Bar {} +struct S1 {} +struct S2 {} + +fn foo() -> impl Foo + Bar {} + +fn main() { let s<|>t = foo(); } +"#, + expect![[r#" + [ + GoToType( + [ + HoverGotoTypeData { + mod_path: "Foo", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 0..15, + focus_range: Some( + 6..9, + ), + name: "Foo", + kind: TRAIT, + container_name: None, + description: Some( + "trait Foo", + ), + docs: None, + }, + }, + HoverGotoTypeData { + 
mod_path: "Bar", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 16..31, + focus_range: Some( + 22..25, + ), + name: "Bar", + kind: TRAIT, + container_name: None, + description: Some( + "trait Bar", + ), + docs: None, + }, + }, + HoverGotoTypeData { + mod_path: "S1", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 32..44, + focus_range: Some( + 39..41, + ), + name: "S1", + kind: STRUCT, + container_name: None, + description: Some( + "struct S1", + ), + docs: None, + }, + }, + HoverGotoTypeData { + mod_path: "S2", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 45..57, + focus_range: Some( + 52..54, + ), + name: "S2", + kind: STRUCT, + container_name: None, + description: Some( + "struct S2", + ), + docs: None, + }, + }, + ], + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_arg_impl_trait_has_goto_type_action() { + check_actions( + r#" +trait Foo {} +fn foo(ar<|>g: &impl Foo) {} +"#, + expect![[r#" + [ + GoToType( + [ + HoverGotoTypeData { + mod_path: "Foo", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 0..12, + focus_range: Some( + 6..9, + ), + name: "Foo", + kind: TRAIT, + container_name: None, + description: Some( + "trait Foo", + ), + docs: None, + }, + }, + ], + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_arg_impl_traits_has_goto_type_action() { + check_actions( + r#" +trait Foo {} +trait Bar {} +struct S{} + +fn foo(ar<|>g: &impl Foo + Bar) {} +"#, + expect![[r#" + [ + GoToType( + [ + HoverGotoTypeData { + mod_path: "Foo", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 0..12, + focus_range: Some( + 6..9, + ), + name: "Foo", + kind: TRAIT, + container_name: None, + description: Some( + "trait Foo", + ), + docs: None, + }, + }, + HoverGotoTypeData { + mod_path: "Bar", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 13..28, + focus_range: Some( + 19..22, + ), + name: "Bar", + kind: TRAIT, + container_name: None, + 
description: Some( + "trait Bar", + ), + docs: None, + }, + }, + HoverGotoTypeData { + mod_path: "S", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 29..39, + focus_range: Some( + 36..37, + ), + name: "S", + kind: STRUCT, + container_name: None, + description: Some( + "struct S", + ), + docs: None, + }, + }, + ], + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_arg_generic_impl_trait_has_goto_type_action() { + check_actions( + r#" +trait Foo {} +struct S {} +fn foo(ar<|>g: &impl Foo) {} +"#, + expect![[r#" + [ + GoToType( + [ + HoverGotoTypeData { + mod_path: "Foo", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 0..15, + focus_range: Some( + 6..9, + ), + name: "Foo", + kind: TRAIT, + container_name: None, + description: Some( + "trait Foo", + ), + docs: None, + }, + }, + HoverGotoTypeData { + mod_path: "S", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 16..27, + focus_range: Some( + 23..24, + ), + name: "S", + kind: STRUCT, + container_name: None, + description: Some( + "struct S", + ), + docs: None, + }, + }, + ], + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_dyn_return_has_goto_type_action() { + check_actions( + r#" +trait Foo {} +struct S; +impl Foo for S {} + +struct B{} +fn foo() -> B {} + +fn main() { let s<|>t = foo(); } +"#, + expect![[r#" + [ + GoToType( + [ + HoverGotoTypeData { + mod_path: "B", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 42..55, + focus_range: Some( + 49..50, + ), + name: "B", + kind: STRUCT, + container_name: None, + description: Some( + "struct B", + ), + docs: None, + }, + }, + HoverGotoTypeData { + mod_path: "Foo", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 0..12, + focus_range: Some( + 6..9, + ), + name: "Foo", + kind: TRAIT, + container_name: None, + description: Some( + "trait Foo", + ), + docs: None, + }, + }, + ], + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_dyn_arg_has_goto_type_action() { 
+ check_actions( + r#" +trait Foo {} +fn foo(ar<|>g: &dyn Foo) {} +"#, + expect![[r#" + [ + GoToType( + [ + HoverGotoTypeData { + mod_path: "Foo", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 0..12, + focus_range: Some( + 6..9, + ), + name: "Foo", + kind: TRAIT, + container_name: None, + description: Some( + "trait Foo", + ), + docs: None, + }, + }, + ], + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_generic_dyn_arg_has_goto_type_action() { + check_actions( + r#" +trait Foo {} +struct S {} +fn foo(ar<|>g: &dyn Foo) {} +"#, + expect![[r#" + [ + GoToType( + [ + HoverGotoTypeData { + mod_path: "Foo", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 0..15, + focus_range: Some( + 6..9, + ), + name: "Foo", + kind: TRAIT, + container_name: None, + description: Some( + "trait Foo", + ), + docs: None, + }, + }, + HoverGotoTypeData { + mod_path: "S", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 16..27, + focus_range: Some( + 23..24, + ), + name: "S", + kind: STRUCT, + container_name: None, + description: Some( + "struct S", + ), + docs: None, + }, + }, + ], + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_goto_type_action_links_order() { + check_actions( + r#" +trait ImplTrait {} +trait DynTrait {} +struct B {} +struct S {} + +fn foo(a<|>rg: &impl ImplTrait>>>) {} + "#, + expect![[r#" + [ + GoToType( + [ + HoverGotoTypeData { + mod_path: "ImplTrait", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 0..21, + focus_range: Some( + 6..15, + ), + name: "ImplTrait", + kind: TRAIT, + container_name: None, + description: Some( + "trait ImplTrait", + ), + docs: None, + }, + }, + HoverGotoTypeData { + mod_path: "B", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 43..57, + focus_range: Some( + 50..51, + ), + name: "B", + kind: STRUCT, + container_name: None, + description: Some( + "struct B", + ), + docs: None, + }, + }, + HoverGotoTypeData { + mod_path: "DynTrait", 
+ nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 22..42, + focus_range: Some( + 28..36, + ), + name: "DynTrait", + kind: TRAIT, + container_name: None, + description: Some( + "trait DynTrait", + ), + docs: None, + }, + }, + HoverGotoTypeData { + mod_path: "S", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 58..69, + focus_range: Some( + 65..66, + ), + name: "S", + kind: STRUCT, + container_name: None, + description: Some( + "struct S", + ), + docs: None, + }, + }, + ], + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_associated_type_has_goto_type_action() { + check_actions( + r#" +trait Foo { + type Item; + fn get(self) -> Self::Item {} +} + +struct Bar{} +struct S{} + +impl Foo for S { type Item = Bar; } + +fn test() -> impl Foo { S {} } + +fn main() { let s<|>t = test().get(); } +"#, + expect![[r#" + [ + GoToType( + [ + HoverGotoTypeData { + mod_path: "Foo", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 0..62, + focus_range: Some( + 6..9, + ), + name: "Foo", + kind: TRAIT, + container_name: None, + description: Some( + "trait Foo", + ), + docs: None, + }, + }, + ], + ), + ] + "#]], + ); + } +} diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs new file mode 100644 index 0000000000..002adf9159 --- /dev/null +++ b/crates/ide/src/inlay_hints.rs @@ -0,0 +1,927 @@ +use hir::{Adt, Callable, HirDisplay, Semantics, Type}; +use ide_db::RootDatabase; +use stdx::to_lower_snake_case; +use syntax::{ + ast::{self, ArgListOwner, AstNode}, + match_ast, Direction, NodeOrToken, SmolStr, SyntaxKind, TextRange, T, +}; + +use crate::FileId; +use ast::NameOwner; +use either::Either; + +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct InlayHintsConfig { + pub type_hints: bool, + pub parameter_hints: bool, + pub chaining_hints: bool, + pub max_length: Option, +} + +impl Default for InlayHintsConfig { + fn default() -> Self { + Self { type_hints: true, parameter_hints: true, chaining_hints: 
true, max_length: None } + } +} + +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum InlayKind { + TypeHint, + ParameterHint, + ChainingHint, +} + +#[derive(Debug)] +pub struct InlayHint { + pub range: TextRange, + pub kind: InlayKind, + pub label: SmolStr, +} + +// Feature: Inlay Hints +// +// rust-analyzer shows additional information inline with the source code. +// Editors usually render this using read-only virtual text snippets interspersed with code. +// +// rust-analyzer shows hits for +// +// * types of local variables +// * names of function arguments +// * types of chained expressions +// +// **Note:** VS Code does not have native support for inlay hints https://github.com/microsoft/vscode/issues/16221[yet] and the hints are implemented using decorations. +// This approach has limitations, the caret movement and bracket highlighting near the edges of the hint may be weird: +// https://github.com/rust-analyzer/rust-analyzer/issues/1623[1], https://github.com/rust-analyzer/rust-analyzer/issues/3453[2]. +// +// |=== +// | Editor | Action Name +// +// | VS Code | **Rust Analyzer: Toggle inlay hints* +// |=== +pub(crate) fn inlay_hints( + db: &RootDatabase, + file_id: FileId, + config: &InlayHintsConfig, +) -> Vec { + let _p = profile::span("inlay_hints"); + let sema = Semantics::new(db); + let file = sema.parse(file_id); + + let mut res = Vec::new(); + for node in file.syntax().descendants() { + if let Some(expr) = ast::Expr::cast(node.clone()) { + get_chaining_hints(&mut res, &sema, config, expr); + } + + match_ast! 
{ + match node { + ast::CallExpr(it) => { get_param_name_hints(&mut res, &sema, config, ast::Expr::from(it)); }, + ast::MethodCallExpr(it) => { get_param_name_hints(&mut res, &sema, config, ast::Expr::from(it)); }, + ast::IdentPat(it) => { get_bind_pat_hints(&mut res, &sema, config, it); }, + _ => (), + } + } + } + res +} + +fn get_chaining_hints( + acc: &mut Vec, + sema: &Semantics, + config: &InlayHintsConfig, + expr: ast::Expr, +) -> Option<()> { + if !config.chaining_hints { + return None; + } + + if matches!(expr, ast::Expr::RecordExpr(_)) { + return None; + } + + let mut tokens = expr + .syntax() + .siblings_with_tokens(Direction::Next) + .filter_map(NodeOrToken::into_token) + .filter(|t| match t.kind() { + SyntaxKind::WHITESPACE if !t.text().contains('\n') => false, + SyntaxKind::COMMENT => false, + _ => true, + }); + + // Chaining can be defined as an expression whose next sibling tokens are newline and dot + // Ignoring extra whitespace and comments + let next = tokens.next()?.kind(); + let next_next = tokens.next()?.kind(); + if next == SyntaxKind::WHITESPACE && next_next == T![.] 
{ + let ty = sema.type_of_expr(&expr)?; + if ty.is_unknown() { + return None; + } + if matches!(expr, ast::Expr::PathExpr(_)) { + if let Some(Adt::Struct(st)) = ty.as_adt() { + if st.fields(sema.db).is_empty() { + return None; + } + } + } + let label = ty.display_truncated(sema.db, config.max_length).to_string(); + acc.push(InlayHint { + range: expr.syntax().text_range(), + kind: InlayKind::ChainingHint, + label: label.into(), + }); + } + Some(()) +} + +fn get_param_name_hints( + acc: &mut Vec, + sema: &Semantics, + config: &InlayHintsConfig, + expr: ast::Expr, +) -> Option<()> { + if !config.parameter_hints { + return None; + } + + let args = match &expr { + ast::Expr::CallExpr(expr) => expr.arg_list()?.args(), + ast::Expr::MethodCallExpr(expr) => expr.arg_list()?.args(), + _ => return None, + }; + + let callable = get_callable(sema, &expr)?; + let hints = callable + .params(sema.db) + .into_iter() + .zip(args) + .filter_map(|((param, _ty), arg)| { + let param_name = match param? { + Either::Left(self_param) => self_param.to_string(), + Either::Right(pat) => match pat { + ast::Pat::IdentPat(it) => it.name()?.to_string(), + _ => return None, + }, + }; + Some((param_name, arg)) + }) + .filter(|(param_name, arg)| should_show_param_name_hint(sema, &callable, ¶m_name, &arg)) + .map(|(param_name, arg)| InlayHint { + range: arg.syntax().text_range(), + kind: InlayKind::ParameterHint, + label: param_name.into(), + }); + + acc.extend(hints); + Some(()) +} + +fn get_bind_pat_hints( + acc: &mut Vec, + sema: &Semantics, + config: &InlayHintsConfig, + pat: ast::IdentPat, +) -> Option<()> { + if !config.type_hints { + return None; + } + + let ty = sema.type_of_pat(&pat.clone().into())?; + + if should_not_display_type_hint(sema.db, &pat, &ty) { + return None; + } + + acc.push(InlayHint { + range: pat.syntax().text_range(), + kind: InlayKind::TypeHint, + label: ty.display_truncated(sema.db, config.max_length).to_string().into(), + }); + Some(()) +} + +fn pat_is_enum_variant(db: 
&RootDatabase, bind_pat: &ast::IdentPat, pat_ty: &Type) -> bool { + if let Some(Adt::Enum(enum_data)) = pat_ty.as_adt() { + let pat_text = bind_pat.to_string(); + enum_data + .variants(db) + .into_iter() + .map(|variant| variant.name(db).to_string()) + .any(|enum_name| enum_name == pat_text) + } else { + false + } +} + +fn should_not_display_type_hint( + db: &RootDatabase, + bind_pat: &ast::IdentPat, + pat_ty: &Type, +) -> bool { + if pat_ty.is_unknown() { + return true; + } + + if let Some(Adt::Struct(s)) = pat_ty.as_adt() { + if s.fields(db).is_empty() && s.name(db).to_string() == bind_pat.to_string() { + return true; + } + } + + for node in bind_pat.syntax().ancestors() { + match_ast! { + match node { + ast::LetStmt(it) => { + return it.ty().is_some() + }, + ast::Param(it) => { + return it.ty().is_some() + }, + ast::MatchArm(_it) => { + return pat_is_enum_variant(db, bind_pat, pat_ty); + }, + ast::IfExpr(it) => { + return it.condition().and_then(|condition| condition.pat()).is_some() + && pat_is_enum_variant(db, bind_pat, pat_ty); + }, + ast::WhileExpr(it) => { + return it.condition().and_then(|condition| condition.pat()).is_some() + && pat_is_enum_variant(db, bind_pat, pat_ty); + }, + _ => (), + } + } + } + false +} + +fn should_show_param_name_hint( + sema: &Semantics, + callable: &Callable, + param_name: &str, + argument: &ast::Expr, +) -> bool { + let param_name = param_name.trim_start_matches('_'); + let fn_name = match callable.kind() { + hir::CallableKind::Function(it) => Some(it.name(sema.db).to_string()), + hir::CallableKind::TupleStruct(_) + | hir::CallableKind::TupleEnumVariant(_) + | hir::CallableKind::Closure => None, + }; + if param_name.is_empty() + || Some(param_name) == fn_name.as_ref().map(|s| s.trim_start_matches('_')) + || is_argument_similar_to_param_name(sema, argument, param_name) + || param_name.starts_with("ra_fixture") + { + return false; + } + + // avoid displaying hints for common functions like map, filter, etc. 
+ // or other obvious words used in std + !(callable.n_params() == 1 && is_obvious_param(param_name)) +} + +fn is_argument_similar_to_param_name( + sema: &Semantics, + argument: &ast::Expr, + param_name: &str, +) -> bool { + if is_enum_name_similar_to_param_name(sema, argument, param_name) { + return true; + } + match get_string_representation(argument) { + None => false, + Some(repr) => { + let argument_string = repr.trim_start_matches('_'); + argument_string.starts_with(param_name) || argument_string.ends_with(param_name) + } + } +} + +fn is_enum_name_similar_to_param_name( + sema: &Semantics, + argument: &ast::Expr, + param_name: &str, +) -> bool { + match sema.type_of_expr(argument).and_then(|t| t.as_adt()) { + Some(Adt::Enum(e)) => to_lower_snake_case(&e.name(sema.db).to_string()) == param_name, + _ => false, + } +} + +fn get_string_representation(expr: &ast::Expr) -> Option { + match expr { + ast::Expr::MethodCallExpr(method_call_expr) => { + Some(method_call_expr.name_ref()?.to_string()) + } + ast::Expr::RefExpr(ref_expr) => get_string_representation(&ref_expr.expr()?), + _ => Some(expr.to_string()), + } +} + +fn is_obvious_param(param_name: &str) -> bool { + let is_obvious_param_name = + matches!(param_name, "predicate" | "value" | "pat" | "rhs" | "other"); + param_name.len() == 1 || is_obvious_param_name +} + +fn get_callable(sema: &Semantics, expr: &ast::Expr) -> Option { + match expr { + ast::Expr::CallExpr(expr) => sema.type_of_expr(&expr.expr()?)?.as_callable(sema.db), + ast::Expr::MethodCallExpr(expr) => sema.resolve_method_call_as_callable(expr), + _ => None, + } +} + +#[cfg(test)] +mod tests { + use expect::{expect, Expect}; + use test_utils::extract_annotations; + + use crate::{inlay_hints::InlayHintsConfig, mock_analysis::single_file}; + + fn check(ra_fixture: &str) { + check_with_config(InlayHintsConfig::default(), ra_fixture); + } + + fn check_with_config(config: InlayHintsConfig, ra_fixture: &str) { + let (analysis, file_id) = 
single_file(ra_fixture); + let expected = extract_annotations(&*analysis.file_text(file_id).unwrap()); + let inlay_hints = analysis.inlay_hints(file_id, &config).unwrap(); + let actual = + inlay_hints.into_iter().map(|it| (it.range, it.label.to_string())).collect::>(); + assert_eq!(expected, actual, "\nExpected:\n{:#?}\n\nActual:\n{:#?}", expected, actual); + } + + fn check_expect(config: InlayHintsConfig, ra_fixture: &str, expect: Expect) { + let (analysis, file_id) = single_file(ra_fixture); + let inlay_hints = analysis.inlay_hints(file_id, &config).unwrap(); + expect.assert_debug_eq(&inlay_hints) + } + + #[test] + fn param_hints_only() { + check_with_config( + InlayHintsConfig { + parameter_hints: true, + type_hints: false, + chaining_hints: false, + max_length: None, + }, + r#" +fn foo(a: i32, b: i32) -> i32 { a + b } +fn main() { + let _x = foo( + 4, + //^ a + 4, + //^ b + ); +}"#, + ); + } + + #[test] + fn hints_disabled() { + check_with_config( + InlayHintsConfig { + type_hints: false, + parameter_hints: false, + chaining_hints: false, + max_length: None, + }, + r#" +fn foo(a: i32, b: i32) -> i32 { a + b } +fn main() { + let _x = foo(4, 4); +}"#, + ); + } + + #[test] + fn type_hints_only() { + check_with_config( + InlayHintsConfig { + type_hints: true, + parameter_hints: false, + chaining_hints: false, + max_length: None, + }, + r#" +fn foo(a: i32, b: i32) -> i32 { a + b } +fn main() { + let _x = foo(4, 4); + //^^ i32 +}"#, + ); + } + + #[test] + fn default_generic_types_should_not_be_displayed() { + check( + r#" +struct Test { k: K, t: T } + +fn main() { + let zz = Test { t: 23u8, k: 33 }; + //^^ Test + let zz_ref = &zz; + //^^^^^^ &Test + let test = || zz; + //^^^^ || -> Test +}"#, + ); + } + + #[test] + fn let_statement() { + check( + r#" +#[derive(PartialEq)] +enum Option { None, Some(T) } + +#[derive(PartialEq)] +struct Test { a: Option, b: u8 } + +fn main() { + struct InnerStruct {} + + let test = 54; + //^^^^ i32 + let test: i32 = 33; + let mut test = 
33; + //^^^^^^^^ i32 + let _ = 22; + let test = "test"; + //^^^^ &str + let test = InnerStruct {}; + + let test = unresolved(); + + let test = (42, 'a'); + //^^^^ (i32, char) + let (a, (b, (c,)) = (2, (3, (9.2,)); + //^ i32 ^ i32 ^ f64 + let &x = &92; + //^ i32 +}"#, + ); + } + + #[test] + fn closure_parameters() { + check( + r#" +fn main() { + let mut start = 0; + //^^^^^^^^^ i32 + (0..2).for_each(|increment| { start += increment; }); + //^^^^^^^^^ i32 + + let multiply = + //^^^^^^^^ |…| -> i32 + | a, b| a * b + //^ i32 ^ i32 + ; + + let _: i32 = multiply(1, 2); + let multiply_ref = &multiply; + //^^^^^^^^^^^^ &|…| -> i32 + + let return_42 = || 42; + //^^^^^^^^^ || -> i32 +}"#, + ); + } + + #[test] + fn for_expression() { + check( + r#" +fn main() { + let mut start = 0; + //^^^^^^^^^ i32 + for increment in 0..2 { start += increment; } + //^^^^^^^^^ i32 +}"#, + ); + } + + #[test] + fn if_expr() { + check( + r#" +enum Option { None, Some(T) } +use Option::*; + +struct Test { a: Option, b: u8 } + +fn main() { + let test = Some(Test { a: Some(3), b: 1 }); + //^^^^ Option + if let None = &test {}; + if let test = &test {}; + //^^^^ &Option + if let Some(test) = &test {}; + //^^^^ &Test + if let Some(Test { a, b }) = &test {}; + //^ &Option ^ &u8 + if let Some(Test { a: x, b: y }) = &test {}; + //^ &Option ^ &u8 + if let Some(Test { a: Some(x), b: y }) = &test {}; + //^ &u32 ^ &u8 + if let Some(Test { a: None, b: y }) = &test {}; + //^ &u8 + if let Some(Test { b: y, .. 
}) = &test {}; + //^ &u8 + if test == None {} +}"#, + ); + } + + #[test] + fn while_expr() { + check( + r#" +enum Option { None, Some(T) } +use Option::*; + +struct Test { a: Option, b: u8 } + +fn main() { + let test = Some(Test { a: Some(3), b: 1 }); + //^^^^ Option + while let Some(Test { a: Some(x), b: y }) = &test {}; + //^ &u32 ^ &u8 +}"#, + ); + } + + #[test] + fn match_arm_list() { + check( + r#" +enum Option { None, Some(T) } +use Option::*; + +struct Test { a: Option, b: u8 } + +fn main() { + match Some(Test { a: Some(3), b: 1 }) { + None => (), + test => (), + //^^^^ Option + Some(Test { a: Some(x), b: y }) => (), + //^ u32 ^ u8 + _ => {} + } +}"#, + ); + } + + #[test] + fn hint_truncation() { + check_with_config( + InlayHintsConfig { max_length: Some(8), ..Default::default() }, + r#" +struct Smol(T); + +struct VeryLongOuterName(T); + +fn main() { + let a = Smol(0u32); + //^ Smol + let b = VeryLongOuterName(0usize); + //^ VeryLongOuterName<…> + let c = Smol(Smol(0u32)) + //^ Smol> +}"#, + ); + } + + #[test] + fn function_call_parameter_hint() { + check( + r#" +enum Option { None, Some(T) } +use Option::*; + +struct FileId {} +struct SmolStr {} + +struct TextRange {} +struct SyntaxKind {} +struct NavigationTarget {} + +struct Test {} + +impl Test { + fn method(&self, mut param: i32) -> i32 { param * 2 } + + fn from_syntax( + file_id: FileId, + name: SmolStr, + focus_range: Option, + full_range: TextRange, + kind: SyntaxKind, + docs: Option, + ) -> NavigationTarget { + NavigationTarget {} + } +} + +fn test_func(mut foo: i32, bar: i32, msg: &str, _: i32, last: i32) -> i32 { + foo + bar +} + +fn main() { + let not_literal = 1; + //^^^^^^^^^^^ i32 + let _: i32 = test_func(1, 2, "hello", 3, not_literal); + //^ foo ^ bar ^^^^^^^ msg ^^^^^^^^^^^ last + let t: Test = Test {}; + t.method(123); + //^^^ param + Test::method(&t, 3456); + //^^ &self ^^^^ param + Test::from_syntax( + FileId {}, + //^^^^^^^^^ file_id + "impl".into(), + //^^^^^^^^^^^^^ name + None, + 
//^^^^ focus_range + TextRange {}, + //^^^^^^^^^^^^ full_range + SyntaxKind {}, + //^^^^^^^^^^^^^ kind + None, + //^^^^ docs + ); +}"#, + ); + } + + #[test] + fn omitted_parameters_hints_heuristics() { + check_with_config( + InlayHintsConfig { max_length: Some(8), ..Default::default() }, + r#" +fn map(f: i32) {} +fn filter(predicate: i32) {} + +struct TestVarContainer { + test_var: i32, +} + +impl TestVarContainer { + fn test_var(&self) -> i32 { + self.test_var + } +} + +struct Test {} + +impl Test { + fn map(self, f: i32) -> Self { + self + } + + fn filter(self, predicate: i32) -> Self { + self + } + + fn field(self, value: i32) -> Self { + self + } + + fn no_hints_expected(&self, _: i32, test_var: i32) {} + + fn frob(&self, frob: bool) {} +} + +struct Param {} + +fn different_order(param: &Param) {} +fn different_order_mut(param: &mut Param) {} +fn has_underscore(_param: bool) {} +fn enum_matches_param_name(completion_kind: CompletionKind) {} +fn param_destructuring_omitted_1((a, b): (u32, u32)) {} +fn param_destructuring_omitted_2(TestVarContainer { test_var: _ }: TestVarContainer) {} + +fn twiddle(twiddle: bool) {} +fn doo(_doo: bool) {} + +enum CompletionKind { + Keyword, +} + +fn main() { + let container: TestVarContainer = TestVarContainer { test_var: 42 }; + let test: Test = Test {}; + + map(22); + filter(33); + + let test_processed: Test = test.map(1).filter(2).field(3); + + let test_var: i32 = 55; + test_processed.no_hints_expected(22, test_var); + test_processed.no_hints_expected(33, container.test_var); + test_processed.no_hints_expected(44, container.test_var()); + test_processed.frob(false); + + twiddle(true); + doo(true); + + let mut param_begin: Param = Param {}; + different_order(¶m_begin); + different_order(&mut param_begin); + + let param: bool = true; + has_underscore(param); + + enum_matches_param_name(CompletionKind::Keyword); + + let a: f64 = 7.0; + let b: f64 = 4.0; + let _: f64 = a.div_euclid(b); + let _: f64 = a.abs_sub(b); + + let range: 
(u32, u32) = (3, 5); + param_destructuring_omitted_1(range); + param_destructuring_omitted_2(container); +}"#, + ); + } + + #[test] + fn unit_structs_have_no_type_hints() { + check_with_config( + InlayHintsConfig { max_length: Some(8), ..Default::default() }, + r#" +enum Result { Ok(T), Err(E) } +use Result::*; + +struct SyntheticSyntax; + +fn main() { + match Ok(()) { + Ok(_) => (), + Err(SyntheticSyntax) => (), + } +}"#, + ); + } + + #[test] + fn chaining_hints_ignore_comments() { + check_expect( + InlayHintsConfig { + parameter_hints: false, + type_hints: false, + chaining_hints: true, + max_length: None, + }, + r#" +struct A(B); +impl A { fn into_b(self) -> B { self.0 } } +struct B(C); +impl B { fn into_c(self) -> C { self.0 } } +struct C; + +fn main() { + let c = A(B(C)) + .into_b() // This is a comment + .into_c(); +} +"#, + expect![[r#" + [ + InlayHint { + range: 147..172, + kind: ChainingHint, + label: "B", + }, + InlayHint { + range: 147..154, + kind: ChainingHint, + label: "A", + }, + ] + "#]], + ); + } + + #[test] + fn chaining_hints_without_newlines() { + check_with_config( + InlayHintsConfig { + parameter_hints: false, + type_hints: false, + chaining_hints: true, + max_length: None, + }, + r#" +struct A(B); +impl A { fn into_b(self) -> B { self.0 } } +struct B(C); +impl B { fn into_c(self) -> C { self.0 } } +struct C; + +fn main() { + let c = A(B(C)).into_b().into_c(); +}"#, + ); + } + + #[test] + fn struct_access_chaining_hints() { + check_expect( + InlayHintsConfig { + parameter_hints: false, + type_hints: false, + chaining_hints: true, + max_length: None, + }, + r#" +struct A { pub b: B } +struct B { pub c: C } +struct C(pub bool); +struct D; + +impl D { + fn foo(&self) -> i32 { 42 } +} + +fn main() { + let x = A { b: B { c: C(true) } } + .b + .c + .0; + let x = D + .foo(); +}"#, + expect![[r#" + [ + InlayHint { + range: 143..190, + kind: ChainingHint, + label: "C", + }, + InlayHint { + range: 143..179, + kind: ChainingHint, + label: "B", + }, + ] + 
"#]], + ); + } + + #[test] + fn generic_chaining_hints() { + check_expect( + InlayHintsConfig { + parameter_hints: false, + type_hints: false, + chaining_hints: true, + max_length: None, + }, + r#" +struct A(T); +struct B(T); +struct C(T); +struct X(T, R); + +impl A { + fn new(t: T) -> Self { A(t) } + fn into_b(self) -> B { B(self.0) } +} +impl B { + fn into_c(self) -> C { C(self.0) } +} +fn main() { + let c = A::new(X(42, true)) + .into_b() + .into_c(); +} +"#, + expect![[r#" + [ + InlayHint { + range: 246..283, + kind: ChainingHint, + label: "B>", + }, + InlayHint { + range: 246..265, + kind: ChainingHint, + label: "A>", + }, + ] + "#]], + ); + } +} diff --git a/crates/ide/src/join_lines.rs b/crates/ide/src/join_lines.rs new file mode 100644 index 0000000000..e37702acdf --- /dev/null +++ b/crates/ide/src/join_lines.rs @@ -0,0 +1,773 @@ +use assists::utils::extract_trivial_expression; +use itertools::Itertools; +use syntax::{ + algo::{find_covering_element, non_trivia_sibling}, + ast::{self, AstNode, AstToken}, + Direction, NodeOrToken, SourceFile, + SyntaxKind::{self, USE_TREE, WHITESPACE}, + SyntaxNode, SyntaxToken, TextRange, TextSize, T, +}; +use text_edit::{TextEdit, TextEditBuilder}; + +// Feature: Join Lines +// +// Join selected lines into one, smartly fixing up whitespace, trailing commas, and braces. 
+// +// |=== +// | Editor | Action Name +// +// | VS Code | **Rust Analyzer: Join lines** +// |=== +pub fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit { + let range = if range.is_empty() { + let syntax = file.syntax(); + let text = syntax.text().slice(range.start()..); + let pos = match text.find_char('\n') { + None => return TextEdit::builder().finish(), + Some(pos) => pos, + }; + TextRange::at(range.start() + pos, TextSize::of('\n')) + } else { + range + }; + + let node = match find_covering_element(file.syntax(), range) { + NodeOrToken::Node(node) => node, + NodeOrToken::Token(token) => token.parent(), + }; + let mut edit = TextEdit::builder(); + for token in node.descendants_with_tokens().filter_map(|it| it.into_token()) { + let range = match range.intersect(token.text_range()) { + Some(range) => range, + None => continue, + } - token.text_range().start(); + let text = token.text(); + for (pos, _) in text[range].bytes().enumerate().filter(|&(_, b)| b == b'\n') { + let pos: TextSize = (pos as u32).into(); + let off = token.text_range().start() + range.start() + pos; + if !edit.invalidates_offset(off) { + remove_newline(&mut edit, &token, off); + } + } + } + + edit.finish() +} + +fn remove_newline(edit: &mut TextEditBuilder, token: &SyntaxToken, offset: TextSize) { + if token.kind() != WHITESPACE || token.text().bytes().filter(|&b| b == b'\n').count() != 1 { + // The node is either the first or the last in the file + let suff = &token.text()[TextRange::new( + offset - token.text_range().start() + TextSize::of('\n'), + TextSize::of(token.text().as_str()), + )]; + let spaces = suff.bytes().take_while(|&b| b == b' ').count(); + + edit.replace(TextRange::at(offset, ((spaces + 1) as u32).into()), " ".to_string()); + return; + } + + // The node is between two other nodes + let prev = token.prev_sibling_or_token().unwrap(); + let next = token.next_sibling_or_token().unwrap(); + if is_trailing_comma(prev.kind(), next.kind()) { + // Removes: trailing 
comma, newline (incl. surrounding whitespace) + edit.delete(TextRange::new(prev.text_range().start(), token.text_range().end())); + return; + } + if prev.kind() == T![,] && next.kind() == T!['}'] { + // Removes: comma, newline (incl. surrounding whitespace) + let space = if let Some(left) = prev.prev_sibling_or_token() { + compute_ws(left.kind(), next.kind()) + } else { + " " + }; + edit.replace( + TextRange::new(prev.text_range().start(), token.text_range().end()), + space.to_string(), + ); + return; + } + + if let (Some(_), Some(next)) = ( + prev.as_token().cloned().and_then(ast::Comment::cast), + next.as_token().cloned().and_then(ast::Comment::cast), + ) { + // Removes: newline (incl. surrounding whitespace), start of the next comment + edit.delete(TextRange::new( + token.text_range().start(), + next.syntax().text_range().start() + TextSize::of(next.prefix()), + )); + return; + } + + // Special case that turns something like: + // + // ``` + // my_function({<|> + // + // }) + // ``` + // + // into `my_function()` + if join_single_expr_block(edit, token).is_some() { + return; + } + // ditto for + // + // ``` + // use foo::{<|> + // bar + // }; + // ``` + if join_single_use_tree(edit, token).is_some() { + return; + } + + // Remove newline but add a computed amount of whitespace characters + edit.replace(token.text_range(), compute_ws(prev.kind(), next.kind()).to_string()); +} + +fn has_comma_after(node: &SyntaxNode) -> bool { + match non_trivia_sibling(node.clone().into(), Direction::Next) { + Some(n) => n.kind() == T![,], + _ => false, + } +} + +fn join_single_expr_block(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> { + let block_expr = ast::BlockExpr::cast(token.parent())?; + if !block_expr.is_standalone() { + return None; + } + let expr = extract_trivial_expression(&block_expr)?; + + let block_range = block_expr.syntax().text_range(); + let mut buf = expr.syntax().text().to_string(); + + // Match block needs to have a comma after the block + if 
let Some(match_arm) = block_expr.syntax().parent().and_then(ast::MatchArm::cast) { + if !has_comma_after(match_arm.syntax()) { + buf.push(','); + } + } + + edit.replace(block_range, buf); + + Some(()) +} + +fn join_single_use_tree(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> { + let use_tree_list = ast::UseTreeList::cast(token.parent())?; + let (tree,) = use_tree_list.use_trees().collect_tuple()?; + edit.replace(use_tree_list.syntax().text_range(), tree.syntax().text().to_string()); + Some(()) +} + +fn is_trailing_comma(left: SyntaxKind, right: SyntaxKind) -> bool { + matches!((left, right), (T![,], T![')']) | (T![,], T![']'])) +} + +fn compute_ws(left: SyntaxKind, right: SyntaxKind) -> &'static str { + match left { + T!['('] | T!['['] => return "", + T!['{'] => { + if let USE_TREE = right { + return ""; + } + } + _ => (), + } + match right { + T![')'] | T![']'] => return "", + T!['}'] => { + if let USE_TREE = left { + return ""; + } + } + T![.] => return "", + _ => (), + } + " " +} + +#[cfg(test)] +mod tests { + use syntax::SourceFile; + use test_utils::{add_cursor, assert_eq_text, extract_offset, extract_range}; + + use super::*; + + fn check_join_lines(before: &str, after: &str) { + let (before_cursor_pos, before) = extract_offset(before); + let file = SourceFile::parse(&before).ok().unwrap(); + + let range = TextRange::empty(before_cursor_pos); + let result = join_lines(&file, range); + + let actual = { + let mut actual = before.to_string(); + result.apply(&mut actual); + actual + }; + let actual_cursor_pos = result + .apply_to_offset(before_cursor_pos) + .expect("cursor position is affected by the edit"); + let actual = add_cursor(&actual, actual_cursor_pos); + assert_eq_text!(after, &actual); + } + + #[test] + fn test_join_lines_comma() { + check_join_lines( + r" +fn foo() { + <|>foo(1, + ) +} +", + r" +fn foo() { + <|>foo(1) +} +", + ); + } + + #[test] + fn test_join_lines_lambda_block() { + check_join_lines( + r" +pub fn reparse(&self, 
edit: &AtomTextEdit) -> File { + <|>self.incremental_reparse(edit).unwrap_or_else(|| { + self.full_reparse(edit) + }) +} +", + r" +pub fn reparse(&self, edit: &AtomTextEdit) -> File { + <|>self.incremental_reparse(edit).unwrap_or_else(|| self.full_reparse(edit)) +} +", + ); + } + + #[test] + fn test_join_lines_block() { + check_join_lines( + r" +fn foo() { + foo(<|>{ + 92 + }) +}", + r" +fn foo() { + foo(<|>92) +}", + ); + } + + #[test] + fn test_join_lines_diverging_block() { + let before = r" + fn foo() { + loop { + match x { + 92 => <|>{ + continue; + } + } + } + } + "; + let after = r" + fn foo() { + loop { + match x { + 92 => <|>continue, + } + } + } + "; + check_join_lines(before, after); + } + + #[test] + fn join_lines_adds_comma_for_block_in_match_arm() { + check_join_lines( + r" +fn foo(e: Result) { + match e { + Ok(u) => <|>{ + u.foo() + } + Err(v) => v, + } +}", + r" +fn foo(e: Result) { + match e { + Ok(u) => <|>u.foo(), + Err(v) => v, + } +}", + ); + } + + #[test] + fn join_lines_multiline_in_block() { + check_join_lines( + r" +fn foo() { + match ty { + <|> Some(ty) => { + match ty { + _ => false, + } + } + _ => true, + } +} +", + r" +fn foo() { + match ty { + <|> Some(ty) => match ty { + _ => false, + }, + _ => true, + } +} +", + ); + } + + #[test] + fn join_lines_keeps_comma_for_block_in_match_arm() { + // We already have a comma + check_join_lines( + r" +fn foo(e: Result) { + match e { + Ok(u) => <|>{ + u.foo() + }, + Err(v) => v, + } +}", + r" +fn foo(e: Result) { + match e { + Ok(u) => <|>u.foo(), + Err(v) => v, + } +}", + ); + + // comma with whitespace between brace and , + check_join_lines( + r" +fn foo(e: Result) { + match e { + Ok(u) => <|>{ + u.foo() + } , + Err(v) => v, + } +}", + r" +fn foo(e: Result) { + match e { + Ok(u) => <|>u.foo() , + Err(v) => v, + } +}", + ); + + // comma with newline between brace and , + check_join_lines( + r" +fn foo(e: Result) { + match e { + Ok(u) => <|>{ + u.foo() + } + , + Err(v) => v, + } +}", + r" +fn 
foo(e: Result) { + match e { + Ok(u) => <|>u.foo() + , + Err(v) => v, + } +}", + ); + } + + #[test] + fn join_lines_keeps_comma_with_single_arg_tuple() { + // A single arg tuple + check_join_lines( + r" +fn foo() { + let x = (<|>{ + 4 + },); +}", + r" +fn foo() { + let x = (<|>4,); +}", + ); + + // single arg tuple with whitespace between brace and comma + check_join_lines( + r" +fn foo() { + let x = (<|>{ + 4 + } ,); +}", + r" +fn foo() { + let x = (<|>4 ,); +}", + ); + + // single arg tuple with newline between brace and comma + check_join_lines( + r" +fn foo() { + let x = (<|>{ + 4 + } + ,); +}", + r" +fn foo() { + let x = (<|>4 + ,); +}", + ); + } + + #[test] + fn test_join_lines_use_items_left() { + // No space after the '{' + check_join_lines( + r" +<|>use syntax::{ + TextSize, TextRange, +};", + r" +<|>use syntax::{TextSize, TextRange, +};", + ); + } + + #[test] + fn test_join_lines_use_items_right() { + // No space after the '}' + check_join_lines( + r" +use syntax::{ +<|> TextSize, TextRange +};", + r" +use syntax::{ +<|> TextSize, TextRange};", + ); + } + + #[test] + fn test_join_lines_use_items_right_comma() { + // No space after the '}' + check_join_lines( + r" +use syntax::{ +<|> TextSize, TextRange, +};", + r" +use syntax::{ +<|> TextSize, TextRange};", + ); + } + + #[test] + fn test_join_lines_use_tree() { + check_join_lines( + r" +use syntax::{ + algo::<|>{ + find_token_at_offset, + }, + ast, +};", + r" +use syntax::{ + algo::<|>find_token_at_offset, + ast, +};", + ); + } + + #[test] + fn test_join_lines_normal_comments() { + check_join_lines( + r" +fn foo() { + // Hello<|> + // world! +} +", + r" +fn foo() { + // Hello<|> world! +} +", + ); + } + + #[test] + fn test_join_lines_doc_comments() { + check_join_lines( + r" +fn foo() { + /// Hello<|> + /// world! +} +", + r" +fn foo() { + /// Hello<|> world! +} +", + ); + } + + #[test] + fn test_join_lines_mod_comments() { + check_join_lines( + r" +fn foo() { + //! Hello<|> + //! world! 
+} +", + r" +fn foo() { + //! Hello<|> world! +} +", + ); + } + + #[test] + fn test_join_lines_multiline_comments_1() { + check_join_lines( + r" +fn foo() { + // Hello<|> + /* world! */ +} +", + r" +fn foo() { + // Hello<|> world! */ +} +", + ); + } + + #[test] + fn test_join_lines_multiline_comments_2() { + check_join_lines( + r" +fn foo() { + // The<|> + /* quick + brown + fox! */ +} +", + r" +fn foo() { + // The<|> quick + brown + fox! */ +} +", + ); + } + + fn check_join_lines_sel(before: &str, after: &str) { + let (sel, before) = extract_range(before); + let parse = SourceFile::parse(&before); + let result = join_lines(&parse.tree(), sel); + let actual = { + let mut actual = before.to_string(); + result.apply(&mut actual); + actual + }; + assert_eq_text!(after, &actual); + } + + #[test] + fn test_join_lines_selection_fn_args() { + check_join_lines_sel( + r" +fn foo() { + <|>foo(1, + 2, + 3, + <|>) +} + ", + r" +fn foo() { + foo(1, 2, 3) +} + ", + ); + } + + #[test] + fn test_join_lines_selection_struct() { + check_join_lines_sel( + r" +struct Foo <|>{ + f: u32, +}<|> + ", + r" +struct Foo { f: u32 } + ", + ); + } + + #[test] + fn test_join_lines_selection_dot_chain() { + check_join_lines_sel( + r" +fn foo() { + join(<|>type_params.type_params() + .filter_map(|it| it.name()) + .map(|it| it.text())<|>) +}", + r" +fn foo() { + join(type_params.type_params().filter_map(|it| it.name()).map(|it| it.text())) +}", + ); + } + + #[test] + fn test_join_lines_selection_lambda_block_body() { + check_join_lines_sel( + r" +pub fn handle_find_matching_brace() { + params.offsets + .map(|offset| <|>{ + world.analysis().matching_brace(&file, offset).unwrap_or(offset) + }<|>) + .collect(); +}", + r" +pub fn handle_find_matching_brace() { + params.offsets + .map(|offset| world.analysis().matching_brace(&file, offset).unwrap_or(offset)) + .collect(); +}", + ); + } + + #[test] + fn test_join_lines_commented_block() { + check_join_lines( + r" +fn main() { + let _ = { + // <|>foo + // 
bar + 92 + }; +} + ", + r" +fn main() { + let _ = { + // <|>foo bar + 92 + }; +} + ", + ) + } + + #[test] + fn join_lines_mandatory_blocks_block() { + check_join_lines( + r" +<|>fn foo() { + 92 +} + ", + r" +<|>fn foo() { 92 +} + ", + ); + + check_join_lines( + r" +fn foo() { + <|>if true { + 92 + } +} + ", + r" +fn foo() { + <|>if true { 92 + } +} + ", + ); + + check_join_lines( + r" +fn foo() { + <|>loop { + 92 + } +} + ", + r" +fn foo() { + <|>loop { 92 + } +} + ", + ); + + check_join_lines( + r" +fn foo() { + <|>unsafe { + 92 + } +} + ", + r" +fn foo() { + <|>unsafe { 92 + } +} + ", + ); + } +} diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs new file mode 100644 index 0000000000..eb63895297 --- /dev/null +++ b/crates/ide/src/lib.rs @@ -0,0 +1,542 @@ +//! ide crate provides "ide-centric" APIs for the rust-analyzer. That is, +//! it generally operates with files and text ranges, and returns results as +//! Strings, suitable for displaying to the human. +//! +//! What powers this API are the `RootDatabase` struct, which defines a `salsa` +//! database, and the `hir` crate, where majority of the analysis happens. +//! However, IDE specific bits of the analysis (most notably completion) happen +//! in this crate. + +// For proving that RootDatabase is RefUnwindSafe. +#![recursion_limit = "128"] + +#[allow(unused)] +macro_rules! 
eprintln { + ($($tt:tt)*) => { stdx::eprintln!($($tt)*) }; +} + +pub mod mock_analysis; + +mod markup; +mod prime_caches; +mod display; + +mod call_hierarchy; +mod call_info; +mod completion; +mod diagnostics; +mod expand_macro; +mod extend_selection; +mod file_structure; +mod folding_ranges; +mod goto_definition; +mod goto_implementation; +mod goto_type_definition; +mod hover; +mod inlay_hints; +mod join_lines; +mod matching_brace; +mod parent_module; +mod references; +mod runnables; +mod status; +mod syntax_highlighting; +mod syntax_tree; +mod typing; + +use std::sync::Arc; + +use base_db::{ + salsa::{self, ParallelDatabase}, + CheckCanceled, Env, FileLoader, FileSet, SourceDatabase, VfsPath, +}; +use cfg::CfgOptions; +use ide_db::{ + symbol_index::{self, FileSymbol}, + LineIndexDatabase, +}; +use syntax::{SourceFile, TextRange, TextSize}; + +use crate::display::ToNav; + +pub use crate::{ + call_hierarchy::CallItem, + call_info::CallInfo, + completion::{ + CompletionConfig, CompletionItem, CompletionItemKind, CompletionScore, InsertTextFormat, + }, + diagnostics::Severity, + display::NavigationTarget, + expand_macro::ExpandedMacro, + file_structure::StructureNode, + folding_ranges::{Fold, FoldKind}, + hover::{HoverAction, HoverConfig, HoverGotoTypeData, HoverResult}, + inlay_hints::{InlayHint, InlayHintsConfig, InlayKind}, + markup::Markup, + references::{Declaration, Reference, ReferenceAccess, ReferenceKind, ReferenceSearchResult}, + runnables::{Runnable, RunnableKind, TestId}, + syntax_highlighting::{ + Highlight, HighlightModifier, HighlightModifiers, HighlightTag, HighlightedRange, + }, +}; + +pub use assists::{Assist, AssistConfig, AssistId, AssistKind, ResolvedAssist}; +pub use base_db::{ + Canceled, CrateGraph, CrateId, Edition, FileId, FilePosition, FileRange, SourceRoot, + SourceRootId, +}; +pub use hir::{Documentation, Semantics}; +pub use ide_db::{ + change::AnalysisChange, + line_index::{LineCol, LineIndex}, + search::SearchScope, + 
source_change::{FileSystemEdit, SourceChange, SourceFileEdit}, + symbol_index::Query, + RootDatabase, +}; +pub use ssr::SsrError; +pub use text_edit::{Indel, TextEdit}; + +pub type Cancelable = Result; + +#[derive(Debug)] +pub struct Diagnostic { + pub message: String, + pub range: TextRange, + pub severity: Severity, + pub fix: Option, +} + +#[derive(Debug)] +pub struct Fix { + pub label: String, + pub source_change: SourceChange, + /// Allows to trigger the fix only when the caret is in the range given + pub fix_trigger_range: TextRange, +} + +impl Fix { + pub fn new( + label: impl Into, + source_change: SourceChange, + fix_trigger_range: TextRange, + ) -> Self { + let label = label.into(); + assert!(label.starts_with(char::is_uppercase) && !label.ends_with('.')); + Self { label, source_change, fix_trigger_range } + } +} + +/// Info associated with a text range. +#[derive(Debug)] +pub struct RangeInfo { + pub range: TextRange, + pub info: T, +} + +impl RangeInfo { + pub fn new(range: TextRange, info: T) -> RangeInfo { + RangeInfo { range, info } + } +} + +/// `AnalysisHost` stores the current state of the world. +#[derive(Debug)] +pub struct AnalysisHost { + db: RootDatabase, +} + +impl AnalysisHost { + pub fn new(lru_capacity: Option) -> AnalysisHost { + AnalysisHost { db: RootDatabase::new(lru_capacity) } + } + + pub fn update_lru_capacity(&mut self, lru_capacity: Option) { + self.db.update_lru_capacity(lru_capacity); + } + + /// Returns a snapshot of the current state, which you can query for + /// semantic information. + pub fn analysis(&self) -> Analysis { + Analysis { db: self.db.snapshot() } + } + + /// Applies changes to the current state of the world. If there are + /// outstanding snapshots, they will be canceled. 
+ pub fn apply_change(&mut self, change: AnalysisChange) { + self.db.apply_change(change) + } + + pub fn maybe_collect_garbage(&mut self) { + self.db.maybe_collect_garbage(); + } + + pub fn collect_garbage(&mut self) { + self.db.collect_garbage(); + } + /// NB: this clears the database + pub fn per_query_memory_usage(&mut self) -> Vec<(String, profile::Bytes)> { + self.db.per_query_memory_usage() + } + pub fn request_cancellation(&mut self) { + self.db.request_cancellation(); + } + pub fn raw_database(&self) -> &RootDatabase { + &self.db + } + pub fn raw_database_mut(&mut self) -> &mut RootDatabase { + &mut self.db + } +} + +impl Default for AnalysisHost { + fn default() -> AnalysisHost { + AnalysisHost::new(None) + } +} + +/// Analysis is a snapshot of a world state at a moment in time. It is the main +/// entry point for asking semantic information about the world. When the world +/// state is advanced using `AnalysisHost::apply_change` method, all existing +/// `Analysis` are canceled (most method return `Err(Canceled)`). +#[derive(Debug)] +pub struct Analysis { + db: salsa::Snapshot, +} + +// As a general design guideline, `Analysis` API are intended to be independent +// from the language server protocol. That is, when exposing some functionality +// we should think in terms of "what API makes most sense" and not in terms of +// "what types LSP uses". Although currently LSP is the only consumer of the +// API, the API should in theory be usable as a library, or via a different +// protocol. +impl Analysis { + // Creates an analysis instance for a single file, without any extenal + // dependencies, stdlib support or ability to apply changes. See + // `AnalysisHost` for creating a fully-featured analysis. 
+ pub fn from_single_file(text: String) -> (Analysis, FileId) { + let mut host = AnalysisHost::default(); + let file_id = FileId(0); + let mut file_set = FileSet::default(); + file_set.insert(file_id, VfsPath::new_virtual_path("/main.rs".to_string())); + let source_root = SourceRoot::new_local(file_set); + + let mut change = AnalysisChange::new(); + change.set_roots(vec![source_root]); + let mut crate_graph = CrateGraph::default(); + // FIXME: cfg options + // Default to enable test for single file. + let mut cfg_options = CfgOptions::default(); + cfg_options.insert_atom("test".into()); + crate_graph.add_crate_root( + file_id, + Edition::Edition2018, + None, + cfg_options, + Env::default(), + Default::default(), + ); + change.change_file(file_id, Some(Arc::new(text))); + change.set_crate_graph(crate_graph); + host.apply_change(change); + (host.analysis(), file_id) + } + + /// Debug info about the current state of the analysis. + pub fn status(&self) -> Cancelable { + self.with_db(|db| status::status(&*db)) + } + + pub fn prime_caches(&self, files: Vec) -> Cancelable<()> { + self.with_db(|db| prime_caches::prime_caches(db, files)) + } + + /// Gets the text of the source file. + pub fn file_text(&self, file_id: FileId) -> Cancelable> { + self.with_db(|db| db.file_text(file_id)) + } + + /// Gets the syntax tree of the file. + pub fn parse(&self, file_id: FileId) -> Cancelable { + self.with_db(|db| db.parse(file_id).tree()) + } + + /// Gets the file's `LineIndex`: data structure to convert between absolute + /// offsets and line/column representation. + pub fn file_line_index(&self, file_id: FileId) -> Cancelable> { + self.with_db(|db| db.line_index(file_id)) + } + + /// Selects the next syntactic nodes encompassing the range. + pub fn extend_selection(&self, frange: FileRange) -> Cancelable { + self.with_db(|db| extend_selection::extend_selection(db, frange)) + } + + /// Returns position of the matching brace (all types of braces are + /// supported). 
+ pub fn matching_brace(&self, position: FilePosition) -> Cancelable> { + self.with_db(|db| { + let parse = db.parse(position.file_id); + let file = parse.tree(); + matching_brace::matching_brace(&file, position.offset) + }) + } + + /// Returns a syntax tree represented as `String`, for debug purposes. + // FIXME: use a better name here. + pub fn syntax_tree( + &self, + file_id: FileId, + text_range: Option, + ) -> Cancelable { + self.with_db(|db| syntax_tree::syntax_tree(&db, file_id, text_range)) + } + + pub fn expand_macro(&self, position: FilePosition) -> Cancelable> { + self.with_db(|db| expand_macro::expand_macro(db, position)) + } + + /// Returns an edit to remove all newlines in the range, cleaning up minor + /// stuff like trailing commas. + pub fn join_lines(&self, frange: FileRange) -> Cancelable { + self.with_db(|db| { + let parse = db.parse(frange.file_id); + join_lines::join_lines(&parse.tree(), frange.range) + }) + } + + /// Returns an edit which should be applied when opening a new line, fixing + /// up minor stuff like continuing the comment. + /// The edit will be a snippet (with `$0`). + pub fn on_enter(&self, position: FilePosition) -> Cancelable> { + self.with_db(|db| typing::on_enter(&db, position)) + } + + /// Returns an edit which should be applied after a character was typed. + /// + /// This is useful for some on-the-fly fixups, like adding `;` to `let =` + /// automatically. + pub fn on_char_typed( + &self, + position: FilePosition, + char_typed: char, + ) -> Cancelable> { + // Fast path to not even parse the file. + if !typing::TRIGGER_CHARS.contains(char_typed) { + return Ok(None); + } + self.with_db(|db| typing::on_char_typed(&db, position, char_typed)) + } + + /// Returns a tree representation of symbols in the file. Useful to draw a + /// file outline. 
+ pub fn file_structure(&self, file_id: FileId) -> Cancelable> { + self.with_db(|db| file_structure::file_structure(&db.parse(file_id).tree())) + } + + /// Returns a list of the places in the file where type hints can be displayed. + pub fn inlay_hints( + &self, + file_id: FileId, + config: &InlayHintsConfig, + ) -> Cancelable> { + self.with_db(|db| inlay_hints::inlay_hints(db, file_id, config)) + } + + /// Returns the set of folding ranges. + pub fn folding_ranges(&self, file_id: FileId) -> Cancelable> { + self.with_db(|db| folding_ranges::folding_ranges(&db.parse(file_id).tree())) + } + + /// Fuzzy searches for a symbol. + pub fn symbol_search(&self, query: Query) -> Cancelable> { + self.with_db(|db| { + symbol_index::world_symbols(db, query) + .into_iter() + .map(|s| s.to_nav(db)) + .collect::>() + }) + } + + /// Returns the definitions from the symbol at `position`. + pub fn goto_definition( + &self, + position: FilePosition, + ) -> Cancelable>>> { + self.with_db(|db| goto_definition::goto_definition(db, position)) + } + + /// Returns the impls from the symbol at `position`. + pub fn goto_implementation( + &self, + position: FilePosition, + ) -> Cancelable>>> { + self.with_db(|db| goto_implementation::goto_implementation(db, position)) + } + + /// Returns the type definitions for the symbol at `position`. + pub fn goto_type_definition( + &self, + position: FilePosition, + ) -> Cancelable>>> { + self.with_db(|db| goto_type_definition::goto_type_definition(db, position)) + } + + /// Finds all usages of the reference at point. + pub fn find_all_refs( + &self, + position: FilePosition, + search_scope: Option, + ) -> Cancelable> { + self.with_db(|db| { + references::find_all_refs(&Semantics::new(db), position, search_scope).map(|it| it.info) + }) + } + + /// Returns a short text describing element at position. 
+ pub fn hover(&self, position: FilePosition) -> Cancelable>> { + self.with_db(|db| hover::hover(db, position)) + } + + /// Computes parameter information for the given call expression. + pub fn call_info(&self, position: FilePosition) -> Cancelable> { + self.with_db(|db| call_info::call_info(db, position)) + } + + /// Computes call hierarchy candidates for the given file position. + pub fn call_hierarchy( + &self, + position: FilePosition, + ) -> Cancelable>>> { + self.with_db(|db| call_hierarchy::call_hierarchy(db, position)) + } + + /// Computes incoming calls for the given file position. + pub fn incoming_calls(&self, position: FilePosition) -> Cancelable>> { + self.with_db(|db| call_hierarchy::incoming_calls(db, position)) + } + + /// Computes outgoing calls for the given file position. + pub fn outgoing_calls(&self, position: FilePosition) -> Cancelable>> { + self.with_db(|db| call_hierarchy::outgoing_calls(db, position)) + } + + /// Returns a `mod name;` declaration which created the current module. + pub fn parent_module(&self, position: FilePosition) -> Cancelable> { + self.with_db(|db| parent_module::parent_module(db, position)) + } + + /// Returns crates this file belongs to. + pub fn crate_for(&self, file_id: FileId) -> Cancelable> { + self.with_db(|db| parent_module::crate_for(db, file_id)) + } + + /// Returns the edition of the given crate. + pub fn crate_edition(&self, crate_id: CrateId) -> Cancelable { + self.with_db(|db| db.crate_graph()[crate_id].edition) + } + + /// Returns the root file of the given crate. + pub fn crate_root(&self, crate_id: CrateId) -> Cancelable { + self.with_db(|db| db.crate_graph()[crate_id].root_file_id) + } + + /// Returns the set of possible targets to run for the current file.
+ pub fn runnables(&self, file_id: FileId) -> Cancelable> { + self.with_db(|db| runnables::runnables(db, file_id)) + } + + /// Computes syntax highlighting for the given file + pub fn highlight(&self, file_id: FileId) -> Cancelable> { + self.with_db(|db| syntax_highlighting::highlight(db, file_id, None, false)) + } + + /// Computes syntax highlighting for the given file range. + pub fn highlight_range(&self, frange: FileRange) -> Cancelable> { + self.with_db(|db| { + syntax_highlighting::highlight(db, frange.file_id, Some(frange.range), false) + }) + } + + /// Computes syntax highlighting for the given file. + pub fn highlight_as_html(&self, file_id: FileId, rainbow: bool) -> Cancelable { + self.with_db(|db| syntax_highlighting::highlight_as_html(db, file_id, rainbow)) + } + + /// Computes completions at the given position. + pub fn completions( + &self, + config: &CompletionConfig, + position: FilePosition, + ) -> Cancelable>> { + self.with_db(|db| completion::completions(db, config, position).map(Into::into)) + } + + /// Computes resolved assists with source changes for the given position. + pub fn resolved_assists( + &self, + config: &AssistConfig, + frange: FileRange, + ) -> Cancelable> { + self.with_db(|db| assists::Assist::resolved(db, config, frange)) + } + + /// Computes unresolved assists (aka code actions aka intentions) for the given + /// position. + pub fn unresolved_assists( + &self, + config: &AssistConfig, + frange: FileRange, + ) -> Cancelable> { + self.with_db(|db| Assist::unresolved(db, config, frange)) + } + + /// Computes the set of diagnostics for the given file. + pub fn diagnostics( + &self, + file_id: FileId, + enable_experimental: bool, + ) -> Cancelable> { + self.with_db(|db| diagnostics::diagnostics(db, file_id, enable_experimental)) + } + + /// Returns the edit required to rename reference at the position to the new + /// name. 
+ pub fn rename( + &self, + position: FilePosition, + new_name: &str, + ) -> Cancelable>> { + self.with_db(|db| references::rename(db, position, new_name)) + } + + pub fn structural_search_replace( + &self, + query: &str, + parse_only: bool, + resolve_context: FilePosition, + selections: Vec, + ) -> Cancelable> { + self.with_db(|db| { + let rule: ssr::SsrRule = query.parse()?; + let mut match_finder = ssr::MatchFinder::in_context(db, resolve_context, selections); + match_finder.add_rule(rule)?; + let edits = if parse_only { Vec::new() } else { match_finder.edits() }; + Ok(SourceChange::from(edits)) + }) + } + + /// Performs an operation that may be Canceled. + fn with_db(&self, f: F) -> Cancelable + where + F: FnOnce(&RootDatabase) -> T + std::panic::UnwindSafe, + { + self.db.catch_canceled(f) + } +} + +#[test] +fn analysis_is_send() { + fn is_send() {} + is_send::(); +} diff --git a/crates/ra_ide/src/markup.rs b/crates/ide/src/markup.rs similarity index 100% rename from crates/ra_ide/src/markup.rs rename to crates/ide/src/markup.rs diff --git a/crates/ide/src/matching_brace.rs b/crates/ide/src/matching_brace.rs new file mode 100644 index 0000000000..cb6abb0db9 --- /dev/null +++ b/crates/ide/src/matching_brace.rs @@ -0,0 +1,73 @@ +use syntax::{ + ast::{self, AstNode}, + SourceFile, SyntaxKind, TextSize, T, +}; +use test_utils::mark; + +// Feature: Matching Brace +// +// If the cursor is on any brace (`<>(){}[]||`) which is a part of a brace-pair, +// moves cursor to the matching brace. It uses the actual parser to determine +// braces, so it won't confuse generics with comparisons.
+// +// |=== +// | Editor | Action Name +// +// | VS Code | **Rust Analyzer: Find matching brace** +// |=== +pub fn matching_brace(file: &SourceFile, offset: TextSize) -> Option { + const BRACES: &[SyntaxKind] = + &[T!['{'], T!['}'], T!['['], T![']'], T!['('], T![')'], T![<], T![>], T![|], T![|]]; + let (brace_token, brace_idx) = file + .syntax() + .token_at_offset(offset) + .filter_map(|node| { + let idx = BRACES.iter().position(|&brace| brace == node.kind())?; + Some((node, idx)) + }) + .next()?; + let parent = brace_token.parent(); + if brace_token.kind() == T![|] && !ast::ParamList::can_cast(parent.kind()) { + mark::hit!(pipes_not_braces); + return None; + } + let matching_kind = BRACES[brace_idx ^ 1]; + let matching_node = parent + .children_with_tokens() + .filter_map(|it| it.into_token()) + .find(|node| node.kind() == matching_kind && node != &brace_token)?; + Some(matching_node.text_range().start()) +} + +#[cfg(test)] +mod tests { + use test_utils::{add_cursor, assert_eq_text, extract_offset}; + + use super::*; + + #[test] + fn test_matching_brace() { + fn do_check(before: &str, after: &str) { + let (pos, before) = extract_offset(before); + let parse = SourceFile::parse(&before); + let new_pos = match matching_brace(&parse.tree(), pos) { + None => pos, + Some(pos) => pos, + }; + let actual = add_cursor(&before, new_pos); + assert_eq_text!(after, &actual); + } + + do_check("struct Foo { a: i32, }<|>", "struct Foo <|>{ a: i32, }"); + do_check("fn main() { |x: i32|<|> x * 2;}", "fn main() { <|>|x: i32| x * 2;}"); + do_check("fn main() { <|>|x: i32| x * 2;}", "fn main() { |x: i32<|>| x * 2;}"); + + { + mark::check!(pipes_not_braces); + do_check( + "fn main() { match 92 { 1 | 2 |<|> 3 => 92 } }", + "fn main() { match 92 { 1 | 2 |<|> 3 => 92 } }", + ); + } + } +} diff --git a/crates/ide/src/mock_analysis.rs b/crates/ide/src/mock_analysis.rs new file mode 100644 index 0000000000..363e6d27e5 --- /dev/null +++ b/crates/ide/src/mock_analysis.rs @@ -0,0 +1,176 @@ +//! 
FIXME: write short doc here +use std::sync::Arc; + +use base_db::{CrateName, FileSet, SourceRoot, VfsPath}; +use cfg::CfgOptions; +use test_utils::{ + extract_annotations, extract_range_or_offset, Fixture, RangeOrOffset, CURSOR_MARKER, +}; + +use crate::{ + Analysis, AnalysisChange, AnalysisHost, CrateGraph, Edition, FileId, FilePosition, FileRange, +}; + +/// Mock analysis is used in test to bootstrap an AnalysisHost/Analysis +/// from a set of in-memory files. +#[derive(Debug, Default)] +pub struct MockAnalysis { + files: Vec, +} + +impl MockAnalysis { + /// Creates `MockAnalysis` using a fixture data in the following format: + /// + /// ```not_rust + /// //- /main.rs + /// mod foo; + /// fn main() {} + /// + /// //- /foo.rs + /// struct Baz; + /// ``` + pub fn with_files(ra_fixture: &str) -> MockAnalysis { + let (res, pos) = MockAnalysis::with_fixture(ra_fixture); + assert!(pos.is_none()); + res + } + + /// Same as `with_files`, but requires that a single file contains a `<|>` marker, + /// whose position is also returned. 
+ pub fn with_files_and_position(fixture: &str) -> (MockAnalysis, FilePosition) { + let (res, position) = MockAnalysis::with_fixture(fixture); + let (file_id, range_or_offset) = position.expect("expected a marker (<|>)"); + let offset = match range_or_offset { + RangeOrOffset::Range(_) => panic!(), + RangeOrOffset::Offset(it) => it, + }; + (res, FilePosition { file_id, offset }) + } + + fn with_fixture(fixture: &str) -> (MockAnalysis, Option<(FileId, RangeOrOffset)>) { + let mut position = None; + let mut res = MockAnalysis::default(); + for mut entry in Fixture::parse(fixture) { + if entry.text.contains(CURSOR_MARKER) { + assert!(position.is_none(), "only one marker (<|>) per fixture is allowed"); + let (range_or_offset, text) = extract_range_or_offset(&entry.text); + entry.text = text; + let file_id = res.add_file_fixture(entry); + position = Some((file_id, range_or_offset)); + } else { + res.add_file_fixture(entry); + } + } + (res, position) + } + + fn add_file_fixture(&mut self, fixture: Fixture) -> FileId { + let file_id = FileId((self.files.len() + 1) as u32); + self.files.push(fixture); + file_id + } + + pub fn id_of(&self, path: &str) -> FileId { + let (file_id, _) = + self.files().find(|(_, data)| path == data.path).expect("no file in this mock"); + file_id + } + pub fn annotations(&self) -> Vec<(FileRange, String)> { + self.files() + .flat_map(|(file_id, fixture)| { + let annotations = extract_annotations(&fixture.text); + annotations + .into_iter() + .map(move |(range, data)| (FileRange { file_id, range }, data)) + }) + .collect() + } + pub fn files(&self) -> impl Iterator + '_ { + self.files.iter().enumerate().map(|(idx, fixture)| (FileId(idx as u32 + 1), fixture)) + } + pub fn annotation(&self) -> (FileRange, String) { + let mut all = self.annotations(); + assert_eq!(all.len(), 1); + all.pop().unwrap() + } + pub fn analysis_host(self) -> AnalysisHost { + let mut host = AnalysisHost::default(); + let mut change = AnalysisChange::new(); + let mut 
file_set = FileSet::default(); + let mut crate_graph = CrateGraph::default(); + let mut root_crate = None; + for (i, data) in self.files.into_iter().enumerate() { + let path = data.path; + assert!(path.starts_with('/')); + + let mut cfg = CfgOptions::default(); + data.cfg_atoms.iter().for_each(|it| cfg.insert_atom(it.into())); + data.cfg_key_values.iter().for_each(|(k, v)| cfg.insert_key_value(k.into(), v.into())); + let edition: Edition = + data.edition.and_then(|it| it.parse().ok()).unwrap_or(Edition::Edition2018); + + let file_id = FileId(i as u32 + 1); + let env = data.env.into_iter().collect(); + if path == "/lib.rs" || path == "/main.rs" { + root_crate = Some(crate_graph.add_crate_root( + file_id, + edition, + None, + cfg, + env, + Default::default(), + )); + } else if path.ends_with("/lib.rs") { + let base = &path[..path.len() - "/lib.rs".len()]; + let crate_name = &base[base.rfind('/').unwrap() + '/'.len_utf8()..]; + let other_crate = crate_graph.add_crate_root( + file_id, + edition, + Some(crate_name.to_string()), + cfg, + env, + Default::default(), + ); + if let Some(root_crate) = root_crate { + crate_graph + .add_dep(root_crate, CrateName::new(crate_name).unwrap(), other_crate) + .unwrap(); + } + } + let path = VfsPath::new_virtual_path(path.to_string()); + file_set.insert(file_id, path); + change.change_file(file_id, Some(Arc::new(data.text).to_owned())); + } + change.set_crate_graph(crate_graph); + change.set_roots(vec![SourceRoot::new_local(file_set)]); + host.apply_change(change); + host + } + pub fn analysis(self) -> Analysis { + self.analysis_host().analysis() + } +} + +/// Creates analysis from a multi-file fixture, returns positions marked with <|>. +pub fn analysis_and_position(ra_fixture: &str) -> (Analysis, FilePosition) { + let (mock, position) = MockAnalysis::with_files_and_position(ra_fixture); + (mock.analysis(), position) +} + +/// Creates analysis for a single file. 
+pub fn single_file(ra_fixture: &str) -> (Analysis, FileId) { + let mock = MockAnalysis::with_files(ra_fixture); + let file_id = mock.id_of("/main.rs"); + (mock.analysis(), file_id) +} + +/// Creates analysis for a single file, returns range marked with a pair of <|>. +pub fn analysis_and_range(ra_fixture: &str) -> (Analysis, FileRange) { + let (res, position) = MockAnalysis::with_fixture(ra_fixture); + let (file_id, range_or_offset) = position.expect("expected a marker (<|>)"); + let range = match range_or_offset { + RangeOrOffset::Range(it) => it, + RangeOrOffset::Offset(_) => panic!(), + }; + (res.analysis(), FileRange { file_id, range }) +} diff --git a/crates/ide/src/parent_module.rs b/crates/ide/src/parent_module.rs new file mode 100644 index 0000000000..59ed2967cf --- /dev/null +++ b/crates/ide/src/parent_module.rs @@ -0,0 +1,155 @@ +use base_db::{CrateId, FileId, FilePosition}; +use hir::Semantics; +use ide_db::RootDatabase; +use syntax::{ + algo::find_node_at_offset, + ast::{self, AstNode}, +}; +use test_utils::mark; + +use crate::NavigationTarget; + +// Feature: Parent Module +// +// Navigates to the parent module of the current module. +// +// |=== +// | Editor | Action Name +// +// | VS Code | **Rust Analyzer: Locate parent module** +// |=== + +/// This returns `Vec` because a module may be included from several places. We +/// don't handle this case yet though, so the Vec has length at most one. +pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec { + let sema = Semantics::new(db); + let source_file = sema.parse(position.file_id); + + let mut module = find_node_at_offset::(source_file.syntax(), position.offset); + + // If cursor is literally on `mod foo`, go to the grandpa. 
+ if let Some(m) = &module { + if !m + .item_list() + .map_or(false, |it| it.syntax().text_range().contains_inclusive(position.offset)) + { + mark::hit!(test_resolve_parent_module_on_module_decl); + module = m.syntax().ancestors().skip(1).find_map(ast::Module::cast); + } + } + + let module = match module { + Some(module) => sema.to_def(&module), + None => sema.to_module_def(position.file_id), + }; + let module = match module { + None => return Vec::new(), + Some(it) => it, + }; + let nav = NavigationTarget::from_module_to_decl(db, module); + vec![nav] +} + +/// Returns `Vec` for the same reason as `parent_module` +pub(crate) fn crate_for(db: &RootDatabase, file_id: FileId) -> Vec { + let sema = Semantics::new(db); + let module = match sema.to_module_def(file_id) { + Some(it) => it, + None => return Vec::new(), + }; + let krate = module.krate(); + vec![krate.into()] +} + +#[cfg(test)] +mod tests { + use base_db::Env; + use cfg::CfgOptions; + use test_utils::mark; + + use crate::{ + mock_analysis::{analysis_and_position, MockAnalysis}, + AnalysisChange, CrateGraph, + Edition::Edition2018, + }; + + #[test] + fn test_resolve_parent_module() { + let (analysis, pos) = analysis_and_position( + " + //- /lib.rs + mod foo; + //- /foo.rs + <|>// empty + ", + ); + let nav = analysis.parent_module(pos).unwrap().pop().unwrap(); + nav.assert_match("foo MODULE FileId(1) 0..8"); + } + + #[test] + fn test_resolve_parent_module_on_module_decl() { + mark::check!(test_resolve_parent_module_on_module_decl); + let (analysis, pos) = analysis_and_position( + " + //- /lib.rs + mod foo; + + //- /foo.rs + mod <|>bar; + + //- /foo/bar.rs + // empty + ", + ); + let nav = analysis.parent_module(pos).unwrap().pop().unwrap(); + nav.assert_match("foo MODULE FileId(1) 0..8"); + } + + #[test] + fn test_resolve_parent_module_for_inline() { + let (analysis, pos) = analysis_and_position( + " + //- /lib.rs + mod foo { + mod bar { + mod baz { <|> } + } + } + ", + ); + let nav = 
analysis.parent_module(pos).unwrap().pop().unwrap(); + nav.assert_match("baz MODULE FileId(1) 32..44"); + } + + #[test] + fn test_resolve_crate_root() { + let mock = MockAnalysis::with_files( + r#" +//- /bar.rs +mod foo; +//- /foo.rs +// empty +"#, + ); + let root_file = mock.id_of("/bar.rs"); + let mod_file = mock.id_of("/foo.rs"); + let mut host = mock.analysis_host(); + assert!(host.analysis().crate_for(mod_file).unwrap().is_empty()); + + let mut crate_graph = CrateGraph::default(); + let crate_id = crate_graph.add_crate_root( + root_file, + Edition2018, + None, + CfgOptions::default(), + Env::default(), + Default::default(), + ); + let mut change = AnalysisChange::new(); + change.set_crate_graph(crate_graph); + host.apply_change(change); + + assert_eq!(host.analysis().crate_for(mod_file).unwrap(), vec![crate_id]); + } +} diff --git a/crates/ra_ide/src/prime_caches.rs b/crates/ide/src/prime_caches.rs similarity index 100% rename from crates/ra_ide/src/prime_caches.rs rename to crates/ide/src/prime_caches.rs diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs new file mode 100644 index 0000000000..0a76ec6b43 --- /dev/null +++ b/crates/ide/src/references.rs @@ -0,0 +1,694 @@ +//! This module implements a reference search. +//! First, the element at the cursor position must be either an `ast::Name` +//! or `ast::NameRef`. If it's an `ast::NameRef`, at the classification step we +//! try to resolve the direct tree parent of this element, otherwise we +//! already have a definition and just need to get its HIR together with +//! some information that is needed for further steps of searching. +//! After that, we collect files that might contain references and look +//! for text occurrences of the identifier. If there's an `ast::NameRef` +//! at the index that the match starts at and its tree parent is +//! resolved to the search element definition, we get a reference.
+ +mod rename; + +use hir::Semantics; +use ide_db::{ + defs::{classify_name, classify_name_ref, Definition}, + search::SearchScope, + RootDatabase, +}; +use syntax::{ + algo::find_node_at_offset, + ast::{self, NameOwner}, + AstNode, SyntaxKind, SyntaxNode, TextRange, TokenAtOffset, +}; + +use crate::{display::TryToNav, FilePosition, FileRange, NavigationTarget, RangeInfo}; + +pub(crate) use self::rename::rename; + +pub use ide_db::search::{Reference, ReferenceAccess, ReferenceKind}; + +#[derive(Debug, Clone)] +pub struct ReferenceSearchResult { + declaration: Declaration, + references: Vec, +} + +#[derive(Debug, Clone)] +pub struct Declaration { + pub nav: NavigationTarget, + pub kind: ReferenceKind, + pub access: Option, +} + +impl ReferenceSearchResult { + pub fn declaration(&self) -> &Declaration { + &self.declaration + } + + pub fn decl_target(&self) -> &NavigationTarget { + &self.declaration.nav + } + + pub fn references(&self) -> &[Reference] { + &self.references + } + + /// Total number of references + /// At least 1 since all valid references should + /// Have a declaration + pub fn len(&self) -> usize { + self.references.len() + 1 + } +} + +// allow turning ReferenceSearchResult into an iterator +// over References +impl IntoIterator for ReferenceSearchResult { + type Item = Reference; + type IntoIter = std::vec::IntoIter; + + fn into_iter(mut self) -> Self::IntoIter { + let mut v = Vec::with_capacity(self.len()); + v.push(Reference { + file_range: FileRange { + file_id: self.declaration.nav.file_id, + range: self.declaration.nav.focus_or_full_range(), + }, + kind: self.declaration.kind, + access: self.declaration.access, + }); + v.append(&mut self.references); + v.into_iter() + } +} + +pub(crate) fn find_all_refs( + sema: &Semantics, + position: FilePosition, + search_scope: Option, +) -> Option> { + let _p = profile::span("find_all_refs"); + let syntax = sema.parse(position.file_id).syntax().clone(); + + let (opt_name, search_kind) = if let Some(name) = 
+ get_struct_def_name_for_struct_literal_search(&sema, &syntax, position) + { + (Some(name), ReferenceKind::StructLiteral) + } else { + ( + sema.find_node_at_offset_with_descend::(&syntax, position.offset), + ReferenceKind::Other, + ) + }; + + let RangeInfo { range, info: def } = find_name(&sema, &syntax, position, opt_name)?; + + let references = def + .find_usages(sema, search_scope) + .into_iter() + .filter(|r| search_kind == ReferenceKind::Other || search_kind == r.kind) + .collect(); + + let decl_range = def.try_to_nav(sema.db)?.focus_or_full_range(); + + let declaration = Declaration { + nav: def.try_to_nav(sema.db)?, + kind: ReferenceKind::Other, + access: decl_access(&def, &syntax, decl_range), + }; + + Some(RangeInfo::new(range, ReferenceSearchResult { declaration, references })) +} + +fn find_name( + sema: &Semantics, + syntax: &SyntaxNode, + position: FilePosition, + opt_name: Option, +) -> Option> { + if let Some(name) = opt_name { + let def = classify_name(sema, &name)?.definition(sema.db); + let range = name.syntax().text_range(); + return Some(RangeInfo::new(range, def)); + } + let name_ref = + sema.find_node_at_offset_with_descend::(&syntax, position.offset)?; + let def = classify_name_ref(sema, &name_ref)?.definition(sema.db); + let range = name_ref.syntax().text_range(); + Some(RangeInfo::new(range, def)) +} + +fn decl_access(def: &Definition, syntax: &SyntaxNode, range: TextRange) -> Option { + match def { + Definition::Local(_) | Definition::Field(_) => {} + _ => return None, + }; + + let stmt = find_node_at_offset::(syntax, range.start())?; + if stmt.initializer().is_some() { + let pat = stmt.pat()?; + if let ast::Pat::IdentPat(it) = pat { + if it.mut_token().is_some() { + return Some(ReferenceAccess::Write); + } + } + } + + None +} + +fn get_struct_def_name_for_struct_literal_search( + sema: &Semantics, + syntax: &SyntaxNode, + position: FilePosition, +) -> Option { + if let TokenAtOffset::Between(ref left, ref right) = 
syntax.token_at_offset(position.offset) { + if right.kind() != SyntaxKind::L_CURLY && right.kind() != SyntaxKind::L_PAREN { + return None; + } + if let Some(name) = + sema.find_node_at_offset_with_descend::(&syntax, left.text_range().start()) + { + return name.syntax().ancestors().find_map(ast::Struct::cast).and_then(|l| l.name()); + } + if sema + .find_node_at_offset_with_descend::( + &syntax, + left.text_range().start(), + ) + .is_some() + { + return left.ancestors().find_map(ast::Struct::cast).and_then(|l| l.name()); + } + } + None +} + +#[cfg(test)] +mod tests { + use crate::{ + mock_analysis::{analysis_and_position, MockAnalysis}, + Declaration, Reference, ReferenceSearchResult, SearchScope, + }; + + #[test] + fn test_struct_literal_after_space() { + let refs = get_all_refs( + r#" +struct Foo <|>{ + a: i32, +} +impl Foo { + fn f() -> i32 { 42 } +} +fn main() { + let f: Foo; + f = Foo {a: Foo::f()}; +} +"#, + ); + check_result( + refs, + "Foo STRUCT FileId(1) 0..26 7..10 Other", + &["FileId(1) 101..104 StructLiteral"], + ); + } + + #[test] + fn test_struct_literal_before_space() { + let refs = get_all_refs( + r#" +struct Foo<|> {} + fn main() { + let f: Foo; + f = Foo {}; +} +"#, + ); + check_result( + refs, + "Foo STRUCT FileId(1) 0..13 7..10 Other", + &["FileId(1) 41..44 Other", "FileId(1) 54..57 StructLiteral"], + ); + } + + #[test] + fn test_struct_literal_with_generic_type() { + let refs = get_all_refs( + r#" +struct Foo <|>{} + fn main() { + let f: Foo::; + f = Foo {}; +} +"#, + ); + check_result( + refs, + "Foo STRUCT FileId(1) 0..16 7..10 Other", + &["FileId(1) 64..67 StructLiteral"], + ); + } + + #[test] + fn test_struct_literal_for_tuple() { + let refs = get_all_refs( + r#" +struct Foo<|>(i32); + +fn main() { + let f: Foo; + f = Foo(1); +} +"#, + ); + check_result( + refs, + "Foo STRUCT FileId(1) 0..16 7..10 Other", + &["FileId(1) 54..57 StructLiteral"], + ); + } + + #[test] + fn test_find_all_refs_for_local() { + let refs = get_all_refs( + r#" +fn 
main() { + let mut i = 1; + let j = 1; + i = i<|> + j; + + { + i = 0; + } + + i = 5; +}"#, + ); + check_result( + refs, + "i IDENT_PAT FileId(1) 24..25 Other Write", + &[ + "FileId(1) 50..51 Other Write", + "FileId(1) 54..55 Other Read", + "FileId(1) 76..77 Other Write", + "FileId(1) 94..95 Other Write", + ], + ); + } + + #[test] + fn search_filters_by_range() { + let refs = get_all_refs( + r#" +fn foo() { + let spam<|> = 92; + spam + spam +} +fn bar() { + let spam = 92; + spam + spam +} +"#, + ); + check_result( + refs, + "spam IDENT_PAT FileId(1) 19..23 Other", + &["FileId(1) 34..38 Other Read", "FileId(1) 41..45 Other Read"], + ); + } + + #[test] + fn test_find_all_refs_for_param_inside() { + let refs = get_all_refs( + r#" +fn foo(i : u32) -> u32 { + i<|> +} +"#, + ); + check_result(refs, "i IDENT_PAT FileId(1) 7..8 Other", &["FileId(1) 29..30 Other Read"]); + } + + #[test] + fn test_find_all_refs_for_fn_param() { + let refs = get_all_refs( + r#" +fn foo(i<|> : u32) -> u32 { + i +} +"#, + ); + check_result(refs, "i IDENT_PAT FileId(1) 7..8 Other", &["FileId(1) 29..30 Other Read"]); + } + + #[test] + fn test_find_all_refs_field_name() { + let refs = get_all_refs( + r#" +//- /lib.rs +struct Foo { + pub spam<|>: u32, +} + +fn main(s: Foo) { + let f = s.spam; +} +"#, + ); + check_result( + refs, + "spam RECORD_FIELD FileId(1) 17..30 21..25 Other", + &["FileId(1) 67..71 Other Read"], + ); + } + + #[test] + fn test_find_all_refs_impl_item_name() { + let refs = get_all_refs( + r#" +struct Foo; +impl Foo { + fn f<|>(&self) { } +} +"#, + ); + check_result(refs, "f FN FileId(1) 27..43 30..31 Other", &[]); + } + + #[test] + fn test_find_all_refs_enum_var_name() { + let refs = get_all_refs( + r#" +enum Foo { + A, + B<|>, + C, +} +"#, + ); + check_result(refs, "B VARIANT FileId(1) 22..23 22..23 Other", &[]); + } + + #[test] + fn test_find_all_refs_two_modules() { + let (analysis, pos) = analysis_and_position( + r#" +//- /lib.rs +pub mod foo; +pub mod bar; + +fn f() { + let i 
= foo::Foo { n: 5 }; +} + +//- /foo.rs +use crate::bar; + +pub struct Foo { + pub n: u32, +} + +fn f() { + let i = bar::Bar { n: 5 }; +} + +//- /bar.rs +use crate::foo; + +pub struct Bar { + pub n: u32, +} + +fn f() { + let i = foo::Foo<|> { n: 5 }; +} +"#, + ); + let refs = analysis.find_all_refs(pos, None).unwrap().unwrap(); + check_result( + refs, + "Foo STRUCT FileId(2) 17..51 28..31 Other", + &["FileId(1) 53..56 StructLiteral", "FileId(3) 79..82 StructLiteral"], + ); + } + + // `mod foo;` is not in the results because `foo` is an `ast::Name`. + // So, there are two references: the first one is a definition of the `foo` module, + // which is the whole `foo.rs`, and the second one is in `use foo::Foo`. + #[test] + fn test_find_all_refs_decl_module() { + let (analysis, pos) = analysis_and_position( + r#" +//- /lib.rs +mod foo<|>; + +use foo::Foo; + +fn f() { + let i = Foo { n: 5 }; +} + +//- /foo.rs +pub struct Foo { + pub n: u32, +} +"#, + ); + let refs = analysis.find_all_refs(pos, None).unwrap().unwrap(); + check_result(refs, "foo SOURCE_FILE FileId(2) 0..35 Other", &["FileId(1) 14..17 Other"]); + } + + #[test] + fn test_find_all_refs_super_mod_vis() { + let (analysis, pos) = analysis_and_position( + r#" +//- /lib.rs +mod foo; + +//- /foo.rs +mod some; +use some::Foo; + +fn f() { + let i = Foo { n: 5 }; +} + +//- /foo/some.rs +pub(super) struct Foo<|> { + pub n: u32, +} +"#, + ); + let refs = analysis.find_all_refs(pos, None).unwrap().unwrap(); + check_result( + refs, + "Foo STRUCT FileId(3) 0..41 18..21 Other", + &["FileId(2) 20..23 Other", "FileId(2) 47..50 StructLiteral"], + ); + } + + #[test] + fn test_find_all_refs_with_scope() { + let code = r#" + //- /lib.rs + mod foo; + mod bar; + + pub fn quux<|>() {} + + //- /foo.rs + fn f() { super::quux(); } + + //- /bar.rs + fn f() { super::quux(); } + "#; + + let (mock, pos) = MockAnalysis::with_files_and_position(code); + let bar = mock.id_of("/bar.rs"); + let analysis = mock.analysis(); + + let refs = 
analysis.find_all_refs(pos, None).unwrap().unwrap(); + check_result( + refs, + "quux FN FileId(1) 19..35 26..30 Other", + &["FileId(2) 16..20 StructLiteral", "FileId(3) 16..20 StructLiteral"], + ); + + let refs = + analysis.find_all_refs(pos, Some(SearchScope::single_file(bar))).unwrap().unwrap(); + check_result( + refs, + "quux FN FileId(1) 19..35 26..30 Other", + &["FileId(3) 16..20 StructLiteral"], + ); + } + + #[test] + fn test_find_all_refs_macro_def() { + let refs = get_all_refs( + r#" +#[macro_export] +macro_rules! m1<|> { () => (()) } + +fn foo() { + m1(); + m1(); +} +"#, + ); + check_result( + refs, + "m1 MACRO_CALL FileId(1) 0..46 29..31 Other", + &["FileId(1) 63..65 StructLiteral", "FileId(1) 73..75 StructLiteral"], + ); + } + + #[test] + fn test_basic_highlight_read_write() { + let refs = get_all_refs( + r#" +fn foo() { + let mut i<|> = 0; + i = i + 1; +} +"#, + ); + check_result( + refs, + "i IDENT_PAT FileId(1) 23..24 Other Write", + &["FileId(1) 34..35 Other Write", "FileId(1) 38..39 Other Read"], + ); + } + + #[test] + fn test_basic_highlight_field_read_write() { + let refs = get_all_refs( + r#" +struct S { + f: u32, +} + +fn foo() { + let mut s = S{f: 0}; + s.f<|> = 0; +} +"#, + ); + check_result( + refs, + "f RECORD_FIELD FileId(1) 15..21 15..16 Other", + &["FileId(1) 55..56 Other Read", "FileId(1) 68..69 Other Write"], + ); + } + + #[test] + fn test_basic_highlight_decl_no_write() { + let refs = get_all_refs( + r#" +fn foo() { + let i<|>; + i = 1; +} +"#, + ); + check_result(refs, "i IDENT_PAT FileId(1) 19..20 Other", &["FileId(1) 26..27 Other Write"]); + } + + #[test] + fn test_find_struct_function_refs_outside_module() { + let refs = get_all_refs( + r#" +mod foo { + pub struct Foo; + + impl Foo { + pub fn new<|>() -> Foo { + Foo + } + } +} + +fn main() { + let _f = foo::Foo::new(); +} +"#, + ); + check_result( + refs, + "new FN FileId(1) 54..101 61..64 Other", + &["FileId(1) 146..149 StructLiteral"], + ); + } + + #[test] + fn 
test_find_all_refs_nested_module() { + let code = r#" + //- /lib.rs + mod foo { + mod bar; + } + + fn f<|>() {} + + //- /foo/bar.rs + use crate::f; + + fn g() { + f(); + } + "#; + + let (analysis, pos) = analysis_and_position(code); + let refs = analysis.find_all_refs(pos, None).unwrap().unwrap(); + check_result( + refs, + "f FN FileId(1) 26..35 29..30 Other", + &["FileId(2) 11..12 Other", "FileId(2) 28..29 StructLiteral"], + ); + } + + fn get_all_refs(ra_fixture: &str) -> ReferenceSearchResult { + let (analysis, position) = analysis_and_position(ra_fixture); + analysis.find_all_refs(position, None).unwrap().unwrap() + } + + fn check_result(res: ReferenceSearchResult, expected_decl: &str, expected_refs: &[&str]) { + res.declaration().assert_match(expected_decl); + assert_eq!(res.references.len(), expected_refs.len()); + res.references() + .iter() + .enumerate() + .for_each(|(i, r)| ref_assert_match(r, expected_refs[i])); + } + + impl Declaration { + fn debug_render(&self) -> String { + let mut s = format!("{} {:?}", self.nav.debug_render(), self.kind); + if let Some(access) = self.access { + s.push_str(&format!(" {:?}", access)); + } + s + } + + fn assert_match(&self, expected: &str) { + let actual = self.debug_render(); + test_utils::assert_eq_text!(expected.trim(), actual.trim(),); + } + } + + fn ref_debug_render(r: &Reference) -> String { + let mut s = format!("{:?} {:?} {:?}", r.file_range.file_id, r.file_range.range, r.kind); + if let Some(access) = r.access { + s.push_str(&format!(" {:?}", access)); + } + s + } + + fn ref_assert_match(r: &Reference, expected: &str) { + let actual = ref_debug_render(r); + test_utils::assert_eq_text!(expected.trim(), actual.trim(),); + } +} diff --git a/crates/ide/src/references/rename.rs b/crates/ide/src/references/rename.rs new file mode 100644 index 0000000000..d73dc9cd00 --- /dev/null +++ b/crates/ide/src/references/rename.rs @@ -0,0 +1,1010 @@ +//! 
FIXME: write short doc here + +use base_db::SourceDatabaseExt; +use hir::{Module, ModuleDef, ModuleSource, Semantics}; +use ide_db::{ + defs::{classify_name, classify_name_ref, Definition, NameClass, NameRefClass}, + RootDatabase, +}; +use std::convert::TryInto; +use syntax::{ + algo::find_node_at_offset, + ast::{self, NameOwner}, + lex_single_valid_syntax_kind, match_ast, AstNode, SyntaxKind, SyntaxNode, SyntaxToken, +}; +use test_utils::mark; +use text_edit::TextEdit; + +use crate::{ + references::find_all_refs, FilePosition, FileSystemEdit, RangeInfo, Reference, ReferenceKind, + SourceChange, SourceFileEdit, TextRange, TextSize, +}; + +pub(crate) fn rename( + db: &RootDatabase, + position: FilePosition, + new_name: &str, +) -> Option> { + let sema = Semantics::new(db); + + match lex_single_valid_syntax_kind(new_name)? { + SyntaxKind::IDENT | SyntaxKind::UNDERSCORE => (), + SyntaxKind::SELF_KW => return rename_to_self(&sema, position), + _ => return None, + } + + let source_file = sema.parse(position.file_id); + let syntax = source_file.syntax(); + if let Some(module) = find_module_at_offset(&sema, position, syntax) { + rename_mod(&sema, position, module, new_name) + } else if let Some(self_token) = + syntax.token_at_offset(position.offset).find(|t| t.kind() == SyntaxKind::SELF_KW) + { + rename_self_to_param(&sema, position, self_token, new_name) + } else { + rename_reference(&sema, position, new_name) + } +} + +fn find_module_at_offset( + sema: &Semantics, + position: FilePosition, + syntax: &SyntaxNode, +) -> Option { + let ident = syntax.token_at_offset(position.offset).find(|t| t.kind() == SyntaxKind::IDENT)?; + + let module = match_ast! { + match (ident.parent()) { + ast::NameRef(name_ref) => { + match classify_name_ref(sema, &name_ref)? { + NameRefClass::Definition(Definition::ModuleDef(ModuleDef::Module(module))) => module, + _ => return None, + } + }, + ast::Name(name) => { + match classify_name(&sema, &name)? 
{ + NameClass::Definition(Definition::ModuleDef(ModuleDef::Module(module))) => module, + _ => return None, + } + }, + _ => return None, + } + }; + + Some(module) +} + +fn source_edit_from_reference(reference: Reference, new_name: &str) -> SourceFileEdit { + let mut replacement_text = String::new(); + let file_id = reference.file_range.file_id; + let range = match reference.kind { + ReferenceKind::FieldShorthandForField => { + mark::hit!(test_rename_struct_field_for_shorthand); + replacement_text.push_str(new_name); + replacement_text.push_str(": "); + TextRange::new(reference.file_range.range.start(), reference.file_range.range.start()) + } + ReferenceKind::FieldShorthandForLocal => { + mark::hit!(test_rename_local_for_field_shorthand); + replacement_text.push_str(": "); + replacement_text.push_str(new_name); + TextRange::new(reference.file_range.range.end(), reference.file_range.range.end()) + } + _ => { + replacement_text.push_str(new_name); + reference.file_range.range + } + }; + SourceFileEdit { file_id, edit: TextEdit::replace(range, replacement_text) } +} + +fn rename_mod( + sema: &Semantics, + position: FilePosition, + module: Module, + new_name: &str, +) -> Option> { + let mut source_file_edits = Vec::new(); + let mut file_system_edits = Vec::new(); + + let src = module.definition_source(sema.db); + let file_id = src.file_id.original_file(sema.db); + match src.value { + ModuleSource::SourceFile(..) => { + // mod is defined in path/to/dir/mod.rs + let dst = if module.is_mod_rs(sema.db) { + format!("../{}/mod.rs", new_name) + } else { + format!("{}.rs", new_name) + }; + let move_file = FileSystemEdit::MoveFile { src: file_id, anchor: file_id, dst }; + file_system_edits.push(move_file); + } + ModuleSource::Module(..) 
=> {} + } + + if let Some(src) = module.declaration_source(sema.db) { + let file_id = src.file_id.original_file(sema.db); + let name = src.value.name()?; + let edit = SourceFileEdit { + file_id, + edit: TextEdit::replace(name.syntax().text_range(), new_name.into()), + }; + source_file_edits.push(edit); + } + + let RangeInfo { range, info: refs } = find_all_refs(sema, position, None)?; + let ref_edits = refs + .references + .into_iter() + .map(|reference| source_edit_from_reference(reference, new_name)); + source_file_edits.extend(ref_edits); + + Some(RangeInfo::new(range, SourceChange::from_edits(source_file_edits, file_system_edits))) +} + +fn rename_to_self( + sema: &Semantics, + position: FilePosition, +) -> Option> { + let source_file = sema.parse(position.file_id); + let syn = source_file.syntax(); + + let fn_def = find_node_at_offset::(syn, position.offset)?; + let params = fn_def.param_list()?; + if params.self_param().is_some() { + return None; // method already has self param + } + let first_param = params.params().next()?; + let mutable = match first_param.ty() { + Some(ast::Type::RefType(rt)) => rt.mut_token().is_some(), + _ => return None, // not renaming other types + }; + + let RangeInfo { range, info: refs } = find_all_refs(sema, position, None)?; + + let param_range = first_param.syntax().text_range(); + let (param_ref, usages): (Vec, Vec) = refs + .into_iter() + .partition(|reference| param_range.intersect(reference.file_range.range).is_some()); + + if param_ref.is_empty() { + return None; + } + + let mut edits = usages + .into_iter() + .map(|reference| source_edit_from_reference(reference, "self")) + .collect::>(); + + edits.push(SourceFileEdit { + file_id: position.file_id, + edit: TextEdit::replace( + param_range, + String::from(if mutable { "&mut self" } else { "&self" }), + ), + }); + + Some(RangeInfo::new(range, SourceChange::from(edits))) +} + +fn text_edit_from_self_param( + syn: &SyntaxNode, + self_param: &ast::SelfParam, + new_name: &str, 
+) -> Option { + fn target_type_name(impl_def: &ast::Impl) -> Option { + if let Some(ast::Type::PathType(p)) = impl_def.self_ty() { + return Some(p.path()?.segment()?.name_ref()?.text().to_string()); + } + None + } + + let impl_def = find_node_at_offset::(syn, self_param.syntax().text_range().start())?; + let type_name = target_type_name(&impl_def)?; + + let mut replacement_text = String::from(new_name); + replacement_text.push_str(": "); + replacement_text.push_str(self_param.mut_token().map_or("&", |_| "&mut ")); + replacement_text.push_str(type_name.as_str()); + + Some(TextEdit::replace(self_param.syntax().text_range(), replacement_text)) +} + +fn rename_self_to_param( + sema: &Semantics, + position: FilePosition, + self_token: SyntaxToken, + new_name: &str, +) -> Option> { + let source_file = sema.parse(position.file_id); + let syn = source_file.syntax(); + + let text = sema.db.file_text(position.file_id); + let fn_def = find_node_at_offset::(syn, position.offset)?; + let search_range = fn_def.syntax().text_range(); + + let mut edits: Vec = vec![]; + + for (idx, _) in text.match_indices("self") { + let offset: TextSize = idx.try_into().unwrap(); + if !search_range.contains_inclusive(offset) { + continue; + } + if let Some(ref usage) = + syn.token_at_offset(offset).find(|t| t.kind() == SyntaxKind::SELF_KW) + { + let edit = if let Some(ref self_param) = ast::SelfParam::cast(usage.parent()) { + text_edit_from_self_param(syn, self_param, new_name)? 
+ } else { + TextEdit::replace(usage.text_range(), String::from(new_name)) + }; + edits.push(SourceFileEdit { file_id: position.file_id, edit }); + } + } + + let range = ast::SelfParam::cast(self_token.parent()) + .map_or(self_token.text_range(), |p| p.syntax().text_range()); + + Some(RangeInfo::new(range, SourceChange::from(edits))) +} + +fn rename_reference( + sema: &Semantics, + position: FilePosition, + new_name: &str, +) -> Option> { + let RangeInfo { range, info: refs } = find_all_refs(sema, position, None)?; + + let edit = refs + .into_iter() + .map(|reference| source_edit_from_reference(reference, new_name)) + .collect::>(); + + if edit.is_empty() { + return None; + } + + Some(RangeInfo::new(range, SourceChange::from(edit))) +} + +#[cfg(test)] +mod tests { + use expect::{expect, Expect}; + use stdx::trim_indent; + use test_utils::{assert_eq_text, mark}; + use text_edit::TextEdit; + + use crate::{mock_analysis::analysis_and_position, FileId}; + + fn check(new_name: &str, ra_fixture_before: &str, ra_fixture_after: &str) { + let ra_fixture_after = &trim_indent(ra_fixture_after); + let (analysis, position) = analysis_and_position(ra_fixture_before); + let source_change = analysis.rename(position, new_name).unwrap(); + let mut text_edit_builder = TextEdit::builder(); + let mut file_id: Option = None; + if let Some(change) = source_change { + for edit in change.info.source_file_edits { + file_id = Some(edit.file_id); + for indel in edit.edit.into_iter() { + text_edit_builder.replace(indel.delete, indel.insert); + } + } + } + let mut result = analysis.file_text(file_id.unwrap()).unwrap().to_string(); + text_edit_builder.finish().apply(&mut result); + assert_eq_text!(ra_fixture_after, &*result); + } + + fn check_expect(new_name: &str, ra_fixture: &str, expect: Expect) { + let (analysis, position) = analysis_and_position(ra_fixture); + let source_change = analysis.rename(position, new_name).unwrap().unwrap(); + expect.assert_debug_eq(&source_change) + } + + #[test] 
+ fn test_rename_to_underscore() { + check("_", r#"fn main() { let i<|> = 1; }"#, r#"fn main() { let _ = 1; }"#); + } + + #[test] + fn test_rename_to_raw_identifier() { + check("r#fn", r#"fn main() { let i<|> = 1; }"#, r#"fn main() { let r#fn = 1; }"#); + } + + #[test] + fn test_rename_to_invalid_identifier() { + let (analysis, position) = analysis_and_position(r#"fn main() { let i<|> = 1; }"#); + let new_name = "invalid!"; + let source_change = analysis.rename(position, new_name).unwrap(); + assert!(source_change.is_none()); + } + + #[test] + fn test_rename_for_local() { + check( + "k", + r#" +fn main() { + let mut i = 1; + let j = 1; + i = i<|> + j; + + { i = 0; } + + i = 5; +} +"#, + r#" +fn main() { + let mut k = 1; + let j = 1; + k = k + j; + + { k = 0; } + + k = 5; +} +"#, + ); + } + + #[test] + fn test_rename_for_macro_args() { + check( + "b", + r#" +macro_rules! foo {($i:ident) => {$i} } +fn main() { + let a<|> = "test"; + foo!(a); +} +"#, + r#" +macro_rules! foo {($i:ident) => {$i} } +fn main() { + let b = "test"; + foo!(b); +} +"#, + ); + } + + #[test] + fn test_rename_for_macro_args_rev() { + check( + "b", + r#" +macro_rules! foo {($i:ident) => {$i} } +fn main() { + let a = "test"; + foo!(a<|>); +} +"#, + r#" +macro_rules! foo {($i:ident) => {$i} } +fn main() { + let b = "test"; + foo!(b); +} +"#, + ); + } + + #[test] + fn test_rename_for_macro_define_fn() { + check( + "bar", + r#" +macro_rules! define_fn {($id:ident) => { fn $id{} }} +define_fn!(foo); +fn main() { + fo<|>o(); +} +"#, + r#" +macro_rules! define_fn {($id:ident) => { fn $id{} }} +define_fn!(bar); +fn main() { + bar(); +} +"#, + ); + } + + #[test] + fn test_rename_for_macro_define_fn_rev() { + check( + "bar", + r#" +macro_rules! define_fn {($id:ident) => { fn $id{} }} +define_fn!(fo<|>o); +fn main() { + foo(); +} +"#, + r#" +macro_rules! 
define_fn {($id:ident) => { fn $id{} }} +define_fn!(bar); +fn main() { + bar(); +} +"#, + ); + } + + #[test] + fn test_rename_for_param_inside() { + check("j", r#"fn foo(i : u32) -> u32 { i<|> }"#, r#"fn foo(j : u32) -> u32 { j }"#); + } + + #[test] + fn test_rename_refs_for_fn_param() { + check("j", r#"fn foo(i<|> : u32) -> u32 { i }"#, r#"fn foo(j : u32) -> u32 { j }"#); + } + + #[test] + fn test_rename_for_mut_param() { + check("j", r#"fn foo(mut i<|> : u32) -> u32 { i }"#, r#"fn foo(mut j : u32) -> u32 { j }"#); + } + + #[test] + fn test_rename_struct_field() { + check( + "j", + r#" +struct Foo { i<|>: i32 } + +impl Foo { + fn new(i: i32) -> Self { + Self { i: i } + } +} +"#, + r#" +struct Foo { j: i32 } + +impl Foo { + fn new(i: i32) -> Self { + Self { j: i } + } +} +"#, + ); + } + + #[test] + fn test_rename_struct_field_for_shorthand() { + mark::check!(test_rename_struct_field_for_shorthand); + check( + "j", + r#" +struct Foo { i<|>: i32 } + +impl Foo { + fn new(i: i32) -> Self { + Self { i } + } +} +"#, + r#" +struct Foo { j: i32 } + +impl Foo { + fn new(i: i32) -> Self { + Self { j: i } + } +} +"#, + ); + } + + #[test] + fn test_rename_local_for_field_shorthand() { + mark::check!(test_rename_local_for_field_shorthand); + check( + "j", + r#" +struct Foo { i: i32 } + +impl Foo { + fn new(i<|>: i32) -> Self { + Self { i } + } +} +"#, + r#" +struct Foo { i: i32 } + +impl Foo { + fn new(j: i32) -> Self { + Self { i: j } + } +} +"#, + ); + } + + #[test] + fn test_field_shorthand_correct_struct() { + check( + "j", + r#" +struct Foo { i<|>: i32 } +struct Bar { i: i32 } + +impl Bar { + fn new(i: i32) -> Self { + Self { i } + } +} +"#, + r#" +struct Foo { j: i32 } +struct Bar { i: i32 } + +impl Bar { + fn new(i: i32) -> Self { + Self { i } + } +} +"#, + ); + } + + #[test] + fn test_shadow_local_for_struct_shorthand() { + check( + "j", + r#" +struct Foo { i: i32 } + +fn baz(i<|>: i32) -> Self { + let x = Foo { i }; + { + let i = 0; + Foo { i } + } +} +"#, + r#" 
+struct Foo { i: i32 } + +fn baz(j: i32) -> Self { + let x = Foo { i: j }; + { + let i = 0; + Foo { i } + } +} +"#, + ); + } + + #[test] + fn test_rename_mod() { + check_expect( + "foo2", + r#" +//- /lib.rs +mod bar; + +//- /bar.rs +mod foo<|>; + +//- /bar/foo.rs +// empty +"#, + expect![[r#" + RangeInfo { + range: 4..7, + info: SourceChange { + source_file_edits: [ + SourceFileEdit { + file_id: FileId( + 2, + ), + edit: TextEdit { + indels: [ + Indel { + insert: "foo2", + delete: 4..7, + }, + ], + }, + }, + ], + file_system_edits: [ + MoveFile { + src: FileId( + 3, + ), + anchor: FileId( + 3, + ), + dst: "foo2.rs", + }, + ], + is_snippet: false, + }, + } + "#]], + ); + } + + #[test] + fn test_rename_mod_in_use_tree() { + check_expect( + "quux", + r#" +//- /main.rs +pub mod foo; +pub mod bar; +fn main() {} + +//- /foo.rs +pub struct FooContent; + +//- /bar.rs +use crate::foo<|>::FooContent; +"#, + expect![[r#" + RangeInfo { + range: 11..14, + info: SourceChange { + source_file_edits: [ + SourceFileEdit { + file_id: FileId( + 1, + ), + edit: TextEdit { + indels: [ + Indel { + insert: "quux", + delete: 8..11, + }, + ], + }, + }, + SourceFileEdit { + file_id: FileId( + 3, + ), + edit: TextEdit { + indels: [ + Indel { + insert: "quux", + delete: 11..14, + }, + ], + }, + }, + ], + file_system_edits: [ + MoveFile { + src: FileId( + 2, + ), + anchor: FileId( + 2, + ), + dst: "quux.rs", + }, + ], + is_snippet: false, + }, + } + "#]], + ); + } + + #[test] + fn test_rename_mod_in_dir() { + check_expect( + "foo2", + r#" +//- /lib.rs +mod fo<|>o; +//- /foo/mod.rs +// emtpy +"#, + expect![[r#" + RangeInfo { + range: 4..7, + info: SourceChange { + source_file_edits: [ + SourceFileEdit { + file_id: FileId( + 1, + ), + edit: TextEdit { + indels: [ + Indel { + insert: "foo2", + delete: 4..7, + }, + ], + }, + }, + ], + file_system_edits: [ + MoveFile { + src: FileId( + 2, + ), + anchor: FileId( + 2, + ), + dst: "../foo2/mod.rs", + }, + ], + is_snippet: false, + }, + } + "#]], + ); + 
} + + #[test] + fn test_rename_unusually_nested_mod() { + check_expect( + "bar", + r#" +//- /lib.rs +mod outer { mod fo<|>o; } + +//- /outer/foo.rs +// emtpy +"#, + expect![[r#" + RangeInfo { + range: 16..19, + info: SourceChange { + source_file_edits: [ + SourceFileEdit { + file_id: FileId( + 1, + ), + edit: TextEdit { + indels: [ + Indel { + insert: "bar", + delete: 16..19, + }, + ], + }, + }, + ], + file_system_edits: [ + MoveFile { + src: FileId( + 2, + ), + anchor: FileId( + 2, + ), + dst: "bar.rs", + }, + ], + is_snippet: false, + }, + } + "#]], + ); + } + + #[test] + fn test_module_rename_in_path() { + check( + "baz", + r#" +mod <|>foo { pub fn bar() {} } + +fn main() { foo::bar(); } +"#, + r#" +mod baz { pub fn bar() {} } + +fn main() { baz::bar(); } +"#, + ); + } + + #[test] + fn test_rename_mod_filename_and_path() { + check_expect( + "foo2", + r#" +//- /lib.rs +mod bar; +fn f() { + bar::foo::fun() +} + +//- /bar.rs +pub mod foo<|>; + +//- /bar/foo.rs +// pub fn fun() {} +"#, + expect![[r#" + RangeInfo { + range: 8..11, + info: SourceChange { + source_file_edits: [ + SourceFileEdit { + file_id: FileId( + 2, + ), + edit: TextEdit { + indels: [ + Indel { + insert: "foo2", + delete: 8..11, + }, + ], + }, + }, + SourceFileEdit { + file_id: FileId( + 1, + ), + edit: TextEdit { + indels: [ + Indel { + insert: "foo2", + delete: 27..30, + }, + ], + }, + }, + ], + file_system_edits: [ + MoveFile { + src: FileId( + 3, + ), + anchor: FileId( + 3, + ), + dst: "foo2.rs", + }, + ], + is_snippet: false, + }, + } + "#]], + ); + } + + #[test] + fn test_enum_variant_from_module_1() { + check( + "Baz", + r#" +mod foo { + pub enum Foo { Bar<|> } +} + +fn func(f: foo::Foo) { + match f { + foo::Foo::Bar => {} + } +} +"#, + r#" +mod foo { + pub enum Foo { Baz } +} + +fn func(f: foo::Foo) { + match f { + foo::Foo::Baz => {} + } +} +"#, + ); + } + + #[test] + fn test_enum_variant_from_module_2() { + check( + "baz", + r#" +mod foo { + pub struct Foo { pub bar<|>: uint } +} + +fn 
foo(f: foo::Foo) { + let _ = f.bar; +} +"#, + r#" +mod foo { + pub struct Foo { pub baz: uint } +} + +fn foo(f: foo::Foo) { + let _ = f.baz; +} +"#, + ); + } + + #[test] + fn test_parameter_to_self() { + check( + "self", + r#" +struct Foo { i: i32 } + +impl Foo { + fn f(foo<|>: &mut Foo) -> i32 { + foo.i + } +} +"#, + r#" +struct Foo { i: i32 } + +impl Foo { + fn f(&mut self) -> i32 { + self.i + } +} +"#, + ); + } + + #[test] + fn test_self_to_parameter() { + check( + "foo", + r#" +struct Foo { i: i32 } + +impl Foo { + fn f(&mut <|>self) -> i32 { + self.i + } +} +"#, + r#" +struct Foo { i: i32 } + +impl Foo { + fn f(foo: &mut Foo) -> i32 { + foo.i + } +} +"#, + ); + } + + #[test] + fn test_self_in_path_to_parameter() { + check( + "foo", + r#" +struct Foo { i: i32 } + +impl Foo { + fn f(&self) -> i32 { + let self_var = 1; + self<|>.i + } +} +"#, + r#" +struct Foo { i: i32 } + +impl Foo { + fn f(foo: &Foo) -> i32 { + let self_var = 1; + foo.i + } +} +"#, + ); + } +} diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs new file mode 100644 index 0000000000..c3e07c8dec --- /dev/null +++ b/crates/ide/src/runnables.rs @@ -0,0 +1,883 @@ +use std::fmt; + +use cfg::CfgExpr; +use hir::{AsAssocItem, Attrs, HirFileId, InFile, Semantics}; +use ide_db::RootDatabase; +use itertools::Itertools; +use syntax::{ + ast::{self, AstNode, AttrsOwner, DocCommentsOwner, ModuleItemOwner, NameOwner}, + match_ast, SyntaxNode, +}; + +use crate::{display::ToNav, FileId, NavigationTarget}; + +#[derive(Debug, Clone)] +pub struct Runnable { + pub nav: NavigationTarget, + pub kind: RunnableKind, + pub cfg_exprs: Vec, +} + +#[derive(Debug, Clone)] +pub enum TestId { + Name(String), + Path(String), +} + +impl fmt::Display for TestId { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + TestId::Name(name) => write!(f, "{}", name), + TestId::Path(path) => write!(f, "{}", path), + } + } +} + +#[derive(Debug, Clone)] +pub enum RunnableKind { + Test { test_id: 
TestId, attr: TestAttr }, + TestMod { path: String }, + Bench { test_id: TestId }, + DocTest { test_id: TestId }, + Bin, +} + +#[derive(Debug, Eq, PartialEq)] +pub struct RunnableAction { + pub run_title: &'static str, + pub debugee: bool, +} + +const TEST: RunnableAction = RunnableAction { run_title: "▶\u{fe0e} Run Test", debugee: true }; +const DOCTEST: RunnableAction = + RunnableAction { run_title: "▶\u{fe0e} Run Doctest", debugee: false }; +const BENCH: RunnableAction = RunnableAction { run_title: "▶\u{fe0e} Run Bench", debugee: true }; +const BIN: RunnableAction = RunnableAction { run_title: "▶\u{fe0e} Run", debugee: true }; + +impl Runnable { + // test package::module::testname + pub fn label(&self, target: Option) -> String { + match &self.kind { + RunnableKind::Test { test_id, .. } => format!("test {}", test_id), + RunnableKind::TestMod { path } => format!("test-mod {}", path), + RunnableKind::Bench { test_id } => format!("bench {}", test_id), + RunnableKind::DocTest { test_id, .. } => format!("doctest {}", test_id), + RunnableKind::Bin => { + target.map_or_else(|| "run binary".to_string(), |t| format!("run {}", t)) + } + } + } + + pub fn action(&self) -> &'static RunnableAction { + match &self.kind { + RunnableKind::Test { .. } | RunnableKind::TestMod { .. } => &TEST, + RunnableKind::DocTest { .. } => &DOCTEST, + RunnableKind::Bench { .. } => &BENCH, + RunnableKind::Bin => &BIN, + } + } +} + +// Feature: Run +// +// Shows a popup suggesting to run a test/benchmark/binary **at the current cursor +// location**. Super useful for repeatedly running just a single test. Do bind this +// to a shortcut! 
+// +// |=== +// | Editor | Action Name +// +// | VS Code | **Rust Analyzer: Run** +// |=== +pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec { + let sema = Semantics::new(db); + let source_file = sema.parse(file_id); + source_file.syntax().descendants().filter_map(|i| runnable(&sema, i, file_id)).collect() +} + +pub(crate) fn runnable( + sema: &Semantics, + item: SyntaxNode, + file_id: FileId, +) -> Option { + match_ast! { + match item { + ast::Fn(it) => runnable_fn(sema, it, file_id), + ast::Module(it) => runnable_mod(sema, it, file_id), + _ => None, + } + } +} + +fn runnable_fn( + sema: &Semantics, + fn_def: ast::Fn, + file_id: FileId, +) -> Option { + let name_string = fn_def.name()?.text().to_string(); + + let kind = if name_string == "main" { + RunnableKind::Bin + } else { + let test_id = match sema.to_def(&fn_def).map(|def| def.module(sema.db)) { + Some(module) => { + let def = sema.to_def(&fn_def)?; + let impl_trait_name = def.as_assoc_item(sema.db).and_then(|assoc_item| { + match assoc_item.container(sema.db) { + hir::AssocItemContainer::Trait(trait_item) => { + Some(trait_item.name(sema.db).to_string()) + } + hir::AssocItemContainer::ImplDef(impl_def) => impl_def + .target_ty(sema.db) + .as_adt() + .map(|adt| adt.name(sema.db).to_string()), + } + }); + + let path_iter = module + .path_to_root(sema.db) + .into_iter() + .rev() + .filter_map(|it| it.name(sema.db)) + .map(|name| name.to_string()); + + let path = if let Some(impl_trait_name) = impl_trait_name { + path_iter + .chain(std::iter::once(impl_trait_name)) + .chain(std::iter::once(name_string)) + .join("::") + } else { + path_iter.chain(std::iter::once(name_string)).join("::") + }; + + TestId::Path(path) + } + None => TestId::Name(name_string), + }; + + if has_test_related_attribute(&fn_def) { + let attr = TestAttr::from_fn(&fn_def); + RunnableKind::Test { test_id, attr } + } else if fn_def.has_atom_attr("bench") { + RunnableKind::Bench { test_id } + } else if has_doc_test(&fn_def) { 
+ RunnableKind::DocTest { test_id } + } else { + return None; + } + }; + + let attrs = Attrs::from_attrs_owner(sema.db, InFile::new(HirFileId::from(file_id), &fn_def)); + let cfg_exprs = attrs.cfg().collect(); + + let nav = if let RunnableKind::DocTest { .. } = kind { + NavigationTarget::from_doc_commented( + sema.db, + InFile::new(file_id.into(), &fn_def), + InFile::new(file_id.into(), &fn_def), + ) + } else { + NavigationTarget::from_named(sema.db, InFile::new(file_id.into(), &fn_def)) + }; + Some(Runnable { nav, kind, cfg_exprs }) +} + +#[derive(Debug, Copy, Clone)] +pub struct TestAttr { + pub ignore: bool, +} + +impl TestAttr { + fn from_fn(fn_def: &ast::Fn) -> TestAttr { + let ignore = fn_def + .attrs() + .filter_map(|attr| attr.simple_name()) + .any(|attribute_text| attribute_text == "ignore"); + TestAttr { ignore } + } +} + +/// This is a method with a heuristics to support test methods annotated with custom test annotations, such as +/// `#[test_case(...)]`, `#[tokio::test]` and similar. +/// Also a regular `#[test]` annotation is supported. +/// +/// It may produce false positives, for example, `#[wasm_bindgen_test]` requires a different command to run the test, +/// but it's better than not to have the runnables for the tests at all. 
+fn has_test_related_attribute(fn_def: &ast::Fn) -> bool { + fn_def + .attrs() + .filter_map(|attr| attr.path()) + .map(|path| path.syntax().to_string().to_lowercase()) + .any(|attribute_text| attribute_text.contains("test")) +} + +fn has_doc_test(fn_def: &ast::Fn) -> bool { + fn_def.doc_comment_text().map_or(false, |comment| comment.contains("```")) +} + +fn runnable_mod( + sema: &Semantics, + module: ast::Module, + file_id: FileId, +) -> Option { + if !has_test_function_or_multiple_test_submodules(&module) { + return None; + } + let module_def = sema.to_def(&module)?; + + let path = module_def + .path_to_root(sema.db) + .into_iter() + .rev() + .filter_map(|it| it.name(sema.db)) + .join("::"); + + let attrs = Attrs::from_attrs_owner(sema.db, InFile::new(HirFileId::from(file_id), &module)); + let cfg_exprs = attrs.cfg().collect(); + let nav = module_def.to_nav(sema.db); + Some(Runnable { nav, kind: RunnableKind::TestMod { path }, cfg_exprs }) +} + +// We could create runnables for modules with number_of_test_submodules > 0, +// but that bloats the runnables for no real benefit, since all tests can be run by the submodule already +fn has_test_function_or_multiple_test_submodules(module: &ast::Module) -> bool { + if let Some(item_list) = module.item_list() { + let mut number_of_test_submodules = 0; + + for item in item_list.items() { + match item { + ast::Item::Fn(f) => { + if has_test_related_attribute(&f) { + return true; + } + } + ast::Item::Module(submodule) => { + if has_test_function_or_multiple_test_submodules(&submodule) { + number_of_test_submodules += 1; + } + } + _ => (), + } + } + + number_of_test_submodules > 1 + } else { + false + } +} + +#[cfg(test)] +mod tests { + use expect::{expect, Expect}; + + use crate::mock_analysis::analysis_and_position; + + use super::{RunnableAction, BENCH, BIN, DOCTEST, TEST}; + + fn check( + ra_fixture: &str, + // FIXME: fold this into `expect` as well + actions: &[&RunnableAction], + expect: Expect, + ) { + let (analysis, 
position) = analysis_and_position(ra_fixture); + let runnables = analysis.runnables(position.file_id).unwrap(); + expect.assert_debug_eq(&runnables); + assert_eq!( + actions, + runnables.into_iter().map(|it| it.action()).collect::>().as_slice() + ); + } + + #[test] + fn test_runnables() { + check( + r#" +//- /lib.rs +<|> +fn main() {} + +#[test] +fn test_foo() {} + +#[test] +#[ignore] +fn test_foo() {} + +#[bench] +fn bench() {} +"#, + &[&BIN, &TEST, &TEST, &BENCH], + expect![[r#" + [ + Runnable { + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 1..13, + focus_range: Some( + 4..8, + ), + name: "main", + kind: FN, + container_name: None, + description: None, + docs: None, + }, + kind: Bin, + cfg_exprs: [], + }, + Runnable { + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 15..39, + focus_range: Some( + 26..34, + ), + name: "test_foo", + kind: FN, + container_name: None, + description: None, + docs: None, + }, + kind: Test { + test_id: Path( + "test_foo", + ), + attr: TestAttr { + ignore: false, + }, + }, + cfg_exprs: [], + }, + Runnable { + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 41..75, + focus_range: Some( + 62..70, + ), + name: "test_foo", + kind: FN, + container_name: None, + description: None, + docs: None, + }, + kind: Test { + test_id: Path( + "test_foo", + ), + attr: TestAttr { + ignore: true, + }, + }, + cfg_exprs: [], + }, + Runnable { + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 77..99, + focus_range: Some( + 89..94, + ), + name: "bench", + kind: FN, + container_name: None, + description: None, + docs: None, + }, + kind: Bench { + test_id: Path( + "bench", + ), + }, + cfg_exprs: [], + }, + ] + "#]], + ); + } + + #[test] + fn test_runnables_doc_test() { + check( + r#" +//- /lib.rs +<|> +fn main() {} + +/// ``` +/// let x = 5; +/// ``` +fn foo() {} +"#, + &[&BIN, &DOCTEST], + expect![[r#" + [ + Runnable { + nav: NavigationTarget { + file_id: FileId( + 1, + ), + 
full_range: 1..13, + focus_range: Some( + 4..8, + ), + name: "main", + kind: FN, + container_name: None, + description: None, + docs: None, + }, + kind: Bin, + cfg_exprs: [], + }, + Runnable { + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 15..57, + focus_range: None, + name: "foo", + kind: FN, + container_name: None, + description: None, + docs: None, + }, + kind: DocTest { + test_id: Path( + "foo", + ), + }, + cfg_exprs: [], + }, + ] + "#]], + ); + } + + #[test] + fn test_runnables_doc_test_in_impl() { + check( + r#" +//- /lib.rs +<|> +fn main() {} + +struct Data; +impl Data { + /// ``` + /// let x = 5; + /// ``` + fn foo() {} +} +"#, + &[&BIN, &DOCTEST], + expect![[r#" + [ + Runnable { + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 1..13, + focus_range: Some( + 4..8, + ), + name: "main", + kind: FN, + container_name: None, + description: None, + docs: None, + }, + kind: Bin, + cfg_exprs: [], + }, + Runnable { + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 44..98, + focus_range: None, + name: "foo", + kind: FN, + container_name: None, + description: None, + docs: None, + }, + kind: DocTest { + test_id: Path( + "Data::foo", + ), + }, + cfg_exprs: [], + }, + ] + "#]], + ); + } + + #[test] + fn test_runnables_module() { + check( + r#" +//- /lib.rs +<|> +mod test_mod { + #[test] + fn test_foo1() {} +} +"#, + &[&TEST, &TEST], + expect![[r#" + [ + Runnable { + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 1..51, + focus_range: Some( + 5..13, + ), + name: "test_mod", + kind: MODULE, + container_name: None, + description: None, + docs: None, + }, + kind: TestMod { + path: "test_mod", + }, + cfg_exprs: [], + }, + Runnable { + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 20..49, + focus_range: Some( + 35..44, + ), + name: "test_foo1", + kind: FN, + container_name: None, + description: None, + docs: None, + }, + kind: Test { + test_id: Path( + "test_mod::test_foo1", 
+ ), + attr: TestAttr { + ignore: false, + }, + }, + cfg_exprs: [], + }, + ] + "#]], + ); + } + + #[test] + fn only_modules_with_test_functions_or_more_than_one_test_submodule_have_runners() { + check( + r#" +//- /lib.rs +<|> +mod root_tests { + mod nested_tests_0 { + mod nested_tests_1 { + #[test] + fn nested_test_11() {} + + #[test] + fn nested_test_12() {} + } + + mod nested_tests_2 { + #[test] + fn nested_test_2() {} + } + + mod nested_tests_3 {} + } + + mod nested_tests_4 {} +} +"#, + &[&TEST, &TEST, &TEST, &TEST, &TEST, &TEST], + expect![[r#" + [ + Runnable { + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 22..323, + focus_range: Some( + 26..40, + ), + name: "nested_tests_0", + kind: MODULE, + container_name: None, + description: None, + docs: None, + }, + kind: TestMod { + path: "root_tests::nested_tests_0", + }, + cfg_exprs: [], + }, + Runnable { + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 51..192, + focus_range: Some( + 55..69, + ), + name: "nested_tests_1", + kind: MODULE, + container_name: None, + description: None, + docs: None, + }, + kind: TestMod { + path: "root_tests::nested_tests_0::nested_tests_1", + }, + cfg_exprs: [], + }, + Runnable { + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 84..126, + focus_range: Some( + 107..121, + ), + name: "nested_test_11", + kind: FN, + container_name: None, + description: None, + docs: None, + }, + kind: Test { + test_id: Path( + "root_tests::nested_tests_0::nested_tests_1::nested_test_11", + ), + attr: TestAttr { + ignore: false, + }, + }, + cfg_exprs: [], + }, + Runnable { + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 140..182, + focus_range: Some( + 163..177, + ), + name: "nested_test_12", + kind: FN, + container_name: None, + description: None, + docs: None, + }, + kind: Test { + test_id: Path( + "root_tests::nested_tests_0::nested_tests_1::nested_test_12", + ), + attr: TestAttr { + ignore: false, + }, + }, + cfg_exprs: 
[], + }, + Runnable { + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 202..286, + focus_range: Some( + 206..220, + ), + name: "nested_tests_2", + kind: MODULE, + container_name: None, + description: None, + docs: None, + }, + kind: TestMod { + path: "root_tests::nested_tests_0::nested_tests_2", + }, + cfg_exprs: [], + }, + Runnable { + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 235..276, + focus_range: Some( + 258..271, + ), + name: "nested_test_2", + kind: FN, + container_name: None, + description: None, + docs: None, + }, + kind: Test { + test_id: Path( + "root_tests::nested_tests_0::nested_tests_2::nested_test_2", + ), + attr: TestAttr { + ignore: false, + }, + }, + cfg_exprs: [], + }, + ] + "#]], + ); + } + + #[test] + fn test_runnables_with_feature() { + check( + r#" +//- /lib.rs crate:foo cfg:feature=foo +<|> +#[test] +#[cfg(feature = "foo")] +fn test_foo1() {} +"#, + &[&TEST], + expect![[r#" + [ + Runnable { + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 1..50, + focus_range: Some( + 36..45, + ), + name: "test_foo1", + kind: FN, + container_name: None, + description: None, + docs: None, + }, + kind: Test { + test_id: Path( + "test_foo1", + ), + attr: TestAttr { + ignore: false, + }, + }, + cfg_exprs: [ + KeyValue { + key: "feature", + value: "foo", + }, + ], + }, + ] + "#]], + ); + } + + #[test] + fn test_runnables_with_features() { + check( + r#" +//- /lib.rs crate:foo cfg:feature=foo,feature=bar +<|> +#[test] +#[cfg(all(feature = "foo", feature = "bar"))] +fn test_foo1() {} +"#, + &[&TEST], + expect![[r#" + [ + Runnable { + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 1..72, + focus_range: Some( + 58..67, + ), + name: "test_foo1", + kind: FN, + container_name: None, + description: None, + docs: None, + }, + kind: Test { + test_id: Path( + "test_foo1", + ), + attr: TestAttr { + ignore: false, + }, + }, + cfg_exprs: [ + All( + [ + KeyValue { + key: "feature", + value: 
"foo", + }, + KeyValue { + key: "feature", + value: "bar", + }, + ], + ), + ], + }, + ] + "#]], + ); + } + + #[test] + fn test_runnables_no_test_function_in_module() { + check( + r#" +//- /lib.rs +<|> +mod test_mod { + fn foo1() {} +} +"#, + &[], + expect![[r#" + [] + "#]], + ); + } +} diff --git a/crates/ide/src/status.rs b/crates/ide/src/status.rs new file mode 100644 index 0000000000..c237081818 --- /dev/null +++ b/crates/ide/src/status.rs @@ -0,0 +1,145 @@ +use std::{fmt, iter::FromIterator, sync::Arc}; + +use base_db::{ + salsa::debug::{DebugQueryTable, TableEntry}, + FileTextQuery, SourceRootId, +}; +use hir::MacroFile; +use ide_db::{ + symbol_index::{LibrarySymbolsQuery, SymbolIndex}, + RootDatabase, +}; +use profile::{memory_usage, Bytes}; +use rustc_hash::FxHashMap; +use syntax::{ast, Parse, SyntaxNode}; + +use crate::FileId; + +fn syntax_tree_stats(db: &RootDatabase) -> SyntaxTreeStats { + base_db::ParseQuery.in_db(db).entries::() +} +fn macro_syntax_tree_stats(db: &RootDatabase) -> SyntaxTreeStats { + hir::db::ParseMacroQuery.in_db(db).entries::() +} + +// Feature: Status +// +// Shows internal statistic about memory usage of rust-analyzer. 
+// +// |=== +// | Editor | Action Name +// +// | VS Code | **Rust Analyzer: Status** +// |=== +pub(crate) fn status(db: &RootDatabase) -> String { + let files_stats = FileTextQuery.in_db(db).entries::(); + let syntax_tree_stats = syntax_tree_stats(db); + let macro_syntax_tree_stats = macro_syntax_tree_stats(db); + let symbols_stats = LibrarySymbolsQuery.in_db(db).entries::(); + format!( + "{}\n{}\n{}\n{} (macros)\n\n\nmemory:\n{}\ngc {:?} seconds ago", + files_stats, + symbols_stats, + syntax_tree_stats, + macro_syntax_tree_stats, + memory_usage(), + db.last_gc.elapsed().as_secs(), + ) +} + +#[derive(Default)] +struct FilesStats { + total: usize, + size: Bytes, +} + +impl fmt::Display for FilesStats { + fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { + write!(fmt, "{} ({}) files", self.total, self.size) + } +} + +impl FromIterator>> for FilesStats { + fn from_iter(iter: T) -> FilesStats + where + T: IntoIterator>>, + { + let mut res = FilesStats::default(); + for entry in iter { + res.total += 1; + res.size += entry.value.unwrap().len(); + } + res + } +} + +#[derive(Default)] +pub(crate) struct SyntaxTreeStats { + total: usize, + pub(crate) retained: usize, +} + +impl fmt::Display for SyntaxTreeStats { + fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { + write!(fmt, "{} trees, {} retained", self.total, self.retained) + } +} + +impl FromIterator>> for SyntaxTreeStats { + fn from_iter(iter: T) -> SyntaxTreeStats + where + T: IntoIterator>>, + { + let mut res = SyntaxTreeStats::default(); + for entry in iter { + res.total += 1; + res.retained += entry.value.is_some() as usize; + } + res + } +} + +impl FromIterator, M)>>> for SyntaxTreeStats { + fn from_iter(iter: T) -> SyntaxTreeStats + where + T: IntoIterator, M)>>>, + { + let mut res = SyntaxTreeStats::default(); + for entry in iter { + res.total += 1; + res.retained += entry.value.is_some() as usize; + } + res + } +} + +#[derive(Default)] +struct LibrarySymbolsStats { + total: usize, + size: 
Bytes, +} + +impl fmt::Display for LibrarySymbolsStats { + fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { + write!(fmt, "{} ({}) symbols", self.total, self.size) + } +} + +impl FromIterator>>> + for LibrarySymbolsStats +{ + fn from_iter(iter: T) -> LibrarySymbolsStats + where + T: IntoIterator>>>, + { + let mut res = LibrarySymbolsStats::default(); + for entry in iter { + let value = entry.value.unwrap(); + for symbols in value.values() { + res.total += symbols.len(); + res.size += symbols.memory_size(); + } + } + res + } +} diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs new file mode 100644 index 0000000000..5d7c7e8d02 --- /dev/null +++ b/crates/ide/src/syntax_highlighting.rs @@ -0,0 +1,872 @@ +mod tags; +mod html; +mod injection; +#[cfg(test)] +mod tests; + +use hir::{Name, Semantics, VariantDef}; +use ide_db::{ + defs::{classify_name, classify_name_ref, Definition, NameClass, NameRefClass}, + RootDatabase, +}; +use rustc_hash::FxHashMap; +use syntax::{ + ast::{self, HasFormatSpecifier}, + AstNode, AstToken, Direction, NodeOrToken, SyntaxElement, + SyntaxKind::*, + TextRange, WalkEvent, T, +}; + +use crate::FileId; + +use ast::FormatSpecifier; +pub(crate) use html::highlight_as_html; +pub use tags::{Highlight, HighlightModifier, HighlightModifiers, HighlightTag}; + +#[derive(Debug, Clone)] +pub struct HighlightedRange { + pub range: TextRange, + pub highlight: Highlight, + pub binding_hash: Option, +} + +// Feature: Semantic Syntax Highlighting +// +// rust-analyzer highlights the code semantically. +// For example, `bar` in `foo::Bar` might be colored differently depending on whether `Bar` is an enum or a trait. +// rust-analyzer does not specify colors directly, instead it assigns tag (like `struct`) and a set of modifiers (like `declaration`) to each token. +// It's up to the client to map those to specific colors. 
+// +// The general rule is that a reference to an entity gets colored the same way as the entity itself. +// We also give special modifier for `mut` and `&mut` local variables. +pub(crate) fn highlight( + db: &RootDatabase, + file_id: FileId, + range_to_highlight: Option, + syntactic_name_ref_highlighting: bool, +) -> Vec { + let _p = profile::span("highlight"); + let sema = Semantics::new(db); + + // Determine the root based on the given range. + let (root, range_to_highlight) = { + let source_file = sema.parse(file_id); + match range_to_highlight { + Some(range) => { + let node = match source_file.syntax().covering_element(range) { + NodeOrToken::Node(it) => it, + NodeOrToken::Token(it) => it.parent(), + }; + (node, range) + } + None => (source_file.syntax().clone(), source_file.syntax().text_range()), + } + }; + + let mut bindings_shadow_count: FxHashMap = FxHashMap::default(); + // We use a stack for the DFS traversal below. + // When we leave a node, the we use it to flatten the highlighted ranges. + let mut stack = HighlightedRangeStack::new(); + + let mut current_macro_call: Option = None; + let mut format_string: Option = None; + + // Walk all nodes, keeping track of whether we are inside a macro or not. + // If in macro, expand it first and highlight the expanded code. 
+ for event in root.preorder_with_tokens() { + match &event { + WalkEvent::Enter(_) => stack.push(), + WalkEvent::Leave(_) => stack.pop(), + }; + + let event_range = match &event { + WalkEvent::Enter(it) => it.text_range(), + WalkEvent::Leave(it) => it.text_range(), + }; + + // Element outside of the viewport, no need to highlight + if range_to_highlight.intersect(event_range).is_none() { + continue; + } + + // Track "inside macro" state + match event.clone().map(|it| it.into_node().and_then(ast::MacroCall::cast)) { + WalkEvent::Enter(Some(mc)) => { + current_macro_call = Some(mc.clone()); + if let Some(range) = macro_call_range(&mc) { + stack.add(HighlightedRange { + range, + highlight: HighlightTag::Macro.into(), + binding_hash: None, + }); + } + if let Some(name) = mc.is_macro_rules() { + if let Some((highlight, binding_hash)) = highlight_element( + &sema, + &mut bindings_shadow_count, + syntactic_name_ref_highlighting, + name.syntax().clone().into(), + ) { + stack.add(HighlightedRange { + range: name.syntax().text_range(), + highlight, + binding_hash, + }); + } + } + continue; + } + WalkEvent::Leave(Some(mc)) => { + assert!(current_macro_call == Some(mc)); + current_macro_call = None; + format_string = None; + } + _ => (), + } + + // Check for Rust code in documentation + match &event { + WalkEvent::Leave(NodeOrToken::Node(node)) => { + if let Some((doctest, range_mapping, new_comments)) = + injection::extract_doc_comments(node) + { + injection::highlight_doc_comment( + doctest, + range_mapping, + new_comments, + &mut stack, + ); + } + } + _ => (), + } + + let element = match event { + WalkEvent::Enter(it) => it, + WalkEvent::Leave(_) => continue, + }; + + let range = element.text_range(); + + let element_to_highlight = if current_macro_call.is_some() && element.kind() != COMMENT { + // Inside a macro -- expand it first + let token = match element.clone().into_token() { + Some(it) if it.parent().kind() == TOKEN_TREE => it, + _ => continue, + }; + let token = 
sema.descend_into_macros(token.clone()); + let parent = token.parent(); + + // Check if macro takes a format string and remember it for highlighting later. + // The macros that accept a format string expand to a compiler builtin macros + // `format_args` and `format_args_nl`. + if let Some(name) = parent + .parent() + .and_then(ast::MacroCall::cast) + .and_then(|mc| mc.path()) + .and_then(|p| p.segment()) + .and_then(|s| s.name_ref()) + { + match name.text().as_str() { + "format_args" | "format_args_nl" => { + format_string = parent + .children_with_tokens() + .filter(|t| t.kind() != WHITESPACE) + .nth(1) + .filter(|e| { + ast::String::can_cast(e.kind()) + || ast::RawString::can_cast(e.kind()) + }) + } + _ => {} + } + } + + // We only care Name and Name_ref + match (token.kind(), parent.kind()) { + (IDENT, NAME) | (IDENT, NAME_REF) => parent.into(), + _ => token.into(), + } + } else { + element.clone() + }; + + if let Some(token) = element.as_token().cloned().and_then(ast::RawString::cast) { + let expanded = element_to_highlight.as_token().unwrap().clone(); + if injection::highlight_injection(&mut stack, &sema, token, expanded).is_some() { + continue; + } + } + + let is_format_string = format_string.as_ref() == Some(&element_to_highlight); + + if let Some((highlight, binding_hash)) = highlight_element( + &sema, + &mut bindings_shadow_count, + syntactic_name_ref_highlighting, + element_to_highlight.clone(), + ) { + stack.add(HighlightedRange { range, highlight, binding_hash }); + if let Some(string) = + element_to_highlight.as_token().cloned().and_then(ast::String::cast) + { + if is_format_string { + stack.push(); + string.lex_format_specifier(|piece_range, kind| { + if let Some(highlight) = highlight_format_specifier(kind) { + stack.add(HighlightedRange { + range: piece_range + range.start(), + highlight: highlight.into(), + binding_hash: None, + }); + } + }); + stack.pop(); + } + // Highlight escape sequences + if let Some(char_ranges) = string.char_ranges() { + 
stack.push(); + for (piece_range, _) in char_ranges.iter().filter(|(_, char)| char.is_ok()) { + if string.text()[piece_range.start().into()..].starts_with('\\') { + stack.add(HighlightedRange { + range: piece_range + range.start(), + highlight: HighlightTag::EscapeSequence.into(), + binding_hash: None, + }); + } + } + stack.pop_and_inject(None); + } + } else if let Some(string) = + element_to_highlight.as_token().cloned().and_then(ast::RawString::cast) + { + if is_format_string { + stack.push(); + string.lex_format_specifier(|piece_range, kind| { + if let Some(highlight) = highlight_format_specifier(kind) { + stack.add(HighlightedRange { + range: piece_range + range.start(), + highlight: highlight.into(), + binding_hash: None, + }); + } + }); + stack.pop(); + } + } + } + } + + stack.flattened() +} + +#[derive(Debug)] +struct HighlightedRangeStack { + stack: Vec>, +} + +/// We use a stack to implement the flattening logic for the highlighted +/// syntax ranges. +impl HighlightedRangeStack { + fn new() -> Self { + Self { stack: vec![Vec::new()] } + } + + fn push(&mut self) { + self.stack.push(Vec::new()); + } + + /// Flattens the highlighted ranges. 
+ /// + /// For example `#[cfg(feature = "foo")]` contains the nested ranges: + /// 1) parent-range: Attribute [0, 23) + /// 2) child-range: String [16, 21) + /// + /// The following code implements the flattening, for our example this results to: + /// `[Attribute [0, 16), String [16, 21), Attribute [21, 23)]` + fn pop(&mut self) { + let children = self.stack.pop().unwrap(); + let prev = self.stack.last_mut().unwrap(); + let needs_flattening = !children.is_empty() + && !prev.is_empty() + && prev.last().unwrap().range.contains_range(children.first().unwrap().range); + if !needs_flattening { + prev.extend(children); + } else { + let mut parent = prev.pop().unwrap(); + for ele in children { + assert!(parent.range.contains_range(ele.range)); + + let cloned = Self::intersect(&mut parent, &ele); + if !parent.range.is_empty() { + prev.push(parent); + } + prev.push(ele); + parent = cloned; + } + if !parent.range.is_empty() { + prev.push(parent); + } + } + } + + /// Intersects the `HighlightedRange` `parent` with `child`. + /// `parent` is mutated in place, becoming the range before `child`. + /// Returns the range (of the same type as `parent`) *after* `child`. + fn intersect(parent: &mut HighlightedRange, child: &HighlightedRange) -> HighlightedRange { + assert!(parent.range.contains_range(child.range)); + + let mut cloned = parent.clone(); + parent.range = TextRange::new(parent.range.start(), child.range.start()); + cloned.range = TextRange::new(child.range.end(), cloned.range.end()); + + cloned + } + + /// Remove the `HighlightRange` of `parent` that's currently covered by `child`. 
+ fn intersect_partial(parent: &mut HighlightedRange, child: &HighlightedRange) { + assert!( + parent.range.start() <= child.range.start() + && parent.range.end() >= child.range.start() + && child.range.end() > parent.range.end() + ); + + parent.range = TextRange::new(parent.range.start(), child.range.start()); + } + + /// Similar to `pop`, but can modify arbitrary prior ranges (where `pop`) + /// can only modify the last range currently on the stack. + /// Can be used to do injections that span multiple ranges, like the + /// doctest injection below. + /// If `overwrite_parent` is non-optional, the highlighting of the parent range + /// is overwritten with the argument. + /// + /// Note that `pop` can be simulated by `pop_and_inject(false)` but the + /// latter is computationally more expensive. + fn pop_and_inject(&mut self, overwrite_parent: Option) { + let mut children = self.stack.pop().unwrap(); + let prev = self.stack.last_mut().unwrap(); + children.sort_by_key(|range| range.range.start()); + prev.sort_by_key(|range| range.range.start()); + + for child in children { + if let Some(idx) = + prev.iter().position(|parent| parent.range.contains_range(child.range)) + { + if let Some(tag) = overwrite_parent { + prev[idx].highlight = tag; + } + + let cloned = Self::intersect(&mut prev[idx], &child); + let insert_idx = if prev[idx].range.is_empty() { + prev.remove(idx); + idx + } else { + idx + 1 + }; + prev.insert(insert_idx, child); + if !cloned.range.is_empty() { + prev.insert(insert_idx + 1, cloned); + } + } else { + let maybe_idx = + prev.iter().position(|parent| parent.range.contains(child.range.start())); + match (overwrite_parent, maybe_idx) { + (Some(_), Some(idx)) => { + Self::intersect_partial(&mut prev[idx], &child); + let insert_idx = if prev[idx].range.is_empty() { + prev.remove(idx); + idx + } else { + idx + 1 + }; + prev.insert(insert_idx, child); + } + (_, None) => { + let idx = prev + .binary_search_by_key(&child.range.start(), |range| 
range.range.start()) + .unwrap_or_else(|x| x); + prev.insert(idx, child); + } + _ => { + unreachable!("child range should be completely contained in parent range"); + } + } + } + } + } + + fn add(&mut self, range: HighlightedRange) { + self.stack + .last_mut() + .expect("during DFS traversal, the stack must not be empty") + .push(range) + } + + fn flattened(mut self) -> Vec { + assert_eq!( + self.stack.len(), + 1, + "after DFS traversal, the stack should only contain a single element" + ); + let mut res = self.stack.pop().unwrap(); + res.sort_by_key(|range| range.range.start()); + // Check that ranges are sorted and disjoint + assert!(res + .iter() + .zip(res.iter().skip(1)) + .all(|(left, right)| left.range.end() <= right.range.start())); + res + } +} + +fn highlight_format_specifier(kind: FormatSpecifier) -> Option { + Some(match kind { + FormatSpecifier::Open + | FormatSpecifier::Close + | FormatSpecifier::Colon + | FormatSpecifier::Fill + | FormatSpecifier::Align + | FormatSpecifier::Sign + | FormatSpecifier::NumberSign + | FormatSpecifier::DollarSign + | FormatSpecifier::Dot + | FormatSpecifier::Asterisk + | FormatSpecifier::QuestionMark => HighlightTag::FormatSpecifier, + FormatSpecifier::Integer | FormatSpecifier::Zero => HighlightTag::NumericLiteral, + FormatSpecifier::Identifier => HighlightTag::Local, + }) +} + +fn macro_call_range(macro_call: &ast::MacroCall) -> Option { + let path = macro_call.path()?; + let name_ref = path.segment()?.name_ref()?; + + let range_start = name_ref.syntax().text_range().start(); + let mut range_end = name_ref.syntax().text_range().end(); + for sibling in path.syntax().siblings_with_tokens(Direction::Next) { + match sibling.kind() { + T![!] 
| IDENT => range_end = sibling.text_range().end(), + _ => (), + } + } + + Some(TextRange::new(range_start, range_end)) +} + +fn is_possibly_unsafe(name_ref: &ast::NameRef) -> bool { + name_ref + .syntax() + .parent() + .and_then(|parent| { + ast::FieldExpr::cast(parent.clone()) + .map(|_| true) + .or_else(|| ast::RecordPatField::cast(parent).map(|_| true)) + }) + .unwrap_or(false) +} + +fn highlight_element( + sema: &Semantics, + bindings_shadow_count: &mut FxHashMap, + syntactic_name_ref_highlighting: bool, + element: SyntaxElement, +) -> Option<(Highlight, Option)> { + let db = sema.db; + let mut binding_hash = None; + let highlight: Highlight = match element.kind() { + FN => { + bindings_shadow_count.clear(); + return None; + } + + // Highlight definitions depending on the "type" of the definition. + NAME => { + let name = element.into_node().and_then(ast::Name::cast).unwrap(); + let name_kind = classify_name(sema, &name); + + if let Some(NameClass::Definition(Definition::Local(local))) = &name_kind { + if let Some(name) = local.name(db) { + let shadow_count = bindings_shadow_count.entry(name.clone()).or_default(); + *shadow_count += 1; + binding_hash = Some(calc_binding_hash(&name, *shadow_count)) + } + }; + + match name_kind { + Some(NameClass::ExternCrate(_)) => HighlightTag::Module.into(), + Some(NameClass::Definition(def)) => { + highlight_name(sema, db, def, None, false) | HighlightModifier::Definition + } + Some(NameClass::ConstReference(def)) => highlight_name(sema, db, def, None, false), + Some(NameClass::FieldShorthand { field, .. 
}) => { + let mut h = HighlightTag::Field.into(); + if let Definition::Field(field) = field { + if let VariantDef::Union(_) = field.parent_def(db) { + h |= HighlightModifier::Unsafe; + } + } + + h + } + None => highlight_name_by_syntax(name) | HighlightModifier::Definition, + } + } + + // Highlight references like the definitions they resolve to + NAME_REF if element.ancestors().any(|it| it.kind() == ATTR) => { + Highlight::from(HighlightTag::Function) | HighlightModifier::Attribute + } + NAME_REF => { + let name_ref = element.into_node().and_then(ast::NameRef::cast).unwrap(); + let possibly_unsafe = is_possibly_unsafe(&name_ref); + match classify_name_ref(sema, &name_ref) { + Some(name_kind) => match name_kind { + NameRefClass::ExternCrate(_) => HighlightTag::Module.into(), + NameRefClass::Definition(def) => { + if let Definition::Local(local) = &def { + if let Some(name) = local.name(db) { + let shadow_count = + bindings_shadow_count.entry(name.clone()).or_default(); + binding_hash = Some(calc_binding_hash(&name, *shadow_count)) + } + }; + highlight_name(sema, db, def, Some(name_ref), possibly_unsafe) + } + NameRefClass::FieldShorthand { .. 
} => HighlightTag::Field.into(), + }, + None if syntactic_name_ref_highlighting => { + highlight_name_ref_by_syntax(name_ref, sema) + } + None => HighlightTag::UnresolvedReference.into(), + } + } + + // Simple token-based highlighting + COMMENT => { + let comment = element.into_token().and_then(ast::Comment::cast)?; + let h = HighlightTag::Comment; + match comment.kind().doc { + Some(_) => h | HighlightModifier::Documentation, + None => h.into(), + } + } + STRING | RAW_STRING | RAW_BYTE_STRING | BYTE_STRING => HighlightTag::StringLiteral.into(), + ATTR => HighlightTag::Attribute.into(), + INT_NUMBER | FLOAT_NUMBER => HighlightTag::NumericLiteral.into(), + BYTE => HighlightTag::ByteLiteral.into(), + CHAR => HighlightTag::CharLiteral.into(), + QUESTION => Highlight::new(HighlightTag::Operator) | HighlightModifier::ControlFlow, + LIFETIME => { + let h = Highlight::new(HighlightTag::Lifetime); + match element.parent().map(|it| it.kind()) { + Some(LIFETIME_PARAM) | Some(LABEL) => h | HighlightModifier::Definition, + _ => h, + } + } + p if p.is_punct() => match p { + T![&] => { + let h = HighlightTag::Operator.into(); + let is_unsafe = element + .parent() + .and_then(ast::RefExpr::cast) + .map(|ref_expr| sema.is_unsafe_ref_expr(&ref_expr)) + .unwrap_or(false); + if is_unsafe { + h | HighlightModifier::Unsafe + } else { + h + } + } + T![::] | T![->] | T![=>] | T![..] | T![=] | T![@] => HighlightTag::Operator.into(), + T![!] 
if element.parent().and_then(ast::MacroCall::cast).is_some() => { + HighlightTag::Macro.into() + } + T![*] if element.parent().and_then(ast::PtrType::cast).is_some() => { + HighlightTag::Keyword.into() + } + T![*] if element.parent().and_then(ast::PrefixExpr::cast).is_some() => { + let prefix_expr = element.parent().and_then(ast::PrefixExpr::cast)?; + + let expr = prefix_expr.expr()?; + let ty = sema.type_of_expr(&expr)?; + if ty.is_raw_ptr() { + HighlightTag::Operator | HighlightModifier::Unsafe + } else if let Some(ast::PrefixOp::Deref) = prefix_expr.op_kind() { + HighlightTag::Operator.into() + } else { + HighlightTag::Punctuation.into() + } + } + T![-] if element.parent().and_then(ast::PrefixExpr::cast).is_some() => { + HighlightTag::NumericLiteral.into() + } + _ if element.parent().and_then(ast::PrefixExpr::cast).is_some() => { + HighlightTag::Operator.into() + } + _ if element.parent().and_then(ast::BinExpr::cast).is_some() => { + HighlightTag::Operator.into() + } + _ if element.parent().and_then(ast::RangeExpr::cast).is_some() => { + HighlightTag::Operator.into() + } + _ if element.parent().and_then(ast::RangePat::cast).is_some() => { + HighlightTag::Operator.into() + } + _ if element.parent().and_then(ast::RestPat::cast).is_some() => { + HighlightTag::Operator.into() + } + _ if element.parent().and_then(ast::Attr::cast).is_some() => { + HighlightTag::Attribute.into() + } + _ => HighlightTag::Punctuation.into(), + }, + + k if k.is_keyword() => { + let h = Highlight::new(HighlightTag::Keyword); + match k { + T![break] + | T![continue] + | T![else] + | T![if] + | T![loop] + | T![match] + | T![return] + | T![while] + | T![in] => h | HighlightModifier::ControlFlow, + T![for] if !is_child_of_impl(&element) => h | HighlightModifier::ControlFlow, + T![unsafe] => h | HighlightModifier::Unsafe, + T![true] | T![false] => HighlightTag::BoolLiteral.into(), + T![self] => { + let self_param_is_mut = element + .parent() + .and_then(ast::SelfParam::cast) + .and_then(|p| 
p.mut_token()) + .is_some(); + // closure to enforce lazyness + let self_path = || { + sema.resolve_path(&element.parent()?.parent().and_then(ast::Path::cast)?) + }; + if self_param_is_mut + || matches!(self_path(), + Some(hir::PathResolution::Local(local)) + if local.is_self(db) + && (local.is_mut(db) || local.ty(db).is_mutable_reference()) + ) + { + HighlightTag::SelfKeyword | HighlightModifier::Mutable + } else { + HighlightTag::SelfKeyword.into() + } + } + T![ref] => element + .parent() + .and_then(ast::IdentPat::cast) + .and_then(|ident_pat| { + if sema.is_unsafe_ident_pat(&ident_pat) { + Some(HighlightModifier::Unsafe) + } else { + None + } + }) + .map(|modifier| h | modifier) + .unwrap_or(h), + _ => h, + } + } + + _ => return None, + }; + + return Some((highlight, binding_hash)); + + fn calc_binding_hash(name: &Name, shadow_count: u32) -> u64 { + fn hash(x: T) -> u64 { + use std::{collections::hash_map::DefaultHasher, hash::Hasher}; + + let mut hasher = DefaultHasher::new(); + x.hash(&mut hasher); + hasher.finish() + } + + hash((name, shadow_count)) + } +} + +fn is_child_of_impl(element: &SyntaxElement) -> bool { + match element.parent() { + Some(e) => e.kind() == IMPL, + _ => false, + } +} + +fn highlight_name( + sema: &Semantics, + db: &RootDatabase, + def: Definition, + name_ref: Option, + possibly_unsafe: bool, +) -> Highlight { + match def { + Definition::Macro(_) => HighlightTag::Macro, + Definition::Field(field) => { + let mut h = HighlightTag::Field.into(); + if possibly_unsafe { + if let VariantDef::Union(_) = field.parent_def(db) { + h |= HighlightModifier::Unsafe; + } + } + + return h; + } + Definition::ModuleDef(def) => match def { + hir::ModuleDef::Module(_) => HighlightTag::Module, + hir::ModuleDef::Function(func) => { + let mut h = HighlightTag::Function.into(); + if func.is_unsafe(db) { + h |= HighlightModifier::Unsafe; + } else { + let is_unsafe = name_ref + .and_then(|name_ref| name_ref.syntax().parent()) + 
.and_then(ast::MethodCallExpr::cast) + .map(|method_call_expr| sema.is_unsafe_method_call(method_call_expr)) + .unwrap_or(false); + if is_unsafe { + h |= HighlightModifier::Unsafe; + } + } + return h; + } + hir::ModuleDef::Adt(hir::Adt::Struct(_)) => HighlightTag::Struct, + hir::ModuleDef::Adt(hir::Adt::Enum(_)) => HighlightTag::Enum, + hir::ModuleDef::Adt(hir::Adt::Union(_)) => HighlightTag::Union, + hir::ModuleDef::EnumVariant(_) => HighlightTag::EnumVariant, + hir::ModuleDef::Const(_) => HighlightTag::Constant, + hir::ModuleDef::Trait(_) => HighlightTag::Trait, + hir::ModuleDef::TypeAlias(_) => HighlightTag::TypeAlias, + hir::ModuleDef::BuiltinType(_) => HighlightTag::BuiltinType, + hir::ModuleDef::Static(s) => { + let mut h = Highlight::new(HighlightTag::Static); + if s.is_mut(db) { + h |= HighlightModifier::Mutable; + h |= HighlightModifier::Unsafe; + } + return h; + } + }, + Definition::SelfType(_) => HighlightTag::SelfType, + Definition::TypeParam(_) => HighlightTag::TypeParam, + Definition::Local(local) => { + let tag = + if local.is_param(db) { HighlightTag::ValueParam } else { HighlightTag::Local }; + let mut h = Highlight::new(tag); + if local.is_mut(db) || local.ty(db).is_mutable_reference() { + h |= HighlightModifier::Mutable; + } + return h; + } + } + .into() +} + +fn highlight_name_by_syntax(name: ast::Name) -> Highlight { + let default = HighlightTag::UnresolvedReference; + + let parent = match name.syntax().parent() { + Some(it) => it, + _ => return default.into(), + }; + + let tag = match parent.kind() { + STRUCT => HighlightTag::Struct, + ENUM => HighlightTag::Enum, + UNION => HighlightTag::Union, + TRAIT => HighlightTag::Trait, + TYPE_ALIAS => HighlightTag::TypeAlias, + TYPE_PARAM => HighlightTag::TypeParam, + RECORD_FIELD => HighlightTag::Field, + MODULE => HighlightTag::Module, + FN => HighlightTag::Function, + CONST => HighlightTag::Constant, + STATIC => HighlightTag::Static, + VARIANT => HighlightTag::EnumVariant, + IDENT_PAT => 
HighlightTag::Local, + _ => default, + }; + + tag.into() +} + +fn highlight_name_ref_by_syntax(name: ast::NameRef, sema: &Semantics) -> Highlight { + let default = HighlightTag::UnresolvedReference; + + let parent = match name.syntax().parent() { + Some(it) => it, + _ => return default.into(), + }; + + match parent.kind() { + METHOD_CALL_EXPR => { + let mut h = Highlight::new(HighlightTag::Function); + let is_unsafe = ast::MethodCallExpr::cast(parent) + .map(|method_call_expr| sema.is_unsafe_method_call(method_call_expr)) + .unwrap_or(false); + if is_unsafe { + h |= HighlightModifier::Unsafe; + } + + h + } + FIELD_EXPR => { + let h = HighlightTag::Field; + let is_union = ast::FieldExpr::cast(parent) + .and_then(|field_expr| { + let field = sema.resolve_field(&field_expr)?; + Some(if let VariantDef::Union(_) = field.parent_def(sema.db) { + true + } else { + false + }) + }) + .unwrap_or(false); + if is_union { + h | HighlightModifier::Unsafe + } else { + h.into() + } + } + PATH_SEGMENT => { + let path = match parent.parent().and_then(ast::Path::cast) { + Some(it) => it, + _ => return default.into(), + }; + let expr = match path.syntax().parent().and_then(ast::PathExpr::cast) { + Some(it) => it, + _ => { + // within path, decide whether it is module or adt by checking for uppercase name + return if name.text().chars().next().unwrap_or_default().is_uppercase() { + HighlightTag::Struct + } else { + HighlightTag::Module + } + .into(); + } + }; + let parent = match expr.syntax().parent() { + Some(it) => it, + None => return default.into(), + }; + + match parent.kind() { + CALL_EXPR => HighlightTag::Function.into(), + _ => if name.text().chars().next().unwrap_or_default().is_uppercase() { + HighlightTag::Struct.into() + } else { + HighlightTag::Constant + } + .into(), + } + } + _ => default.into(), + } +} diff --git a/crates/ide/src/syntax_highlighting/html.rs b/crates/ide/src/syntax_highlighting/html.rs new file mode 100644 index 0000000000..249368ff81 --- /dev/null +++ 
b/crates/ide/src/syntax_highlighting/html.rs @@ -0,0 +1,97 @@ +//! Renders a bit of code as HTML. + +use base_db::SourceDatabase; +use oorandom::Rand32; +use syntax::{AstNode, TextRange, TextSize}; + +use crate::{syntax_highlighting::highlight, FileId, RootDatabase}; + +pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: bool) -> String { + let parse = db.parse(file_id); + + fn rainbowify(seed: u64) -> String { + let mut rng = Rand32::new(seed); + format!( + "hsl({h},{s}%,{l}%)", + h = rng.rand_range(0..361), + s = rng.rand_range(42..99), + l = rng.rand_range(40..91), + ) + } + + let ranges = highlight(db, file_id, None, false); + let text = parse.tree().syntax().to_string(); + let mut prev_pos = TextSize::from(0); + let mut buf = String::new(); + buf.push_str(&STYLE); + buf.push_str("
");
+    for range in &ranges {
+        if range.range.start() > prev_pos {
+            let curr = &text[TextRange::new(prev_pos, range.range.start())];
+            let text = html_escape(curr);
+            buf.push_str(&text);
+        }
+        let curr = &text[TextRange::new(range.range.start(), range.range.end())];
+
+        let class = range.highlight.to_string().replace('.', " ");
+        let color = match (rainbow, range.binding_hash) {
+            (true, Some(hash)) => {
+                format!(" data-binding-hash=\"{}\" style=\"color: {};\"", hash, rainbowify(hash))
+            }
+            _ => "".into(),
+        };
+        buf.push_str(&format!("{}", class, color, html_escape(curr)));
+
+        prev_pos = range.range.end();
+    }
+    // Add the remaining (non-highlighted) text
+    let curr = &text[TextRange::new(prev_pos, TextSize::of(&text))];
+    let text = html_escape(curr);
+    buf.push_str(&text);
+    buf.push_str("
"); + buf +} + +//FIXME: like, real html escaping +fn html_escape(text: &str) -> String { + text.replace("<", "<").replace(">", ">") +} + +const STYLE: &str = " + +"; diff --git a/crates/ide/src/syntax_highlighting/injection.rs b/crates/ide/src/syntax_highlighting/injection.rs new file mode 100644 index 0000000000..43f4e6feab --- /dev/null +++ b/crates/ide/src/syntax_highlighting/injection.rs @@ -0,0 +1,187 @@ +//! Syntax highlighting injections such as highlighting of documentation tests. + +use std::{collections::BTreeMap, convert::TryFrom}; + +use ast::{HasQuotes, HasStringValue}; +use hir::Semantics; +use itertools::Itertools; +use syntax::{ast, AstToken, SyntaxNode, SyntaxToken, TextRange, TextSize}; + +use crate::{ + call_info::ActiveParameter, Analysis, Highlight, HighlightModifier, HighlightTag, + HighlightedRange, RootDatabase, +}; + +use super::HighlightedRangeStack; + +pub(super) fn highlight_injection( + acc: &mut HighlightedRangeStack, + sema: &Semantics, + literal: ast::RawString, + expanded: SyntaxToken, +) -> Option<()> { + let active_parameter = ActiveParameter::at_token(&sema, expanded)?; + if !active_parameter.name.starts_with("ra_fixture") { + return None; + } + let value = literal.value()?; + let (analysis, tmp_file_id) = Analysis::from_single_file(value.into_owned()); + + if let Some(range) = literal.open_quote_text_range() { + acc.add(HighlightedRange { + range, + highlight: HighlightTag::StringLiteral.into(), + binding_hash: None, + }) + } + + for mut h in analysis.highlight(tmp_file_id).unwrap() { + if let Some(r) = literal.map_range_up(h.range) { + h.range = r; + acc.add(h) + } + } + + if let Some(range) = literal.close_quote_text_range() { + acc.add(HighlightedRange { + range, + highlight: HighlightTag::StringLiteral.into(), + binding_hash: None, + }) + } + + Some(()) +} + +/// Mapping from extracted documentation code to original code +type RangesMap = BTreeMap; + +const RUSTDOC_FENCE: &'static str = "```"; +const RUSTDOC_FENCE_TOKENS: 
&[&'static str] = + &["", "rust", "should_panic", "ignore", "no_run", "compile_fail", "edition2015", "edition2018"]; + +/// Extracts Rust code from documentation comments as well as a mapping from +/// the extracted source code back to the original source ranges. +/// Lastly, a vector of new comment highlight ranges (spanning only the +/// comment prefix) is returned which is used in the syntax highlighting +/// injection to replace the previous (line-spanning) comment ranges. +pub(super) fn extract_doc_comments( + node: &SyntaxNode, +) -> Option<(String, RangesMap, Vec)> { + // wrap the doctest into function body to get correct syntax highlighting + let prefix = "fn doctest() {\n"; + let suffix = "}\n"; + // Mapping from extracted documentation code to original code + let mut range_mapping: RangesMap = BTreeMap::new(); + let mut line_start = TextSize::try_from(prefix.len()).unwrap(); + let mut is_codeblock = false; + let mut is_doctest = false; + // Replace the original, line-spanning comment ranges by new, only comment-prefix + // spanning comment ranges. 
+ let mut new_comments = Vec::new(); + let doctest = node + .children_with_tokens() + .filter_map(|el| el.into_token().and_then(ast::Comment::cast)) + .filter(|comment| comment.kind().doc.is_some()) + .filter(|comment| { + if let Some(idx) = comment.text().find(RUSTDOC_FENCE) { + is_codeblock = !is_codeblock; + // Check whether code is rust by inspecting fence guards + let guards = &comment.text()[idx + RUSTDOC_FENCE.len()..]; + let is_rust = + guards.split(',').all(|sub| RUSTDOC_FENCE_TOKENS.contains(&sub.trim())); + is_doctest = is_codeblock && is_rust; + false + } else { + is_doctest + } + }) + .map(|comment| { + let prefix_len = comment.prefix().len(); + let line: &str = comment.text().as_str(); + let range = comment.syntax().text_range(); + + // whitespace after comment is ignored + let pos = if let Some(ws) = line.chars().nth(prefix_len).filter(|c| c.is_whitespace()) { + prefix_len + ws.len_utf8() + } else { + prefix_len + }; + + // lines marked with `#` should be ignored in output, we skip the `#` char + let pos = if let Some(ws) = line.chars().nth(pos).filter(|&c| c == '#') { + pos + ws.len_utf8() + } else { + pos + }; + + range_mapping.insert(line_start, range.start() + TextSize::try_from(pos).unwrap()); + new_comments.push(HighlightedRange { + range: TextRange::new( + range.start(), + range.start() + TextSize::try_from(pos).unwrap(), + ), + highlight: HighlightTag::Comment | HighlightModifier::Documentation, + binding_hash: None, + }); + line_start += range.len() - TextSize::try_from(pos).unwrap(); + line_start += TextSize::try_from('\n'.len_utf8()).unwrap(); + + line[pos..].to_owned() + }) + .join("\n"); + + if doctest.is_empty() { + return None; + } + + let doctest = format!("{}{}{}", prefix, doctest, suffix); + Some((doctest, range_mapping, new_comments)) +} + +/// Injection of syntax highlighting of doctests. 
+pub(super) fn highlight_doc_comment( + text: String, + range_mapping: RangesMap, + new_comments: Vec, + stack: &mut HighlightedRangeStack, +) { + let (analysis, tmp_file_id) = Analysis::from_single_file(text); + + stack.push(); + for mut h in analysis.with_db(|db| super::highlight(db, tmp_file_id, None, true)).unwrap() { + // Determine start offset and end offset in case of multi-line ranges + let mut start_offset = None; + let mut end_offset = None; + for (line_start, orig_line_start) in range_mapping.range(..h.range.end()).rev() { + // It's possible for orig_line_start - line_start to be negative. Add h.range.start() + // here and remove it from the end range after the loop below so that the values are + // always non-negative. + let offset = h.range.start() + orig_line_start - line_start; + if line_start <= &h.range.start() { + start_offset.get_or_insert(offset); + break; + } else { + end_offset.get_or_insert(offset); + } + } + if let Some(start_offset) = start_offset { + h.range = TextRange::new( + start_offset, + h.range.end() + end_offset.unwrap_or(start_offset) - h.range.start(), + ); + + h.highlight |= HighlightModifier::Injected; + stack.add(h); + } + } + + // Inject the comment prefix highlight ranges + stack.push(); + for comment in new_comments { + stack.add(comment); + } + stack.pop_and_inject(None); + stack + .pop_and_inject(Some(Highlight::from(HighlightTag::Generic) | HighlightModifier::Injected)); +} diff --git a/crates/ra_ide/src/syntax_highlighting/tags.rs b/crates/ide/src/syntax_highlighting/tags.rs similarity index 100% rename from crates/ra_ide/src/syntax_highlighting/tags.rs rename to crates/ide/src/syntax_highlighting/tags.rs diff --git a/crates/ide/src/syntax_highlighting/tests.rs b/crates/ide/src/syntax_highlighting/tests.rs new file mode 100644 index 0000000000..94f37d773f --- /dev/null +++ b/crates/ide/src/syntax_highlighting/tests.rs @@ -0,0 +1,445 @@ +use std::fs; + +use expect::{expect_file, ExpectFile}; +use test_utils::project_dir; 
+ +use crate::{mock_analysis::single_file, FileRange, TextRange}; + +#[test] +fn test_highlighting() { + check_highlighting( + r#" +use inner::{self as inner_mod}; +mod inner {} + +#[derive(Clone, Debug)] +struct Foo { + pub x: i32, + pub y: i32, +} + +trait Bar { + fn bar(&self) -> i32; +} + +impl Bar for Foo { + fn bar(&self) -> i32 { + self.x + } +} + +impl Foo { + fn baz(mut self) -> i32 { + self.x + } + + fn qux(&mut self) { + self.x = 0; + } +} + +static mut STATIC_MUT: i32 = 0; + +fn foo<'a, T>() -> T { + foo::<'a, i32>() +} + +macro_rules! def_fn { + ($($tt:tt)*) => {$($tt)*} +} + +def_fn! { + fn bar() -> u32 { + 100 + } +} + +macro_rules! noop { + ($expr:expr) => { + $expr + } +} + +// comment +fn main() { + println!("Hello, {}!", 92); + + let mut vec = Vec::new(); + if true { + let x = 92; + vec.push(Foo { x, y: 1 }); + } + unsafe { + vec.set_len(0); + STATIC_MUT = 1; + } + + for e in vec { + // Do nothing + } + + noop!(noop!(1)); + + let mut x = 42; + let y = &mut x; + let z = &y; + + let Foo { x: z, y } = Foo { x: z, y }; + + y; +} + +enum Option { + Some(T), + None, +} +use Option::*; + +impl Option { + fn and(self, other: Option) -> Option<(T, U)> { + match other { + None => unimplemented!(), + Nope => Nope, + } + } +} +"# + .trim(), + expect_file!["crates/ide/test_data/highlighting.html"], + false, + ); +} + +#[test] +fn test_rainbow_highlighting() { + check_highlighting( + r#" +fn main() { + let hello = "hello"; + let x = hello.to_string(); + let y = hello.to_string(); + + let x = "other color please!"; + let y = x.to_string(); +} + +fn bar() { + let mut hello = "hello"; +} +"# + .trim(), + expect_file!["crates/ide/test_data/rainbow_highlighting.html"], + true, + ); +} + +#[test] +fn accidentally_quadratic() { + let file = project_dir().join("crates/syntax/test_data/accidentally_quadratic"); + let src = fs::read_to_string(file).unwrap(); + + let (analysis, file_id) = single_file(&src); + + // let t = std::time::Instant::now(); + let _ = 
analysis.highlight(file_id).unwrap(); + // eprintln!("elapsed: {:?}", t.elapsed()); +} + +#[test] +fn test_ranges() { + let (analysis, file_id) = single_file( + r#" +#[derive(Clone, Debug)] +struct Foo { + pub x: i32, + pub y: i32, +} +"#, + ); + + // The "x" + let highlights = &analysis + .highlight_range(FileRange { file_id, range: TextRange::at(45.into(), 1.into()) }) + .unwrap(); + + assert_eq!(&highlights[0].highlight.to_string(), "field.declaration"); +} + +#[test] +fn test_flattening() { + check_highlighting( + r##" +fn fixture(ra_fixture: &str) {} + +fn main() { + fixture(r#" + trait Foo { + fn foo() { + println!("2 + 2 = {}", 4); + } + }"# + ); +}"## + .trim(), + expect_file!["crates/ide/test_data/highlight_injection.html"], + false, + ); +} + +#[test] +fn ranges_sorted() { + let (analysis, file_id) = single_file( + r#" +#[foo(bar = "bar")] +macro_rules! test {} +}"# + .trim(), + ); + let _ = analysis.highlight(file_id).unwrap(); +} + +#[test] +fn test_string_highlighting() { + // The format string detection is based on macro-expansion, + // thus, we have to copy the macro definition from `std` + check_highlighting( + r#" +macro_rules! println { + ($($arg:tt)*) => ({ + $crate::io::_print($crate::format_args_nl!($($arg)*)); + }) +} +#[rustc_builtin_macro] +macro_rules! format_args_nl { + ($fmt:expr) => {{ /* compiler built-in */ }}; + ($fmt:expr, $($args:tt)*) => {{ /* compiler built-in */ }}; +} + +fn main() { + // from https://doc.rust-lang.org/std/fmt/index.html + println!("Hello"); // => "Hello" + println!("Hello, {}!", "world"); // => "Hello, world!" 
+ println!("The number is {}", 1); // => "The number is 1" + println!("{:?}", (3, 4)); // => "(3, 4)" + println!("{value}", value=4); // => "4" + println!("{} {}", 1, 2); // => "1 2" + println!("{:04}", 42); // => "0042" with leading zerosV + println!("{1} {} {0} {}", 1, 2); // => "2 1 1 2" + println!("{argument}", argument = "test"); // => "test" + println!("{name} {}", 1, name = 2); // => "2 1" + println!("{a} {c} {b}", a="a", b='b', c=3); // => "a 3 b" + println!("{{{}}}", 2); // => "{2}" + println!("Hello {:5}!", "x"); + println!("Hello {:1$}!", "x", 5); + println!("Hello {1:0$}!", 5, "x"); + println!("Hello {:width$}!", "x", width = 5); + println!("Hello {:<5}!", "x"); + println!("Hello {:-<5}!", "x"); + println!("Hello {:^5}!", "x"); + println!("Hello {:>5}!", "x"); + println!("Hello {:+}!", 5); + println!("{:#x}!", 27); + println!("Hello {:05}!", 5); + println!("Hello {:05}!", -5); + println!("{:#010x}!", 27); + println!("Hello {0} is {1:.5}", "x", 0.01); + println!("Hello {1} is {2:.0$}", 5, "x", 0.01); + println!("Hello {0} is {2:.1$}", "x", 5, 0.01); + println!("Hello {} is {:.*}", "x", 5, 0.01); + println!("Hello {} is {2:.*}", "x", 5, 0.01); + println!("Hello {} is {number:.prec$}", "x", prec = 5, number = 0.01); + println!("{}, `{name:.*}` has 3 fractional digits", "Hello", 3, name=1234.56); + println!("{}, `{name:.*}` has 3 characters", "Hello", 3, name="1234.56"); + println!("{}, `{name:>8.*}` has 3 right-aligned characters", "Hello", 3, name="1234.56"); + println!("Hello {{}}"); + println!("{{ Hello"); + + println!(r"Hello, {}!", "world"); + + // escape sequences + println!("Hello\nWorld"); + println!("\u{48}\x65\x6C\x6C\x6F World"); + + println!("{\x41}", A = 92); + println!("{ничоси}", ничоси = 92); +}"# + .trim(), + expect_file!["crates/ide/test_data/highlight_strings.html"], + false, + ); +} + +#[test] +fn test_unsafe_highlighting() { + check_highlighting( + r#" +unsafe fn unsafe_fn() {} + +union Union { + a: u32, + b: f32, +} + +struct 
HasUnsafeFn; + +impl HasUnsafeFn { + unsafe fn unsafe_method(&self) {} +} + +struct TypeForStaticMut { + a: u8 +} + +static mut global_mut: TypeForStaticMut = TypeForStaticMut { a: 0 }; + +#[repr(packed)] +struct Packed { + a: u16, +} + +trait DoTheAutoref { + fn calls_autoref(&self); +} + +impl DoTheAutoref for u16 { + fn calls_autoref(&self) {} +} + +fn main() { + let x = &5 as *const _ as *const usize; + let u = Union { b: 0 }; + unsafe { + // unsafe fn and method calls + unsafe_fn(); + let b = u.b; + match u { + Union { b: 0 } => (), + Union { a } => (), + } + HasUnsafeFn.unsafe_method(); + + // unsafe deref + let y = *x; + + // unsafe access to a static mut + let a = global_mut.a; + + // unsafe ref of packed fields + let packed = Packed { a: 0 }; + let a = &packed.a; + let ref a = packed.a; + let Packed { ref a } = packed; + let Packed { a: ref _a } = packed; + + // unsafe auto ref of packed field + packed.a.calls_autoref(); + } +} +"# + .trim(), + expect_file!["crates/ide/test_data/highlight_unsafe.html"], + false, + ); +} + +#[test] +fn test_highlight_doctest() { + check_highlighting( + r#" +/// ``` +/// let _ = "early doctests should not go boom"; +/// ``` +struct Foo { + bar: bool, +} + +impl Foo { + pub const bar: bool = true; + + /// Constructs a new `Foo`. + /// + /// # Examples + /// + /// ``` + /// # #![allow(unused_mut)] + /// let mut foo: Foo = Foo::new(); + /// ``` + pub const fn new() -> Foo { + Foo { bar: true } + } + + /// `bar` method on `Foo`. 
+ /// + /// # Examples + /// + /// ``` + /// use x::y; + /// + /// let foo = Foo::new(); + /// + /// // calls bar on foo + /// assert!(foo.bar()); + /// + /// let bar = foo.bar || Foo::bar; + /// + /// /* multi-line + /// comment */ + /// + /// let multi_line_string = "Foo + /// bar + /// "; + /// + /// ``` + /// + /// ```rust,no_run + /// let foobar = Foo::new().bar(); + /// ``` + /// + /// ```sh + /// echo 1 + /// ``` + pub fn foo(&self) -> bool { + true + } +} + +/// ``` +/// noop!(1); +/// ``` +macro_rules! noop { + ($expr:expr) => { + $expr + } +} +"# + .trim(), + expect_file!["crates/ide/test_data/highlight_doctest.html"], + false, + ); +} + +#[test] +fn test_extern_crate() { + check_highlighting( + r#" + //- /main.rs + extern crate std; + extern crate alloc as abc; + //- /std/lib.rs + pub struct S; + //- /alloc/lib.rs + pub struct A + "#, + expect_file!["crates/ide/test_data/highlight_extern_crate.html"], + false, + ); +} + +/// Highlights the code given by the `ra_fixture` argument, renders the +/// result as HTML, and compares it with the HTML file given as `snapshot`. +/// Note that the `snapshot` file is overwritten by the rendered HTML. +fn check_highlighting(ra_fixture: &str, expect: ExpectFile, rainbow: bool) { + let (analysis, file_id) = single_file(ra_fixture); + let actual_html = &analysis.highlight_as_html(file_id, rainbow).unwrap(); + expect.assert_eq(actual_html) +} diff --git a/crates/ide/src/syntax_tree.rs b/crates/ide/src/syntax_tree.rs new file mode 100644 index 0000000000..f800449596 --- /dev/null +++ b/crates/ide/src/syntax_tree.rs @@ -0,0 +1,359 @@ +use base_db::{FileId, SourceDatabase}; +use ide_db::RootDatabase; +use syntax::{ + algo, AstNode, NodeOrToken, SourceFile, + SyntaxKind::{RAW_STRING, STRING}, + SyntaxToken, TextRange, TextSize, +}; + +// Feature: Show Syntax Tree +// +// Shows the parse tree of the current file. It exists mostly for debugging +// rust-analyzer itself. 
+// +// |=== +// | Editor | Action Name +// +// | VS Code | **Rust Analyzer: Show Syntax Tree** +// |=== +pub(crate) fn syntax_tree( + db: &RootDatabase, + file_id: FileId, + text_range: Option, +) -> String { + let parse = db.parse(file_id); + if let Some(text_range) = text_range { + let node = match algo::find_covering_element(parse.tree().syntax(), text_range) { + NodeOrToken::Node(node) => node, + NodeOrToken::Token(token) => { + if let Some(tree) = syntax_tree_for_string(&token, text_range) { + return tree; + } + token.parent() + } + }; + + format!("{:#?}", node) + } else { + format!("{:#?}", parse.tree().syntax()) + } +} + +/// Attempts parsing the selected contents of a string literal +/// as rust syntax and returns its syntax tree +fn syntax_tree_for_string(token: &SyntaxToken, text_range: TextRange) -> Option { + // When the range is inside a string + // we'll attempt parsing it as rust syntax + // to provide the syntax tree of the contents of the string + match token.kind() { + STRING | RAW_STRING => syntax_tree_for_token(token, text_range), + _ => None, + } +} + +fn syntax_tree_for_token(node: &SyntaxToken, text_range: TextRange) -> Option { + // Range of the full node + let node_range = node.text_range(); + let text = node.text().to_string(); + + // We start at some point inside the node + // Either we have selected the whole string + // or our selection is inside it + let start = text_range.start() - node_range.start(); + + // how many characters we have selected + let len = text_range.len(); + + let node_len = node_range.len(); + + let start = start; + + // We want to cap our length + let len = len.min(node_len); + + // Ensure our slice is inside the actual string + let end = + if start + len < TextSize::of(&text) { start + len } else { TextSize::of(&text) - start }; + + let text = &text[TextRange::new(start, end)]; + + // Remove possible extra string quotes from the start + // and the end of the string + let text = text + .trim_start_matches('r') + 
.trim_start_matches('#') + .trim_start_matches('"') + .trim_end_matches('#') + .trim_end_matches('"') + .trim() + // Remove custom markers + .replace("<|>", ""); + + let parsed = SourceFile::parse(&text); + + // If the "file" parsed without errors, + // return its syntax + if parsed.errors().is_empty() { + return Some(format!("{:#?}", parsed.tree().syntax())); + } + + None +} + +#[cfg(test)] +mod tests { + use test_utils::assert_eq_text; + + use crate::mock_analysis::{analysis_and_range, single_file}; + + #[test] + fn test_syntax_tree_without_range() { + // Basic syntax + let (analysis, file_id) = single_file(r#"fn foo() {}"#); + let syn = analysis.syntax_tree(file_id, None).unwrap(); + + assert_eq_text!( + syn.trim(), + r#" +SOURCE_FILE@0..11 + FN@0..11 + FN_KW@0..2 "fn" + WHITESPACE@2..3 " " + NAME@3..6 + IDENT@3..6 "foo" + PARAM_LIST@6..8 + L_PAREN@6..7 "(" + R_PAREN@7..8 ")" + WHITESPACE@8..9 " " + BLOCK_EXPR@9..11 + L_CURLY@9..10 "{" + R_CURLY@10..11 "}" +"# + .trim() + ); + + let (analysis, file_id) = single_file( + r#" +fn test() { + assert!(" + fn foo() { + } + ", ""); +}"# + .trim(), + ); + let syn = analysis.syntax_tree(file_id, None).unwrap(); + + assert_eq_text!( + syn.trim(), + r#" +SOURCE_FILE@0..60 + FN@0..60 + FN_KW@0..2 "fn" + WHITESPACE@2..3 " " + NAME@3..7 + IDENT@3..7 "test" + PARAM_LIST@7..9 + L_PAREN@7..8 "(" + R_PAREN@8..9 ")" + WHITESPACE@9..10 " " + BLOCK_EXPR@10..60 + L_CURLY@10..11 "{" + WHITESPACE@11..16 "\n " + EXPR_STMT@16..58 + MACRO_CALL@16..57 + PATH@16..22 + PATH_SEGMENT@16..22 + NAME_REF@16..22 + IDENT@16..22 "assert" + BANG@22..23 "!" + TOKEN_TREE@23..57 + L_PAREN@23..24 "(" + STRING@24..52 "\"\n fn foo() {\n ..." 
+ COMMA@52..53 "," + WHITESPACE@53..54 " " + STRING@54..56 "\"\"" + R_PAREN@56..57 ")" + SEMICOLON@57..58 ";" + WHITESPACE@58..59 "\n" + R_CURLY@59..60 "}" +"# + .trim() + ); + } + + #[test] + fn test_syntax_tree_with_range() { + let (analysis, range) = analysis_and_range(r#"<|>fn foo() {}<|>"#.trim()); + let syn = analysis.syntax_tree(range.file_id, Some(range.range)).unwrap(); + + assert_eq_text!( + syn.trim(), + r#" +FN@0..11 + FN_KW@0..2 "fn" + WHITESPACE@2..3 " " + NAME@3..6 + IDENT@3..6 "foo" + PARAM_LIST@6..8 + L_PAREN@6..7 "(" + R_PAREN@7..8 ")" + WHITESPACE@8..9 " " + BLOCK_EXPR@9..11 + L_CURLY@9..10 "{" + R_CURLY@10..11 "}" +"# + .trim() + ); + + let (analysis, range) = analysis_and_range( + r#"fn test() { + <|>assert!(" + fn foo() { + } + ", "");<|> +}"# + .trim(), + ); + let syn = analysis.syntax_tree(range.file_id, Some(range.range)).unwrap(); + + assert_eq_text!( + syn.trim(), + r#" +EXPR_STMT@16..58 + MACRO_CALL@16..57 + PATH@16..22 + PATH_SEGMENT@16..22 + NAME_REF@16..22 + IDENT@16..22 "assert" + BANG@22..23 "!" + TOKEN_TREE@23..57 + L_PAREN@23..24 "(" + STRING@24..52 "\"\n fn foo() {\n ..." 
+ COMMA@52..53 "," + WHITESPACE@53..54 " " + STRING@54..56 "\"\"" + R_PAREN@56..57 ")" + SEMICOLON@57..58 ";" +"# + .trim() + ); + } + + #[test] + fn test_syntax_tree_inside_string() { + let (analysis, range) = analysis_and_range( + r#"fn test() { + assert!(" +<|>fn foo() { +}<|> +fn bar() { +} + ", ""); +}"# + .trim(), + ); + let syn = analysis.syntax_tree(range.file_id, Some(range.range)).unwrap(); + assert_eq_text!( + syn.trim(), + r#" +SOURCE_FILE@0..12 + FN@0..12 + FN_KW@0..2 "fn" + WHITESPACE@2..3 " " + NAME@3..6 + IDENT@3..6 "foo" + PARAM_LIST@6..8 + L_PAREN@6..7 "(" + R_PAREN@7..8 ")" + WHITESPACE@8..9 " " + BLOCK_EXPR@9..12 + L_CURLY@9..10 "{" + WHITESPACE@10..11 "\n" + R_CURLY@11..12 "}" +"# + .trim() + ); + + // With a raw string + let (analysis, range) = analysis_and_range( + r###"fn test() { + assert!(r#" +<|>fn foo() { +}<|> +fn bar() { +} + "#, ""); +}"### + .trim(), + ); + let syn = analysis.syntax_tree(range.file_id, Some(range.range)).unwrap(); + assert_eq_text!( + syn.trim(), + r#" +SOURCE_FILE@0..12 + FN@0..12 + FN_KW@0..2 "fn" + WHITESPACE@2..3 " " + NAME@3..6 + IDENT@3..6 "foo" + PARAM_LIST@6..8 + L_PAREN@6..7 "(" + R_PAREN@7..8 ")" + WHITESPACE@8..9 " " + BLOCK_EXPR@9..12 + L_CURLY@9..10 "{" + WHITESPACE@10..11 "\n" + R_CURLY@11..12 "}" +"# + .trim() + ); + + // With a raw string + let (analysis, range) = analysis_and_range( + r###"fn test() { + assert!(r<|>#" +fn foo() { +} +fn bar() { +}"<|>#, ""); +}"### + .trim(), + ); + let syn = analysis.syntax_tree(range.file_id, Some(range.range)).unwrap(); + assert_eq_text!( + syn.trim(), + r#" +SOURCE_FILE@0..25 + FN@0..12 + FN_KW@0..2 "fn" + WHITESPACE@2..3 " " + NAME@3..6 + IDENT@3..6 "foo" + PARAM_LIST@6..8 + L_PAREN@6..7 "(" + R_PAREN@7..8 ")" + WHITESPACE@8..9 " " + BLOCK_EXPR@9..12 + L_CURLY@9..10 "{" + WHITESPACE@10..11 "\n" + R_CURLY@11..12 "}" + WHITESPACE@12..13 "\n" + FN@13..25 + FN_KW@13..15 "fn" + WHITESPACE@15..16 " " + NAME@16..19 + IDENT@16..19 "bar" + PARAM_LIST@19..21 + 
L_PAREN@19..20 "(" + R_PAREN@20..21 ")" + WHITESPACE@21..22 " " + BLOCK_EXPR@22..25 + L_CURLY@22..23 "{" + WHITESPACE@23..24 "\n" + R_CURLY@24..25 "}" +"# + .trim() + ); + } +} diff --git a/crates/ide/src/typing.rs b/crates/ide/src/typing.rs new file mode 100644 index 0000000000..899ce5f265 --- /dev/null +++ b/crates/ide/src/typing.rs @@ -0,0 +1,364 @@ +//! This module handles auto-magic editing actions applied together with users +//! edits. For example, if the user typed +//! +//! ```text +//! foo +//! .bar() +//! .baz() +//! | // <- cursor is here +//! ``` +//! +//! and types `.` next, we want to indent the dot. +//! +//! Language server executes such typing assists synchronously. That is, they +//! block user's typing and should be pretty fast for this reason! + +mod on_enter; + +use base_db::{FilePosition, SourceDatabase}; +use ide_db::{source_change::SourceFileEdit, RootDatabase}; +use syntax::{ + algo::find_node_at_offset, + ast::{self, edit::IndentLevel, AstToken}, + AstNode, SourceFile, + SyntaxKind::{FIELD_EXPR, METHOD_CALL_EXPR}, + TextRange, TextSize, +}; + +use text_edit::TextEdit; + +use crate::SourceChange; + +pub(crate) use on_enter::on_enter; + +pub(crate) const TRIGGER_CHARS: &str = ".=>"; + +// Feature: On Typing Assists +// +// Some features trigger on typing certain characters: +// +// - typing `let =` tries to smartly add `;` if `=` is followed by an existing expression +// - typing `.` in a chain method call auto-indents +pub(crate) fn on_char_typed( + db: &RootDatabase, + position: FilePosition, + char_typed: char, +) -> Option { + assert!(TRIGGER_CHARS.contains(char_typed)); + let file = &db.parse(position.file_id).tree(); + assert_eq!(file.syntax().text().char_at(position.offset), Some(char_typed)); + let edit = on_char_typed_inner(file, position.offset, char_typed)?; + Some(SourceFileEdit { file_id: position.file_id, edit }.into()) +} + +fn on_char_typed_inner(file: &SourceFile, offset: TextSize, char_typed: char) -> Option { + 
assert!(TRIGGER_CHARS.contains(char_typed)); + match char_typed { + '.' => on_dot_typed(file, offset), + '=' => on_eq_typed(file, offset), + '>' => on_arrow_typed(file, offset), + _ => unreachable!(), + } +} + +/// Returns an edit which should be applied after `=` was typed. Primarily, +/// this works when adding `let =`. +// FIXME: use a snippet completion instead of this hack here. +fn on_eq_typed(file: &SourceFile, offset: TextSize) -> Option { + assert_eq!(file.syntax().text().char_at(offset), Some('=')); + let let_stmt: ast::LetStmt = find_node_at_offset(file.syntax(), offset)?; + if let_stmt.semicolon_token().is_some() { + return None; + } + if let Some(expr) = let_stmt.initializer() { + let expr_range = expr.syntax().text_range(); + if expr_range.contains(offset) && offset != expr_range.start() { + return None; + } + if file.syntax().text().slice(offset..expr_range.start()).contains_char('\n') { + return None; + } + } else { + return None; + } + let offset = let_stmt.syntax().text_range().end(); + Some(TextEdit::insert(offset, ";".to_string())) +} + +/// Returns an edit which should be applied when a dot ('.') is typed on a blank line, indenting the line appropriately. +fn on_dot_typed(file: &SourceFile, offset: TextSize) -> Option { + assert_eq!(file.syntax().text().char_at(offset), Some('.')); + let whitespace = + file.syntax().token_at_offset(offset).left_biased().and_then(ast::Whitespace::cast)?; + + let current_indent = { + let text = whitespace.text(); + let newline = text.rfind('\n')?; + &text[newline + 1..] 
+ }; + let current_indent_len = TextSize::of(current_indent); + + let parent = whitespace.syntax().parent(); + // Make sure dot is a part of call chain + if !matches!(parent.kind(), FIELD_EXPR | METHOD_CALL_EXPR) { + return None; + } + let prev_indent = IndentLevel::from_node(&parent); + let target_indent = format!(" {}", prev_indent); + let target_indent_len = TextSize::of(&target_indent); + if current_indent_len == target_indent_len { + return None; + } + + Some(TextEdit::replace(TextRange::new(offset - current_indent_len, offset), target_indent)) +} + +/// Adds a space after an arrow when `fn foo() { ... }` is turned into `fn foo() -> { ... }` +fn on_arrow_typed(file: &SourceFile, offset: TextSize) -> Option { + let file_text = file.syntax().text(); + assert_eq!(file_text.char_at(offset), Some('>')); + let after_arrow = offset + TextSize::of('>'); + if file_text.char_at(after_arrow) != Some('{') { + return None; + } + if find_node_at_offset::(file.syntax(), offset).is_none() { + return None; + } + + Some(TextEdit::insert(after_arrow, " ".to_string())) +} + +#[cfg(test)] +mod tests { + use test_utils::{assert_eq_text, extract_offset}; + + use super::*; + + fn do_type_char(char_typed: char, before: &str) -> Option { + let (offset, before) = extract_offset(before); + let edit = TextEdit::insert(offset, char_typed.to_string()); + let mut before = before.to_string(); + edit.apply(&mut before); + let parse = SourceFile::parse(&before); + on_char_typed_inner(&parse.tree(), offset, char_typed).map(|it| { + it.apply(&mut before); + before.to_string() + }) + } + + fn type_char(char_typed: char, ra_fixture_before: &str, ra_fixture_after: &str) { + let actual = do_type_char(char_typed, ra_fixture_before) + .unwrap_or_else(|| panic!("typing `{}` did nothing", char_typed)); + + assert_eq_text!(ra_fixture_after, &actual); + } + + fn type_char_noop(char_typed: char, before: &str) { + let file_change = do_type_char(char_typed, before); + assert!(file_change.is_none()) + } + + 
#[test] + fn test_on_eq_typed() { + // do_check(r" + // fn foo() { + // let foo =<|> + // } + // ", r" + // fn foo() { + // let foo =; + // } + // "); + type_char( + '=', + r" +fn foo() { + let foo <|> 1 + 1 +} +", + r" +fn foo() { + let foo = 1 + 1; +} +", + ); + // do_check(r" + // fn foo() { + // let foo =<|> + // let bar = 1; + // } + // ", r" + // fn foo() { + // let foo =; + // let bar = 1; + // } + // "); + } + + #[test] + fn indents_new_chain_call() { + type_char( + '.', + r" + fn main() { + xs.foo() + <|> + } + ", + r" + fn main() { + xs.foo() + . + } + ", + ); + type_char_noop( + '.', + r" + fn main() { + xs.foo() + <|> + } + ", + ) + } + + #[test] + fn indents_new_chain_call_with_semi() { + type_char( + '.', + r" + fn main() { + xs.foo() + <|>; + } + ", + r" + fn main() { + xs.foo() + .; + } + ", + ); + type_char_noop( + '.', + r" + fn main() { + xs.foo() + <|>; + } + ", + ) + } + + #[test] + fn indents_new_chain_call_with_let() { + type_char( + '.', + r#" +fn main() { + let _ = foo + <|> + bar() +} +"#, + r#" +fn main() { + let _ = foo + . + bar() +} +"#, + ); + } + + #[test] + fn indents_continued_chain_call() { + type_char( + '.', + r" + fn main() { + xs.foo() + .first() + <|> + } + ", + r" + fn main() { + xs.foo() + .first() + . + } + ", + ); + type_char_noop( + '.', + r" + fn main() { + xs.foo() + .first() + <|> + } + ", + ); + } + + #[test] + fn indents_middle_of_chain_call() { + type_char( + '.', + r" + fn source_impl() { + let var = enum_defvariant_list().unwrap() + <|> + .nth(92) + .unwrap(); + } + ", + r" + fn source_impl() { + let var = enum_defvariant_list().unwrap() + . 
+ .nth(92) + .unwrap(); + } + ", + ); + type_char_noop( + '.', + r" + fn source_impl() { + let var = enum_defvariant_list().unwrap() + <|> + .nth(92) + .unwrap(); + } + ", + ); + } + + #[test] + fn dont_indent_freestanding_dot() { + type_char_noop( + '.', + r" + fn main() { + <|> + } + ", + ); + type_char_noop( + '.', + r" + fn main() { + <|> + } + ", + ); + } + + #[test] + fn adds_space_after_return_type() { + type_char('>', "fn foo() -<|>{ 92 }", "fn foo() -> { 92 }") + } +} diff --git a/crates/ide/src/typing/on_enter.rs b/crates/ide/src/typing/on_enter.rs new file mode 100644 index 0000000000..f7d46146c5 --- /dev/null +++ b/crates/ide/src/typing/on_enter.rs @@ -0,0 +1,256 @@ +//! Handles the `Enter` key press. At the momently, this only continues +//! comments, but should handle indent some time in the future as well. + +use base_db::{FilePosition, SourceDatabase}; +use ide_db::RootDatabase; +use syntax::{ + ast::{self, AstToken}, + AstNode, SmolStr, SourceFile, + SyntaxKind::*, + SyntaxToken, TextRange, TextSize, TokenAtOffset, +}; +use test_utils::mark; +use text_edit::TextEdit; + +// Feature: On Enter +// +// rust-analyzer can override kbd:[Enter] key to make it smarter: +// +// - kbd:[Enter] inside triple-slash comments automatically inserts `///` +// - kbd:[Enter] in the middle or after a trailing space in `//` inserts `//` +// +// This action needs to be assigned to shortcut explicitly. 
+// +// VS Code:: +// +// Add the following to `keybindings.json`: +// [source,json] +// ---- +// { +// "key": "Enter", +// "command": "rust-analyzer.onEnter", +// "when": "editorTextFocus && !suggestWidgetVisible && editorLangId == rust" +// } +// ---- +pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option { + let parse = db.parse(position.file_id); + let file = parse.tree(); + let comment = file + .syntax() + .token_at_offset(position.offset) + .left_biased() + .and_then(ast::Comment::cast)?; + + if comment.kind().shape.is_block() { + return None; + } + + let prefix = comment.prefix(); + let comment_range = comment.syntax().text_range(); + if position.offset < comment_range.start() + TextSize::of(prefix) { + return None; + } + + let mut remove_last_space = false; + // Continuing single-line non-doc comments (like this one :) ) is annoying + if prefix == "//" && comment_range.end() == position.offset { + if comment.text().ends_with(' ') { + mark::hit!(continues_end_of_line_comment_with_space); + remove_last_space = true; + } else if !followed_by_comment(&comment) { + return None; + } + } + + let indent = node_indent(&file, comment.syntax())?; + let inserted = format!("\n{}{} $0", indent, prefix); + let delete = if remove_last_space { + TextRange::new(position.offset - TextSize::of(' '), position.offset) + } else { + TextRange::empty(position.offset) + }; + let edit = TextEdit::replace(delete, inserted); + Some(edit) +} + +fn followed_by_comment(comment: &ast::Comment) -> bool { + let ws = match comment.syntax().next_token().and_then(ast::Whitespace::cast) { + Some(it) => it, + None => return false, + }; + if ws.spans_multiple_lines() { + return false; + } + ws.syntax().next_token().and_then(ast::Comment::cast).is_some() +} + +fn node_indent(file: &SourceFile, token: &SyntaxToken) -> Option { + let ws = match file.syntax().token_at_offset(token.text_range().start()) { + TokenAtOffset::Between(l, r) => { + assert!(r == *token); + l + } + 
TokenAtOffset::Single(n) => { + assert!(n == *token); + return Some("".into()); + } + TokenAtOffset::None => unreachable!(), + }; + if ws.kind() != WHITESPACE { + return None; + } + let text = ws.text(); + let pos = text.rfind('\n').map(|it| it + 1).unwrap_or(0); + Some(text[pos..].into()) +} + +#[cfg(test)] +mod tests { + use stdx::trim_indent; + use test_utils::{assert_eq_text, mark}; + + use crate::mock_analysis::analysis_and_position; + + fn apply_on_enter(before: &str) -> Option { + let (analysis, position) = analysis_and_position(&before); + let result = analysis.on_enter(position).unwrap()?; + + let mut actual = analysis.file_text(position.file_id).unwrap().to_string(); + result.apply(&mut actual); + Some(actual) + } + + fn do_check(ra_fixture_before: &str, ra_fixture_after: &str) { + let ra_fixture_after = &trim_indent(ra_fixture_after); + let actual = apply_on_enter(ra_fixture_before).unwrap(); + assert_eq_text!(ra_fixture_after, &actual); + } + + fn do_check_noop(ra_fixture_text: &str) { + assert!(apply_on_enter(ra_fixture_text).is_none()) + } + + #[test] + fn continues_doc_comment() { + do_check( + r" +/// Some docs<|> +fn foo() { +} +", + r" +/// Some docs +/// $0 +fn foo() { +} +", + ); + + do_check( + r" +impl S { + /// Some<|> docs. + fn foo() {} +} +", + r" +impl S { + /// Some + /// $0 docs. + fn foo() {} +} +", + ); + + do_check( + r" +///<|> Some docs +fn foo() { +} +", + r" +/// +/// $0 Some docs +fn foo() { +} +", + ); + } + + #[test] + fn does_not_continue_before_doc_comment() { + do_check_noop(r"<|>//! 
docz"); + } + + #[test] + fn continues_code_comment_in_the_middle_of_line() { + do_check( + r" +fn main() { + // Fix<|> me + let x = 1 + 1; +} +", + r" +fn main() { + // Fix + // $0 me + let x = 1 + 1; +} +", + ); + } + + #[test] + fn continues_code_comment_in_the_middle_several_lines() { + do_check( + r" +fn main() { + // Fix<|> + // me + let x = 1 + 1; +} +", + r" +fn main() { + // Fix + // $0 + // me + let x = 1 + 1; +} +", + ); + } + + #[test] + fn does_not_continue_end_of_line_comment() { + do_check_noop( + r" +fn main() { + // Fix me<|> + let x = 1 + 1; +} +", + ); + } + + #[test] + fn continues_end_of_line_comment_with_space() { + mark::check!(continues_end_of_line_comment_with_space); + do_check( + r#" +fn main() { + // Fix me <|> + let x = 1 + 1; +} +"#, + r#" +fn main() { + // Fix me + // $0 + let x = 1 + 1; +} +"#, + ); + } +} diff --git a/crates/ra_ide/test_data/highlight_doctest.html b/crates/ide/test_data/highlight_doctest.html similarity index 100% rename from crates/ra_ide/test_data/highlight_doctest.html rename to crates/ide/test_data/highlight_doctest.html diff --git a/crates/ide/test_data/highlight_extern_crate.html b/crates/ide/test_data/highlight_extern_crate.html new file mode 100644 index 0000000000..800d894c76 --- /dev/null +++ b/crates/ide/test_data/highlight_extern_crate.html @@ -0,0 +1,40 @@ + + +
extern crate std;
+extern crate alloc as abc;
+
\ No newline at end of file diff --git a/crates/ra_ide/test_data/highlight_injection.html b/crates/ide/test_data/highlight_injection.html similarity index 100% rename from crates/ra_ide/test_data/highlight_injection.html rename to crates/ide/test_data/highlight_injection.html diff --git a/crates/ra_ide/test_data/highlight_strings.html b/crates/ide/test_data/highlight_strings.html similarity index 100% rename from crates/ra_ide/test_data/highlight_strings.html rename to crates/ide/test_data/highlight_strings.html diff --git a/crates/ide/test_data/highlight_unsafe.html b/crates/ide/test_data/highlight_unsafe.html new file mode 100644 index 0000000000..552fea6689 --- /dev/null +++ b/crates/ide/test_data/highlight_unsafe.html @@ -0,0 +1,99 @@ + + +
unsafe fn unsafe_fn() {}
+
+union Union {
+    a: u32,
+    b: f32,
+}
+
+struct HasUnsafeFn;
+
+impl HasUnsafeFn {
+    unsafe fn unsafe_method(&self) {}
+}
+
+struct TypeForStaticMut {
+    a: u8
+}
+
+static mut global_mut: TypeForStaticMut = TypeForStaticMut { a: 0 };
+
+#[repr(packed)]
+struct Packed {
+    a: u16,
+}
+
+trait DoTheAutoref {
+    fn calls_autoref(&self);
+}
+
+impl DoTheAutoref for u16 {
+    fn calls_autoref(&self) {}
+}
+
+fn main() {
+    let x = &5 as *const _ as *const usize;
+    let u = Union { b: 0 };
+    unsafe {
+        // unsafe fn and method calls
+        unsafe_fn();
+        let b = u.b;
+        match u {
+            Union { b: 0 } => (),
+            Union { a } => (),
+        }
+        HasUnsafeFn.unsafe_method();
+
+        // unsafe deref
+        let y = *x;
+
+        // unsafe access to a static mut
+        let a = global_mut.a;
+
+        // unsafe ref of packed fields
+        let packed = Packed { a: 0 };
+        let a = &packed.a;
+        let ref a = packed.a;
+        let Packed { ref a } = packed;
+        let Packed { a: ref _a } = packed;
+
+        // unsafe auto ref of packed field
+        packed.a.calls_autoref();
+    }
+}
\ No newline at end of file diff --git a/crates/ra_ide/test_data/highlighting.html b/crates/ide/test_data/highlighting.html similarity index 100% rename from crates/ra_ide/test_data/highlighting.html rename to crates/ide/test_data/highlighting.html diff --git a/crates/ra_ide/test_data/rainbow_highlighting.html b/crates/ide/test_data/rainbow_highlighting.html similarity index 100% rename from crates/ra_ide/test_data/rainbow_highlighting.html rename to crates/ide/test_data/rainbow_highlighting.html diff --git a/crates/ide_db/Cargo.toml b/crates/ide_db/Cargo.toml new file mode 100644 index 0000000000..692fb64153 --- /dev/null +++ b/crates/ide_db/Cargo.toml @@ -0,0 +1,30 @@ +[package] +name = "ide_db" +version = "0.0.0" +license = "MIT OR Apache-2.0" +authors = ["rust-analyzer developers"] +edition = "2018" + +[lib] +doctest = false + +[features] +wasm = [] + +[dependencies] +log = "0.4.8" +rayon = "1.3.0" +fst = { version = "0.4", default-features = false } +rustc-hash = "1.1.0" +once_cell = "1.3.1" +either = "1.5.3" + +stdx = { path = "../stdx" } +syntax = { path = "../syntax" } +text_edit = { path = "../text_edit" } +base_db = { path = "../base_db" } +profile = { path = "../profile" } +test_utils = { path = "../test_utils" } +# ide should depend only on the top-level `hir` package. if you need +# something from some `hir_xxx` subpackage, reexport the API via `hir`. +hir = { path = "../hir" } diff --git a/crates/ide_db/src/change.rs b/crates/ide_db/src/change.rs new file mode 100644 index 0000000000..8b4fd7ab84 --- /dev/null +++ b/crates/ide_db/src/change.rs @@ -0,0 +1,318 @@ +//! Defines a unit of change that can applied to a state of IDE to get the next +//! state. Changes are transactional. 
+ +use std::{fmt, sync::Arc, time}; + +use base_db::{ + salsa::{Database, Durability, SweepStrategy}, + CrateGraph, FileId, SourceDatabase, SourceDatabaseExt, SourceRoot, SourceRootId, +}; +use profile::{memory_usage, Bytes}; +use rustc_hash::FxHashSet; + +use crate::{symbol_index::SymbolsDatabase, RootDatabase}; + +#[derive(Default)] +pub struct AnalysisChange { + roots: Option>, + files_changed: Vec<(FileId, Option>)>, + crate_graph: Option, +} + +impl fmt::Debug for AnalysisChange { + fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { + let mut d = fmt.debug_struct("AnalysisChange"); + if let Some(roots) = &self.roots { + d.field("roots", roots); + } + if !self.files_changed.is_empty() { + d.field("files_changed", &self.files_changed.len()); + } + if self.crate_graph.is_some() { + d.field("crate_graph", &self.crate_graph); + } + d.finish() + } +} + +impl AnalysisChange { + pub fn new() -> AnalysisChange { + AnalysisChange::default() + } + + pub fn set_roots(&mut self, roots: Vec) { + self.roots = Some(roots); + } + + pub fn change_file(&mut self, file_id: FileId, new_text: Option>) { + self.files_changed.push((file_id, new_text)) + } + + pub fn set_crate_graph(&mut self, graph: CrateGraph) { + self.crate_graph = Some(graph); + } +} + +#[derive(Debug)] +struct AddFile { + file_id: FileId, + path: String, + text: Arc, +} + +#[derive(Debug)] +struct RemoveFile { + file_id: FileId, + path: String, +} + +#[derive(Default)] +struct RootChange { + added: Vec, + removed: Vec, +} + +impl fmt::Debug for RootChange { + fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { + fmt.debug_struct("AnalysisChange") + .field("added", &self.added.len()) + .field("removed", &self.removed.len()) + .finish() + } +} + +const GC_COOLDOWN: time::Duration = time::Duration::from_millis(100); + +impl RootDatabase { + pub fn request_cancellation(&mut self) { + let _p = profile::span("RootDatabase::request_cancellation"); + self.salsa_runtime_mut().synthetic_write(Durability::LOW); + 
} + + pub fn apply_change(&mut self, change: AnalysisChange) { + let _p = profile::span("RootDatabase::apply_change"); + self.request_cancellation(); + log::info!("apply_change {:?}", change); + if let Some(roots) = change.roots { + let mut local_roots = FxHashSet::default(); + let mut library_roots = FxHashSet::default(); + for (idx, root) in roots.into_iter().enumerate() { + let root_id = SourceRootId(idx as u32); + let durability = durability(&root); + if root.is_library { + library_roots.insert(root_id); + } else { + local_roots.insert(root_id); + } + for file_id in root.iter() { + self.set_file_source_root_with_durability(file_id, root_id, durability); + } + self.set_source_root_with_durability(root_id, Arc::new(root), durability); + } + self.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH); + self.set_library_roots_with_durability(Arc::new(library_roots), Durability::HIGH); + } + + for (file_id, text) in change.files_changed { + let source_root_id = self.file_source_root(file_id); + let source_root = self.source_root(source_root_id); + let durability = durability(&source_root); + // XXX: can't actually remove the file, just reset the text + let text = text.unwrap_or_default(); + self.set_file_text_with_durability(file_id, text, durability) + } + if let Some(crate_graph) = change.crate_graph { + self.set_crate_graph_with_durability(Arc::new(crate_graph), Durability::HIGH) + } + } + + pub fn maybe_collect_garbage(&mut self) { + if cfg!(feature = "wasm") { + return; + } + + if self.last_gc_check.elapsed() > GC_COOLDOWN { + self.last_gc_check = crate::wasm_shims::Instant::now(); + } + } + + pub fn collect_garbage(&mut self) { + if cfg!(feature = "wasm") { + return; + } + + let _p = profile::span("RootDatabase::collect_garbage"); + self.last_gc = crate::wasm_shims::Instant::now(); + + let sweep = SweepStrategy::default().discard_values().sweep_all_revisions(); + + base_db::ParseQuery.in_db(self).sweep(sweep); + 
hir::db::ParseMacroQuery.in_db(self).sweep(sweep); + + // Macros do take significant space, but less then the syntax trees + // self.query(hir::db::MacroDefQuery).sweep(sweep); + // self.query(hir::db::MacroArgTextQuery).sweep(sweep); + // self.query(hir::db::MacroExpandQuery).sweep(sweep); + + hir::db::AstIdMapQuery.in_db(self).sweep(sweep); + + hir::db::BodyWithSourceMapQuery.in_db(self).sweep(sweep); + + hir::db::ExprScopesQuery.in_db(self).sweep(sweep); + hir::db::InferQueryQuery.in_db(self).sweep(sweep); + hir::db::BodyQuery.in_db(self).sweep(sweep); + } + + // Feature: Memory Usage + // + // Clears rust-analyzer's internal database and prints memory usage statistics. + // + // |=== + // | Editor | Action Name + // + // | VS Code | **Rust Analyzer: Memory Usage (Clears Database)** + // |=== + pub fn per_query_memory_usage(&mut self) -> Vec<(String, Bytes)> { + let mut acc: Vec<(String, Bytes)> = vec![]; + let sweep = SweepStrategy::default().discard_values().sweep_all_revisions(); + macro_rules! 
sweep_each_query { + ($($q:path)*) => {$( + let before = memory_usage().allocated; + $q.in_db(self).sweep(sweep); + let after = memory_usage().allocated; + let q: $q = Default::default(); + let name = format!("{:?}", q); + acc.push((name, before - after)); + + let before = memory_usage().allocated; + $q.in_db(self).sweep(sweep.discard_everything()); + let after = memory_usage().allocated; + let q: $q = Default::default(); + let name = format!("{:?} (deps)", q); + acc.push((name, before - after)); + + let before = memory_usage().allocated; + $q.in_db(self).purge(); + let after = memory_usage().allocated; + let q: $q = Default::default(); + let name = format!("{:?} (purge)", q); + acc.push((name, before - after)); + )*} + } + sweep_each_query![ + // SourceDatabase + base_db::ParseQuery + base_db::CrateGraphQuery + + // SourceDatabaseExt + base_db::FileTextQuery + base_db::FileSourceRootQuery + base_db::SourceRootQuery + base_db::SourceRootCratesQuery + + // AstDatabase + hir::db::AstIdMapQuery + hir::db::MacroArgTextQuery + hir::db::MacroDefQuery + hir::db::ParseMacroQuery + hir::db::MacroExpandQuery + + // DefDatabase + hir::db::ItemTreeQuery + hir::db::CrateDefMapQueryQuery + hir::db::StructDataQuery + hir::db::UnionDataQuery + hir::db::EnumDataQuery + hir::db::ImplDataQuery + hir::db::TraitDataQuery + hir::db::TypeAliasDataQuery + hir::db::FunctionDataQuery + hir::db::ConstDataQuery + hir::db::StaticDataQuery + hir::db::BodyWithSourceMapQuery + hir::db::BodyQuery + hir::db::ExprScopesQuery + hir::db::GenericParamsQuery + hir::db::AttrsQuery + hir::db::ModuleLangItemsQuery + hir::db::CrateLangItemsQuery + hir::db::LangItemQuery + hir::db::DocumentationQuery + hir::db::ImportMapQuery + + // HirDatabase + hir::db::InferQueryQuery + hir::db::TyQuery + hir::db::ValueTyQuery + hir::db::ImplSelfTyQuery + hir::db::ImplTraitQuery + hir::db::FieldTypesQuery + hir::db::CallableItemSignatureQuery + hir::db::GenericPredicatesForParamQuery + hir::db::GenericPredicatesQuery + 
hir::db::GenericDefaultsQuery + hir::db::InherentImplsInCrateQuery + hir::db::TraitImplsInCrateQuery + hir::db::TraitImplsInDepsQuery + hir::db::AssociatedTyDataQuery + hir::db::AssociatedTyDataQuery + hir::db::TraitDatumQuery + hir::db::StructDatumQuery + hir::db::ImplDatumQuery + hir::db::FnDefDatumQuery + hir::db::ReturnTypeImplTraitsQuery + hir::db::InternCallableDefQuery + hir::db::InternTypeParamIdQuery + hir::db::InternImplTraitIdQuery + hir::db::InternClosureQuery + hir::db::AssociatedTyValueQuery + hir::db::TraitSolveQuery + + // SymbolsDatabase + crate::symbol_index::FileSymbolsQuery + crate::symbol_index::LibrarySymbolsQuery + crate::symbol_index::LocalRootsQuery + crate::symbol_index::LibraryRootsQuery + + // LineIndexDatabase + crate::LineIndexQuery + ]; + + // To collect interned data, we need to bump the revision counter by performing a synthetic + // write. + // We do this after collecting the non-interned queries to correctly attribute memory used + // by interned data. + self.salsa_runtime_mut().synthetic_write(Durability::HIGH); + + sweep_each_query![ + // AstDatabase + hir::db::InternMacroQuery + hir::db::InternEagerExpansionQuery + + // InternDatabase + hir::db::InternFunctionQuery + hir::db::InternStructQuery + hir::db::InternUnionQuery + hir::db::InternEnumQuery + hir::db::InternConstQuery + hir::db::InternStaticQuery + hir::db::InternTraitQuery + hir::db::InternTypeAliasQuery + hir::db::InternImplQuery + + // HirDatabase + hir::db::InternTypeParamIdQuery + ]; + + acc.sort_by_key(|it| std::cmp::Reverse(it.1)); + acc + } +} + +fn durability(source_root: &SourceRoot) -> Durability { + if source_root.is_library { + Durability::HIGH + } else { + Durability::LOW + } +} diff --git a/crates/ide_db/src/defs.rs b/crates/ide_db/src/defs.rs new file mode 100644 index 0000000000..0d0affc273 --- /dev/null +++ b/crates/ide_db/src/defs.rs @@ -0,0 +1,348 @@ +//! `NameDefinition` keeps information about the element we want to search references for. +//! 
The element is represented by `NameKind`. It's located inside some `container` and +//! has a `visibility`, which defines a search scope. +//! Note that the reference search is possible for not all of the classified items. + +// FIXME: this badly needs rename/rewrite (matklad, 2020-02-06). + +use hir::{ + db::HirDatabase, Crate, Field, HasVisibility, ImplDef, Local, MacroDef, Module, ModuleDef, + Name, PathResolution, Semantics, TypeParam, Visibility, +}; +use syntax::{ + ast::{self, AstNode}, + match_ast, SyntaxNode, +}; + +use crate::RootDatabase; + +// FIXME: a more precise name would probably be `Symbol`? +#[derive(Debug, PartialEq, Eq, Copy, Clone)] +pub enum Definition { + Macro(MacroDef), + Field(Field), + ModuleDef(ModuleDef), + SelfType(ImplDef), + Local(Local), + TypeParam(TypeParam), +} + +impl Definition { + pub fn module(&self, db: &RootDatabase) -> Option { + match self { + Definition::Macro(it) => it.module(db), + Definition::Field(it) => Some(it.parent_def(db).module(db)), + Definition::ModuleDef(it) => it.module(db), + Definition::SelfType(it) => Some(it.module(db)), + Definition::Local(it) => Some(it.module(db)), + Definition::TypeParam(it) => Some(it.module(db)), + } + } + + pub fn visibility(&self, db: &RootDatabase) -> Option { + match self { + Definition::Macro(_) => None, + Definition::Field(sf) => Some(sf.visibility(db)), + Definition::ModuleDef(def) => def.definition_visibility(db), + Definition::SelfType(_) => None, + Definition::Local(_) => None, + Definition::TypeParam(_) => None, + } + } + + pub fn name(&self, db: &RootDatabase) -> Option { + let name = match self { + Definition::Macro(it) => it.name(db)?, + Definition::Field(it) => it.name(db), + Definition::ModuleDef(def) => match def { + hir::ModuleDef::Module(it) => it.name(db)?, + hir::ModuleDef::Function(it) => it.name(db), + hir::ModuleDef::Adt(def) => match def { + hir::Adt::Struct(it) => it.name(db), + hir::Adt::Union(it) => it.name(db), + hir::Adt::Enum(it) => it.name(db), + 
}, + hir::ModuleDef::EnumVariant(it) => it.name(db), + hir::ModuleDef::Const(it) => it.name(db)?, + hir::ModuleDef::Static(it) => it.name(db)?, + hir::ModuleDef::Trait(it) => it.name(db), + hir::ModuleDef::TypeAlias(it) => it.name(db), + hir::ModuleDef::BuiltinType(_) => return None, + }, + Definition::SelfType(_) => return None, + Definition::Local(it) => it.name(db)?, + Definition::TypeParam(it) => it.name(db), + }; + Some(name) + } +} + +#[derive(Debug)] +pub enum NameClass { + ExternCrate(Crate), + Definition(Definition), + /// `None` in `if let None = Some(82) {}` + ConstReference(Definition), + FieldShorthand { + local: Local, + field: Definition, + }, +} + +impl NameClass { + pub fn into_definition(self, db: &dyn HirDatabase) -> Option { + Some(match self { + NameClass::ExternCrate(krate) => Definition::ModuleDef(krate.root_module(db).into()), + NameClass::Definition(it) => it, + NameClass::ConstReference(_) => return None, + NameClass::FieldShorthand { local, field: _ } => Definition::Local(local), + }) + } + + pub fn definition(self, db: &dyn HirDatabase) -> Definition { + match self { + NameClass::ExternCrate(krate) => Definition::ModuleDef(krate.root_module(db).into()), + NameClass::Definition(it) | NameClass::ConstReference(it) => it, + NameClass::FieldShorthand { local: _, field } => field, + } + } +} + +pub fn classify_name(sema: &Semantics, name: &ast::Name) -> Option { + let _p = profile::span("classify_name"); + + let parent = name.syntax().parent()?; + + if let Some(bind_pat) = ast::IdentPat::cast(parent.clone()) { + if let Some(def) = sema.resolve_bind_pat_to_const(&bind_pat) { + return Some(NameClass::ConstReference(Definition::ModuleDef(def))); + } + } + + match_ast! 
{ + match parent { + ast::Rename(it) => { + if let Some(use_tree) = it.syntax().parent().and_then(ast::UseTree::cast) { + let path = use_tree.path()?; + let path_segment = path.segment()?; + let name_ref_class = path_segment + .name_ref() + // The rename might be from a `self` token, so fallback to the name higher + // in the use tree. + .or_else(||{ + if path_segment.self_token().is_none() { + return None; + } + + let use_tree = use_tree + .syntax() + .parent() + .as_ref() + // Skip over UseTreeList + .and_then(SyntaxNode::parent) + .and_then(ast::UseTree::cast)?; + let path = use_tree.path()?; + let path_segment = path.segment()?; + path_segment.name_ref() + }) + .and_then(|name_ref| classify_name_ref(sema, &name_ref))?; + + Some(NameClass::Definition(name_ref_class.definition(sema.db))) + } else { + let extern_crate = it.syntax().parent().and_then(ast::ExternCrate::cast)?; + let resolved = sema.resolve_extern_crate(&extern_crate)?; + Some(NameClass::ExternCrate(resolved)) + } + }, + ast::IdentPat(it) => { + let local = sema.to_def(&it)?; + + if let Some(record_field_pat) = it.syntax().parent().and_then(ast::RecordPatField::cast) { + if record_field_pat.name_ref().is_none() { + if let Some(field) = sema.resolve_record_field_pat(&record_field_pat) { + let field = Definition::Field(field); + return Some(NameClass::FieldShorthand { local, field }); + } + } + } + + Some(NameClass::Definition(Definition::Local(local))) + }, + ast::RecordField(it) => { + let field: hir::Field = sema.to_def(&it)?; + Some(NameClass::Definition(Definition::Field(field))) + }, + ast::Module(it) => { + let def = sema.to_def(&it)?; + Some(NameClass::Definition(Definition::ModuleDef(def.into()))) + }, + ast::Struct(it) => { + let def: hir::Struct = sema.to_def(&it)?; + Some(NameClass::Definition(Definition::ModuleDef(def.into()))) + }, + ast::Union(it) => { + let def: hir::Union = sema.to_def(&it)?; + Some(NameClass::Definition(Definition::ModuleDef(def.into()))) + }, + ast::Enum(it) => { + 
let def: hir::Enum = sema.to_def(&it)?; + Some(NameClass::Definition(Definition::ModuleDef(def.into()))) + }, + ast::Trait(it) => { + let def: hir::Trait = sema.to_def(&it)?; + Some(NameClass::Definition(Definition::ModuleDef(def.into()))) + }, + ast::Static(it) => { + let def: hir::Static = sema.to_def(&it)?; + Some(NameClass::Definition(Definition::ModuleDef(def.into()))) + }, + ast::Variant(it) => { + let def: hir::EnumVariant = sema.to_def(&it)?; + Some(NameClass::Definition(Definition::ModuleDef(def.into()))) + }, + ast::Fn(it) => { + let def: hir::Function = sema.to_def(&it)?; + Some(NameClass::Definition(Definition::ModuleDef(def.into()))) + }, + ast::Const(it) => { + let def: hir::Const = sema.to_def(&it)?; + Some(NameClass::Definition(Definition::ModuleDef(def.into()))) + }, + ast::TypeAlias(it) => { + let def: hir::TypeAlias = sema.to_def(&it)?; + Some(NameClass::Definition(Definition::ModuleDef(def.into()))) + }, + ast::MacroCall(it) => { + let def = sema.to_def(&it)?; + Some(NameClass::Definition(Definition::Macro(def))) + }, + ast::TypeParam(it) => { + let def = sema.to_def(&it)?; + Some(NameClass::Definition(Definition::TypeParam(def))) + }, + _ => None, + } + } +} + +#[derive(Debug)] +pub enum NameRefClass { + ExternCrate(Crate), + Definition(Definition), + FieldShorthand { local: Local, field: Definition }, +} + +impl NameRefClass { + pub fn definition(self, db: &dyn HirDatabase) -> Definition { + match self { + NameRefClass::ExternCrate(krate) => Definition::ModuleDef(krate.root_module(db).into()), + NameRefClass::Definition(def) => def, + NameRefClass::FieldShorthand { local, field: _ } => Definition::Local(local), + } + } +} + +// Note: we don't have unit-tests for this rather important function. +// It is primarily exercised via goto definition tests in `ide`. 
+pub fn classify_name_ref( + sema: &Semantics, + name_ref: &ast::NameRef, +) -> Option { + let _p = profile::span("classify_name_ref"); + + let parent = name_ref.syntax().parent()?; + + if let Some(method_call) = ast::MethodCallExpr::cast(parent.clone()) { + if let Some(func) = sema.resolve_method_call(&method_call) { + return Some(NameRefClass::Definition(Definition::ModuleDef(func.into()))); + } + } + + if let Some(field_expr) = ast::FieldExpr::cast(parent.clone()) { + if let Some(field) = sema.resolve_field(&field_expr) { + return Some(NameRefClass::Definition(Definition::Field(field))); + } + } + + if let Some(record_field) = ast::RecordExprField::for_field_name(name_ref) { + if let Some((field, local)) = sema.resolve_record_field(&record_field) { + let field = Definition::Field(field); + let res = match local { + None => NameRefClass::Definition(field), + Some(local) => NameRefClass::FieldShorthand { field, local }, + }; + return Some(res); + } + } + + if let Some(record_field_pat) = ast::RecordPatField::cast(parent.clone()) { + if let Some(field) = sema.resolve_record_field_pat(&record_field_pat) { + let field = Definition::Field(field); + return Some(NameRefClass::Definition(field)); + } + } + + if ast::AssocTypeArg::cast(parent.clone()).is_some() { + // `Trait` + // ^^^^^ + let path = name_ref.syntax().ancestors().find_map(ast::Path::cast)?; + let resolved = sema.resolve_path(&path)?; + if let PathResolution::Def(ModuleDef::Trait(tr)) = resolved { + if let Some(ty) = tr + .items(sema.db) + .iter() + .filter_map(|assoc| match assoc { + hir::AssocItem::TypeAlias(it) => Some(*it), + _ => None, + }) + .find(|alias| alias.name(sema.db).to_string() == **name_ref.text()) + { + return Some(NameRefClass::Definition(Definition::ModuleDef( + ModuleDef::TypeAlias(ty), + ))); + } + } + } + + if let Some(macro_call) = parent.ancestors().find_map(ast::MacroCall::cast) { + if let Some(path) = macro_call.path() { + if path.qualifier().is_none() { + // Only use this to 
resolve single-segment macro calls like `foo!()`. Multi-segment + // paths are handled below (allowing `log<|>::info!` to resolve to the log crate). + if let Some(macro_def) = sema.resolve_macro_call(¯o_call) { + return Some(NameRefClass::Definition(Definition::Macro(macro_def))); + } + } + } + } + + if let Some(path) = name_ref.syntax().ancestors().find_map(ast::Path::cast) { + if let Some(resolved) = sema.resolve_path(&path) { + return Some(NameRefClass::Definition(resolved.into())); + } + } + + let extern_crate = ast::ExternCrate::cast(parent)?; + let resolved = sema.resolve_extern_crate(&extern_crate)?; + Some(NameRefClass::ExternCrate(resolved)) +} + +impl From for Definition { + fn from(path_resolution: PathResolution) -> Self { + match path_resolution { + PathResolution::Def(def) => Definition::ModuleDef(def), + PathResolution::AssocItem(item) => { + let def = match item { + hir::AssocItem::Function(it) => it.into(), + hir::AssocItem::Const(it) => it.into(), + hir::AssocItem::TypeAlias(it) => it.into(), + }; + Definition::ModuleDef(def) + } + PathResolution::Local(local) => Definition::Local(local), + PathResolution::TypeParam(par) => Definition::TypeParam(par), + PathResolution::Macro(def) => Definition::Macro(def), + PathResolution::SelfType(impl_def) => Definition::SelfType(impl_def), + } + } +} diff --git a/crates/ide_db/src/imports_locator.rs b/crates/ide_db/src/imports_locator.rs new file mode 100644 index 0000000000..ed67e35531 --- /dev/null +++ b/crates/ide_db/src/imports_locator.rs @@ -0,0 +1,64 @@ +//! This module contains an import search funcionality that is provided to the assists module. +//! Later, this should be moved away to a separate crate that is accessible from the assists module. 
+ +use hir::{Crate, MacroDef, ModuleDef, Semantics}; +use syntax::{ast, AstNode, SyntaxKind::NAME}; + +use crate::{ + defs::{classify_name, Definition}, + symbol_index::{self, FileSymbol, Query}, + RootDatabase, +}; +use either::Either; +use rustc_hash::FxHashSet; + +pub fn find_imports<'a>( + sema: &Semantics<'a, RootDatabase>, + krate: Crate, + name_to_import: &str, +) -> Vec> { + let _p = profile::span("search_for_imports"); + let db = sema.db; + + // Query dependencies first. + let mut candidates: FxHashSet<_> = + krate.query_external_importables(db, name_to_import).collect(); + + // Query the local crate using the symbol index. + let local_results = { + let mut query = Query::new(name_to_import.to_string()); + query.exact(); + query.limit(40); + symbol_index::crate_symbols(db, krate.into(), query) + }; + + candidates.extend( + local_results + .into_iter() + .filter_map(|import_candidate| get_name_definition(sema, &import_candidate)) + .filter_map(|name_definition_to_import| match name_definition_to_import { + Definition::ModuleDef(module_def) => Some(Either::Left(module_def)), + Definition::Macro(macro_def) => Some(Either::Right(macro_def)), + _ => None, + }), + ); + + candidates.into_iter().collect() +} + +fn get_name_definition<'a>( + sema: &Semantics<'a, RootDatabase>, + import_candidate: &FileSymbol, +) -> Option { + let _p = profile::span("get_name_definition"); + let file_id = import_candidate.file_id; + + let candidate_node = import_candidate.ptr.to_node(sema.parse(file_id).syntax()); + let candidate_name_node = if candidate_node.kind() != NAME { + candidate_node.children().find(|it| it.kind() == NAME)? + } else { + candidate_node + }; + let name = ast::Name::cast(candidate_name_node)?; + classify_name(sema, &name)?.into_definition(sema.db) +} diff --git a/crates/ide_db/src/lib.rs b/crates/ide_db/src/lib.rs new file mode 100644 index 0000000000..fd474cd0f4 --- /dev/null +++ b/crates/ide_db/src/lib.rs @@ -0,0 +1,139 @@ +//! 
+//! This crate defines the core datastructure representing IDE state -- `RootDatabase`.
+//!
+//! It is mainly a `HirDatabase` for semantic analysis, plus a `SymbolsDatabase`, for fuzzy search.
+
+pub mod line_index;
+pub mod symbol_index;
+pub mod change;
+pub mod defs;
+pub mod search;
+pub mod imports_locator;
+pub mod source_change;
+mod wasm_shims;
+
+use std::{fmt, sync::Arc};
+
+use base_db::{
+    salsa::{self, Durability},
+    Canceled, CheckCanceled, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase,
+    Upcast,
+};
+use hir::db::{AstDatabase, DefDatabase, HirDatabase};
+use rustc_hash::FxHashSet;
+
+use crate::{line_index::LineIndex, symbol_index::SymbolsDatabase};
+
+#[salsa::database(
+    base_db::SourceDatabaseStorage,
+    base_db::SourceDatabaseExtStorage,
+    LineIndexDatabaseStorage,
+    symbol_index::SymbolsDatabaseStorage,
+    hir::db::InternDatabaseStorage,
+    hir::db::AstDatabaseStorage,
+    hir::db::DefDatabaseStorage,
+    hir::db::HirDatabaseStorage
+)]
+pub struct RootDatabase {
+    storage: salsa::Storage<RootDatabase>,
+    pub last_gc: crate::wasm_shims::Instant,
+    pub last_gc_check: crate::wasm_shims::Instant,
+}
+
+impl fmt::Debug for RootDatabase {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("RootDatabase").finish()
+    }
+}
+
+impl Upcast<dyn AstDatabase> for RootDatabase {
+    fn upcast(&self) -> &(dyn AstDatabase + 'static) {
+        &*self
+    }
+}
+
+impl Upcast<dyn DefDatabase> for RootDatabase {
+    fn upcast(&self) -> &(dyn DefDatabase + 'static) {
+        &*self
+    }
+}
+
+impl Upcast<dyn HirDatabase> for RootDatabase {
+    fn upcast(&self) -> &(dyn HirDatabase + 'static) {
+        &*self
+    }
+}
+
+impl FileLoader for RootDatabase {
+    fn file_text(&self, file_id: FileId) -> Arc<String> {
+        FileLoaderDelegate(self).file_text(file_id)
+    }
+    fn resolve_path(&self, anchor: FileId, path: &str) -> Option<FileId> {
+        FileLoaderDelegate(self).resolve_path(anchor, path)
+    }
+    fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
+        FileLoaderDelegate(self).relevant_crates(file_id)
+    }
+}
+
+impl salsa::Database for RootDatabase {
+    fn on_propagated_panic(&self) -> ! {
+        Canceled::throw()
+    }
+    fn salsa_event(&self, event: salsa::Event) {
+        match event.kind {
+            salsa::EventKind::DidValidateMemoizedValue { .. }
+            | salsa::EventKind::WillExecute { .. } => {
+                self.check_canceled();
+            }
+            _ => (),
+        }
+    }
+}
+
+impl Default for RootDatabase {
+    fn default() -> RootDatabase {
+        RootDatabase::new(None)
+    }
+}
+
+impl RootDatabase {
+    pub fn new(lru_capacity: Option<usize>) -> RootDatabase {
+        let mut db = RootDatabase {
+            storage: salsa::Storage::default(),
+            last_gc: crate::wasm_shims::Instant::now(),
+            last_gc_check: crate::wasm_shims::Instant::now(),
+        };
+        db.set_crate_graph_with_durability(Default::default(), Durability::HIGH);
+        db.set_local_roots_with_durability(Default::default(), Durability::HIGH);
+        db.set_library_roots_with_durability(Default::default(), Durability::HIGH);
+        db.update_lru_capacity(lru_capacity);
+        db
+    }
+
+    pub fn update_lru_capacity(&mut self, lru_capacity: Option<usize>) {
+        let lru_capacity = lru_capacity.unwrap_or(base_db::DEFAULT_LRU_CAP);
+        base_db::ParseQuery.in_db_mut(self).set_lru_capacity(lru_capacity);
+        hir::db::ParseMacroQuery.in_db_mut(self).set_lru_capacity(lru_capacity);
+        hir::db::MacroExpandQuery.in_db_mut(self).set_lru_capacity(lru_capacity);
+    }
+}
+
+impl salsa::ParallelDatabase for RootDatabase {
+    fn snapshot(&self) -> salsa::Snapshot<RootDatabase> {
+        salsa::Snapshot::new(RootDatabase {
+            storage: self.storage.snapshot(),
+            last_gc: self.last_gc,
+            last_gc_check: self.last_gc_check,
+        })
+    }
+}
+
+#[salsa::query_group(LineIndexDatabaseStorage)]
+pub trait LineIndexDatabase: base_db::SourceDatabase + CheckCanceled {
+    fn line_index(&self, file_id: FileId) -> Arc<LineIndex>;
+}
+
+fn line_index(db: &dyn LineIndexDatabase, file_id: FileId) -> Arc<LineIndex> {
+    let text = db.file_text(file_id);
+    Arc::new(LineIndex::new(&*text))
+}
diff --git a/crates/ide_db/src/line_index.rs b/crates/ide_db/src/line_index.rs
new file mode 100644
index 0000000000..a381f7fb8d
--- /dev/null
+++ 
b/crates/ide_db/src/line_index.rs @@ -0,0 +1,281 @@ +//! `LineIndex` maps flat `TextSize` offsets into `(Line, Column)` +//! representation. +use std::iter; + +use rustc_hash::FxHashMap; +use stdx::partition_point; +use syntax::{TextRange, TextSize}; + +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct LineIndex { + /// Offset the the beginning of each line, zero-based + pub(crate) newlines: Vec, + /// List of non-ASCII characters on each line + pub(crate) utf16_lines: FxHashMap>, +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub struct LineCol { + /// Zero-based + pub line: u32, + /// Zero-based + pub col_utf16: u32, +} + +#[derive(Clone, Debug, Hash, PartialEq, Eq)] +pub(crate) struct Utf16Char { + /// Start offset of a character inside a line, zero-based + pub(crate) start: TextSize, + /// End offset of a character inside a line, zero-based + pub(crate) end: TextSize, +} + +impl Utf16Char { + /// Returns the length in 8-bit UTF-8 code units. + fn len(&self) -> TextSize { + self.end - self.start + } + + /// Returns the length in 16-bit UTF-16 code units. 
+ fn len_utf16(&self) -> usize { + if self.len() == TextSize::from(4) { + 2 + } else { + 1 + } + } +} + +impl LineIndex { + pub fn new(text: &str) -> LineIndex { + let mut utf16_lines = FxHashMap::default(); + let mut utf16_chars = Vec::new(); + + let mut newlines = vec![0.into()]; + let mut curr_row = 0.into(); + let mut curr_col = 0.into(); + let mut line = 0; + for c in text.chars() { + let c_len = TextSize::of(c); + curr_row += c_len; + if c == '\n' { + newlines.push(curr_row); + + // Save any utf-16 characters seen in the previous line + if !utf16_chars.is_empty() { + utf16_lines.insert(line, utf16_chars); + utf16_chars = Vec::new(); + } + + // Prepare for processing the next line + curr_col = 0.into(); + line += 1; + continue; + } + + if !c.is_ascii() { + utf16_chars.push(Utf16Char { start: curr_col, end: curr_col + c_len }); + } + + curr_col += c_len; + } + + // Save any utf-16 characters seen in the last line + if !utf16_chars.is_empty() { + utf16_lines.insert(line, utf16_chars); + } + + LineIndex { newlines, utf16_lines } + } + + pub fn line_col(&self, offset: TextSize) -> LineCol { + let line = partition_point(&self.newlines, |&it| it <= offset) - 1; + let line_start_offset = self.newlines[line]; + let col = offset - line_start_offset; + + LineCol { line: line as u32, col_utf16: self.utf8_to_utf16_col(line as u32, col) as u32 } + } + + pub fn offset(&self, line_col: LineCol) -> TextSize { + //FIXME: return Result + let col = self.utf16_to_utf8_col(line_col.line, line_col.col_utf16); + self.newlines[line_col.line as usize] + col + } + + pub fn lines(&self, range: TextRange) -> impl Iterator + '_ { + let lo = partition_point(&self.newlines, |&it| it < range.start()); + let hi = partition_point(&self.newlines, |&it| it <= range.end()); + let all = iter::once(range.start()) + .chain(self.newlines[lo..hi].iter().copied()) + .chain(iter::once(range.end())); + + all.clone() + .zip(all.skip(1)) + .map(|(lo, hi)| TextRange::new(lo, hi)) + .filter(|it| 
!it.is_empty()) + } + + fn utf8_to_utf16_col(&self, line: u32, col: TextSize) -> usize { + let mut res: usize = col.into(); + if let Some(utf16_chars) = self.utf16_lines.get(&line) { + for c in utf16_chars { + if c.end <= col { + res -= usize::from(c.len()) - c.len_utf16(); + } else { + // From here on, all utf16 characters come *after* the character we are mapping, + // so we don't need to take them into account + break; + } + } + } + res + } + + fn utf16_to_utf8_col(&self, line: u32, mut col: u32) -> TextSize { + if let Some(utf16_chars) = self.utf16_lines.get(&line) { + for c in utf16_chars { + if col > u32::from(c.start) { + col += u32::from(c.len()) - c.len_utf16() as u32; + } else { + // From here on, all utf16 characters come *after* the character we are mapping, + // so we don't need to take them into account + break; + } + } + } + + col.into() + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_line_index() { + let text = "hello\nworld"; + let index = LineIndex::new(text); + assert_eq!(index.line_col(0.into()), LineCol { line: 0, col_utf16: 0 }); + assert_eq!(index.line_col(1.into()), LineCol { line: 0, col_utf16: 1 }); + assert_eq!(index.line_col(5.into()), LineCol { line: 0, col_utf16: 5 }); + assert_eq!(index.line_col(6.into()), LineCol { line: 1, col_utf16: 0 }); + assert_eq!(index.line_col(7.into()), LineCol { line: 1, col_utf16: 1 }); + assert_eq!(index.line_col(8.into()), LineCol { line: 1, col_utf16: 2 }); + assert_eq!(index.line_col(10.into()), LineCol { line: 1, col_utf16: 4 }); + assert_eq!(index.line_col(11.into()), LineCol { line: 1, col_utf16: 5 }); + assert_eq!(index.line_col(12.into()), LineCol { line: 1, col_utf16: 6 }); + + let text = "\nhello\nworld"; + let index = LineIndex::new(text); + assert_eq!(index.line_col(0.into()), LineCol { line: 0, col_utf16: 0 }); + assert_eq!(index.line_col(1.into()), LineCol { line: 1, col_utf16: 0 }); + assert_eq!(index.line_col(2.into()), LineCol { line: 1, col_utf16: 1 }); + 
assert_eq!(index.line_col(6.into()), LineCol { line: 1, col_utf16: 5 }); + assert_eq!(index.line_col(7.into()), LineCol { line: 2, col_utf16: 0 }); + } + + #[test] + fn test_char_len() { + assert_eq!('メ'.len_utf8(), 3); + assert_eq!('メ'.len_utf16(), 1); + } + + #[test] + fn test_empty_index() { + let col_index = LineIndex::new( + " +const C: char = 'x'; +", + ); + assert_eq!(col_index.utf16_lines.len(), 0); + } + + #[test] + fn test_single_char() { + let col_index = LineIndex::new( + " +const C: char = 'メ'; +", + ); + + assert_eq!(col_index.utf16_lines.len(), 1); + assert_eq!(col_index.utf16_lines[&1].len(), 1); + assert_eq!(col_index.utf16_lines[&1][0], Utf16Char { start: 17.into(), end: 20.into() }); + + // UTF-8 to UTF-16, no changes + assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15); + + // UTF-8 to UTF-16 + assert_eq!(col_index.utf8_to_utf16_col(1, 22.into()), 20); + + // UTF-16 to UTF-8, no changes + assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextSize::from(15)); + + // UTF-16 to UTF-8 + assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextSize::from(21)); + + let col_index = LineIndex::new("a𐐏b"); + assert_eq!(col_index.utf16_to_utf8_col(0, 3), TextSize::from(5)); + } + + #[test] + fn test_string() { + let col_index = LineIndex::new( + " +const C: char = \"メ メ\"; +", + ); + + assert_eq!(col_index.utf16_lines.len(), 1); + assert_eq!(col_index.utf16_lines[&1].len(), 2); + assert_eq!(col_index.utf16_lines[&1][0], Utf16Char { start: 17.into(), end: 20.into() }); + assert_eq!(col_index.utf16_lines[&1][1], Utf16Char { start: 21.into(), end: 24.into() }); + + // UTF-8 to UTF-16 + assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15); + + assert_eq!(col_index.utf8_to_utf16_col(1, 21.into()), 19); + assert_eq!(col_index.utf8_to_utf16_col(1, 25.into()), 21); + + assert!(col_index.utf8_to_utf16_col(2, 15.into()) == 15); + + // UTF-16 to UTF-8 + assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextSize::from(15)); + + // メ UTF-8: 0xE3 0x83 0xA1, UTF-16: 
0x30E1 + assert_eq!(col_index.utf16_to_utf8_col(1, 17), TextSize::from(17)); // first メ at 17..20 + assert_eq!(col_index.utf16_to_utf8_col(1, 18), TextSize::from(20)); // space + assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextSize::from(21)); // second メ at 21..24 + + assert_eq!(col_index.utf16_to_utf8_col(2, 15), TextSize::from(15)); + } + + #[test] + fn test_splitlines() { + fn r(lo: u32, hi: u32) -> TextRange { + TextRange::new(lo.into(), hi.into()) + } + + let text = "a\nbb\nccc\n"; + let line_index = LineIndex::new(text); + + let actual = line_index.lines(r(0, 9)).collect::>(); + let expected = vec![r(0, 2), r(2, 5), r(5, 9)]; + assert_eq!(actual, expected); + + let text = ""; + let line_index = LineIndex::new(text); + + let actual = line_index.lines(r(0, 0)).collect::>(); + let expected = vec![]; + assert_eq!(actual, expected); + + let text = "\n"; + let line_index = LineIndex::new(text); + + let actual = line_index.lines(r(0, 1)).collect::>(); + let expected = vec![r(0, 1)]; + assert_eq!(actual, expected) + } +} diff --git a/crates/ide_db/src/search.rs b/crates/ide_db/src/search.rs new file mode 100644 index 0000000000..b9360bf129 --- /dev/null +++ b/crates/ide_db/src/search.rs @@ -0,0 +1,322 @@ +//! Implementation of find-usages functionality. +//! +//! It is based on the standard ide trick: first, we run a fast text search to +//! get a super-set of matches. Then, we we confirm each match using precise +//! name resolution. 
+ +use std::{convert::TryInto, mem}; + +use base_db::{FileId, FileRange, SourceDatabaseExt}; +use hir::{DefWithBody, HasSource, Module, ModuleSource, Semantics, Visibility}; +use once_cell::unsync::Lazy; +use rustc_hash::FxHashMap; +use syntax::{ast, match_ast, AstNode, TextRange, TextSize}; + +use crate::{ + defs::{classify_name_ref, Definition, NameRefClass}, + RootDatabase, +}; + +#[derive(Debug, Clone)] +pub struct Reference { + pub file_range: FileRange, + pub kind: ReferenceKind, + pub access: Option, +} + +#[derive(Debug, Clone, PartialEq)] +pub enum ReferenceKind { + FieldShorthandForField, + FieldShorthandForLocal, + StructLiteral, + Other, +} + +#[derive(Debug, Copy, Clone, PartialEq)] +pub enum ReferenceAccess { + Read, + Write, +} + +/// Generally, `search_scope` returns files that might contain references for the element. +/// For `pub(crate)` things it's a crate, for `pub` things it's a crate and dependant crates. +/// In some cases, the location of the references is known to within a `TextRange`, +/// e.g. for things like local variables. 
+pub struct SearchScope { + entries: FxHashMap>, +} + +impl SearchScope { + fn new(entries: FxHashMap>) -> SearchScope { + SearchScope { entries } + } + + pub fn empty() -> SearchScope { + SearchScope::new(FxHashMap::default()) + } + + pub fn single_file(file: FileId) -> SearchScope { + SearchScope::new(std::iter::once((file, None)).collect()) + } + + pub fn files(files: &[FileId]) -> SearchScope { + SearchScope::new(files.iter().map(|f| (*f, None)).collect()) + } + + pub fn intersection(&self, other: &SearchScope) -> SearchScope { + let (mut small, mut large) = (&self.entries, &other.entries); + if small.len() > large.len() { + mem::swap(&mut small, &mut large) + } + + let res = small + .iter() + .filter_map(|(file_id, r1)| { + let r2 = large.get(file_id)?; + let r = intersect_ranges(*r1, *r2)?; + Some((*file_id, r)) + }) + .collect(); + + return SearchScope::new(res); + + fn intersect_ranges( + r1: Option, + r2: Option, + ) -> Option> { + match (r1, r2) { + (None, r) | (r, None) => Some(r), + (Some(r1), Some(r2)) => { + let r = r1.intersect(r2)?; + Some(Some(r)) + } + } + } + } +} + +impl IntoIterator for SearchScope { + type Item = (FileId, Option); + type IntoIter = std::collections::hash_map::IntoIter>; + + fn into_iter(self) -> Self::IntoIter { + self.entries.into_iter() + } +} + +impl Definition { + fn search_scope(&self, db: &RootDatabase) -> SearchScope { + let _p = profile::span("search_scope"); + let module = match self.module(db) { + Some(it) => it, + None => return SearchScope::empty(), + }; + let module_src = module.definition_source(db); + let file_id = module_src.file_id.original_file(db); + + if let Definition::Local(var) = self { + let range = match var.parent(db) { + DefWithBody::Function(f) => f.source(db).value.syntax().text_range(), + DefWithBody::Const(c) => c.source(db).value.syntax().text_range(), + DefWithBody::Static(s) => s.source(db).value.syntax().text_range(), + }; + let mut res = FxHashMap::default(); + res.insert(file_id, 
Some(range)); + return SearchScope::new(res); + } + + let vis = self.visibility(db); + + if let Some(Visibility::Module(module)) = vis.and_then(|it| it.into()) { + let module: Module = module.into(); + let mut res = FxHashMap::default(); + + let mut to_visit = vec![module]; + let mut is_first = true; + while let Some(module) = to_visit.pop() { + let src = module.definition_source(db); + let file_id = src.file_id.original_file(db); + match src.value { + ModuleSource::Module(m) => { + if is_first { + let range = Some(m.syntax().text_range()); + res.insert(file_id, range); + } else { + // We have already added the enclosing file to the search scope, + // so do nothing. + } + } + ModuleSource::SourceFile(_) => { + res.insert(file_id, None); + } + }; + is_first = false; + to_visit.extend(module.children(db)); + } + + return SearchScope::new(res); + } + + if let Some(Visibility::Public) = vis { + let source_root_id = db.file_source_root(file_id); + let source_root = db.source_root(source_root_id); + let mut res = source_root.iter().map(|id| (id, None)).collect::>(); + + let krate = module.krate(); + for rev_dep in krate.reverse_dependencies(db) { + let root_file = rev_dep.root_file(db); + let source_root_id = db.file_source_root(root_file); + let source_root = db.source_root(source_root_id); + res.extend(source_root.iter().map(|id| (id, None))); + } + return SearchScope::new(res); + } + + let mut res = FxHashMap::default(); + let range = match module_src.value { + ModuleSource::Module(m) => Some(m.syntax().text_range()), + ModuleSource::SourceFile(_) => None, + }; + res.insert(file_id, range); + SearchScope::new(res) + } + + pub fn find_usages( + &self, + sema: &Semantics, + search_scope: Option, + ) -> Vec { + let _p = profile::span("Definition::find_usages"); + + let search_scope = { + let base = self.search_scope(sema.db); + match search_scope { + None => base, + Some(scope) => base.intersection(&scope), + } + }; + + let name = match self.name(sema.db) { + None => 
return Vec::new(), + Some(it) => it.to_string(), + }; + + let pat = name.as_str(); + let mut refs = vec![]; + + for (file_id, search_range) in search_scope { + let text = sema.db.file_text(file_id); + let search_range = + search_range.unwrap_or(TextRange::up_to(TextSize::of(text.as_str()))); + + let tree = Lazy::new(|| sema.parse(file_id).syntax().clone()); + + for (idx, _) in text.match_indices(pat) { + let offset: TextSize = idx.try_into().unwrap(); + if !search_range.contains_inclusive(offset) { + continue; + } + + let name_ref: ast::NameRef = + if let Some(name_ref) = sema.find_node_at_offset_with_descend(&tree, offset) { + name_ref + } else { + continue; + }; + + match classify_name_ref(&sema, &name_ref) { + Some(NameRefClass::Definition(def)) if &def == self => { + let kind = if is_record_lit_name_ref(&name_ref) + || is_call_expr_name_ref(&name_ref) + { + ReferenceKind::StructLiteral + } else { + ReferenceKind::Other + }; + + let file_range = sema.original_range(name_ref.syntax()); + refs.push(Reference { + file_range, + kind, + access: reference_access(&def, &name_ref), + }); + } + Some(NameRefClass::FieldShorthand { local, field }) => { + match self { + Definition::Field(_) if &field == self => refs.push(Reference { + file_range: sema.original_range(name_ref.syntax()), + kind: ReferenceKind::FieldShorthandForField, + access: reference_access(&field, &name_ref), + }), + Definition::Local(l) if &local == l => refs.push(Reference { + file_range: sema.original_range(name_ref.syntax()), + kind: ReferenceKind::FieldShorthandForLocal, + access: reference_access(&Definition::Local(local), &name_ref), + }), + + _ => {} // not a usage + }; + } + _ => {} // not a usage + } + } + } + refs + } +} + +fn reference_access(def: &Definition, name_ref: &ast::NameRef) -> Option { + // Only Locals and Fields have accesses for now. 
+ match def { + Definition::Local(_) | Definition::Field(_) => {} + _ => return None, + }; + + let mode = name_ref.syntax().ancestors().find_map(|node| { + match_ast! { + match (node) { + ast::BinExpr(expr) => { + if expr.op_kind()?.is_assignment() { + // If the variable or field ends on the LHS's end then it's a Write (covers fields and locals). + // FIXME: This is not terribly accurate. + if let Some(lhs) = expr.lhs() { + if lhs.syntax().text_range().end() == name_ref.syntax().text_range().end() { + return Some(ReferenceAccess::Write); + } + } + } + Some(ReferenceAccess::Read) + }, + _ => None + } + } + }); + + // Default Locals and Fields to read + mode.or(Some(ReferenceAccess::Read)) +} + +fn is_call_expr_name_ref(name_ref: &ast::NameRef) -> bool { + name_ref + .syntax() + .ancestors() + .find_map(ast::CallExpr::cast) + .and_then(|c| match c.expr()? { + ast::Expr::PathExpr(p) => { + Some(p.path()?.segment()?.name_ref().as_ref() == Some(name_ref)) + } + _ => None, + }) + .unwrap_or(false) +} + +fn is_record_lit_name_ref(name_ref: &ast::NameRef) -> bool { + name_ref + .syntax() + .ancestors() + .find_map(ast::RecordExpr::cast) + .and_then(|l| l.path()) + .and_then(|p| p.segment()) + .map(|p| p.name_ref().as_ref() == Some(name_ref)) + .unwrap_or(false) +} diff --git a/crates/ide_db/src/source_change.rs b/crates/ide_db/src/source_change.rs new file mode 100644 index 0000000000..f1590ec663 --- /dev/null +++ b/crates/ide_db/src/source_change.rs @@ -0,0 +1,59 @@ +//! This modules defines type to represent changes to the source code, that flow +//! from the server to the client. +//! +//! It can be viewed as a dual for `AnalysisChange`. + +use base_db::FileId; +use text_edit::TextEdit; + +#[derive(Default, Debug, Clone)] +pub struct SourceChange { + pub source_file_edits: Vec, + pub file_system_edits: Vec, + pub is_snippet: bool, +} + +impl SourceChange { + /// Creates a new SourceChange with the given label + /// from the edits. 
+    pub fn from_edits(
+        source_file_edits: Vec<SourceFileEdit>,
+        file_system_edits: Vec<FileSystemEdit>,
+    ) -> Self {
+        SourceChange { source_file_edits, file_system_edits, is_snippet: false }
+    }
+}
+
+#[derive(Debug, Clone)]
+pub struct SourceFileEdit {
+    pub file_id: FileId,
+    pub edit: TextEdit,
+}
+
+impl From<SourceFileEdit> for SourceChange {
+    fn from(edit: SourceFileEdit) -> SourceChange {
+        vec![edit].into()
+    }
+}
+
+impl From<Vec<SourceFileEdit>> for SourceChange {
+    fn from(source_file_edits: Vec<SourceFileEdit>) -> SourceChange {
+        SourceChange { source_file_edits, file_system_edits: Vec::new(), is_snippet: false }
+    }
+}
+
+#[derive(Debug, Clone)]
+pub enum FileSystemEdit {
+    CreateFile { anchor: FileId, dst: String },
+    MoveFile { src: FileId, anchor: FileId, dst: String },
+}
+
+impl From<FileSystemEdit> for SourceChange {
+    fn from(edit: FileSystemEdit) -> SourceChange {
+        SourceChange {
+            source_file_edits: Vec::new(),
+            file_system_edits: vec![edit],
+            is_snippet: false,
+        }
+    }
+}
diff --git a/crates/ide_db/src/symbol_index.rs b/crates/ide_db/src/symbol_index.rs
new file mode 100644
index 0000000000..654df898e9
--- /dev/null
+++ b/crates/ide_db/src/symbol_index.rs
@@ -0,0 +1,429 @@
+//! This module handles fuzzy-searching of functions, structs and other symbols
+//! by name across the whole workspace and dependencies.
+//!
+//! It works by building an incrementally-updated text-search index of all
+//! symbols. The backbone of the index is the **awesome** `fst` crate by
+//! @BurntSushi.
+//!
+//! In a nutshell, you give a set of strings to `fst`, and it builds a
+//! finite state machine describing this set of strings. The strings which
+//! could fuzzy-match a pattern can also be described by a finite state machine.
+//! What is freaking cool is that you can now traverse both state machines in
+//! lock-step to enumerate the strings which are both in the input set and
+//! fuzz-match the query. Or, more formally, given two languages described by
+//! FSTs, one can build a product FST which describes the intersection of the
+//! languages. 
+//! +//! `fst` does not support cheap updating of the index, but it supports unioning +//! of state machines. So, to account for changing source code, we build an FST +//! for each library (which is assumed to never change) and an FST for each Rust +//! file in the current workspace, and run a query against the union of all +//! those FSTs. + +use std::{ + cmp::Ordering, + fmt, + hash::{Hash, Hasher}, + mem, + sync::Arc, +}; + +use base_db::{ + salsa::{self, ParallelDatabase}, + CrateId, FileId, SourceDatabaseExt, SourceRootId, +}; +use fst::{self, Streamer}; +use hir::db::DefDatabase; +use rayon::prelude::*; +use rustc_hash::{FxHashMap, FxHashSet}; +use syntax::{ + ast::{self, NameOwner}, + match_ast, AstNode, Parse, SmolStr, SourceFile, + SyntaxKind::{self, *}, + SyntaxNode, SyntaxNodePtr, TextRange, WalkEvent, +}; + +use crate::RootDatabase; + +#[derive(Debug)] +pub struct Query { + query: String, + lowercased: String, + only_types: bool, + libs: bool, + exact: bool, + limit: usize, +} + +impl Query { + pub fn new(query: String) -> Query { + let lowercased = query.to_lowercase(); + Query { + query, + lowercased, + only_types: false, + libs: false, + exact: false, + limit: usize::max_value(), + } + } + + pub fn only_types(&mut self) { + self.only_types = true; + } + + pub fn libs(&mut self) { + self.libs = true; + } + + pub fn exact(&mut self) { + self.exact = true; + } + + pub fn limit(&mut self, limit: usize) { + self.limit = limit + } +} + +#[salsa::query_group(SymbolsDatabaseStorage)] +pub trait SymbolsDatabase: hir::db::HirDatabase + SourceDatabaseExt { + fn file_symbols(&self, file_id: FileId) -> Arc; + fn library_symbols(&self) -> Arc>; + /// The set of "local" (that is, from the current workspace) roots. + /// Files in local roots are assumed to change frequently. + #[salsa::input] + fn local_roots(&self) -> Arc>; + /// The set of roots for crates.io libraries. + /// Files in libraries are assumed to never change. 
+ #[salsa::input] + fn library_roots(&self) -> Arc>; +} + +fn library_symbols(db: &dyn SymbolsDatabase) -> Arc> { + let _p = profile::span("library_symbols"); + + let roots = db.library_roots(); + let res = roots + .iter() + .map(|&root_id| { + let root = db.source_root(root_id); + let files = root + .iter() + .map(|it| (it, SourceDatabaseExt::file_text(db, it))) + .collect::>(); + let symbol_index = SymbolIndex::for_files( + files.into_par_iter().map(|(file, text)| (file, SourceFile::parse(&text))), + ); + (root_id, symbol_index) + }) + .collect(); + Arc::new(res) +} + +fn file_symbols(db: &dyn SymbolsDatabase, file_id: FileId) -> Arc { + db.check_canceled(); + let parse = db.parse(file_id); + + let symbols = source_file_to_file_symbols(&parse.tree(), file_id); + + // FIXME: add macros here + + Arc::new(SymbolIndex::new(symbols)) +} + +/// Need to wrap Snapshot to provide `Clone` impl for `map_with` +struct Snap(DB); +impl Clone for Snap> { + fn clone(&self) -> Snap> { + Snap(self.0.snapshot()) + } +} + +// Feature: Workspace Symbol +// +// Uses fuzzy-search to find types, modules and functions by name across your +// project and dependencies. This is **the** most useful feature, which improves code +// navigation tremendously. It mostly works on top of the built-in LSP +// functionality, however `#` and `*` symbols can be used to narrow down the +// search. Specifically, +// +// - `Foo` searches for `Foo` type in the current workspace +// - `foo#` searches for `foo` function in the current workspace +// - `Foo*` searches for `Foo` type among dependencies, including `stdlib` +// - `foo#*` searches for `foo` function among dependencies +// +// That is, `#` switches from "types" to all symbols, `*` switches from the current +// workspace to dependencies. 
+// +// |=== +// | Editor | Shortcut +// +// | VS Code | kbd:[Ctrl+T] +// |=== +pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec { + let _p = profile::span("world_symbols").detail(|| query.query.clone()); + + let tmp1; + let tmp2; + let buf: Vec<&SymbolIndex> = if query.libs { + tmp1 = db.library_symbols(); + tmp1.values().collect() + } else { + let mut files = Vec::new(); + for &root in db.local_roots().iter() { + let sr = db.source_root(root); + files.extend(sr.iter()) + } + + let snap = Snap(db.snapshot()); + tmp2 = files + .par_iter() + .map_with(snap, |db, &file_id| db.0.file_symbols(file_id)) + .collect::>(); + tmp2.iter().map(|it| &**it).collect() + }; + query.search(&buf) +} + +pub fn crate_symbols(db: &RootDatabase, krate: CrateId, query: Query) -> Vec { + // FIXME(#4842): This now depends on CrateDefMap, why not build the entire symbol index from + // that instead? + + let def_map = db.crate_def_map(krate); + let mut files = Vec::new(); + let mut modules = vec![def_map.root]; + while let Some(module) = modules.pop() { + let data = &def_map[module]; + files.extend(data.origin.file_id()); + modules.extend(data.children.values()); + } + + let snap = Snap(db.snapshot()); + + let buf = files + .par_iter() + .map_with(snap, |db, &file_id| db.0.file_symbols(file_id)) + .collect::>(); + let buf = buf.iter().map(|it| &**it).collect::>(); + + query.search(&buf) +} + +pub fn index_resolve(db: &RootDatabase, name_ref: &ast::NameRef) -> Vec { + let name = name_ref.text(); + let mut query = Query::new(name.to_string()); + query.exact(); + query.limit(4); + world_symbols(db, query) +} + +#[derive(Default)] +pub struct SymbolIndex { + symbols: Vec, + map: fst::Map>, +} + +impl fmt::Debug for SymbolIndex { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_struct("SymbolIndex").field("n_symbols", &self.symbols.len()).finish() + } +} + +impl PartialEq for SymbolIndex { + fn eq(&self, other: &SymbolIndex) -> bool { + self.symbols == other.symbols 
+ } +} + +impl Eq for SymbolIndex {} + +impl Hash for SymbolIndex { + fn hash(&self, hasher: &mut H) { + self.symbols.hash(hasher) + } +} + +impl SymbolIndex { + fn new(mut symbols: Vec) -> SymbolIndex { + fn cmp(lhs: &FileSymbol, rhs: &FileSymbol) -> Ordering { + let lhs_chars = lhs.name.chars().map(|c| c.to_ascii_lowercase()); + let rhs_chars = rhs.name.chars().map(|c| c.to_ascii_lowercase()); + lhs_chars.cmp(rhs_chars) + } + + symbols.par_sort_by(cmp); + + let mut builder = fst::MapBuilder::memory(); + + let mut last_batch_start = 0; + + for idx in 0..symbols.len() { + if let Some(next_symbol) = symbols.get(idx + 1) { + if cmp(&symbols[last_batch_start], next_symbol) == Ordering::Equal { + continue; + } + } + + let start = last_batch_start; + let end = idx + 1; + last_batch_start = end; + + let key = symbols[start].name.as_str().to_ascii_lowercase(); + let value = SymbolIndex::range_to_map_value(start, end); + + builder.insert(key, value).unwrap(); + } + + let map = fst::Map::new(builder.into_inner().unwrap()).unwrap(); + SymbolIndex { symbols, map } + } + + pub fn len(&self) -> usize { + self.symbols.len() + } + + pub fn memory_size(&self) -> usize { + self.map.as_fst().size() + self.symbols.len() * mem::size_of::() + } + + pub(crate) fn for_files( + files: impl ParallelIterator)>, + ) -> SymbolIndex { + let symbols = files + .flat_map(|(file_id, file)| source_file_to_file_symbols(&file.tree(), file_id)) + .collect::>(); + SymbolIndex::new(symbols) + } + + fn range_to_map_value(start: usize, end: usize) -> u64 { + debug_assert![start <= (std::u32::MAX as usize)]; + debug_assert![end <= (std::u32::MAX as usize)]; + + ((start as u64) << 32) | end as u64 + } + + fn map_value_to_range(value: u64) -> (usize, usize) { + let end = value as u32 as usize; + let start = (value >> 32) as usize; + (start, end) + } +} + +impl Query { + pub(crate) fn search(self, indices: &[&SymbolIndex]) -> Vec { + let mut op = fst::map::OpBuilder::new(); + for file_symbols in 
indices.iter() { + let automaton = fst::automaton::Subsequence::new(&self.lowercased); + op = op.add(file_symbols.map.search(automaton)) + } + let mut stream = op.union(); + let mut res = Vec::new(); + while let Some((_, indexed_values)) = stream.next() { + for indexed_value in indexed_values { + let symbol_index = &indices[indexed_value.index]; + let (start, end) = SymbolIndex::map_value_to_range(indexed_value.value); + + for symbol in &symbol_index.symbols[start..end] { + if self.only_types && !is_type(symbol.kind) { + continue; + } + if self.exact && symbol.name != self.query { + continue; + } + + res.push(symbol.clone()); + if res.len() >= self.limit { + return res; + } + } + } + } + res + } +} + +fn is_type(kind: SyntaxKind) -> bool { + matches!(kind, STRUCT | ENUM | TRAIT | TYPE_ALIAS) +} + +/// The actual data that is stored in the index. It should be as compact as +/// possible. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct FileSymbol { + pub file_id: FileId, + pub name: SmolStr, + pub kind: SyntaxKind, + pub range: TextRange, + pub ptr: SyntaxNodePtr, + pub name_range: Option, + pub container_name: Option, +} + +fn source_file_to_file_symbols(source_file: &SourceFile, file_id: FileId) -> Vec { + let mut symbols = Vec::new(); + let mut stack = Vec::new(); + + for event in source_file.syntax().preorder() { + match event { + WalkEvent::Enter(node) => { + if let Some(mut symbol) = to_file_symbol(&node, file_id) { + symbol.container_name = stack.last().cloned(); + + stack.push(symbol.name.clone()); + symbols.push(symbol); + } + } + + WalkEvent::Leave(node) => { + if to_symbol(&node).is_some() { + stack.pop(); + } + } + } + } + + symbols +} + +fn to_symbol(node: &SyntaxNode) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> { + fn decl(node: N) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> { + let name = node.name()?; + let name_range = name.syntax().text_range(); + let name = name.text().clone(); + let ptr = SyntaxNodePtr::new(node.syntax()); + + 
Some((name, ptr, name_range)) + } + match_ast! { + match node { + ast::Fn(it) => decl(it), + ast::Struct(it) => decl(it), + ast::Enum(it) => decl(it), + ast::Trait(it) => decl(it), + ast::Module(it) => decl(it), + ast::TypeAlias(it) => decl(it), + ast::Const(it) => decl(it), + ast::Static(it) => decl(it), + ast::MacroCall(it) => { + if it.is_macro_rules().is_some() { + decl(it) + } else { + None + } + }, + _ => None, + } + } +} + +fn to_file_symbol(node: &SyntaxNode, file_id: FileId) -> Option { + to_symbol(node).map(move |(name, ptr, name_range)| FileSymbol { + name, + kind: node.kind(), + range: node.text_range(), + ptr, + file_id, + name_range: Some(name_range), + container_name: None, + }) +} diff --git a/crates/ra_ide_db/src/wasm_shims.rs b/crates/ide_db/src/wasm_shims.rs similarity index 100% rename from crates/ra_ide_db/src/wasm_shims.rs rename to crates/ide_db/src/wasm_shims.rs diff --git a/crates/mbe/Cargo.toml b/crates/mbe/Cargo.toml new file mode 100644 index 0000000000..1aba8b7c48 --- /dev/null +++ b/crates/mbe/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "mbe" +version = "0.0.0" +license = "MIT OR Apache-2.0" +authors = ["rust-analyzer developers"] +edition = "2018" + +[lib] +doctest = false + +[dependencies] +rustc-hash = "1.1.0" +smallvec = "1.2.0" +log = "0.4.8" + +syntax = { path = "../syntax" } +parser = { path = "../parser" } +tt = { path = "../tt" } + +[dev-dependencies] +test_utils = { path = "../test_utils" } diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs new file mode 100644 index 0000000000..f854ca09ab --- /dev/null +++ b/crates/mbe/src/lib.rs @@ -0,0 +1,278 @@ +//! `mbe` (short for Macro By Example) crate contains code for handling +//! `macro_rules` macros. It uses `TokenTree` (from `tt` package) as the +//! interface, although it contains some code to bridge `SyntaxNode`s and +//! `TokenTree`s as well! 
+ +mod parser; +mod mbe_expander; +mod syntax_bridge; +mod tt_iter; +mod subtree_source; + +#[cfg(test)] +mod tests; + +pub use tt::{Delimiter, Punct}; + +use crate::{ + parser::{parse_pattern, Op}, + tt_iter::TtIter, +}; + +#[derive(Debug, PartialEq, Eq)] +pub enum ParseError { + Expected(String), + RepetitionEmtpyTokenTree, +} + +#[derive(Debug, PartialEq, Eq, Clone)] +pub enum ExpandError { + NoMatchingRule, + UnexpectedToken, + BindingError(String), + ConversionError, + InvalidRepeat, + ProcMacroError(tt::ExpansionError), +} + +impl From for ExpandError { + fn from(it: tt::ExpansionError) -> Self { + ExpandError::ProcMacroError(it) + } +} + +pub use crate::syntax_bridge::{ + ast_to_token_tree, parse_to_token_tree, syntax_node_to_token_tree, token_tree_to_syntax_node, + TokenMap, +}; + +/// This struct contains AST for a single `macro_rules` definition. What might +/// be very confusing is that AST has almost exactly the same shape as +/// `tt::TokenTree`, but there's a crucial difference: in macro rules, `$ident` +/// and `$()*` have special meaning (see `Var` and `Repeat` data structures) +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct MacroRules { + rules: Vec, + /// Highest id of the token we have in TokenMap + shift: Shift, +} + +#[derive(Clone, Debug, PartialEq, Eq)] +struct Rule { + lhs: tt::Subtree, + rhs: tt::Subtree, +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +struct Shift(u32); + +impl Shift { + fn new(tt: &tt::Subtree) -> Shift { + // Note that TokenId is started from zero, + // We have to add 1 to prevent duplication. 
+ let value = max_id(tt).map_or(0, |it| it + 1); + return Shift(value); + + // Find the max token id inside a subtree + fn max_id(subtree: &tt::Subtree) -> Option { + subtree + .token_trees + .iter() + .filter_map(|tt| match tt { + tt::TokenTree::Subtree(subtree) => { + let tree_id = max_id(subtree); + match subtree.delimiter { + Some(it) if it.id != tt::TokenId::unspecified() => { + Some(tree_id.map_or(it.id.0, |t| t.max(it.id.0))) + } + _ => tree_id, + } + } + tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) + if ident.id != tt::TokenId::unspecified() => + { + Some(ident.id.0) + } + _ => None, + }) + .max() + } + } + + /// Shift given TokenTree token id + fn shift_all(self, tt: &mut tt::Subtree) { + for t in tt.token_trees.iter_mut() { + match t { + tt::TokenTree::Leaf(leaf) => match leaf { + tt::Leaf::Ident(ident) => ident.id = self.shift(ident.id), + tt::Leaf::Punct(punct) => punct.id = self.shift(punct.id), + tt::Leaf::Literal(lit) => lit.id = self.shift(lit.id), + }, + tt::TokenTree::Subtree(tt) => { + if let Some(it) = tt.delimiter.as_mut() { + it.id = self.shift(it.id); + }; + self.shift_all(tt) + } + } + } + } + + fn shift(self, id: tt::TokenId) -> tt::TokenId { + if id == tt::TokenId::unspecified() { + return id; + } + tt::TokenId(id.0 + self.0) + } + + fn unshift(self, id: tt::TokenId) -> Option { + id.0.checked_sub(self.0).map(tt::TokenId) + } +} + +#[derive(Debug, Eq, PartialEq)] +pub enum Origin { + Def, + Call, +} + +impl MacroRules { + pub fn parse(tt: &tt::Subtree) -> Result { + // Note: this parsing can be implemented using mbe machinery itself, by + // matching against `$($lhs:tt => $rhs:tt);*` pattern, but implementing + // manually seems easier. 
+ let mut src = TtIter::new(tt); + let mut rules = Vec::new(); + while src.len() > 0 { + let rule = Rule::parse(&mut src)?; + rules.push(rule); + if let Err(()) = src.expect_char(';') { + if src.len() > 0 { + return Err(ParseError::Expected("expected `:`".to_string())); + } + break; + } + } + + for rule in rules.iter() { + validate(&rule.lhs)?; + } + + Ok(MacroRules { rules, shift: Shift::new(tt) }) + } + + pub fn expand(&self, tt: &tt::Subtree) -> ExpandResult { + // apply shift + let mut tt = tt.clone(); + self.shift.shift_all(&mut tt); + mbe_expander::expand(self, &tt) + } + + pub fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId { + self.shift.shift(id) + } + + pub fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, Origin) { + match self.shift.unshift(id) { + Some(id) => (id, Origin::Call), + None => (id, Origin::Def), + } + } +} + +impl Rule { + fn parse(src: &mut TtIter) -> Result { + let mut lhs = src + .expect_subtree() + .map_err(|()| ParseError::Expected("expected subtree".to_string()))? + .clone(); + lhs.delimiter = None; + src.expect_char('=').map_err(|()| ParseError::Expected("expected `=`".to_string()))?; + src.expect_char('>').map_err(|()| ParseError::Expected("expected `>`".to_string()))?; + let mut rhs = src + .expect_subtree() + .map_err(|()| ParseError::Expected("expected subtree".to_string()))? + .clone(); + rhs.delimiter = None; + Ok(crate::Rule { lhs, rhs }) + } +} + +fn to_parse_error(e: ExpandError) -> ParseError { + let msg = match e { + ExpandError::InvalidRepeat => "invalid repeat".to_string(), + _ => "invalid macro definition".to_string(), + }; + ParseError::Expected(msg) +} + +fn validate(pattern: &tt::Subtree) -> Result<(), ParseError> { + for op in parse_pattern(pattern) { + let op = op.map_err(to_parse_error)?; + + match op { + Op::TokenTree(tt::TokenTree::Subtree(subtree)) => validate(subtree)?, + Op::Repeat { subtree, separator, .. 
} => { + // Checks that no repetition which could match an empty token + // https://github.com/rust-lang/rust/blob/a58b1ed44f5e06976de2bdc4d7dc81c36a96934f/src/librustc_expand/mbe/macro_rules.rs#L558 + + if separator.is_none() { + if parse_pattern(subtree).all(|child_op| { + match child_op.map_err(to_parse_error) { + Ok(Op::Var { kind, .. }) => { + // vis is optional + if kind.map_or(false, |it| it == "vis") { + return true; + } + } + Ok(Op::Repeat { kind, .. }) => { + return matches!( + kind, + parser::RepeatKind::ZeroOrMore | parser::RepeatKind::ZeroOrOne + ) + } + _ => {} + } + false + }) { + return Err(ParseError::RepetitionEmtpyTokenTree); + } + } + validate(subtree)? + } + _ => (), + } + } + Ok(()) +} + +#[derive(Debug)] +pub struct ExpandResult(pub T, pub Option); + +impl ExpandResult { + pub fn ok(t: T) -> ExpandResult { + ExpandResult(t, None) + } + + pub fn only_err(err: ExpandError) -> ExpandResult + where + T: Default, + { + ExpandResult(Default::default(), Some(err)) + } + + pub fn map(self, f: impl FnOnce(T) -> U) -> ExpandResult { + ExpandResult(f(self.0), self.1) + } + + pub fn result(self) -> Result { + self.1.map(Err).unwrap_or(Ok(self.0)) + } +} + +impl From> for ExpandResult { + fn from(result: Result) -> ExpandResult { + result + .map_or_else(|e| ExpandResult(Default::default(), Some(e)), |it| ExpandResult(it, None)) + } +} diff --git a/crates/mbe/src/mbe_expander.rs b/crates/mbe/src/mbe_expander.rs new file mode 100644 index 0000000000..1ad8b9f8a5 --- /dev/null +++ b/crates/mbe/src/mbe_expander.rs @@ -0,0 +1,180 @@ +//! This module takes a (parsed) definition of `macro_rules` invocation, a +//! `tt::TokenTree` representing an argument of macro invocation, and produces a +//! `tt::TokenTree` for the result of the expansion. 
+ +mod matcher; +mod transcriber; + +use rustc_hash::FxHashMap; +use syntax::SmolStr; + +use crate::{ExpandError, ExpandResult}; + +pub(crate) fn expand(rules: &crate::MacroRules, input: &tt::Subtree) -> ExpandResult { + expand_rules(&rules.rules, input) +} + +fn expand_rules(rules: &[crate::Rule], input: &tt::Subtree) -> ExpandResult { + let mut match_: Option<(matcher::Match, &crate::Rule)> = None; + for rule in rules { + let new_match = match matcher::match_(&rule.lhs, input) { + Ok(m) => m, + Err(_e) => { + // error in pattern parsing + continue; + } + }; + if new_match.err.is_none() { + // If we find a rule that applies without errors, we're done. + // Unconditionally returning the transcription here makes the + // `test_repeat_bad_var` test fail. + let ExpandResult(res, transcribe_err) = + transcriber::transcribe(&rule.rhs, &new_match.bindings); + if transcribe_err.is_none() { + return ExpandResult::ok(res); + } + } + // Use the rule if we matched more tokens, or had fewer errors + if let Some((prev_match, _)) = &match_ { + if (new_match.unmatched_tts, new_match.err_count) + < (prev_match.unmatched_tts, prev_match.err_count) + { + match_ = Some((new_match, rule)); + } + } else { + match_ = Some((new_match, rule)); + } + } + if let Some((match_, rule)) = match_ { + // if we got here, there was no match without errors + let ExpandResult(result, transcribe_err) = + transcriber::transcribe(&rule.rhs, &match_.bindings); + ExpandResult(result, match_.err.or(transcribe_err)) + } else { + ExpandResult(tt::Subtree::default(), Some(ExpandError::NoMatchingRule)) + } +} + +/// The actual algorithm for expansion is not too hard, but is pretty tricky. +/// `Bindings` structure is the key to understanding what we are doing here. +/// +/// On the high level, it stores mapping from meta variables to the bits of +/// syntax it should be substituted with. For example, if `$e:expr` is matched +/// with `1 + 1` by macro_rules, the `Binding` will store `$e -> 1 + 1`. 
+/// +/// The tricky bit is dealing with repetitions (`$()*`). Consider this example: +/// +/// ```not_rust +/// macro_rules! foo { +/// ($($ i:ident $($ e:expr),*);*) => { +/// $(fn $ i() { $($ e);*; })* +/// } +/// } +/// foo! { foo 1,2,3; bar 4,5,6 } +/// ``` +/// +/// Here, the `$i` meta variable is matched first with `foo` and then with +/// `bar`, and `$e` is matched in turn with `1`, `2`, `3`, `4`, `5`, `6`. +/// +/// To represent such "multi-mappings", we use a recursive structures: we map +/// variables not to values, but to *lists* of values or other lists (that is, +/// to the trees). +/// +/// For the above example, the bindings would store +/// +/// ```not_rust +/// i -> [foo, bar] +/// e -> [[1, 2, 3], [4, 5, 6]] +/// ``` +/// +/// We construct `Bindings` in the `match_lhs`. The interesting case is +/// `TokenTree::Repeat`, where we use `push_nested` to create the desired +/// nesting structure. +/// +/// The other side of the puzzle is `expand_subtree`, where we use the bindings +/// to substitute meta variables in the output template. When expanding, we +/// maintain a `nesting` stack of indices which tells us which occurrence from +/// the `Bindings` we should take. We push to the stack when we enter a +/// repetition. +/// +/// In other words, `Bindings` is a *multi* mapping from `SmolStr` to +/// `tt::TokenTree`, where the index to select a particular `TokenTree` among +/// many is not a plain `usize`, but an `&[usize]`. +#[derive(Debug, Default)] +struct Bindings { + inner: FxHashMap, +} + +#[derive(Debug)] +enum Binding { + Fragment(Fragment), + Nested(Vec), + Empty, +} + +#[derive(Debug, Clone)] +enum Fragment { + /// token fragments are just copy-pasted into the output + Tokens(tt::TokenTree), + /// Ast fragments are inserted with fake delimiters, so as to make things + /// like `$i * 2` where `$i = 1 + 1` work as expectd. 
+ Ast(tt::TokenTree), +} + +#[cfg(test)] +mod tests { + use syntax::{ast, AstNode}; + + use super::*; + use crate::ast_to_token_tree; + + #[test] + fn test_expand_rule() { + assert_err( + "($($i:ident);*) => ($i)", + "foo!{a}", + ExpandError::BindingError(String::from( + "expected simple binding, found nested binding `i`", + )), + ); + + // FIXME: + // Add an err test case for ($($i:ident)) => ($()) + } + + fn assert_err(macro_body: &str, invocation: &str, err: ExpandError) { + assert_eq!(expand_first(&create_rules(&format_macro(macro_body)), invocation).1, Some(err)); + } + + fn format_macro(macro_body: &str) -> String { + format!( + " + macro_rules! foo {{ + {} + }} +", + macro_body + ) + } + + fn create_rules(macro_definition: &str) -> crate::MacroRules { + let source_file = ast::SourceFile::parse(macro_definition).ok().unwrap(); + let macro_definition = + source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); + + let (definition_tt, _) = + ast_to_token_tree(¯o_definition.token_tree().unwrap()).unwrap(); + crate::MacroRules::parse(&definition_tt).unwrap() + } + + fn expand_first(rules: &crate::MacroRules, invocation: &str) -> ExpandResult { + let source_file = ast::SourceFile::parse(invocation).ok().unwrap(); + let macro_invocation = + source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); + + let (invocation_tt, _) = + ast_to_token_tree(¯o_invocation.token_tree().unwrap()).unwrap(); + + expand_rules(&rules.rules, &invocation_tt) + } +} diff --git a/crates/mbe/src/mbe_expander/matcher.rs b/crates/mbe/src/mbe_expander/matcher.rs new file mode 100644 index 0000000000..b698b98326 --- /dev/null +++ b/crates/mbe/src/mbe_expander/matcher.rs @@ -0,0 +1,477 @@ +//! 
FIXME: write short doc here + +use crate::{ + mbe_expander::{Binding, Bindings, Fragment}, + parser::{parse_pattern, Op, RepeatKind, Separator}, + subtree_source::SubtreeTokenSource, + tt_iter::TtIter, + ExpandError, +}; + +use super::ExpandResult; +use parser::{FragmentKind::*, TreeSink}; +use syntax::{SmolStr, SyntaxKind}; +use tt::buffer::{Cursor, TokenBuffer}; + +impl Bindings { + fn push_optional(&mut self, name: &SmolStr) { + // FIXME: Do we have a better way to represent an empty token ? + // Insert an empty subtree for empty token + let tt = tt::Subtree::default().into(); + self.inner.insert(name.clone(), Binding::Fragment(Fragment::Tokens(tt))); + } + + fn push_empty(&mut self, name: &SmolStr) { + self.inner.insert(name.clone(), Binding::Empty); + } + + fn push_nested(&mut self, idx: usize, nested: Bindings) -> Result<(), ExpandError> { + for (key, value) in nested.inner { + if !self.inner.contains_key(&key) { + self.inner.insert(key.clone(), Binding::Nested(Vec::new())); + } + match self.inner.get_mut(&key) { + Some(Binding::Nested(it)) => { + // insert empty nested bindings before this one + while it.len() < idx { + it.push(Binding::Nested(vec![])); + } + it.push(value); + } + _ => { + return Err(ExpandError::BindingError(format!( + "could not find binding `{}`", + key + ))); + } + } + } + Ok(()) + } +} + +macro_rules! err { + () => { + ExpandError::BindingError(format!("")) + }; + ($($tt:tt)*) => { + ExpandError::BindingError(format!($($tt)*)) + }; +} + +#[derive(Debug, Default)] +pub(super) struct Match { + pub bindings: Bindings, + /// We currently just keep the first error and count the rest to compare matches. + pub err: Option, + pub err_count: usize, + /// How many top-level token trees were left to match. 
+ pub unmatched_tts: usize, +} + +impl Match { + pub fn add_err(&mut self, err: ExpandError) { + let prev_err = self.err.take(); + self.err = prev_err.or(Some(err)); + self.err_count += 1; + } +} + +// General note: These functions have two channels to return errors, a `Result` +// return value and the `&mut Match`. The returned Result is for pattern parsing +// errors; if a branch of the macro definition doesn't parse, it doesn't make +// sense to try using it. Matching errors are added to the `Match`. It might +// make sense to make pattern parsing a separate step? + +pub(super) fn match_(pattern: &tt::Subtree, src: &tt::Subtree) -> Result { + assert!(pattern.delimiter == None); + + let mut res = Match::default(); + let mut src = TtIter::new(src); + + match_subtree(&mut res, pattern, &mut src)?; + + if src.len() > 0 { + res.unmatched_tts += src.len(); + res.add_err(err!("leftover tokens")); + } + + Ok(res) +} + +fn match_subtree( + res: &mut Match, + pattern: &tt::Subtree, + src: &mut TtIter, +) -> Result<(), ExpandError> { + for op in parse_pattern(pattern) { + match op? { + Op::TokenTree(tt::TokenTree::Leaf(lhs)) => { + let rhs = match src.expect_leaf() { + Ok(l) => l, + Err(()) => { + res.add_err(err!("expected leaf: `{}`", lhs)); + continue; + } + }; + match (lhs, rhs) { + ( + tt::Leaf::Punct(tt::Punct { char: lhs, .. }), + tt::Leaf::Punct(tt::Punct { char: rhs, .. }), + ) if lhs == rhs => (), + ( + tt::Leaf::Ident(tt::Ident { text: lhs, .. }), + tt::Leaf::Ident(tt::Ident { text: rhs, .. }), + ) if lhs == rhs => (), + ( + tt::Leaf::Literal(tt::Literal { text: lhs, .. }), + tt::Leaf::Literal(tt::Literal { text: rhs, .. 
}), + ) if lhs == rhs => (), + _ => { + res.add_err(ExpandError::UnexpectedToken); + } + } + } + Op::TokenTree(tt::TokenTree::Subtree(lhs)) => { + let rhs = match src.expect_subtree() { + Ok(s) => s, + Err(()) => { + res.add_err(err!("expected subtree")); + continue; + } + }; + if lhs.delimiter_kind() != rhs.delimiter_kind() { + res.add_err(err!("mismatched delimiter")); + continue; + } + let mut src = TtIter::new(rhs); + match_subtree(res, lhs, &mut src)?; + if src.len() > 0 { + res.add_err(err!("leftover tokens")); + } + } + Op::Var { name, kind } => { + let kind = match kind { + Some(k) => k, + None => { + res.add_err(ExpandError::UnexpectedToken); + continue; + } + }; + let ExpandResult(matched, match_err) = match_meta_var(kind.as_str(), src); + match matched { + Some(fragment) => { + res.bindings.inner.insert(name.clone(), Binding::Fragment(fragment)); + } + None if match_err.is_none() => res.bindings.push_optional(name), + _ => {} + } + if let Some(err) = match_err { + res.add_err(err); + } + } + Op::Repeat { subtree, kind, separator } => { + match_repeat(res, subtree, kind, separator, src)?; + } + } + } + Ok(()) +} + +impl<'a> TtIter<'a> { + fn eat_separator(&mut self, separator: &Separator) -> bool { + let mut fork = self.clone(); + let ok = match separator { + Separator::Ident(lhs) => match fork.expect_ident() { + Ok(rhs) => rhs.text == lhs.text, + _ => false, + }, + Separator::Literal(lhs) => match fork.expect_literal() { + Ok(rhs) => match rhs { + tt::Leaf::Literal(rhs) => rhs.text == lhs.text, + tt::Leaf::Ident(rhs) => rhs.text == lhs.text, + tt::Leaf::Punct(_) => false, + }, + _ => false, + }, + Separator::Puncts(lhss) => lhss.iter().all(|lhs| match fork.expect_punct() { + Ok(rhs) => rhs.char == lhs.char, + _ => false, + }), + }; + if ok { + *self = fork; + } + ok + } + + pub(crate) fn expect_tt(&mut self) -> Result { + match self.peek_n(0) { + Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) if punct.char == '\'' => { + return self.expect_lifetime(); 
+ } + _ => (), + } + + let tt = self.next().ok_or_else(|| ())?.clone(); + let punct = match tt { + tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if punct.spacing == tt::Spacing::Joint => { + punct + } + _ => return Ok(tt), + }; + + let (second, third) = match (self.peek_n(0), self.peek_n(1)) { + ( + Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p2))), + Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p3))), + ) if p2.spacing == tt::Spacing::Joint => (p2.char, Some(p3.char)), + (Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p2))), _) => (p2.char, None), + _ => return Ok(tt), + }; + + match (punct.char, second, third) { + ('.', '.', Some('.')) + | ('.', '.', Some('=')) + | ('<', '<', Some('=')) + | ('>', '>', Some('=')) => { + let tt2 = self.next().unwrap().clone(); + let tt3 = self.next().unwrap().clone(); + Ok(tt::Subtree { delimiter: None, token_trees: vec![tt, tt2, tt3] }.into()) + } + ('-', '=', None) + | ('-', '>', None) + | (':', ':', None) + | ('!', '=', None) + | ('.', '.', None) + | ('*', '=', None) + | ('/', '=', None) + | ('&', '&', None) + | ('&', '=', None) + | ('%', '=', None) + | ('^', '=', None) + | ('+', '=', None) + | ('<', '<', None) + | ('<', '=', None) + | ('=', '=', None) + | ('=', '>', None) + | ('>', '=', None) + | ('>', '>', None) + | ('|', '=', None) + | ('|', '|', None) => { + let tt2 = self.next().unwrap().clone(); + Ok(tt::Subtree { delimiter: None, token_trees: vec![tt, tt2] }.into()) + } + _ => Ok(tt), + } + } + + pub(crate) fn expect_lifetime(&mut self) -> Result { + let punct = self.expect_punct()?; + if punct.char != '\'' { + return Err(()); + } + let ident = self.expect_ident()?; + + Ok(tt::Subtree { + delimiter: None, + token_trees: vec![ + tt::Leaf::Punct(*punct).into(), + tt::Leaf::Ident(ident.clone()).into(), + ], + } + .into()) + } + + pub(crate) fn expect_fragment( + &mut self, + fragment_kind: parser::FragmentKind, + ) -> ExpandResult> { + pub(crate) struct OffsetTokenSink<'a> { + pub(crate) cursor: Cursor<'a>, + pub(crate) error: bool, + } + 
+ impl<'a> TreeSink for OffsetTokenSink<'a> { + fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) { + if kind == SyntaxKind::LIFETIME { + n_tokens = 2; + } + for _ in 0..n_tokens { + self.cursor = self.cursor.bump_subtree(); + } + } + fn start_node(&mut self, _kind: SyntaxKind) {} + fn finish_node(&mut self) {} + fn error(&mut self, _error: parser::ParseError) { + self.error = true; + } + } + + let buffer = TokenBuffer::new(&self.inner.as_slice()); + let mut src = SubtreeTokenSource::new(&buffer); + let mut sink = OffsetTokenSink { cursor: buffer.begin(), error: false }; + + parser::parse_fragment(&mut src, &mut sink, fragment_kind); + + let mut err = None; + if !sink.cursor.is_root() || sink.error { + err = Some(err!("expected {:?}", fragment_kind)); + } + + let mut curr = buffer.begin(); + let mut res = vec![]; + + if sink.cursor.is_root() { + while curr != sink.cursor { + if let Some(token) = curr.token_tree() { + res.push(token); + } + curr = curr.bump(); + } + } + self.inner = self.inner.as_slice()[res.len()..].iter(); + if res.len() == 0 && err.is_none() { + err = Some(err!("no tokens consumed")); + } + let res = match res.len() { + 1 => Some(res[0].clone()), + 0 => None, + _ => Some(tt::TokenTree::Subtree(tt::Subtree { + delimiter: None, + token_trees: res.into_iter().cloned().collect(), + })), + }; + ExpandResult(res, err) + } + + pub(crate) fn eat_vis(&mut self) -> Option { + let mut fork = self.clone(); + match fork.expect_fragment(Visibility) { + ExpandResult(tt, None) => { + *self = fork; + tt + } + ExpandResult(_, Some(_)) => None, + } + } +} + +pub(super) fn match_repeat( + res: &mut Match, + pattern: &tt::Subtree, + kind: RepeatKind, + separator: Option, + src: &mut TtIter, +) -> Result<(), ExpandError> { + // Dirty hack to make macro-expansion terminate. + // This should be replaced by a propper macro-by-example implementation + let mut limit = 65536; + let mut counter = 0; + + for i in 0.. 
{ + let mut fork = src.clone(); + + if let Some(separator) = &separator { + if i != 0 && !fork.eat_separator(separator) { + break; + } + } + + let mut nested = Match::default(); + match_subtree(&mut nested, pattern, &mut fork)?; + if nested.err.is_none() { + limit -= 1; + if limit == 0 { + log::warn!( + "match_lhs exceeded repeat pattern limit => {:#?}\n{:#?}\n{:#?}\n{:#?}", + pattern, + src, + kind, + separator + ); + break; + } + *src = fork; + + if let Err(err) = res.bindings.push_nested(counter, nested.bindings) { + res.add_err(err); + } + counter += 1; + if counter == 1 { + if let RepeatKind::ZeroOrOne = kind { + break; + } + } + } else { + break; + } + } + + match (kind, counter) { + (RepeatKind::OneOrMore, 0) => { + res.add_err(ExpandError::UnexpectedToken); + } + (_, 0) => { + // Collect all empty variables in subtrees + let mut vars = Vec::new(); + collect_vars(&mut vars, pattern)?; + for var in vars { + res.bindings.push_empty(&var) + } + } + _ => (), + } + Ok(()) +} + +fn match_meta_var(kind: &str, input: &mut TtIter) -> ExpandResult> { + let fragment = match kind { + "path" => Path, + "expr" => Expr, + "ty" => Type, + "pat" => Pattern, + "stmt" => Statement, + "block" => Block, + "meta" => MetaItem, + "item" => Item, + _ => { + let tt_result = match kind { + "ident" => input + .expect_ident() + .map(|ident| Some(tt::Leaf::from(ident.clone()).into())) + .map_err(|()| err!("expected ident")), + "tt" => input.expect_tt().map(Some).map_err(|()| err!()), + "lifetime" => input + .expect_lifetime() + .map(|tt| Some(tt)) + .map_err(|()| err!("expected lifetime")), + "literal" => input + .expect_literal() + .map(|literal| Some(tt::Leaf::from(literal.clone()).into())) + .map_err(|()| err!()), + // `vis` is optional + "vis" => match input.eat_vis() { + Some(vis) => Ok(Some(vis)), + None => Ok(None), + }, + _ => Err(ExpandError::UnexpectedToken), + }; + return tt_result.map(|it| it.map(Fragment::Tokens)).into(); + } + }; + let result = 
input.expect_fragment(fragment); + result.map(|tt| if kind == "expr" { tt.map(Fragment::Ast) } else { tt.map(Fragment::Tokens) }) +} + +fn collect_vars(buf: &mut Vec, pattern: &tt::Subtree) -> Result<(), ExpandError> { + for op in parse_pattern(pattern) { + match op? { + Op::Var { name, .. } => buf.push(name.clone()), + Op::TokenTree(tt::TokenTree::Leaf(_)) => (), + Op::TokenTree(tt::TokenTree::Subtree(subtree)) => collect_vars(buf, subtree)?, + Op::Repeat { subtree, .. } => collect_vars(buf, subtree)?, + } + } + Ok(()) +} diff --git a/crates/mbe/src/mbe_expander/transcriber.rs b/crates/mbe/src/mbe_expander/transcriber.rs new file mode 100644 index 0000000000..c9525c5bf2 --- /dev/null +++ b/crates/mbe/src/mbe_expander/transcriber.rs @@ -0,0 +1,254 @@ +//! Transcriber takes a template, like `fn $ident() {}`, a set of bindings like +//! `$ident => foo`, interpolates variables in the template, to get `fn foo() {}` + +use syntax::SmolStr; + +use super::ExpandResult; +use crate::{ + mbe_expander::{Binding, Bindings, Fragment}, + parser::{parse_template, Op, RepeatKind, Separator}, + ExpandError, +}; + +impl Bindings { + fn contains(&self, name: &str) -> bool { + self.inner.contains_key(name) + } + + fn get(&self, name: &str, nesting: &mut [NestingState]) -> Result<&Fragment, ExpandError> { + let mut b = self.inner.get(name).ok_or_else(|| { + ExpandError::BindingError(format!("could not find binding `{}`", name)) + })?; + for nesting_state in nesting.iter_mut() { + nesting_state.hit = true; + b = match b { + Binding::Fragment(_) => break, + Binding::Nested(bs) => bs.get(nesting_state.idx).ok_or_else(|| { + nesting_state.at_end = true; + ExpandError::BindingError(format!("could not find nested binding `{}`", name)) + })?, + Binding::Empty => { + nesting_state.at_end = true; + return Err(ExpandError::BindingError(format!( + "could not find empty binding `{}`", + name + ))); + } + }; + } + match b { + Binding::Fragment(it) => Ok(it), + Binding::Nested(_) => 
Err(ExpandError::BindingError(format!( + "expected simple binding, found nested binding `{}`", + name + ))), + Binding::Empty => Err(ExpandError::BindingError(format!( + "expected simple binding, found empty binding `{}`", + name + ))), + } + } +} + +pub(super) fn transcribe(template: &tt::Subtree, bindings: &Bindings) -> ExpandResult { + assert!(template.delimiter == None); + let mut ctx = ExpandCtx { bindings: &bindings, nesting: Vec::new() }; + let mut arena: Vec = Vec::new(); + expand_subtree(&mut ctx, template, &mut arena) +} + +#[derive(Debug)] +struct NestingState { + idx: usize, + /// `hit` is currently necessary to tell `expand_repeat` if it should stop + /// because there is no variable in use by the current repetition + hit: bool, + /// `at_end` is currently necessary to tell `expand_repeat` if it should stop + /// because there is no more value avaible for the current repetition + at_end: bool, +} + +#[derive(Debug)] +struct ExpandCtx<'a> { + bindings: &'a Bindings, + nesting: Vec, +} + +fn expand_subtree( + ctx: &mut ExpandCtx, + template: &tt::Subtree, + arena: &mut Vec, +) -> ExpandResult { + // remember how many elements are in the arena now - when returning, we want to drain exactly how many elements we added. 
This way, the recursive uses of the arena get their own "view" of the arena, but will reuse the allocation + let start_elements = arena.len(); + let mut err = None; + for op in parse_template(template) { + let op = match op { + Ok(op) => op, + Err(e) => { + err = Some(e); + break; + } + }; + match op { + Op::TokenTree(tt @ tt::TokenTree::Leaf(..)) => arena.push(tt.clone()), + Op::TokenTree(tt::TokenTree::Subtree(tt)) => { + let ExpandResult(tt, e) = expand_subtree(ctx, tt, arena); + err = err.or(e); + arena.push(tt.into()); + } + Op::Var { name, kind: _ } => { + let ExpandResult(fragment, e) = expand_var(ctx, name); + err = err.or(e); + push_fragment(arena, fragment); + } + Op::Repeat { subtree, kind, separator } => { + let ExpandResult(fragment, e) = expand_repeat(ctx, subtree, kind, separator, arena); + err = err.or(e); + push_fragment(arena, fragment) + } + } + } + // drain the elements added in this instance of expand_subtree + let tts = arena.drain(start_elements..arena.len()).collect(); + ExpandResult(tt::Subtree { delimiter: template.delimiter, token_trees: tts }, err) +} + +fn expand_var(ctx: &mut ExpandCtx, v: &SmolStr) -> ExpandResult { + if v == "crate" { + // We simply produce identifier `$crate` here. And it will be resolved when lowering ast to Path. + let tt = + tt::Leaf::from(tt::Ident { text: "$crate".into(), id: tt::TokenId::unspecified() }) + .into(); + ExpandResult::ok(Fragment::Tokens(tt)) + } else if !ctx.bindings.contains(v) { + // Note that it is possible to have a `$var` inside a macro which is not bound. + // For example: + // ``` + // macro_rules! foo { + // ($a:ident, $b:ident, $c:tt) => { + // macro_rules! 
bar { + // ($bi:ident) => { + // fn $bi() -> u8 {$c} + // } + // } + // } + // ``` + // We just treat it a normal tokens + let tt = tt::Subtree { + delimiter: None, + token_trees: vec![ + tt::Leaf::from(tt::Punct { + char: '$', + spacing: tt::Spacing::Alone, + id: tt::TokenId::unspecified(), + }) + .into(), + tt::Leaf::from(tt::Ident { text: v.clone(), id: tt::TokenId::unspecified() }) + .into(), + ], + } + .into(); + ExpandResult::ok(Fragment::Tokens(tt)) + } else { + ctx.bindings.get(&v, &mut ctx.nesting).map_or_else( + |e| ExpandResult(Fragment::Tokens(tt::TokenTree::empty()), Some(e)), + |b| ExpandResult::ok(b.clone()), + ) + } +} + +fn expand_repeat( + ctx: &mut ExpandCtx, + template: &tt::Subtree, + kind: RepeatKind, + separator: Option, + arena: &mut Vec, +) -> ExpandResult { + let mut buf: Vec = Vec::new(); + ctx.nesting.push(NestingState { idx: 0, at_end: false, hit: false }); + // Dirty hack to make macro-expansion terminate. + // This should be replaced by a proper macro-by-example implementation + let limit = 65536; + let mut has_seps = 0; + let mut counter = 0; + + loop { + let ExpandResult(mut t, e) = expand_subtree(ctx, template, arena); + let nesting_state = ctx.nesting.last_mut().unwrap(); + if nesting_state.at_end || !nesting_state.hit { + break; + } + nesting_state.idx += 1; + nesting_state.hit = false; + + counter += 1; + if counter == limit { + log::warn!( + "expand_tt excced in repeat pattern exceed limit => {:#?}\n{:#?}", + template, + ctx + ); + break; + } + + if e.is_some() { + continue; + } + + t.delimiter = None; + push_subtree(&mut buf, t); + + if let Some(ref sep) = separator { + match sep { + Separator::Ident(ident) => { + has_seps = 1; + buf.push(tt::Leaf::from(ident.clone()).into()); + } + Separator::Literal(lit) => { + has_seps = 1; + buf.push(tt::Leaf::from(lit.clone()).into()); + } + + Separator::Puncts(puncts) => { + has_seps = puncts.len(); + for punct in puncts { + buf.push(tt::Leaf::from(*punct).into()); + } + } + } + } + + if 
RepeatKind::ZeroOrOne == kind { + break; + } + } + + ctx.nesting.pop().unwrap(); + for _ in 0..has_seps { + buf.pop(); + } + + // Check if it is a single token subtree without any delimiter + // e.g {Delimiter:None> ['>'] /Delimiter:None>} + let tt = tt::Subtree { delimiter: None, token_trees: buf }.into(); + + if RepeatKind::OneOrMore == kind && counter == 0 { + return ExpandResult(Fragment::Tokens(tt), Some(ExpandError::UnexpectedToken)); + } + ExpandResult::ok(Fragment::Tokens(tt)) +} + +fn push_fragment(buf: &mut Vec, fragment: Fragment) { + match fragment { + Fragment::Tokens(tt::TokenTree::Subtree(tt)) => push_subtree(buf, tt), + Fragment::Tokens(tt) | Fragment::Ast(tt) => buf.push(tt), + } +} + +fn push_subtree(buf: &mut Vec, tt: tt::Subtree) { + match tt.delimiter { + None => buf.extend(tt.token_trees), + _ => buf.push(tt.into()), + } +} diff --git a/crates/mbe/src/parser.rs b/crates/mbe/src/parser.rs new file mode 100644 index 0000000000..6b46a1673c --- /dev/null +++ b/crates/mbe/src/parser.rs @@ -0,0 +1,184 @@ +//! Parser recognizes special macro syntax, `$var` and `$(repeat)*`, in token +//! trees. + +use smallvec::SmallVec; +use syntax::SmolStr; + +use crate::{tt_iter::TtIter, ExpandError}; + +#[derive(Debug)] +pub(crate) enum Op<'a> { + Var { name: &'a SmolStr, kind: Option<&'a SmolStr> }, + Repeat { subtree: &'a tt::Subtree, kind: RepeatKind, separator: Option }, + TokenTree(&'a tt::TokenTree), +} + +#[derive(Clone, Debug, PartialEq, Eq)] +pub(crate) enum RepeatKind { + ZeroOrMore, + OneOrMore, + ZeroOrOne, +} + +#[derive(Clone, Debug, Eq)] +pub(crate) enum Separator { + Literal(tt::Literal), + Ident(tt::Ident), + Puncts(SmallVec<[tt::Punct; 3]>), +} + +// Note that when we compare a Separator, we just care about its textual value. 
+impl PartialEq for Separator { + fn eq(&self, other: &Separator) -> bool { + use Separator::*; + + match (self, other) { + (Ident(ref a), Ident(ref b)) => a.text == b.text, + (Literal(ref a), Literal(ref b)) => a.text == b.text, + (Puncts(ref a), Puncts(ref b)) if a.len() == b.len() => { + let a_iter = a.iter().map(|a| a.char); + let b_iter = b.iter().map(|b| b.char); + a_iter.eq(b_iter) + } + _ => false, + } + } +} + +pub(crate) fn parse_template( + template: &tt::Subtree, +) -> impl Iterator, ExpandError>> { + parse_inner(template, Mode::Template) +} + +pub(crate) fn parse_pattern( + pattern: &tt::Subtree, +) -> impl Iterator, ExpandError>> { + parse_inner(pattern, Mode::Pattern) +} + +#[derive(Clone, Copy)] +enum Mode { + Pattern, + Template, +} + +fn parse_inner(src: &tt::Subtree, mode: Mode) -> impl Iterator, ExpandError>> { + let mut src = TtIter::new(src); + std::iter::from_fn(move || { + let first = src.next()?; + Some(next_op(first, &mut src, mode)) + }) +} + +macro_rules! err { + ($($tt:tt)*) => { + ExpandError::UnexpectedToken + }; +} + +macro_rules! bail { + ($($tt:tt)*) => { + return Err(err!($($tt)*)) + }; +} + +fn next_op<'a>( + first: &'a tt::TokenTree, + src: &mut TtIter<'a>, + mode: Mode, +) -> Result, ExpandError> { + let res = match first { + tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '$', .. })) => { + // Note that the '$' itself is a valid token inside macro_rules. + let second = match src.next() { + None => return Ok(Op::TokenTree(first)), + Some(it) => it, + }; + match second { + tt::TokenTree::Subtree(subtree) => { + let (separator, kind) = parse_repeat(src)?; + Op::Repeat { subtree, separator, kind } + } + tt::TokenTree::Leaf(leaf) => match leaf { + tt::Leaf::Punct(..) 
=> return Err(ExpandError::UnexpectedToken), + tt::Leaf::Ident(ident) => { + let name = &ident.text; + let kind = eat_fragment_kind(src, mode)?; + Op::Var { name, kind } + } + tt::Leaf::Literal(lit) => { + if is_boolean_literal(lit) { + let name = &lit.text; + let kind = eat_fragment_kind(src, mode)?; + Op::Var { name, kind } + } else { + bail!("bad var 2"); + } + } + }, + } + } + tt => Op::TokenTree(tt), + }; + Ok(res) +} + +fn eat_fragment_kind<'a>( + src: &mut TtIter<'a>, + mode: Mode, +) -> Result, ExpandError> { + if let Mode::Pattern = mode { + src.expect_char(':').map_err(|()| err!("bad fragment specifier 1"))?; + let ident = src.expect_ident().map_err(|()| err!("bad fragment specifier 1"))?; + return Ok(Some(&ident.text)); + }; + Ok(None) +} + +fn is_boolean_literal(lit: &tt::Literal) -> bool { + matches!(lit.text.as_str(), "true" | "false") +} + +fn parse_repeat(src: &mut TtIter) -> Result<(Option, RepeatKind), ExpandError> { + let mut separator = Separator::Puncts(SmallVec::new()); + for tt in src { + let tt = match tt { + tt::TokenTree::Leaf(leaf) => leaf, + tt::TokenTree::Subtree(_) => return Err(ExpandError::InvalidRepeat), + }; + let has_sep = match &separator { + Separator::Puncts(puncts) => !puncts.is_empty(), + _ => true, + }; + match tt { + tt::Leaf::Ident(_) | tt::Leaf::Literal(_) if has_sep => { + return Err(ExpandError::InvalidRepeat) + } + tt::Leaf::Ident(ident) => separator = Separator::Ident(ident.clone()), + tt::Leaf::Literal(lit) => separator = Separator::Literal(lit.clone()), + tt::Leaf::Punct(punct) => { + let repeat_kind = match punct.char { + '*' => RepeatKind::ZeroOrMore, + '+' => RepeatKind::OneOrMore, + '?' 
=> RepeatKind::ZeroOrOne, + _ => { + match &mut separator { + Separator::Puncts(puncts) => { + if puncts.len() == 3 { + return Err(ExpandError::InvalidRepeat); + } + puncts.push(punct.clone()) + } + _ => return Err(ExpandError::InvalidRepeat), + } + continue; + } + }; + let separator = if has_sep { Some(separator) } else { None }; + return Ok((separator, repeat_kind)); + } + } + } + Err(ExpandError::InvalidRepeat) +} diff --git a/crates/mbe/src/subtree_source.rs b/crates/mbe/src/subtree_source.rs new file mode 100644 index 0000000000..41461b3150 --- /dev/null +++ b/crates/mbe/src/subtree_source.rs @@ -0,0 +1,197 @@ +//! FIXME: write short doc here + +use parser::{Token, TokenSource}; +use std::cell::{Cell, Ref, RefCell}; +use syntax::{lex_single_syntax_kind, SmolStr, SyntaxKind, SyntaxKind::*, T}; +use tt::buffer::{Cursor, TokenBuffer}; + +#[derive(Debug, Clone, Eq, PartialEq)] +struct TtToken { + pub kind: SyntaxKind, + pub is_joint_to_next: bool, + pub text: SmolStr, +} + +pub(crate) struct SubtreeTokenSource<'a> { + cached_cursor: Cell>, + cached: RefCell>>, + curr: (Token, usize), +} + +impl<'a> SubtreeTokenSource<'a> { + // Helper function used in test + #[cfg(test)] + pub fn text(&self) -> SmolStr { + match *self.get(self.curr.1) { + Some(ref tt) => tt.text.clone(), + _ => SmolStr::new(""), + } + } +} + +impl<'a> SubtreeTokenSource<'a> { + pub fn new(buffer: &'a TokenBuffer) -> SubtreeTokenSource<'a> { + let cursor = buffer.begin(); + + let mut res = SubtreeTokenSource { + curr: (Token { kind: EOF, is_jointed_to_next: false }, 0), + cached_cursor: Cell::new(cursor), + cached: RefCell::new(Vec::with_capacity(10)), + }; + res.curr = (res.mk_token(0), 0); + res + } + + fn mk_token(&self, pos: usize) -> Token { + match *self.get(pos) { + Some(ref tt) => Token { kind: tt.kind, is_jointed_to_next: tt.is_joint_to_next }, + None => Token { kind: EOF, is_jointed_to_next: false }, + } + } + + fn get(&self, pos: usize) -> Ref> { + fn is_lifetime(c: Cursor) -> 
Option<(Cursor, SmolStr)> { + let tkn = c.token_tree(); + + if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = tkn { + if punct.char == '\'' { + let next = c.bump(); + if let Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) = next.token_tree() { + let res_cursor = next.bump(); + let text = SmolStr::new("'".to_string() + &ident.to_string()); + + return Some((res_cursor, text)); + } else { + panic!("Next token must be ident : {:#?}", next.token_tree()); + } + } + } + + None + } + + if pos < self.cached.borrow().len() { + return Ref::map(self.cached.borrow(), |c| &c[pos]); + } + + { + let mut cached = self.cached.borrow_mut(); + while pos >= cached.len() { + let cursor = self.cached_cursor.get(); + if cursor.eof() { + cached.push(None); + continue; + } + + if let Some((curr, text)) = is_lifetime(cursor) { + cached.push(Some(TtToken { kind: LIFETIME, is_joint_to_next: false, text })); + self.cached_cursor.set(curr); + continue; + } + + match cursor.token_tree() { + Some(tt::TokenTree::Leaf(leaf)) => { + cached.push(Some(convert_leaf(&leaf))); + self.cached_cursor.set(cursor.bump()); + } + Some(tt::TokenTree::Subtree(subtree)) => { + self.cached_cursor.set(cursor.subtree().unwrap()); + cached.push(Some(convert_delim(subtree.delimiter_kind(), false))); + } + None => { + if let Some(subtree) = cursor.end() { + cached.push(Some(convert_delim(subtree.delimiter_kind(), true))); + self.cached_cursor.set(cursor.bump()); + } + } + } + } + } + + Ref::map(self.cached.borrow(), |c| &c[pos]) + } +} + +impl<'a> TokenSource for SubtreeTokenSource<'a> { + fn current(&self) -> Token { + self.curr.0 + } + + /// Lookahead n token + fn lookahead_nth(&self, n: usize) -> Token { + self.mk_token(self.curr.1 + n) + } + + /// bump cursor to next token + fn bump(&mut self) { + if self.current().kind == EOF { + return; + } + + self.curr = (self.mk_token(self.curr.1 + 1), self.curr.1 + 1); + } + + /// Is the current token a specified keyword? 
+ fn is_keyword(&self, kw: &str) -> bool { + match *self.get(self.curr.1) { + Some(ref t) => t.text == *kw, + _ => false, + } + } +} + +fn convert_delim(d: Option, closing: bool) -> TtToken { + let (kinds, texts) = match d { + Some(tt::DelimiterKind::Parenthesis) => ([T!['('], T![')']], "()"), + Some(tt::DelimiterKind::Brace) => ([T!['{'], T!['}']], "{}"), + Some(tt::DelimiterKind::Bracket) => ([T!['['], T![']']], "[]"), + None => ([L_DOLLAR, R_DOLLAR], ""), + }; + + let idx = closing as usize; + let kind = kinds[idx]; + let text = if !texts.is_empty() { &texts[idx..texts.len() - (1 - idx)] } else { "" }; + TtToken { kind, is_joint_to_next: false, text: SmolStr::new(text) } +} + +fn convert_literal(l: &tt::Literal) -> TtToken { + let kind = lex_single_syntax_kind(&l.text) + .map(|(kind, _error)| kind) + .filter(|kind| kind.is_literal()) + .unwrap_or_else(|| panic!("Fail to convert given literal {:#?}", &l)); + + TtToken { kind, is_joint_to_next: false, text: l.text.clone() } +} + +fn convert_ident(ident: &tt::Ident) -> TtToken { + let kind = match ident.text.as_ref() { + "true" => T![true], + "false" => T![false], + i if i.starts_with('\'') => LIFETIME, + _ => SyntaxKind::from_keyword(ident.text.as_str()).unwrap_or(IDENT), + }; + + TtToken { kind, is_joint_to_next: false, text: ident.text.clone() } +} + +fn convert_punct(p: tt::Punct) -> TtToken { + let kind = match SyntaxKind::from_char(p.char) { + None => panic!("{:#?} is not a valid punct", p), + Some(kind) => kind, + }; + + let text = { + let mut buf = [0u8; 4]; + let s: &str = p.char.encode_utf8(&mut buf); + SmolStr::new(s) + }; + TtToken { kind, is_joint_to_next: p.spacing == tt::Spacing::Joint, text } +} + +fn convert_leaf(leaf: &tt::Leaf) -> TtToken { + match leaf { + tt::Leaf::Literal(l) => convert_literal(l), + tt::Leaf::Ident(ident) => convert_ident(ident), + tt::Leaf::Punct(punct) => convert_punct(*punct), + } +} diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs new file 
mode 100644 index 0000000000..a8ad917fb8 --- /dev/null +++ b/crates/mbe/src/syntax_bridge.rs @@ -0,0 +1,832 @@ +//! FIXME: write short doc here + +use parser::{FragmentKind, ParseError, TreeSink}; +use rustc_hash::FxHashMap; +use syntax::{ + ast::{self, make::tokens::doc_comment}, + tokenize, AstToken, Parse, SmolStr, SyntaxKind, + SyntaxKind::*, + SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, Token as RawToken, T, +}; +use tt::buffer::{Cursor, TokenBuffer}; + +use crate::subtree_source::SubtreeTokenSource; +use crate::ExpandError; + +#[derive(Debug, PartialEq, Eq, Clone, Copy)] +pub enum TokenTextRange { + Token(TextRange), + Delimiter(TextRange, TextRange), +} + +impl TokenTextRange { + pub fn by_kind(self, kind: SyntaxKind) -> Option { + match self { + TokenTextRange::Token(it) => Some(it), + TokenTextRange::Delimiter(open, close) => match kind { + T!['{'] | T!['('] | T!['['] => Some(open), + T!['}'] | T![')'] | T![']'] => Some(close), + _ => None, + }, + } + } +} + +/// Maps `tt::TokenId` to the relative range of the original token. +#[derive(Debug, PartialEq, Eq, Clone, Default)] +pub struct TokenMap { + /// Maps `tt::TokenId` to the *relative* source range. + entries: Vec<(tt::TokenId, TokenTextRange)>, +} + +/// Convert the syntax tree (what user has written) to a `TokenTree` (what macro +/// will consume). +pub fn ast_to_token_tree(ast: &impl ast::AstNode) -> Option<(tt::Subtree, TokenMap)> { + syntax_node_to_token_tree(ast.syntax()) +} + +/// Convert the syntax node to a `TokenTree` (what macro +/// will consume). 
+pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> Option<(tt::Subtree, TokenMap)> { + let global_offset = node.text_range().start(); + let mut c = Convertor::new(node, global_offset); + let subtree = c.go()?; + Some((subtree, c.id_alloc.map)) +} + +// The following items are what `rustc` macro can be parsed into : +// link: https://github.com/rust-lang/rust/blob/9ebf47851a357faa4cd97f4b1dc7835f6376e639/src/libsyntax/ext/expand.rs#L141 +// * Expr(P) -> token_tree_to_expr +// * Pat(P) -> token_tree_to_pat +// * Ty(P) -> token_tree_to_ty +// * Stmts(SmallVec<[ast::Stmt; 1]>) -> token_tree_to_stmts +// * Items(SmallVec<[P; 1]>) -> token_tree_to_items +// +// * TraitItems(SmallVec<[ast::TraitItem; 1]>) +// * AssocItems(SmallVec<[ast::AssocItem; 1]>) +// * ForeignItems(SmallVec<[ast::ForeignItem; 1]> + +pub fn token_tree_to_syntax_node( + tt: &tt::Subtree, + fragment_kind: FragmentKind, +) -> Result<(Parse, TokenMap), ExpandError> { + let tmp; + let tokens = match tt { + tt::Subtree { delimiter: None, token_trees } => token_trees.as_slice(), + _ => { + tmp = [tt.clone().into()]; + &tmp[..] 
+ } + }; + let buffer = TokenBuffer::new(&tokens); + let mut token_source = SubtreeTokenSource::new(&buffer); + let mut tree_sink = TtTreeSink::new(buffer.begin()); + parser::parse_fragment(&mut token_source, &mut tree_sink, fragment_kind); + if tree_sink.roots.len() != 1 { + return Err(ExpandError::ConversionError); + } + //FIXME: would be cool to report errors + let (parse, range_map) = tree_sink.finish(); + Ok((parse, range_map)) +} + +/// Convert a string to a `TokenTree` +pub fn parse_to_token_tree(text: &str) -> Option<(tt::Subtree, TokenMap)> { + let (tokens, errors) = tokenize(text); + if !errors.is_empty() { + return None; + } + + let mut conv = RawConvertor { + text, + offset: TextSize::default(), + inner: tokens.iter(), + id_alloc: TokenIdAlloc { + map: Default::default(), + global_offset: TextSize::default(), + next_id: 0, + }, + }; + + let subtree = conv.go()?; + Some((subtree, conv.id_alloc.map)) +} + +impl TokenMap { + pub fn token_by_range(&self, relative_range: TextRange) -> Option { + let &(token_id, _) = self.entries.iter().find(|(_, range)| match range { + TokenTextRange::Token(it) => *it == relative_range, + TokenTextRange::Delimiter(open, close) => { + *open == relative_range || *close == relative_range + } + })?; + Some(token_id) + } + + pub fn range_by_token(&self, token_id: tt::TokenId) -> Option { + let &(_, range) = self.entries.iter().find(|(tid, _)| *tid == token_id)?; + Some(range) + } + + fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) { + self.entries.push((token_id, TokenTextRange::Token(relative_range))); + } + + fn insert_delim( + &mut self, + token_id: tt::TokenId, + open_relative_range: TextRange, + close_relative_range: TextRange, + ) -> usize { + let res = self.entries.len(); + self.entries + .push((token_id, TokenTextRange::Delimiter(open_relative_range, close_relative_range))); + res + } + + fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) { + let (_, token_text_range) = 
&mut self.entries[idx]; + if let TokenTextRange::Delimiter(dim, _) = token_text_range { + *token_text_range = TokenTextRange::Delimiter(*dim, close_relative_range); + } + } + + fn remove_delim(&mut self, idx: usize) { + // FIXME: This could be accidently quadratic + self.entries.remove(idx); + } +} + +/// Returns the textual content of a doc comment block as a quoted string +/// That is, strips leading `///` (or `/**`, etc) +/// and strips the ending `*/` +/// And then quote the string, which is needed to convert to `tt::Literal` +fn doc_comment_text(comment: &ast::Comment) -> SmolStr { + let prefix_len = comment.prefix().len(); + let mut text = &comment.text()[prefix_len..]; + + // Remove ending "*/" + if comment.kind().shape == ast::CommentShape::Block { + text = &text[0..text.len() - 2]; + } + + // Quote the string + // Note that `tt::Literal` expect an escaped string + let text = format!("{:?}", text.escape_default().to_string()); + text.into() +} + +fn convert_doc_comment(token: &syntax::SyntaxToken) -> Option> { + let comment = ast::Comment::cast(token.clone())?; + let doc = comment.kind().doc?; + + // Make `doc="\" Comments\"" + let mut meta_tkns = Vec::new(); + meta_tkns.push(mk_ident("doc")); + meta_tkns.push(mk_punct('=')); + meta_tkns.push(mk_doc_literal(&comment)); + + // Make `#![]` + let mut token_trees = Vec::new(); + token_trees.push(mk_punct('#')); + if let ast::CommentPlacement::Inner = doc { + token_trees.push(mk_punct('!')); + } + token_trees.push(tt::TokenTree::from(tt::Subtree { + delimiter: Some(tt::Delimiter { + kind: tt::DelimiterKind::Bracket, + id: tt::TokenId::unspecified(), + }), + token_trees: meta_tkns, + })); + + return Some(token_trees); + + // Helper functions + fn mk_ident(s: &str) -> tt::TokenTree { + tt::TokenTree::from(tt::Leaf::from(tt::Ident { + text: s.into(), + id: tt::TokenId::unspecified(), + })) + } + + fn mk_punct(c: char) -> tt::TokenTree { + tt::TokenTree::from(tt::Leaf::from(tt::Punct { + char: c, + spacing: 
tt::Spacing::Alone, + id: tt::TokenId::unspecified(), + })) + } + + fn mk_doc_literal(comment: &ast::Comment) -> tt::TokenTree { + let lit = tt::Literal { text: doc_comment_text(comment), id: tt::TokenId::unspecified() }; + + tt::TokenTree::from(tt::Leaf::from(lit)) + } +} + +struct TokenIdAlloc { + map: TokenMap, + global_offset: TextSize, + next_id: u32, +} + +impl TokenIdAlloc { + fn alloc(&mut self, absolute_range: TextRange) -> tt::TokenId { + let relative_range = absolute_range - self.global_offset; + let token_id = tt::TokenId(self.next_id); + self.next_id += 1; + self.map.insert(token_id, relative_range); + token_id + } + + fn open_delim(&mut self, open_abs_range: TextRange) -> (tt::TokenId, usize) { + let token_id = tt::TokenId(self.next_id); + self.next_id += 1; + let idx = self.map.insert_delim( + token_id, + open_abs_range - self.global_offset, + open_abs_range - self.global_offset, + ); + (token_id, idx) + } + + fn close_delim(&mut self, idx: usize, close_abs_range: Option) { + match close_abs_range { + None => { + self.map.remove_delim(idx); + } + Some(close) => { + self.map.update_close_delim(idx, close - self.global_offset); + } + } + } +} + +/// A Raw Token (straightly from lexer) convertor +struct RawConvertor<'a> { + text: &'a str, + offset: TextSize, + id_alloc: TokenIdAlloc, + inner: std::slice::Iter<'a, RawToken>, +} + +trait SrcToken: std::fmt::Debug { + fn kind(&self) -> SyntaxKind; + + fn to_char(&self) -> Option; + + fn to_text(&self) -> SmolStr; +} + +trait TokenConvertor { + type Token: SrcToken; + + fn go(&mut self) -> Option { + let mut subtree = tt::Subtree::default(); + subtree.delimiter = None; + while self.peek().is_some() { + self.collect_leaf(&mut subtree.token_trees); + } + if subtree.token_trees.is_empty() { + return None; + } + if subtree.token_trees.len() == 1 { + if let tt::TokenTree::Subtree(first) = &subtree.token_trees[0] { + return Some(first.clone()); + } + } + Some(subtree) + } + + fn collect_leaf(&mut self, result: 
&mut Vec) { + let (token, range) = match self.bump() { + None => return, + Some(it) => it, + }; + + let k: SyntaxKind = token.kind(); + if k == COMMENT { + if let Some(tokens) = self.convert_doc_comment(&token) { + result.extend(tokens); + } + return; + } + + result.push(if k.is_punct() { + assert_eq!(range.len(), TextSize::of('.')); + let delim = match k { + T!['('] => Some((tt::DelimiterKind::Parenthesis, T![')'])), + T!['{'] => Some((tt::DelimiterKind::Brace, T!['}'])), + T!['['] => Some((tt::DelimiterKind::Bracket, T![']'])), + _ => None, + }; + + if let Some((kind, closed)) = delim { + let mut subtree = tt::Subtree::default(); + let (id, idx) = self.id_alloc().open_delim(range); + subtree.delimiter = Some(tt::Delimiter { kind, id }); + + while self.peek().map(|it| it.kind() != closed).unwrap_or(false) { + self.collect_leaf(&mut subtree.token_trees); + } + let last_range = match self.bump() { + None => { + // For error resilience, we insert an char punct for the opening delim here + self.id_alloc().close_delim(idx, None); + let leaf: tt::Leaf = tt::Punct { + id: self.id_alloc().alloc(range), + char: token.to_char().unwrap(), + spacing: tt::Spacing::Alone, + } + .into(); + result.push(leaf.into()); + result.extend(subtree.token_trees); + return; + } + Some(it) => it.1, + }; + self.id_alloc().close_delim(idx, Some(last_range)); + subtree.into() + } else { + let spacing = match self.peek() { + Some(next) + if next.kind().is_trivia() + || next.kind() == T!['['] + || next.kind() == T!['{'] + || next.kind() == T!['('] => + { + tt::Spacing::Alone + } + Some(next) if next.kind().is_punct() => tt::Spacing::Joint, + _ => tt::Spacing::Alone, + }; + let char = match token.to_char() { + Some(c) => c, + None => { + panic!("Token from lexer must be single char: token = {:#?}", token); + } + }; + tt::Leaf::from(tt::Punct { char, spacing, id: self.id_alloc().alloc(range) }).into() + } + } else { + macro_rules! 
make_leaf { + ($i:ident) => { + tt::$i { id: self.id_alloc().alloc(range), text: token.to_text() }.into() + }; + } + let leaf: tt::Leaf = match k { + T![true] | T![false] => make_leaf!(Ident), + IDENT => make_leaf!(Ident), + k if k.is_keyword() => make_leaf!(Ident), + k if k.is_literal() => make_leaf!(Literal), + LIFETIME => { + let char_unit = TextSize::of('\''); + let r = TextRange::at(range.start(), char_unit); + let apostrophe = tt::Leaf::from(tt::Punct { + char: '\'', + spacing: tt::Spacing::Joint, + id: self.id_alloc().alloc(r), + }); + result.push(apostrophe.into()); + + let r = TextRange::at(range.start() + char_unit, range.len() - char_unit); + let ident = tt::Leaf::from(tt::Ident { + text: SmolStr::new(&token.to_text()[1..]), + id: self.id_alloc().alloc(r), + }); + result.push(ident.into()); + return; + } + _ => return, + }; + + leaf.into() + }); + } + + fn convert_doc_comment(&self, token: &Self::Token) -> Option>; + + fn bump(&mut self) -> Option<(Self::Token, TextRange)>; + + fn peek(&self) -> Option; + + fn id_alloc(&mut self) -> &mut TokenIdAlloc; +} + +impl<'a> SrcToken for (RawToken, &'a str) { + fn kind(&self) -> SyntaxKind { + self.0.kind + } + + fn to_char(&self) -> Option { + self.1.chars().next() + } + + fn to_text(&self) -> SmolStr { + self.1.into() + } +} + +impl RawConvertor<'_> {} + +impl<'a> TokenConvertor for RawConvertor<'a> { + type Token = (RawToken, &'a str); + + fn convert_doc_comment(&self, token: &Self::Token) -> Option> { + convert_doc_comment(&doc_comment(token.1)) + } + + fn bump(&mut self) -> Option<(Self::Token, TextRange)> { + let token = self.inner.next()?; + let range = TextRange::at(self.offset, token.len); + self.offset += token.len; + + Some(((*token, &self.text[range]), range)) + } + + fn peek(&self) -> Option { + let token = self.inner.as_slice().get(0).cloned(); + + token.map(|it| { + let range = TextRange::at(self.offset, it.len); + (it, &self.text[range]) + }) + } + + fn id_alloc(&mut self) -> &mut TokenIdAlloc { + 
&mut self.id_alloc + } +} + +struct Convertor { + id_alloc: TokenIdAlloc, + current: Option, + range: TextRange, + punct_offset: Option<(SyntaxToken, TextSize)>, +} + +impl Convertor { + fn new(node: &SyntaxNode, global_offset: TextSize) -> Convertor { + Convertor { + id_alloc: { TokenIdAlloc { map: TokenMap::default(), global_offset, next_id: 0 } }, + current: node.first_token(), + range: node.text_range(), + punct_offset: None, + } + } +} + +#[derive(Debug)] +enum SynToken { + Ordiniary(SyntaxToken), + Punch(SyntaxToken, TextSize), +} + +impl SynToken { + fn token(&self) -> &SyntaxToken { + match self { + SynToken::Ordiniary(it) => it, + SynToken::Punch(it, _) => it, + } + } +} + +impl SrcToken for SynToken { + fn kind(&self) -> SyntaxKind { + self.token().kind() + } + fn to_char(&self) -> Option { + match self { + SynToken::Ordiniary(_) => None, + SynToken::Punch(it, i) => it.text().chars().nth((*i).into()), + } + } + fn to_text(&self) -> SmolStr { + self.token().text().clone() + } +} + +impl TokenConvertor for Convertor { + type Token = SynToken; + fn convert_doc_comment(&self, token: &Self::Token) -> Option> { + convert_doc_comment(token.token()) + } + + fn bump(&mut self) -> Option<(Self::Token, TextRange)> { + if let Some((punct, offset)) = self.punct_offset.clone() { + if usize::from(offset) + 1 < punct.text().len() { + let offset = offset + TextSize::of('.'); + let range = punct.text_range(); + self.punct_offset = Some((punct.clone(), offset)); + let range = TextRange::at(range.start() + offset, TextSize::of('.')); + return Some((SynToken::Punch(punct, offset), range)); + } + } + + let curr = self.current.clone()?; + if !&self.range.contains_range(curr.text_range()) { + return None; + } + self.current = curr.next_token(); + + let token = if curr.kind().is_punct() { + let range = curr.text_range(); + let range = TextRange::at(range.start(), TextSize::of('.')); + self.punct_offset = Some((curr.clone(), 0.into())); + (SynToken::Punch(curr, 0.into()), range) + 
} else { + self.punct_offset = None; + let range = curr.text_range(); + (SynToken::Ordiniary(curr), range) + }; + + Some(token) + } + + fn peek(&self) -> Option { + if let Some((punct, mut offset)) = self.punct_offset.clone() { + offset = offset + TextSize::of('.'); + if usize::from(offset) < punct.text().len() { + return Some(SynToken::Punch(punct, offset)); + } + } + + let curr = self.current.clone()?; + if !self.range.contains_range(curr.text_range()) { + return None; + } + + let token = if curr.kind().is_punct() { + SynToken::Punch(curr, 0.into()) + } else { + SynToken::Ordiniary(curr) + }; + Some(token) + } + + fn id_alloc(&mut self) -> &mut TokenIdAlloc { + &mut self.id_alloc + } +} + +struct TtTreeSink<'a> { + buf: String, + cursor: Cursor<'a>, + open_delims: FxHashMap, + text_pos: TextSize, + inner: SyntaxTreeBuilder, + token_map: TokenMap, + + // Number of roots + // Use for detect ill-form tree which is not single root + roots: smallvec::SmallVec<[usize; 1]>, +} + +impl<'a> TtTreeSink<'a> { + fn new(cursor: Cursor<'a>) -> Self { + TtTreeSink { + buf: String::new(), + cursor, + open_delims: FxHashMap::default(), + text_pos: 0.into(), + inner: SyntaxTreeBuilder::default(), + roots: smallvec::SmallVec::new(), + token_map: TokenMap::default(), + } + } + + fn finish(self) -> (Parse, TokenMap) { + (self.inner.finish(), self.token_map) + } +} + +fn delim_to_str(d: Option, closing: bool) -> SmolStr { + let texts = match d { + Some(tt::DelimiterKind::Parenthesis) => "()", + Some(tt::DelimiterKind::Brace) => "{}", + Some(tt::DelimiterKind::Bracket) => "[]", + None => return "".into(), + }; + + let idx = closing as usize; + let text = &texts[idx..texts.len() - (1 - idx)]; + text.into() +} + +impl<'a> TreeSink for TtTreeSink<'a> { + fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) { + if kind == L_DOLLAR || kind == R_DOLLAR { + self.cursor = self.cursor.bump_subtree(); + return; + } + if kind == LIFETIME { + n_tokens = 2; + } + + let mut last = self.cursor; + 
for _ in 0..n_tokens { + if self.cursor.eof() { + break; + } + last = self.cursor; + let text: SmolStr = match self.cursor.token_tree() { + Some(tt::TokenTree::Leaf(leaf)) => { + // Mark the range if needed + let (text, id) = match leaf { + tt::Leaf::Ident(ident) => (ident.text.clone(), ident.id), + tt::Leaf::Punct(punct) => { + (SmolStr::new_inline_from_ascii(1, &[punct.char as u8]), punct.id) + } + tt::Leaf::Literal(lit) => (lit.text.clone(), lit.id), + }; + let range = TextRange::at(self.text_pos, TextSize::of(text.as_str())); + self.token_map.insert(id, range); + self.cursor = self.cursor.bump(); + text + } + Some(tt::TokenTree::Subtree(subtree)) => { + self.cursor = self.cursor.subtree().unwrap(); + if let Some(id) = subtree.delimiter.map(|it| it.id) { + self.open_delims.insert(id, self.text_pos); + } + delim_to_str(subtree.delimiter_kind(), false) + } + None => { + if let Some(parent) = self.cursor.end() { + self.cursor = self.cursor.bump(); + if let Some(id) = parent.delimiter.map(|it| it.id) { + if let Some(open_delim) = self.open_delims.get(&id) { + let open_range = TextRange::at(*open_delim, TextSize::of('(')); + let close_range = TextRange::at(self.text_pos, TextSize::of('(')); + self.token_map.insert_delim(id, open_range, close_range); + } + } + delim_to_str(parent.delimiter_kind(), true) + } else { + continue; + } + } + }; + self.buf += &text; + self.text_pos += TextSize::of(text.as_str()); + } + + let text = SmolStr::new(self.buf.as_str()); + self.buf.clear(); + self.inner.token(kind, text); + + // Add whitespace between adjoint puncts + let next = last.bump(); + if let ( + Some(tt::TokenTree::Leaf(tt::Leaf::Punct(curr))), + Some(tt::TokenTree::Leaf(tt::Leaf::Punct(_))), + ) = (last.token_tree(), next.token_tree()) + { + // Note: We always assume the semi-colon would be the last token in + // other parts of RA such that we don't add whitespace here. 
+ if curr.spacing == tt::Spacing::Alone && curr.char != ';' { + self.inner.token(WHITESPACE, " ".into()); + self.text_pos += TextSize::of(' '); + } + } + } + + fn start_node(&mut self, kind: SyntaxKind) { + self.inner.start_node(kind); + + match self.roots.last_mut() { + None | Some(0) => self.roots.push(1), + Some(ref mut n) => **n += 1, + }; + } + + fn finish_node(&mut self) { + self.inner.finish_node(); + *self.roots.last_mut().unwrap() -= 1; + } + + fn error(&mut self, error: ParseError) { + self.inner.error(error, self.text_pos) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::tests::parse_macro; + use parser::TokenSource; + use syntax::{ + algo::{insert_children, InsertPosition}, + ast::AstNode, + }; + + #[test] + fn convert_tt_token_source() { + let expansion = parse_macro( + r#" + macro_rules! literals { + ($i:ident) => { + { + let a = 'c'; + let c = 1000; + let f = 12E+99_f64; + let s = "rust1"; + } + } + } + "#, + ) + .expand_tt("literals!(foo);"); + let tts = &[expansion.into()]; + let buffer = tt::buffer::TokenBuffer::new(tts); + let mut tt_src = SubtreeTokenSource::new(&buffer); + let mut tokens = vec![]; + while tt_src.current().kind != EOF { + tokens.push((tt_src.current().kind, tt_src.text())); + tt_src.bump(); + } + + // [${] + // [let] [a] [=] ['c'] [;] + assert_eq!(tokens[2 + 3].1, "'c'"); + assert_eq!(tokens[2 + 3].0, CHAR); + // [let] [c] [=] [1000] [;] + assert_eq!(tokens[2 + 5 + 3].1, "1000"); + assert_eq!(tokens[2 + 5 + 3].0, INT_NUMBER); + // [let] [f] [=] [12E+99_f64] [;] + assert_eq!(tokens[2 + 10 + 3].1, "12E+99_f64"); + assert_eq!(tokens[2 + 10 + 3].0, FLOAT_NUMBER); + + // [let] [s] [=] ["rust1"] [;] + assert_eq!(tokens[2 + 15 + 3].1, "\"rust1\""); + assert_eq!(tokens[2 + 15 + 3].0, STRING); + } + + #[test] + fn stmts_token_trees_to_expr_is_err() { + let expansion = parse_macro( + r#" + macro_rules! 
stmts { + () => { + let a = 0; + let b = 0; + let c = 0; + let d = 0; + } + } + "#, + ) + .expand_tt("stmts!();"); + assert!(token_tree_to_syntax_node(&expansion, FragmentKind::Expr).is_err()); + } + + #[test] + fn test_token_tree_last_child_is_white_space() { + let source_file = ast::SourceFile::parse("f!({} );").ok().unwrap(); + let macro_call = source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); + let token_tree = macro_call.token_tree().unwrap(); + + // Token Tree now is : + // TokenTree + // - T!['('] + // - TokenTree + // - T!['{'] + // - T!['}'] + // - WHITE_SPACE + // - T![')'] + + let rbrace = + token_tree.syntax().descendants_with_tokens().find(|it| it.kind() == T!['}']).unwrap(); + let space = token_tree + .syntax() + .descendants_with_tokens() + .find(|it| it.kind() == SyntaxKind::WHITESPACE) + .unwrap(); + + // reorder th white space, such that the white is inside the inner token-tree. + let token_tree = insert_children( + &rbrace.parent().unwrap(), + InsertPosition::Last, + std::iter::once(space), + ); + + // Token Tree now is : + // TokenTree + // - T!['{'] + // - T!['}'] + // - WHITE_SPACE + let token_tree = ast::TokenTree::cast(token_tree).unwrap(); + let tt = ast_to_token_tree(&token_tree).unwrap().0; + + assert_eq!(tt.delimiter_kind(), Some(tt::DelimiterKind::Brace)); + } + + #[test] + fn test_token_tree_multi_char_punct() { + let source_file = ast::SourceFile::parse("struct Foo { a: x::Y }").ok().unwrap(); + let struct_def = source_file.syntax().descendants().find_map(ast::Struct::cast).unwrap(); + let tt = ast_to_token_tree(&struct_def).unwrap().0; + token_tree_to_syntax_node(&tt, FragmentKind::Item).unwrap(); + } +} diff --git a/crates/mbe/src/tests.rs b/crates/mbe/src/tests.rs new file mode 100644 index 0000000000..0796ceee1a --- /dev/null +++ b/crates/mbe/src/tests.rs @@ -0,0 +1,1898 @@ +use std::fmt::Write; + +use ::parser::FragmentKind; +use syntax::{ast, AstNode, NodeOrToken, SyntaxKind::IDENT, SyntaxNode, 
WalkEvent, T}; +use test_utils::assert_eq_text; + +use super::*; + +mod rule_parsing { + use syntax::{ast, AstNode}; + + use crate::ast_to_token_tree; + + use super::*; + + #[test] + fn test_valid_arms() { + fn check(macro_body: &str) { + let m = parse_macro_arm(macro_body); + m.unwrap(); + } + + check("($i:ident) => ()"); + check("($($i:ident)*) => ($_)"); + check("($($true:ident)*) => ($true)"); + check("($($false:ident)*) => ($false)"); + check("($) => ($)"); + } + + #[test] + fn test_invalid_arms() { + fn check(macro_body: &str, err: &str) { + let m = parse_macro_arm(macro_body); + assert_eq!(m, Err(ParseError::Expected(String::from(err)))); + } + + check("invalid", "expected subtree"); + + check("$i:ident => ()", "expected subtree"); + check("($i:ident) ()", "expected `=`"); + check("($($i:ident)_) => ()", "invalid repeat"); + + check("($i) => ($i)", "invalid macro definition"); + check("($i:) => ($i)", "invalid macro definition"); + } + + fn parse_macro_arm(arm_definition: &str) -> Result { + let macro_definition = format!(" macro_rules! m {{ {} }} ", arm_definition); + let source_file = ast::SourceFile::parse(¯o_definition).ok().unwrap(); + let macro_definition = + source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); + + let (definition_tt, _) = + ast_to_token_tree(¯o_definition.token_tree().unwrap()).unwrap(); + crate::MacroRules::parse(&definition_tt) + } +} + +// Good first issue (although a slightly challenging one): +// +// * Pick a random test from here +// https://github.com/intellij-rust/intellij-rust/blob/c4e9feee4ad46e7953b1948c112533360b6087bb/src/test/kotlin/org/rust/lang/core/macros/RsMacroExpansionTest.kt +// * Port the test to rust and add it to this module +// * Make it pass :-) + +#[test] +fn test_token_id_shift() { + let expansion = parse_macro( + r#" +macro_rules! 
foobar { + ($e:ident) => { foo bar $e } +} +"#, + ) + .expand_tt("foobar!(baz);"); + + fn get_id(t: &tt::TokenTree) -> Option { + if let tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) = t { + return Some(ident.id.0); + } + None + } + + assert_eq!(expansion.token_trees.len(), 3); + // {($e:ident) => { foo bar $e }} + // 012345 67 8 9 T 12 + assert_eq!(get_id(&expansion.token_trees[0]), Some(9)); + assert_eq!(get_id(&expansion.token_trees[1]), Some(10)); + + // The input args of macro call include parentheses: + // (baz) + // So baz should be 12+1+1 + assert_eq!(get_id(&expansion.token_trees[2]), Some(14)); +} + +#[test] +fn test_token_map() { + let expanded = parse_macro( + r#" +macro_rules! foobar { + ($e:ident) => { fn $e() {} } +} +"#, + ) + .expand_tt("foobar!(baz);"); + + let (node, token_map) = token_tree_to_syntax_node(&expanded, FragmentKind::Items).unwrap(); + let content = node.syntax_node().to_string(); + + let get_text = |id, kind| -> String { + content[token_map.range_by_token(id).unwrap().by_kind(kind).unwrap()].to_string() + }; + + assert_eq!(expanded.token_trees.len(), 4); + // {($e:ident) => { fn $e() {} }} + // 012345 67 8 9 T12 3 + + assert_eq!(get_text(tt::TokenId(9), IDENT), "fn"); + assert_eq!(get_text(tt::TokenId(12), T!['(']), "("); + assert_eq!(get_text(tt::TokenId(13), T!['{']), "{"); +} + +#[test] +fn test_convert_tt() { + parse_macro(r#" +macro_rules! impl_froms { + ($e:ident: $($v:ident),*) => { + $( + impl From<$v> for $e { + fn from(it: $v) -> $e { + $e::$v(it) + } + } + )* + } +} +"#) + .assert_expand_tt( + "impl_froms!(TokenTree: Leaf, Subtree);", + "impl From for TokenTree {fn from (it : Leaf) -> TokenTree {TokenTree ::Leaf (it)}} \ + impl From for TokenTree {fn from (it : Subtree) -> TokenTree {TokenTree ::Subtree (it)}}" + ); +} + +#[test] +fn test_convert_tt2() { + parse_macro( + r#" +macro_rules! 
impl_froms { + ($e:ident: $($v:ident),*) => { + $( + impl From<$v> for $e { + fn from(it: $v) -> $e { + $e::$v(it) + } + } + )* + } +} +"#, + ) + .assert_expand( + "impl_froms!(TokenTree: Leaf, Subtree);", + r#" +SUBTREE $ + IDENT impl 20 + IDENT From 21 + PUNCH < [joint] 22 + IDENT Leaf 53 + PUNCH > [alone] 25 + IDENT for 26 + IDENT TokenTree 51 + SUBTREE {} 29 + IDENT fn 30 + IDENT from 31 + SUBTREE () 32 + IDENT it 33 + PUNCH : [alone] 34 + IDENT Leaf 53 + PUNCH - [joint] 37 + PUNCH > [alone] 38 + IDENT TokenTree 51 + SUBTREE {} 41 + IDENT TokenTree 51 + PUNCH : [joint] 44 + PUNCH : [joint] 45 + IDENT Leaf 53 + SUBTREE () 48 + IDENT it 49 + IDENT impl 20 + IDENT From 21 + PUNCH < [joint] 22 + IDENT Subtree 55 + PUNCH > [alone] 25 + IDENT for 26 + IDENT TokenTree 51 + SUBTREE {} 29 + IDENT fn 30 + IDENT from 31 + SUBTREE () 32 + IDENT it 33 + PUNCH : [alone] 34 + IDENT Subtree 55 + PUNCH - [joint] 37 + PUNCH > [alone] 38 + IDENT TokenTree 51 + SUBTREE {} 41 + IDENT TokenTree 51 + PUNCH : [joint] 44 + PUNCH : [joint] 45 + IDENT Subtree 55 + SUBTREE () 48 + IDENT it 49 +"#, + ); +} + +#[test] +fn test_lifetime_split() { + parse_macro( + r#" +macro_rules! foo { + ($($t:tt)*) => { $($t)*} +} +"#, + ) + .assert_expand( + r#"foo!(static bar: &'static str = "hello";);"#, + r#" +SUBTREE $ + IDENT static 17 + IDENT bar 18 + PUNCH : [alone] 19 + PUNCH & [alone] 20 + PUNCH ' [joint] 21 + IDENT static 22 + IDENT str 23 + PUNCH = [alone] 24 + LITERAL "hello" 25 + PUNCH ; [joint] 26 +"#, + ); +} + +#[test] +fn test_expr_order() { + let expanded = parse_macro( + r#" + macro_rules! foo { + ($ i:expr) => { + fn bar() { $ i * 2; } + } + } +"#, + ) + .expand_items("foo! 
{ 1 + 1}"); + + let dump = format!("{:#?}", expanded); + assert_eq_text!( + dump.trim(), + r#"MACRO_ITEMS@0..15 + FN@0..15 + FN_KW@0..2 "fn" + NAME@2..5 + IDENT@2..5 "bar" + PARAM_LIST@5..7 + L_PAREN@5..6 "(" + R_PAREN@6..7 ")" + BLOCK_EXPR@7..15 + L_CURLY@7..8 "{" + EXPR_STMT@8..14 + BIN_EXPR@8..13 + BIN_EXPR@8..11 + LITERAL@8..9 + INT_NUMBER@8..9 "1" + PLUS@9..10 "+" + LITERAL@10..11 + INT_NUMBER@10..11 "1" + STAR@11..12 "*" + LITERAL@12..13 + INT_NUMBER@12..13 "2" + SEMICOLON@13..14 ";" + R_CURLY@14..15 "}""#, + ); +} + +#[test] +fn test_fail_match_pattern_by_first_token() { + parse_macro( + r#" + macro_rules! foo { + ($ i:ident) => ( + mod $ i {} + ); + (= $ i:ident) => ( + fn $ i() {} + ); + (+ $ i:ident) => ( + struct $ i; + ) + } +"#, + ) + .assert_expand_items("foo! { foo }", "mod foo {}") + .assert_expand_items("foo! { = bar }", "fn bar () {}") + .assert_expand_items("foo! { + Baz }", "struct Baz ;"); +} + +#[test] +fn test_fail_match_pattern_by_last_token() { + parse_macro( + r#" + macro_rules! foo { + ($ i:ident) => ( + mod $ i {} + ); + ($ i:ident =) => ( + fn $ i() {} + ); + ($ i:ident +) => ( + struct $ i; + ) + } +"#, + ) + .assert_expand_items("foo! { foo }", "mod foo {}") + .assert_expand_items("foo! { bar = }", "fn bar () {}") + .assert_expand_items("foo! { Baz + }", "struct Baz ;"); +} + +#[test] +fn test_fail_match_pattern_by_word_token() { + parse_macro( + r#" + macro_rules! foo { + ($ i:ident) => ( + mod $ i {} + ); + (spam $ i:ident) => ( + fn $ i() {} + ); + (eggs $ i:ident) => ( + struct $ i; + ) + } +"#, + ) + .assert_expand_items("foo! { foo }", "mod foo {}") + .assert_expand_items("foo! { spam bar }", "fn bar () {}") + .assert_expand_items("foo! { eggs Baz }", "struct Baz ;"); +} + +#[test] +fn test_match_group_pattern_by_separator_token() { + parse_macro( + r#" + macro_rules! 
foo { + ($ ($ i:ident),*) => ($ ( + mod $ i {} + )*); + ($ ($ i:ident)#*) => ($ ( + fn $ i() {} + )*); + ($ i:ident ,# $ j:ident) => ( + struct $ i; + struct $ j; + ) + } +"#, + ) + .assert_expand_items("foo! { foo, bar }", "mod foo {} mod bar {}") + .assert_expand_items("foo! { foo# bar }", "fn foo () {} fn bar () {}") + .assert_expand_items("foo! { Foo,# Bar }", "struct Foo ; struct Bar ;"); +} + +#[test] +fn test_match_group_pattern_with_multiple_defs() { + parse_macro( + r#" + macro_rules! foo { + ($ ($ i:ident),*) => ( struct Bar { $ ( + fn $ i {} + )*} ); + } +"#, + ) + .assert_expand_items("foo! { foo, bar }", "struct Bar {fn foo {} fn bar {}}"); +} + +#[test] +fn test_match_group_pattern_with_multiple_statement() { + parse_macro( + r#" + macro_rules! foo { + ($ ($ i:ident),*) => ( fn baz { $ ( + $ i (); + )*} ); + } +"#, + ) + .assert_expand_items("foo! { foo, bar }", "fn baz {foo () ; bar () ;}"); +} + +#[test] +fn test_match_group_pattern_with_multiple_statement_without_semi() { + parse_macro( + r#" + macro_rules! foo { + ($ ($ i:ident),*) => ( fn baz { $ ( + $i() + );*} ); + } +"#, + ) + .assert_expand_items("foo! { foo, bar }", "fn baz {foo () ;bar ()}"); +} + +#[test] +fn test_match_group_empty_fixed_token() { + parse_macro( + r#" + macro_rules! foo { + ($ ($ i:ident)* #abc) => ( fn baz { $ ( + $ i (); + )*} ); + } +"#, + ) + .assert_expand_items("foo! {#abc}", "fn baz {}"); +} + +#[test] +fn test_match_group_in_subtree() { + parse_macro( + r#" + macro_rules! foo { + (fn $name:ident {$($i:ident)*} ) => ( fn $name() { $ ( + $ i (); + )*} ); + }"#, + ) + .assert_expand_items("foo! {fn baz {a b} }", "fn baz () {a () ; b () ;}"); +} + +#[test] +fn test_match_group_with_multichar_sep() { + parse_macro( + r#" + macro_rules! foo { + (fn $name:ident {$($i:literal)*} ) => ( fn $name() -> bool { $($i)&&*} ); + }"#, + ) + .assert_expand_items("foo! 
(fn baz {true true} );", "fn baz () -> bool {true &&true}"); +} + +#[test] +fn test_match_group_zero_match() { + parse_macro( + r#" + macro_rules! foo { + ( $($i:ident)* ) => (); + }"#, + ) + .assert_expand_items("foo! ();", ""); +} + +#[test] +fn test_match_group_in_group() { + parse_macro( + r#" + macro_rules! foo { + { $( ( $($i:ident)* ) )* } => ( $( ( $($i)* ) )* ); + }"#, + ) + .assert_expand_items("foo! ( (a b) );", "(a b)"); +} + +#[test] +fn test_expand_to_item_list() { + let tree = parse_macro( + " + macro_rules! structs { + ($($i:ident),*) => { + $(struct $i { field: u32 } )* + } + } + ", + ) + .expand_items("structs!(Foo, Bar);"); + assert_eq!( + format!("{:#?}", tree).trim(), + r#" +MACRO_ITEMS@0..40 + STRUCT@0..20 + STRUCT_KW@0..6 "struct" + NAME@6..9 + IDENT@6..9 "Foo" + RECORD_FIELD_LIST@9..20 + L_CURLY@9..10 "{" + RECORD_FIELD@10..19 + NAME@10..15 + IDENT@10..15 "field" + COLON@15..16 ":" + PATH_TYPE@16..19 + PATH@16..19 + PATH_SEGMENT@16..19 + NAME_REF@16..19 + IDENT@16..19 "u32" + R_CURLY@19..20 "}" + STRUCT@20..40 + STRUCT_KW@20..26 "struct" + NAME@26..29 + IDENT@26..29 "Bar" + RECORD_FIELD_LIST@29..40 + L_CURLY@29..30 "{" + RECORD_FIELD@30..39 + NAME@30..35 + IDENT@30..35 "field" + COLON@35..36 ":" + PATH_TYPE@36..39 + PATH@36..39 + PATH_SEGMENT@36..39 + NAME_REF@36..39 + IDENT@36..39 "u32" + R_CURLY@39..40 "}""# + .trim() + ); +} + +fn to_subtree(tt: &tt::TokenTree) -> &tt::Subtree { + if let tt::TokenTree::Subtree(subtree) = tt { + return &subtree; + } + unreachable!("It is not a subtree"); +} +fn to_literal(tt: &tt::TokenTree) -> &tt::Literal { + if let tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) = tt { + return lit; + } + unreachable!("It is not a literal"); +} + +fn to_punct(tt: &tt::TokenTree) -> &tt::Punct { + if let tt::TokenTree::Leaf(tt::Leaf::Punct(lit)) = tt { + return lit; + } + unreachable!("It is not a Punct"); +} + +#[test] +fn test_expand_literals_to_token_tree() { + let expansion = parse_macro( + r#" + macro_rules! 
literals { + ($i:ident) => { + { + let a = 'c'; + let c = 1000; + let f = 12E+99_f64; + let s = "rust1"; + } + } + } + "#, + ) + .expand_tt("literals!(foo);"); + let stm_tokens = &to_subtree(&expansion.token_trees[0]).token_trees; + + // [let] [a] [=] ['c'] [;] + assert_eq!(to_literal(&stm_tokens[3]).text, "'c'"); + // [let] [c] [=] [1000] [;] + assert_eq!(to_literal(&stm_tokens[5 + 3]).text, "1000"); + // [let] [f] [=] [12E+99_f64] [;] + assert_eq!(to_literal(&stm_tokens[10 + 3]).text, "12E+99_f64"); + // [let] [s] [=] ["rust1"] [;] + assert_eq!(to_literal(&stm_tokens[15 + 3]).text, "\"rust1\""); +} + +#[test] +fn test_attr_to_token_tree() { + let expansion = parse_to_token_tree_by_syntax( + r#" + #[derive(Copy)] + struct Foo; + "#, + ); + + assert_eq!(to_punct(&expansion.token_trees[0]).char, '#'); + assert_eq!( + to_subtree(&expansion.token_trees[1]).delimiter_kind(), + Some(tt::DelimiterKind::Bracket) + ); +} + +#[test] +fn test_two_idents() { + parse_macro( + r#" + macro_rules! foo { + ($ i:ident, $ j:ident) => { + fn foo() { let a = $ i; let b = $j; } + } + } +"#, + ) + .assert_expand_items("foo! { foo, bar }", "fn foo () {let a = foo ; let b = bar ;}"); +} + +#[test] +fn test_tt_to_stmts() { + let stmts = parse_macro( + r#" + macro_rules! 
foo { + () => { + let a = 0; + a = 10 + 1; + a + } + } +"#, + ) + .expand_statements("foo!{}"); + + assert_eq!( + format!("{:#?}", stmts).trim(), + r#"MACRO_STMTS@0..15 + LET_STMT@0..7 + LET_KW@0..3 "let" + IDENT_PAT@3..4 + NAME@3..4 + IDENT@3..4 "a" + EQ@4..5 "=" + LITERAL@5..6 + INT_NUMBER@5..6 "0" + SEMICOLON@6..7 ";" + EXPR_STMT@7..14 + BIN_EXPR@7..13 + PATH_EXPR@7..8 + PATH@7..8 + PATH_SEGMENT@7..8 + NAME_REF@7..8 + IDENT@7..8 "a" + EQ@8..9 "=" + BIN_EXPR@9..13 + LITERAL@9..11 + INT_NUMBER@9..11 "10" + PLUS@11..12 "+" + LITERAL@12..13 + INT_NUMBER@12..13 "1" + SEMICOLON@13..14 ";" + EXPR_STMT@14..15 + PATH_EXPR@14..15 + PATH@14..15 + PATH_SEGMENT@14..15 + NAME_REF@14..15 + IDENT@14..15 "a""#, + ); +} + +#[test] +fn test_match_literal() { + parse_macro( + r#" + macro_rules! foo { + ('(') => { + fn foo() {} + } + } +"#, + ) + .assert_expand_items("foo! ['('];", "fn foo () {}"); +} + +// The following tests are port from intellij-rust directly +// https://github.com/intellij-rust/intellij-rust/blob/c4e9feee4ad46e7953b1948c112533360b6087bb/src/test/kotlin/org/rust/lang/core/macros/RsMacroExpansionTest.kt + +#[test] +fn test_path() { + parse_macro( + r#" + macro_rules! foo { + ($ i:path) => { + fn foo() { let a = $ i; } + } + } +"#, + ) + .assert_expand_items("foo! { foo }", "fn foo () {let a = foo ;}") + .assert_expand_items( + "foo! { bar::::baz:: }", + "fn foo () {let a = bar ::< u8 >:: baz ::< u8 > ;}", + ); +} + +#[test] +fn test_two_paths() { + parse_macro( + r#" + macro_rules! foo { + ($ i:path, $ j:path) => { + fn foo() { let a = $ i; let b = $j; } + } + } +"#, + ) + .assert_expand_items("foo! { foo, bar }", "fn foo () {let a = foo ; let b = bar ;}"); +} + +#[test] +fn test_path_with_path() { + parse_macro( + r#" + macro_rules! foo { + ($ i:path) => { + fn foo() { let a = $ i :: bar; } + } + } +"#, + ) + .assert_expand_items("foo! { foo }", "fn foo () {let a = foo :: bar ;}"); +} + +#[test] +fn test_expr() { + parse_macro( + r#" + macro_rules! 
foo { + ($ i:expr) => { + fn bar() { $ i; } + } + } +"#, + ) + .assert_expand_items( + "foo! { 2 + 2 * baz(3).quux() }", + "fn bar () {2 + 2 * baz (3) . quux () ;}", + ); +} + +#[test] +fn test_last_expr() { + parse_macro( + r#" + macro_rules! vec { + ($($item:expr),*) => { + { + let mut v = Vec::new(); + $( + v.push($item); + )* + v + } + }; + } +"#, + ) + .assert_expand_items( + "vec!(1,2,3);", + "{let mut v = Vec :: new () ; v . push (1) ; v . push (2) ; v . push (3) ; v}", + ); +} + +#[test] +fn test_ty() { + parse_macro( + r#" + macro_rules! foo { + ($ i:ty) => ( + fn bar() -> $ i { unimplemented!() } + ) + } +"#, + ) + .assert_expand_items("foo! { Baz }", "fn bar () -> Baz < u8 > {unimplemented ! ()}"); +} + +#[test] +fn test_ty_with_complex_type() { + parse_macro( + r#" + macro_rules! foo { + ($ i:ty) => ( + fn bar() -> $ i { unimplemented!() } + ) + } +"#, + ) + // Reference lifetime struct with generic type + .assert_expand_items( + "foo! { &'a Baz }", + "fn bar () -> & 'a Baz < u8 > {unimplemented ! ()}", + ) + // extern "Rust" func type + .assert_expand_items( + r#"foo! { extern "Rust" fn() -> Ret }"#, + r#"fn bar () -> extern "Rust" fn () -> Ret {unimplemented ! ()}"#, + ); +} + +#[test] +fn test_pat_() { + parse_macro( + r#" + macro_rules! foo { + ($ i:pat) => { fn foo() { let $ i; } } + } +"#, + ) + .assert_expand_items("foo! { (a, b) }", "fn foo () {let (a , b) ;}"); +} + +#[test] +fn test_stmt() { + parse_macro( + r#" + macro_rules! foo { + ($ i:stmt) => ( + fn bar() { $ i; } + ) + } +"#, + ) + .assert_expand_items("foo! { 2 }", "fn bar () {2 ;}") + .assert_expand_items("foo! { let a = 0 }", "fn bar () {let a = 0 ;}"); +} + +#[test] +fn test_single_item() { + parse_macro( + r#" + macro_rules! foo { + ($ i:item) => ( + $ i + ) + } +"#, + ) + .assert_expand_items("foo! {mod c {}}", "mod c {}"); +} + +#[test] +fn test_all_items() { + parse_macro( + r#" + macro_rules! foo { + ($ ($ i:item)*) => ($ ( + $ i + )*) + } +"#, + ). 
+ assert_expand_items( + r#" + foo! { + extern crate a; + mod b; + mod c {} + use d; + const E: i32 = 0; + static F: i32 = 0; + impl G {} + struct H; + enum I { Foo } + trait J {} + fn h() {} + extern {} + type T = u8; + } +"#, + r#"extern crate a ; mod b ; mod c {} use d ; const E : i32 = 0 ; static F : i32 = 0 ; impl G {} struct H ; enum I {Foo} trait J {} fn h () {} extern {} type T = u8 ;"#, + ); +} + +#[test] +fn test_block() { + parse_macro( + r#" + macro_rules! foo { + ($ i:block) => { fn foo() $ i } + } +"#, + ) + .assert_expand_statements("foo! { { 1; } }", "fn foo () {1 ;}"); +} + +#[test] +fn test_meta() { + parse_macro( + r#" + macro_rules! foo { + ($ i:meta) => ( + #[$ i] + fn bar() {} + ) + } +"#, + ) + .assert_expand_items( + r#"foo! { cfg(target_os = "windows") }"#, + r#"# [cfg (target_os = "windows")] fn bar () {}"#, + ); +} + +#[test] +fn test_meta_doc_comments() { + parse_macro( + r#" + macro_rules! foo { + ($(#[$ i:meta])+) => ( + $(#[$ i])+ + fn bar() {} + ) + } +"#, + ). + assert_expand_items( + r#"foo! { + /// Single Line Doc 1 + /** + MultiLines Doc + */ + }"#, + "# [doc = \" Single Line Doc 1\"] # [doc = \"\\\\n MultiLines Doc\\\\n \"] fn bar () {}", + ); +} + +#[test] +fn test_tt_block() { + parse_macro( + r#" + macro_rules! foo { + ($ i:tt) => { fn foo() $ i } + } + "#, + ) + .assert_expand_items(r#"foo! { { 1; } }"#, r#"fn foo () {1 ;}"#); +} + +#[test] +fn test_tt_group() { + parse_macro( + r#" + macro_rules! foo { + ($($ i:tt)*) => { $($ i)* } + } + "#, + ) + .assert_expand_items(r#"foo! { fn foo() {} }"#, r#"fn foo () {}"#); +} + +#[test] +fn test_tt_composite() { + parse_macro( + r#" + macro_rules! foo { + ($i:tt) => { 0 } + } + "#, + ) + .assert_expand_items(r#"foo! { => }"#, r#"0"#); +} + +#[test] +fn test_tt_composite2() { + let node = parse_macro( + r#" + macro_rules! 
foo { + ($($tt:tt)*) => { abs!(=> $($tt)*) } + } + "#, + ) + .expand_items(r#"foo!{#}"#); + + let res = format!("{:#?}", &node); + assert_eq_text!( + res.trim(), + r###"MACRO_ITEMS@0..10 + MACRO_CALL@0..10 + PATH@0..3 + PATH_SEGMENT@0..3 + NAME_REF@0..3 + IDENT@0..3 "abs" + BANG@3..4 "!" + TOKEN_TREE@4..10 + L_PAREN@4..5 "(" + EQ@5..6 "=" + R_ANGLE@6..7 ">" + WHITESPACE@7..8 " " + POUND@8..9 "#" + R_PAREN@9..10 ")""### + ); +} + +#[test] +fn test_lifetime() { + parse_macro( + r#" + macro_rules! foo { + ($ lt:lifetime) => { struct Ref<$ lt>{ s: &$ lt str } } + } +"#, + ) + .assert_expand_items(r#"foo!{'a}"#, r#"struct Ref <'a > {s : &'a str}"#); +} + +#[test] +fn test_literal() { + parse_macro( + r#" + macro_rules! foo { + ($ type:ty $ lit:literal) => { const VALUE: $ type = $ lit;}; + } +"#, + ) + .assert_expand_items(r#"foo!(u8 0);"#, r#"const VALUE : u8 = 0 ;"#); +} + +#[test] +fn test_boolean_is_ident() { + parse_macro( + r#" + macro_rules! foo { + ($lit0:literal, $lit1:literal) => { const VALUE: (bool,bool) = ($lit0,$lit1); }; + } +"#, + ) + .assert_expand( + r#"foo!(true,false);"#, + r#" +SUBTREE $ + IDENT const 14 + IDENT VALUE 15 + PUNCH : [alone] 16 + SUBTREE () 17 + IDENT bool 18 + PUNCH , [alone] 19 + IDENT bool 20 + PUNCH = [alone] 21 + SUBTREE () 22 + IDENT true 29 + PUNCH , [joint] 25 + IDENT false 31 + PUNCH ; [alone] 28 +"#, + ); +} + +#[test] +fn test_vis() { + parse_macro( + r#" + macro_rules! foo { + ($ vis:vis $ name:ident) => { $ vis fn $ name() {}}; + } +"#, + ) + .assert_expand_items(r#"foo!(pub foo);"#, r#"pub fn foo () {}"#) + // test optional cases + .assert_expand_items(r#"foo!(foo);"#, r#"fn foo () {}"#); +} + +#[test] +fn test_inner_macro_rules() { + parse_macro( + r#" +macro_rules! foo { + ($a:ident, $b:ident, $c:tt) => { + + macro_rules! bar { + ($bi:ident) => { + fn $bi() -> u8 {$c} + } + } + + bar!($a); + fn $b() -> u8 {$c} + } +} +"#, + ). + assert_expand_items( + r#"foo!(x,y, 1);"#, + r#"macro_rules ! 
bar {($ bi : ident) => {fn $ bi () -> u8 {1}}} bar ! (x) ; fn y () -> u8 {1}"#, + ); +} + +// The following tests are based on real world situations +#[test] +fn test_vec() { + let fixture = parse_macro( + r#" + macro_rules! vec { + ($($item:expr),*) => { + { + let mut v = Vec::new(); + $( + v.push($item); + )* + v + } + }; +} +"#, + ); + fixture + .assert_expand_items(r#"vec!();"#, r#"{let mut v = Vec :: new () ; v}"#) + .assert_expand_items( + r#"vec![1u32,2];"#, + r#"{let mut v = Vec :: new () ; v . push (1u32) ; v . push (2) ; v}"#, + ); + + let tree = fixture.expand_expr(r#"vec![1u32,2];"#); + + assert_eq!( + format!("{:#?}", tree).trim(), + r#"BLOCK_EXPR@0..45 + L_CURLY@0..1 "{" + LET_STMT@1..20 + LET_KW@1..4 "let" + IDENT_PAT@4..8 + MUT_KW@4..7 "mut" + NAME@7..8 + IDENT@7..8 "v" + EQ@8..9 "=" + CALL_EXPR@9..19 + PATH_EXPR@9..17 + PATH@9..17 + PATH@9..12 + PATH_SEGMENT@9..12 + NAME_REF@9..12 + IDENT@9..12 "Vec" + COLON2@12..14 "::" + PATH_SEGMENT@14..17 + NAME_REF@14..17 + IDENT@14..17 "new" + ARG_LIST@17..19 + L_PAREN@17..18 "(" + R_PAREN@18..19 ")" + SEMICOLON@19..20 ";" + EXPR_STMT@20..33 + METHOD_CALL_EXPR@20..32 + PATH_EXPR@20..21 + PATH@20..21 + PATH_SEGMENT@20..21 + NAME_REF@20..21 + IDENT@20..21 "v" + DOT@21..22 "." + NAME_REF@22..26 + IDENT@22..26 "push" + ARG_LIST@26..32 + L_PAREN@26..27 "(" + LITERAL@27..31 + INT_NUMBER@27..31 "1u32" + R_PAREN@31..32 ")" + SEMICOLON@32..33 ";" + EXPR_STMT@33..43 + METHOD_CALL_EXPR@33..42 + PATH_EXPR@33..34 + PATH@33..34 + PATH_SEGMENT@33..34 + NAME_REF@33..34 + IDENT@33..34 "v" + DOT@34..35 "." 
+ NAME_REF@35..39 + IDENT@35..39 "push" + ARG_LIST@39..42 + L_PAREN@39..40 "(" + LITERAL@40..41 + INT_NUMBER@40..41 "2" + R_PAREN@41..42 ")" + SEMICOLON@42..43 ";" + PATH_EXPR@43..44 + PATH@43..44 + PATH_SEGMENT@43..44 + NAME_REF@43..44 + IDENT@43..44 "v" + R_CURLY@44..45 "}""# + ); +} + +#[test] +fn test_winapi_struct() { + // from https://github.com/retep998/winapi-rs/blob/a7ef2bca086aae76cf6c4ce4c2552988ed9798ad/src/macros.rs#L366 + + parse_macro( + r#" +macro_rules! STRUCT { + ($(#[$attrs:meta])* struct $name:ident { + $($field:ident: $ftype:ty,)+ + }) => ( + #[repr(C)] #[derive(Copy)] $(#[$attrs])* + pub struct $name { + $(pub $field: $ftype,)+ + } + impl Clone for $name { + #[inline] + fn clone(&self) -> $name { *self } + } + #[cfg(feature = "impl-default")] + impl Default for $name { + #[inline] + fn default() -> $name { unsafe { $crate::_core::mem::zeroed() } } + } + ); +} +"#, + ). + // from https://github.com/retep998/winapi-rs/blob/a7ef2bca086aae76cf6c4ce4c2552988ed9798ad/src/shared/d3d9caps.rs + assert_expand_items(r#"STRUCT!{struct D3DVSHADERCAPS2_0 {Caps: u8,}}"#, + "# [repr (C)] # [derive (Copy)] pub struct D3DVSHADERCAPS2_0 {pub Caps : u8 ,} impl Clone for D3DVSHADERCAPS2_0 {# [inline] fn clone (& self) -> D3DVSHADERCAPS2_0 {* self}} # [cfg (feature = \"impl-default\")] impl Default for D3DVSHADERCAPS2_0 {# [inline] fn default () -> D3DVSHADERCAPS2_0 {unsafe {$crate :: _core :: mem :: zeroed ()}}}" + ) + .assert_expand_items(r#"STRUCT!{#[cfg_attr(target_arch = "x86", repr(packed))] struct D3DCONTENTPROTECTIONCAPS {Caps : u8 ,}}"#, + "# [repr (C)] # [derive (Copy)] # [cfg_attr (target_arch = \"x86\" , repr (packed))] pub struct D3DCONTENTPROTECTIONCAPS {pub Caps : u8 ,} impl Clone for D3DCONTENTPROTECTIONCAPS {# [inline] fn clone (& self) -> D3DCONTENTPROTECTIONCAPS {* self}} # [cfg (feature = \"impl-default\")] impl Default for D3DCONTENTPROTECTIONCAPS {# [inline] fn default () -> D3DCONTENTPROTECTIONCAPS {unsafe {$crate :: _core :: mem :: zeroed 
()}}}" + ); +} + +#[test] +fn test_int_base() { + parse_macro( + r#" +macro_rules! int_base { + ($Trait:ident for $T:ident as $U:ident -> $Radix:ident) => { + #[stable(feature = "rust1", since = "1.0.0")] + impl fmt::$Trait for $T { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + $Radix.fmt_int(*self as $U, f) + } + } + } +} +"#, + ).assert_expand_items(r#" int_base!{Binary for isize as usize -> Binary}"#, + "# [stable (feature = \"rust1\" , since = \"1.0.0\")] impl fmt ::Binary for isize {fn fmt (& self , f : & mut fmt :: Formatter < \'_ >) -> fmt :: Result {Binary . fmt_int (* self as usize , f)}}" + ); +} + +#[test] +fn test_generate_pattern_iterators() { + // from https://github.com/rust-lang/rust/blob/316a391dcb7d66dc25f1f9a4ec9d368ef7615005/src/libcore/str/mod.rs + parse_macro( + r#" +macro_rules! generate_pattern_iterators { + { double ended; with $(#[$common_stability_attribute:meta])*, + $forward_iterator:ident, + $reverse_iterator:ident, $iterty:ty + } => { + fn foo(){} + } +} +"#, + ).assert_expand_items( + r#"generate_pattern_iterators ! ( double ended ; with # [ stable ( feature = "rust1" , since = "1.0.0" ) ] , Split , RSplit , & 'a str );"#, + "fn foo () {}", + ); +} + +#[test] +fn test_impl_fn_for_zst() { + // from https://github.com/rust-lang/rust/blob/5d20ff4d2718c820632b38c1e49d4de648a9810b/src/libcore/internal_macros.rs + parse_macro( + r#" +macro_rules! impl_fn_for_zst { + { $( $( #[$attr: meta] )* + struct $Name: ident impl$( <$( $lifetime : lifetime ),+> )? Fn = + |$( $arg: ident: $ArgTy: ty ),*| -> $ReturnTy: ty +$body: block; )+ + } => { + $( + $( #[$attr] )* + struct $Name; + + impl $( <$( $lifetime ),+> )? Fn<($( $ArgTy, )*)> for $Name { + #[inline] + extern "rust-call" fn call(&self, ($( $arg, )*): ($( $ArgTy, )*)) -> $ReturnTy { + $body + } + } + + impl $( <$( $lifetime ),+> )? 
FnMut<($( $ArgTy, )*)> for $Name { + #[inline] + extern "rust-call" fn call_mut( + &mut self, + ($( $arg, )*): ($( $ArgTy, )*) + ) -> $ReturnTy { + Fn::call(&*self, ($( $arg, )*)) + } + } + + impl $( <$( $lifetime ),+> )? FnOnce<($( $ArgTy, )*)> for $Name { + type Output = $ReturnTy; + + #[inline] + extern "rust-call" fn call_once(self, ($( $arg, )*): ($( $ArgTy, )*)) -> $ReturnTy { + Fn::call(&self, ($( $arg, )*)) + } + } + )+ +} + } +"#, + ).assert_expand_items(r#" +impl_fn_for_zst ! { + # [ derive ( Clone ) ] + struct CharEscapeDebugContinue impl Fn = | c : char | -> char :: EscapeDebug { + c . escape_debug_ext ( false ) + } ; + + # [ derive ( Clone ) ] + struct CharEscapeUnicode impl Fn = | c : char | -> char :: EscapeUnicode { + c . escape_unicode ( ) + } ; + # [ derive ( Clone ) ] + struct CharEscapeDefault impl Fn = | c : char | -> char :: EscapeDefault { + c . escape_default ( ) + } ; + } +"#, + "# [derive (Clone)] struct CharEscapeDebugContinue ; impl Fn < (char ,) > for CharEscapeDebugContinue {# [inline] extern \"rust-call\" fn call (& self , (c ,) : (char ,)) -> char :: EscapeDebug {{c . escape_debug_ext (false)}}} impl FnMut < (char ,) > for CharEscapeDebugContinue {# [inline] extern \"rust-call\" fn call_mut (& mut self , (c ,) : (char ,)) -> char :: EscapeDebug {Fn :: call (&* self , (c ,))}} impl FnOnce < (char ,) > for CharEscapeDebugContinue {type Output = char :: EscapeDebug ; # [inline] extern \"rust-call\" fn call_once (self , (c ,) : (char ,)) -> char :: EscapeDebug {Fn :: call (& self , (c ,))}} # [derive (Clone)] struct CharEscapeUnicode ; impl Fn < (char ,) > for CharEscapeUnicode {# [inline] extern \"rust-call\" fn call (& self , (c ,) : (char ,)) -> char :: EscapeUnicode {{c . 
escape_unicode ()}}} impl FnMut < (char ,) > for CharEscapeUnicode {# [inline] extern \"rust-call\" fn call_mut (& mut self , (c ,) : (char ,)) -> char :: EscapeUnicode {Fn :: call (&* self , (c ,))}} impl FnOnce < (char ,) > for CharEscapeUnicode {type Output = char :: EscapeUnicode ; # [inline] extern \"rust-call\" fn call_once (self , (c ,) : (char ,)) -> char :: EscapeUnicode {Fn :: call (& self , (c ,))}} # [derive (Clone)] struct CharEscapeDefault ; impl Fn < (char ,) > for CharEscapeDefault {# [inline] extern \"rust-call\" fn call (& self , (c ,) : (char ,)) -> char :: EscapeDefault {{c . escape_default ()}}} impl FnMut < (char ,) > for CharEscapeDefault {# [inline] extern \"rust-call\" fn call_mut (& mut self , (c ,) : (char ,)) -> char :: EscapeDefault {Fn :: call (&* self , (c ,))}} impl FnOnce < (char ,) > for CharEscapeDefault {type Output = char :: EscapeDefault ; # [inline] extern \"rust-call\" fn call_once (self , (c ,) : (char ,)) -> char :: EscapeDefault {Fn :: call (& self , (c ,))}}" + ); +} + +#[test] +fn test_impl_nonzero_fmt() { + // from https://github.com/rust-lang/rust/blob/316a391dcb7d66dc25f1f9a4ec9d368ef7615005/src/libcore/num/mod.rs#L12 + parse_macro( + r#" + macro_rules! impl_nonzero_fmt { + ( #[$stability: meta] ( $( $Trait: ident ),+ ) for $Ty: ident ) => { + fn foo () {} + } + } +"#, + ).assert_expand_items( + r#"impl_nonzero_fmt! { # [stable(feature= "nonzero",since="1.28.0")] (Debug,Display,Binary,Octal,LowerHex,UpperHex) for NonZeroU8}"#, + "fn foo () {}", + ); +} + +#[test] +fn test_cfg_if_items() { + // from https://github.com/rust-lang/rust/blob/33fe1131cadba69d317156847be9a402b89f11bb/src/libstd/macros.rs#L986 + parse_macro( + r#" + macro_rules! __cfg_if_items { + (($($not:meta,)*) ; ) => {}; + (($($not:meta,)*) ; ( ($($m:meta),*) ($($it:item)*) ), $($rest:tt)*) => { + __cfg_if_items! { ($($not,)* $($m,)*) ; $($rest)* } + } + } +"#, + ).assert_expand_items( + r#"__cfg_if_items ! 
{ ( rustdoc , ) ; ( ( ) ( # [ cfg ( any ( target_os = "redox" , unix ) ) ] # [ stable ( feature = "rust1" , since = "1.0.0" ) ] pub use sys :: ext as unix ; # [ cfg ( windows ) ] # [ stable ( feature = "rust1" , since = "1.0.0" ) ] pub use sys :: ext as windows ; # [ cfg ( any ( target_os = "linux" , target_os = "l4re" ) ) ] pub mod linux ; ) ) , }"#, + "__cfg_if_items ! {(rustdoc ,) ;}", + ); +} + +#[test] +fn test_cfg_if_main() { + // from https://github.com/rust-lang/rust/blob/3d211248393686e0f73851fc7548f6605220fbe1/src/libpanic_unwind/macros.rs#L9 + parse_macro( + r#" + macro_rules! cfg_if { + ($( + if #[cfg($($meta:meta),*)] { $($it:item)* } + ) else * else { + $($it2:item)* + }) => { + __cfg_if_items! { + () ; + $( ( ($($meta),*) ($($it)*) ), )* + ( () ($($it2)*) ), + } + }; + + // Internal macro to Apply a cfg attribute to a list of items + (@__apply $m:meta, $($it:item)*) => { + $(#[$m] $it)* + }; + } +"#, + ).assert_expand_items(r#" +cfg_if ! { + if # [ cfg ( target_env = "msvc" ) ] { + // no extra unwinder support needed + } else if # [ cfg ( all ( target_arch = "wasm32" , not ( target_os = "emscripten" ) ) ) ] { + // no unwinder on the system! + } else { + mod libunwind ; + pub use libunwind :: * ; + } + } +"#, + "__cfg_if_items ! {() ; ((target_env = \"msvc\") ()) , ((all (target_arch = \"wasm32\" , not (target_os = \"emscripten\"))) ()) , (() (mod libunwind ; pub use libunwind :: * ;)) ,}" + ).assert_expand_items( + r#" +cfg_if ! { @ __apply cfg ( all ( not ( any ( not ( any ( target_os = "solaris" , target_os = "illumos" ) ) ) ) ) ) , } +"#, + "", + ); +} + +#[test] +fn test_proptest_arbitrary() { + // from https://github.com/AltSysrq/proptest/blob/d1c4b049337d2f75dd6f49a095115f7c532e5129/proptest/src/arbitrary/macros.rs#L16 + parse_macro( + r#" +macro_rules! 
arbitrary { + ([$($bounds : tt)*] $typ: ty, $strat: ty, $params: ty; + $args: ident => $logic: expr) => { + impl<$($bounds)*> $crate::arbitrary::Arbitrary for $typ { + type Parameters = $params; + type Strategy = $strat; + fn arbitrary_with($args: Self::Parameters) -> Self::Strategy { + $logic + } + } + }; + +}"#, + ).assert_expand_items(r#"arbitrary ! ( [ A : Arbitrary ] + Vec < A > , + VecStrategy < A :: Strategy > , + RangedParams1 < A :: Parameters > ; + args => { let product_unpack ! [ range , a ] = args ; vec ( any_with :: < A > ( a ) , range ) } + ) ;"#, + "impl
$crate :: arbitrary :: Arbitrary for Vec < A > {type Parameters = RangedParams1 < A :: Parameters > ; type Strategy = VecStrategy < A :: Strategy > ; fn arbitrary_with (args : Self :: Parameters) -> Self :: Strategy {{let product_unpack ! [range , a] = args ; vec (any_with :: < A > (a) , range)}}}" + ); +} + +#[test] +fn test_old_ridl() { + // This is from winapi 2.8, which do not have a link from github + // + let expanded = parse_macro( + r#" +#[macro_export] +macro_rules! RIDL { + (interface $interface:ident ($vtbl:ident) : $pinterface:ident ($pvtbl:ident) + {$( + fn $method:ident(&mut self $(,$p:ident : $t:ty)*) -> $rtr:ty + ),+} + ) => { + impl $interface { + $(pub unsafe fn $method(&mut self) -> $rtr { + ((*self.lpVtbl).$method)(self $(,$p)*) + })+ + } + }; +}"#, + ).expand_tt(r#" + RIDL!{interface ID3D11Asynchronous(ID3D11AsynchronousVtbl): ID3D11DeviceChild(ID3D11DeviceChildVtbl) { + fn GetDataSize(&mut self) -> UINT + }}"#); + + assert_eq!(expanded.to_string(), "impl ID3D11Asynchronous {pub unsafe fn GetDataSize (& mut self) -> UINT {((* self . lpVtbl) .GetDataSize) (self)}}"); +} + +#[test] +fn test_quick_error() { + let expanded = parse_macro( + r#" +macro_rules! quick_error { + + (SORT [enum $name:ident $( #[$meta:meta] )*] + items [$($( #[$imeta:meta] )* + => $iitem:ident: $imode:tt [$( $ivar:ident: $ityp:ty ),*] + {$( $ifuncs:tt )*} )* ] + buf [ ] + queue [ ] + ) => { + quick_error!(ENUMINITION [enum $name $( #[$meta] )*] + body [] + queue [$( + $( #[$imeta] )* + => + $iitem: $imode [$( $ivar: $ityp ),*] + )*] + ); +}; + +} +"#, + ) + .expand_tt( + r#" +quick_error ! (SORT [enum Wrapped # [derive (Debug)]] items [ + => One : UNIT [] {} + => Two : TUPLE [s :String] {display ("two: {}" , s) from ()} + ] buf [] queue []) ; +"#, + ); + + assert_eq!(expanded.to_string(), "quick_error ! 
(ENUMINITION [enum Wrapped # [derive (Debug)]] body [] queue [=> One : UNIT [] => Two : TUPLE [s : String]]) ;"); +} + +#[test] +fn test_empty_repeat_vars_in_empty_repeat_vars() { + parse_macro( + r#" +macro_rules! delegate_impl { + ([$self_type:ident, $self_wrap:ty, $self_map:ident] + pub trait $name:ident $(: $sup:ident)* $(+ $more_sup:ident)* { + + // "Escaped" associated types. Stripped before making the `trait` + // itself, but forwarded when delegating impls. + $( + @escape [type $assoc_name_ext:ident] + // Associated types. Forwarded. + )* + $( + @section type + $( + $(#[$_assoc_attr:meta])* + type $assoc_name:ident $(: $assoc_bound:ty)*; + )+ + )* + // Methods. Forwarded. Using $self_map!(self) around the self argument. + // Methods must use receiver `self` or explicit type like `self: &Self` + // &self and &mut self are _not_ supported. + $( + @section self + $( + $(#[$_method_attr:meta])* + fn $method_name:ident(self $(: $self_selftype:ty)* $(,$marg:ident : $marg_ty:ty)*) -> $mret:ty; + )+ + )* + // Arbitrary tail that is ignored when forwarding. + $( + @section nodelegate + $($tail:tt)* + )* + }) => { + impl<> $name for $self_wrap where $self_type: $name { + $( + $( + fn $method_name(self $(: $self_selftype)* $(,$marg: $marg_ty)*) -> $mret { + $self_map!(self).$method_name($($marg),*) + } + )* + )* + } + } +} +"#, + ).assert_expand_items( + r#"delegate_impl ! {[G , & 'a mut G , deref] pub trait Data : GraphBase {@ section type type NodeWeight ;}}"#, + "impl <> Data for & \'a mut G where G : Data {}", + ); +} + +#[test] +fn expr_interpolation() { + let expanded = parse_macro( + r#" + macro_rules! 
id { + ($expr:expr) => { + map($expr) + } + } + "#, + ) + .expand_expr("id!(x + foo);"); + + assert_eq!(expanded.to_string(), "map(x+foo)"); +} + +pub(crate) struct MacroFixture { + rules: MacroRules, +} + +impl MacroFixture { + pub(crate) fn expand_tt(&self, invocation: &str) -> tt::Subtree { + self.try_expand_tt(invocation).unwrap() + } + + fn try_expand_tt(&self, invocation: &str) -> Result { + let source_file = ast::SourceFile::parse(invocation).tree(); + let macro_invocation = + source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); + + let (invocation_tt, _) = ast_to_token_tree(¯o_invocation.token_tree().unwrap()) + .ok_or_else(|| ExpandError::ConversionError)?; + + self.rules.expand(&invocation_tt).result() + } + + fn assert_expand_err(&self, invocation: &str, err: &ExpandError) { + assert_eq!(self.try_expand_tt(invocation).as_ref(), Err(err)); + } + + fn expand_items(&self, invocation: &str) -> SyntaxNode { + let expanded = self.expand_tt(invocation); + token_tree_to_syntax_node(&expanded, FragmentKind::Items).unwrap().0.syntax_node() + } + + fn expand_statements(&self, invocation: &str) -> SyntaxNode { + let expanded = self.expand_tt(invocation); + token_tree_to_syntax_node(&expanded, FragmentKind::Statements).unwrap().0.syntax_node() + } + + fn expand_expr(&self, invocation: &str) -> SyntaxNode { + let expanded = self.expand_tt(invocation); + token_tree_to_syntax_node(&expanded, FragmentKind::Expr).unwrap().0.syntax_node() + } + + fn assert_expand_tt(&self, invocation: &str, expected: &str) { + let expansion = self.expand_tt(invocation); + assert_eq!(expansion.to_string(), expected); + } + + fn assert_expand(&self, invocation: &str, expected: &str) { + let expansion = self.expand_tt(invocation); + let actual = format!("{:?}", expansion); + test_utils::assert_eq_text!(&actual.trim(), &expected.trim()); + } + + fn assert_expand_items(&self, invocation: &str, expected: &str) -> &MacroFixture { + self.assert_expansion(FragmentKind::Items, 
invocation, expected); + self + } + + fn assert_expand_statements(&self, invocation: &str, expected: &str) -> &MacroFixture { + self.assert_expansion(FragmentKind::Statements, invocation, expected); + self + } + + fn assert_expansion(&self, kind: FragmentKind, invocation: &str, expected: &str) { + let expanded = self.expand_tt(invocation); + assert_eq!(expanded.to_string(), expected); + + let expected = expected.replace("$crate", "C_C__C"); + + // wrap the given text to a macro call + let expected = { + let wrapped = format!("wrap_macro!( {} )", expected); + let wrapped = ast::SourceFile::parse(&wrapped); + let wrapped = + wrapped.tree().syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); + let mut wrapped = ast_to_token_tree(&wrapped).unwrap().0; + wrapped.delimiter = None; + wrapped + }; + + let expanded_tree = token_tree_to_syntax_node(&expanded, kind).unwrap().0.syntax_node(); + let expanded_tree = debug_dump_ignore_spaces(&expanded_tree).trim().to_string(); + + let expected_tree = token_tree_to_syntax_node(&expected, kind).unwrap().0.syntax_node(); + let expected_tree = debug_dump_ignore_spaces(&expected_tree).trim().to_string(); + + let expected_tree = expected_tree.replace("C_C__C", "$crate"); + assert_eq!( + expanded_tree, expected_tree, + "\nleft:\n{}\nright:\n{}", + expanded_tree, expected_tree, + ); + } +} + +fn parse_macro_to_tt(ra_fixture: &str) -> tt::Subtree { + let source_file = ast::SourceFile::parse(ra_fixture).ok().unwrap(); + let macro_definition = + source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); + + let (definition_tt, _) = ast_to_token_tree(¯o_definition.token_tree().unwrap()).unwrap(); + + let parsed = parse_to_token_tree( + &ra_fixture[macro_definition.token_tree().unwrap().syntax().text_range()], + ) + .unwrap() + .0; + assert_eq!(definition_tt, parsed); + + definition_tt +} + +pub(crate) fn parse_macro(ra_fixture: &str) -> MacroFixture { + let definition_tt = parse_macro_to_tt(ra_fixture); + let 
rules = MacroRules::parse(&definition_tt).unwrap(); + MacroFixture { rules } +} + +pub(crate) fn parse_macro_error(ra_fixture: &str) -> ParseError { + let definition_tt = parse_macro_to_tt(ra_fixture); + + match MacroRules::parse(&definition_tt) { + Ok(_) => panic!("Expect error"), + Err(err) => err, + } +} + +pub(crate) fn parse_to_token_tree_by_syntax(ra_fixture: &str) -> tt::Subtree { + let source_file = ast::SourceFile::parse(ra_fixture).ok().unwrap(); + let tt = syntax_node_to_token_tree(source_file.syntax()).unwrap().0; + + let parsed = parse_to_token_tree(ra_fixture).unwrap().0; + assert_eq!(tt, parsed); + + parsed +} + +fn debug_dump_ignore_spaces(node: &syntax::SyntaxNode) -> String { + let mut level = 0; + let mut buf = String::new(); + macro_rules! indent { + () => { + for _ in 0..level { + buf.push_str(" "); + } + }; + } + + for event in node.preorder_with_tokens() { + match event { + WalkEvent::Enter(element) => { + match element { + NodeOrToken::Node(node) => { + indent!(); + writeln!(buf, "{:?}", node.kind()).unwrap(); + } + NodeOrToken::Token(token) => match token.kind() { + syntax::SyntaxKind::WHITESPACE => {} + _ => { + indent!(); + writeln!(buf, "{:?}", token.kind()).unwrap(); + } + }, + } + level += 1; + } + WalkEvent::Leave(_) => level -= 1, + } + } + + buf +} + +#[test] +fn test_issue_2520() { + let macro_fixture = parse_macro( + r#" + macro_rules! my_macro { + { + ( $( + $( [] $sname:ident : $stype:ty )? + $( [$expr:expr] $nname:ident : $ntype:ty )? + ),* ) + } => { + Test { + $( + $( $sname, )? + )* + } + }; + } + "#, + ); + + macro_fixture.assert_expand_items( + r#"my_macro ! { + ([] p1 : u32 , [|_| S0K0] s : S0K0 , [] k0 : i32) + }"#, + "Test {p1 , k0 ,}", + ); +} + +#[test] +fn test_issue_3861() { + let macro_fixture = parse_macro( + r#" + macro_rules! 
rgb_color { + ($p:expr, $t: ty) => { + pub fn new() { + let _ = 0 as $t << $p; + } + }; + } + "#, + ); + + macro_fixture.expand_items(r#"rgb_color!(8 + 8, u32);"#); +} + +#[test] +fn test_repeat_bad_var() { + // FIXME: the second rule of the macro should be removed and an error about + // `$( $c )+` raised + parse_macro( + r#" + macro_rules! foo { + ($( $b:ident )+) => { + $( $c )+ + }; + ($( $b:ident )+) => { + $( $b )+ + } + } + "#, + ) + .assert_expand_items("foo!(b0 b1);", "b0 b1"); +} + +#[test] +fn test_no_space_after_semi_colon() { + let expanded = parse_macro( + r#" + macro_rules! with_std { ($($i:item)*) => ($(#[cfg(feature = "std")]$i)*) } + "#, + ) + .expand_items(r#"with_std! {mod m;mod f;}"#); + + let dump = format!("{:#?}", expanded); + assert_eq_text!( + dump.trim(), + r###"MACRO_ITEMS@0..52 + MODULE@0..26 + ATTR@0..21 + POUND@0..1 "#" + L_BRACK@1..2 "[" + PATH@2..5 + PATH_SEGMENT@2..5 + NAME_REF@2..5 + IDENT@2..5 "cfg" + TOKEN_TREE@5..20 + L_PAREN@5..6 "(" + IDENT@6..13 "feature" + EQ@13..14 "=" + STRING@14..19 "\"std\"" + R_PAREN@19..20 ")" + R_BRACK@20..21 "]" + MOD_KW@21..24 "mod" + NAME@24..25 + IDENT@24..25 "m" + SEMICOLON@25..26 ";" + MODULE@26..52 + ATTR@26..47 + POUND@26..27 "#" + L_BRACK@27..28 "[" + PATH@28..31 + PATH_SEGMENT@28..31 + NAME_REF@28..31 + IDENT@28..31 "cfg" + TOKEN_TREE@31..46 + L_PAREN@31..32 "(" + IDENT@32..39 "feature" + EQ@39..40 "=" + STRING@40..45 "\"std\"" + R_PAREN@45..46 ")" + R_BRACK@46..47 "]" + MOD_KW@47..50 "mod" + NAME@50..51 + IDENT@50..51 "f" + SEMICOLON@51..52 ";""###, + ); +} + +// https://github.com/rust-lang/rust/blob/master/src/test/ui/issues/issue-57597.rs +#[test] +fn test_rustc_issue_57597() { + fn test_error(fixture: &str) { + assert_eq!(parse_macro_error(fixture), ParseError::RepetitionEmtpyTokenTree); + } + + test_error("macro_rules! foo { ($($($i:ident)?)+) => {}; }"); + test_error("macro_rules! foo { ($($($i:ident)?)*) => {}; }"); + test_error("macro_rules! foo { ($($($i:ident)?)?) 
=> {}; }"); + test_error("macro_rules! foo { ($($($($i:ident)?)?)?) => {}; }"); + test_error("macro_rules! foo { ($($($($i:ident)*)?)?) => {}; }"); + test_error("macro_rules! foo { ($($($($i:ident)?)*)?) => {}; }"); + test_error("macro_rules! foo { ($($($($i:ident)?)?)*) => {}; }"); + test_error("macro_rules! foo { ($($($($i:ident)*)*)?) => {}; }"); + test_error("macro_rules! foo { ($($($($i:ident)?)*)*) => {}; }"); + test_error("macro_rules! foo { ($($($($i:ident)?)*)+) => {}; }"); + test_error("macro_rules! foo { ($($($($i:ident)+)?)*) => {}; }"); + test_error("macro_rules! foo { ($($($($i:ident)+)*)?) => {}; }"); +} + +#[test] +fn test_expand_bad_literal() { + parse_macro( + r#" + macro_rules! foo { ($i:literal) => {}; } + "#, + ) + .assert_expand_err(r#"foo!(&k");"#, &ExpandError::BindingError("".into())); +} + +#[test] +fn test_empty_comments() { + parse_macro( + r#" + macro_rules! one_arg_macro { ($fmt:expr) => (); } + "#, + ) + .assert_expand_err( + r#"one_arg_macro!(/**/)"#, + &ExpandError::BindingError("expected Expr".into()), + ); +} diff --git a/crates/ra_mbe/src/tt_iter.rs b/crates/mbe/src/tt_iter.rs similarity index 100% rename from crates/ra_mbe/src/tt_iter.rs rename to crates/mbe/src/tt_iter.rs diff --git a/crates/parser/Cargo.toml b/crates/parser/Cargo.toml new file mode 100644 index 0000000000..358be92d12 --- /dev/null +++ b/crates/parser/Cargo.toml @@ -0,0 +1,12 @@ +[package] +name = "parser" +version = "0.0.0" +license = "MIT OR Apache-2.0" +authors = ["rust-analyzer developers"] +edition = "2018" + +[lib] +doctest = false + +[dependencies] +drop_bomb = "0.1.4" diff --git a/crates/ra_parser/src/event.rs b/crates/parser/src/event.rs similarity index 100% rename from crates/ra_parser/src/event.rs rename to crates/parser/src/event.rs diff --git a/crates/parser/src/grammar.rs b/crates/parser/src/grammar.rs new file mode 100644 index 0000000000..562e92252b --- /dev/null +++ b/crates/parser/src/grammar.rs @@ -0,0 +1,293 @@ +//! 
This is the actual "grammar" of the Rust language. +//! +//! Each function in this module and its children corresponds +//! to a production of the formal grammar. Submodules roughly +//! correspond to different *areas* of the grammar. By convention, +//! each submodule starts with `use super::*` import and exports +//! "public" productions via `pub(super)`. +//! +//! See docs for `Parser` to learn about API, available to the grammar, +//! and see docs for `Event` to learn how this actually manages to +//! produce parse trees. +//! +//! Code in this module also contains inline tests, which start with +//! `// test name-of-the-test` comment and look like this: +//! +//! ``` +//! // test function_with_zero_parameters +//! // fn foo() {} +//! ``` +//! +//! After adding a new inline-test, run `cargo xtask codegen` to +//! extract it as a standalone text-fixture into +//! `crates/syntax/test_data/parser/`, and run `cargo test` once to +//! create the "gold" value. +//! +//! Coding convention: rules like `where_clause` always produce either a +//! node or an error, rules like `opt_where_clause` may produce nothing. +//! Non-opt rules typically start with `assert!(p.at(FIRST_TOKEN))`, the +//! caller is responsible for branching on the first token. 
+mod attributes; +mod expressions; +mod items; +mod params; +mod paths; +mod patterns; +mod type_args; +mod type_params; +mod types; + +use crate::{ + parser::{CompletedMarker, Marker, Parser}, + SyntaxKind::{self, *}, + TokenSet, +}; + +pub(crate) fn root(p: &mut Parser) { + let m = p.start(); + p.eat(SHEBANG); + items::mod_contents(p, false); + m.complete(p, SOURCE_FILE); +} + +/// Various pieces of syntax that can be parsed by macros by example +pub(crate) mod fragments { + use super::*; + + pub(crate) use super::{ + expressions::block_expr, paths::type_path as path, patterns::pattern, types::type_, + }; + + pub(crate) fn expr(p: &mut Parser) { + let _ = expressions::expr(p); + } + + pub(crate) fn stmt(p: &mut Parser) { + expressions::stmt(p, expressions::StmtWithSemi::No) + } + + pub(crate) fn opt_visibility(p: &mut Parser) { + let _ = super::opt_visibility(p); + } + + // Parse a meta item , which excluded [], e.g : #[ MetaItem ] + pub(crate) fn meta_item(p: &mut Parser) { + fn is_delimiter(p: &mut Parser) -> bool { + matches!(p.current(), T!['{'] | T!['('] | T!['[']) + } + + if is_delimiter(p) { + items::token_tree(p); + return; + } + + let m = p.start(); + while !p.at(EOF) { + if is_delimiter(p) { + items::token_tree(p); + break; + } else { + // https://doc.rust-lang.org/reference/attributes.html + // https://doc.rust-lang.org/reference/paths.html#simple-paths + // The start of an meta must be a simple path + match p.current() { + IDENT | T![::] | T![super] | T![self] | T![crate] => p.bump_any(), + T![=] => { + p.bump_any(); + match p.current() { + c if c.is_literal() => p.bump_any(), + T![true] | T![false] => p.bump_any(), + _ => {} + } + break; + } + _ => break, + } + } + } + + m.complete(p, TOKEN_TREE); + } + + pub(crate) fn item(p: &mut Parser) { + items::item_or_macro(p, true) + } + + pub(crate) fn macro_items(p: &mut Parser) { + let m = p.start(); + items::mod_contents(p, false); + m.complete(p, MACRO_ITEMS); + } + + pub(crate) fn macro_stmts(p: &mut 
Parser) { + let m = p.start(); + + while !p.at(EOF) { + if p.at(T![;]) { + p.bump(T![;]); + continue; + } + + expressions::stmt(p, expressions::StmtWithSemi::Optional); + } + + m.complete(p, MACRO_STMTS); + } +} + +pub(crate) fn reparser( + node: SyntaxKind, + first_child: Option, + parent: Option, +) -> Option { + let res = match node { + BLOCK_EXPR => expressions::block_expr, + RECORD_FIELD_LIST => items::record_field_list, + RECORD_EXPR_FIELD_LIST => items::record_expr_field_list, + VARIANT_LIST => items::variant_list, + MATCH_ARM_LIST => items::match_arm_list, + USE_TREE_LIST => items::use_tree_list, + EXTERN_ITEM_LIST => items::extern_item_list, + TOKEN_TREE if first_child? == T!['{'] => items::token_tree, + ASSOC_ITEM_LIST => match parent? { + IMPL => items::assoc_item_list, + TRAIT => items::assoc_item_list, + _ => return None, + }, + ITEM_LIST => items::item_list, + _ => return None, + }; + Some(res) +} + +#[derive(Clone, Copy, PartialEq, Eq)] +enum BlockLike { + Block, + NotBlock, +} + +impl BlockLike { + fn is_block(self) -> bool { + self == BlockLike::Block + } +} + +fn opt_visibility(p: &mut Parser) -> bool { + match p.current() { + T![pub] => { + let m = p.start(); + p.bump(T![pub]); + if p.at(T!['(']) { + match p.nth(1) { + // test crate_visibility + // pub(crate) struct S; + // pub(self) struct S; + // pub(self) struct S; + // pub(self) struct S; + T![crate] | T![self] | T![super] => { + p.bump_any(); + p.bump_any(); + p.expect(T![')']); + } + T![in] => { + p.bump_any(); + p.bump_any(); + paths::use_path(p); + p.expect(T![')']); + } + _ => (), + } + } + m.complete(p, VISIBILITY); + } + // test crate_keyword_vis + // crate fn main() { } + // struct S { crate field: u32 } + // struct T(crate u32); + // + // test crate_keyword_path + // fn foo() { crate::foo(); } + T![crate] if !p.nth_at(1, T![::]) => { + let m = p.start(); + p.bump(T![crate]); + m.complete(p, VISIBILITY); + } + _ => return false, + } + true +} + +fn opt_rename(p: &mut Parser) { + if 
p.at(T![as]) { + let m = p.start(); + p.bump(T![as]); + if !p.eat(T![_]) { + name(p); + } + m.complete(p, RENAME); + } +} + +fn abi(p: &mut Parser) { + assert!(p.at(T![extern])); + let abi = p.start(); + p.bump(T![extern]); + match p.current() { + STRING | RAW_STRING => p.bump_any(), + _ => (), + } + abi.complete(p, ABI); +} + +fn opt_ret_type(p: &mut Parser) -> bool { + if p.at(T![->]) { + let m = p.start(); + p.bump(T![->]); + types::type_no_bounds(p); + m.complete(p, RET_TYPE); + true + } else { + false + } +} + +fn name_r(p: &mut Parser, recovery: TokenSet) { + if p.at(IDENT) { + let m = p.start(); + p.bump(IDENT); + m.complete(p, NAME); + } else { + p.err_recover("expected a name", recovery); + } +} + +fn name(p: &mut Parser) { + name_r(p, TokenSet::EMPTY) +} + +fn name_ref(p: &mut Parser) { + if p.at(IDENT) { + let m = p.start(); + p.bump(IDENT); + m.complete(p, NAME_REF); + } else { + p.err_and_bump("expected identifier"); + } +} + +fn name_ref_or_index(p: &mut Parser) { + assert!(p.at(IDENT) || p.at(INT_NUMBER)); + let m = p.start(); + p.bump_any(); + m.complete(p, NAME_REF); +} + +fn error_block(p: &mut Parser, message: &str) { + assert!(p.at(T!['{'])); + let m = p.start(); + p.error(message); + p.bump(T!['{']); + expressions::expr_block_contents(p); + p.eat(T!['}']); + m.complete(p, ERROR); +} diff --git a/crates/parser/src/grammar/attributes.rs b/crates/parser/src/grammar/attributes.rs new file mode 100644 index 0000000000..dab0f62c3c --- /dev/null +++ b/crates/parser/src/grammar/attributes.rs @@ -0,0 +1,48 @@ +//! FIXME: write short doc here + +use super::*; + +pub(super) fn inner_attrs(p: &mut Parser) { + while p.at(T![#]) && p.nth(1) == T![!] 
{ + attr(p, true) + } +} + +pub(super) fn outer_attrs(p: &mut Parser) { + while p.at(T![#]) { + attr(p, false) + } +} + +fn attr(p: &mut Parser, inner: bool) { + let attr = p.start(); + assert!(p.at(T![#])); + p.bump(T![#]); + + if inner { + assert!(p.at(T![!])); + p.bump(T![!]); + } + + if p.eat(T!['[']) { + paths::use_path(p); + + match p.current() { + T![=] => { + p.bump(T![=]); + if expressions::literal(p).is_none() { + p.error("expected literal"); + } + } + T!['('] | T!['['] | T!['{'] => items::token_tree(p), + _ => {} + } + + if !p.eat(T![']']) { + p.error("expected `]`"); + } + } else { + p.error("expected `[`"); + } + attr.complete(p, ATTR); +} diff --git a/crates/parser/src/grammar/expressions.rs b/crates/parser/src/grammar/expressions.rs new file mode 100644 index 0000000000..e72929f8cc --- /dev/null +++ b/crates/parser/src/grammar/expressions.rs @@ -0,0 +1,651 @@ +//! FIXME: write short doc here + +mod atom; + +pub(crate) use self::atom::{block_expr, match_arm_list}; +pub(super) use self::atom::{literal, LITERAL_FIRST}; +use super::*; + +pub(super) enum StmtWithSemi { + Yes, + No, + Optional, +} + +const EXPR_FIRST: TokenSet = LHS_FIRST; + +pub(super) fn expr(p: &mut Parser) -> (Option, BlockLike) { + let r = Restrictions { forbid_structs: false, prefer_stmt: false }; + expr_bp(p, r, 1) +} + +pub(super) fn expr_with_attrs(p: &mut Parser) -> bool { + let m = p.start(); + let has_attrs = p.at(T![#]); + attributes::outer_attrs(p); + + let (cm, _block_like) = expr(p); + let success = cm.is_some(); + + match (has_attrs, cm) { + (true, Some(cm)) => { + let kind = cm.kind(); + cm.undo_completion(p).abandon(p); + m.complete(p, kind); + } + _ => m.abandon(p), + } + + success +} + +pub(super) fn expr_stmt(p: &mut Parser) -> (Option, BlockLike) { + let r = Restrictions { forbid_structs: false, prefer_stmt: true }; + expr_bp(p, r, 1) +} + +fn expr_no_struct(p: &mut Parser) { + let r = Restrictions { forbid_structs: true, prefer_stmt: false }; + expr_bp(p, r, 1); +} 
+ +fn is_expr_stmt_attr_allowed(kind: SyntaxKind) -> bool { + let forbid = matches!(kind, BIN_EXPR | RANGE_EXPR); + !forbid +} + +pub(super) fn stmt(p: &mut Parser, with_semi: StmtWithSemi) { + let m = p.start(); + // test attr_on_expr_stmt + // fn foo() { + // #[A] foo(); + // #[B] bar!{} + // #[C] #[D] {} + // #[D] return (); + // } + let has_attrs = p.at(T![#]); + attributes::outer_attrs(p); + + if p.at(T![let]) { + let_stmt(p, m, with_semi); + return; + } + + // test block_items + // fn a() { fn b() {} } + let m = match items::maybe_item(p, m) { + Ok(()) => return, + Err(m) => m, + }; + + let (cm, blocklike) = expr_stmt(p); + let kind = cm.as_ref().map(|cm| cm.kind()).unwrap_or(ERROR); + + if has_attrs && !is_expr_stmt_attr_allowed(kind) { + // test_err attr_on_expr_not_allowed + // fn foo() { + // #[A] 1 + 2; + // #[B] if true {}; + // } + p.error(format!("attributes are not allowed on {:?}", kind)); + } + + if p.at(T!['}']) { + // test attr_on_last_expr_in_block + // fn foo() { + // { #[A] bar!()? } + // #[B] &() + // } + if let Some(cm) = cm { + cm.undo_completion(p).abandon(p); + m.complete(p, kind); + } else { + m.abandon(p); + } + } else { + // test no_semi_after_block + // fn foo() { + // if true {} + // loop {} + // match () {} + // while true {} + // for _ in () {} + // {} + // {} + // macro_rules! test { + // () => {} + // } + // test!{} + // } + + match with_semi { + StmtWithSemi::Yes => { + if blocklike.is_block() { + p.eat(T![;]); + } else { + p.expect(T![;]); + } + } + StmtWithSemi::No => {} + StmtWithSemi::Optional => { + if p.at(T![;]) { + p.eat(T![;]); + } + } + } + + m.complete(p, EXPR_STMT); + } + + // test let_stmt + // fn foo() { + // let a; + // let b: i32; + // let c = 92; + // let d: i32 = 92; + // let e: !; + // let _: ! 
= {}; + // let f = #[attr]||{}; + // } + fn let_stmt(p: &mut Parser, m: Marker, with_semi: StmtWithSemi) { + assert!(p.at(T![let])); + p.bump(T![let]); + patterns::pattern(p); + if p.at(T![:]) { + types::ascription(p); + } + if p.eat(T![=]) { + expressions::expr_with_attrs(p); + } + + match with_semi { + StmtWithSemi::Yes => { + p.expect(T![;]); + } + StmtWithSemi::No => {} + StmtWithSemi::Optional => { + if p.at(T![;]) { + p.eat(T![;]); + } + } + } + m.complete(p, LET_STMT); + } +} + +pub(super) fn expr_block_contents(p: &mut Parser) { + // This is checked by a validator + attributes::inner_attrs(p); + + while !p.at(EOF) && !p.at(T!['}']) { + // test nocontentexpr + // fn foo(){ + // ;;;some_expr();;;;{;;;};;;;Ok(()) + // } + + // test nocontentexpr_after_item + // fn simple_function() { + // enum LocalEnum { + // One, + // Two, + // }; + // fn f() {}; + // struct S {}; + // } + + if p.at(T![;]) { + p.bump(T![;]); + continue; + } + + stmt(p, StmtWithSemi::Yes) + } +} + +#[derive(Clone, Copy)] +struct Restrictions { + forbid_structs: bool, + prefer_stmt: bool, +} + +/// Binding powers of operators for a Pratt parser. 
+/// +/// See https://www.oilshell.org/blog/2016/11/03.html +#[rustfmt::skip] +fn current_op(p: &Parser) -> (u8, SyntaxKind) { + const NOT_AN_OP: (u8, SyntaxKind) = (0, T![@]); + match p.current() { + T![|] if p.at(T![||]) => (3, T![||]), + T![|] if p.at(T![|=]) => (1, T![|=]), + T![|] => (6, T![|]), + T![>] if p.at(T![>>=]) => (1, T![>>=]), + T![>] if p.at(T![>>]) => (9, T![>>]), + T![>] if p.at(T![>=]) => (5, T![>=]), + T![>] => (5, T![>]), + T![=] if p.at(T![=>]) => NOT_AN_OP, + T![=] if p.at(T![==]) => (5, T![==]), + T![=] => (1, T![=]), + T![<] if p.at(T![<=]) => (5, T![<=]), + T![<] if p.at(T![<<=]) => (1, T![<<=]), + T![<] if p.at(T![<<]) => (9, T![<<]), + T![<] => (5, T![<]), + T![+] if p.at(T![+=]) => (1, T![+=]), + T![+] => (10, T![+]), + T![^] if p.at(T![^=]) => (1, T![^=]), + T![^] => (7, T![^]), + T![%] if p.at(T![%=]) => (1, T![%=]), + T![%] => (11, T![%]), + T![&] if p.at(T![&=]) => (1, T![&=]), + T![&] if p.at(T![&&]) => (4, T![&&]), + T![&] => (8, T![&]), + T![/] if p.at(T![/=]) => (1, T![/=]), + T![/] => (11, T![/]), + T![*] if p.at(T![*=]) => (1, T![*=]), + T![*] => (11, T![*]), + T![.] if p.at(T![..=]) => (2, T![..=]), + T![.] if p.at(T![..]) => (2, T![..]), + T![!] if p.at(T![!=]) => (5, T![!=]), + T![-] if p.at(T![-=]) => (1, T![-=]), + T![-] => (10, T![-]), + T![as] => (12, T![as]), + + _ => NOT_AN_OP + } +} + +// Parses expression with binding power of at least bp. 
+fn expr_bp(p: &mut Parser, mut r: Restrictions, bp: u8) -> (Option, BlockLike) { + let mut lhs = match lhs(p, r) { + Some((lhs, blocklike)) => { + // test stmt_bin_expr_ambiguity + // fn foo() { + // let _ = {1} & 2; + // {1} &2; + // } + if r.prefer_stmt && blocklike.is_block() { + return (Some(lhs), BlockLike::Block); + } + lhs + } + None => return (None, BlockLike::NotBlock), + }; + + loop { + let is_range = p.at(T![..]) || p.at(T![..=]); + let (op_bp, op) = current_op(p); + if op_bp < bp { + break; + } + // test as_precedence + // fn foo() { + // let _ = &1 as *const i32; + // } + if p.at(T![as]) { + lhs = cast_expr(p, lhs); + continue; + } + let m = lhs.precede(p); + p.bump(op); + + // test binop_resets_statementness + // fn foo() { + // v = {1}&2; + // } + r = Restrictions { prefer_stmt: false, ..r }; + + if is_range { + // test postfix_range + // fn foo() { + // let x = 1..; + // match 1.. { _ => () }; + // match a.b()..S { _ => () }; + // } + let has_trailing_expression = + p.at_ts(EXPR_FIRST) && !(r.forbid_structs && p.at(T!['{'])); + if !has_trailing_expression { + // no RHS + lhs = m.complete(p, RANGE_EXPR); + break; + } + } + + expr_bp(p, Restrictions { prefer_stmt: false, ..r }, op_bp + 1); + lhs = m.complete(p, if is_range { RANGE_EXPR } else { BIN_EXPR }); + } + (Some(lhs), BlockLike::NotBlock) +} + +const LHS_FIRST: TokenSet = + atom::ATOM_EXPR_FIRST.union(token_set![T![&], T![*], T![!], T![.], T![-]]); + +fn lhs(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> { + let m; + let kind = match p.current() { + // test ref_expr + // fn foo() { + // // reference operator + // let _ = &1; + // let _ = &mut &f(); + // let _ = &raw; + // let _ = &raw.0; + // // raw reference operator + // let _ = &raw mut foo; + // let _ = &raw const foo; + // } + T![&] => { + m = p.start(); + p.bump(T![&]); + if p.at(IDENT) + && p.at_contextual_kw("raw") + && (p.nth_at(1, T![mut]) || p.nth_at(1, T![const])) + { + p.bump_remap(T![raw]); + 
p.bump_any(); + } else { + p.eat(T![mut]); + } + REF_EXPR + } + // test unary_expr + // fn foo() { + // **&1; + // !!true; + // --1; + // } + T![*] | T![!] | T![-] => { + m = p.start(); + p.bump_any(); + PREFIX_EXPR + } + _ => { + // test full_range_expr + // fn foo() { xs[..]; } + for &op in [T![..=], T![..]].iter() { + if p.at(op) { + m = p.start(); + p.bump(op); + if p.at_ts(EXPR_FIRST) && !(r.forbid_structs && p.at(T!['{'])) { + expr_bp(p, r, 2); + } + return Some((m.complete(p, RANGE_EXPR), BlockLike::NotBlock)); + } + } + + // test expression_after_block + // fn foo() { + // let mut p = F{x: 5}; + // {p}.x = 10; + // } + // + let (lhs, blocklike) = atom::atom_expr(p, r)?; + return Some(postfix_expr(p, lhs, blocklike, !(r.prefer_stmt && blocklike.is_block()))); + } + }; + // parse the interior of the unary expression + expr_bp(p, r, 255); + Some((m.complete(p, kind), BlockLike::NotBlock)) +} + +fn postfix_expr( + p: &mut Parser, + mut lhs: CompletedMarker, + // Calls are disallowed if the type is a block and we prefer statements because the call cannot be disambiguated from a tuple + // E.g. `while true {break}();` is parsed as + // `while true {break}; ();` + mut block_like: BlockLike, + mut allow_calls: bool, +) -> (CompletedMarker, BlockLike) { + loop { + lhs = match p.current() { + // test stmt_postfix_expr_ambiguity + // fn foo() { + // match () { + // _ => {} + // () => {} + // [] => {} + // } + // } + T!['('] if allow_calls => call_expr(p, lhs), + T!['['] if allow_calls => index_expr(p, lhs), + T![.] => match postfix_dot_expr(p, lhs) { + Ok(it) => it, + Err(it) => { + lhs = it; + break; + } + }, + T![?] 
=> try_expr(p, lhs), + _ => break, + }; + allow_calls = true; + block_like = BlockLike::NotBlock; + } + return (lhs, block_like); + + fn postfix_dot_expr( + p: &mut Parser, + lhs: CompletedMarker, + ) -> Result { + assert!(p.at(T![.])); + if p.nth(1) == IDENT && (p.nth(2) == T!['('] || p.nth_at(2, T![::])) { + return Ok(method_call_expr(p, lhs)); + } + + // test await_expr + // fn foo() { + // x.await; + // x.0.await; + // x.0().await?.hello(); + // } + if p.nth(1) == T![await] { + let m = lhs.precede(p); + p.bump(T![.]); + p.bump(T![await]); + return Ok(m.complete(p, AWAIT_EXPR)); + } + + if p.at(T![..=]) || p.at(T![..]) { + return Err(lhs); + } + + Ok(field_expr(p, lhs)) + } +} + +// test call_expr +// fn foo() { +// let _ = f(); +// let _ = f()(1)(1, 2,); +// let _ = f(::func()); +// f(::func()); +// } +fn call_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { + assert!(p.at(T!['('])); + let m = lhs.precede(p); + arg_list(p); + m.complete(p, CALL_EXPR) +} + +// test index_expr +// fn foo() { +// x[1][2]; +// } +fn index_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { + assert!(p.at(T!['['])); + let m = lhs.precede(p); + p.bump(T!['[']); + expr(p); + p.expect(T![']']); + m.complete(p, INDEX_EXPR) +} + +// test method_call_expr +// fn foo() { +// x.foo(); +// y.bar::(1, 2,); +// } +fn method_call_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { + assert!(p.at(T![.]) && p.nth(1) == IDENT && (p.nth(2) == T!['('] || p.nth_at(2, T![::]))); + let m = lhs.precede(p); + p.bump_any(); + name_ref(p); + type_args::opt_generic_arg_list(p, true); + if p.at(T!['(']) { + arg_list(p); + } + m.complete(p, METHOD_CALL_EXPR) +} + +// test field_expr +// fn foo() { +// x.foo; +// x.0.bar; +// x.0(); +// } + +// test_err bad_tuple_index_expr +// fn foo() { +// x.0.; +// x.1i32; +// x.0x01; +// } +fn field_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { + assert!(p.at(T![.])); + let m = lhs.precede(p); + p.bump(T![.]); + if 
p.at(IDENT) || p.at(INT_NUMBER) { + name_ref_or_index(p) + } else if p.at(FLOAT_NUMBER) { + // FIXME: How to recover and instead parse INT + T![.]? + p.bump_any(); + } else { + p.error("expected field name or number") + } + m.complete(p, FIELD_EXPR) +} + +// test try_expr +// fn foo() { +// x?; +// } +fn try_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { + assert!(p.at(T![?])); + let m = lhs.precede(p); + p.bump(T![?]); + m.complete(p, TRY_EXPR) +} + +// test cast_expr +// fn foo() { +// 82 as i32; +// 81 as i8 + 1; +// 79 as i16 - 1; +// 0x36 as u8 <= 0x37; +// } +fn cast_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { + assert!(p.at(T![as])); + let m = lhs.precede(p); + p.bump(T![as]); + // Use type_no_bounds(), because cast expressions are not + // allowed to have bounds. + types::type_no_bounds(p); + m.complete(p, CAST_EXPR) +} + +fn arg_list(p: &mut Parser) { + assert!(p.at(T!['('])); + let m = p.start(); + p.bump(T!['(']); + while !p.at(T![')']) && !p.at(EOF) { + // test arg_with_attr + // fn main() { + // foo(#[attr] 92) + // } + if !expr_with_attrs(p) { + break; + } + if !p.at(T![')']) && !p.expect(T![,]) { + break; + } + } + p.eat(T![')']); + m.complete(p, ARG_LIST); +} + +// test path_expr +// fn foo() { +// let _ = a; +// let _ = a::b; +// let _ = ::a::; +// let _ = format!(); +// } +fn path_expr(p: &mut Parser, r: Restrictions) -> (CompletedMarker, BlockLike) { + assert!(paths::is_path_start(p)); + let m = p.start(); + paths::expr_path(p); + match p.current() { + T!['{'] if !r.forbid_structs => { + record_expr_field_list(p); + (m.complete(p, RECORD_EXPR), BlockLike::NotBlock) + } + T![!] 
if !p.at(T![!=]) => { + let block_like = items::macro_call_after_excl(p); + (m.complete(p, MACRO_CALL), block_like) + } + _ => (m.complete(p, PATH_EXPR), BlockLike::NotBlock), + } +} + +// test record_lit +// fn foo() { +// S {}; +// S { x, y: 32, }; +// S { x, y: 32, ..Default::default() }; +// TupleStruct { 0: 1 }; +// } +pub(crate) fn record_expr_field_list(p: &mut Parser) { + assert!(p.at(T!['{'])); + let m = p.start(); + p.bump(T!['{']); + while !p.at(EOF) && !p.at(T!['}']) { + let m = p.start(); + // test record_literal_field_with_attr + // fn main() { + // S { #[cfg(test)] field: 1 } + // } + attributes::outer_attrs(p); + + match p.current() { + IDENT | INT_NUMBER => { + // test_err record_literal_before_ellipsis_recovery + // fn main() { + // S { field ..S::default() } + // } + if p.nth_at(1, T![:]) || p.nth_at(1, T![..]) { + name_ref_or_index(p); + p.expect(T![:]); + } + expr(p); + m.complete(p, RECORD_EXPR_FIELD); + } + T![.] if p.at(T![..]) => { + m.abandon(p); + p.bump(T![..]); + expr(p); + } + T!['{'] => { + error_block(p, "expected a field"); + m.abandon(p); + } + _ => { + p.err_and_bump("expected identifier"); + m.abandon(p); + } + } + if !p.at(T!['}']) { + p.expect(T![,]); + } + } + p.expect(T!['}']); + m.complete(p, RECORD_EXPR_FIELD_LIST); +} diff --git a/crates/parser/src/grammar/expressions/atom.rs b/crates/parser/src/grammar/expressions/atom.rs new file mode 100644 index 0000000000..ba6dd2fbcc --- /dev/null +++ b/crates/parser/src/grammar/expressions/atom.rs @@ -0,0 +1,611 @@ +//! 
FIXME: write short doc here + +use super::*; + +// test expr_literals +// fn foo() { +// let _ = true; +// let _ = false; +// let _ = 1; +// let _ = 2.0; +// let _ = b'a'; +// let _ = 'b'; +// let _ = "c"; +// let _ = r"d"; +// let _ = b"e"; +// let _ = br"f"; +// } +pub(crate) const LITERAL_FIRST: TokenSet = token_set![ + TRUE_KW, + FALSE_KW, + INT_NUMBER, + FLOAT_NUMBER, + BYTE, + CHAR, + STRING, + RAW_STRING, + BYTE_STRING, + RAW_BYTE_STRING +]; + +pub(crate) fn literal(p: &mut Parser) -> Option { + if !p.at_ts(LITERAL_FIRST) { + return None; + } + let m = p.start(); + p.bump_any(); + Some(m.complete(p, LITERAL)) +} + +// E.g. for after the break in `if break {}`, this should not match +pub(super) const ATOM_EXPR_FIRST: TokenSet = + LITERAL_FIRST.union(paths::PATH_FIRST).union(token_set![ + T!['('], + T!['{'], + T!['['], + L_DOLLAR, + T![|], + T![move], + T![box], + T![if], + T![while], + T![match], + T![unsafe], + T![return], + T![break], + T![continue], + T![async], + T![try], + T![loop], + T![for], + LIFETIME, + ]); + +const EXPR_RECOVERY_SET: TokenSet = token_set![LET_KW, R_DOLLAR]; + +pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> { + if let Some(m) = literal(p) { + return Some((m, BlockLike::NotBlock)); + } + if paths::is_path_start(p) { + return Some(path_expr(p, r)); + } + let la = p.nth(1); + let done = match p.current() { + T!['('] => tuple_expr(p), + T!['['] => array_expr(p), + L_DOLLAR => meta_var_expr(p), + T![|] => closure_expr(p), + T![move] if la == T![|] => closure_expr(p), + T![async] if la == T![|] || (la == T![move] && p.nth(2) == T![|]) => closure_expr(p), + T![if] => if_expr(p), + + T![loop] => loop_expr(p, None), + T![box] => box_expr(p, None), + T![for] => for_expr(p, None), + T![while] => while_expr(p, None), + T![try] => try_block_expr(p, None), + LIFETIME if la == T![:] => { + let m = p.start(); + label(p); + match p.current() { + T![loop] => loop_expr(p, Some(m)), + T![for] => 
for_expr(p, Some(m)), + T![while] => while_expr(p, Some(m)), + // test labeled_block + // fn f() { 'label: {}; } + T!['{'] => { + block_expr(p); + m.complete(p, EFFECT_EXPR) + } + _ => { + // test_err misplaced_label_err + // fn main() { + // 'loop: impl + // } + p.error("expected a loop"); + m.complete(p, ERROR); + return None; + } + } + } + T![async] if la == T!['{'] || (la == T![move] && p.nth(2) == T!['{']) => { + let m = p.start(); + p.bump(T![async]); + p.eat(T![move]); + block_expr(p); + m.complete(p, EFFECT_EXPR) + } + T![match] => match_expr(p), + // test unsafe_block + // fn f() { unsafe { } } + T![unsafe] if la == T!['{'] => { + let m = p.start(); + p.bump(T![unsafe]); + block_expr(p); + m.complete(p, EFFECT_EXPR) + } + T!['{'] => { + // test for_range_from + // fn foo() { + // for x in 0 .. { + // break; + // } + // } + block_expr_unchecked(p) + } + T![return] => return_expr(p), + T![continue] => continue_expr(p), + T![break] => break_expr(p, r), + _ => { + p.err_recover("expected expression", EXPR_RECOVERY_SET); + return None; + } + }; + let blocklike = match done.kind() { + IF_EXPR | WHILE_EXPR | FOR_EXPR | LOOP_EXPR | MATCH_EXPR | BLOCK_EXPR | EFFECT_EXPR => { + BlockLike::Block + } + _ => BlockLike::NotBlock, + }; + Some((done, blocklike)) +} + +// test tuple_expr +// fn foo() { +// (); +// (1); +// (1,); +// } +fn tuple_expr(p: &mut Parser) -> CompletedMarker { + assert!(p.at(T!['('])); + let m = p.start(); + p.expect(T!['(']); + + let mut saw_comma = false; + let mut saw_expr = false; + while !p.at(EOF) && !p.at(T![')']) { + saw_expr = true; + if !p.at_ts(EXPR_FIRST) { + p.error("expected expression"); + break; + } + expr(p); + if !p.at(T![')']) { + saw_comma = true; + p.expect(T![,]); + } + } + p.expect(T![')']); + m.complete(p, if saw_expr && !saw_comma { PAREN_EXPR } else { TUPLE_EXPR }) +} + +// test array_expr +// fn foo() { +// []; +// [1]; +// [1, 2,]; +// [1; 2]; +// } +fn array_expr(p: &mut Parser) -> CompletedMarker { + 
assert!(p.at(T!['['])); + let m = p.start(); + + let mut n_exprs = 0u32; + let mut has_semi = false; + + p.bump(T!['[']); + while !p.at(EOF) && !p.at(T![']']) { + n_exprs += 1; + + // test array_attrs + // const A: &[i64] = &[1, #[cfg(test)] 2]; + if !expr_with_attrs(p) { + break; + } + + if n_exprs == 1 && p.eat(T![;]) { + has_semi = true; + continue; + } + + if has_semi || !p.at(T![']']) && !p.expect(T![,]) { + break; + } + } + p.expect(T![']']); + + m.complete(p, ARRAY_EXPR) +} + +// test lambda_expr +// fn foo() { +// || (); +// || -> i32 { 92 }; +// |x| x; +// move |x: i32,| x; +// async || {}; +// move || {}; +// async move || {}; +// } +fn closure_expr(p: &mut Parser) -> CompletedMarker { + assert!( + p.at(T![|]) + || (p.at(T![move]) && p.nth(1) == T![|]) + || (p.at(T![async]) && p.nth(1) == T![|]) + || (p.at(T![async]) && p.nth(1) == T![move] && p.nth(2) == T![|]) + ); + let m = p.start(); + p.eat(T![async]); + p.eat(T![move]); + params::param_list_closure(p); + if opt_ret_type(p) { + // test lambda_ret_block + // fn main() { || -> i32 { 92 }(); } + block_expr(p); + } else { + if p.at_ts(EXPR_FIRST) { + expr(p); + } else { + p.error("expected expression"); + } + } + m.complete(p, CLOSURE_EXPR) +} + +// test if_expr +// fn foo() { +// if true {}; +// if true {} else {}; +// if true {} else if false {} else {}; +// if S {}; +// if { true } { } else { }; +// } +fn if_expr(p: &mut Parser) -> CompletedMarker { + assert!(p.at(T![if])); + let m = p.start(); + p.bump(T![if]); + condition(p); + block_expr(p); + if p.at(T![else]) { + p.bump(T![else]); + if p.at(T![if]) { + if_expr(p); + } else { + block_expr(p); + } + } + m.complete(p, IF_EXPR) +} + +// test label +// fn foo() { +// 'a: loop {} +// 'b: while true {} +// 'c: for x in () {} +// } +fn label(p: &mut Parser) { + assert!(p.at(LIFETIME) && p.nth(1) == T![:]); + let m = p.start(); + p.bump(LIFETIME); + p.bump_any(); + m.complete(p, LABEL); +} + +// test loop_expr +// fn foo() { +// loop {}; +// } +fn 
loop_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker { + assert!(p.at(T![loop])); + let m = m.unwrap_or_else(|| p.start()); + p.bump(T![loop]); + block_expr(p); + m.complete(p, LOOP_EXPR) +} + +// test while_expr +// fn foo() { +// while true {}; +// while let Some(x) = it.next() {}; +// while { true } {}; +// } +fn while_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker { + assert!(p.at(T![while])); + let m = m.unwrap_or_else(|| p.start()); + p.bump(T![while]); + condition(p); + block_expr(p); + m.complete(p, WHILE_EXPR) +} + +// test for_expr +// fn foo() { +// for x in [] {}; +// } +fn for_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker { + assert!(p.at(T![for])); + let m = m.unwrap_or_else(|| p.start()); + p.bump(T![for]); + patterns::pattern(p); + p.expect(T![in]); + expr_no_struct(p); + block_expr(p); + m.complete(p, FOR_EXPR) +} + +// test cond +// fn foo() { if let Some(_) = None {} } +// fn bar() { +// if let Some(_) | Some(_) = None {} +// if let | Some(_) = None {} +// while let Some(_) | Some(_) = None {} +// while let | Some(_) = None {} +// } +fn condition(p: &mut Parser) { + let m = p.start(); + if p.eat(T![let]) { + patterns::pattern_top(p); + p.expect(T![=]); + } + expr_no_struct(p); + m.complete(p, CONDITION); +} + +// test match_expr +// fn foo() { +// match () { }; +// match S {}; +// match { } { _ => () }; +// match { S {} } {}; +// } +fn match_expr(p: &mut Parser) -> CompletedMarker { + assert!(p.at(T![match])); + let m = p.start(); + p.bump(T![match]); + expr_no_struct(p); + if p.at(T!['{']) { + match_arm_list(p); + } else { + p.error("expected `{`") + } + m.complete(p, MATCH_EXPR) +} + +pub(crate) fn match_arm_list(p: &mut Parser) { + assert!(p.at(T!['{'])); + let m = p.start(); + p.eat(T!['{']); + + // test match_arms_inner_attribute + // fn foo() { + // match () { + // #![doc("Inner attribute")] + // #![doc("Can be")] + // #![doc("Stacked")] + // _ => (), + // } + // } + attributes::inner_attrs(p); + + while !p.at(EOF) && 
!p.at(T!['}']) { + if p.at(T!['{']) { + error_block(p, "expected match arm"); + continue; + } + + // test match_arms_commas + // fn foo() { + // match () { + // _ => (), + // _ => {} + // _ => () + // } + // } + if match_arm(p).is_block() { + p.eat(T![,]); + } else if !p.at(T!['}']) { + p.expect(T![,]); + } + } + p.expect(T!['}']); + m.complete(p, MATCH_ARM_LIST); +} + +// test match_arm +// fn foo() { +// match () { +// _ => (), +// _ if Test > Test{field: 0} => (), +// X | Y if Z => (), +// | X | Y if Z => (), +// | X => (), +// }; +// } +fn match_arm(p: &mut Parser) -> BlockLike { + let m = p.start(); + // test match_arms_outer_attributes + // fn foo() { + // match () { + // #[cfg(feature = "some")] + // _ => (), + // #[cfg(feature = "other")] + // _ => (), + // #[cfg(feature = "many")] + // #[cfg(feature = "attributes")] + // #[cfg(feature = "before")] + // _ => (), + // } + // } + attributes::outer_attrs(p); + + patterns::pattern_top_r(p, TokenSet::EMPTY); + if p.at(T![if]) { + match_guard(p); + } + p.expect(T![=>]); + let blocklike = expr_stmt(p).1; + m.complete(p, MATCH_ARM); + blocklike +} + +// test match_guard +// fn foo() { +// match () { +// _ if foo => (), +// } +// } +fn match_guard(p: &mut Parser) -> CompletedMarker { + assert!(p.at(T![if])); + let m = p.start(); + p.bump(T![if]); + expr(p); + m.complete(p, MATCH_GUARD) +} + +// test block +// fn a() {} +// fn b() { let _ = 1; } +// fn c() { 1; 2; } +// fn d() { 1; 2 } +pub(crate) fn block_expr(p: &mut Parser) { + if !p.at(T!['{']) { + p.error("expected a block"); + return; + } + block_expr_unchecked(p); +} + +fn block_expr_unchecked(p: &mut Parser) -> CompletedMarker { + assert!(p.at(T!['{'])); + let m = p.start(); + p.bump(T!['{']); + expr_block_contents(p); + p.expect(T!['}']); + m.complete(p, BLOCK_EXPR) +} + +// test return_expr +// fn foo() { +// return; +// return 92; +// } +fn return_expr(p: &mut Parser) -> CompletedMarker { + assert!(p.at(T![return])); + let m = p.start(); + 
p.bump(T![return]); + if p.at_ts(EXPR_FIRST) { + expr(p); + } + m.complete(p, RETURN_EXPR) +} + +// test continue_expr +// fn foo() { +// loop { +// continue; +// continue 'l; +// } +// } +fn continue_expr(p: &mut Parser) -> CompletedMarker { + assert!(p.at(T![continue])); + let m = p.start(); + p.bump(T![continue]); + p.eat(LIFETIME); + m.complete(p, CONTINUE_EXPR) +} + +// test break_expr +// fn foo() { +// loop { +// break; +// break 'l; +// break 92; +// break 'l 92; +// } +// } +fn break_expr(p: &mut Parser, r: Restrictions) -> CompletedMarker { + assert!(p.at(T![break])); + let m = p.start(); + p.bump(T![break]); + p.eat(LIFETIME); + // test break_ambiguity + // fn foo(){ + // if break {} + // while break {} + // for i in break {} + // match break {} + // } + if p.at_ts(EXPR_FIRST) && !(r.forbid_structs && p.at(T!['{'])) { + expr(p); + } + m.complete(p, BREAK_EXPR) +} + +// test try_block_expr +// fn foo() { +// let _ = try {}; +// } +fn try_block_expr(p: &mut Parser, m: Option) -> CompletedMarker { + assert!(p.at(T![try])); + let m = m.unwrap_or_else(|| p.start()); + // Special-case `try!` as macro. 
+ // This is a hack until we do proper edition support + if p.nth_at(1, T![!]) { + // test try_macro_fallback + // fn foo() { try!(Ok(())); } + let path = p.start(); + let path_segment = p.start(); + let name_ref = p.start(); + p.bump_remap(IDENT); + name_ref.complete(p, NAME_REF); + path_segment.complete(p, PATH_SEGMENT); + path.complete(p, PATH); + let _block_like = items::macro_call_after_excl(p); + return m.complete(p, MACRO_CALL); + } + + p.bump(T![try]); + block_expr(p); + m.complete(p, EFFECT_EXPR) +} + +// test box_expr +// fn foo() { +// let x = box 1i32; +// let y = (box 1i32, box 2i32); +// let z = Foo(box 1i32, box 2i32); +// } +fn box_expr(p: &mut Parser, m: Option) -> CompletedMarker { + assert!(p.at(T![box])); + let m = m.unwrap_or_else(|| p.start()); + p.bump(T![box]); + if p.at_ts(EXPR_FIRST) { + expr(p); + } + m.complete(p, BOX_EXPR) +} + +/// Expression from `$var` macro expansion, wrapped in dollars +fn meta_var_expr(p: &mut Parser) -> CompletedMarker { + assert!(p.at(L_DOLLAR)); + let m = p.start(); + p.bump(L_DOLLAR); + let (completed, _is_block) = + expr_bp(p, Restrictions { forbid_structs: false, prefer_stmt: false }, 1); + + match (completed, p.current()) { + (Some(it), R_DOLLAR) => { + p.bump(R_DOLLAR); + m.abandon(p); + it + } + _ => { + while !p.at(R_DOLLAR) { + p.bump_any() + } + p.bump(R_DOLLAR); + m.complete(p, ERROR) + } + } +} diff --git a/crates/parser/src/grammar/items.rs b/crates/parser/src/grammar/items.rs new file mode 100644 index 0000000000..8fd8f3b800 --- /dev/null +++ b/crates/parser/src/grammar/items.rs @@ -0,0 +1,444 @@ +//! FIXME: write short doc here + +mod consts; +mod adt; +mod traits; +mod use_item; + +pub(crate) use self::{ + adt::{record_field_list, variant_list}, + expressions::{match_arm_list, record_expr_field_list}, + traits::assoc_item_list, + use_item::use_tree_list, +}; +use super::*; + +// test mod_contents +// fn foo() {} +// macro_rules! foo {} +// foo::bar!(); +// super::baz! 
{} +// struct S; +pub(super) fn mod_contents(p: &mut Parser, stop_on_r_curly: bool) { + attributes::inner_attrs(p); + while !(stop_on_r_curly && p.at(T!['}']) || p.at(EOF)) { + item_or_macro(p, stop_on_r_curly) + } +} + +pub(super) const ITEM_RECOVERY_SET: TokenSet = token_set![ + FN_KW, + STRUCT_KW, + ENUM_KW, + IMPL_KW, + TRAIT_KW, + CONST_KW, + STATIC_KW, + LET_KW, + MOD_KW, + PUB_KW, + CRATE_KW, + USE_KW, + MACRO_KW, + T![;], +]; + +pub(super) fn item_or_macro(p: &mut Parser, stop_on_r_curly: bool) { + let m = p.start(); + attributes::outer_attrs(p); + let m = match maybe_item(p, m) { + Ok(()) => { + if p.at(T![;]) { + p.err_and_bump( + "expected item, found `;`\n\ + consider removing this semicolon", + ); + } + return; + } + Err(m) => m, + }; + if paths::is_use_path_start(p) { + match macro_call(p) { + BlockLike::Block => (), + BlockLike::NotBlock => { + p.expect(T![;]); + } + } + m.complete(p, MACRO_CALL); + } else { + m.abandon(p); + if p.at(T!['{']) { + error_block(p, "expected an item"); + } else if p.at(T!['}']) && !stop_on_r_curly { + let e = p.start(); + p.error("unmatched `}`"); + p.bump(T!['}']); + e.complete(p, ERROR); + } else if !p.at(EOF) && !p.at(T!['}']) { + p.err_and_bump("expected an item"); + } else { + p.error("expected an item"); + } + } +} + +pub(super) fn maybe_item(p: &mut Parser, m: Marker) -> Result<(), Marker> { + // test_err pub_expr + // fn foo() { pub 92; } + let has_visibility = opt_visibility(p); + + let m = match items_without_modifiers(p, m) { + Ok(()) => return Ok(()), + Err(m) => m, + }; + + let mut has_mods = false; + + // modifiers + has_mods |= p.eat(T![const]); + + // test_err async_without_semicolon + // fn foo() { let _ = async {} } + if p.at(T![async]) && p.nth(1) != T!['{'] && p.nth(1) != T![move] && p.nth(1) != T![|] { + p.eat(T![async]); + has_mods = true; + } + + // test_err unsafe_block_in_mod + // fn foo(){} unsafe { } fn bar(){} + if p.at(T![unsafe]) && p.nth(1) != T!['{'] { + p.eat(T![unsafe]); + has_mods = 
true; + } + + if p.at(T![extern]) { + has_mods = true; + abi(p); + } + if p.at(IDENT) && p.at_contextual_kw("auto") && p.nth(1) == T![trait] { + p.bump_remap(T![auto]); + has_mods = true; + } + + // test default_item + // default impl T for Foo {} + if p.at(IDENT) && p.at_contextual_kw("default") { + match p.nth(1) { + T![fn] | T![type] | T![const] | T![impl] => { + p.bump_remap(T![default]); + has_mods = true; + } + T![unsafe] => { + // test default_unsafe_item + // default unsafe impl T for Foo { + // default unsafe fn foo() {} + // } + if matches!(p.nth(2), T![impl] | T![fn]) { + p.bump_remap(T![default]); + p.bump(T![unsafe]); + has_mods = true; + } + } + _ => (), + } + } + + // test existential_type + // existential type Foo: Fn() -> usize; + if p.at(IDENT) && p.at_contextual_kw("existential") && p.nth(1) == T![type] { + p.bump_remap(T![existential]); + has_mods = true; + } + + // items + match p.current() { + // test fn + // fn foo() {} + T![fn] => { + fn_(p); + m.complete(p, FN); + } + + // test trait + // trait T {} + T![trait] => { + traits::trait_(p); + m.complete(p, TRAIT); + } + + T![const] => { + consts::konst(p, m); + } + + // test impl + // impl T for S {} + T![impl] => { + traits::impl_(p); + m.complete(p, IMPL); + } + + T![type] => { + type_alias(p, m); + } + _ => { + if !has_visibility && !has_mods { + return Err(m); + } else { + if has_mods { + p.error("expected existential, fn, trait or impl"); + } else { + p.error("expected an item"); + } + m.complete(p, ERROR); + } + } + } + Ok(()) +} + +fn items_without_modifiers(p: &mut Parser, m: Marker) -> Result<(), Marker> { + let la = p.nth(1); + match p.current() { + // test extern_crate + // extern crate foo; + T![extern] if la == T![crate] => extern_crate(p, m), + T![type] => { + type_alias(p, m); + } + T![mod] => mod_item(p, m), + T![struct] => { + // test struct_items + // struct Foo; + // struct Foo {} + // struct Foo(); + // struct Foo(String, usize); + // struct Foo { + // a: i32, + // b: f32, + 
// } + adt::strukt(p, m); + } + // test pub_macro_def + // pub macro m($:ident) {} + T![macro] => { + macro_def(p, m); + } + IDENT if p.at_contextual_kw("union") && p.nth(1) == IDENT => { + // test union_items + // union Foo {} + // union Foo { + // a: i32, + // b: f32, + // } + adt::union(p, m); + } + T![enum] => adt::enum_(p, m), + T![use] => use_item::use_(p, m), + T![const] if (la == IDENT || la == T![_] || la == T![mut]) => consts::konst(p, m), + T![static] => consts::static_(p, m), + // test extern_block + // extern {} + T![extern] + if la == T!['{'] || ((la == STRING || la == RAW_STRING) && p.nth(2) == T!['{']) => + { + abi(p); + extern_item_list(p); + m.complete(p, EXTERN_BLOCK); + } + _ => return Err(m), + }; + Ok(()) +} + +fn extern_crate(p: &mut Parser, m: Marker) { + assert!(p.at(T![extern])); + p.bump(T![extern]); + assert!(p.at(T![crate])); + p.bump(T![crate]); + + if p.at(T![self]) { + p.bump(T![self]); + } else { + name_ref(p); + } + + opt_rename(p); + p.expect(T![;]); + m.complete(p, EXTERN_CRATE); +} + +pub(crate) fn extern_item_list(p: &mut Parser) { + assert!(p.at(T!['{'])); + let m = p.start(); + p.bump(T!['{']); + mod_contents(p, true); + p.expect(T!['}']); + m.complete(p, EXTERN_ITEM_LIST); +} + +fn fn_(p: &mut Parser) { + assert!(p.at(T![fn])); + p.bump(T![fn]); + + name_r(p, ITEM_RECOVERY_SET); + // test function_type_params + // fn foo(){} + type_params::opt_generic_param_list(p); + + if p.at(T!['(']) { + params::param_list_fn_def(p); + } else { + p.error("expected function arguments"); + } + // test function_ret_type + // fn foo() {} + // fn bar() -> () {} + opt_ret_type(p); + + // test function_where_clause + // fn foo() where T: Copy {} + type_params::opt_where_clause(p); + + // test fn_decl + // trait T { fn foo(); } + if p.at(T![;]) { + p.bump(T![;]); + } else { + expressions::block_expr(p) + } +} + +// test type_item +// type Foo = Bar; +fn type_alias(p: &mut Parser, m: Marker) { + assert!(p.at(T![type])); + p.bump(T![type]); + + 
name(p); + + // test type_item_type_params + // type Result = (); + type_params::opt_generic_param_list(p); + + if p.at(T![:]) { + type_params::bounds(p); + } + + // test type_item_where_clause + // type Foo where Foo: Copy = (); + type_params::opt_where_clause(p); + if p.eat(T![=]) { + types::type_(p); + } + p.expect(T![;]); + m.complete(p, TYPE_ALIAS); +} + +pub(crate) fn mod_item(p: &mut Parser, m: Marker) { + assert!(p.at(T![mod])); + p.bump(T![mod]); + + name(p); + if p.at(T!['{']) { + item_list(p); + } else if !p.eat(T![;]) { + p.error("expected `;` or `{`"); + } + m.complete(p, MODULE); +} + +pub(crate) fn item_list(p: &mut Parser) { + assert!(p.at(T!['{'])); + let m = p.start(); + p.bump(T!['{']); + mod_contents(p, true); + p.expect(T!['}']); + m.complete(p, ITEM_LIST); +} + +// test macro_def +// macro m { ($i:ident) => {} } +// macro m($i:ident) {} +fn macro_def(p: &mut Parser, m: Marker) { + p.expect(T![macro]); + name_r(p, ITEM_RECOVERY_SET); + if p.at(T!['{']) { + token_tree(p); + } else if !p.at(T!['(']) { + p.error("unmatched `(`"); + } else { + let m = p.start(); + token_tree(p); + match p.current() { + T!['{'] | T!['['] | T!['('] => token_tree(p), + _ => p.error("expected `{`, `[`, `(`"), + } + m.complete(p, TOKEN_TREE); + } + + m.complete(p, MACRO_DEF); +} + +fn macro_call(p: &mut Parser) -> BlockLike { + assert!(paths::is_use_path_start(p)); + paths::use_path(p); + macro_call_after_excl(p) +} + +pub(super) fn macro_call_after_excl(p: &mut Parser) -> BlockLike { + p.expect(T![!]); + if p.at(IDENT) { + name(p); + } + // Special-case `macro_rules! try`. + // This is a hack until we do proper edition support + + // test try_macro_rules + // macro_rules! 
try { () => {} } + if p.at(T![try]) { + let m = p.start(); + p.bump_remap(IDENT); + m.complete(p, NAME); + } + + match p.current() { + T!['{'] => { + token_tree(p); + BlockLike::Block + } + T!['('] | T!['['] => { + token_tree(p); + BlockLike::NotBlock + } + _ => { + p.error("expected `{`, `[`, `(`"); + BlockLike::NotBlock + } + } +} + +pub(crate) fn token_tree(p: &mut Parser) { + let closing_paren_kind = match p.current() { + T!['{'] => T!['}'], + T!['('] => T![')'], + T!['['] => T![']'], + _ => unreachable!(), + }; + let m = p.start(); + p.bump_any(); + while !p.at(EOF) && !p.at(closing_paren_kind) { + match p.current() { + T!['{'] | T!['('] | T!['['] => token_tree(p), + T!['}'] => { + p.error("unmatched `}`"); + m.complete(p, TOKEN_TREE); + return; + } + T![')'] | T![']'] => p.err_and_bump("unmatched brace"), + _ => p.bump_any(), + } + } + p.expect(closing_paren_kind); + m.complete(p, TOKEN_TREE); +} diff --git a/crates/parser/src/grammar/items/adt.rs b/crates/parser/src/grammar/items/adt.rs new file mode 100644 index 0000000000..67c0c56970 --- /dev/null +++ b/crates/parser/src/grammar/items/adt.rs @@ -0,0 +1,178 @@ +//! 
FIXME: write short doc here + +use super::*; + +pub(super) fn strukt(p: &mut Parser, m: Marker) { + assert!(p.at(T![struct])); + p.bump(T![struct]); + struct_or_union(p, m, T![struct], STRUCT); +} + +pub(super) fn union(p: &mut Parser, m: Marker) { + assert!(p.at_contextual_kw("union")); + p.bump_remap(T![union]); + struct_or_union(p, m, T![union], UNION); +} + +fn struct_or_union(p: &mut Parser, m: Marker, kw: SyntaxKind, def: SyntaxKind) { + name_r(p, ITEM_RECOVERY_SET); + type_params::opt_generic_param_list(p); + match p.current() { + T![where] => { + type_params::opt_where_clause(p); + match p.current() { + T![;] => { + p.bump(T![;]); + } + T!['{'] => record_field_list(p), + _ => { + //FIXME: special case `(` error message + p.error("expected `;` or `{`"); + } + } + } + T![;] if kw == T![struct] => { + p.bump(T![;]); + } + T!['{'] => record_field_list(p), + T!['('] if kw == T![struct] => { + tuple_field_list(p); + // test tuple_struct_where + // struct Test(T) where T: Clone; + // struct Test(T); + type_params::opt_where_clause(p); + p.expect(T![;]); + } + _ if kw == T![struct] => { + p.error("expected `;`, `{`, or `(`"); + } + _ => { + p.error("expected `{`"); + } + } + m.complete(p, def); +} + +pub(super) fn enum_(p: &mut Parser, m: Marker) { + assert!(p.at(T![enum])); + p.bump(T![enum]); + name_r(p, ITEM_RECOVERY_SET); + type_params::opt_generic_param_list(p); + type_params::opt_where_clause(p); + if p.at(T!['{']) { + variant_list(p); + } else { + p.error("expected `{`") + } + m.complete(p, ENUM); +} + +pub(crate) fn variant_list(p: &mut Parser) { + assert!(p.at(T!['{'])); + let m = p.start(); + p.bump(T!['{']); + while !p.at(EOF) && !p.at(T!['}']) { + if p.at(T!['{']) { + error_block(p, "expected enum variant"); + continue; + } + let var = p.start(); + attributes::outer_attrs(p); + if p.at(IDENT) { + name(p); + match p.current() { + T!['{'] => record_field_list(p), + T!['('] => tuple_field_list(p), + _ => (), + } + + // test variant_discriminant + // enum E 
{ X(i32) = 10 } + if p.eat(T![=]) { + expressions::expr(p); + } + var.complete(p, VARIANT); + } else { + var.abandon(p); + p.err_and_bump("expected enum variant"); + } + if !p.at(T!['}']) { + p.expect(T![,]); + } + } + p.expect(T!['}']); + m.complete(p, VARIANT_LIST); +} + +pub(crate) fn record_field_list(p: &mut Parser) { + assert!(p.at(T!['{'])); + let m = p.start(); + p.bump(T!['{']); + while !p.at(T!['}']) && !p.at(EOF) { + if p.at(T!['{']) { + error_block(p, "expected field"); + continue; + } + record_field_def(p); + if !p.at(T!['}']) { + p.expect(T![,]); + } + } + p.expect(T!['}']); + m.complete(p, RECORD_FIELD_LIST); + + fn record_field_def(p: &mut Parser) { + let m = p.start(); + // test record_field_attrs + // struct S { + // #[serde(with = "url_serde")] + // pub uri: Uri, + // } + attributes::outer_attrs(p); + opt_visibility(p); + if p.at(IDENT) { + name(p); + p.expect(T![:]); + types::type_(p); + m.complete(p, RECORD_FIELD); + } else { + m.abandon(p); + p.err_and_bump("expected field declaration"); + } + } +} + +fn tuple_field_list(p: &mut Parser) { + assert!(p.at(T!['('])); + let m = p.start(); + if !p.expect(T!['(']) { + return; + } + while !p.at(T![')']) && !p.at(EOF) { + let m = p.start(); + // test tuple_field_attrs + // struct S ( + // #[serde(with = "url_serde")] + // pub Uri, + // ); + // + // enum S { + // Uri(#[serde(with = "url_serde")] Uri), + // } + attributes::outer_attrs(p); + opt_visibility(p); + if !p.at_ts(types::TYPE_FIRST) { + p.error("expected a type"); + m.complete(p, ERROR); + break; + } + types::type_(p); + m.complete(p, TUPLE_FIELD); + + if !p.at(T![')']) { + p.expect(T![,]); + } + } + p.expect(T![')']); + m.complete(p, TUPLE_FIELD_LIST); +} diff --git a/crates/parser/src/grammar/items/consts.rs b/crates/parser/src/grammar/items/consts.rs new file mode 100644 index 0000000000..eb7d1f8281 --- /dev/null +++ b/crates/parser/src/grammar/items/consts.rs @@ -0,0 +1,33 @@ +//! 
FIXME: write short doc here + +use super::*; + +pub(super) fn static_(p: &mut Parser, m: Marker) { + const_or_static(p, m, T![static], STATIC) +} + +pub(super) fn konst(p: &mut Parser, m: Marker) { + const_or_static(p, m, T![const], CONST) +} + +fn const_or_static(p: &mut Parser, m: Marker, kw: SyntaxKind, def: SyntaxKind) { + assert!(p.at(kw)); + p.bump(kw); + p.eat(T![mut]); // FIXME: validator to forbid const mut + + // Allow `_` in place of an identifier in a `const`. + let is_const_underscore = kw == T![const] && p.eat(T![_]); + if !is_const_underscore { + name(p); + } + + // test_err static_underscore + // static _: i32 = 5; + + types::ascription(p); + if p.eat(T![=]) { + expressions::expr(p); + } + p.expect(T![;]); + m.complete(p, def); +} diff --git a/crates/parser/src/grammar/items/traits.rs b/crates/parser/src/grammar/items/traits.rs new file mode 100644 index 0000000000..8394020daf --- /dev/null +++ b/crates/parser/src/grammar/items/traits.rs @@ -0,0 +1,131 @@ +//! FIXME: write short doc here + +use super::*; + +// test trait_item +// trait T: Hash + Clone where U: Copy {} +// trait X: Hash + Clone where U: Copy {} +pub(super) fn trait_(p: &mut Parser) { + assert!(p.at(T![trait])); + p.bump(T![trait]); + name_r(p, ITEM_RECOVERY_SET); + type_params::opt_generic_param_list(p); + // test trait_alias + // trait Z = T; + // trait Z = T where U: Copy; + // trait Z = where Self: T; + if p.eat(T![=]) { + type_params::bounds_without_colon(p); + type_params::opt_where_clause(p); + p.expect(T![;]); + return; + } + if p.at(T![:]) { + type_params::bounds(p); + } + type_params::opt_where_clause(p); + if p.at(T!['{']) { + assoc_item_list(p); + } else { + p.error("expected `{`"); + } +} + +// test impl_def +// impl Foo {} +pub(super) fn impl_(p: &mut Parser) { + assert!(p.at(T![impl])); + p.bump(T![impl]); + if choose_type_params_over_qpath(p) { + type_params::opt_generic_param_list(p); + } + + // FIXME: never type + // impl ! 
{} + + // test impl_def_neg + // impl !Send for X {} + p.eat(T![!]); + impl_type(p); + if p.eat(T![for]) { + impl_type(p); + } + type_params::opt_where_clause(p); + if p.at(T!['{']) { + assoc_item_list(p); + } else { + p.error("expected `{`"); + } +} + +// test impl_item_list +// impl F { +// type A = i32; +// const B: i32 = 92; +// fn foo() {} +// fn bar(&self) {} +// } +pub(crate) fn assoc_item_list(p: &mut Parser) { + assert!(p.at(T!['{'])); + let m = p.start(); + p.bump(T!['{']); + // test impl_inner_attributes + // enum F{} + // impl F { + // //! This is a doc comment + // #![doc("This is also a doc comment")] + // } + attributes::inner_attrs(p); + + while !p.at(EOF) && !p.at(T!['}']) { + if p.at(T!['{']) { + error_block(p, "expected an item"); + continue; + } + item_or_macro(p, true); + } + p.expect(T!['}']); + m.complete(p, ASSOC_ITEM_LIST); +} + +// test impl_type_params +// impl Bar {} +fn choose_type_params_over_qpath(p: &Parser) -> bool { + // There's an ambiguity between generic parameters and qualified paths in impls. + // If we see `<` it may start both, so we have to inspect some following tokens. + // The following combinations can only start generics, + // but not qualified paths (with one exception): + // `<` `>` - empty generic parameters + // `<` `#` - generic parameters with attributes + // `<` `const` - const generic parameters + // `<` (LIFETIME|IDENT) `>` - single generic parameter + // `<` (LIFETIME|IDENT) `,` - first generic parameter in a list + // `<` (LIFETIME|IDENT) `:` - generic parameter with bounds + // `<` (LIFETIME|IDENT) `=` - generic parameter with a default + // The only truly ambiguous case is + // `<` IDENT `>` `::` IDENT ... + // we disambiguate it in favor of generics (`impl ::absolute::Path { ... }`) + // because this is what almost always expected in practice, qualified paths in impls + // (`impl ::AssocTy { ... }`) aren't even allowed by type checker at the moment. 
+ if !p.at(T![<]) { + return false; + } + if p.nth(1) == T![#] || p.nth(1) == T![>] || p.nth(1) == CONST_KW { + return true; + } + (p.nth(1) == LIFETIME || p.nth(1) == IDENT) + && (p.nth(2) == T![>] || p.nth(2) == T![,] || p.nth(2) == T![:] || p.nth(2) == T![=]) +} + +// test_err impl_type +// impl Type {} +// impl Trait1 for T {} +// impl impl NotType {} +// impl Trait2 for impl NotType {} +pub(crate) fn impl_type(p: &mut Parser) { + if p.at(T![impl]) { + p.error("expected trait or type"); + return; + } + types::type_(p); +} diff --git a/crates/parser/src/grammar/items/use_item.rs b/crates/parser/src/grammar/items/use_item.rs new file mode 100644 index 0000000000..20e6a13cf9 --- /dev/null +++ b/crates/parser/src/grammar/items/use_item.rs @@ -0,0 +1,132 @@ +//! FIXME: write short doc here + +use super::*; + +pub(super) fn use_(p: &mut Parser, m: Marker) { + assert!(p.at(T![use])); + p.bump(T![use]); + use_tree(p, true); + p.expect(T![;]); + m.complete(p, USE); +} + +/// Parse a use 'tree', such as `some::path` in `use some::path;` +/// Note that this is called both by `use_item` and `use_tree_list`, +/// so handles both `some::path::{inner::path}` and `inner::path` in +/// `use some::path::{inner::path};` +fn use_tree(p: &mut Parser, top_level: bool) { + let m = p.start(); + match p.current() { + // Finish the use_tree for cases of e.g. + // `use some::path::{self, *};` or `use *;` + // This does not handle cases such as `use some::path::*` + // N.B. in Rust 2015 `use *;` imports all from crate root + // however in Rust 2018 `use *;` errors: ('cannot glob-import all possible crates') + // FIXME: Add this error (if not out of scope) + + // test use_star + // use *; + // use ::*; + // use some::path::{*}; + // use some::path::{::*}; + T![*] => p.bump(T![*]), + T![:] if p.at(T![::]) && p.nth(2) == T![*] => { + // Parse `use ::*;`, which imports all from the crate root in Rust 2015 + // This is invalid inside a use_tree_list, (e.g. 
`use some::path::{::*}`) + // but still parses and errors later: ('crate root in paths can only be used in start position') + // FIXME: Add this error (if not out of scope) + // In Rust 2018, it is always invalid (see above) + p.bump(T![::]); + p.bump(T![*]); + } + // Open a use tree list + // Handles cases such as `use {some::path};` or `{inner::path}` in + // `use some::path::{{inner::path}, other::path}` + + // test use_tree_list + // use {crate::path::from::root, or::path::from::crate_name}; // Rust 2018 (with a crate named `or`) + // use {path::from::root}; // Rust 2015 + // use ::{some::arbritrary::path}; // Rust 2015 + // use ::{{{root::export}}}; // Nonsensical but perfectly legal nesting + T!['{'] => { + use_tree_list(p); + } + T![:] if p.at(T![::]) && p.nth(2) == T!['{'] => { + p.bump(T![::]); + use_tree_list(p); + } + // Parse a 'standard' path. + // Also handles aliases (e.g. `use something as something_else`) + + // test use_path + // use ::crate_name; // Rust 2018 - All flavours + // use crate_name; // Rust 2018 - Anchored paths + // use item_in_scope_or_crate_name; // Rust 2018 - Uniform Paths + // + // use self::module::Item; + // use crate::Item; + // use self::some::Struct; + // use crate_name::some_item; + _ if paths::is_use_path_start(p) => { + paths::use_path(p); + match p.current() { + T![as] => { + // test use_alias + // use some::path as some_name; + // use some::{ + // other::path as some_other_name, + // different::path as different_name, + // yet::another::path, + // running::out::of::synonyms::for_::different::* + // }; + // use Trait as _; + opt_rename(p); + } + T![:] if p.at(T![::]) => { + p.bump(T![::]); + match p.current() { + T![*] => { + p.bump(T![*]); + } + // test use_tree_list_after_path + // use crate::{Item}; + // use self::{Item}; + T!['{'] => use_tree_list(p), + _ => { + // is this unreachable? 
+ p.error("expected `{` or `*`"); + } + } + } + _ => (), + } + } + _ => { + m.abandon(p); + let msg = "expected one of `*`, `::`, `{`, `self`, `super` or an identifier"; + if top_level { + p.err_recover(msg, ITEM_RECOVERY_SET); + } else { + // if we are parsing a nested tree, we have to eat a token to + // maintain balanced `{}` + p.err_and_bump(msg); + } + return; + } + } + m.complete(p, USE_TREE); +} + +pub(crate) fn use_tree_list(p: &mut Parser) { + assert!(p.at(T!['{'])); + let m = p.start(); + p.bump(T!['{']); + while !p.at(EOF) && !p.at(T!['}']) { + use_tree(p, false); + if !p.at(T!['}']) { + p.expect(T![,]); + } + } + p.expect(T!['}']); + m.complete(p, USE_TREE_LIST); +} diff --git a/crates/parser/src/grammar/params.rs b/crates/parser/src/grammar/params.rs new file mode 100644 index 0000000000..a665ffc133 --- /dev/null +++ b/crates/parser/src/grammar/params.rs @@ -0,0 +1,188 @@ +//! FIXME: write short doc here + +use super::*; + +// test param_list +// fn a() {} +// fn b(x: i32) {} +// fn c(x: i32, ) {} +// fn d(x: i32, y: ()) {} +pub(super) fn param_list_fn_def(p: &mut Parser) { + list_(p, Flavor::FnDef) +} + +// test param_list_opt_patterns +// fn foo<F: FnMut(&mut Foo<'a>)>(){} +pub(super) fn param_list_fn_trait(p: &mut Parser) { + list_(p, Flavor::FnTrait) +} + +pub(super) fn param_list_fn_ptr(p: &mut Parser) { + list_(p, Flavor::FnPointer) +} + +pub(super) fn param_list_closure(p: &mut Parser) { + list_(p, Flavor::Closure) +} + +#[derive(Debug, Clone, Copy)] +enum Flavor { + FnDef, // Includes trait fn params; omitted param idents are not supported + FnTrait, // Params for `Fn(...)`/`FnMut(...)`/`FnOnce(...)` annotations + FnPointer, + Closure, +} + +fn list_(p: &mut Parser, flavor: Flavor) { + use Flavor::*; + + let (bra, ket) = match flavor { + Closure => (T![|], T![|]), + FnDef | FnTrait | FnPointer => (T!['('], T![')']), + }; + + let m = p.start(); + p.bump(bra); + + if let FnDef = flavor { + // test self_param_outer_attr + // fn f(#[must_use] self) {} + 
attributes::outer_attrs(p); + opt_self_param(p); + } + + while !p.at(EOF) && !p.at(ket) { + // test param_outer_arg + // fn f(#[attr1] pat: Type) {} + attributes::outer_attrs(p); + + if !p.at_ts(PARAM_FIRST) { + p.error("expected value parameter"); + break; + } + let param = param(p, flavor); + if !p.at(ket) { + p.expect(T![,]); + } + if let Variadic(true) = param { + break; + } + } + + p.expect(ket); + m.complete(p, PARAM_LIST); +} + +const PARAM_FIRST: TokenSet = patterns::PATTERN_FIRST.union(types::TYPE_FIRST); + +struct Variadic(bool); + +fn param(p: &mut Parser, flavor: Flavor) -> Variadic { + let mut res = Variadic(false); + let m = p.start(); + match flavor { + // test param_list_vararg + // extern "C" { fn printf(format: *const i8, ...) -> i32; } + Flavor::FnDef | Flavor::FnPointer if p.eat(T![...]) => res = Variadic(true), + + // test fn_def_param + // fn foo((x, y): (i32, i32)) {} + Flavor::FnDef => { + patterns::pattern(p); + if variadic_param(p) { + res = Variadic(true) + } else { + types::ascription(p); + } + } + // test value_parameters_no_patterns + // type F = Box; + Flavor::FnTrait => { + types::type_(p); + } + // test fn_pointer_param_ident_path + // type Foo = fn(Bar::Baz); + // type Qux = fn(baz: Bar::Baz); + + // test fn_pointer_unnamed_arg + // type Foo = fn(_: bar); + Flavor::FnPointer => { + if (p.at(IDENT) || p.at(UNDERSCORE)) && p.nth(1) == T![:] && !p.nth_at(1, T![::]) { + patterns::pattern_single(p); + if variadic_param(p) { + res = Variadic(true) + } else { + types::ascription(p); + } + } else { + types::type_(p); + } + } + // test closure_params + // fn main() { + // let foo = |bar, baz: Baz, qux: Qux::Quux| (); + // } + Flavor::Closure => { + patterns::pattern_single(p); + if p.at(T![:]) && !p.at(T![::]) { + types::ascription(p); + } + } + } + m.complete(p, PARAM); + res +} + +fn variadic_param(p: &mut Parser) -> bool { + if p.at(T![:]) && p.nth_at(1, T![...]) { + p.bump(T![:]); + p.bump(T![...]); + true + } else { + false + } +} + 
+// test self_param +// impl S { +// fn a(self) {} +// fn b(&self,) {} +// fn c(&'a self,) {} +// fn d(&'a mut self, x: i32) {} +// fn e(mut self) {} +// } +fn opt_self_param(p: &mut Parser) { + let m; + if p.at(T![self]) || p.at(T![mut]) && p.nth(1) == T![self] { + m = p.start(); + p.eat(T![mut]); + p.eat(T![self]); + // test arb_self_types + // impl S { + // fn a(self: &Self) {} + // fn b(mut self: Box) {} + // } + if p.at(T![:]) { + types::ascription(p); + } + } else { + let la1 = p.nth(1); + let la2 = p.nth(2); + let la3 = p.nth(3); + let n_toks = match (p.current(), la1, la2, la3) { + (T![&], T![self], _, _) => 2, + (T![&], T![mut], T![self], _) => 3, + (T![&], LIFETIME, T![self], _) => 3, + (T![&], LIFETIME, T![mut], T![self]) => 4, + _ => return, + }; + m = p.start(); + for _ in 0..n_toks { + p.bump_any(); + } + } + m.complete(p, SELF_PARAM); + if !p.at(T![')']) { + p.expect(T![,]); + } +} diff --git a/crates/parser/src/grammar/paths.rs b/crates/parser/src/grammar/paths.rs new file mode 100644 index 0000000000..52562afa41 --- /dev/null +++ b/crates/parser/src/grammar/paths.rs @@ -0,0 +1,115 @@ +//! 
FIXME: write short doc here + +use super::*; + +pub(super) const PATH_FIRST: TokenSet = + token_set![IDENT, T![self], T![super], T![crate], T![:], T![<]]; + +pub(super) fn is_path_start(p: &Parser) -> bool { + is_use_path_start(p) || p.at(T![<]) +} + +pub(super) fn is_use_path_start(p: &Parser) -> bool { + match p.current() { + IDENT | T![self] | T![super] | T![crate] => true, + T![:] if p.at(T![::]) => true, + _ => false, + } +} + +pub(super) fn use_path(p: &mut Parser) { + path(p, Mode::Use) +} + +pub(crate) fn type_path(p: &mut Parser) { + path(p, Mode::Type) +} + +pub(super) fn expr_path(p: &mut Parser) { + path(p, Mode::Expr) +} + +#[derive(Clone, Copy, Eq, PartialEq)] +enum Mode { + Use, + Type, + Expr, +} + +fn path(p: &mut Parser, mode: Mode) { + let path = p.start(); + path_segment(p, mode, true); + let mut qual = path.complete(p, PATH); + loop { + let use_tree = matches!(p.nth(2), T![*] | T!['{']); + if p.at(T![::]) && !use_tree { + let path = qual.precede(p); + p.bump(T![::]); + path_segment(p, mode, false); + let path = path.complete(p, PATH); + qual = path; + } else { + break; + } + } +} + +fn path_segment(p: &mut Parser, mode: Mode, first: bool) { + let m = p.start(); + // test qual_paths + // type X = ::Output; + // fn foo() { ::default(); } + if first && p.eat(T![<]) { + types::type_(p); + if p.eat(T![as]) { + if is_use_path_start(p) { + types::path_type(p); + } else { + p.error("expected a trait"); + } + } + p.expect(T![>]); + } else { + let mut empty = true; + if first { + p.eat(T![::]); + empty = false; + } + match p.current() { + IDENT => { + name_ref(p); + opt_path_type_args(p, mode); + } + // test crate_path + // use crate::foo; + T![self] | T![super] | T![crate] => p.bump_any(), + _ => { + p.err_recover("expected identifier", items::ITEM_RECOVERY_SET); + if empty { + // test_err empty_segment + // use crate::; + m.abandon(p); + return; + } + } + }; + } + m.complete(p, PATH_SEGMENT); +} + +fn opt_path_type_args(p: &mut Parser, mode: Mode) { + 
match mode { + Mode::Use => {} + Mode::Type => { + // test path_fn_trait_args + // type F = Box ()>; + if p.at(T!['(']) { + params::param_list_fn_trait(p); + opt_ret_type(p); + } else { + type_args::opt_generic_arg_list(p, false) + } + } + Mode::Expr => type_args::opt_generic_arg_list(p, true), + } +} diff --git a/crates/parser/src/grammar/patterns.rs b/crates/parser/src/grammar/patterns.rs new file mode 100644 index 0000000000..07b1d6dd53 --- /dev/null +++ b/crates/parser/src/grammar/patterns.rs @@ -0,0 +1,379 @@ +//! FIXME: write short doc here + +use super::*; + +pub(super) const PATTERN_FIRST: TokenSet = expressions::LITERAL_FIRST + .union(paths::PATH_FIRST) + .union(token_set![T![box], T![ref], T![mut], T!['('], T!['['], T![&], T![_], T![-], T![.]]); + +pub(crate) fn pattern(p: &mut Parser) { + pattern_r(p, PAT_RECOVERY_SET); +} + +/// Parses a pattern list separated by pipes `|` +pub(super) fn pattern_top(p: &mut Parser) { + pattern_top_r(p, PAT_RECOVERY_SET) +} + +pub(crate) fn pattern_single(p: &mut Parser) { + pattern_single_r(p, PAT_RECOVERY_SET); +} + +/// Parses a pattern list separated by pipes `|` +/// using the given `recovery_set` +pub(super) fn pattern_top_r(p: &mut Parser, recovery_set: TokenSet) { + p.eat(T![|]); + pattern_r(p, recovery_set); +} + +/// Parses a pattern list separated by pipes `|`, with no leading `|`,using the +/// given `recovery_set` +// test or_pattern +// fn main() { +// match () { +// (_ | _) => (), +// &(_ | _) => (), +// (_ | _,) => (), +// [_ | _,] => (), +// } +// } +fn pattern_r(p: &mut Parser, recovery_set: TokenSet) { + let m = p.start(); + pattern_single_r(p, recovery_set); + + if !p.at(T![|]) { + m.abandon(p); + return; + } + while p.eat(T![|]) { + pattern_single_r(p, recovery_set); + } + m.complete(p, OR_PAT); +} + +fn pattern_single_r(p: &mut Parser, recovery_set: TokenSet) { + if let Some(lhs) = atom_pat(p, recovery_set) { + // test range_pat + // fn main() { + // match 92 { + // 0 ... 
100 => (), + // 101 ..= 200 => (), + // 200 .. 301=> (), + // } + // } + for &range_op in [T![...], T![..=], T![..]].iter() { + if p.at(range_op) { + let m = lhs.precede(p); + p.bump(range_op); + atom_pat(p, recovery_set); + m.complete(p, RANGE_PAT); + return; + } + } + } +} + +const PAT_RECOVERY_SET: TokenSet = + token_set![LET_KW, IF_KW, WHILE_KW, LOOP_KW, MATCH_KW, R_PAREN, COMMA]; + +fn atom_pat(p: &mut Parser, recovery_set: TokenSet) -> Option { + let m = match p.nth(0) { + T![box] => box_pat(p), + T![ref] | T![mut] => ident_pat(p, true), + IDENT => match p.nth(1) { + // Checks the token after an IDENT to see if a pattern is a path (Struct { .. }) or macro + // (T![x]). + T!['('] | T!['{'] | T![!] => path_or_macro_pat(p), + T![:] if p.nth_at(1, T![::]) => path_or_macro_pat(p), + _ => ident_pat(p, true), + }, + + // test type_path_in_pattern + // fn main() { let <_>::Foo = (); } + _ if paths::is_path_start(p) => path_or_macro_pat(p), + _ if is_literal_pat_start(p) => literal_pat(p), + + T![.] if p.at(T![..]) => rest_pat(p), + T![_] => wildcard_pat(p), + T![&] => ref_pat(p), + T!['('] => tuple_pat(p), + T!['['] => slice_pat(p), + + _ => { + p.err_recover("expected pattern", recovery_set); + return None; + } + }; + + Some(m) +} + +fn is_literal_pat_start(p: &Parser) -> bool { + p.at(T![-]) && (p.nth(1) == INT_NUMBER || p.nth(1) == FLOAT_NUMBER) + || p.at_ts(expressions::LITERAL_FIRST) +} + +// test literal_pattern +// fn main() { +// match () { +// -1 => (), +// 92 => (), +// 'c' => (), +// "hello" => (), +// } +// } +fn literal_pat(p: &mut Parser) -> CompletedMarker { + assert!(is_literal_pat_start(p)); + let m = p.start(); + if p.at(T![-]) { + p.bump(T![-]); + } + expressions::literal(p); + m.complete(p, LITERAL_PAT) +} + +// test path_part +// fn foo() { +// let foo::Bar = (); +// let ::Bar = (); +// let Bar { .. } = (); +// let Bar(..) 
= (); +// } +fn path_or_macro_pat(p: &mut Parser) -> CompletedMarker { + assert!(paths::is_path_start(p)); + let m = p.start(); + paths::expr_path(p); + let kind = match p.current() { + T!['('] => { + tuple_pat_fields(p); + TUPLE_STRUCT_PAT + } + T!['{'] => { + record_pat_field_list(p); + RECORD_PAT + } + // test marco_pat + // fn main() { + // let m!(x) = 0; + // } + T![!] => { + items::macro_call_after_excl(p); + return m.complete(p, MACRO_CALL).precede(p).complete(p, MACRO_PAT); + } + _ => PATH_PAT, + }; + m.complete(p, kind) +} + +// test tuple_pat_fields +// fn foo() { +// let S() = (); +// let S(_) = (); +// let S(_,) = (); +// let S(_, .. , x) = (); +// } +fn tuple_pat_fields(p: &mut Parser) { + assert!(p.at(T!['('])); + p.bump(T!['(']); + pat_list(p, T![')']); + p.expect(T![')']); +} + +// test record_field_pat_list +// fn foo() { +// let S {} = (); +// let S { f, ref mut g } = (); +// let S { h: _, ..} = (); +// let S { h: _, } = (); +// } +fn record_pat_field_list(p: &mut Parser) { + assert!(p.at(T!['{'])); + let m = p.start(); + p.bump(T!['{']); + while !p.at(EOF) && !p.at(T!['}']) { + match p.current() { + // A trailing `..` is *not* treated as a REST_PAT. + T![.] 
if p.at(T![..]) => p.bump(T![..]), + T!['{'] => error_block(p, "expected ident"), + + c => { + let m = p.start(); + match c { + // test record_field_pat + // fn foo() { + // let S { 0: 1 } = (); + // let S { x: 1 } = (); + // } + IDENT | INT_NUMBER if p.nth(1) == T![:] => { + name_ref_or_index(p); + p.bump(T![:]); + pattern(p); + } + T![box] => { + // FIXME: not all box patterns should be allowed + box_pat(p); + } + _ => { + ident_pat(p, false); + } + } + m.complete(p, RECORD_PAT_FIELD); + } + } + if !p.at(T!['}']) { + p.expect(T![,]); + } + } + p.expect(T!['}']); + m.complete(p, RECORD_PAT_FIELD_LIST); +} + +// test placeholder_pat +// fn main() { let _ = (); } +fn wildcard_pat(p: &mut Parser) -> CompletedMarker { + assert!(p.at(T![_])); + let m = p.start(); + p.bump(T![_]); + m.complete(p, WILDCARD_PAT) +} + +// test dot_dot_pat +// fn main() { +// let .. = (); +// // +// // Tuples +// // +// let (a, ..) = (); +// let (a, ..,) = (); +// let Tuple(a, ..) = (); +// let Tuple(a, ..,) = (); +// let (.., ..) = (); +// let Tuple(.., ..) = (); +// let (.., a, ..) = (); +// let Tuple(.., a, ..) = (); +// // +// // Slices +// // +// let [..] = (); +// let [head, ..] = (); +// let [head, tail @ ..] = (); +// let [head, .., cons] = (); +// let [head, mid @ .., cons] = (); +// let [head, .., .., cons] = (); +// let [head, .., mid, tail @ ..] = (); +// let [head, .., mid, .., cons] = (); +// } +fn rest_pat(p: &mut Parser) -> CompletedMarker { + assert!(p.at(T![..])); + let m = p.start(); + p.bump(T![..]); + m.complete(p, REST_PAT) +} + +// test ref_pat +// fn main() { +// let &a = (); +// let &mut b = (); +// } +fn ref_pat(p: &mut Parser) -> CompletedMarker { + assert!(p.at(T![&])); + let m = p.start(); + p.bump(T![&]); + p.eat(T![mut]); + pattern_single(p); + m.complete(p, REF_PAT) +} + +// test tuple_pat +// fn main() { +// let (a, b, ..) = (); +// let (a,) = (); +// let (..) 
= (); +// let () = (); +// } +fn tuple_pat(p: &mut Parser) -> CompletedMarker { + assert!(p.at(T!['('])); + let m = p.start(); + p.bump(T!['(']); + let mut has_comma = false; + let mut has_pat = false; + let mut has_rest = false; + while !p.at(EOF) && !p.at(T![')']) { + has_pat = true; + if !p.at_ts(PATTERN_FIRST) { + p.error("expected a pattern"); + break; + } + has_rest |= p.at(T![..]); + + pattern(p); + if !p.at(T![')']) { + has_comma = true; + p.expect(T![,]); + } + } + p.expect(T![')']); + + m.complete(p, if !has_comma && !has_rest && has_pat { PAREN_PAT } else { TUPLE_PAT }) +} + +// test slice_pat +// fn main() { +// let [a, b, ..] = []; +// } +fn slice_pat(p: &mut Parser) -> CompletedMarker { + assert!(p.at(T!['['])); + let m = p.start(); + p.bump(T!['[']); + pat_list(p, T![']']); + p.expect(T![']']); + m.complete(p, SLICE_PAT) +} + +fn pat_list(p: &mut Parser, ket: SyntaxKind) { + while !p.at(EOF) && !p.at(ket) { + if !p.at_ts(PATTERN_FIRST) { + p.error("expected a pattern"); + break; + } + + pattern(p); + if !p.at(ket) { + p.expect(T![,]); + } + } +} + +// test bind_pat +// fn main() { +// let a = (); +// let mut b = (); +// let ref c = (); +// let ref mut d = (); +// let e @ _ = (); +// let ref mut f @ g @ _ = (); +// } +fn ident_pat(p: &mut Parser, with_at: bool) -> CompletedMarker { + let m = p.start(); + p.eat(T![ref]); + p.eat(T![mut]); + name(p); + if with_at && p.eat(T![@]) { + pattern_single(p); + } + m.complete(p, IDENT_PAT) +} + +// test box_pat +// fn main() { +// let box i = (); +// let box Outer { box i, j: box Inner(box &x) } = (); +// let box ref mut i = (); +// } +fn box_pat(p: &mut Parser) -> CompletedMarker { + assert!(p.at(T![box])); + let m = p.start(); + p.bump(T![box]); + pattern_single(p); + m.complete(p, BOX_PAT) +} diff --git a/crates/parser/src/grammar/type_args.rs b/crates/parser/src/grammar/type_args.rs new file mode 100644 index 0000000000..f2d34a7499 --- /dev/null +++ b/crates/parser/src/grammar/type_args.rs @@ -0,0 +1,63 @@ 
+//! FIXME: write short doc here + +use super::*; + +pub(super) fn opt_generic_arg_list(p: &mut Parser, colon_colon_required: bool) { + let m; + if p.at(T![::]) && p.nth(2) == T![<] { + m = p.start(); + p.bump(T![::]); + p.bump(T![<]); + } else if !colon_colon_required && p.at(T![<]) && p.nth(1) != T![=] { + m = p.start(); + p.bump(T![<]); + } else { + return; + } + + while !p.at(EOF) && !p.at(T![>]) { + generic_arg(p); + if !p.at(T![>]) && !p.expect(T![,]) { + break; + } + } + p.expect(T![>]); + m.complete(p, GENERIC_ARG_LIST); +} + +// test type_arg +// type A = B<'static, i32, 1, { 2 }, Item=u64>; +fn generic_arg(p: &mut Parser) { + let m = p.start(); + match p.current() { + LIFETIME => { + p.bump(LIFETIME); + m.complete(p, LIFETIME_ARG); + } + // test associated_type_bounds + // fn print_all>(printables: T) {} + IDENT if p.nth(1) == T![:] && p.nth(2) != T![:] => { + name_ref(p); + type_params::bounds(p); + m.complete(p, ASSOC_TYPE_ARG); + } + IDENT if p.nth(1) == T![=] => { + name_ref(p); + p.bump_any(); + types::type_(p); + m.complete(p, ASSOC_TYPE_ARG); + } + T!['{'] => { + expressions::block_expr(p); + m.complete(p, CONST_ARG); + } + k if k.is_literal() => { + expressions::literal(p); + m.complete(p, CONST_ARG); + } + _ => { + types::type_(p); + m.complete(p, TYPE_ARG); + } + } +} diff --git a/crates/parser/src/grammar/type_params.rs b/crates/parser/src/grammar/type_params.rs new file mode 100644 index 0000000000..bc7d8d7244 --- /dev/null +++ b/crates/parser/src/grammar/type_params.rs @@ -0,0 +1,209 @@ +//! 
FIXME: write short doc here + +use super::*; + +pub(super) fn opt_generic_param_list(p: &mut Parser) { + if !p.at(T![<]) { + return; + } + generic_param_list(p); +} + +fn generic_param_list(p: &mut Parser) { + assert!(p.at(T![<])); + let m = p.start(); + p.bump(T![<]); + + while !p.at(EOF) && !p.at(T![>]) { + let m = p.start(); + + // test generic_lifetime_type_attribute + // fn foo<#[derive(Lifetime)] 'a, #[derive(Type)] T>(_: &'a T) { + // } + attributes::outer_attrs(p); + + match p.current() { + LIFETIME => lifetime_param(p, m), + IDENT => type_param(p, m), + CONST_KW => const_param(p, m), + _ => { + m.abandon(p); + p.err_and_bump("expected type parameter") + } + } + if !p.at(T![>]) && !p.expect(T![,]) { + break; + } + } + p.expect(T![>]); + m.complete(p, GENERIC_PARAM_LIST); +} + +fn lifetime_param(p: &mut Parser, m: Marker) { + assert!(p.at(LIFETIME)); + p.bump(LIFETIME); + if p.at(T![:]) { + lifetime_bounds(p); + } + m.complete(p, LIFETIME_PARAM); +} + +fn type_param(p: &mut Parser, m: Marker) { + assert!(p.at(IDENT)); + name(p); + if p.at(T![:]) { + bounds(p); + } + // test type_param_default + // struct S; + if p.at(T![=]) { + p.bump(T![=]); + types::type_(p) + } + m.complete(p, TYPE_PARAM); +} + +// test const_param +// struct S; +fn const_param(p: &mut Parser, m: Marker) { + assert!(p.at(CONST_KW)); + p.bump(T![const]); + name(p); + types::ascription(p); + m.complete(p, CONST_PARAM); +} + +// test type_param_bounds +// struct S; +pub(super) fn bounds(p: &mut Parser) { + assert!(p.at(T![:])); + p.bump(T![:]); + bounds_without_colon(p); +} + +fn lifetime_bounds(p: &mut Parser) { + assert!(p.at(T![:])); + p.bump(T![:]); + while p.at(LIFETIME) { + p.bump(LIFETIME); + if !p.eat(T![+]) { + break; + } + } +} + +pub(super) fn bounds_without_colon_m(p: &mut Parser, marker: Marker) -> CompletedMarker { + while type_bound(p) { + if !p.eat(T![+]) { + break; + } + } + + marker.complete(p, TYPE_BOUND_LIST) +} + +pub(super) fn bounds_without_colon(p: &mut Parser) { + 
let m = p.start(); + bounds_without_colon_m(p, m); +} + +fn type_bound(p: &mut Parser) -> bool { + let m = p.start(); + let has_paren = p.eat(T!['(']); + p.eat(T![?]); + match p.current() { + LIFETIME => p.bump(LIFETIME), + T![for] => types::for_type(p), + _ if paths::is_use_path_start(p) => types::path_type_(p, false), + _ => { + m.abandon(p); + return false; + } + } + if has_paren { + p.expect(T![')']); + } + m.complete(p, TYPE_BOUND); + + true +} + +// test where_clause +// fn foo() +// where +// 'a: 'b + 'c, +// T: Clone + Copy + 'static, +// Iterator::Item: 'a, +// ::Item: 'a +// {} +pub(super) fn opt_where_clause(p: &mut Parser) { + if !p.at(T![where]) { + return; + } + let m = p.start(); + p.bump(T![where]); + + while is_where_predicate(p) { + where_predicate(p); + + let comma = p.eat(T![,]); + + if is_where_clause_end(p) { + break; + } + + if !comma { + p.error("expected comma"); + } + } + + m.complete(p, WHERE_CLAUSE); +} + +fn is_where_predicate(p: &mut Parser) -> bool { + match p.current() { + LIFETIME => true, + T![impl] => false, + token => types::TYPE_FIRST.contains(token), + } +} + +fn is_where_clause_end(p: &mut Parser) -> bool { + matches!(p.current(), T!['{'] | T![;] | T![=]) +} + +fn where_predicate(p: &mut Parser) { + let m = p.start(); + match p.current() { + LIFETIME => { + p.bump(LIFETIME); + if p.at(T![:]) { + bounds(p); + } else { + p.error("expected colon"); + } + } + T![impl] => { + p.error("expected lifetime or type"); + } + _ => { + // test where_pred_for + // fn for_trait() + // where + // for<'a> F: Fn(&'a str) + // { } + if p.at(T![for]) { + types::for_binder(p); + } + + types::type_(p); + + if p.at(T![:]) { + bounds(p); + } else { + p.error("expected colon"); + } + } + } + m.complete(p, WHERE_PRED); +} diff --git a/crates/parser/src/grammar/types.rs b/crates/parser/src/grammar/types.rs new file mode 100644 index 0000000000..c876545f44 --- /dev/null +++ b/crates/parser/src/grammar/types.rs @@ -0,0 +1,324 @@ +//! 
FIXME: write short doc here + +use super::*; + +pub(super) const TYPE_FIRST: TokenSet = paths::PATH_FIRST.union(token_set![ + T!['('], + T!['['], + T![<], + T![!], + T![*], + T![&], + T![_], + T![fn], + T![unsafe], + T![extern], + T![for], + T![impl], + T![dyn], +]); + +const TYPE_RECOVERY_SET: TokenSet = token_set![R_PAREN, COMMA, L_DOLLAR]; + +pub(crate) fn type_(p: &mut Parser) { + type_with_bounds_cond(p, true); +} + +pub(super) fn type_no_bounds(p: &mut Parser) { + type_with_bounds_cond(p, false); +} + +fn type_with_bounds_cond(p: &mut Parser, allow_bounds: bool) { + match p.current() { + T!['('] => paren_or_tuple_type(p), + T![!] => never_type(p), + T![*] => ptr_type(p), + T!['['] => array_or_slice_type(p), + T![&] => ref_type(p), + T![_] => infer_type(p), + T![fn] | T![unsafe] | T![extern] => fn_ptr_type(p), + T![for] => for_type(p), + T![impl] => impl_trait_type(p), + T![dyn] => dyn_trait_type(p), + // Some path types are not allowed to have bounds (no plus) + T![<] => path_type_(p, allow_bounds), + _ if paths::is_use_path_start(p) => path_or_macro_type_(p, allow_bounds), + _ => { + p.err_recover("expected type", TYPE_RECOVERY_SET); + } + } +} + +pub(super) fn ascription(p: &mut Parser) { + p.expect(T![:]); + type_(p) +} + +fn paren_or_tuple_type(p: &mut Parser) { + assert!(p.at(T!['('])); + let m = p.start(); + p.bump(T!['(']); + let mut n_types: u32 = 0; + let mut trailing_comma: bool = false; + while !p.at(EOF) && !p.at(T![')']) { + n_types += 1; + type_(p); + if p.eat(T![,]) { + trailing_comma = true; + } else { + trailing_comma = false; + break; + } + } + p.expect(T![')']); + + let kind = if n_types == 1 && !trailing_comma { + // test paren_type + // type T = (i32); + PAREN_TYPE + } else { + // test unit_type + // type T = (); + + // test singleton_tuple_type + // type T = (i32,); + TUPLE_TYPE + }; + m.complete(p, kind); +} + +// test never_type +// type Never = !; +fn never_type(p: &mut Parser) { + assert!(p.at(T![!])); + let m = p.start(); + 
p.bump(T![!]); + m.complete(p, NEVER_TYPE); +} + +fn ptr_type(p: &mut Parser) { + assert!(p.at(T![*])); + let m = p.start(); + p.bump(T![*]); + + match p.current() { + // test pointer_type_mut + // type M = *mut (); + // type C = *mut (); + T![mut] | T![const] => p.bump_any(), + _ => { + // test_err pointer_type_no_mutability + // type T = *(); + p.error( + "expected mut or const in raw pointer type \ + (use `*mut T` or `*const T` as appropriate)", + ); + } + }; + + type_no_bounds(p); + m.complete(p, PTR_TYPE); +} + +fn array_or_slice_type(p: &mut Parser) { + assert!(p.at(T!['['])); + let m = p.start(); + p.bump(T!['[']); + + type_(p); + let kind = match p.current() { + // test slice_type + // type T = [()]; + T![']'] => { + p.bump(T![']']); + SLICE_TYPE + } + + // test array_type + // type T = [(); 92]; + T![;] => { + p.bump(T![;]); + expressions::expr(p); + p.expect(T![']']); + ARRAY_TYPE + } + // test_err array_type_missing_semi + // type T = [() 92]; + _ => { + p.error("expected `;` or `]`"); + SLICE_TYPE + } + }; + m.complete(p, kind); +} + +// test reference_type; +// type A = &(); +// type B = &'static (); +// type C = &mut (); +fn ref_type(p: &mut Parser) { + assert!(p.at(T![&])); + let m = p.start(); + p.bump(T![&]); + p.eat(LIFETIME); + p.eat(T![mut]); + type_no_bounds(p); + m.complete(p, REF_TYPE); +} + +// test placeholder_type +// type Placeholder = _; +fn infer_type(p: &mut Parser) { + assert!(p.at(T![_])); + let m = p.start(); + p.bump(T![_]); + m.complete(p, INFER_TYPE); +} + +// test fn_pointer_type +// type A = fn(); +// type B = unsafe fn(); +// type C = unsafe extern "C" fn(); +// type D = extern "C" fn ( u8 , ... 
) -> u8; +fn fn_ptr_type(p: &mut Parser) { + let m = p.start(); + p.eat(T![unsafe]); + if p.at(T![extern]) { + abi(p); + } + // test_err fn_pointer_type_missing_fn + // type F = unsafe (); + if !p.eat(T![fn]) { + m.abandon(p); + p.error("expected `fn`"); + return; + } + if p.at(T!['(']) { + params::param_list_fn_ptr(p); + } else { + p.error("expected parameters") + } + // test fn_pointer_type_with_ret + // type F = fn() -> (); + opt_ret_type(p); + m.complete(p, FN_PTR_TYPE); +} + +pub(super) fn for_binder(p: &mut Parser) { + assert!(p.at(T![for])); + p.bump(T![for]); + if p.at(T![<]) { + type_params::opt_generic_param_list(p); + } else { + p.error("expected `<`"); + } +} + +// test for_type +// type A = for<'a> fn() -> (); +// type B = for<'a> unsafe extern "C" fn(&'a ()) -> (); +// type Obj = for<'a> PartialEq<&'a i32>; +pub(super) fn for_type(p: &mut Parser) { + assert!(p.at(T![for])); + let m = p.start(); + for_binder(p); + match p.current() { + T![fn] | T![unsafe] | T![extern] => {} + // OK: legacy trait object format + _ if paths::is_use_path_start(p) => {} + _ => { + p.error("expected a function pointer or path"); + } + } + type_no_bounds(p); + m.complete(p, FOR_TYPE); +} + +// test impl_trait_type +// type A = impl Iterator> + 'a; +fn impl_trait_type(p: &mut Parser) { + assert!(p.at(T![impl])); + let m = p.start(); + p.bump(T![impl]); + type_params::bounds_without_colon(p); + m.complete(p, IMPL_TRAIT_TYPE); +} + +// test dyn_trait_type +// type A = dyn Iterator> + 'a; +fn dyn_trait_type(p: &mut Parser) { + assert!(p.at(T![dyn])); + let m = p.start(); + p.bump(T![dyn]); + type_params::bounds_without_colon(p); + m.complete(p, DYN_TRAIT_TYPE); +} + +// test path_type +// type A = Foo; +// type B = ::Foo; +// type C = self::Foo; +// type D = super::Foo; +pub(super) fn path_type(p: &mut Parser) { + path_type_(p, true) +} + +// test macro_call_type +// type A = foo!(); +// type B = crate::foo!(); +fn path_or_macro_type_(p: &mut Parser, allow_bounds: bool) { + 
assert!(paths::is_path_start(p)); + let m = p.start(); + paths::type_path(p); + + let kind = if p.at(T![!]) && !p.at(T![!=]) { + items::macro_call_after_excl(p); + MACRO_CALL + } else { + PATH_TYPE + }; + + let path = m.complete(p, kind); + + if allow_bounds { + opt_path_type_bounds_as_dyn_trait_type(p, path); + } +} + +pub(super) fn path_type_(p: &mut Parser, allow_bounds: bool) { + assert!(paths::is_path_start(p)); + let m = p.start(); + paths::type_path(p); + + // test path_type_with_bounds + // fn foo() -> Box {} + // fn foo() -> Box {} + let path = m.complete(p, PATH_TYPE); + if allow_bounds { + opt_path_type_bounds_as_dyn_trait_type(p, path); + } +} + +/// This turns a parsed PATH_TYPE optionally into a DYN_TRAIT_TYPE +/// with a TYPE_BOUND_LIST +fn opt_path_type_bounds_as_dyn_trait_type(p: &mut Parser, path_type_marker: CompletedMarker) { + if !p.at(T![+]) { + return; + } + + // First create a TYPE_BOUND from the completed PATH_TYPE + let m = path_type_marker.precede(p).complete(p, TYPE_BOUND); + + // Next setup a marker for the TYPE_BOUND_LIST + let m = m.precede(p); + + // This gets consumed here so it gets properly set + // in the TYPE_BOUND_LIST + p.eat(T![+]); + + // Parse rest of the bounds into the TYPE_BOUND_LIST + let m = type_params::bounds_without_colon_m(p, m); + + // Finally precede everything with DYN_TRAIT_TYPE + m.precede(p).complete(p, DYN_TRAIT_TYPE); +} diff --git a/crates/parser/src/lib.rs b/crates/parser/src/lib.rs new file mode 100644 index 0000000000..41e62116f8 --- /dev/null +++ b/crates/parser/src/lib.rs @@ -0,0 +1,149 @@ +//! The Rust parser. +//! +//! The parser doesn't know about concrete representation of tokens and syntax +//! trees. Abstract `TokenSource` and `TreeSink` traits are used instead. As a +//! consequence, this crates does not contain a lexer. +//! +//! The `Parser` struct from the `parser` module is a cursor into the sequence +//! of tokens. Parsing routines use `Parser` to inspect current state and +//! 
advance the parsing. +//! +//! The actual parsing happens in the `grammar` module. +//! +//! Tests for this crate live in `syntax` crate. + +#[macro_use] +mod token_set; +#[macro_use] +mod syntax_kind; +mod event; +mod parser; +mod grammar; + +pub(crate) use token_set::TokenSet; + +pub use syntax_kind::SyntaxKind; + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct ParseError(pub Box); + +/// `TokenSource` abstracts the source of the tokens parser operates on. +/// +/// Hopefully this will allow us to treat text and token trees in the same way! +pub trait TokenSource { + fn current(&self) -> Token; + + /// Lookahead n token + fn lookahead_nth(&self, n: usize) -> Token; + + /// bump cursor to next token + fn bump(&mut self); + + /// Is the current token a specified keyword? + fn is_keyword(&self, kw: &str) -> bool; +} + +/// `Token` abstracts the cursor of `TokenSource` operates on. +#[derive(Debug, Copy, Clone, Eq, PartialEq)] +pub struct Token { + /// What is the current token? + pub kind: SyntaxKind, + + /// Is the current token joined to the next one (`> >` vs `>>`). + pub is_jointed_to_next: bool, +} + +/// `TreeSink` abstracts details of a particular syntax tree implementation. +pub trait TreeSink { + /// Adds new token to the current branch. + fn token(&mut self, kind: SyntaxKind, n_tokens: u8); + + /// Start new branch and make it current. + fn start_node(&mut self, kind: SyntaxKind); + + /// Finish current branch and restore previous + /// branch as current. + fn finish_node(&mut self); + + fn error(&mut self, error: ParseError); +} + +fn parse_from_tokens(token_source: &mut dyn TokenSource, tree_sink: &mut dyn TreeSink, f: F) +where + F: FnOnce(&mut parser::Parser), +{ + let mut p = parser::Parser::new(token_source); + f(&mut p); + let events = p.finish(); + event::process(tree_sink, events); +} + +/// Parse given tokens into the given sink as a rust file. 
+pub fn parse(token_source: &mut dyn TokenSource, tree_sink: &mut dyn TreeSink) { + parse_from_tokens(token_source, tree_sink, grammar::root); +} + +#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] +pub enum FragmentKind { + Path, + Expr, + Statement, + Type, + Pattern, + Item, + Block, + Visibility, + MetaItem, + + // These kinds are used when parsing the result of expansion + // FIXME: use separate fragment kinds for macro inputs and outputs? + Items, + Statements, +} + +pub fn parse_fragment( + token_source: &mut dyn TokenSource, + tree_sink: &mut dyn TreeSink, + fragment_kind: FragmentKind, +) { + let parser: fn(&'_ mut parser::Parser) = match fragment_kind { + FragmentKind::Path => grammar::fragments::path, + FragmentKind::Expr => grammar::fragments::expr, + FragmentKind::Type => grammar::fragments::type_, + FragmentKind::Pattern => grammar::fragments::pattern, + FragmentKind::Item => grammar::fragments::item, + FragmentKind::Block => grammar::fragments::block_expr, + FragmentKind::Visibility => grammar::fragments::opt_visibility, + FragmentKind::MetaItem => grammar::fragments::meta_item, + FragmentKind::Statement => grammar::fragments::stmt, + FragmentKind::Items => grammar::fragments::macro_items, + FragmentKind::Statements => grammar::fragments::macro_stmts, + }; + parse_from_tokens(token_source, tree_sink, parser) +} + +/// A parsing function for a specific braced-block. +pub struct Reparser(fn(&mut parser::Parser)); + +impl Reparser { + /// If the node is a braced block, return the corresponding `Reparser`. + pub fn for_node( + node: SyntaxKind, + first_child: Option, + parent: Option, + ) -> Option { + grammar::reparser(node, first_child, parent).map(Reparser) + } + + /// Re-parse given tokens using this `Reparser`. + /// + /// Tokens must start with `{`, end with `}` and form a valid brace + /// sequence. 
+ pub fn parse(self, token_source: &mut dyn TokenSource, tree_sink: &mut dyn TreeSink) { + let Reparser(r) = self; + let mut p = parser::Parser::new(token_source); + r(&mut p); + let events = p.finish(); + event::process(tree_sink, events); + } +} diff --git a/crates/parser/src/parser.rs b/crates/parser/src/parser.rs new file mode 100644 index 0000000000..d2487acc3b --- /dev/null +++ b/crates/parser/src/parser.rs @@ -0,0 +1,350 @@ +//! FIXME: write short doc here + +use std::cell::Cell; + +use drop_bomb::DropBomb; + +use crate::{ + event::Event, + ParseError, + SyntaxKind::{self, EOF, ERROR, TOMBSTONE}, + TokenSet, TokenSource, T, +}; + +/// `Parser` struct provides the low-level API for +/// navigating through the stream of tokens and +/// constructing the parse tree. The actual parsing +/// happens in the `grammar` module. +/// +/// However, the result of this `Parser` is not a real +/// tree, but rather a flat stream of events of the form +/// "start expression, consume number literal, +/// finish expression". See `Event` docs for more. +pub(crate) struct Parser<'t> { + token_source: &'t mut dyn TokenSource, + events: Vec, + steps: Cell, +} + +impl<'t> Parser<'t> { + pub(super) fn new(token_source: &'t mut dyn TokenSource) -> Parser<'t> { + Parser { token_source, events: Vec::new(), steps: Cell::new(0) } + } + + pub(crate) fn finish(self) -> Vec { + self.events + } + + /// Returns the kind of the current token. + /// If parser has already reached the end of input, + /// the special `EOF` kind is returned. + pub(crate) fn current(&self) -> SyntaxKind { + self.nth(0) + } + + /// Lookahead operation: returns the kind of the next nth + /// token. + pub(crate) fn nth(&self, n: usize) -> SyntaxKind { + assert!(n <= 3); + + let steps = self.steps.get(); + assert!(steps <= 10_000_000, "the parser seems stuck"); + self.steps.set(steps + 1); + + self.token_source.lookahead_nth(n).kind + } + + /// Checks if the current token is `kind`. 
+ pub(crate) fn at(&self, kind: SyntaxKind) -> bool { + self.nth_at(0, kind) + } + + pub(crate) fn nth_at(&self, n: usize, kind: SyntaxKind) -> bool { + match kind { + T![-=] => self.at_composite2(n, T![-], T![=]), + T![->] => self.at_composite2(n, T![-], T![>]), + T![::] => self.at_composite2(n, T![:], T![:]), + T![!=] => self.at_composite2(n, T![!], T![=]), + T![..] => self.at_composite2(n, T![.], T![.]), + T![*=] => self.at_composite2(n, T![*], T![=]), + T![/=] => self.at_composite2(n, T![/], T![=]), + T![&&] => self.at_composite2(n, T![&], T![&]), + T![&=] => self.at_composite2(n, T![&], T![=]), + T![%=] => self.at_composite2(n, T![%], T![=]), + T![^=] => self.at_composite2(n, T![^], T![=]), + T![+=] => self.at_composite2(n, T![+], T![=]), + T![<<] => self.at_composite2(n, T![<], T![<]), + T![<=] => self.at_composite2(n, T![<], T![=]), + T![==] => self.at_composite2(n, T![=], T![=]), + T![=>] => self.at_composite2(n, T![=], T![>]), + T![>=] => self.at_composite2(n, T![>], T![=]), + T![>>] => self.at_composite2(n, T![>], T![>]), + T![|=] => self.at_composite2(n, T![|], T![=]), + T![||] => self.at_composite2(n, T![|], T![|]), + + T![...] => self.at_composite3(n, T![.], T![.], T![.]), + T![..=] => self.at_composite3(n, T![.], T![.], T![=]), + T![<<=] => self.at_composite3(n, T![<], T![<], T![=]), + T![>>=] => self.at_composite3(n, T![>], T![>], T![=]), + + _ => self.token_source.lookahead_nth(n).kind == kind, + } + } + + /// Consume the next token if `kind` matches. + pub(crate) fn eat(&mut self, kind: SyntaxKind) -> bool { + if !self.at(kind) { + return false; + } + let n_raw_tokens = match kind { + T![-=] + | T![->] + | T![::] + | T![!=] + | T![..] + | T![*=] + | T![/=] + | T![&&] + | T![&=] + | T![%=] + | T![^=] + | T![+=] + | T![<<] + | T![<=] + | T![==] + | T![=>] + | T![>=] + | T![>>] + | T![|=] + | T![||] => 2, + + T![...] 
| T![..=] | T![<<=] | T![>>=] => 3, + _ => 1, + }; + self.do_bump(kind, n_raw_tokens); + true + } + + fn at_composite2(&self, n: usize, k1: SyntaxKind, k2: SyntaxKind) -> bool { + let t1 = self.token_source.lookahead_nth(n); + if t1.kind != k1 || !t1.is_jointed_to_next { + return false; + } + let t2 = self.token_source.lookahead_nth(n + 1); + t2.kind == k2 + } + + fn at_composite3(&self, n: usize, k1: SyntaxKind, k2: SyntaxKind, k3: SyntaxKind) -> bool { + let t1 = self.token_source.lookahead_nth(n); + if t1.kind != k1 || !t1.is_jointed_to_next { + return false; + } + let t2 = self.token_source.lookahead_nth(n + 1); + if t2.kind != k2 || !t2.is_jointed_to_next { + return false; + } + let t3 = self.token_source.lookahead_nth(n + 2); + t3.kind == k3 + } + + /// Checks if the current token is in `kinds`. + pub(crate) fn at_ts(&self, kinds: TokenSet) -> bool { + kinds.contains(self.current()) + } + + /// Checks if the current token is contextual keyword with text `t`. + pub(crate) fn at_contextual_kw(&self, kw: &str) -> bool { + self.token_source.is_keyword(kw) + } + + /// Starts a new node in the syntax tree. All nodes and tokens + /// consumed between the `start` and the corresponding `Marker::complete` + /// belong to the same node. + pub(crate) fn start(&mut self) -> Marker { + let pos = self.events.len() as u32; + self.push_event(Event::tombstone()); + Marker::new(pos) + } + + /// Consume the next token if `kind` matches. + pub(crate) fn bump(&mut self, kind: SyntaxKind) { + assert!(self.eat(kind)); + } + + /// Advances the parser by one token + pub(crate) fn bump_any(&mut self) { + let kind = self.nth(0); + if kind == EOF { + return; + } + self.do_bump(kind, 1) + } + + /// Advances the parser by one token, remapping its kind. + /// This is useful to create contextual keywords from + /// identifiers. 
For example, the lexer creates an `union` + /// *identifier* token, but the parser remaps it to the + /// `union` keyword, and keyword is what ends up in the + /// final tree. + pub(crate) fn bump_remap(&mut self, kind: SyntaxKind) { + if self.nth(0) == EOF { + // FIXME: panic!? + return; + } + self.do_bump(kind, 1); + } + + /// Emit error with the `message` + /// FIXME: this should be much more fancy and support + /// structured errors with spans and notes, like rustc + /// does. + pub(crate) fn error>(&mut self, message: T) { + let msg = ParseError(Box::new(message.into())); + self.push_event(Event::Error { msg }) + } + + /// Consume the next token if it is `kind` or emit an error + /// otherwise. + pub(crate) fn expect(&mut self, kind: SyntaxKind) -> bool { + if self.eat(kind) { + return true; + } + self.error(format!("expected {:?}", kind)); + false + } + + /// Create an error node and consume the next token. + pub(crate) fn err_and_bump(&mut self, message: &str) { + self.err_recover(message, TokenSet::EMPTY); + } + + /// Create an error node and consume the next token. + pub(crate) fn err_recover(&mut self, message: &str, recovery: TokenSet) { + match self.current() { + T!['{'] | T!['}'] => { + self.error(message); + return; + } + _ => (), + } + + if self.at_ts(recovery) { + self.error(message); + return; + } + + let m = self.start(); + self.error(message); + self.bump_any(); + m.complete(self, ERROR); + } + + fn do_bump(&mut self, kind: SyntaxKind, n_raw_tokens: u8) { + for _ in 0..n_raw_tokens { + self.token_source.bump(); + } + + self.push_event(Event::Token { kind, n_raw_tokens }); + } + + fn push_event(&mut self, event: Event) { + self.events.push(event) + } +} + +/// See `Parser::start`. 
+pub(crate) struct Marker { + pos: u32, + bomb: DropBomb, +} + +impl Marker { + fn new(pos: u32) -> Marker { + Marker { pos, bomb: DropBomb::new("Marker must be either completed or abandoned") } + } + + /// Finishes the syntax tree node and assigns `kind` to it, + /// and mark the create a `CompletedMarker` for possible future + /// operation like `.precede()` to deal with forward_parent. + pub(crate) fn complete(mut self, p: &mut Parser, kind: SyntaxKind) -> CompletedMarker { + self.bomb.defuse(); + let idx = self.pos as usize; + match &mut p.events[idx] { + Event::Start { kind: slot, .. } => { + *slot = kind; + } + _ => unreachable!(), + } + let finish_pos = p.events.len() as u32; + p.push_event(Event::Finish); + CompletedMarker::new(self.pos, finish_pos, kind) + } + + /// Abandons the syntax tree node. All its children + /// are attached to its parent instead. + pub(crate) fn abandon(mut self, p: &mut Parser) { + self.bomb.defuse(); + let idx = self.pos as usize; + if idx == p.events.len() - 1 { + match p.events.pop() { + Some(Event::Start { kind: TOMBSTONE, forward_parent: None }) => (), + _ => unreachable!(), + } + } + } +} + +pub(crate) struct CompletedMarker { + start_pos: u32, + finish_pos: u32, + kind: SyntaxKind, +} + +impl CompletedMarker { + fn new(start_pos: u32, finish_pos: u32, kind: SyntaxKind) -> Self { + CompletedMarker { start_pos, finish_pos, kind } + } + + /// This method allows to create a new node which starts + /// *before* the current one. That is, parser could start + /// node `A`, then complete it, and then after parsing the + /// whole `A`, decide that it should have started some node + /// `B` before starting `A`. `precede` allows to do exactly + /// that. See also docs about `forward_parent` in `Event::Start`. + /// + /// Given completed events `[START, FINISH]` and its corresponding + /// `CompletedMarker(pos: 0, _)`. 
+ /// Append a new `START` events as `[START, FINISH, NEWSTART]`, + /// then mark `NEWSTART` as `START`'s parent with saving its relative + /// distance to `NEWSTART` into forward_parent(=2 in this case); + pub(crate) fn precede(self, p: &mut Parser) -> Marker { + let new_pos = p.start(); + let idx = self.start_pos as usize; + match &mut p.events[idx] { + Event::Start { forward_parent, .. } => { + *forward_parent = Some(new_pos.pos - self.start_pos); + } + _ => unreachable!(), + } + new_pos + } + + /// Undo this completion and turns into a `Marker` + pub(crate) fn undo_completion(self, p: &mut Parser) -> Marker { + let start_idx = self.start_pos as usize; + let finish_idx = self.finish_pos as usize; + match &mut p.events[start_idx] { + Event::Start { kind, forward_parent: None } => *kind = TOMBSTONE, + _ => unreachable!(), + } + match &mut p.events[finish_idx] { + slot @ Event::Finish => *slot = Event::tombstone(), + _ => unreachable!(), + } + Marker::new(self.start_pos) + } + + pub(crate) fn kind(&self) -> SyntaxKind { + self.kind + } +} diff --git a/crates/ra_parser/src/syntax_kind.rs b/crates/parser/src/syntax_kind.rs similarity index 100% rename from crates/ra_parser/src/syntax_kind.rs rename to crates/parser/src/syntax_kind.rs diff --git a/crates/ra_parser/src/syntax_kind/generated.rs b/crates/parser/src/syntax_kind/generated.rs similarity index 100% rename from crates/ra_parser/src/syntax_kind/generated.rs rename to crates/parser/src/syntax_kind/generated.rs diff --git a/crates/ra_parser/src/token_set.rs b/crates/parser/src/token_set.rs similarity index 100% rename from crates/ra_parser/src/token_set.rs rename to crates/parser/src/token_set.rs diff --git a/crates/paths/Cargo.toml b/crates/paths/Cargo.toml index cbe2c26e20..5ac18d63b3 100644 --- a/crates/paths/Cargo.toml +++ b/crates/paths/Cargo.toml @@ -1,9 +1,9 @@ [package] name = "paths" -version = "0.1.0" +version = "0.0.0" +license = "MIT OR Apache-2.0" authors = ["rust-analyzer developers"] edition = 
"2018" -license = "MIT OR Apache-2.0" [lib] doctest = false diff --git a/crates/proc_macro_api/Cargo.toml b/crates/proc_macro_api/Cargo.toml new file mode 100644 index 0000000000..a3a4c11033 --- /dev/null +++ b/crates/proc_macro_api/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "proc_macro_api" +version = "0.0.0" +license = "MIT OR Apache-2.0" +authors = ["rust-analyzer developers"] +edition = "2018" + +[lib] +doctest = false + +[dependencies] +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +log = "0.4.8" +crossbeam-channel = "0.4.0" +jod-thread = "0.1.1" + +tt = { path = "../tt" } diff --git a/crates/proc_macro_api/src/lib.rs b/crates/proc_macro_api/src/lib.rs new file mode 100644 index 0000000000..15db57eb28 --- /dev/null +++ b/crates/proc_macro_api/src/lib.rs @@ -0,0 +1,111 @@ +//! Client-side Proc-Macro crate +//! +//! We separate proc-macro expanding logic to an extern program to allow +//! different implementations (e.g. wasm or dylib loading). And this crate +//! is used to provide basic infrastructure for communication between two +//! 
processes: Client (RA itself), Server (the external program) + +mod rpc; +mod process; +pub mod msg; + +use std::{ + ffi::OsStr, + io, + path::{Path, PathBuf}, + sync::Arc, +}; + +use tt::{SmolStr, Subtree}; + +use crate::process::{ProcMacroProcessSrv, ProcMacroProcessThread}; + +pub use rpc::{ExpansionResult, ExpansionTask, ListMacrosResult, ListMacrosTask, ProcMacroKind}; + +#[derive(Debug, Clone)] +pub struct ProcMacroProcessExpander { + process: Arc, + dylib_path: PathBuf, + name: SmolStr, +} + +impl Eq for ProcMacroProcessExpander {} +impl PartialEq for ProcMacroProcessExpander { + fn eq(&self, other: &Self) -> bool { + self.name == other.name + && self.dylib_path == other.dylib_path + && Arc::ptr_eq(&self.process, &other.process) + } +} + +impl tt::TokenExpander for ProcMacroProcessExpander { + fn expand( + &self, + subtree: &Subtree, + _attr: Option<&Subtree>, + ) -> Result { + self.process.custom_derive(&self.dylib_path, subtree, &self.name) + } +} + +#[derive(Debug)] +enum ProcMacroClientKind { + Process { process: Arc, thread: ProcMacroProcessThread }, + Dummy, +} + +#[derive(Debug)] +pub struct ProcMacroClient { + kind: ProcMacroClientKind, +} + +impl ProcMacroClient { + pub fn extern_process( + process_path: PathBuf, + args: impl IntoIterator>, + ) -> io::Result { + let (thread, process) = ProcMacroProcessSrv::run(process_path, args)?; + Ok(ProcMacroClient { + kind: ProcMacroClientKind::Process { process: Arc::new(process), thread }, + }) + } + + pub fn dummy() -> ProcMacroClient { + ProcMacroClient { kind: ProcMacroClientKind::Dummy } + } + + pub fn by_dylib_path(&self, dylib_path: &Path) -> Vec<(SmolStr, Arc)> { + match &self.kind { + ProcMacroClientKind::Dummy => vec![], + ProcMacroClientKind::Process { process, .. } => { + let macros = match process.find_proc_macros(dylib_path) { + Err(err) => { + eprintln!("Failed to find proc macros. 
Error: {:#?}", err); + return vec![]; + } + Ok(macros) => macros, + }; + + macros + .into_iter() + .filter_map(|(name, kind)| { + // FIXME: Support custom derive only for now. + match kind { + ProcMacroKind::CustomDerive => { + let name = SmolStr::new(&name); + let expander: Arc = + Arc::new(ProcMacroProcessExpander { + process: process.clone(), + name: name.clone(), + dylib_path: dylib_path.into(), + }); + Some((name, expander)) + } + _ => None, + } + }) + .collect() + } + } + } +} diff --git a/crates/proc_macro_api/src/msg.rs b/crates/proc_macro_api/src/msg.rs new file mode 100644 index 0000000000..f84ebdbc57 --- /dev/null +++ b/crates/proc_macro_api/src/msg.rs @@ -0,0 +1,89 @@ +//! Defines messages for cross-process message passing based on `ndjson` wire protocol + +use std::{ + convert::TryFrom, + io::{self, BufRead, Write}, +}; + +use serde::{de::DeserializeOwned, Deserialize, Serialize}; + +use crate::{ + rpc::{ListMacrosResult, ListMacrosTask}, + ExpansionResult, ExpansionTask, +}; + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub enum Request { + ListMacro(ListMacrosTask), + ExpansionMacro(ExpansionTask), +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub enum Response { + Error(ResponseError), + ListMacro(ListMacrosResult), + ExpansionMacro(ExpansionResult), +} + +macro_rules! 
impl_try_from_response { + ($ty:ty, $tag:ident) => { + impl TryFrom for $ty { + type Error = &'static str; + fn try_from(value: Response) -> Result { + match value { + Response::$tag(res) => Ok(res), + _ => Err(concat!("Failed to convert response to ", stringify!($tag))), + } + } + } + }; +} + +impl_try_from_response!(ListMacrosResult, ListMacro); +impl_try_from_response!(ExpansionResult, ExpansionMacro); + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct ResponseError { + pub code: ErrorCode, + pub message: String, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub enum ErrorCode { + ServerErrorEnd, + ExpansionError, +} + +pub trait Message: Serialize + DeserializeOwned { + fn read(inp: &mut impl BufRead) -> io::Result> { + Ok(match read_json(inp)? { + None => None, + Some(text) => Some(serde_json::from_str(&text)?), + }) + } + fn write(self, out: &mut impl Write) -> io::Result<()> { + let text = serde_json::to_string(&self)?; + write_json(out, &text) + } +} + +impl Message for Request {} +impl Message for Response {} + +fn read_json(inp: &mut impl BufRead) -> io::Result> { + let mut buf = String::new(); + inp.read_line(&mut buf)?; + buf.pop(); // Remove traling '\n' + Ok(match buf.len() { + 0 => None, + _ => Some(buf), + }) +} + +fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> { + log::debug!("> {}", msg); + out.write_all(msg.as_bytes())?; + out.write_all(b"\n")?; + out.flush()?; + Ok(()) +} diff --git a/crates/proc_macro_api/src/process.rs b/crates/proc_macro_api/src/process.rs new file mode 100644 index 0000000000..51ffcaa786 --- /dev/null +++ b/crates/proc_macro_api/src/process.rs @@ -0,0 +1,201 @@ +//! 
Handle process life-time and message passing for proc-macro client + +use std::{ + convert::{TryFrom, TryInto}, + ffi::{OsStr, OsString}, + io::{self, BufRead, BufReader, Write}, + path::{Path, PathBuf}, + process::{Child, Command, Stdio}, + sync::{Arc, Weak}, +}; + +use crossbeam_channel::{bounded, Receiver, Sender}; +use tt::Subtree; + +use crate::{ + msg::{ErrorCode, Message, Request, Response, ResponseError}, + rpc::{ExpansionResult, ExpansionTask, ListMacrosResult, ListMacrosTask, ProcMacroKind}, +}; + +#[derive(Debug, Default)] +pub(crate) struct ProcMacroProcessSrv { + inner: Option>>, +} + +#[derive(Debug)] +pub(crate) struct ProcMacroProcessThread { + // XXX: drop order is significant + sender: Arc>, + handle: jod_thread::JoinHandle<()>, +} + +impl ProcMacroProcessSrv { + pub fn run( + process_path: PathBuf, + args: impl IntoIterator>, + ) -> io::Result<(ProcMacroProcessThread, ProcMacroProcessSrv)> { + let process = Process::run(process_path, args)?; + + let (task_tx, task_rx) = bounded(0); + let handle = jod_thread::spawn(move || { + client_loop(task_rx, process); + }); + + let task_tx = Arc::new(task_tx); + let srv = ProcMacroProcessSrv { inner: Some(Arc::downgrade(&task_tx)) }; + let thread = ProcMacroProcessThread { handle, sender: task_tx }; + + Ok((thread, srv)) + } + + pub fn find_proc_macros( + &self, + dylib_path: &Path, + ) -> Result, tt::ExpansionError> { + let task = ListMacrosTask { lib: dylib_path.to_path_buf() }; + + let result: ListMacrosResult = self.send_task(Request::ListMacro(task))?; + Ok(result.macros) + } + + pub fn custom_derive( + &self, + dylib_path: &Path, + subtree: &Subtree, + derive_name: &str, + ) -> Result { + let task = ExpansionTask { + macro_body: subtree.clone(), + macro_name: derive_name.to_string(), + attributes: None, + lib: dylib_path.to_path_buf(), + }; + + let result: ExpansionResult = self.send_task(Request::ExpansionMacro(task))?; + Ok(result.expansion) + } + + pub fn send_task(&self, req: Request) -> Result + 
where + R: TryFrom, + { + let sender = match &self.inner { + None => return Err(tt::ExpansionError::Unknown("No sender is found.".to_string())), + Some(it) => it, + }; + + let (result_tx, result_rx) = bounded(0); + let sender = match sender.upgrade() { + None => { + return Err(tt::ExpansionError::Unknown("Proc macro process is closed.".into())) + } + Some(it) => it, + }; + sender.send(Task { req, result_tx }).unwrap(); + let res = result_rx + .recv() + .map_err(|_| tt::ExpansionError::Unknown("Proc macro thread is closed.".into()))?; + + match res { + Some(Response::Error(err)) => { + return Err(tt::ExpansionError::ExpansionError(err.message)); + } + Some(res) => Ok(res.try_into().map_err(|err| { + tt::ExpansionError::Unknown(format!("Fail to get response, reason : {:#?} ", err)) + })?), + None => Err(tt::ExpansionError::Unknown("Empty result".into())), + } + } +} + +fn client_loop(task_rx: Receiver, mut process: Process) { + let (mut stdin, mut stdout) = match process.stdio() { + None => return, + Some(it) => it, + }; + + for task in task_rx { + let Task { req, result_tx } = task; + + match send_request(&mut stdin, &mut stdout, req) { + Ok(res) => result_tx.send(res).unwrap(), + Err(_err) => { + let res = Response::Error(ResponseError { + code: ErrorCode::ServerErrorEnd, + message: "Server closed".into(), + }); + result_tx.send(res.into()).unwrap(); + // Restart the process + if process.restart().is_err() { + break; + } + let stdio = match process.stdio() { + None => break, + Some(it) => it, + }; + stdin = stdio.0; + stdout = stdio.1; + } + } + } +} + +struct Task { + req: Request, + result_tx: Sender>, +} + +struct Process { + path: PathBuf, + args: Vec, + child: Child, +} + +impl Drop for Process { + fn drop(&mut self) { + let _ = self.child.kill(); + } +} + +impl Process { + fn run( + path: PathBuf, + args: impl IntoIterator>, + ) -> io::Result { + let args = args.into_iter().map(|s| s.as_ref().into()).collect(); + let child = mk_child(&path, &args)?; + 
Ok(Process { path, args, child }) + } + + fn restart(&mut self) -> io::Result<()> { + let _ = self.child.kill(); + self.child = mk_child(&self.path, &self.args)?; + Ok(()) + } + + fn stdio(&mut self) -> Option<(impl Write, impl BufRead)> { + let stdin = self.child.stdin.take()?; + let stdout = self.child.stdout.take()?; + let read = BufReader::new(stdout); + + Some((stdin, read)) + } +} + +fn mk_child(path: &Path, args: impl IntoIterator>) -> io::Result { + Command::new(&path) + .args(args) + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .stderr(Stdio::inherit()) + .spawn() +} + +fn send_request( + mut writer: &mut impl Write, + mut reader: &mut impl BufRead, + req: Request, +) -> io::Result> { + req.write(&mut writer)?; + Ok(Response::read(&mut reader)?) +} diff --git a/crates/proc_macro_api/src/rpc.rs b/crates/proc_macro_api/src/rpc.rs new file mode 100644 index 0000000000..47624163ee --- /dev/null +++ b/crates/proc_macro_api/src/rpc.rs @@ -0,0 +1,267 @@ +//! Data structure serialization related stuff for RPC +//! +//! Defines all necessary rpc serialization data structures, +//! which includes `tt` related data and some task messages. +//! Although adding `Serialize` and `Deserialize` traits to `tt` directly seems +//! to be much easier, we deliberately duplicate `tt` structs with `#[serde(with = "XXDef")]` +//! for separation of code responsibility. 
+ +use std::path::PathBuf; + +use serde::{Deserialize, Serialize}; +use tt::{ + Delimiter, DelimiterKind, Ident, Leaf, Literal, Punct, SmolStr, Spacing, Subtree, TokenId, + TokenTree, +}; + +#[derive(Clone, Eq, PartialEq, Debug, Serialize, Deserialize)] +pub struct ListMacrosTask { + pub lib: PathBuf, +} + +#[derive(Clone, Eq, PartialEq, Debug, Serialize, Deserialize)] +pub enum ProcMacroKind { + CustomDerive, + FuncLike, + Attr, +} + +#[derive(Clone, Eq, PartialEq, Debug, Default, Serialize, Deserialize)] +pub struct ListMacrosResult { + pub macros: Vec<(String, ProcMacroKind)>, +} + +#[derive(Clone, Eq, PartialEq, Debug, Serialize, Deserialize)] +pub struct ExpansionTask { + /// Argument of macro call. + /// + /// In custom derive this will be a struct or enum; in attribute-like macro - underlying + /// item; in function-like macro - the macro body. + #[serde(with = "SubtreeDef")] + pub macro_body: Subtree, + + /// Name of macro to expand. + /// + /// In custom derive this is the name of the derived trait (`Serialize`, `Getters`, etc.). + /// In attribute-like and function-like macros - single name of macro itself (`show_streams`). + pub macro_name: String, + + /// Possible attributes for the attribute-like macros. 
+ #[serde(with = "opt_subtree_def")] + pub attributes: Option, + + pub lib: PathBuf, +} + +#[derive(Clone, Eq, PartialEq, Debug, Default, Serialize, Deserialize)] +pub struct ExpansionResult { + #[serde(with = "SubtreeDef")] + pub expansion: Subtree, +} + +#[derive(Serialize, Deserialize)] +#[serde(remote = "DelimiterKind")] +enum DelimiterKindDef { + Parenthesis, + Brace, + Bracket, +} + +#[derive(Serialize, Deserialize)] +#[serde(remote = "TokenId")] +struct TokenIdDef(u32); + +#[derive(Serialize, Deserialize)] +#[serde(remote = "Delimiter")] +struct DelimiterDef { + #[serde(with = "TokenIdDef")] + pub id: TokenId, + #[serde(with = "DelimiterKindDef")] + pub kind: DelimiterKind, +} + +#[derive(Serialize, Deserialize)] +#[serde(remote = "Subtree")] +struct SubtreeDef { + #[serde(default, with = "opt_delimiter_def")] + pub delimiter: Option, + #[serde(with = "vec_token_tree")] + pub token_trees: Vec, +} + +#[derive(Serialize, Deserialize)] +#[serde(remote = "TokenTree")] +enum TokenTreeDef { + #[serde(with = "LeafDef")] + Leaf(Leaf), + #[serde(with = "SubtreeDef")] + Subtree(Subtree), +} + +#[derive(Serialize, Deserialize)] +#[serde(remote = "Leaf")] +enum LeafDef { + #[serde(with = "LiteralDef")] + Literal(Literal), + #[serde(with = "PunctDef")] + Punct(Punct), + #[serde(with = "IdentDef")] + Ident(Ident), +} + +#[derive(Serialize, Deserialize)] +#[serde(remote = "Literal")] +struct LiteralDef { + pub text: SmolStr, + #[serde(with = "TokenIdDef")] + pub id: TokenId, +} + +#[derive(Serialize, Deserialize)] +#[serde(remote = "Punct")] +struct PunctDef { + pub char: char, + #[serde(with = "SpacingDef")] + pub spacing: Spacing, + #[serde(with = "TokenIdDef")] + pub id: TokenId, +} + +#[derive(Serialize, Deserialize)] +#[serde(remote = "Spacing")] +enum SpacingDef { + Alone, + Joint, +} + +#[derive(Serialize, Deserialize)] +#[serde(remote = "Ident")] +struct IdentDef { + pub text: SmolStr, + #[serde(with = "TokenIdDef")] + pub id: TokenId, +} + +mod opt_delimiter_def { 
+ use super::{Delimiter, DelimiterDef}; + use serde::{Deserialize, Deserializer, Serialize, Serializer}; + + pub fn serialize(value: &Option, serializer: S) -> Result + where + S: Serializer, + { + #[derive(Serialize)] + struct Helper<'a>(#[serde(with = "DelimiterDef")] &'a Delimiter); + value.as_ref().map(Helper).serialize(serializer) + } + + pub fn deserialize<'de, D>(deserializer: D) -> Result, D::Error> + where + D: Deserializer<'de>, + { + #[derive(Deserialize)] + struct Helper(#[serde(with = "DelimiterDef")] Delimiter); + let helper = Option::deserialize(deserializer)?; + Ok(helper.map(|Helper(external)| external)) + } +} + +mod opt_subtree_def { + use super::{Subtree, SubtreeDef}; + use serde::{Deserialize, Deserializer, Serialize, Serializer}; + + pub fn serialize(value: &Option, serializer: S) -> Result + where + S: Serializer, + { + #[derive(Serialize)] + struct Helper<'a>(#[serde(with = "SubtreeDef")] &'a Subtree); + value.as_ref().map(Helper).serialize(serializer) + } + + pub fn deserialize<'de, D>(deserializer: D) -> Result, D::Error> + where + D: Deserializer<'de>, + { + #[derive(Deserialize)] + struct Helper(#[serde(with = "SubtreeDef")] Subtree); + let helper = Option::deserialize(deserializer)?; + Ok(helper.map(|Helper(external)| external)) + } +} + +mod vec_token_tree { + use super::{TokenTree, TokenTreeDef}; + use serde::{ser::SerializeSeq, Deserialize, Deserializer, Serialize, Serializer}; + + pub fn serialize(value: &Vec, serializer: S) -> Result + where + S: Serializer, + { + #[derive(Serialize)] + struct Helper<'a>(#[serde(with = "TokenTreeDef")] &'a TokenTree); + + let items: Vec<_> = value.iter().map(Helper).collect(); + let mut seq = serializer.serialize_seq(Some(items.len()))?; + for element in items { + seq.serialize_element(&element)?; + } + seq.end() + } + + pub fn deserialize<'de, D>(deserializer: D) -> Result, D::Error> + where + D: Deserializer<'de>, + { + #[derive(Deserialize)] + struct Helper(#[serde(with = "TokenTreeDef")] 
TokenTree); + + let helper = Vec::deserialize(deserializer)?; + Ok(helper.into_iter().map(|Helper(external)| external).collect()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + fn fixture_token_tree() -> Subtree { + let mut subtree = Subtree::default(); + subtree + .token_trees + .push(TokenTree::Leaf(Ident { text: "struct".into(), id: TokenId(0) }.into())); + subtree + .token_trees + .push(TokenTree::Leaf(Ident { text: "Foo".into(), id: TokenId(1) }.into())); + subtree.token_trees.push(TokenTree::Subtree( + Subtree { + delimiter: Some(Delimiter { id: TokenId(2), kind: DelimiterKind::Brace }), + token_trees: vec![], + } + .into(), + )); + subtree + } + + #[test] + fn test_proc_macro_rpc_works() { + let tt = fixture_token_tree(); + let task = ExpansionTask { + macro_body: tt.clone(), + macro_name: Default::default(), + attributes: None, + lib: Default::default(), + }; + + let json = serde_json::to_string(&task).unwrap(); + let back: ExpansionTask = serde_json::from_str(&json).unwrap(); + + assert_eq!(task.macro_body, back.macro_body); + + let result = ExpansionResult { expansion: tt.clone() }; + let json = serde_json::to_string(&result).unwrap(); + let back: ExpansionResult = serde_json::from_str(&json).unwrap(); + + assert_eq!(result, back); + } +} diff --git a/crates/proc_macro_srv/Cargo.toml b/crates/proc_macro_srv/Cargo.toml new file mode 100644 index 0000000000..7171f08084 --- /dev/null +++ b/crates/proc_macro_srv/Cargo.toml @@ -0,0 +1,27 @@ +[package] +name = "proc_macro_srv" +version = "0.0.0" +license = "MIT OR Apache-2.0" +authors = ["rust-analyzer developers"] +edition = "2018" + +[lib] +doctest = false + +[dependencies] +goblin = "0.2.1" +libloading = "0.6.0" +memmap = "0.7" + +tt = { path = "../tt" } +mbe = { path = "../mbe" } +proc_macro_api = { path = "../proc_macro_api" } +test_utils = { path = "../test_utils" } + +[dev-dependencies] +cargo_metadata = "0.11.1" +difference = "2.0.0" +# used as proc macro test target +serde_derive = "1.0.106" + 
+toolchain = { path = "../toolchain" } diff --git a/crates/proc_macro_srv/src/cli.rs b/crates/proc_macro_srv/src/cli.rs new file mode 100644 index 0000000000..d428b95675 --- /dev/null +++ b/crates/proc_macro_srv/src/cli.rs @@ -0,0 +1,39 @@ +//! Driver for proc macro server + +use crate::ProcMacroSrv; +use proc_macro_api::msg::{self, Message}; +use std::io; + +pub fn run() -> io::Result<()> { + let mut srv = ProcMacroSrv::default(); + + while let Some(req) = read_request()? { + let res = match req { + msg::Request::ListMacro(task) => srv.list_macros(&task).map(msg::Response::ListMacro), + msg::Request::ExpansionMacro(task) => { + srv.expand(&task).map(msg::Response::ExpansionMacro) + } + }; + + let msg = res.unwrap_or_else(|err| { + msg::Response::Error(msg::ResponseError { + code: msg::ErrorCode::ExpansionError, + message: err, + }) + }); + + if let Err(err) = write_response(msg) { + eprintln!("Write message error: {}", err); + } + } + + Ok(()) +} + +fn read_request() -> io::Result> { + msg::Request::read(&mut io::stdin().lock()) +} + +fn write_response(msg: msg::Response) -> io::Result<()> { + msg.write(&mut io::stdout().lock()) +} diff --git a/crates/proc_macro_srv/src/dylib.rs b/crates/proc_macro_srv/src/dylib.rs new file mode 100644 index 0000000000..f8f705da8c --- /dev/null +++ b/crates/proc_macro_srv/src/dylib.rs @@ -0,0 +1,224 @@ +//! 
Handles dynamic library loading for proc macro + +use crate::{proc_macro::bridge, rustc_server::TokenStream}; +use std::fs::File; +use std::path::{Path, PathBuf}; + +use goblin::{mach::Mach, Object}; +use libloading::Library; +use memmap::Mmap; +use proc_macro_api::ProcMacroKind; +use std::io; + +const NEW_REGISTRAR_SYMBOL: &str = "_rustc_proc_macro_decls_"; + +fn invalid_data_err(e: impl Into>) -> io::Error { + io::Error::new(io::ErrorKind::InvalidData, e) +} + +fn is_derive_registrar_symbol(symbol: &str) -> bool { + symbol.contains(NEW_REGISTRAR_SYMBOL) +} + +fn find_registrar_symbol(file: &Path) -> io::Result> { + let file = File::open(file)?; + let buffer = unsafe { Mmap::map(&file)? }; + let object = Object::parse(&buffer).map_err(invalid_data_err)?; + + let name = match object { + Object::Elf(elf) => { + let symbols = elf.dynstrtab.to_vec().map_err(invalid_data_err)?; + symbols.into_iter().find(|s| is_derive_registrar_symbol(s)).map(&str::to_owned) + } + Object::PE(pe) => pe + .exports + .iter() + .flat_map(|s| s.name) + .find(|s| is_derive_registrar_symbol(s)) + .map(&str::to_owned), + Object::Mach(Mach::Binary(binary)) => { + let exports = binary.exports().map_err(invalid_data_err)?; + exports + .iter() + .map(|s| { + // In macos doc: + // https://developer.apple.com/library/archive/documentation/System/Conceptual/ManPages_iPhoneOS/man3/dlsym.3.html + // Unlike other dyld API's, the symbol name passed to dlsym() must NOT be + // prepended with an underscore. + if s.name.starts_with('_') { + &s.name[1..] + } else { + &s.name + } + }) + .find(|s| is_derive_registrar_symbol(s)) + .map(&str::to_owned) + } + _ => return Ok(None), + }; + return Ok(name); +} + +/// Loads dynamic library in platform dependent manner. +/// +/// For unix, you have to use RTLD_DEEPBIND flag to escape problems described +/// [here](https://github.com/fedochet/rust-proc-macro-panic-inside-panic-expample) +/// and [here](https://github.com/rust-lang/rust/issues/60593). 
+/// +/// Usage of RTLD_DEEPBIND +/// [here](https://github.com/fedochet/rust-proc-macro-panic-inside-panic-expample/issues/1) +/// +/// It seems that on Windows that behaviour is default, so we do nothing in that case. +#[cfg(windows)] +fn load_library(file: &Path) -> Result { + Library::new(file) +} + +#[cfg(unix)] +fn load_library(file: &Path) -> Result { + use libloading::os::unix::Library as UnixLibrary; + use std::os::raw::c_int; + + const RTLD_NOW: c_int = 0x00002; + const RTLD_DEEPBIND: c_int = 0x00008; + + UnixLibrary::open(Some(file), RTLD_NOW | RTLD_DEEPBIND).map(|lib| lib.into()) +} + +struct ProcMacroLibraryLibloading { + // Hold the dylib to prevent it from unloading + _lib: Library, + exported_macros: Vec, +} + +impl ProcMacroLibraryLibloading { + fn open(file: &Path) -> io::Result { + let symbol_name = find_registrar_symbol(file)?.ok_or_else(|| { + invalid_data_err(format!("Cannot find registrar symbol in file {}", file.display())) + })?; + + let lib = load_library(file).map_err(invalid_data_err)?; + let exported_macros = { + let macros: libloading::Symbol<&&[bridge::client::ProcMacro]> = + unsafe { lib.get(symbol_name.as_bytes()) }.map_err(invalid_data_err)?; + macros.to_vec() + }; + + Ok(ProcMacroLibraryLibloading { _lib: lib, exported_macros }) + } +} + +pub struct Expander { + inner: ProcMacroLibraryLibloading, +} + +impl Expander { + pub fn new(lib: &Path) -> io::Result { + // Some libraries for dynamic loading require canonicalized path even when it is + // already absolute + let lib = lib.canonicalize()?; + + let lib = ensure_file_with_lock_free_access(&lib)?; + + let library = ProcMacroLibraryLibloading::open(&lib)?; + + Ok(Expander { inner: library }) + } + + pub fn expand( + &self, + macro_name: &str, + macro_body: &tt::Subtree, + attributes: Option<&tt::Subtree>, + ) -> Result { + let parsed_body = TokenStream::with_subtree(macro_body.clone()); + + let parsed_attributes = attributes + .map_or(crate::rustc_server::TokenStream::new(), 
|attr| { + TokenStream::with_subtree(attr.clone()) + }); + + for proc_macro in &self.inner.exported_macros { + match proc_macro { + bridge::client::ProcMacro::CustomDerive { trait_name, client, .. } + if *trait_name == macro_name => + { + let res = client.run( + &crate::proc_macro::bridge::server::SameThread, + crate::rustc_server::Rustc::default(), + parsed_body, + ); + return res.map(|it| it.subtree); + } + bridge::client::ProcMacro::Bang { name, client } if *name == macro_name => { + let res = client.run( + &crate::proc_macro::bridge::server::SameThread, + crate::rustc_server::Rustc::default(), + parsed_body, + ); + return res.map(|it| it.subtree); + } + bridge::client::ProcMacro::Attr { name, client } if *name == macro_name => { + let res = client.run( + &crate::proc_macro::bridge::server::SameThread, + crate::rustc_server::Rustc::default(), + parsed_attributes, + parsed_body, + ); + return res.map(|it| it.subtree); + } + _ => continue, + } + } + + Err(bridge::PanicMessage::String("Nothing to expand".to_string())) + } + + pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> { + self.inner + .exported_macros + .iter() + .map(|proc_macro| match proc_macro { + bridge::client::ProcMacro::CustomDerive { trait_name, .. } => { + (trait_name.to_string(), ProcMacroKind::CustomDerive) + } + bridge::client::ProcMacro::Bang { name, .. } => { + (name.to_string(), ProcMacroKind::FuncLike) + } + bridge::client::ProcMacro::Attr { name, .. 
} => { + (name.to_string(), ProcMacroKind::Attr) + } + }) + .collect() + } +} + +/// Copy the dylib to temp directory to prevent locking in Windows +#[cfg(windows)] +fn ensure_file_with_lock_free_access(path: &Path) -> io::Result { + use std::{ffi::OsString, time::SystemTime}; + + let mut to = std::env::temp_dir(); + + let file_name = path.file_name().ok_or_else(|| { + io::Error::new( + io::ErrorKind::InvalidInput, + format!("File path is invalid: {}", path.display()), + ) + })?; + + // generate a time deps unique number + let t = SystemTime::now().duration_since(std::time::UNIX_EPOCH).expect("Time went backwards"); + + let mut unique_name = OsString::from(t.as_millis().to_string()); + unique_name.push(file_name); + + to.push(unique_name); + std::fs::copy(path, &to).unwrap(); + Ok(to) +} + +#[cfg(unix)] +fn ensure_file_with_lock_free_access(path: &Path) -> io::Result { + Ok(path.to_path_buf()) +} diff --git a/crates/proc_macro_srv/src/lib.rs b/crates/proc_macro_srv/src/lib.rs new file mode 100644 index 0000000000..7e4e4ad505 --- /dev/null +++ b/crates/proc_macro_srv/src/lib.rs @@ -0,0 +1,69 @@ +//! RA Proc Macro Server +//! +//! This library is able to call compiled Rust custom derive dynamic libraries on arbitrary code. +//! The general idea here is based on https://github.com/fedochet/rust-proc-macro-expander. +//! +//! But we adapt it to better fit RA needs: +//! +//! * We use `tt` for proc-macro `TokenStream` server, it is easier to manipulate and interact with +//! RA than `proc-macro2` token stream. +//! * By **copying** the whole rustc `lib_proc_macro` code, we are able to build this with `stable` +//! rustc rather than `unstable`. 
(Although in general ABI compatibility is still an issue)… + +#[allow(dead_code)] +#[doc(hidden)] +mod proc_macro; + +#[doc(hidden)] +mod rustc_server; + +mod dylib; + +use proc_macro::bridge::client::TokenStream; +use proc_macro_api::{ExpansionResult, ExpansionTask, ListMacrosResult, ListMacrosTask}; +use std::{ + collections::{hash_map::Entry, HashMap}, + fs, + path::{Path, PathBuf}, + time::SystemTime, +}; + +#[derive(Default)] +pub(crate) struct ProcMacroSrv { + expanders: HashMap<(PathBuf, SystemTime), dylib::Expander>, +} + +impl ProcMacroSrv { + pub fn expand(&mut self, task: &ExpansionTask) -> Result { + let expander = self.expander(&task.lib)?; + match expander.expand(&task.macro_name, &task.macro_body, task.attributes.as_ref()) { + Ok(expansion) => Ok(ExpansionResult { expansion }), + Err(msg) => { + Err(format!("Cannot perform expansion for {}: error {:?}", &task.macro_name, msg)) + } + } + } + + pub fn list_macros(&mut self, task: &ListMacrosTask) -> Result { + let expander = self.expander(&task.lib)?; + Ok(ListMacrosResult { macros: expander.list_macros() }) + } + + fn expander(&mut self, path: &Path) -> Result<&dylib::Expander, String> { + let time = fs::metadata(path).and_then(|it| it.modified()).map_err(|err| { + format!("Failed to get file metadata for {}: {:?}", path.display(), err) + })?; + + Ok(match self.expanders.entry((path.to_path_buf(), time)) { + Entry::Vacant(v) => v.insert(dylib::Expander::new(path).map_err(|err| { + format!("Cannot create expander for {}: {:?}", path.display(), err) + })?), + Entry::Occupied(e) => e.into_mut(), + }) + } +} + +pub mod cli; + +#[cfg(test)] +mod tests; diff --git a/crates/ra_proc_macro_srv/src/proc_macro/bridge/buffer.rs b/crates/proc_macro_srv/src/proc_macro/bridge/buffer.rs similarity index 100% rename from crates/ra_proc_macro_srv/src/proc_macro/bridge/buffer.rs rename to crates/proc_macro_srv/src/proc_macro/bridge/buffer.rs diff --git a/crates/ra_proc_macro_srv/src/proc_macro/bridge/client.rs 
b/crates/proc_macro_srv/src/proc_macro/bridge/client.rs similarity index 100% rename from crates/ra_proc_macro_srv/src/proc_macro/bridge/client.rs rename to crates/proc_macro_srv/src/proc_macro/bridge/client.rs diff --git a/crates/ra_proc_macro_srv/src/proc_macro/bridge/closure.rs b/crates/proc_macro_srv/src/proc_macro/bridge/closure.rs similarity index 100% rename from crates/ra_proc_macro_srv/src/proc_macro/bridge/closure.rs rename to crates/proc_macro_srv/src/proc_macro/bridge/closure.rs diff --git a/crates/ra_proc_macro_srv/src/proc_macro/bridge/handle.rs b/crates/proc_macro_srv/src/proc_macro/bridge/handle.rs similarity index 100% rename from crates/ra_proc_macro_srv/src/proc_macro/bridge/handle.rs rename to crates/proc_macro_srv/src/proc_macro/bridge/handle.rs diff --git a/crates/ra_proc_macro_srv/src/proc_macro/bridge/mod.rs b/crates/proc_macro_srv/src/proc_macro/bridge/mod.rs similarity index 100% rename from crates/ra_proc_macro_srv/src/proc_macro/bridge/mod.rs rename to crates/proc_macro_srv/src/proc_macro/bridge/mod.rs diff --git a/crates/ra_proc_macro_srv/src/proc_macro/bridge/rpc.rs b/crates/proc_macro_srv/src/proc_macro/bridge/rpc.rs similarity index 100% rename from crates/ra_proc_macro_srv/src/proc_macro/bridge/rpc.rs rename to crates/proc_macro_srv/src/proc_macro/bridge/rpc.rs diff --git a/crates/ra_proc_macro_srv/src/proc_macro/bridge/scoped_cell.rs b/crates/proc_macro_srv/src/proc_macro/bridge/scoped_cell.rs similarity index 100% rename from crates/ra_proc_macro_srv/src/proc_macro/bridge/scoped_cell.rs rename to crates/proc_macro_srv/src/proc_macro/bridge/scoped_cell.rs diff --git a/crates/ra_proc_macro_srv/src/proc_macro/bridge/server.rs b/crates/proc_macro_srv/src/proc_macro/bridge/server.rs similarity index 100% rename from crates/ra_proc_macro_srv/src/proc_macro/bridge/server.rs rename to crates/proc_macro_srv/src/proc_macro/bridge/server.rs diff --git a/crates/ra_proc_macro_srv/src/proc_macro/diagnostic.rs 
b/crates/proc_macro_srv/src/proc_macro/diagnostic.rs similarity index 100% rename from crates/ra_proc_macro_srv/src/proc_macro/diagnostic.rs rename to crates/proc_macro_srv/src/proc_macro/diagnostic.rs diff --git a/crates/ra_proc_macro_srv/src/proc_macro/mod.rs b/crates/proc_macro_srv/src/proc_macro/mod.rs similarity index 100% rename from crates/ra_proc_macro_srv/src/proc_macro/mod.rs rename to crates/proc_macro_srv/src/proc_macro/mod.rs diff --git a/crates/proc_macro_srv/src/rustc_server.rs b/crates/proc_macro_srv/src/rustc_server.rs new file mode 100644 index 0000000000..7d1695c86a --- /dev/null +++ b/crates/proc_macro_srv/src/rustc_server.rs @@ -0,0 +1,704 @@ +//! Rustc proc-macro server implementation with tt +//! +//! Based on idea from https://github.com/fedochet/rust-proc-macro-expander +//! The lib-proc-macro server backend is `TokenStream`-agnostic, such that +//! we could provide any TokenStream implementation. +//! The original idea from fedochet is using proc-macro2 as backend, +//! we use tt instead for better intergation with RA. +//! +//! FIXME: No span and source file information is implemented yet + +use crate::proc_macro::bridge::{self, server}; + +use std::collections::{Bound, HashMap}; +use std::hash::Hash; +use std::iter::FromIterator; +use std::str::FromStr; +use std::{ascii, vec::IntoIter}; + +type Group = tt::Subtree; +type TokenTree = tt::TokenTree; +type Punct = tt::Punct; +type Spacing = tt::Spacing; +type Literal = tt::Literal; +type Span = tt::TokenId; + +#[derive(Debug, Clone)] +pub struct TokenStream { + pub subtree: tt::Subtree, +} + +impl TokenStream { + pub fn new() -> Self { + TokenStream { subtree: Default::default() } + } + + pub fn with_subtree(subtree: tt::Subtree) -> Self { + TokenStream { subtree } + } + + pub fn is_empty(&self) -> bool { + self.subtree.token_trees.is_empty() + } +} + +/// Creates a token stream containing a single token tree. 
+impl From for TokenStream { + fn from(tree: TokenTree) -> TokenStream { + TokenStream { subtree: tt::Subtree { delimiter: None, token_trees: vec![tree] } } + } +} + +/// Collects a number of token trees into a single stream. +impl FromIterator for TokenStream { + fn from_iter>(trees: I) -> Self { + trees.into_iter().map(TokenStream::from).collect() + } +} + +/// A "flattening" operation on token streams, collects token trees +/// from multiple token streams into a single stream. +impl FromIterator for TokenStream { + fn from_iter>(streams: I) -> Self { + let mut builder = TokenStreamBuilder::new(); + streams.into_iter().for_each(|stream| builder.push(stream)); + builder.build() + } +} + +impl Extend for TokenStream { + fn extend>(&mut self, trees: I) { + self.extend(trees.into_iter().map(TokenStream::from)); + } +} + +impl Extend for TokenStream { + fn extend>(&mut self, streams: I) { + for item in streams { + for tkn in item { + match tkn { + tt::TokenTree::Subtree(subtree) if subtree.delimiter.is_none() => { + self.subtree.token_trees.extend(subtree.token_trees); + } + _ => { + self.subtree.token_trees.push(tkn); + } + } + } + } + } +} + +type Level = crate::proc_macro::Level; +type LineColumn = crate::proc_macro::LineColumn; +type SourceFile = crate::proc_macro::SourceFile; + +/// A structure representing a diagnostic message and associated children +/// messages. +#[derive(Clone, Debug)] +pub struct Diagnostic { + level: Level, + message: String, + spans: Vec, + children: Vec, +} + +impl Diagnostic { + /// Creates a new diagnostic with the given `level` and `message`. 
+ pub fn new>(level: Level, message: T) -> Diagnostic { + Diagnostic { level, message: message.into(), spans: vec![], children: vec![] } + } +} + +// Rustc Server Ident has to be `Copyable` +// We use a stub here for bypassing +#[derive(Hash, Eq, PartialEq, Copy, Clone)] +pub struct IdentId(u32); + +#[derive(Clone, Hash, Eq, PartialEq)] +struct IdentData(tt::Ident); + +#[derive(Default)] +struct IdentInterner { + idents: HashMap, + ident_data: Vec, +} + +impl IdentInterner { + fn intern(&mut self, data: &IdentData) -> u32 { + if let Some(index) = self.idents.get(data) { + return *index; + } + + let index = self.idents.len() as u32; + self.ident_data.push(data.clone()); + self.idents.insert(data.clone(), index); + index + } + + fn get(&self, index: u32) -> &IdentData { + &self.ident_data[index as usize] + } + + #[allow(unused)] + fn get_mut(&mut self, index: u32) -> &mut IdentData { + self.ident_data.get_mut(index as usize).expect("Should be consistent") + } +} + +pub struct TokenStreamBuilder { + acc: TokenStream, +} + +/// Public implementation details for the `TokenStream` type, such as iterators. +pub mod token_stream { + use std::str::FromStr; + + use super::{TokenStream, TokenTree}; + + /// An iterator over `TokenStream`'s `TokenTree`s. + /// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups, + /// and returns whole groups as token trees. + impl IntoIterator for TokenStream { + type Item = TokenTree; + type IntoIter = super::IntoIter; + + fn into_iter(self) -> Self::IntoIter { + self.subtree.token_trees.into_iter() + } + } + + type LexError = String; + + /// Attempts to break the string into tokens and parse those tokens into a token stream. + /// May fail for a number of reasons, for example, if the string contains unbalanced delimiters + /// or characters not existing in the language. + /// All tokens in the parsed stream get `Span::call_site()` spans. 
+ /// + /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to + /// change these errors into `LexError`s later. + impl FromStr for TokenStream { + type Err = LexError; + + fn from_str(src: &str) -> Result { + let (subtree, _token_map) = + mbe::parse_to_token_tree(src).ok_or("Failed to parse from mbe")?; + + let tt: tt::TokenTree = subtree.into(); + Ok(tt.into()) + } + } + + impl ToString for TokenStream { + fn to_string(&self) -> String { + let tt = self.subtree.clone().into(); + to_text(&tt) + } + } + + fn to_text(tkn: &tt::TokenTree) -> String { + match tkn { + tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => ident.text.clone().into(), + tt::TokenTree::Leaf(tt::Leaf::Literal(literal)) => literal.text.clone().into(), + tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => format!("{}", punct.char), + tt::TokenTree::Subtree(subtree) => { + let content = subtree + .token_trees + .iter() + .map(|tkn| { + let s = to_text(tkn); + if let tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) = tkn { + if punct.spacing == tt::Spacing::Alone { + return s + " "; + } + } + s + }) + .collect::>() + .concat(); + let (open, close) = match subtree.delimiter.map(|it| it.kind) { + None => ("", ""), + Some(tt::DelimiterKind::Brace) => ("{", "}"), + Some(tt::DelimiterKind::Parenthesis) => ("(", ")"), + Some(tt::DelimiterKind::Bracket) => ("[", "]"), + }; + format!("{}{}{}", open, content, close) + } + } + } +} + +impl TokenStreamBuilder { + fn new() -> TokenStreamBuilder { + TokenStreamBuilder { acc: TokenStream::new() } + } + + fn push(&mut self, stream: TokenStream) { + self.acc.extend(stream.into_iter()) + } + + fn build(self) -> TokenStream { + self.acc + } +} + +#[derive(Clone)] +pub struct TokenStreamIter { + trees: IntoIter, +} + +#[derive(Default)] +pub struct Rustc { + ident_interner: IdentInterner, + // FIXME: store span information here. 
+} + +impl server::Types for Rustc { + type TokenStream = TokenStream; + type TokenStreamBuilder = TokenStreamBuilder; + type TokenStreamIter = TokenStreamIter; + type Group = Group; + type Punct = Punct; + type Ident = IdentId; + type Literal = Literal; + type SourceFile = SourceFile; + type Diagnostic = Diagnostic; + type Span = Span; + type MultiSpan = Vec; +} + +impl server::TokenStream for Rustc { + fn new(&mut self) -> Self::TokenStream { + Self::TokenStream::new() + } + + fn is_empty(&mut self, stream: &Self::TokenStream) -> bool { + stream.is_empty() + } + fn from_str(&mut self, src: &str) -> Self::TokenStream { + Self::TokenStream::from_str(src).expect("cannot parse string") + } + fn to_string(&mut self, stream: &Self::TokenStream) -> String { + stream.to_string() + } + fn from_token_tree( + &mut self, + tree: bridge::TokenTree, + ) -> Self::TokenStream { + match tree { + bridge::TokenTree::Group(group) => { + let tree = TokenTree::from(group); + Self::TokenStream::from_iter(vec![tree]) + } + + bridge::TokenTree::Ident(IdentId(index)) => { + let IdentData(ident) = self.ident_interner.get(index).clone(); + let ident: tt::Ident = ident; + let leaf = tt::Leaf::from(ident); + let tree = TokenTree::from(leaf); + Self::TokenStream::from_iter(vec![tree]) + } + + bridge::TokenTree::Literal(literal) => { + let leaf = tt::Leaf::from(literal); + let tree = TokenTree::from(leaf); + Self::TokenStream::from_iter(vec![tree]) + } + + bridge::TokenTree::Punct(p) => { + let leaf = tt::Leaf::from(p); + let tree = TokenTree::from(leaf); + Self::TokenStream::from_iter(vec![tree]) + } + } + } + + fn into_iter(&mut self, stream: Self::TokenStream) -> Self::TokenStreamIter { + let trees: Vec = stream.into_iter().collect(); + TokenStreamIter { trees: trees.into_iter() } + } +} + +impl server::TokenStreamBuilder for Rustc { + fn new(&mut self) -> Self::TokenStreamBuilder { + Self::TokenStreamBuilder::new() + } + fn push(&mut self, builder: &mut Self::TokenStreamBuilder, stream: 
Self::TokenStream) { + builder.push(stream) + } + fn build(&mut self, builder: Self::TokenStreamBuilder) -> Self::TokenStream { + builder.build() + } +} + +impl server::TokenStreamIter for Rustc { + fn next( + &mut self, + iter: &mut Self::TokenStreamIter, + ) -> Option> { + iter.trees.next().map(|tree| match tree { + TokenTree::Subtree(group) => bridge::TokenTree::Group(group), + TokenTree::Leaf(tt::Leaf::Ident(ident)) => { + bridge::TokenTree::Ident(IdentId(self.ident_interner.intern(&IdentData(ident)))) + } + TokenTree::Leaf(tt::Leaf::Literal(literal)) => bridge::TokenTree::Literal(literal), + TokenTree::Leaf(tt::Leaf::Punct(punct)) => bridge::TokenTree::Punct(punct), + }) + } +} + +fn delim_to_internal(d: bridge::Delimiter) -> Option { + let kind = match d { + bridge::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis, + bridge::Delimiter::Brace => tt::DelimiterKind::Brace, + bridge::Delimiter::Bracket => tt::DelimiterKind::Bracket, + bridge::Delimiter::None => return None, + }; + Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind }) +} + +fn delim_to_external(d: Option) -> bridge::Delimiter { + match d.map(|it| it.kind) { + Some(tt::DelimiterKind::Parenthesis) => bridge::Delimiter::Parenthesis, + Some(tt::DelimiterKind::Brace) => bridge::Delimiter::Brace, + Some(tt::DelimiterKind::Bracket) => bridge::Delimiter::Bracket, + None => bridge::Delimiter::None, + } +} + +fn spacing_to_internal(spacing: bridge::Spacing) -> Spacing { + match spacing { + bridge::Spacing::Alone => Spacing::Alone, + bridge::Spacing::Joint => Spacing::Joint, + } +} + +fn spacing_to_external(spacing: Spacing) -> bridge::Spacing { + match spacing { + Spacing::Alone => bridge::Spacing::Alone, + Spacing::Joint => bridge::Spacing::Joint, + } +} + +impl server::Group for Rustc { + fn new(&mut self, delimiter: bridge::Delimiter, stream: Self::TokenStream) -> Self::Group { + Self::Group { + delimiter: delim_to_internal(delimiter), + token_trees: stream.subtree.token_trees, + } + } + 
fn delimiter(&mut self, group: &Self::Group) -> bridge::Delimiter { + delim_to_external(group.delimiter) + } + + // NOTE: Return value of do not include delimiter + fn stream(&mut self, group: &Self::Group) -> Self::TokenStream { + TokenStream { + subtree: tt::Subtree { delimiter: None, token_trees: group.token_trees.clone() }, + } + } + + fn span(&mut self, group: &Self::Group) -> Self::Span { + group.delimiter.map(|it| it.id).unwrap_or_else(|| tt::TokenId::unspecified()) + } + + fn set_span(&mut self, _group: &mut Self::Group, _span: Self::Span) { + // FIXME handle span + } + + fn span_open(&mut self, _group: &Self::Group) -> Self::Span { + // FIXME handle span + // MySpan(self.span_interner.intern(&MySpanData(group.span_open()))) + tt::TokenId::unspecified() + } + + fn span_close(&mut self, _group: &Self::Group) -> Self::Span { + // FIXME handle span + tt::TokenId::unspecified() + } +} + +impl server::Punct for Rustc { + fn new(&mut self, ch: char, spacing: bridge::Spacing) -> Self::Punct { + tt::Punct { + char: ch, + spacing: spacing_to_internal(spacing), + id: tt::TokenId::unspecified(), + } + } + fn as_char(&mut self, punct: Self::Punct) -> char { + punct.char + } + fn spacing(&mut self, punct: Self::Punct) -> bridge::Spacing { + spacing_to_external(punct.spacing) + } + fn span(&mut self, _punct: Self::Punct) -> Self::Span { + // FIXME handle span + tt::TokenId::unspecified() + } + fn with_span(&mut self, punct: Self::Punct, _span: Self::Span) -> Self::Punct { + // FIXME handle span + punct + } +} + +impl server::Ident for Rustc { + fn new(&mut self, string: &str, _span: Self::Span, _is_raw: bool) -> Self::Ident { + IdentId( + self.ident_interner.intern(&IdentData(tt::Ident { + text: string.into(), + id: tt::TokenId::unspecified(), + })), + ) + } + + fn span(&mut self, _ident: Self::Ident) -> Self::Span { + // FIXME handle span + tt::TokenId::unspecified() + } + fn with_span(&mut self, ident: Self::Ident, _span: Self::Span) -> Self::Ident { + // FIXME handle 
span + ident + } +} + +impl server::Literal for Rustc { + fn debug_kind(&mut self, _literal: &Self::Literal) -> String { + // r-a: debug_kind and suffix are unsupported; corresponding client code has been changed to not call these. + // They must still be present to be ABI-compatible and work with upstream proc_macro. + "".to_owned() + } + fn symbol(&mut self, literal: &Self::Literal) -> String { + literal.text.to_string() + } + fn suffix(&mut self, _literal: &Self::Literal) -> Option { + None + } + + fn integer(&mut self, n: &str) -> Self::Literal { + let n: i128 = n.parse().unwrap(); + Literal { text: n.to_string().into(), id: tt::TokenId::unspecified() } + } + + fn typed_integer(&mut self, n: &str, kind: &str) -> Self::Literal { + macro_rules! def_suffixed_integer { + ($kind:ident, $($ty:ty),*) => { + match $kind { + $( + stringify!($ty) => { + let n: $ty = n.parse().unwrap(); + format!(concat!("{}", stringify!($ty)), n) + } + )* + _ => unimplemented!("unknown args for typed_integer: n {}, kind {}", n, $kind), + } + } + } + + let text = + def_suffixed_integer! 
{kind, u8, u16, u32, u64, u128, usize, i8, i16, i32, i64, i128}; + + Literal { text: text.into(), id: tt::TokenId::unspecified() } + } + + fn float(&mut self, n: &str) -> Self::Literal { + let n: f64 = n.parse().unwrap(); + let mut text = f64::to_string(&n); + if !text.contains('.') { + text += ".0" + } + Literal { text: text.into(), id: tt::TokenId::unspecified() } + } + + fn f32(&mut self, n: &str) -> Self::Literal { + let n: f32 = n.parse().unwrap(); + let text = format!("{}f32", n); + Literal { text: text.into(), id: tt::TokenId::unspecified() } + } + + fn f64(&mut self, n: &str) -> Self::Literal { + let n: f64 = n.parse().unwrap(); + let text = format!("{}f64", n); + Literal { text: text.into(), id: tt::TokenId::unspecified() } + } + + fn string(&mut self, string: &str) -> Self::Literal { + let mut escaped = String::new(); + for ch in string.chars() { + escaped.extend(ch.escape_debug()); + } + Literal { text: format!("\"{}\"", escaped).into(), id: tt::TokenId::unspecified() } + } + + fn character(&mut self, ch: char) -> Self::Literal { + Literal { text: format!("'{}'", ch).into(), id: tt::TokenId::unspecified() } + } + + fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal { + let string = bytes + .iter() + .cloned() + .flat_map(ascii::escape_default) + .map(Into::::into) + .collect::(); + + Literal { text: format!("b\"{}\"", string).into(), id: tt::TokenId::unspecified() } + } + + fn span(&mut self, literal: &Self::Literal) -> Self::Span { + literal.id + } + + fn set_span(&mut self, _literal: &mut Self::Literal, _span: Self::Span) { + // FIXME handle span + } + + fn subspan( + &mut self, + _literal: &Self::Literal, + _start: Bound, + _end: Bound, + ) -> Option { + // FIXME handle span + None + } +} + +impl server::SourceFile for Rustc { + fn eq(&mut self, file1: &Self::SourceFile, file2: &Self::SourceFile) -> bool { + file1.eq(file2) + } + fn path(&mut self, file: &Self::SourceFile) -> String { + String::from( + file.path().to_str().expect("non-UTF8 file 
path in `proc_macro::SourceFile::path`"), + ) + } + fn is_real(&mut self, file: &Self::SourceFile) -> bool { + file.is_real() + } +} + +impl server::Diagnostic for Rustc { + fn new(&mut self, level: Level, msg: &str, spans: Self::MultiSpan) -> Self::Diagnostic { + let mut diag = Diagnostic::new(level, msg); + diag.spans = spans; + diag + } + + fn sub( + &mut self, + _diag: &mut Self::Diagnostic, + _level: Level, + _msg: &str, + _spans: Self::MultiSpan, + ) { + // FIXME handle diagnostic + // + } + + fn emit(&mut self, _diag: Self::Diagnostic) { + // FIXME handle diagnostic + // diag.emit() + } +} + +impl server::Span for Rustc { + fn debug(&mut self, span: Self::Span) -> String { + format!("{:?}", span.0) + } + fn def_site(&mut self) -> Self::Span { + // MySpan(self.span_interner.intern(&MySpanData(Span::def_site()))) + // FIXME handle span + tt::TokenId::unspecified() + } + fn call_site(&mut self) -> Self::Span { + // MySpan(self.span_interner.intern(&MySpanData(Span::call_site()))) + // FIXME handle span + tt::TokenId::unspecified() + } + fn source_file(&mut self, _span: Self::Span) -> Self::SourceFile { + // let MySpanData(span) = self.span_interner.get(span.0); + unimplemented!() + } + + /// Recent feature, not yet in the proc_macro + /// + /// See PR: + /// https://github.com/rust-lang/rust/pull/55780 + fn source_text(&mut self, _span: Self::Span) -> Option { + None + } + + fn parent(&mut self, _span: Self::Span) -> Option { + // FIXME handle span + None + } + fn source(&mut self, span: Self::Span) -> Self::Span { + // FIXME handle span + span + } + fn start(&mut self, _span: Self::Span) -> LineColumn { + // FIXME handle span + LineColumn { line: 0, column: 0 } + } + fn end(&mut self, _span: Self::Span) -> LineColumn { + // FIXME handle span + LineColumn { line: 0, column: 0 } + } + fn join(&mut self, _first: Self::Span, _second: Self::Span) -> Option { + None + } + fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span { + // FIXME handle 
span + tt::TokenId::unspecified() + } + + fn mixed_site(&mut self) -> Self::Span { + // FIXME handle span + tt::TokenId::unspecified() + } +} + +impl server::MultiSpan for Rustc { + fn new(&mut self) -> Self::MultiSpan { + // FIXME handle span + vec![] + } + + fn push(&mut self, other: &mut Self::MultiSpan, span: Self::Span) { + //TODP + other.push(span) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::proc_macro::bridge::server::Literal; + + #[test] + fn test_rustc_server_literals() { + let mut srv = Rustc { ident_interner: IdentInterner::default() }; + assert_eq!(srv.integer("1234").text, "1234"); + + assert_eq!(srv.typed_integer("12", "u8").text, "12u8"); + assert_eq!(srv.typed_integer("255", "u16").text, "255u16"); + assert_eq!(srv.typed_integer("1234", "u32").text, "1234u32"); + assert_eq!(srv.typed_integer("15846685", "u64").text, "15846685u64"); + assert_eq!(srv.typed_integer("15846685258", "u128").text, "15846685258u128"); + assert_eq!(srv.typed_integer("156788984", "usize").text, "156788984usize"); + assert_eq!(srv.typed_integer("127", "i8").text, "127i8"); + assert_eq!(srv.typed_integer("255", "i16").text, "255i16"); + assert_eq!(srv.typed_integer("1234", "i32").text, "1234i32"); + assert_eq!(srv.typed_integer("15846685", "i64").text, "15846685i64"); + assert_eq!(srv.typed_integer("15846685258", "i128").text, "15846685258i128"); + assert_eq!(srv.float("0").text, "0.0"); + assert_eq!(srv.float("15684.5867").text, "15684.5867"); + assert_eq!(srv.f32("15684.58").text, "15684.58f32"); + assert_eq!(srv.f64("15684.58").text, "15684.58f64"); + + assert_eq!(srv.string("hello_world").text, "\"hello_world\""); + assert_eq!(srv.character('c').text, "'c'"); + assert_eq!(srv.byte_string(b"1234586\x88").text, "b\"1234586\\x88\""); + } +} diff --git a/crates/ra_proc_macro_srv/src/tests/fixtures/test_serialize_proc_macro.txt b/crates/proc_macro_srv/src/tests/fixtures/test_serialize_proc_macro.txt similarity index 100% rename from 
crates/ra_proc_macro_srv/src/tests/fixtures/test_serialize_proc_macro.txt rename to crates/proc_macro_srv/src/tests/fixtures/test_serialize_proc_macro.txt diff --git a/crates/ra_proc_macro_srv/src/tests/mod.rs b/crates/proc_macro_srv/src/tests/mod.rs similarity index 100% rename from crates/ra_proc_macro_srv/src/tests/mod.rs rename to crates/proc_macro_srv/src/tests/mod.rs diff --git a/crates/proc_macro_srv/src/tests/utils.rs b/crates/proc_macro_srv/src/tests/utils.rs new file mode 100644 index 0000000000..5828512d6e --- /dev/null +++ b/crates/proc_macro_srv/src/tests/utils.rs @@ -0,0 +1,64 @@ +//! utils used in proc-macro tests + +use crate::dylib; +use crate::ProcMacroSrv; +use proc_macro_api::ListMacrosTask; +use std::str::FromStr; +use test_utils::assert_eq_text; + +mod fixtures { + use cargo_metadata::Message; + use std::process::Command; + + // Use current project metadata to get the proc-macro dylib path + pub fn dylib_path(crate_name: &str, version: &str) -> std::path::PathBuf { + let command = Command::new(toolchain::cargo()) + .args(&["check", "--message-format", "json"]) + .output() + .unwrap() + .stdout; + + for message in Message::parse_stream(command.as_slice()) { + match message.unwrap() { + Message::CompilerArtifact(artifact) => { + if artifact.target.kind.contains(&"proc-macro".to_string()) { + let repr = format!("{} {}", crate_name, version); + if artifact.package_id.repr.starts_with(&repr) { + return artifact.filenames[0].clone(); + } + } + } + _ => (), // Unknown message + } + } + + panic!("No proc-macro dylib for {} found!", crate_name); + } +} + +fn parse_string(code: &str) -> Option { + Some(crate::rustc_server::TokenStream::from_str(code).unwrap()) +} + +pub fn assert_expand( + crate_name: &str, + macro_name: &str, + version: &str, + ra_fixture: &str, + expect: &str, +) { + let path = fixtures::dylib_path(crate_name, version); + let expander = dylib::Expander::new(&path).unwrap(); + let fixture = parse_string(ra_fixture).unwrap(); + + let 
res = expander.expand(macro_name, &fixture.subtree, None).unwrap(); + assert_eq_text!(&format!("{:?}", res), &expect.trim()); +} + +pub fn list(crate_name: &str, version: &str) -> Vec { + let path = fixtures::dylib_path(crate_name, version); + let task = ListMacrosTask { lib: path }; + let mut srv = ProcMacroSrv::default(); + let res = srv.list_macros(&task).unwrap(); + res.macros.into_iter().map(|(name, kind)| format!("{} [{:?}]", name, kind)).collect() +} diff --git a/crates/profile/Cargo.toml b/crates/profile/Cargo.toml new file mode 100644 index 0000000000..e271e3a567 --- /dev/null +++ b/crates/profile/Cargo.toml @@ -0,0 +1,27 @@ +[package] +name = "profile" +version = "0.0.0" +license = "MIT OR Apache-2.0" +authors = ["rust-analyzer developers"] +edition = "2018" + +[lib] +doctest = false + +[dependencies] +once_cell = "1.3.1" +cfg-if = "0.1.10" +libc = "0.2.73" +backtrace = { version = "0.3.44", optional = true } + +arena = { path = "../arena" } + +[target.'cfg(target_os = "linux")'.dependencies] +perf-event = "0.4" + +[features] +cpu_profiler = [] + +# Uncomment to enable for the whole crate graph +# default = [ "backtrace" ] +# default = [ "cpu_profiler" ] diff --git a/crates/ra_prof/src/google_cpu_profiler.rs b/crates/profile/src/google_cpu_profiler.rs similarity index 100% rename from crates/ra_prof/src/google_cpu_profiler.rs rename to crates/profile/src/google_cpu_profiler.rs diff --git a/crates/profile/src/hprof.rs b/crates/profile/src/hprof.rs new file mode 100644 index 0000000000..934cc8e37a --- /dev/null +++ b/crates/profile/src/hprof.rs @@ -0,0 +1,240 @@ +//! 
Simple hierarchical profiler +use once_cell::sync::Lazy; +use std::{ + cell::RefCell, + collections::{BTreeMap, HashSet}, + io::{stderr, Write}, + sync::{ + atomic::{AtomicBool, Ordering}, + RwLock, + }, + time::{Duration, Instant}, +}; + +use crate::tree::{Idx, Tree}; + +/// Filtering syntax +/// env RA_PROFILE=* // dump everything +/// env RA_PROFILE=foo|bar|baz // enabled only selected entries +/// env RA_PROFILE=*@3>10 // dump everything, up to depth 3, if it takes more than 10 ms +pub fn init() { + let spec = std::env::var("RA_PROFILE").unwrap_or_default(); + init_from(&spec); +} + +pub fn init_from(spec: &str) { + let filter = if spec.is_empty() { Filter::disabled() } else { Filter::from_spec(spec) }; + filter.install(); +} + +pub type Label = &'static str; + +/// This function starts a profiling scope in the current execution stack with a given description. +/// It returns a `Profile` struct that measures elapsed time between this method invocation and `Profile` struct drop. +/// It supports nested profiling scopes in case when this function is invoked multiple times at the execution stack. +/// In this case the profiling information will be nested at the output. +/// Profiling information is being printed in the stderr. 
+/// +/// # Example +/// ``` +/// profile::init_from("profile1|profile2@2"); +/// profiling_function1(); +/// +/// fn profiling_function1() { +/// let _p = profile::span("profile1"); +/// profiling_function2(); +/// } +/// +/// fn profiling_function2() { +/// let _p = profile::span("profile2"); +/// } +/// ``` +/// This will print in the stderr the following: +/// ```text +/// 0ms - profile +/// 0ms - profile2 +/// ``` +pub fn span(label: Label) -> ProfileSpan { + assert!(!label.is_empty()); + + if PROFILING_ENABLED.load(Ordering::Relaxed) + && PROFILE_STACK.with(|stack| stack.borrow_mut().push(label)) + { + ProfileSpan(Some(ProfilerImpl { label, detail: None })) + } else { + ProfileSpan(None) + } +} + +pub struct ProfileSpan(Option); + +struct ProfilerImpl { + label: Label, + detail: Option, +} + +impl ProfileSpan { + pub fn detail(mut self, detail: impl FnOnce() -> String) -> ProfileSpan { + if let Some(profiler) = &mut self.0 { + profiler.detail = Some(detail()) + } + self + } +} + +impl Drop for ProfilerImpl { + fn drop(&mut self) { + PROFILE_STACK.with(|it| it.borrow_mut().pop(self.label, self.detail.take())); + } +} + +static PROFILING_ENABLED: AtomicBool = AtomicBool::new(false); +static FILTER: Lazy> = Lazy::new(Default::default); +thread_local!(static PROFILE_STACK: RefCell = RefCell::new(ProfileStack::new())); + +#[derive(Default, Clone, Debug)] +struct Filter { + depth: usize, + allowed: HashSet, + longer_than: Duration, + version: usize, +} + +impl Filter { + fn disabled() -> Filter { + Filter::default() + } + + fn from_spec(mut spec: &str) -> Filter { + let longer_than = if let Some(idx) = spec.rfind('>') { + let longer_than = spec[idx + 1..].parse().expect("invalid profile longer_than"); + spec = &spec[..idx]; + Duration::from_millis(longer_than) + } else { + Duration::new(0, 0) + }; + + let depth = if let Some(idx) = spec.rfind('@') { + let depth: usize = spec[idx + 1..].parse().expect("invalid profile depth"); + spec = &spec[..idx]; + depth + } else 
{ + 999 + }; + let allowed = + if spec == "*" { HashSet::new() } else { spec.split('|').map(String::from).collect() }; + Filter { depth, allowed, longer_than, version: 0 } + } + + fn install(mut self) { + PROFILING_ENABLED.store(self.depth > 0, Ordering::SeqCst); + let mut old = FILTER.write().unwrap(); + self.version = old.version + 1; + *old = self; + } +} + +struct ProfileStack { + starts: Vec, + filter: Filter, + messages: Tree, +} + +#[derive(Default)] +struct Message { + duration: Duration, + label: Label, + detail: Option, +} + +impl ProfileStack { + fn new() -> ProfileStack { + ProfileStack { starts: Vec::new(), messages: Tree::default(), filter: Default::default() } + } + + fn push(&mut self, label: Label) -> bool { + if self.starts.is_empty() { + if let Ok(f) = FILTER.try_read() { + if f.version > self.filter.version { + self.filter = f.clone(); + } + }; + } + if self.starts.len() > self.filter.depth { + return false; + } + let allowed = &self.filter.allowed; + if self.starts.is_empty() && !allowed.is_empty() && !allowed.contains(label) { + return false; + } + + self.starts.push(Instant::now()); + self.messages.start(); + true + } + + pub fn pop(&mut self, label: Label, detail: Option) { + let start = self.starts.pop().unwrap(); + let duration = start.elapsed(); + self.messages.finish(Message { duration, label, detail }); + if self.starts.is_empty() { + let longer_than = self.filter.longer_than; + // Convert to millis for comparison to avoid problems with rounding + // (otherwise we could print `0ms` despite user's `>0` filter when + // `duration` is just a few nanos). 
+ if duration.as_millis() > longer_than.as_millis() { + if let Some(root) = self.messages.root() { + print(&self.messages, root, 0, longer_than, &mut stderr().lock()); + } + } + self.messages.clear(); + } + } +} + +fn print( + tree: &Tree, + curr: Idx, + level: u32, + longer_than: Duration, + out: &mut impl Write, +) { + let current_indent = " ".repeat(level as usize); + let detail = tree[curr].detail.as_ref().map(|it| format!(" @ {}", it)).unwrap_or_default(); + writeln!( + out, + "{}{:5}ms - {}{}", + current_indent, + tree[curr].duration.as_millis(), + tree[curr].label, + detail, + ) + .expect("printing profiling info"); + + let mut accounted_for = Duration::default(); + let mut short_children = BTreeMap::new(); // Use `BTreeMap` to get deterministic output. + for child in tree.children(curr) { + accounted_for += tree[child].duration; + + if tree[child].duration.as_millis() > longer_than.as_millis() { + print(tree, child, level + 1, longer_than, out) + } else { + let (total_duration, cnt) = + short_children.entry(tree[child].label).or_insert((Duration::default(), 0)); + *total_duration += tree[child].duration; + *cnt += 1; + } + } + + for (child_msg, (duration, count)) in short_children.iter() { + let millis = duration.as_millis(); + writeln!(out, " {}{:5}ms - {} ({} calls)", current_indent, millis, child_msg, count) + .expect("printing profiling info"); + } + + let unaccounted = tree[curr].duration - accounted_for; + if tree.children(curr).next().is_some() && unaccounted > longer_than { + writeln!(out, " {}{:5}ms - ???", current_indent, unaccounted.as_millis()) + .expect("printing profiling info"); + } +} diff --git a/crates/profile/src/lib.rs b/crates/profile/src/lib.rs new file mode 100644 index 0000000000..ab19271c70 --- /dev/null +++ b/crates/profile/src/lib.rs @@ -0,0 +1,109 @@ +//! A collection of tools for profiling rust-analyzer. 
+ +mod stop_watch; +mod memory_usage; +#[cfg(feature = "cpu_profiler")] +mod google_cpu_profiler; +mod hprof; +mod tree; + +use std::cell::RefCell; + +pub use crate::{ + hprof::{init, init_from, span}, + memory_usage::{Bytes, MemoryUsage}, + stop_watch::{StopWatch, StopWatchSpan}, +}; + +/// Prints backtrace to stderr, useful for debugging. +#[cfg(feature = "backtrace")] +pub fn print_backtrace() { + let bt = backtrace::Backtrace::new(); + eprintln!("{:?}", bt); +} +#[cfg(not(feature = "backtrace"))] +pub fn print_backtrace() { + eprintln!( + r#"enable the backtrace feature: + profile = {{ path = "../profile", features = [ "backtrace"] }} +"# + ); +} + +thread_local!(static IN_SCOPE: RefCell = RefCell::new(false)); + +/// Allows checking whether the current code is within some dynamic scope; can be +/// useful during debugging to figure out why a function is called. +pub struct Scope { + prev: bool, +} + +impl Scope { + #[must_use] + pub fn enter() -> Scope { + let prev = IN_SCOPE.with(|slot| std::mem::replace(&mut *slot.borrow_mut(), true)); + Scope { prev } + } + pub fn is_active() -> bool { + IN_SCOPE.with(|slot| *slot.borrow()) + } +} + +impl Drop for Scope { + fn drop(&mut self) { + IN_SCOPE.with(|slot| *slot.borrow_mut() = self.prev); + } +} + +/// A wrapper around google_cpu_profiler. +/// +/// Usage: +/// 1. Install gperftools (https://github.com/gperftools/gperftools), probably packaged with your Linux distro. +/// 2. Build with `cpu_profiler` feature. +/// 3. Run the code, the *raw* output would be in the `./out.profile` file. +/// 4. Install pprof for visualization (https://github.com/google/pprof). +/// 5. Bump sampling frequency to once per ms: `export CPUPROFILE_FREQUENCY=1000` +/// 6. Use something like `pprof -svg target/release/rust-analyzer ./out.profile` to see the results. 
+/// +/// For example, here's how I run profiling on NixOS: +/// +/// ```bash +/// $ nix-shell -p gperftools --run \ +/// 'cargo run --release -p rust-analyzer -- parse < ~/projects/rustbench/parser.rs > /dev/null' +/// ``` +/// +/// See this diff for how to profile completions: +/// +/// https://github.com/rust-analyzer/rust-analyzer/pull/5306 +#[derive(Debug)] +pub struct CpuSpan { + _private: (), +} + +#[must_use] +pub fn cpu_span() -> CpuSpan { + #[cfg(feature = "cpu_profiler")] + { + google_cpu_profiler::start("./out.profile".as_ref()) + } + + #[cfg(not(feature = "cpu_profiler"))] + { + eprintln!("cpu_profiler feature is disabled") + } + + CpuSpan { _private: () } +} + +impl Drop for CpuSpan { + fn drop(&mut self) { + #[cfg(feature = "cpu_profiler")] + { + google_cpu_profiler::stop() + } + } +} + +pub fn memory_usage() -> MemoryUsage { + MemoryUsage::current() +} diff --git a/crates/profile/src/memory_usage.rs b/crates/profile/src/memory_usage.rs new file mode 100644 index 0000000000..83390212ae --- /dev/null +++ b/crates/profile/src/memory_usage.rs @@ -0,0 +1,75 @@ +//! FIXME: write short doc here +use std::fmt; + +use cfg_if::cfg_if; + +#[derive(Copy, Clone)] +pub struct MemoryUsage { + pub allocated: Bytes, +} + +impl fmt::Display for MemoryUsage { + fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { + write!(fmt, "{}", self.allocated) + } +} + +impl std::ops::Sub for MemoryUsage { + type Output = MemoryUsage; + fn sub(self, rhs: MemoryUsage) -> MemoryUsage { + MemoryUsage { allocated: self.allocated - rhs.allocated } + } +} + +impl MemoryUsage { + pub fn current() -> MemoryUsage { + cfg_if! { + if #[cfg(all(target_os = "linux", target_env = "gnu"))] { + // Note: This is incredibly slow. 
+ let alloc = unsafe { libc::mallinfo() }.uordblks as isize; + MemoryUsage { allocated: Bytes(alloc) } + } else { + MemoryUsage { allocated: Bytes(0) } + } + } + } +} + +#[derive(Default, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)] +pub struct Bytes(isize); + +impl Bytes { + pub fn megabytes(self) -> isize { + self.0 / 1024 / 1024 + } +} + +impl fmt::Display for Bytes { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let bytes = self.0; + let mut value = bytes; + let mut suffix = "b"; + if value.abs() > 4096 { + value /= 1024; + suffix = "kb"; + if value.abs() > 4096 { + value /= 1024; + suffix = "mb"; + } + } + f.pad(&format!("{}{}", value, suffix)) + } +} + +impl std::ops::AddAssign for Bytes { + fn add_assign(&mut self, x: usize) { + self.0 += x as isize; + } +} + +impl std::ops::Sub for Bytes { + type Output = Bytes; + fn sub(self, rhs: Bytes) -> Bytes { + Bytes(self.0 - rhs.0) + } +} diff --git a/crates/ra_prof/src/stop_watch.rs b/crates/profile/src/stop_watch.rs similarity index 100% rename from crates/ra_prof/src/stop_watch.rs rename to crates/profile/src/stop_watch.rs diff --git a/crates/profile/src/tree.rs b/crates/profile/src/tree.rs new file mode 100644 index 0000000000..096f58511c --- /dev/null +++ b/crates/profile/src/tree.rs @@ -0,0 +1,84 @@ +//! A simple tree implementation which tries to not allocate all over the place. 
+use std::ops; + +use arena::Arena; + +#[derive(Default)] +pub struct Tree { + nodes: Arena>, + current_path: Vec<(Idx, Option>)>, +} + +pub type Idx = arena::Idx>; + +impl Tree { + pub fn start(&mut self) + where + T: Default, + { + let me = self.nodes.alloc(Node::new(T::default())); + if let Some((parent, last_child)) = self.current_path.last_mut() { + let slot = match *last_child { + Some(last_child) => &mut self.nodes[last_child].next_sibling, + None => &mut self.nodes[*parent].first_child, + }; + let prev = slot.replace(me); + assert!(prev.is_none()); + *last_child = Some(me); + } + + self.current_path.push((me, None)); + } + + pub fn finish(&mut self, data: T) { + let (me, _last_child) = self.current_path.pop().unwrap(); + self.nodes[me].data = data; + } + + pub fn root(&self) -> Option> { + self.nodes.iter().next().map(|(idx, _)| idx) + } + + pub fn children(&self, idx: Idx) -> impl Iterator> + '_ { + NodeIter { nodes: &self.nodes, next: self.nodes[idx].first_child } + } + pub fn clear(&mut self) { + self.nodes.clear(); + self.current_path.clear(); + } +} + +impl ops::Index> for Tree { + type Output = T; + fn index(&self, index: Idx) -> &T { + &self.nodes[index].data + } +} + +pub struct Node { + data: T, + first_child: Option>, + next_sibling: Option>, +} + +impl Node { + fn new(data: T) -> Node { + Node { data, first_child: None, next_sibling: None } + } +} + +struct NodeIter<'a, T> { + nodes: &'a Arena>, + next: Option>, +} + +impl<'a, T> Iterator for NodeIter<'a, T> { + type Item = Idx; + + fn next(&mut self) -> Option> { + self.next.map(|next| { + self.next = self.nodes[next].next_sibling; + next + }) + } +} diff --git a/crates/project_model/Cargo.toml b/crates/project_model/Cargo.toml new file mode 100644 index 0000000000..386f72f419 --- /dev/null +++ b/crates/project_model/Cargo.toml @@ -0,0 +1,25 @@ +[package] +name = "project_model" +version = "0.0.0" +license = "MIT OR Apache-2.0" +authors = ["rust-analyzer developers"] +edition = "2018" + +[lib] 
+doctest = false + +[dependencies] +log = "0.4.8" +rustc-hash = "1.1.0" +cargo_metadata = "0.11.1" +serde = { version = "1.0.106", features = ["derive"] } +serde_json = "1.0.48" +anyhow = "1.0.26" + +arena = { path = "../arena" } +cfg = { path = "../cfg" } +base_db = { path = "../base_db" } +toolchain = { path = "../toolchain" } +proc_macro_api = { path = "../proc_macro_api" } +paths = { path = "../paths" } +stdx = { path = "../stdx" } diff --git a/crates/project_model/src/cargo_workspace.rs b/crates/project_model/src/cargo_workspace.rs new file mode 100644 index 0000000000..e5c2d2b256 --- /dev/null +++ b/crates/project_model/src/cargo_workspace.rs @@ -0,0 +1,362 @@ +//! FIXME: write short doc here + +use std::{ + ffi::OsStr, + ops, + path::{Path, PathBuf}, + process::Command, +}; + +use anyhow::{Context, Result}; +use arena::{Arena, Idx}; +use base_db::Edition; +use cargo_metadata::{BuildScript, CargoOpt, Message, MetadataCommand, PackageId}; +use paths::{AbsPath, AbsPathBuf}; +use rustc_hash::FxHashMap; + +use crate::cfg_flag::CfgFlag; + +/// `CargoWorkspace` represents the logical structure of, well, a Cargo +/// workspace. It pretty closely mirrors `cargo metadata` output. +/// +/// Note that internally, rust analyzer uses a different structure: +/// `CrateGraph`. `CrateGraph` is lower-level: it knows only about the crates, +/// while this knows about `Packages` & `Targets`: purely cargo-related +/// concepts. +/// +/// We use absolute paths here, `cargo metadata` guarantees to always produce +/// abs paths. 
+#[derive(Debug, Clone, Eq, PartialEq)] +pub struct CargoWorkspace { + packages: Arena, + targets: Arena, + workspace_root: AbsPathBuf, +} + +impl ops::Index for CargoWorkspace { + type Output = PackageData; + fn index(&self, index: Package) -> &PackageData { + &self.packages[index] + } +} + +impl ops::Index for CargoWorkspace { + type Output = TargetData; + fn index(&self, index: Target) -> &TargetData { + &self.targets[index] + } +} + +#[derive(Default, Clone, Debug, PartialEq, Eq)] +pub struct CargoConfig { + /// Do not activate the `default` feature. + pub no_default_features: bool, + + /// Activate all available features + pub all_features: bool, + + /// List of features to activate. + /// This will be ignored if `cargo_all_features` is true. + pub features: Vec, + + /// Runs cargo check on launch to figure out the correct values of OUT_DIR + pub load_out_dirs_from_check: bool, + + /// rustc target + pub target: Option, +} + +pub type Package = Idx; + +pub type Target = Idx; + +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct PackageData { + pub version: String, + pub name: String, + pub manifest: AbsPathBuf, + pub targets: Vec, + pub is_member: bool, + pub dependencies: Vec, + pub edition: Edition, + pub features: Vec, + pub cfgs: Vec, + pub out_dir: Option, + pub proc_macro_dylib_path: Option, +} + +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct PackageDependency { + pub pkg: Package, + pub name: String, +} + +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct TargetData { + pub package: Package, + pub name: String, + pub root: AbsPathBuf, + pub kind: TargetKind, + pub is_proc_macro: bool, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum TargetKind { + Bin, + /// Any kind of Cargo lib crate-type (dylib, rlib, proc-macro, ...). 
+ Lib, + Example, + Test, + Bench, + Other, +} + +impl TargetKind { + fn new(kinds: &[String]) -> TargetKind { + for kind in kinds { + return match kind.as_str() { + "bin" => TargetKind::Bin, + "test" => TargetKind::Test, + "bench" => TargetKind::Bench, + "example" => TargetKind::Example, + "proc-macro" => TargetKind::Lib, + _ if kind.contains("lib") => TargetKind::Lib, + _ => continue, + }; + } + TargetKind::Other + } +} + +impl PackageData { + pub fn root(&self) -> &AbsPath { + self.manifest.parent().unwrap() + } +} + +impl CargoWorkspace { + pub fn from_cargo_metadata( + cargo_toml: &AbsPath, + cargo_features: &CargoConfig, + ) -> Result { + let mut meta = MetadataCommand::new(); + meta.cargo_path(toolchain::cargo()); + meta.manifest_path(cargo_toml.to_path_buf()); + if cargo_features.all_features { + meta.features(CargoOpt::AllFeatures); + } else { + if cargo_features.no_default_features { + // FIXME: `NoDefaultFeatures` is mutual exclusive with `SomeFeatures` + // https://github.com/oli-obk/cargo_metadata/issues/79 + meta.features(CargoOpt::NoDefaultFeatures); + } + if !cargo_features.features.is_empty() { + meta.features(CargoOpt::SomeFeatures(cargo_features.features.clone())); + } + } + if let Some(parent) = cargo_toml.parent() { + meta.current_dir(parent.to_path_buf()); + } + if let Some(target) = cargo_features.target.as_ref() { + meta.other_options(vec![String::from("--filter-platform"), target.clone()]); + } + let mut meta = meta.exec().with_context(|| { + format!("Failed to run `cargo metadata --manifest-path {}`", cargo_toml.display()) + })?; + + let mut out_dir_by_id = FxHashMap::default(); + let mut cfgs = FxHashMap::default(); + let mut proc_macro_dylib_paths = FxHashMap::default(); + if cargo_features.load_out_dirs_from_check { + let resources = load_extern_resources(cargo_toml, cargo_features)?; + out_dir_by_id = resources.out_dirs; + cfgs = resources.cfgs; + proc_macro_dylib_paths = resources.proc_dylib_paths; + } + + let mut pkg_by_id = 
FxHashMap::default(); + let mut packages = Arena::default(); + let mut targets = Arena::default(); + + let ws_members = &meta.workspace_members; + + meta.packages.sort_by(|a, b| a.id.cmp(&b.id)); + for meta_pkg in meta.packages { + let cargo_metadata::Package { id, edition, name, manifest_path, version, .. } = + meta_pkg; + let is_member = ws_members.contains(&id); + let edition = edition + .parse::() + .with_context(|| format!("Failed to parse edition {}", edition))?; + let pkg = packages.alloc(PackageData { + name, + version: version.to_string(), + manifest: AbsPathBuf::assert(manifest_path), + targets: Vec::new(), + is_member, + edition, + dependencies: Vec::new(), + features: Vec::new(), + cfgs: cfgs.get(&id).cloned().unwrap_or_default(), + out_dir: out_dir_by_id.get(&id).cloned(), + proc_macro_dylib_path: proc_macro_dylib_paths.get(&id).cloned(), + }); + let pkg_data = &mut packages[pkg]; + pkg_by_id.insert(id, pkg); + for meta_tgt in meta_pkg.targets { + let is_proc_macro = meta_tgt.kind.as_slice() == ["proc-macro"]; + let tgt = targets.alloc(TargetData { + package: pkg, + name: meta_tgt.name, + root: AbsPathBuf::assert(meta_tgt.src_path.clone()), + kind: TargetKind::new(meta_tgt.kind.as_slice()), + is_proc_macro, + }); + pkg_data.targets.push(tgt); + } + } + let resolve = meta.resolve.expect("metadata executed with deps"); + for mut node in resolve.nodes { + let source = match pkg_by_id.get(&node.id) { + Some(&src) => src, + // FIXME: replace this and a similar branch below with `.unwrap`, once + // https://github.com/rust-lang/cargo/issues/7841 + // is fixed and hits stable (around 1.43-is probably?). 
+ None => { + log::error!("Node id do not match in cargo metadata, ignoring {}", node.id); + continue; + } + }; + node.deps.sort_by(|a, b| a.pkg.cmp(&b.pkg)); + for dep_node in node.deps { + let pkg = match pkg_by_id.get(&dep_node.pkg) { + Some(&pkg) => pkg, + None => { + log::error!( + "Dep node id do not match in cargo metadata, ignoring {}", + dep_node.pkg + ); + continue; + } + }; + let dep = PackageDependency { name: dep_node.name, pkg }; + packages[source].dependencies.push(dep); + } + packages[source].features.extend(node.features); + } + + let workspace_root = AbsPathBuf::assert(meta.workspace_root); + Ok(CargoWorkspace { packages, targets, workspace_root: workspace_root }) + } + + pub fn packages<'a>(&'a self) -> impl Iterator + ExactSizeIterator + 'a { + self.packages.iter().map(|(id, _pkg)| id) + } + + pub fn target_by_root(&self, root: &AbsPath) -> Option { + self.packages() + .filter_map(|pkg| self[pkg].targets.iter().find(|&&it| &self[it].root == root)) + .next() + .copied() + } + + pub fn workspace_root(&self) -> &AbsPath { + &self.workspace_root + } + + pub fn package_flag(&self, package: &PackageData) -> String { + if self.is_unique(&*package.name) { + package.name.clone() + } else { + format!("{}:{}", package.name, package.version) + } + } + + fn is_unique(&self, name: &str) -> bool { + self.packages.iter().filter(|(_, v)| v.name == name).count() == 1 + } +} + +#[derive(Debug, Clone, Default)] +pub struct ExternResources { + out_dirs: FxHashMap, + proc_dylib_paths: FxHashMap, + cfgs: FxHashMap>, +} + +pub fn load_extern_resources( + cargo_toml: &Path, + cargo_features: &CargoConfig, +) -> Result { + let mut cmd = Command::new(toolchain::cargo()); + cmd.args(&["check", "--message-format=json", "--manifest-path"]).arg(cargo_toml); + if cargo_features.all_features { + cmd.arg("--all-features"); + } else { + if cargo_features.no_default_features { + // FIXME: `NoDefaultFeatures` is mutual exclusive with `SomeFeatures` + // 
https://github.com/oli-obk/cargo_metadata/issues/79 + cmd.arg("--no-default-features"); + } + if !cargo_features.features.is_empty() { + cmd.arg("--features"); + cmd.arg(cargo_features.features.join(" ")); + } + } + + let output = cmd.output()?; + + let mut res = ExternResources::default(); + + for message in cargo_metadata::Message::parse_stream(output.stdout.as_slice()) { + if let Ok(message) = message { + match message { + Message::BuildScriptExecuted(BuildScript { package_id, out_dir, cfgs, .. }) => { + let cfgs = { + let mut acc = Vec::new(); + for cfg in cfgs { + match cfg.parse::() { + Ok(it) => acc.push(it), + Err(err) => { + anyhow::bail!("invalid cfg from cargo-metadata: {}", err) + } + }; + } + acc + }; + // cargo_metadata crate returns default (empty) path for + // older cargos, which is not absolute, so work around that. + if out_dir != PathBuf::default() { + let out_dir = AbsPathBuf::assert(out_dir); + res.out_dirs.insert(package_id.clone(), out_dir); + res.cfgs.insert(package_id, cfgs); + } + } + Message::CompilerArtifact(message) => { + if message.target.kind.contains(&"proc-macro".to_string()) { + let package_id = message.package_id; + // Skip rmeta file + if let Some(filename) = message.filenames.iter().find(|name| is_dylib(name)) + { + let filename = AbsPathBuf::assert(filename.clone()); + res.proc_dylib_paths.insert(package_id, filename); + } + } + } + Message::CompilerMessage(_) => (), + Message::Unknown => (), + Message::BuildFinished(_) => {} + Message::TextLine(_) => {} + } + } + } + Ok(res) +} + +// FIXME: File a better way to know if it is a dylib +fn is_dylib(path: &Path) -> bool { + match path.extension().and_then(OsStr::to_str).map(|it| it.to_string().to_lowercase()) { + None => false, + Some(ext) => matches!(ext.as_str(), "dll" | "dylib" | "so"), + } +} diff --git a/crates/project_model/src/cfg_flag.rs b/crates/project_model/src/cfg_flag.rs new file mode 100644 index 0000000000..e92962cf67 --- /dev/null +++ 
b/crates/project_model/src/cfg_flag.rs @@ -0,0 +1,51 @@ +//! Parsing of CfgFlags as command line arguments, as in +//! +//! rustc main.rs --cfg foo --cfg 'feature="bar"' +use std::str::FromStr; + +use cfg::CfgOptions; +use stdx::split_once; + +#[derive(Clone, Eq, PartialEq, Debug)] +pub enum CfgFlag { + Atom(String), + KeyValue { key: String, value: String }, +} + +impl FromStr for CfgFlag { + type Err = String; + fn from_str(s: &str) -> Result { + let res = match split_once(s, '=') { + Some((key, value)) => { + if !(value.starts_with('"') && value.ends_with('"')) { + return Err(format!("Invalid cfg ({:?}), value should be in quotes", s)); + } + let key = key.to_string(); + let value = value[1..value.len() - 1].to_string(); + CfgFlag::KeyValue { key, value } + } + None => CfgFlag::Atom(s.into()), + }; + Ok(res) + } +} + +impl<'de> serde::Deserialize<'de> for CfgFlag { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + String::deserialize(deserializer)?.parse().map_err(serde::de::Error::custom) + } +} + +impl Extend for CfgOptions { + fn extend>(&mut self, iter: T) { + for cfg_flag in iter { + match cfg_flag { + CfgFlag::Atom(it) => self.insert_atom(it.into()), + CfgFlag::KeyValue { key, value } => self.insert_key_value(key.into(), value.into()), + } + } + } +} diff --git a/crates/project_model/src/lib.rs b/crates/project_model/src/lib.rs new file mode 100644 index 0000000000..1f5a94d7f9 --- /dev/null +++ b/crates/project_model/src/lib.rs @@ -0,0 +1,544 @@ +//! 
FIXME: write short doc here + +mod cargo_workspace; +mod project_json; +mod sysroot; +mod cfg_flag; + +use std::{ + fs::{self, read_dir, ReadDir}, + io, + process::Command, +}; + +use anyhow::{bail, Context, Result}; +use base_db::{CrateGraph, CrateId, CrateName, Edition, Env, FileId}; +use cfg::CfgOptions; +use paths::{AbsPath, AbsPathBuf}; +use rustc_hash::{FxHashMap, FxHashSet}; + +use crate::cfg_flag::CfgFlag; + +pub use crate::{ + cargo_workspace::{CargoConfig, CargoWorkspace, Package, Target, TargetKind}, + project_json::{ProjectJson, ProjectJsonData}, + sysroot::Sysroot, +}; + +pub use proc_macro_api::ProcMacroClient; + +#[derive(Debug, Clone, Eq, PartialEq)] +pub enum ProjectWorkspace { + /// Project workspace was discovered by running `cargo metadata` and `rustc --print sysroot`. + Cargo { cargo: CargoWorkspace, sysroot: Sysroot }, + /// Project workspace was manually specified using a `rust-project.json` file. + Json { project: ProjectJson }, +} + +/// `PackageRoot` describes a package root folder, +/// which may be an external dependency, or a member of +/// the current workspace. 
+#[derive(Debug, Clone, Eq, PartialEq, Hash)] +pub struct PackageRoot { + /// Is a member of the current workspace + pub is_member: bool, + pub include: Vec, + pub exclude: Vec, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash, Ord, PartialOrd)] +pub enum ProjectManifest { + ProjectJson(AbsPathBuf), + CargoToml(AbsPathBuf), +} + +impl ProjectManifest { + pub fn from_manifest_file(path: AbsPathBuf) -> Result { + if path.ends_with("rust-project.json") { + return Ok(ProjectManifest::ProjectJson(path)); + } + if path.ends_with("Cargo.toml") { + return Ok(ProjectManifest::CargoToml(path)); + } + bail!("project root must point to Cargo.toml or rust-project.json: {}", path.display()) + } + + pub fn discover_single(path: &AbsPath) -> Result { + let mut candidates = ProjectManifest::discover(path)?; + let res = match candidates.pop() { + None => bail!("no projects"), + Some(it) => it, + }; + + if !candidates.is_empty() { + bail!("more than one project") + } + Ok(res) + } + + pub fn discover(path: &AbsPath) -> io::Result> { + if let Some(project_json) = find_in_parent_dirs(path, "rust-project.json") { + return Ok(vec![ProjectManifest::ProjectJson(project_json)]); + } + return find_cargo_toml(path) + .map(|paths| paths.into_iter().map(ProjectManifest::CargoToml).collect()); + + fn find_cargo_toml(path: &AbsPath) -> io::Result> { + match find_in_parent_dirs(path, "Cargo.toml") { + Some(it) => Ok(vec![it]), + None => Ok(find_cargo_toml_in_child_dir(read_dir(path)?)), + } + } + + fn find_in_parent_dirs(path: &AbsPath, target_file_name: &str) -> Option { + if path.ends_with(target_file_name) { + return Some(path.to_path_buf()); + } + + let mut curr = Some(path); + + while let Some(path) = curr { + let candidate = path.join(target_file_name); + if candidate.exists() { + return Some(candidate); + } + curr = path.parent(); + } + + None + } + + fn find_cargo_toml_in_child_dir(entities: ReadDir) -> Vec { + // Only one level down to avoid cycles the easy way and stop a runaway scan 
with large projects + entities + .filter_map(Result::ok) + .map(|it| it.path().join("Cargo.toml")) + .filter(|it| it.exists()) + .map(AbsPathBuf::assert) + .collect() + } + } + + pub fn discover_all(paths: &[impl AsRef]) -> Vec { + let mut res = paths + .iter() + .filter_map(|it| ProjectManifest::discover(it.as_ref()).ok()) + .flatten() + .collect::>() + .into_iter() + .collect::>(); + res.sort(); + res + } +} + +impl ProjectWorkspace { + pub fn load( + manifest: ProjectManifest, + cargo_config: &CargoConfig, + with_sysroot: bool, + ) -> Result { + let res = match manifest { + ProjectManifest::ProjectJson(project_json) => { + let file = fs::read_to_string(&project_json).with_context(|| { + format!("Failed to read json file {}", project_json.display()) + })?; + let data = serde_json::from_str(&file).with_context(|| { + format!("Failed to deserialize json file {}", project_json.display()) + })?; + let project_location = project_json.parent().unwrap().to_path_buf(); + let project = ProjectJson::new(&project_location, data); + ProjectWorkspace::Json { project } + } + ProjectManifest::CargoToml(cargo_toml) => { + let cargo = CargoWorkspace::from_cargo_metadata(&cargo_toml, cargo_config) + .with_context(|| { + format!( + "Failed to read Cargo metadata from Cargo.toml file {}", + cargo_toml.display() + ) + })?; + let sysroot = if with_sysroot { + Sysroot::discover(&cargo_toml).with_context(|| { + format!( + "Failed to find sysroot for Cargo.toml file {}. Is rust-src installed?", + cargo_toml.display() + ) + })? 
+ } else { + Sysroot::default() + }; + ProjectWorkspace::Cargo { cargo, sysroot } + } + }; + + Ok(res) + } + + /// Returns the roots for the current `ProjectWorkspace` + /// The return type contains the path and whether or not + /// the root is a member of the current workspace + pub fn to_roots(&self) -> Vec { + match self { + ProjectWorkspace::Json { project } => project + .crates + .iter() + .map(|krate| PackageRoot { + is_member: krate.is_workspace_member, + include: krate.include.clone(), + exclude: krate.exclude.clone(), + }) + .collect::>() + .into_iter() + .collect::>(), + ProjectWorkspace::Cargo { cargo, sysroot } => cargo + .packages() + .map(|pkg| { + let is_member = cargo[pkg].is_member; + let pkg_root = cargo[pkg].root().to_path_buf(); + + let mut include = vec![pkg_root.clone()]; + include.extend(cargo[pkg].out_dir.clone()); + + let mut exclude = vec![pkg_root.join(".git")]; + if is_member { + exclude.push(pkg_root.join("target")); + } else { + exclude.push(pkg_root.join("tests")); + exclude.push(pkg_root.join("examples")); + exclude.push(pkg_root.join("benches")); + } + PackageRoot { is_member, include, exclude } + }) + .chain(sysroot.crates().map(|krate| PackageRoot { + is_member: false, + include: vec![sysroot[krate].root_dir().to_path_buf()], + exclude: Vec::new(), + })) + .collect(), + } + } + + pub fn proc_macro_dylib_paths(&self) -> Vec { + match self { + ProjectWorkspace::Json { project } => project + .crates + .iter() + .filter_map(|krate| krate.proc_macro_dylib_path.as_ref()) + .cloned() + .collect(), + ProjectWorkspace::Cargo { cargo, sysroot: _sysroot } => cargo + .packages() + .filter_map(|pkg| cargo[pkg].proc_macro_dylib_path.as_ref()) + .cloned() + .collect(), + } + } + + pub fn n_packages(&self) -> usize { + match self { + ProjectWorkspace::Json { project, .. 
} => project.crates.len(), + ProjectWorkspace::Cargo { cargo, sysroot } => { + cargo.packages().len() + sysroot.crates().len() + } + } + } + + pub fn to_crate_graph( + &self, + target: Option<&str>, + proc_macro_client: &ProcMacroClient, + load: &mut dyn FnMut(&AbsPath) -> Option, + ) -> CrateGraph { + let mut crate_graph = CrateGraph::default(); + match self { + ProjectWorkspace::Json { project } => { + let mut cfg_cache: FxHashMap, Vec> = FxHashMap::default(); + let crates: FxHashMap<_, _> = project + .crates + .iter() + .enumerate() + .filter_map(|(seq_index, krate)| { + let file_path = &krate.root_module; + let file_id = load(&file_path)?; + + let env = krate.env.clone().into_iter().collect(); + let proc_macro = krate + .proc_macro_dylib_path + .clone() + .map(|it| proc_macro_client.by_dylib_path(&it)); + + let target = krate.target.as_deref().or(target); + let target_cfgs = cfg_cache + .entry(target) + .or_insert_with(|| get_rustc_cfg_options(target)); + + let mut cfg_options = CfgOptions::default(); + cfg_options.extend(target_cfgs.iter().chain(krate.cfg.iter()).cloned()); + + // FIXME: No crate name in json definition such that we cannot add OUT_DIR to env + Some(( + CrateId(seq_index as u32), + crate_graph.add_crate_root( + file_id, + krate.edition, + // FIXME json definitions can store the crate name + None, + cfg_options, + env, + proc_macro.unwrap_or_default(), + ), + )) + }) + .collect(); + + for (id, krate) in project.crates.iter().enumerate() { + for dep in &krate.deps { + let from_crate_id = CrateId(id as u32); + let to_crate_id = dep.crate_id; + if let (Some(&from), Some(&to)) = + (crates.get(&from_crate_id), crates.get(&to_crate_id)) + { + if crate_graph.add_dep(from, dep.name.clone(), to).is_err() { + log::error!( + "cyclic dependency {:?} -> {:?}", + from_crate_id, + to_crate_id + ); + } + } + } + } + } + ProjectWorkspace::Cargo { cargo, sysroot } => { + let mut cfg_options = CfgOptions::default(); + 
cfg_options.extend(get_rustc_cfg_options(target)); + + let sysroot_crates: FxHashMap<_, _> = sysroot + .crates() + .filter_map(|krate| { + let file_id = load(&sysroot[krate].root)?; + + let env = Env::default(); + let proc_macro = vec![]; + let name = sysroot[krate].name.clone(); + let crate_id = crate_graph.add_crate_root( + file_id, + Edition::Edition2018, + Some(name), + cfg_options.clone(), + env, + proc_macro, + ); + Some((krate, crate_id)) + }) + .collect(); + + for from in sysroot.crates() { + for &to in sysroot[from].deps.iter() { + let name = &sysroot[to].name; + if let (Some(&from), Some(&to)) = + (sysroot_crates.get(&from), sysroot_crates.get(&to)) + { + if crate_graph.add_dep(from, CrateName::new(name).unwrap(), to).is_err() + { + log::error!("cyclic dependency between sysroot crates") + } + } + } + } + + let libcore = sysroot.core().and_then(|it| sysroot_crates.get(&it).copied()); + let liballoc = sysroot.alloc().and_then(|it| sysroot_crates.get(&it).copied()); + let libstd = sysroot.std().and_then(|it| sysroot_crates.get(&it).copied()); + let libproc_macro = + sysroot.proc_macro().and_then(|it| sysroot_crates.get(&it).copied()); + + let mut pkg_to_lib_crate = FxHashMap::default(); + let mut pkg_crates = FxHashMap::default(); + + // Add test cfg for non-sysroot crates + cfg_options.insert_atom("test".into()); + cfg_options.insert_atom("debug_assertions".into()); + + // Next, create crates for each package, target pair + for pkg in cargo.packages() { + let mut lib_tgt = None; + for &tgt in cargo[pkg].targets.iter() { + let root = cargo[tgt].root.as_path(); + if let Some(file_id) = load(root) { + let edition = cargo[pkg].edition; + let cfg_options = { + let mut opts = cfg_options.clone(); + for feature in cargo[pkg].features.iter() { + opts.insert_key_value("feature".into(), feature.into()); + } + opts.extend(cargo[pkg].cfgs.iter().cloned()); + opts + }; + let mut env = Env::default(); + if let Some(out_dir) = &cargo[pkg].out_dir { + // NOTE: cargo and 
rustc seem to hide non-UTF-8 strings from env! and option_env!() + if let Some(out_dir) = out_dir.to_str().map(|s| s.to_owned()) { + env.set("OUT_DIR", out_dir); + } + } + let proc_macro = cargo[pkg] + .proc_macro_dylib_path + .as_ref() + .map(|it| proc_macro_client.by_dylib_path(&it)) + .unwrap_or_default(); + + let crate_id = crate_graph.add_crate_root( + file_id, + edition, + Some(cargo[pkg].name.clone()), + cfg_options, + env, + proc_macro.clone(), + ); + if cargo[tgt].kind == TargetKind::Lib { + lib_tgt = Some((crate_id, cargo[tgt].name.clone())); + pkg_to_lib_crate.insert(pkg, crate_id); + } + if cargo[tgt].is_proc_macro { + if let Some(proc_macro) = libproc_macro { + if crate_graph + .add_dep( + crate_id, + CrateName::new("proc_macro").unwrap(), + proc_macro, + ) + .is_err() + { + log::error!( + "cyclic dependency on proc_macro for {}", + &cargo[pkg].name + ) + } + } + } + + pkg_crates.entry(pkg).or_insert_with(Vec::new).push(crate_id); + } + } + + // Set deps to the core, std and to the lib target of the current package + for &from in pkg_crates.get(&pkg).into_iter().flatten() { + if let Some((to, name)) = lib_tgt.clone() { + if to != from + && crate_graph + .add_dep( + from, + // For root projects with dashes in their name, + // cargo metadata does not do any normalization, + // so we do it ourselves currently + CrateName::normalize_dashes(&name), + to, + ) + .is_err() + { + { + log::error!( + "cyclic dependency between targets of {}", + &cargo[pkg].name + ) + } + } + } + // core is added as a dependency before std in order to + // mimic rustcs dependency order + if let Some(core) = libcore { + if crate_graph + .add_dep(from, CrateName::new("core").unwrap(), core) + .is_err() + { + log::error!("cyclic dependency on core for {}", &cargo[pkg].name) + } + } + if let Some(alloc) = liballoc { + if crate_graph + .add_dep(from, CrateName::new("alloc").unwrap(), alloc) + .is_err() + { + log::error!("cyclic dependency on alloc for {}", &cargo[pkg].name) + } + } + 
if let Some(std) = libstd { + if crate_graph + .add_dep(from, CrateName::new("std").unwrap(), std) + .is_err() + { + log::error!("cyclic dependency on std for {}", &cargo[pkg].name) + } + } + } + } + + // Now add a dep edge from all targets of upstream to the lib + // target of downstream. + for pkg in cargo.packages() { + for dep in cargo[pkg].dependencies.iter() { + if let Some(&to) = pkg_to_lib_crate.get(&dep.pkg) { + for &from in pkg_crates.get(&pkg).into_iter().flatten() { + if crate_graph + .add_dep(from, CrateName::new(&dep.name).unwrap(), to) + .is_err() + { + log::error!( + "cyclic dependency {} -> {}", + &cargo[pkg].name, + &cargo[dep.pkg].name + ) + } + } + } + } + } + } + } + crate_graph + } +} + +fn get_rustc_cfg_options(target: Option<&str>) -> Vec { + let mut res = Vec::new(); + + // Some nightly-only cfgs, which are required for stdlib + res.push(CfgFlag::Atom("target_thread_local".into())); + for &ty in ["8", "16", "32", "64", "cas", "ptr"].iter() { + for &key in ["target_has_atomic", "target_has_atomic_load_store"].iter() { + res.push(CfgFlag::KeyValue { key: key.to_string(), value: ty.into() }); + } + } + + let rustc_cfgs = { + let mut cmd = Command::new(toolchain::rustc()); + cmd.args(&["--print", "cfg", "-O"]); + if let Some(target) = target { + cmd.args(&["--target", target]); + } + utf8_stdout(cmd) + }; + + match rustc_cfgs { + Ok(rustc_cfgs) => res.extend(rustc_cfgs.lines().map(|it| it.parse().unwrap())), + Err(e) => log::error!("failed to get rustc cfgs: {:#}", e), + } + + res +} + +fn utf8_stdout(mut cmd: Command) -> Result { + let output = cmd.output().with_context(|| format!("{:?} failed", cmd))?; + if !output.status.success() { + match String::from_utf8(output.stderr) { + Ok(stderr) if !stderr.is_empty() => { + bail!("{:?} failed, {}\nstderr:\n{}", cmd, output.status, stderr) + } + _ => bail!("{:?} failed, {}", cmd, output.status), + } + } + let stdout = String::from_utf8(output.stdout)?; + Ok(stdout) +} diff --git 
a/crates/project_model/src/project_json.rs b/crates/project_model/src/project_json.rs new file mode 100644 index 0000000000..060ea5b7dc --- /dev/null +++ b/crates/project_model/src/project_json.rs @@ -0,0 +1,143 @@ +//! FIXME: write short doc here + +use std::path::PathBuf; + +use base_db::{CrateId, CrateName, Dependency, Edition}; +use paths::{AbsPath, AbsPathBuf}; +use rustc_hash::FxHashMap; +use serde::{de, Deserialize}; + +use crate::cfg_flag::CfgFlag; + +/// Roots and crates that compose this Rust project. +#[derive(Clone, Debug, Eq, PartialEq)] +pub struct ProjectJson { + pub(crate) crates: Vec, +} + +/// A crate points to the root module of a crate and lists the dependencies of the crate. This is +/// useful in creating the crate graph. +#[derive(Clone, Debug, Eq, PartialEq)] +pub struct Crate { + pub(crate) root_module: AbsPathBuf, + pub(crate) edition: Edition, + pub(crate) deps: Vec, + pub(crate) cfg: Vec, + pub(crate) target: Option, + pub(crate) env: FxHashMap, + pub(crate) proc_macro_dylib_path: Option, + pub(crate) is_workspace_member: bool, + pub(crate) include: Vec, + pub(crate) exclude: Vec, +} + +impl ProjectJson { + pub fn new(base: &AbsPath, data: ProjectJsonData) -> ProjectJson { + ProjectJson { + crates: data + .crates + .into_iter() + .map(|crate_data| { + let is_workspace_member = crate_data.is_workspace_member.unwrap_or_else(|| { + crate_data.root_module.is_relative() + && !crate_data.root_module.starts_with("..") + || crate_data.root_module.starts_with(base) + }); + let root_module = base.join(crate_data.root_module); + let (include, exclude) = match crate_data.source { + Some(src) => { + let absolutize = |dirs: Vec| { + dirs.into_iter().map(|it| base.join(it)).collect::>() + }; + (absolutize(src.include_dirs), absolutize(src.exclude_dirs)) + } + None => (vec![root_module.parent().unwrap().to_path_buf()], Vec::new()), + }; + + Crate { + root_module, + edition: crate_data.edition.into(), + deps: crate_data + .deps + .into_iter() + 
.map(|dep_data| Dependency { + crate_id: CrateId(dep_data.krate as u32), + name: dep_data.name, + }) + .collect::>(), + cfg: crate_data.cfg, + target: crate_data.target, + env: crate_data.env, + proc_macro_dylib_path: crate_data + .proc_macro_dylib_path + .map(|it| base.join(it)), + is_workspace_member, + include, + exclude, + } + }) + .collect::>(), + } + } +} + +#[derive(Deserialize)] +pub struct ProjectJsonData { + crates: Vec, +} + +#[derive(Deserialize)] +struct CrateData { + root_module: PathBuf, + edition: EditionData, + deps: Vec, + #[serde(default)] + cfg: Vec, + target: Option, + #[serde(default)] + env: FxHashMap, + proc_macro_dylib_path: Option, + is_workspace_member: Option, + source: Option, +} + +#[derive(Deserialize)] +#[serde(rename = "edition")] +enum EditionData { + #[serde(rename = "2015")] + Edition2015, + #[serde(rename = "2018")] + Edition2018, +} + +impl From for Edition { + fn from(data: EditionData) -> Self { + match data { + EditionData::Edition2015 => Edition::Edition2015, + EditionData::Edition2018 => Edition::Edition2018, + } + } +} + +#[derive(Deserialize)] +struct DepData { + /// Identifies a crate by position in the crates array. + #[serde(rename = "crate")] + krate: usize, + #[serde(deserialize_with = "deserialize_crate_name")] + name: CrateName, +} + +#[derive(Deserialize)] +struct CrateSource { + include_dirs: Vec, + exclude_dirs: Vec, +} + +fn deserialize_crate_name<'de, D>(de: D) -> Result +where + D: de::Deserializer<'de>, +{ + let name = String::deserialize(de)?; + CrateName::new(&name).map_err(|err| de::Error::custom(format!("invalid crate name: {:?}", err))) +} diff --git a/crates/project_model/src/sysroot.rs b/crates/project_model/src/sysroot.rs new file mode 100644 index 0000000000..8239797b6b --- /dev/null +++ b/crates/project_model/src/sysroot.rs @@ -0,0 +1,173 @@ +//! 
FIXME: write short doc here + +use std::{convert::TryFrom, env, ops, path::Path, process::Command}; + +use anyhow::{bail, format_err, Result}; +use arena::{Arena, Idx}; +use paths::{AbsPath, AbsPathBuf}; + +use crate::utf8_stdout; + +#[derive(Default, Debug, Clone, Eq, PartialEq)] +pub struct Sysroot { + crates: Arena, +} + +pub type SysrootCrate = Idx; + +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct SysrootCrateData { + pub name: String, + pub root: AbsPathBuf, + pub deps: Vec, +} + +impl ops::Index for Sysroot { + type Output = SysrootCrateData; + fn index(&self, index: SysrootCrate) -> &SysrootCrateData { + &self.crates[index] + } +} + +impl Sysroot { + pub fn core(&self) -> Option { + self.by_name("core") + } + + pub fn alloc(&self) -> Option { + self.by_name("alloc") + } + + pub fn std(&self) -> Option { + self.by_name("std") + } + + pub fn proc_macro(&self) -> Option { + self.by_name("proc_macro") + } + + pub fn crates<'a>(&'a self) -> impl Iterator + ExactSizeIterator + 'a { + self.crates.iter().map(|(id, _data)| id) + } + + pub fn discover(cargo_toml: &AbsPath) -> Result { + let src = get_or_install_rust_src(cargo_toml)?; + let mut sysroot = Sysroot { crates: Arena::default() }; + for name in SYSROOT_CRATES.trim().lines() { + // FIXME: remove this path when 1.47 comes out + // https://github.com/rust-lang/rust/pull/73265 + let root = src.join(format!("lib{}", name)).join("lib.rs"); + if root.exists() { + sysroot.crates.alloc(SysrootCrateData { + name: name.into(), + root, + deps: Vec::new(), + }); + } else { + let root = src.join(name).join("src/lib.rs"); + if root.exists() { + sysroot.crates.alloc(SysrootCrateData { + name: name.into(), + root, + deps: Vec::new(), + }); + } + } + } + if let Some(std) = sysroot.std() { + for dep in STD_DEPS.trim().lines() { + if let Some(dep) = sysroot.by_name(dep) { + sysroot.crates[std].deps.push(dep) + } + } + } + if let Some(alloc) = sysroot.alloc() { + if let Some(core) = sysroot.core() { + 
sysroot.crates[alloc].deps.push(core); + } + } + Ok(sysroot) + } + + fn by_name(&self, name: &str) -> Option { + self.crates.iter().find(|(_id, data)| data.name == name).map(|(id, _data)| id) + } +} + +fn get_or_install_rust_src(cargo_toml: &AbsPath) -> Result { + if let Ok(path) = env::var("RUST_SRC_PATH") { + let path = AbsPathBuf::try_from(path.as_str()) + .map_err(|path| format_err!("RUST_SRC_PATH must be absolute: {}", path.display()))?; + return Ok(path); + } + let current_dir = cargo_toml.parent().unwrap(); + let mut rustc = Command::new(toolchain::rustc()); + rustc.current_dir(current_dir).args(&["--print", "sysroot"]); + let stdout = utf8_stdout(rustc)?; + let sysroot_path = AbsPath::assert(Path::new(stdout.trim())); + let mut src = get_rust_src(sysroot_path); + if src.is_none() { + let mut rustup = Command::new(toolchain::rustup()); + rustup.current_dir(current_dir).args(&["component", "add", "rust-src"]); + utf8_stdout(rustup)?; + src = get_rust_src(sysroot_path); + } + match src { + Some(r) => Ok(r), + None => bail!( + "can't load standard library from sysroot\n\ + {}\n\ + (discovered via `rustc --print sysroot`)\n\ + try running `rustup component add rust-src` or set `RUST_SRC_PATH`", + sysroot_path.display(), + ), + } +} + +fn get_rust_src(sysroot_path: &AbsPath) -> Option { + // try the new path first since the old one still exists + let mut src_path = sysroot_path.join("lib/rustlib/src/rust/library"); + if !src_path.exists() { + // FIXME: remove this path when 1.47 comes out + // https://github.com/rust-lang/rust/pull/73265 + src_path = sysroot_path.join("lib/rustlib/src/rust/src"); + } + if src_path.exists() { + Some(src_path) + } else { + None + } +} + +impl SysrootCrateData { + pub fn root_dir(&self) -> &AbsPath { + self.root.parent().unwrap() + } +} + +const SYSROOT_CRATES: &str = " +alloc +core +panic_abort +panic_unwind +proc_macro +profiler_builtins +rtstartup +std +stdarch +term +test +unwind"; + +const STD_DEPS: &str = " +alloc +core 
+panic_abort +panic_unwind +profiler_builtins +rtstartup +proc_macro +stdarch +term +test +unwind"; diff --git a/crates/ra_arena/Cargo.toml b/crates/ra_arena/Cargo.toml deleted file mode 100644 index 66c3738f4d..0000000000 --- a/crates/ra_arena/Cargo.toml +++ /dev/null @@ -1,9 +0,0 @@ -[package] -edition = "2018" -name = "ra_arena" -version = "0.1.0" -authors = ["rust-analyzer developers"] -license = "MIT OR Apache-2.0" - -[lib] -doctest = false diff --git a/crates/ra_assists/Cargo.toml b/crates/ra_assists/Cargo.toml deleted file mode 100644 index bd2905f080..0000000000 --- a/crates/ra_assists/Cargo.toml +++ /dev/null @@ -1,25 +0,0 @@ -[package] -edition = "2018" -name = "ra_assists" -version = "0.1.0" -authors = ["rust-analyzer developers"] -license = "MIT OR Apache-2.0" - -[lib] -doctest = false - -[dependencies] -rustc-hash = "1.1.0" -itertools = "0.9.0" -either = "1.5.3" - -stdx = { path = "../stdx" } - -ra_syntax = { path = "../ra_syntax" } -ra_text_edit = { path = "../ra_text_edit" } -ra_fmt = { path = "../ra_fmt" } -ra_prof = { path = "../ra_prof" } -ra_db = { path = "../ra_db" } -ra_ide_db = { path = "../ra_ide_db" } -hir = { path = "../ra_hir", package = "ra_hir" } -test_utils = { path = "../test_utils" } diff --git a/crates/ra_assists/src/assist_context.rs b/crates/ra_assists/src/assist_context.rs deleted file mode 100644 index afd3fd4b9e..0000000000 --- a/crates/ra_assists/src/assist_context.rs +++ /dev/null @@ -1,306 +0,0 @@ -//! 
See `AssistContext` - -use std::mem; - -use algo::find_covering_element; -use hir::Semantics; -use ra_db::{FileId, FileRange}; -use ra_fmt::{leading_indent, reindent}; -use ra_ide_db::{ - source_change::{SourceChange, SourceFileEdit}, - RootDatabase, -}; -use ra_syntax::{ - algo::{self, find_node_at_offset, SyntaxRewriter}, - AstNode, SourceFile, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, TextRange, TextSize, - TokenAtOffset, -}; -use ra_text_edit::TextEditBuilder; - -use crate::{ - assist_config::{AssistConfig, SnippetCap}, - Assist, AssistId, AssistKind, GroupLabel, ResolvedAssist, -}; - -/// `AssistContext` allows to apply an assist or check if it could be applied. -/// -/// Assists use a somewhat over-engineered approach, given the current needs. -/// The assists workflow consists of two phases. In the first phase, a user asks -/// for the list of available assists. In the second phase, the user picks a -/// particular assist and it gets applied. -/// -/// There are two peculiarities here: -/// -/// * first, we ideally avoid computing more things then necessary to answer "is -/// assist applicable" in the first phase. -/// * second, when we are applying assist, we don't have a guarantee that there -/// weren't any changes between the point when user asked for assists and when -/// they applied a particular assist. So, when applying assist, we need to do -/// all the checks from scratch. -/// -/// To avoid repeating the same code twice for both "check" and "apply" -/// functions, we use an approach reminiscent of that of Django's function based -/// views dealing with forms. Each assist receives a runtime parameter, -/// `resolve`. It first check if an edit is applicable (potentially computing -/// info required to compute the actual edit). If it is applicable, and -/// `resolve` is `true`, it then computes the actual edit. 
-/// -/// So, to implement the original assists workflow, we can first apply each edit -/// with `resolve = false`, and then applying the selected edit again, with -/// `resolve = true` this time. -/// -/// Note, however, that we don't actually use such two-phase logic at the -/// moment, because the LSP API is pretty awkward in this place, and it's much -/// easier to just compute the edit eagerly :-) -pub(crate) struct AssistContext<'a> { - pub(crate) config: &'a AssistConfig, - pub(crate) sema: Semantics<'a, RootDatabase>, - pub(crate) frange: FileRange, - source_file: SourceFile, -} - -impl<'a> AssistContext<'a> { - pub(crate) fn new( - sema: Semantics<'a, RootDatabase>, - config: &'a AssistConfig, - frange: FileRange, - ) -> AssistContext<'a> { - let source_file = sema.parse(frange.file_id); - AssistContext { config, sema, frange, source_file } - } - - pub(crate) fn db(&self) -> &RootDatabase { - self.sema.db - } - - pub(crate) fn source_file(&self) -> &SourceFile { - &self.source_file - } - - // NB, this ignores active selection. 
- pub(crate) fn offset(&self) -> TextSize { - self.frange.range.start() - } - - pub(crate) fn token_at_offset(&self) -> TokenAtOffset { - self.source_file.syntax().token_at_offset(self.offset()) - } - pub(crate) fn find_token_at_offset(&self, kind: SyntaxKind) -> Option { - self.token_at_offset().find(|it| it.kind() == kind) - } - pub(crate) fn find_node_at_offset(&self) -> Option { - find_node_at_offset(self.source_file.syntax(), self.offset()) - } - pub(crate) fn find_node_at_offset_with_descend(&self) -> Option { - self.sema.find_node_at_offset_with_descend(self.source_file.syntax(), self.offset()) - } - pub(crate) fn covering_element(&self) -> SyntaxElement { - find_covering_element(self.source_file.syntax(), self.frange.range) - } - // FIXME: remove - pub(crate) fn covering_node_for_range(&self, range: TextRange) -> SyntaxElement { - find_covering_element(self.source_file.syntax(), range) - } -} - -pub(crate) struct Assists { - resolve: bool, - file: FileId, - buf: Vec<(Assist, Option)>, - allowed: Option>, -} - -impl Assists { - pub(crate) fn new_resolved(ctx: &AssistContext) -> Assists { - Assists { - resolve: true, - file: ctx.frange.file_id, - buf: Vec::new(), - allowed: ctx.config.allowed.clone(), - } - } - - pub(crate) fn new_unresolved(ctx: &AssistContext) -> Assists { - Assists { - resolve: false, - file: ctx.frange.file_id, - buf: Vec::new(), - allowed: ctx.config.allowed.clone(), - } - } - - pub(crate) fn finish_unresolved(self) -> Vec { - assert!(!self.resolve); - self.finish() - .into_iter() - .map(|(label, edit)| { - assert!(edit.is_none()); - label - }) - .collect() - } - - pub(crate) fn finish_resolved(self) -> Vec { - assert!(self.resolve); - self.finish() - .into_iter() - .map(|(label, edit)| ResolvedAssist { assist: label, source_change: edit.unwrap() }) - .collect() - } - - pub(crate) fn add( - &mut self, - id: AssistId, - label: impl Into, - target: TextRange, - f: impl FnOnce(&mut AssistBuilder), - ) -> Option<()> { - if 
!self.is_allowed(&id) { - return None; - } - let label = Assist::new(id, label.into(), None, target); - self.add_impl(label, f) - } - - pub(crate) fn add_group( - &mut self, - group: &GroupLabel, - id: AssistId, - label: impl Into, - target: TextRange, - f: impl FnOnce(&mut AssistBuilder), - ) -> Option<()> { - if !self.is_allowed(&id) { - return None; - } - - let label = Assist::new(id, label.into(), Some(group.clone()), target); - self.add_impl(label, f) - } - - fn add_impl(&mut self, label: Assist, f: impl FnOnce(&mut AssistBuilder)) -> Option<()> { - let source_change = if self.resolve { - let mut builder = AssistBuilder::new(self.file); - f(&mut builder); - Some(builder.finish()) - } else { - None - }; - - self.buf.push((label, source_change)); - Some(()) - } - - fn finish(mut self) -> Vec<(Assist, Option)> { - self.buf.sort_by_key(|(label, _edit)| label.target.len()); - self.buf - } - - fn is_allowed(&self, id: &AssistId) -> bool { - match &self.allowed { - Some(allowed) => allowed.iter().any(|kind| kind.contains(id.1)), - None => true, - } - } -} - -pub(crate) struct AssistBuilder { - edit: TextEditBuilder, - file_id: FileId, - is_snippet: bool, - change: SourceChange, -} - -impl AssistBuilder { - pub(crate) fn new(file_id: FileId) -> AssistBuilder { - AssistBuilder { - edit: TextEditBuilder::default(), - file_id, - is_snippet: false, - change: SourceChange::default(), - } - } - - pub(crate) fn edit_file(&mut self, file_id: FileId) { - self.file_id = file_id; - } - - fn commit(&mut self) { - let edit = mem::take(&mut self.edit).finish(); - if !edit.is_empty() { - let new_edit = SourceFileEdit { file_id: self.file_id, edit }; - assert!(!self.change.source_file_edits.iter().any(|it| it.file_id == new_edit.file_id)); - self.change.source_file_edits.push(new_edit); - } - } - - /// Remove specified `range` of text. 
- pub(crate) fn delete(&mut self, range: TextRange) { - self.edit.delete(range) - } - /// Append specified `text` at the given `offset` - pub(crate) fn insert(&mut self, offset: TextSize, text: impl Into) { - self.edit.insert(offset, text.into()) - } - /// Append specified `snippet` at the given `offset` - pub(crate) fn insert_snippet( - &mut self, - _cap: SnippetCap, - offset: TextSize, - snippet: impl Into, - ) { - self.is_snippet = true; - self.insert(offset, snippet); - } - /// Replaces specified `range` of text with a given string. - pub(crate) fn replace(&mut self, range: TextRange, replace_with: impl Into) { - self.edit.replace(range, replace_with.into()) - } - /// Replaces specified `range` of text with a given `snippet`. - pub(crate) fn replace_snippet( - &mut self, - _cap: SnippetCap, - range: TextRange, - snippet: impl Into, - ) { - self.is_snippet = true; - self.replace(range, snippet); - } - pub(crate) fn replace_ast(&mut self, old: N, new: N) { - algo::diff(old.syntax(), new.syntax()).into_text_edit(&mut self.edit) - } - /// Replaces specified `node` of text with a given string, reindenting the - /// string to maintain `node`'s existing indent. - // FIXME: remove in favor of ra_syntax::edit::IndentLevel::increase_indent - pub(crate) fn replace_node_and_indent( - &mut self, - node: &SyntaxNode, - replace_with: impl Into, - ) { - let mut replace_with = replace_with.into(); - if let Some(indent) = leading_indent(node) { - replace_with = reindent(&replace_with, &indent) - } - self.replace(node.text_range(), replace_with) - } - pub(crate) fn rewrite(&mut self, rewriter: SyntaxRewriter) { - let node = rewriter.rewrite_root().unwrap(); - let new = rewriter.rewrite(&node); - algo::diff(&node, &new).into_text_edit(&mut self.edit); - } - - // FIXME: kill this API - /// Get access to the raw `TextEditBuilder`. 
- pub(crate) fn text_edit_builder(&mut self) -> &mut TextEditBuilder { - &mut self.edit - } - - fn finish(mut self) -> SourceChange { - self.commit(); - let mut change = mem::take(&mut self.change); - if self.is_snippet { - change.is_snippet = true; - } - change - } -} diff --git a/crates/ra_assists/src/ast_transform.rs b/crates/ra_assists/src/ast_transform.rs deleted file mode 100644 index 15ec75c956..0000000000 --- a/crates/ra_assists/src/ast_transform.rs +++ /dev/null @@ -1,212 +0,0 @@ -//! `AstTransformer`s are functions that replace nodes in an AST and can be easily combined. -use rustc_hash::FxHashMap; - -use hir::{HirDisplay, PathResolution, SemanticsScope}; -use ra_syntax::{ - algo::SyntaxRewriter, - ast::{self, AstNode}, -}; - -pub trait AstTransform<'a> { - fn get_substitution(&self, node: &ra_syntax::SyntaxNode) -> Option; - - fn chain_before(self, other: Box + 'a>) -> Box + 'a>; - fn or + 'a>(self, other: T) -> Box + 'a> - where - Self: Sized + 'a, - { - self.chain_before(Box::new(other)) - } -} - -struct NullTransformer; - -impl<'a> AstTransform<'a> for NullTransformer { - fn get_substitution(&self, _node: &ra_syntax::SyntaxNode) -> Option { - None - } - fn chain_before(self, other: Box + 'a>) -> Box + 'a> { - other - } -} - -pub struct SubstituteTypeParams<'a> { - source_scope: &'a SemanticsScope<'a>, - substs: FxHashMap, - previous: Box + 'a>, -} - -impl<'a> SubstituteTypeParams<'a> { - pub fn for_trait_impl( - source_scope: &'a SemanticsScope<'a>, - // FIXME: there's implicit invariant that `trait_` and `source_scope` match... 
- trait_: hir::Trait, - impl_def: ast::Impl, - ) -> SubstituteTypeParams<'a> { - let substs = get_syntactic_substs(impl_def).unwrap_or_default(); - let generic_def: hir::GenericDef = trait_.into(); - let substs_by_param: FxHashMap<_, _> = generic_def - .params(source_scope.db) - .into_iter() - // this is a trait impl, so we need to skip the first type parameter -- this is a bit hacky - .skip(1) - // The actual list of trait type parameters may be longer than the one - // used in the `impl` block due to trailing default type parametrs. - // For that case we extend the `substs` with an empty iterator so we - // can still hit those trailing values and check if they actually have - // a default type. If they do, go for that type from `hir` to `ast` so - // the resulting change can be applied correctly. - .zip(substs.into_iter().map(Some).chain(std::iter::repeat(None))) - .filter_map(|(k, v)| match v { - Some(v) => Some((k, v)), - None => { - let default = k.default(source_scope.db)?; - Some(( - k, - ast::make::ty( - &default - .display_source_code(source_scope.db, source_scope.module()?.into()) - .ok()?, - ), - )) - } - }) - .collect(); - return SubstituteTypeParams { - source_scope, - substs: substs_by_param, - previous: Box::new(NullTransformer), - }; - - // FIXME: It would probably be nicer if we could get this via HIR (i.e. get the - // trait ref, and then go from the types in the substs back to the syntax). 
- fn get_syntactic_substs(impl_def: ast::Impl) -> Option> { - let target_trait = impl_def.trait_()?; - let path_type = match target_trait { - ast::Type::PathType(path) => path, - _ => return None, - }; - let generic_arg_list = path_type.path()?.segment()?.generic_arg_list()?; - - let mut result = Vec::new(); - for generic_arg in generic_arg_list.generic_args() { - match generic_arg { - ast::GenericArg::TypeArg(type_arg) => result.push(type_arg.ty()?), - ast::GenericArg::AssocTypeArg(_) - | ast::GenericArg::LifetimeArg(_) - | ast::GenericArg::ConstArg(_) => (), - } - } - - Some(result) - } - } - fn get_substitution_inner( - &self, - node: &ra_syntax::SyntaxNode, - ) -> Option { - let type_ref = ast::Type::cast(node.clone())?; - let path = match &type_ref { - ast::Type::PathType(path_type) => path_type.path()?, - _ => return None, - }; - // FIXME: use `hir::Path::from_src` instead. - #[allow(deprecated)] - let path = hir::Path::from_ast(path)?; - let resolution = self.source_scope.resolve_hir_path(&path)?; - match resolution { - hir::PathResolution::TypeParam(tp) => Some(self.substs.get(&tp)?.syntax().clone()), - _ => None, - } - } -} - -impl<'a> AstTransform<'a> for SubstituteTypeParams<'a> { - fn get_substitution(&self, node: &ra_syntax::SyntaxNode) -> Option { - self.get_substitution_inner(node).or_else(|| self.previous.get_substitution(node)) - } - fn chain_before(self, other: Box + 'a>) -> Box + 'a> { - Box::new(SubstituteTypeParams { previous: other, ..self }) - } -} - -pub struct QualifyPaths<'a> { - target_scope: &'a SemanticsScope<'a>, - source_scope: &'a SemanticsScope<'a>, - previous: Box + 'a>, -} - -impl<'a> QualifyPaths<'a> { - pub fn new(target_scope: &'a SemanticsScope<'a>, source_scope: &'a SemanticsScope<'a>) -> Self { - Self { target_scope, source_scope, previous: Box::new(NullTransformer) } - } - - fn get_substitution_inner( - &self, - node: &ra_syntax::SyntaxNode, - ) -> Option { - // FIXME handle value ns? 
- let from = self.target_scope.module()?; - let p = ast::Path::cast(node.clone())?; - if p.segment().and_then(|s| s.param_list()).is_some() { - // don't try to qualify `Fn(Foo) -> Bar` paths, they are in prelude anyway - return None; - } - // FIXME: use `hir::Path::from_src` instead. - #[allow(deprecated)] - let hir_path = hir::Path::from_ast(p.clone()); - let resolution = self.source_scope.resolve_hir_path(&hir_path?)?; - match resolution { - PathResolution::Def(def) => { - let found_path = from.find_use_path(self.source_scope.db.upcast(), def)?; - let mut path = path_to_ast(found_path); - - let type_args = p - .segment() - .and_then(|s| s.generic_arg_list()) - .map(|arg_list| apply(self, arg_list)); - if let Some(type_args) = type_args { - let last_segment = path.segment().unwrap(); - path = path.with_segment(last_segment.with_type_args(type_args)) - } - - Some(path.syntax().clone()) - } - PathResolution::Local(_) - | PathResolution::TypeParam(_) - | PathResolution::SelfType(_) => None, - PathResolution::Macro(_) => None, - PathResolution::AssocItem(_) => None, - } - } -} - -pub fn apply<'a, N: AstNode>(transformer: &dyn AstTransform<'a>, node: N) -> N { - SyntaxRewriter::from_fn(|element| match element { - ra_syntax::SyntaxElement::Node(n) => { - let replacement = transformer.get_substitution(&n)?; - Some(replacement.into()) - } - _ => None, - }) - .rewrite_ast(&node) -} - -impl<'a> AstTransform<'a> for QualifyPaths<'a> { - fn get_substitution(&self, node: &ra_syntax::SyntaxNode) -> Option { - self.get_substitution_inner(node).or_else(|| self.previous.get_substitution(node)) - } - fn chain_before(self, other: Box + 'a>) -> Box + 'a> { - Box::new(QualifyPaths { previous: other, ..self }) - } -} - -pub(crate) fn path_to_ast(path: hir::ModPath) -> ast::Path { - let parse = ast::SourceFile::parse(&path.to_string()); - parse - .tree() - .syntax() - .descendants() - .find_map(ast::Path::cast) - .unwrap_or_else(|| panic!("failed to parse path {:?}, `{}`", path, path)) 
-} diff --git a/crates/ra_assists/src/handlers/add_custom_impl.rs b/crates/ra_assists/src/handlers/add_custom_impl.rs deleted file mode 100644 index b67438b6ba..0000000000 --- a/crates/ra_assists/src/handlers/add_custom_impl.rs +++ /dev/null @@ -1,208 +0,0 @@ -use ra_syntax::{ - ast::{self, AstNode}, - Direction, SmolStr, - SyntaxKind::{IDENT, WHITESPACE}, - TextRange, TextSize, -}; -use stdx::SepBy; - -use crate::{ - assist_context::{AssistContext, Assists}, - AssistId, AssistKind, -}; - -// Assist: add_custom_impl -// -// Adds impl block for derived trait. -// -// ``` -// #[derive(Deb<|>ug, Display)] -// struct S; -// ``` -// -> -// ``` -// #[derive(Display)] -// struct S; -// -// impl Debug for S { -// $0 -// } -// ``` -pub(crate) fn add_custom_impl(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let attr = ctx.find_node_at_offset::()?; - let input = attr.token_tree()?; - - let attr_name = attr - .syntax() - .descendants_with_tokens() - .filter(|t| t.kind() == IDENT) - .find_map(|i| i.into_token()) - .filter(|t| *t.text() == "derive")? 
- .text() - .clone(); - - let trait_token = - ctx.token_at_offset().find(|t| t.kind() == IDENT && *t.text() != attr_name)?; - - let annotated = attr.syntax().siblings(Direction::Next).find_map(ast::Name::cast)?; - let annotated_name = annotated.syntax().text().to_string(); - let start_offset = annotated.syntax().parent()?.text_range().end(); - - let label = - format!("Add custom impl `{}` for `{}`", trait_token.text().as_str(), annotated_name); - - let target = attr.syntax().text_range(); - acc.add(AssistId("add_custom_impl", AssistKind::Refactor), label, target, |builder| { - let new_attr_input = input - .syntax() - .descendants_with_tokens() - .filter(|t| t.kind() == IDENT) - .filter_map(|t| t.into_token().map(|t| t.text().clone())) - .filter(|t| t != trait_token.text()) - .collect::>(); - let has_more_derives = !new_attr_input.is_empty(); - let new_attr_input = new_attr_input.iter().sep_by(", ").surround_with("(", ")").to_string(); - - if has_more_derives { - builder.replace(input.syntax().text_range(), new_attr_input); - } else { - let attr_range = attr.syntax().text_range(); - builder.delete(attr_range); - - let line_break_range = attr - .syntax() - .next_sibling_or_token() - .filter(|t| t.kind() == WHITESPACE) - .map(|t| t.text_range()) - .unwrap_or_else(|| TextRange::new(TextSize::from(0), TextSize::from(0))); - builder.delete(line_break_range); - } - - match ctx.config.snippet_cap { - Some(cap) => { - builder.insert_snippet( - cap, - start_offset, - format!("\n\nimpl {} for {} {{\n $0\n}}", trait_token, annotated_name), - ); - } - None => { - builder.insert( - start_offset, - format!("\n\nimpl {} for {} {{\n\n}}", trait_token, annotated_name), - ); - } - } - }) -} - -#[cfg(test)] -mod tests { - use crate::tests::{check_assist, check_assist_not_applicable}; - - use super::*; - - #[test] - fn add_custom_impl_for_unique_input() { - check_assist( - add_custom_impl, - " -#[derive(Debu<|>g)] -struct Foo { - bar: String, -} - ", - " -struct Foo { - bar: String, -} 
- -impl Debug for Foo { - $0 -} - ", - ) - } - - #[test] - fn add_custom_impl_for_with_visibility_modifier() { - check_assist( - add_custom_impl, - " -#[derive(Debug<|>)] -pub struct Foo { - bar: String, -} - ", - " -pub struct Foo { - bar: String, -} - -impl Debug for Foo { - $0 -} - ", - ) - } - - #[test] - fn add_custom_impl_when_multiple_inputs() { - check_assist( - add_custom_impl, - " -#[derive(Display, Debug<|>, Serialize)] -struct Foo {} - ", - " -#[derive(Display, Serialize)] -struct Foo {} - -impl Debug for Foo { - $0 -} - ", - ) - } - - #[test] - fn test_ignore_derive_macro_without_input() { - check_assist_not_applicable( - add_custom_impl, - " -#[derive(<|>)] -struct Foo {} - ", - ) - } - - #[test] - fn test_ignore_if_cursor_on_param() { - check_assist_not_applicable( - add_custom_impl, - " -#[derive<|>(Debug)] -struct Foo {} - ", - ); - - check_assist_not_applicable( - add_custom_impl, - " -#[derive(Debug)<|>] -struct Foo {} - ", - ) - } - - #[test] - fn test_ignore_if_not_derive() { - check_assist_not_applicable( - add_custom_impl, - " -#[allow(non_camel_<|>case_types)] -struct Foo {} - ", - ) - } -} diff --git a/crates/ra_assists/src/handlers/add_explicit_type.rs b/crates/ra_assists/src/handlers/add_explicit_type.rs deleted file mode 100644 index 135a2ac9c9..0000000000 --- a/crates/ra_assists/src/handlers/add_explicit_type.rs +++ /dev/null @@ -1,211 +0,0 @@ -use hir::HirDisplay; -use ra_syntax::{ - ast::{self, AstNode, LetStmt, NameOwner}, - TextRange, -}; - -use crate::{AssistContext, AssistId, AssistKind, Assists}; - -// Assist: add_explicit_type -// -// Specify type for a let binding. 
-// -// ``` -// fn main() { -// let x<|> = 92; -// } -// ``` -// -> -// ``` -// fn main() { -// let x: i32 = 92; -// } -// ``` -pub(crate) fn add_explicit_type(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let let_stmt = ctx.find_node_at_offset::()?; - let module = ctx.sema.scope(let_stmt.syntax()).module()?; - let expr = let_stmt.initializer()?; - // Must be a binding - let pat = match let_stmt.pat()? { - ast::Pat::IdentPat(bind_pat) => bind_pat, - _ => return None, - }; - let pat_range = pat.syntax().text_range(); - // The binding must have a name - let name = pat.name()?; - let name_range = name.syntax().text_range(); - let stmt_range = let_stmt.syntax().text_range(); - let eq_range = let_stmt.eq_token()?.text_range(); - // Assist should only be applicable if cursor is between 'let' and '=' - let let_range = TextRange::new(stmt_range.start(), eq_range.start()); - let cursor_in_range = let_range.contains_range(ctx.frange.range); - if !cursor_in_range { - return None; - } - // Assist not applicable if the type has already been specified - // and it has no placeholders - let ascribed_ty = let_stmt.ty(); - if let Some(ty) = &ascribed_ty { - if ty.syntax().descendants().find_map(ast::InferType::cast).is_none() { - return None; - } - } - // Infer type - let ty = ctx.sema.type_of_expr(&expr)?; - - if ty.contains_unknown() || ty.is_closure() { - return None; - } - - let inferred_type = ty.display_source_code(ctx.db(), module.into()).ok()?; - acc.add( - AssistId("add_explicit_type", AssistKind::RefactorRewrite), - format!("Insert explicit type `{}`", inferred_type), - pat_range, - |builder| match ascribed_ty { - Some(ascribed_ty) => { - builder.replace(ascribed_ty.syntax().text_range(), inferred_type); - } - None => { - builder.insert(name_range.end(), format!(": {}", inferred_type)); - } - }, - ) -} - -#[cfg(test)] -mod tests { - use super::*; - - use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target}; - - #[test] - fn 
add_explicit_type_target() { - check_assist_target(add_explicit_type, "fn f() { let a<|> = 1; }", "a"); - } - - #[test] - fn add_explicit_type_works_for_simple_expr() { - check_assist(add_explicit_type, "fn f() { let a<|> = 1; }", "fn f() { let a: i32 = 1; }"); - } - - #[test] - fn add_explicit_type_works_for_underscore() { - check_assist( - add_explicit_type, - "fn f() { let a<|>: _ = 1; }", - "fn f() { let a: i32 = 1; }", - ); - } - - #[test] - fn add_explicit_type_works_for_nested_underscore() { - check_assist( - add_explicit_type, - r#" - enum Option { - Some(T), - None - } - - fn f() { - let a<|>: Option<_> = Option::Some(1); - }"#, - r#" - enum Option { - Some(T), - None - } - - fn f() { - let a: Option = Option::Some(1); - }"#, - ); - } - - #[test] - fn add_explicit_type_works_for_macro_call() { - check_assist( - add_explicit_type, - r"macro_rules! v { () => {0u64} } fn f() { let a<|> = v!(); }", - r"macro_rules! v { () => {0u64} } fn f() { let a: u64 = v!(); }", - ); - } - - #[test] - fn add_explicit_type_works_for_macro_call_recursive() { - check_assist( - add_explicit_type, - r#"macro_rules! u { () => {0u64} } macro_rules! v { () => {u!()} } fn f() { let a<|> = v!(); }"#, - r#"macro_rules! u { () => {0u64} } macro_rules! 
v { () => {u!()} } fn f() { let a: u64 = v!(); }"#, - ); - } - - #[test] - fn add_explicit_type_not_applicable_if_ty_not_inferred() { - check_assist_not_applicable(add_explicit_type, "fn f() { let a<|> = None; }"); - } - - #[test] - fn add_explicit_type_not_applicable_if_ty_already_specified() { - check_assist_not_applicable(add_explicit_type, "fn f() { let a<|>: i32 = 1; }"); - } - - #[test] - fn add_explicit_type_not_applicable_if_specified_ty_is_tuple() { - check_assist_not_applicable(add_explicit_type, "fn f() { let a<|>: (i32, i32) = (3, 4); }"); - } - - #[test] - fn add_explicit_type_not_applicable_if_cursor_after_equals() { - check_assist_not_applicable( - add_explicit_type, - "fn f() {let a =<|> match 1 {2 => 3, 3 => 5};}", - ) - } - - #[test] - fn add_explicit_type_not_applicable_if_cursor_before_let() { - check_assist_not_applicable( - add_explicit_type, - "fn f() <|>{let a = match 1 {2 => 3, 3 => 5};}", - ) - } - - #[test] - fn closure_parameters_are_not_added() { - check_assist_not_applicable( - add_explicit_type, - r#" -fn main() { - let multiply_by_two<|> = |i| i * 3; - let six = multiply_by_two(2); -}"#, - ) - } - - #[test] - fn default_generics_should_not_be_added() { - check_assist( - add_explicit_type, - r#" -struct Test { - k: K, - t: T, -} - -fn main() { - let test<|> = Test { t: 23u8, k: 33 }; -}"#, - r#" -struct Test { - k: K, - t: T, -} - -fn main() { - let test: Test = Test { t: 23u8, k: 33 }; -}"#, - ); - } -} diff --git a/crates/ra_assists/src/handlers/add_missing_impl_members.rs b/crates/ra_assists/src/handlers/add_missing_impl_members.rs deleted file mode 100644 index 95a750aeec..0000000000 --- a/crates/ra_assists/src/handlers/add_missing_impl_members.rs +++ /dev/null @@ -1,711 +0,0 @@ -use hir::HasSource; -use ra_syntax::{ - ast::{ - self, - edit::{self, AstNodeEdit, IndentLevel}, - make, AstNode, NameOwner, - }, - SmolStr, -}; - -use crate::{ - assist_context::{AssistContext, Assists}, - ast_transform::{self, AstTransform, 
QualifyPaths, SubstituteTypeParams}, - utils::{get_missing_assoc_items, render_snippet, resolve_target_trait, Cursor}, - AssistId, AssistKind, -}; - -#[derive(PartialEq)] -enum AddMissingImplMembersMode { - DefaultMethodsOnly, - NoDefaultMethods, -} - -// Assist: add_impl_missing_members -// -// Adds scaffold for required impl members. -// -// ``` -// trait Trait { -// Type X; -// fn foo(&self) -> T; -// fn bar(&self) {} -// } -// -// impl Trait for () {<|> -// -// } -// ``` -// -> -// ``` -// trait Trait { -// Type X; -// fn foo(&self) -> T; -// fn bar(&self) {} -// } -// -// impl Trait for () { -// fn foo(&self) -> u32 { -// ${0:todo!()} -// } -// -// } -// ``` -pub(crate) fn add_missing_impl_members(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - add_missing_impl_members_inner( - acc, - ctx, - AddMissingImplMembersMode::NoDefaultMethods, - "add_impl_missing_members", - "Implement missing members", - ) -} - -// Assist: add_impl_default_members -// -// Adds scaffold for overriding default impl members. 
-// -// ``` -// trait Trait { -// Type X; -// fn foo(&self); -// fn bar(&self) {} -// } -// -// impl Trait for () { -// Type X = (); -// fn foo(&self) {}<|> -// -// } -// ``` -// -> -// ``` -// trait Trait { -// Type X; -// fn foo(&self); -// fn bar(&self) {} -// } -// -// impl Trait for () { -// Type X = (); -// fn foo(&self) {} -// $0fn bar(&self) {} -// -// } -// ``` -pub(crate) fn add_missing_default_members(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - add_missing_impl_members_inner( - acc, - ctx, - AddMissingImplMembersMode::DefaultMethodsOnly, - "add_impl_default_members", - "Implement default members", - ) -} - -fn add_missing_impl_members_inner( - acc: &mut Assists, - ctx: &AssistContext, - mode: AddMissingImplMembersMode, - assist_id: &'static str, - label: &'static str, -) -> Option<()> { - let _p = ra_prof::profile("add_missing_impl_members_inner"); - let impl_def = ctx.find_node_at_offset::()?; - let impl_item_list = impl_def.assoc_item_list()?; - - let trait_ = resolve_target_trait(&ctx.sema, &impl_def)?; - - let def_name = |item: &ast::AssocItem| -> Option { - match item { - ast::AssocItem::Fn(def) => def.name(), - ast::AssocItem::TypeAlias(def) => def.name(), - ast::AssocItem::Const(def) => def.name(), - ast::AssocItem::MacroCall(_) => None, - } - .map(|it| it.text().clone()) - }; - - let missing_items = get_missing_assoc_items(&ctx.sema, &impl_def) - .iter() - .map(|i| match i { - hir::AssocItem::Function(i) => ast::AssocItem::Fn(i.source(ctx.db()).value), - hir::AssocItem::TypeAlias(i) => ast::AssocItem::TypeAlias(i.source(ctx.db()).value), - hir::AssocItem::Const(i) => ast::AssocItem::Const(i.source(ctx.db()).value), - }) - .filter(|t| def_name(&t).is_some()) - .filter(|t| match t { - ast::AssocItem::Fn(def) => match mode { - AddMissingImplMembersMode::DefaultMethodsOnly => def.body().is_some(), - AddMissingImplMembersMode::NoDefaultMethods => def.body().is_none(), - }, - _ => mode == AddMissingImplMembersMode::NoDefaultMethods, - }) 
- .collect::>(); - - if missing_items.is_empty() { - return None; - } - - let target = impl_def.syntax().text_range(); - acc.add(AssistId(assist_id, AssistKind::QuickFix), label, target, |builder| { - let n_existing_items = impl_item_list.assoc_items().count(); - let source_scope = ctx.sema.scope_for_def(trait_); - let target_scope = ctx.sema.scope(impl_item_list.syntax()); - let ast_transform = QualifyPaths::new(&target_scope, &source_scope) - .or(SubstituteTypeParams::for_trait_impl(&source_scope, trait_, impl_def)); - let items = missing_items - .into_iter() - .map(|it| ast_transform::apply(&*ast_transform, it)) - .map(|it| match it { - ast::AssocItem::Fn(def) => ast::AssocItem::Fn(add_body(def)), - ast::AssocItem::TypeAlias(def) => ast::AssocItem::TypeAlias(def.remove_bounds()), - _ => it, - }) - .map(|it| edit::remove_attrs_and_docs(&it)); - let new_impl_item_list = impl_item_list.append_items(items); - let first_new_item = new_impl_item_list.assoc_items().nth(n_existing_items).unwrap(); - - let original_range = impl_item_list.syntax().text_range(); - match ctx.config.snippet_cap { - None => builder.replace(original_range, new_impl_item_list.to_string()), - Some(cap) => { - let mut cursor = Cursor::Before(first_new_item.syntax()); - let placeholder; - if let ast::AssocItem::Fn(func) = &first_new_item { - if let Some(m) = func.syntax().descendants().find_map(ast::MacroCall::cast) { - if m.syntax().text() == "todo!()" { - placeholder = m; - cursor = Cursor::Replace(placeholder.syntax()); - } - } - } - builder.replace_snippet( - cap, - original_range, - render_snippet(cap, new_impl_item_list.syntax(), cursor), - ) - } - }; - }) -} - -fn add_body(fn_def: ast::Fn) -> ast::Fn { - if fn_def.body().is_some() { - return fn_def; - } - let body = make::block_expr(None, Some(make::expr_todo())).indent(IndentLevel(1)); - fn_def.with_body(body) -} - -#[cfg(test)] -mod tests { - use crate::tests::{check_assist, check_assist_not_applicable}; - - use super::*; - - #[test] - fn 
test_add_missing_impl_members() { - check_assist( - add_missing_impl_members, - r#" -trait Foo { - type Output; - - const CONST: usize = 42; - - fn foo(&self); - fn bar(&self); - fn baz(&self); -} - -struct S; - -impl Foo for S { - fn bar(&self) {} -<|> -}"#, - r#" -trait Foo { - type Output; - - const CONST: usize = 42; - - fn foo(&self); - fn bar(&self); - fn baz(&self); -} - -struct S; - -impl Foo for S { - fn bar(&self) {} - $0type Output; - const CONST: usize = 42; - fn foo(&self) { - todo!() - } - fn baz(&self) { - todo!() - } - -}"#, - ); - } - - #[test] - fn test_copied_overriden_members() { - check_assist( - add_missing_impl_members, - r#" -trait Foo { - fn foo(&self); - fn bar(&self) -> bool { true } - fn baz(&self) -> u32 { 42 } -} - -struct S; - -impl Foo for S { - fn bar(&self) {} -<|> -}"#, - r#" -trait Foo { - fn foo(&self); - fn bar(&self) -> bool { true } - fn baz(&self) -> u32 { 42 } -} - -struct S; - -impl Foo for S { - fn bar(&self) {} - fn foo(&self) { - ${0:todo!()} - } - -}"#, - ); - } - - #[test] - fn test_empty_impl_def() { - check_assist( - add_missing_impl_members, - r#" -trait Foo { fn foo(&self); } -struct S; -impl Foo for S { <|> }"#, - r#" -trait Foo { fn foo(&self); } -struct S; -impl Foo for S { - fn foo(&self) { - ${0:todo!()} - } -}"#, - ); - } - - #[test] - fn fill_in_type_params_1() { - check_assist( - add_missing_impl_members, - r#" -trait Foo { fn foo(&self, t: T) -> &T; } -struct S; -impl Foo for S { <|> }"#, - r#" -trait Foo { fn foo(&self, t: T) -> &T; } -struct S; -impl Foo for S { - fn foo(&self, t: u32) -> &u32 { - ${0:todo!()} - } -}"#, - ); - } - - #[test] - fn fill_in_type_params_2() { - check_assist( - add_missing_impl_members, - r#" -trait Foo { fn foo(&self, t: T) -> &T; } -struct S; -impl Foo for S { <|> }"#, - r#" -trait Foo { fn foo(&self, t: T) -> &T; } -struct S; -impl Foo for S { - fn foo(&self, t: U) -> &U { - ${0:todo!()} - } -}"#, - ); - } - - #[test] - fn test_cursor_after_empty_impl_def() { - 
check_assist( - add_missing_impl_members, - r#" -trait Foo { fn foo(&self); } -struct S; -impl Foo for S {}<|>"#, - r#" -trait Foo { fn foo(&self); } -struct S; -impl Foo for S { - fn foo(&self) { - ${0:todo!()} - } -}"#, - ) - } - - #[test] - fn test_qualify_path_1() { - check_assist( - add_missing_impl_members, - r#" -mod foo { - pub struct Bar; - trait Foo { fn foo(&self, bar: Bar); } -} -struct S; -impl foo::Foo for S { <|> }"#, - r#" -mod foo { - pub struct Bar; - trait Foo { fn foo(&self, bar: Bar); } -} -struct S; -impl foo::Foo for S { - fn foo(&self, bar: foo::Bar) { - ${0:todo!()} - } -}"#, - ); - } - - #[test] - fn test_qualify_path_generic() { - check_assist( - add_missing_impl_members, - r#" -mod foo { - pub struct Bar; - trait Foo { fn foo(&self, bar: Bar); } -} -struct S; -impl foo::Foo for S { <|> }"#, - r#" -mod foo { - pub struct Bar; - trait Foo { fn foo(&self, bar: Bar); } -} -struct S; -impl foo::Foo for S { - fn foo(&self, bar: foo::Bar) { - ${0:todo!()} - } -}"#, - ); - } - - #[test] - fn test_qualify_path_and_substitute_param() { - check_assist( - add_missing_impl_members, - r#" -mod foo { - pub struct Bar; - trait Foo { fn foo(&self, bar: Bar); } -} -struct S; -impl foo::Foo for S { <|> }"#, - r#" -mod foo { - pub struct Bar; - trait Foo { fn foo(&self, bar: Bar); } -} -struct S; -impl foo::Foo for S { - fn foo(&self, bar: foo::Bar) { - ${0:todo!()} - } -}"#, - ); - } - - #[test] - fn test_substitute_param_no_qualify() { - // when substituting params, the substituted param should not be qualified! 
- check_assist( - add_missing_impl_members, - r#" -mod foo { - trait Foo { fn foo(&self, bar: T); } - pub struct Param; -} -struct Param; -struct S; -impl foo::Foo for S { <|> }"#, - r#" -mod foo { - trait Foo { fn foo(&self, bar: T); } - pub struct Param; -} -struct Param; -struct S; -impl foo::Foo for S { - fn foo(&self, bar: Param) { - ${0:todo!()} - } -}"#, - ); - } - - #[test] - fn test_qualify_path_associated_item() { - check_assist( - add_missing_impl_members, - r#" -mod foo { - pub struct Bar; - impl Bar { type Assoc = u32; } - trait Foo { fn foo(&self, bar: Bar::Assoc); } -} -struct S; -impl foo::Foo for S { <|> }"#, - r#" -mod foo { - pub struct Bar; - impl Bar { type Assoc = u32; } - trait Foo { fn foo(&self, bar: Bar::Assoc); } -} -struct S; -impl foo::Foo for S { - fn foo(&self, bar: foo::Bar::Assoc) { - ${0:todo!()} - } -}"#, - ); - } - - #[test] - fn test_qualify_path_nested() { - check_assist( - add_missing_impl_members, - r#" -mod foo { - pub struct Bar; - pub struct Baz; - trait Foo { fn foo(&self, bar: Bar); } -} -struct S; -impl foo::Foo for S { <|> }"#, - r#" -mod foo { - pub struct Bar; - pub struct Baz; - trait Foo { fn foo(&self, bar: Bar); } -} -struct S; -impl foo::Foo for S { - fn foo(&self, bar: foo::Bar) { - ${0:todo!()} - } -}"#, - ); - } - - #[test] - fn test_qualify_path_fn_trait_notation() { - check_assist( - add_missing_impl_members, - r#" -mod foo { - pub trait Fn { type Output; } - trait Foo { fn foo(&self, bar: dyn Fn(u32) -> i32); } -} -struct S; -impl foo::Foo for S { <|> }"#, - r#" -mod foo { - pub trait Fn { type Output; } - trait Foo { fn foo(&self, bar: dyn Fn(u32) -> i32); } -} -struct S; -impl foo::Foo for S { - fn foo(&self, bar: dyn Fn(u32) -> i32) { - ${0:todo!()} - } -}"#, - ); - } - - #[test] - fn test_empty_trait() { - check_assist_not_applicable( - add_missing_impl_members, - r#" -trait Foo; -struct S; -impl Foo for S { <|> }"#, - ) - } - - #[test] - fn test_ignore_unnamed_trait_members_and_default_methods() { - 
check_assist_not_applicable( - add_missing_impl_members, - r#" -trait Foo { - fn (arg: u32); - fn valid(some: u32) -> bool { false } -} -struct S; -impl Foo for S { <|> }"#, - ) - } - - #[test] - fn test_with_docstring_and_attrs() { - check_assist( - add_missing_impl_members, - r#" -#[doc(alias = "test alias")] -trait Foo { - /// doc string - type Output; - - #[must_use] - fn foo(&self); -} -struct S; -impl Foo for S {}<|>"#, - r#" -#[doc(alias = "test alias")] -trait Foo { - /// doc string - type Output; - - #[must_use] - fn foo(&self); -} -struct S; -impl Foo for S { - $0type Output; - fn foo(&self) { - todo!() - } -}"#, - ) - } - - #[test] - fn test_default_methods() { - check_assist( - add_missing_default_members, - r#" -trait Foo { - type Output; - - const CONST: usize = 42; - - fn valid(some: u32) -> bool { false } - fn foo(some: u32) -> bool; -} -struct S; -impl Foo for S { <|> }"#, - r#" -trait Foo { - type Output; - - const CONST: usize = 42; - - fn valid(some: u32) -> bool { false } - fn foo(some: u32) -> bool; -} -struct S; -impl Foo for S { - $0fn valid(some: u32) -> bool { false } -}"#, - ) - } - - #[test] - fn test_generic_single_default_parameter() { - check_assist( - add_missing_impl_members, - r#" -trait Foo { - fn bar(&self, other: &T); -} - -struct S; -impl Foo for S { <|> }"#, - r#" -trait Foo { - fn bar(&self, other: &T); -} - -struct S; -impl Foo for S { - fn bar(&self, other: &Self) { - ${0:todo!()} - } -}"#, - ) - } - - #[test] - fn test_generic_default_parameter_is_second() { - check_assist( - add_missing_impl_members, - r#" -trait Foo { - fn bar(&self, this: &T1, that: &T2); -} - -struct S; -impl Foo for S { <|> }"#, - r#" -trait Foo { - fn bar(&self, this: &T1, that: &T2); -} - -struct S; -impl Foo for S { - fn bar(&self, this: &T, that: &Self) { - ${0:todo!()} - } -}"#, - ) - } - - #[test] - fn test_assoc_type_bounds_are_removed() { - check_assist( - add_missing_impl_members, - r#" -trait Tr { - type Ty: Copy + 'static; -} - -impl Tr for 
()<|> { -}"#, - r#" -trait Tr { - type Ty: Copy + 'static; -} - -impl Tr for () { - $0type Ty; -}"#, - ) - } -} diff --git a/crates/ra_assists/src/handlers/add_turbo_fish.rs b/crates/ra_assists/src/handlers/add_turbo_fish.rs deleted file mode 100644 index 0c565e89af..0000000000 --- a/crates/ra_assists/src/handlers/add_turbo_fish.rs +++ /dev/null @@ -1,164 +0,0 @@ -use ra_ide_db::defs::{classify_name_ref, Definition, NameRefClass}; -use ra_syntax::{ast, AstNode, SyntaxKind, T}; -use test_utils::mark; - -use crate::{ - assist_context::{AssistContext, Assists}, - AssistId, AssistKind, -}; - -// Assist: add_turbo_fish -// -// Adds `::<_>` to a call of a generic method or function. -// -// ``` -// fn make() -> T { todo!() } -// fn main() { -// let x = make<|>(); -// } -// ``` -// -> -// ``` -// fn make() -> T { todo!() } -// fn main() { -// let x = make::<${0:_}>(); -// } -// ``` -pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let ident = ctx.find_token_at_offset(SyntaxKind::IDENT).or_else(|| { - let arg_list = ctx.find_node_at_offset::()?; - if arg_list.args().count() > 0 { - return None; - } - mark::hit!(add_turbo_fish_after_call); - arg_list.l_paren_token()?.prev_token().filter(|it| it.kind() == SyntaxKind::IDENT) - })?; - let next_token = ident.next_token()?; - if next_token.kind() == T![::] { - mark::hit!(add_turbo_fish_one_fish_is_enough); - return None; - } - let name_ref = ast::NameRef::cast(ident.parent())?; - let def = match classify_name_ref(&ctx.sema, &name_ref)? { - NameRefClass::Definition(def) => def, - NameRefClass::FieldShorthand { .. 
} => return None, - }; - let fun = match def { - Definition::ModuleDef(hir::ModuleDef::Function(it)) => it, - _ => return None, - }; - let generics = hir::GenericDef::Function(fun).params(ctx.sema.db); - if generics.is_empty() { - mark::hit!(add_turbo_fish_non_generic); - return None; - } - acc.add( - AssistId("add_turbo_fish", AssistKind::RefactorRewrite), - "Add `::<>`", - ident.text_range(), - |builder| match ctx.config.snippet_cap { - Some(cap) => builder.insert_snippet(cap, ident.text_range().end(), "::<${0:_}>"), - None => builder.insert(ident.text_range().end(), "::<_>"), - }, - ) -} - -#[cfg(test)] -mod tests { - use crate::tests::{check_assist, check_assist_not_applicable}; - - use super::*; - use test_utils::mark; - - #[test] - fn add_turbo_fish_function() { - check_assist( - add_turbo_fish, - r#" -fn make() -> T {} -fn main() { - make<|>(); -} -"#, - r#" -fn make() -> T {} -fn main() { - make::<${0:_}>(); -} -"#, - ); - } - - #[test] - fn add_turbo_fish_after_call() { - mark::check!(add_turbo_fish_after_call); - check_assist( - add_turbo_fish, - r#" -fn make() -> T {} -fn main() { - make()<|>; -} -"#, - r#" -fn make() -> T {} -fn main() { - make::<${0:_}>(); -} -"#, - ); - } - - #[test] - fn add_turbo_fish_method() { - check_assist( - add_turbo_fish, - r#" -struct S; -impl S { - fn make(&self) -> T {} -} -fn main() { - S.make<|>(); -} -"#, - r#" -struct S; -impl S { - fn make(&self) -> T {} -} -fn main() { - S.make::<${0:_}>(); -} -"#, - ); - } - - #[test] - fn add_turbo_fish_one_fish_is_enough() { - mark::check!(add_turbo_fish_one_fish_is_enough); - check_assist_not_applicable( - add_turbo_fish, - r#" -fn make() -> T {} -fn main() { - make<|>::<()>(); -} -"#, - ); - } - - #[test] - fn add_turbo_fish_non_generic() { - mark::check!(add_turbo_fish_non_generic); - check_assist_not_applicable( - add_turbo_fish, - r#" -fn make() -> () {} -fn main() { - make<|>(); -} -"#, - ); - } -} diff --git a/crates/ra_assists/src/handlers/apply_demorgan.rs 
b/crates/ra_assists/src/handlers/apply_demorgan.rs deleted file mode 100644 index de701f8b83..0000000000 --- a/crates/ra_assists/src/handlers/apply_demorgan.rs +++ /dev/null @@ -1,93 +0,0 @@ -use ra_syntax::ast::{self, AstNode}; - -use crate::{utils::invert_boolean_expression, AssistContext, AssistId, AssistKind, Assists}; - -// Assist: apply_demorgan -// -// Apply [De Morgan's law](https://en.wikipedia.org/wiki/De_Morgan%27s_laws). -// This transforms expressions of the form `!l || !r` into `!(l && r)`. -// This also works with `&&`. This assist can only be applied with the cursor -// on either `||` or `&&`, with both operands being a negation of some kind. -// This means something of the form `!x` or `x != y`. -// -// ``` -// fn main() { -// if x != 4 ||<|> !y {} -// } -// ``` -// -> -// ``` -// fn main() { -// if !(x == 4 && y) {} -// } -// ``` -pub(crate) fn apply_demorgan(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let expr = ctx.find_node_at_offset::()?; - let op = expr.op_kind()?; - let op_range = expr.op_token()?.text_range(); - let opposite_op = opposite_logic_op(op)?; - let cursor_in_range = op_range.contains_range(ctx.frange.range); - if !cursor_in_range { - return None; - } - - let lhs = expr.lhs()?; - let lhs_range = lhs.syntax().text_range(); - let not_lhs = invert_boolean_expression(lhs); - - let rhs = expr.rhs()?; - let rhs_range = rhs.syntax().text_range(); - let not_rhs = invert_boolean_expression(rhs); - - acc.add( - AssistId("apply_demorgan", AssistKind::RefactorRewrite), - "Apply De Morgan's law", - op_range, - |edit| { - edit.replace(op_range, opposite_op); - edit.replace(lhs_range, format!("!({}", not_lhs.syntax().text())); - edit.replace(rhs_range, format!("{})", not_rhs.syntax().text())); - }, - ) -} - -// Return the opposite text for a given logical operator, if it makes sense -fn opposite_logic_op(kind: ast::BinOp) -> Option<&'static str> { - match kind { - ast::BinOp::BooleanOr => Some("&&"), - ast::BinOp::BooleanAnd => 
Some("||"), - _ => None, - } -} - -#[cfg(test)] -mod tests { - use super::*; - - use crate::tests::{check_assist, check_assist_not_applicable}; - - #[test] - fn demorgan_turns_and_into_or() { - check_assist(apply_demorgan, "fn f() { !x &&<|> !x }", "fn f() { !(x || x) }") - } - - #[test] - fn demorgan_turns_or_into_and() { - check_assist(apply_demorgan, "fn f() { !x ||<|> !x }", "fn f() { !(x && x) }") - } - - #[test] - fn demorgan_removes_inequality() { - check_assist(apply_demorgan, "fn f() { x != x ||<|> !x }", "fn f() { !(x == x && x) }") - } - - #[test] - fn demorgan_general_case() { - check_assist(apply_demorgan, "fn f() { x ||<|> x }", "fn f() { !(!x && !x) }") - } - - #[test] - fn demorgan_doesnt_apply_with_cursor_not_on_op() { - check_assist_not_applicable(apply_demorgan, "fn f() { <|> !x || !x }") - } -} diff --git a/crates/ra_assists/src/handlers/auto_import.rs b/crates/ra_assists/src/handlers/auto_import.rs deleted file mode 100644 index 01e7b7a44c..0000000000 --- a/crates/ra_assists/src/handlers/auto_import.rs +++ /dev/null @@ -1,1089 +0,0 @@ -use std::collections::BTreeSet; - -use either::Either; -use hir::{ - AsAssocItem, AssocItemContainer, ModPath, Module, ModuleDef, PathResolution, Semantics, Trait, - Type, -}; -use ra_ide_db::{imports_locator, RootDatabase}; -use ra_prof::profile; -use ra_syntax::{ - ast::{self, AstNode}, - SyntaxNode, -}; -use rustc_hash::FxHashSet; - -use crate::{ - utils::insert_use_statement, AssistContext, AssistId, AssistKind, Assists, GroupLabel, -}; - -// Assist: auto_import -// -// If the name is unresolved, provides all possible imports for it. 
-// -// ``` -// fn main() { -// let map = HashMap<|>::new(); -// } -// # pub mod std { pub mod collections { pub struct HashMap { } } } -// ``` -// -> -// ``` -// use std::collections::HashMap; -// -// fn main() { -// let map = HashMap::new(); -// } -// # pub mod std { pub mod collections { pub struct HashMap { } } } -// ``` -pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let auto_import_assets = AutoImportAssets::new(ctx)?; - let proposed_imports = auto_import_assets.search_for_imports(ctx); - if proposed_imports.is_empty() { - return None; - } - - let range = ctx.sema.original_range(&auto_import_assets.syntax_under_caret).range; - let group = auto_import_assets.get_import_group_message(); - for import in proposed_imports { - acc.add_group( - &group, - AssistId("auto_import", AssistKind::QuickFix), - format!("Import `{}`", &import), - range, - |builder| { - insert_use_statement( - &auto_import_assets.syntax_under_caret, - &import, - ctx, - builder.text_edit_builder(), - ); - }, - ); - } - Some(()) -} - -#[derive(Debug)] -struct AutoImportAssets { - import_candidate: ImportCandidate, - module_with_name_to_import: Module, - syntax_under_caret: SyntaxNode, -} - -impl AutoImportAssets { - fn new(ctx: &AssistContext) -> Option { - if let Some(path_under_caret) = ctx.find_node_at_offset_with_descend::() { - Self::for_regular_path(path_under_caret, &ctx) - } else { - Self::for_method_call(ctx.find_node_at_offset_with_descend()?, &ctx) - } - } - - fn for_method_call(method_call: ast::MethodCallExpr, ctx: &AssistContext) -> Option { - let syntax_under_caret = method_call.syntax().to_owned(); - let module_with_name_to_import = ctx.sema.scope(&syntax_under_caret).module()?; - Some(Self { - import_candidate: ImportCandidate::for_method_call(&ctx.sema, &method_call)?, - module_with_name_to_import, - syntax_under_caret, - }) - } - - fn for_regular_path(path_under_caret: ast::Path, ctx: &AssistContext) -> Option { - let syntax_under_caret = 
path_under_caret.syntax().to_owned(); - if syntax_under_caret.ancestors().find_map(ast::Use::cast).is_some() { - return None; - } - - let module_with_name_to_import = ctx.sema.scope(&syntax_under_caret).module()?; - Some(Self { - import_candidate: ImportCandidate::for_regular_path(&ctx.sema, &path_under_caret)?, - module_with_name_to_import, - syntax_under_caret, - }) - } - - fn get_search_query(&self) -> &str { - match &self.import_candidate { - ImportCandidate::UnqualifiedName(name) => name, - ImportCandidate::QualifierStart(qualifier_start) => qualifier_start, - ImportCandidate::TraitAssocItem(_, trait_assoc_item_name) => trait_assoc_item_name, - ImportCandidate::TraitMethod(_, trait_method_name) => trait_method_name, - } - } - - fn get_import_group_message(&self) -> GroupLabel { - let name = match &self.import_candidate { - ImportCandidate::UnqualifiedName(name) => format!("Import {}", name), - ImportCandidate::QualifierStart(qualifier_start) => { - format!("Import {}", qualifier_start) - } - ImportCandidate::TraitAssocItem(_, trait_assoc_item_name) => { - format!("Import a trait for item {}", trait_assoc_item_name) - } - ImportCandidate::TraitMethod(_, trait_method_name) => { - format!("Import a trait for method {}", trait_method_name) - } - }; - GroupLabel(name) - } - - fn search_for_imports(&self, ctx: &AssistContext) -> BTreeSet { - let _p = profile("auto_import::search_for_imports"); - let db = ctx.db(); - let current_crate = self.module_with_name_to_import.krate(); - imports_locator::find_imports(&ctx.sema, current_crate, &self.get_search_query()) - .into_iter() - .filter_map(|candidate| match &self.import_candidate { - ImportCandidate::TraitAssocItem(assoc_item_type, _) => { - let located_assoc_item = match candidate { - Either::Left(ModuleDef::Function(located_function)) => located_function - .as_assoc_item(db) - .map(|assoc| assoc.container(db)) - .and_then(Self::assoc_to_trait), - Either::Left(ModuleDef::Const(located_const)) => located_const - 
.as_assoc_item(db) - .map(|assoc| assoc.container(db)) - .and_then(Self::assoc_to_trait), - _ => None, - }?; - - let mut trait_candidates = FxHashSet::default(); - trait_candidates.insert(located_assoc_item.into()); - - assoc_item_type - .iterate_path_candidates( - db, - current_crate, - &trait_candidates, - None, - |_, assoc| Self::assoc_to_trait(assoc.container(db)), - ) - .map(ModuleDef::from) - .map(Either::Left) - } - ImportCandidate::TraitMethod(function_callee, _) => { - let located_assoc_item = - if let Either::Left(ModuleDef::Function(located_function)) = candidate { - located_function - .as_assoc_item(db) - .map(|assoc| assoc.container(db)) - .and_then(Self::assoc_to_trait) - } else { - None - }?; - - let mut trait_candidates = FxHashSet::default(); - trait_candidates.insert(located_assoc_item.into()); - - function_callee - .iterate_method_candidates( - db, - current_crate, - &trait_candidates, - None, - |_, function| { - Self::assoc_to_trait(function.as_assoc_item(db)?.container(db)) - }, - ) - .map(ModuleDef::from) - .map(Either::Left) - } - _ => Some(candidate), - }) - .filter_map(|candidate| match candidate { - Either::Left(module_def) => { - self.module_with_name_to_import.find_use_path(db, module_def) - } - Either::Right(macro_def) => { - self.module_with_name_to_import.find_use_path(db, macro_def) - } - }) - .filter(|use_path| !use_path.segments.is_empty()) - .take(20) - .collect::>() - } - - fn assoc_to_trait(assoc: AssocItemContainer) -> Option { - if let AssocItemContainer::Trait(extracted_trait) = assoc { - Some(extracted_trait) - } else { - None - } - } -} - -#[derive(Debug)] -enum ImportCandidate { - /// Simple name like 'HashMap' - UnqualifiedName(String), - /// First part of the qualified name. - /// For 'std::collections::HashMap', that will be 'std'. - QualifierStart(String), - /// A trait associated function (with no self parameter) or associated constant. 
- /// For 'test_mod::TestEnum::test_function', `Type` is the `test_mod::TestEnum` expression type - /// and `String` is the `test_function` - TraitAssocItem(Type, String), - /// A trait method with self parameter. - /// For 'test_enum.test_method()', `Type` is the `test_enum` expression type - /// and `String` is the `test_method` - TraitMethod(Type, String), -} - -impl ImportCandidate { - fn for_method_call( - sema: &Semantics, - method_call: &ast::MethodCallExpr, - ) -> Option { - if sema.resolve_method_call(method_call).is_some() { - return None; - } - Some(Self::TraitMethod( - sema.type_of_expr(&method_call.expr()?)?, - method_call.name_ref()?.syntax().to_string(), - )) - } - - fn for_regular_path( - sema: &Semantics, - path_under_caret: &ast::Path, - ) -> Option { - if sema.resolve_path(path_under_caret).is_some() { - return None; - } - - let segment = path_under_caret.segment()?; - if let Some(qualifier) = path_under_caret.qualifier() { - let qualifier_start = qualifier.syntax().descendants().find_map(ast::NameRef::cast)?; - let qualifier_start_path = - qualifier_start.syntax().ancestors().find_map(ast::Path::cast)?; - if let Some(qualifier_start_resolution) = sema.resolve_path(&qualifier_start_path) { - let qualifier_resolution = if qualifier_start_path == qualifier { - qualifier_start_resolution - } else { - sema.resolve_path(&qualifier)? 
- }; - if let PathResolution::Def(ModuleDef::Adt(assoc_item_path)) = qualifier_resolution { - Some(ImportCandidate::TraitAssocItem( - assoc_item_path.ty(sema.db), - segment.syntax().to_string(), - )) - } else { - None - } - } else { - Some(ImportCandidate::QualifierStart(qualifier_start.syntax().to_string())) - } - } else { - Some(ImportCandidate::UnqualifiedName( - segment.syntax().descendants().find_map(ast::NameRef::cast)?.syntax().to_string(), - )) - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target}; - - #[test] - fn applicable_when_found_an_import() { - check_assist( - auto_import, - r" - <|>PubStruct - - pub mod PubMod { - pub struct PubStruct; - } - ", - r" - use PubMod::PubStruct; - - PubStruct - - pub mod PubMod { - pub struct PubStruct; - } - ", - ); - } - - #[test] - fn applicable_when_found_an_import_in_macros() { - check_assist( - auto_import, - r" - macro_rules! foo { - ($i:ident) => { fn foo(a: $i) {} } - } - foo!(Pub<|>Struct); - - pub mod PubMod { - pub struct PubStruct; - } - ", - r" - use PubMod::PubStruct; - - macro_rules! 
foo { - ($i:ident) => { fn foo(a: $i) {} } - } - foo!(PubStruct); - - pub mod PubMod { - pub struct PubStruct; - } - ", - ); - } - - #[test] - fn auto_imports_are_merged() { - check_assist( - auto_import, - r" - use PubMod::PubStruct1; - - struct Test { - test: Pub<|>Struct2, - } - - pub mod PubMod { - pub struct PubStruct1; - pub struct PubStruct2 { - _t: T, - } - } - ", - r" - use PubMod::{PubStruct2, PubStruct1}; - - struct Test { - test: PubStruct2, - } - - pub mod PubMod { - pub struct PubStruct1; - pub struct PubStruct2 { - _t: T, - } - } - ", - ); - } - - #[test] - fn applicable_when_found_multiple_imports() { - check_assist( - auto_import, - r" - PubSt<|>ruct - - pub mod PubMod1 { - pub struct PubStruct; - } - pub mod PubMod2 { - pub struct PubStruct; - } - pub mod PubMod3 { - pub struct PubStruct; - } - ", - r" - use PubMod3::PubStruct; - - PubStruct - - pub mod PubMod1 { - pub struct PubStruct; - } - pub mod PubMod2 { - pub struct PubStruct; - } - pub mod PubMod3 { - pub struct PubStruct; - } - ", - ); - } - - #[test] - fn not_applicable_for_already_imported_types() { - check_assist_not_applicable( - auto_import, - r" - use PubMod::PubStruct; - - PubStruct<|> - - pub mod PubMod { - pub struct PubStruct; - } - ", - ); - } - - #[test] - fn not_applicable_for_types_with_private_paths() { - check_assist_not_applicable( - auto_import, - r" - PrivateStruct<|> - - pub mod PubMod { - struct PrivateStruct; - } - ", - ); - } - - #[test] - fn not_applicable_when_no_imports_found() { - check_assist_not_applicable( - auto_import, - " - PubStruct<|>", - ); - } - - #[test] - fn not_applicable_in_import_statements() { - check_assist_not_applicable( - auto_import, - r" - use PubStruct<|>; - - pub mod PubMod { - pub struct PubStruct; - }", - ); - } - - #[test] - fn function_import() { - check_assist( - auto_import, - r" - test_function<|> - - pub mod PubMod { - pub fn test_function() {}; - } - ", - r" - use PubMod::test_function; - - test_function - - pub mod PubMod { - 
pub fn test_function() {}; - } - ", - ); - } - - #[test] - fn macro_import() { - check_assist( - auto_import, - r" -//- /lib.rs crate:crate_with_macro -#[macro_export] -macro_rules! foo { - () => () -} - -//- /main.rs crate:main deps:crate_with_macro -fn main() { - foo<|> -} -", - r"use crate_with_macro::foo; - -fn main() { - foo -} -", - ); - } - - #[test] - fn auto_import_target() { - check_assist_target( - auto_import, - r" - struct AssistInfo { - group_label: Option<<|>GroupLabel>, - } - - mod m { pub struct GroupLabel; } - ", - "GroupLabel", - ) - } - - #[test] - fn not_applicable_when_path_start_is_imported() { - check_assist_not_applicable( - auto_import, - r" - pub mod mod1 { - pub mod mod2 { - pub mod mod3 { - pub struct TestStruct; - } - } - } - - use mod1::mod2; - fn main() { - mod2::mod3::TestStruct<|> - } - ", - ); - } - - #[test] - fn not_applicable_for_imported_function() { - check_assist_not_applicable( - auto_import, - r" - pub mod test_mod { - pub fn test_function() {} - } - - use test_mod::test_function; - fn main() { - test_function<|> - } - ", - ); - } - - #[test] - fn associated_struct_function() { - check_assist( - auto_import, - r" - mod test_mod { - pub struct TestStruct {} - impl TestStruct { - pub fn test_function() {} - } - } - - fn main() { - TestStruct::test_function<|> - } - ", - r" - use test_mod::TestStruct; - - mod test_mod { - pub struct TestStruct {} - impl TestStruct { - pub fn test_function() {} - } - } - - fn main() { - TestStruct::test_function - } - ", - ); - } - - #[test] - fn associated_struct_const() { - check_assist( - auto_import, - r" - mod test_mod { - pub struct TestStruct {} - impl TestStruct { - const TEST_CONST: u8 = 42; - } - } - - fn main() { - TestStruct::TEST_CONST<|> - } - ", - r" - use test_mod::TestStruct; - - mod test_mod { - pub struct TestStruct {} - impl TestStruct { - const TEST_CONST: u8 = 42; - } - } - - fn main() { - TestStruct::TEST_CONST - } - ", - ); - } - - #[test] - fn 
associated_trait_function() { - check_assist( - auto_import, - r" - mod test_mod { - pub trait TestTrait { - fn test_function(); - } - pub struct TestStruct {} - impl TestTrait for TestStruct { - fn test_function() {} - } - } - - fn main() { - test_mod::TestStruct::test_function<|> - } - ", - r" - use test_mod::TestTrait; - - mod test_mod { - pub trait TestTrait { - fn test_function(); - } - pub struct TestStruct {} - impl TestTrait for TestStruct { - fn test_function() {} - } - } - - fn main() { - test_mod::TestStruct::test_function - } - ", - ); - } - - #[test] - fn not_applicable_for_imported_trait_for_function() { - check_assist_not_applicable( - auto_import, - r" - mod test_mod { - pub trait TestTrait { - fn test_function(); - } - pub trait TestTrait2 { - fn test_function(); - } - pub enum TestEnum { - One, - Two, - } - impl TestTrait2 for TestEnum { - fn test_function() {} - } - impl TestTrait for TestEnum { - fn test_function() {} - } - } - - use test_mod::TestTrait2; - fn main() { - test_mod::TestEnum::test_function<|>; - } - ", - ) - } - - #[test] - fn associated_trait_const() { - check_assist( - auto_import, - r" - mod test_mod { - pub trait TestTrait { - const TEST_CONST: u8; - } - pub struct TestStruct {} - impl TestTrait for TestStruct { - const TEST_CONST: u8 = 42; - } - } - - fn main() { - test_mod::TestStruct::TEST_CONST<|> - } - ", - r" - use test_mod::TestTrait; - - mod test_mod { - pub trait TestTrait { - const TEST_CONST: u8; - } - pub struct TestStruct {} - impl TestTrait for TestStruct { - const TEST_CONST: u8 = 42; - } - } - - fn main() { - test_mod::TestStruct::TEST_CONST - } - ", - ); - } - - #[test] - fn not_applicable_for_imported_trait_for_const() { - check_assist_not_applicable( - auto_import, - r" - mod test_mod { - pub trait TestTrait { - const TEST_CONST: u8; - } - pub trait TestTrait2 { - const TEST_CONST: f64; - } - pub enum TestEnum { - One, - Two, - } - impl TestTrait2 for TestEnum { - const TEST_CONST: f64 = 42.0; - } - impl 
TestTrait for TestEnum { - const TEST_CONST: u8 = 42; - } - } - - use test_mod::TestTrait2; - fn main() { - test_mod::TestEnum::TEST_CONST<|>; - } - ", - ) - } - - #[test] - fn trait_method() { - check_assist( - auto_import, - r" - mod test_mod { - pub trait TestTrait { - fn test_method(&self); - } - pub struct TestStruct {} - impl TestTrait for TestStruct { - fn test_method(&self) {} - } - } - - fn main() { - let test_struct = test_mod::TestStruct {}; - test_struct.test_meth<|>od() - } - ", - r" - use test_mod::TestTrait; - - mod test_mod { - pub trait TestTrait { - fn test_method(&self); - } - pub struct TestStruct {} - impl TestTrait for TestStruct { - fn test_method(&self) {} - } - } - - fn main() { - let test_struct = test_mod::TestStruct {}; - test_struct.test_method() - } - ", - ); - } - - #[test] - fn trait_method_cross_crate() { - check_assist( - auto_import, - r" - //- /main.rs crate:main deps:dep - fn main() { - let test_struct = dep::test_mod::TestStruct {}; - test_struct.test_meth<|>od() - } - //- /dep.rs crate:dep - pub mod test_mod { - pub trait TestTrait { - fn test_method(&self); - } - pub struct TestStruct {} - impl TestTrait for TestStruct { - fn test_method(&self) {} - } - } - ", - r" - use dep::test_mod::TestTrait; - - fn main() { - let test_struct = dep::test_mod::TestStruct {}; - test_struct.test_method() - } - ", - ); - } - - #[test] - fn assoc_fn_cross_crate() { - check_assist( - auto_import, - r" - //- /main.rs crate:main deps:dep - fn main() { - dep::test_mod::TestStruct::test_func<|>tion - } - //- /dep.rs crate:dep - pub mod test_mod { - pub trait TestTrait { - fn test_function(); - } - pub struct TestStruct {} - impl TestTrait for TestStruct { - fn test_function() {} - } - } - ", - r" - use dep::test_mod::TestTrait; - - fn main() { - dep::test_mod::TestStruct::test_function - } - ", - ); - } - - #[test] - fn assoc_const_cross_crate() { - check_assist( - auto_import, - r" - //- /main.rs crate:main deps:dep - fn main() { - 
dep::test_mod::TestStruct::CONST<|> - } - //- /dep.rs crate:dep - pub mod test_mod { - pub trait TestTrait { - const CONST: bool; - } - pub struct TestStruct {} - impl TestTrait for TestStruct { - const CONST: bool = true; - } - } - ", - r" - use dep::test_mod::TestTrait; - - fn main() { - dep::test_mod::TestStruct::CONST - } - ", - ); - } - - #[test] - fn assoc_fn_as_method_cross_crate() { - check_assist_not_applicable( - auto_import, - r" - //- /main.rs crate:main deps:dep - fn main() { - let test_struct = dep::test_mod::TestStruct {}; - test_struct.test_func<|>tion() - } - //- /dep.rs crate:dep - pub mod test_mod { - pub trait TestTrait { - fn test_function(); - } - pub struct TestStruct {} - impl TestTrait for TestStruct { - fn test_function() {} - } - } - ", - ); - } - - #[test] - fn private_trait_cross_crate() { - check_assist_not_applicable( - auto_import, - r" - //- /main.rs crate:main deps:dep - fn main() { - let test_struct = dep::test_mod::TestStruct {}; - test_struct.test_meth<|>od() - } - //- /dep.rs crate:dep - pub mod test_mod { - trait TestTrait { - fn test_method(&self); - } - pub struct TestStruct {} - impl TestTrait for TestStruct { - fn test_method(&self) {} - } - } - ", - ); - } - - #[test] - fn not_applicable_for_imported_trait_for_method() { - check_assist_not_applicable( - auto_import, - r" - mod test_mod { - pub trait TestTrait { - fn test_method(&self); - } - pub trait TestTrait2 { - fn test_method(&self); - } - pub enum TestEnum { - One, - Two, - } - impl TestTrait2 for TestEnum { - fn test_method(&self) {} - } - impl TestTrait for TestEnum { - fn test_method(&self) {} - } - } - - use test_mod::TestTrait2; - fn main() { - let one = test_mod::TestEnum::One; - one.test<|>_method(); - } - ", - ) - } - - #[test] - fn dep_import() { - check_assist( - auto_import, - r" -//- /lib.rs crate:dep -pub struct Struct; - -//- /main.rs crate:main deps:dep -fn main() { - Struct<|> -} -", - r"use dep::Struct; - -fn main() { - Struct -} -", - ); - } - - 
#[test] - fn whole_segment() { - // Tests that only imports whose last segment matches the identifier get suggested. - check_assist( - auto_import, - r" -//- /lib.rs crate:dep -pub mod fmt { - pub trait Display {} -} - -pub fn panic_fmt() {} - -//- /main.rs crate:main deps:dep -struct S; - -impl f<|>mt::Display for S {} -", - r"use dep::fmt; - -struct S; - -impl fmt::Display for S {} -", - ); - } - - #[test] - fn macro_generated() { - // Tests that macro-generated items are suggested from external crates. - check_assist( - auto_import, - r" -//- /lib.rs crate:dep -macro_rules! mac { - () => { - pub struct Cheese; - }; -} - -mac!(); - -//- /main.rs crate:main deps:dep -fn main() { - Cheese<|>; -} -", - r"use dep::Cheese; - -fn main() { - Cheese; -} -", - ); - } - - #[test] - fn casing() { - // Tests that differently cased names don't interfere and we only suggest the matching one. - check_assist( - auto_import, - r" -//- /lib.rs crate:dep -pub struct FMT; -pub struct fmt; - -//- /main.rs crate:main deps:dep -fn main() { - FMT<|>; -} -", - r"use dep::FMT; - -fn main() { - FMT; -} -", - ); - } -} diff --git a/crates/ra_assists/src/handlers/change_return_type_to_result.rs b/crates/ra_assists/src/handlers/change_return_type_to_result.rs deleted file mode 100644 index b83c944049..0000000000 --- a/crates/ra_assists/src/handlers/change_return_type_to_result.rs +++ /dev/null @@ -1,991 +0,0 @@ -use ra_syntax::{ - ast::{self, BlockExpr, Expr, LoopBodyOwner}, - AstNode, SyntaxNode, -}; - -use crate::{AssistContext, AssistId, AssistKind, Assists}; -use test_utils::mark; - -// Assist: change_return_type_to_result -// -// Change the function's return type to Result. 
-// -// ``` -// fn foo() -> i32<|> { 42i32 } -// ``` -// -> -// ``` -// fn foo() -> Result { Ok(42i32) } -// ``` -pub(crate) fn change_return_type_to_result(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let ret_type = ctx.find_node_at_offset::()?; - // FIXME: extend to lambdas as well - let fn_def = ret_type.syntax().parent().and_then(ast::Fn::cast)?; - - let type_ref = &ret_type.ty()?; - let ret_type_str = type_ref.syntax().text().to_string(); - let first_part_ret_type = ret_type_str.splitn(2, '<').next(); - if let Some(ret_type_first_part) = first_part_ret_type { - if ret_type_first_part.ends_with("Result") { - mark::hit!(change_return_type_to_result_simple_return_type_already_result); - return None; - } - } - - let block_expr = &fn_def.body()?; - - acc.add( - AssistId("change_return_type_to_result", AssistKind::RefactorRewrite), - "Wrap return type in Result", - type_ref.syntax().text_range(), - |builder| { - let mut tail_return_expr_collector = TailReturnCollector::new(); - tail_return_expr_collector.collect_jump_exprs(block_expr, false); - tail_return_expr_collector.collect_tail_exprs(block_expr); - - for ret_expr_arg in tail_return_expr_collector.exprs_to_wrap { - builder.replace_node_and_indent(&ret_expr_arg, format!("Ok({})", ret_expr_arg)); - } - - match ctx.config.snippet_cap { - Some(cap) => { - let snippet = format!("Result<{}, ${{0:_}}>", type_ref); - builder.replace_snippet(cap, type_ref.syntax().text_range(), snippet) - } - None => builder - .replace(type_ref.syntax().text_range(), format!("Result<{}, _>", type_ref)), - } - }, - ) -} - -struct TailReturnCollector { - exprs_to_wrap: Vec, -} - -impl TailReturnCollector { - fn new() -> Self { - Self { exprs_to_wrap: vec![] } - } - /// Collect all`return` expression - fn collect_jump_exprs(&mut self, block_expr: &BlockExpr, collect_break: bool) { - let statements = block_expr.statements(); - for stmt in statements { - let expr = match &stmt { - ast::Stmt::ExprStmt(stmt) => stmt.expr(), - 
ast::Stmt::LetStmt(stmt) => stmt.initializer(), - ast::Stmt::Item(_) => continue, - }; - if let Some(expr) = &expr { - self.handle_exprs(expr, collect_break); - } - } - - // Browse tail expressions for each block - if let Some(expr) = block_expr.expr() { - if let Some(last_exprs) = get_tail_expr_from_block(&expr) { - for last_expr in last_exprs { - let last_expr = match last_expr { - NodeType::Node(expr) | NodeType::Leaf(expr) => expr, - }; - - if let Some(last_expr) = Expr::cast(last_expr.clone()) { - self.handle_exprs(&last_expr, collect_break); - } else if let Some(expr_stmt) = ast::Stmt::cast(last_expr) { - let expr_stmt = match &expr_stmt { - ast::Stmt::ExprStmt(stmt) => stmt.expr(), - ast::Stmt::LetStmt(stmt) => stmt.initializer(), - ast::Stmt::Item(_) => None, - }; - if let Some(expr) = &expr_stmt { - self.handle_exprs(expr, collect_break); - } - } - } - } - } - } - - fn handle_exprs(&mut self, expr: &Expr, collect_break: bool) { - match expr { - Expr::BlockExpr(block_expr) => { - self.collect_jump_exprs(&block_expr, collect_break); - } - Expr::ReturnExpr(ret_expr) => { - if let Some(ret_expr_arg) = &ret_expr.expr() { - self.exprs_to_wrap.push(ret_expr_arg.syntax().clone()); - } - } - Expr::BreakExpr(break_expr) if collect_break => { - if let Some(break_expr_arg) = &break_expr.expr() { - self.exprs_to_wrap.push(break_expr_arg.syntax().clone()); - } - } - Expr::IfExpr(if_expr) => { - for block in if_expr.blocks() { - self.collect_jump_exprs(&block, collect_break); - } - } - Expr::LoopExpr(loop_expr) => { - if let Some(block_expr) = loop_expr.loop_body() { - self.collect_jump_exprs(&block_expr, collect_break); - } - } - Expr::ForExpr(for_expr) => { - if let Some(block_expr) = for_expr.loop_body() { - self.collect_jump_exprs(&block_expr, collect_break); - } - } - Expr::WhileExpr(while_expr) => { - if let Some(block_expr) = while_expr.loop_body() { - self.collect_jump_exprs(&block_expr, collect_break); - } - } - Expr::MatchExpr(match_expr) => { - if let 
Some(arm_list) = match_expr.match_arm_list() { - arm_list.arms().filter_map(|match_arm| match_arm.expr()).for_each(|expr| { - self.handle_exprs(&expr, collect_break); - }); - } - } - _ => {} - } - } - - fn collect_tail_exprs(&mut self, block: &BlockExpr) { - if let Some(expr) = block.expr() { - self.handle_exprs(&expr, true); - self.fetch_tail_exprs(&expr); - } - } - - fn fetch_tail_exprs(&mut self, expr: &Expr) { - if let Some(exprs) = get_tail_expr_from_block(expr) { - for node_type in &exprs { - match node_type { - NodeType::Leaf(expr) => { - self.exprs_to_wrap.push(expr.clone()); - } - NodeType::Node(expr) => match &Expr::cast(expr.clone()) { - Some(last_expr) => { - self.fetch_tail_exprs(last_expr); - } - None => { - self.exprs_to_wrap.push(expr.clone()); - } - }, - } - } - } - } -} - -#[derive(Debug)] -enum NodeType { - Leaf(SyntaxNode), - Node(SyntaxNode), -} - -/// Get a tail expression inside a block -fn get_tail_expr_from_block(expr: &Expr) -> Option> { - match expr { - Expr::IfExpr(if_expr) => { - let mut nodes = vec![]; - for block in if_expr.blocks() { - if let Some(block_expr) = block.expr() { - if let Some(tail_exprs) = get_tail_expr_from_block(&block_expr) { - nodes.extend(tail_exprs); - } - } else if let Some(last_expr) = block.syntax().last_child() { - nodes.push(NodeType::Node(last_expr)); - } else { - nodes.push(NodeType::Node(block.syntax().clone())); - } - } - Some(nodes) - } - Expr::LoopExpr(loop_expr) => { - loop_expr.syntax().last_child().map(|lc| vec![NodeType::Node(lc)]) - } - Expr::ForExpr(for_expr) => { - for_expr.syntax().last_child().map(|lc| vec![NodeType::Node(lc)]) - } - Expr::WhileExpr(while_expr) => { - while_expr.syntax().last_child().map(|lc| vec![NodeType::Node(lc)]) - } - Expr::BlockExpr(block_expr) => { - block_expr.expr().map(|lc| vec![NodeType::Node(lc.syntax().clone())]) - } - Expr::MatchExpr(match_expr) => { - let arm_list = match_expr.match_arm_list()?; - let arms: Vec = arm_list - .arms() - .filter_map(|match_arm| 
match_arm.expr()) - .map(|expr| match expr { - Expr::ReturnExpr(ret_expr) => NodeType::Node(ret_expr.syntax().clone()), - Expr::BreakExpr(break_expr) => NodeType::Node(break_expr.syntax().clone()), - _ => match expr.syntax().last_child() { - Some(last_expr) => NodeType::Node(last_expr), - None => NodeType::Node(expr.syntax().clone()), - }, - }) - .collect(); - - Some(arms) - } - Expr::BreakExpr(expr) => expr.expr().map(|e| vec![NodeType::Leaf(e.syntax().clone())]), - Expr::ReturnExpr(ret_expr) => Some(vec![NodeType::Node(ret_expr.syntax().clone())]), - Expr::CallExpr(call_expr) => Some(vec![NodeType::Leaf(call_expr.syntax().clone())]), - Expr::Literal(lit_expr) => Some(vec![NodeType::Leaf(lit_expr.syntax().clone())]), - Expr::TupleExpr(expr) => Some(vec![NodeType::Leaf(expr.syntax().clone())]), - Expr::ArrayExpr(expr) => Some(vec![NodeType::Leaf(expr.syntax().clone())]), - Expr::ParenExpr(expr) => Some(vec![NodeType::Leaf(expr.syntax().clone())]), - Expr::PathExpr(expr) => Some(vec![NodeType::Leaf(expr.syntax().clone())]), - Expr::RecordExpr(expr) => Some(vec![NodeType::Leaf(expr.syntax().clone())]), - Expr::IndexExpr(expr) => Some(vec![NodeType::Leaf(expr.syntax().clone())]), - Expr::MethodCallExpr(expr) => Some(vec![NodeType::Leaf(expr.syntax().clone())]), - Expr::AwaitExpr(expr) => Some(vec![NodeType::Leaf(expr.syntax().clone())]), - Expr::CastExpr(expr) => Some(vec![NodeType::Leaf(expr.syntax().clone())]), - Expr::RefExpr(expr) => Some(vec![NodeType::Leaf(expr.syntax().clone())]), - Expr::PrefixExpr(expr) => Some(vec![NodeType::Leaf(expr.syntax().clone())]), - Expr::RangeExpr(expr) => Some(vec![NodeType::Leaf(expr.syntax().clone())]), - Expr::BinExpr(expr) => Some(vec![NodeType::Leaf(expr.syntax().clone())]), - Expr::MacroCall(expr) => Some(vec![NodeType::Leaf(expr.syntax().clone())]), - Expr::BoxExpr(expr) => Some(vec![NodeType::Leaf(expr.syntax().clone())]), - _ => None, - } -} - -#[cfg(test)] -mod tests { - use crate::tests::{check_assist, 
check_assist_not_applicable}; - - use super::*; - - #[test] - fn change_return_type_to_result_simple() { - check_assist( - change_return_type_to_result, - r#"fn foo() -> i3<|>2 { - let test = "test"; - return 42i32; - }"#, - r#"fn foo() -> Result { - let test = "test"; - return Ok(42i32); - }"#, - ); - } - - #[test] - fn change_return_type_to_result_simple_return_type() { - check_assist( - change_return_type_to_result, - r#"fn foo() -> i32<|> { - let test = "test"; - return 42i32; - }"#, - r#"fn foo() -> Result { - let test = "test"; - return Ok(42i32); - }"#, - ); - } - - #[test] - fn change_return_type_to_result_simple_return_type_bad_cursor() { - check_assist_not_applicable( - change_return_type_to_result, - r#"fn foo() -> i32 { - let test = "test";<|> - return 42i32; - }"#, - ); - } - - #[test] - fn change_return_type_to_result_simple_return_type_already_result_std() { - check_assist_not_applicable( - change_return_type_to_result, - r#"fn foo() -> std::result::Result, String> { - let test = "test"; - return 42i32; - }"#, - ); - } - - #[test] - fn change_return_type_to_result_simple_return_type_already_result() { - mark::check!(change_return_type_to_result_simple_return_type_already_result); - check_assist_not_applicable( - change_return_type_to_result, - r#"fn foo() -> Result, String> { - let test = "test"; - return 42i32; - }"#, - ); - } - - #[test] - fn change_return_type_to_result_simple_with_cursor() { - check_assist( - change_return_type_to_result, - r#"fn foo() -> <|>i32 { - let test = "test"; - return 42i32; - }"#, - r#"fn foo() -> Result { - let test = "test"; - return Ok(42i32); - }"#, - ); - } - - #[test] - fn change_return_type_to_result_simple_with_tail() { - check_assist( - change_return_type_to_result, - r#"fn foo() -><|> i32 { - let test = "test"; - 42i32 - }"#, - r#"fn foo() -> Result { - let test = "test"; - Ok(42i32) - }"#, - ); - } - - #[test] - fn change_return_type_to_result_simple_with_tail_only() { - check_assist( - 
change_return_type_to_result, - r#"fn foo() -> i32<|> { - 42i32 - }"#, - r#"fn foo() -> Result { - Ok(42i32) - }"#, - ); - } - #[test] - fn change_return_type_to_result_simple_with_tail_block_like() { - check_assist( - change_return_type_to_result, - r#"fn foo() -> i32<|> { - if true { - 42i32 - } else { - 24i32 - } - }"#, - r#"fn foo() -> Result { - if true { - Ok(42i32) - } else { - Ok(24i32) - } - }"#, - ); - } - - #[test] - fn change_return_type_to_result_simple_with_nested_if() { - check_assist( - change_return_type_to_result, - r#"fn foo() -> i32<|> { - if true { - if false { - 1 - } else { - 2 - } - } else { - 24i32 - } - }"#, - r#"fn foo() -> Result { - if true { - if false { - Ok(1) - } else { - Ok(2) - } - } else { - Ok(24i32) - } - }"#, - ); - } - - #[test] - fn change_return_type_to_result_simple_with_await() { - check_assist( - change_return_type_to_result, - r#"async fn foo() -> i<|>32 { - if true { - if false { - 1.await - } else { - 2.await - } - } else { - 24i32.await - } - }"#, - r#"async fn foo() -> Result { - if true { - if false { - Ok(1.await) - } else { - Ok(2.await) - } - } else { - Ok(24i32.await) - } - }"#, - ); - } - - #[test] - fn change_return_type_to_result_simple_with_array() { - check_assist( - change_return_type_to_result, - r#"fn foo() -> [i32;<|> 3] { - [1, 2, 3] - }"#, - r#"fn foo() -> Result<[i32; 3], ${0:_}> { - Ok([1, 2, 3]) - }"#, - ); - } - - #[test] - fn change_return_type_to_result_simple_with_cast() { - check_assist( - change_return_type_to_result, - r#"fn foo() -<|>> i32 { - if true { - if false { - 1 as i32 - } else { - 2 as i32 - } - } else { - 24 as i32 - } - }"#, - r#"fn foo() -> Result { - if true { - if false { - Ok(1 as i32) - } else { - Ok(2 as i32) - } - } else { - Ok(24 as i32) - } - }"#, - ); - } - - #[test] - fn change_return_type_to_result_simple_with_tail_block_like_match() { - check_assist( - change_return_type_to_result, - r#"fn foo() -> i32<|> { - let my_var = 5; - match my_var { - 5 => 42i32, - _ => 
24i32, - } - }"#, - r#"fn foo() -> Result { - let my_var = 5; - match my_var { - 5 => Ok(42i32), - _ => Ok(24i32), - } - }"#, - ); - } - - #[test] - fn change_return_type_to_result_simple_with_loop_with_tail() { - check_assist( - change_return_type_to_result, - r#"fn foo() -> i32<|> { - let my_var = 5; - loop { - println!("test"); - 5 - } - - my_var - }"#, - r#"fn foo() -> Result { - let my_var = 5; - loop { - println!("test"); - 5 - } - - Ok(my_var) - }"#, - ); - } - - #[test] - fn change_return_type_to_result_simple_with_loop_in_let_stmt() { - check_assist( - change_return_type_to_result, - r#"fn foo() -> i32<|> { - let my_var = let x = loop { - break 1; - }; - - my_var - }"#, - r#"fn foo() -> Result { - let my_var = let x = loop { - break 1; - }; - - Ok(my_var) - }"#, - ); - } - - #[test] - fn change_return_type_to_result_simple_with_tail_block_like_match_return_expr() { - check_assist( - change_return_type_to_result, - r#"fn foo() -> i32<|> { - let my_var = 5; - let res = match my_var { - 5 => 42i32, - _ => return 24i32, - }; - - res - }"#, - r#"fn foo() -> Result { - let my_var = 5; - let res = match my_var { - 5 => 42i32, - _ => return Ok(24i32), - }; - - Ok(res) - }"#, - ); - - check_assist( - change_return_type_to_result, - r#"fn foo() -> i32<|> { - let my_var = 5; - let res = if my_var == 5 { - 42i32 - } else { - return 24i32; - }; - - res - }"#, - r#"fn foo() -> Result { - let my_var = 5; - let res = if my_var == 5 { - 42i32 - } else { - return Ok(24i32); - }; - - Ok(res) - }"#, - ); - } - - #[test] - fn change_return_type_to_result_simple_with_tail_block_like_match_deeper() { - check_assist( - change_return_type_to_result, - r#"fn foo() -> i32<|> { - let my_var = 5; - match my_var { - 5 => { - if true { - 42i32 - } else { - 25i32 - } - }, - _ => { - let test = "test"; - if test == "test" { - return bar(); - } - 53i32 - }, - } - }"#, - r#"fn foo() -> Result { - let my_var = 5; - match my_var { - 5 => { - if true { - Ok(42i32) - } else { - Ok(25i32) - } - 
}, - _ => { - let test = "test"; - if test == "test" { - return Ok(bar()); - } - Ok(53i32) - }, - } - }"#, - ); - } - - #[test] - fn change_return_type_to_result_simple_with_tail_block_like_early_return() { - check_assist( - change_return_type_to_result, - r#"fn foo() -> i<|>32 { - let test = "test"; - if test == "test" { - return 24i32; - } - 53i32 - }"#, - r#"fn foo() -> Result { - let test = "test"; - if test == "test" { - return Ok(24i32); - } - Ok(53i32) - }"#, - ); - } - - #[test] - fn change_return_type_to_result_simple_with_closure() { - check_assist( - change_return_type_to_result, - r#"fn foo(the_field: u32) -><|> u32 { - let true_closure = || { - return true; - }; - if the_field < 5 { - let mut i = 0; - - - if true_closure() { - return 99; - } else { - return 0; - } - } - - the_field - }"#, - r#"fn foo(the_field: u32) -> Result { - let true_closure = || { - return true; - }; - if the_field < 5 { - let mut i = 0; - - - if true_closure() { - return Ok(99); - } else { - return Ok(0); - } - } - - Ok(the_field) - }"#, - ); - - check_assist( - change_return_type_to_result, - r#"fn foo(the_field: u32) -> u32<|> { - let true_closure = || { - return true; - }; - if the_field < 5 { - let mut i = 0; - - - if true_closure() { - return 99; - } else { - return 0; - } - } - let t = None; - - t.unwrap_or_else(|| the_field) - }"#, - r#"fn foo(the_field: u32) -> Result { - let true_closure = || { - return true; - }; - if the_field < 5 { - let mut i = 0; - - - if true_closure() { - return Ok(99); - } else { - return Ok(0); - } - } - let t = None; - - Ok(t.unwrap_or_else(|| the_field)) - }"#, - ); - } - - #[test] - fn change_return_type_to_result_simple_with_weird_forms() { - check_assist( - change_return_type_to_result, - r#"fn foo() -> i32<|> { - let test = "test"; - if test == "test" { - return 24i32; - } - let mut i = 0; - loop { - if i == 1 { - break 55; - } - i += 1; - } - }"#, - r#"fn foo() -> Result { - let test = "test"; - if test == "test" { - return Ok(24i32); - 
} - let mut i = 0; - loop { - if i == 1 { - break Ok(55); - } - i += 1; - } - }"#, - ); - - check_assist( - change_return_type_to_result, - r#"fn foo() -> i32<|> { - let test = "test"; - if test == "test" { - return 24i32; - } - let mut i = 0; - loop { - loop { - if i == 1 { - break 55; - } - i += 1; - } - } - }"#, - r#"fn foo() -> Result { - let test = "test"; - if test == "test" { - return Ok(24i32); - } - let mut i = 0; - loop { - loop { - if i == 1 { - break Ok(55); - } - i += 1; - } - } - }"#, - ); - - check_assist( - change_return_type_to_result, - r#"fn foo() -> i3<|>2 { - let test = "test"; - let other = 5; - if test == "test" { - let res = match other { - 5 => 43, - _ => return 56, - }; - } - let mut i = 0; - loop { - loop { - if i == 1 { - break 55; - } - i += 1; - } - } - }"#, - r#"fn foo() -> Result { - let test = "test"; - let other = 5; - if test == "test" { - let res = match other { - 5 => 43, - _ => return Ok(56), - }; - } - let mut i = 0; - loop { - loop { - if i == 1 { - break Ok(55); - } - i += 1; - } - } - }"#, - ); - - check_assist( - change_return_type_to_result, - r#"fn foo(the_field: u32) -> u32<|> { - if the_field < 5 { - let mut i = 0; - loop { - if i > 5 { - return 55u32; - } - i += 3; - } - - match i { - 5 => return 99, - _ => return 0, - }; - } - - the_field - }"#, - r#"fn foo(the_field: u32) -> Result { - if the_field < 5 { - let mut i = 0; - loop { - if i > 5 { - return Ok(55u32); - } - i += 3; - } - - match i { - 5 => return Ok(99), - _ => return Ok(0), - }; - } - - Ok(the_field) - }"#, - ); - - check_assist( - change_return_type_to_result, - r#"fn foo(the_field: u32) -> u3<|>2 { - if the_field < 5 { - let mut i = 0; - - match i { - 5 => return 99, - _ => return 0, - } - } - - the_field - }"#, - r#"fn foo(the_field: u32) -> Result { - if the_field < 5 { - let mut i = 0; - - match i { - 5 => return Ok(99), - _ => return Ok(0), - } - } - - Ok(the_field) - }"#, - ); - - check_assist( - change_return_type_to_result, - r#"fn 
foo(the_field: u32) -> u32<|> { - if the_field < 5 { - let mut i = 0; - - if i == 5 { - return 99 - } else { - return 0 - } - } - - the_field - }"#, - r#"fn foo(the_field: u32) -> Result { - if the_field < 5 { - let mut i = 0; - - if i == 5 { - return Ok(99) - } else { - return Ok(0) - } - } - - Ok(the_field) - }"#, - ); - - check_assist( - change_return_type_to_result, - r#"fn foo(the_field: u32) -> <|>u32 { - if the_field < 5 { - let mut i = 0; - - if i == 5 { - return 99; - } else { - return 0; - } - } - - the_field - }"#, - r#"fn foo(the_field: u32) -> Result { - if the_field < 5 { - let mut i = 0; - - if i == 5 { - return Ok(99); - } else { - return Ok(0); - } - } - - Ok(the_field) - }"#, - ); - } -} diff --git a/crates/ra_assists/src/handlers/change_visibility.rs b/crates/ra_assists/src/handlers/change_visibility.rs deleted file mode 100644 index 724daa93f4..0000000000 --- a/crates/ra_assists/src/handlers/change_visibility.rs +++ /dev/null @@ -1,200 +0,0 @@ -use ra_syntax::{ - ast::{self, NameOwner, VisibilityOwner}, - AstNode, - SyntaxKind::{CONST, ENUM, FN, MODULE, STATIC, STRUCT, TRAIT, VISIBILITY}, - T, -}; -use test_utils::mark; - -use crate::{utils::vis_offset, AssistContext, AssistId, AssistKind, Assists}; - -// Assist: change_visibility -// -// Adds or changes existing visibility specifier. 
-// -// ``` -// <|>fn frobnicate() {} -// ``` -// -> -// ``` -// pub(crate) fn frobnicate() {} -// ``` -pub(crate) fn change_visibility(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - if let Some(vis) = ctx.find_node_at_offset::() { - return change_vis(acc, vis); - } - add_vis(acc, ctx) -} - -fn add_vis(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let item_keyword = ctx.token_at_offset().find(|leaf| { - matches!( - leaf.kind(), - T![const] | T![static] | T![fn] | T![mod] | T![struct] | T![enum] | T![trait] - ) - }); - - let (offset, target) = if let Some(keyword) = item_keyword { - let parent = keyword.parent(); - let def_kws = vec![CONST, STATIC, FN, MODULE, STRUCT, ENUM, TRAIT]; - // Parent is not a definition, can't add visibility - if !def_kws.iter().any(|&def_kw| def_kw == parent.kind()) { - return None; - } - // Already have visibility, do nothing - if parent.children().any(|child| child.kind() == VISIBILITY) { - return None; - } - (vis_offset(&parent), keyword.text_range()) - } else if let Some(field_name) = ctx.find_node_at_offset::() { - let field = field_name.syntax().ancestors().find_map(ast::RecordField::cast)?; - if field.name()? 
!= field_name { - mark::hit!(change_visibility_field_false_positive); - return None; - } - if field.visibility().is_some() { - return None; - } - (vis_offset(field.syntax()), field_name.syntax().text_range()) - } else if let Some(field) = ctx.find_node_at_offset::() { - if field.visibility().is_some() { - return None; - } - (vis_offset(field.syntax()), field.syntax().text_range()) - } else { - return None; - }; - - acc.add( - AssistId("change_visibility", AssistKind::RefactorRewrite), - "Change visibility to pub(crate)", - target, - |edit| { - edit.insert(offset, "pub(crate) "); - }, - ) -} - -fn change_vis(acc: &mut Assists, vis: ast::Visibility) -> Option<()> { - if vis.syntax().text() == "pub" { - let target = vis.syntax().text_range(); - return acc.add( - AssistId("change_visibility", AssistKind::RefactorRewrite), - "Change Visibility to pub(crate)", - target, - |edit| { - edit.replace(vis.syntax().text_range(), "pub(crate)"); - }, - ); - } - if vis.syntax().text() == "pub(crate)" { - let target = vis.syntax().text_range(); - return acc.add( - AssistId("change_visibility", AssistKind::RefactorRewrite), - "Change visibility to pub", - target, - |edit| { - edit.replace(vis.syntax().text_range(), "pub"); - }, - ); - } - None -} - -#[cfg(test)] -mod tests { - use test_utils::mark; - - use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target}; - - use super::*; - - #[test] - fn change_visibility_adds_pub_crate_to_items() { - check_assist(change_visibility, "<|>fn foo() {}", "pub(crate) fn foo() {}"); - check_assist(change_visibility, "f<|>n foo() {}", "pub(crate) fn foo() {}"); - check_assist(change_visibility, "<|>struct Foo {}", "pub(crate) struct Foo {}"); - check_assist(change_visibility, "<|>mod foo {}", "pub(crate) mod foo {}"); - check_assist(change_visibility, "<|>trait Foo {}", "pub(crate) trait Foo {}"); - check_assist(change_visibility, "m<|>od {}", "pub(crate) mod {}"); - check_assist(change_visibility, "unsafe f<|>n foo() {}", 
"pub(crate) unsafe fn foo() {}"); - } - - #[test] - fn change_visibility_works_with_struct_fields() { - check_assist( - change_visibility, - r"struct S { <|>field: u32 }", - r"struct S { pub(crate) field: u32 }", - ); - check_assist(change_visibility, r"struct S ( <|>u32 )", r"struct S ( pub(crate) u32 )"); - } - - #[test] - fn change_visibility_field_false_positive() { - mark::check!(change_visibility_field_false_positive); - check_assist_not_applicable( - change_visibility, - r"struct S { field: [(); { let <|>x = ();}] }", - ) - } - - #[test] - fn change_visibility_pub_to_pub_crate() { - check_assist(change_visibility, "<|>pub fn foo() {}", "pub(crate) fn foo() {}") - } - - #[test] - fn change_visibility_pub_crate_to_pub() { - check_assist(change_visibility, "<|>pub(crate) fn foo() {}", "pub fn foo() {}") - } - - #[test] - fn change_visibility_const() { - check_assist(change_visibility, "<|>const FOO = 3u8;", "pub(crate) const FOO = 3u8;"); - } - - #[test] - fn change_visibility_static() { - check_assist(change_visibility, "<|>static FOO = 3u8;", "pub(crate) static FOO = 3u8;"); - } - - #[test] - fn change_visibility_handles_comment_attrs() { - check_assist( - change_visibility, - r" - /// docs - - // comments - - #[derive(Debug)] - <|>struct Foo; - ", - r" - /// docs - - // comments - - #[derive(Debug)] - pub(crate) struct Foo; - ", - ) - } - - #[test] - fn not_applicable_for_enum_variants() { - check_assist_not_applicable( - change_visibility, - r"mod foo { pub enum Foo {Foo1} } - fn main() { foo::Foo::Foo1<|> } ", - ); - } - - #[test] - fn change_visibility_target() { - check_assist_target(change_visibility, "<|>fn foo() {}", "fn"); - check_assist_target(change_visibility, "pub(crate)<|> fn foo() {}", "pub(crate)"); - check_assist_target(change_visibility, "struct S { <|>field: u32 }", "field"); - } -} diff --git a/crates/ra_assists/src/handlers/early_return.rs b/crates/ra_assists/src/handlers/early_return.rs deleted file mode 100644 index 
6816a2709e..0000000000 --- a/crates/ra_assists/src/handlers/early_return.rs +++ /dev/null @@ -1,515 +0,0 @@ -use std::{iter::once, ops::RangeInclusive}; - -use ra_syntax::{ - algo::replace_children, - ast::{ - self, - edit::{AstNodeEdit, IndentLevel}, - make, - }, - AstNode, - SyntaxKind::{FN, LOOP_EXPR, L_CURLY, R_CURLY, WHILE_EXPR, WHITESPACE}, - SyntaxNode, -}; - -use crate::{ - assist_context::{AssistContext, Assists}, - utils::invert_boolean_expression, - AssistId, AssistKind, -}; - -// Assist: convert_to_guarded_return -// -// Replace a large conditional with a guarded return. -// -// ``` -// fn main() { -// <|>if cond { -// foo(); -// bar(); -// } -// } -// ``` -// -> -// ``` -// fn main() { -// if !cond { -// return; -// } -// foo(); -// bar(); -// } -// ``` -pub(crate) fn convert_to_guarded_return(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let if_expr: ast::IfExpr = ctx.find_node_at_offset()?; - if if_expr.else_branch().is_some() { - return None; - } - - let cond = if_expr.condition()?; - - // Check if there is an IfLet that we can handle. - let if_let_pat = match cond.pat() { - None => None, // No IfLet, supported. - Some(ast::Pat::TupleStructPat(pat)) if pat.fields().count() == 1 => { - let path = pat.path()?; - match path.qualifier() { - None => { - let bound_ident = pat.fields().next().unwrap(); - Some((path, bound_ident)) - } - Some(_) => return None, - } - } - Some(_) => return None, // Unsupported IfLet. - }; - - let cond_expr = cond.expr()?; - let then_block = if_expr.then_branch()?; - - let parent_block = if_expr.syntax().parent()?.ancestors().find_map(ast::BlockExpr::cast)?; - - if parent_block.expr()? 
!= if_expr.clone().into() { - return None; - } - - // check for early return and continue - let first_in_then_block = then_block.syntax().first_child()?; - if ast::ReturnExpr::can_cast(first_in_then_block.kind()) - || ast::ContinueExpr::can_cast(first_in_then_block.kind()) - || first_in_then_block - .children() - .any(|x| ast::ReturnExpr::can_cast(x.kind()) || ast::ContinueExpr::can_cast(x.kind())) - { - return None; - } - - let parent_container = parent_block.syntax().parent()?; - - let early_expression: ast::Expr = match parent_container.kind() { - WHILE_EXPR | LOOP_EXPR => make::expr_continue(), - FN => make::expr_return(), - _ => return None, - }; - - if then_block.syntax().first_child_or_token().map(|t| t.kind() == L_CURLY).is_none() { - return None; - } - - then_block.syntax().last_child_or_token().filter(|t| t.kind() == R_CURLY)?; - - let target = if_expr.syntax().text_range(); - acc.add( - AssistId("convert_to_guarded_return", AssistKind::RefactorRewrite), - "Convert to guarded return", - target, - |edit| { - let if_indent_level = IndentLevel::from_node(&if_expr.syntax()); - let new_block = match if_let_pat { - None => { - // If. - let new_expr = { - let then_branch = - make::block_expr(once(make::expr_stmt(early_expression).into()), None); - let cond = invert_boolean_expression(cond_expr); - make::expr_if(make::condition(cond, None), then_branch) - .indent(if_indent_level) - }; - replace(new_expr.syntax(), &then_block, &parent_block, &if_expr) - } - Some((path, bound_ident)) => { - // If-let. 
- let match_expr = { - let happy_arm = { - let pat = make::tuple_struct_pat( - path, - once(make::ident_pat(make::name("it")).into()), - ); - let expr = { - let name_ref = make::name_ref("it"); - let segment = make::path_segment(name_ref); - let path = make::path_unqualified(segment); - make::expr_path(path) - }; - make::match_arm(once(pat.into()), expr) - }; - - let sad_arm = make::match_arm( - // FIXME: would be cool to use `None` or `Err(_)` if appropriate - once(make::wildcard_pat().into()), - early_expression, - ); - - make::expr_match(cond_expr, make::match_arm_list(vec![happy_arm, sad_arm])) - }; - - let let_stmt = make::let_stmt( - make::ident_pat(make::name(&bound_ident.syntax().to_string())).into(), - Some(match_expr), - ); - let let_stmt = let_stmt.indent(if_indent_level); - replace(let_stmt.syntax(), &then_block, &parent_block, &if_expr) - } - }; - edit.replace_ast(parent_block, ast::BlockExpr::cast(new_block).unwrap()); - - fn replace( - new_expr: &SyntaxNode, - then_block: &ast::BlockExpr, - parent_block: &ast::BlockExpr, - if_expr: &ast::IfExpr, - ) -> SyntaxNode { - let then_block_items = then_block.dedent(IndentLevel(1)); - let end_of_then = then_block_items.syntax().last_child_or_token().unwrap(); - let end_of_then = - if end_of_then.prev_sibling_or_token().map(|n| n.kind()) == Some(WHITESPACE) { - end_of_then.prev_sibling_or_token().unwrap() - } else { - end_of_then - }; - let mut then_statements = new_expr.children_with_tokens().chain( - then_block_items - .syntax() - .children_with_tokens() - .skip(1) - .take_while(|i| *i != end_of_then), - ); - replace_children( - &parent_block.syntax(), - RangeInclusive::new( - if_expr.clone().syntax().clone().into(), - if_expr.syntax().clone().into(), - ), - &mut then_statements, - ) - } - }, - ) -} - -#[cfg(test)] -mod tests { - use crate::tests::{check_assist, check_assist_not_applicable}; - - use super::*; - - #[test] - fn convert_inside_fn() { - check_assist( - convert_to_guarded_return, - r#" - fn 
main() { - bar(); - if<|> true { - foo(); - - //comment - bar(); - } - } - "#, - r#" - fn main() { - bar(); - if !true { - return; - } - foo(); - - //comment - bar(); - } - "#, - ); - } - - #[test] - fn convert_let_inside_fn() { - check_assist( - convert_to_guarded_return, - r#" - fn main(n: Option) { - bar(); - if<|> let Some(n) = n { - foo(n); - - //comment - bar(); - } - } - "#, - r#" - fn main(n: Option) { - bar(); - let n = match n { - Some(it) => it, - _ => return, - }; - foo(n); - - //comment - bar(); - } - "#, - ); - } - - #[test] - fn convert_if_let_result() { - check_assist( - convert_to_guarded_return, - r#" - fn main() { - if<|> let Ok(x) = Err(92) { - foo(x); - } - } - "#, - r#" - fn main() { - let x = match Err(92) { - Ok(it) => it, - _ => return, - }; - foo(x); - } - "#, - ); - } - - #[test] - fn convert_let_ok_inside_fn() { - check_assist( - convert_to_guarded_return, - r#" - fn main(n: Option) { - bar(); - if<|> let Ok(n) = n { - foo(n); - - //comment - bar(); - } - } - "#, - r#" - fn main(n: Option) { - bar(); - let n = match n { - Ok(it) => it, - _ => return, - }; - foo(n); - - //comment - bar(); - } - "#, - ); - } - - #[test] - fn convert_inside_while() { - check_assist( - convert_to_guarded_return, - r#" - fn main() { - while true { - if<|> true { - foo(); - bar(); - } - } - } - "#, - r#" - fn main() { - while true { - if !true { - continue; - } - foo(); - bar(); - } - } - "#, - ); - } - - #[test] - fn convert_let_inside_while() { - check_assist( - convert_to_guarded_return, - r#" - fn main() { - while true { - if<|> let Some(n) = n { - foo(n); - bar(); - } - } - } - "#, - r#" - fn main() { - while true { - let n = match n { - Some(it) => it, - _ => continue, - }; - foo(n); - bar(); - } - } - "#, - ); - } - - #[test] - fn convert_inside_loop() { - check_assist( - convert_to_guarded_return, - r#" - fn main() { - loop { - if<|> true { - foo(); - bar(); - } - } - } - "#, - r#" - fn main() { - loop { - if !true { - continue; - } - foo(); - bar(); - 
} - } - "#, - ); - } - - #[test] - fn convert_let_inside_loop() { - check_assist( - convert_to_guarded_return, - r#" - fn main() { - loop { - if<|> let Some(n) = n { - foo(n); - bar(); - } - } - } - "#, - r#" - fn main() { - loop { - let n = match n { - Some(it) => it, - _ => continue, - }; - foo(n); - bar(); - } - } - "#, - ); - } - - #[test] - fn ignore_already_converted_if() { - check_assist_not_applicable( - convert_to_guarded_return, - r#" - fn main() { - if<|> true { - return; - } - } - "#, - ); - } - - #[test] - fn ignore_already_converted_loop() { - check_assist_not_applicable( - convert_to_guarded_return, - r#" - fn main() { - loop { - if<|> true { - continue; - } - } - } - "#, - ); - } - - #[test] - fn ignore_return() { - check_assist_not_applicable( - convert_to_guarded_return, - r#" - fn main() { - if<|> true { - return - } - } - "#, - ); - } - - #[test] - fn ignore_else_branch() { - check_assist_not_applicable( - convert_to_guarded_return, - r#" - fn main() { - if<|> true { - foo(); - } else { - bar() - } - } - "#, - ); - } - - #[test] - fn ignore_statements_aftert_if() { - check_assist_not_applicable( - convert_to_guarded_return, - r#" - fn main() { - if<|> true { - foo(); - } - bar(); - } - "#, - ); - } - - #[test] - fn ignore_statements_inside_if() { - check_assist_not_applicable( - convert_to_guarded_return, - r#" - fn main() { - if false { - if<|> true { - foo(); - } - } - } - "#, - ); - } -} diff --git a/crates/ra_assists/src/handlers/expand_glob_import.rs b/crates/ra_assists/src/handlers/expand_glob_import.rs deleted file mode 100644 index eb216a81a1..0000000000 --- a/crates/ra_assists/src/handlers/expand_glob_import.rs +++ /dev/null @@ -1,391 +0,0 @@ -use hir::{AssocItem, MacroDef, ModuleDef, Name, PathResolution, ScopeDef, SemanticsScope}; -use ra_ide_db::{ - defs::{classify_name_ref, Definition, NameRefClass}, - RootDatabase, -}; -use ra_syntax::{algo, ast, match_ast, AstNode, SyntaxNode, SyntaxToken, T}; - -use crate::{ - 
assist_context::{AssistBuilder, AssistContext, Assists}, - AssistId, AssistKind, -}; - -use either::Either; - -// Assist: expand_glob_import -// -// Expands glob imports. -// -// ``` -// mod foo { -// pub struct Bar; -// pub struct Baz; -// } -// -// use foo::*<|>; -// -// fn qux(bar: Bar, baz: Baz) {} -// ``` -// -> -// ``` -// mod foo { -// pub struct Bar; -// pub struct Baz; -// } -// -// use foo::{Baz, Bar}; -// -// fn qux(bar: Bar, baz: Baz) {} -// ``` -pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let star = ctx.find_token_at_offset(T![*])?; - let mod_path = find_mod_path(&star)?; - - let source_file = ctx.source_file(); - let scope = ctx.sema.scope_at_offset(source_file.syntax(), ctx.offset()); - - let defs_in_mod = find_defs_in_mod(ctx, scope, &mod_path)?; - let name_refs_in_source_file = - source_file.syntax().descendants().filter_map(ast::NameRef::cast).collect(); - let used_names = find_used_names(ctx, defs_in_mod, name_refs_in_source_file); - - let parent = star.parent().parent()?; - acc.add( - AssistId("expand_glob_import", AssistKind::RefactorRewrite), - "Expand glob import", - parent.text_range(), - |builder| { - replace_ast(builder, &parent, mod_path, used_names); - }, - ) -} - -fn find_mod_path(star: &SyntaxToken) -> Option { - star.ancestors().find_map(|n| ast::UseTree::cast(n).and_then(|u| u.path())) -} - -#[derive(PartialEq)] -enum Def { - ModuleDef(ModuleDef), - MacroDef(MacroDef), -} - -impl Def { - fn name(&self, db: &RootDatabase) -> Option { - match self { - Def::ModuleDef(def) => def.name(db), - Def::MacroDef(def) => def.name(db), - } - } -} - -fn find_defs_in_mod( - ctx: &AssistContext, - from: SemanticsScope<'_>, - path: &ast::Path, -) -> Option> { - let hir_path = ctx.sema.lower_path(&path)?; - let module = if let Some(PathResolution::Def(ModuleDef::Module(module))) = - from.resolve_hir_path_qualifier(&hir_path) - { - module - } else { - return None; - }; - - let module_scope = 
module.scope(ctx.db(), from.module()); - - let mut defs = vec![]; - for (_, def) in module_scope { - match def { - ScopeDef::ModuleDef(def) => defs.push(Def::ModuleDef(def)), - ScopeDef::MacroDef(def) => defs.push(Def::MacroDef(def)), - _ => continue, - } - } - - Some(defs) -} - -fn find_used_names( - ctx: &AssistContext, - defs_in_mod: Vec, - name_refs_in_source_file: Vec, -) -> Vec { - let defs_in_source_file = name_refs_in_source_file - .iter() - .filter_map(|r| classify_name_ref(&ctx.sema, r)) - .filter_map(|rc| match rc { - NameRefClass::Definition(Definition::ModuleDef(def)) => Some(Def::ModuleDef(def)), - NameRefClass::Definition(Definition::Macro(def)) => Some(Def::MacroDef(def)), - _ => None, - }) - .collect::>(); - - defs_in_mod - .iter() - .filter(|def| { - if let Def::ModuleDef(ModuleDef::Trait(tr)) = def { - for item in tr.items(ctx.db()) { - if let AssocItem::Function(f) = item { - if defs_in_source_file.contains(&Def::ModuleDef(ModuleDef::Function(f))) { - return true; - } - } - } - } - - defs_in_source_file.contains(def) - }) - .filter_map(|d| d.name(ctx.db())) - .collect() -} - -fn replace_ast( - builder: &mut AssistBuilder, - node: &SyntaxNode, - path: ast::Path, - used_names: Vec, -) { - let replacement: Either = match used_names.as_slice() { - [name] => Either::Left(ast::make::use_tree( - ast::make::path_from_text(&format!("{}::{}", path, name)), - None, - None, - false, - )), - names => Either::Right(ast::make::use_tree_list(names.iter().map(|n| { - ast::make::use_tree(ast::make::path_from_text(&n.to_string()), None, None, false) - }))), - }; - - let mut replace_node = |replacement: Either| { - algo::diff(node, &replacement.either(|u| u.syntax().clone(), |ut| ut.syntax().clone())) - .into_text_edit(builder.text_edit_builder()); - }; - - match_ast! 
{ - match node { - ast::UseTree(use_tree) => { - replace_node(replacement); - }, - ast::UseTreeList(use_tree_list) => { - replace_node(replacement); - }, - ast::Use(use_item) => { - builder.replace_ast(use_item, ast::make::use_(replacement.left_or_else(|ut| ast::make::use_tree(path, Some(ut), None, false)))); - }, - _ => {}, - } - } -} - -#[cfg(test)] -mod tests { - use crate::tests::{check_assist, check_assist_not_applicable}; - - use super::*; - - #[test] - fn expanding_glob_import() { - check_assist( - expand_glob_import, - r" -mod foo { - pub struct Bar; - pub struct Baz; - pub struct Qux; - - pub fn f() {} -} - -use foo::*<|>; - -fn qux(bar: Bar, baz: Baz) { - f(); -} -", - r" -mod foo { - pub struct Bar; - pub struct Baz; - pub struct Qux; - - pub fn f() {} -} - -use foo::{Baz, Bar, f}; - -fn qux(bar: Bar, baz: Baz) { - f(); -} -", - ) - } - - #[test] - fn expanding_glob_import_with_existing_explicit_names() { - check_assist( - expand_glob_import, - r" -mod foo { - pub struct Bar; - pub struct Baz; - pub struct Qux; - - pub fn f() {} -} - -use foo::{*<|>, f}; - -fn qux(bar: Bar, baz: Baz) { - f(); -} -", - r" -mod foo { - pub struct Bar; - pub struct Baz; - pub struct Qux; - - pub fn f() {} -} - -use foo::{Baz, Bar, f}; - -fn qux(bar: Bar, baz: Baz) { - f(); -} -", - ) - } - - #[test] - fn expanding_nested_glob_import() { - check_assist( - expand_glob_import, - r" -mod foo { - mod bar { - pub struct Bar; - pub struct Baz; - pub struct Qux; - - pub fn f() {} - } - - mod baz { - pub fn g() {} - } -} - -use foo::{bar::{*<|>, f}, baz::*}; - -fn qux(bar: Bar, baz: Baz) { - f(); - g(); -} -", - r" -mod foo { - mod bar { - pub struct Bar; - pub struct Baz; - pub struct Qux; - - pub fn f() {} - } - - mod baz { - pub fn g() {} - } -} - -use foo::{bar::{Baz, Bar, f}, baz::*}; - -fn qux(bar: Bar, baz: Baz) { - f(); - g(); -} -", - ) - } - - #[test] - fn expanding_glob_import_with_macro_defs() { - check_assist( - expand_glob_import, - r" -//- /lib.rs crate:foo 
-#[macro_export] -macro_rules! bar { - () => () -} - -pub fn baz() {} - -//- /main.rs crate:main deps:foo -use foo::*<|>; - -fn main() { - bar!(); - baz(); -} -", - r" -use foo::{bar, baz}; - -fn main() { - bar!(); - baz(); -} -", - ) - } - - #[test] - fn expanding_glob_import_with_trait_method_uses() { - check_assist( - expand_glob_import, - r" -//- /lib.rs crate:foo -pub trait Tr { - fn method(&self) {} -} -impl Tr for () {} - -//- /main.rs crate:main deps:foo -use foo::*<|>; - -fn main() { - ().method(); -} -", - r" -use foo::Tr; - -fn main() { - ().method(); -} -", - ) - } - - #[test] - fn expanding_is_not_applicable_if_cursor_is_not_in_star_token() { - check_assist_not_applicable( - expand_glob_import, - r" - mod foo { - pub struct Bar; - pub struct Baz; - pub struct Qux; - } - - use foo::Bar<|>; - - fn qux(bar: Bar, baz: Baz) {} - ", - ) - } -} diff --git a/crates/ra_assists/src/handlers/extract_struct_from_enum_variant.rs b/crates/ra_assists/src/handlers/extract_struct_from_enum_variant.rs deleted file mode 100644 index ccec688cae..0000000000 --- a/crates/ra_assists/src/handlers/extract_struct_from_enum_variant.rs +++ /dev/null @@ -1,321 +0,0 @@ -use hir::{EnumVariant, Module, ModuleDef, Name}; -use ra_db::FileId; -use ra_fmt::leading_indent; -use ra_ide_db::{defs::Definition, search::Reference, RootDatabase}; -use ra_syntax::{ - algo::find_node_at_offset, - ast::{self, ArgListOwner, AstNode, NameOwner, VisibilityOwner}, - SourceFile, SyntaxNode, TextRange, TextSize, -}; -use rustc_hash::FxHashSet; - -use crate::{ - assist_context::AssistBuilder, utils::insert_use_statement, AssistContext, AssistId, - AssistKind, Assists, -}; - -// Assist: extract_struct_from_enum_variant -// -// Extracts a struct from enum variant. 
-// -// ``` -// enum A { <|>One(u32, u32) } -// ``` -// -> -// ``` -// struct One(pub u32, pub u32); -// -// enum A { One(One) } -// ``` -pub(crate) fn extract_struct_from_enum_variant( - acc: &mut Assists, - ctx: &AssistContext, -) -> Option<()> { - let variant = ctx.find_node_at_offset::()?; - let field_list = match variant.kind() { - ast::StructKind::Tuple(field_list) => field_list, - _ => return None, - }; - let variant_name = variant.name()?.to_string(); - let variant_hir = ctx.sema.to_def(&variant)?; - if existing_struct_def(ctx.db(), &variant_name, &variant_hir) { - return None; - } - let enum_ast = variant.parent_enum(); - let visibility = enum_ast.visibility(); - let enum_hir = ctx.sema.to_def(&enum_ast)?; - let variant_hir_name = variant_hir.name(ctx.db()); - let enum_module_def = ModuleDef::from(enum_hir); - let current_module = enum_hir.module(ctx.db()); - let target = variant.syntax().text_range(); - acc.add( - AssistId("extract_struct_from_enum_variant", AssistKind::RefactorRewrite), - "Extract struct from enum variant", - target, - |builder| { - let definition = Definition::ModuleDef(ModuleDef::EnumVariant(variant_hir)); - let res = definition.find_usages(&ctx.sema, None); - let start_offset = variant.parent_enum().syntax().text_range().start(); - let mut visited_modules_set = FxHashSet::default(); - visited_modules_set.insert(current_module); - for reference in res { - let source_file = ctx.sema.parse(reference.file_range.file_id); - update_reference( - ctx, - builder, - reference, - &source_file, - &enum_module_def, - &variant_hir_name, - &mut visited_modules_set, - ); - } - extract_struct_def( - builder, - enum_ast.syntax(), - &variant_name, - &field_list.to_string(), - start_offset, - ctx.frange.file_id, - &visibility, - ); - let list_range = field_list.syntax().text_range(); - update_variant(builder, &variant_name, ctx.frange.file_id, list_range); - }, - ) -} - -fn existing_struct_def(db: &RootDatabase, variant_name: &str, variant: &EnumVariant) 
-> bool { - variant - .parent_enum(db) - .module(db) - .scope(db, None) - .into_iter() - .any(|(name, _)| name.to_string() == variant_name.to_string()) -} - -fn insert_import( - ctx: &AssistContext, - builder: &mut AssistBuilder, - path: &ast::PathExpr, - module: &Module, - enum_module_def: &ModuleDef, - variant_hir_name: &Name, -) -> Option<()> { - let db = ctx.db(); - let mod_path = module.find_use_path(db, enum_module_def.clone()); - if let Some(mut mod_path) = mod_path { - mod_path.segments.pop(); - mod_path.segments.push(variant_hir_name.clone()); - insert_use_statement(path.syntax(), &mod_path, ctx, builder.text_edit_builder()); - } - Some(()) -} - -fn extract_struct_def( - builder: &mut AssistBuilder, - enum_ast: &SyntaxNode, - variant_name: &str, - variant_list: &str, - start_offset: TextSize, - file_id: FileId, - visibility: &Option, -) -> Option<()> { - let visibility_string = if let Some(visibility) = visibility { - format!("{} ", visibility.to_string()) - } else { - "".to_string() - }; - let indent = if let Some(indent) = leading_indent(enum_ast) { - indent.to_string() - } else { - "".to_string() - }; - let struct_def = format!( - r#"{}struct {}{}; - -{}"#, - visibility_string, - variant_name, - list_with_visibility(variant_list), - indent - ); - builder.edit_file(file_id); - builder.insert(start_offset, struct_def); - Some(()) -} - -fn update_variant( - builder: &mut AssistBuilder, - variant_name: &str, - file_id: FileId, - list_range: TextRange, -) -> Option<()> { - let inside_variant_range = TextRange::new( - list_range.start().checked_add(TextSize::from(1))?, - list_range.end().checked_sub(TextSize::from(1))?, - ); - builder.edit_file(file_id); - builder.replace(inside_variant_range, variant_name); - Some(()) -} - -fn update_reference( - ctx: &AssistContext, - builder: &mut AssistBuilder, - reference: Reference, - source_file: &SourceFile, - enum_module_def: &ModuleDef, - variant_hir_name: &Name, - visited_modules_set: &mut FxHashSet, -) -> 
Option<()> { - let path_expr: ast::PathExpr = find_node_at_offset::( - source_file.syntax(), - reference.file_range.range.start(), - )?; - let call = path_expr.syntax().parent().and_then(ast::CallExpr::cast)?; - let list = call.arg_list()?; - let segment = path_expr.path()?.segment()?; - let module = ctx.sema.scope(&path_expr.syntax()).module()?; - let list_range = list.syntax().text_range(); - let inside_list_range = TextRange::new( - list_range.start().checked_add(TextSize::from(1))?, - list_range.end().checked_sub(TextSize::from(1))?, - ); - builder.edit_file(reference.file_range.file_id); - if !visited_modules_set.contains(&module) { - if insert_import(ctx, builder, &path_expr, &module, enum_module_def, variant_hir_name) - .is_some() - { - visited_modules_set.insert(module); - } - } - builder.replace(inside_list_range, format!("{}{}", segment, list)); - Some(()) -} - -fn list_with_visibility(list: &str) -> String { - list.split(',') - .map(|part| { - let index = if part.chars().next().unwrap() == '(' { 1usize } else { 0 }; - let mut mod_part = part.trim().to_string(); - mod_part.insert_str(index, "pub "); - mod_part - }) - .collect::>() - .join(", ") -} - -#[cfg(test)] -mod tests { - - use crate::{ - tests::{check_assist, check_assist_not_applicable}, - utils::FamousDefs, - }; - - use super::*; - - #[test] - fn test_extract_struct_several_fields() { - check_assist( - extract_struct_from_enum_variant, - "enum A { <|>One(u32, u32) }", - r#"struct One(pub u32, pub u32); - -enum A { One(One) }"#, - ); - } - - #[test] - fn test_extract_struct_one_field() { - check_assist( - extract_struct_from_enum_variant, - "enum A { <|>One(u32) }", - r#"struct One(pub u32); - -enum A { One(One) }"#, - ); - } - - #[test] - fn test_extract_struct_pub_visibility() { - check_assist( - extract_struct_from_enum_variant, - "pub enum A { <|>One(u32, u32) }", - r#"pub struct One(pub u32, pub u32); - -pub enum A { One(One) }"#, - ); - } - - #[test] - fn 
test_extract_struct_with_complex_imports() { - check_assist( - extract_struct_from_enum_variant, - r#"mod my_mod { - fn another_fn() { - let m = my_other_mod::MyEnum::MyField(1, 1); - } - - pub mod my_other_mod { - fn another_fn() { - let m = MyEnum::MyField(1, 1); - } - - pub enum MyEnum { - <|>MyField(u8, u8), - } - } -} - -fn another_fn() { - let m = my_mod::my_other_mod::MyEnum::MyField(1, 1); -}"#, - r#"use my_mod::my_other_mod::MyField; - -mod my_mod { - use my_other_mod::MyField; - - fn another_fn() { - let m = my_other_mod::MyEnum::MyField(MyField(1, 1)); - } - - pub mod my_other_mod { - fn another_fn() { - let m = MyEnum::MyField(MyField(1, 1)); - } - - pub struct MyField(pub u8, pub u8); - - pub enum MyEnum { - MyField(MyField), - } - } -} - -fn another_fn() { - let m = my_mod::my_other_mod::MyEnum::MyField(MyField(1, 1)); -}"#, - ); - } - - fn check_not_applicable(ra_fixture: &str) { - let fixture = - format!("//- /main.rs crate:main deps:core\n{}\n{}", ra_fixture, FamousDefs::FIXTURE); - check_assist_not_applicable(extract_struct_from_enum_variant, &fixture) - } - - #[test] - fn test_extract_enum_not_applicable_for_element_with_no_fields() { - check_not_applicable("enum A { <|>One }"); - } - - #[test] - fn test_extract_enum_not_applicable_if_struct_exists() { - check_not_applicable( - r#"struct One; - enum A { <|>One(u8) }"#, - ); - } -} diff --git a/crates/ra_assists/src/handlers/extract_variable.rs b/crates/ra_assists/src/handlers/extract_variable.rs deleted file mode 100644 index cc62db0c44..0000000000 --- a/crates/ra_assists/src/handlers/extract_variable.rs +++ /dev/null @@ -1,588 +0,0 @@ -use ra_syntax::{ - ast::{self, AstNode}, - SyntaxKind::{ - BLOCK_EXPR, BREAK_EXPR, CLOSURE_EXPR, COMMENT, LOOP_EXPR, MATCH_ARM, PATH_EXPR, RETURN_EXPR, - }, - SyntaxNode, -}; -use stdx::format_to; -use test_utils::mark; - -use crate::{AssistContext, AssistId, AssistKind, Assists}; - -// Assist: extract_variable -// -// Extracts subexpression into a variable. 
-// -// ``` -// fn main() { -// <|>(1 + 2)<|> * 4; -// } -// ``` -// -> -// ``` -// fn main() { -// let $0var_name = (1 + 2); -// var_name * 4; -// } -// ``` -pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - if ctx.frange.range.is_empty() { - return None; - } - let node = ctx.covering_element(); - if node.kind() == COMMENT { - mark::hit!(extract_var_in_comment_is_not_applicable); - return None; - } - let to_extract = node.ancestors().find_map(valid_target_expr)?; - let anchor = Anchor::from(&to_extract)?; - let indent = anchor.syntax().prev_sibling_or_token()?.as_token()?.clone(); - let target = to_extract.syntax().text_range(); - acc.add( - AssistId("extract_variable", AssistKind::RefactorExtract), - "Extract into variable", - target, - move |edit| { - let field_shorthand = - match to_extract.syntax().parent().and_then(ast::RecordExprField::cast) { - Some(field) => field.name_ref(), - None => None, - }; - - let mut buf = String::new(); - - let var_name = match &field_shorthand { - Some(it) => it.to_string(), - None => "var_name".to_string(), - }; - let expr_range = match &field_shorthand { - Some(it) => it.syntax().text_range().cover(to_extract.syntax().text_range()), - None => to_extract.syntax().text_range(), - }; - - if let Anchor::WrapInBlock(_) = anchor { - format_to!(buf, "{{ let {} = ", var_name); - } else { - format_to!(buf, "let {} = ", var_name); - }; - format_to!(buf, "{}", to_extract.syntax()); - - if let Anchor::Replace(stmt) = anchor { - mark::hit!(test_extract_var_expr_stmt); - if stmt.semicolon_token().is_none() { - buf.push_str(";"); - } - match ctx.config.snippet_cap { - Some(cap) => { - let snip = buf - .replace(&format!("let {}", var_name), &format!("let $0{}", var_name)); - edit.replace_snippet(cap, expr_range, snip) - } - None => edit.replace(expr_range, buf), - } - return; - } - - buf.push_str(";"); - - // We want to maintain the indent level, - // but we do not want to duplicate possible - // extra 
newlines in the indent block - let text = indent.text(); - if text.starts_with('\n') { - buf.push_str("\n"); - buf.push_str(text.trim_start_matches('\n')); - } else { - buf.push_str(text); - } - - edit.replace(expr_range, var_name.clone()); - let offset = anchor.syntax().text_range().start(); - match ctx.config.snippet_cap { - Some(cap) => { - let snip = - buf.replace(&format!("let {}", var_name), &format!("let $0{}", var_name)); - edit.insert_snippet(cap, offset, snip) - } - None => edit.insert(offset, buf), - } - - if let Anchor::WrapInBlock(_) = anchor { - edit.insert(anchor.syntax().text_range().end(), " }"); - } - }, - ) -} - -/// Check whether the node is a valid expression which can be extracted to a variable. -/// In general that's true for any expression, but in some cases that would produce invalid code. -fn valid_target_expr(node: SyntaxNode) -> Option { - match node.kind() { - PATH_EXPR | LOOP_EXPR => None, - BREAK_EXPR => ast::BreakExpr::cast(node).and_then(|e| e.expr()), - RETURN_EXPR => ast::ReturnExpr::cast(node).and_then(|e| e.expr()), - BLOCK_EXPR => { - ast::BlockExpr::cast(node).filter(|it| it.is_standalone()).map(ast::Expr::from) - } - _ => ast::Expr::cast(node), - } -} - -enum Anchor { - Before(SyntaxNode), - Replace(ast::ExprStmt), - WrapInBlock(SyntaxNode), -} - -impl Anchor { - fn from(to_extract: &ast::Expr) -> Option { - to_extract.syntax().ancestors().find_map(|node| { - if let Some(expr) = - node.parent().and_then(ast::BlockExpr::cast).and_then(|it| it.expr()) - { - if expr.syntax() == &node { - mark::hit!(test_extract_var_last_expr); - return Some(Anchor::Before(node)); - } - } - - if let Some(parent) = node.parent() { - if parent.kind() == MATCH_ARM || parent.kind() == CLOSURE_EXPR { - return Some(Anchor::WrapInBlock(node)); - } - } - - if let Some(stmt) = ast::Stmt::cast(node.clone()) { - if let ast::Stmt::ExprStmt(stmt) = stmt { - if stmt.expr().as_ref() == Some(to_extract) { - return Some(Anchor::Replace(stmt)); - } - } - return 
Some(Anchor::Before(node)); - } - None - }) - } - - fn syntax(&self) -> &SyntaxNode { - match self { - Anchor::Before(it) | Anchor::WrapInBlock(it) => it, - Anchor::Replace(stmt) => stmt.syntax(), - } - } -} - -#[cfg(test)] -mod tests { - use test_utils::mark; - - use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target}; - - use super::*; - - #[test] - fn test_extract_var_simple() { - check_assist( - extract_variable, - r#" -fn foo() { - foo(<|>1 + 1<|>); -}"#, - r#" -fn foo() { - let $0var_name = 1 + 1; - foo(var_name); -}"#, - ); - } - - #[test] - fn extract_var_in_comment_is_not_applicable() { - mark::check!(extract_var_in_comment_is_not_applicable); - check_assist_not_applicable(extract_variable, "fn main() { 1 + /* <|>comment<|> */ 1; }"); - } - - #[test] - fn test_extract_var_expr_stmt() { - mark::check!(test_extract_var_expr_stmt); - check_assist( - extract_variable, - r#" -fn foo() { - <|>1 + 1<|>; -}"#, - r#" -fn foo() { - let $0var_name = 1 + 1; -}"#, - ); - check_assist( - extract_variable, - " -fn foo() { - <|>{ let x = 0; x }<|> - something_else(); -}", - " -fn foo() { - let $0var_name = { let x = 0; x }; - something_else(); -}", - ); - } - - #[test] - fn test_extract_var_part_of_expr_stmt() { - check_assist( - extract_variable, - " -fn foo() { - <|>1<|> + 1; -}", - " -fn foo() { - let $0var_name = 1; - var_name + 1; -}", - ); - } - - #[test] - fn test_extract_var_last_expr() { - mark::check!(test_extract_var_last_expr); - check_assist( - extract_variable, - r#" -fn foo() { - bar(<|>1 + 1<|>) -} -"#, - r#" -fn foo() { - let $0var_name = 1 + 1; - bar(var_name) -} -"#, - ); - check_assist( - extract_variable, - r#" -fn foo() { - <|>bar(1 + 1)<|> -} -"#, - r#" -fn foo() { - let $0var_name = bar(1 + 1); - var_name -} -"#, - ) - } - - #[test] - fn test_extract_var_in_match_arm_no_block() { - check_assist( - extract_variable, - " -fn main() { - let x = true; - let tuple = match x { - true => (<|>2 + 2<|>, true) - _ => (0, false) - 
}; -} -", - " -fn main() { - let x = true; - let tuple = match x { - true => { let $0var_name = 2 + 2; (var_name, true) } - _ => (0, false) - }; -} -", - ); - } - - #[test] - fn test_extract_var_in_match_arm_with_block() { - check_assist( - extract_variable, - " -fn main() { - let x = true; - let tuple = match x { - true => { - let y = 1; - (<|>2 + y<|>, true) - } - _ => (0, false) - }; -} -", - " -fn main() { - let x = true; - let tuple = match x { - true => { - let y = 1; - let $0var_name = 2 + y; - (var_name, true) - } - _ => (0, false) - }; -} -", - ); - } - - #[test] - fn test_extract_var_in_closure_no_block() { - check_assist( - extract_variable, - " -fn main() { - let lambda = |x: u32| <|>x * 2<|>; -} -", - " -fn main() { - let lambda = |x: u32| { let $0var_name = x * 2; var_name }; -} -", - ); - } - - #[test] - fn test_extract_var_in_closure_with_block() { - check_assist( - extract_variable, - " -fn main() { - let lambda = |x: u32| { <|>x * 2<|> }; -} -", - " -fn main() { - let lambda = |x: u32| { let $0var_name = x * 2; var_name }; -} -", - ); - } - - #[test] - fn test_extract_var_path_simple() { - check_assist( - extract_variable, - " -fn main() { - let o = <|>Some(true)<|>; -} -", - " -fn main() { - let $0var_name = Some(true); - let o = var_name; -} -", - ); - } - - #[test] - fn test_extract_var_path_method() { - check_assist( - extract_variable, - " -fn main() { - let v = <|>bar.foo()<|>; -} -", - " -fn main() { - let $0var_name = bar.foo(); - let v = var_name; -} -", - ); - } - - #[test] - fn test_extract_var_return() { - check_assist( - extract_variable, - " -fn foo() -> u32 { - <|>return 2 + 2<|>; -} -", - " -fn foo() -> u32 { - let $0var_name = 2 + 2; - return var_name; -} -", - ); - } - - #[test] - fn test_extract_var_does_not_add_extra_whitespace() { - check_assist( - extract_variable, - " -fn foo() -> u32 { - - - <|>return 2 + 2<|>; -} -", - " -fn foo() -> u32 { - - - let $0var_name = 2 + 2; - return var_name; -} -", - ); - - check_assist( - 
extract_variable, - " -fn foo() -> u32 { - - <|>return 2 + 2<|>; -} -", - " -fn foo() -> u32 { - - let $0var_name = 2 + 2; - return var_name; -} -", - ); - - check_assist( - extract_variable, - " -fn foo() -> u32 { - let foo = 1; - - // bar - - - <|>return 2 + 2<|>; -} -", - " -fn foo() -> u32 { - let foo = 1; - - // bar - - - let $0var_name = 2 + 2; - return var_name; -} -", - ); - } - - #[test] - fn test_extract_var_break() { - check_assist( - extract_variable, - " -fn main() { - let result = loop { - <|>break 2 + 2<|>; - }; -} -", - " -fn main() { - let result = loop { - let $0var_name = 2 + 2; - break var_name; - }; -} -", - ); - } - - #[test] - fn test_extract_var_for_cast() { - check_assist( - extract_variable, - " -fn main() { - let v = <|>0f32 as u32<|>; -} -", - " -fn main() { - let $0var_name = 0f32 as u32; - let v = var_name; -} -", - ); - } - - #[test] - fn extract_var_field_shorthand() { - check_assist( - extract_variable, - r#" -struct S { - foo: i32 -} - -fn main() { - S { foo: <|>1 + 1<|> } -} -"#, - r#" -struct S { - foo: i32 -} - -fn main() { - let $0foo = 1 + 1; - S { foo } -} -"#, - ) - } - - #[test] - fn test_extract_var_for_return_not_applicable() { - check_assist_not_applicable(extract_variable, "fn foo() { <|>return<|>; } "); - } - - #[test] - fn test_extract_var_for_break_not_applicable() { - check_assist_not_applicable(extract_variable, "fn main() { loop { <|>break<|>; }; }"); - } - - // FIXME: This is not quite correct, but good enough(tm) for the sorting heuristic - #[test] - fn extract_var_target() { - check_assist_target(extract_variable, "fn foo() -> u32 { <|>return 2 + 2<|>; }", "2 + 2"); - - check_assist_target( - extract_variable, - " -fn main() { - let x = true; - let tuple = match x { - true => (<|>2 + 2<|>, true) - _ => (0, false) - }; -} -", - "2 + 2", - ); - } -} diff --git a/crates/ra_assists/src/handlers/fill_match_arms.rs b/crates/ra_assists/src/handlers/fill_match_arms.rs deleted file mode 100644 index 
6698d1a27a..0000000000 --- a/crates/ra_assists/src/handlers/fill_match_arms.rs +++ /dev/null @@ -1,747 +0,0 @@ -use std::iter; - -use hir::{Adt, HasSource, ModuleDef, Semantics}; -use itertools::Itertools; -use ra_ide_db::RootDatabase; -use ra_syntax::ast::{self, make, AstNode, MatchArm, NameOwner, Pat}; -use test_utils::mark; - -use crate::{ - utils::{render_snippet, Cursor, FamousDefs}, - AssistContext, AssistId, AssistKind, Assists, -}; - -// Assist: fill_match_arms -// -// Adds missing clauses to a `match` expression. -// -// ``` -// enum Action { Move { distance: u32 }, Stop } -// -// fn handle(action: Action) { -// match action { -// <|> -// } -// } -// ``` -// -> -// ``` -// enum Action { Move { distance: u32 }, Stop } -// -// fn handle(action: Action) { -// match action { -// $0Action::Move { distance } => {} -// Action::Stop => {} -// } -// } -// ``` -pub(crate) fn fill_match_arms(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let match_expr = ctx.find_node_at_offset::()?; - let match_arm_list = match_expr.match_arm_list()?; - - let expr = match_expr.expr()?; - - let mut arms: Vec = match_arm_list.arms().collect(); - if arms.len() == 1 { - if let Some(Pat::WildcardPat(..)) = arms[0].pat() { - arms.clear(); - } - } - - let module = ctx.sema.scope(expr.syntax()).module()?; - - let missing_arms: Vec = if let Some(enum_def) = resolve_enum_def(&ctx.sema, &expr) { - let variants = enum_def.variants(ctx.db()); - - let mut variants = variants - .into_iter() - .filter_map(|variant| build_pat(ctx.db(), module, variant)) - .filter(|variant_pat| is_variant_missing(&mut arms, variant_pat)) - .map(|pat| make::match_arm(iter::once(pat), make::expr_empty_block())) - .collect::>(); - if Some(enum_def) == FamousDefs(&ctx.sema, module.krate()).core_option_Option() { - // Match `Some` variant first. 
- mark::hit!(option_order); - variants.reverse() - } - variants - } else if let Some(enum_defs) = resolve_tuple_of_enum_def(&ctx.sema, &expr) { - // Partial fill not currently supported for tuple of enums. - if !arms.is_empty() { - return None; - } - - // We do not currently support filling match arms for a tuple - // containing a single enum. - if enum_defs.len() < 2 { - return None; - } - - // When calculating the match arms for a tuple of enums, we want - // to create a match arm for each possible combination of enum - // values. The `multi_cartesian_product` method transforms - // Vec> into Vec<(EnumVariant, .., EnumVariant)> - // where each tuple represents a proposed match arm. - enum_defs - .into_iter() - .map(|enum_def| enum_def.variants(ctx.db())) - .multi_cartesian_product() - .map(|variants| { - let patterns = - variants.into_iter().filter_map(|variant| build_pat(ctx.db(), module, variant)); - ast::Pat::from(make::tuple_pat(patterns)) - }) - .filter(|variant_pat| is_variant_missing(&mut arms, variant_pat)) - .map(|pat| make::match_arm(iter::once(pat), make::expr_empty_block())) - .collect() - } else { - return None; - }; - - if missing_arms.is_empty() { - return None; - } - - let target = match_expr.syntax().text_range(); - acc.add( - AssistId("fill_match_arms", AssistKind::QuickFix), - "Fill match arms", - target, - |builder| { - let new_arm_list = match_arm_list.remove_placeholder(); - let n_old_arms = new_arm_list.arms().count(); - let new_arm_list = new_arm_list.append_arms(missing_arms); - let first_new_arm = new_arm_list.arms().nth(n_old_arms); - let old_range = match_arm_list.syntax().text_range(); - match (first_new_arm, ctx.config.snippet_cap) { - (Some(first_new_arm), Some(cap)) => { - let extend_lifetime; - let cursor = - match first_new_arm.syntax().descendants().find_map(ast::WildcardPat::cast) - { - Some(it) => { - extend_lifetime = it.syntax().clone(); - Cursor::Replace(&extend_lifetime) - } - None => 
Cursor::Before(first_new_arm.syntax()), - }; - let snippet = render_snippet(cap, new_arm_list.syntax(), cursor); - builder.replace_snippet(cap, old_range, snippet); - } - _ => builder.replace(old_range, new_arm_list.to_string()), - } - }, - ) -} - -fn is_variant_missing(existing_arms: &mut Vec, var: &Pat) -> bool { - existing_arms.iter().filter_map(|arm| arm.pat()).all(|pat| { - // Special casee OrPat as separate top-level pats - let top_level_pats: Vec = match pat { - Pat::OrPat(pats) => pats.pats().collect::>(), - _ => vec![pat], - }; - - !top_level_pats.iter().any(|pat| does_pat_match_variant(pat, var)) - }) -} - -fn does_pat_match_variant(pat: &Pat, var: &Pat) -> bool { - let first_node_text = |pat: &Pat| pat.syntax().first_child().map(|node| node.text()); - - let pat_head = match pat { - Pat::IdentPat(bind_pat) => { - if let Some(p) = bind_pat.pat() { - first_node_text(&p) - } else { - return false; - } - } - pat => first_node_text(pat), - }; - - let var_head = first_node_text(var); - - pat_head == var_head -} - -fn resolve_enum_def(sema: &Semantics, expr: &ast::Expr) -> Option { - sema.type_of_expr(&expr)?.autoderef(sema.db).find_map(|ty| match ty.as_adt() { - Some(Adt::Enum(e)) => Some(e), - _ => None, - }) -} - -fn resolve_tuple_of_enum_def( - sema: &Semantics, - expr: &ast::Expr, -) -> Option> { - sema.type_of_expr(&expr)? - .tuple_fields(sema.db) - .iter() - .map(|ty| { - ty.autoderef(sema.db).find_map(|ty| match ty.as_adt() { - Some(Adt::Enum(e)) => Some(e), - // For now we only handle expansion for a tuple of enums. Here - // we map non-enum items to None and rely on `collect` to - // convert Vec> into Option>. - _ => None, - }) - }) - .collect() -} - -fn build_pat(db: &RootDatabase, module: hir::Module, var: hir::EnumVariant) -> Option { - let path = crate::ast_transform::path_to_ast(module.find_use_path(db, ModuleDef::from(var))?); - - // FIXME: use HIR for this; it doesn't currently expose struct vs. tuple vs. 
unit variants though - let pat: ast::Pat = match var.source(db).value.kind() { - ast::StructKind::Tuple(field_list) => { - let pats = iter::repeat(make::wildcard_pat().into()).take(field_list.fields().count()); - make::tuple_struct_pat(path, pats).into() - } - ast::StructKind::Record(field_list) => { - let pats = field_list.fields().map(|f| make::ident_pat(f.name().unwrap()).into()); - make::record_pat(path, pats).into() - } - ast::StructKind::Unit => make::path_pat(path), - }; - - Some(pat) -} - -#[cfg(test)] -mod tests { - use test_utils::mark; - - use crate::{ - tests::{check_assist, check_assist_not_applicable, check_assist_target}, - utils::FamousDefs, - }; - - use super::fill_match_arms; - - #[test] - fn all_match_arms_provided() { - check_assist_not_applicable( - fill_match_arms, - r#" - enum A { - As, - Bs{x:i32, y:Option}, - Cs(i32, Option), - } - fn main() { - match A::As<|> { - A::As, - A::Bs{x,y:Some(_)} => {} - A::Cs(_, Some(_)) => {} - } - } - "#, - ); - } - - #[test] - fn tuple_of_non_enum() { - // for now this case is not handled, although it potentially could be - // in the future - check_assist_not_applicable( - fill_match_arms, - r#" - fn main() { - match (0, false)<|> { - } - } - "#, - ); - } - - #[test] - fn partial_fill_record_tuple() { - check_assist( - fill_match_arms, - r#" - enum A { - As, - Bs { x: i32, y: Option }, - Cs(i32, Option), - } - fn main() { - match A::As<|> { - A::Bs { x, y: Some(_) } => {} - A::Cs(_, Some(_)) => {} - } - } - "#, - r#" - enum A { - As, - Bs { x: i32, y: Option }, - Cs(i32, Option), - } - fn main() { - match A::As { - A::Bs { x, y: Some(_) } => {} - A::Cs(_, Some(_)) => {} - $0A::As => {} - } - } - "#, - ); - } - - #[test] - fn partial_fill_or_pat() { - check_assist( - fill_match_arms, - r#" -enum A { As, Bs, Cs(Option) } -fn main() { - match A::As<|> { - A::Cs(_) | A::Bs => {} - } -} -"#, - r#" -enum A { As, Bs, Cs(Option) } -fn main() { - match A::As { - A::Cs(_) | A::Bs => {} - $0A::As => {} - } -} -"#, - ); 
- } - - #[test] - fn partial_fill() { - check_assist( - fill_match_arms, - r#" -enum A { As, Bs, Cs, Ds(String), Es(B) } -enum B { Xs, Ys } -fn main() { - match A::As<|> { - A::Bs if 0 < 1 => {} - A::Ds(_value) => { let x = 1; } - A::Es(B::Xs) => (), - } -} -"#, - r#" -enum A { As, Bs, Cs, Ds(String), Es(B) } -enum B { Xs, Ys } -fn main() { - match A::As { - A::Bs if 0 < 1 => {} - A::Ds(_value) => { let x = 1; } - A::Es(B::Xs) => (), - $0A::As => {} - A::Cs => {} - } -} -"#, - ); - } - - #[test] - fn partial_fill_bind_pat() { - check_assist( - fill_match_arms, - r#" -enum A { As, Bs, Cs(Option) } -fn main() { - match A::As<|> { - A::As(_) => {} - a @ A::Bs(_) => {} - } -} -"#, - r#" -enum A { As, Bs, Cs(Option) } -fn main() { - match A::As { - A::As(_) => {} - a @ A::Bs(_) => {} - A::Cs(${0:_}) => {} - } -} -"#, - ); - } - - #[test] - fn fill_match_arms_empty_body() { - check_assist( - fill_match_arms, - r#" -enum A { As, Bs, Cs(String), Ds(String, String), Es { x: usize, y: usize } } - -fn main() { - let a = A::As; - match a<|> {} -} -"#, - r#" -enum A { As, Bs, Cs(String), Ds(String, String), Es { x: usize, y: usize } } - -fn main() { - let a = A::As; - match a { - $0A::As => {} - A::Bs => {} - A::Cs(_) => {} - A::Ds(_, _) => {} - A::Es { x, y } => {} - } -} -"#, - ); - } - - #[test] - fn fill_match_arms_tuple_of_enum() { - check_assist( - fill_match_arms, - r#" - enum A { One, Two } - enum B { One, Two } - - fn main() { - let a = A::One; - let b = B::One; - match (a<|>, b) {} - } - "#, - r#" - enum A { One, Two } - enum B { One, Two } - - fn main() { - let a = A::One; - let b = B::One; - match (a, b) { - $0(A::One, B::One) => {} - (A::One, B::Two) => {} - (A::Two, B::One) => {} - (A::Two, B::Two) => {} - } - } - "#, - ); - } - - #[test] - fn fill_match_arms_tuple_of_enum_ref() { - check_assist( - fill_match_arms, - r#" - enum A { One, Two } - enum B { One, Two } - - fn main() { - let a = A::One; - let b = B::One; - match (&a<|>, &b) {} - } - "#, - r#" - enum A { 
One, Two } - enum B { One, Two } - - fn main() { - let a = A::One; - let b = B::One; - match (&a, &b) { - $0(A::One, B::One) => {} - (A::One, B::Two) => {} - (A::Two, B::One) => {} - (A::Two, B::Two) => {} - } - } - "#, - ); - } - - #[test] - fn fill_match_arms_tuple_of_enum_partial() { - check_assist_not_applicable( - fill_match_arms, - r#" - enum A { One, Two } - enum B { One, Two } - - fn main() { - let a = A::One; - let b = B::One; - match (a<|>, b) { - (A::Two, B::One) => {} - } - } - "#, - ); - } - - #[test] - fn fill_match_arms_tuple_of_enum_not_applicable() { - check_assist_not_applicable( - fill_match_arms, - r#" - enum A { One, Two } - enum B { One, Two } - - fn main() { - let a = A::One; - let b = B::One; - match (a<|>, b) { - (A::Two, B::One) => {} - (A::One, B::One) => {} - (A::One, B::Two) => {} - (A::Two, B::Two) => {} - } - } - "#, - ); - } - - #[test] - fn fill_match_arms_single_element_tuple_of_enum() { - // For now we don't hande the case of a single element tuple, but - // we could handle this in the future if `make::tuple_pat` allowed - // creating a tuple with a single pattern. 
- check_assist_not_applicable( - fill_match_arms, - r#" - enum A { One, Two } - - fn main() { - let a = A::One; - match (a<|>, ) { - } - } - "#, - ); - } - - #[test] - fn test_fill_match_arm_refs() { - check_assist( - fill_match_arms, - r#" - enum A { As } - - fn foo(a: &A) { - match a<|> { - } - } - "#, - r#" - enum A { As } - - fn foo(a: &A) { - match a { - $0A::As => {} - } - } - "#, - ); - - check_assist( - fill_match_arms, - r#" - enum A { - Es { x: usize, y: usize } - } - - fn foo(a: &mut A) { - match a<|> { - } - } - "#, - r#" - enum A { - Es { x: usize, y: usize } - } - - fn foo(a: &mut A) { - match a { - $0A::Es { x, y } => {} - } - } - "#, - ); - } - - #[test] - fn fill_match_arms_target() { - check_assist_target( - fill_match_arms, - r#" - enum E { X, Y } - - fn main() { - match E::X<|> {} - } - "#, - "match E::X {}", - ); - } - - #[test] - fn fill_match_arms_trivial_arm() { - check_assist( - fill_match_arms, - r#" - enum E { X, Y } - - fn main() { - match E::X { - <|>_ => {} - } - } - "#, - r#" - enum E { X, Y } - - fn main() { - match E::X { - $0E::X => {} - E::Y => {} - } - } - "#, - ); - } - - #[test] - fn fill_match_arms_qualifies_path() { - check_assist( - fill_match_arms, - r#" - mod foo { pub enum E { X, Y } } - use foo::E::X; - - fn main() { - match X { - <|> - } - } - "#, - r#" - mod foo { pub enum E { X, Y } } - use foo::E::X; - - fn main() { - match X { - $0X => {} - foo::E::Y => {} - } - } - "#, - ); - } - - #[test] - fn fill_match_arms_preserves_comments() { - check_assist( - fill_match_arms, - r#" - enum A { One, Two } - fn foo(a: A) { - match a { - // foo bar baz<|> - A::One => {} - // This is where the rest should be - } - } - "#, - r#" - enum A { One, Two } - fn foo(a: A) { - match a { - // foo bar baz - A::One => {} - // This is where the rest should be - $0A::Two => {} - } - } - "#, - ); - } - - #[test] - fn fill_match_arms_preserves_comments_empty() { - check_assist( - fill_match_arms, - r#" - enum A { One, Two } - fn foo(a: A) { - 
match a { - // foo bar baz<|> - } - } - "#, - r#" - enum A { One, Two } - fn foo(a: A) { - match a { - // foo bar baz - $0A::One => {} - A::Two => {} - } - } - "#, - ); - } - - #[test] - fn fill_match_arms_placeholder() { - check_assist( - fill_match_arms, - r#" - enum A { One, Two, } - fn foo(a: A) { - match a<|> { - _ => (), - } - } - "#, - r#" - enum A { One, Two, } - fn foo(a: A) { - match a { - $0A::One => {} - A::Two => {} - } - } - "#, - ); - } - - #[test] - fn option_order() { - mark::check!(option_order); - let before = r#" -fn foo(opt: Option) { - match opt<|> { - } -} -"#; - let before = &format!("//- /main.rs crate:main deps:core{}{}", before, FamousDefs::FIXTURE); - - check_assist( - fill_match_arms, - before, - r#" -fn foo(opt: Option) { - match opt { - Some(${0:_}) => {} - None => {} - } -} -"#, - ); - } -} diff --git a/crates/ra_assists/src/handlers/fix_visibility.rs b/crates/ra_assists/src/handlers/fix_visibility.rs deleted file mode 100644 index 1aefa79cc3..0000000000 --- a/crates/ra_assists/src/handlers/fix_visibility.rs +++ /dev/null @@ -1,607 +0,0 @@ -use hir::{db::HirDatabase, HasSource, HasVisibility, PathResolution}; -use ra_db::FileId; -use ra_syntax::{ast, AstNode, TextRange, TextSize}; - -use crate::{utils::vis_offset, AssistContext, AssistId, AssistKind, Assists}; -use ast::VisibilityOwner; - -// FIXME: this really should be a fix for diagnostic, rather than an assist. - -// Assist: fix_visibility -// -// Makes inaccessible item public. 
-// -// ``` -// mod m { -// fn frobnicate() {} -// } -// fn main() { -// m::frobnicate<|>() {} -// } -// ``` -// -> -// ``` -// mod m { -// $0pub(crate) fn frobnicate() {} -// } -// fn main() { -// m::frobnicate() {} -// } -// ``` -pub(crate) fn fix_visibility(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - add_vis_to_referenced_module_def(acc, ctx) - .or_else(|| add_vis_to_referenced_record_field(acc, ctx)) -} - -fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let path: ast::Path = ctx.find_node_at_offset()?; - let path_res = ctx.sema.resolve_path(&path)?; - let def = match path_res { - PathResolution::Def(def) => def, - _ => return None, - }; - - let current_module = ctx.sema.scope(&path.syntax()).module()?; - let target_module = def.module(ctx.db())?; - - let vis = target_module.visibility_of(ctx.db(), &def)?; - if vis.is_visible_from(ctx.db(), current_module.into()) { - return None; - }; - - let (offset, current_visibility, target, target_file, target_name) = - target_data_for_def(ctx.db(), def)?; - - let missing_visibility = - if current_module.krate() == target_module.krate() { "pub(crate)" } else { "pub" }; - - let assist_label = match target_name { - None => format!("Change visibility to {}", missing_visibility), - Some(name) => format!("Change visibility of {} to {}", name, missing_visibility), - }; - - acc.add(AssistId("fix_visibility", AssistKind::QuickFix), assist_label, target, |builder| { - builder.edit_file(target_file); - match ctx.config.snippet_cap { - Some(cap) => match current_visibility { - Some(current_visibility) => builder.replace_snippet( - cap, - current_visibility.syntax().text_range(), - format!("$0{}", missing_visibility), - ), - None => builder.insert_snippet(cap, offset, format!("$0{} ", missing_visibility)), - }, - None => match current_visibility { - Some(current_visibility) => { - builder.replace(current_visibility.syntax().text_range(), missing_visibility) - } - None => 
builder.insert(offset, format!("{} ", missing_visibility)), - }, - } - }) -} - -fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let record_field: ast::RecordExprField = ctx.find_node_at_offset()?; - let (record_field_def, _) = ctx.sema.resolve_record_field(&record_field)?; - - let current_module = ctx.sema.scope(record_field.syntax()).module()?; - let visibility = record_field_def.visibility(ctx.db()); - if visibility.is_visible_from(ctx.db(), current_module.into()) { - return None; - } - - let parent = record_field_def.parent_def(ctx.db()); - let parent_name = parent.name(ctx.db()); - let target_module = parent.module(ctx.db()); - - let in_file_source = record_field_def.source(ctx.db()); - let (offset, current_visibility, target) = match in_file_source.value { - hir::FieldSource::Named(it) => { - let s = it.syntax(); - (vis_offset(s), it.visibility(), s.text_range()) - } - hir::FieldSource::Pos(it) => { - let s = it.syntax(); - (vis_offset(s), it.visibility(), s.text_range()) - } - }; - - let missing_visibility = - if current_module.krate() == target_module.krate() { "pub(crate)" } else { "pub" }; - let target_file = in_file_source.file_id.original_file(ctx.db()); - - let target_name = record_field_def.name(ctx.db()); - let assist_label = - format!("Change visibility of {}.{} to {}", parent_name, target_name, missing_visibility); - - acc.add(AssistId("fix_visibility", AssistKind::QuickFix), assist_label, target, |builder| { - builder.edit_file(target_file); - match ctx.config.snippet_cap { - Some(cap) => match current_visibility { - Some(current_visibility) => builder.replace_snippet( - cap, - dbg!(current_visibility.syntax()).text_range(), - format!("$0{}", missing_visibility), - ), - None => builder.insert_snippet(cap, offset, format!("$0{} ", missing_visibility)), - }, - None => match current_visibility { - Some(current_visibility) => { - builder.replace(current_visibility.syntax().text_range(), 
missing_visibility) - } - None => builder.insert(offset, format!("{} ", missing_visibility)), - }, - } - }) -} - -fn target_data_for_def( - db: &dyn HirDatabase, - def: hir::ModuleDef, -) -> Option<(TextSize, Option, TextRange, FileId, Option)> { - fn offset_target_and_file_id( - db: &dyn HirDatabase, - x: S, - ) -> (TextSize, Option, TextRange, FileId) - where - S: HasSource, - Ast: AstNode + ast::VisibilityOwner, - { - let source = x.source(db); - let in_file_syntax = source.syntax(); - let file_id = in_file_syntax.file_id; - let syntax = in_file_syntax.value; - let current_visibility = source.value.visibility(); - ( - vis_offset(syntax), - current_visibility, - syntax.text_range(), - file_id.original_file(db.upcast()), - ) - } - - let target_name; - let (offset, current_visibility, target, target_file) = match def { - hir::ModuleDef::Function(f) => { - target_name = Some(f.name(db)); - offset_target_and_file_id(db, f) - } - hir::ModuleDef::Adt(adt) => { - target_name = Some(adt.name(db)); - match adt { - hir::Adt::Struct(s) => offset_target_and_file_id(db, s), - hir::Adt::Union(u) => offset_target_and_file_id(db, u), - hir::Adt::Enum(e) => offset_target_and_file_id(db, e), - } - } - hir::ModuleDef::Const(c) => { - target_name = c.name(db); - offset_target_and_file_id(db, c) - } - hir::ModuleDef::Static(s) => { - target_name = s.name(db); - offset_target_and_file_id(db, s) - } - hir::ModuleDef::Trait(t) => { - target_name = Some(t.name(db)); - offset_target_and_file_id(db, t) - } - hir::ModuleDef::TypeAlias(t) => { - target_name = Some(t.name(db)); - offset_target_and_file_id(db, t) - } - hir::ModuleDef::Module(m) => { - target_name = m.name(db); - let in_file_source = m.declaration_source(db)?; - let file_id = in_file_source.file_id.original_file(db.upcast()); - let syntax = in_file_source.value.syntax(); - (vis_offset(syntax), in_file_source.value.visibility(), syntax.text_range(), file_id) - } - // Enum variants can't be private, we can't modify builtin types 
- hir::ModuleDef::EnumVariant(_) | hir::ModuleDef::BuiltinType(_) => return None, - }; - - Some((offset, current_visibility, target, target_file, target_name)) -} - -#[cfg(test)] -mod tests { - use crate::tests::{check_assist, check_assist_not_applicable}; - - use super::*; - - #[test] - fn fix_visibility_of_fn() { - check_assist( - fix_visibility, - r"mod foo { fn foo() {} } - fn main() { foo::foo<|>() } ", - r"mod foo { $0pub(crate) fn foo() {} } - fn main() { foo::foo() } ", - ); - check_assist_not_applicable( - fix_visibility, - r"mod foo { pub fn foo() {} } - fn main() { foo::foo<|>() } ", - ) - } - - #[test] - fn fix_visibility_of_adt_in_submodule() { - check_assist( - fix_visibility, - r"mod foo { struct Foo; } - fn main() { foo::Foo<|> } ", - r"mod foo { $0pub(crate) struct Foo; } - fn main() { foo::Foo } ", - ); - check_assist_not_applicable( - fix_visibility, - r"mod foo { pub struct Foo; } - fn main() { foo::Foo<|> } ", - ); - check_assist( - fix_visibility, - r"mod foo { enum Foo; } - fn main() { foo::Foo<|> } ", - r"mod foo { $0pub(crate) enum Foo; } - fn main() { foo::Foo } ", - ); - check_assist_not_applicable( - fix_visibility, - r"mod foo { pub enum Foo; } - fn main() { foo::Foo<|> } ", - ); - check_assist( - fix_visibility, - r"mod foo { union Foo; } - fn main() { foo::Foo<|> } ", - r"mod foo { $0pub(crate) union Foo; } - fn main() { foo::Foo } ", - ); - check_assist_not_applicable( - fix_visibility, - r"mod foo { pub union Foo; } - fn main() { foo::Foo<|> } ", - ); - } - - #[test] - fn fix_visibility_of_adt_in_other_file() { - check_assist( - fix_visibility, - r" -//- /main.rs -mod foo; -fn main() { foo::Foo<|> } - -//- /foo.rs -struct Foo; -", - r"$0pub(crate) struct Foo; -", - ); - } - - #[test] - fn fix_visibility_of_struct_field() { - check_assist( - fix_visibility, - r"mod foo { pub struct Foo { bar: (), } } - fn main() { foo::Foo { <|>bar: () }; } ", - r"mod foo { pub struct Foo { $0pub(crate) bar: (), } } - fn main() { foo::Foo { bar: () 
}; } ", - ); - check_assist( - fix_visibility, - r" -//- /lib.rs -mod foo; -fn main() { foo::Foo { <|>bar: () }; } -//- /foo.rs -pub struct Foo { bar: () } -", - r"pub struct Foo { $0pub(crate) bar: () } -", - ); - check_assist_not_applicable( - fix_visibility, - r"mod foo { pub struct Foo { pub bar: (), } } - fn main() { foo::Foo { <|>bar: () }; } ", - ); - check_assist_not_applicable( - fix_visibility, - r" -//- /lib.rs -mod foo; -fn main() { foo::Foo { <|>bar: () }; } -//- /foo.rs -pub struct Foo { pub bar: () } -", - ); - } - - #[test] - fn fix_visibility_of_enum_variant_field() { - check_assist( - fix_visibility, - r"mod foo { pub enum Foo { Bar { bar: () } } } - fn main() { foo::Foo::Bar { <|>bar: () }; } ", - r"mod foo { pub enum Foo { Bar { $0pub(crate) bar: () } } } - fn main() { foo::Foo::Bar { bar: () }; } ", - ); - check_assist( - fix_visibility, - r" -//- /lib.rs -mod foo; -fn main() { foo::Foo::Bar { <|>bar: () }; } -//- /foo.rs -pub enum Foo { Bar { bar: () } } -", - r"pub enum Foo { Bar { $0pub(crate) bar: () } } -", - ); - check_assist_not_applicable( - fix_visibility, - r"mod foo { pub struct Foo { pub bar: (), } } - fn main() { foo::Foo { <|>bar: () }; } ", - ); - check_assist_not_applicable( - fix_visibility, - r" -//- /lib.rs -mod foo; -fn main() { foo::Foo { <|>bar: () }; } -//- /foo.rs -pub struct Foo { pub bar: () } -", - ); - } - - #[test] - #[ignore] - // FIXME reenable this test when `Semantics::resolve_record_field` works with union fields - fn fix_visibility_of_union_field() { - check_assist( - fix_visibility, - r"mod foo { pub union Foo { bar: (), } } - fn main() { foo::Foo { <|>bar: () }; } ", - r"mod foo { pub union Foo { $0pub(crate) bar: (), } } - fn main() { foo::Foo { bar: () }; } ", - ); - check_assist( - fix_visibility, - r" -//- /lib.rs -mod foo; -fn main() { foo::Foo { <|>bar: () }; } -//- /foo.rs -pub union Foo { bar: () } -", - r"pub union Foo { $0pub(crate) bar: () } -", - ); - check_assist_not_applicable( - 
fix_visibility, - r"mod foo { pub union Foo { pub bar: (), } } - fn main() { foo::Foo { <|>bar: () }; } ", - ); - check_assist_not_applicable( - fix_visibility, - r" -//- /lib.rs -mod foo; -fn main() { foo::Foo { <|>bar: () }; } -//- /foo.rs -pub union Foo { pub bar: () } -", - ); - } - - #[test] - fn fix_visibility_of_const() { - check_assist( - fix_visibility, - r"mod foo { const FOO: () = (); } - fn main() { foo::FOO<|> } ", - r"mod foo { $0pub(crate) const FOO: () = (); } - fn main() { foo::FOO } ", - ); - check_assist_not_applicable( - fix_visibility, - r"mod foo { pub const FOO: () = (); } - fn main() { foo::FOO<|> } ", - ); - } - - #[test] - fn fix_visibility_of_static() { - check_assist( - fix_visibility, - r"mod foo { static FOO: () = (); } - fn main() { foo::FOO<|> } ", - r"mod foo { $0pub(crate) static FOO: () = (); } - fn main() { foo::FOO } ", - ); - check_assist_not_applicable( - fix_visibility, - r"mod foo { pub static FOO: () = (); } - fn main() { foo::FOO<|> } ", - ); - } - - #[test] - fn fix_visibility_of_trait() { - check_assist( - fix_visibility, - r"mod foo { trait Foo { fn foo(&self) {} } } - fn main() { let x: &dyn foo::<|>Foo; } ", - r"mod foo { $0pub(crate) trait Foo { fn foo(&self) {} } } - fn main() { let x: &dyn foo::Foo; } ", - ); - check_assist_not_applicable( - fix_visibility, - r"mod foo { pub trait Foo { fn foo(&self) {} } } - fn main() { let x: &dyn foo::Foo<|>; } ", - ); - } - - #[test] - fn fix_visibility_of_type_alias() { - check_assist( - fix_visibility, - r"mod foo { type Foo = (); } - fn main() { let x: foo::Foo<|>; } ", - r"mod foo { $0pub(crate) type Foo = (); } - fn main() { let x: foo::Foo; } ", - ); - check_assist_not_applicable( - fix_visibility, - r"mod foo { pub type Foo = (); } - fn main() { let x: foo::Foo<|>; } ", - ); - } - - #[test] - fn fix_visibility_of_module() { - check_assist( - fix_visibility, - r"mod foo { mod bar { fn bar() {} } } - fn main() { foo::bar<|>::bar(); } ", - r"mod foo { $0pub(crate) mod bar { 
fn bar() {} } } - fn main() { foo::bar::bar(); } ", - ); - - check_assist( - fix_visibility, - r" -//- /main.rs -mod foo; -fn main() { foo::bar<|>::baz(); } - -//- /foo.rs -mod bar { - pub fn baz() {} -} -", - r"$0pub(crate) mod bar { - pub fn baz() {} -} -", - ); - - check_assist_not_applicable( - fix_visibility, - r"mod foo { pub mod bar { pub fn bar() {} } } - fn main() { foo::bar<|>::bar(); } ", - ); - } - - #[test] - fn fix_visibility_of_inline_module_in_other_file() { - check_assist( - fix_visibility, - r" -//- /main.rs -mod foo; -fn main() { foo::bar<|>::baz(); } - -//- /foo.rs -mod bar; -//- /foo/bar.rs -pub fn baz() {} -", - r"$0pub(crate) mod bar; -", - ); - } - - #[test] - fn fix_visibility_of_module_declaration_in_other_file() { - check_assist( - fix_visibility, - r" -//- /main.rs -mod foo; -fn main() { foo::bar<|>>::baz(); } - -//- /foo.rs -mod bar { - pub fn baz() {} -} -", - r"$0pub(crate) mod bar { - pub fn baz() {} -} -", - ); - } - - #[test] - fn adds_pub_when_target_is_in_another_crate() { - check_assist( - fix_visibility, - r" -//- /main.rs crate:a deps:foo -foo::Bar<|> -//- /lib.rs crate:foo -struct Bar; -", - r"$0pub struct Bar; -", - ) - } - - #[test] - fn replaces_pub_crate_with_pub() { - check_assist( - fix_visibility, - r" -//- /main.rs crate:a deps:foo -foo::Bar<|> -//- /lib.rs crate:foo -pub(crate) struct Bar; -", - r"$0pub struct Bar; -", - ); - check_assist( - fix_visibility, - r" -//- /main.rs crate:a deps:foo -fn main() { - foo::Foo { <|>bar: () }; -} -//- /lib.rs crate:foo -pub struct Foo { pub(crate) bar: () } -", - r"pub struct Foo { $0pub bar: () } -", - ); - } - - #[test] - #[ignore] - // FIXME handle reexports properly - fn fix_visibility_of_reexport() { - check_assist( - fix_visibility, - r" - mod foo { - use bar::Baz; - mod bar { pub(super) struct Baz; } - } - foo::Baz<|> - ", - r" - mod foo { - $0pub(crate) use bar::Baz; - mod bar { pub(super) struct Baz; } - } - foo::Baz - ", - ) - } -} diff --git 
a/crates/ra_assists/src/handlers/flip_binexpr.rs b/crates/ra_assists/src/handlers/flip_binexpr.rs deleted file mode 100644 index 3cd5326505..0000000000 --- a/crates/ra_assists/src/handlers/flip_binexpr.rs +++ /dev/null @@ -1,142 +0,0 @@ -use ra_syntax::ast::{AstNode, BinExpr, BinOp}; - -use crate::{AssistContext, AssistId, AssistKind, Assists}; - -// Assist: flip_binexpr -// -// Flips operands of a binary expression. -// -// ``` -// fn main() { -// let _ = 90 +<|> 2; -// } -// ``` -// -> -// ``` -// fn main() { -// let _ = 2 + 90; -// } -// ``` -pub(crate) fn flip_binexpr(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let expr = ctx.find_node_at_offset::()?; - let lhs = expr.lhs()?.syntax().clone(); - let rhs = expr.rhs()?.syntax().clone(); - let op_range = expr.op_token()?.text_range(); - // The assist should be applied only if the cursor is on the operator - let cursor_in_range = op_range.contains_range(ctx.frange.range); - if !cursor_in_range { - return None; - } - let action: FlipAction = expr.op_kind()?.into(); - // The assist should not be applied for certain operators - if let FlipAction::DontFlip = action { - return None; - } - - acc.add( - AssistId("flip_binexpr", AssistKind::RefactorRewrite), - "Flip binary expression", - op_range, - |edit| { - if let FlipAction::FlipAndReplaceOp(new_op) = action { - edit.replace(op_range, new_op); - } - edit.replace(lhs.text_range(), rhs.text()); - edit.replace(rhs.text_range(), lhs.text()); - }, - ) -} - -enum FlipAction { - // Flip the expression - Flip, - // Flip the expression and replace the operator with this string - FlipAndReplaceOp(&'static str), - // Do not flip the expression - DontFlip, -} - -impl From for FlipAction { - fn from(op_kind: BinOp) -> Self { - match op_kind { - kind if kind.is_assignment() => FlipAction::DontFlip, - BinOp::GreaterTest => FlipAction::FlipAndReplaceOp("<"), - BinOp::GreaterEqualTest => FlipAction::FlipAndReplaceOp("<="), - BinOp::LesserTest => 
FlipAction::FlipAndReplaceOp(">"), - BinOp::LesserEqualTest => FlipAction::FlipAndReplaceOp(">="), - _ => FlipAction::Flip, - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - - use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target}; - - #[test] - fn flip_binexpr_target_is_the_op() { - check_assist_target(flip_binexpr, "fn f() { let res = 1 ==<|> 2; }", "==") - } - - #[test] - fn flip_binexpr_not_applicable_for_assignment() { - check_assist_not_applicable(flip_binexpr, "fn f() { let mut _x = 1; _x +=<|> 2 }") - } - - #[test] - fn flip_binexpr_works_for_eq() { - check_assist( - flip_binexpr, - "fn f() { let res = 1 ==<|> 2; }", - "fn f() { let res = 2 == 1; }", - ) - } - - #[test] - fn flip_binexpr_works_for_gt() { - check_assist(flip_binexpr, "fn f() { let res = 1 ><|> 2; }", "fn f() { let res = 2 < 1; }") - } - - #[test] - fn flip_binexpr_works_for_lteq() { - check_assist( - flip_binexpr, - "fn f() { let res = 1 <=<|> 2; }", - "fn f() { let res = 2 >= 1; }", - ) - } - - #[test] - fn flip_binexpr_works_for_complex_expr() { - check_assist( - flip_binexpr, - "fn f() { let res = (1 + 1) ==<|> (2 + 2); }", - "fn f() { let res = (2 + 2) == (1 + 1); }", - ) - } - - #[test] - fn flip_binexpr_works_inside_match() { - check_assist( - flip_binexpr, - r#" - fn dyn_eq(&self, other: &dyn Diagnostic) -> bool { - match other.downcast_ref::() { - None => false, - Some(it) => it ==<|> self, - } - } - "#, - r#" - fn dyn_eq(&self, other: &dyn Diagnostic) -> bool { - match other.downcast_ref::() { - None => false, - Some(it) => self == it, - } - } - "#, - ) - } -} diff --git a/crates/ra_assists/src/handlers/flip_comma.rs b/crates/ra_assists/src/handlers/flip_comma.rs deleted file mode 100644 index 55a971dc77..0000000000 --- a/crates/ra_assists/src/handlers/flip_comma.rs +++ /dev/null @@ -1,84 +0,0 @@ -use ra_syntax::{algo::non_trivia_sibling, Direction, T}; - -use crate::{AssistContext, AssistId, AssistKind, Assists}; - -// Assist: flip_comma -// -// 
Flips two comma-separated items. -// -// ``` -// fn main() { -// ((1, 2),<|> (3, 4)); -// } -// ``` -// -> -// ``` -// fn main() { -// ((3, 4), (1, 2)); -// } -// ``` -pub(crate) fn flip_comma(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let comma = ctx.find_token_at_offset(T![,])?; - let prev = non_trivia_sibling(comma.clone().into(), Direction::Prev)?; - let next = non_trivia_sibling(comma.clone().into(), Direction::Next)?; - - // Don't apply a "flip" in case of a last comma - // that typically comes before punctuation - if next.kind().is_punct() { - return None; - } - - acc.add( - AssistId("flip_comma", AssistKind::RefactorRewrite), - "Flip comma", - comma.text_range(), - |edit| { - edit.replace(prev.text_range(), next.to_string()); - edit.replace(next.text_range(), prev.to_string()); - }, - ) -} - -#[cfg(test)] -mod tests { - use super::*; - - use crate::tests::{check_assist, check_assist_target}; - - #[test] - fn flip_comma_works_for_function_parameters() { - check_assist( - flip_comma, - "fn foo(x: i32,<|> y: Result<(), ()>) {}", - "fn foo(y: Result<(), ()>, x: i32) {}", - ) - } - - #[test] - fn flip_comma_target() { - check_assist_target(flip_comma, "fn foo(x: i32,<|> y: Result<(), ()>) {}", ",") - } - - #[test] - #[should_panic] - fn flip_comma_before_punct() { - // See https://github.com/rust-analyzer/rust-analyzer/issues/1619 - // "Flip comma" assist shouldn't be applicable to the last comma in enum or struct - // declaration body. 
- check_assist_target( - flip_comma, - "pub enum Test { \ - A,<|> \ - }", - ",", - ); - - check_assist_target( - flip_comma, - "pub struct Test { \ - foo: usize,<|> \ - }", - ",", - ); - } -} diff --git a/crates/ra_assists/src/handlers/flip_trait_bound.rs b/crates/ra_assists/src/handlers/flip_trait_bound.rs deleted file mode 100644 index 1234f4d296..0000000000 --- a/crates/ra_assists/src/handlers/flip_trait_bound.rs +++ /dev/null @@ -1,121 +0,0 @@ -use ra_syntax::{ - algo::non_trivia_sibling, - ast::{self, AstNode}, - Direction, T, -}; - -use crate::{AssistContext, AssistId, AssistKind, Assists}; - -// Assist: flip_trait_bound -// -// Flips two trait bounds. -// -// ``` -// fn foo Copy>() { } -// ``` -// -> -// ``` -// fn foo() { } -// ``` -pub(crate) fn flip_trait_bound(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - // We want to replicate the behavior of `flip_binexpr` by only suggesting - // the assist when the cursor is on a `+` - let plus = ctx.find_token_at_offset(T![+])?; - - // Make sure we're in a `TypeBoundList` - if ast::TypeBoundList::cast(plus.parent()).is_none() { - return None; - } - - let (before, after) = ( - non_trivia_sibling(plus.clone().into(), Direction::Prev)?, - non_trivia_sibling(plus.clone().into(), Direction::Next)?, - ); - - let target = plus.text_range(); - acc.add( - AssistId("flip_trait_bound", AssistKind::RefactorRewrite), - "Flip trait bounds", - target, - |edit| { - edit.replace(before.text_range(), after.to_string()); - edit.replace(after.text_range(), before.to_string()); - }, - ) -} - -#[cfg(test)] -mod tests { - use super::*; - - use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target}; - - #[test] - fn flip_trait_bound_assist_available() { - check_assist_target(flip_trait_bound, "struct S where T: A <|>+ B + C { }", "+") - } - - #[test] - fn flip_trait_bound_not_applicable_for_single_trait_bound() { - check_assist_not_applicable(flip_trait_bound, "struct S where T: <|>A { }") - } - - 
#[test] - fn flip_trait_bound_works_for_struct() { - check_assist( - flip_trait_bound, - "struct S where T: A <|>+ B { }", - "struct S where T: B + A { }", - ) - } - - #[test] - fn flip_trait_bound_works_for_trait_impl() { - check_assist( - flip_trait_bound, - "impl X for S where T: A +<|> B { }", - "impl X for S where T: B + A { }", - ) - } - - #[test] - fn flip_trait_bound_works_for_fn() { - check_assist(flip_trait_bound, "fn f+ B>(t: T) { }", "fn f(t: T) { }") - } - - #[test] - fn flip_trait_bound_works_for_fn_where_clause() { - check_assist( - flip_trait_bound, - "fn f(t: T) where T: A +<|> B { }", - "fn f(t: T) where T: B + A { }", - ) - } - - #[test] - fn flip_trait_bound_works_for_lifetime() { - check_assist( - flip_trait_bound, - "fn f(t: T) where T: A <|>+ 'static { }", - "fn f(t: T) where T: 'static + A { }", - ) - } - - #[test] - fn flip_trait_bound_works_for_complex_bounds() { - check_assist( - flip_trait_bound, - "struct S where T: A <|>+ b_mod::B + C { }", - "struct S where T: b_mod::B + A + C { }", - ) - } - - #[test] - fn flip_trait_bound_works_for_long_bounds() { - check_assist( - flip_trait_bound, - "struct S where T: A + B + C + D + E + F +<|> G + H + I + J { }", - "struct S where T: A + B + C + D + E + G + F + H + I + J { }", - ) - } -} diff --git a/crates/ra_assists/src/handlers/generate_derive.rs b/crates/ra_assists/src/handlers/generate_derive.rs deleted file mode 100644 index 90ece9fab0..0000000000 --- a/crates/ra_assists/src/handlers/generate_derive.rs +++ /dev/null @@ -1,132 +0,0 @@ -use ra_syntax::{ - ast::{self, AstNode, AttrsOwner}, - SyntaxKind::{COMMENT, WHITESPACE}, - TextSize, -}; - -use crate::{AssistContext, AssistId, AssistKind, Assists}; - -// Assist: generate_derive -// -// Adds a new `#[derive()]` clause to a struct or enum. 
-// -// ``` -// struct Point { -// x: u32, -// y: u32,<|> -// } -// ``` -// -> -// ``` -// #[derive($0)] -// struct Point { -// x: u32, -// y: u32, -// } -// ``` -pub(crate) fn generate_derive(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let cap = ctx.config.snippet_cap?; - let nominal = ctx.find_node_at_offset::()?; - let node_start = derive_insertion_offset(&nominal)?; - let target = nominal.syntax().text_range(); - acc.add( - AssistId("generate_derive", AssistKind::Generate), - "Add `#[derive]`", - target, - |builder| { - let derive_attr = nominal - .attrs() - .filter_map(|x| x.as_simple_call()) - .filter(|(name, _arg)| name == "derive") - .map(|(_name, arg)| arg) - .next(); - match derive_attr { - None => { - builder.insert_snippet(cap, node_start, "#[derive($0)]\n"); - } - Some(tt) => { - // Just move the cursor. - builder.insert_snippet( - cap, - tt.syntax().text_range().end() - TextSize::of(')'), - "$0", - ) - } - }; - }, - ) -} - -// Insert `derive` after doc comments. -fn derive_insertion_offset(nominal: &ast::AdtDef) -> Option { - let non_ws_child = nominal - .syntax() - .children_with_tokens() - .find(|it| it.kind() != COMMENT && it.kind() != WHITESPACE)?; - Some(non_ws_child.text_range().start()) -} - -#[cfg(test)] -mod tests { - use crate::tests::{check_assist, check_assist_target}; - - use super::*; - - #[test] - fn add_derive_new() { - check_assist( - generate_derive, - "struct Foo { a: i32, <|>}", - "#[derive($0)]\nstruct Foo { a: i32, }", - ); - check_assist( - generate_derive, - "struct Foo { <|> a: i32, }", - "#[derive($0)]\nstruct Foo { a: i32, }", - ); - } - - #[test] - fn add_derive_existing() { - check_assist( - generate_derive, - "#[derive(Clone)]\nstruct Foo { a: i32<|>, }", - "#[derive(Clone$0)]\nstruct Foo { a: i32, }", - ); - } - - #[test] - fn add_derive_new_with_doc_comment() { - check_assist( - generate_derive, - " -/// `Foo` is a pretty important struct. -/// It does stuff. 
-struct Foo { a: i32<|>, } - ", - " -/// `Foo` is a pretty important struct. -/// It does stuff. -#[derive($0)] -struct Foo { a: i32, } - ", - ); - } - - #[test] - fn add_derive_target() { - check_assist_target( - generate_derive, - " -struct SomeThingIrrelevant; -/// `Foo` is a pretty important struct. -/// It does stuff. -struct Foo { a: i32<|>, } -struct EvenMoreIrrelevant; - ", - "/// `Foo` is a pretty important struct. -/// It does stuff. -struct Foo { a: i32, }", - ); - } -} diff --git a/crates/ra_assists/src/handlers/generate_from_impl_for_enum.rs b/crates/ra_assists/src/handlers/generate_from_impl_for_enum.rs deleted file mode 100644 index 4c1aef8a21..0000000000 --- a/crates/ra_assists/src/handlers/generate_from_impl_for_enum.rs +++ /dev/null @@ -1,200 +0,0 @@ -use ra_ide_db::RootDatabase; -use ra_syntax::ast::{self, AstNode, NameOwner}; -use test_utils::mark; - -use crate::{utils::FamousDefs, AssistContext, AssistId, AssistKind, Assists}; - -// Assist: generate_from_impl_for_enum -// -// Adds a From impl for an enum variant with one tuple field. 
-// -// ``` -// enum A { <|>One(u32) } -// ``` -// -> -// ``` -// enum A { One(u32) } -// -// impl From for A { -// fn from(v: u32) -> Self { -// A::One(v) -// } -// } -// ``` -pub(crate) fn generate_from_impl_for_enum(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let variant = ctx.find_node_at_offset::()?; - let variant_name = variant.name()?; - let enum_name = variant.parent_enum().name()?; - let field_list = match variant.kind() { - ast::StructKind::Tuple(field_list) => field_list, - _ => return None, - }; - if field_list.fields().count() != 1 { - return None; - } - let field_type = field_list.fields().next()?.ty()?; - let path = match field_type { - ast::Type::PathType(it) => it, - _ => return None, - }; - - if existing_from_impl(&ctx.sema, &variant).is_some() { - mark::hit!(test_add_from_impl_already_exists); - return None; - } - - let target = variant.syntax().text_range(); - acc.add( - AssistId("generate_from_impl_for_enum", AssistKind::Generate), - "Generate `From` impl for this enum variant", - target, - |edit| { - let start_offset = variant.parent_enum().syntax().text_range().end(); - let buf = format!( - r#" - -impl From<{0}> for {1} {{ - fn from(v: {0}) -> Self {{ - {1}::{2}(v) - }} -}}"#, - path.syntax(), - enum_name, - variant_name - ); - edit.insert(start_offset, buf); - }, - ) -} - -fn existing_from_impl( - sema: &'_ hir::Semantics<'_, RootDatabase>, - variant: &ast::Variant, -) -> Option<()> { - let variant = sema.to_def(variant)?; - let enum_ = variant.parent_enum(sema.db); - let krate = enum_.module(sema.db).krate(); - - let from_trait = FamousDefs(sema, krate).core_convert_From()?; - - let enum_type = enum_.ty(sema.db); - - let wrapped_type = variant.fields(sema.db).get(0)?.signature_ty(sema.db); - - if enum_type.impls_trait(sema.db, from_trait, &[wrapped_type]) { - Some(()) - } else { - None - } -} - -#[cfg(test)] -mod tests { - use test_utils::mark; - - use crate::tests::{check_assist, check_assist_not_applicable}; - - use 
super::*; - - #[test] - fn test_generate_from_impl_for_enum() { - check_assist( - generate_from_impl_for_enum, - "enum A { <|>One(u32) }", - r#"enum A { One(u32) } - -impl From for A { - fn from(v: u32) -> Self { - A::One(v) - } -}"#, - ); - } - - #[test] - fn test_generate_from_impl_for_enum_complicated_path() { - check_assist( - generate_from_impl_for_enum, - r#"enum A { <|>One(foo::bar::baz::Boo) }"#, - r#"enum A { One(foo::bar::baz::Boo) } - -impl From for A { - fn from(v: foo::bar::baz::Boo) -> Self { - A::One(v) - } -}"#, - ); - } - - fn check_not_applicable(ra_fixture: &str) { - let fixture = - format!("//- /main.rs crate:main deps:core\n{}\n{}", ra_fixture, FamousDefs::FIXTURE); - check_assist_not_applicable(generate_from_impl_for_enum, &fixture) - } - - #[test] - fn test_add_from_impl_no_element() { - check_not_applicable("enum A { <|>One }"); - } - - #[test] - fn test_add_from_impl_more_than_one_element_in_tuple() { - check_not_applicable("enum A { <|>One(u32, String) }"); - } - - #[test] - fn test_add_from_impl_struct_variant() { - check_not_applicable("enum A { <|>One { x: u32 } }"); - } - - #[test] - fn test_add_from_impl_already_exists() { - mark::check!(test_add_from_impl_already_exists); - check_not_applicable( - r#" -enum A { <|>One(u32), } - -impl From for A { - fn from(v: u32) -> Self { - A::One(v) - } -} -"#, - ); - } - - #[test] - fn test_add_from_impl_different_variant_impl_exists() { - check_assist( - generate_from_impl_for_enum, - r#"enum A { <|>One(u32), Two(String), } - -impl From for A { - fn from(v: String) -> Self { - A::Two(v) - } -} - -pub trait From { - fn from(T) -> Self; -}"#, - r#"enum A { One(u32), Two(String), } - -impl From for A { - fn from(v: u32) -> Self { - A::One(v) - } -} - -impl From for A { - fn from(v: String) -> Self { - A::Two(v) - } -} - -pub trait From { - fn from(T) -> Self; -}"#, - ); - } -} diff --git a/crates/ra_assists/src/handlers/generate_function.rs b/crates/ra_assists/src/handlers/generate_function.rs 
deleted file mode 100644 index acc97e6482..0000000000 --- a/crates/ra_assists/src/handlers/generate_function.rs +++ /dev/null @@ -1,1058 +0,0 @@ -use hir::HirDisplay; -use ra_db::FileId; -use ra_syntax::{ - ast::{ - self, - edit::{AstNodeEdit, IndentLevel}, - make, ArgListOwner, AstNode, ModuleItemOwner, - }, - SyntaxKind, SyntaxNode, TextSize, -}; -use rustc_hash::{FxHashMap, FxHashSet}; - -use crate::{ - assist_config::SnippetCap, - utils::{render_snippet, Cursor}, - AssistContext, AssistId, AssistKind, Assists, -}; - -// Assist: generate_function -// -// Adds a stub function with a signature matching the function under the cursor. -// -// ``` -// struct Baz; -// fn baz() -> Baz { Baz } -// fn foo() { -// bar<|>("", baz()); -// } -// -// ``` -// -> -// ``` -// struct Baz; -// fn baz() -> Baz { Baz } -// fn foo() { -// bar("", baz()); -// } -// -// fn bar(arg: &str, baz: Baz) { -// ${0:todo!()} -// } -// -// ``` -pub(crate) fn generate_function(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let path_expr: ast::PathExpr = ctx.find_node_at_offset()?; - let call = path_expr.syntax().parent().and_then(ast::CallExpr::cast)?; - let path = path_expr.path()?; - - if ctx.sema.resolve_path(&path).is_some() { - // The function call already resolves, no need to add a function - return None; - } - - let target_module = match path.qualifier() { - Some(qualifier) => match ctx.sema.resolve_path(&qualifier) { - Some(hir::PathResolution::Def(hir::ModuleDef::Module(module))) => Some(module), - _ => return None, - }, - None => None, - }; - - let function_builder = FunctionBuilder::from_call(&ctx, &call, &path, target_module)?; - - let target = call.syntax().text_range(); - acc.add( - AssistId("generate_function", AssistKind::Generate), - format!("Generate `{}` function", function_builder.fn_name), - target, - |builder| { - let function_template = function_builder.render(); - builder.edit_file(function_template.file); - let new_fn = 
function_template.to_string(ctx.config.snippet_cap); - match ctx.config.snippet_cap { - Some(cap) => builder.insert_snippet(cap, function_template.insert_offset, new_fn), - None => builder.insert(function_template.insert_offset, new_fn), - } - }, - ) -} - -struct FunctionTemplate { - insert_offset: TextSize, - placeholder_expr: ast::MacroCall, - leading_ws: String, - fn_def: ast::Fn, - trailing_ws: String, - file: FileId, -} - -impl FunctionTemplate { - fn to_string(&self, cap: Option) -> String { - let f = match cap { - Some(cap) => render_snippet( - cap, - self.fn_def.syntax(), - Cursor::Replace(self.placeholder_expr.syntax()), - ), - None => self.fn_def.to_string(), - }; - format!("{}{}{}", self.leading_ws, f, self.trailing_ws) - } -} - -struct FunctionBuilder { - target: GeneratedFunctionTarget, - fn_name: ast::Name, - type_params: Option, - params: ast::ParamList, - file: FileId, - needs_pub: bool, -} - -impl FunctionBuilder { - /// Prepares a generated function that matches `call`. 
- /// The function is generated in `target_module` or next to `call` - fn from_call( - ctx: &AssistContext, - call: &ast::CallExpr, - path: &ast::Path, - target_module: Option, - ) -> Option { - let mut file = ctx.frange.file_id; - let target = match &target_module { - Some(target_module) => { - let module_source = target_module.definition_source(ctx.db()); - let (in_file, target) = next_space_for_fn_in_module(ctx.sema.db, &module_source)?; - file = in_file; - target - } - None => next_space_for_fn_after_call_site(&call)?, - }; - let needs_pub = target_module.is_some(); - let target_module = target_module.or_else(|| ctx.sema.scope(target.syntax()).module())?; - let fn_name = fn_name(&path)?; - let (type_params, params) = fn_args(ctx, target_module, &call)?; - - Some(Self { target, fn_name, type_params, params, file, needs_pub }) - } - - fn render(self) -> FunctionTemplate { - let placeholder_expr = make::expr_todo(); - let fn_body = make::block_expr(vec![], Some(placeholder_expr)); - let visibility = if self.needs_pub { Some(make::visibility_pub_crate()) } else { None }; - let mut fn_def = - make::fn_(visibility, self.fn_name, self.type_params, self.params, fn_body); - let leading_ws; - let trailing_ws; - - let insert_offset = match self.target { - GeneratedFunctionTarget::BehindItem(it) => { - let indent = IndentLevel::from_node(&it); - leading_ws = format!("\n\n{}", indent); - fn_def = fn_def.indent(indent); - trailing_ws = String::new(); - it.text_range().end() - } - GeneratedFunctionTarget::InEmptyItemList(it) => { - let indent = IndentLevel::from_node(it.syntax()); - leading_ws = format!("\n{}", indent + 1); - fn_def = fn_def.indent(indent + 1); - trailing_ws = format!("\n{}", indent); - it.syntax().text_range().start() + TextSize::of('{') - } - }; - - let placeholder_expr = - fn_def.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); - FunctionTemplate { - insert_offset, - placeholder_expr, - leading_ws, - fn_def, - trailing_ws, - file: self.file, 
- } - } -} - -enum GeneratedFunctionTarget { - BehindItem(SyntaxNode), - InEmptyItemList(ast::ItemList), -} - -impl GeneratedFunctionTarget { - fn syntax(&self) -> &SyntaxNode { - match self { - GeneratedFunctionTarget::BehindItem(it) => it, - GeneratedFunctionTarget::InEmptyItemList(it) => it.syntax(), - } - } -} - -fn fn_name(call: &ast::Path) -> Option { - let name = call.segment()?.syntax().to_string(); - Some(make::name(&name)) -} - -/// Computes the type variables and arguments required for the generated function -fn fn_args( - ctx: &AssistContext, - target_module: hir::Module, - call: &ast::CallExpr, -) -> Option<(Option, ast::ParamList)> { - let mut arg_names = Vec::new(); - let mut arg_types = Vec::new(); - for arg in call.arg_list()?.args() { - arg_names.push(match fn_arg_name(&arg) { - Some(name) => name, - None => String::from("arg"), - }); - arg_types.push(match fn_arg_type(ctx, target_module, &arg) { - Some(ty) => ty, - None => String::from("()"), - }); - } - deduplicate_arg_names(&mut arg_names); - let params = arg_names.into_iter().zip(arg_types).map(|(name, ty)| make::param(name, ty)); - Some((None, make::param_list(params))) -} - -/// Makes duplicate argument names unique by appending incrementing numbers. 
-/// -/// ``` -/// let mut names: Vec = -/// vec!["foo".into(), "foo".into(), "bar".into(), "baz".into(), "bar".into()]; -/// deduplicate_arg_names(&mut names); -/// let expected: Vec = -/// vec!["foo_1".into(), "foo_2".into(), "bar_1".into(), "baz".into(), "bar_2".into()]; -/// assert_eq!(names, expected); -/// ``` -fn deduplicate_arg_names(arg_names: &mut Vec) { - let arg_name_counts = arg_names.iter().fold(FxHashMap::default(), |mut m, name| { - *m.entry(name).or_insert(0) += 1; - m - }); - let duplicate_arg_names: FxHashSet = arg_name_counts - .into_iter() - .filter(|(_, count)| *count >= 2) - .map(|(name, _)| name.clone()) - .collect(); - - let mut counter_per_name = FxHashMap::default(); - for arg_name in arg_names.iter_mut() { - if duplicate_arg_names.contains(arg_name) { - let counter = counter_per_name.entry(arg_name.clone()).or_insert(1); - arg_name.push('_'); - arg_name.push_str(&counter.to_string()); - *counter += 1; - } - } -} - -fn fn_arg_name(fn_arg: &ast::Expr) -> Option { - match fn_arg { - ast::Expr::CastExpr(cast_expr) => fn_arg_name(&cast_expr.expr()?), - _ => Some( - fn_arg - .syntax() - .descendants() - .filter(|d| ast::NameRef::can_cast(d.kind())) - .last()? 
- .to_string(), - ), - } -} - -fn fn_arg_type( - ctx: &AssistContext, - target_module: hir::Module, - fn_arg: &ast::Expr, -) -> Option { - let ty = ctx.sema.type_of_expr(fn_arg)?; - if ty.is_unknown() { - return None; - } - - if let Ok(rendered) = ty.display_source_code(ctx.db(), target_module.into()) { - Some(rendered) - } else { - None - } -} - -/// Returns the position inside the current mod or file -/// directly after the current block -/// We want to write the generated function directly after -/// fns, impls or macro calls, but inside mods -fn next_space_for_fn_after_call_site(expr: &ast::CallExpr) -> Option { - let mut ancestors = expr.syntax().ancestors().peekable(); - let mut last_ancestor: Option = None; - while let Some(next_ancestor) = ancestors.next() { - match next_ancestor.kind() { - SyntaxKind::SOURCE_FILE => { - break; - } - SyntaxKind::ITEM_LIST => { - if ancestors.peek().map(|a| a.kind()) == Some(SyntaxKind::MODULE) { - break; - } - } - _ => {} - } - last_ancestor = Some(next_ancestor); - } - last_ancestor.map(GeneratedFunctionTarget::BehindItem) -} - -fn next_space_for_fn_in_module( - db: &dyn hir::db::AstDatabase, - module_source: &hir::InFile, -) -> Option<(FileId, GeneratedFunctionTarget)> { - let file = module_source.file_id.original_file(db); - let assist_item = match &module_source.value { - hir::ModuleSource::SourceFile(it) => { - if let Some(last_item) = it.items().last() { - GeneratedFunctionTarget::BehindItem(last_item.syntax().clone()) - } else { - GeneratedFunctionTarget::BehindItem(it.syntax().clone()) - } - } - hir::ModuleSource::Module(it) => { - if let Some(last_item) = it.item_list().and_then(|it| it.items().last()) { - GeneratedFunctionTarget::BehindItem(last_item.syntax().clone()) - } else { - GeneratedFunctionTarget::InEmptyItemList(it.item_list()?) 
- } - } - }; - Some((file, assist_item)) -} - -#[cfg(test)] -mod tests { - use crate::tests::{check_assist, check_assist_not_applicable}; - - use super::*; - - #[test] - fn add_function_with_no_args() { - check_assist( - generate_function, - r" -fn foo() { - bar<|>(); -} -", - r" -fn foo() { - bar(); -} - -fn bar() { - ${0:todo!()} -} -", - ) - } - - #[test] - fn add_function_from_method() { - // This ensures that the function is correctly generated - // in the next outer mod or file - check_assist( - generate_function, - r" -impl Foo { - fn foo() { - bar<|>(); - } -} -", - r" -impl Foo { - fn foo() { - bar(); - } -} - -fn bar() { - ${0:todo!()} -} -", - ) - } - - #[test] - fn add_function_directly_after_current_block() { - // The new fn should not be created at the end of the file or module - check_assist( - generate_function, - r" -fn foo1() { - bar<|>(); -} - -fn foo2() {} -", - r" -fn foo1() { - bar(); -} - -fn bar() { - ${0:todo!()} -} - -fn foo2() {} -", - ) - } - - #[test] - fn add_function_with_no_args_in_same_module() { - check_assist( - generate_function, - r" -mod baz { - fn foo() { - bar<|>(); - } -} -", - r" -mod baz { - fn foo() { - bar(); - } - - fn bar() { - ${0:todo!()} - } -} -", - ) - } - - #[test] - fn add_function_with_function_call_arg() { - check_assist( - generate_function, - r" -struct Baz; -fn baz() -> Baz { todo!() } -fn foo() { - bar<|>(baz()); -} -", - r" -struct Baz; -fn baz() -> Baz { todo!() } -fn foo() { - bar(baz()); -} - -fn bar(baz: Baz) { - ${0:todo!()} -} -", - ); - } - - #[test] - fn add_function_with_method_call_arg() { - check_assist( - generate_function, - r" -struct Baz; -impl Baz { - fn foo(&self) -> Baz { - ba<|>r(self.baz()) - } - fn baz(&self) -> Baz { - Baz - } -} -", - r" -struct Baz; -impl Baz { - fn foo(&self) -> Baz { - bar(self.baz()) - } - fn baz(&self) -> Baz { - Baz - } -} - -fn bar(baz: Baz) { - ${0:todo!()} -} -", - ) - } - - #[test] - fn add_function_with_string_literal_arg() { - check_assist( - 
generate_function, - r#" -fn foo() { - <|>bar("bar") -} -"#, - r#" -fn foo() { - bar("bar") -} - -fn bar(arg: &str) { - ${0:todo!()} -} -"#, - ) - } - - #[test] - fn add_function_with_char_literal_arg() { - check_assist( - generate_function, - r#" -fn foo() { - <|>bar('x') -} -"#, - r#" -fn foo() { - bar('x') -} - -fn bar(arg: char) { - ${0:todo!()} -} -"#, - ) - } - - #[test] - fn add_function_with_int_literal_arg() { - check_assist( - generate_function, - r" -fn foo() { - <|>bar(42) -} -", - r" -fn foo() { - bar(42) -} - -fn bar(arg: i32) { - ${0:todo!()} -} -", - ) - } - - #[test] - fn add_function_with_cast_int_literal_arg() { - check_assist( - generate_function, - r" -fn foo() { - <|>bar(42 as u8) -} -", - r" -fn foo() { - bar(42 as u8) -} - -fn bar(arg: u8) { - ${0:todo!()} -} -", - ) - } - - #[test] - fn name_of_cast_variable_is_used() { - // Ensures that the name of the cast type isn't used - // in the generated function signature. - check_assist( - generate_function, - r" -fn foo() { - let x = 42; - bar<|>(x as u8) -} -", - r" -fn foo() { - let x = 42; - bar(x as u8) -} - -fn bar(x: u8) { - ${0:todo!()} -} -", - ) - } - - #[test] - fn add_function_with_variable_arg() { - check_assist( - generate_function, - r" -fn foo() { - let worble = (); - <|>bar(worble) -} -", - r" -fn foo() { - let worble = (); - bar(worble) -} - -fn bar(worble: ()) { - ${0:todo!()} -} -", - ) - } - - #[test] - fn add_function_with_impl_trait_arg() { - check_assist( - generate_function, - r" -trait Foo {} -fn foo() -> impl Foo { - todo!() -} -fn baz() { - <|>bar(foo()) -} -", - r" -trait Foo {} -fn foo() -> impl Foo { - todo!() -} -fn baz() { - bar(foo()) -} - -fn bar(foo: impl Foo) { - ${0:todo!()} -} -", - ) - } - - #[test] - fn borrowed_arg() { - check_assist( - generate_function, - r" -struct Baz; -fn baz() -> Baz { todo!() } - -fn foo() { - bar<|>(&baz()) -} -", - r" -struct Baz; -fn baz() -> Baz { todo!() } - -fn foo() { - bar(&baz()) -} - -fn bar(baz: &Baz) { - ${0:todo!()} -} 
-", - ) - } - - #[test] - fn add_function_with_qualified_path_arg() { - check_assist( - generate_function, - r" -mod Baz { - pub struct Bof; - pub fn baz() -> Bof { Bof } -} -fn foo() { - <|>bar(Baz::baz()) -} -", - r" -mod Baz { - pub struct Bof; - pub fn baz() -> Bof { Bof } -} -fn foo() { - bar(Baz::baz()) -} - -fn bar(baz: Baz::Bof) { - ${0:todo!()} -} -", - ) - } - - #[test] - #[ignore] - // FIXME fix printing the generics of a `Ty` to make this test pass - fn add_function_with_generic_arg() { - check_assist( - generate_function, - r" -fn foo(t: T) { - <|>bar(t) -} -", - r" -fn foo(t: T) { - bar(t) -} - -fn bar(t: T) { - ${0:todo!()} -} -", - ) - } - - #[test] - #[ignore] - // FIXME Fix function type printing to make this test pass - fn add_function_with_fn_arg() { - check_assist( - generate_function, - r" -struct Baz; -impl Baz { - fn new() -> Self { Baz } -} -fn foo() { - <|>bar(Baz::new); -} -", - r" -struct Baz; -impl Baz { - fn new() -> Self { Baz } -} -fn foo() { - bar(Baz::new); -} - -fn bar(arg: fn() -> Baz) { - ${0:todo!()} -} -", - ) - } - - #[test] - #[ignore] - // FIXME Fix closure type printing to make this test pass - fn add_function_with_closure_arg() { - check_assist( - generate_function, - r" -fn foo() { - let closure = |x: i64| x - 1; - <|>bar(closure) -} -", - r" -fn foo() { - let closure = |x: i64| x - 1; - bar(closure) -} - -fn bar(closure: impl Fn(i64) -> i64) { - ${0:todo!()} -} -", - ) - } - - #[test] - fn unresolveable_types_default_to_unit() { - check_assist( - generate_function, - r" -fn foo() { - <|>bar(baz) -} -", - r" -fn foo() { - bar(baz) -} - -fn bar(baz: ()) { - ${0:todo!()} -} -", - ) - } - - #[test] - fn arg_names_dont_overlap() { - check_assist( - generate_function, - r" -struct Baz; -fn baz() -> Baz { Baz } -fn foo() { - <|>bar(baz(), baz()) -} -", - r" -struct Baz; -fn baz() -> Baz { Baz } -fn foo() { - bar(baz(), baz()) -} - -fn bar(baz_1: Baz, baz_2: Baz) { - ${0:todo!()} -} -", - ) - } - - #[test] - fn 
arg_name_counters_start_at_1_per_name() { - check_assist( - generate_function, - r#" -struct Baz; -fn baz() -> Baz { Baz } -fn foo() { - <|>bar(baz(), baz(), "foo", "bar") -} -"#, - r#" -struct Baz; -fn baz() -> Baz { Baz } -fn foo() { - bar(baz(), baz(), "foo", "bar") -} - -fn bar(baz_1: Baz, baz_2: Baz, arg_1: &str, arg_2: &str) { - ${0:todo!()} -} -"#, - ) - } - - #[test] - fn add_function_in_module() { - check_assist( - generate_function, - r" -mod bar {} - -fn foo() { - bar::my_fn<|>() -} -", - r" -mod bar { - pub(crate) fn my_fn() { - ${0:todo!()} - } -} - -fn foo() { - bar::my_fn() -} -", - ) - } - - #[test] - #[ignore] - // Ignored until local imports are supported. - // See https://github.com/rust-analyzer/rust-analyzer/issues/1165 - fn qualified_path_uses_correct_scope() { - check_assist( - generate_function, - " -mod foo { - pub struct Foo; -} -fn bar() { - use foo::Foo; - let foo = Foo; - baz<|>(foo) -} -", - " -mod foo { - pub struct Foo; -} -fn bar() { - use foo::Foo; - let foo = Foo; - baz(foo) -} - -fn baz(foo: foo::Foo) { - ${0:todo!()} -} -", - ) - } - - #[test] - fn add_function_in_module_containing_other_items() { - check_assist( - generate_function, - r" -mod bar { - fn something_else() {} -} - -fn foo() { - bar::my_fn<|>() -} -", - r" -mod bar { - fn something_else() {} - - pub(crate) fn my_fn() { - ${0:todo!()} - } -} - -fn foo() { - bar::my_fn() -} -", - ) - } - - #[test] - fn add_function_in_nested_module() { - check_assist( - generate_function, - r" -mod bar { - mod baz {} -} - -fn foo() { - bar::baz::my_fn<|>() -} -", - r" -mod bar { - mod baz { - pub(crate) fn my_fn() { - ${0:todo!()} - } - } -} - -fn foo() { - bar::baz::my_fn() -} -", - ) - } - - #[test] - fn add_function_in_another_file() { - check_assist( - generate_function, - r" -//- /main.rs -mod foo; - -fn main() { - foo::bar<|>() -} -//- /foo.rs -", - r" - - -pub(crate) fn bar() { - ${0:todo!()} -}", - ) - } - - #[test] - fn 
add_function_not_applicable_if_function_already_exists() { - check_assist_not_applicable( - generate_function, - r" -fn foo() { - bar<|>(); -} - -fn bar() {} -", - ) - } - - #[test] - fn add_function_not_applicable_if_unresolved_variable_in_call_is_selected() { - check_assist_not_applicable( - // bar is resolved, but baz isn't. - // The assist is only active if the cursor is on an unresolved path, - // but the assist should only be offered if the path is a function call. - generate_function, - r" -fn foo() { - bar(b<|>az); -} - -fn bar(baz: ()) {} -", - ) - } - - #[test] - #[ignore] - fn create_method_with_no_args() { - check_assist( - generate_function, - r" -struct Foo; -impl Foo { - fn foo(&self) { - self.bar()<|>; - } -} - ", - r" -struct Foo; -impl Foo { - fn foo(&self) { - self.bar(); - } - fn bar(&self) { - todo!(); - } -} - ", - ) - } -} diff --git a/crates/ra_assists/src/handlers/generate_impl.rs b/crates/ra_assists/src/handlers/generate_impl.rs deleted file mode 100644 index d9b87c9c0d..0000000000 --- a/crates/ra_assists/src/handlers/generate_impl.rs +++ /dev/null @@ -1,109 +0,0 @@ -use ra_syntax::ast::{self, AstNode, GenericParamsOwner, NameOwner}; -use stdx::{format_to, SepBy}; - -use crate::{AssistContext, AssistId, AssistKind, Assists}; - -// Assist: generate_impl -// -// Adds a new inherent impl for a type. 
-// -// ``` -// struct Ctx { -// data: T,<|> -// } -// ``` -// -> -// ``` -// struct Ctx { -// data: T, -// } -// -// impl Ctx { -// $0 -// } -// ``` -pub(crate) fn generate_impl(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let nominal = ctx.find_node_at_offset::()?; - let name = nominal.name()?; - let target = nominal.syntax().text_range(); - acc.add( - AssistId("generate_impl", AssistKind::Generate), - format!("Generate impl for `{}`", name), - target, - |edit| { - let type_params = nominal.generic_param_list(); - let start_offset = nominal.syntax().text_range().end(); - let mut buf = String::new(); - buf.push_str("\n\nimpl"); - if let Some(type_params) = &type_params { - format_to!(buf, "{}", type_params.syntax()); - } - buf.push_str(" "); - buf.push_str(name.text().as_str()); - if let Some(type_params) = type_params { - let lifetime_params = type_params - .lifetime_params() - .filter_map(|it| it.lifetime_token()) - .map(|it| it.text().clone()); - let type_params = type_params - .type_params() - .filter_map(|it| it.name()) - .map(|it| it.text().clone()); - - let generic_params = lifetime_params.chain(type_params).sep_by(", "); - format_to!(buf, "<{}>", generic_params) - } - match ctx.config.snippet_cap { - Some(cap) => { - buf.push_str(" {\n $0\n}"); - edit.insert_snippet(cap, start_offset, buf); - } - None => { - buf.push_str(" {\n}"); - edit.insert(start_offset, buf); - } - } - }, - ) -} - -#[cfg(test)] -mod tests { - use crate::tests::{check_assist, check_assist_target}; - - use super::*; - - #[test] - fn test_add_impl() { - check_assist( - generate_impl, - "struct Foo {<|>}\n", - "struct Foo {}\n\nimpl Foo {\n $0\n}\n", - ); - check_assist( - generate_impl, - "struct Foo {<|>}", - "struct Foo {}\n\nimpl Foo {\n $0\n}", - ); - check_assist( - generate_impl, - "struct Foo<'a, T: Foo<'a>> {<|>}", - "struct Foo<'a, T: Foo<'a>> {}\n\nimpl<'a, T: Foo<'a>> Foo<'a, T> {\n $0\n}", - ); - } - - #[test] - fn add_impl_target() { - check_assist_target( - 
generate_impl, - " -struct SomeThingIrrelevant; -/// Has a lifetime parameter -struct Foo<'a, T: Foo<'a>> {<|>} -struct EvenMoreIrrelevant; -", - "/// Has a lifetime parameter -struct Foo<'a, T: Foo<'a>> {}", - ); - } -} diff --git a/crates/ra_assists/src/handlers/generate_new.rs b/crates/ra_assists/src/handlers/generate_new.rs deleted file mode 100644 index b84aa24b6c..0000000000 --- a/crates/ra_assists/src/handlers/generate_new.rs +++ /dev/null @@ -1,420 +0,0 @@ -use hir::Adt; -use ra_syntax::{ - ast::{self, AstNode, GenericParamsOwner, NameOwner, StructKind, VisibilityOwner}, - T, -}; -use stdx::{format_to, SepBy}; - -use crate::{AssistContext, AssistId, AssistKind, Assists}; - -// Assist: generate_new -// -// Adds a new inherent impl for a type. -// -// ``` -// struct Ctx { -// data: T,<|> -// } -// ``` -// -> -// ``` -// struct Ctx { -// data: T, -// } -// -// impl Ctx { -// fn $0new(data: T) -> Self { Self { data } } -// } -// -// ``` -pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let strukt = ctx.find_node_at_offset::()?; - - // We want to only apply this to non-union structs with named fields - let field_list = match strukt.kind() { - StructKind::Record(named) => named, - _ => return None, - }; - - // Return early if we've found an existing new fn - let impl_def = find_struct_impl(&ctx, &strukt)?; - - let target = strukt.syntax().text_range(); - acc.add(AssistId("generate_new", AssistKind::Generate), "Generate `new`", target, |builder| { - let mut buf = String::with_capacity(512); - - if impl_def.is_some() { - buf.push('\n'); - } - - let vis = strukt.visibility().map_or(String::new(), |v| format!("{} ", v)); - - let params = field_list - .fields() - .filter_map(|f| Some(format!("{}: {}", f.name()?.syntax(), f.ty()?.syntax()))) - .sep_by(", "); - let fields = field_list.fields().filter_map(|f| f.name()).sep_by(", "); - - format_to!(buf, " {}fn new({}) -> Self {{ Self {{ {} }} }}", vis, params, fields); - - let 
start_offset = impl_def - .and_then(|impl_def| { - buf.push('\n'); - let start = impl_def - .syntax() - .descendants_with_tokens() - .find(|t| t.kind() == T!['{'])? - .text_range() - .end(); - - Some(start) - }) - .unwrap_or_else(|| { - buf = generate_impl_text(&strukt, &buf); - strukt.syntax().text_range().end() - }); - - match ctx.config.snippet_cap { - None => builder.insert(start_offset, buf), - Some(cap) => { - buf = buf.replace("fn new", "fn $0new"); - builder.insert_snippet(cap, start_offset, buf); - } - } - }) -} - -// Generates the surrounding `impl Type { }` including type and lifetime -// parameters -fn generate_impl_text(strukt: &ast::Struct, code: &str) -> String { - let type_params = strukt.generic_param_list(); - let mut buf = String::with_capacity(code.len()); - buf.push_str("\n\nimpl"); - if let Some(type_params) = &type_params { - format_to!(buf, "{}", type_params.syntax()); - } - buf.push_str(" "); - buf.push_str(strukt.name().unwrap().text().as_str()); - if let Some(type_params) = type_params { - let lifetime_params = type_params - .lifetime_params() - .filter_map(|it| it.lifetime_token()) - .map(|it| it.text().clone()); - let type_params = - type_params.type_params().filter_map(|it| it.name()).map(|it| it.text().clone()); - format_to!(buf, "<{}>", lifetime_params.chain(type_params).sep_by(", ")) - } - - format_to!(buf, " {{\n{}\n}}\n", code); - - buf -} - -// Uses a syntax-driven approach to find any impl blocks for the struct that -// exist within the module/file -// -// Returns `None` if we've found an existing `new` fn -// -// FIXME: change the new fn checking to a more semantic approach when that's more -// viable (e.g. 
we process proc macros, etc) -fn find_struct_impl(ctx: &AssistContext, strukt: &ast::Struct) -> Option> { - let db = ctx.db(); - let module = strukt.syntax().ancestors().find(|node| { - ast::Module::can_cast(node.kind()) || ast::SourceFile::can_cast(node.kind()) - })?; - - let struct_def = ctx.sema.to_def(strukt)?; - - let block = module.descendants().filter_map(ast::Impl::cast).find_map(|impl_blk| { - let blk = ctx.sema.to_def(&impl_blk)?; - - // FIXME: handle e.g. `struct S; impl S {}` - // (we currently use the wrong type parameter) - // also we wouldn't want to use e.g. `impl S` - let same_ty = match blk.target_ty(db).as_adt() { - Some(def) => def == Adt::Struct(struct_def), - None => false, - }; - let not_trait_impl = blk.target_trait(db).is_none(); - - if !(same_ty && not_trait_impl) { - None - } else { - Some(impl_blk) - } - }); - - if let Some(ref impl_blk) = block { - if has_new_fn(impl_blk) { - return None; - } - } - - Some(block) -} - -fn has_new_fn(imp: &ast::Impl) -> bool { - if let Some(il) = imp.assoc_item_list() { - for item in il.assoc_items() { - if let ast::AssocItem::Fn(f) = item { - if let Some(name) = f.name() { - if name.text().eq_ignore_ascii_case("new") { - return true; - } - } - } - } - } - - false -} - -#[cfg(test)] -mod tests { - use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target}; - - use super::*; - - #[test] - #[rustfmt::skip] - fn test_generate_new() { - // Check output of generation - check_assist( - generate_new, -"struct Foo {<|>}", -"struct Foo {} - -impl Foo { - fn $0new() -> Self { Self { } } -} -", - ); - check_assist( - generate_new, -"struct Foo {<|>}", -"struct Foo {} - -impl Foo { - fn $0new() -> Self { Self { } } -} -", - ); - check_assist( - generate_new, -"struct Foo<'a, T: Foo<'a>> {<|>}", -"struct Foo<'a, T: Foo<'a>> {} - -impl<'a, T: Foo<'a>> Foo<'a, T> { - fn $0new() -> Self { Self { } } -} -", - ); - check_assist( - generate_new, -"struct Foo { baz: String <|>}", -"struct Foo { baz: 
String } - -impl Foo { - fn $0new(baz: String) -> Self { Self { baz } } -} -", - ); - check_assist( - generate_new, -"struct Foo { baz: String, qux: Vec <|>}", -"struct Foo { baz: String, qux: Vec } - -impl Foo { - fn $0new(baz: String, qux: Vec) -> Self { Self { baz, qux } } -} -", - ); - - // Check that visibility modifiers don't get brought in for fields - check_assist( - generate_new, -"struct Foo { pub baz: String, pub qux: Vec <|>}", -"struct Foo { pub baz: String, pub qux: Vec } - -impl Foo { - fn $0new(baz: String, qux: Vec) -> Self { Self { baz, qux } } -} -", - ); - - // Check that it reuses existing impls - check_assist( - generate_new, -"struct Foo {<|>} - -impl Foo {} -", -"struct Foo {} - -impl Foo { - fn $0new() -> Self { Self { } } -} -", - ); - check_assist( - generate_new, -"struct Foo {<|>} - -impl Foo { - fn qux(&self) {} -} -", -"struct Foo {} - -impl Foo { - fn $0new() -> Self { Self { } } - - fn qux(&self) {} -} -", - ); - - check_assist( - generate_new, -"struct Foo {<|>} - -impl Foo { - fn qux(&self) {} - fn baz() -> i32 { - 5 - } -} -", -"struct Foo {} - -impl Foo { - fn $0new() -> Self { Self { } } - - fn qux(&self) {} - fn baz() -> i32 { - 5 - } -} -", - ); - - // Check visibility of new fn based on struct - check_assist( - generate_new, -"pub struct Foo {<|>}", -"pub struct Foo {} - -impl Foo { - pub fn $0new() -> Self { Self { } } -} -", - ); - check_assist( - generate_new, -"pub(crate) struct Foo {<|>}", -"pub(crate) struct Foo {} - -impl Foo { - pub(crate) fn $0new() -> Self { Self { } } -} -", - ); - } - - #[test] - fn generate_new_not_applicable_if_fn_exists() { - check_assist_not_applicable( - generate_new, - " -struct Foo {<|>} - -impl Foo { - fn new() -> Self { - Self - } -}", - ); - - check_assist_not_applicable( - generate_new, - " -struct Foo {<|>} - -impl Foo { - fn New() -> Self { - Self - } -}", - ); - } - - #[test] - fn generate_new_target() { - check_assist_target( - generate_new, - " -struct SomeThingIrrelevant; -/// 
Has a lifetime parameter -struct Foo<'a, T: Foo<'a>> {<|>} -struct EvenMoreIrrelevant; -", - "/// Has a lifetime parameter -struct Foo<'a, T: Foo<'a>> {}", - ); - } - - #[test] - fn test_unrelated_new() { - check_assist( - generate_new, - r##" -pub struct AstId { - file_id: HirFileId, - file_ast_id: FileAstId, -} - -impl AstId { - pub fn new(file_id: HirFileId, file_ast_id: FileAstId) -> AstId { - AstId { file_id, file_ast_id } - } -} - -pub struct Source { - pub file_id: HirFileId,<|> - pub ast: T, -} - -impl Source { - pub fn map U, U>(self, f: F) -> Source { - Source { file_id: self.file_id, ast: f(self.ast) } - } -} -"##, - r##" -pub struct AstId { - file_id: HirFileId, - file_ast_id: FileAstId, -} - -impl AstId { - pub fn new(file_id: HirFileId, file_ast_id: FileAstId) -> AstId { - AstId { file_id, file_ast_id } - } -} - -pub struct Source { - pub file_id: HirFileId, - pub ast: T, -} - -impl Source { - pub fn $0new(file_id: HirFileId, ast: T) -> Self { Self { file_id, ast } } - - pub fn map U, U>(self, f: F) -> Source { - Source { file_id: self.file_id, ast: f(self.ast) } - } -} -"##, - ); - } -} diff --git a/crates/ra_assists/src/handlers/inline_local_variable.rs b/crates/ra_assists/src/handlers/inline_local_variable.rs deleted file mode 100644 index 3c58020f80..0000000000 --- a/crates/ra_assists/src/handlers/inline_local_variable.rs +++ /dev/null @@ -1,695 +0,0 @@ -use ra_ide_db::defs::Definition; -use ra_syntax::{ - ast::{self, AstNode, AstToken}, - TextRange, -}; -use test_utils::mark; - -use crate::{ - assist_context::{AssistContext, Assists}, - AssistId, AssistKind, -}; - -// Assist: inline_local_variable -// -// Inlines local variable. -// -// ``` -// fn main() { -// let x<|> = 1 + 2; -// x * 4; -// } -// ``` -// -> -// ``` -// fn main() { -// (1 + 2) * 4; -// } -// ``` -pub(crate) fn inline_local_variable(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let let_stmt = ctx.find_node_at_offset::()?; - let bind_pat = match let_stmt.pat()? 
{ - ast::Pat::IdentPat(pat) => pat, - _ => return None, - }; - if bind_pat.mut_token().is_some() { - mark::hit!(test_not_inline_mut_variable); - return None; - } - if !bind_pat.syntax().text_range().contains_inclusive(ctx.offset()) { - mark::hit!(not_applicable_outside_of_bind_pat); - return None; - } - let initializer_expr = let_stmt.initializer()?; - - let def = ctx.sema.to_def(&bind_pat)?; - let def = Definition::Local(def); - let refs = def.find_usages(&ctx.sema, None); - if refs.is_empty() { - mark::hit!(test_not_applicable_if_variable_unused); - return None; - }; - - let delete_range = if let Some(whitespace) = let_stmt - .syntax() - .next_sibling_or_token() - .and_then(|it| ast::Whitespace::cast(it.as_token()?.clone())) - { - TextRange::new( - let_stmt.syntax().text_range().start(), - whitespace.syntax().text_range().end(), - ) - } else { - let_stmt.syntax().text_range() - }; - - let mut wrap_in_parens = vec![true; refs.len()]; - - for (i, desc) in refs.iter().enumerate() { - let usage_node = ctx - .covering_node_for_range(desc.file_range.range) - .ancestors() - .find_map(ast::PathExpr::cast)?; - let usage_parent_option = usage_node.syntax().parent().and_then(ast::Expr::cast); - let usage_parent = match usage_parent_option { - Some(u) => u, - None => { - wrap_in_parens[i] = false; - continue; - } - }; - - wrap_in_parens[i] = match (&initializer_expr, usage_parent) { - (ast::Expr::CallExpr(_), _) - | (ast::Expr::IndexExpr(_), _) - | (ast::Expr::MethodCallExpr(_), _) - | (ast::Expr::FieldExpr(_), _) - | (ast::Expr::TryExpr(_), _) - | (ast::Expr::RefExpr(_), _) - | (ast::Expr::Literal(_), _) - | (ast::Expr::TupleExpr(_), _) - | (ast::Expr::ArrayExpr(_), _) - | (ast::Expr::ParenExpr(_), _) - | (ast::Expr::PathExpr(_), _) - | (ast::Expr::BlockExpr(_), _) - | (ast::Expr::EffectExpr(_), _) - | (_, ast::Expr::CallExpr(_)) - | (_, ast::Expr::TupleExpr(_)) - | (_, ast::Expr::ArrayExpr(_)) - | (_, ast::Expr::ParenExpr(_)) - | (_, ast::Expr::ForExpr(_)) - | (_, 
ast::Expr::WhileExpr(_)) - | (_, ast::Expr::BreakExpr(_)) - | (_, ast::Expr::ReturnExpr(_)) - | (_, ast::Expr::MatchExpr(_)) => false, - _ => true, - }; - } - - let init_str = initializer_expr.syntax().text().to_string(); - let init_in_paren = format!("({})", &init_str); - - let target = bind_pat.syntax().text_range(); - acc.add( - AssistId("inline_local_variable", AssistKind::RefactorInline), - "Inline variable", - target, - move |builder| { - builder.delete(delete_range); - for (desc, should_wrap) in refs.iter().zip(wrap_in_parens) { - let replacement = - if should_wrap { init_in_paren.clone() } else { init_str.clone() }; - builder.replace(desc.file_range.range, replacement) - } - }, - ) -} - -#[cfg(test)] -mod tests { - use test_utils::mark; - - use crate::tests::{check_assist, check_assist_not_applicable}; - - use super::*; - - #[test] - fn test_inline_let_bind_literal_expr() { - check_assist( - inline_local_variable, - r" -fn bar(a: usize) {} -fn foo() { - let a<|> = 1; - a + 1; - if a > 10 { - } - - while a > 10 { - - } - let b = a * 10; - bar(a); -}", - r" -fn bar(a: usize) {} -fn foo() { - 1 + 1; - if 1 > 10 { - } - - while 1 > 10 { - - } - let b = 1 * 10; - bar(1); -}", - ); - } - - #[test] - fn test_inline_let_bind_bin_expr() { - check_assist( - inline_local_variable, - r" -fn bar(a: usize) {} -fn foo() { - let a<|> = 1 + 1; - a + 1; - if a > 10 { - } - - while a > 10 { - - } - let b = a * 10; - bar(a); -}", - r" -fn bar(a: usize) {} -fn foo() { - (1 + 1) + 1; - if (1 + 1) > 10 { - } - - while (1 + 1) > 10 { - - } - let b = (1 + 1) * 10; - bar(1 + 1); -}", - ); - } - - #[test] - fn test_inline_let_bind_function_call_expr() { - check_assist( - inline_local_variable, - r" -fn bar(a: usize) {} -fn foo() { - let a<|> = bar(1); - a + 1; - if a > 10 { - } - - while a > 10 { - - } - let b = a * 10; - bar(a); -}", - r" -fn bar(a: usize) {} -fn foo() { - bar(1) + 1; - if bar(1) > 10 { - } - - while bar(1) > 10 { - - } - let b = bar(1) * 10; - bar(bar(1)); -}", - 
); - } - - #[test] - fn test_inline_let_bind_cast_expr() { - check_assist( - inline_local_variable, - r" -fn bar(a: usize): usize { a } -fn foo() { - let a<|> = bar(1) as u64; - a + 1; - if a > 10 { - } - - while a > 10 { - - } - let b = a * 10; - bar(a); -}", - r" -fn bar(a: usize): usize { a } -fn foo() { - (bar(1) as u64) + 1; - if (bar(1) as u64) > 10 { - } - - while (bar(1) as u64) > 10 { - - } - let b = (bar(1) as u64) * 10; - bar(bar(1) as u64); -}", - ); - } - - #[test] - fn test_inline_let_bind_block_expr() { - check_assist( - inline_local_variable, - r" -fn foo() { - let a<|> = { 10 + 1 }; - a + 1; - if a > 10 { - } - - while a > 10 { - - } - let b = a * 10; - bar(a); -}", - r" -fn foo() { - { 10 + 1 } + 1; - if { 10 + 1 } > 10 { - } - - while { 10 + 1 } > 10 { - - } - let b = { 10 + 1 } * 10; - bar({ 10 + 1 }); -}", - ); - } - - #[test] - fn test_inline_let_bind_paren_expr() { - check_assist( - inline_local_variable, - r" -fn foo() { - let a<|> = ( 10 + 1 ); - a + 1; - if a > 10 { - } - - while a > 10 { - - } - let b = a * 10; - bar(a); -}", - r" -fn foo() { - ( 10 + 1 ) + 1; - if ( 10 + 1 ) > 10 { - } - - while ( 10 + 1 ) > 10 { - - } - let b = ( 10 + 1 ) * 10; - bar(( 10 + 1 )); -}", - ); - } - - #[test] - fn test_not_inline_mut_variable() { - mark::check!(test_not_inline_mut_variable); - check_assist_not_applicable( - inline_local_variable, - r" -fn foo() { - let mut a<|> = 1 + 1; - a + 1; -}", - ); - } - - #[test] - fn test_call_expr() { - check_assist( - inline_local_variable, - r" -fn foo() { - let a<|> = bar(10 + 1); - let b = a * 10; - let c = a as usize; -}", - r" -fn foo() { - let b = bar(10 + 1) * 10; - let c = bar(10 + 1) as usize; -}", - ); - } - - #[test] - fn test_index_expr() { - check_assist( - inline_local_variable, - r" -fn foo() { - let x = vec![1, 2, 3]; - let a<|> = x[0]; - let b = a * 10; - let c = a as usize; -}", - r" -fn foo() { - let x = vec![1, 2, 3]; - let b = x[0] * 10; - let c = x[0] as usize; -}", - ); - } - - #[test] - fn 
test_method_call_expr() { - check_assist( - inline_local_variable, - r" -fn foo() { - let bar = vec![1]; - let a<|> = bar.len(); - let b = a * 10; - let c = a as usize; -}", - r" -fn foo() { - let bar = vec![1]; - let b = bar.len() * 10; - let c = bar.len() as usize; -}", - ); - } - - #[test] - fn test_field_expr() { - check_assist( - inline_local_variable, - r" -struct Bar { - foo: usize -} - -fn foo() { - let bar = Bar { foo: 1 }; - let a<|> = bar.foo; - let b = a * 10; - let c = a as usize; -}", - r" -struct Bar { - foo: usize -} - -fn foo() { - let bar = Bar { foo: 1 }; - let b = bar.foo * 10; - let c = bar.foo as usize; -}", - ); - } - - #[test] - fn test_try_expr() { - check_assist( - inline_local_variable, - r" -fn foo() -> Option { - let bar = Some(1); - let a<|> = bar?; - let b = a * 10; - let c = a as usize; - None -}", - r" -fn foo() -> Option { - let bar = Some(1); - let b = bar? * 10; - let c = bar? as usize; - None -}", - ); - } - - #[test] - fn test_ref_expr() { - check_assist( - inline_local_variable, - r" -fn foo() { - let bar = 10; - let a<|> = &bar; - let b = a * 10; -}", - r" -fn foo() { - let bar = 10; - let b = &bar * 10; -}", - ); - } - - #[test] - fn test_tuple_expr() { - check_assist( - inline_local_variable, - r" -fn foo() { - let a<|> = (10, 20); - let b = a[0]; -}", - r" -fn foo() { - let b = (10, 20)[0]; -}", - ); - } - - #[test] - fn test_array_expr() { - check_assist( - inline_local_variable, - r" -fn foo() { - let a<|> = [1, 2, 3]; - let b = a.len(); -}", - r" -fn foo() { - let b = [1, 2, 3].len(); -}", - ); - } - - #[test] - fn test_paren() { - check_assist( - inline_local_variable, - r" -fn foo() { - let a<|> = (10 + 20); - let b = a * 10; - let c = a as usize; -}", - r" -fn foo() { - let b = (10 + 20) * 10; - let c = (10 + 20) as usize; -}", - ); - } - - #[test] - fn test_path_expr() { - check_assist( - inline_local_variable, - r" -fn foo() { - let d = 10; - let a<|> = d; - let b = a * 10; - let c = a as usize; -}", - r" -fn foo() 
{ - let d = 10; - let b = d * 10; - let c = d as usize; -}", - ); - } - - #[test] - fn test_block_expr() { - check_assist( - inline_local_variable, - r" -fn foo() { - let a<|> = { 10 }; - let b = a * 10; - let c = a as usize; -}", - r" -fn foo() { - let b = { 10 } * 10; - let c = { 10 } as usize; -}", - ); - } - - #[test] - fn test_used_in_different_expr1() { - check_assist( - inline_local_variable, - r" -fn foo() { - let a<|> = 10 + 20; - let b = a * 10; - let c = (a, 20); - let d = [a, 10]; - let e = (a); -}", - r" -fn foo() { - let b = (10 + 20) * 10; - let c = (10 + 20, 20); - let d = [10 + 20, 10]; - let e = (10 + 20); -}", - ); - } - - #[test] - fn test_used_in_for_expr() { - check_assist( - inline_local_variable, - r" -fn foo() { - let a<|> = vec![10, 20]; - for i in a {} -}", - r" -fn foo() { - for i in vec![10, 20] {} -}", - ); - } - - #[test] - fn test_used_in_while_expr() { - check_assist( - inline_local_variable, - r" -fn foo() { - let a<|> = 1 > 0; - while a {} -}", - r" -fn foo() { - while 1 > 0 {} -}", - ); - } - - #[test] - fn test_used_in_break_expr() { - check_assist( - inline_local_variable, - r" -fn foo() { - let a<|> = 1 + 1; - loop { - break a; - } -}", - r" -fn foo() { - loop { - break 1 + 1; - } -}", - ); - } - - #[test] - fn test_used_in_return_expr() { - check_assist( - inline_local_variable, - r" -fn foo() { - let a<|> = 1 > 0; - return a; -}", - r" -fn foo() { - return 1 > 0; -}", - ); - } - - #[test] - fn test_used_in_match_expr() { - check_assist( - inline_local_variable, - r" -fn foo() { - let a<|> = 1 > 0; - match a {} -}", - r" -fn foo() { - match 1 > 0 {} -}", - ); - } - - #[test] - fn test_not_applicable_if_variable_unused() { - mark::check!(test_not_applicable_if_variable_unused); - check_assist_not_applicable( - inline_local_variable, - r" -fn foo() { - let <|>a = 0; -} - ", - ) - } - - #[test] - fn not_applicable_outside_of_bind_pat() { - mark::check!(not_applicable_outside_of_bind_pat); - check_assist_not_applicable( - 
inline_local_variable, - r" -fn main() { - let x = <|>1 + 2; - x * 4; -} -", - ) - } -} diff --git a/crates/ra_assists/src/handlers/introduce_named_lifetime.rs b/crates/ra_assists/src/handlers/introduce_named_lifetime.rs deleted file mode 100644 index fbaf3c06b5..0000000000 --- a/crates/ra_assists/src/handlers/introduce_named_lifetime.rs +++ /dev/null @@ -1,318 +0,0 @@ -use ra_syntax::{ - ast::{self, GenericParamsOwner, NameOwner}, - AstNode, SyntaxKind, TextRange, TextSize, -}; -use rustc_hash::FxHashSet; - -use crate::{assist_context::AssistBuilder, AssistContext, AssistId, AssistKind, Assists}; - -static ASSIST_NAME: &str = "introduce_named_lifetime"; -static ASSIST_LABEL: &str = "Introduce named lifetime"; - -// Assist: introduce_named_lifetime -// -// Change an anonymous lifetime to a named lifetime. -// -// ``` -// impl Cursor<'_<|>> { -// fn node(self) -> &SyntaxNode { -// match self { -// Cursor::Replace(node) | Cursor::Before(node) => node, -// } -// } -// } -// ``` -// -> -// ``` -// impl<'a> Cursor<'a> { -// fn node(self) -> &SyntaxNode { -// match self { -// Cursor::Replace(node) | Cursor::Before(node) => node, -// } -// } -// } -// ``` -// FIXME: How can we handle renaming any one of multiple anonymous lifetimes? 
-// FIXME: should also add support for the case fun(f: &Foo) -> &<|>Foo -pub(crate) fn introduce_named_lifetime(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let lifetime_token = ctx - .find_token_at_offset(SyntaxKind::LIFETIME) - .filter(|lifetime| lifetime.text() == "'_")?; - if let Some(fn_def) = lifetime_token.ancestors().find_map(ast::Fn::cast) { - generate_fn_def_assist(acc, &fn_def, lifetime_token.text_range()) - } else if let Some(impl_def) = lifetime_token.ancestors().find_map(ast::Impl::cast) { - generate_impl_def_assist(acc, &impl_def, lifetime_token.text_range()) - } else { - None - } -} - -/// Generate the assist for the fn def case -fn generate_fn_def_assist( - acc: &mut Assists, - fn_def: &ast::Fn, - lifetime_loc: TextRange, -) -> Option<()> { - let param_list: ast::ParamList = fn_def.param_list()?; - let new_lifetime_param = generate_unique_lifetime_param_name(&fn_def.generic_param_list())?; - let end_of_fn_ident = fn_def.name()?.ident_token()?.text_range().end(); - let self_param = - // use the self if it's a reference and has no explicit lifetime - param_list.self_param().filter(|p| p.lifetime_token().is_none() && p.amp_token().is_some()); - // compute the location which implicitly has the same lifetime as the anonymous lifetime - let loc_needing_lifetime = if let Some(self_param) = self_param { - // if we have a self reference, use that - Some(self_param.self_token()?.text_range().start()) - } else { - // otherwise, if there's a single reference parameter without a named liftime, use that - let fn_params_without_lifetime: Vec<_> = param_list - .params() - .filter_map(|param| match param.ty() { - Some(ast::Type::RefType(ascribed_type)) - if ascribed_type.lifetime_token() == None => - { - Some(ascribed_type.amp_token()?.text_range().end()) - } - _ => None, - }) - .collect(); - match fn_params_without_lifetime.len() { - 1 => Some(fn_params_without_lifetime.into_iter().nth(0)?), - 0 => None, - // multiple unnnamed is invalid. 
assist is not applicable - _ => return None, - } - }; - acc.add(AssistId(ASSIST_NAME, AssistKind::Refactor), ASSIST_LABEL, lifetime_loc, |builder| { - add_lifetime_param(fn_def, builder, end_of_fn_ident, new_lifetime_param); - builder.replace(lifetime_loc, format!("'{}", new_lifetime_param)); - loc_needing_lifetime.map(|loc| builder.insert(loc, format!("'{} ", new_lifetime_param))); - }) -} - -/// Generate the assist for the impl def case -fn generate_impl_def_assist( - acc: &mut Assists, - impl_def: &ast::Impl, - lifetime_loc: TextRange, -) -> Option<()> { - let new_lifetime_param = generate_unique_lifetime_param_name(&impl_def.generic_param_list())?; - let end_of_impl_kw = impl_def.impl_token()?.text_range().end(); - acc.add(AssistId(ASSIST_NAME, AssistKind::Refactor), ASSIST_LABEL, lifetime_loc, |builder| { - add_lifetime_param(impl_def, builder, end_of_impl_kw, new_lifetime_param); - builder.replace(lifetime_loc, format!("'{}", new_lifetime_param)); - }) -} - -/// Given a type parameter list, generate a unique lifetime parameter name -/// which is not in the list -fn generate_unique_lifetime_param_name( - existing_type_param_list: &Option, -) -> Option { - match existing_type_param_list { - Some(type_params) => { - let used_lifetime_params: FxHashSet<_> = type_params - .lifetime_params() - .map(|p| p.syntax().text().to_string()[1..].to_owned()) - .collect(); - (b'a'..=b'z').map(char::from).find(|c| !used_lifetime_params.contains(&c.to_string())) - } - None => Some('a'), - } -} - -/// Add the lifetime param to `builder`. If there are type parameters in `type_params_owner`, add it to the end. Otherwise -/// add new type params brackets with the lifetime parameter at `new_type_params_loc`. 
-fn add_lifetime_param( - type_params_owner: &TypeParamsOwner, - builder: &mut AssistBuilder, - new_type_params_loc: TextSize, - new_lifetime_param: char, -) { - match type_params_owner.generic_param_list() { - // add the new lifetime parameter to an existing type param list - Some(type_params) => { - builder.insert( - (u32::from(type_params.syntax().text_range().end()) - 1).into(), - format!(", '{}", new_lifetime_param), - ); - } - // create a new type param list containing only the new lifetime parameter - None => { - builder.insert(new_type_params_loc, format!("<'{}>", new_lifetime_param)); - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::tests::{check_assist, check_assist_not_applicable}; - - #[test] - fn test_example_case() { - check_assist( - introduce_named_lifetime, - r#"impl Cursor<'_<|>> { - fn node(self) -> &SyntaxNode { - match self { - Cursor::Replace(node) | Cursor::Before(node) => node, - } - } - }"#, - r#"impl<'a> Cursor<'a> { - fn node(self) -> &SyntaxNode { - match self { - Cursor::Replace(node) | Cursor::Before(node) => node, - } - } - }"#, - ); - } - - #[test] - fn test_example_case_simplified() { - check_assist( - introduce_named_lifetime, - r#"impl Cursor<'_<|>> {"#, - r#"impl<'a> Cursor<'a> {"#, - ); - } - - #[test] - fn test_example_case_cursor_after_tick() { - check_assist( - introduce_named_lifetime, - r#"impl Cursor<'<|>_> {"#, - r#"impl<'a> Cursor<'a> {"#, - ); - } - - #[test] - fn test_impl_with_other_type_param() { - check_assist( - introduce_named_lifetime, - "impl fmt::Display for SepByBuilder<'_<|>, I> - where - I: Iterator, - I::Item: fmt::Display, - {", - "impl fmt::Display for SepByBuilder<'a, I> - where - I: Iterator, - I::Item: fmt::Display, - {", - ) - } - - #[test] - fn test_example_case_cursor_before_tick() { - check_assist( - introduce_named_lifetime, - r#"impl Cursor<<|>'_> {"#, - r#"impl<'a> Cursor<'a> {"#, - ); - } - - #[test] - fn test_not_applicable_cursor_position() { - 
check_assist_not_applicable(introduce_named_lifetime, r#"impl Cursor<'_><|> {"#); - check_assist_not_applicable(introduce_named_lifetime, r#"impl Cursor<|><'_> {"#); - } - - #[test] - fn test_not_applicable_lifetime_already_name() { - check_assist_not_applicable(introduce_named_lifetime, r#"impl Cursor<'a<|>> {"#); - check_assist_not_applicable(introduce_named_lifetime, r#"fn my_fun<'a>() -> X<'a<|>>"#); - } - - #[test] - fn test_with_type_parameter() { - check_assist( - introduce_named_lifetime, - r#"impl Cursor>"#, - r#"impl Cursor"#, - ); - } - - #[test] - fn test_with_existing_lifetime_name_conflict() { - check_assist( - introduce_named_lifetime, - r#"impl<'a, 'b> Cursor<'a, 'b, '_<|>>"#, - r#"impl<'a, 'b, 'c> Cursor<'a, 'b, 'c>"#, - ); - } - - #[test] - fn test_function_return_value_anon_lifetime_param() { - check_assist( - introduce_named_lifetime, - r#"fn my_fun() -> X<'_<|>>"#, - r#"fn my_fun<'a>() -> X<'a>"#, - ); - } - - #[test] - fn test_function_return_value_anon_reference_lifetime() { - check_assist( - introduce_named_lifetime, - r#"fn my_fun() -> &'_<|> X"#, - r#"fn my_fun<'a>() -> &'a X"#, - ); - } - - #[test] - fn test_function_param_anon_lifetime() { - check_assist( - introduce_named_lifetime, - r#"fn my_fun(x: X<'_<|>>)"#, - r#"fn my_fun<'a>(x: X<'a>)"#, - ); - } - - #[test] - fn test_function_add_lifetime_to_params() { - check_assist( - introduce_named_lifetime, - r#"fn my_fun(f: &Foo) -> X<'_<|>>"#, - r#"fn my_fun<'a>(f: &'a Foo) -> X<'a>"#, - ); - } - - #[test] - fn test_function_add_lifetime_to_params_in_presence_of_other_lifetime() { - check_assist( - introduce_named_lifetime, - r#"fn my_fun<'other>(f: &Foo, b: &'other Bar) -> X<'_<|>>"#, - r#"fn my_fun<'other, 'a>(f: &'a Foo, b: &'other Bar) -> X<'a>"#, - ); - } - - #[test] - fn test_function_not_applicable_without_self_and_multiple_unnamed_param_lifetimes() { - // this is not permitted under lifetime elision rules - check_assist_not_applicable( - introduce_named_lifetime, - r#"fn my_fun(f: 
&Foo, b: &Bar) -> X<'_<|>>"#, - ); - } - - #[test] - fn test_function_add_lifetime_to_self_ref_param() { - check_assist( - introduce_named_lifetime, - r#"fn my_fun<'other>(&self, f: &Foo, b: &'other Bar) -> X<'_<|>>"#, - r#"fn my_fun<'other, 'a>(&'a self, f: &Foo, b: &'other Bar) -> X<'a>"#, - ); - } - - #[test] - fn test_function_add_lifetime_to_param_with_non_ref_self() { - check_assist( - introduce_named_lifetime, - r#"fn my_fun<'other>(self, f: &Foo, b: &'other Bar) -> X<'_<|>>"#, - r#"fn my_fun<'other, 'a>(self, f: &'a Foo, b: &'other Bar) -> X<'a>"#, - ); - } -} diff --git a/crates/ra_assists/src/handlers/invert_if.rs b/crates/ra_assists/src/handlers/invert_if.rs deleted file mode 100644 index bbe3f36436..0000000000 --- a/crates/ra_assists/src/handlers/invert_if.rs +++ /dev/null @@ -1,109 +0,0 @@ -use ra_syntax::{ - ast::{self, AstNode}, - T, -}; - -use crate::{ - assist_context::{AssistContext, Assists}, - utils::invert_boolean_expression, - AssistId, AssistKind, -}; - -// Assist: invert_if -// -// Apply invert_if -// This transforms if expressions of the form `if !x {A} else {B}` into `if x {B} else {A}` -// This also works with `!=`. This assist can only be applied with the cursor -// on `if`. -// -// ``` -// fn main() { -// if<|> !y { A } else { B } -// } -// ``` -// -> -// ``` -// fn main() { -// if y { B } else { A } -// } -// ``` - -pub(crate) fn invert_if(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let if_keyword = ctx.find_token_at_offset(T![if])?; - let expr = ast::IfExpr::cast(if_keyword.parent())?; - let if_range = if_keyword.text_range(); - let cursor_in_range = if_range.contains_range(ctx.frange.range); - if !cursor_in_range { - return None; - } - - // This assist should not apply for if-let. - if expr.condition()?.pat().is_some() { - return None; - } - - let cond = expr.condition()?.expr()?; - let then_node = expr.then_branch()?.syntax().clone(); - let else_block = match expr.else_branch()? 
{ - ast::ElseBranch::Block(it) => it, - ast::ElseBranch::IfExpr(_) => return None, - }; - - let cond_range = cond.syntax().text_range(); - let flip_cond = invert_boolean_expression(cond); - let else_node = else_block.syntax(); - let else_range = else_node.text_range(); - let then_range = then_node.text_range(); - acc.add(AssistId("invert_if", AssistKind::RefactorRewrite), "Invert if", if_range, |edit| { - edit.replace(cond_range, flip_cond.syntax().text()); - edit.replace(else_range, then_node.text()); - edit.replace(then_range, else_node.text()); - }) -} - -#[cfg(test)] -mod tests { - use super::*; - - use crate::tests::{check_assist, check_assist_not_applicable}; - - #[test] - fn invert_if_remove_inequality() { - check_assist( - invert_if, - "fn f() { i<|>f x != 3 { 1 } else { 3 + 2 } }", - "fn f() { if x == 3 { 3 + 2 } else { 1 } }", - ) - } - - #[test] - fn invert_if_remove_not() { - check_assist( - invert_if, - "fn f() { <|>if !cond { 3 * 2 } else { 1 } }", - "fn f() { if cond { 1 } else { 3 * 2 } }", - ) - } - - #[test] - fn invert_if_general_case() { - check_assist( - invert_if, - "fn f() { i<|>f cond { 3 * 2 } else { 1 } }", - "fn f() { if !cond { 1 } else { 3 * 2 } }", - ) - } - - #[test] - fn invert_if_doesnt_apply_with_cursor_not_on_if() { - check_assist_not_applicable(invert_if, "fn f() { if !<|>cond { 3 * 2 } else { 1 } }") - } - - #[test] - fn invert_if_doesnt_apply_with_if_let() { - check_assist_not_applicable( - invert_if, - "fn f() { i<|>f let Some(_) = Some(1) { 1 } else { 0 } }", - ) - } -} diff --git a/crates/ra_assists/src/handlers/merge_imports.rs b/crates/ra_assists/src/handlers/merge_imports.rs deleted file mode 100644 index c775fe25c5..0000000000 --- a/crates/ra_assists/src/handlers/merge_imports.rs +++ /dev/null @@ -1,294 +0,0 @@ -use std::iter::successors; - -use ra_syntax::{ - algo::{neighbor, skip_trivia_token, SyntaxRewriter}, - ast::{self, edit::AstNodeEdit, make}, - AstNode, Direction, InsertPosition, SyntaxElement, T, -}; - -use 
crate::{ - assist_context::{AssistContext, Assists}, - AssistId, AssistKind, -}; - -// Assist: merge_imports -// -// Merges two imports with a common prefix. -// -// ``` -// use std::<|>fmt::Formatter; -// use std::io; -// ``` -// -> -// ``` -// use std::{fmt::Formatter, io}; -// ``` -pub(crate) fn merge_imports(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let tree: ast::UseTree = ctx.find_node_at_offset()?; - let mut rewriter = SyntaxRewriter::default(); - let mut offset = ctx.offset(); - - if let Some(use_item) = tree.syntax().parent().and_then(ast::Use::cast) { - let (merged, to_delete) = next_prev() - .filter_map(|dir| neighbor(&use_item, dir)) - .filter_map(|it| Some((it.clone(), it.use_tree()?))) - .find_map(|(use_item, use_tree)| { - Some((try_merge_trees(&tree, &use_tree)?, use_item)) - })?; - - rewriter.replace_ast(&tree, &merged); - rewriter += to_delete.remove(); - - if to_delete.syntax().text_range().end() < offset { - offset -= to_delete.syntax().text_range().len(); - } - } else { - let (merged, to_delete) = next_prev() - .filter_map(|dir| neighbor(&tree, dir)) - .find_map(|use_tree| Some((try_merge_trees(&tree, &use_tree)?, use_tree.clone())))?; - - rewriter.replace_ast(&tree, &merged); - rewriter += to_delete.remove(); - - if to_delete.syntax().text_range().end() < offset { - offset -= to_delete.syntax().text_range().len(); - } - }; - - let target = tree.syntax().text_range(); - acc.add( - AssistId("merge_imports", AssistKind::RefactorRewrite), - "Merge imports", - target, - |builder| { - builder.rewrite(rewriter); - }, - ) -} - -fn next_prev() -> impl Iterator { - [Direction::Next, Direction::Prev].iter().copied() -} - -fn try_merge_trees(old: &ast::UseTree, new: &ast::UseTree) -> Option { - let lhs_path = old.path()?; - let rhs_path = new.path()?; - - let (lhs_prefix, rhs_prefix) = common_prefix(&lhs_path, &rhs_path)?; - - let lhs = old.split_prefix(&lhs_prefix); - let rhs = new.split_prefix(&rhs_prefix); - - let should_insert_comma = 
lhs - .use_tree_list()? - .r_curly_token() - .and_then(|it| skip_trivia_token(it.prev_token()?, Direction::Prev)) - .map(|it| it.kind() != T![,]) - .unwrap_or(true); - - let mut to_insert: Vec = Vec::new(); - if should_insert_comma { - to_insert.push(make::token(T![,]).into()); - to_insert.push(make::tokens::single_space().into()); - } - to_insert.extend( - rhs.use_tree_list()? - .syntax() - .children_with_tokens() - .filter(|it| it.kind() != T!['{'] && it.kind() != T!['}']), - ); - let use_tree_list = lhs.use_tree_list()?; - let pos = InsertPosition::Before(use_tree_list.r_curly_token()?.into()); - let use_tree_list = use_tree_list.insert_children(pos, to_insert); - Some(lhs.with_use_tree_list(use_tree_list)) -} - -fn common_prefix(lhs: &ast::Path, rhs: &ast::Path) -> Option<(ast::Path, ast::Path)> { - let mut res = None; - let mut lhs_curr = first_path(&lhs); - let mut rhs_curr = first_path(&rhs); - loop { - match (lhs_curr.segment(), rhs_curr.segment()) { - (Some(lhs), Some(rhs)) if lhs.syntax().text() == rhs.syntax().text() => (), - _ => break, - } - res = Some((lhs_curr.clone(), rhs_curr.clone())); - - match (lhs_curr.parent_path(), rhs_curr.parent_path()) { - (Some(lhs), Some(rhs)) => { - lhs_curr = lhs; - rhs_curr = rhs; - } - _ => break, - } - } - - res -} - -fn first_path(path: &ast::Path) -> ast::Path { - successors(Some(path.clone()), |it| it.qualifier()).last().unwrap() -} - -#[cfg(test)] -mod tests { - use crate::tests::{check_assist, check_assist_not_applicable}; - - use super::*; - - #[test] - fn test_merge_first() { - check_assist( - merge_imports, - r" -use std::fmt<|>::Debug; -use std::fmt::Display; -", - r" -use std::fmt::{Debug, Display}; -", - ) - } - - #[test] - fn test_merge_second() { - check_assist( - merge_imports, - r" -use std::fmt::Debug; -use std::fmt<|>::Display; -", - r" -use std::fmt::{Display, Debug}; -", - ); - } - - #[test] - fn test_merge_nested() { - check_assist( - merge_imports, - r" -use std::{fmt<|>::Debug, fmt::Display}; 
-", - r" -use std::{fmt::{Debug, Display}}; -", - ); - check_assist( - merge_imports, - r" -use std::{fmt::Debug, fmt<|>::Display}; -", - r" -use std::{fmt::{Display, Debug}}; -", - ); - } - - #[test] - fn test_merge_single_wildcard_diff_prefixes() { - check_assist( - merge_imports, - r" -use std<|>::cell::*; -use std::str; -", - r" -use std::{cell::*, str}; -", - ) - } - - #[test] - fn test_merge_both_wildcard_diff_prefixes() { - check_assist( - merge_imports, - r" -use std<|>::cell::*; -use std::str::*; -", - r" -use std::{cell::*, str::*}; -", - ) - } - - #[test] - fn removes_just_enough_whitespace() { - check_assist( - merge_imports, - r" -use foo<|>::bar; -use foo::baz; - -/// Doc comment -", - r" -use foo::{bar, baz}; - -/// Doc comment -", - ); - } - - #[test] - fn works_with_trailing_comma() { - check_assist( - merge_imports, - r" -use { - foo<|>::bar, - foo::baz, -}; -", - r" -use { - foo::{bar, baz}, -}; -", - ); - check_assist( - merge_imports, - r" -use { - foo::baz, - foo<|>::bar, -}; -", - r" -use { - foo::{bar, baz}, -}; -", - ); - } - - #[test] - fn test_double_comma() { - check_assist( - merge_imports, - r" -use foo::bar::baz; -use foo::<|>{ - FooBar, -}; -", - r" -use foo::{ - FooBar, -bar::baz}; -", - ) - } - - #[test] - fn test_empty_use() { - check_assist_not_applicable( - merge_imports, - r" -use std::<|> -fn main() {}", - ); - } -} diff --git a/crates/ra_assists/src/handlers/merge_match_arms.rs b/crates/ra_assists/src/handlers/merge_match_arms.rs deleted file mode 100644 index 5632922823..0000000000 --- a/crates/ra_assists/src/handlers/merge_match_arms.rs +++ /dev/null @@ -1,248 +0,0 @@ -use std::iter::successors; - -use ra_syntax::{ - algo::neighbor, - ast::{self, AstNode}, - Direction, -}; - -use crate::{AssistContext, AssistId, AssistKind, Assists, TextRange}; - -// Assist: merge_match_arms -// -// Merges identical match arms. 
-// -// ``` -// enum Action { Move { distance: u32 }, Stop } -// -// fn handle(action: Action) { -// match action { -// <|>Action::Move(..) => foo(), -// Action::Stop => foo(), -// } -// } -// ``` -// -> -// ``` -// enum Action { Move { distance: u32 }, Stop } -// -// fn handle(action: Action) { -// match action { -// Action::Move(..) | Action::Stop => foo(), -// } -// } -// ``` -pub(crate) fn merge_match_arms(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let current_arm = ctx.find_node_at_offset::()?; - // Don't try to handle arms with guards for now - can add support for this later - if current_arm.guard().is_some() { - return None; - } - let current_expr = current_arm.expr()?; - let current_text_range = current_arm.syntax().text_range(); - - // We check if the following match arms match this one. We could, but don't, - // compare to the previous match arm as well. - let arms_to_merge = successors(Some(current_arm), |it| neighbor(it, Direction::Next)) - .take_while(|arm| { - if arm.guard().is_some() { - return false; - } - match arm.expr() { - Some(expr) => expr.syntax().text() == current_expr.syntax().text(), - None => false, - } - }) - .collect::>(); - - if arms_to_merge.len() <= 1 { - return None; - } - - acc.add( - AssistId("merge_match_arms", AssistKind::RefactorRewrite), - "Merge match arms", - current_text_range, - |edit| { - let pats = if arms_to_merge.iter().any(contains_placeholder) { - "_".into() - } else { - arms_to_merge - .iter() - .filter_map(ast::MatchArm::pat) - .map(|x| x.syntax().to_string()) - .collect::>() - .join(" | ") - }; - - let arm = format!("{} => {}", pats, current_expr.syntax().text()); - - let start = arms_to_merge.first().unwrap().syntax().text_range().start(); - let end = arms_to_merge.last().unwrap().syntax().text_range().end(); - - edit.replace(TextRange::new(start, end), arm); - }, - ) -} - -fn contains_placeholder(a: &ast::MatchArm) -> bool { - matches!(a.pat(), Some(ast::Pat::WildcardPat(..))) -} - -#[cfg(test)] 
-mod tests { - use crate::tests::{check_assist, check_assist_not_applicable}; - - use super::*; - - #[test] - fn merge_match_arms_single_patterns() { - check_assist( - merge_match_arms, - r#" - #[derive(Debug)] - enum X { A, B, C } - - fn main() { - let x = X::A; - let y = match x { - X::A => { 1i32<|> } - X::B => { 1i32 } - X::C => { 2i32 } - } - } - "#, - r#" - #[derive(Debug)] - enum X { A, B, C } - - fn main() { - let x = X::A; - let y = match x { - X::A | X::B => { 1i32 } - X::C => { 2i32 } - } - } - "#, - ); - } - - #[test] - fn merge_match_arms_multiple_patterns() { - check_assist( - merge_match_arms, - r#" - #[derive(Debug)] - enum X { A, B, C, D, E } - - fn main() { - let x = X::A; - let y = match x { - X::A | X::B => {<|> 1i32 }, - X::C | X::D => { 1i32 }, - X::E => { 2i32 }, - } - } - "#, - r#" - #[derive(Debug)] - enum X { A, B, C, D, E } - - fn main() { - let x = X::A; - let y = match x { - X::A | X::B | X::C | X::D => { 1i32 }, - X::E => { 2i32 }, - } - } - "#, - ); - } - - #[test] - fn merge_match_arms_placeholder_pattern() { - check_assist( - merge_match_arms, - r#" - #[derive(Debug)] - enum X { A, B, C, D, E } - - fn main() { - let x = X::A; - let y = match x { - X::A => { 1i32 }, - X::B => { 2i<|>32 }, - _ => { 2i32 } - } - } - "#, - r#" - #[derive(Debug)] - enum X { A, B, C, D, E } - - fn main() { - let x = X::A; - let y = match x { - X::A => { 1i32 }, - _ => { 2i32 } - } - } - "#, - ); - } - - #[test] - fn merges_all_subsequent_arms() { - check_assist( - merge_match_arms, - r#" - enum X { A, B, C, D, E } - - fn main() { - match X::A { - X::A<|> => 92, - X::B => 92, - X::C => 92, - X::D => 62, - _ => panic!(), - } - } - "#, - r#" - enum X { A, B, C, D, E } - - fn main() { - match X::A { - X::A | X::B | X::C => 92, - X::D => 62, - _ => panic!(), - } - } - "#, - ) - } - - #[test] - fn merge_match_arms_rejects_guards() { - check_assist_not_applicable( - merge_match_arms, - r#" - #[derive(Debug)] - enum X { - A(i32), - B, - C - } - - fn main() { - 
let x = X::A; - let y = match x { - X::A(a) if a > 5 => { <|>1i32 }, - X::B => { 1i32 }, - X::C => { 2i32 } - } - } - "#, - ); - } -} diff --git a/crates/ra_assists/src/handlers/move_bounds.rs b/crates/ra_assists/src/handlers/move_bounds.rs deleted file mode 100644 index 6d394443eb..0000000000 --- a/crates/ra_assists/src/handlers/move_bounds.rs +++ /dev/null @@ -1,152 +0,0 @@ -use ra_syntax::{ - ast::{self, edit::AstNodeEdit, make, AstNode, NameOwner, TypeBoundsOwner}, - match_ast, - SyntaxKind::*, - T, -}; - -use crate::{AssistContext, AssistId, AssistKind, Assists}; - -// Assist: move_bounds_to_where_clause -// -// Moves inline type bounds to a where clause. -// -// ``` -// fn applyF: FnOnce(T) -> U>(f: F, x: T) -> U { -// f(x) -// } -// ``` -// -> -// ``` -// fn apply(f: F, x: T) -> U where F: FnOnce(T) -> U { -// f(x) -// } -// ``` -pub(crate) fn move_bounds_to_where_clause(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let type_param_list = ctx.find_node_at_offset::()?; - - let mut type_params = type_param_list.type_params(); - if type_params.all(|p| p.type_bound_list().is_none()) { - return None; - } - - let parent = type_param_list.syntax().parent()?; - if parent.children_with_tokens().any(|it| it.kind() == WHERE_CLAUSE) { - return None; - } - - let anchor = match_ast! { - match parent { - ast::Fn(it) => it.body()?.syntax().clone().into(), - ast::Trait(it) => it.assoc_item_list()?.syntax().clone().into(), - ast::Impl(it) => it.assoc_item_list()?.syntax().clone().into(), - ast::Enum(it) => it.variant_list()?.syntax().clone().into(), - ast::Struct(it) => { - it.syntax().children_with_tokens() - .find(|it| it.kind() == RECORD_FIELD_LIST || it.kind() == T![;])? 
- }, - _ => return None - } - }; - - let target = type_param_list.syntax().text_range(); - acc.add( - AssistId("move_bounds_to_where_clause", AssistKind::RefactorRewrite), - "Move to where clause", - target, - |edit| { - let new_params = type_param_list - .type_params() - .filter(|it| it.type_bound_list().is_some()) - .map(|type_param| { - let without_bounds = type_param.remove_bounds(); - (type_param, without_bounds) - }); - - let new_type_param_list = type_param_list.replace_descendants(new_params); - edit.replace_ast(type_param_list.clone(), new_type_param_list); - - let where_clause = { - let predicates = type_param_list.type_params().filter_map(build_predicate); - make::where_clause(predicates) - }; - - let to_insert = match anchor.prev_sibling_or_token() { - Some(ref elem) if elem.kind() == WHITESPACE => { - format!("{} ", where_clause.syntax()) - } - _ => format!(" {}", where_clause.syntax()), - }; - edit.insert(anchor.text_range().start(), to_insert); - }, - ) -} - -fn build_predicate(param: ast::TypeParam) -> Option { - let path = { - let name_ref = make::name_ref(¶m.name()?.syntax().to_string()); - let segment = make::path_segment(name_ref); - make::path_unqualified(segment) - }; - let predicate = make::where_pred(path, param.type_bound_list()?.bounds()); - Some(predicate) -} - -#[cfg(test)] -mod tests { - use super::*; - - use crate::tests::check_assist; - - #[test] - fn move_bounds_to_where_clause_fn() { - check_assist( - move_bounds_to_where_clause, - r#" - fn fooF: FnOnce(T) -> T>() {} - "#, - r#" - fn foo() where T: u32, F: FnOnce(T) -> T {} - "#, - ); - } - - #[test] - fn move_bounds_to_where_clause_impl() { - check_assist( - move_bounds_to_where_clause, - r#" - implT> A {} - "#, - r#" - impl A where U: u32 {} - "#, - ); - } - - #[test] - fn move_bounds_to_where_clause_struct() { - check_assist( - move_bounds_to_where_clause, - r#" - struct A<<|>T: Iterator> {} - "#, - r#" - struct A where T: Iterator {} - "#, - ); - } - - #[test] - fn 
move_bounds_to_where_clause_tuple_struct() { - check_assist( - move_bounds_to_where_clause, - r#" - struct Pair<<|>T: u32>(T, T); - "#, - r#" - struct Pair(T, T) where T: u32; - "#, - ); - } -} diff --git a/crates/ra_assists/src/handlers/move_guard.rs b/crates/ra_assists/src/handlers/move_guard.rs deleted file mode 100644 index 4060d34c64..0000000000 --- a/crates/ra_assists/src/handlers/move_guard.rs +++ /dev/null @@ -1,303 +0,0 @@ -use ra_syntax::{ - ast::{AstNode, IfExpr, MatchArm}, - SyntaxKind::WHITESPACE, -}; - -use crate::{AssistContext, AssistId, AssistKind, Assists}; - -// Assist: move_guard_to_arm_body -// -// Moves match guard into match arm body. -// -// ``` -// enum Action { Move { distance: u32 }, Stop } -// -// fn handle(action: Action) { -// match action { -// Action::Move { distance } <|>if distance > 10 => foo(), -// _ => (), -// } -// } -// ``` -// -> -// ``` -// enum Action { Move { distance: u32 }, Stop } -// -// fn handle(action: Action) { -// match action { -// Action::Move { distance } => if distance > 10 { foo() }, -// _ => (), -// } -// } -// ``` -pub(crate) fn move_guard_to_arm_body(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let match_arm = ctx.find_node_at_offset::()?; - let guard = match_arm.guard()?; - let space_before_guard = guard.syntax().prev_sibling_or_token(); - - let guard_conditions = guard.expr()?; - let arm_expr = match_arm.expr()?; - let buf = format!("if {} {{ {} }}", guard_conditions.syntax().text(), arm_expr.syntax().text()); - - let target = guard.syntax().text_range(); - acc.add( - AssistId("move_guard_to_arm_body", AssistKind::RefactorRewrite), - "Move guard to arm body", - target, - |edit| { - match space_before_guard { - Some(element) if element.kind() == WHITESPACE => { - edit.delete(element.text_range()); - } - _ => (), - }; - - edit.delete(guard.syntax().text_range()); - edit.replace_node_and_indent(arm_expr.syntax(), buf); - }, - ) -} - -// Assist: move_arm_cond_to_match_guard -// -// Moves if 
expression from match arm body into a guard. -// -// ``` -// enum Action { Move { distance: u32 }, Stop } -// -// fn handle(action: Action) { -// match action { -// Action::Move { distance } => <|>if distance > 10 { foo() }, -// _ => (), -// } -// } -// ``` -// -> -// ``` -// enum Action { Move { distance: u32 }, Stop } -// -// fn handle(action: Action) { -// match action { -// Action::Move { distance } if distance > 10 => foo(), -// _ => (), -// } -// } -// ``` -pub(crate) fn move_arm_cond_to_match_guard(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let match_arm: MatchArm = ctx.find_node_at_offset::()?; - let match_pat = match_arm.pat()?; - - let arm_body = match_arm.expr()?; - let if_expr: IfExpr = IfExpr::cast(arm_body.syntax().clone())?; - let cond = if_expr.condition()?; - let then_block = if_expr.then_branch()?; - - // Not support if with else branch - if if_expr.else_branch().is_some() { - return None; - } - // Not support moving if let to arm guard - if cond.pat().is_some() { - return None; - } - - let buf = format!(" if {}", cond.syntax().text()); - - let target = if_expr.syntax().text_range(); - acc.add( - AssistId("move_arm_cond_to_match_guard", AssistKind::RefactorRewrite), - "Move condition to match guard", - target, - |edit| { - let then_only_expr = then_block.statements().next().is_none(); - - match &then_block.expr() { - Some(then_expr) if then_only_expr => { - edit.replace(if_expr.syntax().text_range(), then_expr.syntax().text()) - } - _ => edit.replace(if_expr.syntax().text_range(), then_block.syntax().text()), - } - - edit.insert(match_pat.syntax().text_range().end(), buf); - }, - ) -} - -#[cfg(test)] -mod tests { - use super::*; - - use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target}; - - #[test] - fn move_guard_to_arm_body_target() { - check_assist_target( - move_guard_to_arm_body, - r#" - fn f() { - let t = 'a'; - let chars = "abcd"; - match t { - '\r' <|>if chars.clone().next() == Some('\n') => 
false, - _ => true - } - } - "#, - r#"if chars.clone().next() == Some('\n')"#, - ); - } - - #[test] - fn move_guard_to_arm_body_works() { - check_assist( - move_guard_to_arm_body, - r#" - fn f() { - let t = 'a'; - let chars = "abcd"; - match t { - '\r' <|>if chars.clone().next() == Some('\n') => false, - _ => true - } - } - "#, - r#" - fn f() { - let t = 'a'; - let chars = "abcd"; - match t { - '\r' => if chars.clone().next() == Some('\n') { false }, - _ => true - } - } - "#, - ); - } - - #[test] - fn move_guard_to_arm_body_works_complex_match() { - check_assist( - move_guard_to_arm_body, - r#" - fn f() { - match x { - <|>y @ 4 | y @ 5 if y > 5 => true, - _ => false - } - } - "#, - r#" - fn f() { - match x { - y @ 4 | y @ 5 => if y > 5 { true }, - _ => false - } - } - "#, - ); - } - - #[test] - fn move_arm_cond_to_match_guard_works() { - check_assist( - move_arm_cond_to_match_guard, - r#" - fn f() { - let t = 'a'; - let chars = "abcd"; - match t { - '\r' => if chars.clone().next() == Some('\n') { <|>false }, - _ => true - } - } - "#, - r#" - fn f() { - let t = 'a'; - let chars = "abcd"; - match t { - '\r' if chars.clone().next() == Some('\n') => false, - _ => true - } - } - "#, - ); - } - - #[test] - fn move_arm_cond_to_match_guard_if_let_not_works() { - check_assist_not_applicable( - move_arm_cond_to_match_guard, - r#" - fn f() { - let t = 'a'; - let chars = "abcd"; - match t { - '\r' => if let Some(_) = chars.clone().next() { <|>false }, - _ => true - } - } - "#, - ); - } - - #[test] - fn move_arm_cond_to_match_guard_if_empty_body_works() { - check_assist( - move_arm_cond_to_match_guard, - r#" - fn f() { - let t = 'a'; - let chars = "abcd"; - match t { - '\r' => if chars.clone().next().is_some() { <|> }, - _ => true - } - } - "#, - r#" - fn f() { - let t = 'a'; - let chars = "abcd"; - match t { - '\r' if chars.clone().next().is_some() => { }, - _ => true - } - } - "#, - ); - } - - #[test] - fn move_arm_cond_to_match_guard_if_multiline_body_works() { - 
check_assist( - move_arm_cond_to_match_guard, - r#" - fn f() { - let mut t = 'a'; - let chars = "abcd"; - match t { - '\r' => if chars.clone().next().is_some() { - t = 'e';<|> - false - }, - _ => true - } - } - "#, - r#" - fn f() { - let mut t = 'a'; - let chars = "abcd"; - match t { - '\r' if chars.clone().next().is_some() => { - t = 'e'; - false - }, - _ => true - } - } - "#, - ); - } -} diff --git a/crates/ra_assists/src/handlers/raw_string.rs b/crates/ra_assists/src/handlers/raw_string.rs deleted file mode 100644 index 4c797178f1..0000000000 --- a/crates/ra_assists/src/handlers/raw_string.rs +++ /dev/null @@ -1,504 +0,0 @@ -use std::borrow::Cow; - -use ra_syntax::{ - ast::{self, HasQuotes, HasStringValue}, - AstToken, - SyntaxKind::{RAW_STRING, STRING}, - TextRange, TextSize, -}; -use test_utils::mark; - -use crate::{AssistContext, AssistId, AssistKind, Assists}; - -// Assist: make_raw_string -// -// Adds `r#` to a plain string literal. -// -// ``` -// fn main() { -// "Hello,<|> World!"; -// } -// ``` -// -> -// ``` -// fn main() { -// r#"Hello, World!"#; -// } -// ``` -pub(crate) fn make_raw_string(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let token = ctx.find_token_at_offset(STRING).and_then(ast::String::cast)?; - let value = token.value()?; - let target = token.syntax().text_range(); - acc.add( - AssistId("make_raw_string", AssistKind::RefactorRewrite), - "Rewrite as raw string", - target, - |edit| { - let hashes = "#".repeat(required_hashes(&value).max(1)); - if matches!(value, Cow::Borrowed(_)) { - // Avoid replacing the whole string to better position the cursor. - edit.insert(token.syntax().text_range().start(), format!("r{}", hashes)); - edit.insert(token.syntax().text_range().end(), format!("{}", hashes)); - } else { - edit.replace( - token.syntax().text_range(), - format!("r{}\"{}\"{}", hashes, value, hashes), - ); - } - }, - ) -} - -// Assist: make_usual_string -// -// Turns a raw string into a plain string. 
-// -// ``` -// fn main() { -// r#"Hello,<|> "World!""#; -// } -// ``` -// -> -// ``` -// fn main() { -// "Hello, \"World!\""; -// } -// ``` -pub(crate) fn make_usual_string(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let token = ctx.find_token_at_offset(RAW_STRING).and_then(ast::RawString::cast)?; - let value = token.value()?; - let target = token.syntax().text_range(); - acc.add( - AssistId("make_usual_string", AssistKind::RefactorRewrite), - "Rewrite as regular string", - target, - |edit| { - // parse inside string to escape `"` - let escaped = value.escape_default().to_string(); - if let Some(offsets) = token.quote_offsets() { - if token.text()[offsets.contents - token.syntax().text_range().start()] == escaped { - edit.replace(offsets.quotes.0, "\""); - edit.replace(offsets.quotes.1, "\""); - return; - } - } - - edit.replace(token.syntax().text_range(), format!("\"{}\"", escaped)); - }, - ) -} - -// Assist: add_hash -// -// Adds a hash to a raw string literal. -// -// ``` -// fn main() { -// r#"Hello,<|> World!"#; -// } -// ``` -// -> -// ``` -// fn main() { -// r##"Hello, World!"##; -// } -// ``` -pub(crate) fn add_hash(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let token = ctx.find_token_at_offset(RAW_STRING)?; - let target = token.text_range(); - acc.add(AssistId("add_hash", AssistKind::Refactor), "Add #", target, |edit| { - edit.insert(token.text_range().start() + TextSize::of('r'), "#"); - edit.insert(token.text_range().end(), "#"); - }) -} - -// Assist: remove_hash -// -// Removes a hash from a raw string literal. 
-// -// ``` -// fn main() { -// r#"Hello,<|> World!"#; -// } -// ``` -// -> -// ``` -// fn main() { -// r"Hello, World!"; -// } -// ``` -pub(crate) fn remove_hash(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let token = ctx.find_token_at_offset(RAW_STRING).and_then(ast::RawString::cast)?; - - let text = token.text().as_str(); - if !text.starts_with("r#") && text.ends_with('#') { - return None; - } - - let existing_hashes = text.chars().skip(1).take_while(|&it| it == '#').count(); - - let text_range = token.syntax().text_range(); - let internal_text = &text[token.text_range_between_quotes()? - text_range.start()]; - - if existing_hashes == required_hashes(internal_text) { - mark::hit!(cant_remove_required_hash); - return None; - } - - acc.add(AssistId("remove_hash", AssistKind::RefactorRewrite), "Remove #", text_range, |edit| { - edit.delete(TextRange::at(text_range.start() + TextSize::of('r'), TextSize::of('#'))); - edit.delete(TextRange::new(text_range.end() - TextSize::of('#'), text_range.end())); - }) -} - -fn required_hashes(s: &str) -> usize { - let mut res = 0usize; - for idx in s.match_indices('"').map(|(i, _)| i) { - let (_, sub) = s.split_at(idx + 1); - let n_hashes = sub.chars().take_while(|c| *c == '#').count(); - res = res.max(n_hashes + 1) - } - res -} - -#[test] -fn test_required_hashes() { - assert_eq!(0, required_hashes("abc")); - assert_eq!(0, required_hashes("###")); - assert_eq!(1, required_hashes("\"")); - assert_eq!(2, required_hashes("\"#abc")); - assert_eq!(0, required_hashes("#abc")); - assert_eq!(3, required_hashes("#ab\"##c")); - assert_eq!(5, required_hashes("#ab\"##\"####c")); -} - -#[cfg(test)] -mod tests { - use test_utils::mark; - - use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target}; - - use super::*; - - #[test] - fn make_raw_string_target() { - check_assist_target( - make_raw_string, - r#" - fn f() { - let s = <|>"random\nstring"; - } - "#, - r#""random\nstring""#, - ); - } - - #[test] - 
fn make_raw_string_works() { - check_assist( - make_raw_string, - r#" -fn f() { - let s = <|>"random\nstring"; -} -"#, - r##" -fn f() { - let s = r#"random -string"#; -} -"##, - ) - } - - #[test] - fn make_raw_string_works_inside_macros() { - check_assist( - make_raw_string, - r#" - fn f() { - format!(<|>"x = {}", 92) - } - "#, - r##" - fn f() { - format!(r#"x = {}"#, 92) - } - "##, - ) - } - - #[test] - fn make_raw_string_hashes_inside_works() { - check_assist( - make_raw_string, - r###" -fn f() { - let s = <|>"#random##\nstring"; -} -"###, - r####" -fn f() { - let s = r#"#random## -string"#; -} -"####, - ) - } - - #[test] - fn make_raw_string_closing_hashes_inside_works() { - check_assist( - make_raw_string, - r###" -fn f() { - let s = <|>"#random\"##\nstring"; -} -"###, - r####" -fn f() { - let s = r###"#random"## -string"###; -} -"####, - ) - } - - #[test] - fn make_raw_string_nothing_to_unescape_works() { - check_assist( - make_raw_string, - r#" - fn f() { - let s = <|>"random string"; - } - "#, - r##" - fn f() { - let s = r#"random string"#; - } - "##, - ) - } - - #[test] - fn make_raw_string_not_works_on_partial_string() { - check_assist_not_applicable( - make_raw_string, - r#" - fn f() { - let s = "foo<|> - } - "#, - ) - } - - #[test] - fn make_usual_string_not_works_on_partial_string() { - check_assist_not_applicable( - make_usual_string, - r#" - fn main() { - let s = r#"bar<|> - } - "#, - ) - } - - #[test] - fn add_hash_target() { - check_assist_target( - add_hash, - r#" - fn f() { - let s = <|>r"random string"; - } - "#, - r#"r"random string""#, - ); - } - - #[test] - fn add_hash_works() { - check_assist( - add_hash, - r#" - fn f() { - let s = <|>r"random string"; - } - "#, - r##" - fn f() { - let s = r#"random string"#; - } - "##, - ) - } - - #[test] - fn add_more_hash_works() { - check_assist( - add_hash, - r##" - fn f() { - let s = <|>r#"random"string"#; - } - "##, - r###" - fn f() { - let s = r##"random"string"##; - } - "###, - ) - } - - #[test] - fn 
add_hash_not_works() { - check_assist_not_applicable( - add_hash, - r#" - fn f() { - let s = <|>"random string"; - } - "#, - ); - } - - #[test] - fn remove_hash_target() { - check_assist_target( - remove_hash, - r##" - fn f() { - let s = <|>r#"random string"#; - } - "##, - r##"r#"random string"#"##, - ); - } - - #[test] - fn remove_hash_works() { - check_assist( - remove_hash, - r##"fn f() { let s = <|>r#"random string"#; }"##, - r#"fn f() { let s = r"random string"; }"#, - ) - } - - #[test] - fn cant_remove_required_hash() { - mark::check!(cant_remove_required_hash); - check_assist_not_applicable( - remove_hash, - r##" - fn f() { - let s = <|>r#"random"str"ing"#; - } - "##, - ) - } - - #[test] - fn remove_more_hash_works() { - check_assist( - remove_hash, - r###" - fn f() { - let s = <|>r##"random string"##; - } - "###, - r##" - fn f() { - let s = r#"random string"#; - } - "##, - ) - } - - #[test] - fn remove_hash_doesnt_work() { - check_assist_not_applicable(remove_hash, r#"fn f() { let s = <|>"random string"; }"#); - } - - #[test] - fn remove_hash_no_hash_doesnt_work() { - check_assist_not_applicable(remove_hash, r#"fn f() { let s = <|>r"random string"; }"#); - } - - #[test] - fn make_usual_string_target() { - check_assist_target( - make_usual_string, - r##" - fn f() { - let s = <|>r#"random string"#; - } - "##, - r##"r#"random string"#"##, - ); - } - - #[test] - fn make_usual_string_works() { - check_assist( - make_usual_string, - r##" - fn f() { - let s = <|>r#"random string"#; - } - "##, - r#" - fn f() { - let s = "random string"; - } - "#, - ) - } - - #[test] - fn make_usual_string_with_quote_works() { - check_assist( - make_usual_string, - r##" - fn f() { - let s = <|>r#"random"str"ing"#; - } - "##, - r#" - fn f() { - let s = "random\"str\"ing"; - } - "#, - ) - } - - #[test] - fn make_usual_string_more_hash_works() { - check_assist( - make_usual_string, - r###" - fn f() { - let s = <|>r##"random string"##; - } - "###, - r##" - fn f() { - let s = "random 
string"; - } - "##, - ) - } - - #[test] - fn make_usual_string_not_works() { - check_assist_not_applicable( - make_usual_string, - r#" - fn f() { - let s = <|>"random string"; - } - "#, - ); - } -} diff --git a/crates/ra_assists/src/handlers/remove_dbg.rs b/crates/ra_assists/src/handlers/remove_dbg.rs deleted file mode 100644 index 9430ce1b5b..0000000000 --- a/crates/ra_assists/src/handlers/remove_dbg.rs +++ /dev/null @@ -1,205 +0,0 @@ -use ra_syntax::{ - ast::{self, AstNode}, - TextRange, TextSize, T, -}; - -use crate::{AssistContext, AssistId, AssistKind, Assists}; - -// Assist: remove_dbg -// -// Removes `dbg!()` macro call. -// -// ``` -// fn main() { -// <|>dbg!(92); -// } -// ``` -// -> -// ``` -// fn main() { -// 92; -// } -// ``` -pub(crate) fn remove_dbg(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let macro_call = ctx.find_node_at_offset::()?; - - if !is_valid_macrocall(¯o_call, "dbg")? { - return None; - } - - let is_leaf = macro_call.syntax().next_sibling().is_none(); - - let macro_end = if macro_call.semicolon_token().is_some() { - macro_call.syntax().text_range().end() - TextSize::of(';') - } else { - macro_call.syntax().text_range().end() - }; - - // macro_range determines what will be deleted and replaced with macro_content - let macro_range = TextRange::new(macro_call.syntax().text_range().start(), macro_end); - let paste_instead_of_dbg = { - let text = macro_call.token_tree()?.syntax().text(); - - // leafiness determines if we should include the parenthesis or not - let slice_index: TextRange = if is_leaf { - // leaf means - we can extract the contents of the dbg! 
in text - TextRange::new(TextSize::of('('), text.len() - TextSize::of(')')) - } else { - // not leaf - means we should keep the parens - TextRange::up_to(text.len()) - }; - text.slice(slice_index).to_string() - }; - - let target = macro_call.syntax().text_range(); - acc.add(AssistId("remove_dbg", AssistKind::Refactor), "Remove dbg!()", target, |builder| { - builder.replace(macro_range, paste_instead_of_dbg); - }) -} - -/// Verifies that the given macro_call actually matches the given name -/// and contains proper ending tokens -fn is_valid_macrocall(macro_call: &ast::MacroCall, macro_name: &str) -> Option { - let path = macro_call.path()?; - let name_ref = path.segment()?.name_ref()?; - - // Make sure it is actually a dbg-macro call, dbg followed by ! - let excl = path.syntax().next_sibling_or_token()?; - - if name_ref.text() != macro_name || excl.kind() != T![!] { - return None; - } - - let node = macro_call.token_tree()?.syntax().clone(); - let first_child = node.first_child_or_token()?; - let last_child = node.last_child_or_token()?; - - match (first_child.kind(), last_child.kind()) { - (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}']) => Some(true), - _ => Some(false), - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target}; - - #[test] - fn test_remove_dbg() { - check_assist(remove_dbg, "<|>dbg!(1 + 1)", "1 + 1"); - - check_assist(remove_dbg, "dbg!<|>((1 + 1))", "(1 + 1)"); - - check_assist(remove_dbg, "dbg!(1 <|>+ 1)", "1 + 1"); - - check_assist(remove_dbg, "let _ = <|>dbg!(1 + 1)", "let _ = 1 + 1"); - - check_assist( - remove_dbg, - " -fn foo(n: usize) { - if let Some(_) = dbg!(n.<|>checked_sub(4)) { - // ... - } -} -", - " -fn foo(n: usize) { - if let Some(_) = n.checked_sub(4) { - // ... 
- } -} -", - ); - } - - #[test] - fn test_remove_dbg_with_brackets_and_braces() { - check_assist(remove_dbg, "dbg![<|>1 + 1]", "1 + 1"); - check_assist(remove_dbg, "dbg!{<|>1 + 1}", "1 + 1"); - } - - #[test] - fn test_remove_dbg_not_applicable() { - check_assist_not_applicable(remove_dbg, "<|>vec![1, 2, 3]"); - check_assist_not_applicable(remove_dbg, "<|>dbg(5, 6, 7)"); - check_assist_not_applicable(remove_dbg, "<|>dbg!(5, 6, 7"); - } - - #[test] - fn test_remove_dbg_target() { - check_assist_target( - remove_dbg, - " -fn foo(n: usize) { - if let Some(_) = dbg!(n.<|>checked_sub(4)) { - // ... - } -} -", - "dbg!(n.checked_sub(4))", - ); - } - - #[test] - fn test_remove_dbg_keep_semicolon() { - // https://github.com/rust-analyzer/rust-analyzer/issues/5129#issuecomment-651399779 - // not quite though - // adding a comment at the end of the line makes - // the ast::MacroCall to include the semicolon at the end - check_assist( - remove_dbg, - r#"let res = <|>dbg!(1 * 20); // needless comment"#, - r#"let res = 1 * 20; // needless comment"#, - ); - } - - #[test] - fn test_remove_dbg_keep_expression() { - check_assist( - remove_dbg, - r#"let res = <|>dbg!(a + b).foo();"#, - r#"let res = (a + b).foo();"#, - ); - } - - #[test] - fn test_remove_dbg_from_inside_fn() { - check_assist_target( - remove_dbg, - r#" -fn square(x: u32) -> u32 { - x * x -} - -fn main() { - let x = square(dbg<|>!(5 + 10)); - println!("{}", x); -}"#, - "dbg!(5 + 10)", - ); - - check_assist( - remove_dbg, - r#" -fn square(x: u32) -> u32 { - x * x -} - -fn main() { - let x = square(dbg<|>!(5 + 10)); - println!("{}", x); -}"#, - r#" -fn square(x: u32) -> u32 { - x * x -} - -fn main() { - let x = square(5 + 10); - println!("{}", x); -}"#, - ); - } -} diff --git a/crates/ra_assists/src/handlers/remove_mut.rs b/crates/ra_assists/src/handlers/remove_mut.rs deleted file mode 100644 index ef55c354ee..0000000000 --- a/crates/ra_assists/src/handlers/remove_mut.rs +++ /dev/null @@ -1,37 +0,0 @@ -use 
ra_syntax::{SyntaxKind, TextRange, T}; - -use crate::{AssistContext, AssistId, AssistKind, Assists}; - -// Assist: remove_mut -// -// Removes the `mut` keyword. -// -// ``` -// impl Walrus { -// fn feed(&mut<|> self, amount: u32) {} -// } -// ``` -// -> -// ``` -// impl Walrus { -// fn feed(&self, amount: u32) {} -// } -// ``` -pub(crate) fn remove_mut(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let mut_token = ctx.find_token_at_offset(T![mut])?; - let delete_from = mut_token.text_range().start(); - let delete_to = match mut_token.next_token() { - Some(it) if it.kind() == SyntaxKind::WHITESPACE => it.text_range().end(), - _ => mut_token.text_range().end(), - }; - - let target = mut_token.text_range(); - acc.add( - AssistId("remove_mut", AssistKind::Refactor), - "Remove `mut` keyword", - target, - |builder| { - builder.delete(TextRange::new(delete_from, delete_to)); - }, - ) -} diff --git a/crates/ra_assists/src/handlers/reorder_fields.rs b/crates/ra_assists/src/handlers/reorder_fields.rs deleted file mode 100644 index c9b743a06b..0000000000 --- a/crates/ra_assists/src/handlers/reorder_fields.rs +++ /dev/null @@ -1,220 +0,0 @@ -use itertools::Itertools; -use rustc_hash::FxHashMap; - -use hir::{Adt, ModuleDef, PathResolution, Semantics, Struct}; -use ra_ide_db::RootDatabase; -use ra_syntax::{algo, ast, match_ast, AstNode, SyntaxKind, SyntaxKind::*, SyntaxNode}; - -use crate::{AssistContext, AssistId, AssistKind, Assists}; - -// Assist: reorder_fields -// -// Reorder the fields of record literals and record patterns in the same order as in -// the definition. 
-// -// ``` -// struct Foo {foo: i32, bar: i32}; -// const test: Foo = <|>Foo {bar: 0, foo: 1} -// ``` -// -> -// ``` -// struct Foo {foo: i32, bar: i32}; -// const test: Foo = Foo {foo: 1, bar: 0} -// ``` -// -pub(crate) fn reorder_fields(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - reorder::(acc, ctx).or_else(|| reorder::(acc, ctx)) -} - -fn reorder(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let record = ctx.find_node_at_offset::()?; - let path = record.syntax().children().find_map(ast::Path::cast)?; - - let ranks = compute_fields_ranks(&path, &ctx)?; - - let fields = get_fields(&record.syntax()); - let sorted_fields = sorted_by_rank(&fields, |node| { - *ranks.get(&get_field_name(node)).unwrap_or(&usize::max_value()) - }); - - if sorted_fields == fields { - return None; - } - - let target = record.syntax().text_range(); - acc.add( - AssistId("reorder_fields", AssistKind::RefactorRewrite), - "Reorder record fields", - target, - |edit| { - for (old, new) in fields.iter().zip(&sorted_fields) { - algo::diff(old, new).into_text_edit(edit.text_edit_builder()); - } - }, - ) -} - -fn get_fields_kind(node: &SyntaxNode) -> Vec { - match node.kind() { - RECORD_EXPR => vec![RECORD_EXPR_FIELD], - RECORD_PAT => vec![RECORD_PAT_FIELD, IDENT_PAT], - _ => vec![], - } -} - -fn get_field_name(node: &SyntaxNode) -> String { - let res = match_ast! 
{ - match node { - ast::RecordExprField(field) => field.field_name().map(|it| it.to_string()), - ast::RecordPatField(field) => field.field_name().map(|it| it.to_string()), - _ => None, - } - }; - res.unwrap_or_default() -} - -fn get_fields(record: &SyntaxNode) -> Vec { - let kinds = get_fields_kind(record); - record.children().flat_map(|n| n.children()).filter(|n| kinds.contains(&n.kind())).collect() -} - -fn sorted_by_rank( - fields: &[SyntaxNode], - get_rank: impl Fn(&SyntaxNode) -> usize, -) -> Vec { - fields.iter().cloned().sorted_by_key(get_rank).collect() -} - -fn struct_definition(path: &ast::Path, sema: &Semantics) -> Option { - match sema.resolve_path(path) { - Some(PathResolution::Def(ModuleDef::Adt(Adt::Struct(s)))) => Some(s), - _ => None, - } -} - -fn compute_fields_ranks(path: &ast::Path, ctx: &AssistContext) -> Option> { - Some( - struct_definition(path, &ctx.sema)? - .fields(ctx.db()) - .iter() - .enumerate() - .map(|(idx, field)| (field.name(ctx.db()).to_string(), idx)) - .collect(), - ) -} - -#[cfg(test)] -mod tests { - use crate::tests::{check_assist, check_assist_not_applicable}; - - use super::*; - - #[test] - fn not_applicable_if_sorted() { - check_assist_not_applicable( - reorder_fields, - r#" - struct Foo { - foo: i32, - bar: i32, - } - - const test: Foo = <|>Foo { foo: 0, bar: 0 }; - "#, - ) - } - - #[test] - fn trivial_empty_fields() { - check_assist_not_applicable( - reorder_fields, - r#" - struct Foo {}; - const test: Foo = <|>Foo {} - "#, - ) - } - - #[test] - fn reorder_struct_fields() { - check_assist( - reorder_fields, - r#" - struct Foo {foo: i32, bar: i32}; - const test: Foo = <|>Foo {bar: 0, foo: 1} - "#, - r#" - struct Foo {foo: i32, bar: i32}; - const test: Foo = Foo {foo: 1, bar: 0} - "#, - ) - } - - #[test] - fn reorder_struct_pattern() { - check_assist( - reorder_fields, - r#" - struct Foo { foo: i64, bar: i64, baz: i64 } - - fn f(f: Foo) -> { - match f { - <|>Foo { baz: 0, ref mut bar, .. 
} => (), - _ => () - } - } - "#, - r#" - struct Foo { foo: i64, bar: i64, baz: i64 } - - fn f(f: Foo) -> { - match f { - Foo { ref mut bar, baz: 0, .. } => (), - _ => () - } - } - "#, - ) - } - - #[test] - fn reorder_with_extra_field() { - check_assist( - reorder_fields, - r#" - struct Foo { - foo: String, - bar: String, - } - - impl Foo { - fn new() -> Foo { - let foo = String::new(); - <|>Foo { - bar: foo.clone(), - extra: "Extra field", - foo, - } - } - } - "#, - r#" - struct Foo { - foo: String, - bar: String, - } - - impl Foo { - fn new() -> Foo { - let foo = String::new(); - Foo { - foo, - bar: foo.clone(), - extra: "Extra field", - } - } - } - "#, - ) - } -} diff --git a/crates/ra_assists/src/handlers/replace_if_let_with_match.rs b/crates/ra_assists/src/handlers/replace_if_let_with_match.rs deleted file mode 100644 index ecafb74a1e..0000000000 --- a/crates/ra_assists/src/handlers/replace_if_let_with_match.rs +++ /dev/null @@ -1,255 +0,0 @@ -use ra_fmt::unwrap_trivial_block; -use ra_syntax::{ - ast::{ - self, - edit::{AstNodeEdit, IndentLevel}, - make, - }, - AstNode, -}; - -use crate::{utils::TryEnum, AssistContext, AssistId, AssistKind, Assists}; - -// Assist: replace_if_let_with_match -// -// Replaces `if let` with an else branch with a `match` expression. 
-// -// ``` -// enum Action { Move { distance: u32 }, Stop } -// -// fn handle(action: Action) { -// <|>if let Action::Move { distance } = action { -// foo(distance) -// } else { -// bar() -// } -// } -// ``` -// -> -// ``` -// enum Action { Move { distance: u32 }, Stop } -// -// fn handle(action: Action) { -// match action { -// Action::Move { distance } => foo(distance), -// _ => bar(), -// } -// } -// ``` -pub(crate) fn replace_if_let_with_match(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let if_expr: ast::IfExpr = ctx.find_node_at_offset()?; - let cond = if_expr.condition()?; - let pat = cond.pat()?; - let expr = cond.expr()?; - let then_block = if_expr.then_branch()?; - let else_block = match if_expr.else_branch()? { - ast::ElseBranch::Block(it) => it, - ast::ElseBranch::IfExpr(_) => return None, - }; - - let target = if_expr.syntax().text_range(); - acc.add( - AssistId("replace_if_let_with_match", AssistKind::RefactorRewrite), - "Replace with match", - target, - move |edit| { - let match_expr = { - let then_arm = { - let then_block = then_block.reset_indent().indent(IndentLevel(1)); - let then_expr = unwrap_trivial_block(then_block); - make::match_arm(vec![pat.clone()], then_expr) - }; - let else_arm = { - let pattern = ctx - .sema - .type_of_pat(&pat) - .and_then(|ty| TryEnum::from_ty(&ctx.sema, &ty)) - .map(|it| it.sad_pattern()) - .unwrap_or_else(|| make::wildcard_pat().into()); - let else_expr = unwrap_trivial_block(else_block); - make::match_arm(vec![pattern], else_expr) - }; - let match_expr = - make::expr_match(expr, make::match_arm_list(vec![then_arm, else_arm])); - match_expr.indent(IndentLevel::from_node(if_expr.syntax())) - }; - - edit.replace_ast::(if_expr.into(), match_expr); - }, - ) -} - -#[cfg(test)] -mod tests { - use super::*; - - use crate::tests::{check_assist, check_assist_target}; - - #[test] - fn test_replace_if_let_with_match_unwraps_simple_expressions() { - check_assist( - replace_if_let_with_match, - r#" -impl 
VariantData { - pub fn is_struct(&self) -> bool { - if <|>let VariantData::Struct(..) = *self { - true - } else { - false - } - } -} "#, - r#" -impl VariantData { - pub fn is_struct(&self) -> bool { - match *self { - VariantData::Struct(..) => true, - _ => false, - } - } -} "#, - ) - } - - #[test] - fn test_replace_if_let_with_match_doesnt_unwrap_multiline_expressions() { - check_assist( - replace_if_let_with_match, - r#" -fn foo() { - if <|>let VariantData::Struct(..) = a { - bar( - 123 - ) - } else { - false - } -} "#, - r#" -fn foo() { - match a { - VariantData::Struct(..) => { - bar( - 123 - ) - } - _ => false, - } -} "#, - ) - } - - #[test] - fn replace_if_let_with_match_target() { - check_assist_target( - replace_if_let_with_match, - r#" -impl VariantData { - pub fn is_struct(&self) -> bool { - if <|>let VariantData::Struct(..) = *self { - true - } else { - false - } - } -} "#, - "if let VariantData::Struct(..) = *self { - true - } else { - false - }", - ); - } - - #[test] - fn special_case_option() { - check_assist( - replace_if_let_with_match, - r#" -enum Option { Some(T), None } -use Option::*; - -fn foo(x: Option) { - <|>if let Some(x) = x { - println!("{}", x) - } else { - println!("none") - } -} - "#, - r#" -enum Option { Some(T), None } -use Option::*; - -fn foo(x: Option) { - match x { - Some(x) => println!("{}", x), - None => println!("none"), - } -} - "#, - ); - } - - #[test] - fn special_case_result() { - check_assist( - replace_if_let_with_match, - r#" -enum Result { Ok(T), Err(E) } -use Result::*; - -fn foo(x: Result) { - <|>if let Ok(x) = x { - println!("{}", x) - } else { - println!("none") - } -} - "#, - r#" -enum Result { Ok(T), Err(E) } -use Result::*; - -fn foo(x: Result) { - match x { - Ok(x) => println!("{}", x), - Err(_) => println!("none"), - } -} - "#, - ); - } - - #[test] - fn nested_indent() { - check_assist( - replace_if_let_with_match, - r#" -fn main() { - if true { - <|>if let Ok(rel_path) = path.strip_prefix(root_path) { - let 
rel_path = RelativePathBuf::from_path(rel_path).ok()?; - Some((*id, rel_path)) - } else { - None - } - } -} -"#, - r#" -fn main() { - if true { - match path.strip_prefix(root_path) { - Ok(rel_path) => { - let rel_path = RelativePathBuf::from_path(rel_path).ok()?; - Some((*id, rel_path)) - } - _ => None, - } - } -} -"#, - ) - } -} diff --git a/crates/ra_assists/src/handlers/replace_let_with_if_let.rs b/crates/ra_assists/src/handlers/replace_let_with_if_let.rs deleted file mode 100644 index e4d436decc..0000000000 --- a/crates/ra_assists/src/handlers/replace_let_with_if_let.rs +++ /dev/null @@ -1,100 +0,0 @@ -use std::iter::once; - -use ra_syntax::{ - ast::{ - self, - edit::{AstNodeEdit, IndentLevel}, - make, - }, - AstNode, T, -}; - -use crate::{utils::TryEnum, AssistContext, AssistId, AssistKind, Assists}; - -// Assist: replace_let_with_if_let -// -// Replaces `let` with an `if-let`. -// -// ``` -// # enum Option { Some(T), None } -// -// fn main(action: Action) { -// <|>let x = compute(); -// } -// -// fn compute() -> Option { None } -// ``` -// -> -// ``` -// # enum Option { Some(T), None } -// -// fn main(action: Action) { -// if let Some(x) = compute() { -// } -// } -// -// fn compute() -> Option { None } -// ``` -pub(crate) fn replace_let_with_if_let(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let let_kw = ctx.find_token_at_offset(T![let])?; - let let_stmt = let_kw.ancestors().find_map(ast::LetStmt::cast)?; - let init = let_stmt.initializer()?; - let original_pat = let_stmt.pat()?; - let ty = ctx.sema.type_of_expr(&init)?; - let happy_variant = TryEnum::from_ty(&ctx.sema, &ty).map(|it| it.happy_case()); - - let target = let_kw.text_range(); - acc.add( - AssistId("replace_let_with_if_let", AssistKind::RefactorRewrite), - "Replace with if-let", - target, - |edit| { - let with_placeholder: ast::Pat = match happy_variant { - None => make::wildcard_pat().into(), - Some(var_name) => make::tuple_struct_pat( - 
make::path_unqualified(make::path_segment(make::name_ref(var_name))), - once(make::wildcard_pat().into()), - ) - .into(), - }; - let block = - make::block_expr(None, None).indent(IndentLevel::from_node(let_stmt.syntax())); - let if_ = make::expr_if(make::condition(init, Some(with_placeholder)), block); - let stmt = make::expr_stmt(if_); - - let placeholder = stmt.syntax().descendants().find_map(ast::WildcardPat::cast).unwrap(); - let stmt = stmt.replace_descendant(placeholder.into(), original_pat); - - edit.replace_ast(ast::Stmt::from(let_stmt), ast::Stmt::from(stmt)); - }, - ) -} - -#[cfg(test)] -mod tests { - use crate::tests::check_assist; - - use super::*; - - #[test] - fn replace_let_unknown_enum() { - check_assist( - replace_let_with_if_let, - r" -enum E { X(T), Y(T) } - -fn main() { - <|>let x = E::X(92); -} - ", - r" -enum E { X(T), Y(T) } - -fn main() { - if let x = E::X(92) { - } -} - ", - ) - } -} diff --git a/crates/ra_assists/src/handlers/replace_qualified_name_with_use.rs b/crates/ra_assists/src/handlers/replace_qualified_name_with_use.rs deleted file mode 100644 index da0a860c59..0000000000 --- a/crates/ra_assists/src/handlers/replace_qualified_name_with_use.rs +++ /dev/null @@ -1,688 +0,0 @@ -use hir; -use ra_syntax::{algo::SyntaxRewriter, ast, match_ast, AstNode, SmolStr, SyntaxNode}; - -use crate::{ - utils::{find_insert_use_container, insert_use_statement}, - AssistContext, AssistId, AssistKind, Assists, -}; - -// Assist: replace_qualified_name_with_use -// -// Adds a use statement for a given fully-qualified name. 
-// -// ``` -// fn process(map: std::collections::<|>HashMap) {} -// ``` -// -> -// ``` -// use std::collections::HashMap; -// -// fn process(map: HashMap) {} -// ``` -pub(crate) fn replace_qualified_name_with_use( - acc: &mut Assists, - ctx: &AssistContext, -) -> Option<()> { - let path: ast::Path = ctx.find_node_at_offset()?; - // We don't want to mess with use statements - if path.syntax().ancestors().find_map(ast::Use::cast).is_some() { - return None; - } - - let hir_path = ctx.sema.lower_path(&path)?; - let segments = collect_hir_path_segments(&hir_path)?; - if segments.len() < 2 { - return None; - } - - let target = path.syntax().text_range(); - acc.add( - AssistId("replace_qualified_name_with_use", AssistKind::RefactorRewrite), - "Replace qualified path with use", - target, - |builder| { - let path_to_import = hir_path.mod_path().clone(); - let container = match find_insert_use_container(path.syntax(), ctx) { - Some(c) => c, - None => return, - }; - insert_use_statement(path.syntax(), &path_to_import, ctx, builder.text_edit_builder()); - - // Now that we've brought the name into scope, re-qualify all paths that could be - // affected (that is, all paths inside the node we added the `use` to). 
- let mut rewriter = SyntaxRewriter::default(); - let syntax = container.either(|l| l.syntax().clone(), |r| r.syntax().clone()); - shorten_paths(&mut rewriter, syntax, path); - builder.rewrite(rewriter); - }, - ) -} - -fn collect_hir_path_segments(path: &hir::Path) -> Option> { - let mut ps = Vec::::with_capacity(10); - match path.kind() { - hir::PathKind::Abs => ps.push("".into()), - hir::PathKind::Crate => ps.push("crate".into()), - hir::PathKind::Plain => {} - hir::PathKind::Super(0) => ps.push("self".into()), - hir::PathKind::Super(lvl) => { - let mut chain = "super".to_string(); - for _ in 0..*lvl { - chain += "::super"; - } - ps.push(chain.into()); - } - hir::PathKind::DollarCrate(_) => return None, - } - ps.extend(path.segments().iter().map(|it| it.name.to_string().into())); - Some(ps) -} - -/// Adds replacements to `re` that shorten `path` in all descendants of `node`. -fn shorten_paths(rewriter: &mut SyntaxRewriter<'static>, node: SyntaxNode, path: ast::Path) { - for child in node.children() { - match_ast! { - match child { - // Don't modify `use` items, as this can break the `use` item when injecting a new - // import into the use tree. - ast::Use(_it) => continue, - // Don't descend into submodules, they don't have the same `use` items in scope. - ast::Module(_it) => continue, - - ast::Path(p) => { - match maybe_replace_path(rewriter, p.clone(), path.clone()) { - Some(()) => {}, - None => shorten_paths(rewriter, p.syntax().clone(), path.clone()), - } - }, - _ => shorten_paths(rewriter, child, path.clone()), - } - } - } -} - -fn maybe_replace_path( - rewriter: &mut SyntaxRewriter<'static>, - path: ast::Path, - target: ast::Path, -) -> Option<()> { - if !path_eq(path.clone(), target) { - return None; - } - - // Shorten `path`, leaving only its last segment. 
- if let Some(parent) = path.qualifier() { - rewriter.delete(parent.syntax()); - } - if let Some(double_colon) = path.coloncolon_token() { - rewriter.delete(&double_colon); - } - - Some(()) -} - -fn path_eq(lhs: ast::Path, rhs: ast::Path) -> bool { - let mut lhs_curr = lhs; - let mut rhs_curr = rhs; - loop { - match (lhs_curr.segment(), rhs_curr.segment()) { - (Some(lhs), Some(rhs)) if lhs.syntax().text() == rhs.syntax().text() => (), - _ => return false, - } - - match (lhs_curr.qualifier(), rhs_curr.qualifier()) { - (Some(lhs), Some(rhs)) => { - lhs_curr = lhs; - rhs_curr = rhs; - } - (None, None) => return true, - _ => return false, - } - } -} - -#[cfg(test)] -mod tests { - use crate::tests::{check_assist, check_assist_not_applicable}; - - use super::*; - - #[test] - fn test_replace_add_use_no_anchor() { - check_assist( - replace_qualified_name_with_use, - r" -std::fmt::Debug<|> - ", - r" -use std::fmt::Debug; - -Debug - ", - ); - } - #[test] - fn test_replace_add_use_no_anchor_with_item_below() { - check_assist( - replace_qualified_name_with_use, - r" -std::fmt::Debug<|> - -fn main() { -} - ", - r" -use std::fmt::Debug; - -Debug - -fn main() { -} - ", - ); - } - - #[test] - fn test_replace_add_use_no_anchor_with_item_above() { - check_assist( - replace_qualified_name_with_use, - r" -fn main() { -} - -std::fmt::Debug<|> - ", - r" -use std::fmt::Debug; - -fn main() { -} - -Debug - ", - ); - } - - #[test] - fn test_replace_add_use_no_anchor_2seg() { - check_assist( - replace_qualified_name_with_use, - r" -std::fmt<|>::Debug - ", - r" -use std::fmt; - -fmt::Debug - ", - ); - } - - #[test] - fn test_replace_add_use() { - check_assist( - replace_qualified_name_with_use, - r" -use stdx; - -impl std::fmt::Debug<|> for Foo { -} - ", - r" -use stdx; -use std::fmt::Debug; - -impl Debug for Foo { -} - ", - ); - } - - #[test] - fn test_replace_file_use_other_anchor() { - check_assist( - replace_qualified_name_with_use, - r" -impl std::fmt::Debug<|> for Foo { -} - ", - r" 
-use std::fmt::Debug; - -impl Debug for Foo { -} - ", - ); - } - - #[test] - fn test_replace_add_use_other_anchor_indent() { - check_assist( - replace_qualified_name_with_use, - r" - impl std::fmt::Debug<|> for Foo { - } - ", - r" - use std::fmt::Debug; - - impl Debug for Foo { - } - ", - ); - } - - #[test] - fn test_replace_split_different() { - check_assist( - replace_qualified_name_with_use, - r" -use std::fmt; - -impl std::io<|> for Foo { -} - ", - r" -use std::{io, fmt}; - -impl io for Foo { -} - ", - ); - } - - #[test] - fn test_replace_split_self_for_use() { - check_assist( - replace_qualified_name_with_use, - r" -use std::fmt; - -impl std::fmt::Debug<|> for Foo { -} - ", - r" -use std::fmt::{self, Debug, }; - -impl Debug for Foo { -} - ", - ); - } - - #[test] - fn test_replace_split_self_for_target() { - check_assist( - replace_qualified_name_with_use, - r" -use std::fmt::Debug; - -impl std::fmt<|> for Foo { -} - ", - r" -use std::fmt::{self, Debug}; - -impl fmt for Foo { -} - ", - ); - } - - #[test] - fn test_replace_add_to_nested_self_nested() { - check_assist( - replace_qualified_name_with_use, - r" -use std::fmt::{Debug, nested::{Display}}; - -impl std::fmt::nested<|> for Foo { -} -", - r" -use std::fmt::{Debug, nested::{Display, self}}; - -impl nested for Foo { -} -", - ); - } - - #[test] - fn test_replace_add_to_nested_self_already_included() { - check_assist( - replace_qualified_name_with_use, - r" -use std::fmt::{Debug, nested::{self, Display}}; - -impl std::fmt::nested<|> for Foo { -} -", - r" -use std::fmt::{Debug, nested::{self, Display}}; - -impl nested for Foo { -} -", - ); - } - - #[test] - fn test_replace_add_to_nested_nested() { - check_assist( - replace_qualified_name_with_use, - r" -use std::fmt::{Debug, nested::{Display}}; - -impl std::fmt::nested::Debug<|> for Foo { -} -", - r" -use std::fmt::{Debug, nested::{Display, Debug}}; - -impl Debug for Foo { -} -", - ); - } - - #[test] - fn test_replace_split_common_target_longer() { - 
check_assist( - replace_qualified_name_with_use, - r" -use std::fmt::Debug; - -impl std::fmt::nested::Display<|> for Foo { -} -", - r" -use std::fmt::{nested::Display, Debug}; - -impl Display for Foo { -} -", - ); - } - - #[test] - fn test_replace_split_common_use_longer() { - check_assist( - replace_qualified_name_with_use, - r" -use std::fmt::nested::Debug; - -impl std::fmt::Display<|> for Foo { -} -", - r" -use std::fmt::{Display, nested::Debug}; - -impl Display for Foo { -} -", - ); - } - - #[test] - fn test_replace_use_nested_import() { - check_assist( - replace_qualified_name_with_use, - r" -use crate::{ - ty::{Substs, Ty}, - AssocItem, -}; - -fn foo() { crate::ty::lower<|>::trait_env() } -", - r" -use crate::{ - ty::{Substs, Ty, lower}, - AssocItem, -}; - -fn foo() { lower::trait_env() } -", - ); - } - - #[test] - fn test_replace_alias() { - check_assist( - replace_qualified_name_with_use, - r" -use std::fmt as foo; - -impl foo::Debug<|> for Foo { -} -", - r" -use std::fmt as foo; - -impl Debug for Foo { -} -", - ); - } - - #[test] - fn test_replace_not_applicable_one_segment() { - check_assist_not_applicable( - replace_qualified_name_with_use, - r" -impl foo<|> for Foo { -} -", - ); - } - - #[test] - fn test_replace_not_applicable_in_use() { - check_assist_not_applicable( - replace_qualified_name_with_use, - r" -use std::fmt<|>; -", - ); - } - - #[test] - fn test_replace_add_use_no_anchor_in_mod_mod() { - check_assist( - replace_qualified_name_with_use, - r" -mod foo { - mod bar { - std::fmt::Debug<|> - } -} - ", - r" -mod foo { - mod bar { - use std::fmt::Debug; - - Debug - } -} - ", - ); - } - - #[test] - fn inserts_imports_after_inner_attributes() { - check_assist( - replace_qualified_name_with_use, - r" -#![allow(dead_code)] - -fn main() { - std::fmt::Debug<|> -} - ", - r" -#![allow(dead_code)] -use std::fmt::Debug; - -fn main() { - Debug -} - ", - ); - } - - #[test] - fn replaces_all_affected_paths() { - check_assist( - replace_qualified_name_with_use, 
- r" -fn main() { - std::fmt::Debug<|>; - let x: std::fmt::Debug = std::fmt::Debug; -} - ", - r" -use std::fmt::Debug; - -fn main() { - Debug; - let x: Debug = Debug; -} - ", - ); - } - - #[test] - fn replaces_all_affected_paths_mod() { - check_assist( - replace_qualified_name_with_use, - r" -mod m { - fn f() { - std::fmt::Debug<|>; - let x: std::fmt::Debug = std::fmt::Debug; - } - fn g() { - std::fmt::Debug; - } -} - -fn f() { - std::fmt::Debug; -} - ", - r" -mod m { - use std::fmt::Debug; - - fn f() { - Debug; - let x: Debug = Debug; - } - fn g() { - Debug; - } -} - -fn f() { - std::fmt::Debug; -} - ", - ); - } - - #[test] - fn does_not_replace_in_submodules() { - check_assist( - replace_qualified_name_with_use, - r" -fn main() { - std::fmt::Debug<|>; -} - -mod sub { - fn f() { - std::fmt::Debug; - } -} - ", - r" -use std::fmt::Debug; - -fn main() { - Debug; -} - -mod sub { - fn f() { - std::fmt::Debug; - } -} - ", - ); - } - - #[test] - fn does_not_replace_in_use() { - check_assist( - replace_qualified_name_with_use, - r" -use std::fmt::Display; - -fn main() { - std::fmt<|>; -} - ", - r" -use std::fmt::{self, Display}; - -fn main() { - fmt; -} - ", - ); - } - - #[test] - fn does_not_replace_pub_use() { - check_assist( - replace_qualified_name_with_use, - r" -pub use std::fmt; - -impl std::io<|> for Foo { -} - ", - r" -use std::io; - -pub use std::fmt; - -impl io for Foo { -} - ", - ); - } - - #[test] - fn does_not_replace_pub_crate_use() { - check_assist( - replace_qualified_name_with_use, - r" -pub(crate) use std::fmt; - -impl std::io<|> for Foo { -} - ", - r" -use std::io; - -pub(crate) use std::fmt; - -impl io for Foo { -} - ", - ); - } -} diff --git a/crates/ra_assists/src/handlers/replace_unwrap_with_match.rs b/crates/ra_assists/src/handlers/replace_unwrap_with_match.rs deleted file mode 100644 index d69f2c1b0e..0000000000 --- a/crates/ra_assists/src/handlers/replace_unwrap_with_match.rs +++ /dev/null @@ -1,187 +0,0 @@ -use std::iter; - -use ra_syntax::{ - 
ast::{ - self, - edit::{AstNodeEdit, IndentLevel}, - make, - }, - AstNode, -}; - -use crate::{ - utils::{render_snippet, Cursor, TryEnum}, - AssistContext, AssistId, AssistKind, Assists, -}; - -// Assist: replace_unwrap_with_match -// -// Replaces `unwrap` a `match` expression. Works for Result and Option. -// -// ``` -// enum Result { Ok(T), Err(E) } -// fn main() { -// let x: Result = Result::Ok(92); -// let y = x.<|>unwrap(); -// } -// ``` -// -> -// ``` -// enum Result { Ok(T), Err(E) } -// fn main() { -// let x: Result = Result::Ok(92); -// let y = match x { -// Ok(a) => a, -// $0_ => unreachable!(), -// }; -// } -// ``` -pub(crate) fn replace_unwrap_with_match(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let method_call: ast::MethodCallExpr = ctx.find_node_at_offset()?; - let name = method_call.name_ref()?; - if name.text() != "unwrap" { - return None; - } - let caller = method_call.expr()?; - let ty = ctx.sema.type_of_expr(&caller)?; - let happy_variant = TryEnum::from_ty(&ctx.sema, &ty)?.happy_case(); - let target = method_call.syntax().text_range(); - acc.add( - AssistId("replace_unwrap_with_match", AssistKind::RefactorRewrite), - "Replace unwrap with match", - target, - |builder| { - let ok_path = make::path_unqualified(make::path_segment(make::name_ref(happy_variant))); - let it = make::ident_pat(make::name("a")).into(); - let ok_tuple = make::tuple_struct_pat(ok_path, iter::once(it)).into(); - - let bind_path = make::path_unqualified(make::path_segment(make::name_ref("a"))); - let ok_arm = make::match_arm(iter::once(ok_tuple), make::expr_path(bind_path)); - - let unreachable_call = make::expr_unreachable(); - let err_arm = - make::match_arm(iter::once(make::wildcard_pat().into()), unreachable_call); - - let match_arm_list = make::match_arm_list(vec![ok_arm, err_arm]); - let match_expr = make::expr_match(caller.clone(), match_arm_list) - .indent(IndentLevel::from_node(method_call.syntax())); - - let range = method_call.syntax().text_range(); 
- match ctx.config.snippet_cap { - Some(cap) => { - let err_arm = match_expr - .syntax() - .descendants() - .filter_map(ast::MatchArm::cast) - .last() - .unwrap(); - let snippet = - render_snippet(cap, match_expr.syntax(), Cursor::Before(err_arm.syntax())); - builder.replace_snippet(cap, range, snippet) - } - None => builder.replace(range, match_expr.to_string()), - } - }, - ) -} - -#[cfg(test)] -mod tests { - use crate::tests::{check_assist, check_assist_target}; - - use super::*; - - #[test] - fn test_replace_result_unwrap_with_match() { - check_assist( - replace_unwrap_with_match, - r" -enum Result { Ok(T), Err(E) } -fn i(a: T) -> T { a } -fn main() { - let x: Result = Result::Ok(92); - let y = i(x).<|>unwrap(); -} - ", - r" -enum Result { Ok(T), Err(E) } -fn i(a: T) -> T { a } -fn main() { - let x: Result = Result::Ok(92); - let y = match i(x) { - Ok(a) => a, - $0_ => unreachable!(), - }; -} - ", - ) - } - - #[test] - fn test_replace_option_unwrap_with_match() { - check_assist( - replace_unwrap_with_match, - r" -enum Option { Some(T), None } -fn i(a: T) -> T { a } -fn main() { - let x = Option::Some(92); - let y = i(x).<|>unwrap(); -} - ", - r" -enum Option { Some(T), None } -fn i(a: T) -> T { a } -fn main() { - let x = Option::Some(92); - let y = match i(x) { - Some(a) => a, - $0_ => unreachable!(), - }; -} - ", - ); - } - - #[test] - fn test_replace_result_unwrap_with_match_chaining() { - check_assist( - replace_unwrap_with_match, - r" -enum Result { Ok(T), Err(E) } -fn i(a: T) -> T { a } -fn main() { - let x: Result = Result::Ok(92); - let y = i(x).<|>unwrap().count_zeroes(); -} - ", - r" -enum Result { Ok(T), Err(E) } -fn i(a: T) -> T { a } -fn main() { - let x: Result = Result::Ok(92); - let y = match i(x) { - Ok(a) => a, - $0_ => unreachable!(), - }.count_zeroes(); -} - ", - ) - } - - #[test] - fn replace_unwrap_with_match_target() { - check_assist_target( - replace_unwrap_with_match, - r" -enum Option { Some(T), None } -fn i(a: T) -> T { a } -fn main() { 
- let x = Option::Some(92); - let y = i(x).<|>unwrap(); -} - ", - r"i(x).unwrap()", - ); - } -} diff --git a/crates/ra_assists/src/handlers/split_import.rs b/crates/ra_assists/src/handlers/split_import.rs deleted file mode 100644 index 4ca5c3ca14..0000000000 --- a/crates/ra_assists/src/handlers/split_import.rs +++ /dev/null @@ -1,79 +0,0 @@ -use std::iter::successors; - -use ra_syntax::{ast, AstNode, T}; - -use crate::{AssistContext, AssistId, AssistKind, Assists}; - -// Assist: split_import -// -// Wraps the tail of import into braces. -// -// ``` -// use std::<|>collections::HashMap; -// ``` -// -> -// ``` -// use std::{collections::HashMap}; -// ``` -pub(crate) fn split_import(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let colon_colon = ctx.find_token_at_offset(T![::])?; - let path = ast::Path::cast(colon_colon.parent())?.qualifier()?; - let top_path = successors(Some(path.clone()), |it| it.parent_path()).last()?; - - let use_tree = top_path.syntax().ancestors().find_map(ast::UseTree::cast)?; - - let new_tree = use_tree.split_prefix(&path); - if new_tree == use_tree { - return None; - } - - let target = colon_colon.text_range(); - acc.add(AssistId("split_import", AssistKind::RefactorRewrite), "Split import", target, |edit| { - edit.replace_ast(use_tree, new_tree); - }) -} - -#[cfg(test)] -mod tests { - use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target}; - - use super::*; - - #[test] - fn test_split_import() { - check_assist( - split_import, - "use crate::<|>db::RootDatabase;", - "use crate::{db::RootDatabase};", - ) - } - - #[test] - fn split_import_works_with_trees() { - check_assist( - split_import, - "use crate:<|>:db::{RootDatabase, FileSymbol}", - "use crate::{db::{RootDatabase, FileSymbol}}", - ) - } - - #[test] - fn split_import_target() { - check_assist_target(split_import, "use crate::<|>db::{RootDatabase, FileSymbol}", "::"); - } - - #[test] - fn issue4044() { - check_assist_not_applicable(split_import, 
"use crate::<|>:::self;") - } - - #[test] - fn test_empty_use() { - check_assist_not_applicable( - split_import, - r" -use std::<|> -fn main() {}", - ); - } -} diff --git a/crates/ra_assists/src/handlers/unwrap_block.rs b/crates/ra_assists/src/handlers/unwrap_block.rs deleted file mode 100644 index 8b38695a94..0000000000 --- a/crates/ra_assists/src/handlers/unwrap_block.rs +++ /dev/null @@ -1,518 +0,0 @@ -use ra_fmt::unwrap_trivial_block; -use ra_syntax::{ - ast::{ - self, - edit::{AstNodeEdit, IndentLevel}, - }, - AstNode, TextRange, T, -}; - -use crate::{AssistContext, AssistId, AssistKind, Assists}; - -// Assist: unwrap_block -// -// This assist removes if...else, for, while and loop control statements to just keep the body. -// -// ``` -// fn foo() { -// if true {<|> -// println!("foo"); -// } -// } -// ``` -// -> -// ``` -// fn foo() { -// println!("foo"); -// } -// ``` -pub(crate) fn unwrap_block(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { - let assist_id = AssistId("unwrap_block", AssistKind::RefactorRewrite); - let assist_label = "Unwrap block"; - - let l_curly_token = ctx.find_token_at_offset(T!['{'])?; - let mut block = ast::BlockExpr::cast(l_curly_token.parent())?; - let mut parent = block.syntax().parent()?; - if ast::MatchArm::can_cast(parent.kind()) { - parent = parent.ancestors().find(|it| ast::MatchExpr::can_cast(it.kind()))? 
- } - - let parent = ast::Expr::cast(parent)?; - - match parent.clone() { - ast::Expr::ForExpr(_) | ast::Expr::WhileExpr(_) | ast::Expr::LoopExpr(_) => (), - ast::Expr::MatchExpr(_) => block = block.dedent(IndentLevel(1)), - ast::Expr::IfExpr(if_expr) => { - let then_branch = if_expr.then_branch()?; - if then_branch == block { - if let Some(ancestor) = if_expr.syntax().parent().and_then(ast::IfExpr::cast) { - // For `else if` blocks - let ancestor_then_branch = ancestor.then_branch()?; - - let target = then_branch.syntax().text_range(); - return acc.add(assist_id, assist_label, target, |edit| { - let range_to_del_else_if = TextRange::new( - ancestor_then_branch.syntax().text_range().end(), - l_curly_token.text_range().start(), - ); - let range_to_del_rest = TextRange::new( - then_branch.syntax().text_range().end(), - if_expr.syntax().text_range().end(), - ); - - edit.delete(range_to_del_rest); - edit.delete(range_to_del_else_if); - edit.replace( - target, - update_expr_string(then_branch.to_string(), &[' ', '{']), - ); - }); - } - } else { - let target = block.syntax().text_range(); - return acc.add(assist_id, assist_label, target, |edit| { - let range_to_del = TextRange::new( - then_branch.syntax().text_range().end(), - l_curly_token.text_range().start(), - ); - - edit.delete(range_to_del); - edit.replace(target, update_expr_string(block.to_string(), &[' ', '{'])); - }); - } - } - _ => return None, - }; - - let unwrapped = unwrap_trivial_block(block); - let target = unwrapped.syntax().text_range(); - acc.add(assist_id, assist_label, target, |builder| { - builder.replace( - parent.syntax().text_range(), - update_expr_string(unwrapped.to_string(), &[' ', '{', '\n']), - ); - }) -} - -fn update_expr_string(expr_str: String, trim_start_pat: &[char]) -> String { - let expr_string = expr_str.trim_start_matches(trim_start_pat); - let mut expr_string_lines: Vec<&str> = expr_string.lines().collect(); - expr_string_lines.pop(); // Delete last line - - expr_string_lines - 
.into_iter() - .map(|line| line.replacen(" ", "", 1)) // Delete indentation - .collect::>() - .join("\n") -} - -#[cfg(test)] -mod tests { - use crate::tests::{check_assist, check_assist_not_applicable}; - - use super::*; - - #[test] - fn simple_if() { - check_assist( - unwrap_block, - r#" - fn main() { - bar(); - if true {<|> - foo(); - - //comment - bar(); - } else { - println!("bar"); - } - } - "#, - r#" - fn main() { - bar(); - foo(); - - //comment - bar(); - } - "#, - ); - } - - #[test] - fn simple_if_else() { - check_assist( - unwrap_block, - r#" - fn main() { - bar(); - if true { - foo(); - - //comment - bar(); - } else {<|> - println!("bar"); - } - } - "#, - r#" - fn main() { - bar(); - if true { - foo(); - - //comment - bar(); - } - println!("bar"); - } - "#, - ); - } - - #[test] - fn simple_if_else_if() { - check_assist( - unwrap_block, - r#" - fn main() { - //bar(); - if true { - println!("true"); - - //comment - //bar(); - } else if false {<|> - println!("bar"); - } else { - println!("foo"); - } - } - "#, - r#" - fn main() { - //bar(); - if true { - println!("true"); - - //comment - //bar(); - } - println!("bar"); - } - "#, - ); - } - - #[test] - fn simple_if_else_if_nested() { - check_assist( - unwrap_block, - r#" - fn main() { - //bar(); - if true { - println!("true"); - - //comment - //bar(); - } else if false { - println!("bar"); - } else if true {<|> - println!("foo"); - } - } - "#, - r#" - fn main() { - //bar(); - if true { - println!("true"); - - //comment - //bar(); - } else if false { - println!("bar"); - } - println!("foo"); - } - "#, - ); - } - - #[test] - fn simple_if_else_if_nested_else() { - check_assist( - unwrap_block, - r#" - fn main() { - //bar(); - if true { - println!("true"); - - //comment - //bar(); - } else if false { - println!("bar"); - } else if true { - println!("foo"); - } else {<|> - println!("else"); - } - } - "#, - r#" - fn main() { - //bar(); - if true { - println!("true"); - - //comment - //bar(); - } else if false { - 
println!("bar"); - } else if true { - println!("foo"); - } - println!("else"); - } - "#, - ); - } - - #[test] - fn simple_if_else_if_nested_middle() { - check_assist( - unwrap_block, - r#" - fn main() { - //bar(); - if true { - println!("true"); - - //comment - //bar(); - } else if false { - println!("bar"); - } else if true {<|> - println!("foo"); - } else { - println!("else"); - } - } - "#, - r#" - fn main() { - //bar(); - if true { - println!("true"); - - //comment - //bar(); - } else if false { - println!("bar"); - } - println!("foo"); - } - "#, - ); - } - - #[test] - fn simple_if_bad_cursor_position() { - check_assist_not_applicable( - unwrap_block, - r#" - fn main() { - bar();<|> - if true { - foo(); - - //comment - bar(); - } else { - println!("bar"); - } - } - "#, - ); - } - - #[test] - fn simple_for() { - check_assist( - unwrap_block, - r#" - fn main() { - for i in 0..5 {<|> - if true { - foo(); - - //comment - bar(); - } else { - println!("bar"); - } - } - } - "#, - r#" - fn main() { - if true { - foo(); - - //comment - bar(); - } else { - println!("bar"); - } - } - "#, - ); - } - - #[test] - fn simple_if_in_for() { - check_assist( - unwrap_block, - r#" - fn main() { - for i in 0..5 { - if true {<|> - foo(); - - //comment - bar(); - } else { - println!("bar"); - } - } - } - "#, - r#" - fn main() { - for i in 0..5 { - foo(); - - //comment - bar(); - } - } - "#, - ); - } - - #[test] - fn simple_loop() { - check_assist( - unwrap_block, - r#" - fn main() { - loop {<|> - if true { - foo(); - - //comment - bar(); - } else { - println!("bar"); - } - } - } - "#, - r#" - fn main() { - if true { - foo(); - - //comment - bar(); - } else { - println!("bar"); - } - } - "#, - ); - } - - #[test] - fn simple_while() { - check_assist( - unwrap_block, - r#" - fn main() { - while true {<|> - if true { - foo(); - - //comment - bar(); - } else { - println!("bar"); - } - } - } - "#, - r#" - fn main() { - if true { - foo(); - - //comment - bar(); - } else { - println!("bar"); - 
} - } - "#, - ); - } - - #[test] - fn unwrap_match_arm() { - check_assist( - unwrap_block, - r#" -fn main() { - match rel_path { - Ok(rel_path) => {<|> - let rel_path = RelativePathBuf::from_path(rel_path).ok()?; - Some((*id, rel_path)) - } - Err(_) => None, - } -} -"#, - r#" -fn main() { - let rel_path = RelativePathBuf::from_path(rel_path).ok()?; - Some((*id, rel_path)) -} -"#, - ); - } - - #[test] - fn simple_if_in_while_bad_cursor_position() { - check_assist_not_applicable( - unwrap_block, - r#" - fn main() { - while true { - if true { - foo();<|> - - //comment - bar(); - } else { - println!("bar"); - } - } - } - "#, - ); - } -} diff --git a/crates/ra_assists/src/lib.rs b/crates/ra_assists/src/lib.rs deleted file mode 100644 index 507646cc80..0000000000 --- a/crates/ra_assists/src/lib.rs +++ /dev/null @@ -1,226 +0,0 @@ -//! `ra_assists` crate provides a bunch of code assists, also known as code -//! actions (in LSP) or intentions (in IntelliJ). -//! -//! An assist is a micro-refactoring, which is automatically activated in -//! certain context. For example, if the cursor is over `,`, a "swap `,`" assist -//! becomes available. - -#[allow(unused)] -macro_rules! 
eprintln { - ($($tt:tt)*) => { stdx::eprintln!($($tt)*) }; -} - -mod assist_config; -mod assist_context; -#[cfg(test)] -mod tests; -pub mod utils; -pub mod ast_transform; - -use hir::Semantics; -use ra_db::FileRange; -use ra_ide_db::{source_change::SourceChange, RootDatabase}; -use ra_syntax::TextRange; - -pub(crate) use crate::assist_context::{AssistContext, Assists}; - -pub use assist_config::AssistConfig; - -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum AssistKind { - None, - QuickFix, - Generate, - Refactor, - RefactorExtract, - RefactorInline, - RefactorRewrite, -} - -impl AssistKind { - pub fn contains(self, other: AssistKind) -> bool { - if self == other { - return true; - } - - match self { - AssistKind::None | AssistKind::Generate => return true, - AssistKind::Refactor => match other { - AssistKind::RefactorExtract - | AssistKind::RefactorInline - | AssistKind::RefactorRewrite => return true, - _ => return false, - }, - _ => return false, - } - } -} - -/// Unique identifier of the assist, should not be shown to the user -/// directly. -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub struct AssistId(pub &'static str, pub AssistKind); - -#[derive(Clone, Debug)] -pub struct GroupLabel(pub String); - -#[derive(Debug, Clone)] -pub struct Assist { - pub id: AssistId, - /// Short description of the assist, as shown in the UI. - pub label: String, - pub group: Option, - /// Target ranges are used to sort assists: the smaller the target range, - /// the more specific assist is, and so it should be sorted first. - pub target: TextRange, -} - -#[derive(Debug, Clone)] -pub struct ResolvedAssist { - pub assist: Assist, - pub source_change: SourceChange, -} - -impl Assist { - /// Return all the assists applicable at the given position. - /// - /// Assists are returned in the "unresolved" state, that is only labels are - /// returned, without actual edits. 
- pub fn unresolved(db: &RootDatabase, config: &AssistConfig, range: FileRange) -> Vec { - let sema = Semantics::new(db); - let ctx = AssistContext::new(sema, config, range); - let mut acc = Assists::new_unresolved(&ctx); - handlers::all().iter().for_each(|handler| { - handler(&mut acc, &ctx); - }); - acc.finish_unresolved() - } - - /// Return all the assists applicable at the given position. - /// - /// Assists are returned in the "resolved" state, that is with edit fully - /// computed. - pub fn resolved( - db: &RootDatabase, - config: &AssistConfig, - range: FileRange, - ) -> Vec { - let sema = Semantics::new(db); - let ctx = AssistContext::new(sema, config, range); - let mut acc = Assists::new_resolved(&ctx); - handlers::all().iter().for_each(|handler| { - handler(&mut acc, &ctx); - }); - acc.finish_resolved() - } - - pub(crate) fn new( - id: AssistId, - label: String, - group: Option, - target: TextRange, - ) -> Assist { - // FIXME: make fields private, so that this invariant can't be broken - assert!(label.starts_with(|c: char| c.is_uppercase())); - Assist { id, label, group, target } - } -} - -mod handlers { - use crate::{AssistContext, Assists}; - - pub(crate) type Handler = fn(&mut Assists, &AssistContext) -> Option<()>; - - mod add_custom_impl; - mod add_explicit_type; - mod add_missing_impl_members; - mod add_turbo_fish; - mod apply_demorgan; - mod auto_import; - mod change_return_type_to_result; - mod change_visibility; - mod early_return; - mod expand_glob_import; - mod extract_struct_from_enum_variant; - mod extract_variable; - mod fill_match_arms; - mod fix_visibility; - mod flip_binexpr; - mod flip_comma; - mod flip_trait_bound; - mod generate_derive; - mod generate_from_impl_for_enum; - mod generate_function; - mod generate_impl; - mod generate_new; - mod inline_local_variable; - mod introduce_named_lifetime; - mod invert_if; - mod merge_imports; - mod merge_match_arms; - mod move_bounds; - mod move_guard; - mod raw_string; - mod remove_dbg; - mod 
remove_mut; - mod reorder_fields; - mod replace_if_let_with_match; - mod replace_let_with_if_let; - mod replace_qualified_name_with_use; - mod replace_unwrap_with_match; - mod split_import; - mod unwrap_block; - - pub(crate) fn all() -> &'static [Handler] { - &[ - // These are alphabetic for the foolish consistency - add_custom_impl::add_custom_impl, - add_explicit_type::add_explicit_type, - add_turbo_fish::add_turbo_fish, - apply_demorgan::apply_demorgan, - auto_import::auto_import, - change_return_type_to_result::change_return_type_to_result, - change_visibility::change_visibility, - early_return::convert_to_guarded_return, - expand_glob_import::expand_glob_import, - extract_struct_from_enum_variant::extract_struct_from_enum_variant, - extract_variable::extract_variable, - fill_match_arms::fill_match_arms, - fix_visibility::fix_visibility, - flip_binexpr::flip_binexpr, - flip_comma::flip_comma, - flip_trait_bound::flip_trait_bound, - generate_derive::generate_derive, - generate_from_impl_for_enum::generate_from_impl_for_enum, - generate_function::generate_function, - generate_impl::generate_impl, - generate_new::generate_new, - inline_local_variable::inline_local_variable, - introduce_named_lifetime::introduce_named_lifetime, - invert_if::invert_if, - merge_imports::merge_imports, - merge_match_arms::merge_match_arms, - move_bounds::move_bounds_to_where_clause, - move_guard::move_arm_cond_to_match_guard, - move_guard::move_guard_to_arm_body, - raw_string::add_hash, - raw_string::make_raw_string, - raw_string::make_usual_string, - raw_string::remove_hash, - remove_dbg::remove_dbg, - remove_mut::remove_mut, - reorder_fields::reorder_fields, - replace_if_let_with_match::replace_if_let_with_match, - replace_let_with_if_let::replace_let_with_if_let, - replace_qualified_name_with_use::replace_qualified_name_with_use, - replace_unwrap_with_match::replace_unwrap_with_match, - split_import::split_import, - unwrap_block::unwrap_block, - // These are manually sorted for 
better priorities - add_missing_impl_members::add_missing_impl_members, - add_missing_impl_members::add_missing_default_members, - // Are you sure you want to add new assist here, and not to the - // sorted list above? - ] - } -} diff --git a/crates/ra_assists/src/tests.rs b/crates/ra_assists/src/tests.rs deleted file mode 100644 index 18fcb90498..0000000000 --- a/crates/ra_assists/src/tests.rs +++ /dev/null @@ -1,179 +0,0 @@ -mod generated; - -use hir::Semantics; -use ra_db::{fixture::WithFixture, FileId, FileRange, SourceDatabaseExt}; -use ra_ide_db::RootDatabase; -use ra_syntax::TextRange; -use test_utils::{assert_eq_text, extract_offset, extract_range}; - -use crate::{handlers::Handler, Assist, AssistConfig, AssistContext, AssistKind, Assists}; -use stdx::trim_indent; - -pub(crate) fn with_single_file(text: &str) -> (RootDatabase, FileId) { - RootDatabase::with_single_file(text) -} - -pub(crate) fn check_assist(assist: Handler, ra_fixture_before: &str, ra_fixture_after: &str) { - let ra_fixture_after = trim_indent(ra_fixture_after); - check(assist, ra_fixture_before, ExpectedResult::After(&ra_fixture_after)); -} - -// FIXME: instead of having a separate function here, maybe use -// `extract_ranges` and mark the target as ` ` in the -// fixuture? 
-pub(crate) fn check_assist_target(assist: Handler, ra_fixture: &str, target: &str) { - check(assist, ra_fixture, ExpectedResult::Target(target)); -} - -pub(crate) fn check_assist_not_applicable(assist: Handler, ra_fixture: &str) { - check(assist, ra_fixture, ExpectedResult::NotApplicable); -} - -fn check_doc_test(assist_id: &str, before: &str, after: &str) { - let after = trim_indent(after); - let (db, file_id, selection) = RootDatabase::with_range_or_offset(&before); - let before = db.file_text(file_id).to_string(); - let frange = FileRange { file_id, range: selection.into() }; - - let mut assist = Assist::resolved(&db, &AssistConfig::default(), frange) - .into_iter() - .find(|assist| assist.assist.id.0 == assist_id) - .unwrap_or_else(|| { - panic!( - "\n\nAssist is not applicable: {}\nAvailable assists: {}", - assist_id, - Assist::resolved(&db, &AssistConfig::default(), frange) - .into_iter() - .map(|assist| assist.assist.id.0) - .collect::>() - .join(", ") - ) - }); - - let actual = { - let change = assist.source_change.source_file_edits.pop().unwrap(); - let mut actual = before; - change.edit.apply(&mut actual); - actual - }; - assert_eq_text!(&after, &actual); -} - -enum ExpectedResult<'a> { - NotApplicable, - After(&'a str), - Target(&'a str), -} - -fn check(handler: Handler, before: &str, expected: ExpectedResult) { - let (db, file_with_caret_id, range_or_offset) = RootDatabase::with_range_or_offset(before); - let text_without_caret = db.file_text(file_with_caret_id).to_string(); - - let frange = FileRange { file_id: file_with_caret_id, range: range_or_offset.into() }; - - let sema = Semantics::new(&db); - let config = AssistConfig::default(); - let ctx = AssistContext::new(sema, &config, frange); - let mut acc = Assists::new_resolved(&ctx); - handler(&mut acc, &ctx); - let mut res = acc.finish_resolved(); - let assist = res.pop(); - match (assist, expected) { - (Some(assist), ExpectedResult::After(after)) => { - let mut source_change = 
assist.source_change; - let change = source_change.source_file_edits.pop().unwrap(); - - let mut actual = db.file_text(change.file_id).as_ref().to_owned(); - change.edit.apply(&mut actual); - assert_eq_text!(after, &actual); - } - (Some(assist), ExpectedResult::Target(target)) => { - let range = assist.assist.target; - assert_eq_text!(&text_without_caret[range], target); - } - (Some(_), ExpectedResult::NotApplicable) => panic!("assist should not be applicable!"), - (None, ExpectedResult::After(_)) | (None, ExpectedResult::Target(_)) => { - panic!("code action is not applicable") - } - (None, ExpectedResult::NotApplicable) => (), - }; -} - -#[test] -fn assist_order_field_struct() { - let before = "struct Foo { <|>bar: u32 }"; - let (before_cursor_pos, before) = extract_offset(before); - let (db, file_id) = with_single_file(&before); - let frange = FileRange { file_id, range: TextRange::empty(before_cursor_pos) }; - let assists = Assist::resolved(&db, &AssistConfig::default(), frange); - let mut assists = assists.iter(); - - assert_eq!( - assists.next().expect("expected assist").assist.label, - "Change visibility to pub(crate)" - ); - assert_eq!(assists.next().expect("expected assist").assist.label, "Add `#[derive]`"); -} - -#[test] -fn assist_order_if_expr() { - let before = " - pub fn test_some_range(a: int) -> bool { - if let 2..6 = <|>5<|> { - true - } else { - false - } - }"; - let (range, before) = extract_range(before); - let (db, file_id) = with_single_file(&before); - let frange = FileRange { file_id, range }; - let assists = Assist::resolved(&db, &AssistConfig::default(), frange); - let mut assists = assists.iter(); - - assert_eq!(assists.next().expect("expected assist").assist.label, "Extract into variable"); - assert_eq!(assists.next().expect("expected assist").assist.label, "Replace with match"); -} - -#[test] -fn assist_filter_works() { - let before = " - pub fn test_some_range(a: int) -> bool { - if let 2..6 = <|>5<|> { - true - } else { - false - } - 
}"; - let (range, before) = extract_range(before); - let (db, file_id) = with_single_file(&before); - let frange = FileRange { file_id, range }; - - { - let mut cfg = AssistConfig::default(); - cfg.allowed = Some(vec![AssistKind::Refactor]); - - let assists = Assist::resolved(&db, &cfg, frange); - let mut assists = assists.iter(); - - assert_eq!(assists.next().expect("expected assist").assist.label, "Extract into variable"); - assert_eq!(assists.next().expect("expected assist").assist.label, "Replace with match"); - } - - { - let mut cfg = AssistConfig::default(); - cfg.allowed = Some(vec![AssistKind::RefactorExtract]); - let assists = Assist::resolved(&db, &cfg, frange); - assert_eq!(assists.len(), 1); - - let mut assists = assists.iter(); - assert_eq!(assists.next().expect("expected assist").assist.label, "Extract into variable"); - } - - { - let mut cfg = AssistConfig::default(); - cfg.allowed = Some(vec![AssistKind::QuickFix]); - let assists = Assist::resolved(&db, &cfg, frange); - assert!(assists.is_empty(), "All asserts but quickfixes should be filtered out"); - } -} diff --git a/crates/ra_assists/src/tests/generated.rs b/crates/ra_assists/src/tests/generated.rs deleted file mode 100644 index 97978e7a2e..0000000000 --- a/crates/ra_assists/src/tests/generated.rs +++ /dev/null @@ -1,890 +0,0 @@ -//! 
Generated file, do not edit by hand, see `xtask/src/codegen` - -use super::check_doc_test; - -#[test] -fn doctest_add_custom_impl() { - check_doc_test( - "add_custom_impl", - r#####" -#[derive(Deb<|>ug, Display)] -struct S; -"#####, - r#####" -#[derive(Display)] -struct S; - -impl Debug for S { - $0 -} -"#####, - ) -} - -#[test] -fn doctest_add_explicit_type() { - check_doc_test( - "add_explicit_type", - r#####" -fn main() { - let x<|> = 92; -} -"#####, - r#####" -fn main() { - let x: i32 = 92; -} -"#####, - ) -} - -#[test] -fn doctest_add_hash() { - check_doc_test( - "add_hash", - r#####" -fn main() { - r#"Hello,<|> World!"#; -} -"#####, - r#####" -fn main() { - r##"Hello, World!"##; -} -"#####, - ) -} - -#[test] -fn doctest_add_impl_default_members() { - check_doc_test( - "add_impl_default_members", - r#####" -trait Trait { - Type X; - fn foo(&self); - fn bar(&self) {} -} - -impl Trait for () { - Type X = (); - fn foo(&self) {}<|> - -} -"#####, - r#####" -trait Trait { - Type X; - fn foo(&self); - fn bar(&self) {} -} - -impl Trait for () { - Type X = (); - fn foo(&self) {} - $0fn bar(&self) {} - -} -"#####, - ) -} - -#[test] -fn doctest_add_impl_missing_members() { - check_doc_test( - "add_impl_missing_members", - r#####" -trait Trait { - Type X; - fn foo(&self) -> T; - fn bar(&self) {} -} - -impl Trait for () {<|> - -} -"#####, - r#####" -trait Trait { - Type X; - fn foo(&self) -> T; - fn bar(&self) {} -} - -impl Trait for () { - fn foo(&self) -> u32 { - ${0:todo!()} - } - -} -"#####, - ) -} - -#[test] -fn doctest_add_turbo_fish() { - check_doc_test( - "add_turbo_fish", - r#####" -fn make() -> T { todo!() } -fn main() { - let x = make<|>(); -} -"#####, - r#####" -fn make() -> T { todo!() } -fn main() { - let x = make::<${0:_}>(); -} -"#####, - ) -} - -#[test] -fn doctest_apply_demorgan() { - check_doc_test( - "apply_demorgan", - r#####" -fn main() { - if x != 4 ||<|> !y {} -} -"#####, - r#####" -fn main() { - if !(x == 4 && y) {} -} -"#####, - ) -} - -#[test] 
-fn doctest_auto_import() { - check_doc_test( - "auto_import", - r#####" -fn main() { - let map = HashMap<|>::new(); -} -pub mod std { pub mod collections { pub struct HashMap { } } } -"#####, - r#####" -use std::collections::HashMap; - -fn main() { - let map = HashMap::new(); -} -pub mod std { pub mod collections { pub struct HashMap { } } } -"#####, - ) -} - -#[test] -fn doctest_change_return_type_to_result() { - check_doc_test( - "change_return_type_to_result", - r#####" -fn foo() -> i32<|> { 42i32 } -"#####, - r#####" -fn foo() -> Result { Ok(42i32) } -"#####, - ) -} - -#[test] -fn doctest_change_visibility() { - check_doc_test( - "change_visibility", - r#####" -<|>fn frobnicate() {} -"#####, - r#####" -pub(crate) fn frobnicate() {} -"#####, - ) -} - -#[test] -fn doctest_convert_to_guarded_return() { - check_doc_test( - "convert_to_guarded_return", - r#####" -fn main() { - <|>if cond { - foo(); - bar(); - } -} -"#####, - r#####" -fn main() { - if !cond { - return; - } - foo(); - bar(); -} -"#####, - ) -} - -#[test] -fn doctest_expand_glob_import() { - check_doc_test( - "expand_glob_import", - r#####" -mod foo { - pub struct Bar; - pub struct Baz; -} - -use foo::*<|>; - -fn qux(bar: Bar, baz: Baz) {} -"#####, - r#####" -mod foo { - pub struct Bar; - pub struct Baz; -} - -use foo::{Baz, Bar}; - -fn qux(bar: Bar, baz: Baz) {} -"#####, - ) -} - -#[test] -fn doctest_extract_struct_from_enum_variant() { - check_doc_test( - "extract_struct_from_enum_variant", - r#####" -enum A { <|>One(u32, u32) } -"#####, - r#####" -struct One(pub u32, pub u32); - -enum A { One(One) } -"#####, - ) -} - -#[test] -fn doctest_extract_variable() { - check_doc_test( - "extract_variable", - r#####" -fn main() { - <|>(1 + 2)<|> * 4; -} -"#####, - r#####" -fn main() { - let $0var_name = (1 + 2); - var_name * 4; -} -"#####, - ) -} - -#[test] -fn doctest_fill_match_arms() { - check_doc_test( - "fill_match_arms", - r#####" -enum Action { Move { distance: u32 }, Stop } - -fn handle(action: 
Action) { - match action { - <|> - } -} -"#####, - r#####" -enum Action { Move { distance: u32 }, Stop } - -fn handle(action: Action) { - match action { - $0Action::Move { distance } => {} - Action::Stop => {} - } -} -"#####, - ) -} - -#[test] -fn doctest_fix_visibility() { - check_doc_test( - "fix_visibility", - r#####" -mod m { - fn frobnicate() {} -} -fn main() { - m::frobnicate<|>() {} -} -"#####, - r#####" -mod m { - $0pub(crate) fn frobnicate() {} -} -fn main() { - m::frobnicate() {} -} -"#####, - ) -} - -#[test] -fn doctest_flip_binexpr() { - check_doc_test( - "flip_binexpr", - r#####" -fn main() { - let _ = 90 +<|> 2; -} -"#####, - r#####" -fn main() { - let _ = 2 + 90; -} -"#####, - ) -} - -#[test] -fn doctest_flip_comma() { - check_doc_test( - "flip_comma", - r#####" -fn main() { - ((1, 2),<|> (3, 4)); -} -"#####, - r#####" -fn main() { - ((3, 4), (1, 2)); -} -"#####, - ) -} - -#[test] -fn doctest_flip_trait_bound() { - check_doc_test( - "flip_trait_bound", - r#####" -fn foo Copy>() { } -"#####, - r#####" -fn foo() { } -"#####, - ) -} - -#[test] -fn doctest_generate_derive() { - check_doc_test( - "generate_derive", - r#####" -struct Point { - x: u32, - y: u32,<|> -} -"#####, - r#####" -#[derive($0)] -struct Point { - x: u32, - y: u32, -} -"#####, - ) -} - -#[test] -fn doctest_generate_from_impl_for_enum() { - check_doc_test( - "generate_from_impl_for_enum", - r#####" -enum A { <|>One(u32) } -"#####, - r#####" -enum A { One(u32) } - -impl From for A { - fn from(v: u32) -> Self { - A::One(v) - } -} -"#####, - ) -} - -#[test] -fn doctest_generate_function() { - check_doc_test( - "generate_function", - r#####" -struct Baz; -fn baz() -> Baz { Baz } -fn foo() { - bar<|>("", baz()); -} - -"#####, - r#####" -struct Baz; -fn baz() -> Baz { Baz } -fn foo() { - bar("", baz()); -} - -fn bar(arg: &str, baz: Baz) { - ${0:todo!()} -} - -"#####, - ) -} - -#[test] -fn doctest_generate_impl() { - check_doc_test( - "generate_impl", - r#####" -struct Ctx { - data: T,<|> -} 
-"#####, - r#####" -struct Ctx { - data: T, -} - -impl Ctx { - $0 -} -"#####, - ) -} - -#[test] -fn doctest_generate_new() { - check_doc_test( - "generate_new", - r#####" -struct Ctx { - data: T,<|> -} -"#####, - r#####" -struct Ctx { - data: T, -} - -impl Ctx { - fn $0new(data: T) -> Self { Self { data } } -} - -"#####, - ) -} - -#[test] -fn doctest_inline_local_variable() { - check_doc_test( - "inline_local_variable", - r#####" -fn main() { - let x<|> = 1 + 2; - x * 4; -} -"#####, - r#####" -fn main() { - (1 + 2) * 4; -} -"#####, - ) -} - -#[test] -fn doctest_introduce_named_lifetime() { - check_doc_test( - "introduce_named_lifetime", - r#####" -impl Cursor<'_<|>> { - fn node(self) -> &SyntaxNode { - match self { - Cursor::Replace(node) | Cursor::Before(node) => node, - } - } -} -"#####, - r#####" -impl<'a> Cursor<'a> { - fn node(self) -> &SyntaxNode { - match self { - Cursor::Replace(node) | Cursor::Before(node) => node, - } - } -} -"#####, - ) -} - -#[test] -fn doctest_invert_if() { - check_doc_test( - "invert_if", - r#####" -fn main() { - if<|> !y { A } else { B } -} -"#####, - r#####" -fn main() { - if y { B } else { A } -} -"#####, - ) -} - -#[test] -fn doctest_make_raw_string() { - check_doc_test( - "make_raw_string", - r#####" -fn main() { - "Hello,<|> World!"; -} -"#####, - r#####" -fn main() { - r#"Hello, World!"#; -} -"#####, - ) -} - -#[test] -fn doctest_make_usual_string() { - check_doc_test( - "make_usual_string", - r#####" -fn main() { - r#"Hello,<|> "World!""#; -} -"#####, - r#####" -fn main() { - "Hello, \"World!\""; -} -"#####, - ) -} - -#[test] -fn doctest_merge_imports() { - check_doc_test( - "merge_imports", - r#####" -use std::<|>fmt::Formatter; -use std::io; -"#####, - r#####" -use std::{fmt::Formatter, io}; -"#####, - ) -} - -#[test] -fn doctest_merge_match_arms() { - check_doc_test( - "merge_match_arms", - r#####" -enum Action { Move { distance: u32 }, Stop } - -fn handle(action: Action) { - match action { - <|>Action::Move(..) 
=> foo(), - Action::Stop => foo(), - } -} -"#####, - r#####" -enum Action { Move { distance: u32 }, Stop } - -fn handle(action: Action) { - match action { - Action::Move(..) | Action::Stop => foo(), - } -} -"#####, - ) -} - -#[test] -fn doctest_move_arm_cond_to_match_guard() { - check_doc_test( - "move_arm_cond_to_match_guard", - r#####" -enum Action { Move { distance: u32 }, Stop } - -fn handle(action: Action) { - match action { - Action::Move { distance } => <|>if distance > 10 { foo() }, - _ => (), - } -} -"#####, - r#####" -enum Action { Move { distance: u32 }, Stop } - -fn handle(action: Action) { - match action { - Action::Move { distance } if distance > 10 => foo(), - _ => (), - } -} -"#####, - ) -} - -#[test] -fn doctest_move_bounds_to_where_clause() { - check_doc_test( - "move_bounds_to_where_clause", - r#####" -fn applyF: FnOnce(T) -> U>(f: F, x: T) -> U { - f(x) -} -"#####, - r#####" -fn apply(f: F, x: T) -> U where F: FnOnce(T) -> U { - f(x) -} -"#####, - ) -} - -#[test] -fn doctest_move_guard_to_arm_body() { - check_doc_test( - "move_guard_to_arm_body", - r#####" -enum Action { Move { distance: u32 }, Stop } - -fn handle(action: Action) { - match action { - Action::Move { distance } <|>if distance > 10 => foo(), - _ => (), - } -} -"#####, - r#####" -enum Action { Move { distance: u32 }, Stop } - -fn handle(action: Action) { - match action { - Action::Move { distance } => if distance > 10 { foo() }, - _ => (), - } -} -"#####, - ) -} - -#[test] -fn doctest_remove_dbg() { - check_doc_test( - "remove_dbg", - r#####" -fn main() { - <|>dbg!(92); -} -"#####, - r#####" -fn main() { - 92; -} -"#####, - ) -} - -#[test] -fn doctest_remove_hash() { - check_doc_test( - "remove_hash", - r#####" -fn main() { - r#"Hello,<|> World!"#; -} -"#####, - r#####" -fn main() { - r"Hello, World!"; -} -"#####, - ) -} - -#[test] -fn doctest_remove_mut() { - check_doc_test( - "remove_mut", - r#####" -impl Walrus { - fn feed(&mut<|> self, amount: u32) {} -} -"#####, - r#####" -impl 
Walrus { - fn feed(&self, amount: u32) {} -} -"#####, - ) -} - -#[test] -fn doctest_reorder_fields() { - check_doc_test( - "reorder_fields", - r#####" -struct Foo {foo: i32, bar: i32}; -const test: Foo = <|>Foo {bar: 0, foo: 1} -"#####, - r#####" -struct Foo {foo: i32, bar: i32}; -const test: Foo = Foo {foo: 1, bar: 0} -"#####, - ) -} - -#[test] -fn doctest_replace_if_let_with_match() { - check_doc_test( - "replace_if_let_with_match", - r#####" -enum Action { Move { distance: u32 }, Stop } - -fn handle(action: Action) { - <|>if let Action::Move { distance } = action { - foo(distance) - } else { - bar() - } -} -"#####, - r#####" -enum Action { Move { distance: u32 }, Stop } - -fn handle(action: Action) { - match action { - Action::Move { distance } => foo(distance), - _ => bar(), - } -} -"#####, - ) -} - -#[test] -fn doctest_replace_let_with_if_let() { - check_doc_test( - "replace_let_with_if_let", - r#####" -enum Option { Some(T), None } - -fn main(action: Action) { - <|>let x = compute(); -} - -fn compute() -> Option { None } -"#####, - r#####" -enum Option { Some(T), None } - -fn main(action: Action) { - if let Some(x) = compute() { - } -} - -fn compute() -> Option { None } -"#####, - ) -} - -#[test] -fn doctest_replace_qualified_name_with_use() { - check_doc_test( - "replace_qualified_name_with_use", - r#####" -fn process(map: std::collections::<|>HashMap) {} -"#####, - r#####" -use std::collections::HashMap; - -fn process(map: HashMap) {} -"#####, - ) -} - -#[test] -fn doctest_replace_unwrap_with_match() { - check_doc_test( - "replace_unwrap_with_match", - r#####" -enum Result { Ok(T), Err(E) } -fn main() { - let x: Result = Result::Ok(92); - let y = x.<|>unwrap(); -} -"#####, - r#####" -enum Result { Ok(T), Err(E) } -fn main() { - let x: Result = Result::Ok(92); - let y = match x { - Ok(a) => a, - $0_ => unreachable!(), - }; -} -"#####, - ) -} - -#[test] -fn doctest_split_import() { - check_doc_test( - "split_import", - r#####" -use 
std::<|>collections::HashMap; -"#####, - r#####" -use std::{collections::HashMap}; -"#####, - ) -} - -#[test] -fn doctest_unwrap_block() { - check_doc_test( - "unwrap_block", - r#####" -fn foo() { - if true {<|> - println!("foo"); - } -} -"#####, - r#####" -fn foo() { - println!("foo"); -} -"#####, - ) -} diff --git a/crates/ra_assists/src/utils.rs b/crates/ra_assists/src/utils.rs deleted file mode 100644 index 54d5678d14..0000000000 --- a/crates/ra_assists/src/utils.rs +++ /dev/null @@ -1,275 +0,0 @@ -//! Assorted functions shared by several assists. -pub(crate) mod insert_use; - -use std::{iter, ops}; - -use hir::{Adt, Crate, Enum, ScopeDef, Semantics, Trait, Type}; -use ra_ide_db::RootDatabase; -use ra_syntax::{ - ast::{self, make, NameOwner}, - AstNode, - SyntaxKind::*, - SyntaxNode, TextSize, T, -}; -use rustc_hash::FxHashSet; - -use crate::assist_config::SnippetCap; - -pub(crate) use insert_use::{find_insert_use_container, insert_use_statement}; - -#[derive(Clone, Copy, Debug)] -pub(crate) enum Cursor<'a> { - Replace(&'a SyntaxNode), - Before(&'a SyntaxNode), -} - -impl<'a> Cursor<'a> { - fn node(self) -> &'a SyntaxNode { - match self { - Cursor::Replace(node) | Cursor::Before(node) => node, - } - } -} - -pub(crate) fn render_snippet(_cap: SnippetCap, node: &SyntaxNode, cursor: Cursor) -> String { - assert!(cursor.node().ancestors().any(|it| it == *node)); - let range = cursor.node().text_range() - node.text_range().start(); - let range: ops::Range = range.into(); - - let mut placeholder = cursor.node().to_string(); - escape(&mut placeholder); - let tab_stop = match cursor { - Cursor::Replace(placeholder) => format!("${{0:{}}}", placeholder), - Cursor::Before(placeholder) => format!("$0{}", placeholder), - }; - - let mut buf = node.to_string(); - buf.replace_range(range, &tab_stop); - return buf; - - fn escape(buf: &mut String) { - stdx::replace(buf, '{', r"\{"); - stdx::replace(buf, '}', r"\}"); - stdx::replace(buf, '$', r"\$"); - } -} - -pub fn 
get_missing_assoc_items( - sema: &Semantics, - impl_def: &ast::Impl, -) -> Vec { - // Names must be unique between constants and functions. However, type aliases - // may share the same name as a function or constant. - let mut impl_fns_consts = FxHashSet::default(); - let mut impl_type = FxHashSet::default(); - - if let Some(item_list) = impl_def.assoc_item_list() { - for item in item_list.assoc_items() { - match item { - ast::AssocItem::Fn(f) => { - if let Some(n) = f.name() { - impl_fns_consts.insert(n.syntax().to_string()); - } - } - - ast::AssocItem::TypeAlias(t) => { - if let Some(n) = t.name() { - impl_type.insert(n.syntax().to_string()); - } - } - - ast::AssocItem::Const(c) => { - if let Some(n) = c.name() { - impl_fns_consts.insert(n.syntax().to_string()); - } - } - ast::AssocItem::MacroCall(_) => (), - } - } - } - - resolve_target_trait(sema, impl_def).map_or(vec![], |target_trait| { - target_trait - .items(sema.db) - .iter() - .filter(|i| match i { - hir::AssocItem::Function(f) => { - !impl_fns_consts.contains(&f.name(sema.db).to_string()) - } - hir::AssocItem::TypeAlias(t) => !impl_type.contains(&t.name(sema.db).to_string()), - hir::AssocItem::Const(c) => c - .name(sema.db) - .map(|n| !impl_fns_consts.contains(&n.to_string())) - .unwrap_or_default(), - }) - .cloned() - .collect() - }) -} - -pub(crate) fn resolve_target_trait( - sema: &Semantics, - impl_def: &ast::Impl, -) -> Option { - let ast_path = - impl_def.trait_().map(|it| it.syntax().clone()).and_then(ast::PathType::cast)?.path()?; - - match sema.resolve_path(&ast_path) { - Some(hir::PathResolution::Def(hir::ModuleDef::Trait(def))) => Some(def), - _ => None, - } -} - -pub(crate) fn vis_offset(node: &SyntaxNode) -> TextSize { - node.children_with_tokens() - .find(|it| !matches!(it.kind(), WHITESPACE | COMMENT | ATTR)) - .map(|it| it.text_range().start()) - .unwrap_or_else(|| node.text_range().start()) -} - -pub(crate) fn invert_boolean_expression(expr: ast::Expr) -> ast::Expr { - if let Some(expr) 
= invert_special_case(&expr) { - return expr; - } - make::expr_prefix(T![!], expr) -} - -fn invert_special_case(expr: &ast::Expr) -> Option { - match expr { - ast::Expr::BinExpr(bin) => match bin.op_kind()? { - ast::BinOp::NegatedEqualityTest => bin.replace_op(T![==]).map(|it| it.into()), - ast::BinOp::EqualityTest => bin.replace_op(T![!=]).map(|it| it.into()), - _ => None, - }, - ast::Expr::PrefixExpr(pe) if pe.op_kind()? == ast::PrefixOp::Not => pe.expr(), - // FIXME: - // ast::Expr::Literal(true | false ) - _ => None, - } -} - -#[derive(Clone, Copy)] -pub enum TryEnum { - Result, - Option, -} - -impl TryEnum { - const ALL: [TryEnum; 2] = [TryEnum::Option, TryEnum::Result]; - - pub fn from_ty(sema: &Semantics, ty: &Type) -> Option { - let enum_ = match ty.as_adt() { - Some(Adt::Enum(it)) => it, - _ => return None, - }; - TryEnum::ALL.iter().find_map(|&var| { - if &enum_.name(sema.db).to_string() == var.type_name() { - return Some(var); - } - None - }) - } - - pub(crate) fn happy_case(self) -> &'static str { - match self { - TryEnum::Result => "Ok", - TryEnum::Option => "Some", - } - } - - pub(crate) fn sad_pattern(self) -> ast::Pat { - match self { - TryEnum::Result => make::tuple_struct_pat( - make::path_unqualified(make::path_segment(make::name_ref("Err"))), - iter::once(make::wildcard_pat().into()), - ) - .into(), - TryEnum::Option => make::ident_pat(make::name("None")).into(), - } - } - - fn type_name(self) -> &'static str { - match self { - TryEnum::Result => "Result", - TryEnum::Option => "Option", - } - } -} - -/// Helps with finding well-know things inside the standard library. This is -/// somewhat similar to the known paths infra inside hir, but it different; We -/// want to make sure that IDE specific paths don't become interesting inside -/// the compiler itself as well. 
-pub(crate) struct FamousDefs<'a, 'b>(pub(crate) &'a Semantics<'b, RootDatabase>, pub(crate) Crate); - -#[allow(non_snake_case)] -impl FamousDefs<'_, '_> { - #[cfg(test)] - pub(crate) const FIXTURE: &'static str = r#"//- /libcore.rs crate:core -pub mod convert { - pub trait From { - fn from(T) -> Self; - } -} - -pub mod option { - pub enum Option { None, Some(T)} -} - -pub mod prelude { - pub use crate::{convert::From, option::Option::{self, *}}; -} -#[prelude_import] -pub use prelude::*; -"#; - - pub(crate) fn core_convert_From(&self) -> Option { - self.find_trait("core:convert:From") - } - - pub(crate) fn core_option_Option(&self) -> Option { - self.find_enum("core:option:Option") - } - - fn find_trait(&self, path: &str) -> Option { - match self.find_def(path)? { - hir::ScopeDef::ModuleDef(hir::ModuleDef::Trait(it)) => Some(it), - _ => None, - } - } - - fn find_enum(&self, path: &str) -> Option { - match self.find_def(path)? { - hir::ScopeDef::ModuleDef(hir::ModuleDef::Adt(hir::Adt::Enum(it))) => Some(it), - _ => None, - } - } - - fn find_def(&self, path: &str) -> Option { - let db = self.0.db; - let mut path = path.split(':'); - let trait_ = path.next_back()?; - let std_crate = path.next()?; - let std_crate = self - .1 - .dependencies(db) - .into_iter() - .find(|dep| &dep.name.to_string() == std_crate)? - .krate; - - let mut module = std_crate.root_module(db)?; - for segment in path { - module = module.children(db).find_map(|child| { - let name = child.name(db)?; - if &name.to_string() == segment { - Some(child) - } else { - None - } - })?; - } - let def = - module.scope(db, None).into_iter().find(|(name, _def)| &name.to_string() == trait_)?.1; - Some(def) - } -} diff --git a/crates/ra_assists/src/utils/insert_use.rs b/crates/ra_assists/src/utils/insert_use.rs deleted file mode 100644 index 32780fceb5..0000000000 --- a/crates/ra_assists/src/utils/insert_use.rs +++ /dev/null @@ -1,526 +0,0 @@ -//! Handle syntactic aspects of inserting a new `use`. 
-// FIXME: rewrite according to the plan, outlined in -// https://github.com/rust-analyzer/rust-analyzer/issues/3301#issuecomment-592931553 - -use hir::{self, ModPath}; -use ra_syntax::{ - ast::{self, NameOwner, VisibilityOwner}, - AstNode, Direction, SmolStr, - SyntaxKind::{PATH, PATH_SEGMENT}, - SyntaxNode, T, -}; -use ra_text_edit::TextEditBuilder; - -use crate::assist_context::AssistContext; -use either::Either; - -/// Determines the containing syntax node in which to insert a `use` statement affecting `position`. -pub(crate) fn find_insert_use_container( - position: &SyntaxNode, - ctx: &AssistContext, -) -> Option> { - ctx.sema.ancestors_with_macros(position.clone()).find_map(|n| { - if let Some(module) = ast::Module::cast(n.clone()) { - return module.item_list().map(|it| Either::Left(it)); - } - Some(Either::Right(ast::SourceFile::cast(n)?)) - }) -} - -/// Creates and inserts a use statement for the given path to import. -/// The use statement is inserted in the scope most appropriate to the -/// the cursor position given, additionally merged with the existing use imports. 
-pub(crate) fn insert_use_statement( - // Ideally the position of the cursor, used to - position: &SyntaxNode, - path_to_import: &ModPath, - ctx: &AssistContext, - builder: &mut TextEditBuilder, -) { - let target = path_to_import.to_string().split("::").map(SmolStr::new).collect::>(); - let container = find_insert_use_container(position, ctx); - - if let Some(container) = container { - let syntax = container.either(|l| l.syntax().clone(), |r| r.syntax().clone()); - let action = best_action_for_target(syntax, position.clone(), &target); - make_assist(&action, &target, builder); - } -} - -fn collect_path_segments_raw( - segments: &mut Vec, - mut path: ast::Path, -) -> Option { - let oldlen = segments.len(); - loop { - let mut children = path.syntax().children_with_tokens(); - let (first, second, third) = ( - children.next().map(|n| (n.clone(), n.kind())), - children.next().map(|n| (n.clone(), n.kind())), - children.next().map(|n| (n.clone(), n.kind())), - ); - match (first, second, third) { - (Some((subpath, PATH)), Some((_, T![::])), Some((segment, PATH_SEGMENT))) => { - path = ast::Path::cast(subpath.as_node()?.clone())?; - segments.push(ast::PathSegment::cast(segment.as_node()?.clone())?); - } - (Some((segment, PATH_SEGMENT)), _, _) => { - segments.push(ast::PathSegment::cast(segment.as_node()?.clone())?); - break; - } - (_, _, _) => return None, - } - } - // We need to reverse only the new added segments - let only_new_segments = segments.split_at_mut(oldlen).1; - only_new_segments.reverse(); - Some(segments.len() - oldlen) -} - -fn fmt_segments_raw(segments: &[SmolStr], buf: &mut String) { - let mut iter = segments.iter(); - if let Some(s) = iter.next() { - buf.push_str(s); - } - for s in iter { - buf.push_str("::"); - buf.push_str(s); - } -} - -/// Returns the number of common segments. 
-fn compare_path_segments(left: &[SmolStr], right: &[ast::PathSegment]) -> usize { - left.iter().zip(right).take_while(|(l, r)| compare_path_segment(l, r)).count() -} - -fn compare_path_segment(a: &SmolStr, b: &ast::PathSegment) -> bool { - if let Some(kb) = b.kind() { - match kb { - ast::PathSegmentKind::Name(nameref_b) => a == nameref_b.text(), - ast::PathSegmentKind::SelfKw => a == "self", - ast::PathSegmentKind::SuperKw => a == "super", - ast::PathSegmentKind::CrateKw => a == "crate", - ast::PathSegmentKind::Type { .. } => false, // not allowed in imports - } - } else { - false - } -} - -fn compare_path_segment_with_name(a: &SmolStr, b: &ast::Name) -> bool { - a == b.text() -} - -#[derive(Clone, Debug)] -enum ImportAction { - Nothing, - // Add a brand new use statement. - AddNewUse { - anchor: Option, // anchor node - add_after_anchor: bool, - }, - - // To split an existing use statement creating a nested import. - AddNestedImport { - // how may segments matched with the target path - common_segments: usize, - path_to_split: ast::Path, - // the first segment of path_to_split we want to add into the new nested list - first_segment_to_split: Option, - // Wether to add 'self' in addition to the target path - add_self: bool, - }, - // To add the target path to an existing nested import tree list. 
- AddInTreeList { - common_segments: usize, - // The UseTreeList where to add the target path - tree_list: ast::UseTreeList, - add_self: bool, - }, -} - -impl ImportAction { - fn add_new_use(anchor: Option, add_after_anchor: bool) -> Self { - ImportAction::AddNewUse { anchor, add_after_anchor } - } - - fn add_nested_import( - common_segments: usize, - path_to_split: ast::Path, - first_segment_to_split: Option, - add_self: bool, - ) -> Self { - ImportAction::AddNestedImport { - common_segments, - path_to_split, - first_segment_to_split, - add_self, - } - } - - fn add_in_tree_list( - common_segments: usize, - tree_list: ast::UseTreeList, - add_self: bool, - ) -> Self { - ImportAction::AddInTreeList { common_segments, tree_list, add_self } - } - - fn better(left: ImportAction, right: ImportAction) -> ImportAction { - if left.is_better(&right) { - left - } else { - right - } - } - - fn is_better(&self, other: &ImportAction) -> bool { - match (self, other) { - (ImportAction::Nothing, _) => true, - (ImportAction::AddInTreeList { .. }, ImportAction::Nothing) => false, - ( - ImportAction::AddNestedImport { common_segments: n, .. }, - ImportAction::AddInTreeList { common_segments: m, .. }, - ) - | ( - ImportAction::AddInTreeList { common_segments: n, .. }, - ImportAction::AddNestedImport { common_segments: m, .. }, - ) - | ( - ImportAction::AddInTreeList { common_segments: n, .. }, - ImportAction::AddInTreeList { common_segments: m, .. }, - ) - | ( - ImportAction::AddNestedImport { common_segments: n, .. }, - ImportAction::AddNestedImport { common_segments: m, .. }, - ) => n > m, - (ImportAction::AddInTreeList { .. }, _) => true, - (ImportAction::AddNestedImport { .. }, ImportAction::Nothing) => false, - (ImportAction::AddNestedImport { .. }, _) => true, - (ImportAction::AddNewUse { .. }, _) => false, - } - } -} - -// Find out the best ImportAction to import target path against current_use_tree. 
-// If current_use_tree has a nested import the function gets called recursively on every UseTree inside a UseTreeList. -fn walk_use_tree_for_best_action( - current_path_segments: &mut Vec, // buffer containing path segments - current_parent_use_tree_list: Option, // will be Some value if we are in a nested import - current_use_tree: ast::UseTree, // the use tree we are currently examinating - target: &[SmolStr], // the path we want to import -) -> ImportAction { - // We save the number of segments in the buffer so we can restore the correct segments - // before returning. Recursive call will add segments so we need to delete them. - let prev_len = current_path_segments.len(); - - let tree_list = current_use_tree.use_tree_list(); - let alias = current_use_tree.rename(); - - let path = match current_use_tree.path() { - Some(path) => path, - None => { - // If the use item don't have a path, it means it's broken (syntax error) - return ImportAction::add_new_use( - current_use_tree - .syntax() - .ancestors() - .find_map(ast::Use::cast) - .map(|it| it.syntax().clone()), - true, - ); - } - }; - - // This can happen only if current_use_tree is a direct child of a UseItem - if let Some(name) = alias.and_then(|it| it.name()) { - if compare_path_segment_with_name(&target[0], &name) { - return ImportAction::Nothing; - } - } - - collect_path_segments_raw(current_path_segments, path.clone()); - - // We compare only the new segments added in the line just above. - // The first prev_len segments were already compared in 'parent' recursive calls. 
- let left = target.split_at(prev_len).1; - let right = current_path_segments.split_at(prev_len).1; - let common = compare_path_segments(left, &right); - let mut action = match common { - 0 => ImportAction::add_new_use( - // e.g: target is std::fmt and we can have - // use foo::bar - // We add a brand new use statement - current_use_tree - .syntax() - .ancestors() - .find_map(ast::Use::cast) - .map(|it| it.syntax().clone()), - true, - ), - common if common == left.len() && left.len() == right.len() => { - // e.g: target is std::fmt and we can have - // 1- use std::fmt; - // 2- use std::fmt::{ ... } - if let Some(list) = tree_list { - // In case 2 we need to add self to the nested list - // unless it's already there - let has_self = list.use_trees().map(|it| it.path()).any(|p| { - p.and_then(|it| it.segment()) - .and_then(|it| it.kind()) - .filter(|k| *k == ast::PathSegmentKind::SelfKw) - .is_some() - }); - - if has_self { - ImportAction::Nothing - } else { - ImportAction::add_in_tree_list(current_path_segments.len(), list, true) - } - } else { - // Case 1 - ImportAction::Nothing - } - } - common if common != left.len() && left.len() == right.len() => { - // e.g: target is std::fmt and we have - // use std::io; - // We need to split. - let segments_to_split = current_path_segments.split_at(prev_len + common).1; - ImportAction::add_nested_import( - prev_len + common, - path, - Some(segments_to_split[0].clone()), - false, - ) - } - common if common == right.len() && left.len() > right.len() => { - // e.g: target is std::fmt and we can have - // 1- use std; - // 2- use std::{ ... 
}; - - // fallback action - let mut better_action = ImportAction::add_new_use( - current_use_tree - .syntax() - .ancestors() - .find_map(ast::Use::cast) - .map(|it| it.syntax().clone()), - true, - ); - if let Some(list) = tree_list { - // Case 2, check recursively if the path is already imported in the nested list - for u in list.use_trees() { - let child_action = walk_use_tree_for_best_action( - current_path_segments, - Some(list.clone()), - u, - target, - ); - if child_action.is_better(&better_action) { - better_action = child_action; - if let ImportAction::Nothing = better_action { - return better_action; - } - } - } - } else { - // Case 1, split adding self - better_action = ImportAction::add_nested_import(prev_len + common, path, None, true) - } - better_action - } - common if common == left.len() && left.len() < right.len() => { - // e.g: target is std::fmt and we can have - // use std::fmt::Debug; - let segments_to_split = current_path_segments.split_at(prev_len + common).1; - ImportAction::add_nested_import( - prev_len + common, - path, - Some(segments_to_split[0].clone()), - true, - ) - } - common if common < left.len() && common < right.len() => { - // e.g: target is std::fmt::nested::Debug - // use std::fmt::Display - let segments_to_split = current_path_segments.split_at(prev_len + common).1; - ImportAction::add_nested_import( - prev_len + common, - path, - Some(segments_to_split[0].clone()), - false, - ) - } - _ => unreachable!(), - }; - - // If we are inside a UseTreeList adding a use statement become adding to the existing - // tree list. - action = match (current_parent_use_tree_list, action.clone()) { - (Some(use_tree_list), ImportAction::AddNewUse { .. 
}) => { - ImportAction::add_in_tree_list(prev_len, use_tree_list, false) - } - (_, _) => action, - }; - - // We remove the segments added - current_path_segments.truncate(prev_len); - action -} - -fn best_action_for_target( - container: SyntaxNode, - anchor: SyntaxNode, - target: &[SmolStr], -) -> ImportAction { - let mut storage = Vec::with_capacity(16); // this should be the only allocation - let best_action = container - .children() - .filter_map(ast::Use::cast) - .filter(|u| u.visibility().is_none()) - .filter_map(|it| it.use_tree()) - .map(|u| walk_use_tree_for_best_action(&mut storage, None, u, target)) - .fold(None, |best, a| match best { - Some(best) => Some(ImportAction::better(best, a)), - None => Some(a), - }); - - match best_action { - Some(action) => action, - None => { - // We have no action and no UseItem was found in container so we find - // another item and we use it as anchor. - // If there are no items above, we choose the target path itself as anchor. - // todo: we should include even whitespace blocks as anchor candidates - let anchor = container.children().next().or_else(|| Some(anchor)); - - let add_after_anchor = anchor - .clone() - .and_then(ast::Attr::cast) - .map(|attr| attr.kind() == ast::AttrKind::Inner) - .unwrap_or(false); - ImportAction::add_new_use(anchor, add_after_anchor) - } - } -} - -fn make_assist(action: &ImportAction, target: &[SmolStr], edit: &mut TextEditBuilder) { - match action { - ImportAction::AddNewUse { anchor, add_after_anchor } => { - make_assist_add_new_use(anchor, *add_after_anchor, target, edit) - } - ImportAction::AddInTreeList { common_segments, tree_list, add_self } => { - // We know that the fist n segments already exists in the use statement we want - // to modify, so we want to add only the last target.len() - n segments. 
- let segments_to_add = target.split_at(*common_segments).1; - make_assist_add_in_tree_list(tree_list, segments_to_add, *add_self, edit) - } - ImportAction::AddNestedImport { - common_segments, - path_to_split, - first_segment_to_split, - add_self, - } => { - let segments_to_add = target.split_at(*common_segments).1; - make_assist_add_nested_import( - path_to_split, - first_segment_to_split, - segments_to_add, - *add_self, - edit, - ) - } - _ => {} - } -} - -fn make_assist_add_new_use( - anchor: &Option, - after: bool, - target: &[SmolStr], - edit: &mut TextEditBuilder, -) { - if let Some(anchor) = anchor { - let indent = ra_fmt::leading_indent(anchor); - let mut buf = String::new(); - if after { - buf.push_str("\n"); - if let Some(spaces) = &indent { - buf.push_str(spaces); - } - } - buf.push_str("use "); - fmt_segments_raw(target, &mut buf); - buf.push_str(";"); - if !after { - buf.push_str("\n\n"); - if let Some(spaces) = &indent { - buf.push_str(&spaces); - } - } - let position = if after { anchor.text_range().end() } else { anchor.text_range().start() }; - edit.insert(position, buf); - } -} - -fn make_assist_add_in_tree_list( - tree_list: &ast::UseTreeList, - target: &[SmolStr], - add_self: bool, - edit: &mut TextEditBuilder, -) { - let last = tree_list.use_trees().last(); - if let Some(last) = last { - let mut buf = String::new(); - let comma = last.syntax().siblings(Direction::Next).find(|n| n.kind() == T![,]); - let offset = if let Some(comma) = comma { - comma.text_range().end() - } else { - buf.push_str(","); - last.syntax().text_range().end() - }; - if add_self { - buf.push_str(" self") - } else { - buf.push_str(" "); - } - fmt_segments_raw(target, &mut buf); - edit.insert(offset, buf); - } else { - } -} - -fn make_assist_add_nested_import( - path: &ast::Path, - first_segment_to_split: &Option, - target: &[SmolStr], - add_self: bool, - edit: &mut TextEditBuilder, -) { - let use_tree = path.syntax().ancestors().find_map(ast::UseTree::cast); - if let 
Some(use_tree) = use_tree { - let (start, add_colon_colon) = if let Some(first_segment_to_split) = first_segment_to_split - { - (first_segment_to_split.syntax().text_range().start(), false) - } else { - (use_tree.syntax().text_range().end(), true) - }; - let end = use_tree.syntax().text_range().end(); - - let mut buf = String::new(); - if add_colon_colon { - buf.push_str("::"); - } - buf.push_str("{"); - if add_self { - buf.push_str("self, "); - } - fmt_segments_raw(target, &mut buf); - if !target.is_empty() { - buf.push_str(", "); - } - edit.insert(start, buf); - edit.insert(end, "}".to_string()); - } -} diff --git a/crates/ra_cfg/Cargo.toml b/crates/ra_cfg/Cargo.toml deleted file mode 100644 index 6425cd6d6f..0000000000 --- a/crates/ra_cfg/Cargo.toml +++ /dev/null @@ -1,18 +0,0 @@ -[package] -edition = "2018" -name = "ra_cfg" -version = "0.1.0" -authors = ["rust-analyzer developers"] -license = "MIT OR Apache-2.0" - -[lib] -doctest = false - -[dependencies] -rustc-hash = "1.1.0" - -ra_syntax = { path = "../ra_syntax" } -tt = { path = "../ra_tt", package = "ra_tt" } - -[dev-dependencies] -mbe = { path = "../ra_mbe", package = "ra_mbe" } diff --git a/crates/ra_cfg/src/cfg_expr.rs b/crates/ra_cfg/src/cfg_expr.rs deleted file mode 100644 index f48928aee8..0000000000 --- a/crates/ra_cfg/src/cfg_expr.rs +++ /dev/null @@ -1,135 +0,0 @@ -//! The condition expression used in `#[cfg(..)]` attributes. -//! -//! See: https://doc.rust-lang.org/reference/conditional-compilation.html#conditional-compilation - -use std::slice::Iter as SliceIter; - -use ra_syntax::SmolStr; - -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum CfgExpr { - Invalid, - Atom(SmolStr), - KeyValue { key: SmolStr, value: SmolStr }, - All(Vec), - Any(Vec), - Not(Box), -} - -impl CfgExpr { - pub fn parse(tt: &tt::Subtree) -> CfgExpr { - next_cfg_expr(&mut tt.token_trees.iter()).unwrap_or(CfgExpr::Invalid) - } - /// Fold the cfg by querying all basic `Atom` and `KeyValue` predicates. 
- pub fn fold(&self, query: &dyn Fn(&SmolStr, Option<&SmolStr>) -> bool) -> Option { - match self { - CfgExpr::Invalid => None, - CfgExpr::Atom(name) => Some(query(name, None)), - CfgExpr::KeyValue { key, value } => Some(query(key, Some(value))), - CfgExpr::All(preds) => { - preds.iter().try_fold(true, |s, pred| Some(s && pred.fold(query)?)) - } - CfgExpr::Any(preds) => { - preds.iter().try_fold(false, |s, pred| Some(s || pred.fold(query)?)) - } - CfgExpr::Not(pred) => pred.fold(query).map(|s| !s), - } - } -} - -fn next_cfg_expr(it: &mut SliceIter) -> Option { - let name = match it.next() { - None => return None, - Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) => ident.text.clone(), - Some(_) => return Some(CfgExpr::Invalid), - }; - - // Peek - let ret = match it.as_slice().first() { - Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) if punct.char == '=' => { - match it.as_slice().get(1) { - Some(tt::TokenTree::Leaf(tt::Leaf::Literal(literal))) => { - it.next(); - it.next(); - // FIXME: escape? raw string? 
- let value = - SmolStr::new(literal.text.trim_start_matches('"').trim_end_matches('"')); - CfgExpr::KeyValue { key: name, value } - } - _ => return Some(CfgExpr::Invalid), - } - } - Some(tt::TokenTree::Subtree(subtree)) => { - it.next(); - let mut sub_it = subtree.token_trees.iter(); - let mut subs = std::iter::from_fn(|| next_cfg_expr(&mut sub_it)).collect(); - match name.as_str() { - "all" => CfgExpr::All(subs), - "any" => CfgExpr::Any(subs), - "not" => CfgExpr::Not(Box::new(subs.pop().unwrap_or(CfgExpr::Invalid))), - _ => CfgExpr::Invalid, - } - } - _ => CfgExpr::Atom(name), - }; - - // Eat comma separator - if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = it.as_slice().first() { - if punct.char == ',' { - it.next(); - } - } - Some(ret) -} - -#[cfg(test)] -mod tests { - use super::*; - - use mbe::{ast_to_token_tree, TokenMap}; - use ra_syntax::ast::{self, AstNode}; - - fn get_token_tree_generated(input: &str) -> (tt::Subtree, TokenMap) { - let source_file = ast::SourceFile::parse(input).ok().unwrap(); - let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - ast_to_token_tree(&tt).unwrap() - } - - fn assert_parse_result(input: &str, expected: CfgExpr) { - let (tt, _) = get_token_tree_generated(input); - let cfg = CfgExpr::parse(&tt); - assert_eq!(cfg, expected); - } - - #[test] - fn test_cfg_expr_parser() { - assert_parse_result("#![cfg(foo)]", CfgExpr::Atom("foo".into())); - assert_parse_result("#![cfg(foo,)]", CfgExpr::Atom("foo".into())); - assert_parse_result( - "#![cfg(not(foo))]", - CfgExpr::Not(Box::new(CfgExpr::Atom("foo".into()))), - ); - assert_parse_result("#![cfg(foo(bar))]", CfgExpr::Invalid); - - // Only take the first - assert_parse_result(r#"#![cfg(foo, bar = "baz")]"#, CfgExpr::Atom("foo".into())); - - assert_parse_result( - r#"#![cfg(all(foo, bar = "baz"))]"#, - CfgExpr::All(vec![ - CfgExpr::Atom("foo".into()), - CfgExpr::KeyValue { key: "bar".into(), value: "baz".into() }, - ]), - ); - - 
assert_parse_result( - r#"#![cfg(any(not(), all(), , bar = "baz",))]"#, - CfgExpr::Any(vec![ - CfgExpr::Not(Box::new(CfgExpr::Invalid)), - CfgExpr::All(vec![]), - CfgExpr::Invalid, - CfgExpr::KeyValue { key: "bar".into(), value: "baz".into() }, - ]), - ); - } -} diff --git a/crates/ra_cfg/src/lib.rs b/crates/ra_cfg/src/lib.rs deleted file mode 100644 index cd5a0a7b64..0000000000 --- a/crates/ra_cfg/src/lib.rs +++ /dev/null @@ -1,51 +0,0 @@ -//! ra_cfg defines conditional compiling options, `cfg` attibute parser and evaluator - -mod cfg_expr; - -use ra_syntax::SmolStr; -use rustc_hash::FxHashSet; - -pub use cfg_expr::CfgExpr; - -/// Configuration options used for conditional compilition on items with `cfg` attributes. -/// We have two kind of options in different namespaces: atomic options like `unix`, and -/// key-value options like `target_arch="x86"`. -/// -/// Note that for key-value options, one key can have multiple values (but not none). -/// `feature` is an example. We have both `feature="foo"` and `feature="bar"` if features -/// `foo` and `bar` are both enabled. And here, we store key-value options as a set of tuple -/// of key and value in `key_values`. 
-/// -/// See: https://doc.rust-lang.org/reference/conditional-compilation.html#set-configuration-options -#[derive(Debug, Clone, PartialEq, Eq, Default)] -pub struct CfgOptions { - atoms: FxHashSet, - key_values: FxHashSet<(SmolStr, SmolStr)>, -} - -impl CfgOptions { - pub fn check(&self, cfg: &CfgExpr) -> Option { - cfg.fold(&|key, value| match value { - None => self.atoms.contains(key), - Some(value) => self.key_values.contains(&(key.clone(), value.clone())), - }) - } - - pub fn insert_atom(&mut self, key: SmolStr) { - self.atoms.insert(key); - } - - pub fn insert_key_value(&mut self, key: SmolStr, value: SmolStr) { - self.key_values.insert((key, value)); - } - - pub fn append(&mut self, other: &CfgOptions) { - for atom in &other.atoms { - self.atoms.insert(atom.clone()); - } - - for (key, value) in &other.key_values { - self.key_values.insert((key.clone(), value.clone())); - } - } -} diff --git a/crates/ra_db/Cargo.toml b/crates/ra_db/Cargo.toml deleted file mode 100644 index fe73dc0157..0000000000 --- a/crates/ra_db/Cargo.toml +++ /dev/null @@ -1,21 +0,0 @@ -[package] -edition = "2018" -name = "ra_db" -version = "0.1.0" -authors = ["rust-analyzer developers"] -license = "MIT OR Apache-2.0" - -[lib] -doctest = false - -[dependencies] -salsa = "0.15.2" -rustc-hash = "1.1.0" - -ra_syntax = { path = "../ra_syntax" } -ra_cfg = { path = "../ra_cfg" } -ra_prof = { path = "../ra_prof" } -ra_tt = { path = "../ra_tt" } -test_utils = { path = "../test_utils" } -vfs = { path = "../vfs" } -stdx = { path = "../stdx" } diff --git a/crates/ra_db/src/fixture.rs b/crates/ra_db/src/fixture.rs deleted file mode 100644 index 2aafb99654..0000000000 --- a/crates/ra_db/src/fixture.rs +++ /dev/null @@ -1,228 +0,0 @@ -//! Fixtures are strings containing rust source code with optional metadata. -//! A fixture without metadata is parsed into a single source file. -//! Use this to test functionality local to one file. -//! -//! Simple Example: -//! ``` -//! r#" -//! fn main() { -//! 
println!("Hello World") -//! } -//! "# -//! ``` -//! -//! Metadata can be added to a fixture after a `//-` comment. -//! The basic form is specifying filenames, -//! which is also how to define multiple files in a single test fixture -//! -//! Example using two files in the same crate: -//! ``` -//! " -//! //- /main.rs -//! mod foo; -//! fn main() { -//! foo::bar(); -//! } -//! -//! //- /foo.rs -//! pub fn bar() {} -//! " -//! ``` -//! -//! Example using two crates with one file each, with one crate depending on the other: -//! ``` -//! r#" -//! //- /main.rs crate:a deps:b -//! fn main() { -//! b::foo(); -//! } -//! //- /lib.rs crate:b -//! pub fn b() { -//! println!("Hello World") -//! } -//! "# -//! ``` -//! -//! Metadata allows specifying all settings and variables -//! that are available in a real rust project: -//! - crate names via `crate:cratename` -//! - dependencies via `deps:dep1,dep2` -//! - configuration settings via `cfg:dbg=false,opt_level=2` -//! - environment variables via `env:PATH=/bin,RUST_LOG=debug` -//! -//! Example using all available metadata: -//! ``` -//! " -//! //- /lib.rs crate:foo deps:bar,baz cfg:foo=a,bar=b env:OUTDIR=path/to,OTHER=foo -//! fn insert_source_code_here() {} -//! " -//! 
``` -use std::{str::FromStr, sync::Arc}; - -use ra_cfg::CfgOptions; -use rustc_hash::FxHashMap; -use test_utils::{extract_range_or_offset, Fixture, RangeOrOffset, CURSOR_MARKER}; -use vfs::{file_set::FileSet, VfsPath}; - -use crate::{ - input::CrateName, CrateGraph, CrateId, Edition, Env, FileId, FilePosition, SourceDatabaseExt, - SourceRoot, SourceRootId, -}; - -pub const WORKSPACE: SourceRootId = SourceRootId(0); - -pub trait WithFixture: Default + SourceDatabaseExt + 'static { - fn with_single_file(text: &str) -> (Self, FileId) { - let mut db = Self::default(); - let (_, files) = with_files(&mut db, text); - assert_eq!(files.len(), 1); - (db, files[0]) - } - - fn with_files(ra_fixture: &str) -> Self { - let mut db = Self::default(); - let (pos, _) = with_files(&mut db, ra_fixture); - assert!(pos.is_none()); - db - } - - fn with_position(ra_fixture: &str) -> (Self, FilePosition) { - let (db, file_id, range_or_offset) = Self::with_range_or_offset(ra_fixture); - let offset = match range_or_offset { - RangeOrOffset::Range(_) => panic!(), - RangeOrOffset::Offset(it) => it, - }; - (db, FilePosition { file_id, offset }) - } - - fn with_range_or_offset(ra_fixture: &str) -> (Self, FileId, RangeOrOffset) { - let mut db = Self::default(); - let (pos, _) = with_files(&mut db, ra_fixture); - let (file_id, range_or_offset) = pos.unwrap(); - (db, file_id, range_or_offset) - } - - fn test_crate(&self) -> CrateId { - let crate_graph = self.crate_graph(); - let mut it = crate_graph.iter(); - let res = it.next().unwrap(); - assert!(it.next().is_none()); - res - } -} - -impl WithFixture for DB {} - -fn with_files( - db: &mut dyn SourceDatabaseExt, - fixture: &str, -) -> (Option<(FileId, RangeOrOffset)>, Vec) { - let fixture = Fixture::parse(fixture); - - let mut files = Vec::new(); - let mut crate_graph = CrateGraph::default(); - let mut crates = FxHashMap::default(); - let mut crate_deps = Vec::new(); - let mut default_crate_root: Option = None; - - let mut file_set = 
FileSet::default(); - let source_root_id = WORKSPACE; - let source_root_prefix = "/".to_string(); - let mut file_id = FileId(0); - - let mut file_position = None; - - for entry in fixture { - let text = if entry.text.contains(CURSOR_MARKER) { - let (range_or_offset, text) = extract_range_or_offset(&entry.text); - assert!(file_position.is_none()); - file_position = Some((file_id, range_or_offset)); - text.to_string() - } else { - entry.text.clone() - }; - - let meta = FileMeta::from(entry); - assert!(meta.path.starts_with(&source_root_prefix)); - - if let Some(krate) = meta.krate { - let crate_id = crate_graph.add_crate_root( - file_id, - meta.edition, - Some(krate.clone()), - meta.cfg, - meta.env, - Default::default(), - ); - let crate_name = CrateName::new(&krate).unwrap(); - let prev = crates.insert(crate_name.clone(), crate_id); - assert!(prev.is_none()); - for dep in meta.deps { - let dep = CrateName::new(&dep).unwrap(); - crate_deps.push((crate_name.clone(), dep)) - } - } else if meta.path == "/main.rs" || meta.path == "/lib.rs" { - assert!(default_crate_root.is_none()); - default_crate_root = Some(file_id); - } - - db.set_file_text(file_id, Arc::new(text)); - db.set_file_source_root(file_id, source_root_id); - let path = VfsPath::new_virtual_path(meta.path); - file_set.insert(file_id, path.into()); - files.push(file_id); - file_id.0 += 1; - } - - if crates.is_empty() { - let crate_root = default_crate_root.unwrap(); - crate_graph.add_crate_root( - crate_root, - Edition::Edition2018, - None, - CfgOptions::default(), - Env::default(), - Default::default(), - ); - } else { - for (from, to) in crate_deps { - let from_id = crates[&from]; - let to_id = crates[&to]; - crate_graph.add_dep(from_id, CrateName::new(&to).unwrap(), to_id).unwrap(); - } - } - - db.set_source_root(source_root_id, Arc::new(SourceRoot::new_local(file_set))); - db.set_crate_graph(Arc::new(crate_graph)); - - (file_position, files) -} - -struct FileMeta { - path: String, - krate: Option, - deps: 
Vec, - cfg: CfgOptions, - edition: Edition, - env: Env, -} - -impl From for FileMeta { - fn from(f: Fixture) -> FileMeta { - let mut cfg = CfgOptions::default(); - f.cfg_atoms.iter().for_each(|it| cfg.insert_atom(it.into())); - f.cfg_key_values.iter().for_each(|(k, v)| cfg.insert_key_value(k.into(), v.into())); - - FileMeta { - path: f.path, - krate: f.krate, - deps: f.deps, - cfg, - edition: f - .edition - .as_ref() - .map_or(Edition::Edition2018, |v| Edition::from_str(&v).unwrap()), - env: f.env.into_iter().collect(), - } - } -} diff --git a/crates/ra_db/src/input.rs b/crates/ra_db/src/input.rs deleted file mode 100644 index 6f2e5cfc76..0000000000 --- a/crates/ra_db/src/input.rs +++ /dev/null @@ -1,453 +0,0 @@ -//! This module specifies the input to rust-analyzer. In some sense, this is -//! **the** most important module, because all other fancy stuff is strictly -//! derived from this input. -//! -//! Note that neither this module, nor any other part of the analyzer's core do -//! actual IO. See `vfs` and `project_model` in the `rust-analyzer` crate for how -//! actual IO is done and lowered to input. - -use std::{fmt, iter::FromIterator, ops, str::FromStr, sync::Arc}; - -use ra_cfg::CfgOptions; -use ra_syntax::SmolStr; -use ra_tt::TokenExpander; -use rustc_hash::{FxHashMap, FxHashSet}; -use vfs::file_set::FileSet; - -pub use vfs::FileId; - -/// Files are grouped into source roots. A source root is a directory on the -/// file systems which is watched for changes. Typically it corresponds to a -/// Rust crate. Source roots *might* be nested: in this case, a file belongs to -/// the nearest enclosing source root. Paths to files are always relative to a -/// source root, and the analyzer does not know the root path of the source root at -/// all. So, a file from one source root can't refer to a file in another source -/// root by path. 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] -pub struct SourceRootId(pub u32); - -#[derive(Clone, Debug, PartialEq, Eq)] -pub struct SourceRoot { - /// Sysroot or crates.io library. - /// - /// Libraries are considered mostly immutable, this assumption is used to - /// optimize salsa's query structure - pub is_library: bool, - pub(crate) file_set: FileSet, -} - -impl SourceRoot { - pub fn new_local(file_set: FileSet) -> SourceRoot { - SourceRoot { is_library: false, file_set } - } - pub fn new_library(file_set: FileSet) -> SourceRoot { - SourceRoot { is_library: true, file_set } - } - pub fn iter(&self) -> impl Iterator + '_ { - self.file_set.iter() - } -} - -/// `CrateGraph` is a bit of information which turns a set of text files into a -/// number of Rust crates. Each crate is defined by the `FileId` of its root module, -/// the set of cfg flags (not yet implemented) and the set of dependencies. Note -/// that, due to cfg's, there might be several crates for a single `FileId`! As -/// in the rust-lang proper, a crate does not have a name. Instead, names are -/// specified on dependency edges. That is, a crate might be known under -/// different names in different dependent crates. -/// -/// Note that `CrateGraph` is build-system agnostic: it's a concept of the Rust -/// language proper, not a concept of the build system. In practice, we get -/// `CrateGraph` by lowering `cargo metadata` output. -#[derive(Debug, Clone, Default, PartialEq, Eq)] -pub struct CrateGraph { - arena: FxHashMap, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub struct CrateId(pub u32); - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct CrateName(SmolStr); - -impl CrateName { - /// Creates a crate name, checking for dashes in the string provided. - /// Dashes are not allowed in the crate names, - /// hence the input string is returned as `Err` for those cases. 
- pub fn new(name: &str) -> Result { - if name.contains('-') { - Err(name) - } else { - Ok(Self(SmolStr::new(name))) - } - } - - /// Creates a crate name, unconditionally replacing the dashes with underscores. - pub fn normalize_dashes(name: &str) -> CrateName { - Self(SmolStr::new(name.replace('-', "_"))) - } -} - -impl fmt::Display for CrateName { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}", self.0) - } -} - -impl ops::Deref for CrateName { - type Target = str; - fn deref(&self) -> &Self::Target { - &*self.0 - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct ProcMacroId(pub u32); - -#[derive(Debug, Clone)] -pub struct ProcMacro { - pub name: SmolStr, - pub expander: Arc, -} - -impl Eq for ProcMacro {} -impl PartialEq for ProcMacro { - fn eq(&self, other: &ProcMacro) -> bool { - self.name == other.name && Arc::ptr_eq(&self.expander, &other.expander) - } -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct CrateData { - pub root_file_id: FileId, - pub edition: Edition, - /// The name to display to the end user. - /// This actual crate name can be different in a particular dependent crate - /// or may even be missing for some cases, such as a dummy crate for the code snippet. 
- pub display_name: Option, - pub cfg_options: CfgOptions, - pub env: Env, - pub dependencies: Vec, - pub proc_macro: Vec, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub enum Edition { - Edition2018, - Edition2015, -} - -#[derive(Default, Debug, Clone, PartialEq, Eq)] -pub struct Env { - entries: FxHashMap, -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct Dependency { - pub crate_id: CrateId, - pub name: CrateName, -} - -impl CrateGraph { - pub fn add_crate_root( - &mut self, - file_id: FileId, - edition: Edition, - display_name: Option, - cfg_options: CfgOptions, - env: Env, - proc_macro: Vec<(SmolStr, Arc)>, - ) -> CrateId { - let proc_macro = - proc_macro.into_iter().map(|(name, it)| ProcMacro { name, expander: it }).collect(); - - let data = CrateData { - root_file_id: file_id, - edition, - display_name, - cfg_options, - env, - proc_macro, - dependencies: Vec::new(), - }; - let crate_id = CrateId(self.arena.len() as u32); - let prev = self.arena.insert(crate_id, data); - assert!(prev.is_none()); - crate_id - } - - pub fn add_dep( - &mut self, - from: CrateId, - name: CrateName, - to: CrateId, - ) -> Result<(), CyclicDependenciesError> { - if self.dfs_find(from, to, &mut FxHashSet::default()) { - return Err(CyclicDependenciesError); - } - self.arena.get_mut(&from).unwrap().add_dep(name, to); - Ok(()) - } - - pub fn is_empty(&self) -> bool { - self.arena.is_empty() - } - - pub fn iter(&self) -> impl Iterator + '_ { - self.arena.keys().copied() - } - - /// Returns an iterator over all transitive dependencies of the given crate. 
- pub fn transitive_deps(&self, of: CrateId) -> impl Iterator + '_ { - let mut worklist = vec![of]; - let mut deps = FxHashSet::default(); - - while let Some(krate) = worklist.pop() { - if !deps.insert(krate) { - continue; - } - - worklist.extend(self[krate].dependencies.iter().map(|dep| dep.crate_id)); - } - - deps.remove(&of); - deps.into_iter() - } - - // FIXME: this only finds one crate with the given root; we could have multiple - pub fn crate_id_for_crate_root(&self, file_id: FileId) -> Option { - let (&crate_id, _) = - self.arena.iter().find(|(_crate_id, data)| data.root_file_id == file_id)?; - Some(crate_id) - } - - /// Extends this crate graph by adding a complete disjoint second crate - /// graph. - /// - /// The ids of the crates in the `other` graph are shifted by the return - /// amount. - pub fn extend(&mut self, other: CrateGraph) -> u32 { - let start = self.arena.len() as u32; - self.arena.extend(other.arena.into_iter().map(|(id, mut data)| { - let new_id = id.shift(start); - for dep in &mut data.dependencies { - dep.crate_id = dep.crate_id.shift(start); - } - (new_id, data) - })); - start - } - - fn dfs_find(&self, target: CrateId, from: CrateId, visited: &mut FxHashSet) -> bool { - if !visited.insert(from) { - return false; - } - - if target == from { - return true; - } - - for dep in &self[from].dependencies { - let crate_id = dep.crate_id; - if self.dfs_find(target, crate_id, visited) { - return true; - } - } - false - } -} - -impl ops::Index for CrateGraph { - type Output = CrateData; - fn index(&self, crate_id: CrateId) -> &CrateData { - &self.arena[&crate_id] - } -} - -impl CrateId { - pub fn shift(self, amount: u32) -> CrateId { - CrateId(self.0 + amount) - } -} - -impl CrateData { - fn add_dep(&mut self, name: CrateName, crate_id: CrateId) { - self.dependencies.push(Dependency { name, crate_id }) - } -} - -impl FromStr for Edition { - type Err = ParseEditionError; - - fn from_str(s: &str) -> Result { - let res = match s { - "2015" => 
Edition::Edition2015, - "2018" => Edition::Edition2018, - _ => return Err(ParseEditionError { invalid_input: s.to_string() }), - }; - Ok(res) - } -} - -impl fmt::Display for Edition { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(match self { - Edition::Edition2015 => "2015", - Edition::Edition2018 => "2018", - }) - } -} - -impl FromIterator<(String, String)> for Env { - fn from_iter>(iter: T) -> Self { - Env { entries: FromIterator::from_iter(iter) } - } -} - -impl Env { - pub fn set(&mut self, env: &str, value: String) { - self.entries.insert(env.to_owned(), value); - } - - pub fn get(&self, env: &str) -> Option { - self.entries.get(env).cloned() - } -} - -#[derive(Debug)] -pub struct ParseEditionError { - invalid_input: String, -} - -impl fmt::Display for ParseEditionError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "invalid edition: {:?}", self.invalid_input) - } -} - -impl std::error::Error for ParseEditionError {} - -#[derive(Debug)] -pub struct CyclicDependenciesError; - -#[cfg(test)] -mod tests { - use super::{CfgOptions, CrateGraph, CrateName, Dependency, Edition::Edition2018, Env, FileId}; - - #[test] - fn detect_cyclic_dependency_indirect() { - let mut graph = CrateGraph::default(); - let crate1 = graph.add_crate_root( - FileId(1u32), - Edition2018, - None, - CfgOptions::default(), - Env::default(), - Default::default(), - ); - let crate2 = graph.add_crate_root( - FileId(2u32), - Edition2018, - None, - CfgOptions::default(), - Env::default(), - Default::default(), - ); - let crate3 = graph.add_crate_root( - FileId(3u32), - Edition2018, - None, - CfgOptions::default(), - Env::default(), - Default::default(), - ); - assert!(graph.add_dep(crate1, CrateName::new("crate2").unwrap(), crate2).is_ok()); - assert!(graph.add_dep(crate2, CrateName::new("crate3").unwrap(), crate3).is_ok()); - assert!(graph.add_dep(crate3, CrateName::new("crate1").unwrap(), crate1).is_err()); - } - - #[test] - fn 
detect_cyclic_dependency_direct() { - let mut graph = CrateGraph::default(); - let crate1 = graph.add_crate_root( - FileId(1u32), - Edition2018, - None, - CfgOptions::default(), - Env::default(), - Default::default(), - ); - let crate2 = graph.add_crate_root( - FileId(2u32), - Edition2018, - None, - CfgOptions::default(), - Env::default(), - Default::default(), - ); - assert!(graph.add_dep(crate1, CrateName::new("crate2").unwrap(), crate2).is_ok()); - assert!(graph.add_dep(crate2, CrateName::new("crate2").unwrap(), crate2).is_err()); - } - - #[test] - fn it_works() { - let mut graph = CrateGraph::default(); - let crate1 = graph.add_crate_root( - FileId(1u32), - Edition2018, - None, - CfgOptions::default(), - Env::default(), - Default::default(), - ); - let crate2 = graph.add_crate_root( - FileId(2u32), - Edition2018, - None, - CfgOptions::default(), - Env::default(), - Default::default(), - ); - let crate3 = graph.add_crate_root( - FileId(3u32), - Edition2018, - None, - CfgOptions::default(), - Env::default(), - Default::default(), - ); - assert!(graph.add_dep(crate1, CrateName::new("crate2").unwrap(), crate2).is_ok()); - assert!(graph.add_dep(crate2, CrateName::new("crate3").unwrap(), crate3).is_ok()); - } - - #[test] - fn dashes_are_normalized() { - let mut graph = CrateGraph::default(); - let crate1 = graph.add_crate_root( - FileId(1u32), - Edition2018, - None, - CfgOptions::default(), - Env::default(), - Default::default(), - ); - let crate2 = graph.add_crate_root( - FileId(2u32), - Edition2018, - None, - CfgOptions::default(), - Env::default(), - Default::default(), - ); - assert!(graph - .add_dep(crate1, CrateName::normalize_dashes("crate-name-with-dashes"), crate2) - .is_ok()); - assert_eq!( - graph[crate1].dependencies, - vec![Dependency { - crate_id: crate2, - name: CrateName::new("crate_name_with_dashes").unwrap() - }] - ); - } -} diff --git a/crates/ra_db/src/lib.rs b/crates/ra_db/src/lib.rs deleted file mode 100644 index f25be24fe2..0000000000 --- 
a/crates/ra_db/src/lib.rs +++ /dev/null @@ -1,168 +0,0 @@ -//! ra_db defines basic database traits. The concrete DB is defined by ra_ide. -mod cancellation; -mod input; -pub mod fixture; - -use std::{panic, sync::Arc}; - -use ra_prof::profile; -use ra_syntax::{ast, Parse, SourceFile, TextRange, TextSize}; -use rustc_hash::FxHashSet; - -pub use crate::{ - cancellation::Canceled, - input::{ - CrateData, CrateGraph, CrateId, CrateName, Dependency, Edition, Env, FileId, ProcMacroId, - SourceRoot, SourceRootId, - }, -}; -pub use salsa; -pub use vfs::{file_set::FileSet, VfsPath}; - -#[macro_export] -macro_rules! impl_intern_key { - ($name:ident) => { - impl $crate::salsa::InternKey for $name { - fn from_intern_id(v: $crate::salsa::InternId) -> Self { - $name(v) - } - fn as_intern_id(&self) -> $crate::salsa::InternId { - self.0 - } - } - }; -} - -pub trait Upcast { - fn upcast(&self) -> &T; -} - -pub trait CheckCanceled { - /// Aborts current query if there are pending changes. - /// - /// rust-analyzer needs to be able to answer semantic questions about the - /// code while the code is being modified. A common problem is that a - /// long-running query is being calculated when a new change arrives. - /// - /// We can't just apply the change immediately: this will cause the pending - /// query to see inconsistent state (it will observe an absence of - /// repeatable read). So what we do is we **cancel** all pending queries - /// before applying the change. - /// - /// We implement cancellation by panicking with a special value and catching - /// it on the API boundary. Salsa explicitly supports this use-case. 
- fn check_canceled(&self); - - fn catch_canceled(&self, f: F) -> Result - where - Self: Sized + panic::RefUnwindSafe, - F: FnOnce(&Self) -> T + panic::UnwindSafe, - { - panic::catch_unwind(|| f(self)).map_err(|err| match err.downcast::() { - Ok(canceled) => *canceled, - Err(payload) => panic::resume_unwind(payload), - }) - } -} - -impl CheckCanceled for T { - fn check_canceled(&self) { - if self.salsa_runtime().is_current_revision_canceled() { - Canceled::throw() - } - } -} - -#[derive(Clone, Copy, Debug)] -pub struct FilePosition { - pub file_id: FileId, - pub offset: TextSize, -} - -#[derive(Clone, Copy, Debug, Eq, PartialEq)] -pub struct FileRange { - pub file_id: FileId, - pub range: TextRange, -} - -pub const DEFAULT_LRU_CAP: usize = 128; - -pub trait FileLoader { - /// Text of the file. - fn file_text(&self, file_id: FileId) -> Arc; - /// Note that we intentionally accept a `&str` and not a `&Path` here. This - /// method exists to handle `#[path = "/some/path.rs"] mod foo;` and such, - /// so the input is guaranteed to be utf-8 string. One might be tempted to - /// introduce some kind of "utf-8 path with / separators", but that's a bad idea. Behold - /// `#[path = "C://no/way"]` - fn resolve_path(&self, anchor: FileId, path: &str) -> Option; - fn relevant_crates(&self, file_id: FileId) -> Arc>; -} - -/// Database which stores all significant input facts: source code and project -/// model. Everything else in rust-analyzer is derived from these queries. -#[salsa::query_group(SourceDatabaseStorage)] -pub trait SourceDatabase: CheckCanceled + FileLoader + std::fmt::Debug { - // Parses the file into the syntax tree. - #[salsa::invoke(parse_query)] - fn parse(&self, file_id: FileId) -> Parse; - - /// The crate graph. 
- #[salsa::input] - fn crate_graph(&self) -> Arc; -} - -fn parse_query(db: &dyn SourceDatabase, file_id: FileId) -> Parse { - let _p = profile("parse_query").detail(|| format!("{:?}", file_id)); - let text = db.file_text(file_id); - SourceFile::parse(&*text) -} - -/// We don't want to give HIR knowledge of source roots, hence we extract these -/// methods into a separate DB. -#[salsa::query_group(SourceDatabaseExtStorage)] -pub trait SourceDatabaseExt: SourceDatabase { - #[salsa::input] - fn file_text(&self, file_id: FileId) -> Arc; - /// Path to a file, relative to the root of its source root. - /// Source root of the file. - #[salsa::input] - fn file_source_root(&self, file_id: FileId) -> SourceRootId; - /// Contents of the source root. - #[salsa::input] - fn source_root(&self, id: SourceRootId) -> Arc; - - fn source_root_crates(&self, id: SourceRootId) -> Arc>; -} - -fn source_root_crates(db: &dyn SourceDatabaseExt, id: SourceRootId) -> Arc> { - let graph = db.crate_graph(); - let res = graph - .iter() - .filter(|&krate| { - let root_file = graph[krate].root_file_id; - db.file_source_root(root_file) == id - }) - .collect::>(); - Arc::new(res) -} - -/// Silly workaround for cyclic deps between the traits -pub struct FileLoaderDelegate(pub T); - -impl FileLoader for FileLoaderDelegate<&'_ T> { - fn file_text(&self, file_id: FileId) -> Arc { - SourceDatabaseExt::file_text(self.0, file_id) - } - fn resolve_path(&self, anchor: FileId, path: &str) -> Option { - // FIXME: this *somehow* should be platform agnostic... 
- let source_root = self.0.file_source_root(anchor); - let source_root = self.0.source_root(source_root); - source_root.file_set.resolve_path(anchor, path) - } - - fn relevant_crates(&self, file_id: FileId) -> Arc> { - let source_root = self.0.file_source_root(file_id); - self.0.source_root_crates(source_root) - } -} diff --git a/crates/ra_fmt/Cargo.toml b/crates/ra_fmt/Cargo.toml deleted file mode 100644 index b4ef93f2b2..0000000000 --- a/crates/ra_fmt/Cargo.toml +++ /dev/null @@ -1,15 +0,0 @@ -[package] -edition = "2018" -name = "ra_fmt" -version = "0.1.0" -authors = ["rust-analyzer developers"] -publish = false -license = "MIT OR Apache-2.0" - -[lib] -doctest = false - -[dependencies] -itertools = "0.9.0" - -ra_syntax = { path = "../ra_syntax" } diff --git a/crates/ra_fmt/src/lib.rs b/crates/ra_fmt/src/lib.rs deleted file mode 100644 index f910ded9da..0000000000 --- a/crates/ra_fmt/src/lib.rs +++ /dev/null @@ -1,96 +0,0 @@ -//! This crate provides some utilities for indenting rust code. - -use std::iter::successors; - -use itertools::Itertools; -use ra_syntax::{ - ast::{self, AstNode, AstToken}, - SmolStr, SyntaxKind, - SyntaxKind::*, - SyntaxNode, SyntaxToken, T, -}; - -pub fn reindent(text: &str, indent: &str) -> String { - let indent = format!("\n{}", indent); - text.lines().intersperse(&indent).collect() -} - -/// If the node is on the beginning of the line, calculate indent. -pub fn leading_indent(node: &SyntaxNode) -> Option { - for token in prev_tokens(node.first_token()?) 
{ - if let Some(ws) = ast::Whitespace::cast(token.clone()) { - let ws_text = ws.text(); - if let Some(pos) = ws_text.rfind('\n') { - return Some(ws_text[pos + 1..].into()); - } - } - if token.text().contains('\n') { - break; - } - } - None -} - -fn prev_tokens(token: SyntaxToken) -> impl Iterator { - successors(token.prev_token(), |token| token.prev_token()) -} - -pub fn unwrap_trivial_block(block: ast::BlockExpr) -> ast::Expr { - extract_trivial_expression(&block) - .filter(|expr| !expr.syntax().text().contains_char('\n')) - .unwrap_or_else(|| block.into()) -} - -pub fn extract_trivial_expression(block: &ast::BlockExpr) -> Option { - let has_anything_else = |thing: &SyntaxNode| -> bool { - let mut non_trivial_children = - block.syntax().children_with_tokens().filter(|it| match it.kind() { - WHITESPACE | T!['{'] | T!['}'] => false, - _ => it.as_node() != Some(thing), - }); - non_trivial_children.next().is_some() - }; - - if let Some(expr) = block.expr() { - if has_anything_else(expr.syntax()) { - return None; - } - return Some(expr); - } - // Unwrap `{ continue; }` - let (stmt,) = block.statements().next_tuple()?; - if let ast::Stmt::ExprStmt(expr_stmt) = stmt { - if has_anything_else(expr_stmt.syntax()) { - return None; - } - let expr = expr_stmt.expr()?; - match expr.syntax().kind() { - CONTINUE_EXPR | BREAK_EXPR | RETURN_EXPR => return Some(expr), - _ => (), - } - } - None -} - -pub fn compute_ws(left: SyntaxKind, right: SyntaxKind) -> &'static str { - match left { - T!['('] | T!['['] => return "", - T!['{'] => { - if let USE_TREE = right { - return ""; - } - } - _ => (), - } - match right { - T![')'] | T![']'] => return "", - T!['}'] => { - if let USE_TREE = left { - return ""; - } - } - T![.] 
=> return "", - _ => (), - } - " " -} diff --git a/crates/ra_hir/Cargo.toml b/crates/ra_hir/Cargo.toml deleted file mode 100644 index c260bb193c..0000000000 --- a/crates/ra_hir/Cargo.toml +++ /dev/null @@ -1,25 +0,0 @@ -[package] -edition = "2018" -name = "ra_hir" -version = "0.1.0" -authors = ["rust-analyzer developers"] -license = "MIT OR Apache-2.0" - -[lib] -doctest = false - -[dependencies] -log = "0.4.8" -rustc-hash = "1.1.0" -either = "1.5.3" -arrayvec = "0.5.1" - -itertools = "0.9.0" - -stdx = { path = "../stdx" } -ra_syntax = { path = "../ra_syntax" } -ra_db = { path = "../ra_db" } -ra_prof = { path = "../ra_prof" } -hir_expand = { path = "../ra_hir_expand", package = "ra_hir_expand" } -hir_def = { path = "../ra_hir_def", package = "ra_hir_def" } -hir_ty = { path = "../ra_hir_ty", package = "ra_hir_ty" } diff --git a/crates/ra_hir/src/code_model.rs b/crates/ra_hir/src/code_model.rs deleted file mode 100644 index 27cdabea03..0000000000 --- a/crates/ra_hir/src/code_model.rs +++ /dev/null @@ -1,1695 +0,0 @@ -//! 
FIXME: write short doc here -use std::{iter, sync::Arc}; - -use arrayvec::ArrayVec; -use either::Either; -use hir_def::{ - adt::StructKind, - adt::VariantData, - builtin_type::BuiltinType, - docs::Documentation, - expr::{BindingAnnotation, Pat, PatId}, - import_map, - per_ns::PerNs, - resolver::{HasResolver, Resolver}, - src::HasSource as _, - type_ref::{Mutability, TypeRef}, - AdtId, AssocContainerId, ConstId, DefWithBodyId, EnumId, FunctionId, GenericDefId, HasModule, - ImplId, LocalEnumVariantId, LocalFieldId, LocalModuleId, Lookup, ModuleId, StaticId, StructId, - TraitId, TypeAliasId, TypeParamId, UnionId, -}; -use hir_expand::{ - diagnostics::DiagnosticSink, - name::{name, AsName}, - MacroDefId, MacroDefKind, -}; -use hir_ty::{ - autoderef, - display::{HirDisplayError, HirFormatter}, - method_resolution, ApplicationTy, CallableDefId, Canonical, FnSig, GenericPredicate, - InEnvironment, Substs, TraitEnvironment, Ty, TyDefId, TypeCtor, -}; -use ra_db::{CrateId, Edition, FileId}; -use ra_prof::profile; -use ra_syntax::{ - ast::{self, AttrsOwner, NameOwner}, - AstNode, -}; -use rustc_hash::FxHashSet; -use stdx::impl_from; - -use crate::{ - db::{DefDatabase, HirDatabase}, - has_source::HasSource, - HirDisplay, InFile, Name, -}; - -/// hir::Crate describes a single crate. It's the main interface with which -/// a crate's dependencies interact. Mostly, it should be just a proxy for the -/// root module. -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct Crate { - pub(crate) id: CrateId, -} - -#[derive(Debug)] -pub struct CrateDependency { - pub krate: Crate, - pub name: Name, -} - -impl Crate { - pub fn dependencies(self, db: &dyn HirDatabase) -> Vec { - db.crate_graph()[self.id] - .dependencies - .iter() - .map(|dep| { - let krate = Crate { id: dep.crate_id }; - let name = dep.as_name(); - CrateDependency { krate, name } - }) - .collect() - } - - // FIXME: add `transitive_reverse_dependencies`. 
- pub fn reverse_dependencies(self, db: &dyn HirDatabase) -> Vec { - let crate_graph = db.crate_graph(); - crate_graph - .iter() - .filter(|&krate| { - crate_graph[krate].dependencies.iter().any(|it| it.crate_id == self.id) - }) - .map(|id| Crate { id }) - .collect() - } - - pub fn root_module(self, db: &dyn HirDatabase) -> Option { - let module_id = db.crate_def_map(self.id).root; - Some(Module::new(self, module_id)) - } - - pub fn root_file(self, db: &dyn HirDatabase) -> FileId { - db.crate_graph()[self.id].root_file_id - } - - pub fn edition(self, db: &dyn HirDatabase) -> Edition { - db.crate_graph()[self.id].edition - } - - pub fn display_name(self, db: &dyn HirDatabase) -> Option { - db.crate_graph()[self.id].display_name.clone() - } - - pub fn query_external_importables( - self, - db: &dyn DefDatabase, - query: &str, - ) -> impl Iterator> { - import_map::search_dependencies( - db, - self.into(), - import_map::Query::new(query).anchor_end().case_sensitive().limit(40), - ) - .into_iter() - .map(|item| match item { - ItemInNs::Types(mod_id) | ItemInNs::Values(mod_id) => Either::Left(mod_id.into()), - ItemInNs::Macros(mac_id) => Either::Right(mac_id.into()), - }) - } - - pub fn all(db: &dyn HirDatabase) -> Vec { - db.crate_graph().iter().map(|id| Crate { id }).collect() - } -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct Module { - pub(crate) id: ModuleId, -} - -/// The defs which can be visible in the module. -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub enum ModuleDef { - Module(Module), - Function(Function), - Adt(Adt), - // Can't be directly declared, but can be imported. 
- EnumVariant(EnumVariant), - Const(Const), - Static(Static), - Trait(Trait), - TypeAlias(TypeAlias), - BuiltinType(BuiltinType), -} -impl_from!( - Module, - Function, - Adt(Struct, Enum, Union), - EnumVariant, - Const, - Static, - Trait, - TypeAlias, - BuiltinType - for ModuleDef -); - -impl ModuleDef { - pub fn module(self, db: &dyn HirDatabase) -> Option { - match self { - ModuleDef::Module(it) => it.parent(db), - ModuleDef::Function(it) => Some(it.module(db)), - ModuleDef::Adt(it) => Some(it.module(db)), - ModuleDef::EnumVariant(it) => Some(it.module(db)), - ModuleDef::Const(it) => Some(it.module(db)), - ModuleDef::Static(it) => Some(it.module(db)), - ModuleDef::Trait(it) => Some(it.module(db)), - ModuleDef::TypeAlias(it) => Some(it.module(db)), - ModuleDef::BuiltinType(_) => None, - } - } - - pub fn definition_visibility(&self, db: &dyn HirDatabase) -> Option { - let module = match self { - ModuleDef::Module(it) => it.parent(db)?, - ModuleDef::Function(it) => return Some(it.visibility(db)), - ModuleDef::Adt(it) => it.module(db), - ModuleDef::EnumVariant(it) => { - let parent = it.parent_enum(db); - let module = it.module(db); - return module.visibility_of(db, &ModuleDef::Adt(Adt::Enum(parent))); - } - ModuleDef::Const(it) => return Some(it.visibility(db)), - ModuleDef::Static(it) => it.module(db), - ModuleDef::Trait(it) => it.module(db), - ModuleDef::TypeAlias(it) => return Some(it.visibility(db)), - ModuleDef::BuiltinType(_) => return None, - }; - - module.visibility_of(db, self) - } - - pub fn name(self, db: &dyn HirDatabase) -> Option { - match self { - ModuleDef::Adt(it) => Some(it.name(db)), - ModuleDef::Trait(it) => Some(it.name(db)), - ModuleDef::Function(it) => Some(it.name(db)), - ModuleDef::EnumVariant(it) => Some(it.name(db)), - ModuleDef::TypeAlias(it) => Some(it.name(db)), - - ModuleDef::Module(it) => it.name(db), - ModuleDef::Const(it) => it.name(db), - ModuleDef::Static(it) => it.name(db), - - ModuleDef::BuiltinType(it) => Some(it.as_name()), - 
} - } -} - -pub use hir_def::{ - attr::Attrs, item_scope::ItemInNs, item_tree::ItemTreeNode, visibility::Visibility, - AssocItemId, AssocItemLoc, -}; - -impl Module { - pub(crate) fn new(krate: Crate, crate_module_id: LocalModuleId) -> Module { - Module { id: ModuleId { krate: krate.id, local_id: crate_module_id } } - } - - /// Name of this module. - pub fn name(self, db: &dyn HirDatabase) -> Option { - let def_map = db.crate_def_map(self.id.krate); - let parent = def_map[self.id.local_id].parent?; - def_map[parent].children.iter().find_map(|(name, module_id)| { - if *module_id == self.id.local_id { - Some(name.clone()) - } else { - None - } - }) - } - - /// Returns the crate this module is part of. - pub fn krate(self) -> Crate { - Crate { id: self.id.krate } - } - - /// Topmost parent of this module. Every module has a `crate_root`, but some - /// might be missing `krate`. This can happen if a module's file is not included - /// in the module tree of any target in `Cargo.toml`. - pub fn crate_root(self, db: &dyn HirDatabase) -> Module { - let def_map = db.crate_def_map(self.id.krate); - self.with_module_id(def_map.root) - } - - /// Iterates over all child modules. - pub fn children(self, db: &dyn HirDatabase) -> impl Iterator { - let def_map = db.crate_def_map(self.id.krate); - let children = def_map[self.id.local_id] - .children - .iter() - .map(|(_, module_id)| self.with_module_id(*module_id)) - .collect::>(); - children.into_iter() - } - - /// Finds a parent module. - pub fn parent(self, db: &dyn HirDatabase) -> Option { - let def_map = db.crate_def_map(self.id.krate); - let parent_id = def_map[self.id.local_id].parent?; - Some(self.with_module_id(parent_id)) - } - - pub fn path_to_root(self, db: &dyn HirDatabase) -> Vec { - let mut res = vec![self]; - let mut curr = self; - while let Some(next) = curr.parent(db) { - res.push(next); - curr = next - } - res - } - - /// Returns a `ModuleScope`: a set of items, visible in this module. 
- pub fn scope( - self, - db: &dyn HirDatabase, - visible_from: Option, - ) -> Vec<(Name, ScopeDef)> { - db.crate_def_map(self.id.krate)[self.id.local_id] - .scope - .entries() - .filter_map(|(name, def)| { - if let Some(m) = visible_from { - let filtered = - def.filter_visibility(|vis| vis.is_visible_from(db.upcast(), m.id)); - if filtered.is_none() && !def.is_none() { - None - } else { - Some((name, filtered)) - } - } else { - Some((name, def)) - } - }) - .flat_map(|(name, def)| { - ScopeDef::all_items(def).into_iter().map(move |item| (name.clone(), item)) - }) - .collect() - } - - pub fn visibility_of(self, db: &dyn HirDatabase, def: &ModuleDef) -> Option { - db.crate_def_map(self.id.krate)[self.id.local_id].scope.visibility_of(def.clone().into()) - } - - pub fn diagnostics(self, db: &dyn HirDatabase, sink: &mut DiagnosticSink) { - let _p = profile("Module::diagnostics"); - let crate_def_map = db.crate_def_map(self.id.krate); - crate_def_map.add_diagnostics(db.upcast(), self.id.local_id, sink); - for decl in self.declarations(db) { - match decl { - crate::ModuleDef::Function(f) => f.diagnostics(db, sink), - crate::ModuleDef::Module(m) => { - // Only add diagnostics from inline modules - if crate_def_map[m.id.local_id].origin.is_inline() { - m.diagnostics(db, sink) - } - } - _ => (), - } - } - - for impl_def in self.impl_defs(db) { - for item in impl_def.items(db) { - if let AssocItem::Function(f) = item { - f.diagnostics(db, sink); - } - } - } - } - - pub fn declarations(self, db: &dyn HirDatabase) -> Vec { - let def_map = db.crate_def_map(self.id.krate); - def_map[self.id.local_id].scope.declarations().map(ModuleDef::from).collect() - } - - pub fn impl_defs(self, db: &dyn HirDatabase) -> Vec { - let def_map = db.crate_def_map(self.id.krate); - def_map[self.id.local_id].scope.impls().map(ImplDef::from).collect() - } - - pub(crate) fn with_module_id(self, module_id: LocalModuleId) -> Module { - Module::new(self.krate(), module_id) - } - - /// Finds a path that 
can be used to refer to the given item from within - /// this module, if possible. - pub fn find_use_path( - self, - db: &dyn DefDatabase, - item: impl Into, - ) -> Option { - hir_def::find_path::find_path(db, item.into(), self.into()) - } -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct Field { - pub(crate) parent: VariantDef, - pub(crate) id: LocalFieldId, -} - -#[derive(Debug, PartialEq, Eq)] -pub enum FieldSource { - Named(ast::RecordField), - Pos(ast::TupleField), -} - -impl Field { - pub fn name(&self, db: &dyn HirDatabase) -> Name { - self.parent.variant_data(db).fields()[self.id].name.clone() - } - - /// Returns the type as in the signature of the struct (i.e., with - /// placeholder types for type parameters). This is good for showing - /// signature help, but not so good to actually get the type of the field - /// when you actually have a variable of the struct. - pub fn signature_ty(&self, db: &dyn HirDatabase) -> Type { - let var_id = self.parent.into(); - let generic_def_id: GenericDefId = match self.parent { - VariantDef::Struct(it) => it.id.into(), - VariantDef::Union(it) => it.id.into(), - VariantDef::EnumVariant(it) => it.parent.id.into(), - }; - let substs = Substs::type_params(db, generic_def_id); - let ty = db.field_types(var_id)[self.id].clone().subst(&substs); - Type::new(db, self.parent.module(db).id.krate, var_id, ty) - } - - pub fn parent_def(&self, _db: &dyn HirDatabase) -> VariantDef { - self.parent - } -} - -impl HasVisibility for Field { - fn visibility(&self, db: &dyn HirDatabase) -> Visibility { - let variant_data = self.parent.variant_data(db); - let visibility = &variant_data.fields()[self.id].visibility; - let parent_id: hir_def::VariantId = self.parent.into(); - visibility.resolve(db.upcast(), &parent_id.resolver(db.upcast())) - } -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct Struct { - pub(crate) id: StructId, -} - -impl Struct { - pub fn module(self, db: &dyn HirDatabase) -> Module { - 
Module { id: self.id.lookup(db.upcast()).container.module(db.upcast()) } - } - - pub fn krate(self, db: &dyn HirDatabase) -> Option { - Some(self.module(db).krate()) - } - - pub fn name(self, db: &dyn HirDatabase) -> Name { - db.struct_data(self.id).name.clone() - } - - pub fn fields(self, db: &dyn HirDatabase) -> Vec { - db.struct_data(self.id) - .variant_data - .fields() - .iter() - .map(|(id, _)| Field { parent: self.into(), id }) - .collect() - } - - pub fn ty(self, db: &dyn HirDatabase) -> Type { - Type::from_def(db, self.id.lookup(db.upcast()).container.module(db.upcast()).krate, self.id) - } - - fn variant_data(self, db: &dyn HirDatabase) -> Arc { - db.struct_data(self.id).variant_data.clone() - } -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct Union { - pub(crate) id: UnionId, -} - -impl Union { - pub fn name(self, db: &dyn HirDatabase) -> Name { - db.union_data(self.id).name.clone() - } - - pub fn module(self, db: &dyn HirDatabase) -> Module { - Module { id: self.id.lookup(db.upcast()).container.module(db.upcast()) } - } - - pub fn ty(self, db: &dyn HirDatabase) -> Type { - Type::from_def(db, self.id.lookup(db.upcast()).container.module(db.upcast()).krate, self.id) - } - - pub fn fields(self, db: &dyn HirDatabase) -> Vec { - db.union_data(self.id) - .variant_data - .fields() - .iter() - .map(|(id, _)| Field { parent: self.into(), id }) - .collect() - } - - fn variant_data(self, db: &dyn HirDatabase) -> Arc { - db.union_data(self.id).variant_data.clone() - } -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct Enum { - pub(crate) id: EnumId, -} - -impl Enum { - pub fn module(self, db: &dyn HirDatabase) -> Module { - Module { id: self.id.lookup(db.upcast()).container.module(db.upcast()) } - } - - pub fn krate(self, db: &dyn HirDatabase) -> Option { - Some(self.module(db).krate()) - } - - pub fn name(self, db: &dyn HirDatabase) -> Name { - db.enum_data(self.id).name.clone() - } - - pub fn variants(self, db: &dyn 
HirDatabase) -> Vec { - db.enum_data(self.id) - .variants - .iter() - .map(|(id, _)| EnumVariant { parent: self, id }) - .collect() - } - - pub fn ty(self, db: &dyn HirDatabase) -> Type { - Type::from_def(db, self.id.lookup(db.upcast()).container.module(db.upcast()).krate, self.id) - } -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct EnumVariant { - pub(crate) parent: Enum, - pub(crate) id: LocalEnumVariantId, -} - -impl EnumVariant { - pub fn module(self, db: &dyn HirDatabase) -> Module { - self.parent.module(db) - } - pub fn parent_enum(self, _db: &dyn HirDatabase) -> Enum { - self.parent - } - - pub fn name(self, db: &dyn HirDatabase) -> Name { - db.enum_data(self.parent.id).variants[self.id].name.clone() - } - - pub fn fields(self, db: &dyn HirDatabase) -> Vec { - self.variant_data(db) - .fields() - .iter() - .map(|(id, _)| Field { parent: self.into(), id }) - .collect() - } - - pub fn kind(self, db: &dyn HirDatabase) -> StructKind { - self.variant_data(db).kind() - } - - pub(crate) fn variant_data(self, db: &dyn HirDatabase) -> Arc { - db.enum_data(self.parent.id).variants[self.id].variant_data.clone() - } -} - -/// A Data Type -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] -pub enum Adt { - Struct(Struct), - Union(Union), - Enum(Enum), -} -impl_from!(Struct, Union, Enum for Adt); - -impl Adt { - pub fn has_non_default_type_params(self, db: &dyn HirDatabase) -> bool { - let subst = db.generic_defaults(self.into()); - subst.iter().any(|ty| &ty.value == &Ty::Unknown) - } - - /// Turns this ADT into a type. Any type parameters of the ADT will be - /// turned into unknown types, which is good for e.g. finding the most - /// general set of completions, but will not look very nice when printed. 
- pub fn ty(self, db: &dyn HirDatabase) -> Type { - let id = AdtId::from(self); - Type::from_def(db, id.module(db.upcast()).krate, id) - } - - pub fn module(self, db: &dyn HirDatabase) -> Module { - match self { - Adt::Struct(s) => s.module(db), - Adt::Union(s) => s.module(db), - Adt::Enum(e) => e.module(db), - } - } - - pub fn krate(self, db: &dyn HirDatabase) -> Option { - Some(self.module(db).krate()) - } - - pub fn name(self, db: &dyn HirDatabase) -> Name { - match self { - Adt::Struct(s) => s.name(db), - Adt::Union(u) => u.name(db), - Adt::Enum(e) => e.name(db), - } - } -} - -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] -pub enum VariantDef { - Struct(Struct), - Union(Union), - EnumVariant(EnumVariant), -} -impl_from!(Struct, Union, EnumVariant for VariantDef); - -impl VariantDef { - pub fn fields(self, db: &dyn HirDatabase) -> Vec { - match self { - VariantDef::Struct(it) => it.fields(db), - VariantDef::Union(it) => it.fields(db), - VariantDef::EnumVariant(it) => it.fields(db), - } - } - - pub fn module(self, db: &dyn HirDatabase) -> Module { - match self { - VariantDef::Struct(it) => it.module(db), - VariantDef::Union(it) => it.module(db), - VariantDef::EnumVariant(it) => it.module(db), - } - } - - pub fn name(&self, db: &dyn HirDatabase) -> Name { - match self { - VariantDef::Struct(s) => s.name(db), - VariantDef::Union(u) => u.name(db), - VariantDef::EnumVariant(e) => e.name(db), - } - } - - pub(crate) fn variant_data(self, db: &dyn HirDatabase) -> Arc { - match self { - VariantDef::Struct(it) => it.variant_data(db), - VariantDef::Union(it) => it.variant_data(db), - VariantDef::EnumVariant(it) => it.variant_data(db), - } - } -} - -/// The defs which have a body. 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub enum DefWithBody { - Function(Function), - Static(Static), - Const(Const), -} -impl_from!(Function, Const, Static for DefWithBody); - -impl DefWithBody { - pub fn module(self, db: &dyn HirDatabase) -> Module { - match self { - DefWithBody::Const(c) => c.module(db), - DefWithBody::Function(f) => f.module(db), - DefWithBody::Static(s) => s.module(db), - } - } - - pub fn name(self, db: &dyn HirDatabase) -> Option { - match self { - DefWithBody::Function(f) => Some(f.name(db)), - DefWithBody::Static(s) => s.name(db), - DefWithBody::Const(c) => c.name(db), - } - } -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct Function { - pub(crate) id: FunctionId, -} - -impl Function { - pub fn module(self, db: &dyn HirDatabase) -> Module { - self.id.lookup(db.upcast()).module(db.upcast()).into() - } - - pub fn krate(self, db: &dyn HirDatabase) -> Option { - Some(self.module(db).krate()) - } - - pub fn name(self, db: &dyn HirDatabase) -> Name { - db.function_data(self.id).name.clone() - } - - pub fn has_self_param(self, db: &dyn HirDatabase) -> bool { - db.function_data(self.id).has_self_param - } - - pub fn params(self, db: &dyn HirDatabase) -> Vec { - db.function_data(self.id).params.clone() - } - - pub fn is_unsafe(self, db: &dyn HirDatabase) -> bool { - db.function_data(self.id).is_unsafe - } - - pub fn diagnostics(self, db: &dyn HirDatabase, sink: &mut DiagnosticSink) { - hir_ty::diagnostics::validate_body(db, self.id.into(), sink) - } -} - -impl HasVisibility for Function { - fn visibility(&self, db: &dyn HirDatabase) -> Visibility { - let function_data = db.function_data(self.id); - let visibility = &function_data.visibility; - visibility.resolve(db.upcast(), &self.id.resolver(db.upcast())) - } -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct Const { - pub(crate) id: ConstId, -} - -impl Const { - pub fn module(self, db: &dyn HirDatabase) -> Module { - Module { id: 
self.id.lookup(db.upcast()).module(db.upcast()) } - } - - pub fn krate(self, db: &dyn HirDatabase) -> Option { - Some(self.module(db).krate()) - } - - pub fn name(self, db: &dyn HirDatabase) -> Option { - db.const_data(self.id).name.clone() - } -} - -impl HasVisibility for Const { - fn visibility(&self, db: &dyn HirDatabase) -> Visibility { - let function_data = db.const_data(self.id); - let visibility = &function_data.visibility; - visibility.resolve(db.upcast(), &self.id.resolver(db.upcast())) - } -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct Static { - pub(crate) id: StaticId, -} - -impl Static { - pub fn module(self, db: &dyn HirDatabase) -> Module { - Module { id: self.id.lookup(db.upcast()).module(db.upcast()) } - } - - pub fn krate(self, db: &dyn HirDatabase) -> Option { - Some(self.module(db).krate()) - } - - pub fn name(self, db: &dyn HirDatabase) -> Option { - db.static_data(self.id).name.clone() - } - - pub fn is_mut(self, db: &dyn HirDatabase) -> bool { - db.static_data(self.id).mutable - } -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct Trait { - pub(crate) id: TraitId, -} - -impl Trait { - pub fn module(self, db: &dyn HirDatabase) -> Module { - Module { id: self.id.lookup(db.upcast()).container.module(db.upcast()) } - } - - pub fn name(self, db: &dyn HirDatabase) -> Name { - db.trait_data(self.id).name.clone() - } - - pub fn items(self, db: &dyn HirDatabase) -> Vec { - db.trait_data(self.id).items.iter().map(|(_name, it)| (*it).into()).collect() - } - - pub fn is_auto(self, db: &dyn HirDatabase) -> bool { - db.trait_data(self.id).auto - } -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct TypeAlias { - pub(crate) id: TypeAliasId, -} - -impl TypeAlias { - pub fn has_non_default_type_params(self, db: &dyn HirDatabase) -> bool { - let subst = db.generic_defaults(self.id.into()); - subst.iter().any(|ty| &ty.value == &Ty::Unknown) - } - - pub fn module(self, db: &dyn HirDatabase) -> Module { - 
Module { id: self.id.lookup(db.upcast()).module(db.upcast()) } - } - - pub fn krate(self, db: &dyn HirDatabase) -> Option { - Some(self.module(db).krate()) - } - - pub fn type_ref(self, db: &dyn HirDatabase) -> Option { - db.type_alias_data(self.id).type_ref.clone() - } - - pub fn ty(self, db: &dyn HirDatabase) -> Type { - Type::from_def(db, self.id.lookup(db.upcast()).module(db.upcast()).krate, self.id) - } - - pub fn name(self, db: &dyn HirDatabase) -> Name { - db.type_alias_data(self.id).name.clone() - } -} - -impl HasVisibility for TypeAlias { - fn visibility(&self, db: &dyn HirDatabase) -> Visibility { - let function_data = db.type_alias_data(self.id); - let visibility = &function_data.visibility; - visibility.resolve(db.upcast(), &self.id.resolver(db.upcast())) - } -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct MacroDef { - pub(crate) id: MacroDefId, -} - -impl MacroDef { - /// FIXME: right now, this just returns the root module of the crate that - /// defines this macro. The reasons for this is that macros are expanded - /// early, in `ra_hir_expand`, where modules simply do not exist yet. - pub fn module(self, db: &dyn HirDatabase) -> Option { - let krate = self.id.krate?; - let module_id = db.crate_def_map(krate).root; - Some(Module::new(Crate { id: krate }, module_id)) - } - - /// XXX: this parses the file - pub fn name(self, db: &dyn HirDatabase) -> Option { - self.source(db).value.name().map(|it| it.as_name()) - } - - /// Indicate it is a proc-macro - pub fn is_proc_macro(&self) -> bool { - matches!(self.id.kind, MacroDefKind::CustomDerive(_)) - } - - /// Indicate it is a derive macro - pub fn is_derive_macro(&self) -> bool { - matches!(self.id.kind, MacroDefKind::CustomDerive(_) | MacroDefKind::BuiltInDerive(_)) - } -} - -/// Invariant: `inner.as_assoc_item(db).is_some()` -/// We do not actively enforce this invariant. 
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub enum AssocItem { - Function(Function), - Const(Const), - TypeAlias(TypeAlias), -} -pub enum AssocItemContainer { - Trait(Trait), - ImplDef(ImplDef), -} -pub trait AsAssocItem { - fn as_assoc_item(self, db: &dyn HirDatabase) -> Option; -} - -impl AsAssocItem for Function { - fn as_assoc_item(self, db: &dyn HirDatabase) -> Option { - as_assoc_item(db, AssocItem::Function, self.id) - } -} -impl AsAssocItem for Const { - fn as_assoc_item(self, db: &dyn HirDatabase) -> Option { - as_assoc_item(db, AssocItem::Const, self.id) - } -} -impl AsAssocItem for TypeAlias { - fn as_assoc_item(self, db: &dyn HirDatabase) -> Option { - as_assoc_item(db, AssocItem::TypeAlias, self.id) - } -} -fn as_assoc_item(db: &dyn HirDatabase, ctor: CTOR, id: ID) -> Option -where - ID: Lookup>, - DEF: From, - CTOR: FnOnce(DEF) -> AssocItem, - AST: ItemTreeNode, -{ - match id.lookup(db.upcast()).container { - AssocContainerId::TraitId(_) | AssocContainerId::ImplId(_) => Some(ctor(DEF::from(id))), - AssocContainerId::ContainerId(_) => None, - } -} - -impl AssocItem { - pub fn module(self, db: &dyn HirDatabase) -> Module { - match self { - AssocItem::Function(f) => f.module(db), - AssocItem::Const(c) => c.module(db), - AssocItem::TypeAlias(t) => t.module(db), - } - } - pub fn container(self, db: &dyn HirDatabase) -> AssocItemContainer { - let container = match self { - AssocItem::Function(it) => it.id.lookup(db.upcast()).container, - AssocItem::Const(it) => it.id.lookup(db.upcast()).container, - AssocItem::TypeAlias(it) => it.id.lookup(db.upcast()).container, - }; - match container { - AssocContainerId::TraitId(id) => AssocItemContainer::Trait(id.into()), - AssocContainerId::ImplId(id) => AssocItemContainer::ImplDef(id.into()), - AssocContainerId::ContainerId(_) => panic!("invalid AssocItem"), - } - } -} - -impl HasVisibility for AssocItem { - fn visibility(&self, db: &dyn HirDatabase) -> Visibility { - match self { - AssocItem::Function(f) 
=> f.visibility(db), - AssocItem::Const(c) => c.visibility(db), - AssocItem::TypeAlias(t) => t.visibility(db), - } - } -} - -#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)] -pub enum GenericDef { - Function(Function), - Adt(Adt), - Trait(Trait), - TypeAlias(TypeAlias), - ImplDef(ImplDef), - // enum variants cannot have generics themselves, but their parent enums - // can, and this makes some code easier to write - EnumVariant(EnumVariant), - // consts can have type parameters from their parents (i.e. associated consts of traits) - Const(Const), -} -impl_from!( - Function, - Adt(Struct, Enum, Union), - Trait, - TypeAlias, - ImplDef, - EnumVariant, - Const - for GenericDef -); - -impl GenericDef { - pub fn params(self, db: &dyn HirDatabase) -> Vec { - let generics: Arc = db.generic_params(self.into()); - generics - .types - .iter() - .map(|(local_id, _)| TypeParam { id: TypeParamId { parent: self.into(), local_id } }) - .collect() - } -} - -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] -pub struct Local { - pub(crate) parent: DefWithBodyId, - pub(crate) pat_id: PatId, -} - -impl Local { - pub fn is_param(self, db: &dyn HirDatabase) -> bool { - let src = self.source(db); - match src.value { - Either::Left(bind_pat) => { - bind_pat.syntax().ancestors().any(|it| ast::Param::can_cast(it.kind())) - } - Either::Right(_self_param) => true, - } - } - - // FIXME: why is this an option? It shouldn't be? - pub fn name(self, db: &dyn HirDatabase) -> Option { - let body = db.body(self.parent.into()); - match &body[self.pat_id] { - Pat::Bind { name, .. } => Some(name.clone()), - _ => None, - } - } - - pub fn is_self(self, db: &dyn HirDatabase) -> bool { - self.name(db) == Some(name![self]) - } - - pub fn is_mut(self, db: &dyn HirDatabase) -> bool { - let body = db.body(self.parent.into()); - match &body[self.pat_id] { - Pat::Bind { mode, .. 
} => match mode { - BindingAnnotation::Mutable | BindingAnnotation::RefMut => true, - _ => false, - }, - _ => false, - } - } - - pub fn parent(self, _db: &dyn HirDatabase) -> DefWithBody { - self.parent.into() - } - - pub fn module(self, db: &dyn HirDatabase) -> Module { - self.parent(db).module(db) - } - - pub fn ty(self, db: &dyn HirDatabase) -> Type { - let def = DefWithBodyId::from(self.parent); - let infer = db.infer(def); - let ty = infer[self.pat_id].clone(); - let krate = def.module(db.upcast()).krate; - Type::new(db, krate, def, ty) - } - - pub fn source(self, db: &dyn HirDatabase) -> InFile> { - let (_body, source_map) = db.body_with_source_map(self.parent.into()); - let src = source_map.pat_syntax(self.pat_id).unwrap(); // Hmm... - let root = src.file_syntax(db.upcast()); - src.map(|ast| { - ast.map_left(|it| it.cast().unwrap().to_node(&root)).map_right(|it| it.to_node(&root)) - }) - } -} - -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] -pub struct TypeParam { - pub(crate) id: TypeParamId, -} - -impl TypeParam { - pub fn name(self, db: &dyn HirDatabase) -> Name { - let params = db.generic_params(self.id.parent); - params.types[self.id.local_id].name.clone().unwrap_or_else(Name::missing) - } - - pub fn module(self, db: &dyn HirDatabase) -> Module { - self.id.parent.module(db.upcast()).into() - } - - pub fn ty(self, db: &dyn HirDatabase) -> Type { - let resolver = self.id.parent.resolver(db.upcast()); - let environment = TraitEnvironment::lower(db, &resolver); - let ty = Ty::Placeholder(self.id); - Type { - krate: self.id.parent.module(db.upcast()).krate, - ty: InEnvironment { value: ty, environment }, - } - } - - pub fn default(self, db: &dyn HirDatabase) -> Option { - let params = db.generic_defaults(self.id.parent); - let local_idx = hir_ty::param_idx(db, self.id)?; - let resolver = self.id.parent.resolver(db.upcast()); - let environment = TraitEnvironment::lower(db, &resolver); - let ty = params.get(local_idx)?.clone(); - let subst = 
Substs::type_params(db, self.id.parent); - let ty = ty.subst(&subst.prefix(local_idx)); - Some(Type { - krate: self.id.parent.module(db.upcast()).krate, - ty: InEnvironment { value: ty, environment }, - }) - } -} - -// FIXME: rename from `ImplDef` to `Impl` -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct ImplDef { - pub(crate) id: ImplId, -} - -impl ImplDef { - pub fn all_in_crate(db: &dyn HirDatabase, krate: Crate) -> Vec { - let inherent = db.inherent_impls_in_crate(krate.id); - let trait_ = db.trait_impls_in_crate(krate.id); - - inherent.all_impls().chain(trait_.all_impls()).map(Self::from).collect() - } - pub fn for_trait(db: &dyn HirDatabase, krate: Crate, trait_: Trait) -> Vec { - let impls = db.trait_impls_in_crate(krate.id); - impls.for_trait(trait_.id).map(Self::from).collect() - } - - pub fn target_trait(self, db: &dyn HirDatabase) -> Option { - db.impl_data(self.id).target_trait.clone() - } - - pub fn target_type(self, db: &dyn HirDatabase) -> TypeRef { - db.impl_data(self.id).target_type.clone() - } - - pub fn target_ty(self, db: &dyn HirDatabase) -> Type { - let impl_data = db.impl_data(self.id); - let resolver = self.id.resolver(db.upcast()); - let ctx = hir_ty::TyLoweringContext::new(db, &resolver); - let environment = TraitEnvironment::lower(db, &resolver); - let ty = Ty::from_hir(&ctx, &impl_data.target_type); - Type { - krate: self.id.lookup(db.upcast()).container.module(db.upcast()).krate, - ty: InEnvironment { value: ty, environment }, - } - } - - pub fn items(self, db: &dyn HirDatabase) -> Vec { - db.impl_data(self.id).items.iter().map(|it| (*it).into()).collect() - } - - pub fn is_negative(self, db: &dyn HirDatabase) -> bool { - db.impl_data(self.id).is_negative - } - - pub fn module(self, db: &dyn HirDatabase) -> Module { - self.id.lookup(db.upcast()).container.module(db.upcast()).into() - } - - pub fn krate(self, db: &dyn HirDatabase) -> Crate { - Crate { id: self.module(db).id.krate } - } - - pub fn is_builtin_derive(self, 
db: &dyn HirDatabase) -> Option> { - let src = self.source(db); - let item = src.file_id.is_builtin_derive(db.upcast())?; - let hygenic = hir_expand::hygiene::Hygiene::new(db.upcast(), item.file_id); - - let attr = item - .value - .attrs() - .filter_map(|it| { - let path = hir_def::path::ModPath::from_src(it.path()?, &hygenic)?; - if path.as_ident()?.to_string() == "derive" { - Some(it) - } else { - None - } - }) - .last()?; - - Some(item.with_value(attr)) - } -} - -#[derive(Clone, PartialEq, Eq, Debug)] -pub struct Type { - krate: CrateId, - ty: InEnvironment, -} - -impl Type { - pub(crate) fn new_with_resolver( - db: &dyn HirDatabase, - resolver: &Resolver, - ty: Ty, - ) -> Option { - let krate = resolver.krate()?; - Some(Type::new_with_resolver_inner(db, krate, resolver, ty)) - } - pub(crate) fn new_with_resolver_inner( - db: &dyn HirDatabase, - krate: CrateId, - resolver: &Resolver, - ty: Ty, - ) -> Type { - let environment = TraitEnvironment::lower(db, &resolver); - Type { krate, ty: InEnvironment { value: ty, environment } } - } - - fn new(db: &dyn HirDatabase, krate: CrateId, lexical_env: impl HasResolver, ty: Ty) -> Type { - let resolver = lexical_env.resolver(db.upcast()); - let environment = TraitEnvironment::lower(db, &resolver); - Type { krate, ty: InEnvironment { value: ty, environment } } - } - - fn from_def( - db: &dyn HirDatabase, - krate: CrateId, - def: impl HasResolver + Into + Into, - ) -> Type { - let substs = Substs::build_for_def(db, def).fill_with_unknown().build(); - let ty = db.ty(def.into()).subst(&substs); - Type::new(db, krate, def, ty) - } - - pub fn is_unit(&self) -> bool { - matches!( - self.ty.value, - Ty::Apply(ApplicationTy { ctor: TypeCtor::Tuple { cardinality: 0 }, .. }) - ) - } - pub fn is_bool(&self) -> bool { - matches!(self.ty.value, Ty::Apply(ApplicationTy { ctor: TypeCtor::Bool, .. 
})) - } - - pub fn is_mutable_reference(&self) -> bool { - matches!( - self.ty.value, - Ty::Apply(ApplicationTy { ctor: TypeCtor::Ref(Mutability::Mut), .. }) - ) - } - - pub fn is_unknown(&self) -> bool { - matches!(self.ty.value, Ty::Unknown) - } - - /// Checks that particular type `ty` implements `std::future::Future`. - /// This function is used in `.await` syntax completion. - pub fn impls_future(&self, db: &dyn HirDatabase) -> bool { - let krate = self.krate; - - let std_future_trait = - db.lang_item(krate, "future_trait".into()).and_then(|it| it.as_trait()); - let std_future_trait = match std_future_trait { - Some(it) => it, - None => return false, - }; - - let canonical_ty = Canonical { value: self.ty.value.clone(), kinds: Arc::new([]) }; - method_resolution::implements_trait( - &canonical_ty, - db, - self.ty.environment.clone(), - krate, - std_future_trait, - ) - } - - pub fn impls_trait(&self, db: &dyn HirDatabase, trait_: Trait, args: &[Type]) -> bool { - let trait_ref = hir_ty::TraitRef { - trait_: trait_.id, - substs: Substs::build_for_def(db, trait_.id) - .push(self.ty.value.clone()) - .fill(args.iter().map(|t| t.ty.value.clone())) - .build(), - }; - - let goal = Canonical { - value: hir_ty::InEnvironment::new( - self.ty.environment.clone(), - hir_ty::Obligation::Trait(trait_ref), - ), - kinds: Arc::new([]), - }; - - db.trait_solve(self.krate, goal).is_some() - } - - pub fn as_callable(&self, db: &dyn HirDatabase) -> Option { - let def = match self.ty.value { - Ty::Apply(ApplicationTy { ctor: TypeCtor::FnDef(def), parameters: _ }) => Some(def), - _ => None, - }; - - let sig = self.ty.value.callable_sig(db)?; - Some(Callable { ty: self.clone(), sig, def, is_bound_method: false }) - } - - pub fn is_closure(&self) -> bool { - matches!(&self.ty.value, Ty::Apply(ApplicationTy { ctor: TypeCtor::Closure { .. }, .. })) - } - - pub fn is_fn(&self) -> bool { - matches!(&self.ty.value, - Ty::Apply(ApplicationTy { ctor: TypeCtor::FnDef(..), .. 
}) | - Ty::Apply(ApplicationTy { ctor: TypeCtor::FnPtr { .. }, .. }) - ) - } - - pub fn is_raw_ptr(&self) -> bool { - matches!(&self.ty.value, Ty::Apply(ApplicationTy { ctor: TypeCtor::RawPtr(..), .. })) - } - - pub fn contains_unknown(&self) -> bool { - return go(&self.ty.value); - - fn go(ty: &Ty) -> bool { - match ty { - Ty::Unknown => true, - Ty::Apply(a_ty) => a_ty.parameters.iter().any(go), - _ => false, - } - } - } - - pub fn fields(&self, db: &dyn HirDatabase) -> Vec<(Field, Type)> { - if let Ty::Apply(a_ty) = &self.ty.value { - let variant_id = match a_ty.ctor { - TypeCtor::Adt(AdtId::StructId(s)) => s.into(), - TypeCtor::Adt(AdtId::UnionId(u)) => u.into(), - _ => return Vec::new(), - }; - - return db - .field_types(variant_id) - .iter() - .map(|(local_id, ty)| { - let def = Field { parent: variant_id.into(), id: local_id }; - let ty = ty.clone().subst(&a_ty.parameters); - (def, self.derived(ty)) - }) - .collect(); - }; - Vec::new() - } - - pub fn tuple_fields(&self, _db: &dyn HirDatabase) -> Vec { - let mut res = Vec::new(); - if let Ty::Apply(a_ty) = &self.ty.value { - if let TypeCtor::Tuple { .. } = a_ty.ctor { - for ty in a_ty.parameters.iter() { - let ty = ty.clone(); - res.push(self.derived(ty)); - } - } - }; - res - } - - pub fn autoderef<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator + 'a { - // There should be no inference vars in types passed here - // FIXME check that? - let canonical = Canonical { value: self.ty.value.clone(), kinds: Arc::new([]) }; - let environment = self.ty.environment.clone(); - let ty = InEnvironment { value: canonical, environment }; - autoderef(db, Some(self.krate), ty) - .map(|canonical| canonical.value) - .map(move |ty| self.derived(ty)) - } - - // This would be nicer if it just returned an iterator, but that runs into - // lifetime problems, because we need to borrow temp `CrateImplDefs`. 
- pub fn iterate_assoc_items( - self, - db: &dyn HirDatabase, - krate: Crate, - mut callback: impl FnMut(AssocItem) -> Option, - ) -> Option { - for krate in self.ty.value.def_crates(db, krate.id)? { - let impls = db.inherent_impls_in_crate(krate); - - for impl_def in impls.for_self_ty(&self.ty.value) { - for &item in db.impl_data(*impl_def).items.iter() { - if let Some(result) = callback(item.into()) { - return Some(result); - } - } - } - } - None - } - - pub fn iterate_method_candidates( - &self, - db: &dyn HirDatabase, - krate: Crate, - traits_in_scope: &FxHashSet, - name: Option<&Name>, - mut callback: impl FnMut(&Ty, Function) -> Option, - ) -> Option { - // There should be no inference vars in types passed here - // FIXME check that? - // FIXME replace Unknown by bound vars here - let canonical = Canonical { value: self.ty.value.clone(), kinds: Arc::new([]) }; - - let env = self.ty.environment.clone(); - let krate = krate.id; - - method_resolution::iterate_method_candidates( - &canonical, - db, - env, - krate, - traits_in_scope, - name, - method_resolution::LookupMode::MethodCall, - |ty, it| match it { - AssocItemId::FunctionId(f) => callback(ty, f.into()), - _ => None, - }, - ) - } - - pub fn iterate_path_candidates( - &self, - db: &dyn HirDatabase, - krate: Crate, - traits_in_scope: &FxHashSet, - name: Option<&Name>, - mut callback: impl FnMut(&Ty, AssocItem) -> Option, - ) -> Option { - // There should be no inference vars in types passed here - // FIXME check that? 
- // FIXME replace Unknown by bound vars here - let canonical = Canonical { value: self.ty.value.clone(), kinds: Arc::new([]) }; - - let env = self.ty.environment.clone(); - let krate = krate.id; - - method_resolution::iterate_method_candidates( - &canonical, - db, - env, - krate, - traits_in_scope, - name, - method_resolution::LookupMode::Path, - |ty, it| callback(ty, it.into()), - ) - } - - pub fn as_adt(&self) -> Option { - let (adt, _subst) = self.ty.value.as_adt()?; - Some(adt.into()) - } - - pub fn as_dyn_trait(&self) -> Option { - self.ty.value.dyn_trait().map(Into::into) - } - - pub fn as_impl_traits(&self, db: &dyn HirDatabase) -> Option> { - self.ty.value.impl_trait_bounds(db).map(|it| { - it.into_iter() - .filter_map(|pred| match pred { - hir_ty::GenericPredicate::Implemented(trait_ref) => { - Some(Trait::from(trait_ref.trait_)) - } - _ => None, - }) - .collect() - }) - } - - pub fn as_associated_type_parent_trait(&self, db: &dyn HirDatabase) -> Option { - self.ty.value.associated_type_parent_trait(db).map(Into::into) - } - - // FIXME: provide required accessors such that it becomes implementable from outside. - pub fn is_equal_for_find_impls(&self, other: &Type) -> bool { - match (&self.ty.value, &other.ty.value) { - (Ty::Apply(a_original_ty), Ty::Apply(ApplicationTy { ctor, parameters })) => match ctor - { - TypeCtor::Ref(..) => match parameters.as_single() { - Ty::Apply(a_ty) => a_original_ty.ctor == a_ty.ctor, - _ => false, - }, - _ => a_original_ty.ctor == *ctor, - }, - _ => false, - } - } - - fn derived(&self, ty: Ty) -> Type { - Type { - krate: self.krate, - ty: InEnvironment { value: ty, environment: self.ty.environment.clone() }, - } - } - - pub fn walk(&self, db: &dyn HirDatabase, mut cb: impl FnMut(Type)) { - // TypeWalk::walk for a Ty at first visits parameters and only after that the Ty itself. - // We need a different order here. 
- - fn walk_substs( - db: &dyn HirDatabase, - type_: &Type, - substs: &Substs, - cb: &mut impl FnMut(Type), - ) { - for ty in substs.iter() { - walk_type(db, &type_.derived(ty.clone()), cb); - } - } - - fn walk_bounds( - db: &dyn HirDatabase, - type_: &Type, - bounds: &[GenericPredicate], - cb: &mut impl FnMut(Type), - ) { - for pred in bounds { - match pred { - GenericPredicate::Implemented(trait_ref) => { - cb(type_.clone()); - walk_substs(db, type_, &trait_ref.substs, cb); - } - _ => (), - } - } - } - - fn walk_type(db: &dyn HirDatabase, type_: &Type, cb: &mut impl FnMut(Type)) { - let ty = type_.ty.value.strip_references(); - match ty { - Ty::Apply(ApplicationTy { ctor, parameters }) => { - match ctor { - TypeCtor::Adt(_) => { - cb(type_.derived(ty.clone())); - } - TypeCtor::AssociatedType(_) => { - if let Some(_) = ty.associated_type_parent_trait(db) { - cb(type_.derived(ty.clone())); - } - } - _ => (), - } - - // adt params, tuples, etc... - walk_substs(db, type_, parameters, cb); - } - Ty::Opaque(opaque_ty) => { - if let Some(bounds) = ty.impl_trait_bounds(db) { - walk_bounds(db, &type_.derived(ty.clone()), &bounds, cb); - } - - walk_substs(db, type_, &opaque_ty.parameters, cb); - } - Ty::Placeholder(_) => { - if let Some(bounds) = ty.impl_trait_bounds(db) { - walk_bounds(db, &type_.derived(ty.clone()), &bounds, cb); - } - } - Ty::Dyn(bounds) => { - walk_bounds(db, &type_.derived(ty.clone()), bounds.as_ref(), cb); - } - - _ => (), - } - } - - walk_type(db, self, &mut cb); - } -} - -impl HirDisplay for Type { - fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> { - self.ty.value.hir_fmt(f) - } -} - -// FIXME: closures -#[derive(Debug)] -pub struct Callable { - ty: Type, - sig: FnSig, - def: Option, - pub(crate) is_bound_method: bool, -} - -pub enum CallableKind { - Function(Function), - TupleStruct(Struct), - TupleEnumVariant(EnumVariant), - Closure, -} - -impl Callable { - pub fn kind(&self) -> CallableKind { - match self.def { - 
Some(CallableDefId::FunctionId(it)) => CallableKind::Function(it.into()), - Some(CallableDefId::StructId(it)) => CallableKind::TupleStruct(it.into()), - Some(CallableDefId::EnumVariantId(it)) => CallableKind::TupleEnumVariant(it.into()), - None => CallableKind::Closure, - } - } - pub fn receiver_param(&self, db: &dyn HirDatabase) -> Option { - let func = match self.def { - Some(CallableDefId::FunctionId(it)) if self.is_bound_method => it, - _ => return None, - }; - let src = func.lookup(db.upcast()).source(db.upcast()); - let param_list = src.value.param_list()?; - param_list.self_param() - } - pub fn n_params(&self) -> usize { - self.sig.params().len() - if self.is_bound_method { 1 } else { 0 } - } - pub fn params( - &self, - db: &dyn HirDatabase, - ) -> Vec<(Option>, Type)> { - let types = self - .sig - .params() - .iter() - .skip(if self.is_bound_method { 1 } else { 0 }) - .map(|ty| self.ty.derived(ty.clone())); - let patterns = match self.def { - Some(CallableDefId::FunctionId(func)) => { - let src = func.lookup(db.upcast()).source(db.upcast()); - src.value.param_list().map(|param_list| { - param_list - .self_param() - .map(|it| Some(Either::Left(it))) - .filter(|_| !self.is_bound_method) - .into_iter() - .chain(param_list.params().map(|it| it.pat().map(Either::Right))) - }) - } - _ => None, - }; - patterns.into_iter().flatten().chain(iter::repeat(None)).zip(types).collect() - } - pub fn return_type(&self) -> Type { - self.ty.derived(self.sig.ret().clone()) - } -} - -/// For IDE only -#[derive(Debug)] -pub enum ScopeDef { - ModuleDef(ModuleDef), - MacroDef(MacroDef), - GenericParam(TypeParam), - ImplSelfType(ImplDef), - AdtSelfType(Adt), - Local(Local), - Unknown, -} - -impl ScopeDef { - pub fn all_items(def: PerNs) -> ArrayVec<[Self; 3]> { - let mut items = ArrayVec::new(); - - match (def.take_types(), def.take_values()) { - (Some(m1), None) => items.push(ScopeDef::ModuleDef(m1.into())), - (None, Some(m2)) => items.push(ScopeDef::ModuleDef(m2.into())), - 
(Some(m1), Some(m2)) => { - // Some items, like unit structs and enum variants, are - // returned as both a type and a value. Here we want - // to de-duplicate them. - if m1 != m2 { - items.push(ScopeDef::ModuleDef(m1.into())); - items.push(ScopeDef::ModuleDef(m2.into())); - } else { - items.push(ScopeDef::ModuleDef(m1.into())); - } - } - (None, None) => {} - }; - - if let Some(macro_def_id) = def.take_macros() { - items.push(ScopeDef::MacroDef(macro_def_id.into())); - } - - if items.is_empty() { - items.push(ScopeDef::Unknown); - } - - items - } -} - -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] -pub enum AttrDef { - Module(Module), - Field(Field), - Adt(Adt), - Function(Function), - EnumVariant(EnumVariant), - Static(Static), - Const(Const), - Trait(Trait), - TypeAlias(TypeAlias), - MacroDef(MacroDef), -} - -impl_from!( - Module, - Field, - Adt(Struct, Enum, Union), - EnumVariant, - Static, - Const, - Function, - Trait, - TypeAlias, - MacroDef - for AttrDef -); - -pub trait HasAttrs { - fn attrs(self, db: &dyn HirDatabase) -> Attrs; -} - -impl> HasAttrs for T { - fn attrs(self, db: &dyn HirDatabase) -> Attrs { - let def: AttrDef = self.into(); - db.attrs(def.into()) - } -} - -pub trait Docs { - fn docs(&self, db: &dyn HirDatabase) -> Option; -} -impl + Copy> Docs for T { - fn docs(&self, db: &dyn HirDatabase) -> Option { - let def: AttrDef = (*self).into(); - db.documentation(def.into()) - } -} - -pub trait HasVisibility { - fn visibility(&self, db: &dyn HirDatabase) -> Visibility; - fn is_visible_from(&self, db: &dyn HirDatabase, module: Module) -> bool { - let vis = self.visibility(db); - vis.is_visible_from(db.upcast(), module.id) - } -} diff --git a/crates/ra_hir/src/diagnostics.rs b/crates/ra_hir/src/diagnostics.rs deleted file mode 100644 index 266b513dcf..0000000000 --- a/crates/ra_hir/src/diagnostics.rs +++ /dev/null @@ -1,8 +0,0 @@ -//! 
FIXME: write short doc here -pub use hir_def::diagnostics::UnresolvedModule; -pub use hir_expand::diagnostics::{ - AstDiagnostic, Diagnostic, DiagnosticSink, DiagnosticSinkBuilder, -}; -pub use hir_ty::diagnostics::{ - MismatchedArgCount, MissingFields, MissingMatchArms, MissingOkInTailExpr, NoSuchField, -}; diff --git a/crates/ra_hir/src/from_id.rs b/crates/ra_hir/src/from_id.rs deleted file mode 100644 index 679ae81215..0000000000 --- a/crates/ra_hir/src/from_id.rs +++ /dev/null @@ -1,247 +0,0 @@ -//! Utility module for converting between hir_def ids and code_model wrappers. -//! -//! It's unclear if we need this long-term, but it's definitelly useful while we -//! are splitting the hir. - -use hir_def::{ - expr::PatId, AdtId, AssocItemId, AttrDefId, DefWithBodyId, EnumVariantId, FieldId, - GenericDefId, ModuleDefId, VariantId, -}; - -use crate::{ - code_model::ItemInNs, Adt, AssocItem, AttrDef, DefWithBody, EnumVariant, Field, GenericDef, - Local, MacroDef, ModuleDef, VariantDef, -}; - -macro_rules! 
from_id { - ($(($id:path, $ty:path)),*) => {$( - impl From<$id> for $ty { - fn from(id: $id) -> $ty { - $ty { id } - } - } - impl From<$ty> for $id { - fn from(ty: $ty) -> $id { - ty.id - } - } - )*} -} - -from_id![ - (ra_db::CrateId, crate::Crate), - (hir_def::ModuleId, crate::Module), - (hir_def::StructId, crate::Struct), - (hir_def::UnionId, crate::Union), - (hir_def::EnumId, crate::Enum), - (hir_def::TypeAliasId, crate::TypeAlias), - (hir_def::TraitId, crate::Trait), - (hir_def::StaticId, crate::Static), - (hir_def::ConstId, crate::Const), - (hir_def::FunctionId, crate::Function), - (hir_def::ImplId, crate::ImplDef), - (hir_def::TypeParamId, crate::TypeParam), - (hir_expand::MacroDefId, crate::MacroDef) -]; - -impl From for Adt { - fn from(id: AdtId) -> Self { - match id { - AdtId::StructId(it) => Adt::Struct(it.into()), - AdtId::UnionId(it) => Adt::Union(it.into()), - AdtId::EnumId(it) => Adt::Enum(it.into()), - } - } -} - -impl From for AdtId { - fn from(id: Adt) -> Self { - match id { - Adt::Struct(it) => AdtId::StructId(it.id), - Adt::Union(it) => AdtId::UnionId(it.id), - Adt::Enum(it) => AdtId::EnumId(it.id), - } - } -} - -impl From for EnumVariant { - fn from(id: EnumVariantId) -> Self { - EnumVariant { parent: id.parent.into(), id: id.local_id } - } -} - -impl From for EnumVariantId { - fn from(def: EnumVariant) -> Self { - EnumVariantId { parent: def.parent.id, local_id: def.id } - } -} - -impl From for ModuleDef { - fn from(id: ModuleDefId) -> Self { - match id { - ModuleDefId::ModuleId(it) => ModuleDef::Module(it.into()), - ModuleDefId::FunctionId(it) => ModuleDef::Function(it.into()), - ModuleDefId::AdtId(it) => ModuleDef::Adt(it.into()), - ModuleDefId::EnumVariantId(it) => ModuleDef::EnumVariant(it.into()), - ModuleDefId::ConstId(it) => ModuleDef::Const(it.into()), - ModuleDefId::StaticId(it) => ModuleDef::Static(it.into()), - ModuleDefId::TraitId(it) => ModuleDef::Trait(it.into()), - ModuleDefId::TypeAliasId(it) => ModuleDef::TypeAlias(it.into()), 
- ModuleDefId::BuiltinType(it) => ModuleDef::BuiltinType(it), - } - } -} - -impl From for ModuleDefId { - fn from(id: ModuleDef) -> Self { - match id { - ModuleDef::Module(it) => ModuleDefId::ModuleId(it.into()), - ModuleDef::Function(it) => ModuleDefId::FunctionId(it.into()), - ModuleDef::Adt(it) => ModuleDefId::AdtId(it.into()), - ModuleDef::EnumVariant(it) => ModuleDefId::EnumVariantId(it.into()), - ModuleDef::Const(it) => ModuleDefId::ConstId(it.into()), - ModuleDef::Static(it) => ModuleDefId::StaticId(it.into()), - ModuleDef::Trait(it) => ModuleDefId::TraitId(it.into()), - ModuleDef::TypeAlias(it) => ModuleDefId::TypeAliasId(it.into()), - ModuleDef::BuiltinType(it) => ModuleDefId::BuiltinType(it), - } - } -} - -impl From for DefWithBodyId { - fn from(def: DefWithBody) -> Self { - match def { - DefWithBody::Function(it) => DefWithBodyId::FunctionId(it.id), - DefWithBody::Static(it) => DefWithBodyId::StaticId(it.id), - DefWithBody::Const(it) => DefWithBodyId::ConstId(it.id), - } - } -} - -impl From for DefWithBody { - fn from(def: DefWithBodyId) -> Self { - match def { - DefWithBodyId::FunctionId(it) => DefWithBody::Function(it.into()), - DefWithBodyId::StaticId(it) => DefWithBody::Static(it.into()), - DefWithBodyId::ConstId(it) => DefWithBody::Const(it.into()), - } - } -} - -impl From for AssocItem { - fn from(def: AssocItemId) -> Self { - match def { - AssocItemId::FunctionId(it) => AssocItem::Function(it.into()), - AssocItemId::TypeAliasId(it) => AssocItem::TypeAlias(it.into()), - AssocItemId::ConstId(it) => AssocItem::Const(it.into()), - } - } -} - -impl From for GenericDefId { - fn from(def: GenericDef) -> Self { - match def { - GenericDef::Function(it) => GenericDefId::FunctionId(it.id), - GenericDef::Adt(it) => GenericDefId::AdtId(it.into()), - GenericDef::Trait(it) => GenericDefId::TraitId(it.id), - GenericDef::TypeAlias(it) => GenericDefId::TypeAliasId(it.id), - GenericDef::ImplDef(it) => GenericDefId::ImplId(it.id), - GenericDef::EnumVariant(it) => { - 
GenericDefId::EnumVariantId(EnumVariantId { parent: it.parent.id, local_id: it.id }) - } - GenericDef::Const(it) => GenericDefId::ConstId(it.id), - } - } -} - -impl From for GenericDefId { - fn from(id: Adt) -> Self { - match id { - Adt::Struct(it) => it.id.into(), - Adt::Union(it) => it.id.into(), - Adt::Enum(it) => it.id.into(), - } - } -} - -impl From for VariantDef { - fn from(def: VariantId) -> Self { - match def { - VariantId::StructId(it) => VariantDef::Struct(it.into()), - VariantId::EnumVariantId(it) => VariantDef::EnumVariant(it.into()), - VariantId::UnionId(it) => VariantDef::Union(it.into()), - } - } -} - -impl From for VariantId { - fn from(def: VariantDef) -> Self { - match def { - VariantDef::Struct(it) => VariantId::StructId(it.id), - VariantDef::EnumVariant(it) => VariantId::EnumVariantId(it.into()), - VariantDef::Union(it) => VariantId::UnionId(it.id), - } - } -} - -impl From for FieldId { - fn from(def: Field) -> Self { - FieldId { parent: def.parent.into(), local_id: def.id } - } -} - -impl From for Field { - fn from(def: FieldId) -> Self { - Field { parent: def.parent.into(), id: def.local_id } - } -} - -impl From for AttrDefId { - fn from(def: AttrDef) -> Self { - match def { - AttrDef::Module(it) => AttrDefId::ModuleId(it.id), - AttrDef::Field(it) => AttrDefId::FieldId(it.into()), - AttrDef::Adt(it) => AttrDefId::AdtId(it.into()), - AttrDef::Function(it) => AttrDefId::FunctionId(it.id), - AttrDef::EnumVariant(it) => AttrDefId::EnumVariantId(it.into()), - AttrDef::Static(it) => AttrDefId::StaticId(it.id), - AttrDef::Const(it) => AttrDefId::ConstId(it.id), - AttrDef::Trait(it) => AttrDefId::TraitId(it.id), - AttrDef::TypeAlias(it) => AttrDefId::TypeAliasId(it.id), - AttrDef::MacroDef(it) => AttrDefId::MacroDefId(it.id), - } - } -} - -impl From for GenericDefId { - fn from(item: AssocItem) -> Self { - match item { - AssocItem::Function(f) => f.id.into(), - AssocItem::Const(c) => c.id.into(), - AssocItem::TypeAlias(t) => t.id.into(), - } - } -} - 
-impl From<(DefWithBodyId, PatId)> for Local { - fn from((parent, pat_id): (DefWithBodyId, PatId)) -> Self { - Local { parent, pat_id } - } -} - -impl From for ItemInNs { - fn from(macro_def: MacroDef) -> Self { - ItemInNs::Macros(macro_def.into()) - } -} - -impl From for ItemInNs { - fn from(module_def: ModuleDef) -> Self { - match module_def { - ModuleDef::Static(_) | ModuleDef::Const(_) | ModuleDef::Function(_) => { - ItemInNs::Values(module_def.into()) - } - _ => ItemInNs::Types(module_def.into()), - } - } -} diff --git a/crates/ra_hir/src/has_source.rs b/crates/ra_hir/src/has_source.rs deleted file mode 100644 index f2eb70057a..0000000000 --- a/crates/ra_hir/src/has_source.rs +++ /dev/null @@ -1,135 +0,0 @@ -//! Provides set of implementation for hir's objects that allows get back location in file. - -use either::Either; -use hir_def::{ - nameres::{ModuleOrigin, ModuleSource}, - src::{HasChildSource, HasSource as _}, - Lookup, VariantId, -}; -use ra_syntax::ast; - -use crate::{ - db::HirDatabase, Const, Enum, EnumVariant, Field, FieldSource, Function, ImplDef, MacroDef, - Module, Static, Struct, Trait, TypeAlias, TypeParam, Union, -}; - -pub use hir_expand::InFile; - -pub trait HasSource { - type Ast; - fn source(self, db: &dyn HirDatabase) -> InFile; -} - -/// NB: Module is !HasSource, because it has two source nodes at the same time: -/// definition and declaration. -impl Module { - /// Returns a node which defines this module. That is, a file or a `mod foo {}` with items. - pub fn definition_source(self, db: &dyn HirDatabase) -> InFile { - let def_map = db.crate_def_map(self.id.krate); - def_map[self.id.local_id].definition_source(db.upcast()) - } - - pub fn is_mod_rs(self, db: &dyn HirDatabase) -> bool { - let def_map = db.crate_def_map(self.id.krate); - match def_map[self.id.local_id].origin { - ModuleOrigin::File { is_mod_rs, .. } => is_mod_rs, - _ => false, - } - } - - /// Returns a node which declares this module, either a `mod foo;` or a `mod foo {}`. 
- /// `None` for the crate root. - pub fn declaration_source(self, db: &dyn HirDatabase) -> Option> { - let def_map = db.crate_def_map(self.id.krate); - def_map[self.id.local_id].declaration_source(db.upcast()) - } -} - -impl HasSource for Field { - type Ast = FieldSource; - fn source(self, db: &dyn HirDatabase) -> InFile { - let var = VariantId::from(self.parent); - let src = var.child_source(db.upcast()); - src.map(|it| match it[self.id].clone() { - Either::Left(it) => FieldSource::Pos(it), - Either::Right(it) => FieldSource::Named(it), - }) - } -} -impl HasSource for Struct { - type Ast = ast::Struct; - fn source(self, db: &dyn HirDatabase) -> InFile { - self.id.lookup(db.upcast()).source(db.upcast()) - } -} -impl HasSource for Union { - type Ast = ast::Union; - fn source(self, db: &dyn HirDatabase) -> InFile { - self.id.lookup(db.upcast()).source(db.upcast()) - } -} -impl HasSource for Enum { - type Ast = ast::Enum; - fn source(self, db: &dyn HirDatabase) -> InFile { - self.id.lookup(db.upcast()).source(db.upcast()) - } -} -impl HasSource for EnumVariant { - type Ast = ast::Variant; - fn source(self, db: &dyn HirDatabase) -> InFile { - self.parent.id.child_source(db.upcast()).map(|map| map[self.id].clone()) - } -} -impl HasSource for Function { - type Ast = ast::Fn; - fn source(self, db: &dyn HirDatabase) -> InFile { - self.id.lookup(db.upcast()).source(db.upcast()) - } -} -impl HasSource for Const { - type Ast = ast::Const; - fn source(self, db: &dyn HirDatabase) -> InFile { - self.id.lookup(db.upcast()).source(db.upcast()) - } -} -impl HasSource for Static { - type Ast = ast::Static; - fn source(self, db: &dyn HirDatabase) -> InFile { - self.id.lookup(db.upcast()).source(db.upcast()) - } -} -impl HasSource for Trait { - type Ast = ast::Trait; - fn source(self, db: &dyn HirDatabase) -> InFile { - self.id.lookup(db.upcast()).source(db.upcast()) - } -} -impl HasSource for TypeAlias { - type Ast = ast::TypeAlias; - fn source(self, db: &dyn HirDatabase) -> InFile 
{ - self.id.lookup(db.upcast()).source(db.upcast()) - } -} -impl HasSource for MacroDef { - type Ast = ast::MacroCall; - fn source(self, db: &dyn HirDatabase) -> InFile { - InFile { - file_id: self.id.ast_id.expect("MacroDef without ast_id").file_id, - value: self.id.ast_id.expect("MacroDef without ast_id").to_node(db.upcast()), - } - } -} -impl HasSource for ImplDef { - type Ast = ast::Impl; - fn source(self, db: &dyn HirDatabase) -> InFile { - self.id.lookup(db.upcast()).source(db.upcast()) - } -} - -impl HasSource for TypeParam { - type Ast = Either; - fn source(self, db: &dyn HirDatabase) -> InFile { - let child_source = self.id.parent.child_source(db.upcast()); - child_source.map(|it| it[self.id.local_id].clone()) - } -} diff --git a/crates/ra_hir/src/lib.rs b/crates/ra_hir/src/lib.rs deleted file mode 100644 index 31f3241c9e..0000000000 --- a/crates/ra_hir/src/lib.rs +++ /dev/null @@ -1,59 +0,0 @@ -//! HIR (previously known as descriptors) provides a high-level object oriented -//! access to Rust code. -//! -//! The principal difference between HIR and syntax trees is that HIR is bound -//! to a particular crate instance. That is, it has cfg flags and features -//! applied. So, the relation between syntax and HIR is many-to-one. -//! -//! HIR is the public API of the all of the compiler logic above syntax trees. -//! It is written in "OO" style. Each type is self contained (as in, it knows it's -//! parents and full context). It should be "clean code". -//! -//! `ra_hir_*` crates are the implementation of the compiler logic. -//! They are written in "ECS" style, with relatively little abstractions. -//! Many types are not self-contained, and explicitly use local indexes, arenas, etc. -//! -//! `ra_hir` is what insulates the "we don't know how to actually write an incremental compiler" -//! from the ide with completions, hovers, etc. It is a (soft, internal) boundary: -//! https://www.tedinski.com/2018/02/06/system-boundaries.html. 
- -#![recursion_limit = "512"] - -mod semantics; -pub mod db; -mod source_analyzer; - -pub mod diagnostics; - -mod from_id; -mod code_model; - -mod has_source; - -pub use crate::{ - code_model::{ - Adt, AsAssocItem, AssocItem, AssocItemContainer, AttrDef, Callable, CallableKind, Const, - Crate, CrateDependency, DefWithBody, Docs, Enum, EnumVariant, Field, FieldSource, Function, - GenericDef, HasAttrs, HasVisibility, ImplDef, Local, MacroDef, Module, ModuleDef, ScopeDef, - Static, Struct, Trait, Type, TypeAlias, TypeParam, Union, VariantDef, Visibility, - }, - has_source::HasSource, - semantics::{original_range, PathResolution, Semantics, SemanticsScope}, -}; - -pub use hir_def::{ - adt::StructKind, - attr::Attrs, - body::scope::ExprScopes, - builtin_type::BuiltinType, - docs::Documentation, - nameres::ModuleSource, - path::{ModPath, Path, PathKind}, - type_ref::Mutability, -}; -pub use hir_expand::{ - hygiene::Hygiene, name::Name, HirFileId, InFile, MacroCallId, MacroCallLoc, - MacroDefId, /* FIXME */ - MacroFile, Origin, -}; -pub use hir_ty::display::HirDisplay; diff --git a/crates/ra_hir/src/semantics.rs b/crates/ra_hir/src/semantics.rs deleted file mode 100644 index 307b336f20..0000000000 --- a/crates/ra_hir/src/semantics.rs +++ /dev/null @@ -1,731 +0,0 @@ -//! See `Semantics`. 
- -mod source_to_def; - -use std::{cell::RefCell, fmt, iter::successors}; - -use hir_def::{ - resolver::{self, HasResolver, Resolver}, - AsMacroCall, FunctionId, TraitId, VariantId, -}; -use hir_expand::{diagnostics::AstDiagnostic, hygiene::Hygiene, ExpansionInfo}; -use hir_ty::associated_type_shorthand_candidates; -use itertools::Itertools; -use ra_db::{FileId, FileRange}; -use ra_prof::profile; -use ra_syntax::{ - algo::{find_node_at_offset, skip_trivia_token}, - ast, AstNode, Direction, SyntaxNode, SyntaxToken, TextRange, TextSize, -}; -use rustc_hash::{FxHashMap, FxHashSet}; - -use crate::{ - db::HirDatabase, - diagnostics::Diagnostic, - semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx}, - source_analyzer::{resolve_hir_path, resolve_hir_path_qualifier, SourceAnalyzer}, - AssocItem, Callable, Field, Function, HirFileId, ImplDef, InFile, Local, MacroDef, Module, - ModuleDef, Name, Origin, Path, ScopeDef, Trait, Type, TypeAlias, TypeParam, VariantDef, -}; -use resolver::TypeNs; - -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum PathResolution { - /// An item - Def(ModuleDef), - /// A local binding (only value namespace) - Local(Local), - /// A generic parameter - TypeParam(TypeParam), - SelfType(ImplDef), - Macro(MacroDef), - AssocItem(AssocItem), -} - -impl PathResolution { - fn in_type_ns(&self) -> Option { - match self { - PathResolution::Def(ModuleDef::Adt(adt)) => Some(TypeNs::AdtId((*adt).into())), - PathResolution::Def(ModuleDef::BuiltinType(builtin)) => { - Some(TypeNs::BuiltinType(*builtin)) - } - PathResolution::Def(ModuleDef::Const(_)) - | PathResolution::Def(ModuleDef::EnumVariant(_)) - | PathResolution::Def(ModuleDef::Function(_)) - | PathResolution::Def(ModuleDef::Module(_)) - | PathResolution::Def(ModuleDef::Static(_)) - | PathResolution::Def(ModuleDef::Trait(_)) => None, - PathResolution::Def(ModuleDef::TypeAlias(alias)) => { - Some(TypeNs::TypeAliasId((*alias).into())) - } - PathResolution::Local(_) | 
PathResolution::Macro(_) => None, - PathResolution::TypeParam(param) => Some(TypeNs::GenericParam((*param).into())), - PathResolution::SelfType(impl_def) => Some(TypeNs::SelfType((*impl_def).into())), - PathResolution::AssocItem(AssocItem::Const(_)) - | PathResolution::AssocItem(AssocItem::Function(_)) => None, - PathResolution::AssocItem(AssocItem::TypeAlias(alias)) => { - Some(TypeNs::TypeAliasId((*alias).into())) - } - } - } - - /// Returns an iterator over associated types that may be specified after this path (using - /// `Ty::Assoc` syntax). - pub fn assoc_type_shorthand_candidates( - &self, - db: &dyn HirDatabase, - mut cb: impl FnMut(TypeAlias) -> Option, - ) -> Option { - associated_type_shorthand_candidates(db, self.in_type_ns()?, |_, _, id| cb(id.into())) - } -} - -/// Primary API to get semantic information, like types, from syntax trees. -pub struct Semantics<'db, DB> { - pub db: &'db DB, - imp: SemanticsImpl<'db>, -} - -pub struct SemanticsImpl<'db> { - pub db: &'db dyn HirDatabase, - s2d_cache: RefCell, - expansion_info_cache: RefCell>>, - cache: RefCell>, -} - -impl fmt::Debug for Semantics<'_, DB> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "Semantics {{ ... 
}}") - } -} - -impl<'db, DB: HirDatabase> Semantics<'db, DB> { - pub fn new(db: &DB) -> Semantics { - let impl_ = SemanticsImpl::new(db); - Semantics { db, imp: impl_ } - } - - pub fn parse(&self, file_id: FileId) -> ast::SourceFile { - self.imp.parse(file_id) - } - - pub fn ast(&self, d: &T) -> ::AST { - let file_id = d.source().file_id; - let root = self.db.parse_or_expand(file_id).unwrap(); - self.imp.cache(root, file_id); - d.ast(self.db.upcast()) - } - - pub fn expand(&self, macro_call: &ast::MacroCall) -> Option { - self.imp.expand(macro_call) - } - - pub fn expand_hypothetical( - &self, - actual_macro_call: &ast::MacroCall, - hypothetical_args: &ast::TokenTree, - token_to_map: SyntaxToken, - ) -> Option<(SyntaxNode, SyntaxToken)> { - self.imp.expand_hypothetical(actual_macro_call, hypothetical_args, token_to_map) - } - - pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken { - self.imp.descend_into_macros(token) - } - - pub fn descend_node_at_offset( - &self, - node: &SyntaxNode, - offset: TextSize, - ) -> Option { - self.imp.descend_node_at_offset(node, offset).find_map(N::cast) - } - - pub fn original_range(&self, node: &SyntaxNode) -> FileRange { - self.imp.original_range(node) - } - - pub fn diagnostics_range(&self, diagnostics: &dyn Diagnostic) -> FileRange { - self.imp.diagnostics_range(diagnostics) - } - - pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator + '_ { - self.imp.ancestors_with_macros(node) - } - - pub fn ancestors_at_offset_with_macros( - &self, - node: &SyntaxNode, - offset: TextSize, - ) -> impl Iterator + '_ { - self.imp.ancestors_at_offset_with_macros(node, offset) - } - - /// Find a AstNode by offset inside SyntaxNode, if it is inside *Macrofile*, - /// search up until it is of the target AstNode type - pub fn find_node_at_offset_with_macros( - &self, - node: &SyntaxNode, - offset: TextSize, - ) -> Option { - self.imp.ancestors_at_offset_with_macros(node, offset).find_map(N::cast) - } - - /// Find a 
AstNode by offset inside SyntaxNode, if it is inside *MacroCall*, - /// descend it and find again - pub fn find_node_at_offset_with_descend( - &self, - node: &SyntaxNode, - offset: TextSize, - ) -> Option { - if let Some(it) = find_node_at_offset(&node, offset) { - return Some(it); - } - - self.imp.descend_node_at_offset(node, offset).find_map(N::cast) - } - - pub fn type_of_expr(&self, expr: &ast::Expr) -> Option { - self.imp.type_of_expr(expr) - } - - pub fn type_of_pat(&self, pat: &ast::Pat) -> Option { - self.imp.type_of_pat(pat) - } - - pub fn type_of_self(&self, param: &ast::SelfParam) -> Option { - self.imp.type_of_self(param) - } - - pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option { - self.imp.resolve_method_call(call).map(Function::from) - } - - pub fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option { - self.imp.resolve_method_call_as_callable(call) - } - - pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option { - self.imp.resolve_field(field) - } - - pub fn resolve_record_field( - &self, - field: &ast::RecordExprField, - ) -> Option<(Field, Option)> { - self.imp.resolve_record_field(field) - } - - pub fn resolve_record_field_pat(&self, field: &ast::RecordPatField) -> Option { - self.imp.resolve_record_field_pat(field) - } - - pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option { - self.imp.resolve_macro_call(macro_call) - } - - pub fn resolve_path(&self, path: &ast::Path) -> Option { - self.imp.resolve_path(path) - } - - pub fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option { - self.imp.resolve_variant(record_lit).map(VariantDef::from) - } - - pub fn lower_path(&self, path: &ast::Path) -> Option { - self.imp.lower_path(path) - } - - pub fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option { - self.imp.resolve_bind_pat_to_const(pat) - } - - // FIXME: use this instead? 
- // pub fn resolve_name_ref(&self, name_ref: &ast::NameRef) -> Option; - - pub fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> { - self.imp.record_literal_missing_fields(literal) - } - - pub fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> { - self.imp.record_pattern_missing_fields(pattern) - } - - pub fn to_def(&self, src: &T) -> Option { - let src = self.imp.find_file(src.syntax().clone()).with_value(src).cloned(); - T::to_def(&self.imp, src) - } - - pub fn to_module_def(&self, file: FileId) -> Option { - self.imp.to_module_def(file) - } - - pub fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db> { - self.imp.scope(node) - } - - pub fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> { - self.imp.scope_at_offset(node, offset) - } - - pub fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> { - self.imp.scope_for_def(def) - } - - pub fn assert_contains_node(&self, node: &SyntaxNode) { - self.imp.assert_contains_node(node) - } -} - -impl<'db> SemanticsImpl<'db> { - fn new(db: &'db dyn HirDatabase) -> Self { - SemanticsImpl { - db, - s2d_cache: Default::default(), - cache: Default::default(), - expansion_info_cache: Default::default(), - } - } - - fn parse(&self, file_id: FileId) -> ast::SourceFile { - let tree = self.db.parse(file_id).tree(); - self.cache(tree.syntax().clone(), file_id.into()); - tree - } - - fn expand(&self, macro_call: &ast::MacroCall) -> Option { - let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call); - let sa = self.analyze2(macro_call.map(|it| it.syntax()), None); - let file_id = sa.expand(self.db, macro_call)?; - let node = self.db.parse_or_expand(file_id)?; - self.cache(node.clone(), file_id); - Some(node) - } - - fn expand_hypothetical( - &self, - actual_macro_call: &ast::MacroCall, - hypothetical_args: &ast::TokenTree, - token_to_map: SyntaxToken, - ) -> Option<(SyntaxNode, 
SyntaxToken)> { - let macro_call = - self.find_file(actual_macro_call.syntax().clone()).with_value(actual_macro_call); - let sa = self.analyze2(macro_call.map(|it| it.syntax()), None); - let krate = sa.resolver.krate()?; - let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| { - sa.resolver.resolve_path_as_macro(self.db.upcast(), &path) - })?; - hir_expand::db::expand_hypothetical( - self.db.upcast(), - macro_call_id, - hypothetical_args, - token_to_map, - ) - } - - fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken { - let _p = profile("descend_into_macros"); - let parent = token.parent(); - let parent = self.find_file(parent); - let sa = self.analyze2(parent.as_ref(), None); - - let token = successors(Some(parent.with_value(token)), |token| { - self.db.check_canceled(); - let macro_call = token.value.ancestors().find_map(ast::MacroCall::cast)?; - let tt = macro_call.token_tree()?; - if !tt.syntax().text_range().contains_range(token.value.text_range()) { - return None; - } - let file_id = sa.expand(self.db, token.with_value(¯o_call))?; - let token = self - .expansion_info_cache - .borrow_mut() - .entry(file_id) - .or_insert_with(|| file_id.expansion_info(self.db.upcast())) - .as_ref()? 
- .map_token_down(token.as_ref())?; - - self.cache(find_root(&token.value.parent()), token.file_id); - - Some(token) - }) - .last() - .unwrap(); - - token.value - } - - fn descend_node_at_offset( - &self, - node: &SyntaxNode, - offset: TextSize, - ) -> impl Iterator + '_ { - // Handle macro token cases - node.token_at_offset(offset) - .map(|token| self.descend_into_macros(token)) - .map(|it| self.ancestors_with_macros(it.parent())) - .flatten() - } - - fn original_range(&self, node: &SyntaxNode) -> FileRange { - let node = self.find_file(node.clone()); - original_range(self.db, node.as_ref()) - } - - fn diagnostics_range(&self, diagnostics: &dyn Diagnostic) -> FileRange { - let src = diagnostics.source(); - let root = self.db.parse_or_expand(src.file_id).unwrap(); - let node = src.value.to_node(&root); - original_range(self.db, src.with_value(&node)) - } - - fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator + '_ { - let node = self.find_file(node); - node.ancestors_with_macros(self.db.upcast()).map(|it| it.value) - } - - fn ancestors_at_offset_with_macros( - &self, - node: &SyntaxNode, - offset: TextSize, - ) -> impl Iterator + '_ { - node.token_at_offset(offset) - .map(|token| self.ancestors_with_macros(token.parent())) - .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len()) - } - - fn type_of_expr(&self, expr: &ast::Expr) -> Option { - self.analyze(expr.syntax()).type_of_expr(self.db, &expr) - } - - fn type_of_pat(&self, pat: &ast::Pat) -> Option { - self.analyze(pat.syntax()).type_of_pat(self.db, &pat) - } - - fn type_of_self(&self, param: &ast::SelfParam) -> Option { - self.analyze(param.syntax()).type_of_self(self.db, ¶m) - } - - fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option { - self.analyze(call.syntax()).resolve_method_call(self.db, call) - } - - fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option { - // FIXME: this erases Substs - let func = 
self.resolve_method_call(call)?; - let ty = self.db.value_ty(func.into()); - let resolver = self.analyze(call.syntax()).resolver; - let ty = Type::new_with_resolver(self.db, &resolver, ty.value)?; - let mut res = ty.as_callable(self.db)?; - res.is_bound_method = true; - Some(res) - } - - fn resolve_field(&self, field: &ast::FieldExpr) -> Option { - self.analyze(field.syntax()).resolve_field(self.db, field) - } - - fn resolve_record_field(&self, field: &ast::RecordExprField) -> Option<(Field, Option)> { - self.analyze(field.syntax()).resolve_record_field(self.db, field) - } - - fn resolve_record_field_pat(&self, field: &ast::RecordPatField) -> Option { - self.analyze(field.syntax()).resolve_record_field_pat(self.db, field) - } - - fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option { - let sa = self.analyze(macro_call.syntax()); - let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call); - sa.resolve_macro_call(self.db, macro_call) - } - - fn resolve_path(&self, path: &ast::Path) -> Option { - self.analyze(path.syntax()).resolve_path(self.db, path) - } - - fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option { - self.analyze(record_lit.syntax()).resolve_variant(self.db, record_lit) - } - - fn lower_path(&self, path: &ast::Path) -> Option { - let src = self.find_file(path.syntax().clone()); - Path::from_src(path.clone(), &Hygiene::new(self.db.upcast(), src.file_id.into())) - } - - fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option { - self.analyze(pat.syntax()).resolve_bind_pat_to_const(self.db, pat) - } - - fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> { - self.analyze(literal.syntax()) - .record_literal_missing_fields(self.db, literal) - .unwrap_or_default() - } - - fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> { - self.analyze(pattern.syntax()) - .record_pattern_missing_fields(self.db, pattern) - 
.unwrap_or_default() - } - - fn with_ctx T, T>(&self, f: F) -> T { - let mut cache = self.s2d_cache.borrow_mut(); - let mut ctx = SourceToDefCtx { db: self.db, cache: &mut *cache }; - f(&mut ctx) - } - - fn to_module_def(&self, file: FileId) -> Option { - self.with_ctx(|ctx| ctx.file_to_def(file)).map(Module::from) - } - - fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db> { - let node = self.find_file(node.clone()); - let resolver = self.analyze2(node.as_ref(), None).resolver; - SemanticsScope { db: self.db, resolver } - } - - fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> { - let node = self.find_file(node.clone()); - let resolver = self.analyze2(node.as_ref(), Some(offset)).resolver; - SemanticsScope { db: self.db, resolver } - } - - fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> { - let resolver = def.id.resolver(self.db.upcast()); - SemanticsScope { db: self.db, resolver } - } - - fn analyze(&self, node: &SyntaxNode) -> SourceAnalyzer { - let src = self.find_file(node.clone()); - self.analyze2(src.as_ref(), None) - } - - fn analyze2(&self, src: InFile<&SyntaxNode>, offset: Option) -> SourceAnalyzer { - let _p = profile("Semantics::analyze2"); - - let container = match self.with_ctx(|ctx| ctx.find_container(src)) { - Some(it) => it, - None => return SourceAnalyzer::new_for_resolver(Resolver::default(), src), - }; - - let resolver = match container { - ChildContainer::DefWithBodyId(def) => { - return SourceAnalyzer::new_for_body(self.db, def, src, offset) - } - ChildContainer::TraitId(it) => it.resolver(self.db.upcast()), - ChildContainer::ImplId(it) => it.resolver(self.db.upcast()), - ChildContainer::ModuleId(it) => it.resolver(self.db.upcast()), - ChildContainer::EnumId(it) => it.resolver(self.db.upcast()), - ChildContainer::VariantId(it) => it.resolver(self.db.upcast()), - ChildContainer::TypeAliasId(it) => it.resolver(self.db.upcast()), - ChildContainer::GenericDefId(it) => 
it.resolver(self.db.upcast()), - }; - SourceAnalyzer::new_for_resolver(resolver, src) - } - - fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) { - assert!(root_node.parent().is_none()); - let mut cache = self.cache.borrow_mut(); - let prev = cache.insert(root_node, file_id); - assert!(prev == None || prev == Some(file_id)) - } - - fn assert_contains_node(&self, node: &SyntaxNode) { - self.find_file(node.clone()); - } - - fn lookup(&self, root_node: &SyntaxNode) -> Option { - let cache = self.cache.borrow(); - cache.get(root_node).copied() - } - - fn find_file(&self, node: SyntaxNode) -> InFile { - let root_node = find_root(&node); - let file_id = self.lookup(&root_node).unwrap_or_else(|| { - panic!( - "\n\nFailed to lookup {:?} in this Semantics.\n\ - Make sure to use only query nodes, derived from this instance of Semantics.\n\ - root node: {:?}\n\ - known nodes: {}\n\n", - node, - root_node, - self.cache - .borrow() - .keys() - .map(|it| format!("{:?}", it)) - .collect::>() - .join(", ") - ) - }); - InFile::new(file_id, node) - } -} - -pub trait ToDef: AstNode + Clone { - type Def; - - fn to_def(sema: &SemanticsImpl, src: InFile) -> Option; -} - -macro_rules! 
to_def_impls { - ($(($def:path, $ast:path, $meth:ident)),* ,) => {$( - impl ToDef for $ast { - type Def = $def; - fn to_def(sema: &SemanticsImpl, src: InFile) -> Option { - sema.with_ctx(|ctx| ctx.$meth(src)).map(<$def>::from) - } - } - )*} -} - -to_def_impls![ - (crate::Module, ast::Module, module_to_def), - (crate::Struct, ast::Struct, struct_to_def), - (crate::Enum, ast::Enum, enum_to_def), - (crate::Union, ast::Union, union_to_def), - (crate::Trait, ast::Trait, trait_to_def), - (crate::ImplDef, ast::Impl, impl_to_def), - (crate::TypeAlias, ast::TypeAlias, type_alias_to_def), - (crate::Const, ast::Const, const_to_def), - (crate::Static, ast::Static, static_to_def), - (crate::Function, ast::Fn, fn_to_def), - (crate::Field, ast::RecordField, record_field_to_def), - (crate::Field, ast::TupleField, tuple_field_to_def), - (crate::EnumVariant, ast::Variant, enum_variant_to_def), - (crate::TypeParam, ast::TypeParam, type_param_to_def), - (crate::MacroDef, ast::MacroCall, macro_call_to_def), // this one is dubious, not all calls are macros - (crate::Local, ast::IdentPat, bind_pat_to_def), -]; - -fn find_root(node: &SyntaxNode) -> SyntaxNode { - node.ancestors().last().unwrap() -} - -#[derive(Debug)] -pub struct SemanticsScope<'a> { - pub db: &'a dyn HirDatabase, - resolver: Resolver, -} - -impl<'a> SemanticsScope<'a> { - pub fn module(&self) -> Option { - Some(Module { id: self.resolver.module()? }) - } - - /// Note: `FxHashSet` should be treated as an opaque type, passed into `Type - // FIXME: rename to visible_traits to not repeat scope? 
- pub fn traits_in_scope(&self) -> FxHashSet { - let resolver = &self.resolver; - resolver.traits_in_scope(self.db.upcast()) - } - - pub fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef)) { - let resolver = &self.resolver; - - resolver.process_all_names(self.db.upcast(), &mut |name, def| { - let def = match def { - resolver::ScopeDef::PerNs(it) => { - let items = ScopeDef::all_items(it); - for item in items { - f(name.clone(), item); - } - return; - } - resolver::ScopeDef::ImplSelfType(it) => ScopeDef::ImplSelfType(it.into()), - resolver::ScopeDef::AdtSelfType(it) => ScopeDef::AdtSelfType(it.into()), - resolver::ScopeDef::GenericParam(id) => ScopeDef::GenericParam(TypeParam { id }), - resolver::ScopeDef::Local(pat_id) => { - let parent = resolver.body_owner().unwrap().into(); - ScopeDef::Local(Local { parent, pat_id }) - } - }; - f(name, def) - }) - } - - pub fn resolve_hir_path(&self, path: &Path) -> Option { - resolve_hir_path(self.db, &self.resolver, path) - } - - /// Resolves a path where we know it is a qualifier of another path. - /// - /// For example, if we have: - /// ``` - /// mod my { - /// pub mod foo { - /// struct Bar; - /// } - /// - /// pub fn foo() {} - /// } - /// ``` - /// then we know that `foo` in `my::foo::Bar` refers to the module, not the function. - pub fn resolve_hir_path_qualifier(&self, path: &Path) -> Option { - resolve_hir_path_qualifier(self.db, &self.resolver, path) - } -} - -// FIXME: Change `HasSource` trait to work with `Semantics` and remove this? 
-pub fn original_range(db: &dyn HirDatabase, node: InFile<&SyntaxNode>) -> FileRange { - if let Some(range) = original_range_opt(db, node) { - let original_file = range.file_id.original_file(db.upcast()); - if range.file_id == original_file.into() { - return FileRange { file_id: original_file, range: range.value }; - } - - log::error!("Fail to mapping up more for {:?}", range); - return FileRange { file_id: range.file_id.original_file(db.upcast()), range: range.value }; - } - - // Fall back to whole macro call - if let Some(expansion) = node.file_id.expansion_info(db.upcast()) { - if let Some(call_node) = expansion.call_node() { - return FileRange { - file_id: call_node.file_id.original_file(db.upcast()), - range: call_node.value.text_range(), - }; - } - } - - FileRange { file_id: node.file_id.original_file(db.upcast()), range: node.value.text_range() } -} - -fn original_range_opt( - db: &dyn HirDatabase, - node: InFile<&SyntaxNode>, -) -> Option> { - let expansion = node.file_id.expansion_info(db.upcast())?; - - // the input node has only one token ? - let single = skip_trivia_token(node.value.first_token()?, Direction::Next)? - == skip_trivia_token(node.value.last_token()?, Direction::Prev)?; - - Some(node.value.descendants().find_map(|it| { - let first = skip_trivia_token(it.first_token()?, Direction::Next)?; - let first = ascend_call_token(db, &expansion, node.with_value(first))?; - - let last = skip_trivia_token(it.last_token()?, Direction::Prev)?; - let last = ascend_call_token(db, &expansion, node.with_value(last))?; - - if (!single && first == last) || (first.file_id != last.file_id) { - return None; - } - - Some(first.with_value(first.value.text_range().cover(last.value.text_range()))) - })?) 
-} - -fn ascend_call_token( - db: &dyn HirDatabase, - expansion: &ExpansionInfo, - token: InFile, -) -> Option> { - let (mapped, origin) = expansion.map_token_up(token.as_ref())?; - if origin != Origin::Call { - return None; - } - if let Some(info) = mapped.file_id.expansion_info(db.upcast()) { - return ascend_call_token(db, &info, mapped); - } - Some(mapped) -} diff --git a/crates/ra_hir/src/semantics/source_to_def.rs b/crates/ra_hir/src/semantics/source_to_def.rs deleted file mode 100644 index 863e8e5ff7..0000000000 --- a/crates/ra_hir/src/semantics/source_to_def.rs +++ /dev/null @@ -1,276 +0,0 @@ -//! Maps *syntax* of various definitions to their semantic ids. - -use hir_def::{ - child_by_source::ChildBySource, - dyn_map::DynMap, - expr::PatId, - keys::{self, Key}, - ConstId, DefWithBodyId, EnumId, EnumVariantId, FieldId, FunctionId, GenericDefId, ImplId, - ModuleId, StaticId, StructId, TraitId, TypeAliasId, TypeParamId, UnionId, VariantId, -}; -use hir_expand::{name::AsName, AstId, MacroDefKind}; -use ra_db::FileId; -use ra_prof::profile; -use ra_syntax::{ - ast::{self, NameOwner}, - match_ast, AstNode, SyntaxNode, -}; -use rustc_hash::FxHashMap; -use stdx::impl_from; - -use crate::{db::HirDatabase, InFile, MacroDefId}; - -pub(super) type SourceToDefCache = FxHashMap; - -pub(super) struct SourceToDefCtx<'a, 'b> { - pub(super) db: &'b dyn HirDatabase, - pub(super) cache: &'a mut SourceToDefCache, -} - -impl SourceToDefCtx<'_, '_> { - pub(super) fn file_to_def(&mut self, file: FileId) -> Option { - let _p = profile("SourceBinder::to_module_def"); - let (krate, local_id) = self.db.relevant_crates(file).iter().find_map(|&crate_id| { - let crate_def_map = self.db.crate_def_map(crate_id); - let local_id = crate_def_map.modules_for_file(file).next()?; - Some((crate_id, local_id)) - })?; - Some(ModuleId { krate, local_id }) - } - - pub(super) fn module_to_def(&mut self, src: InFile) -> Option { - let _p = profile("module_to_def"); - let parent_declaration = src - 
.as_ref() - .map(|it| it.syntax()) - .cloned() - .ancestors_with_macros(self.db.upcast()) - .skip(1) - .find_map(|it| { - let m = ast::Module::cast(it.value.clone())?; - Some(it.with_value(m)) - }); - - let parent_module = match parent_declaration { - Some(parent_declaration) => self.module_to_def(parent_declaration), - None => { - let file_id = src.file_id.original_file(self.db.upcast()); - self.file_to_def(file_id) - } - }?; - - let child_name = src.value.name()?.as_name(); - let def_map = self.db.crate_def_map(parent_module.krate); - let child_id = *def_map[parent_module.local_id].children.get(&child_name)?; - Some(ModuleId { krate: parent_module.krate, local_id: child_id }) - } - - pub(super) fn trait_to_def(&mut self, src: InFile) -> Option { - self.to_def(src, keys::TRAIT) - } - pub(super) fn impl_to_def(&mut self, src: InFile) -> Option { - self.to_def(src, keys::IMPL) - } - pub(super) fn fn_to_def(&mut self, src: InFile) -> Option { - self.to_def(src, keys::FUNCTION) - } - pub(super) fn struct_to_def(&mut self, src: InFile) -> Option { - self.to_def(src, keys::STRUCT) - } - pub(super) fn enum_to_def(&mut self, src: InFile) -> Option { - self.to_def(src, keys::ENUM) - } - pub(super) fn union_to_def(&mut self, src: InFile) -> Option { - self.to_def(src, keys::UNION) - } - pub(super) fn static_to_def(&mut self, src: InFile) -> Option { - self.to_def(src, keys::STATIC) - } - pub(super) fn const_to_def(&mut self, src: InFile) -> Option { - self.to_def(src, keys::CONST) - } - pub(super) fn type_alias_to_def(&mut self, src: InFile) -> Option { - self.to_def(src, keys::TYPE_ALIAS) - } - pub(super) fn record_field_to_def(&mut self, src: InFile) -> Option { - self.to_def(src, keys::RECORD_FIELD) - } - pub(super) fn tuple_field_to_def(&mut self, src: InFile) -> Option { - self.to_def(src, keys::TUPLE_FIELD) - } - pub(super) fn enum_variant_to_def( - &mut self, - src: InFile, - ) -> Option { - self.to_def(src, keys::VARIANT) - } - pub(super) fn bind_pat_to_def( - &mut 
self, - src: InFile, - ) -> Option<(DefWithBodyId, PatId)> { - let container = self.find_pat_container(src.as_ref().map(|it| it.syntax()))?; - let (_body, source_map) = self.db.body_with_source_map(container); - let src = src.map(ast::Pat::from); - let pat_id = source_map.node_pat(src.as_ref())?; - Some((container, pat_id)) - } - - fn to_def( - &mut self, - src: InFile, - key: Key, - ) -> Option { - let container = self.find_container(src.as_ref().map(|it| it.syntax()))?; - let db = self.db; - let dyn_map = - &*self.cache.entry(container).or_insert_with(|| container.child_by_source(db)); - dyn_map[key].get(&src).copied() - } - - pub(super) fn type_param_to_def(&mut self, src: InFile) -> Option { - let container: ChildContainer = - self.find_type_param_container(src.as_ref().map(|it| it.syntax()))?.into(); - let db = self.db; - let dyn_map = - &*self.cache.entry(container).or_insert_with(|| container.child_by_source(db)); - dyn_map[keys::TYPE_PARAM].get(&src).copied() - } - - // FIXME: use DynMap as well? - pub(super) fn macro_call_to_def(&mut self, src: InFile) -> Option { - let kind = MacroDefKind::Declarative; - let file_id = src.file_id.original_file(self.db.upcast()); - let krate = self.file_to_def(file_id)?.krate; - let file_ast_id = self.db.ast_id_map(src.file_id).ast_id(&src.value); - let ast_id = Some(AstId::new(src.file_id, file_ast_id)); - Some(MacroDefId { krate: Some(krate), ast_id, kind, local_inner: false }) - } - - pub(super) fn find_container(&mut self, src: InFile<&SyntaxNode>) -> Option { - for container in src.cloned().ancestors_with_macros(self.db.upcast()).skip(1) { - let res: ChildContainer = match_ast! 
{ - match (container.value) { - ast::Module(it) => { - let def = self.module_to_def(container.with_value(it))?; - def.into() - }, - ast::Trait(it) => { - let def = self.trait_to_def(container.with_value(it))?; - def.into() - }, - ast::Impl(it) => { - let def = self.impl_to_def(container.with_value(it))?; - def.into() - }, - ast::Fn(it) => { - let def = self.fn_to_def(container.with_value(it))?; - DefWithBodyId::from(def).into() - }, - ast::Struct(it) => { - let def = self.struct_to_def(container.with_value(it))?; - VariantId::from(def).into() - }, - ast::Enum(it) => { - let def = self.enum_to_def(container.with_value(it))?; - def.into() - }, - ast::Union(it) => { - let def = self.union_to_def(container.with_value(it))?; - VariantId::from(def).into() - }, - ast::Static(it) => { - let def = self.static_to_def(container.with_value(it))?; - DefWithBodyId::from(def).into() - }, - ast::Const(it) => { - let def = self.const_to_def(container.with_value(it))?; - DefWithBodyId::from(def).into() - }, - ast::TypeAlias(it) => { - let def = self.type_alias_to_def(container.with_value(it))?; - def.into() - }, - _ => continue, - } - }; - return Some(res); - } - - let def = self.file_to_def(src.file_id.original_file(self.db.upcast()))?; - Some(def.into()) - } - - fn find_type_param_container(&mut self, src: InFile<&SyntaxNode>) -> Option { - for container in src.cloned().ancestors_with_macros(self.db.upcast()).skip(1) { - let res: GenericDefId = match_ast! 
{ - match (container.value) { - ast::Fn(it) => self.fn_to_def(container.with_value(it))?.into(), - ast::Struct(it) => self.struct_to_def(container.with_value(it))?.into(), - ast::Enum(it) => self.enum_to_def(container.with_value(it))?.into(), - ast::Trait(it) => self.trait_to_def(container.with_value(it))?.into(), - ast::TypeAlias(it) => self.type_alias_to_def(container.with_value(it))?.into(), - ast::Impl(it) => self.impl_to_def(container.with_value(it))?.into(), - _ => continue, - } - }; - return Some(res); - } - None - } - - fn find_pat_container(&mut self, src: InFile<&SyntaxNode>) -> Option { - for container in src.cloned().ancestors_with_macros(self.db.upcast()).skip(1) { - let res: DefWithBodyId = match_ast! { - match (container.value) { - ast::Const(it) => self.const_to_def(container.with_value(it))?.into(), - ast::Static(it) => self.static_to_def(container.with_value(it))?.into(), - ast::Fn(it) => self.fn_to_def(container.with_value(it))?.into(), - _ => continue, - } - }; - return Some(res); - } - None - } -} - -#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] -pub(crate) enum ChildContainer { - DefWithBodyId(DefWithBodyId), - ModuleId(ModuleId), - TraitId(TraitId), - ImplId(ImplId), - EnumId(EnumId), - VariantId(VariantId), - TypeAliasId(TypeAliasId), - /// XXX: this might be the same def as, for example an `EnumId`. However, - /// here the children generic parameters, and not, eg enum variants. - GenericDefId(GenericDefId), -} -impl_from! 
{ - DefWithBodyId, - ModuleId, - TraitId, - ImplId, - EnumId, - VariantId, - TypeAliasId, - GenericDefId - for ChildContainer -} - -impl ChildContainer { - fn child_by_source(self, db: &dyn HirDatabase) -> DynMap { - let db = db.upcast(); - match self { - ChildContainer::DefWithBodyId(it) => it.child_by_source(db), - ChildContainer::ModuleId(it) => it.child_by_source(db), - ChildContainer::TraitId(it) => it.child_by_source(db), - ChildContainer::ImplId(it) => it.child_by_source(db), - ChildContainer::EnumId(it) => it.child_by_source(db), - ChildContainer::VariantId(it) => it.child_by_source(db), - ChildContainer::TypeAliasId(_) => DynMap::default(), - ChildContainer::GenericDefId(it) => it.child_by_source(db), - } - } -} diff --git a/crates/ra_hir/src/source_analyzer.rs b/crates/ra_hir/src/source_analyzer.rs deleted file mode 100644 index d0cb62ef01..0000000000 --- a/crates/ra_hir/src/source_analyzer.rs +++ /dev/null @@ -1,535 +0,0 @@ -//! Lookup hir elements using positions in the source code. This is a lossy -//! transformation: in general, a single source might correspond to several -//! modules, functions, etc, due to macros, cfgs and `#[path=]` attributes on -//! modules. -//! -//! So, this modules should not be used during hir construction, it exists -//! purely for "IDE needs". 
-use std::{iter::once, sync::Arc}; - -use hir_def::{ - body::{ - scope::{ExprScopes, ScopeId}, - Body, BodySourceMap, - }, - expr::{ExprId, Pat, PatId}, - resolver::{resolver_for_scope, Resolver, TypeNs, ValueNs}, - AsMacroCall, DefWithBodyId, FieldId, FunctionId, LocalFieldId, VariantId, -}; -use hir_expand::{hygiene::Hygiene, name::AsName, HirFileId, InFile}; -use hir_ty::{ - diagnostics::{record_literal_missing_fields, record_pattern_missing_fields}, - InferenceResult, Substs, Ty, -}; -use ra_syntax::{ - ast::{self, AstNode}, - SyntaxNode, TextRange, TextSize, -}; - -use crate::{ - db::HirDatabase, semantics::PathResolution, Adt, Const, EnumVariant, Field, Function, Local, - MacroDef, ModPath, ModuleDef, Path, PathKind, Static, Struct, Trait, Type, TypeAlias, - TypeParam, -}; -use ra_db::CrateId; - -/// `SourceAnalyzer` is a convenience wrapper which exposes HIR API in terms of -/// original source files. It should not be used inside the HIR itself. -#[derive(Debug)] -pub(crate) struct SourceAnalyzer { - file_id: HirFileId, - pub(crate) resolver: Resolver, - body: Option>, - body_source_map: Option>, - infer: Option>, - scopes: Option>, -} - -impl SourceAnalyzer { - pub(crate) fn new_for_body( - db: &dyn HirDatabase, - def: DefWithBodyId, - node: InFile<&SyntaxNode>, - offset: Option, - ) -> SourceAnalyzer { - let (body, source_map) = db.body_with_source_map(def); - let scopes = db.expr_scopes(def); - let scope = match offset { - None => scope_for(&scopes, &source_map, node), - Some(offset) => scope_for_offset(db, &scopes, &source_map, node.with_value(offset)), - }; - let resolver = resolver_for_scope(db.upcast(), def, scope); - SourceAnalyzer { - resolver, - body: Some(body), - body_source_map: Some(source_map), - infer: Some(db.infer(def)), - scopes: Some(scopes), - file_id: node.file_id, - } - } - - pub(crate) fn new_for_resolver( - resolver: Resolver, - node: InFile<&SyntaxNode>, - ) -> SourceAnalyzer { - SourceAnalyzer { - resolver, - body: None, - 
body_source_map: None, - infer: None, - scopes: None, - file_id: node.file_id, - } - } - - fn expr_id(&self, db: &dyn HirDatabase, expr: &ast::Expr) -> Option { - let src = match expr { - ast::Expr::MacroCall(call) => { - self.expand_expr(db, InFile::new(self.file_id, call.clone()))? - } - _ => InFile::new(self.file_id, expr.clone()), - }; - let sm = self.body_source_map.as_ref()?; - sm.node_expr(src.as_ref()) - } - - fn pat_id(&self, pat: &ast::Pat) -> Option { - // FIXME: macros, see `expr_id` - let src = InFile { file_id: self.file_id, value: pat }; - self.body_source_map.as_ref()?.node_pat(src) - } - - fn expand_expr( - &self, - db: &dyn HirDatabase, - expr: InFile, - ) -> Option> { - let macro_file = self.body_source_map.as_ref()?.node_macro_file(expr.as_ref())?; - let expanded = db.parse_or_expand(macro_file)?; - - let res = match ast::MacroCall::cast(expanded.clone()) { - Some(call) => self.expand_expr(db, InFile::new(macro_file, call))?, - _ => InFile::new(macro_file, ast::Expr::cast(expanded)?), - }; - Some(res) - } - - pub(crate) fn type_of_expr(&self, db: &dyn HirDatabase, expr: &ast::Expr) -> Option { - let expr_id = self.expr_id(db, expr)?; - let ty = self.infer.as_ref()?[expr_id].clone(); - Type::new_with_resolver(db, &self.resolver, ty) - } - - pub(crate) fn type_of_pat(&self, db: &dyn HirDatabase, pat: &ast::Pat) -> Option { - let pat_id = self.pat_id(pat)?; - let ty = self.infer.as_ref()?[pat_id].clone(); - Type::new_with_resolver(db, &self.resolver, ty) - } - - pub(crate) fn type_of_self( - &self, - db: &dyn HirDatabase, - param: &ast::SelfParam, - ) -> Option { - let src = InFile { file_id: self.file_id, value: param }; - let pat_id = self.body_source_map.as_ref()?.node_self_param(src)?; - let ty = self.infer.as_ref()?[pat_id].clone(); - Type::new_with_resolver(db, &self.resolver, ty) - } - - pub(crate) fn resolve_method_call( - &self, - db: &dyn HirDatabase, - call: &ast::MethodCallExpr, - ) -> Option { - let expr_id = self.expr_id(db, 
&call.clone().into())?; - self.infer.as_ref()?.method_resolution(expr_id) - } - - pub(crate) fn resolve_field( - &self, - db: &dyn HirDatabase, - field: &ast::FieldExpr, - ) -> Option { - let expr_id = self.expr_id(db, &field.clone().into())?; - self.infer.as_ref()?.field_resolution(expr_id).map(|it| it.into()) - } - - pub(crate) fn resolve_record_field( - &self, - db: &dyn HirDatabase, - field: &ast::RecordExprField, - ) -> Option<(Field, Option)> { - let expr = field.expr()?; - let expr_id = self.expr_id(db, &expr)?; - let local = if field.name_ref().is_some() { - None - } else { - let local_name = field.field_name()?.as_name(); - let path = ModPath::from_segments(PathKind::Plain, once(local_name)); - match self.resolver.resolve_path_in_value_ns_fully(db.upcast(), &path) { - Some(ValueNs::LocalBinding(pat_id)) => { - Some(Local { pat_id, parent: self.resolver.body_owner()? }) - } - _ => None, - } - }; - let struct_field = self.infer.as_ref()?.record_field_resolution(expr_id)?; - Some((struct_field.into(), local)) - } - - pub(crate) fn resolve_record_field_pat( - &self, - _db: &dyn HirDatabase, - field: &ast::RecordPatField, - ) -> Option { - let pat_id = self.pat_id(&field.pat()?)?; - let struct_field = self.infer.as_ref()?.record_field_pat_resolution(pat_id)?; - Some(struct_field.into()) - } - - pub(crate) fn resolve_macro_call( - &self, - db: &dyn HirDatabase, - macro_call: InFile<&ast::MacroCall>, - ) -> Option { - let hygiene = Hygiene::new(db.upcast(), macro_call.file_id); - let path = macro_call.value.path().and_then(|ast| Path::from_src(ast, &hygiene))?; - self.resolver.resolve_path_as_macro(db.upcast(), path.mod_path()).map(|it| it.into()) - } - - pub(crate) fn resolve_bind_pat_to_const( - &self, - db: &dyn HirDatabase, - pat: &ast::IdentPat, - ) -> Option { - let pat_id = self.pat_id(&pat.clone().into())?; - let body = self.body.as_ref()?; - let path = match &body[pat_id] { - Pat::Path(path) => path, - _ => return None, - }; - let res = 
resolve_hir_path(db, &self.resolver, &path)?; - match res { - PathResolution::Def(def) => Some(def), - _ => None, - } - } - - pub(crate) fn resolve_path( - &self, - db: &dyn HirDatabase, - path: &ast::Path, - ) -> Option { - if let Some(path_expr) = path.syntax().parent().and_then(ast::PathExpr::cast) { - let expr_id = self.expr_id(db, &path_expr.into())?; - if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_expr(expr_id) { - return Some(PathResolution::AssocItem(assoc.into())); - } - if let Some(VariantId::EnumVariantId(variant)) = - self.infer.as_ref()?.variant_resolution_for_expr(expr_id) - { - return Some(PathResolution::Def(ModuleDef::EnumVariant(variant.into()))); - } - } - - if let Some(path_pat) = path.syntax().parent().and_then(ast::PathPat::cast) { - let pat_id = self.pat_id(&path_pat.into())?; - if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_pat(pat_id) { - return Some(PathResolution::AssocItem(assoc.into())); - } - if let Some(VariantId::EnumVariantId(variant)) = - self.infer.as_ref()?.variant_resolution_for_pat(pat_id) - { - return Some(PathResolution::Def(ModuleDef::EnumVariant(variant.into()))); - } - } - - if let Some(rec_lit) = path.syntax().parent().and_then(ast::RecordExpr::cast) { - let expr_id = self.expr_id(db, &rec_lit.into())?; - if let Some(VariantId::EnumVariantId(variant)) = - self.infer.as_ref()?.variant_resolution_for_expr(expr_id) - { - return Some(PathResolution::Def(ModuleDef::EnumVariant(variant.into()))); - } - } - - if let Some(rec_pat) = path.syntax().parent().and_then(ast::RecordPat::cast) { - let pat_id = self.pat_id(&rec_pat.into())?; - if let Some(VariantId::EnumVariantId(variant)) = - self.infer.as_ref()?.variant_resolution_for_pat(pat_id) - { - return Some(PathResolution::Def(ModuleDef::EnumVariant(variant.into()))); - } - } - - // This must be a normal source file rather than macro file. 
- let hir_path = - crate::Path::from_src(path.clone(), &Hygiene::new(db.upcast(), self.file_id))?; - - // Case where path is a qualifier of another path, e.g. foo::bar::Baz where we - // trying to resolve foo::bar. - if let Some(outer_path) = path.syntax().parent().and_then(ast::Path::cast) { - if let Some(qualifier) = outer_path.qualifier() { - if path == &qualifier { - return resolve_hir_path_qualifier(db, &self.resolver, &hir_path); - } - } - } - - resolve_hir_path(db, &self.resolver, &hir_path) - } - - pub(crate) fn record_literal_missing_fields( - &self, - db: &dyn HirDatabase, - literal: &ast::RecordExpr, - ) -> Option> { - let krate = self.resolver.krate()?; - let body = self.body.as_ref()?; - let infer = self.infer.as_ref()?; - - let expr_id = self.expr_id(db, &literal.clone().into())?; - let substs = match &infer.type_of_expr[expr_id] { - Ty::Apply(a_ty) => &a_ty.parameters, - _ => return None, - }; - - let (variant, missing_fields, _exhaustive) = - record_literal_missing_fields(db, infer, expr_id, &body[expr_id])?; - let res = self.missing_fields(db, krate, substs, variant, missing_fields); - Some(res) - } - - pub(crate) fn record_pattern_missing_fields( - &self, - db: &dyn HirDatabase, - pattern: &ast::RecordPat, - ) -> Option> { - let krate = self.resolver.krate()?; - let body = self.body.as_ref()?; - let infer = self.infer.as_ref()?; - - let pat_id = self.pat_id(&pattern.clone().into())?; - let substs = match &infer.type_of_pat[pat_id] { - Ty::Apply(a_ty) => &a_ty.parameters, - _ => return None, - }; - - let (variant, missing_fields, _exhaustive) = - record_pattern_missing_fields(db, infer, pat_id, &body[pat_id])?; - let res = self.missing_fields(db, krate, substs, variant, missing_fields); - Some(res) - } - - fn missing_fields( - &self, - db: &dyn HirDatabase, - krate: CrateId, - substs: &Substs, - variant: VariantId, - missing_fields: Vec, - ) -> Vec<(Field, Type)> { - let field_types = db.field_types(variant); - - missing_fields - .into_iter() - 
.map(|local_id| { - let field = FieldId { parent: variant, local_id }; - let ty = field_types[local_id].clone().subst(substs); - (field.into(), Type::new_with_resolver_inner(db, krate, &self.resolver, ty)) - }) - .collect() - } - - pub(crate) fn expand( - &self, - db: &dyn HirDatabase, - macro_call: InFile<&ast::MacroCall>, - ) -> Option { - let krate = self.resolver.krate()?; - let macro_call_id = macro_call.as_call_id(db.upcast(), krate, |path| { - self.resolver.resolve_path_as_macro(db.upcast(), &path) - })?; - Some(macro_call_id.as_file()).filter(|it| it.expansion_level(db.upcast()) < 64) - } - - pub(crate) fn resolve_variant( - &self, - db: &dyn HirDatabase, - record_lit: ast::RecordExpr, - ) -> Option { - let infer = self.infer.as_ref()?; - let expr_id = self.expr_id(db, &record_lit.into())?; - infer.variant_resolution_for_expr(expr_id) - } -} - -fn scope_for( - scopes: &ExprScopes, - source_map: &BodySourceMap, - node: InFile<&SyntaxNode>, -) -> Option { - node.value - .ancestors() - .filter_map(ast::Expr::cast) - .filter_map(|it| source_map.node_expr(InFile::new(node.file_id, &it))) - .find_map(|it| scopes.scope_for(it)) -} - -fn scope_for_offset( - db: &dyn HirDatabase, - scopes: &ExprScopes, - source_map: &BodySourceMap, - offset: InFile, -) -> Option { - scopes - .scope_by_expr() - .iter() - .filter_map(|(id, scope)| { - let source = source_map.expr_syntax(*id).ok()?; - // FIXME: correctly handle macro expansion - if source.file_id != offset.file_id { - return None; - } - let root = source.file_syntax(db.upcast()); - let node = source.value.to_node(&root); - Some((node.syntax().text_range(), scope)) - }) - // find containing scope - .min_by_key(|(expr_range, _scope)| { - ( - !(expr_range.start() <= offset.value && offset.value <= expr_range.end()), - expr_range.len(), - ) - }) - .map(|(expr_range, scope)| { - adjust(db, scopes, source_map, expr_range, offset).unwrap_or(*scope) - }) -} - -// XXX: during completion, cursor might be outside of any 
particular -// expression. Try to figure out the correct scope... -fn adjust( - db: &dyn HirDatabase, - scopes: &ExprScopes, - source_map: &BodySourceMap, - expr_range: TextRange, - offset: InFile, -) -> Option { - let child_scopes = scopes - .scope_by_expr() - .iter() - .filter_map(|(id, scope)| { - let source = source_map.expr_syntax(*id).ok()?; - // FIXME: correctly handle macro expansion - if source.file_id != offset.file_id { - return None; - } - let root = source.file_syntax(db.upcast()); - let node = source.value.to_node(&root); - Some((node.syntax().text_range(), scope)) - }) - .filter(|&(range, _)| { - range.start() <= offset.value && expr_range.contains_range(range) && range != expr_range - }); - - child_scopes - .max_by(|&(r1, _), &(r2, _)| { - if r1.contains_range(r2) { - std::cmp::Ordering::Greater - } else if r2.contains_range(r1) { - std::cmp::Ordering::Less - } else { - r1.start().cmp(&r2.start()) - } - }) - .map(|(_ptr, scope)| *scope) -} - -pub(crate) fn resolve_hir_path( - db: &dyn HirDatabase, - resolver: &Resolver, - path: &crate::Path, -) -> Option { - let types = - resolver.resolve_path_in_type_ns_fully(db.upcast(), path.mod_path()).map(|ty| match ty { - TypeNs::SelfType(it) => PathResolution::SelfType(it.into()), - TypeNs::GenericParam(id) => PathResolution::TypeParam(TypeParam { id }), - TypeNs::AdtSelfType(it) | TypeNs::AdtId(it) => { - PathResolution::Def(Adt::from(it).into()) - } - TypeNs::EnumVariantId(it) => PathResolution::Def(EnumVariant::from(it).into()), - TypeNs::TypeAliasId(it) => PathResolution::Def(TypeAlias::from(it).into()), - TypeNs::BuiltinType(it) => PathResolution::Def(it.into()), - TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()), - }); - - let body_owner = resolver.body_owner(); - let values = - resolver.resolve_path_in_value_ns_fully(db.upcast(), path.mod_path()).and_then(|val| { - let res = match val { - ValueNs::LocalBinding(pat_id) => { - let var = Local { parent: body_owner?.into(), pat_id }; - 
PathResolution::Local(var) - } - ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()), - ValueNs::ConstId(it) => PathResolution::Def(Const::from(it).into()), - ValueNs::StaticId(it) => PathResolution::Def(Static::from(it).into()), - ValueNs::StructId(it) => PathResolution::Def(Struct::from(it).into()), - ValueNs::EnumVariantId(it) => PathResolution::Def(EnumVariant::from(it).into()), - ValueNs::ImplSelf(impl_id) => PathResolution::SelfType(impl_id.into()), - }; - Some(res) - }); - - let items = resolver - .resolve_module_path_in_items(db.upcast(), path.mod_path()) - .take_types() - .map(|it| PathResolution::Def(it.into())); - - types.or(values).or(items).or_else(|| { - resolver - .resolve_path_as_macro(db.upcast(), path.mod_path()) - .map(|def| PathResolution::Macro(def.into())) - }) -} - -/// Resolves a path where we know it is a qualifier of another path. -/// -/// For example, if we have: -/// ``` -/// mod my { -/// pub mod foo { -/// struct Bar; -/// } -/// -/// pub fn foo() {} -/// } -/// ``` -/// then we know that `foo` in `my::foo::Bar` refers to the module, not the function. 
-pub(crate) fn resolve_hir_path_qualifier( - db: &dyn HirDatabase, - resolver: &Resolver, - path: &crate::Path, -) -> Option { - let items = resolver - .resolve_module_path_in_items(db.upcast(), path.mod_path()) - .take_types() - .map(|it| PathResolution::Def(it.into())); - - if items.is_some() { - return items; - } - - resolver.resolve_path_in_type_ns_fully(db.upcast(), path.mod_path()).map(|ty| match ty { - TypeNs::SelfType(it) => PathResolution::SelfType(it.into()), - TypeNs::GenericParam(id) => PathResolution::TypeParam(TypeParam { id }), - TypeNs::AdtSelfType(it) | TypeNs::AdtId(it) => PathResolution::Def(Adt::from(it).into()), - TypeNs::EnumVariantId(it) => PathResolution::Def(EnumVariant::from(it).into()), - TypeNs::TypeAliasId(it) => PathResolution::Def(TypeAlias::from(it).into()), - TypeNs::BuiltinType(it) => PathResolution::Def(it.into()), - TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()), - }) -} diff --git a/crates/ra_hir_def/Cargo.toml b/crates/ra_hir_def/Cargo.toml deleted file mode 100644 index d96a86b80f..0000000000 --- a/crates/ra_hir_def/Cargo.toml +++ /dev/null @@ -1,36 +0,0 @@ -[package] -edition = "2018" -name = "ra_hir_def" -version = "0.1.0" -authors = ["rust-analyzer developers"] -license = "MIT OR Apache-2.0" - -[lib] -doctest = false - -[dependencies] -log = "0.4.8" -once_cell = "1.3.1" -rustc-hash = "1.1.0" -either = "1.5.3" -anymap = "0.12.1" -drop_bomb = "0.1.4" -fst = { version = "0.4", default-features = false } -itertools = "0.9.0" -indexmap = "1.4.0" -smallvec = "1.4.0" - -stdx = { path = "../stdx" } - -ra_arena = { path = "../ra_arena" } -ra_db = { path = "../ra_db" } -ra_syntax = { path = "../ra_syntax" } -ra_prof = { path = "../ra_prof" } -hir_expand = { path = "../ra_hir_expand", package = "ra_hir_expand" } -test_utils = { path = "../test_utils" } -mbe = { path = "../ra_mbe", package = "ra_mbe" } -ra_cfg = { path = "../ra_cfg" } -tt = { path = "../ra_tt", package = "ra_tt" } - -[dev-dependencies] -expect = { 
path = "../expect" } diff --git a/crates/ra_hir_def/src/adt.rs b/crates/ra_hir_def/src/adt.rs deleted file mode 100644 index 6cb56a1cd0..0000000000 --- a/crates/ra_hir_def/src/adt.rs +++ /dev/null @@ -1,295 +0,0 @@ -//! Defines hir-level representation of structs, enums and unions - -use std::sync::Arc; - -use either::Either; -use hir_expand::{ - name::{AsName, Name}, - InFile, -}; -use ra_arena::{map::ArenaMap, Arena}; -use ra_syntax::ast::{self, NameOwner, VisibilityOwner}; - -use crate::{ - body::{CfgExpander, LowerCtx}, - db::DefDatabase, - item_tree::{Field, Fields, ItemTree}, - src::HasChildSource, - src::HasSource, - trace::Trace, - type_ref::TypeRef, - visibility::RawVisibility, - EnumId, HasModule, LocalEnumVariantId, LocalFieldId, Lookup, ModuleId, StructId, UnionId, - VariantId, -}; -use ra_cfg::CfgOptions; - -/// Note that we use `StructData` for unions as well! -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct StructData { - pub name: Name, - pub variant_data: Arc, -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct EnumData { - pub name: Name, - pub variants: Arena, -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct EnumVariantData { - pub name: Name, - pub variant_data: Arc, -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum VariantData { - Record(Arena), - Tuple(Arena), - Unit, -} - -/// A single field of an enum variant or struct -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct FieldData { - pub name: Name, - pub type_ref: TypeRef, - pub visibility: RawVisibility, -} - -impl StructData { - pub(crate) fn struct_data_query(db: &dyn DefDatabase, id: StructId) -> Arc { - let loc = id.lookup(db); - let item_tree = db.item_tree(loc.id.file_id); - let cfg_options = db.crate_graph()[loc.container.module(db).krate].cfg_options.clone(); - - let strukt = &item_tree[loc.id.value]; - let variant_data = lower_fields(&item_tree, &cfg_options, &strukt.fields); - - Arc::new(StructData { name: strukt.name.clone(), variant_data: 
Arc::new(variant_data) }) - } - pub(crate) fn union_data_query(db: &dyn DefDatabase, id: UnionId) -> Arc { - let loc = id.lookup(db); - let item_tree = db.item_tree(loc.id.file_id); - let cfg_options = db.crate_graph()[loc.container.module(db).krate].cfg_options.clone(); - - let union = &item_tree[loc.id.value]; - let variant_data = lower_fields(&item_tree, &cfg_options, &union.fields); - - Arc::new(StructData { name: union.name.clone(), variant_data: Arc::new(variant_data) }) - } -} - -impl EnumData { - pub(crate) fn enum_data_query(db: &dyn DefDatabase, e: EnumId) -> Arc { - let loc = e.lookup(db); - let item_tree = db.item_tree(loc.id.file_id); - let cfg_options = db.crate_graph()[loc.container.module(db).krate].cfg_options.clone(); - - let enum_ = &item_tree[loc.id.value]; - let mut variants = Arena::new(); - for var_id in enum_.variants.clone() { - if item_tree.attrs(var_id.into()).is_cfg_enabled(&cfg_options) { - let var = &item_tree[var_id]; - let var_data = lower_fields(&item_tree, &cfg_options, &var.fields); - - variants.alloc(EnumVariantData { - name: var.name.clone(), - variant_data: Arc::new(var_data), - }); - } - } - - Arc::new(EnumData { name: enum_.name.clone(), variants }) - } - - pub fn variant(&self, name: &Name) -> Option { - let (id, _) = self.variants.iter().find(|(_id, data)| &data.name == name)?; - Some(id) - } -} - -impl HasChildSource for EnumId { - type ChildId = LocalEnumVariantId; - type Value = ast::Variant; - fn child_source(&self, db: &dyn DefDatabase) -> InFile> { - let src = self.lookup(db).source(db); - let mut trace = Trace::new_for_map(); - lower_enum(db, &mut trace, &src, self.lookup(db).container.module(db)); - src.with_value(trace.into_map()) - } -} - -fn lower_enum( - db: &dyn DefDatabase, - trace: &mut Trace, - ast: &InFile, - module_id: ModuleId, -) { - let expander = CfgExpander::new(db, ast.file_id, module_id.krate); - let variants = ast - .value - .variant_list() - .into_iter() - .flat_map(|it| it.variants()) - 
.filter(|var| expander.is_cfg_enabled(var)); - for var in variants { - trace.alloc( - || var.clone(), - || EnumVariantData { - name: var.name().map_or_else(Name::missing, |it| it.as_name()), - variant_data: Arc::new(VariantData::new(db, ast.with_value(var.kind()), module_id)), - }, - ); - } -} - -impl VariantData { - fn new(db: &dyn DefDatabase, flavor: InFile, module_id: ModuleId) -> Self { - let mut expander = CfgExpander::new(db, flavor.file_id, module_id.krate); - let mut trace = Trace::new_for_arena(); - match lower_struct(db, &mut expander, &mut trace, &flavor) { - StructKind::Tuple => VariantData::Tuple(trace.into_arena()), - StructKind::Record => VariantData::Record(trace.into_arena()), - StructKind::Unit => VariantData::Unit, - } - } - - pub fn fields(&self) -> &Arena { - const EMPTY: &Arena = &Arena::new(); - match &self { - VariantData::Record(fields) | VariantData::Tuple(fields) => fields, - _ => EMPTY, - } - } - - pub fn field(&self, name: &Name) -> Option { - self.fields().iter().find_map(|(id, data)| if &data.name == name { Some(id) } else { None }) - } - - pub fn kind(&self) -> StructKind { - match self { - VariantData::Record(_) => StructKind::Record, - VariantData::Tuple(_) => StructKind::Tuple, - VariantData::Unit => StructKind::Unit, - } - } -} - -impl HasChildSource for VariantId { - type ChildId = LocalFieldId; - type Value = Either; - - fn child_source(&self, db: &dyn DefDatabase) -> InFile> { - let (src, module_id) = match self { - VariantId::EnumVariantId(it) => { - // I don't really like the fact that we call into parent source - // here, this might add to more queries then necessary. 
- let src = it.parent.child_source(db); - (src.map(|map| map[it.local_id].kind()), it.parent.lookup(db).container.module(db)) - } - VariantId::StructId(it) => { - (it.lookup(db).source(db).map(|it| it.kind()), it.lookup(db).container.module(db)) - } - VariantId::UnionId(it) => ( - it.lookup(db).source(db).map(|it| { - it.record_field_list() - .map(ast::StructKind::Record) - .unwrap_or(ast::StructKind::Unit) - }), - it.lookup(db).container.module(db), - ), - }; - let mut expander = CfgExpander::new(db, src.file_id, module_id.krate); - let mut trace = Trace::new_for_map(); - lower_struct(db, &mut expander, &mut trace, &src); - src.with_value(trace.into_map()) - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq)] -pub enum StructKind { - Tuple, - Record, - Unit, -} - -fn lower_struct( - db: &dyn DefDatabase, - expander: &mut CfgExpander, - trace: &mut Trace>, - ast: &InFile, -) -> StructKind { - let ctx = LowerCtx::new(db, ast.file_id); - - match &ast.value { - ast::StructKind::Tuple(fl) => { - for (i, fd) in fl.fields().enumerate() { - if !expander.is_cfg_enabled(&fd) { - continue; - } - - trace.alloc( - || Either::Left(fd.clone()), - || FieldData { - name: Name::new_tuple_field(i), - type_ref: TypeRef::from_ast_opt(&ctx, fd.ty()), - visibility: RawVisibility::from_ast(db, ast.with_value(fd.visibility())), - }, - ); - } - StructKind::Tuple - } - ast::StructKind::Record(fl) => { - for fd in fl.fields() { - if !expander.is_cfg_enabled(&fd) { - continue; - } - - trace.alloc( - || Either::Right(fd.clone()), - || FieldData { - name: fd.name().map(|n| n.as_name()).unwrap_or_else(Name::missing), - type_ref: TypeRef::from_ast_opt(&ctx, fd.ty()), - visibility: RawVisibility::from_ast(db, ast.with_value(fd.visibility())), - }, - ); - } - StructKind::Record - } - ast::StructKind::Unit => StructKind::Unit, - } -} - -fn lower_fields(item_tree: &ItemTree, cfg_options: &CfgOptions, fields: &Fields) -> VariantData { - match fields { - Fields::Record(flds) => { - let mut arena = 
Arena::new(); - for field_id in flds.clone() { - if item_tree.attrs(field_id.into()).is_cfg_enabled(cfg_options) { - arena.alloc(lower_field(item_tree, &item_tree[field_id])); - } - } - VariantData::Record(arena) - } - Fields::Tuple(flds) => { - let mut arena = Arena::new(); - for field_id in flds.clone() { - if item_tree.attrs(field_id.into()).is_cfg_enabled(cfg_options) { - arena.alloc(lower_field(item_tree, &item_tree[field_id])); - } - } - VariantData::Tuple(arena) - } - Fields::Unit => VariantData::Unit, - } -} - -fn lower_field(item_tree: &ItemTree, field: &Field) -> FieldData { - FieldData { - name: field.name.clone(), - type_ref: field.type_ref.clone(), - visibility: item_tree[field.visibility].clone(), - } -} diff --git a/crates/ra_hir_def/src/attr.rs b/crates/ra_hir_def/src/attr.rs deleted file mode 100644 index 050832ce01..0000000000 --- a/crates/ra_hir_def/src/attr.rs +++ /dev/null @@ -1,212 +0,0 @@ -//! A higher level attributes based on TokenTree, with also some shortcuts. 
- -use std::{ops, sync::Arc}; - -use either::Either; -use hir_expand::{hygiene::Hygiene, AstId, InFile}; -use mbe::ast_to_token_tree; -use ra_cfg::{CfgExpr, CfgOptions}; -use ra_syntax::{ - ast::{self, AstNode, AttrsOwner}, - SmolStr, -}; -use tt::Subtree; - -use crate::{ - db::DefDatabase, - item_tree::{ItemTreeId, ItemTreeNode}, - nameres::ModuleSource, - path::ModPath, - src::HasChildSource, - AdtId, AttrDefId, Lookup, -}; - -#[derive(Default, Debug, Clone, PartialEq, Eq)] -pub struct Attrs { - entries: Option>, -} - -impl ops::Deref for Attrs { - type Target = [Attr]; - - fn deref(&self) -> &[Attr] { - match &self.entries { - Some(it) => &*it, - None => &[], - } - } -} - -impl Attrs { - pub const EMPTY: Attrs = Attrs { entries: None }; - - pub(crate) fn attrs_query(db: &dyn DefDatabase, def: AttrDefId) -> Attrs { - match def { - AttrDefId::ModuleId(module) => { - let def_map = db.crate_def_map(module.krate); - let mod_data = &def_map[module.local_id]; - match mod_data.declaration_source(db) { - Some(it) => { - Attrs::from_attrs_owner(db, it.as_ref().map(|it| it as &dyn AttrsOwner)) - } - None => Attrs::from_attrs_owner( - db, - mod_data.definition_source(db).as_ref().map(|src| match src { - ModuleSource::SourceFile(file) => file as &dyn AttrsOwner, - ModuleSource::Module(module) => module as &dyn AttrsOwner, - }), - ), - } - } - AttrDefId::FieldId(it) => { - let src = it.parent.child_source(db); - match &src.value[it.local_id] { - Either::Left(_tuple) => Attrs::default(), - Either::Right(record) => Attrs::from_attrs_owner(db, src.with_value(record)), - } - } - AttrDefId::EnumVariantId(var_id) => { - let src = var_id.parent.child_source(db); - let src = src.as_ref().map(|it| &it[var_id.local_id]); - Attrs::from_attrs_owner(db, src.map(|it| it as &dyn AttrsOwner)) - } - AttrDefId::AdtId(it) => match it { - AdtId::StructId(it) => attrs_from_item_tree(it.lookup(db).id, db), - AdtId::EnumId(it) => attrs_from_item_tree(it.lookup(db).id, db), - AdtId::UnionId(it) => 
attrs_from_item_tree(it.lookup(db).id, db), - }, - AttrDefId::TraitId(it) => attrs_from_item_tree(it.lookup(db).id, db), - AttrDefId::MacroDefId(it) => { - it.ast_id.map_or_else(Default::default, |ast_id| attrs_from_ast(ast_id, db)) - } - AttrDefId::ImplId(it) => attrs_from_item_tree(it.lookup(db).id, db), - AttrDefId::ConstId(it) => attrs_from_item_tree(it.lookup(db).id, db), - AttrDefId::StaticId(it) => attrs_from_item_tree(it.lookup(db).id, db), - AttrDefId::FunctionId(it) => attrs_from_item_tree(it.lookup(db).id, db), - AttrDefId::TypeAliasId(it) => attrs_from_item_tree(it.lookup(db).id, db), - } - } - - pub fn from_attrs_owner(db: &dyn DefDatabase, owner: InFile<&dyn AttrsOwner>) -> Attrs { - let hygiene = Hygiene::new(db.upcast(), owner.file_id); - Attrs::new(owner.value, &hygiene) - } - - pub(crate) fn new(owner: &dyn AttrsOwner, hygiene: &Hygiene) -> Attrs { - let docs = ast::CommentIter::from_syntax_node(owner.syntax()).doc_comment_text().map( - |docs_text| Attr { - input: Some(AttrInput::Literal(SmolStr::new(docs_text))), - path: ModPath::from(hir_expand::name!(doc)), - }, - ); - let mut attrs = owner.attrs().peekable(); - let entries = if attrs.peek().is_none() { - // Avoid heap allocation - None - } else { - Some(attrs.flat_map(|ast| Attr::from_src(ast, hygiene)).chain(docs).collect()) - }; - Attrs { entries } - } - - pub fn merge(&self, other: Attrs) -> Attrs { - match (&self.entries, &other.entries) { - (None, None) => Attrs { entries: None }, - (Some(entries), None) | (None, Some(entries)) => { - Attrs { entries: Some(entries.clone()) } - } - (Some(a), Some(b)) => { - Attrs { entries: Some(a.iter().chain(b.iter()).cloned().collect()) } - } - } - } - - pub fn by_key(&self, key: &'static str) -> AttrQuery<'_> { - AttrQuery { attrs: self, key } - } - - pub fn cfg(&self) -> impl Iterator + '_ { - // FIXME: handle cfg_attr :-) - self.by_key("cfg").tt_values().map(CfgExpr::parse) - } - pub(crate) fn is_cfg_enabled(&self, cfg_options: &CfgOptions) -> bool { 
- self.cfg().all(|cfg| cfg_options.check(&cfg) != Some(false)) - } -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct Attr { - pub(crate) path: ModPath, - pub(crate) input: Option, -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum AttrInput { - /// `#[attr = "string"]` - Literal(SmolStr), - /// `#[attr(subtree)]` - TokenTree(Subtree), -} - -impl Attr { - fn from_src(ast: ast::Attr, hygiene: &Hygiene) -> Option { - let path = ModPath::from_src(ast.path()?, hygiene)?; - let input = if let Some(lit) = ast.literal() { - // FIXME: escape? raw string? - let value = lit.syntax().first_token()?.text().trim_matches('"').into(); - Some(AttrInput::Literal(value)) - } else if let Some(tt) = ast.token_tree() { - Some(AttrInput::TokenTree(ast_to_token_tree(&tt)?.0)) - } else { - None - }; - Some(Attr { path, input }) - } -} - -#[derive(Debug, Clone, Copy)] -pub struct AttrQuery<'a> { - attrs: &'a Attrs, - key: &'static str, -} - -impl<'a> AttrQuery<'a> { - pub fn tt_values(self) -> impl Iterator { - self.attrs().filter_map(|attr| match attr.input.as_ref()? { - AttrInput::TokenTree(it) => Some(it), - _ => None, - }) - } - - pub fn string_value(self) -> Option<&'a SmolStr> { - self.attrs().find_map(|attr| match attr.input.as_ref()? 
{ - AttrInput::Literal(it) => Some(it), - _ => None, - }) - } - - pub fn exists(self) -> bool { - self.attrs().next().is_some() - } - - fn attrs(self) -> impl Iterator { - let key = self.key; - self.attrs - .iter() - .filter(move |attr| attr.path.as_ident().map_or(false, |s| s.to_string() == key)) - } -} - -fn attrs_from_ast(src: AstId, db: &dyn DefDatabase) -> Attrs -where - N: ast::AttrsOwner, -{ - let src = InFile::new(src.file_id, src.to_node(db.upcast())); - Attrs::from_attrs_owner(db, src.as_ref().map(|it| it as &dyn AttrsOwner)) -} - -fn attrs_from_item_tree(id: ItemTreeId, db: &dyn DefDatabase) -> Attrs { - let tree = db.item_tree(id.file_id); - let mod_item = N::id_to_mod_item(id.value); - tree.attrs(mod_item.into()).clone() -} diff --git a/crates/ra_hir_def/src/body.rs b/crates/ra_hir_def/src/body.rs deleted file mode 100644 index d5f18b9201..0000000000 --- a/crates/ra_hir_def/src/body.rs +++ /dev/null @@ -1,361 +0,0 @@ -//! Defines `Body`: a lowered representation of bodies of functions, statics and -//! consts. -mod lower; -pub mod scope; - -use std::{mem, ops::Index, sync::Arc}; - -use drop_bomb::DropBomb; -use either::Either; -use hir_expand::{ast_id_map::AstIdMap, hygiene::Hygiene, AstId, HirFileId, InFile, MacroDefId}; -use ra_arena::{map::ArenaMap, Arena}; -use ra_cfg::CfgOptions; -use ra_db::CrateId; -use ra_prof::profile; -use ra_syntax::{ast, AstNode, AstPtr}; -use rustc_hash::FxHashMap; -use test_utils::mark; - -pub(crate) use lower::LowerCtx; - -use crate::{ - attr::Attrs, - db::DefDatabase, - expr::{Expr, ExprId, Pat, PatId}, - item_scope::BuiltinShadowMode, - item_scope::ItemScope, - nameres::CrateDefMap, - path::{ModPath, Path}, - src::HasSource, - AsMacroCall, DefWithBodyId, HasModule, Lookup, ModuleId, -}; - -/// A subset of Expander that only deals with cfg attributes. We only need it to -/// avoid cyclic queries in crate def map during enum processing. 
-pub(crate) struct CfgExpander { - cfg_options: CfgOptions, - hygiene: Hygiene, -} - -pub(crate) struct Expander { - cfg_expander: CfgExpander, - crate_def_map: Arc, - current_file_id: HirFileId, - ast_id_map: Arc, - module: ModuleId, - recursion_limit: usize, -} - -#[cfg(test)] -const EXPANSION_RECURSION_LIMIT: usize = 32; - -#[cfg(not(test))] -const EXPANSION_RECURSION_LIMIT: usize = 128; - -impl CfgExpander { - pub(crate) fn new( - db: &dyn DefDatabase, - current_file_id: HirFileId, - krate: CrateId, - ) -> CfgExpander { - let hygiene = Hygiene::new(db.upcast(), current_file_id); - let cfg_options = db.crate_graph()[krate].cfg_options.clone(); - CfgExpander { cfg_options, hygiene } - } - - pub(crate) fn parse_attrs(&self, owner: &dyn ast::AttrsOwner) -> Attrs { - Attrs::new(owner, &self.hygiene) - } - - pub(crate) fn is_cfg_enabled(&self, owner: &dyn ast::AttrsOwner) -> bool { - let attrs = self.parse_attrs(owner); - attrs.is_cfg_enabled(&self.cfg_options) - } -} - -impl Expander { - pub(crate) fn new( - db: &dyn DefDatabase, - current_file_id: HirFileId, - module: ModuleId, - ) -> Expander { - let cfg_expander = CfgExpander::new(db, current_file_id, module.krate); - let crate_def_map = db.crate_def_map(module.krate); - let ast_id_map = db.ast_id_map(current_file_id); - Expander { - cfg_expander, - crate_def_map, - current_file_id, - ast_id_map, - module, - recursion_limit: 0, - } - } - - pub(crate) fn enter_expand( - &mut self, - db: &dyn DefDatabase, - local_scope: Option<&ItemScope>, - macro_call: ast::MacroCall, - ) -> Option<(Mark, T)> { - self.recursion_limit += 1; - if self.recursion_limit > EXPANSION_RECURSION_LIMIT { - mark::hit!(your_stack_belongs_to_me); - return None; - } - - let macro_call = InFile::new(self.current_file_id, ¯o_call); - - if let Some(call_id) = macro_call.as_call_id(db, self.crate_def_map.krate, |path| { - if let Some(local_scope) = local_scope { - if let Some(def) = path.as_ident().and_then(|n| local_scope.get_legacy_macro(n)) { - 
return Some(def); - } - } - self.resolve_path_as_macro(db, &path) - }) { - let file_id = call_id.as_file(); - if let Some(node) = db.parse_or_expand(file_id) { - if let Some(expr) = T::cast(node) { - log::debug!("macro expansion {:#?}", expr.syntax()); - - let mark = Mark { - file_id: self.current_file_id, - ast_id_map: mem::take(&mut self.ast_id_map), - bomb: DropBomb::new("expansion mark dropped"), - }; - self.cfg_expander.hygiene = Hygiene::new(db.upcast(), file_id); - self.current_file_id = file_id; - self.ast_id_map = db.ast_id_map(file_id); - return Some((mark, expr)); - } - } - } - - // FIXME: Instead of just dropping the error from expansion - // report it - None - } - - pub(crate) fn exit(&mut self, db: &dyn DefDatabase, mut mark: Mark) { - self.cfg_expander.hygiene = Hygiene::new(db.upcast(), mark.file_id); - self.current_file_id = mark.file_id; - self.ast_id_map = mem::take(&mut mark.ast_id_map); - self.recursion_limit -= 1; - mark.bomb.defuse(); - } - - pub(crate) fn to_source(&self, value: T) -> InFile { - InFile { file_id: self.current_file_id, value } - } - - pub(crate) fn is_cfg_enabled(&self, owner: &dyn ast::AttrsOwner) -> bool { - self.cfg_expander.is_cfg_enabled(owner) - } - - fn parse_path(&mut self, path: ast::Path) -> Option { - Path::from_src(path, &self.cfg_expander.hygiene) - } - - fn resolve_path_as_macro(&self, db: &dyn DefDatabase, path: &ModPath) -> Option { - self.crate_def_map - .resolve_path(db, self.module.local_id, path, BuiltinShadowMode::Other) - .0 - .take_macros() - } - - fn ast_id(&self, item: &N) -> AstId { - let file_local_id = self.ast_id_map.ast_id(item); - AstId::new(self.current_file_id, file_local_id) - } -} - -pub(crate) struct Mark { - file_id: HirFileId, - ast_id_map: Arc, - bomb: DropBomb, -} - -/// The body of an item (function, const etc.). -#[derive(Debug, Eq, PartialEq)] -pub struct Body { - pub exprs: Arena, - pub pats: Arena, - /// The patterns for the function's parameters. 
While the parameter types are - /// part of the function signature, the patterns are not (they don't change - /// the external type of the function). - /// - /// If this `Body` is for the body of a constant, this will just be - /// empty. - pub params: Vec, - /// The `ExprId` of the actual body expression. - pub body_expr: ExprId, - pub item_scope: ItemScope, -} - -pub type ExprPtr = AstPtr; -pub type ExprSource = InFile; - -pub type PatPtr = Either, AstPtr>; -pub type PatSource = InFile; - -/// An item body together with the mapping from syntax nodes to HIR expression -/// IDs. This is needed to go from e.g. a position in a file to the HIR -/// expression containing it; but for type inference etc., we want to operate on -/// a structure that is agnostic to the actual positions of expressions in the -/// file, so that we don't recompute types whenever some whitespace is typed. -/// -/// One complication here is that, due to macro expansion, a single `Body` might -/// be spread across several files. So, for each ExprId and PatId, we record -/// both the HirFileId and the position inside the file. However, we only store -/// AST -> ExprId mapping for non-macro files, as it is not clear how to handle -/// this properly for macros. 
-#[derive(Default, Debug, Eq, PartialEq)] -pub struct BodySourceMap { - expr_map: FxHashMap, - expr_map_back: ArenaMap>, - pat_map: FxHashMap, - pat_map_back: ArenaMap>, - field_map: FxHashMap<(ExprId, usize), InFile>>, - expansions: FxHashMap>, HirFileId>, -} - -#[derive(Default, Debug, Eq, PartialEq, Clone, Copy)] -pub struct SyntheticSyntax; - -impl Body { - pub(crate) fn body_with_source_map_query( - db: &dyn DefDatabase, - def: DefWithBodyId, - ) -> (Arc, Arc) { - let _p = profile("body_with_source_map_query"); - let mut params = None; - - let (file_id, module, body) = match def { - DefWithBodyId::FunctionId(f) => { - let f = f.lookup(db); - let src = f.source(db); - params = src.value.param_list(); - (src.file_id, f.module(db), src.value.body().map(ast::Expr::from)) - } - DefWithBodyId::ConstId(c) => { - let c = c.lookup(db); - let src = c.source(db); - (src.file_id, c.module(db), src.value.body()) - } - DefWithBodyId::StaticId(s) => { - let s = s.lookup(db); - let src = s.source(db); - (src.file_id, s.module(db), src.value.body()) - } - }; - let expander = Expander::new(db, file_id, module); - let (body, source_map) = Body::new(db, def, expander, params, body); - (Arc::new(body), Arc::new(source_map)) - } - - pub(crate) fn body_query(db: &dyn DefDatabase, def: DefWithBodyId) -> Arc { - db.body_with_source_map(def).0 - } - - fn new( - db: &dyn DefDatabase, - def: DefWithBodyId, - expander: Expander, - params: Option, - body: Option, - ) -> (Body, BodySourceMap) { - lower::lower(db, def, expander, params, body) - } -} - -impl Index for Body { - type Output = Expr; - - fn index(&self, expr: ExprId) -> &Expr { - &self.exprs[expr] - } -} - -impl Index for Body { - type Output = Pat; - - fn index(&self, pat: PatId) -> &Pat { - &self.pats[pat] - } -} - -impl BodySourceMap { - pub fn expr_syntax(&self, expr: ExprId) -> Result { - self.expr_map_back[expr].clone() - } - - pub fn node_expr(&self, node: InFile<&ast::Expr>) -> Option { - let src = node.map(|it| 
AstPtr::new(it)); - self.expr_map.get(&src).cloned() - } - - pub fn node_macro_file(&self, node: InFile<&ast::MacroCall>) -> Option { - let src = node.map(|it| AstPtr::new(it)); - self.expansions.get(&src).cloned() - } - - pub fn pat_syntax(&self, pat: PatId) -> Result { - self.pat_map_back[pat].clone() - } - - pub fn node_pat(&self, node: InFile<&ast::Pat>) -> Option { - let src = node.map(|it| Either::Left(AstPtr::new(it))); - self.pat_map.get(&src).cloned() - } - - pub fn node_self_param(&self, node: InFile<&ast::SelfParam>) -> Option { - let src = node.map(|it| Either::Right(AstPtr::new(it))); - self.pat_map.get(&src).cloned() - } - - pub fn field_syntax(&self, expr: ExprId, field: usize) -> InFile> { - self.field_map[&(expr, field)].clone() - } -} - -#[cfg(test)] -mod tests { - use ra_db::{fixture::WithFixture, SourceDatabase}; - use test_utils::mark; - - use crate::ModuleDefId; - - use super::*; - - fn lower(ra_fixture: &str) -> Arc { - let (db, file_id) = crate::test_db::TestDB::with_single_file(ra_fixture); - - let krate = db.crate_graph().iter().next().unwrap(); - let def_map = db.crate_def_map(krate); - let module = def_map.modules_for_file(file_id).next().unwrap(); - let module = &def_map[module]; - let fn_def = match module.scope.declarations().next().unwrap() { - ModuleDefId::FunctionId(it) => it, - _ => panic!(), - }; - - db.body(fn_def.into()) - } - - #[test] - fn your_stack_belongs_to_me() { - mark::check!(your_stack_belongs_to_me); - lower( - " -macro_rules! n_nuple { - ($e:tt) => (); - ($($rest:tt)*) => {{ - (n_nuple!($($rest)*)None,) - }}; -} -fn main() { n_nuple!(1,2,3); } -", - ); - } -} diff --git a/crates/ra_hir_def/src/body/lower.rs b/crates/ra_hir_def/src/body/lower.rs deleted file mode 100644 index f5c37edb30..0000000000 --- a/crates/ra_hir_def/src/body/lower.rs +++ /dev/null @@ -1,931 +0,0 @@ -//! Transforms `ast::Expr` into an equivalent `hir_def::expr::Expr` -//! representation. 
- -use std::{any::type_name, sync::Arc}; - -use either::Either; -use hir_expand::{ - hygiene::Hygiene, - name::{name, AsName, Name}, - HirFileId, MacroDefId, MacroDefKind, -}; -use ra_arena::Arena; -use ra_syntax::{ - ast::{ - self, ArgListOwner, ArrayExprKind, AstChildren, LiteralKind, LoopBodyOwner, NameOwner, - SlicePatComponents, - }, - AstNode, AstPtr, -}; -use rustc_hash::FxHashMap; -use test_utils::mark; - -use crate::{ - adt::StructKind, - body::{Body, BodySourceMap, Expander, PatPtr, SyntheticSyntax}, - builtin_type::{BuiltinFloat, BuiltinInt}, - db::DefDatabase, - expr::{ - dummy_expr_id, ArithOp, Array, BinaryOp, BindingAnnotation, CmpOp, Expr, ExprId, Literal, - LogicOp, MatchArm, Ordering, Pat, PatId, RecordFieldPat, RecordLitField, Statement, - }, - item_scope::BuiltinShadowMode, - item_tree::{ItemTree, ItemTreeId, ItemTreeNode}, - path::{GenericArgs, Path}, - type_ref::{Mutability, Rawness, TypeRef}, - AdtId, ConstLoc, ContainerId, DefWithBodyId, EnumLoc, FunctionLoc, Intern, ModuleDefId, - StaticLoc, StructLoc, TraitLoc, TypeAliasLoc, UnionLoc, -}; - -use super::{ExprSource, PatSource}; - -pub(crate) struct LowerCtx { - hygiene: Hygiene, -} - -impl LowerCtx { - pub fn new(db: &dyn DefDatabase, file_id: HirFileId) -> Self { - LowerCtx { hygiene: Hygiene::new(db.upcast(), file_id) } - } - pub fn with_hygiene(hygiene: &Hygiene) -> Self { - LowerCtx { hygiene: hygiene.clone() } - } - - pub fn lower_path(&self, ast: ast::Path) -> Option { - Path::from_src(ast, &self.hygiene) - } -} - -pub(super) fn lower( - db: &dyn DefDatabase, - def: DefWithBodyId, - expander: Expander, - params: Option, - body: Option, -) -> (Body, BodySourceMap) { - let item_tree = db.item_tree(expander.current_file_id); - ExprCollector { - db, - def, - source_map: BodySourceMap::default(), - body: Body { - exprs: Arena::default(), - pats: Arena::default(), - params: Vec::new(), - body_expr: dummy_expr_id(), - item_scope: Default::default(), - }, - item_trees: { - let mut map = 
FxHashMap::default(); - map.insert(expander.current_file_id, item_tree); - map - }, - expander, - } - .collect(params, body) -} - -struct ExprCollector<'a> { - db: &'a dyn DefDatabase, - def: DefWithBodyId, - expander: Expander, - body: Body, - source_map: BodySourceMap, - - item_trees: FxHashMap>, -} - -impl ExprCollector<'_> { - fn collect( - mut self, - param_list: Option, - body: Option, - ) -> (Body, BodySourceMap) { - if let Some(param_list) = param_list { - if let Some(self_param) = param_list.self_param() { - let ptr = AstPtr::new(&self_param); - let param_pat = self.alloc_pat( - Pat::Bind { - name: name![self], - mode: BindingAnnotation::Unannotated, - subpat: None, - }, - Either::Right(ptr), - ); - self.body.params.push(param_pat); - } - - for param in param_list.params() { - let pat = match param.pat() { - None => continue, - Some(pat) => pat, - }; - let param_pat = self.collect_pat(pat); - self.body.params.push(param_pat); - } - }; - - self.body.body_expr = self.collect_expr_opt(body); - (self.body, self.source_map) - } - - fn ctx(&self) -> LowerCtx { - LowerCtx::new(self.db, self.expander.current_file_id) - } - - fn alloc_expr(&mut self, expr: Expr, ptr: AstPtr) -> ExprId { - let src = self.expander.to_source(ptr); - let id = self.make_expr(expr, Ok(src.clone())); - self.source_map.expr_map.insert(src, id); - id - } - // desugared exprs don't have ptr, that's wrong and should be fixed - // somehow. 
- fn alloc_expr_desugared(&mut self, expr: Expr) -> ExprId { - self.make_expr(expr, Err(SyntheticSyntax)) - } - fn empty_block(&mut self) -> ExprId { - self.alloc_expr_desugared(Expr::Block { statements: Vec::new(), tail: None, label: None }) - } - fn missing_expr(&mut self) -> ExprId { - self.alloc_expr_desugared(Expr::Missing) - } - fn make_expr(&mut self, expr: Expr, src: Result) -> ExprId { - let id = self.body.exprs.alloc(expr); - self.source_map.expr_map_back.insert(id, src); - id - } - - fn alloc_pat(&mut self, pat: Pat, ptr: PatPtr) -> PatId { - let src = self.expander.to_source(ptr); - let id = self.make_pat(pat, Ok(src.clone())); - self.source_map.pat_map.insert(src, id); - id - } - fn missing_pat(&mut self) -> PatId { - self.make_pat(Pat::Missing, Err(SyntheticSyntax)) - } - fn make_pat(&mut self, pat: Pat, src: Result) -> PatId { - let id = self.body.pats.alloc(pat); - self.source_map.pat_map_back.insert(id, src); - id - } - - fn collect_expr(&mut self, expr: ast::Expr) -> ExprId { - let syntax_ptr = AstPtr::new(&expr); - if !self.expander.is_cfg_enabled(&expr) { - return self.missing_expr(); - } - - match expr { - ast::Expr::IfExpr(e) => { - let then_branch = self.collect_block_opt(e.then_branch()); - - let else_branch = e.else_branch().map(|b| match b { - ast::ElseBranch::Block(it) => self.collect_block(it), - ast::ElseBranch::IfExpr(elif) => { - let expr: ast::Expr = ast::Expr::cast(elif.syntax().clone()).unwrap(); - self.collect_expr(expr) - } - }); - - let condition = match e.condition() { - None => self.missing_expr(), - Some(condition) => match condition.pat() { - None => self.collect_expr_opt(condition.expr()), - // if let -- desugar to match - Some(pat) => { - let pat = self.collect_pat(pat); - let match_expr = self.collect_expr_opt(condition.expr()); - let placeholder_pat = self.missing_pat(); - let arms = vec![ - MatchArm { pat, expr: then_branch, guard: None }, - MatchArm { - pat: placeholder_pat, - expr: else_branch.unwrap_or_else(|| 
self.empty_block()), - guard: None, - }, - ]; - return self - .alloc_expr(Expr::Match { expr: match_expr, arms }, syntax_ptr); - } - }, - }; - - self.alloc_expr(Expr::If { condition, then_branch, else_branch }, syntax_ptr) - } - ast::Expr::EffectExpr(e) => match e.effect() { - ast::Effect::Try(_) => { - let body = self.collect_block_opt(e.block_expr()); - self.alloc_expr(Expr::TryBlock { body }, syntax_ptr) - } - ast::Effect::Unsafe(_) => { - let body = self.collect_block_opt(e.block_expr()); - self.alloc_expr(Expr::Unsafe { body }, syntax_ptr) - } - // FIXME: we need to record these effects somewhere... - ast::Effect::Label(label) => match e.block_expr() { - Some(block) => { - let res = self.collect_block(block); - match &mut self.body.exprs[res] { - Expr::Block { label: block_label, .. } => { - *block_label = - label.lifetime_token().map(|t| Name::new_lifetime(&t)) - } - _ => unreachable!(), - } - res - } - None => self.missing_expr(), - }, - // FIXME: we need to record these effects somewhere... 
- ast::Effect::Async(_) => self.collect_block_opt(e.block_expr()), - }, - ast::Expr::BlockExpr(e) => self.collect_block(e), - ast::Expr::LoopExpr(e) => { - let body = self.collect_block_opt(e.loop_body()); - self.alloc_expr( - Expr::Loop { - body, - label: e - .label() - .and_then(|l| l.lifetime_token()) - .map(|l| Name::new_lifetime(&l)), - }, - syntax_ptr, - ) - } - ast::Expr::WhileExpr(e) => { - let body = self.collect_block_opt(e.loop_body()); - - let condition = match e.condition() { - None => self.missing_expr(), - Some(condition) => match condition.pat() { - None => self.collect_expr_opt(condition.expr()), - // if let -- desugar to match - Some(pat) => { - mark::hit!(infer_resolve_while_let); - let pat = self.collect_pat(pat); - let match_expr = self.collect_expr_opt(condition.expr()); - let placeholder_pat = self.missing_pat(); - let break_ = - self.alloc_expr_desugared(Expr::Break { expr: None, label: None }); - let arms = vec![ - MatchArm { pat, expr: body, guard: None }, - MatchArm { pat: placeholder_pat, expr: break_, guard: None }, - ]; - let match_expr = - self.alloc_expr_desugared(Expr::Match { expr: match_expr, arms }); - return self.alloc_expr( - Expr::Loop { - body: match_expr, - label: e - .label() - .and_then(|l| l.lifetime_token()) - .map(|l| Name::new_lifetime(&l)), - }, - syntax_ptr, - ); - } - }, - }; - - self.alloc_expr( - Expr::While { - condition, - body, - label: e - .label() - .and_then(|l| l.lifetime_token()) - .map(|l| Name::new_lifetime(&l)), - }, - syntax_ptr, - ) - } - ast::Expr::ForExpr(e) => { - let iterable = self.collect_expr_opt(e.iterable()); - let pat = self.collect_pat_opt(e.pat()); - let body = self.collect_block_opt(e.loop_body()); - self.alloc_expr( - Expr::For { - iterable, - pat, - body, - label: e - .label() - .and_then(|l| l.lifetime_token()) - .map(|l| Name::new_lifetime(&l)), - }, - syntax_ptr, - ) - } - ast::Expr::CallExpr(e) => { - let callee = self.collect_expr_opt(e.expr()); - let args = if let Some(arg_list) = 
e.arg_list() { - arg_list.args().map(|e| self.collect_expr(e)).collect() - } else { - Vec::new() - }; - self.alloc_expr(Expr::Call { callee, args }, syntax_ptr) - } - ast::Expr::MethodCallExpr(e) => { - let receiver = self.collect_expr_opt(e.expr()); - let args = if let Some(arg_list) = e.arg_list() { - arg_list.args().map(|e| self.collect_expr(e)).collect() - } else { - Vec::new() - }; - let method_name = e.name_ref().map(|nr| nr.as_name()).unwrap_or_else(Name::missing); - let generic_args = - e.generic_arg_list().and_then(|it| GenericArgs::from_ast(&self.ctx(), it)); - self.alloc_expr( - Expr::MethodCall { receiver, method_name, args, generic_args }, - syntax_ptr, - ) - } - ast::Expr::MatchExpr(e) => { - let expr = self.collect_expr_opt(e.expr()); - let arms = if let Some(match_arm_list) = e.match_arm_list() { - match_arm_list - .arms() - .map(|arm| MatchArm { - pat: self.collect_pat_opt(arm.pat()), - expr: self.collect_expr_opt(arm.expr()), - guard: arm - .guard() - .and_then(|guard| guard.expr()) - .map(|e| self.collect_expr(e)), - }) - .collect() - } else { - Vec::new() - }; - self.alloc_expr(Expr::Match { expr, arms }, syntax_ptr) - } - ast::Expr::PathExpr(e) => { - let path = e - .path() - .and_then(|path| self.expander.parse_path(path)) - .map(Expr::Path) - .unwrap_or(Expr::Missing); - self.alloc_expr(path, syntax_ptr) - } - ast::Expr::ContinueExpr(e) => self.alloc_expr( - Expr::Continue { label: e.lifetime_token().map(|l| Name::new_lifetime(&l)) }, - syntax_ptr, - ), - ast::Expr::BreakExpr(e) => { - let expr = e.expr().map(|e| self.collect_expr(e)); - self.alloc_expr( - Expr::Break { expr, label: e.lifetime_token().map(|l| Name::new_lifetime(&l)) }, - syntax_ptr, - ) - } - ast::Expr::ParenExpr(e) => { - let inner = self.collect_expr_opt(e.expr()); - // make the paren expr point to the inner expression as well - let src = self.expander.to_source(syntax_ptr); - self.source_map.expr_map.insert(src, inner); - inner - } - ast::Expr::ReturnExpr(e) => { - let 
expr = e.expr().map(|e| self.collect_expr(e)); - self.alloc_expr(Expr::Return { expr }, syntax_ptr) - } - ast::Expr::RecordExpr(e) => { - let path = e.path().and_then(|path| self.expander.parse_path(path)); - let mut field_ptrs = Vec::new(); - let record_lit = if let Some(nfl) = e.record_expr_field_list() { - let fields = nfl - .fields() - .inspect(|field| field_ptrs.push(AstPtr::new(field))) - .filter_map(|field| { - if !self.expander.is_cfg_enabled(&field) { - return None; - } - let name = field.field_name()?.as_name(); - - Some(RecordLitField { - name, - expr: match field.expr() { - Some(e) => self.collect_expr(e), - None => self.missing_expr(), - }, - }) - }) - .collect(); - let spread = nfl.spread().map(|s| self.collect_expr(s)); - Expr::RecordLit { path, fields, spread } - } else { - Expr::RecordLit { path, fields: Vec::new(), spread: None } - }; - - let res = self.alloc_expr(record_lit, syntax_ptr); - for (i, ptr) in field_ptrs.into_iter().enumerate() { - let src = self.expander.to_source(ptr); - self.source_map.field_map.insert((res, i), src); - } - res - } - ast::Expr::FieldExpr(e) => { - let expr = self.collect_expr_opt(e.expr()); - let name = match e.field_access() { - Some(kind) => kind.as_name(), - _ => Name::missing(), - }; - self.alloc_expr(Expr::Field { expr, name }, syntax_ptr) - } - ast::Expr::AwaitExpr(e) => { - let expr = self.collect_expr_opt(e.expr()); - self.alloc_expr(Expr::Await { expr }, syntax_ptr) - } - ast::Expr::TryExpr(e) => { - let expr = self.collect_expr_opt(e.expr()); - self.alloc_expr(Expr::Try { expr }, syntax_ptr) - } - ast::Expr::CastExpr(e) => { - let expr = self.collect_expr_opt(e.expr()); - let type_ref = TypeRef::from_ast_opt(&self.ctx(), e.ty()); - self.alloc_expr(Expr::Cast { expr, type_ref }, syntax_ptr) - } - ast::Expr::RefExpr(e) => { - let expr = self.collect_expr_opt(e.expr()); - let raw_tok = e.raw_token().is_some(); - let mutability = if raw_tok { - if e.mut_token().is_some() { - Mutability::Mut - } else if 
e.const_token().is_some() { - Mutability::Shared - } else { - unreachable!("parser only remaps to raw_token() if matching mutability token follows") - } - } else { - Mutability::from_mutable(e.mut_token().is_some()) - }; - let rawness = Rawness::from_raw(raw_tok); - self.alloc_expr(Expr::Ref { expr, rawness, mutability }, syntax_ptr) - } - ast::Expr::PrefixExpr(e) => { - let expr = self.collect_expr_opt(e.expr()); - if let Some(op) = e.op_kind() { - self.alloc_expr(Expr::UnaryOp { expr, op }, syntax_ptr) - } else { - self.alloc_expr(Expr::Missing, syntax_ptr) - } - } - ast::Expr::ClosureExpr(e) => { - let mut args = Vec::new(); - let mut arg_types = Vec::new(); - if let Some(pl) = e.param_list() { - for param in pl.params() { - let pat = self.collect_pat_opt(param.pat()); - let type_ref = param.ty().map(|it| TypeRef::from_ast(&self.ctx(), it)); - args.push(pat); - arg_types.push(type_ref); - } - } - let ret_type = - e.ret_type().and_then(|r| r.ty()).map(|it| TypeRef::from_ast(&self.ctx(), it)); - let body = self.collect_expr_opt(e.body()); - self.alloc_expr(Expr::Lambda { args, arg_types, ret_type, body }, syntax_ptr) - } - ast::Expr::BinExpr(e) => { - let lhs = self.collect_expr_opt(e.lhs()); - let rhs = self.collect_expr_opt(e.rhs()); - let op = e.op_kind().map(BinaryOp::from); - self.alloc_expr(Expr::BinaryOp { lhs, rhs, op }, syntax_ptr) - } - ast::Expr::TupleExpr(e) => { - let exprs = e.fields().map(|expr| self.collect_expr(expr)).collect(); - self.alloc_expr(Expr::Tuple { exprs }, syntax_ptr) - } - ast::Expr::BoxExpr(e) => { - let expr = self.collect_expr_opt(e.expr()); - self.alloc_expr(Expr::Box { expr }, syntax_ptr) - } - - ast::Expr::ArrayExpr(e) => { - let kind = e.kind(); - - match kind { - ArrayExprKind::ElementList(e) => { - let exprs = e.map(|expr| self.collect_expr(expr)).collect(); - self.alloc_expr(Expr::Array(Array::ElementList(exprs)), syntax_ptr) - } - ArrayExprKind::Repeat { initializer, repeat } => { - let initializer = 
self.collect_expr_opt(initializer); - let repeat = self.collect_expr_opt(repeat); - self.alloc_expr( - Expr::Array(Array::Repeat { initializer, repeat }), - syntax_ptr, - ) - } - } - } - - ast::Expr::Literal(e) => self.alloc_expr(Expr::Literal(e.kind().into()), syntax_ptr), - ast::Expr::IndexExpr(e) => { - let base = self.collect_expr_opt(e.base()); - let index = self.collect_expr_opt(e.index()); - self.alloc_expr(Expr::Index { base, index }, syntax_ptr) - } - ast::Expr::RangeExpr(e) => { - let lhs = e.start().map(|lhs| self.collect_expr(lhs)); - let rhs = e.end().map(|rhs| self.collect_expr(rhs)); - match e.op_kind() { - Some(range_type) => { - self.alloc_expr(Expr::Range { lhs, rhs, range_type }, syntax_ptr) - } - None => self.alloc_expr(Expr::Missing, syntax_ptr), - } - } - ast::Expr::MacroCall(e) => { - if let Some(name) = e.is_macro_rules().map(|it| it.as_name()) { - let mac = MacroDefId { - krate: Some(self.expander.module.krate), - ast_id: Some(self.expander.ast_id(&e)), - kind: MacroDefKind::Declarative, - local_inner: false, - }; - self.body.item_scope.define_legacy_macro(name, mac); - - // FIXME: do we still need to allocate this as missing ? 
- self.alloc_expr(Expr::Missing, syntax_ptr) - } else { - let macro_call = self.expander.to_source(AstPtr::new(&e)); - match self.expander.enter_expand(self.db, Some(&self.body.item_scope), e) { - Some((mark, expansion)) => { - self.source_map - .expansions - .insert(macro_call, self.expander.current_file_id); - - let item_tree = self.db.item_tree(self.expander.current_file_id); - self.item_trees.insert(self.expander.current_file_id, item_tree); - let id = self.collect_expr(expansion); - self.expander.exit(self.db, mark); - id - } - None => self.alloc_expr(Expr::Missing, syntax_ptr), - } - } - } - } - } - - fn find_inner_item(&self, ast: &N::Source) -> Option> { - let id = self.expander.ast_id(ast); - let tree = &self.item_trees[&id.file_id]; - - // FIXME: This probably breaks with `use` items, since they produce multiple item tree nodes - - // Root file (non-macro). - let item_tree_id = tree - .all_inner_items() - .chain(tree.top_level_items().iter().copied()) - .filter_map(|mod_item| mod_item.downcast::()) - .find(|tree_id| tree[*tree_id].ast_id().upcast() == id.value.upcast()) - .or_else(|| { - log::debug!( - "couldn't find inner {} item for {:?} (AST: `{}` - {:?})", - type_name::(), - id, - ast.syntax(), - ast.syntax(), - ); - None - })?; - - Some(ItemTreeId::new(id.file_id, item_tree_id)) - } - - fn collect_expr_opt(&mut self, expr: Option) -> ExprId { - if let Some(expr) = expr { - self.collect_expr(expr) - } else { - self.missing_expr() - } - } - - fn collect_block(&mut self, block: ast::BlockExpr) -> ExprId { - let syntax_node_ptr = AstPtr::new(&block.clone().into()); - self.collect_block_items(&block); - let statements = block - .statements() - .filter_map(|s| { - let stmt = match s { - ast::Stmt::LetStmt(stmt) => { - let pat = self.collect_pat_opt(stmt.pat()); - let type_ref = stmt.ty().map(|it| TypeRef::from_ast(&self.ctx(), it)); - let initializer = stmt.initializer().map(|e| self.collect_expr(e)); - Statement::Let { pat, type_ref, initializer } - } - 
ast::Stmt::ExprStmt(stmt) => { - Statement::Expr(self.collect_expr_opt(stmt.expr())) - } - ast::Stmt::Item(_) => return None, - }; - Some(stmt) - }) - .collect(); - let tail = block.expr().map(|e| self.collect_expr(e)); - self.alloc_expr(Expr::Block { statements, tail, label: None }, syntax_node_ptr) - } - - fn collect_block_items(&mut self, block: &ast::BlockExpr) { - let container = ContainerId::DefWithBodyId(self.def); - - let items = block - .statements() - .filter_map(|stmt| match stmt { - ast::Stmt::Item(it) => Some(it), - ast::Stmt::LetStmt(_) | ast::Stmt::ExprStmt(_) => None, - }) - .filter_map(|item| { - let (def, name): (ModuleDefId, Option) = match item { - ast::Item::Fn(def) => { - let id = self.find_inner_item(&def)?; - ( - FunctionLoc { container: container.into(), id }.intern(self.db).into(), - def.name(), - ) - } - ast::Item::TypeAlias(def) => { - let id = self.find_inner_item(&def)?; - ( - TypeAliasLoc { container: container.into(), id }.intern(self.db).into(), - def.name(), - ) - } - ast::Item::Const(def) => { - let id = self.find_inner_item(&def)?; - ( - ConstLoc { container: container.into(), id }.intern(self.db).into(), - def.name(), - ) - } - ast::Item::Static(def) => { - let id = self.find_inner_item(&def)?; - (StaticLoc { container, id }.intern(self.db).into(), def.name()) - } - ast::Item::Struct(def) => { - let id = self.find_inner_item(&def)?; - (StructLoc { container, id }.intern(self.db).into(), def.name()) - } - ast::Item::Enum(def) => { - let id = self.find_inner_item(&def)?; - (EnumLoc { container, id }.intern(self.db).into(), def.name()) - } - ast::Item::Union(def) => { - let id = self.find_inner_item(&def)?; - (UnionLoc { container, id }.intern(self.db).into(), def.name()) - } - ast::Item::Trait(def) => { - let id = self.find_inner_item(&def)?; - (TraitLoc { container, id }.intern(self.db).into(), def.name()) - } - ast::Item::ExternBlock(_) => return None, // FIXME: collect from extern blocks - ast::Item::Impl(_) - | 
ast::Item::Use(_) - | ast::Item::ExternCrate(_) - | ast::Item::Module(_) - | ast::Item::MacroCall(_) => return None, - }; - - Some((def, name)) - }) - .collect::>(); - - for (def, name) in items { - self.body.item_scope.define_def(def); - if let Some(name) = name { - let vis = crate::visibility::Visibility::Public; // FIXME determine correctly - let has_constructor = match def { - ModuleDefId::AdtId(AdtId::StructId(s)) => { - self.db.struct_data(s).variant_data.kind() != StructKind::Record - } - _ => true, - }; - self.body.item_scope.push_res( - name.as_name(), - crate::per_ns::PerNs::from_def(def, vis, has_constructor), - ); - } - } - } - - fn collect_block_opt(&mut self, expr: Option) -> ExprId { - if let Some(block) = expr { - self.collect_block(block) - } else { - self.missing_expr() - } - } - - fn collect_pat(&mut self, pat: ast::Pat) -> PatId { - let pattern = match &pat { - ast::Pat::IdentPat(bp) => { - let name = bp.name().map(|nr| nr.as_name()).unwrap_or_else(Name::missing); - let annotation = - BindingAnnotation::new(bp.mut_token().is_some(), bp.ref_token().is_some()); - let subpat = bp.pat().map(|subpat| self.collect_pat(subpat)); - if annotation == BindingAnnotation::Unannotated && subpat.is_none() { - // This could also be a single-segment path pattern. To - // decide that, we need to try resolving the name. 
- let (resolved, _) = self.expander.crate_def_map.resolve_path( - self.db, - self.expander.module.local_id, - &name.clone().into(), - BuiltinShadowMode::Other, - ); - match resolved.take_values() { - Some(ModuleDefId::ConstId(_)) => Pat::Path(name.into()), - Some(ModuleDefId::EnumVariantId(_)) => { - // this is only really valid for unit variants, but - // shadowing other enum variants with a pattern is - // an error anyway - Pat::Path(name.into()) - } - Some(ModuleDefId::AdtId(AdtId::StructId(s))) - if self.db.struct_data(s).variant_data.kind() != StructKind::Record => - { - // Funnily enough, record structs *can* be shadowed - // by pattern bindings (but unit or tuple structs - // can't). - Pat::Path(name.into()) - } - // shadowing statics is an error as well, so we just ignore that case here - _ => Pat::Bind { name, mode: annotation, subpat }, - } - } else { - Pat::Bind { name, mode: annotation, subpat } - } - } - ast::Pat::TupleStructPat(p) => { - let path = p.path().and_then(|path| self.expander.parse_path(path)); - let (args, ellipsis) = self.collect_tuple_pat(p.fields()); - Pat::TupleStruct { path, args, ellipsis } - } - ast::Pat::RefPat(p) => { - let pat = self.collect_pat_opt(p.pat()); - let mutability = Mutability::from_mutable(p.mut_token().is_some()); - Pat::Ref { pat, mutability } - } - ast::Pat::PathPat(p) => { - let path = p.path().and_then(|path| self.expander.parse_path(path)); - path.map(Pat::Path).unwrap_or(Pat::Missing) - } - ast::Pat::OrPat(p) => { - let pats = p.pats().map(|p| self.collect_pat(p)).collect(); - Pat::Or(pats) - } - ast::Pat::ParenPat(p) => return self.collect_pat_opt(p.pat()), - ast::Pat::TuplePat(p) => { - let (args, ellipsis) = self.collect_tuple_pat(p.fields()); - Pat::Tuple { args, ellipsis } - } - ast::Pat::WildcardPat(_) => Pat::Wild, - ast::Pat::RecordPat(p) => { - let path = p.path().and_then(|path| self.expander.parse_path(path)); - let args: Vec<_> = p - .record_pat_field_list() - .expect("every struct should have a 
field list") - .fields() - .filter_map(|f| { - let ast_pat = f.pat()?; - let pat = self.collect_pat(ast_pat); - let name = f.field_name()?.as_name(); - Some(RecordFieldPat { name, pat }) - }) - .collect(); - - let ellipsis = p - .record_pat_field_list() - .expect("every struct should have a field list") - .dotdot_token() - .is_some(); - - Pat::Record { path, args, ellipsis } - } - ast::Pat::SlicePat(p) => { - let SlicePatComponents { prefix, slice, suffix } = p.components(); - - // FIXME properly handle `RestPat` - Pat::Slice { - prefix: prefix.into_iter().map(|p| self.collect_pat(p)).collect(), - slice: slice.map(|p| self.collect_pat(p)), - suffix: suffix.into_iter().map(|p| self.collect_pat(p)).collect(), - } - } - ast::Pat::LiteralPat(lit) => { - if let Some(ast_lit) = lit.literal() { - let expr = Expr::Literal(ast_lit.kind().into()); - let expr_ptr = AstPtr::new(&ast::Expr::Literal(ast_lit)); - let expr_id = self.alloc_expr(expr, expr_ptr); - Pat::Lit(expr_id) - } else { - Pat::Missing - } - } - ast::Pat::RestPat(_) => { - // `RestPat` requires special handling and should not be mapped - // to a Pat. Here we are using `Pat::Missing` as a fallback for - // when `RestPat` is mapped to `Pat`, which can easily happen - // when the source code being analyzed has a malformed pattern - // which includes `..` in a place where it isn't valid. - - Pat::Missing - } - // FIXME: implement - ast::Pat::BoxPat(_) | ast::Pat::RangePat(_) | ast::Pat::MacroPat(_) => Pat::Missing, - }; - let ptr = AstPtr::new(&pat); - self.alloc_pat(pattern, Either::Left(ptr)) - } - - fn collect_pat_opt(&mut self, pat: Option) -> PatId { - if let Some(pat) = pat { - self.collect_pat(pat) - } else { - self.missing_pat() - } - } - - fn collect_tuple_pat(&mut self, args: AstChildren) -> (Vec, Option) { - // Find the location of the `..`, if there is one. Note that we do not - // consider the possiblity of there being multiple `..` here. 
- let ellipsis = args.clone().position(|p| matches!(p, ast::Pat::RestPat(_))); - // We want to skip the `..` pattern here, since we account for it above. - let args = args - .filter(|p| !matches!(p, ast::Pat::RestPat(_))) - .map(|p| self.collect_pat(p)) - .collect(); - - (args, ellipsis) - } -} - -impl From for BinaryOp { - fn from(ast_op: ast::BinOp) -> Self { - match ast_op { - ast::BinOp::BooleanOr => BinaryOp::LogicOp(LogicOp::Or), - ast::BinOp::BooleanAnd => BinaryOp::LogicOp(LogicOp::And), - ast::BinOp::EqualityTest => BinaryOp::CmpOp(CmpOp::Eq { negated: false }), - ast::BinOp::NegatedEqualityTest => BinaryOp::CmpOp(CmpOp::Eq { negated: true }), - ast::BinOp::LesserEqualTest => { - BinaryOp::CmpOp(CmpOp::Ord { ordering: Ordering::Less, strict: false }) - } - ast::BinOp::GreaterEqualTest => { - BinaryOp::CmpOp(CmpOp::Ord { ordering: Ordering::Greater, strict: false }) - } - ast::BinOp::LesserTest => { - BinaryOp::CmpOp(CmpOp::Ord { ordering: Ordering::Less, strict: true }) - } - ast::BinOp::GreaterTest => { - BinaryOp::CmpOp(CmpOp::Ord { ordering: Ordering::Greater, strict: true }) - } - ast::BinOp::Addition => BinaryOp::ArithOp(ArithOp::Add), - ast::BinOp::Multiplication => BinaryOp::ArithOp(ArithOp::Mul), - ast::BinOp::Subtraction => BinaryOp::ArithOp(ArithOp::Sub), - ast::BinOp::Division => BinaryOp::ArithOp(ArithOp::Div), - ast::BinOp::Remainder => BinaryOp::ArithOp(ArithOp::Rem), - ast::BinOp::LeftShift => BinaryOp::ArithOp(ArithOp::Shl), - ast::BinOp::RightShift => BinaryOp::ArithOp(ArithOp::Shr), - ast::BinOp::BitwiseXor => BinaryOp::ArithOp(ArithOp::BitXor), - ast::BinOp::BitwiseOr => BinaryOp::ArithOp(ArithOp::BitOr), - ast::BinOp::BitwiseAnd => BinaryOp::ArithOp(ArithOp::BitAnd), - ast::BinOp::Assignment => BinaryOp::Assignment { op: None }, - ast::BinOp::AddAssign => BinaryOp::Assignment { op: Some(ArithOp::Add) }, - ast::BinOp::DivAssign => BinaryOp::Assignment { op: Some(ArithOp::Div) }, - ast::BinOp::MulAssign => BinaryOp::Assignment { op: 
Some(ArithOp::Mul) }, - ast::BinOp::RemAssign => BinaryOp::Assignment { op: Some(ArithOp::Rem) }, - ast::BinOp::ShlAssign => BinaryOp::Assignment { op: Some(ArithOp::Shl) }, - ast::BinOp::ShrAssign => BinaryOp::Assignment { op: Some(ArithOp::Shr) }, - ast::BinOp::SubAssign => BinaryOp::Assignment { op: Some(ArithOp::Sub) }, - ast::BinOp::BitOrAssign => BinaryOp::Assignment { op: Some(ArithOp::BitOr) }, - ast::BinOp::BitAndAssign => BinaryOp::Assignment { op: Some(ArithOp::BitAnd) }, - ast::BinOp::BitXorAssign => BinaryOp::Assignment { op: Some(ArithOp::BitXor) }, - } - } -} - -impl From for Literal { - fn from(ast_lit_kind: ast::LiteralKind) -> Self { - match ast_lit_kind { - LiteralKind::IntNumber { suffix } => { - let known_name = suffix.and_then(|it| BuiltinInt::from_suffix(&it)); - - Literal::Int(Default::default(), known_name) - } - LiteralKind::FloatNumber { suffix } => { - let known_name = suffix.and_then(|it| BuiltinFloat::from_suffix(&it)); - - Literal::Float(Default::default(), known_name) - } - LiteralKind::ByteString => Literal::ByteString(Default::default()), - LiteralKind::String => Literal::String(Default::default()), - LiteralKind::Byte => Literal::Int(Default::default(), Some(BuiltinInt::U8)), - LiteralKind::Bool(val) => Literal::Bool(val), - LiteralKind::Char => Literal::Char(Default::default()), - } - } -} diff --git a/crates/ra_hir_def/src/body/scope.rs b/crates/ra_hir_def/src/body/scope.rs deleted file mode 100644 index 99e8766835..0000000000 --- a/crates/ra_hir_def/src/body/scope.rs +++ /dev/null @@ -1,456 +0,0 @@ -//! Name resolution for expressions. 
-use std::sync::Arc; - -use hir_expand::name::Name; -use ra_arena::{Arena, Idx}; -use rustc_hash::FxHashMap; - -use crate::{ - body::Body, - db::DefDatabase, - expr::{Expr, ExprId, Pat, PatId, Statement}, - DefWithBodyId, -}; - -pub type ScopeId = Idx; - -#[derive(Debug, PartialEq, Eq)] -pub struct ExprScopes { - scopes: Arena, - scope_by_expr: FxHashMap, -} - -#[derive(Debug, PartialEq, Eq)] -pub struct ScopeEntry { - name: Name, - pat: PatId, -} - -impl ScopeEntry { - pub fn name(&self) -> &Name { - &self.name - } - - pub fn pat(&self) -> PatId { - self.pat - } -} - -#[derive(Debug, PartialEq, Eq)] -pub struct ScopeData { - parent: Option, - entries: Vec, -} - -impl ExprScopes { - pub(crate) fn expr_scopes_query(db: &dyn DefDatabase, def: DefWithBodyId) -> Arc { - let body = db.body(def); - Arc::new(ExprScopes::new(&*body)) - } - - fn new(body: &Body) -> ExprScopes { - let mut scopes = - ExprScopes { scopes: Arena::default(), scope_by_expr: FxHashMap::default() }; - let root = scopes.root_scope(); - scopes.add_params_bindings(body, root, &body.params); - compute_expr_scopes(body.body_expr, body, &mut scopes, root); - scopes - } - - pub fn entries(&self, scope: ScopeId) -> &[ScopeEntry] { - &self.scopes[scope].entries - } - - pub fn scope_chain(&self, scope: Option) -> impl Iterator + '_ { - std::iter::successors(scope, move |&scope| self.scopes[scope].parent) - } - - pub fn resolve_name_in_scope(&self, scope: ScopeId, name: &Name) -> Option<&ScopeEntry> { - self.scope_chain(Some(scope)) - .find_map(|scope| self.entries(scope).iter().find(|it| it.name == *name)) - } - - pub fn scope_for(&self, expr: ExprId) -> Option { - self.scope_by_expr.get(&expr).copied() - } - - pub fn scope_by_expr(&self) -> &FxHashMap { - &self.scope_by_expr - } - - fn root_scope(&mut self) -> ScopeId { - self.scopes.alloc(ScopeData { parent: None, entries: vec![] }) - } - - fn new_scope(&mut self, parent: ScopeId) -> ScopeId { - self.scopes.alloc(ScopeData { parent: Some(parent), entries: 
vec![] }) - } - - fn add_bindings(&mut self, body: &Body, scope: ScopeId, pat: PatId) { - let pattern = &body[pat]; - if let Pat::Bind { name, .. } = pattern { - let entry = ScopeEntry { name: name.clone(), pat }; - self.scopes[scope].entries.push(entry); - } - - pattern.walk_child_pats(|pat| self.add_bindings(body, scope, pat)); - } - - fn add_params_bindings(&mut self, body: &Body, scope: ScopeId, params: &[PatId]) { - params.iter().for_each(|pat| self.add_bindings(body, scope, *pat)); - } - - fn set_scope(&mut self, node: ExprId, scope: ScopeId) { - self.scope_by_expr.insert(node, scope); - } -} - -fn compute_block_scopes( - statements: &[Statement], - tail: Option, - body: &Body, - scopes: &mut ExprScopes, - mut scope: ScopeId, -) { - for stmt in statements { - match stmt { - Statement::Let { pat, initializer, .. } => { - if let Some(expr) = initializer { - scopes.set_scope(*expr, scope); - compute_expr_scopes(*expr, body, scopes, scope); - } - scope = scopes.new_scope(scope); - scopes.add_bindings(body, scope, *pat); - } - Statement::Expr(expr) => { - scopes.set_scope(*expr, scope); - compute_expr_scopes(*expr, body, scopes, scope); - } - } - } - if let Some(expr) = tail { - compute_expr_scopes(expr, body, scopes, scope); - } -} - -fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope: ScopeId) { - scopes.set_scope(expr, scope); - match &body[expr] { - Expr::Block { statements, tail, .. } => { - compute_block_scopes(&statements, *tail, body, scopes, scope); - } - Expr::For { iterable, pat, body: body_expr, .. } => { - compute_expr_scopes(*iterable, body, scopes, scope); - let scope = scopes.new_scope(scope); - scopes.add_bindings(body, scope, *pat); - compute_expr_scopes(*body_expr, body, scopes, scope); - } - Expr::Lambda { args, body: body_expr, .. 
} => { - let scope = scopes.new_scope(scope); - scopes.add_params_bindings(body, scope, &args); - compute_expr_scopes(*body_expr, body, scopes, scope); - } - Expr::Match { expr, arms } => { - compute_expr_scopes(*expr, body, scopes, scope); - for arm in arms { - let scope = scopes.new_scope(scope); - scopes.add_bindings(body, scope, arm.pat); - if let Some(guard) = arm.guard { - scopes.set_scope(guard, scope); - compute_expr_scopes(guard, body, scopes, scope); - } - scopes.set_scope(arm.expr, scope); - compute_expr_scopes(arm.expr, body, scopes, scope); - } - } - e => e.walk_child_exprs(|e| compute_expr_scopes(e, body, scopes, scope)), - }; -} - -#[cfg(test)] -mod tests { - use hir_expand::{name::AsName, InFile}; - use ra_db::{fixture::WithFixture, FileId, SourceDatabase}; - use ra_syntax::{algo::find_node_at_offset, ast, AstNode}; - use test_utils::{assert_eq_text, extract_offset, mark}; - - use crate::{db::DefDatabase, test_db::TestDB, FunctionId, ModuleDefId}; - - fn find_function(db: &TestDB, file_id: FileId) -> FunctionId { - let krate = db.test_crate(); - let crate_def_map = db.crate_def_map(krate); - - let module = crate_def_map.modules_for_file(file_id).next().unwrap(); - let (_, def) = crate_def_map[module].scope.entries().next().unwrap(); - match def.take_values().unwrap() { - ModuleDefId::FunctionId(it) => it, - _ => panic!(), - } - } - - fn do_check(ra_fixture: &str, expected: &[&str]) { - let (offset, code) = extract_offset(ra_fixture); - let code = { - let mut buf = String::new(); - let off: usize = offset.into(); - buf.push_str(&code[..off]); - buf.push_str("<|>marker"); - buf.push_str(&code[off..]); - buf - }; - - let (db, position) = TestDB::with_position(&code); - let file_id = position.file_id; - let offset = position.offset; - - let file_syntax = db.parse(file_id).syntax_node(); - let marker: ast::PathExpr = find_node_at_offset(&file_syntax, offset).unwrap(); - let function = find_function(&db, file_id); - - let scopes = 
db.expr_scopes(function.into()); - let (_body, source_map) = db.body_with_source_map(function.into()); - - let expr_id = source_map - .node_expr(InFile { file_id: file_id.into(), value: &marker.into() }) - .unwrap(); - let scope = scopes.scope_for(expr_id); - - let actual = scopes - .scope_chain(scope) - .flat_map(|scope| scopes.entries(scope)) - .map(|it| it.name().to_string()) - .collect::>() - .join("\n"); - let expected = expected.join("\n"); - assert_eq_text!(&expected, &actual); - } - - #[test] - fn test_lambda_scope() { - do_check( - r" - fn quux(foo: i32) { - let f = |bar, baz: i32| { - <|> - }; - }", - &["bar", "baz", "foo"], - ); - } - - #[test] - fn test_call_scope() { - do_check( - r" - fn quux() { - f(|x| <|> ); - }", - &["x"], - ); - } - - #[test] - fn test_method_call_scope() { - do_check( - r" - fn quux() { - z.f(|x| <|> ); - }", - &["x"], - ); - } - - #[test] - fn test_loop_scope() { - do_check( - r" - fn quux() { - loop { - let x = (); - <|> - }; - }", - &["x"], - ); - } - - #[test] - fn test_match() { - do_check( - r" - fn quux() { - match () { - Some(x) => { - <|> - } - }; - }", - &["x"], - ); - } - - #[test] - fn test_shadow_variable() { - do_check( - r" - fn foo(x: String) { - let x : &str = &x<|>; - }", - &["x"], - ); - } - - #[test] - fn test_bindings_after_at() { - do_check( - r" -fn foo() { - match Some(()) { - opt @ Some(unit) => { - <|> - } - _ => {} - } -} -", - &["opt", "unit"], - ); - } - - #[test] - fn macro_inner_item() { - do_check( - r" - macro_rules! 
mac { - () => {{ - fn inner() {} - inner(); - }}; - } - - fn foo() { - mac!(); - <|> - } - ", - &[], - ); - } - - #[test] - fn broken_inner_item() { - do_check( - r" - fn foo() { - trait {} - <|> - } - ", - &[], - ); - } - - fn do_check_local_name(ra_fixture: &str, expected_offset: u32) { - let (db, position) = TestDB::with_position(ra_fixture); - let file_id = position.file_id; - let offset = position.offset; - - let file = db.parse(file_id).ok().unwrap(); - let expected_name = find_node_at_offset::(file.syntax(), expected_offset.into()) - .expect("failed to find a name at the target offset"); - let name_ref: ast::NameRef = find_node_at_offset(file.syntax(), offset).unwrap(); - - let function = find_function(&db, file_id); - - let scopes = db.expr_scopes(function.into()); - let (_body, source_map) = db.body_with_source_map(function.into()); - - let expr_scope = { - let expr_ast = name_ref.syntax().ancestors().find_map(ast::Expr::cast).unwrap(); - let expr_id = - source_map.node_expr(InFile { file_id: file_id.into(), value: &expr_ast }).unwrap(); - scopes.scope_for(expr_id).unwrap() - }; - - let resolved = scopes.resolve_name_in_scope(expr_scope, &name_ref.as_name()).unwrap(); - let pat_src = source_map.pat_syntax(resolved.pat()).unwrap(); - - let local_name = pat_src.value.either( - |it| it.syntax_node_ptr().to_node(file.syntax()), - |it| it.syntax_node_ptr().to_node(file.syntax()), - ); - assert_eq!(local_name.text_range(), expected_name.syntax().text_range()); - } - - #[test] - fn test_resolve_local_name() { - do_check_local_name( - r#" -fn foo(x: i32, y: u32) { - { - let z = x * 2; - } - { - let t = x<|> * 3; - } -} -"#, - 7, - ); - } - - #[test] - fn test_resolve_local_name_declaration() { - do_check_local_name( - r#" -fn foo(x: String) { - let x : &str = &x<|>; -} -"#, - 7, - ); - } - - #[test] - fn test_resolve_local_name_shadow() { - do_check_local_name( - r" -fn foo(x: String) { - let x : &str = &x; - x<|> -} -", - 28, - ); - } - - #[test] - fn 
ref_patterns_contribute_bindings() { - do_check_local_name( - r" -fn foo() { - if let Some(&from) = bar() { - from<|>; - } -} -", - 28, - ); - } - - #[test] - fn while_let_desugaring() { - mark::check!(infer_resolve_while_let); - do_check_local_name( - r#" -fn test() { - let foo: Option = None; - while let Option::Some(spam) = foo { - spam<|> - } -} -"#, - 75, - ); - } -} diff --git a/crates/ra_hir_def/src/data.rs b/crates/ra_hir_def/src/data.rs deleted file mode 100644 index 88a8ef9bff..0000000000 --- a/crates/ra_hir_def/src/data.rs +++ /dev/null @@ -1,279 +0,0 @@ -//! Contains basic data about various HIR declarations. - -use std::sync::Arc; - -use hir_expand::{name::Name, InFile}; -use ra_prof::profile; -use ra_syntax::ast; - -use crate::{ - attr::Attrs, - body::Expander, - db::DefDatabase, - item_tree::{AssocItem, ItemTreeId, ModItem}, - type_ref::{TypeBound, TypeRef}, - visibility::RawVisibility, - AssocContainerId, AssocItemId, ConstId, ConstLoc, FunctionId, FunctionLoc, HasModule, ImplId, - Intern, Lookup, ModuleId, StaticId, TraitId, TypeAliasId, TypeAliasLoc, -}; - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct FunctionData { - pub name: Name, - pub params: Vec, - pub ret_type: TypeRef, - pub attrs: Attrs, - /// True if the first param is `self`. This is relevant to decide whether this - /// can be called as a method. 
- pub has_self_param: bool, - pub is_unsafe: bool, - pub is_varargs: bool, - pub visibility: RawVisibility, -} - -impl FunctionData { - pub(crate) fn fn_data_query(db: &dyn DefDatabase, func: FunctionId) -> Arc { - let loc = func.lookup(db); - let item_tree = db.item_tree(loc.id.file_id); - let func = &item_tree[loc.id.value]; - - Arc::new(FunctionData { - name: func.name.clone(), - params: func.params.to_vec(), - ret_type: func.ret_type.clone(), - attrs: item_tree.attrs(ModItem::from(loc.id.value).into()).clone(), - has_self_param: func.has_self_param, - is_unsafe: func.is_unsafe, - is_varargs: func.is_varargs, - visibility: item_tree[func.visibility].clone(), - }) - } -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct TypeAliasData { - pub name: Name, - pub type_ref: Option, - pub visibility: RawVisibility, - /// Bounds restricting the type alias itself (eg. `type Ty: Bound;` in a trait or impl). - pub bounds: Vec, -} - -impl TypeAliasData { - pub(crate) fn type_alias_data_query( - db: &dyn DefDatabase, - typ: TypeAliasId, - ) -> Arc { - let loc = typ.lookup(db); - let item_tree = db.item_tree(loc.id.file_id); - let typ = &item_tree[loc.id.value]; - - Arc::new(TypeAliasData { - name: typ.name.clone(), - type_ref: typ.type_ref.clone(), - visibility: item_tree[typ.visibility].clone(), - bounds: typ.bounds.to_vec(), - }) - } -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct TraitData { - pub name: Name, - pub items: Vec<(Name, AssocItemId)>, - pub auto: bool, -} - -impl TraitData { - pub(crate) fn trait_data_query(db: &dyn DefDatabase, tr: TraitId) -> Arc { - let tr_loc = tr.lookup(db); - let item_tree = db.item_tree(tr_loc.id.file_id); - let tr_def = &item_tree[tr_loc.id.value]; - let name = tr_def.name.clone(); - let auto = tr_def.auto; - let module_id = tr_loc.container.module(db); - let container = AssocContainerId::TraitId(tr); - let mut expander = Expander::new(db, tr_loc.id.file_id, module_id); - - let items = collect_items( - db, - module_id, - 
&mut expander, - tr_def.items.iter().copied(), - tr_loc.id.file_id, - container, - 100, - ); - - Arc::new(TraitData { name, items, auto }) - } - - pub fn associated_types(&self) -> impl Iterator + '_ { - self.items.iter().filter_map(|(_name, item)| match item { - AssocItemId::TypeAliasId(t) => Some(*t), - _ => None, - }) - } - - pub fn associated_type_by_name(&self, name: &Name) -> Option { - self.items.iter().find_map(|(item_name, item)| match item { - AssocItemId::TypeAliasId(t) if item_name == name => Some(*t), - _ => None, - }) - } -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct ImplData { - pub target_trait: Option, - pub target_type: TypeRef, - pub items: Vec, - pub is_negative: bool, -} - -impl ImplData { - pub(crate) fn impl_data_query(db: &dyn DefDatabase, id: ImplId) -> Arc { - let _p = profile("impl_data_query"); - let impl_loc = id.lookup(db); - - let item_tree = db.item_tree(impl_loc.id.file_id); - let impl_def = &item_tree[impl_loc.id.value]; - let target_trait = impl_def.target_trait.clone(); - let target_type = impl_def.target_type.clone(); - let is_negative = impl_def.is_negative; - let module_id = impl_loc.container.module(db); - let container = AssocContainerId::ImplId(id); - let mut expander = Expander::new(db, impl_loc.id.file_id, module_id); - - let items = collect_items( - db, - module_id, - &mut expander, - impl_def.items.iter().copied(), - impl_loc.id.file_id, - container, - 100, - ); - let items = items.into_iter().map(|(_, item)| item).collect(); - - Arc::new(ImplData { target_trait, target_type, items, is_negative }) - } -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct ConstData { - /// const _: () = (); - pub name: Option, - pub type_ref: TypeRef, - pub visibility: RawVisibility, -} - -impl ConstData { - pub(crate) fn const_data_query(db: &dyn DefDatabase, konst: ConstId) -> Arc { - let loc = konst.lookup(db); - let item_tree = db.item_tree(loc.id.file_id); - let konst = &item_tree[loc.id.value]; - - Arc::new(ConstData 
{ - name: konst.name.clone(), - type_ref: konst.type_ref.clone(), - visibility: item_tree[konst.visibility].clone(), - }) - } -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct StaticData { - pub name: Option, - pub type_ref: TypeRef, - pub visibility: RawVisibility, - pub mutable: bool, -} - -impl StaticData { - pub(crate) fn static_data_query(db: &dyn DefDatabase, konst: StaticId) -> Arc { - let node = konst.lookup(db); - let item_tree = db.item_tree(node.id.file_id); - let statik = &item_tree[node.id.value]; - - Arc::new(StaticData { - name: Some(statik.name.clone()), - type_ref: statik.type_ref.clone(), - visibility: item_tree[statik.visibility].clone(), - mutable: statik.mutable, - }) - } -} - -fn collect_items( - db: &dyn DefDatabase, - module: ModuleId, - expander: &mut Expander, - assoc_items: impl Iterator, - file_id: crate::HirFileId, - container: AssocContainerId, - limit: usize, -) -> Vec<(Name, AssocItemId)> { - if limit == 0 { - return Vec::new(); - } - - let item_tree = db.item_tree(file_id); - let cfg_options = db.crate_graph()[module.krate].cfg_options.clone(); - - let mut items = Vec::new(); - for item in assoc_items { - match item { - AssocItem::Function(id) => { - let item = &item_tree[id]; - let attrs = item_tree.attrs(ModItem::from(id).into()); - if !attrs.is_cfg_enabled(&cfg_options) { - continue; - } - let def = FunctionLoc { container, id: ItemTreeId::new(file_id, id) }.intern(db); - items.push((item.name.clone(), def.into())); - } - // FIXME: cfg? 
- AssocItem::Const(id) => { - let item = &item_tree[id]; - let name = match item.name.clone() { - Some(name) => name, - None => continue, - }; - let def = ConstLoc { container, id: ItemTreeId::new(file_id, id) }.intern(db); - items.push((name, def.into())); - } - AssocItem::TypeAlias(id) => { - let item = &item_tree[id]; - let def = TypeAliasLoc { container, id: ItemTreeId::new(file_id, id) }.intern(db); - items.push((item.name.clone(), def.into())); - } - AssocItem::MacroCall(call) => { - let call = &item_tree[call]; - let ast_id_map = db.ast_id_map(file_id); - let root = db.parse_or_expand(file_id).unwrap(); - let call = ast_id_map.get(call.ast_id).to_node(&root); - - if let Some((mark, mac)) = expander.enter_expand(db, None, call) { - let src: InFile = expander.to_source(mac); - let item_tree = db.item_tree(src.file_id); - let iter = - item_tree.top_level_items().iter().filter_map(ModItem::as_assoc_item); - items.extend(collect_items( - db, - module, - expander, - iter, - src.file_id, - container, - limit - 1, - )); - - expander.exit(db, mark); - } - } - } - } - - items -} diff --git a/crates/ra_hir_def/src/db.rs b/crates/ra_hir_def/src/db.rs deleted file mode 100644 index 9c3ede2d79..0000000000 --- a/crates/ra_hir_def/src/db.rs +++ /dev/null @@ -1,121 +0,0 @@ -//! Defines database & queries for name resolution. 
-use std::sync::Arc; - -use hir_expand::{db::AstDatabase, HirFileId}; -use ra_db::{salsa, CrateId, SourceDatabase, Upcast}; -use ra_prof::profile; -use ra_syntax::SmolStr; - -use crate::{ - adt::{EnumData, StructData}, - attr::Attrs, - body::{scope::ExprScopes, Body, BodySourceMap}, - data::{ConstData, FunctionData, ImplData, StaticData, TraitData, TypeAliasData}, - docs::Documentation, - generics::GenericParams, - import_map::ImportMap, - item_tree::ItemTree, - lang_item::{LangItemTarget, LangItems}, - nameres::CrateDefMap, - AttrDefId, ConstId, ConstLoc, DefWithBodyId, EnumId, EnumLoc, FunctionId, FunctionLoc, - GenericDefId, ImplId, ImplLoc, ModuleId, StaticId, StaticLoc, StructId, StructLoc, TraitId, - TraitLoc, TypeAliasId, TypeAliasLoc, UnionId, UnionLoc, -}; - -#[salsa::query_group(InternDatabaseStorage)] -pub trait InternDatabase: SourceDatabase { - #[salsa::interned] - fn intern_function(&self, loc: FunctionLoc) -> FunctionId; - #[salsa::interned] - fn intern_struct(&self, loc: StructLoc) -> StructId; - #[salsa::interned] - fn intern_union(&self, loc: UnionLoc) -> UnionId; - #[salsa::interned] - fn intern_enum(&self, loc: EnumLoc) -> EnumId; - #[salsa::interned] - fn intern_const(&self, loc: ConstLoc) -> ConstId; - #[salsa::interned] - fn intern_static(&self, loc: StaticLoc) -> StaticId; - #[salsa::interned] - fn intern_trait(&self, loc: TraitLoc) -> TraitId; - #[salsa::interned] - fn intern_type_alias(&self, loc: TypeAliasLoc) -> TypeAliasId; - #[salsa::interned] - fn intern_impl(&self, loc: ImplLoc) -> ImplId; -} - -#[salsa::query_group(DefDatabaseStorage)] -pub trait DefDatabase: InternDatabase + AstDatabase + Upcast { - #[salsa::invoke(ItemTree::item_tree_query)] - fn item_tree(&self, file_id: HirFileId) -> Arc; - - #[salsa::invoke(crate_def_map_wait)] - #[salsa::transparent] - fn crate_def_map(&self, krate: CrateId) -> Arc; - - #[salsa::invoke(CrateDefMap::crate_def_map_query)] - fn crate_def_map_query(&self, krate: CrateId) -> Arc; - - 
#[salsa::invoke(StructData::struct_data_query)] - fn struct_data(&self, id: StructId) -> Arc; - #[salsa::invoke(StructData::union_data_query)] - fn union_data(&self, id: UnionId) -> Arc; - - #[salsa::invoke(EnumData::enum_data_query)] - fn enum_data(&self, e: EnumId) -> Arc; - - #[salsa::invoke(ImplData::impl_data_query)] - fn impl_data(&self, e: ImplId) -> Arc; - - #[salsa::invoke(TraitData::trait_data_query)] - fn trait_data(&self, e: TraitId) -> Arc; - - #[salsa::invoke(TypeAliasData::type_alias_data_query)] - fn type_alias_data(&self, e: TypeAliasId) -> Arc; - - #[salsa::invoke(FunctionData::fn_data_query)] - fn function_data(&self, func: FunctionId) -> Arc; - - #[salsa::invoke(ConstData::const_data_query)] - fn const_data(&self, konst: ConstId) -> Arc; - - #[salsa::invoke(StaticData::static_data_query)] - fn static_data(&self, konst: StaticId) -> Arc; - - #[salsa::invoke(Body::body_with_source_map_query)] - fn body_with_source_map(&self, def: DefWithBodyId) -> (Arc, Arc); - - #[salsa::invoke(Body::body_query)] - fn body(&self, def: DefWithBodyId) -> Arc; - - #[salsa::invoke(ExprScopes::expr_scopes_query)] - fn expr_scopes(&self, def: DefWithBodyId) -> Arc; - - #[salsa::invoke(GenericParams::generic_params_query)] - fn generic_params(&self, def: GenericDefId) -> Arc; - - #[salsa::invoke(Attrs::attrs_query)] - fn attrs(&self, def: AttrDefId) -> Attrs; - - #[salsa::invoke(LangItems::module_lang_items_query)] - fn module_lang_items(&self, module: ModuleId) -> Option>; - - #[salsa::invoke(LangItems::crate_lang_items_query)] - fn crate_lang_items(&self, krate: CrateId) -> Arc; - - #[salsa::invoke(LangItems::lang_item_query)] - fn lang_item(&self, start_crate: CrateId, item: SmolStr) -> Option; - - // FIXME(https://github.com/rust-analyzer/rust-analyzer/issues/2148#issuecomment-550519102) - // Remove this query completely, in favor of `Attrs::docs` method - #[salsa::invoke(Documentation::documentation_query)] - fn documentation(&self, def: AttrDefId) -> Option; - - 
#[salsa::invoke(ImportMap::import_map_query)] - fn import_map(&self, krate: CrateId) -> Arc; -} - -fn crate_def_map_wait(db: &impl DefDatabase, krate: CrateId) -> Arc { - let _p = profile("crate_def_map:wait"); - db.crate_def_map_query(krate) -} diff --git a/crates/ra_hir_def/src/diagnostics.rs b/crates/ra_hir_def/src/diagnostics.rs deleted file mode 100644 index 30db48f868..0000000000 --- a/crates/ra_hir_def/src/diagnostics.rs +++ /dev/null @@ -1,27 +0,0 @@ -//! Diagnostics produced by `hir_def`. - -use std::any::Any; - -use hir_expand::diagnostics::Diagnostic; -use ra_syntax::{ast, AstPtr, SyntaxNodePtr}; - -use hir_expand::{HirFileId, InFile}; - -#[derive(Debug)] -pub struct UnresolvedModule { - pub file: HirFileId, - pub decl: AstPtr, - pub candidate: String, -} - -impl Diagnostic for UnresolvedModule { - fn message(&self) -> String { - "unresolved module".to_string() - } - fn source(&self) -> InFile { - InFile::new(self.file, self.decl.clone().into()) - } - fn as_any(&self) -> &(dyn Any + Send + 'static) { - self - } -} diff --git a/crates/ra_hir_def/src/docs.rs b/crates/ra_hir_def/src/docs.rs deleted file mode 100644 index 2630b3d895..0000000000 --- a/crates/ra_hir_def/src/docs.rs +++ /dev/null @@ -1,121 +0,0 @@ -//! Defines hir documentation. -//! -//! This really shouldn't exist, instead, we should deshugar doc comments into attributes, see -//! 
https://github.com/rust-analyzer/rust-analyzer/issues/2148#issuecomment-550519102 - -use std::sync::Arc; - -use either::Either; -use ra_syntax::ast; - -use crate::{ - db::DefDatabase, - src::{HasChildSource, HasSource}, - AdtId, AttrDefId, Lookup, -}; - -/// Holds documentation -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct Documentation(Arc); - -impl Into for Documentation { - fn into(self) -> String { - self.as_str().to_owned() - } -} - -impl Documentation { - fn new(s: &str) -> Documentation { - Documentation(s.into()) - } - - pub fn from_ast(node: &N) -> Option - where - N: ast::DocCommentsOwner + ast::AttrsOwner, - { - docs_from_ast(node) - } - - pub fn as_str(&self) -> &str { - &*self.0 - } - - pub(crate) fn documentation_query( - db: &dyn DefDatabase, - def: AttrDefId, - ) -> Option { - match def { - AttrDefId::ModuleId(module) => { - let def_map = db.crate_def_map(module.krate); - let src = def_map[module.local_id].declaration_source(db)?; - docs_from_ast(&src.value) - } - AttrDefId::FieldId(it) => { - let src = it.parent.child_source(db); - match &src.value[it.local_id] { - Either::Left(_tuple) => None, - Either::Right(record) => docs_from_ast(record), - } - } - AttrDefId::AdtId(it) => match it { - AdtId::StructId(it) => docs_from_ast(&it.lookup(db).source(db).value), - AdtId::EnumId(it) => docs_from_ast(&it.lookup(db).source(db).value), - AdtId::UnionId(it) => docs_from_ast(&it.lookup(db).source(db).value), - }, - AttrDefId::EnumVariantId(it) => { - let src = it.parent.child_source(db); - docs_from_ast(&src.value[it.local_id]) - } - AttrDefId::TraitId(it) => docs_from_ast(&it.lookup(db).source(db).value), - AttrDefId::MacroDefId(it) => docs_from_ast(&it.ast_id?.to_node(db.upcast())), - AttrDefId::ConstId(it) => docs_from_ast(&it.lookup(db).source(db).value), - AttrDefId::StaticId(it) => docs_from_ast(&it.lookup(db).source(db).value), - AttrDefId::FunctionId(it) => docs_from_ast(&it.lookup(db).source(db).value), - AttrDefId::TypeAliasId(it) => 
docs_from_ast(&it.lookup(db).source(db).value), - AttrDefId::ImplId(_) => None, - } - } -} - -pub(crate) fn docs_from_ast(node: &N) -> Option -where - N: ast::DocCommentsOwner + ast::AttrsOwner, -{ - let doc_comment_text = node.doc_comment_text(); - let doc_attr_text = expand_doc_attrs(node); - let docs = merge_doc_comments_and_attrs(doc_comment_text, doc_attr_text); - docs.map(|it| Documentation::new(&it)) -} - -fn merge_doc_comments_and_attrs( - doc_comment_text: Option, - doc_attr_text: Option, -) -> Option { - match (doc_comment_text, doc_attr_text) { - (Some(mut comment_text), Some(attr_text)) => { - comment_text.push_str("\n\n"); - comment_text.push_str(&attr_text); - Some(comment_text) - } - (Some(comment_text), None) => Some(comment_text), - (None, Some(attr_text)) => Some(attr_text), - (None, None) => None, - } -} - -fn expand_doc_attrs(owner: &dyn ast::AttrsOwner) -> Option { - let mut docs = String::new(); - for attr in owner.attrs() { - if let Some(("doc", value)) = - attr.as_simple_key_value().as_ref().map(|(k, v)| (k.as_str(), v.as_str())) - { - docs.push_str(value); - docs.push_str("\n\n"); - } - } - if docs.is_empty() { - None - } else { - Some(docs.trim_end_matches("\n\n").to_owned()) - } -} diff --git a/crates/ra_hir_def/src/expr.rs b/crates/ra_hir_def/src/expr.rs deleted file mode 100644 index e41cfc16b9..0000000000 --- a/crates/ra_hir_def/src/expr.rs +++ /dev/null @@ -1,420 +0,0 @@ -//! This module describes hir-level representation of expressions. -//! -//! This representaion is: -//! -//! 1. Identity-based. Each expression has an `id`, so we can distinguish -//! between different `1` in `1 + 1`. -//! 2. Independent of syntax. Though syntactic provenance information can be -//! attached separately via id-based side map. -//! 3. Unresolved. Paths are stored as sequences of names, and not as defs the -//! names refer to. -//! 4. Desugared. There's no `if let`. -//! -//! See also a neighboring `body` module. 
- -use hir_expand::name::Name; -use ra_arena::{Idx, RawId}; -use ra_syntax::ast::RangeOp; - -use crate::{ - builtin_type::{BuiltinFloat, BuiltinInt}, - path::{GenericArgs, Path}, - type_ref::{Mutability, Rawness, TypeRef}, -}; - -pub type ExprId = Idx; -pub(crate) fn dummy_expr_id() -> ExprId { - ExprId::from_raw(RawId::from(!0)) -} - -pub type PatId = Idx; - -#[derive(Debug, Clone, Eq, PartialEq)] -pub enum Literal { - String(String), - ByteString(Vec), - Char(char), - Bool(bool), - Int(u64, Option), - Float(u64, Option), // FIXME: f64 is not Eq -} - -#[derive(Debug, Clone, Eq, PartialEq)] -pub enum Expr { - /// This is produced if the syntax tree does not have a required expression piece. - Missing, - Path(Path), - If { - condition: ExprId, - then_branch: ExprId, - else_branch: Option, - }, - Block { - statements: Vec, - tail: Option, - label: Option, - }, - Loop { - body: ExprId, - label: Option, - }, - While { - condition: ExprId, - body: ExprId, - label: Option, - }, - For { - iterable: ExprId, - pat: PatId, - body: ExprId, - label: Option, - }, - Call { - callee: ExprId, - args: Vec, - }, - MethodCall { - receiver: ExprId, - method_name: Name, - args: Vec, - generic_args: Option, - }, - Match { - expr: ExprId, - arms: Vec, - }, - Continue { - label: Option, - }, - Break { - expr: Option, - label: Option, - }, - Return { - expr: Option, - }, - RecordLit { - path: Option, - fields: Vec, - spread: Option, - }, - Field { - expr: ExprId, - name: Name, - }, - Await { - expr: ExprId, - }, - Try { - expr: ExprId, - }, - TryBlock { - body: ExprId, - }, - Cast { - expr: ExprId, - type_ref: TypeRef, - }, - Ref { - expr: ExprId, - rawness: Rawness, - mutability: Mutability, - }, - Box { - expr: ExprId, - }, - UnaryOp { - expr: ExprId, - op: UnaryOp, - }, - BinaryOp { - lhs: ExprId, - rhs: ExprId, - op: Option, - }, - Range { - lhs: Option, - rhs: Option, - range_type: RangeOp, - }, - Index { - base: ExprId, - index: ExprId, - }, - Lambda { - args: Vec, - arg_types: Vec>, 
- ret_type: Option, - body: ExprId, - }, - Tuple { - exprs: Vec, - }, - Unsafe { - body: ExprId, - }, - Array(Array), - Literal(Literal), -} - -#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] -pub enum BinaryOp { - LogicOp(LogicOp), - ArithOp(ArithOp), - CmpOp(CmpOp), - Assignment { op: Option }, -} - -#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] -pub enum LogicOp { - And, - Or, -} - -#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] -pub enum CmpOp { - Eq { negated: bool }, - Ord { ordering: Ordering, strict: bool }, -} - -#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] -pub enum Ordering { - Less, - Greater, -} - -#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] -pub enum ArithOp { - Add, - Mul, - Sub, - Div, - Rem, - Shl, - Shr, - BitXor, - BitOr, - BitAnd, -} - -pub use ra_syntax::ast::PrefixOp as UnaryOp; -#[derive(Debug, Clone, Eq, PartialEq)] -pub enum Array { - ElementList(Vec), - Repeat { initializer: ExprId, repeat: ExprId }, -} - -#[derive(Debug, Clone, Eq, PartialEq)] -pub struct MatchArm { - pub pat: PatId, - pub guard: Option, - pub expr: ExprId, -} - -#[derive(Debug, Clone, Eq, PartialEq)] -pub struct RecordLitField { - pub name: Name, - pub expr: ExprId, -} - -#[derive(Debug, Clone, Eq, PartialEq)] -pub enum Statement { - Let { pat: PatId, type_ref: Option, initializer: Option }, - Expr(ExprId), -} - -impl Expr { - pub fn walk_child_exprs(&self, mut f: impl FnMut(ExprId)) { - match self { - Expr::Missing => {} - Expr::Path(_) => {} - Expr::If { condition, then_branch, else_branch } => { - f(*condition); - f(*then_branch); - if let Some(else_branch) = else_branch { - f(*else_branch); - } - } - Expr::Block { statements, tail, .. } => { - for stmt in statements { - match stmt { - Statement::Let { initializer, .. 
} => { - if let Some(expr) = initializer { - f(*expr); - } - } - Statement::Expr(e) => f(*e), - } - } - if let Some(expr) = tail { - f(*expr); - } - } - Expr::TryBlock { body } | Expr::Unsafe { body } => f(*body), - Expr::Loop { body, .. } => f(*body), - Expr::While { condition, body, .. } => { - f(*condition); - f(*body); - } - Expr::For { iterable, body, .. } => { - f(*iterable); - f(*body); - } - Expr::Call { callee, args } => { - f(*callee); - for arg in args { - f(*arg); - } - } - Expr::MethodCall { receiver, args, .. } => { - f(*receiver); - for arg in args { - f(*arg); - } - } - Expr::Match { expr, arms } => { - f(*expr); - for arm in arms { - f(arm.expr); - } - } - Expr::Continue { .. } => {} - Expr::Break { expr, .. } | Expr::Return { expr } => { - if let Some(expr) = expr { - f(*expr); - } - } - Expr::RecordLit { fields, spread, .. } => { - for field in fields { - f(field.expr); - } - if let Some(expr) = spread { - f(*expr); - } - } - Expr::Lambda { body, .. } => { - f(*body); - } - Expr::BinaryOp { lhs, rhs, .. } => { - f(*lhs); - f(*rhs); - } - Expr::Range { lhs, rhs, .. } => { - if let Some(lhs) = rhs { - f(*lhs); - } - if let Some(rhs) = lhs { - f(*rhs); - } - } - Expr::Index { base, index } => { - f(*base); - f(*index); - } - Expr::Field { expr, .. } - | Expr::Await { expr } - | Expr::Try { expr } - | Expr::Cast { expr, .. } - | Expr::Ref { expr, .. } - | Expr::UnaryOp { expr, .. } - | Expr::Box { expr } => { - f(*expr); - } - Expr::Tuple { exprs } => { - for expr in exprs { - f(*expr); - } - } - Expr::Array(a) => match a { - Array::ElementList(exprs) => { - for expr in exprs { - f(*expr); - } - } - Array::Repeat { initializer, repeat } => { - f(*initializer); - f(*repeat) - } - }, - Expr::Literal(_) => {} - } - } -} - -/// Explicit binding annotations given in the HIR for a binding. Note -/// that this is not the final binding *mode* that we infer after type -/// inference. 
-#[derive(Clone, PartialEq, Eq, Debug, Copy)] -pub enum BindingAnnotation { - /// No binding annotation given: this means that the final binding mode - /// will depend on whether we have skipped through a `&` reference - /// when matching. For example, the `x` in `Some(x)` will have binding - /// mode `None`; if you do `let Some(x) = &Some(22)`, it will - /// ultimately be inferred to be by-reference. - Unannotated, - - /// Annotated with `mut x` -- could be either ref or not, similar to `None`. - Mutable, - - /// Annotated as `ref`, like `ref x` - Ref, - - /// Annotated as `ref mut x`. - RefMut, -} - -impl BindingAnnotation { - pub fn new(is_mutable: bool, is_ref: bool) -> Self { - match (is_mutable, is_ref) { - (true, true) => BindingAnnotation::RefMut, - (false, true) => BindingAnnotation::Ref, - (true, false) => BindingAnnotation::Mutable, - (false, false) => BindingAnnotation::Unannotated, - } - } -} - -#[derive(Debug, Clone, Eq, PartialEq)] -pub struct RecordFieldPat { - pub name: Name, - pub pat: PatId, -} - -/// Close relative to rustc's hir::PatKind -#[derive(Debug, Clone, Eq, PartialEq)] -pub enum Pat { - Missing, - Wild, - Tuple { args: Vec, ellipsis: Option }, - Or(Vec), - Record { path: Option, args: Vec, ellipsis: bool }, - Range { start: ExprId, end: ExprId }, - Slice { prefix: Vec, slice: Option, suffix: Vec }, - Path(Path), - Lit(ExprId), - Bind { mode: BindingAnnotation, name: Name, subpat: Option }, - TupleStruct { path: Option, args: Vec, ellipsis: Option }, - Ref { pat: PatId, mutability: Mutability }, -} - -impl Pat { - pub fn walk_child_pats(&self, mut f: impl FnMut(PatId)) { - match self { - Pat::Range { .. } | Pat::Lit(..) | Pat::Path(..) | Pat::Wild | Pat::Missing => {} - Pat::Bind { subpat, .. } => { - subpat.iter().copied().for_each(f); - } - Pat::Or(args) | Pat::Tuple { args, .. } | Pat::TupleStruct { args, .. } => { - args.iter().copied().for_each(f); - } - Pat::Ref { pat, .. 
} => f(*pat), - Pat::Slice { prefix, slice, suffix } => { - let total_iter = prefix.iter().chain(slice.iter()).chain(suffix.iter()); - total_iter.copied().for_each(f); - } - Pat::Record { args, .. } => { - args.iter().map(|f| f.pat).for_each(f); - } - } - } -} diff --git a/crates/ra_hir_def/src/find_path.rs b/crates/ra_hir_def/src/find_path.rs deleted file mode 100644 index 06701a8309..0000000000 --- a/crates/ra_hir_def/src/find_path.rs +++ /dev/null @@ -1,691 +0,0 @@ -//! An algorithm to find a path to refer to a certain item. - -use hir_expand::name::{known, AsName, Name}; -use ra_prof::profile; -use rustc_hash::FxHashSet; -use test_utils::mark; - -use crate::{ - db::DefDatabase, - item_scope::ItemInNs, - path::{ModPath, PathKind}, - visibility::Visibility, - ModuleDefId, ModuleId, -}; - -// FIXME: handle local items - -/// Find a path that can be used to refer to a certain item. This can depend on -/// *from where* you're referring to the item, hence the `from` parameter. -pub fn find_path(db: &dyn DefDatabase, item: ItemInNs, from: ModuleId) -> Option { - let _p = profile("find_path"); - find_path_inner(db, item, from, MAX_PATH_LEN) -} - -const MAX_PATH_LEN: usize = 15; - -impl ModPath { - fn starts_with_std(&self) -> bool { - self.segments.first() == Some(&known::std) - } - - // When std library is present, paths starting with `std::` - // should be preferred over paths starting with `core::` and `alloc::` - fn can_start_with_std(&self) -> bool { - let first_segment = self.segments.first(); - first_segment == Some(&known::alloc) || first_segment == Some(&known::core) - } -} - -fn find_path_inner( - db: &dyn DefDatabase, - item: ItemInNs, - from: ModuleId, - max_len: usize, -) -> Option { - if max_len == 0 { - return None; - } - - // Base cases: - - // - if the item is already in scope, return the name under which it is - let def_map = db.crate_def_map(from.krate); - let from_scope: &crate::item_scope::ItemScope = &def_map.modules[from.local_id].scope; - if let 
Some((name, _)) = from_scope.name_of(item) { - return Some(ModPath::from_segments(PathKind::Plain, vec![name.clone()])); - } - - // - if the item is the crate root, return `crate` - if item - == ItemInNs::Types(ModuleDefId::ModuleId(ModuleId { - krate: from.krate, - local_id: def_map.root, - })) - { - return Some(ModPath::from_segments(PathKind::Crate, Vec::new())); - } - - // - if the item is the module we're in, use `self` - if item == ItemInNs::Types(from.into()) { - return Some(ModPath::from_segments(PathKind::Super(0), Vec::new())); - } - - // - if the item is the parent module, use `super` (this is not used recursively, since `super::super` is ugly) - if let Some(parent_id) = def_map.modules[from.local_id].parent { - if item - == ItemInNs::Types(ModuleDefId::ModuleId(ModuleId { - krate: from.krate, - local_id: parent_id, - })) - { - return Some(ModPath::from_segments(PathKind::Super(1), Vec::new())); - } - } - - // - if the item is the crate root of a dependency crate, return the name from the extern prelude - for (name, def_id) in &def_map.extern_prelude { - if item == ItemInNs::Types(*def_id) { - return Some(ModPath::from_segments(PathKind::Plain, vec![name.clone()])); - } - } - - // - if the item is in the prelude, return the name from there - if let Some(prelude_module) = def_map.prelude { - let prelude_def_map = db.crate_def_map(prelude_module.krate); - let prelude_scope: &crate::item_scope::ItemScope = - &prelude_def_map.modules[prelude_module.local_id].scope; - if let Some((name, vis)) = prelude_scope.name_of(item) { - if vis.is_visible_from(db, from) { - return Some(ModPath::from_segments(PathKind::Plain, vec![name.clone()])); - } - } - } - - // - if the item is a builtin, it's in scope - if let ItemInNs::Types(ModuleDefId::BuiltinType(builtin)) = item { - return Some(ModPath::from_segments(PathKind::Plain, vec![builtin.as_name()])); - } - - // Recursive case: - // - if the item is an enum variant, refer to it via the enum - if let 
Some(ModuleDefId::EnumVariantId(variant)) = item.as_module_def_id() { - if let Some(mut path) = find_path(db, ItemInNs::Types(variant.parent.into()), from) { - let data = db.enum_data(variant.parent); - path.segments.push(data.variants[variant.local_id].name.clone()); - return Some(path); - } - // If this doesn't work, it seems we have no way of referring to the - // enum; that's very weird, but there might still be a reexport of the - // variant somewhere - } - - // - otherwise, look for modules containing (reexporting) it and import it from one of those - - let crate_root = ModuleId { local_id: def_map.root, krate: from.krate }; - let crate_attrs = db.attrs(crate_root.into()); - let prefer_no_std = crate_attrs.by_key("no_std").exists(); - let mut best_path = None; - let mut best_path_len = max_len; - - if item.krate(db) == Some(from.krate) { - // Item was defined in the same crate that wants to import it. It cannot be found in any - // dependency in this case. - - let local_imports = find_local_import_locations(db, item, from); - for (module_id, name) in local_imports { - if let Some(mut path) = find_path_inner( - db, - ItemInNs::Types(ModuleDefId::ModuleId(module_id)), - from, - best_path_len - 1, - ) { - path.segments.push(name); - - let new_path = if let Some(best_path) = best_path { - select_best_path(best_path, path, prefer_no_std) - } else { - path - }; - best_path_len = new_path.len(); - best_path = Some(new_path); - } - } - } else { - // Item was defined in some upstream crate. This means that it must be exported from one, - // too (unless we can't name it at all). It could *also* be (re)exported by the same crate - // that wants to import it here, but we always prefer to use the external path here. 
- - let crate_graph = db.crate_graph(); - let extern_paths = crate_graph[from.krate].dependencies.iter().filter_map(|dep| { - let import_map = db.import_map(dep.crate_id); - import_map.import_info_for(item).and_then(|info| { - // Determine best path for containing module and append last segment from `info`. - let mut path = find_path_inner( - db, - ItemInNs::Types(ModuleDefId::ModuleId(info.container)), - from, - best_path_len - 1, - )?; - path.segments.push(info.path.segments.last().unwrap().clone()); - Some(path) - }) - }); - - for path in extern_paths { - let new_path = if let Some(best_path) = best_path { - select_best_path(best_path, path, prefer_no_std) - } else { - path - }; - best_path = Some(new_path); - } - } - - best_path -} - -fn select_best_path(old_path: ModPath, new_path: ModPath, prefer_no_std: bool) -> ModPath { - if old_path.starts_with_std() && new_path.can_start_with_std() { - if prefer_no_std { - mark::hit!(prefer_no_std_paths); - new_path - } else { - mark::hit!(prefer_std_paths); - old_path - } - } else if new_path.starts_with_std() && old_path.can_start_with_std() { - if prefer_no_std { - mark::hit!(prefer_no_std_paths); - old_path - } else { - mark::hit!(prefer_std_paths); - new_path - } - } else if new_path.len() < old_path.len() { - new_path - } else { - old_path - } -} - -/// Finds locations in `from.krate` from which `item` can be imported by `from`. -fn find_local_import_locations( - db: &dyn DefDatabase, - item: ItemInNs, - from: ModuleId, -) -> Vec<(ModuleId, Name)> { - let _p = profile("find_local_import_locations"); - - // `from` can import anything below `from` with visibility of at least `from`, and anything - // above `from` with any visibility. That means we do not need to descend into private siblings - // of `from` (and similar). - - let def_map = db.crate_def_map(from.krate); - - // Compute the initial worklist. We start with all direct child modules of `from` as well as all - // of its (recursive) parent modules. 
- let data = &def_map.modules[from.local_id]; - let mut worklist = data - .children - .values() - .map(|child| ModuleId { krate: from.krate, local_id: *child }) - .collect::>(); - let mut parent = data.parent; - while let Some(p) = parent { - worklist.push(ModuleId { krate: from.krate, local_id: p }); - parent = def_map.modules[p].parent; - } - - let mut seen: FxHashSet<_> = FxHashSet::default(); - - let mut locations = Vec::new(); - while let Some(module) = worklist.pop() { - if !seen.insert(module) { - continue; // already processed this module - } - - let ext_def_map; - let data = if module.krate == from.krate { - &def_map[module.local_id] - } else { - // The crate might reexport a module defined in another crate. - ext_def_map = db.crate_def_map(module.krate); - &ext_def_map[module.local_id] - }; - - if let Some((name, vis)) = data.scope.name_of(item) { - if vis.is_visible_from(db, from) { - let is_private = if let Visibility::Module(private_to) = vis { - private_to.local_id == module.local_id - } else { - false - }; - let is_original_def = if let Some(module_def_id) = item.as_module_def_id() { - data.scope.declarations().any(|it| it == module_def_id) - } else { - false - }; - - // Ignore private imports. these could be used if we are - // in a submodule of this module, but that's usually not - // what the user wants; and if this module can import - // the item and we're a submodule of it, so can we. - // Also this keeps the cached data smaller. - if !is_private || is_original_def { - locations.push((module, name.clone())); - } - } - } - - // Descend into all modules visible from `from`. 
- for (_, per_ns) in data.scope.entries() { - if let Some((ModuleDefId::ModuleId(module), vis)) = per_ns.take_types_vis() { - if vis.is_visible_from(db, from) { - worklist.push(module); - } - } - } - } - - locations -} - -#[cfg(test)] -mod tests { - use hir_expand::hygiene::Hygiene; - use ra_db::fixture::WithFixture; - use ra_syntax::ast::AstNode; - use test_utils::mark; - - use crate::test_db::TestDB; - - use super::*; - - /// `code` needs to contain a cursor marker; checks that `find_path` for the - /// item the `path` refers to returns that same path when called from the - /// module the cursor is in. - fn check_found_path(ra_fixture: &str, path: &str) { - let (db, pos) = TestDB::with_position(ra_fixture); - let module = db.module_for_file(pos.file_id); - let parsed_path_file = ra_syntax::SourceFile::parse(&format!("use {};", path)); - let ast_path = parsed_path_file - .syntax_node() - .descendants() - .find_map(ra_syntax::ast::Path::cast) - .unwrap(); - let mod_path = ModPath::from_src(ast_path, &Hygiene::new_unhygienic()).unwrap(); - - let crate_def_map = db.crate_def_map(module.krate); - let resolved = crate_def_map - .resolve_path( - &db, - module.local_id, - &mod_path, - crate::item_scope::BuiltinShadowMode::Module, - ) - .0 - .take_types() - .unwrap(); - - let found_path = find_path(&db, ItemInNs::Types(resolved), module); - - assert_eq!(found_path, Some(mod_path)); - } - - #[test] - fn same_module() { - let code = r#" - //- /main.rs - struct S; - <|> - "#; - check_found_path(code, "S"); - } - - #[test] - fn enum_variant() { - let code = r#" - //- /main.rs - enum E { A } - <|> - "#; - check_found_path(code, "E::A"); - } - - #[test] - fn sub_module() { - let code = r#" - //- /main.rs - mod foo { - pub struct S; - } - <|> - "#; - check_found_path(code, "foo::S"); - } - - #[test] - fn super_module() { - let code = r#" - //- /main.rs - mod foo; - //- /foo.rs - mod bar; - struct S; - //- /foo/bar.rs - <|> - "#; - check_found_path(code, "super::S"); - } - - 
#[test] - fn self_module() { - let code = r#" - //- /main.rs - mod foo; - //- /foo.rs - <|> - "#; - check_found_path(code, "self"); - } - - #[test] - fn crate_root() { - let code = r#" - //- /main.rs - mod foo; - //- /foo.rs - <|> - "#; - check_found_path(code, "crate"); - } - - #[test] - fn same_crate() { - let code = r#" - //- /main.rs - mod foo; - struct S; - //- /foo.rs - <|> - "#; - check_found_path(code, "crate::S"); - } - - #[test] - fn different_crate() { - let code = r#" - //- /main.rs crate:main deps:std - <|> - //- /std.rs crate:std - pub struct S; - "#; - check_found_path(code, "std::S"); - } - - #[test] - fn different_crate_renamed() { - let code = r#" - //- /main.rs crate:main deps:std - extern crate std as std_renamed; - <|> - //- /std.rs crate:std - pub struct S; - "#; - check_found_path(code, "std_renamed::S"); - } - - #[test] - fn partially_imported() { - // Tests that short paths are used even for external items, when parts of the path are - // already in scope. - check_found_path( - r#" - //- /main.rs crate:main deps:ra_syntax - - use ra_syntax::ast; - <|> - - //- /lib.rs crate:ra_syntax - pub mod ast { - pub enum ModuleItem { - A, B, C, - } - } - "#, - "ast::ModuleItem", - ); - - check_found_path( - r#" - //- /main.rs crate:main deps:ra_syntax - - <|> - - //- /lib.rs crate:ra_syntax - pub mod ast { - pub enum ModuleItem { - A, B, C, - } - } - "#, - "ra_syntax::ast::ModuleItem", - ); - } - - #[test] - fn same_crate_reexport() { - let code = r#" - //- /main.rs - mod bar { - mod foo { pub(super) struct S; } - pub(crate) use foo::*; - } - <|> - "#; - check_found_path(code, "bar::S"); - } - - #[test] - fn same_crate_reexport_rename() { - let code = r#" - //- /main.rs - mod bar { - mod foo { pub(super) struct S; } - pub(crate) use foo::S as U; - } - <|> - "#; - check_found_path(code, "bar::U"); - } - - #[test] - fn different_crate_reexport() { - let code = r#" - //- /main.rs crate:main deps:std - <|> - //- /std.rs crate:std deps:core - pub use 
core::S; - //- /core.rs crate:core - pub struct S; - "#; - check_found_path(code, "std::S"); - } - - #[test] - fn prelude() { - let code = r#" - //- /main.rs crate:main deps:std - <|> - //- /std.rs crate:std - pub mod prelude { pub struct S; } - #[prelude_import] - pub use prelude::*; - "#; - check_found_path(code, "S"); - } - - #[test] - fn enum_variant_from_prelude() { - let code = r#" - //- /main.rs crate:main deps:std - <|> - //- /std.rs crate:std - pub mod prelude { - pub enum Option { Some(T), None } - pub use Option::*; - } - #[prelude_import] - pub use prelude::*; - "#; - check_found_path(code, "None"); - check_found_path(code, "Some"); - } - - #[test] - fn shortest_path() { - let code = r#" - //- /main.rs - pub mod foo; - pub mod baz; - struct S; - <|> - //- /foo.rs - pub mod bar { pub struct S; } - //- /baz.rs - pub use crate::foo::bar::S; - "#; - check_found_path(code, "baz::S"); - } - - #[test] - fn discount_private_imports() { - let code = r#" - //- /main.rs - mod foo; - pub mod bar { pub struct S; } - use bar::S; - //- /foo.rs - <|> - "#; - // crate::S would be shorter, but using private imports seems wrong - check_found_path(code, "crate::bar::S"); - } - - #[test] - fn import_cycle() { - let code = r#" - //- /main.rs - pub mod foo; - pub mod bar; - pub mod baz; - //- /bar.rs - <|> - //- /foo.rs - pub use super::baz; - pub struct S; - //- /baz.rs - pub use super::foo; - "#; - check_found_path(code, "crate::foo::S"); - } - - #[test] - fn prefer_std_paths_over_alloc() { - mark::check!(prefer_std_paths); - let code = r#" - //- /main.rs crate:main deps:alloc,std - <|> - - //- /std.rs crate:std deps:alloc - pub mod sync { - pub use alloc::sync::Arc; - } - - //- /zzz.rs crate:alloc - pub mod sync { - pub struct Arc; - } - "#; - check_found_path(code, "std::sync::Arc"); - } - - #[test] - fn prefer_core_paths_over_std() { - mark::check!(prefer_no_std_paths); - let code = r#" - //- /main.rs crate:main deps:core,std - #![no_std] - - <|> - - //- /std.rs 
crate:std deps:core - - pub mod fmt { - pub use core::fmt::Error; - } - - //- /zzz.rs crate:core - - pub mod fmt { - pub struct Error; - } - "#; - check_found_path(code, "core::fmt::Error"); - } - - #[test] - fn prefer_alloc_paths_over_std() { - let code = r#" - //- /main.rs crate:main deps:alloc,std - #![no_std] - - <|> - - //- /std.rs crate:std deps:alloc - - pub mod sync { - pub use alloc::sync::Arc; - } - - //- /zzz.rs crate:alloc - - pub mod sync { - pub struct Arc; - } - "#; - check_found_path(code, "alloc::sync::Arc"); - } - - #[test] - fn prefer_shorter_paths_if_not_alloc() { - let code = r#" - //- /main.rs crate:main deps:megaalloc,std - <|> - - //- /std.rs crate:std deps:megaalloc - pub mod sync { - pub use megaalloc::sync::Arc; - } - - //- /zzz.rs crate:megaalloc - pub struct Arc; - "#; - check_found_path(code, "megaalloc::Arc"); - } - - #[test] - fn builtins_are_in_scope() { - let code = r#" - //- /main.rs - <|> - - pub mod primitive { - pub use u8; - } - "#; - check_found_path(code, "u8"); - check_found_path(code, "u16"); - } -} diff --git a/crates/ra_hir_def/src/generics.rs b/crates/ra_hir_def/src/generics.rs deleted file mode 100644 index 699ba9c923..0000000000 --- a/crates/ra_hir_def/src/generics.rs +++ /dev/null @@ -1,340 +0,0 @@ -//! Many kinds of items or constructs can have generic parameters: functions, -//! structs, impls, traits, etc. This module provides a common HIR for these -//! generic parameters. See also the `Generics` type and the `generics_of` query -//! in rustc. 
-use std::sync::Arc; - -use either::Either; -use hir_expand::{ - name::{name, AsName, Name}, - InFile, -}; -use ra_arena::{map::ArenaMap, Arena}; -use ra_db::FileId; -use ra_prof::profile; -use ra_syntax::ast::{self, GenericParamsOwner, NameOwner, TypeBoundsOwner}; - -use crate::{ - body::LowerCtx, - child_by_source::ChildBySource, - db::DefDatabase, - dyn_map::DynMap, - keys, - src::HasChildSource, - src::HasSource, - type_ref::{TypeBound, TypeRef}, - AdtId, GenericDefId, LocalTypeParamId, Lookup, TypeParamId, -}; - -/// Data about a generic parameter (to a function, struct, impl, ...). -#[derive(Clone, PartialEq, Eq, Debug)] -pub struct TypeParamData { - pub name: Option, - pub default: Option, - pub provenance: TypeParamProvenance, -} - -#[derive(Copy, Clone, PartialEq, Eq, Debug)] -pub enum TypeParamProvenance { - TypeParamList, - TraitSelf, - ArgumentImplTrait, -} - -/// Data about the generic parameters of a function, struct, impl, etc. -#[derive(Clone, PartialEq, Eq, Debug, Default)] -pub struct GenericParams { - pub types: Arena, - // lifetimes: Arena, - pub where_predicates: Vec, -} - -/// A single predicate from a where clause, i.e. `where Type: Trait`. Combined -/// where clauses like `where T: Foo + Bar` are turned into multiple of these. -/// It might still result in multiple actual predicates though, because of -/// associated type bindings like `Iterator`. -#[derive(Clone, PartialEq, Eq, Debug)] -pub struct WherePredicate { - pub target: WherePredicateTarget, - pub bound: TypeBound, -} - -#[derive(Clone, PartialEq, Eq, Debug)] -pub enum WherePredicateTarget { - TypeRef(TypeRef), - /// For desugared where predicates that can directly refer to a type param. 
- TypeParam(LocalTypeParamId), -} - -type SourceMap = ArenaMap>; - -impl GenericParams { - pub(crate) fn generic_params_query( - db: &dyn DefDatabase, - def: GenericDefId, - ) -> Arc { - let _p = profile("generic_params_query"); - - let generics = match def { - GenericDefId::FunctionId(id) => { - let id = id.lookup(db).id; - let tree = db.item_tree(id.file_id); - let item = &tree[id.value]; - tree[item.generic_params].clone() - } - GenericDefId::AdtId(AdtId::StructId(id)) => { - let id = id.lookup(db).id; - let tree = db.item_tree(id.file_id); - let item = &tree[id.value]; - tree[item.generic_params].clone() - } - GenericDefId::AdtId(AdtId::EnumId(id)) => { - let id = id.lookup(db).id; - let tree = db.item_tree(id.file_id); - let item = &tree[id.value]; - tree[item.generic_params].clone() - } - GenericDefId::AdtId(AdtId::UnionId(id)) => { - let id = id.lookup(db).id; - let tree = db.item_tree(id.file_id); - let item = &tree[id.value]; - tree[item.generic_params].clone() - } - GenericDefId::TraitId(id) => { - let id = id.lookup(db).id; - let tree = db.item_tree(id.file_id); - let item = &tree[id.value]; - tree[item.generic_params].clone() - } - GenericDefId::TypeAliasId(id) => { - let id = id.lookup(db).id; - let tree = db.item_tree(id.file_id); - let item = &tree[id.value]; - tree[item.generic_params].clone() - } - GenericDefId::ImplId(id) => { - let id = id.lookup(db).id; - let tree = db.item_tree(id.file_id); - let item = &tree[id.value]; - tree[item.generic_params].clone() - } - GenericDefId::EnumVariantId(_) | GenericDefId::ConstId(_) => GenericParams::default(), - }; - Arc::new(generics) - } - - fn new(db: &dyn DefDatabase, def: GenericDefId) -> (GenericParams, InFile) { - let mut generics = GenericParams { types: Arena::default(), where_predicates: Vec::new() }; - let mut sm = ArenaMap::default(); - - // FIXME: add `: Sized` bound for everything except for `Self` in traits - let file_id = match def { - GenericDefId::FunctionId(it) => { - let src = 
it.lookup(db).source(db); - let lower_ctx = LowerCtx::new(db, src.file_id); - generics.fill(&lower_ctx, &mut sm, &src.value); - // lower `impl Trait` in arguments - let data = db.function_data(it); - for param in &data.params { - generics.fill_implicit_impl_trait_args(param); - } - src.file_id - } - GenericDefId::AdtId(AdtId::StructId(it)) => { - let src = it.lookup(db).source(db); - let lower_ctx = LowerCtx::new(db, src.file_id); - generics.fill(&lower_ctx, &mut sm, &src.value); - src.file_id - } - GenericDefId::AdtId(AdtId::UnionId(it)) => { - let src = it.lookup(db).source(db); - let lower_ctx = LowerCtx::new(db, src.file_id); - generics.fill(&lower_ctx, &mut sm, &src.value); - src.file_id - } - GenericDefId::AdtId(AdtId::EnumId(it)) => { - let src = it.lookup(db).source(db); - let lower_ctx = LowerCtx::new(db, src.file_id); - generics.fill(&lower_ctx, &mut sm, &src.value); - src.file_id - } - GenericDefId::TraitId(it) => { - let src = it.lookup(db).source(db); - let lower_ctx = LowerCtx::new(db, src.file_id); - - // traits get the Self type as an implicit first type parameter - let self_param_id = generics.types.alloc(TypeParamData { - name: Some(name![Self]), - default: None, - provenance: TypeParamProvenance::TraitSelf, - }); - sm.insert(self_param_id, Either::Left(src.value.clone())); - // add super traits as bounds on Self - // i.e., trait Foo: Bar is equivalent to trait Foo where Self: Bar - let self_param = TypeRef::Path(name![Self].into()); - generics.fill_bounds(&lower_ctx, &src.value, self_param); - - generics.fill(&lower_ctx, &mut sm, &src.value); - src.file_id - } - GenericDefId::TypeAliasId(it) => { - let src = it.lookup(db).source(db); - let lower_ctx = LowerCtx::new(db, src.file_id); - - generics.fill(&lower_ctx, &mut sm, &src.value); - src.file_id - } - // Note that we don't add `Self` here: in `impl`s, `Self` is not a - // type-parameter, but rather is a type-alias for impl's target - // type, so this is handled by the resolver. 
- GenericDefId::ImplId(it) => { - let src = it.lookup(db).source(db); - let lower_ctx = LowerCtx::new(db, src.file_id); - - generics.fill(&lower_ctx, &mut sm, &src.value); - src.file_id - } - // We won't be using this ID anyway - GenericDefId::EnumVariantId(_) | GenericDefId::ConstId(_) => FileId(!0).into(), - }; - - (generics, InFile::new(file_id, sm)) - } - - pub(crate) fn fill( - &mut self, - lower_ctx: &LowerCtx, - sm: &mut SourceMap, - node: &dyn GenericParamsOwner, - ) { - if let Some(params) = node.generic_param_list() { - self.fill_params(lower_ctx, sm, params) - } - if let Some(where_clause) = node.where_clause() { - self.fill_where_predicates(lower_ctx, where_clause); - } - } - - pub(crate) fn fill_bounds( - &mut self, - lower_ctx: &LowerCtx, - node: &dyn ast::TypeBoundsOwner, - type_ref: TypeRef, - ) { - for bound in - node.type_bound_list().iter().flat_map(|type_bound_list| type_bound_list.bounds()) - { - self.add_where_predicate_from_bound(lower_ctx, bound, type_ref.clone()); - } - } - - fn fill_params( - &mut self, - lower_ctx: &LowerCtx, - sm: &mut SourceMap, - params: ast::GenericParamList, - ) { - for type_param in params.type_params() { - let name = type_param.name().map_or_else(Name::missing, |it| it.as_name()); - // FIXME: Use `Path::from_src` - let default = type_param.default_type().map(|it| TypeRef::from_ast(lower_ctx, it)); - let param = TypeParamData { - name: Some(name.clone()), - default, - provenance: TypeParamProvenance::TypeParamList, - }; - let param_id = self.types.alloc(param); - sm.insert(param_id, Either::Right(type_param.clone())); - - let type_ref = TypeRef::Path(name.into()); - self.fill_bounds(&lower_ctx, &type_param, type_ref); - } - } - - fn fill_where_predicates(&mut self, lower_ctx: &LowerCtx, where_clause: ast::WhereClause) { - for pred in where_clause.predicates() { - let type_ref = match pred.ty() { - Some(type_ref) => type_ref, - None => continue, - }; - let type_ref = TypeRef::from_ast(lower_ctx, type_ref); - for 
bound in pred.type_bound_list().iter().flat_map(|l| l.bounds()) { - self.add_where_predicate_from_bound(lower_ctx, bound, type_ref.clone()); - } - } - } - - fn add_where_predicate_from_bound( - &mut self, - lower_ctx: &LowerCtx, - bound: ast::TypeBound, - type_ref: TypeRef, - ) { - if bound.question_mark_token().is_some() { - // FIXME: remove this bound - return; - } - let bound = TypeBound::from_ast(lower_ctx, bound); - self.where_predicates - .push(WherePredicate { target: WherePredicateTarget::TypeRef(type_ref), bound }); - } - - pub(crate) fn fill_implicit_impl_trait_args(&mut self, type_ref: &TypeRef) { - type_ref.walk(&mut |type_ref| { - if let TypeRef::ImplTrait(bounds) = type_ref { - let param = TypeParamData { - name: None, - default: None, - provenance: TypeParamProvenance::ArgumentImplTrait, - }; - let param_id = self.types.alloc(param); - for bound in bounds { - self.where_predicates.push(WherePredicate { - target: WherePredicateTarget::TypeParam(param_id), - bound: bound.clone(), - }); - } - } - }); - } - - pub fn find_by_name(&self, name: &Name) -> Option { - self.types - .iter() - .find_map(|(id, p)| if p.name.as_ref() == Some(name) { Some(id) } else { None }) - } - - pub fn find_trait_self_param(&self) -> Option { - self.types.iter().find_map(|(id, p)| { - if p.provenance == TypeParamProvenance::TraitSelf { - Some(id) - } else { - None - } - }) - } -} - -impl HasChildSource for GenericDefId { - type ChildId = LocalTypeParamId; - type Value = Either; - fn child_source(&self, db: &dyn DefDatabase) -> InFile { - let (_, sm) = GenericParams::new(db, *self); - sm - } -} - -impl ChildBySource for GenericDefId { - fn child_by_source(&self, db: &dyn DefDatabase) -> DynMap { - let mut res = DynMap::default(); - let arena_map = self.child_source(db); - let arena_map = arena_map.as_ref(); - for (local_id, src) in arena_map.value.iter() { - let id = TypeParamId { parent: *self, local_id }; - if let Either::Right(type_param) = src { - 
res[keys::TYPE_PARAM].insert(arena_map.with_value(type_param.clone()), id) - } - } - res - } -} diff --git a/crates/ra_hir_def/src/import_map.rs b/crates/ra_hir_def/src/import_map.rs deleted file mode 100644 index 9e4c30b1ab..0000000000 --- a/crates/ra_hir_def/src/import_map.rs +++ /dev/null @@ -1,745 +0,0 @@ -//! A map of all publicly exported items in a crate. - -use std::{cmp::Ordering, fmt, hash::BuildHasherDefault, sync::Arc}; - -use fst::{self, Streamer}; -use indexmap::{map::Entry, IndexMap}; -use ra_db::CrateId; -use ra_syntax::SmolStr; -use rustc_hash::{FxHashMap, FxHasher}; -use smallvec::SmallVec; - -use crate::{ - db::DefDatabase, - item_scope::ItemInNs, - path::{ModPath, PathKind}, - visibility::Visibility, - AssocItemId, ModuleDefId, ModuleId, TraitId, -}; - -type FxIndexMap = IndexMap>; - -/// Item import details stored in the `ImportMap`. -#[derive(Debug, Clone, Eq, PartialEq)] -pub struct ImportInfo { - /// A path that can be used to import the item, relative to the crate's root. - pub path: ModPath, - /// The module containing this item. - pub container: ModuleId, -} - -/// A map from publicly exported items to the path needed to import/name them from a downstream -/// crate. -/// -/// Reexports of items are taken into account, ie. if something is exported under multiple -/// names, the one with the shortest import path will be used. -/// -/// Note that all paths are relative to the containing crate's root, so the crate name still needs -/// to be prepended to the `ModPath` before the path is valid. -#[derive(Default)] -pub struct ImportMap { - map: FxIndexMap, - - /// List of keys stored in `map`, sorted lexicographically by their `ModPath`. Indexed by the - /// values returned by running `fst`. - /// - /// Since a path can refer to multiple items due to namespacing, we store all items with the - /// same path right after each other. This allows us to find all items after the FST gives us - /// the index of the first one. 
- importables: Vec, - fst: fst::Map>, - - /// Maps names of associated items to the item's ID. Only includes items whose defining trait is - /// exported. - assoc_map: FxHashMap>, -} - -impl ImportMap { - pub fn import_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc { - let _p = ra_prof::profile("import_map_query"); - let def_map = db.crate_def_map(krate); - let mut import_map = Self::default(); - - // We look only into modules that are public(ly reexported), starting with the crate root. - let empty = ModPath { kind: PathKind::Plain, segments: vec![] }; - let root = ModuleId { krate, local_id: def_map.root }; - let mut worklist = vec![(root, empty)]; - while let Some((module, mod_path)) = worklist.pop() { - let ext_def_map; - let mod_data = if module.krate == krate { - &def_map[module.local_id] - } else { - // The crate might reexport a module defined in another crate. - ext_def_map = db.crate_def_map(module.krate); - &ext_def_map[module.local_id] - }; - - let visible_items = mod_data.scope.entries().filter_map(|(name, per_ns)| { - let per_ns = per_ns.filter_visibility(|vis| vis == Visibility::Public); - if per_ns.is_none() { - None - } else { - Some((name, per_ns)) - } - }); - - for (name, per_ns) in visible_items { - let mk_path = || { - let mut path = mod_path.clone(); - path.segments.push(name.clone()); - path - }; - - for item in per_ns.iter_items() { - let path = mk_path(); - match import_map.map.entry(item) { - Entry::Vacant(entry) => { - entry.insert(ImportInfo { path, container: module }); - } - Entry::Occupied(mut entry) => { - // If the new path is shorter, prefer that one. - if path.len() < entry.get().path.len() { - *entry.get_mut() = ImportInfo { path, container: module }; - } else { - continue; - } - } - } - - // If we've just added a path to a module, descend into it. We might traverse - // modules multiple times, but only if the new path to it is shorter than the - // first (else we `continue` above). 
- if let Some(ModuleDefId::ModuleId(mod_id)) = item.as_module_def_id() { - worklist.push((mod_id, mk_path())); - } - - // If we've added a path to a trait, add the trait's methods to the method map. - if let Some(ModuleDefId::TraitId(tr)) = item.as_module_def_id() { - import_map.collect_trait_methods(db, tr); - } - } - } - } - - let mut importables = import_map.map.iter().collect::>(); - - importables.sort_by(cmp); - - // Build the FST, taking care not to insert duplicate values. - - let mut builder = fst::MapBuilder::memory(); - let mut last_batch_start = 0; - - for idx in 0..importables.len() { - if let Some(next_item) = importables.get(idx + 1) { - if cmp(&importables[last_batch_start], next_item) == Ordering::Equal { - continue; - } - } - - let start = last_batch_start; - last_batch_start = idx + 1; - - let key = fst_path(&importables[start].1.path); - - builder.insert(key, start as u64).unwrap(); - } - - import_map.fst = fst::Map::new(builder.into_inner().unwrap()).unwrap(); - import_map.importables = importables.iter().map(|(item, _)| **item).collect(); - - Arc::new(import_map) - } - - /// Returns the `ModPath` needed to import/mention `item`, relative to this crate's root. - pub fn path_of(&self, item: ItemInNs) -> Option<&ModPath> { - Some(&self.map.get(&item)?.path) - } - - pub fn import_info_for(&self, item: ItemInNs) -> Option<&ImportInfo> { - self.map.get(&item) - } - - fn collect_trait_methods(&mut self, db: &dyn DefDatabase, tr: TraitId) { - let data = db.trait_data(tr); - for (name, item) in data.items.iter() { - self.assoc_map.entry(name.to_string().into()).or_default().push(*item); - } - } -} - -impl PartialEq for ImportMap { - fn eq(&self, other: &Self) -> bool { - // `fst` and `importables` are built from `map`, so we don't need to compare them. 
- self.map == other.map - } -} - -impl Eq for ImportMap {} - -impl fmt::Debug for ImportMap { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let mut importable_paths: Vec<_> = self - .map - .iter() - .map(|(item, info)| { - let ns = match item { - ItemInNs::Types(_) => "t", - ItemInNs::Values(_) => "v", - ItemInNs::Macros(_) => "m", - }; - format!("- {} ({})", info.path, ns) - }) - .collect(); - - importable_paths.sort(); - f.write_str(&importable_paths.join("\n")) - } -} - -fn fst_path(path: &ModPath) -> String { - let mut s = path.to_string(); - s.make_ascii_lowercase(); - s -} - -fn cmp((_, lhs): &(&ItemInNs, &ImportInfo), (_, rhs): &(&ItemInNs, &ImportInfo)) -> Ordering { - let lhs_str = fst_path(&lhs.path); - let rhs_str = fst_path(&rhs.path); - lhs_str.cmp(&rhs_str) -} - -#[derive(Debug)] -pub struct Query { - query: String, - lowercased: String, - anchor_end: bool, - case_sensitive: bool, - limit: usize, -} - -impl Query { - pub fn new(query: &str) -> Self { - Self { - lowercased: query.to_lowercase(), - query: query.to_string(), - anchor_end: false, - case_sensitive: false, - limit: usize::max_value(), - } - } - - /// Only returns items whose paths end with the (case-insensitive) query string as their last - /// segment. - pub fn anchor_end(self) -> Self { - Self { anchor_end: true, ..self } - } - - /// Limits the returned number of items to `limit`. - pub fn limit(self, limit: usize) -> Self { - Self { limit, ..self } - } - - /// Respect casing of the query string when matching. - pub fn case_sensitive(self) -> Self { - Self { case_sensitive: true, ..self } - } -} - -/// Searches dependencies of `krate` for an importable path matching `query`. -/// -/// This returns a list of items that could be imported from dependencies of `krate`. 
-pub fn search_dependencies<'a>( - db: &'a dyn DefDatabase, - krate: CrateId, - query: Query, -) -> Vec { - let _p = ra_prof::profile("search_dependencies").detail(|| format!("{:?}", query)); - - let graph = db.crate_graph(); - let import_maps: Vec<_> = - graph[krate].dependencies.iter().map(|dep| db.import_map(dep.crate_id)).collect(); - - let automaton = fst::automaton::Subsequence::new(&query.lowercased); - - let mut op = fst::map::OpBuilder::new(); - for map in &import_maps { - op = op.add(map.fst.search(&automaton)); - } - - let mut stream = op.union(); - let mut res = Vec::new(); - while let Some((_, indexed_values)) = stream.next() { - for indexed_value in indexed_values { - let import_map = &import_maps[indexed_value.index]; - let importables = &import_map.importables[indexed_value.value as usize..]; - - // Path shared by the importable items in this group. - let path = &import_map.map[&importables[0]].path; - - if query.anchor_end { - // Last segment must match query. - let last = path.segments.last().unwrap().to_string(); - if last.to_lowercase() != query.lowercased { - continue; - } - } - - // Add the items from this `ModPath` group. Those are all subsequent items in - // `importables` whose paths match `path`. - let iter = importables.iter().copied().take_while(|item| { - let item_path = &import_map.map[item].path; - fst_path(item_path) == fst_path(path) - }); - - if query.case_sensitive { - // FIXME: This does not do a subsequence match. - res.extend(iter.filter(|item| { - let item_path = &import_map.map[item].path; - item_path.to_string().contains(&query.query) - })); - } else { - res.extend(iter); - } - - if res.len() >= query.limit { - res.truncate(query.limit); - return res; - } - } - } - - // Add all exported associated items whose names match the query (exactly). 
- for map in &import_maps { - if let Some(v) = map.assoc_map.get(&*query.query) { - res.extend(v.iter().map(|&assoc| { - ItemInNs::Types(match assoc { - AssocItemId::FunctionId(it) => it.into(), - AssocItemId::ConstId(it) => it.into(), - AssocItemId::TypeAliasId(it) => it.into(), - }) - })); - } - } - - res -} - -#[cfg(test)] -mod tests { - use expect::{expect, Expect}; - use ra_db::{fixture::WithFixture, SourceDatabase, Upcast}; - - use crate::{test_db::TestDB, AssocContainerId, Lookup}; - - use super::*; - - fn check_search(ra_fixture: &str, krate_name: &str, query: Query, expect: Expect) { - let db = TestDB::with_files(ra_fixture); - let crate_graph = db.crate_graph(); - let krate = crate_graph - .iter() - .find(|krate| { - crate_graph[*krate].display_name.as_ref().map(|n| n.to_string()) - == Some(krate_name.to_string()) - }) - .unwrap(); - - let actual = search_dependencies(db.upcast(), krate, query) - .into_iter() - .filter_map(|item| { - let mark = match item { - ItemInNs::Types(_) => "t", - ItemInNs::Values(_) => "v", - ItemInNs::Macros(_) => "m", - }; - let item = assoc_to_trait(&db, item); - item.krate(db.upcast()).map(|krate| { - let map = db.import_map(krate); - let path = map.path_of(item).unwrap(); - format!( - "{}::{} ({})\n", - crate_graph[krate].display_name.as_ref().unwrap(), - path, - mark - ) - }) - }) - .collect::(); - expect.assert_eq(&actual) - } - - fn assoc_to_trait(db: &dyn DefDatabase, item: ItemInNs) -> ItemInNs { - let assoc: AssocItemId = match item { - ItemInNs::Types(it) | ItemInNs::Values(it) => match it { - ModuleDefId::TypeAliasId(it) => it.into(), - ModuleDefId::FunctionId(it) => it.into(), - ModuleDefId::ConstId(it) => it.into(), - _ => return item, - }, - _ => return item, - }; - - let container = match assoc { - AssocItemId::FunctionId(it) => it.lookup(db).container, - AssocItemId::ConstId(it) => it.lookup(db).container, - AssocItemId::TypeAliasId(it) => it.lookup(db).container, - }; - - match container { - 
AssocContainerId::TraitId(it) => ItemInNs::Types(it.into()), - _ => item, - } - } - - fn check(ra_fixture: &str, expect: Expect) { - let db = TestDB::with_files(ra_fixture); - let crate_graph = db.crate_graph(); - - let actual = crate_graph - .iter() - .filter_map(|krate| { - let cdata = &crate_graph[krate]; - let name = cdata.display_name.as_ref()?; - - let map = db.import_map(krate); - - Some(format!("{}:\n{:?}\n", name, map)) - }) - .collect::(); - - expect.assert_eq(&actual) - } - - #[test] - fn smoke() { - check( - r" - //- /main.rs crate:main deps:lib - - mod private { - pub use lib::Pub; - pub struct InPrivateModule; - } - - pub mod publ1 { - use lib::Pub; - } - - pub mod real_pub { - pub use lib::Pub; - } - pub mod real_pu2 { // same path length as above - pub use lib::Pub; - } - - //- /lib.rs crate:lib - pub struct Pub {} - pub struct Pub2; // t + v - struct Priv; - ", - expect![[r#" - main: - - publ1 (t) - - real_pu2 (t) - - real_pub (t) - - real_pub::Pub (t) - lib: - - Pub (t) - - Pub2 (t) - - Pub2 (v) - "#]], - ); - } - - #[test] - fn prefers_shortest_path() { - check( - r" - //- /main.rs crate:main - - pub mod sub { - pub mod subsub { - pub struct Def {} - } - - pub use super::sub::subsub::Def; - } - ", - expect![[r#" - main: - - sub (t) - - sub::Def (t) - - sub::subsub (t) - "#]], - ); - } - - #[test] - fn type_reexport_cross_crate() { - // Reexports need to be visible from a crate, even if the original crate exports the item - // at a shorter path. - check( - r" - //- /main.rs crate:main deps:lib - pub mod m { - pub use lib::S; - } - //- /lib.rs crate:lib - pub struct S; - ", - expect![[r#" - main: - - m (t) - - m::S (t) - - m::S (v) - lib: - - S (t) - - S (v) - "#]], - ); - } - - #[test] - fn macro_reexport() { - check( - r" - //- /main.rs crate:main deps:lib - pub mod m { - pub use lib::pub_macro; - } - //- /lib.rs crate:lib - #[macro_export] - macro_rules! 
pub_macro { - () => {}; - } - ", - expect![[r#" - main: - - m (t) - - m::pub_macro (m) - lib: - - pub_macro (m) - "#]], - ); - } - - #[test] - fn module_reexport() { - // Reexporting modules from a dependency adds all contents to the import map. - check( - r" - //- /main.rs crate:main deps:lib - pub use lib::module as reexported_module; - //- /lib.rs crate:lib - pub mod module { - pub struct S; - } - ", - expect![[r#" - main: - - reexported_module (t) - - reexported_module::S (t) - - reexported_module::S (v) - lib: - - module (t) - - module::S (t) - - module::S (v) - "#]], - ); - } - - #[test] - fn cyclic_module_reexport() { - // A cyclic reexport does not hang. - check( - r" - //- /lib.rs crate:lib - pub mod module { - pub struct S; - pub use super::sub::*; - } - - pub mod sub { - pub use super::module; - } - ", - expect![[r#" - lib: - - module (t) - - module::S (t) - - module::S (v) - - sub (t) - "#]], - ); - } - - #[test] - fn private_macro() { - check( - r" - //- /lib.rs crate:lib - macro_rules! private_macro { - () => {}; - } - ", - expect![[r#" - lib: - - "#]], - ); - } - - #[test] - fn namespacing() { - check( - r" - //- /lib.rs crate:lib - pub struct Thing; // t + v - #[macro_export] - macro_rules! Thing { // m - () => {}; - } - ", - expect![[r#" - lib: - - Thing (m) - - Thing (t) - - Thing (v) - "#]], - ); - - check( - r" - //- /lib.rs crate:lib - pub mod Thing {} // t - #[macro_export] - macro_rules! Thing { // m - () => {}; - } - ", - expect![[r#" - lib: - - Thing (m) - - Thing (t) - "#]], - ); - } - - #[test] - fn search() { - let ra_fixture = r#" - //- /main.rs crate:main deps:dep - //- /dep.rs crate:dep deps:tdep - use tdep::fmt as fmt_dep; - pub mod fmt { - pub trait Display { - fn fmt(); - } - } - #[macro_export] - macro_rules! 
Fmt { - () => {}; - } - pub struct Fmt; - - pub fn format() {} - pub fn no() {} - - //- /tdep.rs crate:tdep - pub mod fmt { - pub struct NotImportableFromMain; - } - "#; - - check_search( - ra_fixture, - "main", - Query::new("fmt"), - expect![[r#" - dep::fmt (t) - dep::Fmt (t) - dep::Fmt (v) - dep::Fmt (m) - dep::fmt::Display (t) - dep::format (v) - dep::fmt::Display (t) - "#]], - ); - - check_search( - ra_fixture, - "main", - Query::new("fmt").anchor_end(), - expect![[r#" - dep::fmt (t) - dep::Fmt (t) - dep::Fmt (v) - dep::Fmt (m) - dep::fmt::Display (t) - "#]], - ); - } - - #[test] - fn search_casing() { - let ra_fixture = r#" - //- /main.rs crate:main deps:dep - //- /dep.rs crate:dep - - pub struct fmt; - pub struct FMT; - "#; - - check_search( - ra_fixture, - "main", - Query::new("FMT"), - expect![[r#" - dep::fmt (t) - dep::fmt (v) - dep::FMT (t) - dep::FMT (v) - "#]], - ); - - check_search( - ra_fixture, - "main", - Query::new("FMT").case_sensitive(), - expect![[r#" - dep::FMT (t) - dep::FMT (v) - "#]], - ); - } - - #[test] - fn search_limit() { - check_search( - r#" - //- /main.rs crate:main deps:dep - //- /dep.rs crate:dep - pub mod fmt { - pub trait Display { - fn fmt(); - } - } - #[macro_export] - macro_rules! Fmt { - () => {}; - } - pub struct Fmt; - - pub fn format() {} - pub fn no() {} - "#, - "main", - Query::new("").limit(2), - expect![[r#" - dep::fmt (t) - dep::Fmt (t) - "#]], - ); - } -} diff --git a/crates/ra_hir_def/src/item_scope.rs b/crates/ra_hir_def/src/item_scope.rs deleted file mode 100644 index 8fee4b15e5..0000000000 --- a/crates/ra_hir_def/src/item_scope.rs +++ /dev/null @@ -1,341 +0,0 @@ -//! Describes items defined or visible (ie, imported) in a certain scope. -//! This is shared between modules and blocks. 
- -use std::collections::hash_map::Entry; - -use hir_expand::name::Name; -use once_cell::sync::Lazy; -use ra_db::CrateId; -use rustc_hash::{FxHashMap, FxHashSet}; -use test_utils::mark; - -use crate::{ - db::DefDatabase, per_ns::PerNs, visibility::Visibility, AdtId, BuiltinType, HasModule, ImplId, - LocalModuleId, Lookup, MacroDefId, ModuleDefId, TraitId, -}; - -#[derive(Copy, Clone)] -pub(crate) enum ImportType { - Glob, - Named, -} - -#[derive(Debug, Default)] -pub struct PerNsGlobImports { - types: FxHashSet<(LocalModuleId, Name)>, - values: FxHashSet<(LocalModuleId, Name)>, - macros: FxHashSet<(LocalModuleId, Name)>, -} - -#[derive(Debug, Default, PartialEq, Eq)] -pub struct ItemScope { - types: FxHashMap, - values: FxHashMap, - macros: FxHashMap, - unresolved: FxHashSet, - - defs: Vec, - impls: Vec, - /// Traits imported via `use Trait as _;`. - unnamed_trait_imports: FxHashMap, - /// Macros visible in current module in legacy textual scope - /// - /// For macros invoked by an unqualified identifier like `bar!()`, `legacy_macros` will be searched in first. - /// If it yields no result, then it turns to module scoped `macros`. - /// It macros with name qualified with a path like `crate::foo::bar!()`, `legacy_macros` will be skipped, - /// and only normal scoped `macros` will be searched in. - /// - /// Note that this automatically inherit macros defined textually before the definition of module itself. - /// - /// Module scoped macros will be inserted into `items` instead of here. - // FIXME: Macro shadowing in one module is not properly handled. Non-item place macros will - // be all resolved to the last one defined if shadowing happens. - legacy_macros: FxHashMap, -} - -pub(crate) static BUILTIN_SCOPE: Lazy> = Lazy::new(|| { - BuiltinType::ALL - .iter() - .map(|(name, ty)| (name.clone(), PerNs::types(ty.clone().into(), Visibility::Public))) - .collect() -}); - -/// Shadow mode for builtin type which can be shadowed by module. 
-#[derive(Debug, Copy, Clone, PartialEq, Eq)] -pub(crate) enum BuiltinShadowMode { - /// Prefer user-defined modules (or other types) over builtins. - Module, - /// Prefer builtins over user-defined modules (but not other types). - Other, -} - -/// Legacy macros can only be accessed through special methods like `get_legacy_macros`. -/// Other methods will only resolve values, types and module scoped macros only. -impl ItemScope { - pub fn entries<'a>(&'a self) -> impl Iterator + 'a { - // FIXME: shadowing - let keys: FxHashSet<_> = self - .types - .keys() - .chain(self.values.keys()) - .chain(self.macros.keys()) - .chain(self.unresolved.iter()) - .collect(); - - keys.into_iter().map(move |name| (name, self.get(name))) - } - - pub fn declarations(&self) -> impl Iterator + '_ { - self.defs.iter().copied() - } - - pub fn impls(&self) -> impl Iterator + ExactSizeIterator + '_ { - self.impls.iter().copied() - } - - pub fn visibility_of(&self, def: ModuleDefId) -> Option { - self.name_of(ItemInNs::Types(def)) - .or_else(|| self.name_of(ItemInNs::Values(def))) - .map(|(_, v)| v) - } - - /// Iterate over all module scoped macros - pub(crate) fn macros<'a>(&'a self) -> impl Iterator + 'a { - self.entries().filter_map(|(name, def)| def.take_macros().map(|macro_| (name, macro_))) - } - - /// Iterate over all legacy textual scoped macros visible at the end of the module - pub(crate) fn legacy_macros<'a>(&'a self) -> impl Iterator + 'a { - self.legacy_macros.iter().map(|(name, def)| (name, *def)) - } - - /// Get a name from current module scope, legacy macros are not included - pub(crate) fn get(&self, name: &Name) -> PerNs { - PerNs { - types: self.types.get(name).copied(), - values: self.values.get(name).copied(), - macros: self.macros.get(name).copied(), - } - } - - pub(crate) fn name_of(&self, item: ItemInNs) -> Option<(&Name, Visibility)> { - for (name, per_ns) in self.entries() { - if let Some(vis) = item.match_with(per_ns) { - return Some((name, vis)); - } - } - None - } 
- - pub(crate) fn traits<'a>(&'a self) -> impl Iterator + 'a { - self.types - .values() - .filter_map(|(def, _)| match def { - ModuleDefId::TraitId(t) => Some(*t), - _ => None, - }) - .chain(self.unnamed_trait_imports.keys().copied()) - } - - pub(crate) fn define_def(&mut self, def: ModuleDefId) { - self.defs.push(def) - } - - pub(crate) fn get_legacy_macro(&self, name: &Name) -> Option { - self.legacy_macros.get(name).copied() - } - - pub(crate) fn define_impl(&mut self, imp: ImplId) { - self.impls.push(imp) - } - - pub(crate) fn define_legacy_macro(&mut self, name: Name, mac: MacroDefId) { - self.legacy_macros.insert(name, mac); - } - - pub(crate) fn unnamed_trait_vis(&self, tr: TraitId) -> Option { - self.unnamed_trait_imports.get(&tr).copied() - } - - pub(crate) fn push_unnamed_trait(&mut self, tr: TraitId, vis: Visibility) { - self.unnamed_trait_imports.insert(tr, vis); - } - - pub(crate) fn push_res(&mut self, name: Name, def: PerNs) -> bool { - let mut changed = false; - - if let Some(types) = def.types { - self.types.entry(name.clone()).or_insert_with(|| { - changed = true; - types - }); - } - if let Some(values) = def.values { - self.values.entry(name.clone()).or_insert_with(|| { - changed = true; - values - }); - } - if let Some(macros) = def.macros { - self.macros.entry(name.clone()).or_insert_with(|| { - changed = true; - macros - }); - } - - if def.is_none() { - if self.unresolved.insert(name) { - changed = true; - } - } - - changed - } - - pub(crate) fn push_res_with_import( - &mut self, - glob_imports: &mut PerNsGlobImports, - lookup: (LocalModuleId, Name), - def: PerNs, - def_import_type: ImportType, - ) -> bool { - let mut changed = false; - - macro_rules! check_changed { - ( - $changed:ident, - ( $this:ident / $def:ident ) . 
$field:ident, - $glob_imports:ident [ $lookup:ident ], - $def_import_type:ident - ) => {{ - let existing = $this.$field.entry($lookup.1.clone()); - match (existing, $def.$field) { - (Entry::Vacant(entry), Some(_)) => { - match $def_import_type { - ImportType::Glob => { - $glob_imports.$field.insert($lookup.clone()); - } - ImportType::Named => { - $glob_imports.$field.remove(&$lookup); - } - } - - if let Some(fld) = $def.$field { - entry.insert(fld); - } - $changed = true; - } - (Entry::Occupied(mut entry), Some(_)) - if $glob_imports.$field.contains(&$lookup) - && matches!($def_import_type, ImportType::Named) => - { - mark::hit!(import_shadowed); - $glob_imports.$field.remove(&$lookup); - if let Some(fld) = $def.$field { - entry.insert(fld); - } - $changed = true; - } - _ => {} - } - }}; - } - - check_changed!(changed, (self / def).types, glob_imports[lookup], def_import_type); - check_changed!(changed, (self / def).values, glob_imports[lookup], def_import_type); - check_changed!(changed, (self / def).macros, glob_imports[lookup], def_import_type); - - if def.is_none() { - if self.unresolved.insert(lookup.1) { - changed = true; - } - } - - changed - } - - pub(crate) fn resolutions<'a>(&'a self) -> impl Iterator, PerNs)> + 'a { - self.entries().map(|(name, res)| (Some(name.clone()), res)).chain( - self.unnamed_trait_imports - .iter() - .map(|(tr, vis)| (None, PerNs::types(ModuleDefId::TraitId(*tr), *vis))), - ) - } - - pub(crate) fn collect_legacy_macros(&self) -> FxHashMap { - self.legacy_macros.clone() - } -} - -impl PerNs { - pub(crate) fn from_def(def: ModuleDefId, v: Visibility, has_constructor: bool) -> PerNs { - match def { - ModuleDefId::ModuleId(_) => PerNs::types(def, v), - ModuleDefId::FunctionId(_) => PerNs::values(def, v), - ModuleDefId::AdtId(adt) => match adt { - AdtId::UnionId(_) => PerNs::types(def, v), - AdtId::EnumId(_) => PerNs::types(def, v), - AdtId::StructId(_) => { - if has_constructor { - PerNs::both(def, def, v) - } else { - 
PerNs::types(def, v) - } - } - }, - ModuleDefId::EnumVariantId(_) => PerNs::both(def, def, v), - ModuleDefId::ConstId(_) | ModuleDefId::StaticId(_) => PerNs::values(def, v), - ModuleDefId::TraitId(_) => PerNs::types(def, v), - ModuleDefId::TypeAliasId(_) => PerNs::types(def, v), - ModuleDefId::BuiltinType(_) => PerNs::types(def, v), - } - } -} - -#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)] -pub enum ItemInNs { - Types(ModuleDefId), - Values(ModuleDefId), - Macros(MacroDefId), -} - -impl ItemInNs { - fn match_with(self, per_ns: PerNs) -> Option { - match self { - ItemInNs::Types(def) => { - per_ns.types.filter(|(other_def, _)| *other_def == def).map(|(_, vis)| vis) - } - ItemInNs::Values(def) => { - per_ns.values.filter(|(other_def, _)| *other_def == def).map(|(_, vis)| vis) - } - ItemInNs::Macros(def) => { - per_ns.macros.filter(|(other_def, _)| *other_def == def).map(|(_, vis)| vis) - } - } - } - - pub fn as_module_def_id(self) -> Option { - match self { - ItemInNs::Types(id) | ItemInNs::Values(id) => Some(id), - ItemInNs::Macros(_) => None, - } - } - - /// Returns the crate defining this item (or `None` if `self` is built-in). 
- pub fn krate(&self, db: &dyn DefDatabase) -> Option { - Some(match self { - ItemInNs::Types(did) | ItemInNs::Values(did) => match did { - ModuleDefId::ModuleId(id) => id.krate, - ModuleDefId::FunctionId(id) => id.lookup(db).module(db).krate, - ModuleDefId::AdtId(id) => id.module(db).krate, - ModuleDefId::EnumVariantId(id) => id.parent.lookup(db).container.module(db).krate, - ModuleDefId::ConstId(id) => id.lookup(db).container.module(db).krate, - ModuleDefId::StaticId(id) => id.lookup(db).container.module(db).krate, - ModuleDefId::TraitId(id) => id.lookup(db).container.module(db).krate, - ModuleDefId::TypeAliasId(id) => id.lookup(db).module(db).krate, - ModuleDefId::BuiltinType(_) => return None, - }, - ItemInNs::Macros(id) => return id.krate, - }) - } -} diff --git a/crates/ra_hir_def/src/item_tree.rs b/crates/ra_hir_def/src/item_tree.rs deleted file mode 100644 index a67e75dac0..0000000000 --- a/crates/ra_hir_def/src/item_tree.rs +++ /dev/null @@ -1,754 +0,0 @@ -//! A simplified AST that only contains items. 
- -mod lower; -#[cfg(test)] -mod tests; - -use std::{ - any::type_name, - fmt::{self, Debug}, - hash::{Hash, Hasher}, - marker::PhantomData, - ops::{Index, Range}, - sync::Arc, -}; - -use ast::{AstNode, AttrsOwner, NameOwner, StructKind}; -use either::Either; -use hir_expand::{ - ast_id_map::FileAstId, - hygiene::Hygiene, - name::{name, AsName, Name}, - HirFileId, InFile, -}; -use ra_arena::{Arena, Idx, RawId}; -use ra_syntax::{ast, match_ast}; -use rustc_hash::FxHashMap; -use smallvec::SmallVec; -use test_utils::mark; - -use crate::{ - attr::Attrs, - db::DefDatabase, - generics::GenericParams, - path::{path, AssociatedTypeBinding, GenericArgs, ImportAlias, ModPath, Path, PathKind}, - type_ref::{Mutability, TypeBound, TypeRef}, - visibility::RawVisibility, -}; - -#[derive(Copy, Clone, Eq, PartialEq)] -pub struct RawVisibilityId(u32); - -impl RawVisibilityId { - pub const PUB: Self = RawVisibilityId(u32::max_value()); - pub const PRIV: Self = RawVisibilityId(u32::max_value() - 1); - pub const PUB_CRATE: Self = RawVisibilityId(u32::max_value() - 2); -} - -impl fmt::Debug for RawVisibilityId { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let mut f = f.debug_tuple("RawVisibilityId"); - match *self { - Self::PUB => f.field(&"pub"), - Self::PRIV => f.field(&"pub(self)"), - Self::PUB_CRATE => f.field(&"pub(crate)"), - _ => f.field(&self.0), - }; - f.finish() - } -} - -#[derive(Debug, Copy, Clone, Eq, PartialEq)] -pub struct GenericParamsId(u32); - -impl GenericParamsId { - pub const EMPTY: Self = GenericParamsId(u32::max_value()); -} - -/// The item tree of a source file. 
-#[derive(Debug, Eq, PartialEq)] -pub struct ItemTree { - top_level: SmallVec<[ModItem; 1]>, - attrs: FxHashMap, - inner_items: FxHashMap, SmallVec<[ModItem; 1]>>, - - data: Option>, -} - -impl ItemTree { - pub fn item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc { - let _p = ra_prof::profile("item_tree_query").detail(|| format!("{:?}", file_id)); - let syntax = if let Some(node) = db.parse_or_expand(file_id) { - node - } else { - return Arc::new(Self::empty()); - }; - - let hygiene = Hygiene::new(db.upcast(), file_id); - let ctx = lower::Ctx::new(db, hygiene.clone(), file_id); - let mut top_attrs = None; - let mut item_tree = match_ast! { - match syntax { - ast::SourceFile(file) => { - top_attrs = Some(Attrs::new(&file, &hygiene)); - ctx.lower_module_items(&file) - }, - ast::MacroItems(items) => { - ctx.lower_module_items(&items) - }, - // Macros can expand to expressions. We return an empty item tree in this case, but - // still need to collect inner items. - ast::Expr(e) => { - ctx.lower_inner_items(e.syntax()) - }, - _ => { - panic!("cannot create item tree from {:?}", syntax); - }, - } - }; - - if let Some(attrs) = top_attrs { - item_tree.attrs.insert(AttrOwner::TopLevel, attrs); - } - item_tree.shrink_to_fit(); - Arc::new(item_tree) - } - - fn empty() -> Self { - Self { - top_level: Default::default(), - attrs: Default::default(), - inner_items: Default::default(), - data: Default::default(), - } - } - - fn shrink_to_fit(&mut self) { - if let Some(data) = &mut self.data { - let ItemTreeData { - imports, - extern_crates, - functions, - structs, - fields, - unions, - enums, - variants, - consts, - statics, - traits, - impls, - type_aliases, - mods, - macro_calls, - exprs, - vis, - generics, - } = &mut **data; - - imports.shrink_to_fit(); - extern_crates.shrink_to_fit(); - functions.shrink_to_fit(); - structs.shrink_to_fit(); - fields.shrink_to_fit(); - unions.shrink_to_fit(); - enums.shrink_to_fit(); - variants.shrink_to_fit(); - 
consts.shrink_to_fit(); - statics.shrink_to_fit(); - traits.shrink_to_fit(); - impls.shrink_to_fit(); - type_aliases.shrink_to_fit(); - mods.shrink_to_fit(); - macro_calls.shrink_to_fit(); - exprs.shrink_to_fit(); - - vis.arena.shrink_to_fit(); - generics.arena.shrink_to_fit(); - } - } - - /// Returns an iterator over all items located at the top level of the `HirFileId` this - /// `ItemTree` was created from. - pub fn top_level_items(&self) -> &[ModItem] { - &self.top_level - } - - /// Returns the inner attributes of the source file. - pub fn top_level_attrs(&self) -> &Attrs { - self.attrs.get(&AttrOwner::TopLevel).unwrap_or(&Attrs::EMPTY) - } - - pub fn attrs(&self, of: AttrOwner) -> &Attrs { - self.attrs.get(&of).unwrap_or(&Attrs::EMPTY) - } - - /// Returns the lowered inner items that `ast` corresponds to. - /// - /// Most AST items are lowered to a single `ModItem`, but some (eg. `use` items) may be lowered - /// to multiple items in the `ItemTree`. - pub fn inner_items(&self, ast: FileAstId) -> &[ModItem] { - &self.inner_items[&ast] - } - - pub fn all_inner_items(&self) -> impl Iterator + '_ { - self.inner_items.values().flatten().copied() - } - - pub fn source(&self, db: &dyn DefDatabase, of: ItemTreeId) -> S::Source { - // This unwrap cannot fail, since it has either succeeded above, or resulted in an empty - // ItemTree (in which case there is no valid `FileItemTreeId` to call this method with). 
- let root = - db.parse_or_expand(of.file_id).expect("parse_or_expand failed on constructed ItemTree"); - - let id = self[of.value].ast_id(); - let map = db.ast_id_map(of.file_id); - let ptr = map.get(id); - ptr.to_node(&root) - } - - fn data(&self) -> &ItemTreeData { - self.data.as_ref().expect("attempted to access data of empty ItemTree") - } - - fn data_mut(&mut self) -> &mut ItemTreeData { - self.data.get_or_insert_with(Box::default) - } -} - -#[derive(Default, Debug, Eq, PartialEq)] -struct ItemVisibilities { - arena: Arena, -} - -impl ItemVisibilities { - fn alloc(&mut self, vis: RawVisibility) -> RawVisibilityId { - match &vis { - RawVisibility::Public => RawVisibilityId::PUB, - RawVisibility::Module(path) if path.segments.is_empty() => match &path.kind { - PathKind::Super(0) => RawVisibilityId::PRIV, - PathKind::Crate => RawVisibilityId::PUB_CRATE, - _ => RawVisibilityId(self.arena.alloc(vis).into_raw().into()), - }, - _ => RawVisibilityId(self.arena.alloc(vis).into_raw().into()), - } - } -} - -static VIS_PUB: RawVisibility = RawVisibility::Public; -static VIS_PRIV: RawVisibility = - RawVisibility::Module(ModPath { kind: PathKind::Super(0), segments: Vec::new() }); -static VIS_PUB_CRATE: RawVisibility = - RawVisibility::Module(ModPath { kind: PathKind::Crate, segments: Vec::new() }); - -#[derive(Default, Debug, Eq, PartialEq)] -struct GenericParamsStorage { - arena: Arena, -} - -impl GenericParamsStorage { - fn alloc(&mut self, params: GenericParams) -> GenericParamsId { - if params.types.is_empty() && params.where_predicates.is_empty() { - return GenericParamsId::EMPTY; - } - - GenericParamsId(self.arena.alloc(params).into_raw().into()) - } -} - -static EMPTY_GENERICS: GenericParams = - GenericParams { types: Arena::new(), where_predicates: Vec::new() }; - -#[derive(Default, Debug, Eq, PartialEq)] -struct ItemTreeData { - imports: Arena, - extern_crates: Arena, - functions: Arena, - structs: Arena, - fields: Arena, - unions: Arena, - enums: Arena, - 
variants: Arena, - consts: Arena, - statics: Arena, - traits: Arena, - impls: Arena, - type_aliases: Arena, - mods: Arena, - macro_calls: Arena, - exprs: Arena, - - vis: ItemVisibilities, - generics: GenericParamsStorage, -} - -#[derive(Debug, Eq, PartialEq, Hash)] -pub enum AttrOwner { - /// Attributes on an item. - ModItem(ModItem), - /// Inner attributes of the source file. - TopLevel, - - Variant(Idx), - Field(Idx), - // FIXME: Store variant and field attrs, and stop reparsing them in `attrs_query`. -} - -macro_rules! from_attrs { - ( $( $var:ident($t:ty) ),+ ) => { - $( - impl From<$t> for AttrOwner { - fn from(t: $t) -> AttrOwner { - AttrOwner::$var(t) - } - } - )+ - }; -} - -from_attrs!(ModItem(ModItem), Variant(Idx), Field(Idx)); - -/// Trait implemented by all item nodes in the item tree. -pub trait ItemTreeNode: Clone { - type Source: AstNode + Into; - - fn ast_id(&self) -> FileAstId; - - /// Looks up an instance of `Self` in an item tree. - fn lookup(tree: &ItemTree, index: Idx) -> &Self; - - /// Downcasts a `ModItem` to a `FileItemTreeId` specific to this type. - fn id_from_mod_item(mod_item: ModItem) -> Option>; - - /// Upcasts a `FileItemTreeId` to a generic `ModItem`. - fn id_to_mod_item(id: FileItemTreeId) -> ModItem; -} - -pub struct FileItemTreeId { - index: Idx, - _p: PhantomData, -} - -impl Clone for FileItemTreeId { - fn clone(&self) -> Self { - Self { index: self.index, _p: PhantomData } - } -} -impl Copy for FileItemTreeId {} - -impl PartialEq for FileItemTreeId { - fn eq(&self, other: &FileItemTreeId) -> bool { - self.index == other.index - } -} -impl Eq for FileItemTreeId {} - -impl Hash for FileItemTreeId { - fn hash(&self, state: &mut H) { - self.index.hash(state) - } -} - -impl fmt::Debug for FileItemTreeId { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.index.fmt(f) - } -} - -pub type ItemTreeId = InFile>; - -macro_rules! mod_items { - ( $( $typ:ident in $fld:ident -> $ast:ty ),+ $(,)? 
) => { - #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)] - pub enum ModItem { - $( - $typ(FileItemTreeId<$typ>), - )+ - } - - $( - impl From> for ModItem { - fn from(id: FileItemTreeId<$typ>) -> ModItem { - ModItem::$typ(id) - } - } - )+ - - $( - impl ItemTreeNode for $typ { - type Source = $ast; - - fn ast_id(&self) -> FileAstId { - self.ast_id - } - - fn lookup(tree: &ItemTree, index: Idx) -> &Self { - &tree.data().$fld[index] - } - - fn id_from_mod_item(mod_item: ModItem) -> Option> { - if let ModItem::$typ(id) = mod_item { - Some(id) - } else { - None - } - } - - fn id_to_mod_item(id: FileItemTreeId) -> ModItem { - ModItem::$typ(id) - } - } - - impl Index> for ItemTree { - type Output = $typ; - - fn index(&self, index: Idx<$typ>) -> &Self::Output { - &self.data().$fld[index] - } - } - )+ - }; -} - -mod_items! { - Import in imports -> ast::Use, - ExternCrate in extern_crates -> ast::ExternCrate, - Function in functions -> ast::Fn, - Struct in structs -> ast::Struct, - Union in unions -> ast::Union, - Enum in enums -> ast::Enum, - Const in consts -> ast::Const, - Static in statics -> ast::Static, - Trait in traits -> ast::Trait, - Impl in impls -> ast::Impl, - TypeAlias in type_aliases -> ast::TypeAlias, - Mod in mods -> ast::Module, - MacroCall in macro_calls -> ast::MacroCall, -} - -macro_rules! impl_index { - ( $($fld:ident: $t:ty),+ $(,)? 
) => { - $( - impl Index> for ItemTree { - type Output = $t; - - fn index(&self, index: Idx<$t>) -> &Self::Output { - &self.data().$fld[index] - } - } - )+ - }; -} - -impl_index!(fields: Field, variants: Variant, exprs: Expr); - -impl Index for ItemTree { - type Output = RawVisibility; - fn index(&self, index: RawVisibilityId) -> &Self::Output { - match index { - RawVisibilityId::PRIV => &VIS_PRIV, - RawVisibilityId::PUB => &VIS_PUB, - RawVisibilityId::PUB_CRATE => &VIS_PUB_CRATE, - _ => &self.data().vis.arena[Idx::from_raw(index.0.into())], - } - } -} - -impl Index for ItemTree { - type Output = GenericParams; - - fn index(&self, index: GenericParamsId) -> &Self::Output { - match index { - GenericParamsId::EMPTY => &EMPTY_GENERICS, - _ => &self.data().generics.arena[Idx::from_raw(index.0.into())], - } - } -} - -impl Index> for ItemTree { - type Output = N; - fn index(&self, id: FileItemTreeId) -> &N { - N::lookup(self, id.index) - } -} - -/// A desugared `use` import. -#[derive(Debug, Clone, Eq, PartialEq)] -pub struct Import { - pub path: ModPath, - pub alias: Option, - pub visibility: RawVisibilityId, - pub is_glob: bool, - pub is_prelude: bool, - /// AST ID of the `use` or `extern crate` item this import was derived from. Note that many - /// `Import`s can map to the same `use` item. - pub ast_id: FileAstId, -} - -#[derive(Debug, Clone, Eq, PartialEq)] -pub struct ExternCrate { - pub path: ModPath, - pub alias: Option, - pub visibility: RawVisibilityId, - /// Whether this is a `#[macro_use] extern crate ...`. 
- pub is_macro_use: bool, - pub ast_id: FileAstId, -} - -#[derive(Debug, Clone, Eq, PartialEq)] -pub struct Function { - pub name: Name, - pub visibility: RawVisibilityId, - pub generic_params: GenericParamsId, - pub has_self_param: bool, - pub is_unsafe: bool, - pub params: Box<[TypeRef]>, - pub is_varargs: bool, - pub ret_type: TypeRef, - pub ast_id: FileAstId, -} - -#[derive(Debug, Clone, Eq, PartialEq)] -pub struct Struct { - pub name: Name, - pub visibility: RawVisibilityId, - pub generic_params: GenericParamsId, - pub fields: Fields, - pub ast_id: FileAstId, - pub kind: StructDefKind, -} - -#[derive(Debug, Clone, Eq, PartialEq)] -pub enum StructDefKind { - /// `struct S { ... }` - type namespace only. - Record, - /// `struct S(...);` - Tuple, - /// `struct S;` - Unit, -} - -#[derive(Debug, Clone, Eq, PartialEq)] -pub struct Union { - pub name: Name, - pub visibility: RawVisibilityId, - pub generic_params: GenericParamsId, - pub fields: Fields, - pub ast_id: FileAstId, -} - -#[derive(Debug, Clone, Eq, PartialEq)] -pub struct Enum { - pub name: Name, - pub visibility: RawVisibilityId, - pub generic_params: GenericParamsId, - pub variants: IdRange, - pub ast_id: FileAstId, -} - -#[derive(Debug, Clone, Eq, PartialEq)] -pub struct Const { - /// const _: () = (); - pub name: Option, - pub visibility: RawVisibilityId, - pub type_ref: TypeRef, - pub ast_id: FileAstId, -} - -#[derive(Debug, Clone, Eq, PartialEq)] -pub struct Static { - pub name: Name, - pub visibility: RawVisibilityId, - pub mutable: bool, - pub type_ref: TypeRef, - pub ast_id: FileAstId, -} - -#[derive(Debug, Clone, Eq, PartialEq)] -pub struct Trait { - pub name: Name, - pub visibility: RawVisibilityId, - pub generic_params: GenericParamsId, - pub auto: bool, - pub items: Box<[AssocItem]>, - pub ast_id: FileAstId, -} - -#[derive(Debug, Clone, Eq, PartialEq)] -pub struct Impl { - pub generic_params: GenericParamsId, - pub target_trait: Option, - pub target_type: TypeRef, - pub is_negative: bool, - pub 
items: Box<[AssocItem]>, - pub ast_id: FileAstId, -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct TypeAlias { - pub name: Name, - pub visibility: RawVisibilityId, - /// Bounds on the type alias itself. Only valid in trait declarations, eg. `type Assoc: Copy;`. - pub bounds: Box<[TypeBound]>, - pub generic_params: GenericParamsId, - pub type_ref: Option, - pub ast_id: FileAstId, -} - -#[derive(Debug, Clone, Eq, PartialEq)] -pub struct Mod { - pub name: Name, - pub visibility: RawVisibilityId, - pub kind: ModKind, - pub ast_id: FileAstId, -} - -#[derive(Debug, Clone, Eq, PartialEq)] -pub enum ModKind { - /// `mod m { ... }` - Inline { items: Box<[ModItem]> }, - - /// `mod m;` - Outline {}, -} - -#[derive(Debug, Clone, Eq, PartialEq)] -pub struct MacroCall { - /// For `macro_rules!` declarations, this is the name of the declared macro. - pub name: Option, - /// Path to the called macro. - pub path: ModPath, - /// Has `#[macro_export]`. - pub is_export: bool, - /// Has `#[macro_export(local_inner_macros)]`. - pub is_local_inner: bool, - /// Has `#[rustc_builtin_macro]`. - pub is_builtin: bool, - pub ast_id: FileAstId, -} - -// NB: There's no `FileAstId` for `Expr`. The only case where this would be useful is for array -// lengths, but we don't do much with them yet. -#[derive(Debug, Clone, Eq, PartialEq)] -pub struct Expr; - -macro_rules! impl_froms { - ($e:ident { $($v:ident ($t:ty)),* $(,)? 
}) => { - $( - impl From<$t> for $e { - fn from(it: $t) -> $e { - $e::$v(it) - } - } - )* - } -} - -impl ModItem { - pub fn as_assoc_item(&self) -> Option { - match self { - ModItem::Import(_) - | ModItem::ExternCrate(_) - | ModItem::Struct(_) - | ModItem::Union(_) - | ModItem::Enum(_) - | ModItem::Static(_) - | ModItem::Trait(_) - | ModItem::Impl(_) - | ModItem::Mod(_) => None, - ModItem::MacroCall(call) => Some(AssocItem::MacroCall(*call)), - ModItem::Const(konst) => Some(AssocItem::Const(*konst)), - ModItem::TypeAlias(alias) => Some(AssocItem::TypeAlias(*alias)), - ModItem::Function(func) => Some(AssocItem::Function(*func)), - } - } - - pub fn downcast(self) -> Option> { - N::id_from_mod_item(self) - } -} - -#[derive(Debug, Copy, Clone, Eq, PartialEq)] -pub enum AssocItem { - Function(FileItemTreeId), - TypeAlias(FileItemTreeId), - Const(FileItemTreeId), - MacroCall(FileItemTreeId), -} - -impl_froms!(AssocItem { - Function(FileItemTreeId), - TypeAlias(FileItemTreeId), - Const(FileItemTreeId), - MacroCall(FileItemTreeId), -}); - -impl From for ModItem { - fn from(item: AssocItem) -> Self { - match item { - AssocItem::Function(it) => it.into(), - AssocItem::TypeAlias(it) => it.into(), - AssocItem::Const(it) => it.into(), - AssocItem::MacroCall(it) => it.into(), - } - } -} - -#[derive(Debug, Eq, PartialEq)] -pub struct Variant { - pub name: Name, - pub fields: Fields, -} - -pub struct IdRange { - range: Range, - _p: PhantomData, -} - -impl IdRange { - fn new(range: Range>) -> Self { - Self { range: range.start.into_raw().into()..range.end.into_raw().into(), _p: PhantomData } - } -} - -impl Iterator for IdRange { - type Item = Idx; - fn next(&mut self) -> Option { - self.range.next().map(|raw| Idx::from_raw(raw.into())) - } -} - -impl fmt::Debug for IdRange { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_tuple(&format!("IdRange::<{}>", type_name::())).field(&self.range).finish() - } -} - -impl Clone for IdRange { - fn clone(&self) -> Self { 
- Self { range: self.range.clone(), _p: PhantomData } - } -} - -impl PartialEq for IdRange { - fn eq(&self, other: &Self) -> bool { - self.range == other.range - } -} - -impl Eq for IdRange {} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum Fields { - Record(IdRange), - Tuple(IdRange), - Unit, -} - -/// A single field of an enum variant or struct -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct Field { - pub name: Name, - pub type_ref: TypeRef, - pub visibility: RawVisibilityId, -} diff --git a/crates/ra_hir_def/src/item_tree/lower.rs b/crates/ra_hir_def/src/item_tree/lower.rs deleted file mode 100644 index 450ef87981..0000000000 --- a/crates/ra_hir_def/src/item_tree/lower.rs +++ /dev/null @@ -1,705 +0,0 @@ -//! AST -> `ItemTree` lowering code. - -use std::{collections::hash_map::Entry, mem, sync::Arc}; - -use hir_expand::{ast_id_map::AstIdMap, hygiene::Hygiene, HirFileId}; -use ra_arena::map::ArenaMap; -use ra_syntax::{ - ast::{self, ModuleItemOwner}, - SyntaxNode, -}; -use smallvec::SmallVec; - -use crate::{ - attr::Attrs, - generics::{GenericParams, TypeParamData, TypeParamProvenance}, -}; - -use super::*; - -fn id(index: Idx) -> FileItemTreeId { - FileItemTreeId { index, _p: PhantomData } -} - -struct ModItems(SmallVec<[ModItem; 1]>); - -impl From for ModItems -where - T: Into, -{ - fn from(t: T) -> Self { - ModItems(SmallVec::from_buf([t.into(); 1])) - } -} - -pub(super) struct Ctx { - tree: ItemTree, - hygiene: Hygiene, - file: HirFileId, - source_ast_id_map: Arc, - body_ctx: crate::body::LowerCtx, - inner_items: Vec, - forced_visibility: Option, -} - -impl Ctx { - pub(super) fn new(db: &dyn DefDatabase, hygiene: Hygiene, file: HirFileId) -> Self { - Self { - tree: ItemTree::empty(), - hygiene, - file, - source_ast_id_map: db.ast_id_map(file), - body_ctx: crate::body::LowerCtx::new(db, file), - inner_items: Vec::new(), - forced_visibility: None, - } - } - - pub(super) fn lower_module_items(mut self, item_owner: &dyn ModuleItemOwner) -> ItemTree { - 
self.tree.top_level = item_owner - .items() - .flat_map(|item| self.lower_mod_item(&item, false)) - .flat_map(|items| items.0) - .collect(); - self.tree - } - - pub(super) fn lower_inner_items(mut self, within: &SyntaxNode) -> ItemTree { - self.collect_inner_items(within); - self.tree - } - - fn data(&mut self) -> &mut ItemTreeData { - self.tree.data_mut() - } - - fn lower_mod_item(&mut self, item: &ast::Item, inner: bool) -> Option { - assert!(inner || self.inner_items.is_empty()); - - // Collect inner items for 1-to-1-lowered items. - match item { - ast::Item::Struct(_) - | ast::Item::Union(_) - | ast::Item::Enum(_) - | ast::Item::Fn(_) - | ast::Item::TypeAlias(_) - | ast::Item::Const(_) - | ast::Item::Static(_) - | ast::Item::MacroCall(_) => { - // Skip this if we're already collecting inner items. We'll descend into all nodes - // already. - if !inner { - self.collect_inner_items(item.syntax()); - } - } - - // These are handled in their respective `lower_X` method (since we can't just blindly - // walk them). - ast::Item::Trait(_) | ast::Item::Impl(_) | ast::Item::ExternBlock(_) => {} - - // These don't have inner items. 
- ast::Item::Module(_) | ast::Item::ExternCrate(_) | ast::Item::Use(_) => {} - }; - - let attrs = Attrs::new(item, &self.hygiene); - let items = match item { - ast::Item::Struct(ast) => self.lower_struct(ast).map(Into::into), - ast::Item::Union(ast) => self.lower_union(ast).map(Into::into), - ast::Item::Enum(ast) => self.lower_enum(ast).map(Into::into), - ast::Item::Fn(ast) => self.lower_function(ast).map(Into::into), - ast::Item::TypeAlias(ast) => self.lower_type_alias(ast).map(Into::into), - ast::Item::Static(ast) => self.lower_static(ast).map(Into::into), - ast::Item::Const(ast) => Some(self.lower_const(ast).into()), - ast::Item::Module(ast) => self.lower_module(ast).map(Into::into), - ast::Item::Trait(ast) => self.lower_trait(ast).map(Into::into), - ast::Item::Impl(ast) => self.lower_impl(ast).map(Into::into), - ast::Item::Use(ast) => Some(ModItems( - self.lower_use(ast).into_iter().map(Into::into).collect::>(), - )), - ast::Item::ExternCrate(ast) => self.lower_extern_crate(ast).map(Into::into), - ast::Item::MacroCall(ast) => self.lower_macro_call(ast).map(Into::into), - ast::Item::ExternBlock(ast) => { - Some(ModItems(self.lower_extern_block(ast).into_iter().collect::>())) - } - }; - - if !attrs.is_empty() { - for item in items.iter().flat_map(|items| &items.0) { - self.add_attrs((*item).into(), attrs.clone()); - } - } - - items - } - - fn add_attrs(&mut self, item: AttrOwner, attrs: Attrs) { - match self.tree.attrs.entry(item) { - Entry::Occupied(mut entry) => { - *entry.get_mut() = entry.get().merge(attrs); - } - Entry::Vacant(entry) => { - entry.insert(attrs); - } - } - } - - fn collect_inner_items(&mut self, container: &SyntaxNode) { - let forced_vis = self.forced_visibility.take(); - let mut inner_items = mem::take(&mut self.tree.inner_items); - inner_items.extend(container.descendants().skip(1).filter_map(ast::Item::cast).filter_map( - |item| { - let ast_id = self.source_ast_id_map.ast_id(&item); - Some((ast_id, self.lower_mod_item(&item, true)?.0)) - }, 
- )); - self.tree.inner_items = inner_items; - self.forced_visibility = forced_vis; - } - - fn lower_assoc_item(&mut self, item: &ast::AssocItem) -> Option { - match item { - ast::AssocItem::Fn(ast) => self.lower_function(ast).map(Into::into), - ast::AssocItem::TypeAlias(ast) => self.lower_type_alias(ast).map(Into::into), - ast::AssocItem::Const(ast) => Some(self.lower_const(ast).into()), - ast::AssocItem::MacroCall(ast) => self.lower_macro_call(ast).map(Into::into), - } - } - - fn lower_struct(&mut self, strukt: &ast::Struct) -> Option> { - let visibility = self.lower_visibility(strukt); - let name = strukt.name()?.as_name(); - let generic_params = self.lower_generic_params(GenericsOwner::Struct, strukt); - let fields = self.lower_fields(&strukt.kind()); - let ast_id = self.source_ast_id_map.ast_id(strukt); - let kind = match strukt.kind() { - ast::StructKind::Record(_) => StructDefKind::Record, - ast::StructKind::Tuple(_) => StructDefKind::Tuple, - ast::StructKind::Unit => StructDefKind::Unit, - }; - let res = Struct { name, visibility, generic_params, fields, ast_id, kind }; - Some(id(self.data().structs.alloc(res))) - } - - fn lower_fields(&mut self, strukt_kind: &ast::StructKind) -> Fields { - match strukt_kind { - ast::StructKind::Record(it) => { - let range = self.lower_record_fields(it); - Fields::Record(range) - } - ast::StructKind::Tuple(it) => { - let range = self.lower_tuple_fields(it); - Fields::Tuple(range) - } - ast::StructKind::Unit => Fields::Unit, - } - } - - fn lower_record_fields(&mut self, fields: &ast::RecordFieldList) -> IdRange { - let start = self.next_field_idx(); - for field in fields.fields() { - if let Some(data) = self.lower_record_field(&field) { - let idx = self.data().fields.alloc(data); - self.add_attrs(idx.into(), Attrs::new(&field, &self.hygiene)); - } - } - let end = self.next_field_idx(); - IdRange::new(start..end) - } - - fn lower_record_field(&mut self, field: &ast::RecordField) -> Option { - let name = 
field.name()?.as_name(); - let visibility = self.lower_visibility(field); - let type_ref = self.lower_type_ref_opt(field.ty()); - let res = Field { name, type_ref, visibility }; - Some(res) - } - - fn lower_tuple_fields(&mut self, fields: &ast::TupleFieldList) -> IdRange { - let start = self.next_field_idx(); - for (i, field) in fields.fields().enumerate() { - let data = self.lower_tuple_field(i, &field); - let idx = self.data().fields.alloc(data); - self.add_attrs(idx.into(), Attrs::new(&field, &self.hygiene)); - } - let end = self.next_field_idx(); - IdRange::new(start..end) - } - - fn lower_tuple_field(&mut self, idx: usize, field: &ast::TupleField) -> Field { - let name = Name::new_tuple_field(idx); - let visibility = self.lower_visibility(field); - let type_ref = self.lower_type_ref_opt(field.ty()); - let res = Field { name, type_ref, visibility }; - res - } - - fn lower_union(&mut self, union: &ast::Union) -> Option> { - let visibility = self.lower_visibility(union); - let name = union.name()?.as_name(); - let generic_params = self.lower_generic_params(GenericsOwner::Union, union); - let fields = match union.record_field_list() { - Some(record_field_list) => self.lower_fields(&StructKind::Record(record_field_list)), - None => Fields::Record(IdRange::new(self.next_field_idx()..self.next_field_idx())), - }; - let ast_id = self.source_ast_id_map.ast_id(union); - let res = Union { name, visibility, generic_params, fields, ast_id }; - Some(id(self.data().unions.alloc(res))) - } - - fn lower_enum(&mut self, enum_: &ast::Enum) -> Option> { - let visibility = self.lower_visibility(enum_); - let name = enum_.name()?.as_name(); - let generic_params = self.lower_generic_params(GenericsOwner::Enum, enum_); - let variants = match &enum_.variant_list() { - Some(variant_list) => self.lower_variants(variant_list), - None => IdRange::new(self.next_variant_idx()..self.next_variant_idx()), - }; - let ast_id = self.source_ast_id_map.ast_id(enum_); - let res = Enum { name, 
visibility, generic_params, variants, ast_id }; - Some(id(self.data().enums.alloc(res))) - } - - fn lower_variants(&mut self, variants: &ast::VariantList) -> IdRange { - let start = self.next_variant_idx(); - for variant in variants.variants() { - if let Some(data) = self.lower_variant(&variant) { - let idx = self.data().variants.alloc(data); - self.add_attrs(idx.into(), Attrs::new(&variant, &self.hygiene)); - } - } - let end = self.next_variant_idx(); - IdRange::new(start..end) - } - - fn lower_variant(&mut self, variant: &ast::Variant) -> Option { - let name = variant.name()?.as_name(); - let fields = self.lower_fields(&variant.kind()); - let res = Variant { name, fields }; - Some(res) - } - - fn lower_function(&mut self, func: &ast::Fn) -> Option> { - let visibility = self.lower_visibility(func); - let name = func.name()?.as_name(); - - let mut params = Vec::new(); - let mut has_self_param = false; - if let Some(param_list) = func.param_list() { - if let Some(self_param) = param_list.self_param() { - let self_type = match self_param.ty() { - Some(type_ref) => TypeRef::from_ast(&self.body_ctx, type_ref), - None => { - let self_type = TypeRef::Path(name![Self].into()); - match self_param.kind() { - ast::SelfParamKind::Owned => self_type, - ast::SelfParamKind::Ref => { - TypeRef::Reference(Box::new(self_type), Mutability::Shared) - } - ast::SelfParamKind::MutRef => { - TypeRef::Reference(Box::new(self_type), Mutability::Mut) - } - } - } - }; - params.push(self_type); - has_self_param = true; - } - for param in param_list.params() { - let type_ref = TypeRef::from_ast_opt(&self.body_ctx, param.ty()); - params.push(type_ref); - } - } - - let mut is_varargs = false; - if let Some(params) = func.param_list() { - if let Some(last) = params.params().last() { - is_varargs = last.dotdotdot_token().is_some(); - } - } - - let ret_type = match func.ret_type().and_then(|rt| rt.ty()) { - Some(type_ref) => TypeRef::from_ast(&self.body_ctx, type_ref), - _ => TypeRef::unit(), - }; 
- - let ret_type = if func.async_token().is_some() { - let future_impl = desugar_future_path(ret_type); - let ty_bound = TypeBound::Path(future_impl); - TypeRef::ImplTrait(vec![ty_bound]) - } else { - ret_type - }; - - let ast_id = self.source_ast_id_map.ast_id(func); - let mut res = Function { - name, - visibility, - generic_params: GenericParamsId::EMPTY, - has_self_param, - is_unsafe: func.unsafe_token().is_some(), - params: params.into_boxed_slice(), - is_varargs, - ret_type, - ast_id, - }; - res.generic_params = self.lower_generic_params(GenericsOwner::Function(&res), func); - - Some(id(self.data().functions.alloc(res))) - } - - fn lower_type_alias( - &mut self, - type_alias: &ast::TypeAlias, - ) -> Option> { - let name = type_alias.name()?.as_name(); - let type_ref = type_alias.ty().map(|it| self.lower_type_ref(&it)); - let visibility = self.lower_visibility(type_alias); - let bounds = self.lower_type_bounds(type_alias); - let generic_params = self.lower_generic_params(GenericsOwner::TypeAlias, type_alias); - let ast_id = self.source_ast_id_map.ast_id(type_alias); - let res = TypeAlias { - name, - visibility, - bounds: bounds.into_boxed_slice(), - generic_params, - type_ref, - ast_id, - }; - Some(id(self.data().type_aliases.alloc(res))) - } - - fn lower_static(&mut self, static_: &ast::Static) -> Option> { - let name = static_.name()?.as_name(); - let type_ref = self.lower_type_ref_opt(static_.ty()); - let visibility = self.lower_visibility(static_); - let mutable = static_.mut_token().is_some(); - let ast_id = self.source_ast_id_map.ast_id(static_); - let res = Static { name, visibility, mutable, type_ref, ast_id }; - Some(id(self.data().statics.alloc(res))) - } - - fn lower_const(&mut self, konst: &ast::Const) -> FileItemTreeId { - let name = konst.name().map(|it| it.as_name()); - let type_ref = self.lower_type_ref_opt(konst.ty()); - let visibility = self.lower_visibility(konst); - let ast_id = self.source_ast_id_map.ast_id(konst); - let res = Const { name, 
visibility, type_ref, ast_id }; - id(self.data().consts.alloc(res)) - } - - fn lower_module(&mut self, module: &ast::Module) -> Option> { - let name = module.name()?.as_name(); - let visibility = self.lower_visibility(module); - let kind = if module.semicolon_token().is_some() { - ModKind::Outline {} - } else { - ModKind::Inline { - items: module - .item_list() - .map(|list| { - list.items() - .flat_map(|item| self.lower_mod_item(&item, false)) - .flat_map(|items| items.0) - .collect() - }) - .unwrap_or_else(|| { - mark::hit!(name_res_works_for_broken_modules); - Box::new([]) as Box<[_]> - }), - } - }; - let ast_id = self.source_ast_id_map.ast_id(module); - let res = Mod { name, visibility, kind, ast_id }; - Some(id(self.data().mods.alloc(res))) - } - - fn lower_trait(&mut self, trait_def: &ast::Trait) -> Option> { - let name = trait_def.name()?.as_name(); - let visibility = self.lower_visibility(trait_def); - let generic_params = - self.lower_generic_params_and_inner_items(GenericsOwner::Trait(trait_def), trait_def); - let auto = trait_def.auto_token().is_some(); - let items = trait_def.assoc_item_list().map(|list| { - self.with_inherited_visibility(visibility, |this| { - list.assoc_items() - .filter_map(|item| { - let attrs = Attrs::new(&item, &this.hygiene); - this.collect_inner_items(item.syntax()); - this.lower_assoc_item(&item).map(|item| { - this.add_attrs(ModItem::from(item).into(), attrs); - item - }) - }) - .collect() - }) - }); - let ast_id = self.source_ast_id_map.ast_id(trait_def); - let res = Trait { - name, - visibility, - generic_params, - auto, - items: items.unwrap_or_default(), - ast_id, - }; - Some(id(self.data().traits.alloc(res))) - } - - fn lower_impl(&mut self, impl_def: &ast::Impl) -> Option> { - let generic_params = - self.lower_generic_params_and_inner_items(GenericsOwner::Impl, impl_def); - let target_trait = impl_def.trait_().map(|tr| self.lower_type_ref(&tr)); - let target_type = self.lower_type_ref(&impl_def.self_ty()?); - let 
is_negative = impl_def.excl_token().is_some(); - - // We cannot use `assoc_items()` here as that does not include macro calls. - let items = impl_def - .assoc_item_list() - .into_iter() - .flat_map(|it| it.assoc_items()) - .filter_map(|item| { - self.collect_inner_items(item.syntax()); - let assoc = self.lower_assoc_item(&item)?; - let attrs = Attrs::new(&item, &self.hygiene); - self.add_attrs(ModItem::from(assoc).into(), attrs); - Some(assoc) - }) - .collect(); - let ast_id = self.source_ast_id_map.ast_id(impl_def); - let res = Impl { generic_params, target_trait, target_type, is_negative, items, ast_id }; - Some(id(self.data().impls.alloc(res))) - } - - fn lower_use(&mut self, use_item: &ast::Use) -> Vec> { - // FIXME: cfg_attr - let is_prelude = use_item.has_atom_attr("prelude_import"); - let visibility = self.lower_visibility(use_item); - let ast_id = self.source_ast_id_map.ast_id(use_item); - - // Every use item can expand to many `Import`s. - let mut imports = Vec::new(); - let tree = self.tree.data_mut(); - ModPath::expand_use_item( - InFile::new(self.file, use_item.clone()), - &self.hygiene, - |path, _tree, is_glob, alias| { - imports.push(id(tree.imports.alloc(Import { - path, - alias, - visibility, - is_glob, - is_prelude, - ast_id, - }))); - }, - ); - - imports - } - - fn lower_extern_crate( - &mut self, - extern_crate: &ast::ExternCrate, - ) -> Option> { - let path = ModPath::from_name_ref(&extern_crate.name_ref()?); - let alias = extern_crate.rename().map(|a| { - a.name().map(|it| it.as_name()).map_or(ImportAlias::Underscore, ImportAlias::Alias) - }); - let visibility = self.lower_visibility(extern_crate); - let ast_id = self.source_ast_id_map.ast_id(extern_crate); - // FIXME: cfg_attr - let is_macro_use = extern_crate.has_atom_attr("macro_use"); - - let res = ExternCrate { path, alias, visibility, is_macro_use, ast_id }; - Some(id(self.data().extern_crates.alloc(res))) - } - - fn lower_macro_call(&mut self, m: &ast::MacroCall) -> Option> { - let name 
= m.name().map(|it| it.as_name()); - let attrs = Attrs::new(m, &self.hygiene); - let path = ModPath::from_src(m.path()?, &self.hygiene)?; - - let ast_id = self.source_ast_id_map.ast_id(m); - - // FIXME: cfg_attr - let export_attr = attrs.by_key("macro_export"); - - let is_export = export_attr.exists(); - let is_local_inner = if is_export { - export_attr.tt_values().map(|it| &it.token_trees).flatten().any(|it| match it { - tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => { - ident.text.contains("local_inner_macros") - } - _ => false, - }) - } else { - false - }; - - let is_builtin = attrs.by_key("rustc_builtin_macro").exists(); - let res = MacroCall { name, path, is_export, is_builtin, is_local_inner, ast_id }; - Some(id(self.data().macro_calls.alloc(res))) - } - - fn lower_extern_block(&mut self, block: &ast::ExternBlock) -> Vec { - block.extern_item_list().map_or(Vec::new(), |list| { - list.extern_items() - .filter_map(|item| { - self.collect_inner_items(item.syntax()); - let attrs = Attrs::new(&item, &self.hygiene); - let id: ModItem = match item { - ast::ExternItem::Fn(ast) => { - let func = self.lower_function(&ast)?; - self.data().functions[func.index].is_unsafe = true; - func.into() - } - ast::ExternItem::Static(ast) => { - let statik = self.lower_static(&ast)?; - statik.into() - } - ast::ExternItem::MacroCall(_) => return None, - }; - self.add_attrs(id.into(), attrs); - Some(id) - }) - .collect() - }) - } - - /// Lowers generics defined on `node` and collects inner items defined within. - fn lower_generic_params_and_inner_items( - &mut self, - owner: GenericsOwner<'_>, - node: &impl ast::GenericParamsOwner, - ) -> GenericParamsId { - // Generics are part of item headers and may contain inner items we need to collect. 
- if let Some(params) = node.generic_param_list() { - self.collect_inner_items(params.syntax()); - } - if let Some(clause) = node.where_clause() { - self.collect_inner_items(clause.syntax()); - } - - self.lower_generic_params(owner, node) - } - - fn lower_generic_params( - &mut self, - owner: GenericsOwner<'_>, - node: &impl ast::GenericParamsOwner, - ) -> GenericParamsId { - let mut sm = &mut ArenaMap::default(); - let mut generics = GenericParams::default(); - match owner { - GenericsOwner::Function(func) => { - generics.fill(&self.body_ctx, sm, node); - // lower `impl Trait` in arguments - for param in &*func.params { - generics.fill_implicit_impl_trait_args(param); - } - } - GenericsOwner::Struct - | GenericsOwner::Enum - | GenericsOwner::Union - | GenericsOwner::TypeAlias => { - generics.fill(&self.body_ctx, sm, node); - } - GenericsOwner::Trait(trait_def) => { - // traits get the Self type as an implicit first type parameter - let self_param_id = generics.types.alloc(TypeParamData { - name: Some(name![Self]), - default: None, - provenance: TypeParamProvenance::TraitSelf, - }); - sm.insert(self_param_id, Either::Left(trait_def.clone())); - // add super traits as bounds on Self - // i.e., trait Foo: Bar is equivalent to trait Foo where Self: Bar - let self_param = TypeRef::Path(name![Self].into()); - generics.fill_bounds(&self.body_ctx, trait_def, self_param); - - generics.fill(&self.body_ctx, &mut sm, node); - } - GenericsOwner::Impl => { - // Note that we don't add `Self` here: in `impl`s, `Self` is not a - // type-parameter, but rather is a type-alias for impl's target - // type, so this is handled by the resolver. 
- generics.fill(&self.body_ctx, &mut sm, node); - } - } - - self.data().generics.alloc(generics) - } - - fn lower_type_bounds(&mut self, node: &impl ast::TypeBoundsOwner) -> Vec { - match node.type_bound_list() { - Some(bound_list) => { - bound_list.bounds().map(|it| TypeBound::from_ast(&self.body_ctx, it)).collect() - } - None => Vec::new(), - } - } - - fn lower_visibility(&mut self, item: &impl ast::VisibilityOwner) -> RawVisibilityId { - let vis = match self.forced_visibility { - Some(vis) => return vis, - None => RawVisibility::from_ast_with_hygiene(item.visibility(), &self.hygiene), - }; - - self.data().vis.alloc(vis) - } - - fn lower_type_ref(&self, type_ref: &ast::Type) -> TypeRef { - TypeRef::from_ast(&self.body_ctx, type_ref.clone()) - } - fn lower_type_ref_opt(&self, type_ref: Option) -> TypeRef { - type_ref.map(|ty| self.lower_type_ref(&ty)).unwrap_or(TypeRef::Error) - } - - /// Forces the visibility `vis` to be used for all items lowered during execution of `f`. - fn with_inherited_visibility( - &mut self, - vis: RawVisibilityId, - f: impl FnOnce(&mut Self) -> R, - ) -> R { - let old = mem::replace(&mut self.forced_visibility, Some(vis)); - let res = f(self); - self.forced_visibility = old; - res - } - - fn next_field_idx(&self) -> Idx { - Idx::from_raw(RawId::from( - self.tree.data.as_ref().map_or(0, |data| data.fields.len() as u32), - )) - } - fn next_variant_idx(&self) -> Idx { - Idx::from_raw(RawId::from( - self.tree.data.as_ref().map_or(0, |data| data.variants.len() as u32), - )) - } -} - -fn desugar_future_path(orig: TypeRef) -> Path { - let path = path![core::future::Future]; - let mut generic_args: Vec<_> = std::iter::repeat(None).take(path.segments.len() - 1).collect(); - let mut last = GenericArgs::empty(); - let binding = - AssociatedTypeBinding { name: name![Output], type_ref: Some(orig), bounds: Vec::new() }; - last.bindings.push(binding); - generic_args.push(Some(Arc::new(last))); - - Path::from_known_path(path, generic_args) -} - -enum 
GenericsOwner<'a> { - /// We need access to the partially-lowered `Function` for lowering `impl Trait` in argument - /// position. - Function(&'a Function), - Struct, - Enum, - Union, - /// The `TraitDef` is needed to fill the source map for the implicit `Self` parameter. - Trait(&'a ast::Trait), - TypeAlias, - Impl, -} diff --git a/crates/ra_hir_def/src/item_tree/tests.rs b/crates/ra_hir_def/src/item_tree/tests.rs deleted file mode 100644 index a81497fa8a..0000000000 --- a/crates/ra_hir_def/src/item_tree/tests.rs +++ /dev/null @@ -1,439 +0,0 @@ -use expect::{expect, Expect}; -use hir_expand::{db::AstDatabase, HirFileId, InFile}; -use ra_db::fixture::WithFixture; -use ra_syntax::{ast, AstNode}; -use rustc_hash::FxHashSet; -use std::sync::Arc; -use stdx::format_to; - -use crate::{db::DefDatabase, test_db::TestDB}; - -use super::{ItemTree, ModItem, ModKind}; - -fn test_inner_items(ra_fixture: &str) { - let (db, file_id) = TestDB::with_single_file(ra_fixture); - let file_id = HirFileId::from(file_id); - let tree = db.item_tree(file_id); - let root = db.parse_or_expand(file_id).unwrap(); - let ast_id_map = db.ast_id_map(file_id); - - // Traverse the item tree and collect all module/impl/trait-level items as AST nodes. 
- let mut outer_items = FxHashSet::default(); - let mut worklist = tree.top_level_items().to_vec(); - while let Some(item) = worklist.pop() { - let node: ast::Item = match item { - ModItem::Import(it) => tree.source(&db, InFile::new(file_id, it)).into(), - ModItem::ExternCrate(it) => tree.source(&db, InFile::new(file_id, it)).into(), - ModItem::Function(it) => tree.source(&db, InFile::new(file_id, it)).into(), - ModItem::Struct(it) => tree.source(&db, InFile::new(file_id, it)).into(), - ModItem::Union(it) => tree.source(&db, InFile::new(file_id, it)).into(), - ModItem::Enum(it) => tree.source(&db, InFile::new(file_id, it)).into(), - ModItem::Const(it) => tree.source(&db, InFile::new(file_id, it)).into(), - ModItem::Static(it) => tree.source(&db, InFile::new(file_id, it)).into(), - ModItem::TypeAlias(it) => tree.source(&db, InFile::new(file_id, it)).into(), - ModItem::Mod(it) => { - if let ModKind::Inline { items } = &tree[it].kind { - worklist.extend(&**items); - } - tree.source(&db, InFile::new(file_id, it)).into() - } - ModItem::Trait(it) => { - worklist.extend(tree[it].items.iter().map(|item| ModItem::from(*item))); - tree.source(&db, InFile::new(file_id, it)).into() - } - ModItem::Impl(it) => { - worklist.extend(tree[it].items.iter().map(|item| ModItem::from(*item))); - tree.source(&db, InFile::new(file_id, it)).into() - } - ModItem::MacroCall(_) => continue, - }; - - outer_items.insert(node); - } - - // Now descend the root node and check that all `ast::ModuleItem`s are either recorded above, or - // registered as inner items. 
- for item in root.descendants().skip(1).filter_map(ast::Item::cast) { - if outer_items.contains(&item) { - continue; - } - - let ast_id = ast_id_map.ast_id(&item); - assert!(!tree.inner_items(ast_id).is_empty()); - } -} - -fn item_tree(ra_fixture: &str) -> Arc { - let (db, file_id) = TestDB::with_single_file(ra_fixture); - db.item_tree(file_id.into()) -} - -fn print_item_tree(ra_fixture: &str) -> String { - let tree = item_tree(ra_fixture); - let mut out = String::new(); - - format_to!(out, "inner attrs: {:?}\n\n", tree.top_level_attrs()); - format_to!(out, "top-level items:\n"); - for item in tree.top_level_items() { - fmt_mod_item(&mut out, &tree, *item); - format_to!(out, "\n"); - } - - if !tree.inner_items.is_empty() { - format_to!(out, "\ninner items:\n\n"); - for (ast_id, items) in &tree.inner_items { - format_to!(out, "for AST {:?}:\n", ast_id); - for inner in items { - fmt_mod_item(&mut out, &tree, *inner); - format_to!(out, "\n\n"); - } - } - } - - out -} - -fn fmt_mod_item(out: &mut String, tree: &ItemTree, item: ModItem) { - let attrs = tree.attrs(item.into()); - if !attrs.is_empty() { - format_to!(out, "#[{:?}]\n", attrs); - } - - let mut children = String::new(); - match item { - ModItem::ExternCrate(it) => { - format_to!(out, "{:?}", tree[it]); - } - ModItem::Import(it) => { - format_to!(out, "{:?}", tree[it]); - } - ModItem::Function(it) => { - format_to!(out, "{:?}", tree[it]); - } - ModItem::Struct(it) => { - format_to!(out, "{:?}", tree[it]); - } - ModItem::Union(it) => { - format_to!(out, "{:?}", tree[it]); - } - ModItem::Enum(it) => { - format_to!(out, "{:?}", tree[it]); - } - ModItem::Const(it) => { - format_to!(out, "{:?}", tree[it]); - } - ModItem::Static(it) => { - format_to!(out, "{:?}", tree[it]); - } - ModItem::Trait(it) => { - format_to!(out, "{:?}", tree[it]); - for item in &*tree[it].items { - fmt_mod_item(&mut children, tree, ModItem::from(*item)); - format_to!(children, "\n"); - } - } - ModItem::Impl(it) => { - format_to!(out, 
"{:?}", tree[it]); - for item in &*tree[it].items { - fmt_mod_item(&mut children, tree, ModItem::from(*item)); - format_to!(children, "\n"); - } - } - ModItem::TypeAlias(it) => { - format_to!(out, "{:?}", tree[it]); - } - ModItem::Mod(it) => { - format_to!(out, "{:?}", tree[it]); - match &tree[it].kind { - ModKind::Inline { items } => { - for item in &**items { - fmt_mod_item(&mut children, tree, *item); - format_to!(children, "\n"); - } - } - ModKind::Outline {} => {} - } - } - ModItem::MacroCall(it) => { - format_to!(out, "{:?}", tree[it]); - } - } - - for line in children.lines() { - format_to!(out, "\n> {}", line); - } -} - -fn check(ra_fixture: &str, expect: Expect) { - let actual = print_item_tree(ra_fixture); - expect.assert_eq(&actual); -} - -#[test] -fn smoke() { - check( - r" - #![attr] - - #[attr_on_use] - use {a, b::*}; - - #[ext_crate] - extern crate krate; - - #[on_trait] - trait Tr { - #[assoc_ty] - type AssocTy: Tr<()>; - - #[assoc_const] - const CONST: u8; - - #[assoc_method] - fn method(&self); - - #[assoc_dfl_method] - fn dfl_method(&mut self) {} - } - - #[struct0] - struct Struct0; - - #[struct1] - struct Struct1(#[struct1fld] u8); - - #[struct2] - struct Struct2 { - #[struct2fld] - fld: (T, ), - } - - #[en] - enum En { - #[enum_variant] - Variant { - #[enum_field] - field: u8, - }, - } - - #[un] - union Un { - #[union_fld] - fld: u16, - } - ", - expect![[r##" - inner attrs: Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr"))] }, input: None }]) } - - top-level items: - #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr_on_use"))] }, input: None }]) }] - Import { path: ModPath { kind: Plain, segments: [Name(Text("a"))] }, alias: None, visibility: RawVisibilityId("pub(self)"), is_glob: false, is_prelude: false, ast_id: FileAstId::(0) } - #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr_on_use"))] }, input: None }]) }] - Import { path: 
ModPath { kind: Plain, segments: [Name(Text("b"))] }, alias: None, visibility: RawVisibilityId("pub(self)"), is_glob: true, is_prelude: false, ast_id: FileAstId::(0) } - #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("ext_crate"))] }, input: None }]) }] - ExternCrate { path: ModPath { kind: Plain, segments: [Name(Text("krate"))] }, alias: None, visibility: RawVisibilityId("pub(self)"), is_macro_use: false, ast_id: FileAstId::(1) } - #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("on_trait"))] }, input: None }]) }] - Trait { name: Name(Text("Tr")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(0), auto: false, items: [TypeAlias(Idx::(0)), Const(Idx::(0)), Function(Idx::(0)), Function(Idx::(1))], ast_id: FileAstId::(2) } - > #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("assoc_ty"))] }, input: None }]) }] - > TypeAlias { name: Name(Text("AssocTy")), visibility: RawVisibilityId("pub(self)"), bounds: [Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("Tr"))] }, generic_args: [Some(GenericArgs { args: [Type(Tuple([]))], has_self_type: false, bindings: [] })] })], generic_params: GenericParamsId(4294967295), type_ref: None, ast_id: FileAstId::(8) } - > #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("assoc_const"))] }, input: None }]) }] - > Const { name: Some(Name(Text("CONST"))), visibility: RawVisibilityId("pub(self)"), type_ref: Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("u8"))] }, generic_args: [None] }), ast_id: FileAstId::(9) } - > #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("assoc_method"))] }, input: None }]) }] - > Function { name: Name(Text("method")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: true, is_unsafe: false, 
params: [Reference(Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("Self"))] }, generic_args: [None] }), Shared)], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::(10) } - > #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("assoc_dfl_method"))] }, input: None }]) }] - > Function { name: Name(Text("dfl_method")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: true, is_unsafe: false, params: [Reference(Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("Self"))] }, generic_args: [None] }), Mut)], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::(11) } - #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("struct0"))] }, input: None }]) }] - Struct { name: Name(Text("Struct0")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(1), fields: Unit, ast_id: FileAstId::(3), kind: Unit } - #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("struct1"))] }, input: None }]) }] - Struct { name: Name(Text("Struct1")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(2), fields: Tuple(IdRange::(0..1)), ast_id: FileAstId::(4), kind: Tuple } - #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("struct2"))] }, input: None }]) }] - Struct { name: Name(Text("Struct2")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(3), fields: Record(IdRange::(1..2)), ast_id: FileAstId::(5), kind: Record } - #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("en"))] }, input: None }]) }] - Enum { name: Name(Text("En")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), variants: IdRange::(0..1), ast_id: FileAstId::(6) } - #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: 
[Name(Text("un"))] }, input: None }]) }] - Union { name: Name(Text("Un")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), fields: Record(IdRange::(3..4)), ast_id: FileAstId::(7) } - "##]], - ); -} - -#[test] -fn simple_inner_items() { - check( - r" - impl D for Response { - fn foo() { - end(); - fn end() { - let _x: T = loop {}; - } - } - } - ", - expect![[r#" - inner attrs: Attrs { entries: None } - - top-level items: - Impl { generic_params: GenericParamsId(0), target_trait: Some(Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("D"))] }, generic_args: [None] })), target_type: Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("Response"))] }, generic_args: [Some(GenericArgs { args: [Type(Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("T"))] }, generic_args: [None] }))], has_self_type: false, bindings: [] })] }), is_negative: false, items: [Function(Idx::(1))], ast_id: FileAstId::(0) } - > Function { name: Name(Text("foo")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::(1) } - - inner items: - - for AST FileAstId::(2): - Function { name: Name(Text("end")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(1), has_self_param: false, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::(2) } - - "#]], - ); -} - -#[test] -fn extern_attrs() { - check( - r#" - #[block_attr] - extern "C" { - #[attr_a] - fn a() {} - #[attr_b] - fn b() {} - } - "#, - expect![[r##" - inner attrs: Attrs { entries: None } - - top-level items: - #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr_a"))] }, input: None }, Attr { path: ModPath { kind: Plain, segments: [Name(Text("block_attr"))] }, input: 
None }]) }] - Function { name: Name(Text("a")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: true, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::(1) } - #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr_b"))] }, input: None }, Attr { path: ModPath { kind: Plain, segments: [Name(Text("block_attr"))] }, input: None }]) }] - Function { name: Name(Text("b")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: true, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::(2) } - "##]], - ); -} - -#[test] -fn trait_attrs() { - check( - r#" - #[trait_attr] - trait Tr { - #[attr_a] - fn a() {} - #[attr_b] - fn b() {} - } - "#, - expect![[r##" - inner attrs: Attrs { entries: None } - - top-level items: - #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("trait_attr"))] }, input: None }]) }] - Trait { name: Name(Text("Tr")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(0), auto: false, items: [Function(Idx::(0)), Function(Idx::(1))], ast_id: FileAstId::(0) } - > #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr_a"))] }, input: None }]) }] - > Function { name: Name(Text("a")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::(1) } - > #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr_b"))] }, input: None }]) }] - > Function { name: Name(Text("b")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::(2) } - "##]], - ); -} - 
-#[test] -fn impl_attrs() { - check( - r#" - #[impl_attr] - impl Ty { - #[attr_a] - fn a() {} - #[attr_b] - fn b() {} - } - "#, - expect![[r##" - inner attrs: Attrs { entries: None } - - top-level items: - #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("impl_attr"))] }, input: None }]) }] - Impl { generic_params: GenericParamsId(4294967295), target_trait: None, target_type: Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("Ty"))] }, generic_args: [None] }), is_negative: false, items: [Function(Idx::(0)), Function(Idx::(1))], ast_id: FileAstId::(0) } - > #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr_a"))] }, input: None }]) }] - > Function { name: Name(Text("a")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::(1) } - > #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr_b"))] }, input: None }]) }] - > Function { name: Name(Text("b")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::(2) } - "##]], - ); -} - -#[test] -fn cursed_inner_items() { - test_inner_items( - r" - struct S(T); - - enum En { - Var1 { - t: [(); { trait Inner {} 0 }], - }, - - Var2([u16; { enum Inner {} 0 }]), - } - - type Ty = [En; { struct Inner; 0 }]; - - impl En { - fn assoc() { - trait InnerTrait {} - struct InnerStruct {} - impl InnerTrait for InnerStruct {} - } - } - - trait Tr { - type AssocTy = [u8; { fn f() {} }]; - - const AssocConst: [u8; { fn f() {} }]; - } - ", - ); -} - -#[test] -fn inner_item_attrs() { - check( - r" - fn foo() { - #[on_inner] - fn inner() {} - } - ", - expect![[r##" - inner attrs: Attrs { entries: None } - - top-level 
items: - Function { name: Name(Text("foo")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::(0) } - - inner items: - - for AST FileAstId::(1): - #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("on_inner"))] }, input: None }]) }] - Function { name: Name(Text("inner")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::(1) } - - "##]], - ); -} - -#[test] -fn assoc_item_macros() { - check( - r" - impl S { - items!(); - } - ", - expect![[r#" - inner attrs: Attrs { entries: None } - - top-level items: - Impl { generic_params: GenericParamsId(4294967295), target_trait: None, target_type: Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("S"))] }, generic_args: [None] }), is_negative: false, items: [MacroCall(Idx::(0))], ast_id: FileAstId::(0) } - > MacroCall { name: None, path: ModPath { kind: Plain, segments: [Name(Text("items"))] }, is_export: false, is_local_inner: false, is_builtin: false, ast_id: FileAstId::(1) } - "#]], - ); -} diff --git a/crates/ra_hir_def/src/keys.rs b/crates/ra_hir_def/src/keys.rs deleted file mode 100644 index 441bdbead8..0000000000 --- a/crates/ra_hir_def/src/keys.rs +++ /dev/null @@ -1,58 +0,0 @@ -//! 
keys to be used with `DynMap` - -use std::marker::PhantomData; - -use hir_expand::{InFile, MacroDefId}; -use ra_syntax::{ast, AstNode, AstPtr}; -use rustc_hash::FxHashMap; - -use crate::{ - dyn_map::{DynMap, Policy}, - ConstId, EnumId, EnumVariantId, FieldId, FunctionId, ImplId, StaticId, StructId, TraitId, - TypeAliasId, TypeParamId, UnionId, -}; - -pub type Key = crate::dyn_map::Key, V, AstPtrPolicy>; - -pub const FUNCTION: Key = Key::new(); -pub const CONST: Key = Key::new(); -pub const STATIC: Key = Key::new(); -pub const TYPE_ALIAS: Key = Key::new(); -pub const IMPL: Key = Key::new(); -pub const TRAIT: Key = Key::new(); -pub const STRUCT: Key = Key::new(); -pub const UNION: Key = Key::new(); -pub const ENUM: Key = Key::new(); - -pub const VARIANT: Key = Key::new(); -pub const TUPLE_FIELD: Key = Key::new(); -pub const RECORD_FIELD: Key = Key::new(); -pub const TYPE_PARAM: Key = Key::new(); - -pub const MACRO: Key = Key::new(); - -/// XXX: AST Nodes and SyntaxNodes have identity equality semantics: nodes are -/// equal if they point to exactly the same object. -/// -/// In general, we do not guarantee that we have exactly one instance of a -/// syntax tree for each file. We probably should add such guarantee, but, for -/// the time being, we will use identity-less AstPtr comparison. -pub struct AstPtrPolicy { - _phantom: PhantomData<(AST, ID)>, -} - -impl Policy for AstPtrPolicy { - type K = InFile; - type V = ID; - fn insert(map: &mut DynMap, key: InFile, value: ID) { - let key = key.as_ref().map(AstPtr::new); - map.map - .entry::>, ID>>() - .or_insert_with(Default::default) - .insert(key, value); - } - fn get<'a>(map: &'a DynMap, key: &InFile) -> Option<&'a ID> { - let key = key.as_ref().map(AstPtr::new); - map.map.get::>, ID>>()?.get(&key) - } -} diff --git a/crates/ra_hir_def/src/lang_item.rs b/crates/ra_hir_def/src/lang_item.rs deleted file mode 100644 index 3516784b8d..0000000000 --- a/crates/ra_hir_def/src/lang_item.rs +++ /dev/null @@ -1,175 +0,0 @@ -//! 
Collects lang items: items marked with `#[lang = "..."]` attribute. -//! -//! This attribute to tell the compiler about semi built-in std library -//! features, such as Fn family of traits. -use std::sync::Arc; - -use ra_prof::profile; -use ra_syntax::SmolStr; -use rustc_hash::FxHashMap; - -use crate::{ - db::DefDatabase, AdtId, AttrDefId, CrateId, EnumId, FunctionId, ImplId, ModuleDefId, ModuleId, - StaticId, StructId, TraitId, -}; - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub enum LangItemTarget { - EnumId(EnumId), - FunctionId(FunctionId), - ImplDefId(ImplId), - StaticId(StaticId), - StructId(StructId), - TraitId(TraitId), -} - -impl LangItemTarget { - pub fn as_enum(self) -> Option { - match self { - LangItemTarget::EnumId(id) => Some(id), - _ => None, - } - } - - pub fn as_function(self) -> Option { - match self { - LangItemTarget::FunctionId(id) => Some(id), - _ => None, - } - } - - pub fn as_impl_def(self) -> Option { - match self { - LangItemTarget::ImplDefId(id) => Some(id), - _ => None, - } - } - - pub fn as_static(self) -> Option { - match self { - LangItemTarget::StaticId(id) => Some(id), - _ => None, - } - } - - pub fn as_struct(self) -> Option { - match self { - LangItemTarget::StructId(id) => Some(id), - _ => None, - } - } - - pub fn as_trait(self) -> Option { - match self { - LangItemTarget::TraitId(id) => Some(id), - _ => None, - } - } -} - -#[derive(Default, Debug, Clone, PartialEq, Eq)] -pub struct LangItems { - items: FxHashMap, -} - -impl LangItems { - pub fn target(&self, item: &str) -> Option { - self.items.get(item).copied() - } - - /// Salsa query. This will look for lang items in a specific crate. 
- pub(crate) fn crate_lang_items_query(db: &dyn DefDatabase, krate: CrateId) -> Arc { - let _p = profile("crate_lang_items_query"); - - let mut lang_items = LangItems::default(); - - let crate_def_map = db.crate_def_map(krate); - - crate_def_map - .modules - .iter() - .filter_map(|(local_id, _)| db.module_lang_items(ModuleId { krate, local_id })) - .for_each(|it| lang_items.items.extend(it.items.iter().map(|(k, v)| (k.clone(), *v)))); - - Arc::new(lang_items) - } - - pub(crate) fn module_lang_items_query( - db: &dyn DefDatabase, - module: ModuleId, - ) -> Option> { - let _p = profile("module_lang_items_query"); - let mut lang_items = LangItems::default(); - lang_items.collect_lang_items(db, module); - if lang_items.items.is_empty() { - None - } else { - Some(Arc::new(lang_items)) - } - } - - /// Salsa query. Look for a lang item, starting from the specified crate and recursively - /// traversing its dependencies. - pub(crate) fn lang_item_query( - db: &dyn DefDatabase, - start_crate: CrateId, - item: SmolStr, - ) -> Option { - let _p = profile("lang_item_query"); - let lang_items = db.crate_lang_items(start_crate); - let start_crate_target = lang_items.items.get(&item); - if let Some(target) = start_crate_target { - return Some(*target); - } - db.crate_graph()[start_crate] - .dependencies - .iter() - .find_map(|dep| db.lang_item(dep.crate_id, item.clone())) - } - - fn collect_lang_items(&mut self, db: &dyn DefDatabase, module: ModuleId) { - // Look for impl targets - let def_map = db.crate_def_map(module.krate); - let module_data = &def_map[module.local_id]; - for impl_def in module_data.scope.impls() { - self.collect_lang_item(db, impl_def, LangItemTarget::ImplDefId) - } - - for def in module_data.scope.declarations() { - match def { - ModuleDefId::TraitId(trait_) => { - self.collect_lang_item(db, trait_, LangItemTarget::TraitId) - } - ModuleDefId::AdtId(AdtId::EnumId(e)) => { - self.collect_lang_item(db, e, LangItemTarget::EnumId) - } - 
ModuleDefId::AdtId(AdtId::StructId(s)) => { - self.collect_lang_item(db, s, LangItemTarget::StructId) - } - ModuleDefId::FunctionId(f) => { - self.collect_lang_item(db, f, LangItemTarget::FunctionId) - } - ModuleDefId::StaticId(s) => self.collect_lang_item(db, s, LangItemTarget::StaticId), - _ => {} - } - } - } - - fn collect_lang_item( - &mut self, - db: &dyn DefDatabase, - item: T, - constructor: fn(T) -> LangItemTarget, - ) where - T: Into + Copy, - { - if let Some(lang_item_name) = lang_attr(db, item) { - self.items.entry(lang_item_name).or_insert_with(|| constructor(item)); - } - } -} - -pub fn lang_attr(db: &dyn DefDatabase, item: impl Into + Copy) -> Option { - let attrs = db.attrs(item.into()); - attrs.by_key("lang").string_value().cloned() -} diff --git a/crates/ra_hir_def/src/lib.rs b/crates/ra_hir_def/src/lib.rs deleted file mode 100644 index 237b1038af..0000000000 --- a/crates/ra_hir_def/src/lib.rs +++ /dev/null @@ -1,541 +0,0 @@ -//! `hir_def` crate contains everything between macro expansion and type -//! inference. -//! -//! It defines various items (structs, enums, traits) which comprises Rust code, -//! as well as an algorithm for resolving paths to such entities. -//! -//! Note that `hir_def` is a work in progress, so not all of the above is -//! actually true. - -#[allow(unused)] -macro_rules! 
eprintln { - ($($tt:tt)*) => { stdx::eprintln!($($tt)*) }; -} - -pub mod db; - -pub mod attr; -pub mod path; -pub mod type_ref; -pub mod builtin_type; -pub mod diagnostics; -pub mod per_ns; -pub mod item_scope; - -pub mod dyn_map; -pub mod keys; - -pub mod item_tree; - -pub mod adt; -pub mod data; -pub mod generics; -pub mod lang_item; -pub mod docs; - -pub mod expr; -pub mod body; -pub mod resolver; - -mod trace; -pub mod nameres; - -pub mod src; -pub mod child_by_source; - -pub mod visibility; -pub mod find_path; -pub mod import_map; - -#[cfg(test)] -mod test_db; - -use std::hash::{Hash, Hasher}; - -use hir_expand::{ - ast_id_map::FileAstId, eager::expand_eager_macro, hygiene::Hygiene, AstId, HirFileId, InFile, - MacroCallId, MacroCallKind, MacroDefId, MacroDefKind, -}; -use ra_arena::Idx; -use ra_db::{impl_intern_key, salsa, CrateId}; -use ra_syntax::ast; - -use crate::builtin_type::BuiltinType; -use item_tree::{ - Const, Enum, Function, Impl, ItemTreeId, ItemTreeNode, ModItem, Static, Struct, Trait, - TypeAlias, Union, -}; -use stdx::impl_from; - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct ModuleId { - pub krate: CrateId, - pub local_id: LocalModuleId, -} - -/// An ID of a module, **local** to a specific crate -pub type LocalModuleId = Idx; - -#[derive(Debug)] -pub struct ItemLoc { - pub container: ContainerId, - pub id: ItemTreeId, -} - -impl Clone for ItemLoc { - fn clone(&self) -> Self { - Self { container: self.container, id: self.id } - } -} - -impl Copy for ItemLoc {} - -impl PartialEq for ItemLoc { - fn eq(&self, other: &Self) -> bool { - self.container == other.container && self.id == other.id - } -} - -impl Eq for ItemLoc {} - -impl Hash for ItemLoc { - fn hash(&self, state: &mut H) { - self.container.hash(state); - self.id.hash(state); - } -} - -#[derive(Debug)] -pub struct AssocItemLoc { - pub container: AssocContainerId, - pub id: ItemTreeId, -} - -impl Clone for AssocItemLoc { - fn clone(&self) -> Self { - Self { container: 
self.container, id: self.id } - } -} - -impl Copy for AssocItemLoc {} - -impl PartialEq for AssocItemLoc { - fn eq(&self, other: &Self) -> bool { - self.container == other.container && self.id == other.id - } -} - -impl Eq for AssocItemLoc {} - -impl Hash for AssocItemLoc { - fn hash(&self, state: &mut H) { - self.container.hash(state); - self.id.hash(state); - } -} - -macro_rules! impl_intern { - ($id:ident, $loc:ident, $intern:ident, $lookup:ident) => { - impl_intern_key!($id); - - impl Intern for $loc { - type ID = $id; - fn intern(self, db: &dyn db::DefDatabase) -> $id { - db.$intern(self) - } - } - - impl Lookup for $id { - type Data = $loc; - fn lookup(&self, db: &dyn db::DefDatabase) -> $loc { - db.$lookup(*self) - } - } - }; -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct FunctionId(salsa::InternId); -type FunctionLoc = AssocItemLoc; -impl_intern!(FunctionId, FunctionLoc, intern_function, lookup_intern_function); - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] -pub struct StructId(salsa::InternId); -type StructLoc = ItemLoc; -impl_intern!(StructId, StructLoc, intern_struct, lookup_intern_struct); - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] -pub struct UnionId(salsa::InternId); -pub type UnionLoc = ItemLoc; -impl_intern!(UnionId, UnionLoc, intern_union, lookup_intern_union); - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] -pub struct EnumId(salsa::InternId); -pub type EnumLoc = ItemLoc; -impl_intern!(EnumId, EnumLoc, intern_enum, lookup_intern_enum); - -// FIXME: rename to `VariantId`, only enums can ave variants -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct EnumVariantId { - pub parent: EnumId, - pub local_id: LocalEnumVariantId, -} - -pub type LocalEnumVariantId = Idx; - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct FieldId { - pub parent: VariantId, - pub local_id: LocalFieldId, -} - -pub type LocalFieldId = Idx; - 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct ConstId(salsa::InternId); -type ConstLoc = AssocItemLoc; -impl_intern!(ConstId, ConstLoc, intern_const, lookup_intern_const); - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct StaticId(salsa::InternId); -pub type StaticLoc = ItemLoc; -impl_intern!(StaticId, StaticLoc, intern_static, lookup_intern_static); - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct TraitId(salsa::InternId); -pub type TraitLoc = ItemLoc; -impl_intern!(TraitId, TraitLoc, intern_trait, lookup_intern_trait); - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct TypeAliasId(salsa::InternId); -type TypeAliasLoc = AssocItemLoc; -impl_intern!(TypeAliasId, TypeAliasLoc, intern_type_alias, lookup_intern_type_alias); - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] -pub struct ImplId(salsa::InternId); -type ImplLoc = ItemLoc; -impl_intern!(ImplId, ImplLoc, intern_impl, lookup_intern_impl); - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct TypeParamId { - pub parent: GenericDefId, - pub local_id: LocalTypeParamId, -} - -pub type LocalTypeParamId = Idx; - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub enum ContainerId { - ModuleId(ModuleId), - DefWithBodyId(DefWithBodyId), -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub enum AssocContainerId { - ContainerId(ContainerId), - ImplId(ImplId), - TraitId(TraitId), -} -impl_from!(ContainerId for AssocContainerId); - -/// A Data Type -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)] -pub enum AdtId { - StructId(StructId), - UnionId(UnionId), - EnumId(EnumId), -} -impl_from!(StructId, UnionId, EnumId for AdtId); - -/// The defs which can be visible in the module. -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub enum ModuleDefId { - ModuleId(ModuleId), - FunctionId(FunctionId), - AdtId(AdtId), - // Can't be directly declared, but can be imported. 
- EnumVariantId(EnumVariantId), - ConstId(ConstId), - StaticId(StaticId), - TraitId(TraitId), - TypeAliasId(TypeAliasId), - BuiltinType(BuiltinType), -} -impl_from!( - ModuleId, - FunctionId, - AdtId(StructId, EnumId, UnionId), - EnumVariantId, - ConstId, - StaticId, - TraitId, - TypeAliasId, - BuiltinType - for ModuleDefId -); - -/// The defs which have a body. -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub enum DefWithBodyId { - FunctionId(FunctionId), - StaticId(StaticId), - ConstId(ConstId), -} - -impl_from!(FunctionId, ConstId, StaticId for DefWithBodyId); - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub enum AssocItemId { - FunctionId(FunctionId), - ConstId(ConstId), - TypeAliasId(TypeAliasId), -} -// FIXME: not every function, ... is actually an assoc item. maybe we should make -// sure that you can only turn actual assoc items into AssocItemIds. This would -// require not implementing From, and instead having some checked way of -// casting them, and somehow making the constructors private, which would be annoying. -impl_from!(FunctionId, ConstId, TypeAliasId for AssocItemId); - -#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)] -pub enum GenericDefId { - FunctionId(FunctionId), - AdtId(AdtId), - TraitId(TraitId), - TypeAliasId(TypeAliasId), - ImplId(ImplId), - // enum variants cannot have generics themselves, but their parent enums - // can, and this makes some code easier to write - EnumVariantId(EnumVariantId), - // consts can have type parameters from their parents (i.e. 
associated consts of traits) - ConstId(ConstId), -} -impl_from!( - FunctionId, - AdtId(StructId, EnumId, UnionId), - TraitId, - TypeAliasId, - ImplId, - EnumVariantId, - ConstId - for GenericDefId -); - -impl From for GenericDefId { - fn from(item: AssocItemId) -> Self { - match item { - AssocItemId::FunctionId(f) => f.into(), - AssocItemId::ConstId(c) => c.into(), - AssocItemId::TypeAliasId(t) => t.into(), - } - } -} - -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] -pub enum AttrDefId { - ModuleId(ModuleId), - FieldId(FieldId), - AdtId(AdtId), - FunctionId(FunctionId), - EnumVariantId(EnumVariantId), - StaticId(StaticId), - ConstId(ConstId), - TraitId(TraitId), - TypeAliasId(TypeAliasId), - MacroDefId(MacroDefId), - ImplId(ImplId), -} - -impl_from!( - ModuleId, - FieldId, - AdtId(StructId, EnumId, UnionId), - EnumVariantId, - StaticId, - ConstId, - FunctionId, - TraitId, - TypeAliasId, - MacroDefId, - ImplId - for AttrDefId -); - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub enum VariantId { - EnumVariantId(EnumVariantId), - StructId(StructId), - UnionId(UnionId), -} -impl_from!(EnumVariantId, StructId, UnionId for VariantId); - -trait Intern { - type ID; - fn intern(self, db: &dyn db::DefDatabase) -> Self::ID; -} - -pub trait Lookup { - type Data; - fn lookup(&self, db: &dyn db::DefDatabase) -> Self::Data; -} - -pub trait HasModule { - fn module(&self, db: &dyn db::DefDatabase) -> ModuleId; -} - -impl HasModule for ContainerId { - fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { - match *self { - ContainerId::ModuleId(it) => it, - ContainerId::DefWithBodyId(it) => it.module(db), - } - } -} - -impl HasModule for AssocContainerId { - fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { - match *self { - AssocContainerId::ContainerId(it) => it.module(db), - AssocContainerId::ImplId(it) => it.lookup(db).container.module(db), - AssocContainerId::TraitId(it) => it.lookup(db).container.module(db), - } - } -} - -impl HasModule for AssocItemLoc 
{ - fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { - self.container.module(db) - } -} - -impl HasModule for AdtId { - fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { - match self { - AdtId::StructId(it) => it.lookup(db).container, - AdtId::UnionId(it) => it.lookup(db).container, - AdtId::EnumId(it) => it.lookup(db).container, - } - .module(db) - } -} - -impl HasModule for DefWithBodyId { - fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { - match self { - DefWithBodyId::FunctionId(it) => it.lookup(db).module(db), - DefWithBodyId::StaticId(it) => it.lookup(db).module(db), - DefWithBodyId::ConstId(it) => it.lookup(db).module(db), - } - } -} - -impl DefWithBodyId { - pub fn as_mod_item(self, db: &dyn db::DefDatabase) -> ModItem { - match self { - DefWithBodyId::FunctionId(it) => it.lookup(db).id.value.into(), - DefWithBodyId::StaticId(it) => it.lookup(db).id.value.into(), - DefWithBodyId::ConstId(it) => it.lookup(db).id.value.into(), - } - } -} - -impl HasModule for GenericDefId { - fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { - match self { - GenericDefId::FunctionId(it) => it.lookup(db).module(db), - GenericDefId::AdtId(it) => it.module(db), - GenericDefId::TraitId(it) => it.lookup(db).container.module(db), - GenericDefId::TypeAliasId(it) => it.lookup(db).module(db), - GenericDefId::ImplId(it) => it.lookup(db).container.module(db), - GenericDefId::EnumVariantId(it) => it.parent.lookup(db).container.module(db), - GenericDefId::ConstId(it) => it.lookup(db).module(db), - } - } -} - -impl HasModule for StaticLoc { - fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { - self.container.module(db) - } -} - -/// A helper trait for converting to MacroCallId -pub trait AsMacroCall { - fn as_call_id( - &self, - db: &dyn db::DefDatabase, - krate: CrateId, - resolver: impl Fn(path::ModPath) -> Option, - ) -> Option; -} - -impl AsMacroCall for InFile<&ast::MacroCall> { - fn as_call_id( - &self, - db: &dyn db::DefDatabase, - krate: 
CrateId, - resolver: impl Fn(path::ModPath) -> Option, - ) -> Option { - let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(self.value)); - let h = Hygiene::new(db.upcast(), self.file_id); - let path = path::ModPath::from_src(self.value.path()?, &h)?; - - AstIdWithPath::new(ast_id.file_id, ast_id.value, path).as_call_id(db, krate, resolver) - } -} - -/// Helper wrapper for `AstId` with `ModPath` -#[derive(Clone, Debug, Eq, PartialEq)] -struct AstIdWithPath { - pub ast_id: AstId, - pub path: path::ModPath, -} - -impl AstIdWithPath { - pub fn new(file_id: HirFileId, ast_id: FileAstId, path: path::ModPath) -> AstIdWithPath { - AstIdWithPath { ast_id: AstId::new(file_id, ast_id), path } - } -} - -impl AsMacroCall for AstIdWithPath { - fn as_call_id( - &self, - db: &dyn db::DefDatabase, - krate: CrateId, - resolver: impl Fn(path::ModPath) -> Option, - ) -> Option { - let def: MacroDefId = resolver(self.path.clone())?; - - if let MacroDefKind::BuiltInEager(_) = def.kind { - let macro_call = InFile::new(self.ast_id.file_id, self.ast_id.to_node(db.upcast())); - let hygiene = Hygiene::new(db.upcast(), self.ast_id.file_id); - - Some( - expand_eager_macro(db.upcast(), krate, macro_call, def, &|path: ast::Path| { - resolver(path::ModPath::from_src(path, &hygiene)?) - })? 
- .into(), - ) - } else { - Some(def.as_lazy_macro(db.upcast(), krate, MacroCallKind::FnLike(self.ast_id)).into()) - } - } -} - -impl AsMacroCall for AstIdWithPath { - fn as_call_id( - &self, - db: &dyn db::DefDatabase, - krate: CrateId, - resolver: impl Fn(path::ModPath) -> Option, - ) -> Option { - let def = resolver(self.path.clone())?; - Some( - def.as_lazy_macro( - db.upcast(), - krate, - MacroCallKind::Attr(self.ast_id, self.path.segments.last()?.to_string()), - ) - .into(), - ) - } -} diff --git a/crates/ra_hir_def/src/nameres.rs b/crates/ra_hir_def/src/nameres.rs deleted file mode 100644 index 3d9b55a73c..0000000000 --- a/crates/ra_hir_def/src/nameres.rs +++ /dev/null @@ -1,327 +0,0 @@ -//! This module implements import-resolution/macro expansion algorithm. -//! -//! The result of this module is `CrateDefMap`: a data structure which contains: -//! -//! * a tree of modules for the crate -//! * for each module, a set of items visible in the module (directly declared -//! or imported) -//! -//! Note that `CrateDefMap` contains fully macro expanded code. -//! -//! Computing `CrateDefMap` can be partitioned into several logically -//! independent "phases". The phases are mutually recursive though, there's no -//! strict ordering. -//! -//! ## Collecting RawItems -//! -//! This happens in the `raw` module, which parses a single source file into a -//! set of top-level items. Nested imports are desugared to flat imports in this -//! phase. Macro calls are represented as a triple of (Path, Option, -//! TokenTree). -//! -//! ## Collecting Modules -//! -//! This happens in the `collector` module. In this phase, we recursively walk -//! tree of modules, collect raw items from submodules, populate module scopes -//! with defined items (so, we assign item ids in this phase) and record the set -//! of unresolved imports and macros. -//! -//! While we walk tree of modules, we also record macro_rules definitions and -//! expand calls to macro_rules defined macros. -//! 
-//! ## Resolving Imports -//! -//! We maintain a list of currently unresolved imports. On every iteration, we -//! try to resolve some imports from this list. If the import is resolved, we -//! record it, by adding an item to current module scope and, if necessary, by -//! recursively populating glob imports. -//! -//! ## Resolving Macros -//! -//! macro_rules from the same crate use a global mutable namespace. We expand -//! them immediately, when we collect modules. -//! -//! Macros from other crates (including proc-macros) can be used with -//! `foo::bar!` syntax. We handle them similarly to imports. There's a list of -//! unexpanded macros. On every iteration, we try to resolve each macro call -//! path and, upon success, we run macro expansion and "collect module" phase on -//! the result - -mod collector; -mod mod_resolution; -mod path_resolution; - -#[cfg(test)] -mod tests; - -use std::sync::Arc; - -use hir_expand::{diagnostics::DiagnosticSink, name::Name, InFile}; -use ra_arena::Arena; -use ra_db::{CrateId, Edition, FileId}; -use ra_prof::profile; -use ra_syntax::ast; -use rustc_hash::FxHashMap; -use stdx::format_to; - -use crate::{ - db::DefDatabase, - item_scope::{BuiltinShadowMode, ItemScope}, - nameres::{diagnostics::DefDiagnostic, path_resolution::ResolveMode}, - path::ModPath, - per_ns::PerNs, - AstId, LocalModuleId, ModuleDefId, ModuleId, -}; - -/// Contains all top-level defs from a macro-expanded crate -#[derive(Debug, PartialEq, Eq)] -pub struct CrateDefMap { - pub root: LocalModuleId, - pub modules: Arena, - pub(crate) krate: CrateId, - /// The prelude module for this crate. This either comes from an import - /// marked with the `prelude_import` attribute, or (in the normal case) from - /// a dependency (`std` or `core`). 
- pub(crate) prelude: Option, - pub(crate) extern_prelude: FxHashMap, - - edition: Edition, - diagnostics: Vec, -} - -impl std::ops::Index for CrateDefMap { - type Output = ModuleData; - fn index(&self, id: LocalModuleId) -> &ModuleData { - &self.modules[id] - } -} - -#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] -pub enum ModuleOrigin { - CrateRoot { - definition: FileId, - }, - /// Note that non-inline modules, by definition, live inside non-macro file. - File { - is_mod_rs: bool, - declaration: AstId, - definition: FileId, - }, - Inline { - definition: AstId, - }, -} - -impl Default for ModuleOrigin { - fn default() -> Self { - ModuleOrigin::CrateRoot { definition: FileId(0) } - } -} - -impl ModuleOrigin { - fn declaration(&self) -> Option> { - match self { - ModuleOrigin::File { declaration: module, .. } - | ModuleOrigin::Inline { definition: module, .. } => Some(*module), - ModuleOrigin::CrateRoot { .. } => None, - } - } - - pub fn file_id(&self) -> Option { - match self { - ModuleOrigin::File { definition, .. } | ModuleOrigin::CrateRoot { definition } => { - Some(*definition) - } - _ => None, - } - } - - pub fn is_inline(&self) -> bool { - match self { - ModuleOrigin::Inline { .. } => true, - ModuleOrigin::CrateRoot { .. } | ModuleOrigin::File { .. } => false, - } - } - - /// Returns a node which defines this module. - /// That is, a file or a `mod foo {}` with items. - fn definition_source(&self, db: &dyn DefDatabase) -> InFile { - match self { - ModuleOrigin::File { definition, .. 
} | ModuleOrigin::CrateRoot { definition } => { - let file_id = *definition; - let sf = db.parse(file_id).tree(); - InFile::new(file_id.into(), ModuleSource::SourceFile(sf)) - } - ModuleOrigin::Inline { definition } => InFile::new( - definition.file_id, - ModuleSource::Module(definition.to_node(db.upcast())), - ), - } - } -} - -#[derive(Default, Debug, PartialEq, Eq)] -pub struct ModuleData { - pub parent: Option, - pub children: FxHashMap, - pub scope: ItemScope, - - /// Where does this module come from? - pub origin: ModuleOrigin, -} - -impl CrateDefMap { - pub(crate) fn crate_def_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc { - let _p = profile("crate_def_map_query").detail(|| { - db.crate_graph()[krate] - .display_name - .as_ref() - .map(ToString::to_string) - .unwrap_or_default() - }); - let def_map = { - let edition = db.crate_graph()[krate].edition; - let mut modules: Arena = Arena::default(); - let root = modules.alloc(ModuleData::default()); - CrateDefMap { - krate, - edition, - extern_prelude: FxHashMap::default(), - prelude: None, - root, - modules, - diagnostics: Vec::new(), - } - }; - let def_map = collector::collect_defs(db, def_map); - Arc::new(def_map) - } - - pub fn add_diagnostics( - &self, - db: &dyn DefDatabase, - module: LocalModuleId, - sink: &mut DiagnosticSink, - ) { - self.diagnostics.iter().for_each(|it| it.add_to(db, module, sink)) - } - - pub fn modules_for_file(&self, file_id: FileId) -> impl Iterator + '_ { - self.modules - .iter() - .filter(move |(_id, data)| data.origin.file_id() == Some(file_id)) - .map(|(id, _data)| id) - } - - pub(crate) fn resolve_path( - &self, - db: &dyn DefDatabase, - original_module: LocalModuleId, - path: &ModPath, - shadow: BuiltinShadowMode, - ) -> (PerNs, Option) { - let res = - self.resolve_path_fp_with_macro(db, ResolveMode::Other, original_module, path, shadow); - (res.resolved_def, res.segment_index) - } - - // FIXME: this can use some more human-readable format (ideally, an IR - // even), 
as this should be a great debugging aid. - pub fn dump(&self) -> String { - let mut buf = String::new(); - go(&mut buf, self, "crate", self.root); - return buf; - - fn go(buf: &mut String, map: &CrateDefMap, path: &str, module: LocalModuleId) { - format_to!(buf, "{}\n", path); - - let mut entries: Vec<_> = map.modules[module].scope.resolutions().collect(); - entries.sort_by_key(|(name, _)| name.clone()); - - for (name, def) in entries { - format_to!(buf, "{}:", name.map_or("_".to_string(), |name| name.to_string())); - - if def.types.is_some() { - buf.push_str(" t"); - } - if def.values.is_some() { - buf.push_str(" v"); - } - if def.macros.is_some() { - buf.push_str(" m"); - } - if def.is_none() { - buf.push_str(" _"); - } - - buf.push_str("\n"); - } - - for (name, child) in map.modules[module].children.iter() { - let path = format!("{}::{}", path, name); - buf.push('\n'); - go(buf, map, &path, *child); - } - } - } -} - -impl ModuleData { - /// Returns a node which defines this module. That is, a file or a `mod foo {}` with items. - pub fn definition_source(&self, db: &dyn DefDatabase) -> InFile { - self.origin.definition_source(db) - } - - /// Returns a node which declares this module, either a `mod foo;` or a `mod foo {}`. - /// `None` for the crate root or block. 
- pub fn declaration_source(&self, db: &dyn DefDatabase) -> Option> { - let decl = self.origin.declaration()?; - let value = decl.to_node(db.upcast()); - Some(InFile { file_id: decl.file_id, value }) - } -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum ModuleSource { - SourceFile(ast::SourceFile), - Module(ast::Module), -} - -mod diagnostics { - use hir_expand::diagnostics::DiagnosticSink; - use ra_syntax::{ast, AstPtr}; - - use crate::{db::DefDatabase, diagnostics::UnresolvedModule, nameres::LocalModuleId, AstId}; - - #[derive(Debug, PartialEq, Eq)] - pub(super) enum DefDiagnostic { - UnresolvedModule { - module: LocalModuleId, - declaration: AstId, - candidate: String, - }, - } - - impl DefDiagnostic { - pub(super) fn add_to( - &self, - db: &dyn DefDatabase, - target_module: LocalModuleId, - sink: &mut DiagnosticSink, - ) { - match self { - DefDiagnostic::UnresolvedModule { module, declaration, candidate } => { - if *module != target_module { - return; - } - let decl = declaration.to_node(db.upcast()); - sink.push(UnresolvedModule { - file: declaration.file_id, - decl: AstPtr::new(&decl), - candidate: candidate.clone(), - }) - } - } - } - } -} diff --git a/crates/ra_hir_def/src/nameres/collector.rs b/crates/ra_hir_def/src/nameres/collector.rs deleted file mode 100644 index 28b7a20c55..0000000000 --- a/crates/ra_hir_def/src/nameres/collector.rs +++ /dev/null @@ -1,1279 +0,0 @@ -//! The core of the module-level name resolution algorithm. -//! -//! `DefCollector::collect` contains the fixed-point iteration loop which -//! resolves imports and expands macros. 
- -use hir_expand::{ - ast_id_map::FileAstId, - builtin_derive::find_builtin_derive, - builtin_macro::find_builtin_macro, - name::{name, AsName, Name}, - proc_macro::ProcMacroExpander, - HirFileId, MacroCallId, MacroDefId, MacroDefKind, -}; -use ra_cfg::CfgOptions; -use ra_db::{CrateId, FileId, ProcMacroId}; -use ra_syntax::ast; -use rustc_hash::FxHashMap; -use test_utils::mark; - -use crate::{ - attr::Attrs, - db::DefDatabase, - item_scope::{ImportType, PerNsGlobImports}, - item_tree::{ - self, FileItemTreeId, ItemTree, ItemTreeId, MacroCall, Mod, ModItem, ModKind, StructDefKind, - }, - nameres::{ - diagnostics::DefDiagnostic, mod_resolution::ModDir, path_resolution::ReachedFixedPoint, - BuiltinShadowMode, CrateDefMap, ModuleData, ModuleOrigin, ResolveMode, - }, - path::{ImportAlias, ModPath, PathKind}, - per_ns::PerNs, - visibility::{RawVisibility, Visibility}, - AdtId, AsMacroCall, AstId, AstIdWithPath, ConstLoc, ContainerId, EnumLoc, EnumVariantId, - FunctionLoc, ImplLoc, Intern, LocalModuleId, ModuleDefId, ModuleId, StaticLoc, StructLoc, - TraitLoc, TypeAliasLoc, UnionLoc, -}; - -const GLOB_RECURSION_LIMIT: usize = 100; -const EXPANSION_DEPTH_LIMIT: usize = 128; -const FIXED_POINT_LIMIT: usize = 8192; - -pub(super) fn collect_defs(db: &dyn DefDatabase, mut def_map: CrateDefMap) -> CrateDefMap { - let crate_graph = db.crate_graph(); - - // populate external prelude - for dep in &crate_graph[def_map.krate].dependencies { - log::debug!("crate dep {:?} -> {:?}", dep.name, dep.crate_id); - let dep_def_map = db.crate_def_map(dep.crate_id); - def_map.extern_prelude.insert( - dep.as_name(), - ModuleId { krate: dep.crate_id, local_id: dep_def_map.root }.into(), - ); - - // look for the prelude - // If the dependency defines a prelude, we overwrite an already defined - // prelude. This is necessary to import the "std" prelude if a crate - // depends on both "core" and "std". 
- if dep_def_map.prelude.is_some() { - def_map.prelude = dep_def_map.prelude; - } - } - - let cfg_options = &crate_graph[def_map.krate].cfg_options; - let proc_macros = &crate_graph[def_map.krate].proc_macro; - let proc_macros = proc_macros - .iter() - .enumerate() - .map(|(idx, it)| { - // FIXME: a hacky way to create a Name from string. - let name = tt::Ident { text: it.name.clone(), id: tt::TokenId::unspecified() }; - (name.as_name(), ProcMacroExpander::new(def_map.krate, ProcMacroId(idx as u32))) - }) - .collect(); - - let mut collector = DefCollector { - db, - def_map, - glob_imports: FxHashMap::default(), - unresolved_imports: Vec::new(), - resolved_imports: Vec::new(), - - unexpanded_macros: Vec::new(), - unexpanded_attribute_macros: Vec::new(), - mod_dirs: FxHashMap::default(), - cfg_options, - proc_macros, - from_glob_import: Default::default(), - }; - collector.collect(); - collector.finish() -} - -#[derive(Copy, Clone, Debug, Eq, PartialEq)] -enum PartialResolvedImport { - /// None of any namespaces is resolved - Unresolved, - /// One of namespaces is resolved - Indeterminate(PerNs), - /// All namespaces are resolved, OR it is came from other crate - Resolved(PerNs), -} - -impl PartialResolvedImport { - fn namespaces(&self) -> PerNs { - match self { - PartialResolvedImport::Unresolved => PerNs::none(), - PartialResolvedImport::Indeterminate(ns) => *ns, - PartialResolvedImport::Resolved(ns) => *ns, - } - } -} - -#[derive(Clone, Debug, Eq, PartialEq)] -struct Import { - pub path: ModPath, - pub alias: Option, - pub visibility: RawVisibility, - pub is_glob: bool, - pub is_prelude: bool, - pub is_extern_crate: bool, - pub is_macro_use: bool, -} - -impl Import { - fn from_use(tree: &ItemTree, id: FileItemTreeId) -> Self { - let it = &tree[id]; - let visibility = &tree[it.visibility]; - Self { - path: it.path.clone(), - alias: it.alias.clone(), - visibility: visibility.clone(), - is_glob: it.is_glob, - is_prelude: it.is_prelude, - is_extern_crate: false, - 
is_macro_use: false, - } - } - - fn from_extern_crate(tree: &ItemTree, id: FileItemTreeId) -> Self { - let it = &tree[id]; - let visibility = &tree[it.visibility]; - Self { - path: it.path.clone(), - alias: it.alias.clone(), - visibility: visibility.clone(), - is_glob: false, - is_prelude: false, - is_extern_crate: true, - is_macro_use: it.is_macro_use, - } - } -} - -#[derive(Clone, Debug, Eq, PartialEq)] -struct ImportDirective { - module_id: LocalModuleId, - import: Import, - status: PartialResolvedImport, -} - -#[derive(Clone, Debug, Eq, PartialEq)] -struct MacroDirective { - module_id: LocalModuleId, - ast_id: AstIdWithPath, - legacy: Option, - depth: usize, -} - -#[derive(Clone, Debug, Eq, PartialEq)] -struct DeriveDirective { - module_id: LocalModuleId, - ast_id: AstIdWithPath, -} - -struct DefData<'a> { - id: ModuleDefId, - name: &'a Name, - visibility: &'a RawVisibility, - has_constructor: bool, -} - -/// Walks the tree of module recursively -struct DefCollector<'a> { - db: &'a dyn DefDatabase, - def_map: CrateDefMap, - glob_imports: FxHashMap>, - unresolved_imports: Vec, - resolved_imports: Vec, - unexpanded_macros: Vec, - unexpanded_attribute_macros: Vec, - mod_dirs: FxHashMap, - cfg_options: &'a CfgOptions, - proc_macros: Vec<(Name, ProcMacroExpander)>, - from_glob_import: PerNsGlobImports, -} - -impl DefCollector<'_> { - fn collect(&mut self) { - let file_id = self.db.crate_graph()[self.def_map.krate].root_file_id; - let item_tree = self.db.item_tree(file_id.into()); - let module_id = self.def_map.root; - self.def_map.modules[module_id].origin = ModuleOrigin::CrateRoot { definition: file_id }; - ModCollector { - def_collector: &mut *self, - macro_depth: 0, - module_id, - file_id: file_id.into(), - item_tree: &item_tree, - mod_dir: ModDir::root(), - } - .collect(item_tree.top_level_items()); - - // main name resolution fixed-point loop. 
- let mut i = 0; - loop { - self.db.check_canceled(); - self.resolve_imports(); - - match self.resolve_macros() { - ReachedFixedPoint::Yes => break, - ReachedFixedPoint::No => i += 1, - } - if i == FIXED_POINT_LIMIT { - log::error!("name resolution is stuck"); - break; - } - } - - // Resolve all indeterminate resolved imports again - // As some of the macros will expand newly import shadowing partial resolved imports - // FIXME: We maybe could skip this, if we handle the Indetermine imports in `resolve_imports` - // correctly - let partial_resolved = self.resolved_imports.iter().filter_map(|directive| { - if let PartialResolvedImport::Indeterminate(_) = directive.status { - let mut directive = directive.clone(); - directive.status = PartialResolvedImport::Unresolved; - Some(directive) - } else { - None - } - }); - self.unresolved_imports.extend(partial_resolved); - self.resolve_imports(); - - let unresolved_imports = std::mem::replace(&mut self.unresolved_imports, Vec::new()); - // show unresolved imports in completion, etc - for directive in unresolved_imports { - self.record_resolved_import(&directive) - } - - // Record proc-macros - self.collect_proc_macro(); - } - - fn collect_proc_macro(&mut self) { - let proc_macros = std::mem::take(&mut self.proc_macros); - for (name, expander) in proc_macros { - let krate = self.def_map.krate; - - let macro_id = MacroDefId { - ast_id: None, - krate: Some(krate), - kind: MacroDefKind::CustomDerive(expander), - local_inner: false, - }; - - self.define_proc_macro(name.clone(), macro_id); - } - } - - /// Define a macro with `macro_rules`. - /// - /// It will define the macro in legacy textual scope, and if it has `#[macro_export]`, - /// then it is also defined in the root module scope. - /// You can `use` or invoke it by `crate::macro_name` anywhere, before or after the definition. - /// - /// It is surprising that the macro will never be in the current module scope. 
- /// These code fails with "unresolved import/macro", - /// ```rust,compile_fail - /// mod m { macro_rules! foo { () => {} } } - /// use m::foo as bar; - /// ``` - /// - /// ```rust,compile_fail - /// macro_rules! foo { () => {} } - /// self::foo!(); - /// crate::foo!(); - /// ``` - /// - /// Well, this code compiles, because the plain path `foo` in `use` is searched - /// in the legacy textual scope only. - /// ```rust - /// macro_rules! foo { () => {} } - /// use foo as bar; - /// ``` - fn define_macro( - &mut self, - module_id: LocalModuleId, - name: Name, - macro_: MacroDefId, - export: bool, - ) { - // Textual scoping - self.define_legacy_macro(module_id, name.clone(), macro_); - - // Module scoping - // In Rust, `#[macro_export]` macros are unconditionally visible at the - // crate root, even if the parent modules is **not** visible. - if export { - self.update( - self.def_map.root, - &[(Some(name), PerNs::macros(macro_, Visibility::Public))], - Visibility::Public, - ImportType::Named, - ); - } - } - - /// Define a legacy textual scoped macro in module - /// - /// We use a map `legacy_macros` to store all legacy textual scoped macros visible per module. - /// It will clone all macros from parent legacy scope, whose definition is prior to - /// the definition of current module. - /// And also, `macro_use` on a module will import all legacy macros visible inside to - /// current legacy scope, with possible shadowing. - fn define_legacy_macro(&mut self, module_id: LocalModuleId, name: Name, mac: MacroDefId) { - // Always shadowing - self.def_map.modules[module_id].scope.define_legacy_macro(name, mac); - } - - /// Define a proc macro - /// - /// A proc macro is similar to normal macro scope, but it would not visiable in legacy textual scoped. - /// And unconditionally exported. 
- fn define_proc_macro(&mut self, name: Name, macro_: MacroDefId) { - self.update( - self.def_map.root, - &[(Some(name), PerNs::macros(macro_, Visibility::Public))], - Visibility::Public, - ImportType::Named, - ); - } - - /// Import macros from `#[macro_use] extern crate`. - fn import_macros_from_extern_crate( - &mut self, - current_module_id: LocalModuleId, - import: &item_tree::ExternCrate, - ) { - log::debug!( - "importing macros from extern crate: {:?} ({:?})", - import, - self.def_map.edition, - ); - - let res = self.def_map.resolve_name_in_extern_prelude( - &import - .path - .as_ident() - .expect("extern crate should have been desugared to one-element path"), - ); - - if let Some(ModuleDefId::ModuleId(m)) = res.take_types() { - mark::hit!(macro_rules_from_other_crates_are_visible_with_macro_use); - self.import_all_macros_exported(current_module_id, m.krate); - } - } - - /// Import all exported macros from another crate - /// - /// Exported macros are just all macros in the root module scope. - /// Note that it contains not only all `#[macro_export]` macros, but also all aliases - /// created by `use` in the root module, ignoring the visibility of `use`. - fn import_all_macros_exported(&mut self, current_module_id: LocalModuleId, krate: CrateId) { - let def_map = self.db.crate_def_map(krate); - for (name, def) in def_map[def_map.root].scope.macros() { - // `macro_use` only bring things into legacy scope. - self.define_legacy_macro(current_module_id, name.clone(), def); - } - } - - /// Import resolution - /// - /// This is a fix point algorithm. 
We resolve imports until no forward - /// progress in resolving imports is made - fn resolve_imports(&mut self) { - let mut n_previous_unresolved = self.unresolved_imports.len() + 1; - - while self.unresolved_imports.len() < n_previous_unresolved { - n_previous_unresolved = self.unresolved_imports.len(); - let imports = std::mem::replace(&mut self.unresolved_imports, Vec::new()); - for mut directive in imports { - directive.status = self.resolve_import(directive.module_id, &directive.import); - match directive.status { - PartialResolvedImport::Indeterminate(_) => { - self.record_resolved_import(&directive); - // FIXME: For avoid performance regression, - // we consider an imported resolved if it is indeterminate (i.e not all namespace resolved) - self.resolved_imports.push(directive) - } - PartialResolvedImport::Resolved(_) => { - self.record_resolved_import(&directive); - self.resolved_imports.push(directive) - } - PartialResolvedImport::Unresolved => { - self.unresolved_imports.push(directive); - } - } - } - } - } - - fn resolve_import(&self, module_id: LocalModuleId, import: &Import) -> PartialResolvedImport { - log::debug!("resolving import: {:?} ({:?})", import, self.def_map.edition); - if import.is_extern_crate { - let res = self.def_map.resolve_name_in_extern_prelude( - &import - .path - .as_ident() - .expect("extern crate should have been desugared to one-element path"), - ); - PartialResolvedImport::Resolved(res) - } else { - let res = self.def_map.resolve_path_fp_with_macro( - self.db, - ResolveMode::Import, - module_id, - &import.path, - BuiltinShadowMode::Module, - ); - - let def = res.resolved_def; - if res.reached_fixedpoint == ReachedFixedPoint::No || def.is_none() { - return PartialResolvedImport::Unresolved; - } - - if let Some(krate) = res.krate { - if krate != self.def_map.krate { - return PartialResolvedImport::Resolved(def); - } - } - - // Check whether all namespace is resolved - if def.take_types().is_some() - && def.take_values().is_some() - 
&& def.take_macros().is_some() - { - PartialResolvedImport::Resolved(def) - } else { - PartialResolvedImport::Indeterminate(def) - } - } - } - - fn record_resolved_import(&mut self, directive: &ImportDirective) { - let module_id = directive.module_id; - let import = &directive.import; - let def = directive.status.namespaces(); - let vis = self - .def_map - .resolve_visibility(self.db, module_id, &directive.import.visibility) - .unwrap_or(Visibility::Public); - - if import.is_glob { - log::debug!("glob import: {:?}", import); - match def.take_types() { - Some(ModuleDefId::ModuleId(m)) => { - if import.is_prelude { - mark::hit!(std_prelude); - self.def_map.prelude = Some(m); - } else if m.krate != self.def_map.krate { - mark::hit!(glob_across_crates); - // glob import from other crate => we can just import everything once - let item_map = self.db.crate_def_map(m.krate); - let scope = &item_map[m.local_id].scope; - - // Module scoped macros is included - let items = scope - .resolutions() - // only keep visible names... - .map(|(n, res)| { - (n, res.filter_visibility(|v| v.is_visible_from_other_crate())) - }) - .filter(|(_, res)| !res.is_none()) - .collect::>(); - - self.update(module_id, &items, vis, ImportType::Glob); - } else { - // glob import from same crate => we do an initial - // import, and then need to propagate any further - // additions - let scope = &self.def_map[m.local_id].scope; - - // Module scoped macros is included - let items = scope - .resolutions() - // only keep visible names... 
- .map(|(n, res)| { - ( - n, - res.filter_visibility(|v| { - v.is_visible_from_def_map(&self.def_map, module_id) - }), - ) - }) - .filter(|(_, res)| !res.is_none()) - .collect::>(); - - self.update(module_id, &items, vis, ImportType::Glob); - // record the glob import in case we add further items - let glob = self.glob_imports.entry(m.local_id).or_default(); - if !glob.iter().any(|(mid, _)| *mid == module_id) { - glob.push((module_id, vis)); - } - } - } - Some(ModuleDefId::AdtId(AdtId::EnumId(e))) => { - mark::hit!(glob_enum); - // glob import from enum => just import all the variants - - // XXX: urgh, so this works by accident! Here, we look at - // the enum data, and, in theory, this might require us to - // look back at the crate_def_map, creating a cycle. For - // example, `enum E { crate::some_macro!(); }`. Luckely, the - // only kind of macro that is allowed inside enum is a - // `cfg_macro`, and we don't need to run name resolution for - // it, but this is sheer luck! - let enum_data = self.db.enum_data(e); - let resolutions = enum_data - .variants - .iter() - .map(|(local_id, variant_data)| { - let name = variant_data.name.clone(); - let variant = EnumVariantId { parent: e, local_id }; - let res = PerNs::both(variant.into(), variant.into(), vis); - (Some(name), res) - }) - .collect::>(); - self.update(module_id, &resolutions, vis, ImportType::Glob); - } - Some(d) => { - log::debug!("glob import {:?} from non-module/enum {:?}", import, d); - } - None => { - log::debug!("glob import {:?} didn't resolve as type", import); - } - } - } else { - match import.path.segments.last() { - Some(last_segment) => { - let name = match &import.alias { - Some(ImportAlias::Alias(name)) => Some(name.clone()), - Some(ImportAlias::Underscore) => None, - None => Some(last_segment.clone()), - }; - log::debug!("resolved import {:?} ({:?}) to {:?}", name, import, def); - - // extern crates in the crate root are special-cased to insert entries into the extern prelude: 
rust-lang/rust#54658 - if import.is_extern_crate && module_id == self.def_map.root { - if let (Some(def), Some(name)) = (def.take_types(), name.as_ref()) { - self.def_map.extern_prelude.insert(name.clone(), def); - } - } - - self.update(module_id, &[(name, def)], vis, ImportType::Named); - } - None => mark::hit!(bogus_paths), - } - } - } - - fn update( - &mut self, - module_id: LocalModuleId, - resolutions: &[(Option, PerNs)], - vis: Visibility, - import_type: ImportType, - ) { - self.db.check_canceled(); - self.update_recursive(module_id, resolutions, vis, import_type, 0) - } - - fn update_recursive( - &mut self, - module_id: LocalModuleId, - resolutions: &[(Option, PerNs)], - // All resolutions are imported with this visibility; the visibilies in - // the `PerNs` values are ignored and overwritten - vis: Visibility, - import_type: ImportType, - depth: usize, - ) { - if depth > GLOB_RECURSION_LIMIT { - // prevent stack overflows (but this shouldn't be possible) - panic!("infinite recursion in glob imports!"); - } - let mut changed = false; - - for (name, res) in resolutions { - match name { - Some(name) => { - let scope = &mut self.def_map.modules[module_id].scope; - changed |= scope.push_res_with_import( - &mut self.from_glob_import, - (module_id, name.clone()), - res.with_visibility(vis), - import_type, - ); - } - None => { - let tr = match res.take_types() { - Some(ModuleDefId::TraitId(tr)) => tr, - Some(other) => { - log::debug!("non-trait `_` import of {:?}", other); - continue; - } - None => continue, - }; - let old_vis = self.def_map.modules[module_id].scope.unnamed_trait_vis(tr); - let should_update = match old_vis { - None => true, - Some(old_vis) => { - let max_vis = old_vis.max(vis, &self.def_map).unwrap_or_else(|| { - panic!("`Tr as _` imports with unrelated visibilities {:?} and {:?} (trait {:?})", old_vis, vis, tr); - }); - - if max_vis == old_vis { - false - } else { - mark::hit!(upgrade_underscore_visibility); - true - } - } - }; - - if 
should_update { - changed = true; - self.def_map.modules[module_id].scope.push_unnamed_trait(tr, vis); - } - } - } - } - - if !changed { - return; - } - let glob_imports = self - .glob_imports - .get(&module_id) - .into_iter() - .flat_map(|v| v.iter()) - .filter(|(glob_importing_module, _)| { - // we know all resolutions have the same visibility (`vis`), so we - // just need to check that once - vis.is_visible_from_def_map(&self.def_map, *glob_importing_module) - }) - .cloned() - .collect::>(); - - for (glob_importing_module, glob_import_vis) in glob_imports { - self.update_recursive( - glob_importing_module, - resolutions, - glob_import_vis, - ImportType::Glob, - depth + 1, - ); - } - } - - fn resolve_macros(&mut self) -> ReachedFixedPoint { - let mut macros = std::mem::replace(&mut self.unexpanded_macros, Vec::new()); - let mut attribute_macros = - std::mem::replace(&mut self.unexpanded_attribute_macros, Vec::new()); - let mut resolved = Vec::new(); - let mut res = ReachedFixedPoint::Yes; - macros.retain(|directive| { - if let Some(call_id) = directive.legacy { - res = ReachedFixedPoint::No; - resolved.push((directive.module_id, call_id, directive.depth)); - return false; - } - - if let Some(call_id) = - directive.ast_id.as_call_id(self.db, self.def_map.krate, |path| { - let resolved_res = self.def_map.resolve_path_fp_with_macro( - self.db, - ResolveMode::Other, - directive.module_id, - &path, - BuiltinShadowMode::Module, - ); - resolved_res.resolved_def.take_macros() - }) - { - resolved.push((directive.module_id, call_id, directive.depth)); - res = ReachedFixedPoint::No; - return false; - } - - true - }); - attribute_macros.retain(|directive| { - if let Some(call_id) = - directive.ast_id.as_call_id(self.db, self.def_map.krate, |path| { - self.resolve_attribute_macro(&directive, &path) - }) - { - resolved.push((directive.module_id, call_id, 0)); - res = ReachedFixedPoint::No; - return false; - } - - true - }); - - self.unexpanded_macros = macros; - 
self.unexpanded_attribute_macros = attribute_macros; - - for (module_id, macro_call_id, depth) in resolved { - self.collect_macro_expansion(module_id, macro_call_id, depth); - } - - res - } - - fn resolve_attribute_macro( - &self, - directive: &DeriveDirective, - path: &ModPath, - ) -> Option { - if let Some(name) = path.as_ident() { - // FIXME this should actually be handled with the normal name - // resolution; the std lib defines built-in stubs for the derives, - // but these are new-style `macro`s, which we don't support yet - if let Some(def_id) = find_builtin_derive(name) { - return Some(def_id); - } - } - let resolved_res = self.def_map.resolve_path_fp_with_macro( - self.db, - ResolveMode::Other, - directive.module_id, - &path, - BuiltinShadowMode::Module, - ); - - resolved_res.resolved_def.take_macros() - } - - fn collect_macro_expansion( - &mut self, - module_id: LocalModuleId, - macro_call_id: MacroCallId, - depth: usize, - ) { - if depth > EXPANSION_DEPTH_LIMIT { - mark::hit!(macro_expansion_overflow); - log::warn!("macro expansion is too deep"); - return; - } - let file_id: HirFileId = macro_call_id.as_file(); - let item_tree = self.db.item_tree(file_id); - let mod_dir = self.mod_dirs[&module_id].clone(); - ModCollector { - def_collector: &mut *self, - macro_depth: depth, - file_id, - module_id, - item_tree: &item_tree, - mod_dir, - } - .collect(item_tree.top_level_items()); - } - - fn finish(self) -> CrateDefMap { - self.def_map - } -} - -/// Walks a single module, populating defs, imports and macros -struct ModCollector<'a, 'b> { - def_collector: &'a mut DefCollector<'b>, - macro_depth: usize, - module_id: LocalModuleId, - file_id: HirFileId, - item_tree: &'a ItemTree, - mod_dir: ModDir, -} - -impl ModCollector<'_, '_> { - fn collect(&mut self, items: &[ModItem]) { - // Note: don't assert that inserted value is fresh: it's simply not true - // for macros. 
- self.def_collector.mod_dirs.insert(self.module_id, self.mod_dir.clone()); - - // Prelude module is always considered to be `#[macro_use]`. - if let Some(prelude_module) = self.def_collector.def_map.prelude { - if prelude_module.krate != self.def_collector.def_map.krate { - mark::hit!(prelude_is_macro_use); - self.def_collector.import_all_macros_exported(self.module_id, prelude_module.krate); - } - } - - // This should be processed eagerly instead of deferred to resolving. - // `#[macro_use] extern crate` is hoisted to imports macros before collecting - // any other items. - for item in items { - if self.is_cfg_enabled(self.item_tree.attrs((*item).into())) { - if let ModItem::ExternCrate(id) = item { - let import = self.item_tree[*id].clone(); - if import.is_macro_use { - self.def_collector.import_macros_from_extern_crate(self.module_id, &import); - } - } - } - } - - for &item in items { - let attrs = self.item_tree.attrs(item.into()); - if self.is_cfg_enabled(attrs) { - let module = - ModuleId { krate: self.def_collector.def_map.krate, local_id: self.module_id }; - let container = ContainerId::ModuleId(module); - - let mut def = None; - match item { - ModItem::Mod(m) => self.collect_module(&self.item_tree[m], attrs), - ModItem::Import(import_id) => { - self.def_collector.unresolved_imports.push(ImportDirective { - module_id: self.module_id, - import: Import::from_use(&self.item_tree, import_id), - status: PartialResolvedImport::Unresolved, - }) - } - ModItem::ExternCrate(import_id) => { - self.def_collector.unresolved_imports.push(ImportDirective { - module_id: self.module_id, - import: Import::from_extern_crate(&self.item_tree, import_id), - status: PartialResolvedImport::Unresolved, - }) - } - ModItem::MacroCall(mac) => self.collect_macro(&self.item_tree[mac]), - ModItem::Impl(imp) => { - let module = ModuleId { - krate: self.def_collector.def_map.krate, - local_id: self.module_id, - }; - let container = ContainerId::ModuleId(module); - let impl_id = ImplLoc { 
container, id: ItemTreeId::new(self.file_id, imp) } - .intern(self.def_collector.db); - self.def_collector.def_map.modules[self.module_id] - .scope - .define_impl(impl_id) - } - ModItem::Function(id) => { - let func = &self.item_tree[id]; - def = Some(DefData { - id: FunctionLoc { - container: container.into(), - id: ItemTreeId::new(self.file_id, id), - } - .intern(self.def_collector.db) - .into(), - name: &func.name, - visibility: &self.item_tree[func.visibility], - has_constructor: false, - }); - } - ModItem::Struct(id) => { - let it = &self.item_tree[id]; - - // FIXME: check attrs to see if this is an attribute macro invocation; - // in which case we don't add the invocation, just a single attribute - // macro invocation - self.collect_derives(attrs, it.ast_id.upcast()); - - def = Some(DefData { - id: StructLoc { container, id: ItemTreeId::new(self.file_id, id) } - .intern(self.def_collector.db) - .into(), - name: &it.name, - visibility: &self.item_tree[it.visibility], - has_constructor: it.kind != StructDefKind::Record, - }); - } - ModItem::Union(id) => { - let it = &self.item_tree[id]; - - // FIXME: check attrs to see if this is an attribute macro invocation; - // in which case we don't add the invocation, just a single attribute - // macro invocation - self.collect_derives(attrs, it.ast_id.upcast()); - - def = Some(DefData { - id: UnionLoc { container, id: ItemTreeId::new(self.file_id, id) } - .intern(self.def_collector.db) - .into(), - name: &it.name, - visibility: &self.item_tree[it.visibility], - has_constructor: false, - }); - } - ModItem::Enum(id) => { - let it = &self.item_tree[id]; - - // FIXME: check attrs to see if this is an attribute macro invocation; - // in which case we don't add the invocation, just a single attribute - // macro invocation - self.collect_derives(attrs, it.ast_id.upcast()); - - def = Some(DefData { - id: EnumLoc { container, id: ItemTreeId::new(self.file_id, id) } - .intern(self.def_collector.db) - .into(), - name: &it.name, - 
visibility: &self.item_tree[it.visibility], - has_constructor: false, - }); - } - ModItem::Const(id) => { - let it = &self.item_tree[id]; - - if let Some(name) = &it.name { - def = Some(DefData { - id: ConstLoc { - container: container.into(), - id: ItemTreeId::new(self.file_id, id), - } - .intern(self.def_collector.db) - .into(), - name, - visibility: &self.item_tree[it.visibility], - has_constructor: false, - }); - } - } - ModItem::Static(id) => { - let it = &self.item_tree[id]; - - def = Some(DefData { - id: StaticLoc { container, id: ItemTreeId::new(self.file_id, id) } - .intern(self.def_collector.db) - .into(), - name: &it.name, - visibility: &self.item_tree[it.visibility], - has_constructor: false, - }); - } - ModItem::Trait(id) => { - let it = &self.item_tree[id]; - - def = Some(DefData { - id: TraitLoc { container, id: ItemTreeId::new(self.file_id, id) } - .intern(self.def_collector.db) - .into(), - name: &it.name, - visibility: &self.item_tree[it.visibility], - has_constructor: false, - }); - } - ModItem::TypeAlias(id) => { - let it = &self.item_tree[id]; - - def = Some(DefData { - id: TypeAliasLoc { - container: container.into(), - id: ItemTreeId::new(self.file_id, id), - } - .intern(self.def_collector.db) - .into(), - name: &it.name, - visibility: &self.item_tree[it.visibility], - has_constructor: false, - }); - } - } - - if let Some(DefData { id, name, visibility, has_constructor }) = def { - self.def_collector.def_map.modules[self.module_id].scope.define_def(id); - let vis = self - .def_collector - .def_map - .resolve_visibility(self.def_collector.db, self.module_id, visibility) - .unwrap_or(Visibility::Public); - self.def_collector.update( - self.module_id, - &[(Some(name.clone()), PerNs::from_def(id, vis, has_constructor))], - vis, - ImportType::Named, - ) - } - } - } - } - - fn collect_module(&mut self, module: &Mod, attrs: &Attrs) { - let path_attr = attrs.by_key("path").string_value(); - let is_macro_use = attrs.by_key("macro_use").exists(); - 
match &module.kind { - // inline module, just recurse - ModKind::Inline { items } => { - let module_id = self.push_child_module( - module.name.clone(), - AstId::new(self.file_id, module.ast_id), - None, - &self.item_tree[module.visibility], - ); - - ModCollector { - def_collector: &mut *self.def_collector, - macro_depth: self.macro_depth, - module_id, - file_id: self.file_id, - item_tree: self.item_tree, - mod_dir: self.mod_dir.descend_into_definition(&module.name, path_attr), - } - .collect(&*items); - if is_macro_use { - self.import_all_legacy_macros(module_id); - } - } - // out of line module, resolve, parse and recurse - ModKind::Outline {} => { - let ast_id = AstId::new(self.file_id, module.ast_id); - match self.mod_dir.resolve_declaration( - self.def_collector.db, - self.file_id, - &module.name, - path_attr, - ) { - Ok((file_id, is_mod_rs, mod_dir)) => { - let module_id = self.push_child_module( - module.name.clone(), - ast_id, - Some((file_id, is_mod_rs)), - &self.item_tree[module.visibility], - ); - let item_tree = self.def_collector.db.item_tree(file_id.into()); - ModCollector { - def_collector: &mut *self.def_collector, - macro_depth: self.macro_depth, - module_id, - file_id: file_id.into(), - item_tree: &item_tree, - mod_dir, - } - .collect(item_tree.top_level_items()); - if is_macro_use { - self.import_all_legacy_macros(module_id); - } - } - Err(candidate) => self.def_collector.def_map.diagnostics.push( - DefDiagnostic::UnresolvedModule { - module: self.module_id, - declaration: ast_id, - candidate, - }, - ), - }; - } - } - } - - fn push_child_module( - &mut self, - name: Name, - declaration: AstId, - definition: Option<(FileId, bool)>, - visibility: &crate::visibility::RawVisibility, - ) -> LocalModuleId { - let vis = self - .def_collector - .def_map - .resolve_visibility(self.def_collector.db, self.module_id, visibility) - .unwrap_or(Visibility::Public); - let modules = &mut self.def_collector.def_map.modules; - let res = 
modules.alloc(ModuleData::default()); - modules[res].parent = Some(self.module_id); - modules[res].origin = match definition { - None => ModuleOrigin::Inline { definition: declaration }, - Some((definition, is_mod_rs)) => { - ModuleOrigin::File { declaration, definition, is_mod_rs } - } - }; - for (name, mac) in modules[self.module_id].scope.collect_legacy_macros() { - modules[res].scope.define_legacy_macro(name, mac) - } - modules[self.module_id].children.insert(name.clone(), res); - let module = ModuleId { krate: self.def_collector.def_map.krate, local_id: res }; - let def: ModuleDefId = module.into(); - self.def_collector.def_map.modules[self.module_id].scope.define_def(def); - self.def_collector.update( - self.module_id, - &[(Some(name), PerNs::from_def(def, vis, false))], - vis, - ImportType::Named, - ); - res - } - - fn collect_derives(&mut self, attrs: &Attrs, ast_id: FileAstId) { - for derive_subtree in attrs.by_key("derive").tt_values() { - // for #[derive(Copy, Clone)], `derive_subtree` is the `(Copy, Clone)` subtree - for tt in &derive_subtree.token_trees { - let ident = match &tt { - tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => ident, - tt::TokenTree::Leaf(tt::Leaf::Punct(_)) => continue, // , is ok - _ => continue, // anything else would be an error (which we currently ignore) - }; - let path = ModPath::from_tt_ident(ident); - - let ast_id = AstIdWithPath::new(self.file_id, ast_id, path); - self.def_collector - .unexpanded_attribute_macros - .push(DeriveDirective { module_id: self.module_id, ast_id }); - } - } - } - - fn collect_macro(&mut self, mac: &MacroCall) { - let mut ast_id = AstIdWithPath::new(self.file_id, mac.ast_id, mac.path.clone()); - - // Case 0: builtin macros - if mac.is_builtin { - if let Some(name) = &mac.name { - let krate = self.def_collector.def_map.krate; - if let Some(macro_id) = find_builtin_macro(name, krate, ast_id.ast_id) { - self.def_collector.define_macro( - self.module_id, - name.clone(), - macro_id, - mac.is_export, - 
); - return; - } - } - } - - // Case 1: macro rules, define a macro in crate-global mutable scope - if is_macro_rules(&mac.path) { - if let Some(name) = &mac.name { - let macro_id = MacroDefId { - ast_id: Some(ast_id.ast_id), - krate: Some(self.def_collector.def_map.krate), - kind: MacroDefKind::Declarative, - local_inner: mac.is_local_inner, - }; - self.def_collector.define_macro( - self.module_id, - name.clone(), - macro_id, - mac.is_export, - ); - } - return; - } - - // Case 2: try to resolve in legacy scope and expand macro_rules - if let Some(macro_call_id) = - ast_id.as_call_id(self.def_collector.db, self.def_collector.def_map.krate, |path| { - path.as_ident().and_then(|name| { - self.def_collector.def_map[self.module_id].scope.get_legacy_macro(&name) - }) - }) - { - self.def_collector.unexpanded_macros.push(MacroDirective { - module_id: self.module_id, - ast_id, - legacy: Some(macro_call_id), - depth: self.macro_depth + 1, - }); - - return; - } - - // Case 3: resolve in module scope, expand during name resolution. - // We rewrite simple path `macro_name` to `self::macro_name` to force resolve in module scope only. 
- if ast_id.path.is_ident() { - ast_id.path.kind = PathKind::Super(0); - } - - self.def_collector.unexpanded_macros.push(MacroDirective { - module_id: self.module_id, - ast_id, - legacy: None, - depth: self.macro_depth + 1, - }); - } - - fn import_all_legacy_macros(&mut self, module_id: LocalModuleId) { - let macros = self.def_collector.def_map[module_id].scope.collect_legacy_macros(); - for (name, macro_) in macros { - self.def_collector.define_legacy_macro(self.module_id, name.clone(), macro_); - } - } - - fn is_cfg_enabled(&self, attrs: &Attrs) -> bool { - attrs.is_cfg_enabled(self.def_collector.cfg_options) - } -} - -fn is_macro_rules(path: &ModPath) -> bool { - path.as_ident() == Some(&name![macro_rules]) -} - -#[cfg(test)] -mod tests { - use crate::{db::DefDatabase, test_db::TestDB}; - use ra_arena::Arena; - use ra_db::{fixture::WithFixture, SourceDatabase}; - - use super::*; - - fn do_collect_defs(db: &dyn DefDatabase, def_map: CrateDefMap) -> CrateDefMap { - let mut collector = DefCollector { - db, - def_map, - glob_imports: FxHashMap::default(), - unresolved_imports: Vec::new(), - resolved_imports: Vec::new(), - unexpanded_macros: Vec::new(), - unexpanded_attribute_macros: Vec::new(), - mod_dirs: FxHashMap::default(), - cfg_options: &CfgOptions::default(), - proc_macros: Default::default(), - from_glob_import: Default::default(), - }; - collector.collect(); - collector.def_map - } - - fn do_resolve(code: &str) -> CrateDefMap { - let (db, _file_id) = TestDB::with_single_file(&code); - let krate = db.test_crate(); - - let def_map = { - let edition = db.crate_graph()[krate].edition; - let mut modules: Arena = Arena::default(); - let root = modules.alloc(ModuleData::default()); - CrateDefMap { - krate, - edition, - extern_prelude: FxHashMap::default(), - prelude: None, - root, - modules, - diagnostics: Vec::new(), - } - }; - do_collect_defs(&db, def_map) - } - - #[test] - fn test_macro_expand_will_stop_1() { - do_resolve( - r#" - macro_rules! 
foo { - ($($ty:ty)*) => { foo!($($ty)*); } - } - foo!(KABOOM); - "#, - ); - } - - #[ignore] // this test does succeed, but takes quite a while :/ - #[test] - fn test_macro_expand_will_stop_2() { - do_resolve( - r#" - macro_rules! foo { - ($($ty:ty)*) => { foo!($($ty)* $($ty)*); } - } - foo!(KABOOM); - "#, - ); - } -} diff --git a/crates/ra_hir_def/src/nameres/mod_resolution.rs b/crates/ra_hir_def/src/nameres/mod_resolution.rs deleted file mode 100644 index 9539616325..0000000000 --- a/crates/ra_hir_def/src/nameres/mod_resolution.rs +++ /dev/null @@ -1,139 +0,0 @@ -//! This module resolves `mod foo;` declaration to file. -use hir_expand::name::Name; -use ra_db::FileId; -use ra_syntax::SmolStr; - -use crate::{db::DefDatabase, HirFileId}; - -#[derive(Clone, Debug)] -pub(super) struct ModDir { - /// `` for `mod.rs`, `lib.rs` - /// `foo/` for `foo.rs` - /// `foo/bar/` for `mod bar { mod x; }` nested in `foo.rs` - /// Invariant: path.is_empty() || path.ends_with('/') - dir_path: DirPath, - /// inside `./foo.rs`, mods with `#[path]` should *not* be relative to `./foo/` - root_non_dir_owner: bool, -} - -impl ModDir { - pub(super) fn root() -> ModDir { - ModDir { dir_path: DirPath::empty(), root_non_dir_owner: false } - } - - pub(super) fn descend_into_definition( - &self, - name: &Name, - attr_path: Option<&SmolStr>, - ) -> ModDir { - let path = match attr_path.map(|it| it.as_str()) { - None => { - let mut path = self.dir_path.clone(); - path.push(&name.to_string()); - path - } - Some(attr_path) => { - let mut path = self.dir_path.join_attr(attr_path, self.root_non_dir_owner); - if !(path.is_empty() || path.ends_with('/')) { - path.push('/') - } - DirPath::new(path) - } - }; - ModDir { dir_path: path, root_non_dir_owner: false } - } - - pub(super) fn resolve_declaration( - &self, - db: &dyn DefDatabase, - file_id: HirFileId, - name: &Name, - attr_path: Option<&SmolStr>, - ) -> Result<(FileId, bool, ModDir), String> { - let file_id = file_id.original_file(db.upcast()); - - 
let mut candidate_files = Vec::new(); - match attr_path { - Some(attr_path) => { - candidate_files.push(self.dir_path.join_attr(attr_path, self.root_non_dir_owner)) - } - None => { - candidate_files.push(format!("{}{}.rs", self.dir_path.0, name)); - candidate_files.push(format!("{}{}/mod.rs", self.dir_path.0, name)); - } - }; - - for candidate in candidate_files.iter() { - if let Some(file_id) = db.resolve_path(file_id, candidate.as_str()) { - let is_mod_rs = candidate.ends_with("mod.rs"); - - let (dir_path, root_non_dir_owner) = if is_mod_rs || attr_path.is_some() { - (DirPath::empty(), false) - } else { - (DirPath::new(format!("{}/", name)), true) - }; - return Ok((file_id, is_mod_rs, ModDir { dir_path, root_non_dir_owner })); - } - } - Err(candidate_files.remove(0)) - } -} - -#[derive(Clone, Debug)] -struct DirPath(String); - -impl DirPath { - fn assert_invariant(&self) { - assert!(self.0.is_empty() || self.0.ends_with('/')); - } - fn new(repr: String) -> DirPath { - let res = DirPath(repr); - res.assert_invariant(); - res - } - fn empty() -> DirPath { - DirPath::new(String::new()) - } - fn push(&mut self, name: &str) { - self.0.push_str(name); - self.0.push('/'); - self.assert_invariant(); - } - fn parent(&self) -> Option<&str> { - if self.0.is_empty() { - return None; - }; - let idx = - self.0[..self.0.len() - '/'.len_utf8()].rfind('/').map_or(0, |it| it + '/'.len_utf8()); - Some(&self.0[..idx]) - } - /// So this is the case which doesn't really work I think if we try to be - /// 100% platform agnostic: - /// - /// ``` - /// mod a { - /// #[path="C://sad/face"] - /// mod b { mod c; } - /// } - /// ``` - /// - /// Here, we need to join logical dir path to a string path from an - /// attribute. Ideally, we should somehow losslessly communicate the whole - /// construction to `FileLoader`. 
- fn join_attr(&self, mut attr: &str, relative_to_parent: bool) -> String { - let base = if relative_to_parent { self.parent().unwrap() } else { &self.0 }; - - if attr.starts_with("./") { - attr = &attr["./".len()..]; - } - let tmp; - let attr = if attr.contains('\\') { - tmp = attr.replace('\\', "/"); - &tmp - } else { - attr - }; - let res = format!("{}{}", base, attr); - res - } -} diff --git a/crates/ra_hir_def/src/nameres/path_resolution.rs b/crates/ra_hir_def/src/nameres/path_resolution.rs deleted file mode 100644 index dbfa7fccb4..0000000000 --- a/crates/ra_hir_def/src/nameres/path_resolution.rs +++ /dev/null @@ -1,330 +0,0 @@ -//! This modules implements a function to resolve a path `foo::bar::baz` to a -//! def, which is used within the name resolution. -//! -//! When name resolution is finished, the result of resolving a path is either -//! `Some(def)` or `None`. However, when we are in process of resolving imports -//! or macros, there's a third possibility: -//! -//! I can't resolve this path right now, but I might be resolve this path -//! later, when more macros are expanded. -//! -//! `ReachedFixedPoint` signals about this. 
- -use std::iter::successors; - -use hir_expand::name::Name; -use ra_db::Edition; -use test_utils::mark; - -use crate::{ - db::DefDatabase, - item_scope::BUILTIN_SCOPE, - nameres::{BuiltinShadowMode, CrateDefMap}, - path::{ModPath, PathKind}, - per_ns::PerNs, - visibility::{RawVisibility, Visibility}, - AdtId, CrateId, EnumVariantId, LocalModuleId, ModuleDefId, ModuleId, -}; - -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub(super) enum ResolveMode { - Import, - Other, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub(super) enum ReachedFixedPoint { - Yes, - No, -} - -#[derive(Debug, Clone)] -pub(super) struct ResolvePathResult { - pub(super) resolved_def: PerNs, - pub(super) segment_index: Option, - pub(super) reached_fixedpoint: ReachedFixedPoint, - pub(super) krate: Option, -} - -impl ResolvePathResult { - fn empty(reached_fixedpoint: ReachedFixedPoint) -> ResolvePathResult { - ResolvePathResult::with(PerNs::none(), reached_fixedpoint, None, None) - } - - fn with( - resolved_def: PerNs, - reached_fixedpoint: ReachedFixedPoint, - segment_index: Option, - krate: Option, - ) -> ResolvePathResult { - ResolvePathResult { resolved_def, reached_fixedpoint, segment_index, krate } - } -} - -impl CrateDefMap { - pub(super) fn resolve_name_in_extern_prelude(&self, name: &Name) -> PerNs { - self.extern_prelude - .get(name) - .map_or(PerNs::none(), |&it| PerNs::types(it, Visibility::Public)) - } - - pub(crate) fn resolve_visibility( - &self, - db: &dyn DefDatabase, - original_module: LocalModuleId, - visibility: &RawVisibility, - ) -> Option { - match visibility { - RawVisibility::Module(path) => { - let (result, remaining) = - self.resolve_path(db, original_module, &path, BuiltinShadowMode::Module); - if remaining.is_some() { - return None; - } - let types = result.take_types()?; - match types { - ModuleDefId::ModuleId(m) => Some(Visibility::Module(m)), - _ => { - // error: visibility needs to refer to module - None - } - } - } - RawVisibility::Public => 
Some(Visibility::Public), - } - } - - // Returns Yes if we are sure that additions to `ItemMap` wouldn't change - // the result. - pub(super) fn resolve_path_fp_with_macro( - &self, - db: &dyn DefDatabase, - mode: ResolveMode, - original_module: LocalModuleId, - path: &ModPath, - shadow: BuiltinShadowMode, - ) -> ResolvePathResult { - let mut segments = path.segments.iter().enumerate(); - let mut curr_per_ns: PerNs = match path.kind { - PathKind::DollarCrate(krate) => { - if krate == self.krate { - mark::hit!(macro_dollar_crate_self); - PerNs::types( - ModuleId { krate: self.krate, local_id: self.root }.into(), - Visibility::Public, - ) - } else { - let def_map = db.crate_def_map(krate); - let module = ModuleId { krate, local_id: def_map.root }; - mark::hit!(macro_dollar_crate_other); - PerNs::types(module.into(), Visibility::Public) - } - } - PathKind::Crate => PerNs::types( - ModuleId { krate: self.krate, local_id: self.root }.into(), - Visibility::Public, - ), - // plain import or absolute path in 2015: crate-relative with - // fallback to extern prelude (with the simplification in - // rust-lang/rust#57745) - // FIXME there must be a nicer way to write this condition - PathKind::Plain | PathKind::Abs - if self.edition == Edition::Edition2015 - && (path.kind == PathKind::Abs || mode == ResolveMode::Import) => - { - let (_, segment) = match segments.next() { - Some((idx, segment)) => (idx, segment), - None => return ResolvePathResult::empty(ReachedFixedPoint::Yes), - }; - log::debug!("resolving {:?} in crate root (+ extern prelude)", segment); - self.resolve_name_in_crate_root_or_extern_prelude(&segment) - } - PathKind::Plain => { - let (_, segment) = match segments.next() { - Some((idx, segment)) => (idx, segment), - None => return ResolvePathResult::empty(ReachedFixedPoint::Yes), - }; - // The first segment may be a builtin type. If the path has more - // than one segment, we first try resolving it as a module - // anyway. 
- // FIXME: If the next segment doesn't resolve in the module and - // BuiltinShadowMode wasn't Module, then we need to try - // resolving it as a builtin. - let prefer_module = - if path.segments.len() == 1 { shadow } else { BuiltinShadowMode::Module }; - - log::debug!("resolving {:?} in module", segment); - self.resolve_name_in_module(db, original_module, &segment, prefer_module) - } - PathKind::Super(lvl) => { - let m = successors(Some(original_module), |m| self.modules[*m].parent) - .nth(lvl as usize); - if let Some(local_id) = m { - PerNs::types( - ModuleId { krate: self.krate, local_id }.into(), - Visibility::Public, - ) - } else { - log::debug!("super path in root module"); - return ResolvePathResult::empty(ReachedFixedPoint::Yes); - } - } - PathKind::Abs => { - // 2018-style absolute path -- only extern prelude - let segment = match segments.next() { - Some((_, segment)) => segment, - None => return ResolvePathResult::empty(ReachedFixedPoint::Yes), - }; - if let Some(def) = self.extern_prelude.get(&segment) { - log::debug!("absolute path {:?} resolved to crate {:?}", path, def); - PerNs::types(*def, Visibility::Public) - } else { - return ResolvePathResult::empty(ReachedFixedPoint::No); // extern crate declarations can add to the extern prelude - } - } - }; - - for (i, segment) in segments { - let (curr, vis) = match curr_per_ns.take_types_vis() { - Some(r) => r, - None => { - // we still have path segments left, but the path so far - // didn't resolve in the types namespace => no resolution - // (don't break here because `curr_per_ns` might contain - // something in the value namespace, and it would be wrong - // to return that) - return ResolvePathResult::empty(ReachedFixedPoint::No); - } - }; - // resolve segment in curr - - curr_per_ns = match curr { - ModuleDefId::ModuleId(module) => { - if module.krate != self.krate { - let path = ModPath { - segments: path.segments[i..].to_vec(), - kind: PathKind::Super(0), - }; - log::debug!("resolving {:?} in other 
crate", path); - let defp_map = db.crate_def_map(module.krate); - let (def, s) = defp_map.resolve_path(db, module.local_id, &path, shadow); - return ResolvePathResult::with( - def, - ReachedFixedPoint::Yes, - s.map(|s| s + i), - Some(module.krate), - ); - } - - // Since it is a qualified path here, it should not contains legacy macros - self[module.local_id].scope.get(&segment) - } - ModuleDefId::AdtId(AdtId::EnumId(e)) => { - // enum variant - mark::hit!(can_import_enum_variant); - let enum_data = db.enum_data(e); - match enum_data.variant(&segment) { - Some(local_id) => { - let variant = EnumVariantId { parent: e, local_id }; - match &*enum_data.variants[local_id].variant_data { - crate::adt::VariantData::Record(_) => { - PerNs::types(variant.into(), Visibility::Public) - } - crate::adt::VariantData::Tuple(_) - | crate::adt::VariantData::Unit => { - PerNs::both(variant.into(), variant.into(), Visibility::Public) - } - } - } - None => { - return ResolvePathResult::with( - PerNs::types(e.into(), vis), - ReachedFixedPoint::Yes, - Some(i), - Some(self.krate), - ); - } - } - } - s => { - // could be an inherent method call in UFCS form - // (`Struct::method`), or some other kind of associated item - log::debug!( - "path segment {:?} resolved to non-module {:?}, but is not last", - segment, - curr, - ); - - return ResolvePathResult::with( - PerNs::types(s, vis), - ReachedFixedPoint::Yes, - Some(i), - Some(self.krate), - ); - } - }; - } - - ResolvePathResult::with(curr_per_ns, ReachedFixedPoint::Yes, None, Some(self.krate)) - } - - fn resolve_name_in_module( - &self, - db: &dyn DefDatabase, - module: LocalModuleId, - name: &Name, - shadow: BuiltinShadowMode, - ) -> PerNs { - // Resolve in: - // - legacy scope of macro - // - current module / scope - // - extern prelude - // - std prelude - let from_legacy_macro = self[module] - .scope - .get_legacy_macro(name) - .map_or_else(PerNs::none, |m| PerNs::macros(m, Visibility::Public)); - let from_scope = 
self[module].scope.get(name); - let from_builtin = BUILTIN_SCOPE.get(name).copied().unwrap_or_else(PerNs::none); - let from_scope_or_builtin = match shadow { - BuiltinShadowMode::Module => from_scope.or(from_builtin), - BuiltinShadowMode::Other => { - if let Some(ModuleDefId::ModuleId(_)) = from_scope.take_types() { - from_builtin.or(from_scope) - } else { - from_scope.or(from_builtin) - } - } - }; - let from_extern_prelude = self - .extern_prelude - .get(name) - .map_or(PerNs::none(), |&it| PerNs::types(it, Visibility::Public)); - let from_prelude = self.resolve_in_prelude(db, name); - - from_legacy_macro.or(from_scope_or_builtin).or(from_extern_prelude).or(from_prelude) - } - - fn resolve_name_in_crate_root_or_extern_prelude(&self, name: &Name) -> PerNs { - let from_crate_root = self[self.root].scope.get(name); - let from_extern_prelude = self.resolve_name_in_extern_prelude(name); - - from_crate_root.or(from_extern_prelude) - } - - fn resolve_in_prelude(&self, db: &dyn DefDatabase, name: &Name) -> PerNs { - if let Some(prelude) = self.prelude { - let keep; - let def_map = if prelude.krate == self.krate { - self - } else { - // Extend lifetime - keep = db.crate_def_map(prelude.krate); - &keep - }; - def_map[prelude.local_id].scope.get(name) - } else { - PerNs::none() - } - } -} diff --git a/crates/ra_hir_def/src/nameres/tests.rs b/crates/ra_hir_def/src/nameres/tests.rs deleted file mode 100644 index 839b1de578..0000000000 --- a/crates/ra_hir_def/src/nameres/tests.rs +++ /dev/null @@ -1,690 +0,0 @@ -mod globs; -mod incremental; -mod macros; -mod mod_resolution; -mod primitives; - -use std::sync::Arc; - -use expect::{expect, Expect}; -use ra_db::{fixture::WithFixture, SourceDatabase}; -use test_utils::mark; - -use crate::{db::DefDatabase, nameres::*, test_db::TestDB}; - -fn compute_crate_def_map(fixture: &str) -> Arc { - let db = TestDB::with_files(fixture); - let krate = db.crate_graph().iter().next().unwrap(); - db.crate_def_map(krate) -} - -fn check(ra_fixture: 
&str, expect: Expect) { - let db = TestDB::with_files(ra_fixture); - let krate = db.crate_graph().iter().next().unwrap(); - let actual = db.crate_def_map(krate).dump(); - expect.assert_eq(&actual); -} - -#[test] -fn crate_def_map_smoke_test() { - check( - r#" -//- /lib.rs -mod foo; -struct S; -use crate::foo::bar::E; -use self::E::V; - -//- /foo/mod.rs -pub mod bar; -fn f() {} - -//- /foo/bar.rs -pub struct Baz; - -union U { to_be: bool, not_to_be: u8 } -enum E { V } - -extern { - static EXT: u8; - fn ext(); -} -"#, - expect![[r#" - crate - E: t - S: t v - V: t v - foo: t - - crate::foo - bar: t - f: v - - crate::foo::bar - Baz: t v - E: t - EXT: v - U: t - ext: v - "#]], - ); -} - -#[test] -fn crate_def_map_super_super() { - check( - r#" -mod a { - const A: usize = 0; - mod b { - const B: usize = 0; - mod c { - use super::super::*; - } - } -} -"#, - expect![[r#" - crate - a: t - - crate::a - A: v - b: t - - crate::a::b - B: v - c: t - - crate::a::b::c - A: v - b: t - "#]], - ); -} - -#[test] -fn crate_def_map_fn_mod_same_name() { - check( - r#" -mod m { - pub mod z {} - pub fn z() {} -} -"#, - expect![[r#" - crate - m: t - - crate::m - z: t v - - crate::m::z - "#]], - ); -} - -#[test] -fn bogus_paths() { - mark::check!(bogus_paths); - check( - r#" -//- /lib.rs -mod foo; -struct S; -use self; - -//- /foo/mod.rs -use super; -use crate; -"#, - expect![[r#" - crate - S: t v - foo: t - - crate::foo - "#]], - ); -} - -#[test] -fn use_as() { - check( - r#" -//- /lib.rs -mod foo; -use crate::foo::Baz as Foo; - -//- /foo/mod.rs -pub struct Baz; -"#, - expect![[r#" - crate - Foo: t v - foo: t - - crate::foo - Baz: t v - "#]], - ); -} - -#[test] -fn use_trees() { - check( - r#" -//- /lib.rs -mod foo; -use crate::foo::bar::{Baz, Quux}; - -//- /foo/mod.rs -pub mod bar; - -//- /foo/bar.rs -pub struct Baz; -pub enum Quux {}; -"#, - expect![[r#" - crate - Baz: t v - Quux: t - foo: t - - crate::foo - bar: t - - crate::foo::bar - Baz: t v - Quux: t - "#]], - ); -} - -#[test] -fn 
re_exports() { - check( - r#" -//- /lib.rs -mod foo; -use self::foo::Baz; - -//- /foo/mod.rs -pub mod bar; -pub use self::bar::Baz; - -//- /foo/bar.rs -pub struct Baz; -"#, - expect![[r#" - crate - Baz: t v - foo: t - - crate::foo - Baz: t v - bar: t - - crate::foo::bar - Baz: t v - "#]], - ); -} - -#[test] -fn std_prelude() { - mark::check!(std_prelude); - check( - r#" -//- /main.rs crate:main deps:test_crate -use Foo::*; - -//- /lib.rs crate:test_crate -mod prelude; -#[prelude_import] -use prelude::*; - -//- /prelude.rs -pub enum Foo { Bar, Baz }; -"#, - expect![[r#" - crate - Bar: t v - Baz: t v - "#]], - ); -} - -#[test] -fn can_import_enum_variant() { - mark::check!(can_import_enum_variant); - check( - r#" -enum E { V } -use self::E::V; -"#, - expect![[r#" - crate - E: t - V: t v - "#]], - ); -} - -#[test] -fn edition_2015_imports() { - check( - r#" -//- /main.rs crate:main deps:other_crate edition:2015 -mod foo; -mod bar; - -//- /bar.rs -struct Bar; - -//- /foo.rs -use bar::Bar; -use other_crate::FromLib; - -//- /lib.rs crate:other_crate edition:2018 -struct FromLib; -"#, - expect![[r#" - crate - bar: t - foo: t - - crate::bar - Bar: t v - - crate::foo - Bar: t v - FromLib: t v - "#]], - ); -} - -#[test] -fn item_map_using_self() { - check( - r#" -//- /lib.rs -mod foo; -use crate::foo::bar::Baz::{self}; - -//- /foo/mod.rs -pub mod bar; - -//- /foo/bar.rs -pub struct Baz; -"#, - expect![[r#" - crate - Baz: t v - foo: t - - crate::foo - bar: t - - crate::foo::bar - Baz: t v - "#]], - ); -} - -#[test] -fn item_map_across_crates() { - check( - r#" -//- /main.rs crate:main deps:test_crate -use test_crate::Baz; - -//- /lib.rs crate:test_crate -pub struct Baz; -"#, - expect![[r#" - crate - Baz: t v - "#]], - ); -} - -#[test] -fn extern_crate_rename() { - check( - r#" -//- /main.rs crate:main deps:alloc -extern crate alloc as alloc_crate; -mod alloc; -mod sync; - -//- /sync.rs -use alloc_crate::Arc; - -//- /lib.rs crate:alloc -struct Arc; -"#, - expect![[r#" - crate 
- alloc_crate: t - sync: t - - crate::sync - Arc: t v - "#]], - ); -} - -#[test] -fn extern_crate_rename_2015_edition() { - check( - r#" -//- /main.rs crate:main deps:alloc edition:2015 -extern crate alloc as alloc_crate; -mod alloc; -mod sync; - -//- /sync.rs -use alloc_crate::Arc; - -//- /lib.rs crate:alloc -struct Arc; -"#, - expect![[r#" - crate - alloc_crate: t - sync: t - - crate::sync - Arc: t v - "#]], - ); -} - -#[test] -fn reexport_across_crates() { - check( - r#" -//- /main.rs crate:main deps:test_crate -use test_crate::Baz; - -//- /lib.rs crate:test_crate -pub use foo::Baz; -mod foo; - -//- /foo.rs -pub struct Baz; -"#, - expect![[r#" - crate - Baz: t v - "#]], - ); -} - -#[test] -fn values_dont_shadow_extern_crates() { - check( - r#" -//- /main.rs crate:main deps:foo -fn foo() {} -use foo::Bar; - -//- /foo/lib.rs crate:foo -pub struct Bar; -"#, - expect![[r#" - crate - Bar: t v - foo: v - "#]], - ); -} - -#[test] -fn std_prelude_takes_precedence_above_core_prelude() { - check( - r#" -//- /main.rs crate:main deps:core,std -use {Foo, Bar}; - -//- /std.rs crate:std deps:core -#[prelude_import] -pub use self::prelude::*; -mod prelude { - pub struct Foo; - pub use core::prelude::Bar; -} - -//- /core.rs crate:core -#[prelude_import] -pub use self::prelude::*; -mod prelude { - pub struct Bar; -} -"#, - expect![[r#" - crate - Bar: t v - Foo: t v - "#]], - ); -} - -#[test] -fn cfg_not_test() { - check( - r#" -//- /main.rs crate:main deps:std -use {Foo, Bar, Baz}; - -//- /lib.rs crate:std -#[prelude_import] -pub use self::prelude::*; -mod prelude { - #[cfg(test)] - pub struct Foo; - #[cfg(not(test))] - pub struct Bar; - #[cfg(all(not(any()), feature = "foo", feature = "bar", opt = "42"))] - pub struct Baz; -} -"#, - expect![[r#" - crate - Bar: t v - Baz: _ - Foo: _ - "#]], - ); -} - -#[test] -fn cfg_test() { - check( - r#" -//- /main.rs crate:main deps:std -use {Foo, Bar, Baz}; - -//- /lib.rs crate:std cfg:test,feature=foo,feature=bar,opt=42 -#[prelude_import] 
-pub use self::prelude::*; -mod prelude { - #[cfg(test)] - pub struct Foo; - #[cfg(not(test))] - pub struct Bar; - #[cfg(all(not(any()), feature = "foo", feature = "bar", opt = "42"))] - pub struct Baz; -} -"#, - expect![[r#" - crate - Bar: _ - Baz: t v - Foo: t v - "#]], - ); -} - -#[test] -fn infer_multiple_namespace() { - check( - r#" -//- /main.rs -mod a { - pub type T = (); - pub use crate::b::*; -} - -use crate::a::T; - -mod b { - pub const T: () = (); -} -"#, - expect![[r#" - crate - T: t v - a: t - b: t - - crate::b - T: v - - crate::a - T: t v - "#]], - ); -} - -#[test] -fn underscore_import() { - check( - r#" -//- /main.rs -use tr::Tr as _; -use tr::Tr2 as _; - -mod tr { - pub trait Tr {} - pub trait Tr2 {} -} - "#, - expect![[r#" - crate - _: t - _: t - tr: t - - crate::tr - Tr: t - Tr2: t - "#]], - ); -} - -#[test] -fn underscore_reexport() { - check( - r#" -//- /main.rs -mod tr { - pub trait PubTr {} - pub trait PrivTr {} -} -mod reex { - use crate::tr::PrivTr as _; - pub use crate::tr::PubTr as _; -} -use crate::reex::*; - "#, - expect![[r#" - crate - _: t - reex: t - tr: t - - crate::tr - PrivTr: t - PubTr: t - - crate::reex - _: t - _: t - "#]], - ); -} - -#[test] -fn underscore_pub_crate_reexport() { - mark::check!(upgrade_underscore_visibility); - check( - r#" -//- /main.rs crate:main deps:lib -use lib::*; - -//- /lib.rs crate:lib -use tr::Tr as _; -pub use tr::Tr as _; - -mod tr { - pub trait Tr {} -} - "#, - expect![[r#" - crate - _: t - "#]], - ); -} - -#[test] -fn underscore_nontrait() { - check( - r#" -//- /main.rs -mod m { - pub struct Struct; - pub enum Enum {} - pub const CONST: () = (); -} -use crate::m::{Struct as _, Enum as _, CONST as _}; - "#, - expect![[r#" - crate - m: t - - crate::m - CONST: v - Enum: t - Struct: t v - "#]], - ); -} - -#[test] -fn underscore_name_conflict() { - check( - r#" -//- /main.rs -struct Tr; - -use tr::Tr as _; - -mod tr { - pub trait Tr {} -} - "#, - expect![[r#" - crate - _: t - Tr: t v - tr: t - - 
crate::tr - Tr: t - "#]], - ); -} diff --git a/crates/ra_hir_def/src/nameres/tests/incremental.rs b/crates/ra_hir_def/src/nameres/tests/incremental.rs deleted file mode 100644 index 0c288a1085..0000000000 --- a/crates/ra_hir_def/src/nameres/tests/incremental.rs +++ /dev/null @@ -1,101 +0,0 @@ -use std::sync::Arc; - -use ra_db::SourceDatabaseExt; - -use super::*; - -fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change: &str) { - let (mut db, pos) = TestDB::with_position(ra_fixture_initial); - let krate = db.test_crate(); - { - let events = db.log_executed(|| { - db.crate_def_map(krate); - }); - assert!(format!("{:?}", events).contains("crate_def_map"), "{:#?}", events) - } - db.set_file_text(pos.file_id, Arc::new(ra_fixture_change.to_string())); - - { - let events = db.log_executed(|| { - db.crate_def_map(krate); - }); - assert!(!format!("{:?}", events).contains("crate_def_map"), "{:#?}", events) - } -} - -#[test] -fn typing_inside_a_function_should_not_invalidate_def_map() { - check_def_map_is_not_recomputed( - r" - //- /lib.rs - mod foo;<|> - - use crate::foo::bar::Baz; - - enum E { A, B } - use E::*; - - fn foo() -> i32 { - 1 + 1 - } - //- /foo/mod.rs - pub mod bar; - - //- /foo/bar.rs - pub struct Baz; - ", - r" - mod foo; - - use crate::foo::bar::Baz; - - enum E { A, B } - use E::*; - - fn foo() -> i32 { 92 } - ", - ); -} - -#[test] -fn typing_inside_a_macro_should_not_invalidate_def_map() { - let (mut db, pos) = TestDB::with_position( - r" - //- /lib.rs - macro_rules! 
m { - ($ident:ident) => { - fn f() { - $ident + $ident; - }; - } - } - mod foo; - - //- /foo/mod.rs - pub mod bar; - - //- /foo/bar.rs - <|> - m!(X); - ", - ); - let krate = db.test_crate(); - { - let events = db.log_executed(|| { - let crate_def_map = db.crate_def_map(krate); - let (_, module_data) = crate_def_map.modules.iter().last().unwrap(); - assert_eq!(module_data.scope.resolutions().count(), 1); - }); - assert!(format!("{:?}", events).contains("crate_def_map"), "{:#?}", events) - } - db.set_file_text(pos.file_id, Arc::new("m!(Y);".to_string())); - - { - let events = db.log_executed(|| { - let crate_def_map = db.crate_def_map(krate); - let (_, module_data) = crate_def_map.modules.iter().last().unwrap(); - assert_eq!(module_data.scope.resolutions().count(), 1); - }); - assert!(!format!("{:?}", events).contains("crate_def_map"), "{:#?}", events) - } -} diff --git a/crates/ra_hir_def/src/nameres/tests/mod_resolution.rs b/crates/ra_hir_def/src/nameres/tests/mod_resolution.rs deleted file mode 100644 index ae58948c42..0000000000 --- a/crates/ra_hir_def/src/nameres/tests/mod_resolution.rs +++ /dev/null @@ -1,796 +0,0 @@ -use super::*; - -#[test] -fn name_res_works_for_broken_modules() { - mark::check!(name_res_works_for_broken_modules); - check( - r" -//- /lib.rs -mod foo // no `;`, no body -use self::foo::Baz; - -//- /foo/mod.rs -pub mod bar; -pub use self::bar::Baz; - -//- /foo/bar.rs -pub struct Baz; -", - expect![[r#" - crate - Baz: _ - foo: t - - crate::foo - "#]], - ); -} - -#[test] -fn nested_module_resolution() { - check( - r#" -//- /lib.rs -mod n1; - -//- /n1.rs -mod n2; - -//- /n1/n2.rs -struct X; -"#, - expect![[r#" - crate - n1: t - - crate::n1 - n2: t - - crate::n1::n2 - X: t v - "#]], - ); -} - -#[test] -fn nested_module_resolution_2() { - check( - r#" -//- /lib.rs -mod prelude; -mod iter; - -//- /prelude.rs -pub use crate::iter::Iterator; - -//- /iter.rs -pub use self::traits::Iterator; -mod traits; - -//- /iter/traits.rs -pub use 
self::iterator::Iterator; -mod iterator; - -//- /iter/traits/iterator.rs -pub trait Iterator; -"#, - expect![[r#" - crate - iter: t - prelude: t - - crate::iter - Iterator: t - traits: t - - crate::iter::traits - Iterator: t - iterator: t - - crate::iter::traits::iterator - Iterator: t - - crate::prelude - Iterator: t - "#]], - ); -} - -#[test] -fn module_resolution_works_for_non_standard_filenames() { - check( - r#" -//- /my_library.rs crate:my_library -mod foo; -use self::foo::Bar; - -//- /foo/mod.rs -pub struct Bar; -"#, - expect![[r#" - crate - Bar: t v - foo: t - - crate::foo - Bar: t v - "#]], - ); -} - -#[test] -fn module_resolution_works_for_raw_modules() { - check( - r#" -//- /lib.rs -mod r#async; -use self::r#async::Bar; - -//- /async.rs -pub struct Bar; -"#, - expect![[r#" - crate - Bar: t v - async: t - - crate::async - Bar: t v - "#]], - ); -} - -#[test] -fn module_resolution_decl_path() { - check( - r#" -//- /lib.rs -#[path = "bar/baz/foo.rs"] -mod foo; -use self::foo::Bar; - -//- /bar/baz/foo.rs -pub struct Bar; -"#, - expect![[r#" - crate - Bar: t v - foo: t - - crate::foo - Bar: t v - "#]], - ); -} - -#[test] -fn module_resolution_module_with_path_in_mod_rs() { - check( - r#" -//- /main.rs -mod foo; - -//- /foo/mod.rs -#[path = "baz.rs"] -pub mod bar; -use self::bar::Baz; - -//- /foo/baz.rs -pub struct Baz; -"#, - expect![[r#" - crate - foo: t - - crate::foo - Baz: t v - bar: t - - crate::foo::bar - Baz: t v - "#]], - ); -} - -#[test] -fn module_resolution_module_with_path_non_crate_root() { - check( - r#" -//- /main.rs -mod foo; - -//- /foo.rs -#[path = "baz.rs"] -pub mod bar; -use self::bar::Baz; - -//- /baz.rs -pub struct Baz; -"#, - expect![[r#" - crate - foo: t - - crate::foo - Baz: t v - bar: t - - crate::foo::bar - Baz: t v - "#]], - ); -} - -#[test] -fn module_resolution_module_decl_path_super() { - check( - r#" -//- /main.rs -#[path = "bar/baz/module.rs"] -mod foo; -pub struct Baz; - -//- /bar/baz/module.rs -use super::Baz; -"#, - 
expect![[r#" - crate - Baz: t v - foo: t - - crate::foo - Baz: t v - "#]], - ); -} - -#[test] -fn module_resolution_explicit_path_mod_rs() { - check( - r#" -//- /main.rs -#[path = "module/mod.rs"] -mod foo; - -//- /module/mod.rs -pub struct Baz; -"#, - expect![[r#" - crate - foo: t - - crate::foo - Baz: t v - "#]], - ); -} - -#[test] -fn module_resolution_relative_path() { - check( - r#" -//- /main.rs -mod foo; - -//- /foo.rs -#[path = "./sub.rs"] -pub mod foo_bar; - -//- /sub.rs -pub struct Baz; -"#, - expect![[r#" - crate - foo: t - - crate::foo - foo_bar: t - - crate::foo::foo_bar - Baz: t v - "#]], - ); -} - -#[test] -fn module_resolution_relative_path_2() { - check( - r#" -//- /main.rs -mod foo; - -//- /foo/mod.rs -#[path="../sub.rs"] -pub mod foo_bar; - -//- /sub.rs -pub struct Baz; -"#, - expect![[r#" - crate - foo: t - - crate::foo - foo_bar: t - - crate::foo::foo_bar - Baz: t v - "#]], - ); -} - -#[test] -fn module_resolution_relative_path_outside_root() { - check( - r#" -//- /main.rs -#[path="../../../../../outside.rs"] -mod foo; -"#, - expect![[r#" - crate - "#]], - ); -} - -#[test] -fn module_resolution_explicit_path_mod_rs_2() { - check( - r#" -//- /main.rs -#[path = "module/bar/mod.rs"] -mod foo; - -//- /module/bar/mod.rs -pub struct Baz; -"#, - expect![[r#" - crate - foo: t - - crate::foo - Baz: t v - "#]], - ); -} - -#[test] -fn module_resolution_explicit_path_mod_rs_with_win_separator() { - check( - r#" -//- /main.rs -#[path = "module\bar\mod.rs"] -mod foo; - -//- /module/bar/mod.rs -pub struct Baz; -"#, - expect![[r#" - crate - foo: t - - crate::foo - Baz: t v - "#]], - ); -} - -#[test] -fn module_resolution_decl_inside_inline_module_with_path_attribute() { - check( - r#" -//- /main.rs -#[path = "models"] -mod foo { mod bar; } - -//- /models/bar.rs -pub struct Baz; -"#, - expect![[r#" - crate - foo: t - - crate::foo - bar: t - - crate::foo::bar - Baz: t v - "#]], - ); -} - -#[test] -fn module_resolution_decl_inside_inline_module() { - check( - r#" 
-//- /main.rs -mod foo { mod bar; } - -//- /foo/bar.rs -pub struct Baz; -"#, - expect![[r#" - crate - foo: t - - crate::foo - bar: t - - crate::foo::bar - Baz: t v - "#]], - ); -} - -#[test] -fn module_resolution_decl_inside_inline_module_2_with_path_attribute() { - check( - r#" -//- /main.rs -#[path = "models/db"] -mod foo { mod bar; } - -//- /models/db/bar.rs -pub struct Baz; -"#, - expect![[r#" - crate - foo: t - - crate::foo - bar: t - - crate::foo::bar - Baz: t v - "#]], - ); -} - -#[test] -fn module_resolution_decl_inside_inline_module_3() { - check( - r#" -//- /main.rs -#[path = "models/db"] -mod foo { - #[path = "users.rs"] - mod bar; -} - -//- /models/db/users.rs -pub struct Baz; -"#, - expect![[r#" - crate - foo: t - - crate::foo - bar: t - - crate::foo::bar - Baz: t v - "#]], - ); -} - -#[test] -fn module_resolution_decl_inside_inline_module_empty_path() { - check( - r#" -//- /main.rs -#[path = ""] -mod foo { - #[path = "users.rs"] - mod bar; -} - -//- /users.rs -pub struct Baz; -"#, - expect![[r#" - crate - foo: t - - crate::foo - bar: t - - crate::foo::bar - Baz: t v - "#]], - ); -} - -#[test] -fn module_resolution_decl_empty_path() { - check( - r#" -//- /main.rs -#[path = ""] // Should try to read `/` (a directory) -mod foo; - -//- /foo.rs -pub struct Baz; -"#, - expect![[r#" - crate - "#]], - ); -} - -#[test] -fn module_resolution_decl_inside_inline_module_relative_path() { - check( - r#" -//- /main.rs -#[path = "./models"] -mod foo { mod bar; } - -//- /models/bar.rs -pub struct Baz; -"#, - expect![[r#" - crate - foo: t - - crate::foo - bar: t - - crate::foo::bar - Baz: t v - "#]], - ); -} - -#[test] -fn module_resolution_decl_inside_inline_module_in_crate_root() { - check( - r#" -//- /main.rs -mod foo { - #[path = "baz.rs"] - mod bar; -} -use self::foo::bar::Baz; - -//- /foo/baz.rs -pub struct Baz; -"#, - expect![[r#" - crate - Baz: t v - foo: t - - crate::foo - bar: t - - crate::foo::bar - Baz: t v - "#]], - ); -} - -#[test] -fn 
module_resolution_decl_inside_inline_module_in_mod_rs() { - check( - r#" -//- /main.rs -mod foo; - -//- /foo/mod.rs -mod bar { - #[path = "qwe.rs"] - pub mod baz; -} -use self::bar::baz::Baz; - -//- /foo/bar/qwe.rs -pub struct Baz; -"#, - expect![[r#" - crate - foo: t - - crate::foo - Baz: t v - bar: t - - crate::foo::bar - baz: t - - crate::foo::bar::baz - Baz: t v - "#]], - ); -} - -#[test] -fn module_resolution_decl_inside_inline_module_in_non_crate_root() { - check( - r#" -//- /main.rs -mod foo; - -//- /foo.rs -mod bar { - #[path = "qwe.rs"] - pub mod baz; -} -use self::bar::baz::Baz; - -//- /foo/bar/qwe.rs -pub struct Baz; -"#, - expect![[r#" - crate - foo: t - - crate::foo - Baz: t v - bar: t - - crate::foo::bar - baz: t - - crate::foo::bar::baz - Baz: t v - "#]], - ); -} - -#[test] -fn module_resolution_decl_inside_inline_module_in_non_crate_root_2() { - check( - r#" -//- /main.rs -mod foo; - -//- /foo.rs -#[path = "bar"] -mod bar { - pub mod baz; -} -use self::bar::baz::Baz; - -//- /bar/baz.rs -pub struct Baz; -"#, - expect![[r#" - crate - foo: t - - crate::foo - Baz: t v - bar: t - - crate::foo::bar - baz: t - - crate::foo::bar::baz - Baz: t v - "#]], - ); -} - -#[test] -fn unresolved_module_diagnostics() { - let db = TestDB::with_files( - r" - //- /lib.rs - mod foo; - mod bar; - mod baz {} - //- /foo.rs - ", - ); - let krate = db.test_crate(); - - let crate_def_map = db.crate_def_map(krate); - - expect![[r#" - [ - UnresolvedModule { - module: Idx::(0), - declaration: InFile { - file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), - ), - value: FileAstId::(1), - }, - candidate: "bar.rs", - }, - ] - "#]] - .assert_debug_eq(&crate_def_map.diagnostics); -} - -#[test] -fn module_resolution_decl_inside_module_in_non_crate_root_2() { - check( - r#" -//- /main.rs -#[path="module/m2.rs"] -mod module; - -//- /module/m2.rs -pub mod submod; - -//- /module/submod.rs -pub struct Baz; -"#, - expect![[r#" - crate - module: t - - crate::module - submod: t - - 
crate::module::submod - Baz: t v - "#]], - ); -} - -#[test] -fn nested_out_of_line_module() { - check( - r#" -//- /lib.rs -mod a { - mod b { - mod c; - } -} - -//- /a/b/c.rs -struct X; -"#, - expect![[r#" - crate - a: t - - crate::a - b: t - - crate::a::b - c: t - - crate::a::b::c - X: t v - "#]], - ); -} - -#[test] -fn nested_out_of_line_module_with_path() { - check( - r#" -//- /lib.rs -mod a { - #[path = "d/e"] - mod b { - mod c; - } -} - -//- /a/d/e/c.rs -struct X; -"#, - expect![[r#" - crate - a: t - - crate::a - b: t - - crate::a::b - c: t - - crate::a::b::c - X: t v - "#]], - ); -} diff --git a/crates/ra_hir_def/src/path.rs b/crates/ra_hir_def/src/path.rs deleted file mode 100644 index cc1726e9e0..0000000000 --- a/crates/ra_hir_def/src/path.rs +++ /dev/null @@ -1,351 +0,0 @@ -//! A desugared representation of paths like `crate::foo` or `::bar`. -mod lower; - -use std::{ - fmt::{self, Display}, - iter, - sync::Arc, -}; - -use crate::body::LowerCtx; -use hir_expand::{ - hygiene::Hygiene, - name::{AsName, Name}, -}; -use ra_db::CrateId; -use ra_syntax::ast; - -use crate::{ - type_ref::{TypeBound, TypeRef}, - InFile, -}; - -#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub struct ModPath { - pub kind: PathKind, - pub segments: Vec, -} - -#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub enum PathKind { - Plain, - /// `self::` is `Super(0)` - Super(u8), - Crate, - /// Absolute path (::foo) - Abs, - /// `$crate` from macro expansion - DollarCrate(CrateId), -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum ImportAlias { - /// Unnamed alias, as in `use Foo as _;` - Underscore, - /// Named alias - Alias(Name), -} - -impl ModPath { - pub fn from_src(path: ast::Path, hygiene: &Hygiene) -> Option { - lower::lower_path(path, hygiene).map(|it| it.mod_path) - } - - pub fn from_segments(kind: PathKind, segments: impl IntoIterator) -> ModPath { - let segments = segments.into_iter().collect::>(); - ModPath { kind, segments } - } - - 
pub(crate) fn from_name_ref(name_ref: &ast::NameRef) -> ModPath { - name_ref.as_name().into() - } - - /// Converts an `tt::Ident` into a single-identifier `Path`. - pub(crate) fn from_tt_ident(ident: &tt::Ident) -> ModPath { - ident.as_name().into() - } - - /// Calls `cb` with all paths, represented by this use item. - pub(crate) fn expand_use_item( - item_src: InFile, - hygiene: &Hygiene, - mut cb: impl FnMut(ModPath, &ast::UseTree, /* is_glob */ bool, Option), - ) { - if let Some(tree) = item_src.value.use_tree() { - lower::lower_use_tree(None, tree, hygiene, &mut cb); - } - } - - /// Returns the number of segments in the path (counting special segments like `$crate` and - /// `super`). - pub fn len(&self) -> usize { - self.segments.len() - + match self.kind { - PathKind::Plain => 0, - PathKind::Super(i) => i as usize, - PathKind::Crate => 1, - PathKind::Abs => 0, - PathKind::DollarCrate(_) => 1, - } - } - - pub fn is_ident(&self) -> bool { - self.kind == PathKind::Plain && self.segments.len() == 1 - } - - pub fn is_self(&self) -> bool { - self.kind == PathKind::Super(0) && self.segments.is_empty() - } - - /// If this path is a single identifier, like `foo`, return its name. - pub fn as_ident(&self) -> Option<&Name> { - if self.kind != PathKind::Plain || self.segments.len() > 1 { - return None; - } - self.segments.first() - } -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct Path { - /// Type based path like `::foo`. - /// Note that paths like `::foo` are desugard to `Trait::::foo`. - type_anchor: Option>, - mod_path: ModPath, - /// Invariant: the same len as `self.mod_path.segments` - generic_args: Vec>>, -} - -/// Generic arguments to a path segment (e.g. the `i32` in `Option`). This -/// also includes bindings of associated types, like in `Iterator`. -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct GenericArgs { - pub args: Vec, - /// This specifies whether the args contain a Self type as the first - /// element. 
This is the case for path segments like ``, where - /// `T` is actually a type parameter for the path `Trait` specifying the - /// Self type. Otherwise, when we have a path `Trait`, the Self type - /// is left out. - pub has_self_type: bool, - /// Associated type bindings like in `Iterator`. - pub bindings: Vec, -} - -/// An associated type binding like in `Iterator`. -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct AssociatedTypeBinding { - /// The name of the associated type. - pub name: Name, - /// The type bound to this associated type (in `Item = T`, this would be the - /// `T`). This can be `None` if there are bounds instead. - pub type_ref: Option, - /// Bounds for the associated type, like in `Iterator`. (This is the unstable `associated_type_bounds` - /// feature.) - pub bounds: Vec, -} - -/// A single generic argument. -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum GenericArg { - Type(TypeRef), - // or lifetime... -} - -impl Path { - /// Converts an `ast::Path` to `Path`. Works with use trees. - #[deprecated = "Doesn't handle hygiene, don't add new calls, remove old ones"] - pub fn from_ast(path: ast::Path) -> Option { - lower::lower_path(path, &Hygiene::new_unhygienic()) - } - - /// Converts an `ast::Path` to `Path`. Works with use trees. - /// It correctly handles `$crate` based path from macro call. - pub fn from_src(path: ast::Path, hygiene: &Hygiene) -> Option { - lower::lower_path(path, hygiene) - } - - /// Converts a known mod path to `Path`. 
- pub(crate) fn from_known_path( - path: ModPath, - generic_args: Vec>>, - ) -> Path { - Path { type_anchor: None, mod_path: path, generic_args } - } - - pub fn kind(&self) -> &PathKind { - &self.mod_path.kind - } - - pub fn type_anchor(&self) -> Option<&TypeRef> { - self.type_anchor.as_deref() - } - - pub fn segments(&self) -> PathSegments<'_> { - PathSegments { - segments: self.mod_path.segments.as_slice(), - generic_args: self.generic_args.as_slice(), - } - } - - pub fn mod_path(&self) -> &ModPath { - &self.mod_path - } - - pub fn qualifier(&self) -> Option { - if self.mod_path.is_ident() { - return None; - } - let res = Path { - type_anchor: self.type_anchor.clone(), - mod_path: ModPath { - kind: self.mod_path.kind.clone(), - segments: self.mod_path.segments[..self.mod_path.segments.len() - 1].to_vec(), - }, - generic_args: self.generic_args[..self.generic_args.len() - 1].to_vec(), - }; - Some(res) - } -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct PathSegment<'a> { - pub name: &'a Name, - pub args_and_bindings: Option<&'a GenericArgs>, -} - -pub struct PathSegments<'a> { - segments: &'a [Name], - generic_args: &'a [Option>], -} - -impl<'a> PathSegments<'a> { - pub const EMPTY: PathSegments<'static> = PathSegments { segments: &[], generic_args: &[] }; - pub fn is_empty(&self) -> bool { - self.len() == 0 - } - pub fn len(&self) -> usize { - self.segments.len() - } - pub fn first(&self) -> Option> { - self.get(0) - } - pub fn last(&self) -> Option> { - self.get(self.len().checked_sub(1)?) 
- } - pub fn get(&self, idx: usize) -> Option> { - assert_eq!(self.segments.len(), self.generic_args.len()); - let res = PathSegment { - name: self.segments.get(idx)?, - args_and_bindings: self.generic_args.get(idx).unwrap().as_ref().map(|it| &**it), - }; - Some(res) - } - pub fn skip(&self, len: usize) -> PathSegments<'a> { - assert_eq!(self.segments.len(), self.generic_args.len()); - PathSegments { segments: &self.segments[len..], generic_args: &self.generic_args[len..] } - } - pub fn take(&self, len: usize) -> PathSegments<'a> { - assert_eq!(self.segments.len(), self.generic_args.len()); - PathSegments { segments: &self.segments[..len], generic_args: &self.generic_args[..len] } - } - pub fn iter(&self) -> impl Iterator> { - self.segments.iter().zip(self.generic_args.iter()).map(|(name, args)| PathSegment { - name, - args_and_bindings: args.as_ref().map(|it| &**it), - }) - } -} - -impl GenericArgs { - pub(crate) fn from_ast(lower_ctx: &LowerCtx, node: ast::GenericArgList) -> Option { - lower::lower_generic_args(lower_ctx, node) - } - - pub(crate) fn empty() -> GenericArgs { - GenericArgs { args: Vec::new(), has_self_type: false, bindings: Vec::new() } - } -} - -impl From for Path { - fn from(name: Name) -> Path { - Path { - type_anchor: None, - mod_path: ModPath::from_segments(PathKind::Plain, iter::once(name)), - generic_args: vec![None], - } - } -} - -impl From for ModPath { - fn from(name: Name) -> ModPath { - ModPath::from_segments(PathKind::Plain, iter::once(name)) - } -} - -impl Display for ModPath { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let mut first_segment = true; - let mut add_segment = |s| -> fmt::Result { - if !first_segment { - f.write_str("::")?; - } - first_segment = false; - f.write_str(s)?; - Ok(()) - }; - match self.kind { - PathKind::Plain => {} - PathKind::Super(n) => { - if n == 0 { - add_segment("self")?; - } - for _ in 0..n { - add_segment("super")?; - } - } - PathKind::Crate => add_segment("crate")?, - 
PathKind::Abs => add_segment("")?, - PathKind::DollarCrate(_) => add_segment("$crate")?, - } - for segment in &self.segments { - if !first_segment { - f.write_str("::")?; - } - first_segment = false; - write!(f, "{}", segment)?; - } - Ok(()) - } -} - -pub use hir_expand::name as __name; - -#[macro_export] -macro_rules! __known_path { - (core::iter::IntoIterator) => {}; - (core::result::Result) => {}; - (core::ops::Range) => {}; - (core::ops::RangeFrom) => {}; - (core::ops::RangeFull) => {}; - (core::ops::RangeTo) => {}; - (core::ops::RangeToInclusive) => {}; - (core::ops::RangeInclusive) => {}; - (core::future::Future) => {}; - (core::ops::Try) => {}; - ($path:path) => { - compile_error!("Please register your known path in the path module") - }; -} - -#[macro_export] -macro_rules! __path { - ($start:ident $(:: $seg:ident)*) => ({ - $crate::__known_path!($start $(:: $seg)*); - $crate::path::ModPath::from_segments($crate::path::PathKind::Abs, vec![ - $crate::path::__name![$start], $($crate::path::__name![$seg],)* - ]) - }); -} - -pub use crate::__path as path; diff --git a/crates/ra_hir_def/src/path/lower.rs b/crates/ra_hir_def/src/path/lower.rs deleted file mode 100644 index d09fc66e4d..0000000000 --- a/crates/ra_hir_def/src/path/lower.rs +++ /dev/null @@ -1,215 +0,0 @@ -//! Transforms syntax into `Path` objects, ideally with accounting for hygiene - -mod lower_use; - -use std::sync::Arc; - -use either::Either; -use hir_expand::{ - hygiene::Hygiene, - name::{name, AsName}, -}; -use ra_syntax::ast::{self, AstNode, TypeBoundsOwner}; - -use super::AssociatedTypeBinding; -use crate::{ - body::LowerCtx, - path::{GenericArg, GenericArgs, ModPath, Path, PathKind}, - type_ref::{TypeBound, TypeRef}, -}; - -pub(super) use lower_use::lower_use_tree; - -/// Converts an `ast::Path` to `Path`. Works with use trees. -/// It correctly handles `$crate` based path from macro call. 
-pub(super) fn lower_path(mut path: ast::Path, hygiene: &Hygiene) -> Option { - let mut kind = PathKind::Plain; - let mut type_anchor = None; - let mut segments = Vec::new(); - let mut generic_args = Vec::new(); - let ctx = LowerCtx::with_hygiene(hygiene); - loop { - let segment = path.segment()?; - - if segment.coloncolon_token().is_some() { - kind = PathKind::Abs; - } - - match segment.kind()? { - ast::PathSegmentKind::Name(name_ref) => { - // FIXME: this should just return name - match hygiene.name_ref_to_name(name_ref) { - Either::Left(name) => { - let args = segment - .generic_arg_list() - .and_then(|it| lower_generic_args(&ctx, it)) - .or_else(|| { - lower_generic_args_from_fn_path( - &ctx, - segment.param_list(), - segment.ret_type(), - ) - }) - .map(Arc::new); - segments.push(name); - generic_args.push(args) - } - Either::Right(crate_id) => { - kind = PathKind::DollarCrate(crate_id); - break; - } - } - } - ast::PathSegmentKind::Type { type_ref, trait_ref } => { - assert!(path.qualifier().is_none()); // this can only occur at the first segment - - let self_type = TypeRef::from_ast(&ctx, type_ref?); - - match trait_ref { - // ::foo - None => { - type_anchor = Some(Box::new(self_type)); - kind = PathKind::Plain; - } - // >::Foo desugars to Trait::Foo - Some(trait_ref) => { - let path = Path::from_src(trait_ref.path()?, hygiene)?; - kind = path.mod_path.kind; - - let mut prefix_segments = path.mod_path.segments; - prefix_segments.reverse(); - segments.extend(prefix_segments); - - let mut prefix_args = path.generic_args; - prefix_args.reverse(); - generic_args.extend(prefix_args); - - // Insert the type reference (T in the above example) as Self parameter for the trait - let last_segment = generic_args.last_mut()?; - if last_segment.is_none() { - *last_segment = Some(Arc::new(GenericArgs::empty())); - }; - let args = last_segment.as_mut().unwrap(); - let mut args_inner = Arc::make_mut(args); - args_inner.has_self_type = true; - args_inner.args.insert(0, 
GenericArg::Type(self_type)); - } - } - } - ast::PathSegmentKind::CrateKw => { - kind = PathKind::Crate; - break; - } - ast::PathSegmentKind::SelfKw => { - kind = PathKind::Super(0); - break; - } - ast::PathSegmentKind::SuperKw => { - let nested_super_count = if let PathKind::Super(n) = kind { n } else { 0 }; - kind = PathKind::Super(nested_super_count + 1); - } - } - path = match qualifier(&path) { - Some(it) => it, - None => break, - }; - } - segments.reverse(); - generic_args.reverse(); - - // handle local_inner_macros : - // Basically, even in rustc it is quite hacky: - // https://github.com/rust-lang/rust/blob/614f273e9388ddd7804d5cbc80b8865068a3744e/src/librustc_resolve/macros.rs#L456 - // We follow what it did anyway :) - if segments.len() == 1 && kind == PathKind::Plain { - if let Some(macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) { - if macro_call.is_bang() { - if let Some(crate_id) = hygiene.local_inner_macros() { - kind = PathKind::DollarCrate(crate_id); - } - } - } - } - - let mod_path = ModPath { kind, segments }; - return Some(Path { type_anchor, mod_path, generic_args }); - - fn qualifier(path: &ast::Path) -> Option { - if let Some(q) = path.qualifier() { - return Some(q); - } - // FIXME: this bottom up traversal is not too precise. - // Should we handle do a top-down analysis, recording results? 
- let use_tree_list = path.syntax().ancestors().find_map(ast::UseTreeList::cast)?; - let use_tree = use_tree_list.parent_use_tree(); - use_tree.path() - } -} - -pub(super) fn lower_generic_args( - lower_ctx: &LowerCtx, - node: ast::GenericArgList, -) -> Option { - let mut args = Vec::new(); - let mut bindings = Vec::new(); - for generic_arg in node.generic_args() { - match generic_arg { - ast::GenericArg::TypeArg(type_arg) => { - let type_ref = TypeRef::from_ast_opt(lower_ctx, type_arg.ty()); - args.push(GenericArg::Type(type_ref)); - } - ast::GenericArg::AssocTypeArg(assoc_type_arg) => { - if let Some(name_ref) = assoc_type_arg.name_ref() { - let name = name_ref.as_name(); - let type_ref = assoc_type_arg.ty().map(|it| TypeRef::from_ast(lower_ctx, it)); - let bounds = if let Some(l) = assoc_type_arg.type_bound_list() { - l.bounds().map(|it| TypeBound::from_ast(lower_ctx, it)).collect() - } else { - Vec::new() - }; - bindings.push(AssociatedTypeBinding { name, type_ref, bounds }); - } - } - // Lifetimes and constants are ignored for now. - ast::GenericArg::LifetimeArg(_) | ast::GenericArg::ConstArg(_) => (), - } - } - - if args.is_empty() && bindings.is_empty() { - return None; - } - Some(GenericArgs { args, has_self_type: false, bindings }) -} - -/// Collect `GenericArgs` from the parts of a fn-like path, i.e. `Fn(X, Y) -/// -> Z` (which desugars to `Fn<(X, Y), Output=Z>`). 
-fn lower_generic_args_from_fn_path( - ctx: &LowerCtx, - params: Option, - ret_type: Option, -) -> Option { - let mut args = Vec::new(); - let mut bindings = Vec::new(); - if let Some(params) = params { - let mut param_types = Vec::new(); - for param in params.params() { - let type_ref = TypeRef::from_ast_opt(&ctx, param.ty()); - param_types.push(type_ref); - } - let arg = GenericArg::Type(TypeRef::Tuple(param_types)); - args.push(arg); - } - if let Some(ret_type) = ret_type { - let type_ref = TypeRef::from_ast_opt(&ctx, ret_type.ty()); - bindings.push(AssociatedTypeBinding { - name: name![Output], - type_ref: Some(type_ref), - bounds: Vec::new(), - }); - } - if args.is_empty() && bindings.is_empty() { - None - } else { - Some(GenericArgs { args, has_self_type: false, bindings }) - } -} diff --git a/crates/ra_hir_def/src/path/lower/lower_use.rs b/crates/ra_hir_def/src/path/lower/lower_use.rs deleted file mode 100644 index 794be45e89..0000000000 --- a/crates/ra_hir_def/src/path/lower/lower_use.rs +++ /dev/null @@ -1,120 +0,0 @@ -//! Lowers a single complex use like `use foo::{bar, baz};` into a list of paths like -//! `foo::bar`, `foo::baz`; - -use std::iter; - -use either::Either; -use hir_expand::{hygiene::Hygiene, name::AsName}; -use ra_syntax::ast::{self, NameOwner}; -use test_utils::mark; - -use crate::path::{ImportAlias, ModPath, PathKind}; - -pub(crate) fn lower_use_tree( - prefix: Option, - tree: ast::UseTree, - hygiene: &Hygiene, - cb: &mut dyn FnMut(ModPath, &ast::UseTree, bool, Option), -) { - if let Some(use_tree_list) = tree.use_tree_list() { - let prefix = match tree.path() { - // E.g. use something::{{{inner}}}; - None => prefix, - // E.g. 
`use something::{inner}` (prefix is `None`, path is `something`) - // or `use something::{path::{inner::{innerer}}}` (prefix is `something::path`, path is `inner`) - Some(path) => match convert_path(prefix, path, hygiene) { - Some(it) => Some(it), - None => return, // FIXME: report errors somewhere - }, - }; - for child_tree in use_tree_list.use_trees() { - lower_use_tree(prefix.clone(), child_tree, hygiene, cb); - } - } else { - let alias = tree.rename().map(|a| { - a.name().map(|it| it.as_name()).map_or(ImportAlias::Underscore, ImportAlias::Alias) - }); - let is_glob = tree.star_token().is_some(); - if let Some(ast_path) = tree.path() { - // Handle self in a path. - // E.g. `use something::{self, <...>}` - if ast_path.qualifier().is_none() { - if let Some(segment) = ast_path.segment() { - if segment.kind() == Some(ast::PathSegmentKind::SelfKw) { - if let Some(prefix) = prefix { - cb(prefix, &tree, false, alias); - return; - } - } - } - } - if let Some(path) = convert_path(prefix, ast_path, hygiene) { - cb(path, &tree, is_glob, alias) - } - // FIXME: report errors somewhere - // We get here if we do - } else if is_glob { - mark::hit!(glob_enum_group); - if let Some(prefix) = prefix { - cb(prefix, &tree, is_glob, None) - } - } - } -} - -fn convert_path(prefix: Option, path: ast::Path, hygiene: &Hygiene) -> Option { - let prefix = if let Some(qual) = path.qualifier() { - Some(convert_path(prefix, qual, hygiene)?) - } else { - prefix - }; - - let segment = path.segment()?; - let res = match segment.kind()? 
{ - ast::PathSegmentKind::Name(name_ref) => { - match hygiene.name_ref_to_name(name_ref) { - Either::Left(name) => { - // no type args in use - let mut res = prefix.unwrap_or_else(|| ModPath { - kind: PathKind::Plain, - segments: Vec::with_capacity(1), - }); - res.segments.push(name); - res - } - Either::Right(crate_id) => { - return Some(ModPath::from_segments( - PathKind::DollarCrate(crate_id), - iter::empty(), - )) - } - } - } - ast::PathSegmentKind::CrateKw => { - if prefix.is_some() { - return None; - } - ModPath::from_segments(PathKind::Crate, iter::empty()) - } - ast::PathSegmentKind::SelfKw => { - if prefix.is_some() { - return None; - } - ModPath::from_segments(PathKind::Super(0), iter::empty()) - } - ast::PathSegmentKind::SuperKw => { - let nested_super_count = match prefix.map(|p| p.kind) { - Some(PathKind::Super(n)) => n, - Some(_) => return None, - None => 0, - }; - - ModPath::from_segments(PathKind::Super(nested_super_count + 1), iter::empty()) - } - ast::PathSegmentKind::Type { .. } => { - // not allowed in imports - return None; - } - }; - Some(res) -} diff --git a/crates/ra_hir_def/src/resolver.rs b/crates/ra_hir_def/src/resolver.rs deleted file mode 100644 index 0bf51eb7b8..0000000000 --- a/crates/ra_hir_def/src/resolver.rs +++ /dev/null @@ -1,713 +0,0 @@ -//! Name resolution façade. 
-use std::sync::Arc; - -use hir_expand::{ - name::{name, Name}, - MacroDefId, -}; -use ra_db::CrateId; -use rustc_hash::FxHashSet; - -use crate::{ - body::scope::{ExprScopes, ScopeId}, - body::Body, - builtin_type::BuiltinType, - db::DefDatabase, - expr::{ExprId, PatId}, - generics::GenericParams, - item_scope::{BuiltinShadowMode, BUILTIN_SCOPE}, - nameres::CrateDefMap, - path::{ModPath, PathKind}, - per_ns::PerNs, - visibility::{RawVisibility, Visibility}, - AdtId, AssocContainerId, ConstId, ContainerId, DefWithBodyId, EnumId, EnumVariantId, - FunctionId, GenericDefId, HasModule, ImplId, LocalModuleId, Lookup, ModuleDefId, ModuleId, - StaticId, StructId, TraitId, TypeAliasId, TypeParamId, VariantId, -}; - -#[derive(Debug, Clone, Default)] -pub struct Resolver { - // FIXME: all usages generally call `.rev`, so maybe reverse once in consturciton? - scopes: Vec, -} - -// FIXME how to store these best -#[derive(Debug, Clone)] -struct ModuleItemMap { - crate_def_map: Arc, - module_id: LocalModuleId, -} - -#[derive(Debug, Clone)] -struct ExprScope { - owner: DefWithBodyId, - expr_scopes: Arc, - scope_id: ScopeId, -} - -#[derive(Debug, Clone)] -enum Scope { - /// All the items and imported names of a module - ModuleScope(ModuleItemMap), - /// Brings the generic parameters of an item into scope - GenericParams { def: GenericDefId, params: Arc }, - /// Brings `Self` in `impl` block into scope - ImplDefScope(ImplId), - /// Brings `Self` in enum, struct and union definitions into scope - AdtScope(AdtId), - /// Local bindings - ExprScope(ExprScope), - /// Temporary hack to support local items. - LocalItemsScope(Arc), -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum TypeNs { - SelfType(ImplId), - GenericParam(TypeParamId), - AdtId(AdtId), - AdtSelfType(AdtId), - // Yup, enum variants are added to the types ns, but any usage of variant as - // type is an error. 
- EnumVariantId(EnumVariantId), - TypeAliasId(TypeAliasId), - BuiltinType(BuiltinType), - TraitId(TraitId), - // Module belong to type ns, but the resolver is used when all module paths - // are fully resolved. - // ModuleId(ModuleId) -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum ResolveValueResult { - ValueNs(ValueNs), - Partial(TypeNs, usize), -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum ValueNs { - ImplSelf(ImplId), - LocalBinding(PatId), - FunctionId(FunctionId), - ConstId(ConstId), - StaticId(StaticId), - StructId(StructId), - EnumVariantId(EnumVariantId), -} - -impl Resolver { - /// Resolve known trait from std, like `std::futures::Future` - pub fn resolve_known_trait(&self, db: &dyn DefDatabase, path: &ModPath) -> Option { - let res = self.resolve_module_path(db, path, BuiltinShadowMode::Other).take_types()?; - match res { - ModuleDefId::TraitId(it) => Some(it), - _ => None, - } - } - - /// Resolve known struct from std, like `std::boxed::Box` - pub fn resolve_known_struct(&self, db: &dyn DefDatabase, path: &ModPath) -> Option { - let res = self.resolve_module_path(db, path, BuiltinShadowMode::Other).take_types()?; - match res { - ModuleDefId::AdtId(AdtId::StructId(it)) => Some(it), - _ => None, - } - } - - /// Resolve known enum from std, like `std::result::Result` - pub fn resolve_known_enum(&self, db: &dyn DefDatabase, path: &ModPath) -> Option { - let res = self.resolve_module_path(db, path, BuiltinShadowMode::Other).take_types()?; - match res { - ModuleDefId::AdtId(AdtId::EnumId(it)) => Some(it), - _ => None, - } - } - - fn resolve_module_path( - &self, - db: &dyn DefDatabase, - path: &ModPath, - shadow: BuiltinShadowMode, - ) -> PerNs { - let (item_map, module) = match self.module_scope() { - Some(it) => it, - None => return PerNs::none(), - }; - let (module_res, segment_index) = item_map.resolve_path(db, module, &path, shadow); - if segment_index.is_some() { - return PerNs::none(); - } - module_res - } - - pub fn 
resolve_module_path_in_items(&self, db: &dyn DefDatabase, path: &ModPath) -> PerNs { - self.resolve_module_path(db, path, BuiltinShadowMode::Module) - } - - pub fn resolve_path_in_type_ns( - &self, - db: &dyn DefDatabase, - path: &ModPath, - ) -> Option<(TypeNs, Option)> { - let first_name = path.segments.first()?; - let skip_to_mod = path.kind != PathKind::Plain; - for scope in self.scopes.iter().rev() { - match scope { - Scope::ExprScope(_) => continue, - Scope::GenericParams { .. } - | Scope::ImplDefScope(_) - | Scope::LocalItemsScope(_) - if skip_to_mod => - { - continue - } - - Scope::GenericParams { params, def } => { - if let Some(local_id) = params.find_by_name(first_name) { - let idx = if path.segments.len() == 1 { None } else { Some(1) }; - return Some(( - TypeNs::GenericParam(TypeParamId { local_id, parent: *def }), - idx, - )); - } - } - Scope::ImplDefScope(impl_) => { - if first_name == &name![Self] { - let idx = if path.segments.len() == 1 { None } else { Some(1) }; - return Some((TypeNs::SelfType(*impl_), idx)); - } - } - Scope::AdtScope(adt) => { - if first_name == &name![Self] { - let idx = if path.segments.len() == 1 { None } else { Some(1) }; - return Some((TypeNs::AdtSelfType(*adt), idx)); - } - } - Scope::ModuleScope(m) => { - let (module_def, idx) = m.crate_def_map.resolve_path( - db, - m.module_id, - &path, - BuiltinShadowMode::Other, - ); - let res = to_type_ns(module_def)?; - return Some((res, idx)); - } - Scope::LocalItemsScope(body) => { - let def = body.item_scope.get(first_name); - if let Some(res) = to_type_ns(def) { - return Some((res, None)); - } - } - } - } - return None; - fn to_type_ns(per_ns: PerNs) -> Option { - let res = match per_ns.take_types()? 
{ - ModuleDefId::AdtId(it) => TypeNs::AdtId(it), - ModuleDefId::EnumVariantId(it) => TypeNs::EnumVariantId(it), - - ModuleDefId::TypeAliasId(it) => TypeNs::TypeAliasId(it), - ModuleDefId::BuiltinType(it) => TypeNs::BuiltinType(it), - - ModuleDefId::TraitId(it) => TypeNs::TraitId(it), - - ModuleDefId::FunctionId(_) - | ModuleDefId::ConstId(_) - | ModuleDefId::StaticId(_) - | ModuleDefId::ModuleId(_) => return None, - }; - Some(res) - } - } - - pub fn resolve_path_in_type_ns_fully( - &self, - db: &dyn DefDatabase, - path: &ModPath, - ) -> Option { - let (res, unresolved) = self.resolve_path_in_type_ns(db, path)?; - if unresolved.is_some() { - return None; - } - Some(res) - } - - pub fn resolve_visibility( - &self, - db: &dyn DefDatabase, - visibility: &RawVisibility, - ) -> Option { - match visibility { - RawVisibility::Module(_) => { - let (item_map, module) = match self.module_scope() { - Some(it) => it, - None => return None, - }; - item_map.resolve_visibility(db, module, visibility) - } - RawVisibility::Public => Some(Visibility::Public), - } - } - - pub fn resolve_path_in_value_ns( - &self, - db: &dyn DefDatabase, - path: &ModPath, - ) -> Option { - let n_segments = path.segments.len(); - let tmp = name![self]; - let first_name = if path.is_self() { &tmp } else { &path.segments.first()? }; - let skip_to_mod = path.kind != PathKind::Plain && !path.is_self(); - for scope in self.scopes.iter().rev() { - match scope { - Scope::AdtScope(_) - | Scope::ExprScope(_) - | Scope::GenericParams { .. 
} - | Scope::ImplDefScope(_) - | Scope::LocalItemsScope(_) - if skip_to_mod => - { - continue - } - - Scope::ExprScope(scope) if n_segments <= 1 => { - let entry = scope - .expr_scopes - .entries(scope.scope_id) - .iter() - .find(|entry| entry.name() == first_name); - - if let Some(e) = entry { - return Some(ResolveValueResult::ValueNs(ValueNs::LocalBinding(e.pat()))); - } - } - Scope::ExprScope(_) => continue, - - Scope::GenericParams { params, def } if n_segments > 1 => { - if let Some(local_id) = params.find_by_name(first_name) { - let ty = TypeNs::GenericParam(TypeParamId { local_id, parent: *def }); - return Some(ResolveValueResult::Partial(ty, 1)); - } - } - Scope::GenericParams { .. } => continue, - - Scope::ImplDefScope(impl_) => { - if first_name == &name![Self] { - if n_segments > 1 { - let ty = TypeNs::SelfType(*impl_); - return Some(ResolveValueResult::Partial(ty, 1)); - } else { - return Some(ResolveValueResult::ValueNs(ValueNs::ImplSelf(*impl_))); - } - } - } - Scope::AdtScope(adt) => { - if n_segments == 1 { - // bare `Self` doesn't work in the value namespace in a struct/enum definition - continue; - } - if first_name == &name![Self] { - let ty = TypeNs::AdtSelfType(*adt); - return Some(ResolveValueResult::Partial(ty, 1)); - } - } - - Scope::ModuleScope(m) => { - let (module_def, idx) = m.crate_def_map.resolve_path( - db, - m.module_id, - &path, - BuiltinShadowMode::Other, - ); - return match idx { - None => { - let value = to_value_ns(module_def)?; - Some(ResolveValueResult::ValueNs(value)) - } - Some(idx) => { - let ty = match module_def.take_types()? 
{ - ModuleDefId::AdtId(it) => TypeNs::AdtId(it), - ModuleDefId::TraitId(it) => TypeNs::TraitId(it), - ModuleDefId::TypeAliasId(it) => TypeNs::TypeAliasId(it), - ModuleDefId::BuiltinType(it) => TypeNs::BuiltinType(it), - - ModuleDefId::ModuleId(_) - | ModuleDefId::FunctionId(_) - | ModuleDefId::EnumVariantId(_) - | ModuleDefId::ConstId(_) - | ModuleDefId::StaticId(_) => return None, - }; - Some(ResolveValueResult::Partial(ty, idx)) - } - }; - } - Scope::LocalItemsScope(body) => { - // we don't bother looking in the builtin scope here because there are no builtin values - let def = to_value_ns(body.item_scope.get(first_name)); - - if let Some(res) = def { - return Some(ResolveValueResult::ValueNs(res)); - } - } - } - } - return None; - - fn to_value_ns(per_ns: PerNs) -> Option { - let res = match per_ns.take_values()? { - ModuleDefId::FunctionId(it) => ValueNs::FunctionId(it), - ModuleDefId::AdtId(AdtId::StructId(it)) => ValueNs::StructId(it), - ModuleDefId::EnumVariantId(it) => ValueNs::EnumVariantId(it), - ModuleDefId::ConstId(it) => ValueNs::ConstId(it), - ModuleDefId::StaticId(it) => ValueNs::StaticId(it), - - ModuleDefId::AdtId(AdtId::EnumId(_)) - | ModuleDefId::AdtId(AdtId::UnionId(_)) - | ModuleDefId::TraitId(_) - | ModuleDefId::TypeAliasId(_) - | ModuleDefId::BuiltinType(_) - | ModuleDefId::ModuleId(_) => return None, - }; - Some(res) - } - } - - pub fn resolve_path_in_value_ns_fully( - &self, - db: &dyn DefDatabase, - path: &ModPath, - ) -> Option { - match self.resolve_path_in_value_ns(db, path)? { - ResolveValueResult::ValueNs(it) => Some(it), - ResolveValueResult::Partial(..) 
=> None, - } - } - - pub fn resolve_path_as_macro( - &self, - db: &dyn DefDatabase, - path: &ModPath, - ) -> Option { - // Search item scope legacy macro first - if let Some(def) = self.resolve_local_macro_def(path) { - return Some(def); - } - - let (item_map, module) = self.module_scope()?; - item_map.resolve_path(db, module, &path, BuiltinShadowMode::Other).0.take_macros() - } - - pub fn process_all_names(&self, db: &dyn DefDatabase, f: &mut dyn FnMut(Name, ScopeDef)) { - for scope in self.scopes.iter().rev() { - scope.process_names(db, f); - } - } - - pub fn traits_in_scope(&self, db: &dyn DefDatabase) -> FxHashSet { - let mut traits = FxHashSet::default(); - for scope in &self.scopes { - if let Scope::ModuleScope(m) = scope { - if let Some(prelude) = m.crate_def_map.prelude { - let prelude_def_map = db.crate_def_map(prelude.krate); - traits.extend(prelude_def_map[prelude.local_id].scope.traits()); - } - traits.extend(m.crate_def_map[m.module_id].scope.traits()); - } - } - traits - } - - fn module_scope(&self) -> Option<(&CrateDefMap, LocalModuleId)> { - self.scopes.iter().rev().find_map(|scope| match scope { - Scope::ModuleScope(m) => Some((&*m.crate_def_map, m.module_id)), - - _ => None, - }) - } - - fn resolve_local_macro_def(&self, path: &ModPath) -> Option { - let name = path.as_ident()?; - self.scopes.iter().rev().find_map(|scope| { - if let Scope::LocalItemsScope(body) = scope { - return body.item_scope.get_legacy_macro(name); - } - None - }) - } - - pub fn module(&self) -> Option { - let (def_map, local_id) = self.module_scope()?; - Some(ModuleId { krate: def_map.krate, local_id }) - } - - pub fn krate(&self) -> Option { - self.module_scope().map(|t| t.0.krate) - } - - pub fn where_predicates_in_scope<'a>( - &'a self, - ) -> impl Iterator + 'a { - self.scopes - .iter() - .rev() - .filter_map(|scope| match scope { - Scope::GenericParams { params, .. 
} => Some(params), - _ => None, - }) - .flat_map(|params| params.where_predicates.iter()) - } - - pub fn generic_def(&self) -> Option { - self.scopes.iter().rev().find_map(|scope| match scope { - Scope::GenericParams { def, .. } => Some(*def), - _ => None, - }) - } - - pub fn body_owner(&self) -> Option { - self.scopes.iter().rev().find_map(|scope| match scope { - Scope::ExprScope(it) => Some(it.owner), - _ => None, - }) - } -} - -pub enum ScopeDef { - PerNs(PerNs), - ImplSelfType(ImplId), - AdtSelfType(AdtId), - GenericParam(TypeParamId), - Local(PatId), -} - -impl Scope { - fn process_names(&self, db: &dyn DefDatabase, f: &mut dyn FnMut(Name, ScopeDef)) { - match self { - Scope::ModuleScope(m) => { - // FIXME: should we provide `self` here? - // f( - // Name::self_param(), - // PerNs::types(Resolution::Def { - // def: m.module.into(), - // }), - // ); - m.crate_def_map[m.module_id].scope.entries().for_each(|(name, def)| { - f(name.clone(), ScopeDef::PerNs(def)); - }); - m.crate_def_map[m.module_id].scope.legacy_macros().for_each(|(name, macro_)| { - f(name.clone(), ScopeDef::PerNs(PerNs::macros(macro_, Visibility::Public))); - }); - m.crate_def_map.extern_prelude.iter().for_each(|(name, &def)| { - f(name.clone(), ScopeDef::PerNs(PerNs::types(def, Visibility::Public))); - }); - BUILTIN_SCOPE.iter().for_each(|(name, &def)| { - f(name.clone(), ScopeDef::PerNs(def)); - }); - if let Some(prelude) = m.crate_def_map.prelude { - let prelude_def_map = db.crate_def_map(prelude.krate); - prelude_def_map[prelude.local_id].scope.entries().for_each(|(name, def)| { - f(name.clone(), ScopeDef::PerNs(def)); - }); - } - } - Scope::LocalItemsScope(body) => body.item_scope.entries().for_each(|(name, def)| { - f(name.clone(), ScopeDef::PerNs(def)); - }), - Scope::GenericParams { params, def } => { - for (local_id, param) in params.types.iter() { - if let Some(name) = ¶m.name { - f( - name.clone(), - ScopeDef::GenericParam(TypeParamId { local_id, parent: *def }), - ) - } - } - } - 
Scope::ImplDefScope(i) => { - f(name![Self], ScopeDef::ImplSelfType(*i)); - } - Scope::AdtScope(i) => { - f(name![Self], ScopeDef::AdtSelfType(*i)); - } - Scope::ExprScope(scope) => { - scope.expr_scopes.entries(scope.scope_id).iter().for_each(|e| { - f(e.name().clone(), ScopeDef::Local(e.pat())); - }); - } - } - } -} - -// needs arbitrary_self_types to be a method... or maybe move to the def? -pub fn resolver_for_expr(db: &dyn DefDatabase, owner: DefWithBodyId, expr_id: ExprId) -> Resolver { - let scopes = db.expr_scopes(owner); - resolver_for_scope(db, owner, scopes.scope_for(expr_id)) -} - -pub fn resolver_for_scope( - db: &dyn DefDatabase, - owner: DefWithBodyId, - scope_id: Option, -) -> Resolver { - let mut r = owner.resolver(db); - r = r.push_local_items_scope(db.body(owner)); - let scopes = db.expr_scopes(owner); - let scope_chain = scopes.scope_chain(scope_id).collect::>(); - for scope in scope_chain.into_iter().rev() { - r = r.push_expr_scope(owner, Arc::clone(&scopes), scope); - } - r -} - -impl Resolver { - fn push_scope(mut self, scope: Scope) -> Resolver { - self.scopes.push(scope); - self - } - - fn push_generic_params_scope(self, db: &dyn DefDatabase, def: GenericDefId) -> Resolver { - let params = db.generic_params(def); - self.push_scope(Scope::GenericParams { def, params }) - } - - fn push_impl_def_scope(self, impl_def: ImplId) -> Resolver { - self.push_scope(Scope::ImplDefScope(impl_def)) - } - - fn push_module_scope( - self, - crate_def_map: Arc, - module_id: LocalModuleId, - ) -> Resolver { - self.push_scope(Scope::ModuleScope(ModuleItemMap { crate_def_map, module_id })) - } - - fn push_local_items_scope(self, body: Arc) -> Resolver { - self.push_scope(Scope::LocalItemsScope(body)) - } - - fn push_expr_scope( - self, - owner: DefWithBodyId, - expr_scopes: Arc, - scope_id: ScopeId, - ) -> Resolver { - self.push_scope(Scope::ExprScope(ExprScope { owner, expr_scopes, scope_id })) - } -} - -pub trait HasResolver: Copy { - /// Builds a resolver for 
type references inside this def. - fn resolver(self, db: &dyn DefDatabase) -> Resolver; -} - -impl HasResolver for ModuleId { - fn resolver(self, db: &dyn DefDatabase) -> Resolver { - let def_map = db.crate_def_map(self.krate); - Resolver::default().push_module_scope(def_map, self.local_id) - } -} - -impl HasResolver for TraitId { - fn resolver(self, db: &dyn DefDatabase) -> Resolver { - self.lookup(db).container.resolver(db).push_generic_params_scope(db, self.into()) - } -} - -impl + Copy> HasResolver for T { - fn resolver(self, db: &dyn DefDatabase) -> Resolver { - let def = self.into(); - def.module(db) - .resolver(db) - .push_generic_params_scope(db, def.into()) - .push_scope(Scope::AdtScope(def)) - } -} - -impl HasResolver for FunctionId { - fn resolver(self, db: &dyn DefDatabase) -> Resolver { - self.lookup(db).container.resolver(db).push_generic_params_scope(db, self.into()) - } -} - -impl HasResolver for ConstId { - fn resolver(self, db: &dyn DefDatabase) -> Resolver { - self.lookup(db).container.resolver(db) - } -} - -impl HasResolver for StaticId { - fn resolver(self, db: &dyn DefDatabase) -> Resolver { - self.lookup(db).container.resolver(db) - } -} - -impl HasResolver for TypeAliasId { - fn resolver(self, db: &dyn DefDatabase) -> Resolver { - self.lookup(db).container.resolver(db).push_generic_params_scope(db, self.into()) - } -} - -impl HasResolver for ImplId { - fn resolver(self, db: &dyn DefDatabase) -> Resolver { - self.lookup(db) - .container - .resolver(db) - .push_generic_params_scope(db, self.into()) - .push_impl_def_scope(self) - } -} - -impl HasResolver for DefWithBodyId { - fn resolver(self, db: &dyn DefDatabase) -> Resolver { - match self { - DefWithBodyId::ConstId(c) => c.resolver(db), - DefWithBodyId::FunctionId(f) => f.resolver(db), - DefWithBodyId::StaticId(s) => s.resolver(db), - } - } -} - -impl HasResolver for ContainerId { - fn resolver(self, db: &dyn DefDatabase) -> Resolver { - match self { - ContainerId::ModuleId(it) => 
it.resolver(db), - ContainerId::DefWithBodyId(it) => it.module(db).resolver(db), - } - } -} - -impl HasResolver for AssocContainerId { - fn resolver(self, db: &dyn DefDatabase) -> Resolver { - match self { - AssocContainerId::ContainerId(it) => it.resolver(db), - AssocContainerId::TraitId(it) => it.resolver(db), - AssocContainerId::ImplId(it) => it.resolver(db), - } - } -} - -impl HasResolver for GenericDefId { - fn resolver(self, db: &dyn DefDatabase) -> Resolver { - match self { - GenericDefId::FunctionId(inner) => inner.resolver(db), - GenericDefId::AdtId(adt) => adt.resolver(db), - GenericDefId::TraitId(inner) => inner.resolver(db), - GenericDefId::TypeAliasId(inner) => inner.resolver(db), - GenericDefId::ImplId(inner) => inner.resolver(db), - GenericDefId::EnumVariantId(inner) => inner.parent.resolver(db), - GenericDefId::ConstId(inner) => inner.resolver(db), - } - } -} - -impl HasResolver for VariantId { - fn resolver(self, db: &dyn DefDatabase) -> Resolver { - match self { - VariantId::EnumVariantId(it) => it.parent.resolver(db), - VariantId::StructId(it) => it.resolver(db), - VariantId::UnionId(it) => it.resolver(db), - } - } -} diff --git a/crates/ra_hir_def/src/src.rs b/crates/ra_hir_def/src/src.rs deleted file mode 100644 index 043b93fad5..0000000000 --- a/crates/ra_hir_def/src/src.rs +++ /dev/null @@ -1,43 +0,0 @@ -//! Utilities for mapping between hir IDs and the surface syntax. 
- -use hir_expand::InFile; -use ra_arena::map::ArenaMap; - -use crate::{db::DefDatabase, item_tree::ItemTreeNode, AssocItemLoc, ItemLoc}; - -pub trait HasSource { - type Value; - fn source(&self, db: &dyn DefDatabase) -> InFile; -} - -impl HasSource for AssocItemLoc { - type Value = N::Source; - - fn source(&self, db: &dyn DefDatabase) -> InFile { - let tree = db.item_tree(self.id.file_id); - let ast_id_map = db.ast_id_map(self.id.file_id); - let root = db.parse_or_expand(self.id.file_id).unwrap(); - let node = &tree[self.id.value]; - - InFile::new(self.id.file_id, ast_id_map.get(node.ast_id()).to_node(&root)) - } -} - -impl HasSource for ItemLoc { - type Value = N::Source; - - fn source(&self, db: &dyn DefDatabase) -> InFile { - let tree = db.item_tree(self.id.file_id); - let ast_id_map = db.ast_id_map(self.id.file_id); - let root = db.parse_or_expand(self.id.file_id).unwrap(); - let node = &tree[self.id.value]; - - InFile::new(self.id.file_id, ast_id_map.get(node.ast_id()).to_node(&root)) - } -} - -pub trait HasChildSource { - type ChildId; - type Value; - fn child_source(&self, db: &dyn DefDatabase) -> InFile>; -} diff --git a/crates/ra_hir_def/src/test_db.rs b/crates/ra_hir_def/src/test_db.rs deleted file mode 100644 index 339f819b8b..0000000000 --- a/crates/ra_hir_def/src/test_db.rs +++ /dev/null @@ -1,101 +0,0 @@ -//! Database used for testing `hir_def`. 
- -use std::{ - fmt, panic, - sync::{Arc, Mutex}, -}; - -use hir_expand::db::AstDatabase; -use ra_db::{salsa, CrateId, FileId, FileLoader, FileLoaderDelegate, Upcast}; -use rustc_hash::FxHashSet; - -use crate::db::DefDatabase; - -#[salsa::database( - ra_db::SourceDatabaseExtStorage, - ra_db::SourceDatabaseStorage, - hir_expand::db::AstDatabaseStorage, - crate::db::InternDatabaseStorage, - crate::db::DefDatabaseStorage -)] -#[derive(Default)] -pub struct TestDB { - storage: salsa::Storage, - events: Mutex>>, -} - -impl Upcast for TestDB { - fn upcast(&self) -> &(dyn AstDatabase + 'static) { - &*self - } -} - -impl Upcast for TestDB { - fn upcast(&self) -> &(dyn DefDatabase + 'static) { - &*self - } -} - -impl salsa::Database for TestDB { - fn salsa_event(&self, event: salsa::Event) { - let mut events = self.events.lock().unwrap(); - if let Some(events) = &mut *events { - events.push(event); - } - } -} - -impl fmt::Debug for TestDB { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("TestDB").finish() - } -} - -impl panic::RefUnwindSafe for TestDB {} - -impl FileLoader for TestDB { - fn file_text(&self, file_id: FileId) -> Arc { - FileLoaderDelegate(self).file_text(file_id) - } - fn resolve_path(&self, anchor: FileId, path: &str) -> Option { - FileLoaderDelegate(self).resolve_path(anchor, path) - } - fn relevant_crates(&self, file_id: FileId) -> Arc> { - FileLoaderDelegate(self).relevant_crates(file_id) - } -} - -impl TestDB { - pub fn module_for_file(&self, file_id: FileId) -> crate::ModuleId { - for &krate in self.relevant_crates(file_id).iter() { - let crate_def_map = self.crate_def_map(krate); - for (local_id, data) in crate_def_map.modules.iter() { - if data.origin.file_id() == Some(file_id) { - return crate::ModuleId { krate, local_id }; - } - } - } - panic!("Can't find module for file") - } - - pub fn log(&self, f: impl FnOnce()) -> Vec { - *self.events.lock().unwrap() = Some(Vec::new()); - f(); - 
self.events.lock().unwrap().take().unwrap() - } - - pub fn log_executed(&self, f: impl FnOnce()) -> Vec { - let events = self.log(f); - events - .into_iter() - .filter_map(|e| match e.kind { - // This pretty horrible, but `Debug` is the only way to inspect - // QueryDescriptor at the moment. - salsa::EventKind::WillExecute { database_key } => { - Some(format!("{:?}", database_key.debug(self))) - } - _ => None, - }) - .collect() - } -} diff --git a/crates/ra_hir_def/src/trace.rs b/crates/ra_hir_def/src/trace.rs deleted file mode 100644 index ced07577dc..0000000000 --- a/crates/ra_hir_def/src/trace.rs +++ /dev/null @@ -1,51 +0,0 @@ -//! Trace is a pretty niche data structure which is used when lowering a CST -//! into HIR. -//! -//! Lowering process calculates two bits of information: -//! * the lowered syntax itself -//! * a mapping between lowered syntax and original syntax -//! -//! Due to the way salsa works, the mapping is usually hot lava, as it contains -//! absolute offsets. The `Trace` structure (inspired, at least in name, by -//! Kotlin's `BindingTrace`) allows use the same code to compute both -//! projections. 
-use ra_arena::{map::ArenaMap, Arena, Idx, RawId}; - -pub(crate) struct Trace { - arena: Option>, - map: Option, V>>, - len: u32, -} - -impl Trace { - pub(crate) fn new_for_arena() -> Trace { - Trace { arena: Some(Arena::default()), map: None, len: 0 } - } - - pub(crate) fn new_for_map() -> Trace { - Trace { arena: None, map: Some(ArenaMap::default()), len: 0 } - } - - pub(crate) fn alloc(&mut self, value: impl FnOnce() -> V, data: impl FnOnce() -> T) -> Idx { - let id = if let Some(arena) = &mut self.arena { - arena.alloc(data()) - } else { - let id = Idx::::from_raw(RawId::from(self.len)); - self.len += 1; - id - }; - - if let Some(map) = &mut self.map { - map.insert(id, value()); - } - id - } - - pub(crate) fn into_arena(mut self) -> Arena { - self.arena.take().unwrap() - } - - pub(crate) fn into_map(mut self) -> ArenaMap, V> { - self.map.take().unwrap() - } -} diff --git a/crates/ra_hir_def/src/type_ref.rs b/crates/ra_hir_def/src/type_ref.rs deleted file mode 100644 index 6f7884ffe5..0000000000 --- a/crates/ra_hir_def/src/type_ref.rs +++ /dev/null @@ -1,245 +0,0 @@ -//! HIR for references to types. Paths in these are not yet resolved. They can -//! be directly created from an ast::TypeRef, without further queries. 
-use ra_syntax::ast::{self}; - -use crate::{body::LowerCtx, path::Path}; - -#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] -pub enum Mutability { - Shared, - Mut, -} - -impl Mutability { - pub fn from_mutable(mutable: bool) -> Mutability { - if mutable { - Mutability::Mut - } else { - Mutability::Shared - } - } - - pub fn as_keyword_for_ref(self) -> &'static str { - match self { - Mutability::Shared => "", - Mutability::Mut => "mut ", - } - } - - pub fn as_keyword_for_ptr(self) -> &'static str { - match self { - Mutability::Shared => "const ", - Mutability::Mut => "mut ", - } - } -} - -#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] -pub enum Rawness { - RawPtr, - Ref, -} - -impl Rawness { - pub fn from_raw(is_raw: bool) -> Rawness { - if is_raw { - Rawness::RawPtr - } else { - Rawness::Ref - } - } -} - -/// Compare ty::Ty -#[derive(Clone, PartialEq, Eq, Hash, Debug)] -pub enum TypeRef { - Never, - Placeholder, - Tuple(Vec), - Path(Path), - RawPtr(Box, Mutability), - Reference(Box, Mutability), - Array(Box /*, Expr*/), - Slice(Box), - /// A fn pointer. Last element of the vector is the return type. - Fn(Vec, bool /*varargs*/), - // For - ImplTrait(Vec), - DynTrait(Vec), - Error, -} - -#[derive(Clone, PartialEq, Eq, Hash, Debug)] -pub enum TypeBound { - Path(Path), - // also for<> bounds - // also Lifetimes - Error, -} - -impl TypeRef { - /// Converts an `ast::TypeRef` to a `hir::TypeRef`. - pub(crate) fn from_ast(ctx: &LowerCtx, node: ast::Type) -> Self { - match node { - ast::Type::ParenType(inner) => TypeRef::from_ast_opt(&ctx, inner.ty()), - ast::Type::TupleType(inner) => { - TypeRef::Tuple(inner.fields().map(|it| TypeRef::from_ast(ctx, it)).collect()) - } - ast::Type::NeverType(..) 
=> TypeRef::Never, - ast::Type::PathType(inner) => { - // FIXME: Use `Path::from_src` - inner - .path() - .and_then(|it| ctx.lower_path(it)) - .map(TypeRef::Path) - .unwrap_or(TypeRef::Error) - } - ast::Type::PtrType(inner) => { - let inner_ty = TypeRef::from_ast_opt(&ctx, inner.ty()); - let mutability = Mutability::from_mutable(inner.mut_token().is_some()); - TypeRef::RawPtr(Box::new(inner_ty), mutability) - } - ast::Type::ArrayType(inner) => { - TypeRef::Array(Box::new(TypeRef::from_ast_opt(&ctx, inner.ty()))) - } - ast::Type::SliceType(inner) => { - TypeRef::Slice(Box::new(TypeRef::from_ast_opt(&ctx, inner.ty()))) - } - ast::Type::RefType(inner) => { - let inner_ty = TypeRef::from_ast_opt(&ctx, inner.ty()); - let mutability = Mutability::from_mutable(inner.mut_token().is_some()); - TypeRef::Reference(Box::new(inner_ty), mutability) - } - ast::Type::InferType(_inner) => TypeRef::Placeholder, - ast::Type::FnPtrType(inner) => { - let ret_ty = inner - .ret_type() - .and_then(|rt| rt.ty()) - .map(|it| TypeRef::from_ast(ctx, it)) - .unwrap_or_else(|| TypeRef::Tuple(Vec::new())); - let mut is_varargs = false; - let mut params = if let Some(pl) = inner.param_list() { - if let Some(param) = pl.params().last() { - is_varargs = param.dotdotdot_token().is_some(); - } - - pl.params().map(|p| p.ty()).map(|it| TypeRef::from_ast_opt(&ctx, it)).collect() - } else { - Vec::new() - }; - params.push(ret_ty); - TypeRef::Fn(params, is_varargs) - } - // for types are close enough for our purposes to the inner type for now... 
- ast::Type::ForType(inner) => TypeRef::from_ast_opt(&ctx, inner.ty()), - ast::Type::ImplTraitType(inner) => { - TypeRef::ImplTrait(type_bounds_from_ast(ctx, inner.type_bound_list())) - } - ast::Type::DynTraitType(inner) => { - TypeRef::DynTrait(type_bounds_from_ast(ctx, inner.type_bound_list())) - } - } - } - - pub(crate) fn from_ast_opt(ctx: &LowerCtx, node: Option) -> Self { - if let Some(node) = node { - TypeRef::from_ast(ctx, node) - } else { - TypeRef::Error - } - } - - pub(crate) fn unit() -> TypeRef { - TypeRef::Tuple(Vec::new()) - } - - pub fn walk(&self, f: &mut impl FnMut(&TypeRef)) { - go(self, f); - - fn go(type_ref: &TypeRef, f: &mut impl FnMut(&TypeRef)) { - f(type_ref); - match type_ref { - TypeRef::Fn(types, _) | TypeRef::Tuple(types) => { - types.iter().for_each(|t| go(t, f)) - } - TypeRef::RawPtr(type_ref, _) - | TypeRef::Reference(type_ref, _) - | TypeRef::Array(type_ref) - | TypeRef::Slice(type_ref) => go(&type_ref, f), - TypeRef::ImplTrait(bounds) | TypeRef::DynTrait(bounds) => { - for bound in bounds { - match bound { - TypeBound::Path(path) => go_path(path, f), - TypeBound::Error => (), - } - } - } - TypeRef::Path(path) => go_path(path, f), - TypeRef::Never | TypeRef::Placeholder | TypeRef::Error => {} - }; - } - - fn go_path(path: &Path, f: &mut impl FnMut(&TypeRef)) { - if let Some(type_ref) = path.type_anchor() { - go(type_ref, f); - } - for segment in path.segments().iter() { - if let Some(args_and_bindings) = segment.args_and_bindings { - for arg in &args_and_bindings.args { - let crate::path::GenericArg::Type(type_ref) = arg; - go(type_ref, f); - } - for binding in &args_and_bindings.bindings { - if let Some(type_ref) = &binding.type_ref { - go(type_ref, f); - } - for bound in &binding.bounds { - match bound { - TypeBound::Path(path) => go_path(path, f), - TypeBound::Error => (), - } - } - } - } - } - } - } -} - -pub(crate) fn type_bounds_from_ast( - lower_ctx: &LowerCtx, - type_bounds_opt: Option, -) -> Vec { - if let 
Some(type_bounds) = type_bounds_opt { - type_bounds.bounds().map(|it| TypeBound::from_ast(lower_ctx, it)).collect() - } else { - vec![] - } -} - -impl TypeBound { - pub(crate) fn from_ast(ctx: &LowerCtx, node: ast::TypeBound) -> Self { - match node.kind() { - ast::TypeBoundKind::PathType(path_type) => { - let path = match path_type.path() { - Some(p) => p, - None => return TypeBound::Error, - }; - - let path = match ctx.lower_path(path) { - Some(p) => p, - None => return TypeBound::Error, - }; - TypeBound::Path(path) - } - ast::TypeBoundKind::ForType(_) | ast::TypeBoundKind::Lifetime(_) => TypeBound::Error, - } - } - - pub fn as_path(&self) -> Option<&Path> { - match self { - TypeBound::Path(p) => Some(p), - _ => None, - } - } -} diff --git a/crates/ra_hir_def/src/visibility.rs b/crates/ra_hir_def/src/visibility.rs deleted file mode 100644 index 1abffb4c3c..0000000000 --- a/crates/ra_hir_def/src/visibility.rs +++ /dev/null @@ -1,171 +0,0 @@ -//! Defines hir-level representation of visibility (e.g. `pub` and `pub(crate)`). - -use hir_expand::{hygiene::Hygiene, InFile}; -use ra_syntax::ast; - -use crate::{ - db::DefDatabase, - nameres::CrateDefMap, - path::{ModPath, PathKind}, - ModuleId, -}; - -/// Visibility of an item, not yet resolved. -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum RawVisibility { - /// `pub(in module)`, `pub(crate)` or `pub(super)`. Also private, which is - /// equivalent to `pub(self)`. - Module(ModPath), - /// `pub`. 
- Public, -} - -impl RawVisibility { - pub(crate) const fn private() -> RawVisibility { - let path = ModPath { kind: PathKind::Super(0), segments: Vec::new() }; - RawVisibility::Module(path) - } - - pub(crate) fn from_ast( - db: &dyn DefDatabase, - node: InFile>, - ) -> RawVisibility { - Self::from_ast_with_hygiene(node.value, &Hygiene::new(db.upcast(), node.file_id)) - } - - pub(crate) fn from_ast_with_hygiene( - node: Option, - hygiene: &Hygiene, - ) -> RawVisibility { - Self::from_ast_with_hygiene_and_default(node, RawVisibility::private(), hygiene) - } - - pub(crate) fn from_ast_with_hygiene_and_default( - node: Option, - default: RawVisibility, - hygiene: &Hygiene, - ) -> RawVisibility { - let node = match node { - None => return default, - Some(node) => node, - }; - match node.kind() { - ast::VisibilityKind::In(path) => { - let path = ModPath::from_src(path, hygiene); - let path = match path { - None => return RawVisibility::private(), - Some(path) => path, - }; - RawVisibility::Module(path) - } - ast::VisibilityKind::PubCrate => { - let path = ModPath { kind: PathKind::Crate, segments: Vec::new() }; - RawVisibility::Module(path) - } - ast::VisibilityKind::PubSuper => { - let path = ModPath { kind: PathKind::Super(1), segments: Vec::new() }; - RawVisibility::Module(path) - } - ast::VisibilityKind::PubSelf => { - let path = ModPath { kind: PathKind::Plain, segments: Vec::new() }; - RawVisibility::Module(path) - } - ast::VisibilityKind::Pub => RawVisibility::Public, - } - } - - pub fn resolve( - &self, - db: &dyn DefDatabase, - resolver: &crate::resolver::Resolver, - ) -> Visibility { - // we fall back to public visibility (i.e. fail open) if the path can't be resolved - resolver.resolve_visibility(db, self).unwrap_or(Visibility::Public) - } -} - -/// Visibility of an item, with the path resolved. -#[derive(Debug, Copy, Clone, PartialEq, Eq)] -pub enum Visibility { - /// Visibility is restricted to a certain module. 
- Module(ModuleId), - /// Visibility is unrestricted. - Public, -} - -impl Visibility { - pub fn is_visible_from(self, db: &dyn DefDatabase, from_module: ModuleId) -> bool { - let to_module = match self { - Visibility::Module(m) => m, - Visibility::Public => return true, - }; - // if they're not in the same crate, it can't be visible - if from_module.krate != to_module.krate { - return false; - } - let def_map = db.crate_def_map(from_module.krate); - self.is_visible_from_def_map(&def_map, from_module.local_id) - } - - pub(crate) fn is_visible_from_other_crate(self) -> bool { - match self { - Visibility::Module(_) => false, - Visibility::Public => true, - } - } - - pub(crate) fn is_visible_from_def_map( - self, - def_map: &CrateDefMap, - from_module: crate::LocalModuleId, - ) -> bool { - let to_module = match self { - Visibility::Module(m) => m, - Visibility::Public => return true, - }; - // from_module needs to be a descendant of to_module - let mut ancestors = std::iter::successors(Some(from_module), |m| { - let parent_id = def_map[*m].parent?; - Some(parent_id) - }); - ancestors.any(|m| m == to_module.local_id) - } - - /// Returns the most permissive visibility of `self` and `other`. - /// - /// If there is no subset relation between `self` and `other`, returns `None` (ie. they're only - /// visible in unrelated modules). 
- pub(crate) fn max(self, other: Visibility, def_map: &CrateDefMap) -> Option { - match (self, other) { - (Visibility::Module(_), Visibility::Public) - | (Visibility::Public, Visibility::Module(_)) - | (Visibility::Public, Visibility::Public) => Some(Visibility::Public), - (Visibility::Module(mod_a), Visibility::Module(mod_b)) => { - if mod_a.krate != mod_b.krate { - return None; - } - - let mut a_ancestors = std::iter::successors(Some(mod_a.local_id), |m| { - let parent_id = def_map[*m].parent?; - Some(parent_id) - }); - let mut b_ancestors = std::iter::successors(Some(mod_b.local_id), |m| { - let parent_id = def_map[*m].parent?; - Some(parent_id) - }); - - if a_ancestors.any(|m| m == mod_b.local_id) { - // B is above A - return Some(Visibility::Module(mod_b)); - } - - if b_ancestors.any(|m| m == mod_a.local_id) { - // A is above B - return Some(Visibility::Module(mod_a)); - } - - None - } - } - } -} diff --git a/crates/ra_hir_expand/Cargo.toml b/crates/ra_hir_expand/Cargo.toml deleted file mode 100644 index 6da0e2a165..0000000000 --- a/crates/ra_hir_expand/Cargo.toml +++ /dev/null @@ -1,23 +0,0 @@ -[package] -edition = "2018" -name = "ra_hir_expand" -version = "0.1.0" -authors = ["rust-analyzer developers"] -license = "MIT OR Apache-2.0" - -[lib] -doctest = false - -[dependencies] -log = "0.4.8" -either = "1.5.3" -rustc-hash = "1.0.0" - -ra_arena = { path = "../ra_arena" } -ra_db = { path = "../ra_db" } -ra_syntax = { path = "../ra_syntax" } -ra_parser = { path = "../ra_parser" } -ra_prof = { path = "../ra_prof" } -tt = { path = "../ra_tt", package = "ra_tt" } -mbe = { path = "../ra_mbe", package = "ra_mbe" } -test_utils = { path = "../test_utils"} diff --git a/crates/ra_hir_expand/src/ast_id_map.rs b/crates/ra_hir_expand/src/ast_id_map.rs deleted file mode 100644 index 8bfe1b4ba7..0000000000 --- a/crates/ra_hir_expand/src/ast_id_map.rs +++ /dev/null @@ -1,119 +0,0 @@ -//! `AstIdMap` allows to create stable IDs for "large" syntax nodes like items -//! 
and macro calls. -//! -//! Specifically, it enumerates all items in a file and uses position of a an -//! item as an ID. That way, id's don't change unless the set of items itself -//! changes. - -use std::{ - any::type_name, - fmt, - hash::{Hash, Hasher}, - marker::PhantomData, -}; - -use ra_arena::{Arena, Idx}; -use ra_syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr}; - -/// `AstId` points to an AST node in a specific file. -pub struct FileAstId { - raw: ErasedFileAstId, - _ty: PhantomData N>, -} - -impl Clone for FileAstId { - fn clone(&self) -> FileAstId { - *self - } -} -impl Copy for FileAstId {} - -impl PartialEq for FileAstId { - fn eq(&self, other: &Self) -> bool { - self.raw == other.raw - } -} -impl Eq for FileAstId {} -impl Hash for FileAstId { - fn hash(&self, hasher: &mut H) { - self.raw.hash(hasher); - } -} - -impl fmt::Debug for FileAstId { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "FileAstId::<{}>({})", type_name::(), self.raw.into_raw()) - } -} - -impl FileAstId { - // Can't make this a From implementation because of coherence - pub fn upcast(self) -> FileAstId - where - N: Into, - { - FileAstId { raw: self.raw, _ty: PhantomData } - } -} - -type ErasedFileAstId = Idx; - -/// Maps items' `SyntaxNode`s to `ErasedFileAstId`s and back. -#[derive(Debug, PartialEq, Eq, Default)] -pub struct AstIdMap { - arena: Arena, -} - -impl AstIdMap { - pub(crate) fn from_source(node: &SyntaxNode) -> AstIdMap { - assert!(node.parent().is_none()); - let mut res = AstIdMap { arena: Arena::default() }; - // By walking the tree in breadth-first order we make sure that parents - // get lower ids then children. That is, adding a new child does not - // change parent's id. This means that, say, adding a new function to a - // trait does not change ids of top-level items, which helps caching. 
- bfs(node, |it| { - if let Some(module_item) = ast::Item::cast(it) { - res.alloc(module_item.syntax()); - } - }); - res - } - - pub fn ast_id(&self, item: &N) -> FileAstId { - let raw = self.erased_ast_id(item.syntax()); - FileAstId { raw, _ty: PhantomData } - } - fn erased_ast_id(&self, item: &SyntaxNode) -> ErasedFileAstId { - let ptr = SyntaxNodePtr::new(item); - match self.arena.iter().find(|(_id, i)| **i == ptr) { - Some((it, _)) => it, - None => panic!( - "Can't find {:?} in AstIdMap:\n{:?}", - item, - self.arena.iter().map(|(_id, i)| i).collect::>(), - ), - } - } - - pub fn get(&self, id: FileAstId) -> AstPtr { - self.arena[id.raw].clone().cast::().unwrap() - } - - fn alloc(&mut self, item: &SyntaxNode) -> ErasedFileAstId { - self.arena.alloc(SyntaxNodePtr::new(item)) - } -} - -/// Walks the subtree in bfs order, calling `f` for each node. -fn bfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode)) { - let mut curr_layer = vec![node.clone()]; - let mut next_layer = vec![]; - while !curr_layer.is_empty() { - curr_layer.drain(..).for_each(|node| { - next_layer.extend(node.children()); - f(node); - }); - std::mem::swap(&mut curr_layer, &mut next_layer); - } -} diff --git a/crates/ra_hir_expand/src/builtin_derive.rs b/crates/ra_hir_expand/src/builtin_derive.rs deleted file mode 100644 index 69fa907cb8..0000000000 --- a/crates/ra_hir_expand/src/builtin_derive.rs +++ /dev/null @@ -1,361 +0,0 @@ -//! Builtin derives. - -use log::debug; - -use ra_parser::FragmentKind; -use ra_syntax::{ - ast::{self, AstNode, GenericParamsOwner, ModuleItemOwner, NameOwner}, - match_ast, -}; - -use crate::{db::AstDatabase, name, quote, LazyMacroId, MacroDefId, MacroDefKind}; - -macro_rules! 
register_builtin { - ( $($trait:ident => $expand:ident),* ) => { - #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] - pub enum BuiltinDeriveExpander { - $($trait),* - } - - impl BuiltinDeriveExpander { - pub fn expand( - &self, - db: &dyn AstDatabase, - id: LazyMacroId, - tt: &tt::Subtree, - ) -> Result { - let expander = match *self { - $( BuiltinDeriveExpander::$trait => $expand, )* - }; - expander(db, id, tt) - } - } - - pub fn find_builtin_derive(ident: &name::Name) -> Option { - let kind = match ident { - $( id if id == &name::name![$trait] => BuiltinDeriveExpander::$trait, )* - _ => return None, - }; - - Some(MacroDefId { krate: None, ast_id: None, kind: MacroDefKind::BuiltInDerive(kind), local_inner: false }) - } - }; -} - -register_builtin! { - Copy => copy_expand, - Clone => clone_expand, - Default => default_expand, - Debug => debug_expand, - Hash => hash_expand, - Ord => ord_expand, - PartialOrd => partial_ord_expand, - Eq => eq_expand, - PartialEq => partial_eq_expand -} - -struct BasicAdtInfo { - name: tt::Ident, - type_params: usize, -} - -fn parse_adt(tt: &tt::Subtree) -> Result { - let (parsed, token_map) = mbe::token_tree_to_syntax_node(tt, FragmentKind::Items)?; // FragmentKind::Items doesn't parse attrs? - let macro_items = ast::MacroItems::cast(parsed.syntax_node()).ok_or_else(|| { - debug!("derive node didn't parse"); - mbe::ExpandError::UnexpectedToken - })?; - let item = macro_items.items().next().ok_or_else(|| { - debug!("no module item parsed"); - mbe::ExpandError::NoMatchingRule - })?; - let node = item.syntax(); - let (name, params) = match_ast! 
{ - match node { - ast::Struct(it) => (it.name(), it.generic_param_list()), - ast::Enum(it) => (it.name(), it.generic_param_list()), - ast::Union(it) => (it.name(), it.generic_param_list()), - _ => { - debug!("unexpected node is {:?}", node); - return Err(mbe::ExpandError::ConversionError) - }, - } - }; - let name = name.ok_or_else(|| { - debug!("parsed item has no name"); - mbe::ExpandError::NoMatchingRule - })?; - let name_token_id = token_map.token_by_range(name.syntax().text_range()).ok_or_else(|| { - debug!("name token not found"); - mbe::ExpandError::ConversionError - })?; - let name_token = tt::Ident { id: name_token_id, text: name.text().clone() }; - let type_params = params.map_or(0, |type_param_list| type_param_list.type_params().count()); - Ok(BasicAdtInfo { name: name_token, type_params }) -} - -fn make_type_args(n: usize, bound: Vec) -> Vec { - let mut result = Vec::::new(); - result.push( - tt::Leaf::Punct(tt::Punct { - char: '<', - spacing: tt::Spacing::Alone, - id: tt::TokenId::unspecified(), - }) - .into(), - ); - for i in 0..n { - if i > 0 { - result.push( - tt::Leaf::Punct(tt::Punct { - char: ',', - spacing: tt::Spacing::Alone, - id: tt::TokenId::unspecified(), - }) - .into(), - ); - } - result.push( - tt::Leaf::Ident(tt::Ident { - id: tt::TokenId::unspecified(), - text: format!("T{}", i).into(), - }) - .into(), - ); - result.extend(bound.iter().cloned()); - } - result.push( - tt::Leaf::Punct(tt::Punct { - char: '>', - spacing: tt::Spacing::Alone, - id: tt::TokenId::unspecified(), - }) - .into(), - ); - result -} - -fn expand_simple_derive( - tt: &tt::Subtree, - trait_path: tt::Subtree, -) -> Result { - let info = parse_adt(tt)?; - let name = info.name; - let trait_path_clone = trait_path.token_trees.clone(); - let bound = (quote! 
{ : ##trait_path_clone }).token_trees; - let type_params = make_type_args(info.type_params, bound); - let type_args = make_type_args(info.type_params, Vec::new()); - let trait_path = trait_path.token_trees; - let expanded = quote! { - impl ##type_params ##trait_path for #name ##type_args {} - }; - Ok(expanded) -} - -fn find_builtin_crate(db: &dyn AstDatabase, id: LazyMacroId) -> tt::TokenTree { - // FIXME: make hygiene works for builtin derive macro - // such that $crate can be used here. - let cg = db.crate_graph(); - let krate = db.lookup_intern_macro(id).krate; - - // XXX - // All crates except core itself should have a dependency on core, - // We detect `core` by seeing whether it doesn't have such a dependency. - let tt = if cg[krate].dependencies.iter().any(|dep| &*dep.name == "core") { - quote! { core } - } else { - quote! { crate } - }; - - tt.token_trees[0].clone() -} - -fn copy_expand( - db: &dyn AstDatabase, - id: LazyMacroId, - tt: &tt::Subtree, -) -> Result { - let krate = find_builtin_crate(db, id); - expand_simple_derive(tt, quote! { #krate::marker::Copy }) -} - -fn clone_expand( - db: &dyn AstDatabase, - id: LazyMacroId, - tt: &tt::Subtree, -) -> Result { - let krate = find_builtin_crate(db, id); - expand_simple_derive(tt, quote! { #krate::clone::Clone }) -} - -fn default_expand( - db: &dyn AstDatabase, - id: LazyMacroId, - tt: &tt::Subtree, -) -> Result { - let krate = find_builtin_crate(db, id); - expand_simple_derive(tt, quote! { #krate::default::Default }) -} - -fn debug_expand( - db: &dyn AstDatabase, - id: LazyMacroId, - tt: &tt::Subtree, -) -> Result { - let krate = find_builtin_crate(db, id); - expand_simple_derive(tt, quote! { #krate::fmt::Debug }) -} - -fn hash_expand( - db: &dyn AstDatabase, - id: LazyMacroId, - tt: &tt::Subtree, -) -> Result { - let krate = find_builtin_crate(db, id); - expand_simple_derive(tt, quote! 
{ #krate::hash::Hash }) -} - -fn eq_expand( - db: &dyn AstDatabase, - id: LazyMacroId, - tt: &tt::Subtree, -) -> Result { - let krate = find_builtin_crate(db, id); - expand_simple_derive(tt, quote! { #krate::cmp::Eq }) -} - -fn partial_eq_expand( - db: &dyn AstDatabase, - id: LazyMacroId, - tt: &tt::Subtree, -) -> Result { - let krate = find_builtin_crate(db, id); - expand_simple_derive(tt, quote! { #krate::cmp::PartialEq }) -} - -fn ord_expand( - db: &dyn AstDatabase, - id: LazyMacroId, - tt: &tt::Subtree, -) -> Result { - let krate = find_builtin_crate(db, id); - expand_simple_derive(tt, quote! { #krate::cmp::Ord }) -} - -fn partial_ord_expand( - db: &dyn AstDatabase, - id: LazyMacroId, - tt: &tt::Subtree, -) -> Result { - let krate = find_builtin_crate(db, id); - expand_simple_derive(tt, quote! { #krate::cmp::PartialOrd }) -} - -#[cfg(test)] -mod tests { - use name::{known, Name}; - use ra_db::{fixture::WithFixture, CrateId, SourceDatabase}; - - use crate::{test_db::TestDB, AstId, MacroCallId, MacroCallKind, MacroCallLoc}; - - use super::*; - - fn expand_builtin_derive(s: &str, name: Name) -> String { - let def = find_builtin_derive(&name).unwrap(); - let fixture = format!( - r#"//- /main.rs crate:main deps:core -<|> -{} -//- /lib.rs crate:core -// empty -"#, - s - ); - - let (db, file_pos) = TestDB::with_position(&fixture); - let file_id = file_pos.file_id; - let parsed = db.parse(file_id); - let items: Vec<_> = - parsed.syntax_node().descendants().filter_map(ast::Item::cast).collect(); - - let ast_id_map = db.ast_id_map(file_id.into()); - - let attr_id = AstId::new(file_id.into(), ast_id_map.ast_id(&items[0])); - - let loc = MacroCallLoc { - def, - krate: CrateId(0), - kind: MacroCallKind::Attr(attr_id, name.to_string()), - }; - - let id: MacroCallId = db.intern_macro(loc).into(); - let parsed = db.parse_or_expand(id.as_file()).unwrap(); - - // FIXME text() for syntax nodes parsed from token tree looks weird - // because there's no whitespace, see below - 
parsed.text().to_string() - } - - #[test] - fn test_copy_expand_simple() { - let expanded = expand_builtin_derive( - r#" - #[derive(Copy)] - struct Foo; -"#, - known::Copy, - ); - - assert_eq!(expanded, "impl< >core::marker::CopyforFoo< >{}"); - } - - #[test] - fn test_copy_expand_with_type_params() { - let expanded = expand_builtin_derive( - r#" - #[derive(Copy)] - struct Foo; -"#, - known::Copy, - ); - - assert_eq!( - expanded, - "implcore::marker::CopyforFoo{}" - ); - } - - #[test] - fn test_copy_expand_with_lifetimes() { - let expanded = expand_builtin_derive( - r#" - #[derive(Copy)] - struct Foo; -"#, - known::Copy, - ); - - // We currently just ignore lifetimes - - assert_eq!( - expanded, - "implcore::marker::CopyforFoo{}" - ); - } - - #[test] - fn test_clone_expand() { - let expanded = expand_builtin_derive( - r#" - #[derive(Clone)] - struct Foo; -"#, - known::Clone, - ); - - assert_eq!( - expanded, - "implcore::clone::CloneforFoo{}" - ); - } -} diff --git a/crates/ra_hir_expand/src/builtin_macro.rs b/crates/ra_hir_expand/src/builtin_macro.rs deleted file mode 100644 index 9f50569dc4..0000000000 --- a/crates/ra_hir_expand/src/builtin_macro.rs +++ /dev/null @@ -1,649 +0,0 @@ -//! Builtin macro -use crate::{ - db::AstDatabase, name, quote, AstId, CrateId, EagerMacroId, LazyMacroId, MacroCallId, - MacroDefId, MacroDefKind, TextSize, -}; - -use either::Either; -use mbe::parse_to_token_tree; -use ra_db::FileId; -use ra_parser::FragmentKind; -use ra_syntax::ast::{self, AstToken, HasStringValue}; - -macro_rules! 
register_builtin { - ( LAZY: $(($name:ident, $kind: ident) => $expand:ident),* , EAGER: $(($e_name:ident, $e_kind: ident) => $e_expand:ident),* ) => { - #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] - pub enum BuiltinFnLikeExpander { - $($kind),* - } - - #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] - pub enum EagerExpander { - $($e_kind),* - } - - impl BuiltinFnLikeExpander { - pub fn expand( - &self, - db: &dyn AstDatabase, - id: LazyMacroId, - tt: &tt::Subtree, - ) -> Result { - let expander = match *self { - $( BuiltinFnLikeExpander::$kind => $expand, )* - }; - expander(db, id, tt) - } - } - - impl EagerExpander { - pub fn expand( - &self, - db: &dyn AstDatabase, - arg_id: EagerMacroId, - tt: &tt::Subtree, - ) -> Result<(tt::Subtree, FragmentKind), mbe::ExpandError> { - let expander = match *self { - $( EagerExpander::$e_kind => $e_expand, )* - }; - expander(db,arg_id,tt) - } - } - - fn find_by_name(ident: &name::Name) -> Option> { - match ident { - $( id if id == &name::name![$name] => Some(Either::Left(BuiltinFnLikeExpander::$kind)), )* - $( id if id == &name::name![$e_name] => Some(Either::Right(EagerExpander::$e_kind)), )* - _ => return None, - } - } - }; -} - -pub fn find_builtin_macro( - ident: &name::Name, - krate: CrateId, - ast_id: AstId, -) -> Option { - let kind = find_by_name(ident)?; - - match kind { - Either::Left(kind) => Some(MacroDefId { - krate: Some(krate), - ast_id: Some(ast_id), - kind: MacroDefKind::BuiltIn(kind), - local_inner: false, - }), - Either::Right(kind) => Some(MacroDefId { - krate: Some(krate), - ast_id: Some(ast_id), - kind: MacroDefKind::BuiltInEager(kind), - local_inner: false, - }), - } -} - -register_builtin! 
{ - LAZY: - (column, Column) => column_expand, - (compile_error, CompileError) => compile_error_expand, - (file, File) => file_expand, - (line, Line) => line_expand, - (assert, Assert) => assert_expand, - (stringify, Stringify) => stringify_expand, - (format_args, FormatArgs) => format_args_expand, - // format_args_nl only differs in that it adds a newline in the end, - // so we use the same stub expansion for now - (format_args_nl, FormatArgsNl) => format_args_expand, - - EAGER: - (concat, Concat) => concat_expand, - (include, Include) => include_expand, - (include_bytes, IncludeBytes) => include_bytes_expand, - (include_str, IncludeStr) => include_str_expand, - (env, Env) => env_expand, - (option_env, OptionEnv) => option_env_expand -} - -fn line_expand( - _db: &dyn AstDatabase, - _id: LazyMacroId, - _tt: &tt::Subtree, -) -> Result { - // dummy implementation for type-checking purposes - let line_num = 0; - let expanded = quote! { - #line_num - }; - - Ok(expanded) -} - -fn stringify_expand( - db: &dyn AstDatabase, - id: LazyMacroId, - _tt: &tt::Subtree, -) -> Result { - let loc = db.lookup_intern_macro(id); - - let macro_content = { - let arg = loc.kind.arg(db).ok_or_else(|| mbe::ExpandError::UnexpectedToken)?; - let macro_args = arg; - let text = macro_args.text(); - let without_parens = TextSize::of('(')..text.len() - TextSize::of(')'); - text.slice(without_parens).to_string() - }; - - let expanded = quote! { - #macro_content - }; - - Ok(expanded) -} - -fn column_expand( - _db: &dyn AstDatabase, - _id: LazyMacroId, - _tt: &tt::Subtree, -) -> Result { - // dummy implementation for type-checking purposes - let col_num = 0; - let expanded = quote! { - #col_num - }; - - Ok(expanded) -} - -fn assert_expand( - _db: &dyn AstDatabase, - _id: LazyMacroId, - tt: &tt::Subtree, -) -> Result { - // A hacky implementation for goto def and hover - // We expand `assert!(cond, arg1, arg2)` to - // ``` - // {(cond, &(arg1), &(arg2));} - // ```, - // which is wrong but useful. 
- - let mut args = Vec::new(); - let mut current = Vec::new(); - for tt in tt.token_trees.iter().cloned() { - match tt { - tt::TokenTree::Leaf(tt::Leaf::Punct(p)) if p.char == ',' => { - args.push(current); - current = Vec::new(); - } - _ => { - current.push(tt); - } - } - } - if !current.is_empty() { - args.push(current); - } - - let arg_tts = args.into_iter().flat_map(|arg| { - quote! { &(##arg), } - }.token_trees).collect::>(); - - let expanded = quote! { - { { (##arg_tts); } } - }; - Ok(expanded) -} - -fn file_expand( - _db: &dyn AstDatabase, - _id: LazyMacroId, - _tt: &tt::Subtree, -) -> Result { - // FIXME: RA purposefully lacks knowledge of absolute file names - // so just return "". - let file_name = ""; - - let expanded = quote! { - #file_name - }; - - Ok(expanded) -} - -fn compile_error_expand( - _db: &dyn AstDatabase, - _id: LazyMacroId, - tt: &tt::Subtree, -) -> Result { - if tt.count() == 1 { - if let tt::TokenTree::Leaf(tt::Leaf::Literal(it)) = &tt.token_trees[0] { - let s = it.text.as_str(); - if s.contains('"') { - return Ok(quote! 
{ loop { #it }}); - } - }; - } - - Err(mbe::ExpandError::BindingError("Must be a string".into())) -} - -fn format_args_expand( - _db: &dyn AstDatabase, - _id: LazyMacroId, - tt: &tt::Subtree, -) -> Result { - // We expand `format_args!("", a1, a2)` to - // ``` - // std::fmt::Arguments::new_v1(&[], &[ - // std::fmt::ArgumentV1::new(&arg1,std::fmt::Display::fmt), - // std::fmt::ArgumentV1::new(&arg2,std::fmt::Display::fmt), - // ]) - // ```, - // which is still not really correct, but close enough for now - let mut args = Vec::new(); - let mut current = Vec::new(); - for tt in tt.token_trees.iter().cloned() { - match tt { - tt::TokenTree::Leaf(tt::Leaf::Punct(p)) if p.char == ',' => { - args.push(current); - current = Vec::new(); - } - _ => { - current.push(tt); - } - } - } - if !current.is_empty() { - args.push(current); - } - if args.is_empty() { - return Err(mbe::ExpandError::NoMatchingRule); - } - let _format_string = args.remove(0); - let arg_tts = args.into_iter().flat_map(|arg| { - quote! { std::fmt::ArgumentV1::new(&(##arg), std::fmt::Display::fmt), } - }.token_trees).collect::>(); - let expanded = quote! 
{ - std::fmt::Arguments::new_v1(&[], &[##arg_tts]) - }; - Ok(expanded) -} - -fn unquote_str(lit: &tt::Literal) -> Option { - let lit = ast::make::tokens::literal(&lit.to_string()); - let token = ast::String::cast(lit)?; - token.value().map(|it| it.into_owned()) -} - -fn concat_expand( - _db: &dyn AstDatabase, - _arg_id: EagerMacroId, - tt: &tt::Subtree, -) -> Result<(tt::Subtree, FragmentKind), mbe::ExpandError> { - let mut text = String::new(); - for (i, t) in tt.token_trees.iter().enumerate() { - match t { - tt::TokenTree::Leaf(tt::Leaf::Literal(it)) if i % 2 == 0 => { - text += &unquote_str(&it).ok_or_else(|| mbe::ExpandError::ConversionError)?; - } - tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (), - _ => return Err(mbe::ExpandError::UnexpectedToken), - } - } - - Ok((quote!(#text), FragmentKind::Expr)) -} - -fn relative_file( - db: &dyn AstDatabase, - call_id: MacroCallId, - path: &str, - allow_recursion: bool, -) -> Option { - let call_site = call_id.as_file().original_file(db); - let res = db.resolve_path(call_site, path)?; - // Prevent include itself - if res == call_site && !allow_recursion { - None - } else { - Some(res) - } -} - -fn parse_string(tt: &tt::Subtree) -> Result { - tt.token_trees - .get(0) - .and_then(|tt| match tt { - tt::TokenTree::Leaf(tt::Leaf::Literal(it)) => unquote_str(&it), - _ => None, - }) - .ok_or_else(|| mbe::ExpandError::ConversionError) -} - -fn include_expand( - db: &dyn AstDatabase, - arg_id: EagerMacroId, - tt: &tt::Subtree, -) -> Result<(tt::Subtree, FragmentKind), mbe::ExpandError> { - let path = parse_string(tt)?; - let file_id = relative_file(db, arg_id.into(), &path, false) - .ok_or_else(|| mbe::ExpandError::ConversionError)?; - - // FIXME: - // Handle include as expression - let res = parse_to_token_tree(&db.file_text(file_id)) - .ok_or_else(|| mbe::ExpandError::ConversionError)? 
- .0; - - Ok((res, FragmentKind::Items)) -} - -fn include_bytes_expand( - _db: &dyn AstDatabase, - _arg_id: EagerMacroId, - tt: &tt::Subtree, -) -> Result<(tt::Subtree, FragmentKind), mbe::ExpandError> { - let _path = parse_string(tt)?; - - // FIXME: actually read the file here if the user asked for macro expansion - let res = tt::Subtree { - delimiter: None, - token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { - text: r#"b"""#.into(), - id: tt::TokenId::unspecified(), - }))], - }; - Ok((res, FragmentKind::Expr)) -} - -fn include_str_expand( - db: &dyn AstDatabase, - arg_id: EagerMacroId, - tt: &tt::Subtree, -) -> Result<(tt::Subtree, FragmentKind), mbe::ExpandError> { - let path = parse_string(tt)?; - - // FIXME: we're not able to read excluded files (which is most of them because - // it's unusual to `include_str!` a Rust file), but we can return an empty string. - // Ideally, we'd be able to offer a precise expansion if the user asks for macro - // expansion. - let file_id = match relative_file(db, arg_id.into(), &path, true) { - Some(file_id) => file_id, - None => { - return Ok((quote!(""), FragmentKind::Expr)); - } - }; - - let text = db.file_text(file_id); - let text = &*text; - - Ok((quote!(#text), FragmentKind::Expr)) -} - -fn get_env_inner(db: &dyn AstDatabase, arg_id: EagerMacroId, key: &str) -> Option { - let krate = db.lookup_intern_eager_expansion(arg_id).krate; - db.crate_graph()[krate].env.get(key) -} - -fn env_expand( - db: &dyn AstDatabase, - arg_id: EagerMacroId, - tt: &tt::Subtree, -) -> Result<(tt::Subtree, FragmentKind), mbe::ExpandError> { - let key = parse_string(tt)?; - - // FIXME: - // If the environment variable is not defined int rustc, then a compilation error will be emitted. - // We might do the same if we fully support all other stuffs. - // But for now on, we should return some dummy string for better type infer purpose. 
- // However, we cannot use an empty string here, because for - // `include!(concat!(env!("OUT_DIR"), "/foo.rs"))` will become - // `include!("foo.rs"), which might go to infinite loop - let s = get_env_inner(db, arg_id, &key).unwrap_or_else(|| "__RA_UNIMPLEMENTED__".to_string()); - let expanded = quote! { #s }; - - Ok((expanded, FragmentKind::Expr)) -} - -fn option_env_expand( - db: &dyn AstDatabase, - arg_id: EagerMacroId, - tt: &tt::Subtree, -) -> Result<(tt::Subtree, FragmentKind), mbe::ExpandError> { - let key = parse_string(tt)?; - let expanded = match get_env_inner(db, arg_id, &key) { - None => quote! { std::option::Option::None::<&str> }, - Some(s) => quote! { std::option::Some(#s) }, - }; - - Ok((expanded, FragmentKind::Expr)) -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::{ - name::AsName, test_db::TestDB, AstNode, EagerCallLoc, MacroCallId, MacroCallKind, - MacroCallLoc, - }; - use ra_db::{fixture::WithFixture, SourceDatabase}; - use ra_syntax::ast::NameOwner; - use std::sync::Arc; - - fn expand_builtin_macro(ra_fixture: &str) -> String { - let (db, file_id) = TestDB::with_single_file(&ra_fixture); - let parsed = db.parse(file_id); - let macro_calls: Vec<_> = - parsed.syntax_node().descendants().filter_map(ast::MacroCall::cast).collect(); - - let ast_id_map = db.ast_id_map(file_id.into()); - - let expander = find_by_name(¯o_calls[0].name().unwrap().as_name()).unwrap(); - - let krate = CrateId(0); - let file_id = match expander { - Either::Left(expander) => { - // the first one should be a macro_rules - let def = MacroDefId { - krate: Some(CrateId(0)), - ast_id: Some(AstId::new(file_id.into(), ast_id_map.ast_id(¯o_calls[0]))), - kind: MacroDefKind::BuiltIn(expander), - local_inner: false, - }; - - let loc = MacroCallLoc { - def, - krate, - kind: MacroCallKind::FnLike(AstId::new( - file_id.into(), - ast_id_map.ast_id(¯o_calls[1]), - )), - }; - - let id: MacroCallId = db.intern_macro(loc).into(); - id.as_file() - } - Either::Right(expander) => 
{ - // the first one should be a macro_rules - let def = MacroDefId { - krate: Some(krate), - ast_id: Some(AstId::new(file_id.into(), ast_id_map.ast_id(¯o_calls[0]))), - kind: MacroDefKind::BuiltInEager(expander), - local_inner: false, - }; - - let args = macro_calls[1].token_tree().unwrap(); - let parsed_args = mbe::ast_to_token_tree(&args).unwrap().0; - - let arg_id = db.intern_eager_expansion({ - EagerCallLoc { - def, - fragment: FragmentKind::Expr, - subtree: Arc::new(parsed_args.clone()), - krate, - file_id: file_id.into(), - } - }); - - let (subtree, fragment) = expander.expand(&db, arg_id, &parsed_args).unwrap(); - let eager = EagerCallLoc { - def, - fragment, - subtree: Arc::new(subtree), - krate, - file_id: file_id.into(), - }; - - let id: MacroCallId = db.intern_eager_expansion(eager).into(); - id.as_file() - } - }; - - db.parse_or_expand(file_id).unwrap().to_string() - } - - #[test] - fn test_column_expand() { - let expanded = expand_builtin_macro( - r#" - #[rustc_builtin_macro] - macro_rules! column {() => {}} - column!() - "#, - ); - - assert_eq!(expanded, "0"); - } - - #[test] - fn test_line_expand() { - let expanded = expand_builtin_macro( - r#" - #[rustc_builtin_macro] - macro_rules! line {() => {}} - line!() - "#, - ); - - assert_eq!(expanded, "0"); - } - - #[test] - fn test_stringify_expand() { - let expanded = expand_builtin_macro( - r#" - #[rustc_builtin_macro] - macro_rules! stringify {() => {}} - stringify!(a b c) - "#, - ); - - assert_eq!(expanded, "\"a b c\""); - } - - #[test] - fn test_env_expand() { - let expanded = expand_builtin_macro( - r#" - #[rustc_builtin_macro] - macro_rules! env {() => {}} - env!("TEST_ENV_VAR") - "#, - ); - - assert_eq!(expanded, "\"__RA_UNIMPLEMENTED__\""); - } - - #[test] - fn test_option_env_expand() { - let expanded = expand_builtin_macro( - r#" - #[rustc_builtin_macro] - macro_rules! 
option_env {() => {}} - option_env!("TEST_ENV_VAR") - "#, - ); - - assert_eq!(expanded, "std::option::Option::None:: < &str>"); - } - - #[test] - fn test_file_expand() { - let expanded = expand_builtin_macro( - r#" - #[rustc_builtin_macro] - macro_rules! file {() => {}} - file!() - "#, - ); - - assert_eq!(expanded, "\"\""); - } - - #[test] - fn test_assert_expand() { - let expanded = expand_builtin_macro( - r#" - #[rustc_builtin_macro] - macro_rules! assert { - ($cond:expr) => ({ /* compiler built-in */ }); - ($cond:expr, $($args:tt)*) => ({ /* compiler built-in */ }) - } - assert!(true, "{} {:?}", arg1(a, b, c), arg2); - "#, - ); - - assert_eq!(expanded, "{{(&(true), &(\"{} {:?}\"), &(arg1(a,b,c)), &(arg2),);}}"); - } - - #[test] - fn test_compile_error_expand() { - let expanded = expand_builtin_macro( - r#" - #[rustc_builtin_macro] - macro_rules! compile_error { - ($msg:expr) => ({ /* compiler built-in */ }); - ($msg:expr,) => ({ /* compiler built-in */ }) - } - compile_error!("error!"); - "#, - ); - - assert_eq!(expanded, r#"loop{"error!"}"#); - } - - #[test] - fn test_format_args_expand() { - let expanded = expand_builtin_macro( - r#" - #[rustc_builtin_macro] - macro_rules! format_args { - ($fmt:expr) => ({ /* compiler built-in */ }); - ($fmt:expr, $($args:tt)*) => ({ /* compiler built-in */ }) - } - format_args!("{} {:?}", arg1(a, b, c), arg2); - "#, - ); - - assert_eq!( - expanded, - r#"std::fmt::Arguments::new_v1(&[], &[std::fmt::ArgumentV1::new(&(arg1(a,b,c)),std::fmt::Display::fmt),std::fmt::ArgumentV1::new(&(arg2),std::fmt::Display::fmt),])"# - ); - } - - #[test] - fn test_include_bytes_expand() { - let expanded = expand_builtin_macro( - r#" - #[rustc_builtin_macro] - macro_rules! 
include_bytes { - ($file:expr) => {{ /* compiler built-in */ }}; - ($file:expr,) => {{ /* compiler built-in */ }}; - } - include_bytes("foo"); - "#, - ); - - assert_eq!(expanded, r#"b"""#); - } -} diff --git a/crates/ra_hir_expand/src/db.rs b/crates/ra_hir_expand/src/db.rs deleted file mode 100644 index f3b7cd492c..0000000000 --- a/crates/ra_hir_expand/src/db.rs +++ /dev/null @@ -1,404 +0,0 @@ -//! Defines database & queries for macro expansion. - -use std::sync::Arc; - -use mbe::{ExpandResult, MacroRules}; -use ra_db::{salsa, SourceDatabase}; -use ra_parser::FragmentKind; -use ra_prof::profile; -use ra_syntax::{algo::diff, AstNode, GreenNode, Parse, SyntaxKind::*, SyntaxNode}; - -use crate::{ - ast_id_map::AstIdMap, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallLoc, EagerMacroId, - HirFileId, HirFileIdRepr, LazyMacroId, MacroCallId, MacroCallLoc, MacroDefId, MacroDefKind, - MacroFile, ProcMacroExpander, -}; - -#[derive(Debug, Clone, Eq, PartialEq)] -pub enum TokenExpander { - MacroRules(mbe::MacroRules), - Builtin(BuiltinFnLikeExpander), - BuiltinDerive(BuiltinDeriveExpander), - ProcMacro(ProcMacroExpander), -} - -impl TokenExpander { - pub fn expand( - &self, - db: &dyn AstDatabase, - id: LazyMacroId, - tt: &tt::Subtree, - ) -> mbe::ExpandResult { - match self { - TokenExpander::MacroRules(it) => it.expand(tt), - // FIXME switch these to ExpandResult as well - TokenExpander::Builtin(it) => it.expand(db, id, tt).into(), - TokenExpander::BuiltinDerive(it) => it.expand(db, id, tt).into(), - TokenExpander::ProcMacro(_) => { - // We store the result in salsa db to prevent non-determinisc behavior in - // some proc-macro implementation - // See #4315 for details - db.expand_proc_macro(id.into()).into() - } - } - } - - pub fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId { - match self { - TokenExpander::MacroRules(it) => it.map_id_down(id), - TokenExpander::Builtin(..) => id, - TokenExpander::BuiltinDerive(..) => id, - TokenExpander::ProcMacro(..) 
=> id, - } - } - - pub fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) { - match self { - TokenExpander::MacroRules(it) => it.map_id_up(id), - TokenExpander::Builtin(..) => (id, mbe::Origin::Call), - TokenExpander::BuiltinDerive(..) => (id, mbe::Origin::Call), - TokenExpander::ProcMacro(..) => (id, mbe::Origin::Call), - } - } -} - -// FIXME: rename to ExpandDatabase -#[salsa::query_group(AstDatabaseStorage)] -pub trait AstDatabase: SourceDatabase { - fn ast_id_map(&self, file_id: HirFileId) -> Arc; - - #[salsa::transparent] - fn parse_or_expand(&self, file_id: HirFileId) -> Option; - - #[salsa::interned] - fn intern_macro(&self, macro_call: MacroCallLoc) -> LazyMacroId; - fn macro_arg_text(&self, id: MacroCallId) -> Option; - #[salsa::transparent] - fn macro_arg(&self, id: MacroCallId) -> Option>; - fn macro_def(&self, id: MacroDefId) -> Option>; - fn parse_macro(&self, macro_file: MacroFile) - -> Option<(Parse, Arc)>; - fn macro_expand(&self, macro_call: MacroCallId) -> (Option>, Option); - - #[salsa::interned] - fn intern_eager_expansion(&self, eager: EagerCallLoc) -> EagerMacroId; - - fn expand_proc_macro(&self, call: MacroCallId) -> Result; -} - -/// This expands the given macro call, but with different arguments. This is -/// used for completion, where we want to see what 'would happen' if we insert a -/// token. The `token_to_map` mapped down into the expansion, with the mapped -/// token returned. 
-pub fn expand_hypothetical( - db: &dyn AstDatabase, - actual_macro_call: MacroCallId, - hypothetical_args: &ra_syntax::ast::TokenTree, - token_to_map: ra_syntax::SyntaxToken, -) -> Option<(SyntaxNode, ra_syntax::SyntaxToken)> { - let macro_file = MacroFile { macro_call_id: actual_macro_call }; - let (tt, tmap_1) = mbe::syntax_node_to_token_tree(hypothetical_args.syntax()).unwrap(); - let range = - token_to_map.text_range().checked_sub(hypothetical_args.syntax().text_range().start())?; - let token_id = tmap_1.token_by_range(range)?; - let macro_def = expander(db, actual_macro_call)?; - let (node, tmap_2) = - parse_macro_with_arg(db, macro_file, Some(std::sync::Arc::new((tt, tmap_1))))?; - let token_id = macro_def.0.map_id_down(token_id); - let range = tmap_2.range_by_token(token_id)?.by_kind(token_to_map.kind())?; - let token = ra_syntax::algo::find_covering_element(&node.syntax_node(), range).into_token()?; - Some((node.syntax_node(), token)) -} - -pub(crate) fn ast_id_map(db: &dyn AstDatabase, file_id: HirFileId) -> Arc { - let map = - db.parse_or_expand(file_id).map_or_else(AstIdMap::default, |it| AstIdMap::from_source(&it)); - Arc::new(map) -} - -pub(crate) fn macro_def( - db: &dyn AstDatabase, - id: MacroDefId, -) -> Option> { - match id.kind { - MacroDefKind::Declarative => { - let macro_call = id.ast_id?.to_node(db); - let arg = macro_call.token_tree()?; - let (tt, tmap) = mbe::ast_to_token_tree(&arg).or_else(|| { - log::warn!("fail on macro_def to token tree: {:#?}", arg); - None - })?; - let rules = match MacroRules::parse(&tt) { - Ok(it) => it, - Err(err) => { - log::warn!("fail on macro_def parse: error: {:#?} {:#?}", err, tt); - return None; - } - }; - Some(Arc::new((TokenExpander::MacroRules(rules), tmap))) - } - MacroDefKind::BuiltIn(expander) => { - Some(Arc::new((TokenExpander::Builtin(expander), mbe::TokenMap::default()))) - } - MacroDefKind::BuiltInDerive(expander) => { - Some(Arc::new((TokenExpander::BuiltinDerive(expander), 
mbe::TokenMap::default()))) - } - MacroDefKind::BuiltInEager(_) => None, - MacroDefKind::CustomDerive(expander) => { - Some(Arc::new((TokenExpander::ProcMacro(expander), mbe::TokenMap::default()))) - } - } -} - -pub(crate) fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option { - let id = match id { - MacroCallId::LazyMacro(id) => id, - MacroCallId::EagerMacro(_id) => { - // FIXME: support macro_arg for eager macro - return None; - } - }; - let loc = db.lookup_intern_macro(id); - let arg = loc.kind.arg(db)?; - Some(arg.green().clone()) -} - -pub(crate) fn macro_arg( - db: &dyn AstDatabase, - id: MacroCallId, -) -> Option> { - let arg = db.macro_arg_text(id)?; - let (tt, tmap) = mbe::syntax_node_to_token_tree(&SyntaxNode::new_root(arg))?; - Some(Arc::new((tt, tmap))) -} - -pub(crate) fn macro_expand( - db: &dyn AstDatabase, - id: MacroCallId, -) -> (Option>, Option) { - macro_expand_with_arg(db, id, None) -} - -fn expander(db: &dyn AstDatabase, id: MacroCallId) -> Option> { - let lazy_id = match id { - MacroCallId::LazyMacro(id) => id, - MacroCallId::EagerMacro(_id) => { - return None; - } - }; - - let loc = db.lookup_intern_macro(lazy_id); - let macro_rules = db.macro_def(loc.def)?; - Some(macro_rules) -} - -fn macro_expand_with_arg( - db: &dyn AstDatabase, - id: MacroCallId, - arg: Option>, -) -> (Option>, Option) { - let lazy_id = match id { - MacroCallId::LazyMacro(id) => id, - MacroCallId::EagerMacro(id) => { - if arg.is_some() { - return ( - None, - Some("hypothetical macro expansion not implemented for eager macro".to_owned()), - ); - } else { - return (Some(db.lookup_intern_eager_expansion(id).subtree), None); - } - } - }; - - let loc = db.lookup_intern_macro(lazy_id); - let macro_arg = match arg.or_else(|| db.macro_arg(id)) { - Some(it) => it, - None => return (None, Some("Fail to args in to tt::TokenTree".into())), - }; - - let macro_rules = match db.macro_def(loc.def) { - Some(it) => it, - None => return (None, Some("Fail to find macro 
definition".into())), - }; - let ExpandResult(tt, err) = macro_rules.0.expand(db, lazy_id, ¯o_arg.0); - // Set a hard limit for the expanded tt - let count = tt.count(); - if count > 65536 { - return (None, Some(format!("Total tokens count exceed limit : count = {}", count))); - } - (Some(Arc::new(tt)), err.map(|e| format!("{:?}", e))) -} - -pub(crate) fn expand_proc_macro( - db: &dyn AstDatabase, - id: MacroCallId, -) -> Result { - let lazy_id = match id { - MacroCallId::LazyMacro(id) => id, - MacroCallId::EagerMacro(_) => unreachable!(), - }; - - let loc = db.lookup_intern_macro(lazy_id); - let macro_arg = match db.macro_arg(id) { - Some(it) => it, - None => { - return Err( - tt::ExpansionError::Unknown("No arguments for proc-macro".to_string()).into() - ) - } - }; - - let expander = match loc.def.kind { - MacroDefKind::CustomDerive(expander) => expander, - _ => unreachable!(), - }; - - expander.expand(db, lazy_id, ¯o_arg.0) -} - -pub(crate) fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Option { - match file_id.0 { - HirFileIdRepr::FileId(file_id) => Some(db.parse(file_id).tree().syntax().clone()), - HirFileIdRepr::MacroFile(macro_file) => { - db.parse_macro(macro_file).map(|(it, _)| it.syntax_node()) - } - } -} - -pub(crate) fn parse_macro( - db: &dyn AstDatabase, - macro_file: MacroFile, -) -> Option<(Parse, Arc)> { - parse_macro_with_arg(db, macro_file, None) -} - -pub fn parse_macro_with_arg( - db: &dyn AstDatabase, - macro_file: MacroFile, - arg: Option>, -) -> Option<(Parse, Arc)> { - let _p = profile("parse_macro_query"); - - let macro_call_id = macro_file.macro_call_id; - let (tt, err) = if let Some(arg) = arg { - macro_expand_with_arg(db, macro_call_id, Some(arg)) - } else { - db.macro_expand(macro_call_id) - }; - if let Some(err) = &err { - // Note: - // The final goal we would like to make all parse_macro success, - // such that the following log will not call anyway. 
- match macro_call_id { - MacroCallId::LazyMacro(id) => { - let loc: MacroCallLoc = db.lookup_intern_macro(id); - let node = loc.kind.node(db); - - // collect parent information for warning log - let parents = std::iter::successors(loc.kind.file_id().call_node(db), |it| { - it.file_id.call_node(db) - }) - .map(|n| format!("{:#}", n.value)) - .collect::>() - .join("\n"); - - log::warn!( - "fail on macro_parse: (reason: {} macro_call: {:#}) parents: {}", - err, - node.value, - parents - ); - } - _ => { - log::warn!("fail on macro_parse: (reason: {})", err); - } - } - }; - let tt = tt?; - - let fragment_kind = to_fragment_kind(db, macro_call_id); - - let (parse, rev_token_map) = mbe::token_tree_to_syntax_node(&tt, fragment_kind).ok()?; - - if err.is_none() { - Some((parse, Arc::new(rev_token_map))) - } else { - // FIXME: - // In future, we should propagate the actual error with recovery information - // instead of ignore the error here. - - // Safe check for recurisve identity macro - let node = parse.syntax_node(); - let file: HirFileId = macro_file.into(); - let call_node = file.call_node(db)?; - - if !diff(&node, &call_node.value).is_empty() { - Some((parse, Arc::new(rev_token_map))) - } else { - None - } - } -} - -/// Given a `MacroCallId`, return what `FragmentKind` it belongs to. -/// FIXME: Not completed -fn to_fragment_kind(db: &dyn AstDatabase, id: MacroCallId) -> FragmentKind { - let lazy_id = match id { - MacroCallId::LazyMacro(id) => id, - MacroCallId::EagerMacro(id) => { - return db.lookup_intern_eager_expansion(id).fragment; - } - }; - let syn = db.lookup_intern_macro(lazy_id).kind.node(db).value; - - let parent = match syn.parent() { - Some(it) => it, - None => { - // FIXME: - // If it is root, which means the parent HirFile - // MacroKindFile must be non-items - // return expr now. 
- return FragmentKind::Expr; - } - }; - - match parent.kind() { - MACRO_ITEMS | SOURCE_FILE => FragmentKind::Items, - ITEM_LIST => FragmentKind::Items, - LET_STMT => { - // FIXME: Handle Pattern - FragmentKind::Expr - } - // FIXME: Expand to statements in appropriate positions; HIR lowering needs to handle that - EXPR_STMT | BLOCK_EXPR => FragmentKind::Expr, - ARG_LIST => FragmentKind::Expr, - TRY_EXPR => FragmentKind::Expr, - TUPLE_EXPR => FragmentKind::Expr, - PAREN_EXPR => FragmentKind::Expr, - - FOR_EXPR => FragmentKind::Expr, - PATH_EXPR => FragmentKind::Expr, - CLOSURE_EXPR => FragmentKind::Expr, - CONDITION => FragmentKind::Expr, - BREAK_EXPR => FragmentKind::Expr, - RETURN_EXPR => FragmentKind::Expr, - MATCH_EXPR => FragmentKind::Expr, - MATCH_ARM => FragmentKind::Expr, - MATCH_GUARD => FragmentKind::Expr, - RECORD_EXPR_FIELD => FragmentKind::Expr, - CALL_EXPR => FragmentKind::Expr, - INDEX_EXPR => FragmentKind::Expr, - METHOD_CALL_EXPR => FragmentKind::Expr, - AWAIT_EXPR => FragmentKind::Expr, - CAST_EXPR => FragmentKind::Expr, - REF_EXPR => FragmentKind::Expr, - PREFIX_EXPR => FragmentKind::Expr, - RANGE_EXPR => FragmentKind::Expr, - BIN_EXPR => FragmentKind::Expr, - _ => { - // Unknown , Just guess it is `Items` - FragmentKind::Items - } - } -} diff --git a/crates/ra_hir_expand/src/diagnostics.rs b/crates/ra_hir_expand/src/diagnostics.rs deleted file mode 100644 index 84ba97b14a..0000000000 --- a/crates/ra_hir_expand/src/diagnostics.rs +++ /dev/null @@ -1,110 +0,0 @@ -//! Semantic errors and warnings. -//! -//! The `Diagnostic` trait defines a trait object which can represent any -//! diagnostic. -//! -//! `DiagnosticSink` struct is used as an emitter for diagnostic. When creating -//! a `DiagnosticSink`, you supply a callback which can react to a `dyn -//! Diagnostic` or to any concrete diagnostic (downcasting is sued internally). -//! -//! Because diagnostics store file offsets, it's a bad idea to store them -//! directly in salsa. 
For this reason, every hir subsytem defines it's own -//! strongly-typed closed set of diagnostics which use hir ids internally, are -//! stored in salsa and do *not* implement the `Diagnostic` trait. Instead, a -//! subsystem provides a separate, non-query-based API which can walk all stored -//! values and transform them into instances of `Diagnostic`. - -use std::{any::Any, fmt}; - -use ra_syntax::{SyntaxNode, SyntaxNodePtr}; - -use crate::{db::AstDatabase, InFile}; - -pub trait Diagnostic: Any + Send + Sync + fmt::Debug + 'static { - fn message(&self) -> String; - fn source(&self) -> InFile; - fn as_any(&self) -> &(dyn Any + Send + 'static); - fn is_experimental(&self) -> bool { - false - } -} - -pub trait AstDiagnostic { - type AST; - fn ast(&self, db: &dyn AstDatabase) -> Self::AST; -} - -impl dyn Diagnostic { - pub fn syntax_node(&self, db: &impl AstDatabase) -> SyntaxNode { - let node = db.parse_or_expand(self.source().file_id).unwrap(); - self.source().value.to_node(&node) - } - - pub fn downcast_ref(&self) -> Option<&D> { - self.as_any().downcast_ref() - } -} - -pub struct DiagnosticSink<'a> { - callbacks: Vec Result<(), ()> + 'a>>, - filters: Vec bool + 'a>>, - default_callback: Box, -} - -impl<'a> DiagnosticSink<'a> { - pub fn push(&mut self, d: impl Diagnostic) { - let d: &dyn Diagnostic = &d; - self._push(d); - } - - fn _push(&mut self, d: &dyn Diagnostic) { - for filter in &mut self.filters { - if !filter(d) { - return; - } - } - for cb in &mut self.callbacks { - match cb(d) { - Ok(()) => return, - Err(()) => (), - } - } - (self.default_callback)(d) - } -} - -pub struct DiagnosticSinkBuilder<'a> { - callbacks: Vec Result<(), ()> + 'a>>, - filters: Vec bool + 'a>>, -} - -impl<'a> DiagnosticSinkBuilder<'a> { - pub fn new() -> Self { - Self { callbacks: Vec::new(), filters: Vec::new() } - } - - pub fn filter bool + 'a>(mut self, cb: F) -> Self { - self.filters.push(Box::new(cb)); - self - } - - pub fn on(mut self, mut cb: F) -> Self { - let cb = move 
|diag: &dyn Diagnostic| match diag.downcast_ref::() { - Some(d) => { - cb(d); - Ok(()) - } - None => Err(()), - }; - self.callbacks.push(Box::new(cb)); - self - } - - pub fn build(self, default_callback: F) -> DiagnosticSink<'a> { - DiagnosticSink { - callbacks: self.callbacks, - filters: self.filters, - default_callback: Box::new(default_callback), - } - } -} diff --git a/crates/ra_hir_expand/src/eager.rs b/crates/ra_hir_expand/src/eager.rs deleted file mode 100644 index 302d2b3e09..0000000000 --- a/crates/ra_hir_expand/src/eager.rs +++ /dev/null @@ -1,144 +0,0 @@ -//! Eager expansion related utils -//! -//! Here is a dump of a discussion from Vadim Petrochenkov about Eager Expansion and -//! Its name resolution : -//! -//! > Eagerly expanded macros (and also macros eagerly expanded by eagerly expanded macros, -//! > which actually happens in practice too!) are resolved at the location of the "root" macro -//! > that performs the eager expansion on its arguments. -//! > If some name cannot be resolved at the eager expansion time it's considered unresolved, -//! > even if becomes available later (e.g. from a glob import or other macro). -//! -//! > Eagerly expanded macros don't add anything to the module structure of the crate and -//! > don't build any speculative module structures, i.e. they are expanded in a "flat" -//! > way even if tokens in them look like modules. -//! -//! > In other words, it kinda works for simple cases for which it was originally intended, -//! > and we need to live with it because it's available on stable and widely relied upon. -//! -//! -//! 
See the full discussion : https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros - -use crate::{ - ast::{self, AstNode}, - db::AstDatabase, - EagerCallLoc, EagerMacroId, InFile, MacroCallId, MacroCallKind, MacroDefId, MacroDefKind, -}; - -use ra_db::CrateId; -use ra_parser::FragmentKind; -use ra_syntax::{algo::SyntaxRewriter, SyntaxNode}; -use std::sync::Arc; - -pub fn expand_eager_macro( - db: &dyn AstDatabase, - krate: CrateId, - macro_call: InFile, - def: MacroDefId, - resolver: &dyn Fn(ast::Path) -> Option, -) -> Option { - let args = macro_call.value.token_tree()?; - let parsed_args = mbe::ast_to_token_tree(&args)?.0; - - // Note: - // When `lazy_expand` is called, its *parent* file must be already exists. - // Here we store an eager macro id for the argument expanded subtree here - // for that purpose. - let arg_id = db.intern_eager_expansion({ - EagerCallLoc { - def, - fragment: FragmentKind::Expr, - subtree: Arc::new(parsed_args.clone()), - krate, - file_id: macro_call.file_id, - } - }); - let arg_file_id: MacroCallId = arg_id.into(); - - let parsed_args = mbe::token_tree_to_syntax_node(&parsed_args, FragmentKind::Expr).ok()?.0; - let result = eager_macro_recur( - db, - InFile::new(arg_file_id.as_file(), parsed_args.syntax_node()), - krate, - resolver, - )?; - let subtree = to_subtree(&result)?; - - if let MacroDefKind::BuiltInEager(eager) = def.kind { - let (subtree, fragment) = eager.expand(db, arg_id, &subtree).ok()?; - let eager = EagerCallLoc { - def, - fragment, - subtree: Arc::new(subtree), - krate, - file_id: macro_call.file_id, - }; - - Some(db.intern_eager_expansion(eager)) - } else { - None - } -} - -fn to_subtree(node: &SyntaxNode) -> Option { - let mut subtree = mbe::syntax_node_to_token_tree(node)?.0; - subtree.delimiter = None; - Some(subtree) -} - -fn lazy_expand( - db: &dyn AstDatabase, - def: &MacroDefId, - macro_call: InFile, - krate: CrateId, -) -> Option> { - let ast_id = 
db.ast_id_map(macro_call.file_id).ast_id(¯o_call.value); - - let id: MacroCallId = - def.as_lazy_macro(db, krate, MacroCallKind::FnLike(macro_call.with_value(ast_id))).into(); - - db.parse_or_expand(id.as_file()).map(|node| InFile::new(id.as_file(), node)) -} - -fn eager_macro_recur( - db: &dyn AstDatabase, - curr: InFile, - krate: CrateId, - macro_resolver: &dyn Fn(ast::Path) -> Option, -) -> Option { - let original = curr.value.clone(); - - let children = curr.value.descendants().filter_map(ast::MacroCall::cast); - let mut rewriter = SyntaxRewriter::default(); - - // Collect replacement - for child in children { - let def: MacroDefId = macro_resolver(child.path()?)?; - let insert = match def.kind { - MacroDefKind::BuiltInEager(_) => { - let id: MacroCallId = expand_eager_macro( - db, - krate, - curr.with_value(child.clone()), - def, - macro_resolver, - )? - .into(); - db.parse_or_expand(id.as_file())? - } - MacroDefKind::Declarative - | MacroDefKind::BuiltIn(_) - | MacroDefKind::BuiltInDerive(_) - | MacroDefKind::CustomDerive(_) => { - let expanded = lazy_expand(db, &def, curr.with_value(child.clone()), krate)?; - // replace macro inside - eager_macro_recur(db, expanded, krate, macro_resolver)? - } - }; - - rewriter.replace(child.syntax(), &insert); - } - - let res = rewriter.rewrite(&original); - Some(res) -} diff --git a/crates/ra_hir_expand/src/hygiene.rs b/crates/ra_hir_expand/src/hygiene.rs deleted file mode 100644 index 6b482a60c5..0000000000 --- a/crates/ra_hir_expand/src/hygiene.rs +++ /dev/null @@ -1,66 +0,0 @@ -//! This modules handles hygiene information. -//! -//! Specifically, `ast` + `Hygiene` allows you to create a `Name`. Note that, at -//! this moment, this is horribly incomplete and handles only `$crate`. 
-use either::Either; -use ra_db::CrateId; -use ra_syntax::ast; - -use crate::{ - db::AstDatabase, - name::{AsName, Name}, - HirFileId, HirFileIdRepr, MacroCallId, MacroDefKind, -}; - -#[derive(Clone, Debug)] -pub struct Hygiene { - // This is what `$crate` expands to - def_crate: Option, - - // Indiciate this is a local inner macro - local_inner: bool, -} - -impl Hygiene { - pub fn new(db: &dyn AstDatabase, file_id: HirFileId) -> Hygiene { - let (def_crate, local_inner) = match file_id.0 { - HirFileIdRepr::FileId(_) => (None, false), - HirFileIdRepr::MacroFile(macro_file) => match macro_file.macro_call_id { - MacroCallId::LazyMacro(id) => { - let loc = db.lookup_intern_macro(id); - match loc.def.kind { - MacroDefKind::Declarative => (loc.def.krate, loc.def.local_inner), - MacroDefKind::BuiltIn(_) => (None, false), - MacroDefKind::BuiltInDerive(_) => (None, false), - MacroDefKind::BuiltInEager(_) => (None, false), - MacroDefKind::CustomDerive(_) => (None, false), - } - } - MacroCallId::EagerMacro(_id) => (None, false), - }, - }; - Hygiene { def_crate, local_inner } - } - - pub fn new_unhygienic() -> Hygiene { - Hygiene { def_crate: None, local_inner: false } - } - - // FIXME: this should just return name - pub fn name_ref_to_name(&self, name_ref: ast::NameRef) -> Either { - if let Some(def_crate) = self.def_crate { - if name_ref.text() == "$crate" { - return Either::Right(def_crate); - } - } - Either::Left(name_ref.as_name()) - } - - pub fn local_inner_macros(&self) -> Option { - if self.local_inner { - self.def_crate - } else { - None - } - } -} diff --git a/crates/ra_hir_expand/src/lib.rs b/crates/ra_hir_expand/src/lib.rs deleted file mode 100644 index 2e8d636917..0000000000 --- a/crates/ra_hir_expand/src/lib.rs +++ /dev/null @@ -1,452 +0,0 @@ -//! `ra_hir_expand` deals with macro expansion. -//! -//! Specifically, it implements a concept of `MacroFile` -- a file whose syntax -//! tree originates not from the text of some `FileId`, but from some macro -//! 
expansion. - -pub mod db; -pub mod ast_id_map; -pub mod name; -pub mod hygiene; -pub mod diagnostics; -pub mod builtin_derive; -pub mod builtin_macro; -pub mod proc_macro; -pub mod quote; -pub mod eager; - -use std::hash::Hash; -use std::sync::Arc; - -use ra_db::{impl_intern_key, salsa, CrateId, FileId}; -use ra_syntax::{ - algo, - ast::{self, AstNode}, - SyntaxNode, SyntaxToken, TextSize, -}; - -use crate::ast_id_map::FileAstId; -use crate::builtin_derive::BuiltinDeriveExpander; -use crate::builtin_macro::{BuiltinFnLikeExpander, EagerExpander}; -use crate::proc_macro::ProcMacroExpander; - -#[cfg(test)] -mod test_db; - -/// Input to the analyzer is a set of files, where each file is identified by -/// `FileId` and contains source code. However, another source of source code in -/// Rust are macros: each macro can be thought of as producing a "temporary -/// file". To assign an id to such a file, we use the id of the macro call that -/// produced the file. So, a `HirFileId` is either a `FileId` (source code -/// written by user), or a `MacroCallId` (source code produced by macro). -/// -/// What is a `MacroCallId`? Simplifying, it's a `HirFileId` of a file -/// containing the call plus the offset of the macro call in the file. Note that -/// this is a recursive definition! However, the size_of of `HirFileId` is -/// finite (because everything bottoms out at the real `FileId`) and small -/// (`MacroCallId` uses the location interner). 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct HirFileId(HirFileIdRepr); - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -enum HirFileIdRepr { - FileId(FileId), - MacroFile(MacroFile), -} - -impl From for HirFileId { - fn from(id: FileId) -> Self { - HirFileId(HirFileIdRepr::FileId(id)) - } -} - -impl From for HirFileId { - fn from(id: MacroFile) -> Self { - HirFileId(HirFileIdRepr::MacroFile(id)) - } -} - -impl HirFileId { - /// For macro-expansion files, returns the file original source file the - /// expansion originated from. - pub fn original_file(self, db: &dyn db::AstDatabase) -> FileId { - match self.0 { - HirFileIdRepr::FileId(file_id) => file_id, - HirFileIdRepr::MacroFile(macro_file) => { - let file_id = match macro_file.macro_call_id { - MacroCallId::LazyMacro(id) => { - let loc = db.lookup_intern_macro(id); - loc.kind.file_id() - } - MacroCallId::EagerMacro(id) => { - let loc = db.lookup_intern_eager_expansion(id); - loc.file_id - } - }; - file_id.original_file(db) - } - } - } - - pub fn expansion_level(self, db: &dyn db::AstDatabase) -> u32 { - let mut level = 0; - let mut curr = self; - while let HirFileIdRepr::MacroFile(macro_file) = curr.0 { - level += 1; - curr = match macro_file.macro_call_id { - MacroCallId::LazyMacro(id) => { - let loc = db.lookup_intern_macro(id); - loc.kind.file_id() - } - MacroCallId::EagerMacro(id) => { - let loc = db.lookup_intern_eager_expansion(id); - loc.file_id - } - }; - } - level - } - - /// If this is a macro call, returns the syntax node of the call. 
- pub fn call_node(self, db: &dyn db::AstDatabase) -> Option> { - match self.0 { - HirFileIdRepr::FileId(_) => None, - HirFileIdRepr::MacroFile(macro_file) => { - let lazy_id = match macro_file.macro_call_id { - MacroCallId::LazyMacro(id) => id, - MacroCallId::EagerMacro(_id) => { - // FIXME: handle call node for eager macro - return None; - } - }; - let loc = db.lookup_intern_macro(lazy_id); - Some(loc.kind.node(db)) - } - } - } - - /// Return expansion information if it is a macro-expansion file - pub fn expansion_info(self, db: &dyn db::AstDatabase) -> Option { - match self.0 { - HirFileIdRepr::FileId(_) => None, - HirFileIdRepr::MacroFile(macro_file) => { - let lazy_id = match macro_file.macro_call_id { - MacroCallId::LazyMacro(id) => id, - MacroCallId::EagerMacro(_id) => { - // FIXME: handle expansion_info for eager macro - return None; - } - }; - let loc: MacroCallLoc = db.lookup_intern_macro(lazy_id); - - let arg_tt = loc.kind.arg(db)?; - let def_tt = loc.def.ast_id?.to_node(db).token_tree()?; - - let macro_def = db.macro_def(loc.def)?; - let (parse, exp_map) = db.parse_macro(macro_file)?; - let macro_arg = db.macro_arg(macro_file.macro_call_id)?; - - Some(ExpansionInfo { - expanded: InFile::new(self, parse.syntax_node()), - arg: InFile::new(loc.kind.file_id(), arg_tt), - def: InFile::new(loc.def.ast_id?.file_id, def_tt), - macro_arg, - macro_def, - exp_map, - }) - } - } - } - - /// Indicate it is macro file generated for builtin derive - pub fn is_builtin_derive(&self, db: &dyn db::AstDatabase) -> Option> { - match self.0 { - HirFileIdRepr::FileId(_) => None, - HirFileIdRepr::MacroFile(macro_file) => { - let lazy_id = match macro_file.macro_call_id { - MacroCallId::LazyMacro(id) => id, - MacroCallId::EagerMacro(_id) => { - return None; - } - }; - let loc: MacroCallLoc = db.lookup_intern_macro(lazy_id); - let item = match loc.def.kind { - MacroDefKind::BuiltInDerive(_) => loc.kind.node(db), - _ => return None, - }; - 
Some(item.with_value(ast::Item::cast(item.value.clone())?)) - } - } - } -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct MacroFile { - macro_call_id: MacroCallId, -} - -/// `MacroCallId` identifies a particular macro invocation, like -/// `println!("Hello, {}", world)`. -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub enum MacroCallId { - LazyMacro(LazyMacroId), - EagerMacro(EagerMacroId), -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct LazyMacroId(salsa::InternId); -impl_intern_key!(LazyMacroId); - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct EagerMacroId(salsa::InternId); -impl_intern_key!(EagerMacroId); - -impl From for MacroCallId { - fn from(it: LazyMacroId) -> Self { - MacroCallId::LazyMacro(it) - } -} -impl From for MacroCallId { - fn from(it: EagerMacroId) -> Self { - MacroCallId::EagerMacro(it) - } -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct MacroDefId { - // FIXME: krate and ast_id are currently optional because we don't have a - // definition location for built-in derives. There is one, though: the - // standard library defines them. The problem is that it uses the new - // `macro` syntax for this, which we don't support yet. As soon as we do - // (which will probably require touching this code), we can instead use - // that (and also remove the hacks for resolving built-in derives). 
- pub krate: Option, - pub ast_id: Option>, - pub kind: MacroDefKind, - - pub local_inner: bool, -} - -impl MacroDefId { - pub fn as_lazy_macro( - self, - db: &dyn db::AstDatabase, - krate: CrateId, - kind: MacroCallKind, - ) -> LazyMacroId { - db.intern_macro(MacroCallLoc { def: self, krate, kind }) - } -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub enum MacroDefKind { - Declarative, - BuiltIn(BuiltinFnLikeExpander), - // FIXME: maybe just Builtin and rename BuiltinFnLikeExpander to BuiltinExpander - BuiltInDerive(BuiltinDeriveExpander), - BuiltInEager(EagerExpander), - CustomDerive(ProcMacroExpander), -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct MacroCallLoc { - pub(crate) def: MacroDefId, - pub(crate) krate: CrateId, - pub(crate) kind: MacroCallKind, -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum MacroCallKind { - FnLike(AstId), - Attr(AstId, String), -} - -impl MacroCallKind { - fn file_id(&self) -> HirFileId { - match self { - MacroCallKind::FnLike(ast_id) => ast_id.file_id, - MacroCallKind::Attr(ast_id, _) => ast_id.file_id, - } - } - - fn node(&self, db: &dyn db::AstDatabase) -> InFile { - match self { - MacroCallKind::FnLike(ast_id) => ast_id.with_value(ast_id.to_node(db).syntax().clone()), - MacroCallKind::Attr(ast_id, _) => { - ast_id.with_value(ast_id.to_node(db).syntax().clone()) - } - } - } - - fn arg(&self, db: &dyn db::AstDatabase) -> Option { - match self { - MacroCallKind::FnLike(ast_id) => { - Some(ast_id.to_node(db).token_tree()?.syntax().clone()) - } - MacroCallKind::Attr(ast_id, _) => Some(ast_id.to_node(db).syntax().clone()), - } - } -} - -impl MacroCallId { - pub fn as_file(self) -> HirFileId { - MacroFile { macro_call_id: self }.into() - } -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct EagerCallLoc { - pub(crate) def: MacroDefId, - pub(crate) fragment: FragmentKind, - pub(crate) subtree: Arc, - pub(crate) krate: CrateId, - pub(crate) file_id: HirFileId, -} - -/// ExpansionInfo 
mainly describes how to map text range between src and expanded macro -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct ExpansionInfo { - expanded: InFile, - arg: InFile, - def: InFile, - - macro_def: Arc<(db::TokenExpander, mbe::TokenMap)>, - macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>, - exp_map: Arc, -} - -pub use mbe::Origin; -use ra_parser::FragmentKind; - -impl ExpansionInfo { - pub fn call_node(&self) -> Option> { - Some(self.arg.with_value(self.arg.value.parent()?)) - } - - pub fn map_token_down(&self, token: InFile<&SyntaxToken>) -> Option> { - assert_eq!(token.file_id, self.arg.file_id); - let range = token.value.text_range().checked_sub(self.arg.value.text_range().start())?; - let token_id = self.macro_arg.1.token_by_range(range)?; - let token_id = self.macro_def.0.map_id_down(token_id); - - let range = self.exp_map.range_by_token(token_id)?.by_kind(token.value.kind())?; - - let token = algo::find_covering_element(&self.expanded.value, range).into_token()?; - - Some(self.expanded.with_value(token)) - } - - pub fn map_token_up( - &self, - token: InFile<&SyntaxToken>, - ) -> Option<(InFile, Origin)> { - let token_id = self.exp_map.token_by_range(token.value.text_range())?; - - let (token_id, origin) = self.macro_def.0.map_id_up(token_id); - let (token_map, tt) = match origin { - mbe::Origin::Call => (&self.macro_arg.1, self.arg.clone()), - mbe::Origin::Def => { - (&self.macro_def.1, self.def.as_ref().map(|tt| tt.syntax().clone())) - } - }; - - let range = token_map.range_by_token(token_id)?.by_kind(token.value.kind())?; - let token = algo::find_covering_element(&tt.value, range + tt.value.text_range().start()) - .into_token()?; - Some((tt.with_value(token), origin)) - } -} - -/// `AstId` points to an AST node in any file. -/// -/// It is stable across reparses, and can be used as salsa key/value. -// FIXME: isn't this just a `Source>` ? 
-pub type AstId = InFile>; - -impl AstId { - pub fn to_node(&self, db: &dyn db::AstDatabase) -> N { - let root = db.parse_or_expand(self.file_id).unwrap(); - db.ast_id_map(self.file_id).get(self.value).to_node(&root) - } -} - -/// `InFile` stores a value of `T` inside a particular file/syntax tree. -/// -/// Typical usages are: -/// -/// * `InFile` -- syntax node in a file -/// * `InFile` -- ast node in a file -/// * `InFile` -- offset in a file -#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] -pub struct InFile { - pub file_id: HirFileId, - pub value: T, -} - -impl InFile { - pub fn new(file_id: HirFileId, value: T) -> InFile { - InFile { file_id, value } - } - - // Similarly, naming here is stupid... - pub fn with_value(&self, value: U) -> InFile { - InFile::new(self.file_id, value) - } - - pub fn map U, U>(self, f: F) -> InFile { - InFile::new(self.file_id, f(self.value)) - } - pub fn as_ref(&self) -> InFile<&T> { - self.with_value(&self.value) - } - pub fn file_syntax(&self, db: &dyn db::AstDatabase) -> SyntaxNode { - db.parse_or_expand(self.file_id).expect("source created from invalid file") - } -} - -impl InFile<&T> { - pub fn cloned(&self) -> InFile { - self.with_value(self.value.clone()) - } -} - -impl InFile> { - pub fn transpose(self) -> Option> { - let value = self.value?; - Some(InFile::new(self.file_id, value)) - } -} - -impl InFile { - pub fn ancestors_with_macros( - self, - db: &dyn db::AstDatabase, - ) -> impl Iterator> + '_ { - std::iter::successors(Some(self), move |node| match node.value.parent() { - Some(parent) => Some(node.with_value(parent)), - None => { - let parent_node = node.file_id.call_node(db)?; - Some(parent_node) - } - }) - } -} - -impl InFile { - pub fn ancestors_with_macros( - self, - db: &dyn db::AstDatabase, - ) -> impl Iterator> + '_ { - self.map(|it| it.parent()).ancestors_with_macros(db) - } -} - -impl InFile { - pub fn descendants(self) -> impl Iterator> { - self.value.syntax().descendants().filter_map(T::cast).map(move 
|n| self.with_value(n)) - } - - pub fn syntax(&self) -> InFile<&SyntaxNode> { - self.with_value(self.value.syntax()) - } -} diff --git a/crates/ra_hir_expand/src/name.rs b/crates/ra_hir_expand/src/name.rs deleted file mode 100644 index 969a2e5b83..0000000000 --- a/crates/ra_hir_expand/src/name.rs +++ /dev/null @@ -1,230 +0,0 @@ -//! FIXME: write short doc here - -use std::fmt; - -use ra_syntax::{ast, SmolStr}; - -/// `Name` is a wrapper around string, which is used in hir for both references -/// and declarations. In theory, names should also carry hygiene info, but we are -/// not there yet! -#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] -pub struct Name(Repr); - -#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] -enum Repr { - Text(SmolStr), - TupleField(usize), -} - -impl fmt::Display for Name { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match &self.0 { - Repr::Text(text) => fmt::Display::fmt(&text, f), - Repr::TupleField(idx) => fmt::Display::fmt(&idx, f), - } - } -} - -impl Name { - /// Note: this is private to make creating name from random string hard. - /// Hopefully, this should allow us to integrate hygiene cleaner in the - /// future, and to switch to interned representation of names. - const fn new_text(text: SmolStr) -> Name { - Name(Repr::Text(text)) - } - - pub fn new_tuple_field(idx: usize) -> Name { - Name(Repr::TupleField(idx)) - } - - pub fn new_lifetime(lt: &ra_syntax::SyntaxToken) -> Name { - assert!(lt.kind() == ra_syntax::SyntaxKind::LIFETIME); - Name(Repr::Text(lt.text().clone())) - } - - /// Shortcut to create inline plain text name - const fn new_inline_ascii(text: &[u8]) -> Name { - Name::new_text(SmolStr::new_inline_from_ascii(text.len(), text)) - } - - /// Resolve a name from the text of token. 
- fn resolve(raw_text: &SmolStr) -> Name { - let raw_start = "r#"; - if raw_text.as_str().starts_with(raw_start) { - Name::new_text(SmolStr::new(&raw_text[raw_start.len()..])) - } else { - Name::new_text(raw_text.clone()) - } - } - - pub fn missing() -> Name { - Name::new_text("[missing name]".into()) - } - - pub fn as_tuple_index(&self) -> Option { - match self.0 { - Repr::TupleField(idx) => Some(idx), - _ => None, - } - } -} - -pub trait AsName { - fn as_name(&self) -> Name; -} - -impl AsName for ast::NameRef { - fn as_name(&self) -> Name { - match self.as_tuple_field() { - Some(idx) => Name::new_tuple_field(idx), - None => Name::resolve(self.text()), - } - } -} - -impl AsName for ast::Name { - fn as_name(&self) -> Name { - Name::resolve(self.text()) - } -} - -impl AsName for ast::NameOrNameRef { - fn as_name(&self) -> Name { - match self { - ast::NameOrNameRef::Name(it) => it.as_name(), - ast::NameOrNameRef::NameRef(it) => it.as_name(), - } - } -} - -impl AsName for tt::Ident { - fn as_name(&self) -> Name { - Name::resolve(&self.text) - } -} - -impl AsName for ast::FieldKind { - fn as_name(&self) -> Name { - match self { - ast::FieldKind::Name(nr) => nr.as_name(), - ast::FieldKind::Index(idx) => { - let idx = idx.text().parse::().unwrap_or(0); - Name::new_tuple_field(idx) - } - } - } -} - -impl AsName for ra_db::Dependency { - fn as_name(&self) -> Name { - Name::new_text(SmolStr::new(&*self.name)) - } -} - -pub mod known { - macro_rules! known_names { - ($($ident:ident),* $(,)?) 
=> { - $( - #[allow(bad_style)] - pub const $ident: super::Name = - super::Name::new_inline_ascii(stringify!($ident).as_bytes()); - )* - }; - } - - known_names!( - // Primitives - isize, - i8, - i16, - i32, - i64, - i128, - usize, - u8, - u16, - u32, - u64, - u128, - f32, - f64, - bool, - char, - str, - // Special names - macro_rules, - doc, - // Components of known path (value or mod name) - std, - core, - alloc, - iter, - ops, - future, - result, - boxed, - // Components of known path (type name) - IntoIterator, - Item, - Try, - Ok, - Future, - Result, - Output, - Target, - Box, - RangeFrom, - RangeFull, - RangeInclusive, - RangeToInclusive, - RangeTo, - Range, - Neg, - Not, - Index, - // Builtin macros - file, - column, - compile_error, - line, - assert, - stringify, - concat, - include, - include_bytes, - include_str, - format_args, - format_args_nl, - env, - option_env, - // Builtin derives - Copy, - Clone, - Default, - Debug, - Hash, - Ord, - PartialOrd, - Eq, - PartialEq, - ); - - // self/Self cannot be used as an identifier - pub const SELF_PARAM: super::Name = super::Name::new_inline_ascii(b"self"); - pub const SELF_TYPE: super::Name = super::Name::new_inline_ascii(b"Self"); - - #[macro_export] - macro_rules! name { - (self) => { - $crate::name::known::SELF_PARAM - }; - (Self) => { - $crate::name::known::SELF_TYPE - }; - ($ident:ident) => { - $crate::name::known::$ident - }; - } -} - -pub use crate::name; diff --git a/crates/ra_hir_expand/src/proc_macro.rs b/crates/ra_hir_expand/src/proc_macro.rs deleted file mode 100644 index 2c0ec41d24..0000000000 --- a/crates/ra_hir_expand/src/proc_macro.rs +++ /dev/null @@ -1,143 +0,0 @@ -//! Proc Macro Expander stub - -use crate::{db::AstDatabase, LazyMacroId}; -use ra_db::{CrateId, ProcMacroId}; -use tt::buffer::{Cursor, TokenBuffer}; - -#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] -pub struct ProcMacroExpander { - krate: CrateId, - proc_macro_id: ProcMacroId, -} - -macro_rules! 
err { - ($fmt:literal, $($tt:tt),*) => { - mbe::ExpandError::ProcMacroError(tt::ExpansionError::Unknown(format!($fmt, $($tt),*))) - }; - ($fmt:literal) => { - mbe::ExpandError::ProcMacroError(tt::ExpansionError::Unknown($fmt.to_string())) - } -} - -impl ProcMacroExpander { - pub fn new(krate: CrateId, proc_macro_id: ProcMacroId) -> ProcMacroExpander { - ProcMacroExpander { krate, proc_macro_id } - } - - pub fn expand( - self, - db: &dyn AstDatabase, - _id: LazyMacroId, - tt: &tt::Subtree, - ) -> Result { - let krate_graph = db.crate_graph(); - let proc_macro = krate_graph[self.krate] - .proc_macro - .get(self.proc_macro_id.0 as usize) - .clone() - .ok_or_else(|| err!("No derive macro found."))?; - - let tt = remove_derive_attrs(tt) - .ok_or_else(|| err!("Fail to remove derive for custom derive"))?; - - proc_macro.expander.expand(&tt, None).map_err(mbe::ExpandError::from) - } -} - -fn eat_punct(cursor: &mut Cursor, c: char) -> bool { - if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = cursor.token_tree() { - if punct.char == c { - *cursor = cursor.bump(); - return true; - } - } - false -} - -fn eat_subtree(cursor: &mut Cursor, kind: tt::DelimiterKind) -> bool { - if let Some(tt::TokenTree::Subtree(subtree)) = cursor.token_tree() { - if Some(kind) == subtree.delimiter_kind() { - *cursor = cursor.bump_subtree(); - return true; - } - } - false -} - -fn eat_ident(cursor: &mut Cursor, t: &str) -> bool { - if let Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) = cursor.token_tree() { - if t == ident.text.as_str() { - *cursor = cursor.bump(); - return true; - } - } - false -} - -fn remove_derive_attrs(tt: &tt::Subtree) -> Option { - let buffer = TokenBuffer::new(&tt.token_trees); - let mut p = buffer.begin(); - let mut result = tt::Subtree::default(); - - while !p.eof() { - let curr = p; - - if eat_punct(&mut p, '#') { - eat_punct(&mut p, '!'); - let parent = p; - if eat_subtree(&mut p, tt::DelimiterKind::Bracket) { - if eat_ident(&mut p, "derive") { - p = 
parent.bump(); - continue; - } - } - } - - result.token_trees.push(curr.token_tree()?.clone()); - p = curr.bump(); - } - - Some(result) -} - -#[cfg(test)] -mod tests { - use super::*; - use test_utils::assert_eq_text; - - #[test] - fn test_remove_derive_attrs() { - let tt = mbe::parse_to_token_tree( - r#" - #[allow(unused)] - #[derive(Copy)] - #[derive(Hello)] - struct A { - bar: u32 - } -"#, - ) - .unwrap() - .0; - let result = format!("{:#?}", remove_derive_attrs(&tt).unwrap()); - - assert_eq_text!( - &result, - r#" -SUBTREE $ - PUNCH # [alone] 0 - SUBTREE [] 1 - IDENT allow 2 - SUBTREE () 3 - IDENT unused 4 - IDENT struct 15 - IDENT A 16 - SUBTREE {} 17 - IDENT bar 18 - PUNCH : [alone] 19 - IDENT u32 20 -"# - .trim() - ); - } -} diff --git a/crates/ra_hir_expand/src/test_db.rs b/crates/ra_hir_expand/src/test_db.rs deleted file mode 100644 index 332fa556fa..0000000000 --- a/crates/ra_hir_expand/src/test_db.rs +++ /dev/null @@ -1,49 +0,0 @@ -//! Database used for testing `hir_expand`. - -use std::{ - fmt, panic, - sync::{Arc, Mutex}, -}; - -use ra_db::{salsa, CrateId, FileId, FileLoader, FileLoaderDelegate}; -use rustc_hash::FxHashSet; - -#[salsa::database( - ra_db::SourceDatabaseExtStorage, - ra_db::SourceDatabaseStorage, - crate::db::AstDatabaseStorage -)] -#[derive(Default)] -pub struct TestDB { - storage: salsa::Storage, - events: Mutex>>, -} - -impl fmt::Debug for TestDB { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("TestDB").finish() - } -} - -impl salsa::Database for TestDB { - fn salsa_event(&self, event: salsa::Event) { - let mut events = self.events.lock().unwrap(); - if let Some(events) = &mut *events { - events.push(event); - } - } -} - -impl panic::RefUnwindSafe for TestDB {} - -impl FileLoader for TestDB { - fn file_text(&self, file_id: FileId) -> Arc { - FileLoaderDelegate(self).file_text(file_id) - } - fn resolve_path(&self, anchor: FileId, path: &str) -> Option { - FileLoaderDelegate(self).resolve_path(anchor, 
path) - } - fn relevant_crates(&self, file_id: FileId) -> Arc> { - FileLoaderDelegate(self).relevant_crates(file_id) - } -} diff --git a/crates/ra_hir_ty/Cargo.toml b/crates/ra_hir_ty/Cargo.toml deleted file mode 100644 index 83397d5793..0000000000 --- a/crates/ra_hir_ty/Cargo.toml +++ /dev/null @@ -1,40 +0,0 @@ -[package] -edition = "2018" -name = "ra_hir_ty" -version = "0.1.0" -authors = ["rust-analyzer developers"] -license = "MIT OR Apache-2.0" - -[lib] -doctest = false - -[dependencies] -itertools = "0.9.0" -arrayvec = "0.5.1" -smallvec = "1.2.0" -ena = "0.14.0" -log = "0.4.8" -rustc-hash = "1.1.0" - -stdx = { path = "../stdx" } - -hir_def = { path = "../ra_hir_def", package = "ra_hir_def" } -hir_expand = { path = "../ra_hir_expand", package = "ra_hir_expand" } -ra_arena = { path = "../ra_arena" } -ra_db = { path = "../ra_db" } -ra_prof = { path = "../ra_prof" } -ra_syntax = { path = "../ra_syntax" } -test_utils = { path = "../test_utils" } - -scoped-tls = "1" - -chalk-solve = { version = "0.21.0" } -chalk-ir = { version = "0.21.0" } -chalk-recursive = { version = "0.21.0" } - -[dev-dependencies] -expect = { path = "../expect" } - -tracing = "0.1" -tracing-subscriber = { version = "0.2", default-features = false, features = ["env-filter", "registry"] } -tracing-tree = { version = "0.1.4" } diff --git a/crates/ra_hir_ty/src/autoderef.rs b/crates/ra_hir_ty/src/autoderef.rs deleted file mode 100644 index c727012c69..0000000000 --- a/crates/ra_hir_ty/src/autoderef.rs +++ /dev/null @@ -1,131 +0,0 @@ -//! In certain situations, rust automatically inserts derefs as necessary: for -//! example, field accesses `foo.bar` still work when `foo` is actually a -//! reference to a type with the field `bar`. This is an approximation of the -//! logic in rustc (which lives in librustc_typeck/check/autoderef.rs). 
- -use std::iter::successors; - -use hir_def::lang_item::LangItemTarget; -use hir_expand::name::name; -use log::{info, warn}; -use ra_db::CrateId; - -use crate::{ - db::HirDatabase, - traits::{InEnvironment, Solution}, - utils::generics, - BoundVar, Canonical, DebruijnIndex, Obligation, Substs, TraitRef, Ty, -}; - -const AUTODEREF_RECURSION_LIMIT: usize = 10; - -pub fn autoderef<'a>( - db: &'a dyn HirDatabase, - krate: Option, - ty: InEnvironment>, -) -> impl Iterator> + 'a { - let InEnvironment { value: ty, environment } = ty; - successors(Some(ty), move |ty| { - deref(db, krate?, InEnvironment { value: ty, environment: environment.clone() }) - }) - .take(AUTODEREF_RECURSION_LIMIT) -} - -pub(crate) fn deref( - db: &dyn HirDatabase, - krate: CrateId, - ty: InEnvironment<&Canonical>, -) -> Option> { - if let Some(derefed) = ty.value.value.builtin_deref() { - Some(Canonical { value: derefed, kinds: ty.value.kinds.clone() }) - } else { - deref_by_trait(db, krate, ty) - } -} - -fn deref_by_trait( - db: &dyn HirDatabase, - krate: CrateId, - ty: InEnvironment<&Canonical>, -) -> Option> { - let deref_trait = match db.lang_item(krate, "deref".into())? 
{ - LangItemTarget::TraitId(it) => it, - _ => return None, - }; - let target = db.trait_data(deref_trait).associated_type_by_name(&name![Target])?; - - let generic_params = generics(db.upcast(), target.into()); - if generic_params.len() != 1 { - // the Target type + Deref trait should only have one generic parameter, - // namely Deref's Self type - return None; - } - - // FIXME make the Canonical / bound var handling nicer - - let parameters = - Substs::build_for_generics(&generic_params).push(ty.value.value.clone()).build(); - - // Check that the type implements Deref at all - let trait_ref = TraitRef { trait_: deref_trait, substs: parameters.clone() }; - let implements_goal = Canonical { - kinds: ty.value.kinds.clone(), - value: InEnvironment { - value: Obligation::Trait(trait_ref), - environment: ty.environment.clone(), - }, - }; - if db.trait_solve(krate, implements_goal).is_none() { - return None; - } - - // Now do the assoc type projection - let projection = super::traits::ProjectionPredicate { - ty: Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, ty.value.kinds.len())), - projection_ty: super::ProjectionTy { associated_ty: target, parameters }, - }; - - let obligation = super::Obligation::Projection(projection); - - let in_env = InEnvironment { value: obligation, environment: ty.environment }; - - let canonical = - Canonical::new(in_env, ty.value.kinds.iter().copied().chain(Some(super::TyKind::General))); - - let solution = db.trait_solve(krate, canonical)?; - - match &solution { - Solution::Unique(vars) => { - // FIXME: vars may contain solutions for any inference variables - // that happened to be inside ty. To correctly handle these, we - // would have to pass the solution up to the inference context, but - // that requires a larger refactoring (especially if the deref - // happens during method resolution). 
So for the moment, we just - // check that we're not in the situation we're we would actually - // need to handle the values of the additional variables, i.e. - // they're just being 'passed through'. In the 'standard' case where - // we have `impl Deref for Foo { Target = T }`, that should be - // the case. - - // FIXME: if the trait solver decides to truncate the type, these - // assumptions will be broken. We would need to properly introduce - // new variables in that case - - for i in 1..vars.0.kinds.len() { - if vars.0.value[i - 1] != Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, i - 1)) - { - warn!("complex solution for derefing {:?}: {:?}, ignoring", ty.value, solution); - return None; - } - } - Some(Canonical { - value: vars.0.value[vars.0.value.len() - 1].clone(), - kinds: vars.0.kinds.clone(), - }) - } - Solution::Ambig(_) => { - info!("Ambiguous solution for derefing {:?}: {:?}", ty.value, solution); - None - } - } -} diff --git a/crates/ra_hir_ty/src/db.rs b/crates/ra_hir_ty/src/db.rs deleted file mode 100644 index c773adc674..0000000000 --- a/crates/ra_hir_ty/src/db.rs +++ /dev/null @@ -1,159 +0,0 @@ -//! 
FIXME: write short doc here - -use std::sync::Arc; - -use hir_def::{ - db::DefDatabase, expr::ExprId, DefWithBodyId, FunctionId, GenericDefId, ImplId, LocalFieldId, - TypeParamId, VariantId, -}; -use ra_arena::map::ArenaMap; -use ra_db::{impl_intern_key, salsa, CrateId, Upcast}; -use ra_prof::profile; - -use crate::{ - method_resolution::{InherentImpls, TraitImpls}, - traits::chalk, - Binders, CallableDefId, GenericPredicate, InferenceResult, OpaqueTyId, PolyFnSig, - ReturnTypeImplTraits, TraitRef, Ty, TyDefId, ValueTyDefId, -}; -use hir_expand::name::Name; - -#[salsa::query_group(HirDatabaseStorage)] -pub trait HirDatabase: DefDatabase + Upcast { - #[salsa::invoke(infer_wait)] - #[salsa::transparent] - fn infer(&self, def: DefWithBodyId) -> Arc; - - #[salsa::invoke(crate::infer::infer_query)] - fn infer_query(&self, def: DefWithBodyId) -> Arc; - - #[salsa::invoke(crate::lower::ty_query)] - #[salsa::cycle(crate::lower::ty_recover)] - fn ty(&self, def: TyDefId) -> Binders; - - #[salsa::invoke(crate::lower::value_ty_query)] - fn value_ty(&self, def: ValueTyDefId) -> Binders; - - #[salsa::invoke(crate::lower::impl_self_ty_query)] - #[salsa::cycle(crate::lower::impl_self_ty_recover)] - fn impl_self_ty(&self, def: ImplId) -> Binders; - - #[salsa::invoke(crate::lower::impl_trait_query)] - fn impl_trait(&self, def: ImplId) -> Option>; - - #[salsa::invoke(crate::lower::field_types_query)] - fn field_types(&self, var: VariantId) -> Arc>>; - - #[salsa::invoke(crate::callable_item_sig)] - fn callable_item_signature(&self, def: CallableDefId) -> PolyFnSig; - - #[salsa::invoke(crate::lower::return_type_impl_traits)] - fn return_type_impl_traits( - &self, - def: FunctionId, - ) -> Option>>; - - #[salsa::invoke(crate::lower::generic_predicates_for_param_query)] - #[salsa::cycle(crate::lower::generic_predicates_for_param_recover)] - fn generic_predicates_for_param( - &self, - param_id: TypeParamId, - ) -> Arc<[Binders]>; - - #[salsa::invoke(crate::lower::generic_predicates_query)] 
- fn generic_predicates(&self, def: GenericDefId) -> Arc<[Binders]>; - - #[salsa::invoke(crate::lower::generic_defaults_query)] - fn generic_defaults(&self, def: GenericDefId) -> Arc<[Binders]>; - - #[salsa::invoke(InherentImpls::inherent_impls_in_crate_query)] - fn inherent_impls_in_crate(&self, krate: CrateId) -> Arc; - - #[salsa::invoke(TraitImpls::trait_impls_in_crate_query)] - fn trait_impls_in_crate(&self, krate: CrateId) -> Arc; - - #[salsa::invoke(TraitImpls::trait_impls_in_deps_query)] - fn trait_impls_in_deps(&self, krate: CrateId) -> Arc; - - // Interned IDs for Chalk integration - #[salsa::interned] - fn intern_callable_def(&self, callable_def: CallableDefId) -> InternedCallableDefId; - #[salsa::interned] - fn intern_type_param_id(&self, param_id: TypeParamId) -> GlobalTypeParamId; - #[salsa::interned] - fn intern_impl_trait_id(&self, id: OpaqueTyId) -> InternedOpaqueTyId; - #[salsa::interned] - fn intern_closure(&self, id: (DefWithBodyId, ExprId)) -> ClosureId; - - #[salsa::invoke(chalk::associated_ty_data_query)] - fn associated_ty_data(&self, id: chalk::AssocTypeId) -> Arc; - - #[salsa::invoke(chalk::trait_datum_query)] - fn trait_datum(&self, krate: CrateId, trait_id: chalk::TraitId) -> Arc; - - #[salsa::invoke(chalk::struct_datum_query)] - fn struct_datum(&self, krate: CrateId, struct_id: chalk::AdtId) -> Arc; - - #[salsa::invoke(crate::traits::chalk::impl_datum_query)] - fn impl_datum(&self, krate: CrateId, impl_id: chalk::ImplId) -> Arc; - - #[salsa::invoke(crate::traits::chalk::fn_def_datum_query)] - fn fn_def_datum(&self, krate: CrateId, fn_def_id: chalk::FnDefId) -> Arc; - - #[salsa::invoke(crate::traits::chalk::associated_ty_value_query)] - fn associated_ty_value( - &self, - krate: CrateId, - id: chalk::AssociatedTyValueId, - ) -> Arc; - - #[salsa::invoke(crate::traits::trait_solve_query)] - fn trait_solve( - &self, - krate: CrateId, - goal: crate::Canonical>, - ) -> Option; - - 
#[salsa::invoke(crate::traits::chalk::program_clauses_for_chalk_env_query)] - fn program_clauses_for_chalk_env( - &self, - krate: CrateId, - env: chalk_ir::Environment, - ) -> chalk_ir::ProgramClauses; -} - -fn infer_wait(db: &impl HirDatabase, def: DefWithBodyId) -> Arc { - let _p = profile("infer:wait").detail(|| match def { - DefWithBodyId::FunctionId(it) => db.function_data(it).name.to_string(), - DefWithBodyId::StaticId(it) => { - db.static_data(it).name.clone().unwrap_or_else(Name::missing).to_string() - } - DefWithBodyId::ConstId(it) => { - db.const_data(it).name.clone().unwrap_or_else(Name::missing).to_string() - } - }); - db.infer_query(def) -} - -#[test] -fn hir_database_is_object_safe() { - fn _assert_object_safe(_: &dyn HirDatabase) {} -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct GlobalTypeParamId(salsa::InternId); -impl_intern_key!(GlobalTypeParamId); - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct InternedOpaqueTyId(salsa::InternId); -impl_intern_key!(InternedOpaqueTyId); - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct ClosureId(salsa::InternId); -impl_intern_key!(ClosureId); - -/// This exists just for Chalk, because Chalk just has a single `FnDefId` where -/// we have different IDs for struct and enum variant constructors. -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] -pub struct InternedCallableDefId(salsa::InternId); -impl_intern_key!(InternedCallableDefId); diff --git a/crates/ra_hir_ty/src/diagnostics.rs b/crates/ra_hir_ty/src/diagnostics.rs deleted file mode 100644 index 977c0525b5..0000000000 --- a/crates/ra_hir_ty/src/diagnostics.rs +++ /dev/null @@ -1,481 +0,0 @@ -//! 
FIXME: write short doc here -mod expr; -mod match_check; -mod unsafe_check; - -use std::any::Any; - -use hir_def::DefWithBodyId; -use hir_expand::diagnostics::{AstDiagnostic, Diagnostic, DiagnosticSink}; -use hir_expand::{db::AstDatabase, name::Name, HirFileId, InFile}; -use ra_prof::profile; -use ra_syntax::{ast, AstNode, AstPtr, SyntaxNodePtr}; -use stdx::format_to; - -use crate::db::HirDatabase; - -pub use crate::diagnostics::expr::{record_literal_missing_fields, record_pattern_missing_fields}; - -pub fn validate_body(db: &dyn HirDatabase, owner: DefWithBodyId, sink: &mut DiagnosticSink<'_>) { - let _p = profile("validate_body"); - let infer = db.infer(owner); - infer.add_diagnostics(db, owner, sink); - let mut validator = expr::ExprValidator::new(owner, infer.clone(), sink); - validator.validate_body(db); - let mut validator = unsafe_check::UnsafeValidator::new(owner, infer, sink); - validator.validate_body(db); -} - -#[derive(Debug)] -pub struct NoSuchField { - pub file: HirFileId, - pub field: AstPtr, -} - -impl Diagnostic for NoSuchField { - fn message(&self) -> String { - "no such field".to_string() - } - - fn source(&self) -> InFile { - InFile::new(self.file, self.field.clone().into()) - } - - fn as_any(&self) -> &(dyn Any + Send + 'static) { - self - } -} - -impl AstDiagnostic for NoSuchField { - type AST = ast::RecordExprField; - - fn ast(&self, db: &dyn AstDatabase) -> Self::AST { - let root = db.parse_or_expand(self.source().file_id).unwrap(); - let node = self.source().value.to_node(&root); - ast::RecordExprField::cast(node).unwrap() - } -} - -#[derive(Debug)] -pub struct MissingFields { - pub file: HirFileId, - pub field_list: AstPtr, - pub missed_fields: Vec, -} - -impl Diagnostic for MissingFields { - fn message(&self) -> String { - let mut buf = String::from("Missing structure fields:\n"); - for field in &self.missed_fields { - format_to!(buf, "- {}\n", field); - } - buf - } - fn source(&self) -> InFile { - InFile { file_id: self.file, value: 
self.field_list.clone().into() } - } - fn as_any(&self) -> &(dyn Any + Send + 'static) { - self - } -} - -impl AstDiagnostic for MissingFields { - type AST = ast::RecordExprFieldList; - - fn ast(&self, db: &dyn AstDatabase) -> Self::AST { - let root = db.parse_or_expand(self.source().file_id).unwrap(); - let node = self.source().value.to_node(&root); - ast::RecordExprFieldList::cast(node).unwrap() - } -} - -#[derive(Debug)] -pub struct MissingPatFields { - pub file: HirFileId, - pub field_list: AstPtr, - pub missed_fields: Vec, -} - -impl Diagnostic for MissingPatFields { - fn message(&self) -> String { - let mut buf = String::from("Missing structure fields:\n"); - for field in &self.missed_fields { - format_to!(buf, "- {}\n", field); - } - buf - } - fn source(&self) -> InFile { - InFile { file_id: self.file, value: self.field_list.clone().into() } - } - fn as_any(&self) -> &(dyn Any + Send + 'static) { - self - } -} - -#[derive(Debug)] -pub struct MissingMatchArms { - pub file: HirFileId, - pub match_expr: AstPtr, - pub arms: AstPtr, -} - -impl Diagnostic for MissingMatchArms { - fn message(&self) -> String { - String::from("Missing match arm") - } - fn source(&self) -> InFile { - InFile { file_id: self.file, value: self.match_expr.clone().into() } - } - fn as_any(&self) -> &(dyn Any + Send + 'static) { - self - } -} - -#[derive(Debug)] -pub struct MissingOkInTailExpr { - pub file: HirFileId, - pub expr: AstPtr, -} - -impl Diagnostic for MissingOkInTailExpr { - fn message(&self) -> String { - "wrap return expression in Ok".to_string() - } - fn source(&self) -> InFile { - InFile { file_id: self.file, value: self.expr.clone().into() } - } - fn as_any(&self) -> &(dyn Any + Send + 'static) { - self - } -} - -impl AstDiagnostic for MissingOkInTailExpr { - type AST = ast::Expr; - - fn ast(&self, db: &dyn AstDatabase) -> Self::AST { - let root = db.parse_or_expand(self.file).unwrap(); - let node = self.source().value.to_node(&root); - ast::Expr::cast(node).unwrap() - } 
-} - -#[derive(Debug)] -pub struct BreakOutsideOfLoop { - pub file: HirFileId, - pub expr: AstPtr, -} - -impl Diagnostic for BreakOutsideOfLoop { - fn message(&self) -> String { - "break outside of loop".to_string() - } - fn source(&self) -> InFile { - InFile { file_id: self.file, value: self.expr.clone().into() } - } - fn as_any(&self) -> &(dyn Any + Send + 'static) { - self - } -} - -impl AstDiagnostic for BreakOutsideOfLoop { - type AST = ast::Expr; - - fn ast(&self, db: &dyn AstDatabase) -> Self::AST { - let root = db.parse_or_expand(self.file).unwrap(); - let node = self.source().value.to_node(&root); - ast::Expr::cast(node).unwrap() - } -} - -#[derive(Debug)] -pub struct MissingUnsafe { - pub file: HirFileId, - pub expr: AstPtr, -} - -impl Diagnostic for MissingUnsafe { - fn message(&self) -> String { - format!("This operation is unsafe and requires an unsafe function or block") - } - fn source(&self) -> InFile { - InFile { file_id: self.file, value: self.expr.clone().into() } - } - fn as_any(&self) -> &(dyn Any + Send + 'static) { - self - } -} - -impl AstDiagnostic for MissingUnsafe { - type AST = ast::Expr; - - fn ast(&self, db: &dyn AstDatabase) -> Self::AST { - let root = db.parse_or_expand(self.source().file_id).unwrap(); - let node = self.source().value.to_node(&root); - ast::Expr::cast(node).unwrap() - } -} - -#[derive(Debug)] -pub struct MismatchedArgCount { - pub file: HirFileId, - pub call_expr: AstPtr, - pub expected: usize, - pub found: usize, -} - -impl Diagnostic for MismatchedArgCount { - fn message(&self) -> String { - let s = if self.expected == 1 { "" } else { "s" }; - format!("Expected {} argument{}, found {}", self.expected, s, self.found) - } - fn source(&self) -> InFile { - InFile { file_id: self.file, value: self.call_expr.clone().into() } - } - fn as_any(&self) -> &(dyn Any + Send + 'static) { - self - } - fn is_experimental(&self) -> bool { - true - } -} - -impl AstDiagnostic for MismatchedArgCount { - type AST = ast::CallExpr; - fn 
ast(&self, db: &dyn AstDatabase) -> Self::AST { - let root = db.parse_or_expand(self.source().file_id).unwrap(); - let node = self.source().value.to_node(&root); - ast::CallExpr::cast(node).unwrap() - } -} - -#[cfg(test)] -mod tests { - use hir_def::{db::DefDatabase, AssocItemId, ModuleDefId}; - use hir_expand::diagnostics::{Diagnostic, DiagnosticSinkBuilder}; - use ra_db::{fixture::WithFixture, FileId, SourceDatabase, SourceDatabaseExt}; - use ra_syntax::{TextRange, TextSize}; - use rustc_hash::FxHashMap; - - use crate::{diagnostics::validate_body, test_db::TestDB}; - - impl TestDB { - fn diagnostics(&self, mut cb: F) { - let crate_graph = self.crate_graph(); - for krate in crate_graph.iter() { - let crate_def_map = self.crate_def_map(krate); - - let mut fns = Vec::new(); - for (module_id, _) in crate_def_map.modules.iter() { - for decl in crate_def_map[module_id].scope.declarations() { - if let ModuleDefId::FunctionId(f) = decl { - fns.push(f) - } - } - - for impl_id in crate_def_map[module_id].scope.impls() { - let impl_data = self.impl_data(impl_id); - for item in impl_data.items.iter() { - if let AssocItemId::FunctionId(f) = item { - fns.push(*f) - } - } - } - } - - for f in fns { - let mut sink = DiagnosticSinkBuilder::new().build(&mut cb); - validate_body(self, f.into(), &mut sink); - } - } - } - } - - pub(crate) fn check_diagnostics(ra_fixture: &str) { - let db = TestDB::with_files(ra_fixture); - let annotations = db.extract_annotations(); - - let mut actual: FxHashMap> = FxHashMap::default(); - db.diagnostics(|d| { - // FXIME: macros... 
- let file_id = d.source().file_id.original_file(&db); - let range = d.syntax_node(&db).text_range(); - let message = d.message().to_owned(); - actual.entry(file_id).or_default().push((range, message)); - }); - - for (file_id, diags) in actual.iter_mut() { - diags.sort_by_key(|it| it.0.start()); - let text = db.file_text(*file_id); - // For multiline spans, place them on line start - for (range, content) in diags { - if text[*range].contains('\n') { - *range = TextRange::new(range.start(), range.start() + TextSize::from(1)); - *content = format!("... {}", content); - } - } - } - - assert_eq!(annotations, actual); - } - - #[test] - fn no_such_field_diagnostics() { - check_diagnostics( - r#" -struct S { foo: i32, bar: () } -impl S { - fn new() -> S { - S { - //^... Missing structure fields: - //| - bar - foo: 92, - baz: 62, - //^^^^^^^ no such field - } - } -} -"#, - ); - } - #[test] - fn no_such_field_with_feature_flag_diagnostics() { - check_diagnostics( - r#" -//- /lib.rs crate:foo cfg:feature=foo -struct MyStruct { - my_val: usize, - #[cfg(feature = "foo")] - bar: bool, -} - -impl MyStruct { - #[cfg(feature = "foo")] - pub(crate) fn new(my_val: usize, bar: bool) -> Self { - Self { my_val, bar } - } - #[cfg(not(feature = "foo"))] - pub(crate) fn new(my_val: usize, _bar: bool) -> Self { - Self { my_val } - } -} -"#, - ); - } - - #[test] - fn no_such_field_enum_with_feature_flag_diagnostics() { - check_diagnostics( - r#" -//- /lib.rs crate:foo cfg:feature=foo -enum Foo { - #[cfg(not(feature = "foo"))] - Buz, - #[cfg(feature = "foo")] - Bar, - Baz -} - -fn test_fn(f: Foo) { - match f { - Foo::Bar => {}, - Foo::Baz => {}, - } -} -"#, - ); - } - - #[test] - fn no_such_field_with_feature_flag_diagnostics_on_struct_lit() { - check_diagnostics( - r#" -//- /lib.rs crate:foo cfg:feature=foo -struct S { - #[cfg(feature = "foo")] - foo: u32, - #[cfg(not(feature = "foo"))] - bar: u32, -} - -impl S { - #[cfg(feature = "foo")] - fn new(foo: u32) -> Self { - Self { foo } - } - 
#[cfg(not(feature = "foo"))] - fn new(bar: u32) -> Self { - Self { bar } - } - fn new2(bar: u32) -> Self { - #[cfg(feature = "foo")] - { Self { foo: bar } } - #[cfg(not(feature = "foo"))] - { Self { bar } } - } - fn new2(val: u32) -> Self { - Self { - #[cfg(feature = "foo")] - foo: val, - #[cfg(not(feature = "foo"))] - bar: val, - } - } -} -"#, - ); - } - - #[test] - fn no_such_field_with_type_macro() { - check_diagnostics( - r#" -macro_rules! Type { () => { u32 }; } -struct Foo { bar: Type![] } - -impl Foo { - fn new() -> Self { - Foo { bar: 0 } - } -} -"#, - ); - } - - #[test] - fn missing_record_pat_field_diagnostic() { - check_diagnostics( - r#" -struct S { foo: i32, bar: () } -fn baz(s: S) { - let S { foo: _ } = s; - //^^^^^^^^^^ Missing structure fields: - // | - bar -} -"#, - ); - } - - #[test] - fn missing_record_pat_field_no_diagnostic_if_not_exhaustive() { - check_diagnostics( - r" -struct S { foo: i32, bar: () } -fn baz(s: S) -> i32 { - match s { - S { foo, .. } => foo, - } -} -", - ) - } - - #[test] - fn break_outside_of_loop() { - check_diagnostics( - r#" -fn foo() { break; } - //^^^^^ break outside of loop -"#, - ); - } -} diff --git a/crates/ra_hir_ty/src/diagnostics/expr.rs b/crates/ra_hir_ty/src/diagnostics/expr.rs deleted file mode 100644 index 95bbf2d955..0000000000 --- a/crates/ra_hir_ty/src/diagnostics/expr.rs +++ /dev/null @@ -1,565 +0,0 @@ -//! 
FIXME: write short doc here - -use std::sync::Arc; - -use hir_def::{path::path, resolver::HasResolver, AdtId, DefWithBodyId}; -use hir_expand::diagnostics::DiagnosticSink; -use ra_syntax::{ast, AstPtr}; -use rustc_hash::FxHashSet; - -use crate::{ - db::HirDatabase, - diagnostics::{ - match_check::{is_useful, MatchCheckCtx, Matrix, PatStack, Usefulness}, - MismatchedArgCount, MissingFields, MissingMatchArms, MissingOkInTailExpr, MissingPatFields, - }, - utils::variant_data, - ApplicationTy, InferenceResult, Ty, TypeCtor, -}; - -pub use hir_def::{ - body::{ - scope::{ExprScopes, ScopeEntry, ScopeId}, - Body, BodySourceMap, ExprPtr, ExprSource, PatPtr, PatSource, - }, - expr::{ - ArithOp, Array, BinaryOp, BindingAnnotation, CmpOp, Expr, ExprId, Literal, LogicOp, - MatchArm, Ordering, Pat, PatId, RecordFieldPat, RecordLitField, Statement, UnaryOp, - }, - src::HasSource, - LocalFieldId, Lookup, VariantId, -}; - -pub(super) struct ExprValidator<'a, 'b: 'a> { - owner: DefWithBodyId, - infer: Arc, - sink: &'a mut DiagnosticSink<'b>, -} - -impl<'a, 'b> ExprValidator<'a, 'b> { - pub(super) fn new( - owner: DefWithBodyId, - infer: Arc, - sink: &'a mut DiagnosticSink<'b>, - ) -> ExprValidator<'a, 'b> { - ExprValidator { owner, infer, sink } - } - - pub(super) fn validate_body(&mut self, db: &dyn HirDatabase) { - let body = db.body(self.owner.into()); - - for (id, expr) in body.exprs.iter() { - if let Some((variant_def, missed_fields, true)) = - record_literal_missing_fields(db, &self.infer, id, expr) - { - self.create_record_literal_missing_fields_diagnostic( - id, - db, - variant_def, - missed_fields, - ); - } - - match expr { - Expr::Match { expr, arms } => { - self.validate_match(id, *expr, arms, db, self.infer.clone()); - } - Expr::Call { .. } | Expr::MethodCall { .. 
} => { - self.validate_call(db, id, expr); - } - _ => {} - } - } - for (id, pat) in body.pats.iter() { - if let Some((variant_def, missed_fields, true)) = - record_pattern_missing_fields(db, &self.infer, id, pat) - { - self.create_record_pattern_missing_fields_diagnostic( - id, - db, - variant_def, - missed_fields, - ); - } - } - let body_expr = &body[body.body_expr]; - if let Expr::Block { tail: Some(t), .. } = body_expr { - self.validate_results_in_tail_expr(body.body_expr, *t, db); - } - } - - fn create_record_literal_missing_fields_diagnostic( - &mut self, - id: ExprId, - db: &dyn HirDatabase, - variant_def: VariantId, - missed_fields: Vec, - ) { - // XXX: only look at source_map if we do have missing fields - let (_, source_map) = db.body_with_source_map(self.owner.into()); - - if let Ok(source_ptr) = source_map.expr_syntax(id) { - let root = source_ptr.file_syntax(db.upcast()); - if let ast::Expr::RecordExpr(record_lit) = &source_ptr.value.to_node(&root) { - if let Some(field_list) = record_lit.record_expr_field_list() { - let variant_data = variant_data(db.upcast(), variant_def); - let missed_fields = missed_fields - .into_iter() - .map(|idx| variant_data.fields()[idx].name.clone()) - .collect(); - self.sink.push(MissingFields { - file: source_ptr.file_id, - field_list: AstPtr::new(&field_list), - missed_fields, - }) - } - } - } - } - - fn create_record_pattern_missing_fields_diagnostic( - &mut self, - id: PatId, - db: &dyn HirDatabase, - variant_def: VariantId, - missed_fields: Vec, - ) { - // XXX: only look at source_map if we do have missing fields - let (_, source_map) = db.body_with_source_map(self.owner.into()); - - if let Ok(source_ptr) = source_map.pat_syntax(id) { - if let Some(expr) = source_ptr.value.as_ref().left() { - let root = source_ptr.file_syntax(db.upcast()); - if let ast::Pat::RecordPat(record_pat) = expr.to_node(&root) { - if let Some(field_list) = record_pat.record_pat_field_list() { - let variant_data = variant_data(db.upcast(), 
variant_def); - let missed_fields = missed_fields - .into_iter() - .map(|idx| variant_data.fields()[idx].name.clone()) - .collect(); - self.sink.push(MissingPatFields { - file: source_ptr.file_id, - field_list: AstPtr::new(&field_list), - missed_fields, - }) - } - } - } - } - } - - fn validate_call(&mut self, db: &dyn HirDatabase, call_id: ExprId, expr: &Expr) -> Option<()> { - // Check that the number of arguments matches the number of parameters. - - // FIXME: Due to shortcomings in the current type system implementation, only emit this - // diagnostic if there are no type mismatches in the containing function. - if self.infer.type_mismatches.iter().next().is_some() { - return Some(()); - } - - let is_method_call = matches!(expr, Expr::MethodCall { .. }); - let (sig, args) = match expr { - Expr::Call { callee, args } => { - let callee = &self.infer.type_of_expr[*callee]; - let sig = callee.callable_sig(db)?; - (sig, args.clone()) - } - Expr::MethodCall { receiver, args, .. } => { - let mut args = args.clone(); - args.insert(0, *receiver); - - // FIXME: note that we erase information about substs here. 
This - // is not right, but, luckily, doesn't matter as we care only - // about the number of params - let callee = self.infer.method_resolution(call_id)?; - let sig = db.callable_item_signature(callee.into()).value; - - (sig, args) - } - _ => return None, - }; - - if sig.is_varargs { - return None; - } - - let params = sig.params(); - - let mut param_count = params.len(); - let mut arg_count = args.len(); - - if arg_count != param_count { - let (_, source_map) = db.body_with_source_map(self.owner.into()); - if let Ok(source_ptr) = source_map.expr_syntax(call_id) { - if is_method_call { - param_count -= 1; - arg_count -= 1; - } - self.sink.push(MismatchedArgCount { - file: source_ptr.file_id, - call_expr: source_ptr.value, - expected: param_count, - found: arg_count, - }); - } - } - - None - } - - fn validate_match( - &mut self, - id: ExprId, - match_expr: ExprId, - arms: &[MatchArm], - db: &dyn HirDatabase, - infer: Arc, - ) { - let (body, source_map): (Arc, Arc) = - db.body_with_source_map(self.owner.into()); - - let match_expr_ty = match infer.type_of_expr.get(match_expr) { - Some(ty) => ty, - // If we can't resolve the type of the match expression - // we cannot perform exhaustiveness checks. - None => return, - }; - - let cx = MatchCheckCtx { match_expr, body, infer: infer.clone(), db }; - let pats = arms.iter().map(|arm| arm.pat); - - let mut seen = Matrix::empty(); - for pat in pats { - if let Some(pat_ty) = infer.type_of_pat.get(pat) { - // We only include patterns whose type matches the type - // of the match expression. If we had a InvalidMatchArmPattern - // diagnostic or similar we could raise that in an else - // block here. - // - // When comparing the types, we also have to consider that rustc - // will automatically de-reference the match expression type if - // necessary. - // - // FIXME we should use the type checker for this. 
- if pat_ty == match_expr_ty - || match_expr_ty - .as_reference() - .map(|(match_expr_ty, _)| match_expr_ty == pat_ty) - .unwrap_or(false) - { - // If we had a NotUsefulMatchArm diagnostic, we could - // check the usefulness of each pattern as we added it - // to the matrix here. - let v = PatStack::from_pattern(pat); - seen.push(&cx, v); - continue; - } - } - - // If we can't resolve the type of a pattern, or the pattern type doesn't - // fit the match expression, we skip this diagnostic. Skipping the entire - // diagnostic rather than just not including this match arm is preferred - // to avoid the chance of false positives. - return; - } - - match is_useful(&cx, &seen, &PatStack::from_wild()) { - Ok(Usefulness::Useful) => (), - // if a wildcard pattern is not useful, then all patterns are covered - Ok(Usefulness::NotUseful) => return, - // this path is for unimplemented checks, so we err on the side of not - // reporting any errors - _ => return, - } - - if let Ok(source_ptr) = source_map.expr_syntax(id) { - let root = source_ptr.file_syntax(db.upcast()); - if let ast::Expr::MatchExpr(match_expr) = &source_ptr.value.to_node(&root) { - if let (Some(match_expr), Some(arms)) = - (match_expr.expr(), match_expr.match_arm_list()) - { - self.sink.push(MissingMatchArms { - file: source_ptr.file_id, - match_expr: AstPtr::new(&match_expr), - arms: AstPtr::new(&arms), - }) - } - } - } - } - - fn validate_results_in_tail_expr(&mut self, body_id: ExprId, id: ExprId, db: &dyn HirDatabase) { - // the mismatch will be on the whole block currently - let mismatch = match self.infer.type_mismatch_for_expr(body_id) { - Some(m) => m, - None => return, - }; - - let core_result_path = path![core::result::Result]; - - let resolver = self.owner.resolver(db.upcast()); - let core_result_enum = match resolver.resolve_known_enum(db.upcast(), &core_result_path) { - Some(it) => it, - _ => return, - }; - - let core_result_ctor = TypeCtor::Adt(AdtId::EnumId(core_result_enum)); - let params = 
match &mismatch.expected { - Ty::Apply(ApplicationTy { ctor, parameters }) if ctor == &core_result_ctor => { - parameters - } - _ => return, - }; - - if params.len() == 2 && params[0] == mismatch.actual { - let (_, source_map) = db.body_with_source_map(self.owner.into()); - - if let Ok(source_ptr) = source_map.expr_syntax(id) { - self.sink - .push(MissingOkInTailExpr { file: source_ptr.file_id, expr: source_ptr.value }); - } - } - } -} - -pub fn record_literal_missing_fields( - db: &dyn HirDatabase, - infer: &InferenceResult, - id: ExprId, - expr: &Expr, -) -> Option<(VariantId, Vec, /*exhaustive*/ bool)> { - let (fields, exhausitve) = match expr { - Expr::RecordLit { path: _, fields, spread } => (fields, spread.is_none()), - _ => return None, - }; - - let variant_def = infer.variant_resolution_for_expr(id)?; - if let VariantId::UnionId(_) = variant_def { - return None; - } - - let variant_data = variant_data(db.upcast(), variant_def); - - let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect(); - let missed_fields: Vec = variant_data - .fields() - .iter() - .filter_map(|(f, d)| if specified_fields.contains(&d.name) { None } else { Some(f) }) - .collect(); - if missed_fields.is_empty() { - return None; - } - Some((variant_def, missed_fields, exhausitve)) -} - -pub fn record_pattern_missing_fields( - db: &dyn HirDatabase, - infer: &InferenceResult, - id: PatId, - pat: &Pat, -) -> Option<(VariantId, Vec, /*exhaustive*/ bool)> { - let (fields, exhaustive) = match pat { - Pat::Record { path: _, args, ellipsis } => (args, !ellipsis), - _ => return None, - }; - - let variant_def = infer.variant_resolution_for_pat(id)?; - if let VariantId::UnionId(_) = variant_def { - return None; - } - - let variant_data = variant_data(db.upcast(), variant_def); - - let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect(); - let missed_fields: Vec = variant_data - .fields() - .iter() - .filter_map(|(f, d)| if specified_fields.contains(&d.name) { 
None } else { Some(f) }) - .collect(); - if missed_fields.is_empty() { - return None; - } - Some((variant_def, missed_fields, exhaustive)) -} - -#[cfg(test)] -mod tests { - use crate::diagnostics::tests::check_diagnostics; - - #[test] - fn simple_free_fn_zero() { - check_diagnostics( - r#" -fn zero() {} -fn f() { zero(1); } - //^^^^^^^ Expected 0 arguments, found 1 -"#, - ); - - check_diagnostics( - r#" -fn zero() {} -fn f() { zero(); } -"#, - ); - } - - #[test] - fn simple_free_fn_one() { - check_diagnostics( - r#" -fn one(arg: u8) {} -fn f() { one(); } - //^^^^^ Expected 1 argument, found 0 -"#, - ); - - check_diagnostics( - r#" -fn one(arg: u8) {} -fn f() { one(1); } -"#, - ); - } - - #[test] - fn method_as_fn() { - check_diagnostics( - r#" -struct S; -impl S { fn method(&self) {} } - -fn f() { - S::method(); -} //^^^^^^^^^^^ Expected 1 argument, found 0 -"#, - ); - - check_diagnostics( - r#" -struct S; -impl S { fn method(&self) {} } - -fn f() { - S::method(&S); - S.method(); -} -"#, - ); - } - - #[test] - fn method_with_arg() { - check_diagnostics( - r#" -struct S; -impl S { fn method(&self, arg: u8) {} } - - fn f() { - S.method(); - } //^^^^^^^^^^ Expected 1 argument, found 0 - "#, - ); - - check_diagnostics( - r#" -struct S; -impl S { fn method(&self, arg: u8) {} } - -fn f() { - S::method(&S, 0); - S.method(1); -} -"#, - ); - } - - #[test] - fn tuple_struct() { - check_diagnostics( - r#" -struct Tup(u8, u16); -fn f() { - Tup(0); -} //^^^^^^ Expected 2 arguments, found 1 -"#, - ) - } - - #[test] - fn enum_variant() { - check_diagnostics( - r#" -enum En { Variant(u8, u16), } -fn f() { - En::Variant(0); -} //^^^^^^^^^^^^^^ Expected 2 arguments, found 1 -"#, - ) - } - - #[test] - fn enum_variant_type_macro() { - check_diagnostics( - r#" -macro_rules! 
Type { - () => { u32 }; -} -enum Foo { - Bar(Type![]) -} -impl Foo { - fn new() { - Foo::Bar(0); - Foo::Bar(0, 1); - //^^^^^^^^^^^^^^ Expected 1 argument, found 2 - Foo::Bar(); - //^^^^^^^^^^ Expected 1 argument, found 0 - } -} - "#, - ); - } - - #[test] - fn varargs() { - check_diagnostics( - r#" -extern "C" { - fn fixed(fixed: u8); - fn varargs(fixed: u8, ...); - fn varargs2(...); -} - -fn f() { - unsafe { - fixed(0); - fixed(0, 1); - //^^^^^^^^^^^ Expected 1 argument, found 2 - varargs(0); - varargs(0, 1); - varargs2(); - varargs2(0); - varargs2(0, 1); - } -} - "#, - ) - } - - #[test] - fn arg_count_lambda() { - check_diagnostics( - r#" -fn main() { - let f = |()| (); - f(); - //^^^ Expected 1 argument, found 0 - f(()); - f((), ()); - //^^^^^^^^^ Expected 1 argument, found 2 -} -"#, - ) - } -} diff --git a/crates/ra_hir_ty/src/diagnostics/match_check.rs b/crates/ra_hir_ty/src/diagnostics/match_check.rs deleted file mode 100644 index 507edcb7de..0000000000 --- a/crates/ra_hir_ty/src/diagnostics/match_check.rs +++ /dev/null @@ -1,1421 +0,0 @@ -//! This module implements match statement exhaustiveness checking and usefulness checking -//! for match arms. -//! -//! It is modeled on the rustc module `librustc_mir_build::hair::pattern::_match`, which -//! contains very detailed documentation about the algorithms used here. I've duplicated -//! most of that documentation below. -//! -//! This file includes the logic for exhaustiveness and usefulness checking for -//! pattern-matching. Specifically, given a list of patterns for a type, we can -//! tell whether: -//! - (a) the patterns cover every possible constructor for the type (exhaustiveness). -//! - (b) each pattern is necessary (usefulness). -//! -//! The algorithm implemented here is a modified version of the one described in -//! . -//! However, to save future implementors from reading the original paper, we -//! summarise the algorithm here to hopefully save time and be a little clearer -//! 
(without being so rigorous). -//! -//! The core of the algorithm revolves about a "usefulness" check. In particular, we -//! are trying to compute a predicate `U(P, p)` where `P` is a list of patterns (we refer to this as -//! a matrix). `U(P, p)` represents whether, given an existing list of patterns -//! `P_1 ..= P_m`, adding a new pattern `p` will be "useful" (that is, cover previously- -//! uncovered values of the type). -//! -//! If we have this predicate, then we can easily compute both exhaustiveness of an -//! entire set of patterns and the individual usefulness of each one. -//! (a) the set of patterns is exhaustive iff `U(P, _)` is false (i.e., adding a wildcard -//! match doesn't increase the number of values we're matching) -//! (b) a pattern `P_i` is not useful if `U(P[0..=(i-1), P_i)` is false (i.e., adding a -//! pattern to those that have come before it doesn't increase the number of values -//! we're matching). -//! -//! During the course of the algorithm, the rows of the matrix won't just be individual patterns, -//! but rather partially-deconstructed patterns in the form of a list of patterns. The paper -//! calls those pattern-vectors, and we will call them pattern-stacks. The same holds for the -//! new pattern `p`. -//! -//! For example, say we have the following: -//! -//! ```ignore -//! // x: (Option, Result<()>) -//! match x { -//! (Some(true), _) => (), -//! (None, Err(())) => (), -//! (None, Err(_)) => (), -//! } -//! ``` -//! -//! Here, the matrix `P` starts as: -//! -//! ```text -//! [ -//! [(Some(true), _)], -//! [(None, Err(()))], -//! [(None, Err(_))], -//! ] -//! ``` -//! -//! We can tell it's not exhaustive, because `U(P, _)` is true (we're not covering -//! `[(Some(false), _)]`, for instance). In addition, row 3 is not useful, because -//! all the values it covers are already covered by row 2. -//! -//! A list of patterns can be thought of as a stack, because we are mainly interested in the top of -//! 
the stack at any given point, and we can pop or apply constructors to get new pattern-stacks. -//! To match the paper, the top of the stack is at the beginning / on the left. -//! -//! There are two important operations on pattern-stacks necessary to understand the algorithm: -//! -//! 1. We can pop a given constructor off the top of a stack. This operation is called -//! `specialize`, and is denoted `S(c, p)` where `c` is a constructor (like `Some` or -//! `None`) and `p` a pattern-stack. -//! If the pattern on top of the stack can cover `c`, this removes the constructor and -//! pushes its arguments onto the stack. It also expands OR-patterns into distinct patterns. -//! Otherwise the pattern-stack is discarded. -//! This essentially filters those pattern-stacks whose top covers the constructor `c` and -//! discards the others. -//! -//! For example, the first pattern above initially gives a stack `[(Some(true), _)]`. If we -//! pop the tuple constructor, we are left with `[Some(true), _]`, and if we then pop the -//! `Some` constructor we get `[true, _]`. If we had popped `None` instead, we would get -//! nothing back. -//! -//! This returns zero or more new pattern-stacks, as follows. We look at the pattern `p_1` -//! on top of the stack, and we have four cases: -//! -//! * 1.1. `p_1 = c(r_1, .., r_a)`, i.e. the top of the stack has constructor `c`. We push onto -//! the stack the arguments of this constructor, and return the result: -//! -//! r_1, .., r_a, p_2, .., p_n -//! -//! * 1.2. `p_1 = c'(r_1, .., r_a')` where `c ≠ c'`. We discard the current stack and return -//! nothing. -//! * 1.3. `p_1 = _`. We push onto the stack as many wildcards as the constructor `c` has -//! arguments (its arity), and return the resulting stack: -//! -//! _, .., _, p_2, .., p_n -//! -//! * 1.4. `p_1 = r_1 | r_2`. We expand the OR-pattern and then recurse on each resulting stack: -//! -//! S(c, (r_1, p_2, .., p_n)) -//! S(c, (r_2, p_2, .., p_n)) -//! -//! 2. 
We can pop a wildcard off the top of the stack. This is called `D(p)`, where `p` is -//! a pattern-stack. -//! This is used when we know there are missing constructor cases, but there might be -//! existing wildcard patterns, so to check the usefulness of the matrix, we have to check -//! all its *other* components. -//! -//! It is computed as follows. We look at the pattern `p_1` on top of the stack, -//! and we have three cases: -//! * 1.1. `p_1 = c(r_1, .., r_a)`. We discard the current stack and return nothing. -//! * 1.2. `p_1 = _`. We return the rest of the stack: -//! -//! p_2, .., p_n -//! -//! * 1.3. `p_1 = r_1 | r_2`. We expand the OR-pattern and then recurse on each resulting stack: -//! -//! D((r_1, p_2, .., p_n)) -//! D((r_2, p_2, .., p_n)) -//! -//! Note that the OR-patterns are not always used directly in Rust, but are used to derive the -//! exhaustive integer matching rules, so they're written here for posterity. -//! -//! Both those operations extend straightforwardly to a list or pattern-stacks, i.e. a matrix, by -//! working row-by-row. Popping a constructor ends up keeping only the matrix rows that start with -//! the given constructor, and popping a wildcard keeps those rows that start with a wildcard. -//! -//! -//! The algorithm for computing `U` -//! ------------------------------- -//! The algorithm is inductive (on the number of columns: i.e., components of tuple patterns). -//! That means we're going to check the components from left-to-right, so the algorithm -//! operates principally on the first component of the matrix and new pattern-stack `p`. -//! This algorithm is realised in the `is_useful` function. -//! -//! Base case (`n = 0`, i.e., an empty tuple pattern): -//! - If `P` already contains an empty pattern (i.e., if the number of patterns `m > 0`), then -//! `U(P, p)` is false. -//! - Otherwise, `P` must be empty, so `U(P, p)` is true. -//! -//! 
Inductive step (`n > 0`, i.e., whether there's at least one column [which may then be expanded -//! into further columns later]). We're going to match on the top of the new pattern-stack, `p_1`: -//! -//! - If `p_1 == c(r_1, .., r_a)`, i.e. we have a constructor pattern. -//! Then, the usefulness of `p_1` can be reduced to whether it is useful when -//! we ignore all the patterns in the first column of `P` that involve other constructors. -//! This is where `S(c, P)` comes in: -//! -//! ```text -//! U(P, p) := U(S(c, P), S(c, p)) -//! ``` -//! -//! This special case is handled in `is_useful_specialized`. -//! -//! For example, if `P` is: -//! -//! ```text -//! [ -//! [Some(true), _], -//! [None, 0], -//! ] -//! ``` -//! -//! and `p` is `[Some(false), 0]`, then we don't care about row 2 since we know `p` only -//! matches values that row 2 doesn't. For row 1 however, we need to dig into the -//! arguments of `Some` to know whether some new value is covered. So we compute -//! `U([[true, _]], [false, 0])`. -//! -//! - If `p_1 == _`, then we look at the list of constructors that appear in the first component of -//! the rows of `P`: -//! - If there are some constructors that aren't present, then we might think that the -//! wildcard `_` is useful, since it covers those constructors that weren't covered -//! before. -//! That's almost correct, but only works if there were no wildcards in those first -//! components. So we need to check that `p` is useful with respect to the rows that -//! start with a wildcard, if there are any. This is where `D` comes in: -//! `U(P, p) := U(D(P), D(p))` -//! -//! For example, if `P` is: -//! ```text -//! [ -//! [_, true, _], -//! [None, false, 1], -//! ] -//! ``` -//! and `p` is `[_, false, _]`, the `Some` constructor doesn't appear in `P`. So if we -//! only had row 2, we'd know that `p` is useful. However row 1 starts with a -//! wildcard, so we need to check whether `U([[true, _]], [false, 1])`. -//! -//! 
- Otherwise, all possible constructors (for the relevant type) are present. In this -//! case we must check whether the wildcard pattern covers any unmatched value. For -//! that, we can think of the `_` pattern as a big OR-pattern that covers all -//! possible constructors. For `Option`, that would mean `_ = None | Some(_)` for -//! example. The wildcard pattern is useful in this case if it is useful when -//! specialized to one of the possible constructors. So we compute: -//! `U(P, p) := ∃(k ϵ constructors) U(S(k, P), S(k, p))` -//! -//! For example, if `P` is: -//! ```text -//! [ -//! [Some(true), _], -//! [None, false], -//! ] -//! ``` -//! and `p` is `[_, false]`, both `None` and `Some` constructors appear in the first -//! components of `P`. We will therefore try popping both constructors in turn: we -//! compute `U([[true, _]], [_, false])` for the `Some` constructor, and `U([[false]], -//! [false])` for the `None` constructor. The first case returns true, so we know that -//! `p` is useful for `P`. Indeed, it matches `[Some(false), _]` that wasn't matched -//! before. -//! -//! - If `p_1 == r_1 | r_2`, then the usefulness depends on each `r_i` separately: -//! -//! ```text -//! U(P, p) := U(P, (r_1, p_2, .., p_n)) -//! || U(P, (r_2, p_2, .., p_n)) -//! ``` -use std::sync::Arc; - -use hir_def::{ - adt::VariantData, - body::Body, - expr::{Expr, Literal, Pat, PatId}, - AdtId, EnumVariantId, VariantId, -}; -use ra_arena::Idx; -use smallvec::{smallvec, SmallVec}; - -use crate::{db::HirDatabase, ApplicationTy, InferenceResult, Ty, TypeCtor}; - -#[derive(Debug, Clone, Copy)] -/// Either a pattern from the source code being analyzed, represented as -/// as `PatId`, or a `Wild` pattern which is created as an intermediate -/// step in the match checking algorithm and thus is not backed by a -/// real `PatId`. 
-/// -/// Note that it is totally valid for the `PatId` variant to contain -/// a `PatId` which resolves to a `Wild` pattern, if that wild pattern -/// exists in the source code being analyzed. -enum PatIdOrWild { - PatId(PatId), - Wild, -} - -impl PatIdOrWild { - fn as_pat(self, cx: &MatchCheckCtx) -> Pat { - match self { - PatIdOrWild::PatId(id) => cx.body.pats[id].clone(), - PatIdOrWild::Wild => Pat::Wild, - } - } - - fn as_id(self) -> Option { - match self { - PatIdOrWild::PatId(id) => Some(id), - PatIdOrWild::Wild => None, - } - } -} - -impl From for PatIdOrWild { - fn from(pat_id: PatId) -> Self { - Self::PatId(pat_id) - } -} - -impl From<&PatId> for PatIdOrWild { - fn from(pat_id: &PatId) -> Self { - Self::PatId(*pat_id) - } -} - -#[derive(Debug, Clone, Copy, PartialEq)] -pub(super) enum MatchCheckErr { - NotImplemented, - MalformedMatchArm, - /// Used when type inference cannot resolve the type of - /// a pattern or expression. - Unknown, -} - -/// The return type of `is_useful` is either an indication of usefulness -/// of the match arm, or an error in the case the match statement -/// is made up of types for which exhaustiveness checking is currently -/// not completely implemented. -/// -/// The `std::result::Result` type is used here rather than a custom enum -/// to allow the use of `?`. -pub(super) type MatchCheckResult = Result; - -#[derive(Debug)] -/// A row in a Matrix. -/// -/// This type is modeled from the struct of the same name in `rustc`. 
-pub(super) struct PatStack(PatStackInner); -type PatStackInner = SmallVec<[PatIdOrWild; 2]>; - -impl PatStack { - pub(super) fn from_pattern(pat_id: PatId) -> PatStack { - Self(smallvec!(pat_id.into())) - } - - pub(super) fn from_wild() -> PatStack { - Self(smallvec!(PatIdOrWild::Wild)) - } - - fn from_slice(slice: &[PatIdOrWild]) -> PatStack { - Self(SmallVec::from_slice(slice)) - } - - fn from_vec(v: PatStackInner) -> PatStack { - Self(v) - } - - fn get_head(&self) -> Option { - self.0.first().copied() - } - - fn tail(&self) -> &[PatIdOrWild] { - self.0.get(1..).unwrap_or(&[]) - } - - fn to_tail(&self) -> PatStack { - Self::from_slice(self.tail()) - } - - fn replace_head_with(&self, pats: I) -> PatStack - where - I: Iterator, - T: Into, - { - let mut patterns: PatStackInner = smallvec![]; - for pat in pats { - patterns.push(pat.into()); - } - for pat in &self.0[1..] { - patterns.push(*pat); - } - PatStack::from_vec(patterns) - } - - /// Computes `D(self)`. - /// - /// See the module docs and the associated documentation in rustc for details. - fn specialize_wildcard(&self, cx: &MatchCheckCtx) -> Option { - if matches!(self.get_head()?.as_pat(cx), Pat::Wild) { - Some(self.to_tail()) - } else { - None - } - } - - /// Computes `S(constructor, self)`. - /// - /// See the module docs and the associated documentation in rustc for details. - fn specialize_constructor( - &self, - cx: &MatchCheckCtx, - constructor: &Constructor, - ) -> MatchCheckResult> { - let head = match self.get_head() { - Some(head) => head, - None => return Ok(None), - }; - - let head_pat = head.as_pat(cx); - let result = match (head_pat, constructor) { - (Pat::Tuple { args: ref pat_ids, ellipsis }, Constructor::Tuple { arity: _ }) => { - if ellipsis.is_some() { - // If there are ellipsis here, we should add the correct number of - // Pat::Wild patterns to `pat_ids`. 
We should be able to use the - // constructors arity for this, but at the time of writing we aren't - // correctly calculating this arity when ellipsis are present. - return Err(MatchCheckErr::NotImplemented); - } - - Some(self.replace_head_with(pat_ids.iter())) - } - (Pat::Lit(lit_expr), Constructor::Bool(constructor_val)) => { - match cx.body.exprs[lit_expr] { - Expr::Literal(Literal::Bool(pat_val)) if *constructor_val == pat_val => { - Some(self.to_tail()) - } - // it was a bool but the value doesn't match - Expr::Literal(Literal::Bool(_)) => None, - // perhaps this is actually unreachable given we have - // already checked that these match arms have the appropriate type? - _ => return Err(MatchCheckErr::NotImplemented), - } - } - (Pat::Wild, constructor) => Some(self.expand_wildcard(cx, constructor)?), - (Pat::Path(_), Constructor::Enum(constructor)) => { - // unit enum variants become `Pat::Path` - let pat_id = head.as_id().expect("we know this isn't a wild"); - if !enum_variant_matches(cx, pat_id, *constructor) { - None - } else { - Some(self.to_tail()) - } - } - ( - Pat::TupleStruct { args: ref pat_ids, ellipsis, .. }, - Constructor::Enum(enum_constructor), - ) => { - let pat_id = head.as_id().expect("we know this isn't a wild"); - if !enum_variant_matches(cx, pat_id, *enum_constructor) { - None - } else { - let constructor_arity = constructor.arity(cx)?; - if let Some(ellipsis_position) = ellipsis { - // If there are ellipsis in the pattern, the ellipsis must take the place - // of at least one sub-pattern, so `pat_ids` should be smaller than the - // constructor arity. 
- if pat_ids.len() < constructor_arity { - let mut new_patterns: Vec = vec![]; - - for pat_id in &pat_ids[0..ellipsis_position] { - new_patterns.push((*pat_id).into()); - } - - for _ in 0..(constructor_arity - pat_ids.len()) { - new_patterns.push(PatIdOrWild::Wild); - } - - for pat_id in &pat_ids[ellipsis_position..pat_ids.len()] { - new_patterns.push((*pat_id).into()); - } - - Some(self.replace_head_with(new_patterns.into_iter())) - } else { - return Err(MatchCheckErr::MalformedMatchArm); - } - } else { - // If there is no ellipsis in the tuple pattern, the number - // of patterns must equal the constructor arity. - if pat_ids.len() == constructor_arity { - Some(self.replace_head_with(pat_ids.into_iter())) - } else { - return Err(MatchCheckErr::MalformedMatchArm); - } - } - } - } - (Pat::Record { args: ref arg_patterns, .. }, Constructor::Enum(e)) => { - let pat_id = head.as_id().expect("we know this isn't a wild"); - if !enum_variant_matches(cx, pat_id, *e) { - None - } else { - match cx.db.enum_data(e.parent).variants[e.local_id].variant_data.as_ref() { - VariantData::Record(struct_field_arena) => { - // Here we treat any missing fields in the record as the wild pattern, as - // if the record has ellipsis. We want to do this here even if the - // record does not contain ellipsis, because it allows us to continue - // enforcing exhaustiveness for the rest of the match statement. - // - // Creating the diagnostic for the missing field in the pattern - // should be done in a different diagnostic. 
- let patterns = struct_field_arena.iter().map(|(_, struct_field)| { - arg_patterns - .iter() - .find(|pat| pat.name == struct_field.name) - .map(|pat| PatIdOrWild::from(pat.pat)) - .unwrap_or(PatIdOrWild::Wild) - }); - - Some(self.replace_head_with(patterns)) - } - _ => return Err(MatchCheckErr::Unknown), - } - } - } - (Pat::Or(_), _) => return Err(MatchCheckErr::NotImplemented), - (_, _) => return Err(MatchCheckErr::NotImplemented), - }; - - Ok(result) - } - - /// A special case of `specialize_constructor` where the head of the pattern stack - /// is a Wild pattern. - /// - /// Replaces the Wild pattern at the head of the pattern stack with N Wild patterns - /// (N >= 0), where N is the arity of the given constructor. - fn expand_wildcard( - &self, - cx: &MatchCheckCtx, - constructor: &Constructor, - ) -> MatchCheckResult { - assert_eq!( - Pat::Wild, - self.get_head().expect("expand_wildcard called on empty PatStack").as_pat(cx), - "expand_wildcard must only be called on PatStack with wild at head", - ); - - let mut patterns: PatStackInner = smallvec![]; - - for _ in 0..constructor.arity(cx)? { - patterns.push(PatIdOrWild::Wild); - } - - for pat in &self.0[1..] { - patterns.push(*pat); - } - - Ok(PatStack::from_vec(patterns)) - } -} - -/// A collection of PatStack. -/// -/// This type is modeled from the struct of the same name in `rustc`. -pub(super) struct Matrix(Vec); - -impl Matrix { - pub(super) fn empty() -> Self { - Self(vec![]) - } - - pub(super) fn push(&mut self, cx: &MatchCheckCtx, row: PatStack) { - if let Some(Pat::Or(pat_ids)) = row.get_head().map(|pat_id| pat_id.as_pat(cx)) { - // Or patterns are expanded here - for pat_id in pat_ids { - self.0.push(PatStack::from_pattern(pat_id)); - } - } else { - self.0.push(row); - } - } - - fn is_empty(&self) -> bool { - self.0.is_empty() - } - - fn heads(&self) -> Vec { - self.0.iter().flat_map(|p| p.get_head()).collect() - } - - /// Computes `D(self)` for each contained PatStack. 
- /// - /// See the module docs and the associated documentation in rustc for details. - fn specialize_wildcard(&self, cx: &MatchCheckCtx) -> Self { - Self::collect(cx, self.0.iter().filter_map(|r| r.specialize_wildcard(cx))) - } - - /// Computes `S(constructor, self)` for each contained PatStack. - /// - /// See the module docs and the associated documentation in rustc for details. - fn specialize_constructor( - &self, - cx: &MatchCheckCtx, - constructor: &Constructor, - ) -> MatchCheckResult { - let mut new_matrix = Matrix::empty(); - for pat in &self.0 { - if let Some(pat) = pat.specialize_constructor(cx, constructor)? { - new_matrix.push(cx, pat); - } - } - - Ok(new_matrix) - } - - fn collect>(cx: &MatchCheckCtx, iter: T) -> Self { - let mut matrix = Matrix::empty(); - - for pat in iter { - // using push ensures we expand or-patterns - matrix.push(cx, pat); - } - - matrix - } -} - -#[derive(Clone, Debug, PartialEq)] -/// An indication of the usefulness of a given match arm, where -/// usefulness is defined as matching some patterns which were -/// not matched by an prior match arms. -/// -/// We may eventually need an `Unknown` variant here. -pub(super) enum Usefulness { - Useful, - NotUseful, -} - -pub(super) struct MatchCheckCtx<'a> { - pub(super) match_expr: Idx, - pub(super) body: Arc, - pub(super) infer: Arc, - pub(super) db: &'a dyn HirDatabase, -} - -/// Given a set of patterns `matrix`, and pattern to consider `v`, determines -/// whether `v` is useful. A pattern is useful if it covers cases which were -/// not previously covered. -/// -/// When calling this function externally (that is, not the recursive calls) it -/// expected that you have already type checked the match arms. All patterns in -/// matrix should be the same type as v, as well as they should all be the same -/// type as the match expression. 
-pub(super) fn is_useful( - cx: &MatchCheckCtx, - matrix: &Matrix, - v: &PatStack, -) -> MatchCheckResult { - // Handle two special cases: - // - enum with no variants - // - `!` type - // In those cases, no match arm is useful. - match cx.infer[cx.match_expr].strip_references() { - Ty::Apply(ApplicationTy { ctor: TypeCtor::Adt(AdtId::EnumId(enum_id)), .. }) => { - if cx.db.enum_data(*enum_id).variants.is_empty() { - return Ok(Usefulness::NotUseful); - } - } - Ty::Apply(ApplicationTy { ctor: TypeCtor::Never, .. }) => { - return Ok(Usefulness::NotUseful); - } - _ => (), - } - - let head = match v.get_head() { - Some(head) => head, - None => { - let result = if matrix.is_empty() { Usefulness::Useful } else { Usefulness::NotUseful }; - - return Ok(result); - } - }; - - if let Pat::Or(pat_ids) = head.as_pat(cx) { - let mut found_unimplemented = false; - let any_useful = pat_ids.iter().any(|&pat_id| { - let v = PatStack::from_pattern(pat_id); - - match is_useful(cx, matrix, &v) { - Ok(Usefulness::Useful) => true, - Ok(Usefulness::NotUseful) => false, - _ => { - found_unimplemented = true; - false - } - } - }); - - return if any_useful { - Ok(Usefulness::Useful) - } else if found_unimplemented { - Err(MatchCheckErr::NotImplemented) - } else { - Ok(Usefulness::NotUseful) - }; - } - - if let Some(constructor) = pat_constructor(cx, head)? { - let matrix = matrix.specialize_constructor(&cx, &constructor)?; - let v = v - .specialize_constructor(&cx, &constructor)? - .expect("we know this can't fail because we get the constructor from `v.head()` above"); - - is_useful(&cx, &matrix, &v) - } else { - // expanding wildcard - let mut used_constructors: Vec = vec![]; - for pat in matrix.heads() { - if let Some(constructor) = pat_constructor(cx, pat)? { - used_constructors.push(constructor); - } - } - - // We assume here that the first constructor is the "correct" type. Since we - // only care about the "type" of the constructor (i.e. 
if it is a bool we - // don't care about the value), this assumption should be valid as long as - // the match statement is well formed. We currently uphold this invariant by - // filtering match arms before calling `is_useful`, only passing in match arms - // whose type matches the type of the match expression. - match &used_constructors.first() { - Some(constructor) if all_constructors_covered(&cx, constructor, &used_constructors) => { - // If all constructors are covered, then we need to consider whether - // any values are covered by this wildcard. - // - // For example, with matrix '[[Some(true)], [None]]', all - // constructors are covered (`Some`/`None`), so we need - // to perform specialization to see that our wildcard will cover - // the `Some(false)` case. - // - // Here we create a constructor for each variant and then check - // usefulness after specializing for that constructor. - let mut found_unimplemented = false; - for constructor in constructor.all_constructors(cx) { - let matrix = matrix.specialize_constructor(&cx, &constructor)?; - let v = v.expand_wildcard(&cx, &constructor)?; - - match is_useful(&cx, &matrix, &v) { - Ok(Usefulness::Useful) => return Ok(Usefulness::Useful), - Ok(Usefulness::NotUseful) => continue, - _ => found_unimplemented = true, - }; - } - - if found_unimplemented { - Err(MatchCheckErr::NotImplemented) - } else { - Ok(Usefulness::NotUseful) - } - } - _ => { - // Either not all constructors are covered, or the only other arms - // are wildcards. Either way, this pattern is useful if it is useful - // when compared to those arms with wildcards. - let matrix = matrix.specialize_wildcard(&cx); - let v = v.to_tail(); - - is_useful(&cx, &matrix, &v) - } - } - } -} - -#[derive(Debug, Clone, Copy)] -/// Similar to TypeCtor, but includes additional information about the specific -/// value being instantiated. For example, TypeCtor::Bool doesn't contain the -/// boolean value. 
-enum Constructor { - Bool(bool), - Tuple { arity: usize }, - Enum(EnumVariantId), -} - -impl Constructor { - fn arity(&self, cx: &MatchCheckCtx) -> MatchCheckResult { - let arity = match self { - Constructor::Bool(_) => 0, - Constructor::Tuple { arity } => *arity, - Constructor::Enum(e) => { - match cx.db.enum_data(e.parent).variants[e.local_id].variant_data.as_ref() { - VariantData::Tuple(struct_field_data) => struct_field_data.len(), - VariantData::Record(struct_field_data) => struct_field_data.len(), - VariantData::Unit => 0, - } - } - }; - - Ok(arity) - } - - fn all_constructors(&self, cx: &MatchCheckCtx) -> Vec { - match self { - Constructor::Bool(_) => vec![Constructor::Bool(true), Constructor::Bool(false)], - Constructor::Tuple { .. } => vec![*self], - Constructor::Enum(e) => cx - .db - .enum_data(e.parent) - .variants - .iter() - .map(|(local_id, _)| { - Constructor::Enum(EnumVariantId { parent: e.parent, local_id }) - }) - .collect(), - } - } -} - -/// Returns the constructor for the given pattern. Should only return None -/// in the case of a Wild pattern. -fn pat_constructor(cx: &MatchCheckCtx, pat: PatIdOrWild) -> MatchCheckResult> { - let res = match pat.as_pat(cx) { - Pat::Wild => None, - // FIXME somehow create the Tuple constructor with the proper arity. If there are - // ellipsis, the arity is not equal to the number of patterns. - Pat::Tuple { args: pats, ellipsis } if ellipsis.is_none() => { - Some(Constructor::Tuple { arity: pats.len() }) - } - Pat::Lit(lit_expr) => match cx.body.exprs[lit_expr] { - Expr::Literal(Literal::Bool(val)) => Some(Constructor::Bool(val)), - _ => return Err(MatchCheckErr::NotImplemented), - }, - Pat::TupleStruct { .. } | Pat::Path(_) | Pat::Record { .. 
} => { - let pat_id = pat.as_id().expect("we already know this pattern is not a wild"); - let variant_id = - cx.infer.variant_resolution_for_pat(pat_id).ok_or(MatchCheckErr::Unknown)?; - match variant_id { - VariantId::EnumVariantId(enum_variant_id) => { - Some(Constructor::Enum(enum_variant_id)) - } - _ => return Err(MatchCheckErr::NotImplemented), - } - } - _ => return Err(MatchCheckErr::NotImplemented), - }; - - Ok(res) -} - -fn all_constructors_covered( - cx: &MatchCheckCtx, - constructor: &Constructor, - used_constructors: &[Constructor], -) -> bool { - match constructor { - Constructor::Tuple { arity } => { - used_constructors.iter().any(|constructor| match constructor { - Constructor::Tuple { arity: used_arity } => arity == used_arity, - _ => false, - }) - } - Constructor::Bool(_) => { - if used_constructors.is_empty() { - return false; - } - - let covers_true = - used_constructors.iter().any(|c| matches!(c, Constructor::Bool(true))); - let covers_false = - used_constructors.iter().any(|c| matches!(c, Constructor::Bool(false))); - - covers_true && covers_false - } - Constructor::Enum(e) => cx.db.enum_data(e.parent).variants.iter().all(|(id, _)| { - for constructor in used_constructors { - if let Constructor::Enum(e) = constructor { - if id == e.local_id { - return true; - } - } - } - - false - }), - } -} - -fn enum_variant_matches(cx: &MatchCheckCtx, pat_id: PatId, enum_variant_id: EnumVariantId) -> bool { - Some(enum_variant_id.into()) == cx.infer.variant_resolution_for_pat(pat_id) -} - -#[cfg(test)] -mod tests { - use crate::diagnostics::tests::check_diagnostics; - - #[test] - fn empty_tuple() { - check_diagnostics( - r#" -fn main() { - match () { } - //^^ Missing match arm - match (()) { } - //^^^^ Missing match arm - - match () { _ => (), } - match () { () => (), } - match (()) { (()) => (), } -} -"#, - ); - } - - #[test] - fn tuple_of_two_empty_tuple() { - check_diagnostics( - r#" -fn main() { - match ((), ()) { } - //^^^^^^^^ Missing match arm - - 
match ((), ()) { ((), ()) => (), } -} -"#, - ); - } - - #[test] - fn boolean() { - check_diagnostics( - r#" -fn test_main() { - match false { } - //^^^^^ Missing match arm - match false { true => (), } - //^^^^^ Missing match arm - match (false, true) {} - //^^^^^^^^^^^^^ Missing match arm - match (false, true) { (true, true) => (), } - //^^^^^^^^^^^^^ Missing match arm - match (false, true) { - //^^^^^^^^^^^^^ Missing match arm - (false, true) => (), - (false, false) => (), - (true, false) => (), - } - match (false, true) { (true, _x) => (), } - //^^^^^^^^^^^^^ Missing match arm - - match false { true => (), false => (), } - match (false, true) { - (false, _) => (), - (true, false) => (), - (_, true) => (), - } - match (false, true) { - (true, true) => (), - (true, false) => (), - (false, true) => (), - (false, false) => (), - } - match (false, true) { - (true, _x) => (), - (false, true) => (), - (false, false) => (), - } - match (false, true, false) { - (false, ..) => (), - (true, ..) => (), - } - match (false, true, false) { - (.., false) => (), - (.., true) => (), - } - match (false, true, false) { (..) 
=> (), } -} -"#, - ); - } - - #[test] - fn tuple_of_tuple_and_bools() { - check_diagnostics( - r#" -fn main() { - match (false, ((), false)) {} - //^^^^^^^^^^^^^^^^^^^^ Missing match arm - match (false, ((), false)) { (true, ((), true)) => (), } - //^^^^^^^^^^^^^^^^^^^^ Missing match arm - match (false, ((), false)) { (true, _) => (), } - //^^^^^^^^^^^^^^^^^^^^ Missing match arm - - match (false, ((), false)) { - (true, ((), true)) => (), - (true, ((), false)) => (), - (false, ((), true)) => (), - (false, ((), false)) => (), - } - match (false, ((), false)) { - (true, ((), true)) => (), - (true, ((), false)) => (), - (false, _) => (), - } -} -"#, - ); - } - - #[test] - fn enums() { - check_diagnostics( - r#" -enum Either { A, B, } - -fn main() { - match Either::A { } - //^^^^^^^^^ Missing match arm - match Either::B { Either::A => (), } - //^^^^^^^^^ Missing match arm - - match &Either::B { - //^^^^^^^^^^ Missing match arm - Either::A => (), - } - - match Either::B { - Either::A => (), Either::B => (), - } - match &Either::B { - Either::A => (), Either::B => (), - } -} -"#, - ); - } - - #[test] - fn enum_containing_bool() { - check_diagnostics( - r#" -enum Either { A(bool), B } - -fn main() { - match Either::B { } - //^^^^^^^^^ Missing match arm - match Either::B { - //^^^^^^^^^ Missing match arm - Either::A(true) => (), Either::B => () - } - - match Either::B { - Either::A(true) => (), - Either::A(false) => (), - Either::B => (), - } - match Either::B { - Either::B => (), - _ => (), - } - match Either::B { - Either::A(_) => (), - Either::B => (), - } - -} - "#, - ); - } - - #[test] - fn enum_different_sizes() { - check_diagnostics( - r#" -enum Either { A(bool), B(bool, bool) } - -fn main() { - match Either::A(false) { - //^^^^^^^^^^^^^^^^ Missing match arm - Either::A(_) => (), - Either::B(false, _) => (), - } - - match Either::A(false) { - Either::A(_) => (), - Either::B(true, _) => (), - Either::B(false, _) => (), - } - match Either::A(false) { - Either::A(true) 
| Either::A(false) => (), - Either::B(true, _) => (), - Either::B(false, _) => (), - } -} -"#, - ); - } - - #[test] - fn tuple_of_enum_no_diagnostic() { - check_diagnostics( - r#" -enum Either { A(bool), B(bool, bool) } -enum Either2 { C, D } - -fn main() { - match (Either::A(false), Either2::C) { - (Either::A(true), _) | (Either::A(false), _) => (), - (Either::B(true, _), Either2::C) => (), - (Either::B(false, _), Either2::C) => (), - (Either::B(_, _), Either2::D) => (), - } -} -"#, - ); - } - - #[test] - fn mismatched_types() { - // Match statements with arms that don't match the - // expression pattern do not fire this diagnostic. - check_diagnostics( - r#" -enum Either { A, B } -enum Either2 { C, D } - -fn main() { - match Either::A { - Either2::C => (), - Either2::D => (), - } - match (true, false) { - (true, false, true) => (), - (true) => (), - } - match (0) { () => () } - match Unresolved::Bar { Unresolved::Baz => () } -} - "#, - ); - } - - #[test] - fn malformed_match_arm_tuple_enum_missing_pattern() { - // We are testing to be sure we don't panic here when the match - // arm `Either::B` is missing its pattern. 
- check_diagnostics( - r#" -enum Either { A, B(u32) } - -fn main() { - match Either::A { - Either::A => (), - Either::B() => (), - } -} -"#, - ); - } - - #[test] - fn expr_diverges() { - check_diagnostics( - r#" -enum Either { A, B } - -fn main() { - match loop {} { - Either::A => (), - Either::B => (), - } - match loop {} { - Either::A => (), - } - match loop { break Foo::A } { - //^^^^^^^^^^^^^^^^^^^^^ Missing match arm - Either::A => (), - } - match loop { break Foo::A } { - Either::A => (), - Either::B => (), - } -} -"#, - ); - } - - #[test] - fn expr_partially_diverges() { - check_diagnostics( - r#" -enum Either { A(T), B } - -fn foo() -> Either { Either::B } -fn main() -> u32 { - match foo() { - Either::A(val) => val, - Either::B => 0, - } -} -"#, - ); - } - - #[test] - fn enum_record() { - check_diagnostics( - r#" -enum Either { A { foo: bool }, B } - -fn main() { - let a = Either::A { foo: true }; - match a { } - //^ Missing match arm - match a { Either::A { foo: true } => () } - //^ Missing match arm - match a { - Either::A { } => (), - //^^^ Missing structure fields: - // | - foo - Either::B => (), - } - match a { - //^ Missing match arm - Either::A { } => (), - } //^^^ Missing structure fields: - // | - foo - - match a { - Either::A { foo: true } => (), - Either::A { foo: false } => (), - Either::B => (), - } - match a { - Either::A { foo: _ } => (), - Either::B => (), - } -} -"#, - ); - } - - #[test] - fn enum_record_fields_out_of_order() { - check_diagnostics( - r#" -enum Either { - A { foo: bool, bar: () }, - B, -} - -fn main() { - let a = Either::A { foo: true, bar: () }; - match a { - //^ Missing match arm - Either::A { bar: (), foo: false } => (), - Either::A { foo: true, bar: () } => (), - } - - match a { - Either::A { bar: (), foo: false } => (), - Either::A { foo: true, bar: () } => (), - Either::B => (), - } -} -"#, - ); - } - - #[test] - fn enum_record_ellipsis() { - check_diagnostics( - r#" -enum Either { - A { foo: bool, bar: bool }, - B, -} 
- -fn main() { - let a = Either::B; - match a { - //^ Missing match arm - Either::A { foo: true, .. } => (), - Either::B => (), - } - match a { - //^ Missing match arm - Either::A { .. } => (), - } - - match a { - Either::A { foo: true, .. } => (), - Either::A { foo: false, .. } => (), - Either::B => (), - } - - match a { - Either::A { .. } => (), - Either::B => (), - } -} -"#, - ); - } - - #[test] - fn enum_tuple_partial_ellipsis() { - check_diagnostics( - r#" -enum Either { - A(bool, bool, bool, bool), - B, -} - -fn main() { - match Either::B { - //^^^^^^^^^ Missing match arm - Either::A(true, .., true) => (), - Either::A(true, .., false) => (), - Either::A(false, .., false) => (), - Either::B => (), - } - match Either::B { - //^^^^^^^^^ Missing match arm - Either::A(true, .., true) => (), - Either::A(true, .., false) => (), - Either::A(.., true) => (), - Either::B => (), - } - - match Either::B { - Either::A(true, .., true) => (), - Either::A(true, .., false) => (), - Either::A(false, .., true) => (), - Either::A(false, .., false) => (), - Either::B => (), - } - match Either::B { - Either::A(true, .., true) => (), - Either::A(true, .., false) => (), - Either::A(.., true) => (), - Either::A(.., false) => (), - Either::B => (), - } -} -"#, - ); - } - - #[test] - fn never() { - check_diagnostics( - r#" -enum Never {} - -fn enum_(never: Never) { - match never {} -} -fn enum_ref(never: &Never) { - match never {} -} -fn bang(never: !) { - match never {} -} -"#, - ); - } - - #[test] - fn or_pattern_panic() { - check_diagnostics( - r#" -pub enum Category { Infinity, Zero } - -fn panic(a: Category, b: Category) { - match (a, b) { - (Category::Zero | Category::Infinity, _) => (), - (_, Category::Zero | Category::Infinity) => (), - } - - // FIXME: This is a false positive, but the code used to cause a panic in the match checker, - // so this acts as a regression test for that. 
- match (a, b) { - //^^^^^^ Missing match arm - (Category::Infinity, Category::Infinity) | (Category::Zero, Category::Zero) => (), - (Category::Infinity | Category::Zero, _) => (), - } -} -"#, - ); - } - - mod false_negatives { - //! The implementation of match checking here is a work in progress. As we roll this out, we - //! prefer false negatives to false positives (ideally there would be no false positives). This - //! test module should document known false negatives. Eventually we will have a complete - //! implementation of match checking and this module will be empty. - //! - //! The reasons for documenting known false negatives: - //! - //! 1. It acts as a backlog of work that can be done to improve the behavior of the system. - //! 2. It ensures the code doesn't panic when handling these cases. - use super::*; - - #[test] - fn integers() { - // We don't currently check integer exhaustiveness. - check_diagnostics( - r#" -fn main() { - match 5 { - 10 => (), - 11..20 => (), - } -} -"#, - ); - } - - #[test] - fn internal_or() { - // We do not currently handle patterns with internal `or`s. - check_diagnostics( - r#" -fn main() { - enum Either { A(bool), B } - match Either::B { - Either::A(true | false) => (), - } -} -"#, - ); - } - - #[test] - fn tuple_of_bools_with_ellipsis_at_end_missing_arm() { - // We don't currently handle tuple patterns with ellipsis. - check_diagnostics( - r#" -fn main() { - match (false, true, false) { - (false, ..) => (), - } -} -"#, - ); - } - - #[test] - fn tuple_of_bools_with_ellipsis_at_beginning_missing_arm() { - // We don't currently handle tuple patterns with ellipsis. - check_diagnostics( - r#" -fn main() { - match (false, true, false) { - (.., false) => (), - } -} -"#, - ); - } - - #[test] - fn struct_missing_arm() { - // We don't currently handle structs. 
- check_diagnostics( - r#" -struct Foo { a: bool } -fn main(f: Foo) { - match f { Foo { a: true } => () } -} -"#, - ); - } - } -} diff --git a/crates/ra_hir_ty/src/display.rs b/crates/ra_hir_ty/src/display.rs deleted file mode 100644 index 19770e609a..0000000000 --- a/crates/ra_hir_ty/src/display.rs +++ /dev/null @@ -1,631 +0,0 @@ -//! FIXME: write short doc here - -use std::fmt; - -use crate::{ - db::HirDatabase, utils::generics, ApplicationTy, CallableDefId, FnSig, GenericPredicate, - Obligation, OpaqueTyId, ProjectionTy, Substs, TraitRef, Ty, TypeCtor, -}; -use hir_def::{ - find_path, generics::TypeParamProvenance, item_scope::ItemInNs, AdtId, AssocContainerId, - Lookup, ModuleId, -}; -use hir_expand::name::Name; - -pub struct HirFormatter<'a> { - pub db: &'a dyn HirDatabase, - fmt: &'a mut dyn fmt::Write, - buf: String, - curr_size: usize, - pub(crate) max_size: Option, - omit_verbose_types: bool, - display_target: DisplayTarget, -} - -pub trait HirDisplay { - fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError>; - - /// Returns a `Display`able type that is human-readable. - /// Use this for showing types to the user (e.g. diagnostics) - fn display<'a>(&'a self, db: &'a dyn HirDatabase) -> HirDisplayWrapper<'a, Self> - where - Self: Sized, - { - HirDisplayWrapper { - db, - t: self, - max_size: None, - omit_verbose_types: false, - display_target: DisplayTarget::Diagnostics, - } - } - - /// Returns a `Display`able type that is human-readable and tries to be succinct. - /// Use this for showing types to the user where space is constrained (e.g. doc popups) - fn display_truncated<'a>( - &'a self, - db: &'a dyn HirDatabase, - max_size: Option, - ) -> HirDisplayWrapper<'a, Self> - where - Self: Sized, - { - HirDisplayWrapper { - db, - t: self, - max_size, - omit_verbose_types: true, - display_target: DisplayTarget::Diagnostics, - } - } - - /// Returns a String representation of `self` that can be inserted into the given module. 
- /// Use this when generating code (e.g. assists) - fn display_source_code<'a>( - &'a self, - db: &'a dyn HirDatabase, - module_id: ModuleId, - ) -> Result { - let mut result = String::new(); - match self.hir_fmt(&mut HirFormatter { - db, - fmt: &mut result, - buf: String::with_capacity(20), - curr_size: 0, - max_size: None, - omit_verbose_types: false, - display_target: DisplayTarget::SourceCode { module_id }, - }) { - Ok(()) => {} - Err(HirDisplayError::FmtError) => panic!("Writing to String can't fail!"), - Err(HirDisplayError::DisplaySourceCodeError(e)) => return Err(e), - }; - Ok(result) - } -} - -impl<'a> HirFormatter<'a> { - pub fn write_joined( - &mut self, - iter: impl IntoIterator, - sep: &str, - ) -> Result<(), HirDisplayError> { - let mut first = true; - for e in iter { - if !first { - write!(self, "{}", sep)?; - } - first = false; - e.hir_fmt(self)?; - } - Ok(()) - } - - /// This allows using the `write!` macro directly with a `HirFormatter`. - pub fn write_fmt(&mut self, args: fmt::Arguments) -> Result<(), HirDisplayError> { - // We write to a buffer first to track output size - self.buf.clear(); - fmt::write(&mut self.buf, args)?; - self.curr_size += self.buf.len(); - - // Then we write to the internal formatter from the buffer - self.fmt.write_str(&self.buf).map_err(HirDisplayError::from) - } - - pub fn should_truncate(&self) -> bool { - if let Some(max_size) = self.max_size { - self.curr_size >= max_size - } else { - false - } - } - - pub fn omit_verbose_types(&self) -> bool { - self.omit_verbose_types - } -} - -#[derive(Clone, Copy)] -enum DisplayTarget { - /// Display types for inlays, doc popups, autocompletion, etc... - /// Showing `{unknown}` or not qualifying paths is fine here. - /// There's no reason for this to fail. - Diagnostics, - /// Display types for inserting them in source files. - /// The generated code should compile, so paths need to be qualified. 
- SourceCode { module_id: ModuleId }, -} - -impl DisplayTarget { - fn is_source_code(&self) -> bool { - matches!(self, Self::SourceCode {..}) - } -} - -#[derive(Debug)] -pub enum DisplaySourceCodeError { - PathNotFound, -} - -pub enum HirDisplayError { - /// Errors that can occur when generating source code - DisplaySourceCodeError(DisplaySourceCodeError), - /// `FmtError` is required to be compatible with std::fmt::Display - FmtError, -} -impl From for HirDisplayError { - fn from(_: fmt::Error) -> Self { - Self::FmtError - } -} - -pub struct HirDisplayWrapper<'a, T> { - db: &'a dyn HirDatabase, - t: &'a T, - max_size: Option, - omit_verbose_types: bool, - display_target: DisplayTarget, -} - -impl<'a, T> fmt::Display for HirDisplayWrapper<'a, T> -where - T: HirDisplay, -{ - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self.t.hir_fmt(&mut HirFormatter { - db: self.db, - fmt: f, - buf: String::with_capacity(20), - curr_size: 0, - max_size: self.max_size, - omit_verbose_types: self.omit_verbose_types, - display_target: self.display_target, - }) { - Ok(()) => Ok(()), - Err(HirDisplayError::FmtError) => Err(fmt::Error), - Err(HirDisplayError::DisplaySourceCodeError(_)) => { - // This should never happen - panic!("HirDisplay failed when calling Display::fmt!") - } - } - } -} - -const TYPE_HINT_TRUNCATION: &str = "…"; - -impl HirDisplay for &Ty { - fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> { - HirDisplay::hir_fmt(*self, f) - } -} - -impl HirDisplay for ApplicationTy { - fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> { - if f.should_truncate() { - return write!(f, "{}", TYPE_HINT_TRUNCATION); - } - - match self.ctor { - TypeCtor::Bool => write!(f, "bool")?, - TypeCtor::Char => write!(f, "char")?, - TypeCtor::Int(t) => write!(f, "{}", t)?, - TypeCtor::Float(t) => write!(f, "{}", t)?, - TypeCtor::Str => write!(f, "str")?, - TypeCtor::Slice => { - let t = self.parameters.as_single(); - write!(f, "[{}]", 
t.display(f.db))?; - } - TypeCtor::Array => { - let t = self.parameters.as_single(); - write!(f, "[{}; _]", t.display(f.db))?; - } - TypeCtor::RawPtr(m) => { - let t = self.parameters.as_single(); - write!(f, "*{}{}", m.as_keyword_for_ptr(), t.display(f.db))?; - } - TypeCtor::Ref(m) => { - let t = self.parameters.as_single(); - let ty_display = if f.omit_verbose_types() { - t.display_truncated(f.db, f.max_size) - } else { - t.display(f.db) - }; - write!(f, "&{}{}", m.as_keyword_for_ref(), ty_display)?; - } - TypeCtor::Never => write!(f, "!")?, - TypeCtor::Tuple { .. } => { - let ts = &self.parameters; - if ts.len() == 1 { - write!(f, "({},)", ts[0].display(f.db))?; - } else { - write!(f, "(")?; - f.write_joined(&*ts.0, ", ")?; - write!(f, ")")?; - } - } - TypeCtor::FnPtr { is_varargs, .. } => { - let sig = FnSig::from_fn_ptr_substs(&self.parameters, is_varargs); - write!(f, "fn(")?; - f.write_joined(sig.params(), ", ")?; - if is_varargs { - if sig.params().is_empty() { - write!(f, "...")?; - } else { - write!(f, ", ...")?; - } - } - write!(f, ")")?; - let ret = sig.ret(); - if *ret != Ty::unit() { - let ret_display = if f.omit_verbose_types() { - ret.display_truncated(f.db, f.max_size) - } else { - ret.display(f.db) - }; - write!(f, " -> {}", ret_display)?; - } - } - TypeCtor::FnDef(def) => { - let sig = f.db.callable_item_signature(def).subst(&self.parameters); - match def { - CallableDefId::FunctionId(ff) => { - write!(f, "fn {}", f.db.function_data(ff).name)? - } - CallableDefId::StructId(s) => write!(f, "{}", f.db.struct_data(s).name)?, - CallableDefId::EnumVariantId(e) => { - write!(f, "{}", f.db.enum_data(e.parent).variants[e.local_id].name)? - } - }; - if self.parameters.len() > 0 { - let generics = generics(f.db.upcast(), def.into()); - let (parent_params, self_param, type_params, _impl_trait_params) = - generics.provenance_split(); - let total_len = parent_params + self_param + type_params; - // We print all params except implicit impl Trait params. 
Still a bit weird; should we leave out parent and self? - if total_len > 0 { - write!(f, "<")?; - f.write_joined(&self.parameters.0[..total_len], ", ")?; - write!(f, ">")?; - } - } - write!(f, "(")?; - f.write_joined(sig.params(), ", ")?; - write!(f, ")")?; - let ret = sig.ret(); - if *ret != Ty::unit() { - let ret_display = if f.omit_verbose_types() { - ret.display_truncated(f.db, f.max_size) - } else { - ret.display(f.db) - }; - write!(f, " -> {}", ret_display)?; - } - } - TypeCtor::Adt(def_id) => { - match f.display_target { - DisplayTarget::Diagnostics => { - let name = match def_id { - AdtId::StructId(it) => f.db.struct_data(it).name.clone(), - AdtId::UnionId(it) => f.db.union_data(it).name.clone(), - AdtId::EnumId(it) => f.db.enum_data(it).name.clone(), - }; - write!(f, "{}", name)?; - } - DisplayTarget::SourceCode { module_id } => { - if let Some(path) = find_path::find_path( - f.db.upcast(), - ItemInNs::Types(def_id.into()), - module_id, - ) { - write!(f, "{}", path)?; - } else { - return Err(HirDisplayError::DisplaySourceCodeError( - DisplaySourceCodeError::PathNotFound, - )); - } - } - } - - if self.parameters.len() > 0 { - let parameters_to_write = - if f.display_target.is_source_code() || f.omit_verbose_types() { - match self - .ctor - .as_generic_def() - .map(|generic_def_id| f.db.generic_defaults(generic_def_id)) - .filter(|defaults| !defaults.is_empty()) - { - None => self.parameters.0.as_ref(), - Some(default_parameters) => { - let mut default_from = 0; - for (i, parameter) in self.parameters.iter().enumerate() { - match (parameter, default_parameters.get(i)) { - (&Ty::Unknown, _) | (_, None) => { - default_from = i + 1; - } - (_, Some(default_parameter)) => { - let actual_default = default_parameter - .clone() - .subst(&self.parameters.prefix(i)); - if parameter != &actual_default { - default_from = i + 1; - } - } - } - } - &self.parameters.0[0..default_from] - } - } - } else { - self.parameters.0.as_ref() - }; - if !parameters_to_write.is_empty() 
{ - write!(f, "<")?; - f.write_joined(parameters_to_write, ", ")?; - write!(f, ">")?; - } - } - } - TypeCtor::AssociatedType(type_alias) => { - let trait_ = match type_alias.lookup(f.db.upcast()).container { - AssocContainerId::TraitId(it) => it, - _ => panic!("not an associated type"), - }; - let trait_ = f.db.trait_data(trait_); - let type_alias = f.db.type_alias_data(type_alias); - write!(f, "{}::{}", trait_.name, type_alias.name)?; - if self.parameters.len() > 0 { - write!(f, "<")?; - f.write_joined(&*self.parameters.0, ", ")?; - write!(f, ">")?; - } - } - TypeCtor::OpaqueType(opaque_ty_id) => { - let bounds = match opaque_ty_id { - OpaqueTyId::ReturnTypeImplTrait(func, idx) => { - let datas = - f.db.return_type_impl_traits(func).expect("impl trait id without data"); - let data = (*datas) - .as_ref() - .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone()); - data.subst(&self.parameters) - } - }; - write!(f, "impl ")?; - write_bounds_like_dyn_trait(&bounds.value, f)?; - // FIXME: it would maybe be good to distinguish this from the alias type (when debug printing), and to show the substitution - } - TypeCtor::Closure { .. 
} => { - let sig = self.parameters[0].callable_sig(f.db); - if let Some(sig) = sig { - if sig.params().is_empty() { - write!(f, "||")?; - } else if f.omit_verbose_types() { - write!(f, "|{}|", TYPE_HINT_TRUNCATION)?; - } else { - write!(f, "|")?; - f.write_joined(sig.params(), ", ")?; - write!(f, "|")?; - }; - - let ret_display = if f.omit_verbose_types() { - sig.ret().display_truncated(f.db, f.max_size) - } else { - sig.ret().display(f.db) - }; - write!(f, " -> {}", ret_display)?; - } else { - write!(f, "{{closure}}")?; - } - } - } - Ok(()) - } -} - -impl HirDisplay for ProjectionTy { - fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> { - if f.should_truncate() { - return write!(f, "{}", TYPE_HINT_TRUNCATION); - } - - let trait_ = f.db.trait_data(self.trait_(f.db)); - write!(f, "<{} as {}", self.parameters[0].display(f.db), trait_.name)?; - if self.parameters.len() > 1 { - write!(f, "<")?; - f.write_joined(&self.parameters[1..], ", ")?; - write!(f, ">")?; - } - write!(f, ">::{}", f.db.type_alias_data(self.associated_ty).name)?; - Ok(()) - } -} - -impl HirDisplay for Ty { - fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> { - if f.should_truncate() { - return write!(f, "{}", TYPE_HINT_TRUNCATION); - } - - match self { - Ty::Apply(a_ty) => a_ty.hir_fmt(f)?, - Ty::Projection(p_ty) => p_ty.hir_fmt(f)?, - Ty::Placeholder(id) => { - let generics = generics(f.db.upcast(), id.parent); - let param_data = &generics.params.types[id.local_id]; - match param_data.provenance { - TypeParamProvenance::TypeParamList | TypeParamProvenance::TraitSelf => { - write!(f, "{}", param_data.name.clone().unwrap_or_else(Name::missing))? 
- } - TypeParamProvenance::ArgumentImplTrait => { - write!(f, "impl ")?; - let bounds = f.db.generic_predicates_for_param(*id); - let substs = Substs::type_params_for_generics(&generics); - write_bounds_like_dyn_trait( - &bounds.iter().map(|b| b.clone().subst(&substs)).collect::>(), - f, - )?; - } - } - } - Ty::Bound(idx) => write!(f, "?{}.{}", idx.debruijn.depth(), idx.index)?, - Ty::Dyn(predicates) => { - write!(f, "dyn ")?; - write_bounds_like_dyn_trait(predicates, f)?; - } - Ty::Opaque(opaque_ty) => { - let bounds = match opaque_ty.opaque_ty_id { - OpaqueTyId::ReturnTypeImplTrait(func, idx) => { - let datas = - f.db.return_type_impl_traits(func).expect("impl trait id without data"); - let data = (*datas) - .as_ref() - .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone()); - data.subst(&opaque_ty.parameters) - } - }; - write!(f, "impl ")?; - write_bounds_like_dyn_trait(&bounds.value, f)?; - } - Ty::Unknown => write!(f, "{{unknown}}")?, - Ty::Infer(..) => write!(f, "_")?, - } - Ok(()) - } -} - -fn write_bounds_like_dyn_trait( - predicates: &[GenericPredicate], - f: &mut HirFormatter, -) -> Result<(), HirDisplayError> { - // Note: This code is written to produce nice results (i.e. - // corresponding to surface Rust) for types that can occur in - // actual Rust. It will have weird results if the predicates - // aren't as expected (i.e. self types = $0, projection - // predicates for a certain trait come after the Implemented - // predicate for that trait). - let mut first = true; - let mut angle_open = false; - for p in predicates.iter() { - match p { - GenericPredicate::Implemented(trait_ref) => { - if angle_open { - write!(f, ">")?; - } - if !first { - write!(f, " + ")?; - } - // We assume that the self type is $0 (i.e. 
the - // existential) here, which is the only thing that's - // possible in actual Rust, and hence don't print it - write!(f, "{}", f.db.trait_data(trait_ref.trait_).name)?; - if trait_ref.substs.len() > 1 { - write!(f, "<")?; - f.write_joined(&trait_ref.substs[1..], ", ")?; - // there might be assoc type bindings, so we leave the angle brackets open - angle_open = true; - } - } - GenericPredicate::Projection(projection_pred) => { - // in types in actual Rust, these will always come - // after the corresponding Implemented predicate - if angle_open { - write!(f, ", ")?; - } else { - write!(f, "<")?; - angle_open = true; - } - let type_alias = f.db.type_alias_data(projection_pred.projection_ty.associated_ty); - write!(f, "{} = ", type_alias.name)?; - projection_pred.ty.hir_fmt(f)?; - } - GenericPredicate::Error => { - if angle_open { - // impl Trait - write!(f, ", ")?; - } else if !first { - // impl Trait + {error} - write!(f, " + ")?; - } - p.hir_fmt(f)?; - } - } - first = false; - } - if angle_open { - write!(f, ">")?; - } - Ok(()) -} - -impl TraitRef { - fn hir_fmt_ext(&self, f: &mut HirFormatter, use_as: bool) -> Result<(), HirDisplayError> { - if f.should_truncate() { - return write!(f, "{}", TYPE_HINT_TRUNCATION); - } - - self.substs[0].hir_fmt(f)?; - if use_as { - write!(f, " as ")?; - } else { - write!(f, ": ")?; - } - write!(f, "{}", f.db.trait_data(self.trait_).name)?; - if self.substs.len() > 1 { - write!(f, "<")?; - f.write_joined(&self.substs[1..], ", ")?; - write!(f, ">")?; - } - Ok(()) - } -} - -impl HirDisplay for TraitRef { - fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> { - self.hir_fmt_ext(f, false) - } -} - -impl HirDisplay for &GenericPredicate { - fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> { - HirDisplay::hir_fmt(*self, f) - } -} - -impl HirDisplay for GenericPredicate { - fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> { - if f.should_truncate() { - return write!(f, 
"{}", TYPE_HINT_TRUNCATION); - } - - match self { - GenericPredicate::Implemented(trait_ref) => trait_ref.hir_fmt(f)?, - GenericPredicate::Projection(projection_pred) => { - write!(f, "<")?; - projection_pred.projection_ty.trait_ref(f.db).hir_fmt_ext(f, true)?; - write!( - f, - ">::{} = {}", - f.db.type_alias_data(projection_pred.projection_ty.associated_ty).name, - projection_pred.ty.display(f.db) - )?; - } - GenericPredicate::Error => write!(f, "{{error}}")?, - } - Ok(()) - } -} - -impl HirDisplay for Obligation { - fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> { - Ok(match self { - Obligation::Trait(tr) => write!(f, "Implements({})", tr.display(f.db))?, - Obligation::Projection(proj) => write!( - f, - "Normalize({} => {})", - proj.projection_ty.display(f.db), - proj.ty.display(f.db) - )?, - }) - } -} diff --git a/crates/ra_hir_ty/src/infer.rs b/crates/ra_hir_ty/src/infer.rs deleted file mode 100644 index 3d12039a6d..0000000000 --- a/crates/ra_hir_ty/src/infer.rs +++ /dev/null @@ -1,803 +0,0 @@ -//! Type inference, i.e. the process of walking through the code and determining -//! the type of each expression and pattern. -//! -//! For type inference, compare the implementations in rustc (the various -//! check_* methods in librustc_typeck/check/mod.rs are a good entry point) and -//! IntelliJ-Rust (org.rust.lang.core.types.infer). Our entry point for -//! inference here is the `infer` function, which infers the types of all -//! expressions in a given function. -//! -//! During inference, types (i.e. the `Ty` struct) can contain type 'variables' -//! which represent currently unknown types; as we walk through the expressions, -//! we might determine that certain variables need to be equal to each other, or -//! to certain types. To record this, we use the union-find implementation from -//! the `ena` crate, which is extracted from rustc. 
- -use std::borrow::Cow; -use std::mem; -use std::ops::Index; -use std::sync::Arc; - -use hir_def::{ - body::Body, - data::{ConstData, FunctionData, StaticData}, - expr::{BindingAnnotation, ExprId, PatId}, - lang_item::LangItemTarget, - path::{path, Path}, - resolver::{HasResolver, Resolver, TypeNs}, - type_ref::{Mutability, TypeRef}, - AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, FunctionId, Lookup, TraitId, - TypeAliasId, VariantId, -}; -use hir_expand::{diagnostics::DiagnosticSink, name::name}; -use ra_arena::map::ArenaMap; -use ra_prof::profile; -use ra_syntax::SmolStr; -use rustc_hash::FxHashMap; -use stdx::impl_from; - -use super::{ - primitive::{FloatTy, IntTy}, - traits::{Guidance, Obligation, ProjectionPredicate, Solution}, - InEnvironment, ProjectionTy, Substs, TraitEnvironment, TraitRef, Ty, TypeCtor, TypeWalk, -}; -use crate::{ - db::HirDatabase, infer::diagnostics::InferenceDiagnostic, lower::ImplTraitLoweringMode, -}; - -pub(crate) use unify::unify; - -macro_rules! ty_app { - ($ctor:pat, $param:pat) => { - crate::Ty::Apply(crate::ApplicationTy { ctor: $ctor, parameters: $param }) - }; - ($ctor:pat) => { - ty_app!($ctor, _) - }; -} - -mod unify; -mod path; -mod expr; -mod pat; -mod coerce; - -/// The entry point of type inference. -pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc { - let _p = profile("infer_query"); - let resolver = def.resolver(db.upcast()); - let mut ctx = InferenceContext::new(db, def, resolver); - - match def { - DefWithBodyId::ConstId(c) => ctx.collect_const(&db.const_data(c)), - DefWithBodyId::FunctionId(f) => ctx.collect_fn(&db.function_data(f)), - DefWithBodyId::StaticId(s) => ctx.collect_static(&db.static_data(s)), - } - - ctx.infer_body(); - - Arc::new(ctx.resolve_all()) -} - -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] -enum ExprOrPatId { - ExprId(ExprId), - PatId(PatId), -} -impl_from!(ExprId, PatId for ExprOrPatId); - -/// Binding modes inferred for patterns. 
-/// https://doc.rust-lang.org/reference/patterns.html#binding-modes -#[derive(Copy, Clone, Debug, Eq, PartialEq)] -enum BindingMode { - Move, - Ref(Mutability), -} - -impl BindingMode { - pub fn convert(annotation: BindingAnnotation) -> BindingMode { - match annotation { - BindingAnnotation::Unannotated | BindingAnnotation::Mutable => BindingMode::Move, - BindingAnnotation::Ref => BindingMode::Ref(Mutability::Shared), - BindingAnnotation::RefMut => BindingMode::Ref(Mutability::Mut), - } - } -} - -impl Default for BindingMode { - fn default() -> Self { - BindingMode::Move - } -} - -/// A mismatch between an expected and an inferred type. -#[derive(Clone, PartialEq, Eq, Debug, Hash)] -pub struct TypeMismatch { - pub expected: Ty, - pub actual: Ty, -} - -/// The result of type inference: A mapping from expressions and patterns to types. -#[derive(Clone, PartialEq, Eq, Debug, Default)] -pub struct InferenceResult { - /// For each method call expr, records the function it resolves to. - method_resolutions: FxHashMap, - /// For each field access expr, records the field it resolves to. - field_resolutions: FxHashMap, - /// For each field in record literal, records the field it resolves to. - record_field_resolutions: FxHashMap, - record_field_pat_resolutions: FxHashMap, - /// For each struct literal, records the variant it resolves to. 
- variant_resolutions: FxHashMap, - /// For each associated item record what it resolves to - assoc_resolutions: FxHashMap, - diagnostics: Vec, - pub type_of_expr: ArenaMap, - pub type_of_pat: ArenaMap, - pub(super) type_mismatches: ArenaMap, -} - -impl InferenceResult { - pub fn method_resolution(&self, expr: ExprId) -> Option { - self.method_resolutions.get(&expr).copied() - } - pub fn field_resolution(&self, expr: ExprId) -> Option { - self.field_resolutions.get(&expr).copied() - } - pub fn record_field_resolution(&self, expr: ExprId) -> Option { - self.record_field_resolutions.get(&expr).copied() - } - pub fn record_field_pat_resolution(&self, pat: PatId) -> Option { - self.record_field_pat_resolutions.get(&pat).copied() - } - pub fn variant_resolution_for_expr(&self, id: ExprId) -> Option { - self.variant_resolutions.get(&id.into()).copied() - } - pub fn variant_resolution_for_pat(&self, id: PatId) -> Option { - self.variant_resolutions.get(&id.into()).copied() - } - pub fn assoc_resolutions_for_expr(&self, id: ExprId) -> Option { - self.assoc_resolutions.get(&id.into()).copied() - } - pub fn assoc_resolutions_for_pat(&self, id: PatId) -> Option { - self.assoc_resolutions.get(&id.into()).copied() - } - pub fn type_mismatch_for_expr(&self, expr: ExprId) -> Option<&TypeMismatch> { - self.type_mismatches.get(expr) - } - pub fn add_diagnostics( - &self, - db: &dyn HirDatabase, - owner: DefWithBodyId, - sink: &mut DiagnosticSink, - ) { - self.diagnostics.iter().for_each(|it| it.add_to(db, owner, sink)) - } -} - -impl Index for InferenceResult { - type Output = Ty; - - fn index(&self, expr: ExprId) -> &Ty { - self.type_of_expr.get(expr).unwrap_or(&Ty::Unknown) - } -} - -impl Index for InferenceResult { - type Output = Ty; - - fn index(&self, pat: PatId) -> &Ty { - self.type_of_pat.get(pat).unwrap_or(&Ty::Unknown) - } -} - -/// The inference context contains all information needed during type inference. 
-#[derive(Clone, Debug)] -struct InferenceContext<'a> { - db: &'a dyn HirDatabase, - owner: DefWithBodyId, - body: Arc, - resolver: Resolver, - table: unify::InferenceTable, - trait_env: Arc, - obligations: Vec, - result: InferenceResult, - /// The return type of the function being inferred, or the closure if we're - /// currently within one. - /// - /// We might consider using a nested inference context for checking - /// closures, but currently this is the only field that will change there, - /// so it doesn't make sense. - return_ty: Ty, - diverges: Diverges, - breakables: Vec, -} - -#[derive(Clone, Debug)] -struct BreakableContext { - pub may_break: bool, - pub break_ty: Ty, - pub label: Option, -} - -fn find_breakable<'c>( - ctxs: &'c mut [BreakableContext], - label: Option<&name::Name>, -) -> Option<&'c mut BreakableContext> { - match label { - Some(_) => ctxs.iter_mut().rev().find(|ctx| ctx.label.as_ref() == label), - None => ctxs.last_mut(), - } -} - -impl<'a> InferenceContext<'a> { - fn new(db: &'a dyn HirDatabase, owner: DefWithBodyId, resolver: Resolver) -> Self { - InferenceContext { - result: InferenceResult::default(), - table: unify::InferenceTable::new(), - obligations: Vec::default(), - return_ty: Ty::Unknown, // set in collect_fn_signature - trait_env: TraitEnvironment::lower(db, &resolver), - db, - owner, - body: db.body(owner), - resolver, - diverges: Diverges::Maybe, - breakables: Vec::new(), - } - } - - fn resolve_all(mut self) -> InferenceResult { - // FIXME resolve obligations as well (use Guidance if necessary) - let mut result = std::mem::take(&mut self.result); - for ty in result.type_of_expr.values_mut() { - let resolved = self.table.resolve_ty_completely(mem::replace(ty, Ty::Unknown)); - *ty = resolved; - } - for ty in result.type_of_pat.values_mut() { - let resolved = self.table.resolve_ty_completely(mem::replace(ty, Ty::Unknown)); - *ty = resolved; - } - result - } - - fn write_expr_ty(&mut self, expr: ExprId, ty: Ty) { - 
self.result.type_of_expr.insert(expr, ty); - } - - fn write_method_resolution(&mut self, expr: ExprId, func: FunctionId) { - self.result.method_resolutions.insert(expr, func); - } - - fn write_field_resolution(&mut self, expr: ExprId, field: FieldId) { - self.result.field_resolutions.insert(expr, field); - } - - fn write_variant_resolution(&mut self, id: ExprOrPatId, variant: VariantId) { - self.result.variant_resolutions.insert(id, variant); - } - - fn write_assoc_resolution(&mut self, id: ExprOrPatId, item: AssocItemId) { - self.result.assoc_resolutions.insert(id, item); - } - - fn write_pat_ty(&mut self, pat: PatId, ty: Ty) { - self.result.type_of_pat.insert(pat, ty); - } - - fn push_diagnostic(&mut self, diagnostic: InferenceDiagnostic) { - self.result.diagnostics.push(diagnostic); - } - - fn make_ty_with_mode( - &mut self, - type_ref: &TypeRef, - impl_trait_mode: ImplTraitLoweringMode, - ) -> Ty { - // FIXME use right resolver for block - let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver) - .with_impl_trait_mode(impl_trait_mode); - let ty = Ty::from_hir(&ctx, type_ref); - let ty = self.insert_type_vars(ty); - self.normalize_associated_types_in(ty) - } - - fn make_ty(&mut self, type_ref: &TypeRef) -> Ty { - self.make_ty_with_mode(type_ref, ImplTraitLoweringMode::Disallowed) - } - - /// Replaces Ty::Unknown by a new type var, so we can maybe still infer it. 
- fn insert_type_vars_shallow(&mut self, ty: Ty) -> Ty { - match ty { - Ty::Unknown => self.table.new_type_var(), - _ => ty, - } - } - - fn insert_type_vars(&mut self, ty: Ty) -> Ty { - ty.fold(&mut |ty| self.insert_type_vars_shallow(ty)) - } - - fn resolve_obligations_as_possible(&mut self) { - let obligations = mem::replace(&mut self.obligations, Vec::new()); - for obligation in obligations { - let in_env = InEnvironment::new(self.trait_env.clone(), obligation.clone()); - let canonicalized = self.canonicalizer().canonicalize_obligation(in_env); - let solution = - self.db.trait_solve(self.resolver.krate().unwrap(), canonicalized.value.clone()); - - match solution { - Some(Solution::Unique(substs)) => { - canonicalized.apply_solution(self, substs.0); - } - Some(Solution::Ambig(Guidance::Definite(substs))) => { - canonicalized.apply_solution(self, substs.0); - self.obligations.push(obligation); - } - Some(_) => { - // FIXME use this when trying to resolve everything at the end - self.obligations.push(obligation); - } - None => { - // FIXME obligation cannot be fulfilled => diagnostic - } - }; - } - } - - fn unify(&mut self, ty1: &Ty, ty2: &Ty) -> bool { - self.table.unify(ty1, ty2) - } - - /// Resolves the type as far as currently possible, replacing type variables - /// by their known types. All types returned by the infer_* functions should - /// be resolved as far as possible, i.e. contain no type variables with - /// known type. 
- fn resolve_ty_as_possible(&mut self, ty: Ty) -> Ty { - self.resolve_obligations_as_possible(); - - self.table.resolve_ty_as_possible(ty) - } - - fn resolve_ty_shallow<'b>(&mut self, ty: &'b Ty) -> Cow<'b, Ty> { - self.table.resolve_ty_shallow(ty) - } - - fn resolve_associated_type(&mut self, inner_ty: Ty, assoc_ty: Option) -> Ty { - self.resolve_associated_type_with_params(inner_ty, assoc_ty, &[]) - } - - fn resolve_associated_type_with_params( - &mut self, - inner_ty: Ty, - assoc_ty: Option, - params: &[Ty], - ) -> Ty { - match assoc_ty { - Some(res_assoc_ty) => { - let trait_ = match res_assoc_ty.lookup(self.db.upcast()).container { - hir_def::AssocContainerId::TraitId(trait_) => trait_, - _ => panic!("resolve_associated_type called with non-associated type"), - }; - let ty = self.table.new_type_var(); - let substs = Substs::build_for_def(self.db, res_assoc_ty) - .push(inner_ty) - .fill(params.iter().cloned()) - .build(); - let trait_ref = TraitRef { trait_, substs: substs.clone() }; - let projection = ProjectionPredicate { - ty: ty.clone(), - projection_ty: ProjectionTy { associated_ty: res_assoc_ty, parameters: substs }, - }; - self.obligations.push(Obligation::Trait(trait_ref)); - self.obligations.push(Obligation::Projection(projection)); - self.resolve_ty_as_possible(ty) - } - None => Ty::Unknown, - } - } - - /// Recurses through the given type, normalizing associated types mentioned - /// in it by replacing them by type variables and registering obligations to - /// resolve later. This should be done once for every type we get from some - /// type annotation (e.g. from a let type annotation, field type or function - /// call). `make_ty` handles this already, but e.g. for field types we need - /// to do it as well. 
- fn normalize_associated_types_in(&mut self, ty: Ty) -> Ty { - let ty = self.resolve_ty_as_possible(ty); - ty.fold(&mut |ty| match ty { - Ty::Projection(proj_ty) => self.normalize_projection_ty(proj_ty), - _ => ty, - }) - } - - fn normalize_projection_ty(&mut self, proj_ty: ProjectionTy) -> Ty { - let var = self.table.new_type_var(); - let predicate = ProjectionPredicate { projection_ty: proj_ty, ty: var.clone() }; - let obligation = Obligation::Projection(predicate); - self.obligations.push(obligation); - var - } - - fn resolve_variant(&mut self, path: Option<&Path>) -> (Ty, Option) { - let path = match path { - Some(path) => path, - None => return (Ty::Unknown, None), - }; - let resolver = &self.resolver; - let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver); - // FIXME: this should resolve assoc items as well, see this example: - // https://play.rust-lang.org/?gist=087992e9e22495446c01c0d4e2d69521 - let (resolution, unresolved) = - match resolver.resolve_path_in_type_ns(self.db.upcast(), path.mod_path()) { - Some(it) => it, - None => return (Ty::Unknown, None), - }; - return match resolution { - TypeNs::AdtId(AdtId::StructId(strukt)) => { - let substs = Ty::substs_from_path(&ctx, path, strukt.into(), true); - let ty = self.db.ty(strukt.into()); - let ty = self.insert_type_vars(ty.subst(&substs)); - forbid_unresolved_segments((ty, Some(strukt.into())), unresolved) - } - TypeNs::AdtId(AdtId::UnionId(u)) => { - let substs = Ty::substs_from_path(&ctx, path, u.into(), true); - let ty = self.db.ty(u.into()); - let ty = self.insert_type_vars(ty.subst(&substs)); - forbid_unresolved_segments((ty, Some(u.into())), unresolved) - } - TypeNs::EnumVariantId(var) => { - let substs = Ty::substs_from_path(&ctx, path, var.into(), true); - let ty = self.db.ty(var.parent.into()); - let ty = self.insert_type_vars(ty.subst(&substs)); - forbid_unresolved_segments((ty, Some(var.into())), unresolved) - } - TypeNs::SelfType(impl_id) => { - let generics = 
crate::utils::generics(self.db.upcast(), impl_id.into()); - let substs = Substs::type_params_for_generics(&generics); - let ty = self.db.impl_self_ty(impl_id).subst(&substs); - match unresolved { - None => { - let variant = ty_variant(&ty); - (ty, variant) - } - Some(1) => { - let segment = path.mod_path().segments.last().unwrap(); - // this could be an enum variant or associated type - if let Some((AdtId::EnumId(enum_id), _)) = ty.as_adt() { - let enum_data = self.db.enum_data(enum_id); - if let Some(local_id) = enum_data.variant(segment) { - let variant = EnumVariantId { parent: enum_id, local_id }; - return (ty, Some(variant.into())); - } - } - // FIXME potentially resolve assoc type - (Ty::Unknown, None) - } - Some(_) => { - // FIXME diagnostic - (Ty::Unknown, None) - } - } - } - TypeNs::TypeAliasId(it) => { - let substs = Substs::build_for_def(self.db, it) - .fill(std::iter::repeat_with(|| self.table.new_type_var())) - .build(); - let ty = self.db.ty(it.into()).subst(&substs); - let variant = ty_variant(&ty); - forbid_unresolved_segments((ty, variant), unresolved) - } - TypeNs::AdtSelfType(_) => { - // FIXME this could happen in array size expressions, once we're checking them - (Ty::Unknown, None) - } - TypeNs::GenericParam(_) => { - // FIXME potentially resolve assoc type - (Ty::Unknown, None) - } - TypeNs::AdtId(AdtId::EnumId(_)) | TypeNs::BuiltinType(_) | TypeNs::TraitId(_) => { - // FIXME diagnostic - (Ty::Unknown, None) - } - }; - - fn forbid_unresolved_segments( - result: (Ty, Option), - unresolved: Option, - ) -> (Ty, Option) { - if unresolved.is_none() { - result - } else { - // FIXME diagnostic - (Ty::Unknown, None) - } - } - - fn ty_variant(ty: &Ty) -> Option { - ty.as_adt().and_then(|(adt_id, _)| match adt_id { - AdtId::StructId(s) => Some(VariantId::StructId(s)), - AdtId::UnionId(u) => Some(VariantId::UnionId(u)), - AdtId::EnumId(_) => { - // FIXME Error E0071, expected struct, variant or union type, found enum `Foo` - None - } - }) - } - } - - fn 
collect_const(&mut self, data: &ConstData) { - self.return_ty = self.make_ty(&data.type_ref); - } - - fn collect_static(&mut self, data: &StaticData) { - self.return_ty = self.make_ty(&data.type_ref); - } - - fn collect_fn(&mut self, data: &FunctionData) { - let body = Arc::clone(&self.body); // avoid borrow checker problem - let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver) - .with_impl_trait_mode(ImplTraitLoweringMode::Param); - let param_tys = - data.params.iter().map(|type_ref| Ty::from_hir(&ctx, type_ref)).collect::>(); - for (ty, pat) in param_tys.into_iter().zip(body.params.iter()) { - let ty = self.insert_type_vars(ty); - let ty = self.normalize_associated_types_in(ty); - - self.infer_pat(*pat, &ty, BindingMode::default()); - } - let return_ty = self.make_ty_with_mode(&data.ret_type, ImplTraitLoweringMode::Disallowed); // FIXME implement RPIT - self.return_ty = return_ty; - } - - fn infer_body(&mut self) { - self.infer_expr_coerce(self.body.body_expr, &Expectation::has_type(self.return_ty.clone())); - } - - fn resolve_lang_item(&self, name: &str) -> Option { - let krate = self.resolver.krate()?; - let name = SmolStr::new_inline_from_ascii(name.len(), name.as_bytes()); - self.db.lang_item(krate, name) - } - - fn resolve_into_iter_item(&self) -> Option { - let path = path![core::iter::IntoIterator]; - let trait_ = self.resolver.resolve_known_trait(self.db.upcast(), &path)?; - self.db.trait_data(trait_).associated_type_by_name(&name![Item]) - } - - fn resolve_ops_try_ok(&self) -> Option { - let path = path![core::ops::Try]; - let trait_ = self.resolver.resolve_known_trait(self.db.upcast(), &path)?; - self.db.trait_data(trait_).associated_type_by_name(&name![Ok]) - } - - fn resolve_ops_neg_output(&self) -> Option { - let trait_ = self.resolve_lang_item("neg")?.as_trait()?; - self.db.trait_data(trait_).associated_type_by_name(&name![Output]) - } - - fn resolve_ops_not_output(&self) -> Option { - let trait_ = 
self.resolve_lang_item("not")?.as_trait()?; - self.db.trait_data(trait_).associated_type_by_name(&name![Output]) - } - - fn resolve_future_future_output(&self) -> Option { - let trait_ = self.resolve_lang_item("future_trait")?.as_trait()?; - self.db.trait_data(trait_).associated_type_by_name(&name![Output]) - } - - fn resolve_boxed_box(&self) -> Option { - let struct_ = self.resolve_lang_item("owned_box")?.as_struct()?; - Some(struct_.into()) - } - - fn resolve_range_full(&self) -> Option { - let path = path![core::ops::RangeFull]; - let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?; - Some(struct_.into()) - } - - fn resolve_range(&self) -> Option { - let path = path![core::ops::Range]; - let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?; - Some(struct_.into()) - } - - fn resolve_range_inclusive(&self) -> Option { - let path = path![core::ops::RangeInclusive]; - let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?; - Some(struct_.into()) - } - - fn resolve_range_from(&self) -> Option { - let path = path![core::ops::RangeFrom]; - let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?; - Some(struct_.into()) - } - - fn resolve_range_to(&self) -> Option { - let path = path![core::ops::RangeTo]; - let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?; - Some(struct_.into()) - } - - fn resolve_range_to_inclusive(&self) -> Option { - let path = path![core::ops::RangeToInclusive]; - let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?; - Some(struct_.into()) - } - - fn resolve_ops_index(&self) -> Option { - self.resolve_lang_item("index")?.as_trait() - } - - fn resolve_ops_index_output(&self) -> Option { - let trait_ = self.resolve_ops_index()?; - self.db.trait_data(trait_).associated_type_by_name(&name![Output]) - } -} - -/// The kinds of placeholders we need during type inference. 
There's separate -/// values for general types, and for integer and float variables. The latter -/// two are used for inference of literal values (e.g. `100` could be one of -/// several integer types). -#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] -pub enum InferTy { - TypeVar(unify::TypeVarId), - IntVar(unify::TypeVarId), - FloatVar(unify::TypeVarId), - MaybeNeverTypeVar(unify::TypeVarId), -} - -impl InferTy { - fn to_inner(self) -> unify::TypeVarId { - match self { - InferTy::TypeVar(ty) - | InferTy::IntVar(ty) - | InferTy::FloatVar(ty) - | InferTy::MaybeNeverTypeVar(ty) => ty, - } - } - - fn fallback_value(self) -> Ty { - match self { - InferTy::TypeVar(..) => Ty::Unknown, - InferTy::IntVar(..) => Ty::simple(TypeCtor::Int(IntTy::i32())), - InferTy::FloatVar(..) => Ty::simple(TypeCtor::Float(FloatTy::f64())), - InferTy::MaybeNeverTypeVar(..) => Ty::simple(TypeCtor::Never), - } - } -} - -/// When inferring an expression, we propagate downward whatever type hint we -/// are able in the form of an `Expectation`. -#[derive(Clone, PartialEq, Eq, Debug)] -struct Expectation { - ty: Ty, - /// See the `rvalue_hint` method. - rvalue_hint: bool, -} - -impl Expectation { - /// The expectation that the type of the expression needs to equal the given - /// type. - fn has_type(ty: Ty) -> Self { - Expectation { ty, rvalue_hint: false } - } - - /// The following explanation is copied straight from rustc: - /// Provides an expectation for an rvalue expression given an *optional* - /// hint, which is not required for type safety (the resulting type might - /// be checked higher up, as is the case with `&expr` and `box expr`), but - /// is useful in determining the concrete type. - /// - /// The primary use case is where the expected type is a fat pointer, - /// like `&[isize]`. For example, consider the following statement: - /// - /// let x: &[isize] = &[1, 2, 3]; - /// - /// In this case, the expected type for the `&[1, 2, 3]` expression is - /// `&[isize]`. 
If however we were to say that `[1, 2, 3]` has the - /// expectation `ExpectHasType([isize])`, that would be too strong -- - /// `[1, 2, 3]` does not have the type `[isize]` but rather `[isize; 3]`. - /// It is only the `&[1, 2, 3]` expression as a whole that can be coerced - /// to the type `&[isize]`. Therefore, we propagate this more limited hint, - /// which still is useful, because it informs integer literals and the like. - /// See the test case `test/ui/coerce-expect-unsized.rs` and #20169 - /// for examples of where this comes up,. - fn rvalue_hint(ty: Ty) -> Self { - Expectation { ty, rvalue_hint: true } - } - - /// This expresses no expectation on the type. - fn none() -> Self { - Expectation { ty: Ty::Unknown, rvalue_hint: false } - } - - fn coercion_target(&self) -> &Ty { - if self.rvalue_hint { - &Ty::Unknown - } else { - &self.ty - } - } -} - -#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] -enum Diverges { - Maybe, - Always, -} - -impl Diverges { - fn is_always(self) -> bool { - self == Diverges::Always - } -} - -impl std::ops::BitAnd for Diverges { - type Output = Self; - fn bitand(self, other: Self) -> Self { - std::cmp::min(self, other) - } -} - -impl std::ops::BitOr for Diverges { - type Output = Self; - fn bitor(self, other: Self) -> Self { - std::cmp::max(self, other) - } -} - -impl std::ops::BitAndAssign for Diverges { - fn bitand_assign(&mut self, other: Self) { - *self = *self & other; - } -} - -impl std::ops::BitOrAssign for Diverges { - fn bitor_assign(&mut self, other: Self) { - *self = *self | other; - } -} - -mod diagnostics { - use hir_def::{expr::ExprId, DefWithBodyId}; - use hir_expand::diagnostics::DiagnosticSink; - - use crate::{ - db::HirDatabase, - diagnostics::{BreakOutsideOfLoop, NoSuchField}, - }; - - #[derive(Debug, PartialEq, Eq, Clone)] - pub(super) enum InferenceDiagnostic { - NoSuchField { expr: ExprId, field: usize }, - BreakOutsideOfLoop { expr: ExprId }, - } - - impl InferenceDiagnostic { - pub(super) fn 
add_to( - &self, - db: &dyn HirDatabase, - owner: DefWithBodyId, - sink: &mut DiagnosticSink, - ) { - match self { - InferenceDiagnostic::NoSuchField { expr, field } => { - let (_, source_map) = db.body_with_source_map(owner); - let field = source_map.field_syntax(*expr, *field); - sink.push(NoSuchField { file: field.file_id, field: field.value }) - } - InferenceDiagnostic::BreakOutsideOfLoop { expr } => { - let (_, source_map) = db.body_with_source_map(owner); - let ptr = source_map - .expr_syntax(*expr) - .expect("break outside of loop in synthetic syntax"); - sink.push(BreakOutsideOfLoop { file: ptr.file_id, expr: ptr.value }) - } - } - } - } -} diff --git a/crates/ra_hir_ty/src/infer/expr.rs b/crates/ra_hir_ty/src/infer/expr.rs deleted file mode 100644 index 731b062c2d..0000000000 --- a/crates/ra_hir_ty/src/infer/expr.rs +++ /dev/null @@ -1,873 +0,0 @@ -//! Type inference for expressions. - -use std::iter::{repeat, repeat_with}; -use std::{mem, sync::Arc}; - -use hir_def::{ - builtin_type::Signedness, - expr::{Array, BinaryOp, Expr, ExprId, Literal, Statement, UnaryOp}, - path::{GenericArg, GenericArgs}, - resolver::resolver_for_expr, - AdtId, AssocContainerId, FieldId, Lookup, -}; -use hir_expand::name::{name, Name}; -use ra_syntax::ast::RangeOp; - -use crate::{ - autoderef, method_resolution, op, - traits::{FnTrait, InEnvironment}, - utils::{generics, variant_data, Generics}, - ApplicationTy, Binders, CallableDefId, InferTy, IntTy, Mutability, Obligation, Rawness, Substs, - TraitRef, Ty, TypeCtor, -}; - -use super::{ - find_breakable, BindingMode, BreakableContext, Diverges, Expectation, InferenceContext, - InferenceDiagnostic, TypeMismatch, -}; - -impl<'a> InferenceContext<'a> { - pub(super) fn infer_expr(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty { - let ty = self.infer_expr_inner(tgt_expr, expected); - if ty.is_never() { - // Any expression that produces a value of type `!` must have diverged - self.diverges = Diverges::Always; - } - let 
could_unify = self.unify(&ty, &expected.ty); - if !could_unify { - self.result.type_mismatches.insert( - tgt_expr, - TypeMismatch { expected: expected.ty.clone(), actual: ty.clone() }, - ); - } - self.resolve_ty_as_possible(ty) - } - - /// Infer type of expression with possibly implicit coerce to the expected type. - /// Return the type after possible coercion. - pub(super) fn infer_expr_coerce(&mut self, expr: ExprId, expected: &Expectation) -> Ty { - let ty = self.infer_expr_inner(expr, &expected); - let ty = if !self.coerce(&ty, &expected.coercion_target()) { - self.result - .type_mismatches - .insert(expr, TypeMismatch { expected: expected.ty.clone(), actual: ty.clone() }); - // Return actual type when type mismatch. - // This is needed for diagnostic when return type mismatch. - ty - } else if expected.coercion_target() == &Ty::Unknown { - ty - } else { - expected.ty.clone() - }; - - self.resolve_ty_as_possible(ty) - } - - fn callable_sig_from_fn_trait(&mut self, ty: &Ty, num_args: usize) -> Option<(Vec, Ty)> { - let krate = self.resolver.krate()?; - let fn_once_trait = FnTrait::FnOnce.get_id(self.db, krate)?; - let output_assoc_type = - self.db.trait_data(fn_once_trait).associated_type_by_name(&name![Output])?; - let generic_params = generics(self.db.upcast(), fn_once_trait.into()); - if generic_params.len() != 2 { - return None; - } - - let mut param_builder = Substs::builder(num_args); - let mut arg_tys = vec![]; - for _ in 0..num_args { - let arg = self.table.new_type_var(); - param_builder = param_builder.push(arg.clone()); - arg_tys.push(arg); - } - let parameters = param_builder.build(); - let arg_ty = Ty::Apply(ApplicationTy { - ctor: TypeCtor::Tuple { cardinality: num_args as u16 }, - parameters, - }); - let substs = - Substs::build_for_generics(&generic_params).push(ty.clone()).push(arg_ty).build(); - - let trait_env = Arc::clone(&self.trait_env); - let implements_fn_trait = - Obligation::Trait(TraitRef { trait_: fn_once_trait, substs: substs.clone() 
}); - let goal = self.canonicalizer().canonicalize_obligation(InEnvironment { - value: implements_fn_trait.clone(), - environment: trait_env, - }); - if self.db.trait_solve(krate, goal.value).is_some() { - self.obligations.push(implements_fn_trait); - let output_proj_ty = - crate::ProjectionTy { associated_ty: output_assoc_type, parameters: substs }; - let return_ty = self.normalize_projection_ty(output_proj_ty); - Some((arg_tys, return_ty)) - } else { - None - } - } - - pub fn callable_sig(&mut self, ty: &Ty, num_args: usize) -> Option<(Vec, Ty)> { - match ty.callable_sig(self.db) { - Some(sig) => Some((sig.params().to_vec(), sig.ret().clone())), - None => self.callable_sig_from_fn_trait(ty, num_args), - } - } - - fn infer_expr_inner(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty { - let body = Arc::clone(&self.body); // avoid borrow checker problem - let ty = match &body[tgt_expr] { - Expr::Missing => Ty::Unknown, - Expr::If { condition, then_branch, else_branch } => { - // if let is desugared to match, so this is always simple if - self.infer_expr(*condition, &Expectation::has_type(Ty::simple(TypeCtor::Bool))); - - let condition_diverges = mem::replace(&mut self.diverges, Diverges::Maybe); - let mut both_arms_diverge = Diverges::Always; - - let then_ty = self.infer_expr_inner(*then_branch, &expected); - both_arms_diverge &= mem::replace(&mut self.diverges, Diverges::Maybe); - let else_ty = match else_branch { - Some(else_branch) => self.infer_expr_inner(*else_branch, &expected), - None => Ty::unit(), - }; - both_arms_diverge &= self.diverges; - - self.diverges = condition_diverges | both_arms_diverge; - - self.coerce_merge_branch(&then_ty, &else_ty) - } - Expr::Block { statements, tail, .. 
} => { - // FIXME: Breakable block inference - self.infer_block(statements, *tail, expected) - } - Expr::Unsafe { body } => self.infer_expr(*body, expected), - Expr::TryBlock { body } => { - let _inner = self.infer_expr(*body, expected); - // FIXME should be std::result::Result<{inner}, _> - Ty::Unknown - } - Expr::Loop { body, label } => { - self.breakables.push(BreakableContext { - may_break: false, - break_ty: self.table.new_type_var(), - label: label.clone(), - }); - self.infer_expr(*body, &Expectation::has_type(Ty::unit())); - - let ctxt = self.breakables.pop().expect("breakable stack broken"); - if ctxt.may_break { - self.diverges = Diverges::Maybe; - } - - if ctxt.may_break { - ctxt.break_ty - } else { - Ty::simple(TypeCtor::Never) - } - } - Expr::While { condition, body, label } => { - self.breakables.push(BreakableContext { - may_break: false, - break_ty: Ty::Unknown, - label: label.clone(), - }); - // while let is desugared to a match loop, so this is always simple while - self.infer_expr(*condition, &Expectation::has_type(Ty::simple(TypeCtor::Bool))); - self.infer_expr(*body, &Expectation::has_type(Ty::unit())); - let _ctxt = self.breakables.pop().expect("breakable stack broken"); - // the body may not run, so it diverging doesn't mean we diverge - self.diverges = Diverges::Maybe; - Ty::unit() - } - Expr::For { iterable, body, pat, label } => { - let iterable_ty = self.infer_expr(*iterable, &Expectation::none()); - - self.breakables.push(BreakableContext { - may_break: false, - break_ty: Ty::Unknown, - label: label.clone(), - }); - let pat_ty = - self.resolve_associated_type(iterable_ty, self.resolve_into_iter_item()); - - self.infer_pat(*pat, &pat_ty, BindingMode::default()); - - self.infer_expr(*body, &Expectation::has_type(Ty::unit())); - let _ctxt = self.breakables.pop().expect("breakable stack broken"); - // the body may not run, so it diverging doesn't mean we diverge - self.diverges = Diverges::Maybe; - Ty::unit() - } - Expr::Lambda { body, args, 
ret_type, arg_types } => { - assert_eq!(args.len(), arg_types.len()); - - let mut sig_tys = Vec::new(); - - // collect explicitly written argument types - for arg_type in arg_types.iter() { - let arg_ty = if let Some(type_ref) = arg_type { - self.make_ty(type_ref) - } else { - self.table.new_type_var() - }; - sig_tys.push(arg_ty); - } - - // add return type - let ret_ty = match ret_type { - Some(type_ref) => self.make_ty(type_ref), - None => self.table.new_type_var(), - }; - sig_tys.push(ret_ty.clone()); - let sig_ty = Ty::apply( - TypeCtor::FnPtr { num_args: sig_tys.len() as u16 - 1, is_varargs: false }, - Substs(sig_tys.clone().into()), - ); - let closure_ty = - Ty::apply_one(TypeCtor::Closure { def: self.owner, expr: tgt_expr }, sig_ty); - - // Eagerly try to relate the closure type with the expected - // type, otherwise we often won't have enough information to - // infer the body. - self.coerce(&closure_ty, &expected.ty); - - // Now go through the argument patterns - for (arg_pat, arg_ty) in args.iter().zip(sig_tys) { - let resolved = self.resolve_ty_as_possible(arg_ty); - self.infer_pat(*arg_pat, &resolved, BindingMode::default()); - } - - let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe); - let prev_ret_ty = mem::replace(&mut self.return_ty, ret_ty.clone()); - - self.infer_expr_coerce(*body, &Expectation::has_type(ret_ty)); - - self.diverges = prev_diverges; - self.return_ty = prev_ret_ty; - - closure_ty - } - Expr::Call { callee, args } => { - let callee_ty = self.infer_expr(*callee, &Expectation::none()); - let canonicalized = self.canonicalizer().canonicalize_ty(callee_ty.clone()); - let mut derefs = autoderef( - self.db, - self.resolver.krate(), - InEnvironment { - value: canonicalized.value.clone(), - environment: self.trait_env.clone(), - }, - ); - let (param_tys, ret_ty): (Vec, Ty) = derefs - .find_map(|callee_deref_ty| { - self.callable_sig( - &canonicalized.decanonicalize_ty(callee_deref_ty.value), - args.len(), - ) - }) - 
.unwrap_or((Vec::new(), Ty::Unknown)); - self.register_obligations_for_call(&callee_ty); - self.check_call_arguments(args, ¶m_tys); - self.normalize_associated_types_in(ret_ty) - } - Expr::MethodCall { receiver, args, method_name, generic_args } => self - .infer_method_call(tgt_expr, *receiver, &args, &method_name, generic_args.as_ref()), - Expr::Match { expr, arms } => { - let input_ty = self.infer_expr(*expr, &Expectation::none()); - - let mut result_ty = if arms.is_empty() { - Ty::simple(TypeCtor::Never) - } else { - self.table.new_type_var() - }; - - let matchee_diverges = self.diverges; - let mut all_arms_diverge = Diverges::Always; - - for arm in arms { - self.diverges = Diverges::Maybe; - let _pat_ty = self.infer_pat(arm.pat, &input_ty, BindingMode::default()); - if let Some(guard_expr) = arm.guard { - self.infer_expr( - guard_expr, - &Expectation::has_type(Ty::simple(TypeCtor::Bool)), - ); - } - - let arm_ty = self.infer_expr_inner(arm.expr, &expected); - all_arms_diverge &= self.diverges; - result_ty = self.coerce_merge_branch(&result_ty, &arm_ty); - } - - self.diverges = matchee_diverges | all_arms_diverge; - - result_ty - } - Expr::Path(p) => { - // FIXME this could be more efficient... - let resolver = resolver_for_expr(self.db.upcast(), self.owner, tgt_expr); - self.infer_path(&resolver, p, tgt_expr.into()).unwrap_or(Ty::Unknown) - } - Expr::Continue { .. 
} => Ty::simple(TypeCtor::Never), - Expr::Break { expr, label } => { - let val_ty = if let Some(expr) = expr { - self.infer_expr(*expr, &Expectation::none()) - } else { - Ty::unit() - }; - - let last_ty = - if let Some(ctxt) = find_breakable(&mut self.breakables, label.as_ref()) { - ctxt.break_ty.clone() - } else { - Ty::Unknown - }; - - let merged_type = self.coerce_merge_branch(&last_ty, &val_ty); - - if let Some(ctxt) = find_breakable(&mut self.breakables, label.as_ref()) { - ctxt.break_ty = merged_type; - ctxt.may_break = true; - } else { - self.push_diagnostic(InferenceDiagnostic::BreakOutsideOfLoop { - expr: tgt_expr, - }); - } - - Ty::simple(TypeCtor::Never) - } - Expr::Return { expr } => { - if let Some(expr) = expr { - self.infer_expr_coerce(*expr, &Expectation::has_type(self.return_ty.clone())); - } else { - let unit = Ty::unit(); - self.coerce(&unit, &self.return_ty.clone()); - } - Ty::simple(TypeCtor::Never) - } - Expr::RecordLit { path, fields, spread } => { - let (ty, def_id) = self.resolve_variant(path.as_ref()); - if let Some(variant) = def_id { - self.write_variant_resolution(tgt_expr.into(), variant); - } - - self.unify(&ty, &expected.ty); - - let substs = ty.substs().unwrap_or_else(Substs::empty); - let field_types = def_id.map(|it| self.db.field_types(it)).unwrap_or_default(); - let variant_data = def_id.map(|it| variant_data(self.db.upcast(), it)); - for (field_idx, field) in fields.iter().enumerate() { - let field_def = - variant_data.as_ref().and_then(|it| match it.field(&field.name) { - Some(local_id) => Some(FieldId { parent: def_id.unwrap(), local_id }), - None => { - self.push_diagnostic(InferenceDiagnostic::NoSuchField { - expr: tgt_expr, - field: field_idx, - }); - None - } - }); - if let Some(field_def) = field_def { - self.result.record_field_resolutions.insert(field.expr, field_def); - } - let field_ty = field_def - .map_or(Ty::Unknown, |it| field_types[it.local_id].clone().subst(&substs)); - self.infer_expr_coerce(field.expr, 
&Expectation::has_type(field_ty)); - } - if let Some(expr) = spread { - self.infer_expr(*expr, &Expectation::has_type(ty.clone())); - } - ty - } - Expr::Field { expr, name } => { - let receiver_ty = self.infer_expr_inner(*expr, &Expectation::none()); - let canonicalized = self.canonicalizer().canonicalize_ty(receiver_ty); - let ty = autoderef::autoderef( - self.db, - self.resolver.krate(), - InEnvironment { - value: canonicalized.value.clone(), - environment: self.trait_env.clone(), - }, - ) - .find_map(|derefed_ty| match canonicalized.decanonicalize_ty(derefed_ty.value) { - Ty::Apply(a_ty) => match a_ty.ctor { - TypeCtor::Tuple { .. } => name - .as_tuple_index() - .and_then(|idx| a_ty.parameters.0.get(idx).cloned()), - TypeCtor::Adt(AdtId::StructId(s)) => { - self.db.struct_data(s).variant_data.field(name).map(|local_id| { - let field = FieldId { parent: s.into(), local_id }; - self.write_field_resolution(tgt_expr, field); - self.db.field_types(s.into())[field.local_id] - .clone() - .subst(&a_ty.parameters) - }) - } - TypeCtor::Adt(AdtId::UnionId(u)) => { - self.db.union_data(u).variant_data.field(name).map(|local_id| { - let field = FieldId { parent: u.into(), local_id }; - self.write_field_resolution(tgt_expr, field); - self.db.field_types(u.into())[field.local_id] - .clone() - .subst(&a_ty.parameters) - }) - } - _ => None, - }, - _ => None, - }) - .unwrap_or(Ty::Unknown); - let ty = self.insert_type_vars(ty); - self.normalize_associated_types_in(ty) - } - Expr::Await { expr } => { - let inner_ty = self.infer_expr_inner(*expr, &Expectation::none()); - self.resolve_associated_type(inner_ty, self.resolve_future_future_output()) - } - Expr::Try { expr } => { - let inner_ty = self.infer_expr_inner(*expr, &Expectation::none()); - self.resolve_associated_type(inner_ty, self.resolve_ops_try_ok()) - } - Expr::Cast { expr, type_ref } => { - let _inner_ty = self.infer_expr_inner(*expr, &Expectation::none()); - let cast_ty = self.make_ty(type_ref); - // FIXME check the 
cast... - cast_ty - } - Expr::Ref { expr, rawness, mutability } => { - let expectation = if let Some((exp_inner, exp_rawness, exp_mutability)) = - &expected.ty.as_reference_or_ptr() - { - if *exp_mutability == Mutability::Mut && *mutability == Mutability::Shared { - // FIXME: throw type error - expected mut reference but found shared ref, - // which cannot be coerced - } - if *exp_rawness == Rawness::Ref && *rawness == Rawness::RawPtr { - // FIXME: throw type error - expected reference but found ptr, - // which cannot be coerced - } - Expectation::rvalue_hint(Ty::clone(exp_inner)) - } else { - Expectation::none() - }; - let inner_ty = self.infer_expr_inner(*expr, &expectation); - let ty = match rawness { - Rawness::RawPtr => TypeCtor::RawPtr(*mutability), - Rawness::Ref => TypeCtor::Ref(*mutability), - }; - Ty::apply_one(ty, inner_ty) - } - Expr::Box { expr } => { - let inner_ty = self.infer_expr_inner(*expr, &Expectation::none()); - if let Some(box_) = self.resolve_boxed_box() { - Ty::apply_one(TypeCtor::Adt(box_), inner_ty) - } else { - Ty::Unknown - } - } - Expr::UnaryOp { expr, op } => { - let inner_ty = self.infer_expr_inner(*expr, &Expectation::none()); - match op { - UnaryOp::Deref => match self.resolver.krate() { - Some(krate) => { - let canonicalized = self.canonicalizer().canonicalize_ty(inner_ty); - match autoderef::deref( - self.db, - krate, - InEnvironment { - value: &canonicalized.value, - environment: self.trait_env.clone(), - }, - ) { - Some(derefed_ty) => { - canonicalized.decanonicalize_ty(derefed_ty.value) - } - None => Ty::Unknown, - } - } - None => Ty::Unknown, - }, - UnaryOp::Neg => { - match &inner_ty { - // Fast path for builtins - Ty::Apply(ApplicationTy { - ctor: TypeCtor::Int(IntTy { signedness: Signedness::Signed, .. }), - .. - }) - | Ty::Apply(ApplicationTy { ctor: TypeCtor::Float(_), .. 
}) - | Ty::Infer(InferTy::IntVar(..)) - | Ty::Infer(InferTy::FloatVar(..)) => inner_ty, - // Otherwise we resolve via the std::ops::Neg trait - _ => self - .resolve_associated_type(inner_ty, self.resolve_ops_neg_output()), - } - } - UnaryOp::Not => { - match &inner_ty { - // Fast path for builtins - Ty::Apply(ApplicationTy { ctor: TypeCtor::Bool, .. }) - | Ty::Apply(ApplicationTy { ctor: TypeCtor::Int(_), .. }) - | Ty::Infer(InferTy::IntVar(..)) => inner_ty, - // Otherwise we resolve via the std::ops::Not trait - _ => self - .resolve_associated_type(inner_ty, self.resolve_ops_not_output()), - } - } - } - } - Expr::BinaryOp { lhs, rhs, op } => match op { - Some(op) => { - let lhs_expectation = match op { - BinaryOp::LogicOp(..) => Expectation::has_type(Ty::simple(TypeCtor::Bool)), - _ => Expectation::none(), - }; - let lhs_ty = self.infer_expr(*lhs, &lhs_expectation); - // FIXME: find implementation of trait corresponding to operation - // symbol and resolve associated `Output` type - let rhs_expectation = op::binary_op_rhs_expectation(*op, lhs_ty.clone()); - let rhs_ty = self.infer_expr(*rhs, &Expectation::has_type(rhs_expectation)); - - // FIXME: similar as above, return ty is often associated trait type - op::binary_op_return_ty(*op, lhs_ty, rhs_ty) - } - _ => Ty::Unknown, - }, - Expr::Range { lhs, rhs, range_type } => { - let lhs_ty = lhs.map(|e| self.infer_expr_inner(e, &Expectation::none())); - let rhs_expect = lhs_ty - .as_ref() - .map_or_else(Expectation::none, |ty| Expectation::has_type(ty.clone())); - let rhs_ty = rhs.map(|e| self.infer_expr(e, &rhs_expect)); - match (range_type, lhs_ty, rhs_ty) { - (RangeOp::Exclusive, None, None) => match self.resolve_range_full() { - Some(adt) => Ty::simple(TypeCtor::Adt(adt)), - None => Ty::Unknown, - }, - (RangeOp::Exclusive, None, Some(ty)) => match self.resolve_range_to() { - Some(adt) => Ty::apply_one(TypeCtor::Adt(adt), ty), - None => Ty::Unknown, - }, - (RangeOp::Inclusive, None, Some(ty)) => { - match 
self.resolve_range_to_inclusive() { - Some(adt) => Ty::apply_one(TypeCtor::Adt(adt), ty), - None => Ty::Unknown, - } - } - (RangeOp::Exclusive, Some(_), Some(ty)) => match self.resolve_range() { - Some(adt) => Ty::apply_one(TypeCtor::Adt(adt), ty), - None => Ty::Unknown, - }, - (RangeOp::Inclusive, Some(_), Some(ty)) => { - match self.resolve_range_inclusive() { - Some(adt) => Ty::apply_one(TypeCtor::Adt(adt), ty), - None => Ty::Unknown, - } - } - (RangeOp::Exclusive, Some(ty), None) => match self.resolve_range_from() { - Some(adt) => Ty::apply_one(TypeCtor::Adt(adt), ty), - None => Ty::Unknown, - }, - (RangeOp::Inclusive, _, None) => Ty::Unknown, - } - } - Expr::Index { base, index } => { - let base_ty = self.infer_expr_inner(*base, &Expectation::none()); - let index_ty = self.infer_expr(*index, &Expectation::none()); - - if let (Some(index_trait), Some(krate)) = - (self.resolve_ops_index(), self.resolver.krate()) - { - let canonicalized = self.canonicalizer().canonicalize_ty(base_ty); - let self_ty = method_resolution::resolve_indexing_op( - self.db, - &canonicalized.value, - self.trait_env.clone(), - krate, - index_trait, - ); - let self_ty = - self_ty.map_or(Ty::Unknown, |t| canonicalized.decanonicalize_ty(t.value)); - self.resolve_associated_type_with_params( - self_ty, - self.resolve_ops_index_output(), - &[index_ty], - ) - } else { - Ty::Unknown - } - } - Expr::Tuple { exprs } => { - let mut tys = match &expected.ty { - ty_app!(TypeCtor::Tuple { .. 
}, st) => st - .iter() - .cloned() - .chain(repeat_with(|| self.table.new_type_var())) - .take(exprs.len()) - .collect::>(), - _ => (0..exprs.len()).map(|_| self.table.new_type_var()).collect(), - }; - - for (expr, ty) in exprs.iter().zip(tys.iter_mut()) { - self.infer_expr_coerce(*expr, &Expectation::has_type(ty.clone())); - } - - Ty::apply(TypeCtor::Tuple { cardinality: tys.len() as u16 }, Substs(tys.into())) - } - Expr::Array(array) => { - let elem_ty = match &expected.ty { - ty_app!(TypeCtor::Array, st) | ty_app!(TypeCtor::Slice, st) => { - st.as_single().clone() - } - _ => self.table.new_type_var(), - }; - - match array { - Array::ElementList(items) => { - for expr in items.iter() { - self.infer_expr_coerce(*expr, &Expectation::has_type(elem_ty.clone())); - } - } - Array::Repeat { initializer, repeat } => { - self.infer_expr_coerce( - *initializer, - &Expectation::has_type(elem_ty.clone()), - ); - self.infer_expr( - *repeat, - &Expectation::has_type(Ty::simple(TypeCtor::Int(IntTy::usize()))), - ); - } - } - - Ty::apply_one(TypeCtor::Array, elem_ty) - } - Expr::Literal(lit) => match lit { - Literal::Bool(..) => Ty::simple(TypeCtor::Bool), - Literal::String(..) => { - Ty::apply_one(TypeCtor::Ref(Mutability::Shared), Ty::simple(TypeCtor::Str)) - } - Literal::ByteString(..) => { - let byte_type = Ty::simple(TypeCtor::Int(IntTy::u8())); - let array_type = Ty::apply_one(TypeCtor::Array, byte_type); - Ty::apply_one(TypeCtor::Ref(Mutability::Shared), array_type) - } - Literal::Char(..) 
=> Ty::simple(TypeCtor::Char), - Literal::Int(_v, ty) => match ty { - Some(int_ty) => Ty::simple(TypeCtor::Int((*int_ty).into())), - None => self.table.new_integer_var(), - }, - Literal::Float(_v, ty) => match ty { - Some(float_ty) => Ty::simple(TypeCtor::Float((*float_ty).into())), - None => self.table.new_float_var(), - }, - }, - }; - // use a new type variable if we got Ty::Unknown here - let ty = self.insert_type_vars_shallow(ty); - let ty = self.resolve_ty_as_possible(ty); - self.write_expr_ty(tgt_expr, ty.clone()); - ty - } - - fn infer_block( - &mut self, - statements: &[Statement], - tail: Option, - expected: &Expectation, - ) -> Ty { - for stmt in statements { - match stmt { - Statement::Let { pat, type_ref, initializer } => { - let decl_ty = - type_ref.as_ref().map(|tr| self.make_ty(tr)).unwrap_or(Ty::Unknown); - - // Always use the declared type when specified - let mut ty = decl_ty.clone(); - - if let Some(expr) = initializer { - let actual_ty = - self.infer_expr_coerce(*expr, &Expectation::has_type(decl_ty.clone())); - if decl_ty == Ty::Unknown { - ty = actual_ty; - } - } - - let ty = self.resolve_ty_as_possible(ty); - self.infer_pat(*pat, &ty, BindingMode::default()); - } - Statement::Expr(expr) => { - self.infer_expr(*expr, &Expectation::none()); - } - } - } - - let ty = if let Some(expr) = tail { - self.infer_expr_coerce(expr, expected) - } else { - // Citing rustc: if there is no explicit tail expression, - // that is typically equivalent to a tail expression - // of `()` -- except if the block diverges. In that - // case, there is no value supplied from the tail - // expression (assuming there are no other breaks, - // this implies that the type of the block will be - // `!`). 
- if self.diverges.is_always() { - // we don't even make an attempt at coercion - self.table.new_maybe_never_type_var() - } else { - self.coerce(&Ty::unit(), expected.coercion_target()); - Ty::unit() - } - }; - ty - } - - fn infer_method_call( - &mut self, - tgt_expr: ExprId, - receiver: ExprId, - args: &[ExprId], - method_name: &Name, - generic_args: Option<&GenericArgs>, - ) -> Ty { - let receiver_ty = self.infer_expr(receiver, &Expectation::none()); - let canonicalized_receiver = self.canonicalizer().canonicalize_ty(receiver_ty.clone()); - - let traits_in_scope = self.resolver.traits_in_scope(self.db.upcast()); - - let resolved = self.resolver.krate().and_then(|krate| { - method_resolution::lookup_method( - &canonicalized_receiver.value, - self.db, - self.trait_env.clone(), - krate, - &traits_in_scope, - method_name, - ) - }); - let (derefed_receiver_ty, method_ty, def_generics) = match resolved { - Some((ty, func)) => { - let ty = canonicalized_receiver.decanonicalize_ty(ty); - self.write_method_resolution(tgt_expr, func); - (ty, self.db.value_ty(func.into()), Some(generics(self.db.upcast(), func.into()))) - } - None => (receiver_ty, Binders::new(0, Ty::Unknown), None), - }; - let substs = self.substs_for_method_call(def_generics, generic_args, &derefed_receiver_ty); - let method_ty = method_ty.subst(&substs); - let method_ty = self.insert_type_vars(method_ty); - self.register_obligations_for_call(&method_ty); - let (expected_receiver_ty, param_tys, ret_ty) = match method_ty.callable_sig(self.db) { - Some(sig) => { - if !sig.params().is_empty() { - (sig.params()[0].clone(), sig.params()[1..].to_vec(), sig.ret().clone()) - } else { - (Ty::Unknown, Vec::new(), sig.ret().clone()) - } - } - None => (Ty::Unknown, Vec::new(), Ty::Unknown), - }; - // Apply autoref so the below unification works correctly - // FIXME: return correct autorefs from lookup_method - let actual_receiver_ty = match expected_receiver_ty.as_reference() { - Some((_, mutability)) => 
Ty::apply_one(TypeCtor::Ref(mutability), derefed_receiver_ty), - _ => derefed_receiver_ty, - }; - self.unify(&expected_receiver_ty, &actual_receiver_ty); - - self.check_call_arguments(args, ¶m_tys); - self.normalize_associated_types_in(ret_ty) - } - - fn check_call_arguments(&mut self, args: &[ExprId], param_tys: &[Ty]) { - // Quoting https://github.com/rust-lang/rust/blob/6ef275e6c3cb1384ec78128eceeb4963ff788dca/src/librustc_typeck/check/mod.rs#L3325 -- - // We do this in a pretty awful way: first we type-check any arguments - // that are not closures, then we type-check the closures. This is so - // that we have more information about the types of arguments when we - // type-check the functions. This isn't really the right way to do this. - for &check_closures in &[false, true] { - let param_iter = param_tys.iter().cloned().chain(repeat(Ty::Unknown)); - for (&arg, param_ty) in args.iter().zip(param_iter) { - let is_closure = matches!(&self.body[arg], Expr::Lambda { .. }); - if is_closure != check_closures { - continue; - } - - let param_ty = self.normalize_associated_types_in(param_ty); - self.infer_expr_coerce(arg, &Expectation::has_type(param_ty.clone())); - } - } - } - - fn substs_for_method_call( - &mut self, - def_generics: Option, - generic_args: Option<&GenericArgs>, - receiver_ty: &Ty, - ) -> Substs { - let (parent_params, self_params, type_params, impl_trait_params) = - def_generics.as_ref().map_or((0, 0, 0, 0), |g| g.provenance_split()); - assert_eq!(self_params, 0); // method shouldn't have another Self param - let total_len = parent_params + type_params + impl_trait_params; - let mut substs = Vec::with_capacity(total_len); - // Parent arguments are unknown, except for the receiver type - if let Some(parent_generics) = def_generics.as_ref().map(|p| p.iter_parent()) { - for (_id, param) in parent_generics { - if param.provenance == hir_def::generics::TypeParamProvenance::TraitSelf { - substs.push(receiver_ty.clone()); - } else { - 
substs.push(Ty::Unknown); - } - } - } - // handle provided type arguments - if let Some(generic_args) = generic_args { - // if args are provided, it should be all of them, but we can't rely on that - for arg in generic_args.args.iter().take(type_params) { - match arg { - GenericArg::Type(type_ref) => { - let ty = self.make_ty(type_ref); - substs.push(ty); - } - } - } - }; - let supplied_params = substs.len(); - for _ in supplied_params..total_len { - substs.push(Ty::Unknown); - } - assert_eq!(substs.len(), total_len); - Substs(substs.into()) - } - - fn register_obligations_for_call(&mut self, callable_ty: &Ty) { - if let Ty::Apply(a_ty) = callable_ty { - if let TypeCtor::FnDef(def) = a_ty.ctor { - let generic_predicates = self.db.generic_predicates(def.into()); - for predicate in generic_predicates.iter() { - let predicate = predicate.clone().subst(&a_ty.parameters); - if let Some(obligation) = Obligation::from_predicate(predicate) { - self.obligations.push(obligation); - } - } - // add obligation for trait implementation, if this is a trait method - match def { - CallableDefId::FunctionId(f) => { - if let AssocContainerId::TraitId(trait_) = - f.lookup(self.db.upcast()).container - { - // construct a TraitDef - let substs = a_ty - .parameters - .prefix(generics(self.db.upcast(), trait_.into()).len()); - self.obligations.push(Obligation::Trait(TraitRef { trait_, substs })); - } - } - CallableDefId::StructId(_) | CallableDefId::EnumVariantId(_) => {} - } - } - } - } -} diff --git a/crates/ra_hir_ty/src/lib.rs b/crates/ra_hir_ty/src/lib.rs deleted file mode 100644 index 7698cb0d4b..0000000000 --- a/crates/ra_hir_ty/src/lib.rs +++ /dev/null @@ -1,1078 +0,0 @@ -//! The type system. We currently use this to infer types for completion, hover -//! information and various assists. - -#[allow(unused)] -macro_rules! 
eprintln { - ($($tt:tt)*) => { stdx::eprintln!($($tt)*) }; -} - -mod autoderef; -pub mod primitive; -pub mod traits; -pub mod method_resolution; -mod op; -mod lower; -pub(crate) mod infer; -pub(crate) mod utils; - -pub mod display; -pub mod db; -pub mod diagnostics; - -#[cfg(test)] -mod tests; -#[cfg(test)] -mod test_db; - -use std::{iter, mem, ops::Deref, sync::Arc}; - -use hir_def::{ - expr::ExprId, - type_ref::{Mutability, Rawness}, - AdtId, AssocContainerId, DefWithBodyId, GenericDefId, HasModule, Lookup, TraitId, TypeAliasId, - TypeParamId, -}; -use itertools::Itertools; -use ra_db::{salsa, CrateId}; - -use crate::{ - db::HirDatabase, - display::HirDisplay, - primitive::{FloatTy, IntTy}, - utils::{generics, make_mut_slice, Generics}, -}; - -pub use autoderef::autoderef; -pub use infer::{InferTy, InferenceResult}; -pub use lower::CallableDefId; -pub use lower::{ - associated_type_shorthand_candidates, callable_item_sig, ImplTraitLoweringMode, TyDefId, - TyLoweringContext, ValueTyDefId, -}; -pub use traits::{InEnvironment, Obligation, ProjectionPredicate, TraitEnvironment}; - -pub use chalk_ir::{BoundVar, DebruijnIndex}; - -/// A type constructor or type name: this might be something like the primitive -/// type `bool`, a struct like `Vec`, or things like function pointers or -/// tuples. -#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)] -pub enum TypeCtor { - /// The primitive boolean type. Written as `bool`. - Bool, - - /// The primitive character type; holds a Unicode scalar value - /// (a non-surrogate code point). Written as `char`. - Char, - - /// A primitive integer type. For example, `i32`. - Int(IntTy), - - /// A primitive floating-point type. For example, `f64`. - Float(FloatTy), - - /// Structures, enumerations and unions. - Adt(AdtId), - - /// The pointee of a string slice. Written as `str`. - Str, - - /// The pointee of an array slice. Written as `[T]`. - Slice, - - /// An array with the given length. Written as `[T; n]`. 
- Array, - - /// A raw pointer. Written as `*mut T` or `*const T` - RawPtr(Mutability), - - /// A reference; a pointer with an associated lifetime. Written as - /// `&'a mut T` or `&'a T`. - Ref(Mutability), - - /// The anonymous type of a function declaration/definition. Each - /// function has a unique type, which is output (for a function - /// named `foo` returning an `i32`) as `fn() -> i32 {foo}`. - /// - /// This includes tuple struct / enum variant constructors as well. - /// - /// For example the type of `bar` here: - /// - /// ``` - /// fn foo() -> i32 { 1 } - /// let bar = foo; // bar: fn() -> i32 {foo} - /// ``` - FnDef(CallableDefId), - - /// A pointer to a function. Written as `fn() -> i32`. - /// - /// For example the type of `bar` here: - /// - /// ``` - /// fn foo() -> i32 { 1 } - /// let bar: fn() -> i32 = foo; - /// ``` - // FIXME make this a Ty variant like in Chalk - FnPtr { num_args: u16, is_varargs: bool }, - - /// The never type `!`. - Never, - - /// A tuple type. For example, `(i32, bool)`. - Tuple { cardinality: u16 }, - - /// Represents an associated item like `Iterator::Item`. This is used - /// when we have tried to normalize a projection like `T::Item` but - /// couldn't find a better representation. In that case, we generate - /// an **application type** like `(Iterator::Item)`. - AssociatedType(TypeAliasId), - - /// This represents a placeholder for an opaque type in situations where we - /// don't know the hidden type (i.e. currently almost always). This is - /// analogous to the `AssociatedType` type constructor. As with that one, - /// these are only produced by Chalk. - OpaqueType(OpaqueTyId), - - /// The type of a specific closure. - /// - /// The closure signature is stored in a `FnPtr` type in the first type - /// parameter. 
- Closure { def: DefWithBodyId, expr: ExprId }, -} - -impl TypeCtor { - pub fn num_ty_params(self, db: &dyn HirDatabase) -> usize { - match self { - TypeCtor::Bool - | TypeCtor::Char - | TypeCtor::Int(_) - | TypeCtor::Float(_) - | TypeCtor::Str - | TypeCtor::Never => 0, - TypeCtor::Slice - | TypeCtor::Array - | TypeCtor::RawPtr(_) - | TypeCtor::Ref(_) - | TypeCtor::Closure { .. } // 1 param representing the signature of the closure - => 1, - TypeCtor::Adt(adt) => { - let generic_params = generics(db.upcast(), adt.into()); - generic_params.len() - } - TypeCtor::FnDef(callable) => { - let generic_params = generics(db.upcast(), callable.into()); - generic_params.len() - } - TypeCtor::AssociatedType(type_alias) => { - let generic_params = generics(db.upcast(), type_alias.into()); - generic_params.len() - } - TypeCtor::OpaqueType(opaque_ty_id) => { - match opaque_ty_id { - OpaqueTyId::ReturnTypeImplTrait(func, _) => { - let generic_params = generics(db.upcast(), func.into()); - generic_params.len() - } - } - } - TypeCtor::FnPtr { num_args, is_varargs: _ } => num_args as usize + 1, - TypeCtor::Tuple { cardinality } => cardinality as usize, - } - } - - pub fn krate(self, db: &dyn HirDatabase) -> Option { - match self { - TypeCtor::Bool - | TypeCtor::Char - | TypeCtor::Int(_) - | TypeCtor::Float(_) - | TypeCtor::Str - | TypeCtor::Never - | TypeCtor::Slice - | TypeCtor::Array - | TypeCtor::RawPtr(_) - | TypeCtor::Ref(_) - | TypeCtor::FnPtr { .. } - | TypeCtor::Tuple { .. } => None, - // Closure's krate is irrelevant for coherence I would think? - TypeCtor::Closure { .. 
} => None, - TypeCtor::Adt(adt) => Some(adt.module(db.upcast()).krate), - TypeCtor::FnDef(callable) => Some(callable.krate(db)), - TypeCtor::AssociatedType(type_alias) => { - Some(type_alias.lookup(db.upcast()).module(db.upcast()).krate) - } - TypeCtor::OpaqueType(opaque_ty_id) => match opaque_ty_id { - OpaqueTyId::ReturnTypeImplTrait(func, _) => { - Some(func.lookup(db.upcast()).module(db.upcast()).krate) - } - }, - } - } - - pub fn as_generic_def(self) -> Option { - match self { - TypeCtor::Bool - | TypeCtor::Char - | TypeCtor::Int(_) - | TypeCtor::Float(_) - | TypeCtor::Str - | TypeCtor::Never - | TypeCtor::Slice - | TypeCtor::Array - | TypeCtor::RawPtr(_) - | TypeCtor::Ref(_) - | TypeCtor::FnPtr { .. } - | TypeCtor::Tuple { .. } - | TypeCtor::Closure { .. } => None, - TypeCtor::Adt(adt) => Some(adt.into()), - TypeCtor::FnDef(callable) => Some(callable.into()), - TypeCtor::AssociatedType(type_alias) => Some(type_alias.into()), - TypeCtor::OpaqueType(_impl_trait_id) => None, - } - } -} - -/// A nominal type with (maybe 0) type parameters. This might be a primitive -/// type like `bool`, a struct, tuple, function pointer, reference or -/// several other things. -#[derive(Clone, PartialEq, Eq, Debug, Hash)] -pub struct ApplicationTy { - pub ctor: TypeCtor, - pub parameters: Substs, -} - -#[derive(Clone, PartialEq, Eq, Debug, Hash)] -pub struct OpaqueTy { - pub opaque_ty_id: OpaqueTyId, - pub parameters: Substs, -} - -/// A "projection" type corresponds to an (unnormalized) -/// projection like `>::Foo`. Note that the -/// trait and all its parameters are fully known. 
-#[derive(Clone, PartialEq, Eq, Debug, Hash)] -pub struct ProjectionTy { - pub associated_ty: TypeAliasId, - pub parameters: Substs, -} - -impl ProjectionTy { - pub fn trait_ref(&self, db: &dyn HirDatabase) -> TraitRef { - TraitRef { trait_: self.trait_(db), substs: self.parameters.clone() } - } - - fn trait_(&self, db: &dyn HirDatabase) -> TraitId { - match self.associated_ty.lookup(db.upcast()).container { - AssocContainerId::TraitId(it) => it, - _ => panic!("projection ty without parent trait"), - } - } -} - -impl TypeWalk for ProjectionTy { - fn walk(&self, f: &mut impl FnMut(&Ty)) { - self.parameters.walk(f); - } - - fn walk_mut_binders( - &mut self, - f: &mut impl FnMut(&mut Ty, DebruijnIndex), - binders: DebruijnIndex, - ) { - self.parameters.walk_mut_binders(f, binders); - } -} - -/// A type. -/// -/// See also the `TyKind` enum in rustc (librustc/ty/sty.rs), which represents -/// the same thing (but in a different way). -/// -/// This should be cheap to clone. -#[derive(Clone, PartialEq, Eq, Debug, Hash)] -pub enum Ty { - /// A nominal type with (maybe 0) type parameters. This might be a primitive - /// type like `bool`, a struct, tuple, function pointer, reference or - /// several other things. - Apply(ApplicationTy), - - /// A "projection" type corresponds to an (unnormalized) - /// projection like `>::Foo`. Note that the - /// trait and all its parameters are fully known. - Projection(ProjectionTy), - - /// An opaque type (`impl Trait`). - /// - /// This is currently only used for return type impl trait; each instance of - /// `impl Trait` in a return type gets its own ID. - Opaque(OpaqueTy), - - /// A placeholder for a type parameter; for example, `T` in `fn f(x: T) - /// {}` when we're type-checking the body of that function. In this - /// situation, we know this stands for *some* type, but don't know the exact - /// type. - Placeholder(TypeParamId), - - /// A bound type variable. 
This is used in various places: when representing - /// some polymorphic type like the type of function `fn f`, the type - /// parameters get turned into variables; during trait resolution, inference - /// variables get turned into bound variables and back; and in `Dyn` the - /// `Self` type is represented with a bound variable as well. - Bound(BoundVar), - - /// A type variable used during type checking. - Infer(InferTy), - - /// A trait object (`dyn Trait` or bare `Trait` in pre-2018 Rust). - /// - /// The predicates are quantified over the `Self` type, i.e. `Ty::Bound(0)` - /// represents the `Self` type inside the bounds. This is currently - /// implicit; Chalk has the `Binders` struct to make it explicit, but it - /// didn't seem worth the overhead yet. - Dyn(Arc<[GenericPredicate]>), - - /// A placeholder for a type which could not be computed; this is propagated - /// to avoid useless error messages. Doubles as a placeholder where type - /// variables are inserted before type checking, since we want to try to - /// infer a better type here anyway -- for the IDE use case, we want to try - /// to infer as much as possible even in the presence of type errors. - Unknown, -} - -/// A list of substitutions for generic parameters. 
-#[derive(Clone, PartialEq, Eq, Debug, Hash)] -pub struct Substs(Arc<[Ty]>); - -impl TypeWalk for Substs { - fn walk(&self, f: &mut impl FnMut(&Ty)) { - for t in self.0.iter() { - t.walk(f); - } - } - - fn walk_mut_binders( - &mut self, - f: &mut impl FnMut(&mut Ty, DebruijnIndex), - binders: DebruijnIndex, - ) { - for t in make_mut_slice(&mut self.0) { - t.walk_mut_binders(f, binders); - } - } -} - -impl Substs { - pub fn empty() -> Substs { - Substs(Arc::new([])) - } - - pub fn single(ty: Ty) -> Substs { - Substs(Arc::new([ty])) - } - - pub fn prefix(&self, n: usize) -> Substs { - Substs(self.0[..std::cmp::min(self.0.len(), n)].into()) - } - - pub fn suffix(&self, n: usize) -> Substs { - Substs(self.0[self.0.len() - std::cmp::min(self.0.len(), n)..].into()) - } - - pub fn as_single(&self) -> &Ty { - if self.0.len() != 1 { - panic!("expected substs of len 1, got {:?}", self); - } - &self.0[0] - } - - /// Return Substs that replace each parameter by itself (i.e. `Ty::Param`). - pub(crate) fn type_params_for_generics(generic_params: &Generics) -> Substs { - Substs(generic_params.iter().map(|(id, _)| Ty::Placeholder(id)).collect()) - } - - /// Return Substs that replace each parameter by itself (i.e. `Ty::Param`). - pub fn type_params(db: &dyn HirDatabase, def: impl Into) -> Substs { - let params = generics(db.upcast(), def.into()); - Substs::type_params_for_generics(¶ms) - } - - /// Return Substs that replace each parameter by a bound variable. 
- pub(crate) fn bound_vars(generic_params: &Generics, debruijn: DebruijnIndex) -> Substs { - Substs( - generic_params - .iter() - .enumerate() - .map(|(idx, _)| Ty::Bound(BoundVar::new(debruijn, idx))) - .collect(), - ) - } - - pub fn build_for_def(db: &dyn HirDatabase, def: impl Into) -> SubstsBuilder { - let def = def.into(); - let params = generics(db.upcast(), def); - let param_count = params.len(); - Substs::builder(param_count) - } - - pub(crate) fn build_for_generics(generic_params: &Generics) -> SubstsBuilder { - Substs::builder(generic_params.len()) - } - - pub fn build_for_type_ctor(db: &dyn HirDatabase, type_ctor: TypeCtor) -> SubstsBuilder { - Substs::builder(type_ctor.num_ty_params(db)) - } - - fn builder(param_count: usize) -> SubstsBuilder { - SubstsBuilder { vec: Vec::with_capacity(param_count), param_count } - } -} - -/// Return an index of a parameter in the generic type parameter list by it's id. -pub fn param_idx(db: &dyn HirDatabase, id: TypeParamId) -> Option { - generics(db.upcast(), id.parent).param_idx(id) -} - -#[derive(Debug, Clone)] -pub struct SubstsBuilder { - vec: Vec, - param_count: usize, -} - -impl SubstsBuilder { - pub fn build(self) -> Substs { - assert_eq!(self.vec.len(), self.param_count); - Substs(self.vec.into()) - } - - pub fn push(mut self, ty: Ty) -> Self { - self.vec.push(ty); - self - } - - fn remaining(&self) -> usize { - self.param_count - self.vec.len() - } - - pub fn fill_with_bound_vars(self, debruijn: DebruijnIndex, starting_from: usize) -> Self { - self.fill((starting_from..).map(|idx| Ty::Bound(BoundVar::new(debruijn, idx)))) - } - - pub fn fill_with_unknown(self) -> Self { - self.fill(iter::repeat(Ty::Unknown)) - } - - pub fn fill(mut self, filler: impl Iterator) -> Self { - self.vec.extend(filler.take(self.remaining())); - assert_eq!(self.remaining(), 0); - self - } - - pub fn use_parent_substs(mut self, parent_substs: &Substs) -> Self { - assert!(self.vec.is_empty()); - assert!(parent_substs.len() <= 
self.param_count); - self.vec.extend(parent_substs.iter().cloned()); - self - } -} - -impl Deref for Substs { - type Target = [Ty]; - - fn deref(&self) -> &[Ty] { - &self.0 - } -} - -#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)] -pub struct Binders { - pub num_binders: usize, - pub value: T, -} - -impl Binders { - pub fn new(num_binders: usize, value: T) -> Self { - Self { num_binders, value } - } - - pub fn as_ref(&self) -> Binders<&T> { - Binders { num_binders: self.num_binders, value: &self.value } - } - - pub fn map(self, f: impl FnOnce(T) -> U) -> Binders { - Binders { num_binders: self.num_binders, value: f(self.value) } - } - - pub fn filter_map(self, f: impl FnOnce(T) -> Option) -> Option> { - Some(Binders { num_binders: self.num_binders, value: f(self.value)? }) - } -} - -impl Binders<&T> { - pub fn cloned(&self) -> Binders { - Binders { num_binders: self.num_binders, value: self.value.clone() } - } -} - -impl Binders { - /// Substitutes all variables. - pub fn subst(self, subst: &Substs) -> T { - assert_eq!(subst.len(), self.num_binders); - self.value.subst_bound_vars(subst) - } - - /// Substitutes just a prefix of the variables (shifting the rest). - pub fn subst_prefix(self, subst: &Substs) -> Binders { - assert!(subst.len() < self.num_binders); - Binders::new(self.num_binders - subst.len(), self.value.subst_bound_vars(subst)) - } -} - -impl TypeWalk for Binders { - fn walk(&self, f: &mut impl FnMut(&Ty)) { - self.value.walk(f); - } - - fn walk_mut_binders( - &mut self, - f: &mut impl FnMut(&mut Ty, DebruijnIndex), - binders: DebruijnIndex, - ) { - self.value.walk_mut_binders(f, binders.shifted_in()) - } -} - -/// A trait with type parameters. This includes the `Self`, so this represents a concrete type implementing the trait. -/// Name to be bikeshedded: TraitBound? TraitImplements? -#[derive(Clone, PartialEq, Eq, Debug, Hash)] -pub struct TraitRef { - /// FIXME name? 
- pub trait_: TraitId, - pub substs: Substs, -} - -impl TraitRef { - pub fn self_ty(&self) -> &Ty { - &self.substs[0] - } -} - -impl TypeWalk for TraitRef { - fn walk(&self, f: &mut impl FnMut(&Ty)) { - self.substs.walk(f); - } - - fn walk_mut_binders( - &mut self, - f: &mut impl FnMut(&mut Ty, DebruijnIndex), - binders: DebruijnIndex, - ) { - self.substs.walk_mut_binders(f, binders); - } -} - -/// Like `generics::WherePredicate`, but with resolved types: A condition on the -/// parameters of a generic item. -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum GenericPredicate { - /// The given trait needs to be implemented for its type parameters. - Implemented(TraitRef), - /// An associated type bindings like in `Iterator`. - Projection(ProjectionPredicate), - /// We couldn't resolve the trait reference. (If some type parameters can't - /// be resolved, they will just be Unknown). - Error, -} - -impl GenericPredicate { - pub fn is_error(&self) -> bool { - matches!(self, GenericPredicate::Error) - } - - pub fn is_implemented(&self) -> bool { - matches!(self, GenericPredicate::Implemented(_)) - } - - pub fn trait_ref(&self, db: &dyn HirDatabase) -> Option { - match self { - GenericPredicate::Implemented(tr) => Some(tr.clone()), - GenericPredicate::Projection(proj) => Some(proj.projection_ty.trait_ref(db)), - GenericPredicate::Error => None, - } - } -} - -impl TypeWalk for GenericPredicate { - fn walk(&self, f: &mut impl FnMut(&Ty)) { - match self { - GenericPredicate::Implemented(trait_ref) => trait_ref.walk(f), - GenericPredicate::Projection(projection_pred) => projection_pred.walk(f), - GenericPredicate::Error => {} - } - } - - fn walk_mut_binders( - &mut self, - f: &mut impl FnMut(&mut Ty, DebruijnIndex), - binders: DebruijnIndex, - ) { - match self { - GenericPredicate::Implemented(trait_ref) => trait_ref.walk_mut_binders(f, binders), - GenericPredicate::Projection(projection_pred) => { - projection_pred.walk_mut_binders(f, binders) - } - 
GenericPredicate::Error => {} - } - } -} - -/// Basically a claim (currently not validated / checked) that the contained -/// type / trait ref contains no inference variables; any inference variables it -/// contained have been replaced by bound variables, and `kinds` tells us how -/// many there are and whether they were normal or float/int variables. This is -/// used to erase irrelevant differences between types before using them in -/// queries. -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct Canonical { - pub value: T, - pub kinds: Arc<[TyKind]>, -} - -impl Canonical { - pub fn new(value: T, kinds: impl IntoIterator) -> Self { - Self { value, kinds: kinds.into_iter().collect() } - } -} - -#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] -pub enum TyKind { - General, - Integer, - Float, -} - -/// A function signature as seen by type inference: Several parameter types and -/// one return type. -#[derive(Clone, PartialEq, Eq, Debug)] -pub struct FnSig { - params_and_return: Arc<[Ty]>, - is_varargs: bool, -} - -/// A polymorphic function signature. 
-pub type PolyFnSig = Binders; - -impl FnSig { - pub fn from_params_and_return(mut params: Vec, ret: Ty, is_varargs: bool) -> FnSig { - params.push(ret); - FnSig { params_and_return: params.into(), is_varargs } - } - - pub fn from_fn_ptr_substs(substs: &Substs, is_varargs: bool) -> FnSig { - FnSig { params_and_return: Arc::clone(&substs.0), is_varargs } - } - - pub fn params(&self) -> &[Ty] { - &self.params_and_return[0..self.params_and_return.len() - 1] - } - - pub fn ret(&self) -> &Ty { - &self.params_and_return[self.params_and_return.len() - 1] - } -} - -impl TypeWalk for FnSig { - fn walk(&self, f: &mut impl FnMut(&Ty)) { - for t in self.params_and_return.iter() { - t.walk(f); - } - } - - fn walk_mut_binders( - &mut self, - f: &mut impl FnMut(&mut Ty, DebruijnIndex), - binders: DebruijnIndex, - ) { - for t in make_mut_slice(&mut self.params_and_return) { - t.walk_mut_binders(f, binders); - } - } -} - -impl Ty { - pub fn simple(ctor: TypeCtor) -> Ty { - Ty::Apply(ApplicationTy { ctor, parameters: Substs::empty() }) - } - pub fn apply_one(ctor: TypeCtor, param: Ty) -> Ty { - Ty::Apply(ApplicationTy { ctor, parameters: Substs::single(param) }) - } - pub fn apply(ctor: TypeCtor, parameters: Substs) -> Ty { - Ty::Apply(ApplicationTy { ctor, parameters }) - } - pub fn unit() -> Self { - Ty::apply(TypeCtor::Tuple { cardinality: 0 }, Substs::empty()) - } - pub fn fn_ptr(sig: FnSig) -> Self { - Ty::apply( - TypeCtor::FnPtr { num_args: sig.params().len() as u16, is_varargs: sig.is_varargs }, - Substs(sig.params_and_return), - ) - } - - pub fn as_reference(&self) -> Option<(&Ty, Mutability)> { - match self { - Ty::Apply(ApplicationTy { ctor: TypeCtor::Ref(mutability), parameters }) => { - Some((parameters.as_single(), *mutability)) - } - _ => None, - } - } - - pub fn as_reference_or_ptr(&self) -> Option<(&Ty, Rawness, Mutability)> { - match self { - Ty::Apply(ApplicationTy { ctor: TypeCtor::Ref(mutability), parameters }) => { - Some((parameters.as_single(), Rawness::Ref, 
*mutability)) - } - Ty::Apply(ApplicationTy { ctor: TypeCtor::RawPtr(mutability), parameters }) => { - Some((parameters.as_single(), Rawness::RawPtr, *mutability)) - } - _ => None, - } - } - - pub fn strip_references(&self) -> &Ty { - let mut t: &Ty = self; - - while let Ty::Apply(ApplicationTy { ctor: TypeCtor::Ref(_mutability), parameters }) = t { - t = parameters.as_single(); - } - - t - } - - pub fn as_adt(&self) -> Option<(AdtId, &Substs)> { - match self { - Ty::Apply(ApplicationTy { ctor: TypeCtor::Adt(adt_def), parameters }) => { - Some((*adt_def, parameters)) - } - _ => None, - } - } - - pub fn as_tuple(&self) -> Option<&Substs> { - match self { - Ty::Apply(ApplicationTy { ctor: TypeCtor::Tuple { .. }, parameters }) => { - Some(parameters) - } - _ => None, - } - } - - pub fn is_never(&self) -> bool { - matches!(self, Ty::Apply(ApplicationTy { ctor: TypeCtor::Never, .. })) - } - - /// If this is a `dyn Trait` type, this returns the `Trait` part. - pub fn dyn_trait_ref(&self) -> Option<&TraitRef> { - match self { - Ty::Dyn(bounds) => bounds.get(0).and_then(|b| match b { - GenericPredicate::Implemented(trait_ref) => Some(trait_ref), - _ => None, - }), - _ => None, - } - } - - /// If this is a `dyn Trait`, returns that trait. - pub fn dyn_trait(&self) -> Option { - self.dyn_trait_ref().map(|it| it.trait_) - } - - fn builtin_deref(&self) -> Option { - match self { - Ty::Apply(a_ty) => match a_ty.ctor { - TypeCtor::Ref(..) => Some(Ty::clone(a_ty.parameters.as_single())), - TypeCtor::RawPtr(..) => Some(Ty::clone(a_ty.parameters.as_single())), - _ => None, - }, - _ => None, - } - } - - pub fn callable_sig(&self, db: &dyn HirDatabase) -> Option { - match self { - Ty::Apply(a_ty) => match a_ty.ctor { - TypeCtor::FnPtr { is_varargs, .. } => { - Some(FnSig::from_fn_ptr_substs(&a_ty.parameters, is_varargs)) - } - TypeCtor::FnDef(def) => { - let sig = db.callable_item_signature(def); - Some(sig.subst(&a_ty.parameters)) - } - TypeCtor::Closure { .. 
} => { - let sig_param = &a_ty.parameters[0]; - sig_param.callable_sig(db) - } - _ => None, - }, - _ => None, - } - } - - /// If this is a type with type parameters (an ADT or function), replaces - /// the `Substs` for these type parameters with the given ones. (So e.g. if - /// `self` is `Option<_>` and the substs contain `u32`, we'll have - /// `Option` afterwards.) - pub fn apply_substs(self, substs: Substs) -> Ty { - match self { - Ty::Apply(ApplicationTy { ctor, parameters: previous_substs }) => { - assert_eq!(previous_substs.len(), substs.len()); - Ty::Apply(ApplicationTy { ctor, parameters: substs }) - } - _ => self, - } - } - - /// Returns the type parameters of this type if it has some (i.e. is an ADT - /// or function); so if `self` is `Option`, this returns the `u32`. - pub fn substs(&self) -> Option { - match self { - Ty::Apply(ApplicationTy { parameters, .. }) => Some(parameters.clone()), - _ => None, - } - } - - pub fn impl_trait_bounds(&self, db: &dyn HirDatabase) -> Option> { - match self { - Ty::Opaque(opaque_ty) => { - let predicates = match opaque_ty.opaque_ty_id { - OpaqueTyId::ReturnTypeImplTrait(func, idx) => { - db.return_type_impl_traits(func).map(|it| { - let data = (*it) - .as_ref() - .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone()); - data.subst(&opaque_ty.parameters) - }) - } - }; - - predicates.map(|it| it.value) - } - Ty::Placeholder(id) => { - let generic_params = db.generic_params(id.parent); - let param_data = &generic_params.types[id.local_id]; - match param_data.provenance { - hir_def::generics::TypeParamProvenance::ArgumentImplTrait => { - let predicates = db - .generic_predicates_for_param(*id) - .into_iter() - .map(|pred| pred.value.clone()) - .collect_vec(); - - Some(predicates) - } - _ => None, - } - } - _ => None, - } - } - - pub fn associated_type_parent_trait(&self, db: &dyn HirDatabase) -> Option { - match self { - Ty::Apply(ApplicationTy { ctor: TypeCtor::AssociatedType(type_alias_id), .. 
}) => { - match type_alias_id.lookup(db.upcast()).container { - AssocContainerId::TraitId(trait_id) => Some(trait_id), - _ => None, - } - } - Ty::Projection(projection_ty) => { - match projection_ty.associated_ty.lookup(db.upcast()).container { - AssocContainerId::TraitId(trait_id) => Some(trait_id), - _ => None, - } - } - _ => None, - } - } -} - -/// This allows walking structures that contain types to do something with those -/// types, similar to Chalk's `Fold` trait. -pub trait TypeWalk { - fn walk(&self, f: &mut impl FnMut(&Ty)); - fn walk_mut(&mut self, f: &mut impl FnMut(&mut Ty)) { - self.walk_mut_binders(&mut |ty, _binders| f(ty), DebruijnIndex::INNERMOST); - } - /// Walk the type, counting entered binders. - /// - /// `Ty::Bound` variables use DeBruijn indexing, which means that 0 refers - /// to the innermost binder, 1 to the next, etc.. So when we want to - /// substitute a certain bound variable, we can't just walk the whole type - /// and blindly replace each instance of a certain index; when we 'enter' - /// things that introduce new bound variables, we have to keep track of - /// that. Currently, the only thing that introduces bound variables on our - /// side are `Ty::Dyn` and `Ty::Opaque`, which each introduce a bound - /// variable for the self type. - fn walk_mut_binders( - &mut self, - f: &mut impl FnMut(&mut Ty, DebruijnIndex), - binders: DebruijnIndex, - ); - - fn fold_binders( - mut self, - f: &mut impl FnMut(Ty, DebruijnIndex) -> Ty, - binders: DebruijnIndex, - ) -> Self - where - Self: Sized, - { - self.walk_mut_binders( - &mut |ty_mut, binders| { - let ty = mem::replace(ty_mut, Ty::Unknown); - *ty_mut = f(ty, binders); - }, - binders, - ); - self - } - - fn fold(mut self, f: &mut impl FnMut(Ty) -> Ty) -> Self - where - Self: Sized, - { - self.walk_mut(&mut |ty_mut| { - let ty = mem::replace(ty_mut, Ty::Unknown); - *ty_mut = f(ty); - }); - self - } - - /// Substitutes `Ty::Bound` vars with the given substitution. 
- fn subst_bound_vars(self, substs: &Substs) -> Self - where - Self: Sized, - { - self.subst_bound_vars_at_depth(substs, DebruijnIndex::INNERMOST) - } - - /// Substitutes `Ty::Bound` vars with the given substitution. - fn subst_bound_vars_at_depth(mut self, substs: &Substs, depth: DebruijnIndex) -> Self - where - Self: Sized, - { - self.walk_mut_binders( - &mut |ty, binders| { - if let &mut Ty::Bound(bound) = ty { - if bound.debruijn >= binders { - *ty = substs.0[bound.index].clone().shift_bound_vars(binders); - } - } - }, - depth, - ); - self - } - - /// Shifts up debruijn indices of `Ty::Bound` vars by `n`. - fn shift_bound_vars(self, n: DebruijnIndex) -> Self - where - Self: Sized, - { - self.fold_binders( - &mut |ty, binders| match ty { - Ty::Bound(bound) if bound.debruijn >= binders => { - Ty::Bound(bound.shifted_in_from(n)) - } - ty => ty, - }, - DebruijnIndex::INNERMOST, - ) - } -} - -impl TypeWalk for Ty { - fn walk(&self, f: &mut impl FnMut(&Ty)) { - match self { - Ty::Apply(a_ty) => { - for t in a_ty.parameters.iter() { - t.walk(f); - } - } - Ty::Projection(p_ty) => { - for t in p_ty.parameters.iter() { - t.walk(f); - } - } - Ty::Dyn(predicates) => { - for p in predicates.iter() { - p.walk(f); - } - } - Ty::Opaque(o_ty) => { - for t in o_ty.parameters.iter() { - t.walk(f); - } - } - Ty::Placeholder { .. } | Ty::Bound(_) | Ty::Infer(_) | Ty::Unknown => {} - } - f(self); - } - - fn walk_mut_binders( - &mut self, - f: &mut impl FnMut(&mut Ty, DebruijnIndex), - binders: DebruijnIndex, - ) { - match self { - Ty::Apply(a_ty) => { - a_ty.parameters.walk_mut_binders(f, binders); - } - Ty::Projection(p_ty) => { - p_ty.parameters.walk_mut_binders(f, binders); - } - Ty::Dyn(predicates) => { - for p in make_mut_slice(predicates) { - p.walk_mut_binders(f, binders.shifted_in()); - } - } - Ty::Opaque(o_ty) => { - o_ty.parameters.walk_mut_binders(f, binders); - } - Ty::Placeholder { .. 
} | Ty::Bound(_) | Ty::Infer(_) | Ty::Unknown => {} - } - f(self, binders); - } -} - -impl TypeWalk for Vec { - fn walk(&self, f: &mut impl FnMut(&Ty)) { - for t in self { - t.walk(f); - } - } - fn walk_mut_binders( - &mut self, - f: &mut impl FnMut(&mut Ty, DebruijnIndex), - binders: DebruijnIndex, - ) { - for t in self { - t.walk_mut_binders(f, binders); - } - } -} - -#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)] -pub enum OpaqueTyId { - ReturnTypeImplTrait(hir_def::FunctionId, u16), -} - -#[derive(Clone, PartialEq, Eq, Debug, Hash)] -pub struct ReturnTypeImplTraits { - pub(crate) impl_traits: Vec, -} - -#[derive(Clone, PartialEq, Eq, Debug, Hash)] -pub(crate) struct ReturnTypeImplTrait { - pub bounds: Binders>, -} diff --git a/crates/ra_hir_ty/src/lower.rs b/crates/ra_hir_ty/src/lower.rs deleted file mode 100644 index 7638f167b5..0000000000 --- a/crates/ra_hir_ty/src/lower.rs +++ /dev/null @@ -1,1242 +0,0 @@ -//! Methods for lowering the HIR to types. There are two main cases here: -//! -//! - Lowering a type reference like `&usize` or `Option` to a -//! type: The entry point for this is `Ty::from_hir`. -//! - Building the type for an item: This happens through the `type_for_def` query. -//! -//! This usually involves resolving names, collecting generic arguments etc. 
-use std::{iter, sync::Arc}; - -use hir_def::{ - adt::StructKind, - builtin_type::BuiltinType, - generics::{TypeParamProvenance, WherePredicate, WherePredicateTarget}, - path::{GenericArg, Path, PathSegment, PathSegments}, - resolver::{HasResolver, Resolver, TypeNs}, - type_ref::{TypeBound, TypeRef}, - AdtId, AssocContainerId, AssocItemId, ConstId, EnumId, EnumVariantId, FunctionId, GenericDefId, - HasModule, ImplId, LocalFieldId, Lookup, StaticId, StructId, TraitId, TypeAliasId, TypeParamId, - UnionId, VariantId, -}; -use hir_expand::name::Name; -use ra_arena::map::ArenaMap; -use ra_db::CrateId; -use smallvec::SmallVec; -use stdx::impl_from; -use test_utils::mark; - -use crate::{ - db::HirDatabase, - primitive::{FloatTy, IntTy}, - utils::{ - all_super_trait_refs, associated_type_by_name_including_super_traits, generics, - make_mut_slice, variant_data, - }, - Binders, BoundVar, DebruijnIndex, FnSig, GenericPredicate, OpaqueTy, OpaqueTyId, PolyFnSig, - ProjectionPredicate, ProjectionTy, ReturnTypeImplTrait, ReturnTypeImplTraits, Substs, - TraitEnvironment, TraitRef, Ty, TypeCtor, TypeWalk, -}; - -#[derive(Debug)] -pub struct TyLoweringContext<'a> { - pub db: &'a dyn HirDatabase, - pub resolver: &'a Resolver, - in_binders: DebruijnIndex, - /// Note: Conceptually, it's thinkable that we could be in a location where - /// some type params should be represented as placeholders, and others - /// should be converted to variables. I think in practice, this isn't - /// possible currently, so this should be fine for now. - pub type_param_mode: TypeParamLoweringMode, - pub impl_trait_mode: ImplTraitLoweringMode, - impl_trait_counter: std::cell::Cell, - /// When turning `impl Trait` into opaque types, we have to collect the - /// bounds at the same time to get the IDs correct (without becoming too - /// complicated). 
I don't like using interior mutability (as for the - /// counter), but I've tried and failed to make the lifetimes work for - /// passing around a `&mut TyLoweringContext`. The core problem is that - /// we're grouping the mutable data (the counter and this field) together - /// with the immutable context (the references to the DB and resolver). - /// Splitting this up would be a possible fix. - opaque_type_data: std::cell::RefCell>, -} - -impl<'a> TyLoweringContext<'a> { - pub fn new(db: &'a dyn HirDatabase, resolver: &'a Resolver) -> Self { - let impl_trait_counter = std::cell::Cell::new(0); - let impl_trait_mode = ImplTraitLoweringMode::Disallowed; - let type_param_mode = TypeParamLoweringMode::Placeholder; - let in_binders = DebruijnIndex::INNERMOST; - let opaque_type_data = std::cell::RefCell::new(Vec::new()); - Self { - db, - resolver, - in_binders, - impl_trait_mode, - impl_trait_counter, - type_param_mode, - opaque_type_data, - } - } - - pub fn with_debruijn( - &self, - debruijn: DebruijnIndex, - f: impl FnOnce(&TyLoweringContext) -> T, - ) -> T { - let opaque_ty_data_vec = self.opaque_type_data.replace(Vec::new()); - let new_ctx = Self { - in_binders: debruijn, - impl_trait_counter: std::cell::Cell::new(self.impl_trait_counter.get()), - opaque_type_data: std::cell::RefCell::new(opaque_ty_data_vec), - ..*self - }; - let result = f(&new_ctx); - self.impl_trait_counter.set(new_ctx.impl_trait_counter.get()); - self.opaque_type_data.replace(new_ctx.opaque_type_data.into_inner()); - result - } - - pub fn with_shifted_in( - &self, - debruijn: DebruijnIndex, - f: impl FnOnce(&TyLoweringContext) -> T, - ) -> T { - self.with_debruijn(self.in_binders.shifted_in_from(debruijn), f) - } - - pub fn with_impl_trait_mode(self, impl_trait_mode: ImplTraitLoweringMode) -> Self { - Self { impl_trait_mode, ..self } - } - - pub fn with_type_param_mode(self, type_param_mode: TypeParamLoweringMode) -> Self { - Self { type_param_mode, ..self } - } -} - -#[derive(Copy, Clone, Debug, 
PartialEq, Eq)] -pub enum ImplTraitLoweringMode { - /// `impl Trait` gets lowered into an opaque type that doesn't unify with - /// anything except itself. This is used in places where values flow 'out', - /// i.e. for arguments of the function we're currently checking, and return - /// types of functions we're calling. - Opaque, - /// `impl Trait` gets lowered into a type variable. Used for argument - /// position impl Trait when inside the respective function, since it allows - /// us to support that without Chalk. - Param, - /// `impl Trait` gets lowered into a variable that can unify with some - /// type. This is used in places where values flow 'in', i.e. for arguments - /// of functions we're calling, and the return type of the function we're - /// currently checking. - Variable, - /// `impl Trait` is disallowed and will be an error. - Disallowed, -} - -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub enum TypeParamLoweringMode { - Placeholder, - Variable, -} - -impl Ty { - pub fn from_hir(ctx: &TyLoweringContext<'_>, type_ref: &TypeRef) -> Self { - Ty::from_hir_ext(ctx, type_ref).0 - } - pub fn from_hir_ext(ctx: &TyLoweringContext<'_>, type_ref: &TypeRef) -> (Self, Option) { - let mut res = None; - let ty = match type_ref { - TypeRef::Never => Ty::simple(TypeCtor::Never), - TypeRef::Tuple(inner) => { - let inner_tys: Arc<[Ty]> = inner.iter().map(|tr| Ty::from_hir(ctx, tr)).collect(); - Ty::apply( - TypeCtor::Tuple { cardinality: inner_tys.len() as u16 }, - Substs(inner_tys), - ) - } - TypeRef::Path(path) => { - let (ty, res_) = Ty::from_hir_path(ctx, path); - res = res_; - ty - } - TypeRef::RawPtr(inner, mutability) => { - let inner_ty = Ty::from_hir(ctx, inner); - Ty::apply_one(TypeCtor::RawPtr(*mutability), inner_ty) - } - TypeRef::Array(inner) => { - let inner_ty = Ty::from_hir(ctx, inner); - Ty::apply_one(TypeCtor::Array, inner_ty) - } - TypeRef::Slice(inner) => { - let inner_ty = Ty::from_hir(ctx, inner); - Ty::apply_one(TypeCtor::Slice, inner_ty) - } 
- TypeRef::Reference(inner, mutability) => { - let inner_ty = Ty::from_hir(ctx, inner); - Ty::apply_one(TypeCtor::Ref(*mutability), inner_ty) - } - TypeRef::Placeholder => Ty::Unknown, - TypeRef::Fn(params, is_varargs) => { - let sig = Substs(params.iter().map(|tr| Ty::from_hir(ctx, tr)).collect()); - Ty::apply( - TypeCtor::FnPtr { num_args: sig.len() as u16 - 1, is_varargs: *is_varargs }, - sig, - ) - } - TypeRef::DynTrait(bounds) => { - let self_ty = Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, 0)); - let predicates = ctx.with_shifted_in(DebruijnIndex::ONE, |ctx| { - bounds - .iter() - .flat_map(|b| GenericPredicate::from_type_bound(ctx, b, self_ty.clone())) - .collect() - }); - Ty::Dyn(predicates) - } - TypeRef::ImplTrait(bounds) => { - match ctx.impl_trait_mode { - ImplTraitLoweringMode::Opaque => { - let idx = ctx.impl_trait_counter.get(); - ctx.impl_trait_counter.set(idx + 1); - - assert!(idx as usize == ctx.opaque_type_data.borrow().len()); - // this dance is to make sure the data is in the right - // place even if we encounter more opaque types while - // lowering the bounds - ctx.opaque_type_data - .borrow_mut() - .push(ReturnTypeImplTrait { bounds: Binders::new(1, Vec::new()) }); - // We don't want to lower the bounds inside the binders - // we're currently in, because they don't end up inside - // those binders. E.g. when we have `impl Trait>`, the `impl OtherTrait` can't refer - // to the self parameter from `impl Trait`, and the - // bounds aren't actually stored nested within each - // other, but separately. So if the `T` refers to a type - // parameter of the outer function, it's just one binder - // away instead of two. 
- let actual_opaque_type_data = ctx - .with_debruijn(DebruijnIndex::INNERMOST, |ctx| { - ReturnTypeImplTrait::from_hir(ctx, &bounds) - }); - ctx.opaque_type_data.borrow_mut()[idx as usize] = actual_opaque_type_data; - - let func = match ctx.resolver.generic_def() { - Some(GenericDefId::FunctionId(f)) => f, - _ => panic!("opaque impl trait lowering in non-function"), - }; - let impl_trait_id = OpaqueTyId::ReturnTypeImplTrait(func, idx); - let generics = generics(ctx.db.upcast(), func.into()); - let parameters = Substs::bound_vars(&generics, ctx.in_binders); - Ty::Opaque(OpaqueTy { opaque_ty_id: impl_trait_id, parameters }) - } - ImplTraitLoweringMode::Param => { - let idx = ctx.impl_trait_counter.get(); - // FIXME we're probably doing something wrong here - ctx.impl_trait_counter.set(idx + count_impl_traits(type_ref) as u16); - if let Some(def) = ctx.resolver.generic_def() { - let generics = generics(ctx.db.upcast(), def); - let param = generics - .iter() - .filter(|(_, data)| { - data.provenance == TypeParamProvenance::ArgumentImplTrait - }) - .nth(idx as usize) - .map_or(Ty::Unknown, |(id, _)| Ty::Placeholder(id)); - param - } else { - Ty::Unknown - } - } - ImplTraitLoweringMode::Variable => { - let idx = ctx.impl_trait_counter.get(); - // FIXME we're probably doing something wrong here - ctx.impl_trait_counter.set(idx + count_impl_traits(type_ref) as u16); - let (parent_params, self_params, list_params, _impl_trait_params) = - if let Some(def) = ctx.resolver.generic_def() { - let generics = generics(ctx.db.upcast(), def); - generics.provenance_split() - } else { - (0, 0, 0, 0) - }; - Ty::Bound(BoundVar::new( - ctx.in_binders, - idx as usize + parent_params + self_params + list_params, - )) - } - ImplTraitLoweringMode::Disallowed => { - // FIXME: report error - Ty::Unknown - } - } - } - TypeRef::Error => Ty::Unknown, - }; - (ty, res) - } - - /// This is only for `generic_predicates_for_param`, where we can't just - /// lower the self types of the predicates since 
that could lead to cycles. - /// So we just check here if the `type_ref` resolves to a generic param, and which. - fn from_hir_only_param(ctx: &TyLoweringContext<'_>, type_ref: &TypeRef) -> Option { - let path = match type_ref { - TypeRef::Path(path) => path, - _ => return None, - }; - if path.type_anchor().is_some() { - return None; - } - if path.segments().len() > 1 { - return None; - } - let resolution = - match ctx.resolver.resolve_path_in_type_ns(ctx.db.upcast(), path.mod_path()) { - Some((it, None)) => it, - _ => return None, - }; - if let TypeNs::GenericParam(param_id) = resolution { - Some(param_id) - } else { - None - } - } - - pub(crate) fn from_type_relative_path( - ctx: &TyLoweringContext<'_>, - ty: Ty, - // We need the original resolution to lower `Self::AssocTy` correctly - res: Option, - remaining_segments: PathSegments<'_>, - ) -> (Ty, Option) { - if remaining_segments.len() == 1 { - // resolve unselected assoc types - let segment = remaining_segments.first().unwrap(); - (Ty::select_associated_type(ctx, res, segment), None) - } else if remaining_segments.len() > 1 { - // FIXME report error (ambiguous associated type) - (Ty::Unknown, None) - } else { - (ty, res) - } - } - - pub(crate) fn from_partly_resolved_hir_path( - ctx: &TyLoweringContext<'_>, - resolution: TypeNs, - resolved_segment: PathSegment<'_>, - remaining_segments: PathSegments<'_>, - infer_args: bool, - ) -> (Ty, Option) { - let ty = match resolution { - TypeNs::TraitId(trait_) => { - // if this is a bare dyn Trait, we'll directly put the required ^0 for the self type in there - let self_ty = if remaining_segments.len() == 0 { - Some(Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, 0))) - } else { - None - }; - let trait_ref = - TraitRef::from_resolved_path(ctx, trait_, resolved_segment, self_ty); - let ty = if remaining_segments.len() == 1 { - let segment = remaining_segments.first().unwrap(); - let found = associated_type_by_name_including_super_traits( - ctx.db, - trait_ref, - 
&segment.name, - ); - match found { - Some((super_trait_ref, associated_ty)) => { - // FIXME handle type parameters on the segment - Ty::Projection(ProjectionTy { - associated_ty, - parameters: super_trait_ref.substs, - }) - } - None => { - // FIXME: report error (associated type not found) - Ty::Unknown - } - } - } else if remaining_segments.len() > 1 { - // FIXME report error (ambiguous associated type) - Ty::Unknown - } else { - Ty::Dyn(Arc::new([GenericPredicate::Implemented(trait_ref)])) - }; - return (ty, None); - } - TypeNs::GenericParam(param_id) => { - let generics = generics( - ctx.db.upcast(), - ctx.resolver.generic_def().expect("generics in scope"), - ); - match ctx.type_param_mode { - TypeParamLoweringMode::Placeholder => Ty::Placeholder(param_id), - TypeParamLoweringMode::Variable => { - let idx = generics.param_idx(param_id).expect("matching generics"); - Ty::Bound(BoundVar::new(ctx.in_binders, idx)) - } - } - } - TypeNs::SelfType(impl_id) => { - let generics = generics(ctx.db.upcast(), impl_id.into()); - let substs = match ctx.type_param_mode { - TypeParamLoweringMode::Placeholder => { - Substs::type_params_for_generics(&generics) - } - TypeParamLoweringMode::Variable => { - Substs::bound_vars(&generics, ctx.in_binders) - } - }; - ctx.db.impl_self_ty(impl_id).subst(&substs) - } - TypeNs::AdtSelfType(adt) => { - let generics = generics(ctx.db.upcast(), adt.into()); - let substs = match ctx.type_param_mode { - TypeParamLoweringMode::Placeholder => { - Substs::type_params_for_generics(&generics) - } - TypeParamLoweringMode::Variable => { - Substs::bound_vars(&generics, ctx.in_binders) - } - }; - ctx.db.ty(adt.into()).subst(&substs) - } - - TypeNs::AdtId(it) => { - Ty::from_hir_path_inner(ctx, resolved_segment, it.into(), infer_args) - } - TypeNs::BuiltinType(it) => { - Ty::from_hir_path_inner(ctx, resolved_segment, it.into(), infer_args) - } - TypeNs::TypeAliasId(it) => { - Ty::from_hir_path_inner(ctx, resolved_segment, it.into(), infer_args) - } - // 
FIXME: report error - TypeNs::EnumVariantId(_) => return (Ty::Unknown, None), - }; - - Ty::from_type_relative_path(ctx, ty, Some(resolution), remaining_segments) - } - - pub(crate) fn from_hir_path(ctx: &TyLoweringContext<'_>, path: &Path) -> (Ty, Option) { - // Resolve the path (in type namespace) - if let Some(type_ref) = path.type_anchor() { - let (ty, res) = Ty::from_hir_ext(ctx, &type_ref); - return Ty::from_type_relative_path(ctx, ty, res, path.segments()); - } - let (resolution, remaining_index) = - match ctx.resolver.resolve_path_in_type_ns(ctx.db.upcast(), path.mod_path()) { - Some(it) => it, - None => return (Ty::Unknown, None), - }; - let (resolved_segment, remaining_segments) = match remaining_index { - None => ( - path.segments().last().expect("resolved path has at least one element"), - PathSegments::EMPTY, - ), - Some(i) => (path.segments().get(i - 1).unwrap(), path.segments().skip(i)), - }; - Ty::from_partly_resolved_hir_path( - ctx, - resolution, - resolved_segment, - remaining_segments, - false, - ) - } - - fn select_associated_type( - ctx: &TyLoweringContext<'_>, - res: Option, - segment: PathSegment<'_>, - ) -> Ty { - if let Some(res) = res { - let ty = - associated_type_shorthand_candidates(ctx.db, res, move |name, t, associated_ty| { - if name == segment.name { - let substs = match ctx.type_param_mode { - TypeParamLoweringMode::Placeholder => { - // if we're lowering to placeholders, we have to put - // them in now - let s = Substs::type_params( - ctx.db, - ctx.resolver.generic_def().expect( - "there should be generics if there's a generic param", - ), - ); - t.substs.clone().subst_bound_vars(&s) - } - TypeParamLoweringMode::Variable => t.substs.clone(), - }; - // We need to shift in the bound vars, since - // associated_type_shorthand_candidates does not do that - let substs = substs.shift_bound_vars(ctx.in_binders); - // FIXME handle type parameters on the segment - return Some(Ty::Projection(ProjectionTy { - associated_ty, - parameters: 
substs, - })); - } - - None - }); - - ty.unwrap_or(Ty::Unknown) - } else { - Ty::Unknown - } - } - - fn from_hir_path_inner( - ctx: &TyLoweringContext<'_>, - segment: PathSegment<'_>, - typable: TyDefId, - infer_args: bool, - ) -> Ty { - let generic_def = match typable { - TyDefId::BuiltinType(_) => None, - TyDefId::AdtId(it) => Some(it.into()), - TyDefId::TypeAliasId(it) => Some(it.into()), - }; - let substs = substs_from_path_segment(ctx, segment, generic_def, infer_args); - ctx.db.ty(typable).subst(&substs) - } - - /// Collect generic arguments from a path into a `Substs`. See also - /// `create_substs_for_ast_path` and `def_to_ty` in rustc. - pub(super) fn substs_from_path( - ctx: &TyLoweringContext<'_>, - path: &Path, - // Note that we don't call `db.value_type(resolved)` here, - // `ValueTyDefId` is just a convenient way to pass generics and - // special-case enum variants - resolved: ValueTyDefId, - infer_args: bool, - ) -> Substs { - let last = path.segments().last().expect("path should have at least one segment"); - let (segment, generic_def) = match resolved { - ValueTyDefId::FunctionId(it) => (last, Some(it.into())), - ValueTyDefId::StructId(it) => (last, Some(it.into())), - ValueTyDefId::UnionId(it) => (last, Some(it.into())), - ValueTyDefId::ConstId(it) => (last, Some(it.into())), - ValueTyDefId::StaticId(_) => (last, None), - ValueTyDefId::EnumVariantId(var) => { - // the generic args for an enum variant may be either specified - // on the segment referring to the enum, or on the segment - // referring to the variant. So `Option::::None` and - // `Option::None::` are both allowed (though the former is - // preferred). See also `def_ids_for_path_segments` in rustc. 
- let len = path.segments().len(); - let penultimate = if len >= 2 { path.segments().get(len - 2) } else { None }; - let segment = match penultimate { - Some(segment) if segment.args_and_bindings.is_some() => segment, - _ => last, - }; - (segment, Some(var.parent.into())) - } - }; - substs_from_path_segment(ctx, segment, generic_def, infer_args) - } -} - -fn substs_from_path_segment( - ctx: &TyLoweringContext<'_>, - segment: PathSegment<'_>, - def_generic: Option, - infer_args: bool, -) -> Substs { - let mut substs = Vec::new(); - let def_generics = def_generic.map(|def| generics(ctx.db.upcast(), def)); - - let (parent_params, self_params, type_params, impl_trait_params) = - def_generics.map_or((0, 0, 0, 0), |g| g.provenance_split()); - let total_len = parent_params + self_params + type_params + impl_trait_params; - - substs.extend(iter::repeat(Ty::Unknown).take(parent_params)); - - let mut had_explicit_args = false; - - if let Some(generic_args) = &segment.args_and_bindings { - if !generic_args.has_self_type { - substs.extend(iter::repeat(Ty::Unknown).take(self_params)); - } - let expected_num = - if generic_args.has_self_type { self_params + type_params } else { type_params }; - let skip = if generic_args.has_self_type && self_params == 0 { 1 } else { 0 }; - // if args are provided, it should be all of them, but we can't rely on that - for arg in generic_args.args.iter().skip(skip).take(expected_num) { - match arg { - GenericArg::Type(type_ref) => { - had_explicit_args = true; - let ty = Ty::from_hir(ctx, type_ref); - substs.push(ty); - } - } - } - } - - // handle defaults. In expression or pattern path segments without - // explicitly specified type arguments, missing type arguments are inferred - // (i.e. defaults aren't used). 
- if !infer_args || had_explicit_args { - if let Some(def_generic) = def_generic { - let defaults = ctx.db.generic_defaults(def_generic); - assert_eq!(total_len, defaults.len()); - - for default_ty in defaults.iter().skip(substs.len()) { - // each default can depend on the previous parameters - let substs_so_far = Substs(substs.clone().into()); - substs.push(default_ty.clone().subst(&substs_so_far)); - } - } - } - - // add placeholders for args that were not provided - // FIXME: emit diagnostics in contexts where this is not allowed - for _ in substs.len()..total_len { - substs.push(Ty::Unknown); - } - assert_eq!(substs.len(), total_len); - - Substs(substs.into()) -} - -impl TraitRef { - fn from_path( - ctx: &TyLoweringContext<'_>, - path: &Path, - explicit_self_ty: Option, - ) -> Option { - let resolved = - match ctx.resolver.resolve_path_in_type_ns_fully(ctx.db.upcast(), path.mod_path())? { - TypeNs::TraitId(tr) => tr, - _ => return None, - }; - let segment = path.segments().last().expect("path should have at least one segment"); - Some(TraitRef::from_resolved_path(ctx, resolved, segment, explicit_self_ty)) - } - - pub(crate) fn from_resolved_path( - ctx: &TyLoweringContext<'_>, - resolved: TraitId, - segment: PathSegment<'_>, - explicit_self_ty: Option, - ) -> Self { - let mut substs = TraitRef::substs_from_path(ctx, segment, resolved); - if let Some(self_ty) = explicit_self_ty { - make_mut_slice(&mut substs.0)[0] = self_ty; - } - TraitRef { trait_: resolved, substs } - } - - fn from_hir( - ctx: &TyLoweringContext<'_>, - type_ref: &TypeRef, - explicit_self_ty: Option, - ) -> Option { - let path = match type_ref { - TypeRef::Path(path) => path, - _ => return None, - }; - TraitRef::from_path(ctx, path, explicit_self_ty) - } - - fn substs_from_path( - ctx: &TyLoweringContext<'_>, - segment: PathSegment<'_>, - resolved: TraitId, - ) -> Substs { - substs_from_path_segment(ctx, segment, Some(resolved.into()), false) - } - - pub(crate) fn from_type_bound( - ctx: 
&TyLoweringContext<'_>, - bound: &TypeBound, - self_ty: Ty, - ) -> Option { - match bound { - TypeBound::Path(path) => TraitRef::from_path(ctx, path, Some(self_ty)), - TypeBound::Error => None, - } - } -} - -impl GenericPredicate { - pub(crate) fn from_where_predicate<'a>( - ctx: &'a TyLoweringContext<'a>, - where_predicate: &'a WherePredicate, - ) -> impl Iterator + 'a { - let self_ty = match &where_predicate.target { - WherePredicateTarget::TypeRef(type_ref) => Ty::from_hir(ctx, type_ref), - WherePredicateTarget::TypeParam(param_id) => { - let generic_def = ctx.resolver.generic_def().expect("generics in scope"); - let generics = generics(ctx.db.upcast(), generic_def); - let param_id = hir_def::TypeParamId { parent: generic_def, local_id: *param_id }; - match ctx.type_param_mode { - TypeParamLoweringMode::Placeholder => Ty::Placeholder(param_id), - TypeParamLoweringMode::Variable => { - let idx = generics.param_idx(param_id).expect("matching generics"); - Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, idx)) - } - } - } - }; - GenericPredicate::from_type_bound(ctx, &where_predicate.bound, self_ty) - } - - pub(crate) fn from_type_bound<'a>( - ctx: &'a TyLoweringContext<'a>, - bound: &'a TypeBound, - self_ty: Ty, - ) -> impl Iterator + 'a { - let trait_ref = TraitRef::from_type_bound(ctx, bound, self_ty); - iter::once(trait_ref.clone().map_or(GenericPredicate::Error, GenericPredicate::Implemented)) - .chain( - trait_ref - .into_iter() - .flat_map(move |tr| assoc_type_bindings_from_type_bound(ctx, bound, tr)), - ) - } -} - -fn assoc_type_bindings_from_type_bound<'a>( - ctx: &'a TyLoweringContext<'a>, - bound: &'a TypeBound, - trait_ref: TraitRef, -) -> impl Iterator + 'a { - let last_segment = match bound { - TypeBound::Path(path) => path.segments().last(), - TypeBound::Error => None, - }; - last_segment - .into_iter() - .flat_map(|segment| segment.args_and_bindings.into_iter()) - .flat_map(|args_and_bindings| args_and_bindings.bindings.iter()) - .flat_map(move 
|binding| { - let found = associated_type_by_name_including_super_traits( - ctx.db, - trait_ref.clone(), - &binding.name, - ); - let (super_trait_ref, associated_ty) = match found { - None => return SmallVec::<[GenericPredicate; 1]>::new(), - Some(t) => t, - }; - let projection_ty = ProjectionTy { associated_ty, parameters: super_trait_ref.substs }; - let mut preds = SmallVec::with_capacity( - binding.type_ref.as_ref().map_or(0, |_| 1) + binding.bounds.len(), - ); - if let Some(type_ref) = &binding.type_ref { - let ty = Ty::from_hir(ctx, type_ref); - let projection_predicate = - ProjectionPredicate { projection_ty: projection_ty.clone(), ty }; - preds.push(GenericPredicate::Projection(projection_predicate)); - } - for bound in &binding.bounds { - preds.extend(GenericPredicate::from_type_bound( - ctx, - bound, - Ty::Projection(projection_ty.clone()), - )); - } - preds - }) -} - -impl ReturnTypeImplTrait { - fn from_hir(ctx: &TyLoweringContext, bounds: &[TypeBound]) -> Self { - mark::hit!(lower_rpit); - let self_ty = Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, 0)); - let predicates = ctx.with_shifted_in(DebruijnIndex::ONE, |ctx| { - bounds - .iter() - .flat_map(|b| GenericPredicate::from_type_bound(ctx, b, self_ty.clone())) - .collect() - }); - ReturnTypeImplTrait { bounds: Binders::new(1, predicates) } - } -} - -fn count_impl_traits(type_ref: &TypeRef) -> usize { - let mut count = 0; - type_ref.walk(&mut |type_ref| { - if matches!(type_ref, TypeRef::ImplTrait(_)) { - count += 1; - } - }); - count -} - -/// Build the signature of a callable item (function, struct or enum variant). 
-pub fn callable_item_sig(db: &dyn HirDatabase, def: CallableDefId) -> PolyFnSig { - match def { - CallableDefId::FunctionId(f) => fn_sig_for_fn(db, f), - CallableDefId::StructId(s) => fn_sig_for_struct_constructor(db, s), - CallableDefId::EnumVariantId(e) => fn_sig_for_enum_variant_constructor(db, e), - } -} - -pub fn associated_type_shorthand_candidates( - db: &dyn HirDatabase, - res: TypeNs, - mut cb: impl FnMut(&Name, &TraitRef, TypeAliasId) -> Option, -) -> Option { - let traits_from_env: Vec<_> = match res { - TypeNs::SelfType(impl_id) => match db.impl_trait(impl_id) { - None => vec![], - Some(trait_ref) => vec![trait_ref.value], - }, - TypeNs::GenericParam(param_id) => { - let predicates = db.generic_predicates_for_param(param_id); - let mut traits_: Vec<_> = predicates - .iter() - .filter_map(|pred| match &pred.value { - GenericPredicate::Implemented(tr) => Some(tr.clone()), - _ => None, - }) - .collect(); - // Handle `Self::Type` referring to own associated type in trait definitions - if let GenericDefId::TraitId(trait_id) = param_id.parent { - let generics = generics(db.upcast(), trait_id.into()); - if generics.params.types[param_id.local_id].provenance - == TypeParamProvenance::TraitSelf - { - let trait_ref = TraitRef { - trait_: trait_id, - substs: Substs::bound_vars(&generics, DebruijnIndex::INNERMOST), - }; - traits_.push(trait_ref); - } - } - traits_ - } - _ => vec![], - }; - - for t in traits_from_env.into_iter().flat_map(move |t| all_super_trait_refs(db, t)) { - let data = db.trait_data(t.trait_); - - for (name, assoc_id) in &data.items { - match assoc_id { - AssocItemId::TypeAliasId(alias) => { - if let Some(result) = cb(name, &t, *alias) { - return Some(result); - } - } - AssocItemId::FunctionId(_) | AssocItemId::ConstId(_) => {} - } - } - } - - None -} - -/// Build the type of all specific fields of a struct or enum variant. 
-pub(crate) fn field_types_query( - db: &dyn HirDatabase, - variant_id: VariantId, -) -> Arc>> { - let var_data = variant_data(db.upcast(), variant_id); - let (resolver, def): (_, GenericDefId) = match variant_id { - VariantId::StructId(it) => (it.resolver(db.upcast()), it.into()), - VariantId::UnionId(it) => (it.resolver(db.upcast()), it.into()), - VariantId::EnumVariantId(it) => (it.parent.resolver(db.upcast()), it.parent.into()), - }; - let generics = generics(db.upcast(), def); - let mut res = ArenaMap::default(); - let ctx = - TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable); - for (field_id, field_data) in var_data.fields().iter() { - res.insert(field_id, Binders::new(generics.len(), Ty::from_hir(&ctx, &field_data.type_ref))) - } - Arc::new(res) -} - -/// This query exists only to be used when resolving short-hand associated types -/// like `T::Item`. -/// -/// See the analogous query in rustc and its comment: -/// https://github.com/rust-lang/rust/blob/9150f844e2624eb013ec78ca08c1d416e6644026/src/librustc_typeck/astconv.rs#L46 -/// This is a query mostly to handle cycles somewhat gracefully; e.g. the -/// following bounds are disallowed: `T: Foo, U: Foo`, but -/// these are fine: `T: Foo, U: Foo<()>`. 
-pub(crate) fn generic_predicates_for_param_query( - db: &dyn HirDatabase, - param_id: TypeParamId, -) -> Arc<[Binders]> { - let resolver = param_id.parent.resolver(db.upcast()); - let ctx = - TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable); - let generics = generics(db.upcast(), param_id.parent); - resolver - .where_predicates_in_scope() - // we have to filter out all other predicates *first*, before attempting to lower them - .filter(|pred| match &pred.target { - WherePredicateTarget::TypeRef(type_ref) => { - Ty::from_hir_only_param(&ctx, type_ref) == Some(param_id) - } - WherePredicateTarget::TypeParam(local_id) => *local_id == param_id.local_id, - }) - .flat_map(|pred| { - GenericPredicate::from_where_predicate(&ctx, pred) - .map(|p| Binders::new(generics.len(), p)) - }) - .collect() -} - -pub(crate) fn generic_predicates_for_param_recover( - _db: &dyn HirDatabase, - _cycle: &[String], - _param_id: &TypeParamId, -) -> Arc<[Binders]> { - Arc::new([]) -} - -impl TraitEnvironment { - pub fn lower(db: &dyn HirDatabase, resolver: &Resolver) -> Arc { - let ctx = TyLoweringContext::new(db, &resolver) - .with_type_param_mode(TypeParamLoweringMode::Placeholder); - let mut predicates = resolver - .where_predicates_in_scope() - .flat_map(|pred| GenericPredicate::from_where_predicate(&ctx, pred)) - .collect::>(); - - if let Some(def) = resolver.generic_def() { - let container: Option = match def { - // FIXME: is there a function for this? 
- GenericDefId::FunctionId(f) => Some(f.lookup(db.upcast()).container), - GenericDefId::AdtId(_) => None, - GenericDefId::TraitId(_) => None, - GenericDefId::TypeAliasId(t) => Some(t.lookup(db.upcast()).container), - GenericDefId::ImplId(_) => None, - GenericDefId::EnumVariantId(_) => None, - GenericDefId::ConstId(c) => Some(c.lookup(db.upcast()).container), - }; - if let Some(AssocContainerId::TraitId(trait_id)) = container { - // add `Self: Trait` to the environment in trait - // function default implementations (and hypothetical code - // inside consts or type aliases) - test_utils::mark::hit!(trait_self_implements_self); - let substs = Substs::type_params(db, trait_id); - let trait_ref = TraitRef { trait_: trait_id, substs }; - let pred = GenericPredicate::Implemented(trait_ref); - - predicates.push(pred); - } - } - - Arc::new(TraitEnvironment { predicates }) - } -} - -/// Resolve the where clause(s) of an item with generics. -pub(crate) fn generic_predicates_query( - db: &dyn HirDatabase, - def: GenericDefId, -) -> Arc<[Binders]> { - let resolver = def.resolver(db.upcast()); - let ctx = - TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable); - let generics = generics(db.upcast(), def); - resolver - .where_predicates_in_scope() - .flat_map(|pred| { - GenericPredicate::from_where_predicate(&ctx, pred) - .map(|p| Binders::new(generics.len(), p)) - }) - .collect() -} - -/// Resolve the default type params from generics -pub(crate) fn generic_defaults_query( - db: &dyn HirDatabase, - def: GenericDefId, -) -> Arc<[Binders]> { - let resolver = def.resolver(db.upcast()); - let ctx = - TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable); - let generic_params = generics(db.upcast(), def); - - let defaults = generic_params - .iter() - .enumerate() - .map(|(idx, (_, p))| { - let mut ty = p.default.as_ref().map_or(Ty::Unknown, |t| Ty::from_hir(&ctx, t)); - - // Each default can only refer to 
previous parameters. - ty.walk_mut_binders( - &mut |ty, binders| match ty { - Ty::Bound(BoundVar { debruijn, index }) if *debruijn == binders => { - if *index >= idx { - // type variable default referring to parameter coming - // after it. This is forbidden (FIXME: report - // diagnostic) - *ty = Ty::Unknown; - } - } - _ => {} - }, - DebruijnIndex::INNERMOST, - ); - - Binders::new(idx, ty) - }) - .collect(); - - defaults -} - -fn fn_sig_for_fn(db: &dyn HirDatabase, def: FunctionId) -> PolyFnSig { - let data = db.function_data(def); - let resolver = def.resolver(db.upcast()); - let ctx_params = TyLoweringContext::new(db, &resolver) - .with_impl_trait_mode(ImplTraitLoweringMode::Variable) - .with_type_param_mode(TypeParamLoweringMode::Variable); - let params = data.params.iter().map(|tr| Ty::from_hir(&ctx_params, tr)).collect::>(); - let ctx_ret = TyLoweringContext::new(db, &resolver) - .with_impl_trait_mode(ImplTraitLoweringMode::Opaque) - .with_type_param_mode(TypeParamLoweringMode::Variable); - let ret = Ty::from_hir(&ctx_ret, &data.ret_type); - let generics = generics(db.upcast(), def.into()); - let num_binders = generics.len(); - Binders::new(num_binders, FnSig::from_params_and_return(params, ret, data.is_varargs)) -} - -/// Build the declared type of a function. This should not need to look at the -/// function body. -fn type_for_fn(db: &dyn HirDatabase, def: FunctionId) -> Binders { - let generics = generics(db.upcast(), def.into()); - let substs = Substs::bound_vars(&generics, DebruijnIndex::INNERMOST); - Binders::new(substs.len(), Ty::apply(TypeCtor::FnDef(def.into()), substs)) -} - -/// Build the declared type of a const. 
-fn type_for_const(db: &dyn HirDatabase, def: ConstId) -> Binders { - let data = db.const_data(def); - let generics = generics(db.upcast(), def.into()); - let resolver = def.resolver(db.upcast()); - let ctx = - TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable); - - Binders::new(generics.len(), Ty::from_hir(&ctx, &data.type_ref)) -} - -/// Build the declared type of a static. -fn type_for_static(db: &dyn HirDatabase, def: StaticId) -> Binders { - let data = db.static_data(def); - let resolver = def.resolver(db.upcast()); - let ctx = TyLoweringContext::new(db, &resolver); - - Binders::new(0, Ty::from_hir(&ctx, &data.type_ref)) -} - -/// Build the declared type of a static. -fn type_for_builtin(def: BuiltinType) -> Ty { - Ty::simple(match def { - BuiltinType::Char => TypeCtor::Char, - BuiltinType::Bool => TypeCtor::Bool, - BuiltinType::Str => TypeCtor::Str, - BuiltinType::Int(t) => TypeCtor::Int(IntTy::from(t).into()), - BuiltinType::Float(t) => TypeCtor::Float(FloatTy::from(t).into()), - }) -} - -fn fn_sig_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> PolyFnSig { - let struct_data = db.struct_data(def); - let fields = struct_data.variant_data.fields(); - let resolver = def.resolver(db.upcast()); - let ctx = - TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable); - let params = - fields.iter().map(|(_, field)| Ty::from_hir(&ctx, &field.type_ref)).collect::>(); - let ret = type_for_adt(db, def.into()); - Binders::new(ret.num_binders, FnSig::from_params_and_return(params, ret.value, false)) -} - -/// Build the type of a tuple struct constructor. 
-fn type_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> Binders { - let struct_data = db.struct_data(def); - if let StructKind::Unit = struct_data.variant_data.kind() { - return type_for_adt(db, def.into()); - } - let generics = generics(db.upcast(), def.into()); - let substs = Substs::bound_vars(&generics, DebruijnIndex::INNERMOST); - Binders::new(substs.len(), Ty::apply(TypeCtor::FnDef(def.into()), substs)) -} - -fn fn_sig_for_enum_variant_constructor(db: &dyn HirDatabase, def: EnumVariantId) -> PolyFnSig { - let enum_data = db.enum_data(def.parent); - let var_data = &enum_data.variants[def.local_id]; - let fields = var_data.variant_data.fields(); - let resolver = def.parent.resolver(db.upcast()); - let ctx = - TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable); - let params = - fields.iter().map(|(_, field)| Ty::from_hir(&ctx, &field.type_ref)).collect::>(); - let ret = type_for_adt(db, def.parent.into()); - Binders::new(ret.num_binders, FnSig::from_params_and_return(params, ret.value, false)) -} - -/// Build the type of a tuple enum variant constructor. 
-fn type_for_enum_variant_constructor(db: &dyn HirDatabase, def: EnumVariantId) -> Binders { - let enum_data = db.enum_data(def.parent); - let var_data = &enum_data.variants[def.local_id].variant_data; - if let StructKind::Unit = var_data.kind() { - return type_for_adt(db, def.parent.into()); - } - let generics = generics(db.upcast(), def.parent.into()); - let substs = Substs::bound_vars(&generics, DebruijnIndex::INNERMOST); - Binders::new(substs.len(), Ty::apply(TypeCtor::FnDef(def.into()), substs)) -} - -fn type_for_adt(db: &dyn HirDatabase, adt: AdtId) -> Binders { - let generics = generics(db.upcast(), adt.into()); - let substs = Substs::bound_vars(&generics, DebruijnIndex::INNERMOST); - Binders::new(substs.len(), Ty::apply(TypeCtor::Adt(adt), substs)) -} - -fn type_for_type_alias(db: &dyn HirDatabase, t: TypeAliasId) -> Binders { - let generics = generics(db.upcast(), t.into()); - let resolver = t.resolver(db.upcast()); - let ctx = - TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable); - let type_ref = &db.type_alias_data(t).type_ref; - let substs = Substs::bound_vars(&generics, DebruijnIndex::INNERMOST); - let inner = Ty::from_hir(&ctx, type_ref.as_ref().unwrap_or(&TypeRef::Error)); - Binders::new(substs.len(), inner) -} - -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] -pub enum CallableDefId { - FunctionId(FunctionId), - StructId(StructId), - EnumVariantId(EnumVariantId), -} -impl_from!(FunctionId, StructId, EnumVariantId for CallableDefId); - -impl CallableDefId { - pub fn krate(self, db: &dyn HirDatabase) -> CrateId { - let db = db.upcast(); - match self { - CallableDefId::FunctionId(f) => f.lookup(db).module(db), - CallableDefId::StructId(s) => s.lookup(db).container.module(db), - CallableDefId::EnumVariantId(e) => e.parent.lookup(db).container.module(db), - } - .krate - } -} - -impl From for GenericDefId { - fn from(def: CallableDefId) -> GenericDefId { - match def { - CallableDefId::FunctionId(f) => f.into(), 
- CallableDefId::StructId(s) => s.into(), - CallableDefId::EnumVariantId(e) => e.into(), - } - } -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub enum TyDefId { - BuiltinType(BuiltinType), - AdtId(AdtId), - TypeAliasId(TypeAliasId), -} -impl_from!(BuiltinType, AdtId(StructId, EnumId, UnionId), TypeAliasId for TyDefId); - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub enum ValueTyDefId { - FunctionId(FunctionId), - StructId(StructId), - UnionId(UnionId), - EnumVariantId(EnumVariantId), - ConstId(ConstId), - StaticId(StaticId), -} -impl_from!(FunctionId, StructId, UnionId, EnumVariantId, ConstId, StaticId for ValueTyDefId); - -/// Build the declared type of an item. This depends on the namespace; e.g. for -/// `struct Foo(usize)`, we have two types: The type of the struct itself, and -/// the constructor function `(usize) -> Foo` which lives in the values -/// namespace. -pub(crate) fn ty_query(db: &dyn HirDatabase, def: TyDefId) -> Binders { - match def { - TyDefId::BuiltinType(it) => Binders::new(0, type_for_builtin(it)), - TyDefId::AdtId(it) => type_for_adt(db, it), - TyDefId::TypeAliasId(it) => type_for_type_alias(db, it), - } -} - -pub(crate) fn ty_recover(db: &dyn HirDatabase, _cycle: &[String], def: &TyDefId) -> Binders { - let num_binders = match *def { - TyDefId::BuiltinType(_) => 0, - TyDefId::AdtId(it) => generics(db.upcast(), it.into()).len(), - TyDefId::TypeAliasId(it) => generics(db.upcast(), it.into()).len(), - }; - Binders::new(num_binders, Ty::Unknown) -} - -pub(crate) fn value_ty_query(db: &dyn HirDatabase, def: ValueTyDefId) -> Binders { - match def { - ValueTyDefId::FunctionId(it) => type_for_fn(db, it), - ValueTyDefId::StructId(it) => type_for_struct_constructor(db, it), - ValueTyDefId::UnionId(it) => type_for_adt(db, it.into()), - ValueTyDefId::EnumVariantId(it) => type_for_enum_variant_constructor(db, it), - ValueTyDefId::ConstId(it) => type_for_const(db, it), - ValueTyDefId::StaticId(it) => type_for_static(db, it), - } 
-} - -pub(crate) fn impl_self_ty_query(db: &dyn HirDatabase, impl_id: ImplId) -> Binders { - let impl_data = db.impl_data(impl_id); - let resolver = impl_id.resolver(db.upcast()); - let generics = generics(db.upcast(), impl_id.into()); - let ctx = - TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable); - Binders::new(generics.len(), Ty::from_hir(&ctx, &impl_data.target_type)) -} - -pub(crate) fn impl_self_ty_recover( - db: &dyn HirDatabase, - _cycle: &[String], - impl_id: &ImplId, -) -> Binders { - let generics = generics(db.upcast(), (*impl_id).into()); - Binders::new(generics.len(), Ty::Unknown) -} - -pub(crate) fn impl_trait_query(db: &dyn HirDatabase, impl_id: ImplId) -> Option> { - let impl_data = db.impl_data(impl_id); - let resolver = impl_id.resolver(db.upcast()); - let ctx = - TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable); - let self_ty = db.impl_self_ty(impl_id); - let target_trait = impl_data.target_trait.as_ref()?; - Some(Binders::new( - self_ty.num_binders, - TraitRef::from_hir(&ctx, target_trait, Some(self_ty.value))?, - )) -} - -pub(crate) fn return_type_impl_traits( - db: &dyn HirDatabase, - def: hir_def::FunctionId, -) -> Option>> { - // FIXME unify with fn_sig_for_fn instead of doing lowering twice, maybe - let data = db.function_data(def); - let resolver = def.resolver(db.upcast()); - let ctx_ret = TyLoweringContext::new(db, &resolver) - .with_impl_trait_mode(ImplTraitLoweringMode::Opaque) - .with_type_param_mode(TypeParamLoweringMode::Variable); - let _ret = Ty::from_hir(&ctx_ret, &data.ret_type); - let generics = generics(db.upcast(), def.into()); - let num_binders = generics.len(); - let return_type_impl_traits = - ReturnTypeImplTraits { impl_traits: ctx_ret.opaque_type_data.into_inner() }; - if return_type_impl_traits.impl_traits.is_empty() { - None - } else { - Some(Arc::new(Binders::new(num_binders, return_type_impl_traits))) - } -} diff --git 
a/crates/ra_hir_ty/src/method_resolution.rs b/crates/ra_hir_ty/src/method_resolution.rs deleted file mode 100644 index fb4b30a131..0000000000 --- a/crates/ra_hir_ty/src/method_resolution.rs +++ /dev/null @@ -1,770 +0,0 @@ -//! This module is concerned with finding methods that a given type provides. -//! For details about how this works in rustc, see the method lookup page in the -//! [rustc guide](https://rust-lang.github.io/rustc-guide/method-lookup.html) -//! and the corresponding code mostly in librustc_typeck/check/method/probe.rs. -use std::{iter, sync::Arc}; - -use arrayvec::ArrayVec; -use hir_def::{ - builtin_type::{IntBitness, Signedness}, - lang_item::LangItemTarget, - type_ref::Mutability, - AssocContainerId, AssocItemId, FunctionId, HasModule, ImplId, Lookup, TraitId, -}; -use hir_expand::name::Name; -use ra_db::CrateId; -use ra_prof::profile; -use rustc_hash::{FxHashMap, FxHashSet}; - -use super::Substs; -use crate::{ - autoderef, - db::HirDatabase, - primitive::{FloatBitness, FloatTy, IntTy}, - utils::all_super_traits, - ApplicationTy, Canonical, DebruijnIndex, InEnvironment, TraitEnvironment, TraitRef, Ty, TyKind, - TypeCtor, TypeWalk, -}; - -/// This is used as a key for indexing impls. -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub enum TyFingerprint { - Apply(TypeCtor), -} - -impl TyFingerprint { - /// Creates a TyFingerprint for looking up an impl. Only certain types can - /// have impls: if we have some `struct S`, we can have an `impl S`, but not - /// `impl &S`. Hence, this will return `None` for reference types and such. 
- pub(crate) fn for_impl(ty: &Ty) -> Option { - match ty { - Ty::Apply(a_ty) => Some(TyFingerprint::Apply(a_ty.ctor)), - _ => None, - } - } -} - -pub(crate) const ALL_INT_FPS: [TyFingerprint; 12] = [ - TyFingerprint::Apply(TypeCtor::Int(IntTy { - signedness: Signedness::Unsigned, - bitness: IntBitness::X8, - })), - TyFingerprint::Apply(TypeCtor::Int(IntTy { - signedness: Signedness::Unsigned, - bitness: IntBitness::X16, - })), - TyFingerprint::Apply(TypeCtor::Int(IntTy { - signedness: Signedness::Unsigned, - bitness: IntBitness::X32, - })), - TyFingerprint::Apply(TypeCtor::Int(IntTy { - signedness: Signedness::Unsigned, - bitness: IntBitness::X64, - })), - TyFingerprint::Apply(TypeCtor::Int(IntTy { - signedness: Signedness::Unsigned, - bitness: IntBitness::X128, - })), - TyFingerprint::Apply(TypeCtor::Int(IntTy { - signedness: Signedness::Unsigned, - bitness: IntBitness::Xsize, - })), - TyFingerprint::Apply(TypeCtor::Int(IntTy { - signedness: Signedness::Signed, - bitness: IntBitness::X8, - })), - TyFingerprint::Apply(TypeCtor::Int(IntTy { - signedness: Signedness::Signed, - bitness: IntBitness::X16, - })), - TyFingerprint::Apply(TypeCtor::Int(IntTy { - signedness: Signedness::Signed, - bitness: IntBitness::X32, - })), - TyFingerprint::Apply(TypeCtor::Int(IntTy { - signedness: Signedness::Signed, - bitness: IntBitness::X64, - })), - TyFingerprint::Apply(TypeCtor::Int(IntTy { - signedness: Signedness::Signed, - bitness: IntBitness::X128, - })), - TyFingerprint::Apply(TypeCtor::Int(IntTy { - signedness: Signedness::Signed, - bitness: IntBitness::Xsize, - })), -]; - -pub(crate) const ALL_FLOAT_FPS: [TyFingerprint; 2] = [ - TyFingerprint::Apply(TypeCtor::Float(FloatTy { bitness: FloatBitness::X32 })), - TyFingerprint::Apply(TypeCtor::Float(FloatTy { bitness: FloatBitness::X64 })), -]; - -/// Trait impls defined or available in some crate. -#[derive(Debug, Eq, PartialEq)] -pub struct TraitImpls { - // If the `Option` is `None`, the impl may apply to any self type. 
- map: FxHashMap, Vec>>, -} - -impl TraitImpls { - pub(crate) fn trait_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc { - let _p = profile("trait_impls_in_crate_query"); - let mut impls = Self { map: FxHashMap::default() }; - - let crate_def_map = db.crate_def_map(krate); - for (_module_id, module_data) in crate_def_map.modules.iter() { - for impl_id in module_data.scope.impls() { - let target_trait = match db.impl_trait(impl_id) { - Some(tr) => tr.value.trait_, - None => continue, - }; - let self_ty = db.impl_self_ty(impl_id); - let self_ty_fp = TyFingerprint::for_impl(&self_ty.value); - impls - .map - .entry(target_trait) - .or_default() - .entry(self_ty_fp) - .or_default() - .push(impl_id); - } - } - - Arc::new(impls) - } - - pub(crate) fn trait_impls_in_deps_query(db: &dyn HirDatabase, krate: CrateId) -> Arc { - let _p = profile("trait_impls_in_deps_query"); - let crate_graph = db.crate_graph(); - let mut res = Self { map: FxHashMap::default() }; - - for krate in crate_graph.transitive_deps(krate) { - res.merge(&db.trait_impls_in_crate(krate)); - } - - Arc::new(res) - } - - fn merge(&mut self, other: &Self) { - for (trait_, other_map) in &other.map { - let map = self.map.entry(*trait_).or_default(); - for (fp, impls) in other_map { - let vec = map.entry(*fp).or_default(); - vec.extend(impls); - } - } - } - - /// Queries all impls of the given trait. - pub fn for_trait(&self, trait_: TraitId) -> impl Iterator + '_ { - self.map - .get(&trait_) - .into_iter() - .flat_map(|map| map.values().flat_map(|v| v.iter().copied())) - } - - /// Queries all impls of `trait_` that may apply to `self_ty`. 
- pub fn for_trait_and_self_ty( - &self, - trait_: TraitId, - self_ty: TyFingerprint, - ) -> impl Iterator + '_ { - self.map - .get(&trait_) - .into_iter() - .flat_map(move |map| map.get(&None).into_iter().chain(map.get(&Some(self_ty)))) - .flat_map(|v| v.iter().copied()) - } - - pub fn all_impls(&self) -> impl Iterator + '_ { - self.map.values().flat_map(|map| map.values().flat_map(|v| v.iter().copied())) - } -} - -/// Inherent impls defined in some crate. -/// -/// Inherent impls can only be defined in the crate that also defines the self type of the impl -/// (note that some primitives are considered to be defined by both libcore and liballoc). -/// -/// This makes inherent impl lookup easier than trait impl lookup since we only have to consider a -/// single crate. -#[derive(Debug, Eq, PartialEq)] -pub struct InherentImpls { - map: FxHashMap>, -} - -impl InherentImpls { - pub(crate) fn inherent_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc { - let mut map: FxHashMap<_, Vec<_>> = FxHashMap::default(); - - let crate_def_map = db.crate_def_map(krate); - for (_module_id, module_data) in crate_def_map.modules.iter() { - for impl_id in module_data.scope.impls() { - let data = db.impl_data(impl_id); - if data.target_trait.is_some() { - continue; - } - - let self_ty = db.impl_self_ty(impl_id); - if let Some(fp) = TyFingerprint::for_impl(&self_ty.value) { - map.entry(fp).or_default().push(impl_id); - } - } - } - - Arc::new(Self { map }) - } - - pub fn for_self_ty(&self, self_ty: &Ty) -> &[ImplId] { - match TyFingerprint::for_impl(self_ty) { - Some(fp) => self.map.get(&fp).map(|vec| vec.as_ref()).unwrap_or(&[]), - None => &[], - } - } - - pub fn all_impls(&self) -> impl Iterator + '_ { - self.map.values().flat_map(|v| v.iter().copied()) - } -} - -impl Ty { - pub fn def_crates( - &self, - db: &dyn HirDatabase, - cur_crate: CrateId, - ) -> Option> { - // Types like slice can have inherent impls in several crates, (core and alloc). 
- // The corresponding impls are marked with lang items, so we can use them to find the required crates. - macro_rules! lang_item_crate { - ($($name:expr),+ $(,)?) => {{ - let mut v = ArrayVec::<[LangItemTarget; 2]>::new(); - $( - v.extend(db.lang_item(cur_crate, $name.into())); - )+ - v - }}; - } - - let lang_item_targets = match self { - Ty::Apply(a_ty) => match a_ty.ctor { - TypeCtor::Adt(def_id) => { - return Some(std::iter::once(def_id.module(db.upcast()).krate).collect()) - } - TypeCtor::Bool => lang_item_crate!("bool"), - TypeCtor::Char => lang_item_crate!("char"), - TypeCtor::Float(f) => match f.bitness { - // There are two lang items: one in libcore (fXX) and one in libstd (fXX_runtime) - FloatBitness::X32 => lang_item_crate!("f32", "f32_runtime"), - FloatBitness::X64 => lang_item_crate!("f64", "f64_runtime"), - }, - TypeCtor::Int(i) => lang_item_crate!(i.ty_to_string()), - TypeCtor::Str => lang_item_crate!("str_alloc", "str"), - TypeCtor::Slice => lang_item_crate!("slice_alloc", "slice"), - TypeCtor::RawPtr(Mutability::Shared) => lang_item_crate!("const_ptr"), - TypeCtor::RawPtr(Mutability::Mut) => lang_item_crate!("mut_ptr"), - _ => return None, - }, - _ => return None, - }; - let res = lang_item_targets - .into_iter() - .filter_map(|it| match it { - LangItemTarget::ImplDefId(it) => Some(it), - _ => None, - }) - .map(|it| it.lookup(db.upcast()).container.module(db.upcast()).krate) - .collect(); - Some(res) - } -} -/// Look up the method with the given name, returning the actual autoderefed -/// receiver type (but without autoref applied yet). 
-pub(crate) fn lookup_method( - ty: &Canonical, - db: &dyn HirDatabase, - env: Arc, - krate: CrateId, - traits_in_scope: &FxHashSet, - name: &Name, -) -> Option<(Ty, FunctionId)> { - iterate_method_candidates( - ty, - db, - env, - krate, - &traits_in_scope, - Some(name), - LookupMode::MethodCall, - |ty, f| match f { - AssocItemId::FunctionId(f) => Some((ty.clone(), f)), - _ => None, - }, - ) -} - -/// Whether we're looking up a dotted method call (like `v.len()`) or a path -/// (like `Vec::new`). -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub enum LookupMode { - /// Looking up a method call like `v.len()`: We only consider candidates - /// that have a `self` parameter, and do autoderef. - MethodCall, - /// Looking up a path like `Vec::new` or `Vec::default`: We consider all - /// candidates including associated constants, but don't do autoderef. - Path, -} - -// This would be nicer if it just returned an iterator, but that runs into -// lifetime problems, because we need to borrow temp `CrateImplDefs`. -// FIXME add a context type here? -pub fn iterate_method_candidates( - ty: &Canonical, - db: &dyn HirDatabase, - env: Arc, - krate: CrateId, - traits_in_scope: &FxHashSet, - name: Option<&Name>, - mode: LookupMode, - mut callback: impl FnMut(&Ty, AssocItemId) -> Option, -) -> Option { - let mut slot = None; - iterate_method_candidates_impl( - ty, - db, - env, - krate, - traits_in_scope, - name, - mode, - &mut |ty, item| { - assert!(slot.is_none()); - slot = callback(ty, item); - slot.is_some() - }, - ); - slot -} - -fn iterate_method_candidates_impl( - ty: &Canonical, - db: &dyn HirDatabase, - env: Arc, - krate: CrateId, - traits_in_scope: &FxHashSet, - name: Option<&Name>, - mode: LookupMode, - callback: &mut dyn FnMut(&Ty, AssocItemId) -> bool, -) -> bool { - match mode { - LookupMode::MethodCall => { - // For method calls, rust first does any number of autoderef, and then one - // autoref (i.e. when the method takes &self or &mut self). 
We just ignore - // the autoref currently -- when we find a method matching the given name, - // we assume it fits. - - // Also note that when we've got a receiver like &S, even if the method we - // find in the end takes &self, we still do the autoderef step (just as - // rustc does an autoderef and then autoref again). - let ty = InEnvironment { value: ty.clone(), environment: env.clone() }; - - // We have to be careful about the order we're looking at candidates - // in here. Consider the case where we're resolving `x.clone()` - // where `x: &Vec<_>`. This resolves to the clone method with self - // type `Vec<_>`, *not* `&_`. I.e. we need to consider methods where - // the receiver type exactly matches before cases where we have to - // do autoref. But in the autoderef steps, the `&_` self type comes - // up *before* the `Vec<_>` self type. - // - // On the other hand, we don't want to just pick any by-value method - // before any by-autoref method; it's just that we need to consider - // the methods by autoderef order of *receiver types*, not *self - // types*. 
- - let deref_chain = autoderef_method_receiver(db, krate, ty); - for i in 0..deref_chain.len() { - if iterate_method_candidates_with_autoref( - &deref_chain[i..], - db, - env.clone(), - krate, - traits_in_scope, - name, - callback, - ) { - return true; - } - } - false - } - LookupMode::Path => { - // No autoderef for path lookups - iterate_method_candidates_for_self_ty( - &ty, - db, - env, - krate, - traits_in_scope, - name, - callback, - ) - } - } -} - -fn iterate_method_candidates_with_autoref( - deref_chain: &[Canonical], - db: &dyn HirDatabase, - env: Arc, - krate: CrateId, - traits_in_scope: &FxHashSet, - name: Option<&Name>, - mut callback: &mut dyn FnMut(&Ty, AssocItemId) -> bool, -) -> bool { - if iterate_method_candidates_by_receiver( - &deref_chain[0], - &deref_chain[1..], - db, - env.clone(), - krate, - &traits_in_scope, - name, - &mut callback, - ) { - return true; - } - let refed = Canonical { - kinds: deref_chain[0].kinds.clone(), - value: Ty::apply_one(TypeCtor::Ref(Mutability::Shared), deref_chain[0].value.clone()), - }; - if iterate_method_candidates_by_receiver( - &refed, - deref_chain, - db, - env.clone(), - krate, - &traits_in_scope, - name, - &mut callback, - ) { - return true; - } - let ref_muted = Canonical { - kinds: deref_chain[0].kinds.clone(), - value: Ty::apply_one(TypeCtor::Ref(Mutability::Mut), deref_chain[0].value.clone()), - }; - if iterate_method_candidates_by_receiver( - &ref_muted, - deref_chain, - db, - env, - krate, - &traits_in_scope, - name, - &mut callback, - ) { - return true; - } - false -} - -fn iterate_method_candidates_by_receiver( - receiver_ty: &Canonical, - rest_of_deref_chain: &[Canonical], - db: &dyn HirDatabase, - env: Arc, - krate: CrateId, - traits_in_scope: &FxHashSet, - name: Option<&Name>, - mut callback: &mut dyn FnMut(&Ty, AssocItemId) -> bool, -) -> bool { - // We're looking for methods with *receiver* type receiver_ty. 
These could - // be found in any of the derefs of receiver_ty, so we have to go through - // that. - for self_ty in std::iter::once(receiver_ty).chain(rest_of_deref_chain) { - if iterate_inherent_methods(self_ty, db, name, Some(receiver_ty), krate, &mut callback) { - return true; - } - } - for self_ty in std::iter::once(receiver_ty).chain(rest_of_deref_chain) { - if iterate_trait_method_candidates( - self_ty, - db, - env.clone(), - krate, - &traits_in_scope, - name, - Some(receiver_ty), - &mut callback, - ) { - return true; - } - } - false -} - -fn iterate_method_candidates_for_self_ty( - self_ty: &Canonical, - db: &dyn HirDatabase, - env: Arc, - krate: CrateId, - traits_in_scope: &FxHashSet, - name: Option<&Name>, - mut callback: &mut dyn FnMut(&Ty, AssocItemId) -> bool, -) -> bool { - if iterate_inherent_methods(self_ty, db, name, None, krate, &mut callback) { - return true; - } - iterate_trait_method_candidates(self_ty, db, env, krate, traits_in_scope, name, None, callback) -} - -fn iterate_trait_method_candidates( - self_ty: &Canonical, - db: &dyn HirDatabase, - env: Arc, - krate: CrateId, - traits_in_scope: &FxHashSet, - name: Option<&Name>, - receiver_ty: Option<&Canonical>, - callback: &mut dyn FnMut(&Ty, AssocItemId) -> bool, -) -> bool { - // if ty is `dyn Trait`, the trait doesn't need to be in scope - let inherent_trait = - self_ty.value.dyn_trait().into_iter().flat_map(|t| all_super_traits(db.upcast(), t)); - let env_traits = if let Ty::Placeholder(_) = self_ty.value { - // if we have `T: Trait` in the param env, the trait doesn't need to be in scope - env.trait_predicates_for_self_ty(&self_ty.value) - .map(|tr| tr.trait_) - .flat_map(|t| all_super_traits(db.upcast(), t)) - .collect() - } else { - Vec::new() - }; - let traits = - inherent_trait.chain(env_traits.into_iter()).chain(traits_in_scope.iter().copied()); - 'traits: for t in traits { - let data = db.trait_data(t); - - // we'll be lazy about checking whether the type implements the - // trait, 
but if we find out it doesn't, we'll skip the rest of the - // iteration - let mut known_implemented = false; - for (_name, item) in data.items.iter() { - if !is_valid_candidate(db, name, receiver_ty, *item, self_ty) { - continue; - } - if !known_implemented { - let goal = generic_implements_goal(db, env.clone(), t, self_ty.clone()); - if db.trait_solve(krate, goal).is_none() { - continue 'traits; - } - } - known_implemented = true; - if callback(&self_ty.value, *item) { - return true; - } - } - } - false -} - -fn iterate_inherent_methods( - self_ty: &Canonical, - db: &dyn HirDatabase, - name: Option<&Name>, - receiver_ty: Option<&Canonical>, - krate: CrateId, - callback: &mut dyn FnMut(&Ty, AssocItemId) -> bool, -) -> bool { - let def_crates = match self_ty.value.def_crates(db, krate) { - Some(k) => k, - None => return false, - }; - for krate in def_crates { - let impls = db.inherent_impls_in_crate(krate); - - for &impl_def in impls.for_self_ty(&self_ty.value) { - for &item in db.impl_data(impl_def).items.iter() { - if !is_valid_candidate(db, name, receiver_ty, item, self_ty) { - continue; - } - // we have to check whether the self type unifies with the type - // that the impl is for. If we have a receiver type, this - // already happens in `is_valid_candidate` above; if not, we - // check it here - if receiver_ty.is_none() && inherent_impl_substs(db, impl_def, self_ty).is_none() { - test_utils::mark::hit!(impl_self_type_match_without_receiver); - continue; - } - if callback(&self_ty.value, item) { - return true; - } - } - } - } - false -} - -/// Returns the self type for the index trait call. 
-pub fn resolve_indexing_op( - db: &dyn HirDatabase, - ty: &Canonical, - env: Arc, - krate: CrateId, - index_trait: TraitId, -) -> Option> { - let ty = InEnvironment { value: ty.clone(), environment: env.clone() }; - let deref_chain = autoderef_method_receiver(db, krate, ty); - for ty in deref_chain { - let goal = generic_implements_goal(db, env.clone(), index_trait, ty.clone()); - if db.trait_solve(krate, goal).is_some() { - return Some(ty); - } - } - None -} - -fn is_valid_candidate( - db: &dyn HirDatabase, - name: Option<&Name>, - receiver_ty: Option<&Canonical>, - item: AssocItemId, - self_ty: &Canonical, -) -> bool { - match item { - AssocItemId::FunctionId(m) => { - let data = db.function_data(m); - if let Some(name) = name { - if &data.name != name { - return false; - } - } - if let Some(receiver_ty) = receiver_ty { - if !data.has_self_param { - return false; - } - let transformed_receiver_ty = match transform_receiver_ty(db, m, self_ty) { - Some(ty) => ty, - None => return false, - }; - if transformed_receiver_ty != receiver_ty.value { - return false; - } - } - true - } - AssocItemId::ConstId(c) => { - let data = db.const_data(c); - name.map_or(true, |name| data.name.as_ref() == Some(name)) && receiver_ty.is_none() - } - _ => false, - } -} - -pub(crate) fn inherent_impl_substs( - db: &dyn HirDatabase, - impl_id: ImplId, - self_ty: &Canonical, -) -> Option { - // we create a var for each type parameter of the impl; we need to keep in - // mind here that `self_ty` might have vars of its own - let vars = Substs::build_for_def(db, impl_id) - .fill_with_bound_vars(DebruijnIndex::INNERMOST, self_ty.kinds.len()) - .build(); - let self_ty_with_vars = db.impl_self_ty(impl_id).subst(&vars); - let mut kinds = self_ty.kinds.to_vec(); - kinds.extend(iter::repeat(TyKind::General).take(vars.len())); - let tys = Canonical { kinds: kinds.into(), value: (self_ty_with_vars, self_ty.value.clone()) }; - let substs = super::infer::unify(&tys); - // We only want the substs for 
the vars we added, not the ones from self_ty. - // Also, if any of the vars we added are still in there, we replace them by - // Unknown. I think this can only really happen if self_ty contained - // Unknown, and in that case we want the result to contain Unknown in those - // places again. - substs.map(|s| fallback_bound_vars(s.suffix(vars.len()), self_ty.kinds.len())) -} - -/// This replaces any 'free' Bound vars in `s` (i.e. those with indices past -/// num_vars_to_keep) by `Ty::Unknown`. -fn fallback_bound_vars(s: Substs, num_vars_to_keep: usize) -> Substs { - s.fold_binders( - &mut |ty, binders| { - if let Ty::Bound(bound) = &ty { - if bound.index >= num_vars_to_keep && bound.debruijn >= binders { - Ty::Unknown - } else { - ty - } - } else { - ty - } - }, - DebruijnIndex::INNERMOST, - ) -} - -fn transform_receiver_ty( - db: &dyn HirDatabase, - function_id: FunctionId, - self_ty: &Canonical, -) -> Option { - let substs = match function_id.lookup(db.upcast()).container { - AssocContainerId::TraitId(_) => Substs::build_for_def(db, function_id) - .push(self_ty.value.clone()) - .fill_with_unknown() - .build(), - AssocContainerId::ImplId(impl_id) => inherent_impl_substs(db, impl_id, &self_ty)?, - AssocContainerId::ContainerId(_) => unreachable!(), - }; - let sig = db.callable_item_signature(function_id.into()); - Some(sig.value.params()[0].clone().subst_bound_vars(&substs)) -} - -pub fn implements_trait( - ty: &Canonical, - db: &dyn HirDatabase, - env: Arc, - krate: CrateId, - trait_: TraitId, -) -> bool { - let goal = generic_implements_goal(db, env, trait_, ty.clone()); - let solution = db.trait_solve(krate, goal); - - solution.is_some() -} - -/// This creates Substs for a trait with the given Self type and type variables -/// for all other parameters, to query Chalk with it. 
-fn generic_implements_goal( - db: &dyn HirDatabase, - env: Arc, - trait_: TraitId, - self_ty: Canonical, -) -> Canonical> { - let mut kinds = self_ty.kinds.to_vec(); - let substs = super::Substs::build_for_def(db, trait_) - .push(self_ty.value) - .fill_with_bound_vars(DebruijnIndex::INNERMOST, kinds.len()) - .build(); - kinds.extend(iter::repeat(TyKind::General).take(substs.len() - 1)); - let trait_ref = TraitRef { trait_, substs }; - let obligation = super::Obligation::Trait(trait_ref); - Canonical { kinds: kinds.into(), value: InEnvironment::new(env, obligation) } -} - -fn autoderef_method_receiver( - db: &dyn HirDatabase, - krate: CrateId, - ty: InEnvironment>, -) -> Vec> { - let mut deref_chain: Vec<_> = autoderef::autoderef(db, Some(krate), ty).collect(); - // As a last step, we can do array unsizing (that's the only unsizing that rustc does for method receivers!) - if let Some(Ty::Apply(ApplicationTy { ctor: TypeCtor::Array, parameters })) = - deref_chain.last().map(|ty| &ty.value) - { - let kinds = deref_chain.last().unwrap().kinds.clone(); - let unsized_ty = Ty::apply(TypeCtor::Slice, parameters.clone()); - deref_chain.push(Canonical { value: unsized_ty, kinds }) - } - deref_chain -} diff --git a/crates/ra_hir_ty/src/test_db.rs b/crates/ra_hir_ty/src/test_db.rs deleted file mode 100644 index a1714ff0fc..0000000000 --- a/crates/ra_hir_ty/src/test_db.rs +++ /dev/null @@ -1,136 +0,0 @@ -//! Database used for testing `hir`. 
- -use std::{ - fmt, panic, - sync::{Arc, Mutex}, -}; - -use hir_def::{db::DefDatabase, ModuleId}; -use hir_expand::db::AstDatabase; -use ra_db::{salsa, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast}; -use ra_syntax::TextRange; -use rustc_hash::{FxHashMap, FxHashSet}; -use test_utils::extract_annotations; - -#[salsa::database( - ra_db::SourceDatabaseExtStorage, - ra_db::SourceDatabaseStorage, - hir_expand::db::AstDatabaseStorage, - hir_def::db::InternDatabaseStorage, - hir_def::db::DefDatabaseStorage, - crate::db::HirDatabaseStorage -)] -#[derive(Default)] -pub struct TestDB { - storage: salsa::Storage, - events: Mutex>>, -} -impl fmt::Debug for TestDB { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("TestDB").finish() - } -} - -impl Upcast for TestDB { - fn upcast(&self) -> &(dyn AstDatabase + 'static) { - &*self - } -} - -impl Upcast for TestDB { - fn upcast(&self) -> &(dyn DefDatabase + 'static) { - &*self - } -} - -impl salsa::Database for TestDB { - fn salsa_event(&self, event: salsa::Event) { - let mut events = self.events.lock().unwrap(); - if let Some(events) = &mut *events { - events.push(event); - } - } -} - -impl salsa::ParallelDatabase for TestDB { - fn snapshot(&self) -> salsa::Snapshot { - salsa::Snapshot::new(TestDB { - storage: self.storage.snapshot(), - events: Default::default(), - }) - } -} - -impl panic::RefUnwindSafe for TestDB {} - -impl FileLoader for TestDB { - fn file_text(&self, file_id: FileId) -> Arc { - FileLoaderDelegate(self).file_text(file_id) - } - fn resolve_path(&self, anchor: FileId, path: &str) -> Option { - FileLoaderDelegate(self).resolve_path(anchor, path) - } - fn relevant_crates(&self, file_id: FileId) -> Arc> { - FileLoaderDelegate(self).relevant_crates(file_id) - } -} - -impl TestDB { - pub(crate) fn module_for_file(&self, file_id: FileId) -> ModuleId { - for &krate in self.relevant_crates(file_id).iter() { - let crate_def_map = self.crate_def_map(krate); - for 
(local_id, data) in crate_def_map.modules.iter() { - if data.origin.file_id() == Some(file_id) { - return ModuleId { krate, local_id }; - } - } - } - panic!("Can't find module for file") - } - - pub(crate) fn extract_annotations(&self) -> FxHashMap> { - let mut files = Vec::new(); - let crate_graph = self.crate_graph(); - for krate in crate_graph.iter() { - let crate_def_map = self.crate_def_map(krate); - for (module_id, _) in crate_def_map.modules.iter() { - let file_id = crate_def_map[module_id].origin.file_id(); - files.extend(file_id) - } - } - files - .into_iter() - .filter_map(|file_id| { - let text = self.file_text(file_id); - let annotations = extract_annotations(&text); - if annotations.is_empty() { - return None; - } - Some((file_id, annotations)) - }) - .collect() - } -} - -impl TestDB { - pub fn log(&self, f: impl FnOnce()) -> Vec { - *self.events.lock().unwrap() = Some(Vec::new()); - f(); - self.events.lock().unwrap().take().unwrap() - } - - pub fn log_executed(&self, f: impl FnOnce()) -> Vec { - let events = self.log(f); - events - .into_iter() - .filter_map(|e| match e.kind { - // This pretty horrible, but `Debug` is the only way to inspect - // QueryDescriptor at the moment. 
- salsa::EventKind::WillExecute { database_key } => { - Some(format!("{:?}", database_key.debug(self))) - } - _ => None, - }) - .collect() - } -} diff --git a/crates/ra_hir_ty/src/tests.rs b/crates/ra_hir_ty/src/tests.rs deleted file mode 100644 index 016e689fff..0000000000 --- a/crates/ra_hir_ty/src/tests.rs +++ /dev/null @@ -1,359 +0,0 @@ -mod never_type; -mod coercion; -mod regression; -mod simple; -mod patterns; -mod traits; -mod method_resolution; -mod macros; -mod display_source_code; - -use std::sync::Arc; - -use expect::Expect; -use hir_def::{ - body::{BodySourceMap, SyntheticSyntax}, - child_by_source::ChildBySource, - db::DefDatabase, - item_scope::ItemScope, - keys, - nameres::CrateDefMap, - AssocItemId, DefWithBodyId, LocalModuleId, Lookup, ModuleDefId, -}; -use hir_expand::{db::AstDatabase, InFile}; -use ra_db::{fixture::WithFixture, FileRange, SourceDatabase, SourceDatabaseExt}; -use ra_syntax::{ - algo, - ast::{self, AstNode}, - SyntaxNode, -}; -use stdx::format_to; - -use crate::{ - db::HirDatabase, display::HirDisplay, infer::TypeMismatch, test_db::TestDB, InferenceResult, Ty, -}; - -// These tests compare the inference results for all expressions in a file -// against snapshots of the expected results using expect. Use -// `env UPDATE_EXPECT=1 cargo test -p ra_hir_ty` to update the snapshots. 
- -fn setup_tracing() -> tracing::subscriber::DefaultGuard { - use tracing_subscriber::{layer::SubscriberExt, EnvFilter, Registry}; - use tracing_tree::HierarchicalLayer; - let filter = EnvFilter::from_env("CHALK_DEBUG"); - let layer = HierarchicalLayer::default() - .with_indent_lines(true) - .with_ansi(false) - .with_indent_amount(2) - .with_writer(std::io::stderr); - let subscriber = Registry::default().with(filter).with(layer); - tracing::subscriber::set_default(subscriber) -} - -fn check_types(ra_fixture: &str) { - check_types_impl(ra_fixture, false) -} - -fn check_types_source_code(ra_fixture: &str) { - check_types_impl(ra_fixture, true) -} - -fn check_types_impl(ra_fixture: &str, display_source: bool) { - let _tracing = setup_tracing(); - let db = TestDB::with_files(ra_fixture); - let mut checked_one = false; - for (file_id, annotations) in db.extract_annotations() { - for (range, expected) in annotations { - let ty = type_at_range(&db, FileRange { file_id, range }); - let actual = if display_source { - let module = db.module_for_file(file_id); - ty.display_source_code(&db, module).unwrap() - } else { - ty.display(&db).to_string() - }; - assert_eq!(expected, actual); - checked_one = true; - } - } - assert!(checked_one, "no `//^` annotations found"); -} - -fn type_at_range(db: &TestDB, pos: FileRange) -> Ty { - let file = db.parse(pos.file_id).ok().unwrap(); - let expr = algo::find_node_at_range::(file.syntax(), pos.range).unwrap(); - let fn_def = expr.syntax().ancestors().find_map(ast::Fn::cast).unwrap(); - let module = db.module_for_file(pos.file_id); - let func = *module.child_by_source(db)[keys::FUNCTION] - .get(&InFile::new(pos.file_id.into(), fn_def)) - .unwrap(); - - let (_body, source_map) = db.body_with_source_map(func.into()); - if let Some(expr_id) = source_map.node_expr(InFile::new(pos.file_id.into(), &expr)) { - let infer = db.infer(func.into()); - return infer[expr_id].clone(); - } - panic!("Can't find expression") -} - -fn infer(ra_fixture: 
&str) -> String { - infer_with_mismatches(ra_fixture, false) -} - -fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String { - let _tracing = setup_tracing(); - let (db, file_id) = TestDB::with_single_file(content); - - let mut buf = String::new(); - - let mut infer_def = |inference_result: Arc, - body_source_map: Arc| { - let mut types: Vec<(InFile, &Ty)> = Vec::new(); - let mut mismatches: Vec<(InFile, &TypeMismatch)> = Vec::new(); - - for (pat, ty) in inference_result.type_of_pat.iter() { - let syntax_ptr = match body_source_map.pat_syntax(pat) { - Ok(sp) => { - let root = db.parse_or_expand(sp.file_id).unwrap(); - sp.map(|ptr| { - ptr.either( - |it| it.to_node(&root).syntax().clone(), - |it| it.to_node(&root).syntax().clone(), - ) - }) - } - Err(SyntheticSyntax) => continue, - }; - types.push((syntax_ptr, ty)); - } - - for (expr, ty) in inference_result.type_of_expr.iter() { - let node = match body_source_map.expr_syntax(expr) { - Ok(sp) => { - let root = db.parse_or_expand(sp.file_id).unwrap(); - sp.map(|ptr| ptr.to_node(&root).syntax().clone()) - } - Err(SyntheticSyntax) => continue, - }; - types.push((node.clone(), ty)); - if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr) { - mismatches.push((node, mismatch)); - } - } - - // sort ranges for consistency - types.sort_by_key(|(node, _)| { - let range = node.value.text_range(); - (range.start(), range.end()) - }); - for (node, ty) in &types { - let (range, text) = if let Some(self_param) = ast::SelfParam::cast(node.value.clone()) { - (self_param.self_token().unwrap().text_range(), "self".to_string()) - } else { - (node.value.text_range(), node.value.text().to_string().replace("\n", " ")) - }; - let macro_prefix = if node.file_id != file_id.into() { "!" 
} else { "" }; - format_to!( - buf, - "{}{:?} '{}': {}\n", - macro_prefix, - range, - ellipsize(text, 15), - ty.display(&db) - ); - } - if include_mismatches { - mismatches.sort_by_key(|(node, _)| { - let range = node.value.text_range(); - (range.start(), range.end()) - }); - for (src_ptr, mismatch) in &mismatches { - let range = src_ptr.value.text_range(); - let macro_prefix = if src_ptr.file_id != file_id.into() { "!" } else { "" }; - format_to!( - buf, - "{}{:?}: expected {}, got {}\n", - macro_prefix, - range, - mismatch.expected.display(&db), - mismatch.actual.display(&db), - ); - } - } - }; - - let module = db.module_for_file(file_id); - let crate_def_map = db.crate_def_map(module.krate); - - let mut defs: Vec = Vec::new(); - visit_module(&db, &crate_def_map, module.local_id, &mut |it| defs.push(it)); - defs.sort_by_key(|def| match def { - DefWithBodyId::FunctionId(it) => { - let loc = it.lookup(&db); - let tree = db.item_tree(loc.id.file_id); - tree.source(&db, loc.id).syntax().text_range().start() - } - DefWithBodyId::ConstId(it) => { - let loc = it.lookup(&db); - let tree = db.item_tree(loc.id.file_id); - tree.source(&db, loc.id).syntax().text_range().start() - } - DefWithBodyId::StaticId(it) => { - let loc = it.lookup(&db); - let tree = db.item_tree(loc.id.file_id); - tree.source(&db, loc.id).syntax().text_range().start() - } - }); - for def in defs { - let (_body, source_map) = db.body_with_source_map(def); - let infer = db.infer(def); - infer_def(infer, source_map); - } - - buf.truncate(buf.trim_end().len()); - buf -} - -fn visit_module( - db: &TestDB, - crate_def_map: &CrateDefMap, - module_id: LocalModuleId, - cb: &mut dyn FnMut(DefWithBodyId), -) { - visit_scope(db, crate_def_map, &crate_def_map[module_id].scope, cb); - for impl_id in crate_def_map[module_id].scope.impls() { - let impl_data = db.impl_data(impl_id); - for &item in impl_data.items.iter() { - match item { - AssocItemId::FunctionId(it) => { - let def = it.into(); - cb(def); - let body = 
db.body(def); - visit_scope(db, crate_def_map, &body.item_scope, cb); - } - AssocItemId::ConstId(it) => { - let def = it.into(); - cb(def); - let body = db.body(def); - visit_scope(db, crate_def_map, &body.item_scope, cb); - } - AssocItemId::TypeAliasId(_) => (), - } - } - } - - fn visit_scope( - db: &TestDB, - crate_def_map: &CrateDefMap, - scope: &ItemScope, - cb: &mut dyn FnMut(DefWithBodyId), - ) { - for decl in scope.declarations() { - match decl { - ModuleDefId::FunctionId(it) => { - let def = it.into(); - cb(def); - let body = db.body(def); - visit_scope(db, crate_def_map, &body.item_scope, cb); - } - ModuleDefId::ConstId(it) => { - let def = it.into(); - cb(def); - let body = db.body(def); - visit_scope(db, crate_def_map, &body.item_scope, cb); - } - ModuleDefId::StaticId(it) => { - let def = it.into(); - cb(def); - let body = db.body(def); - visit_scope(db, crate_def_map, &body.item_scope, cb); - } - ModuleDefId::TraitId(it) => { - let trait_data = db.trait_data(it); - for &(_, item) in trait_data.items.iter() { - match item { - AssocItemId::FunctionId(it) => cb(it.into()), - AssocItemId::ConstId(it) => cb(it.into()), - AssocItemId::TypeAliasId(_) => (), - } - } - } - ModuleDefId::ModuleId(it) => visit_module(db, crate_def_map, it.local_id, cb), - _ => (), - } - } - } -} - -fn ellipsize(mut text: String, max_len: usize) -> String { - if text.len() <= max_len { - return text; - } - let ellipsis = "..."; - let e_len = ellipsis.len(); - let mut prefix_len = (max_len - e_len) / 2; - while !text.is_char_boundary(prefix_len) { - prefix_len += 1; - } - let mut suffix_len = max_len - e_len - prefix_len; - while !text.is_char_boundary(text.len() - suffix_len) { - suffix_len += 1; - } - text.replace_range(prefix_len..text.len() - suffix_len, ellipsis); - text -} - -#[test] -fn typing_whitespace_inside_a_function_should_not_invalidate_types() { - let (mut db, pos) = TestDB::with_position( - " - //- /lib.rs - fn foo() -> i32 { - <|>1 + 1 - } - ", - ); - { - let events 
= db.log_executed(|| { - let module = db.module_for_file(pos.file_id); - let crate_def_map = db.crate_def_map(module.krate); - visit_module(&db, &crate_def_map, module.local_id, &mut |def| { - db.infer(def); - }); - }); - assert!(format!("{:?}", events).contains("infer")) - } - - let new_text = " - fn foo() -> i32 { - 1 - + - 1 - } - " - .to_string(); - - db.set_file_text(pos.file_id, Arc::new(new_text)); - - { - let events = db.log_executed(|| { - let module = db.module_for_file(pos.file_id); - let crate_def_map = db.crate_def_map(module.krate); - visit_module(&db, &crate_def_map, module.local_id, &mut |def| { - db.infer(def); - }); - }); - assert!(!format!("{:?}", events).contains("infer"), "{:#?}", events) - } -} - -fn check_infer(ra_fixture: &str, expect: Expect) { - let mut actual = infer(ra_fixture); - actual.push('\n'); - expect.assert_eq(&actual); -} - -fn check_infer_with_mismatches(ra_fixture: &str, expect: Expect) { - let mut actual = infer_with_mismatches(ra_fixture, true); - actual.push('\n'); - expect.assert_eq(&actual); -} diff --git a/crates/ra_hir_ty/src/tests/macros.rs b/crates/ra_hir_ty/src/tests/macros.rs deleted file mode 100644 index 24c53eb022..0000000000 --- a/crates/ra_hir_ty/src/tests/macros.rs +++ /dev/null @@ -1,787 +0,0 @@ -use std::fs; - -use expect::expect; -use test_utils::project_dir; - -use super::{check_infer, check_types}; - -#[test] -fn cfg_impl_def() { - check_types( - r#" -//- /main.rs crate:main deps:foo cfg:test -use foo::S as T; -struct S; - -#[cfg(test)] -impl S { - fn foo1(&self) -> i32 { 0 } -} - -#[cfg(not(test))] -impl S { - fn foo2(&self) -> i32 { 0 } -} - -fn test() { - let t = (S.foo1(), S.foo2(), T.foo3(), T.foo4()); - t; -} //^ (i32, {unknown}, i32, {unknown}) - -//- /foo.rs crate:foo -struct S; - -#[cfg(not(test))] -impl S { - fn foo3(&self) -> i32 { 0 } -} - -#[cfg(test)] -impl S { - fn foo4(&self) -> i32 { 0 } -} -"#, - ); -} - -#[test] -fn infer_macros_expanded() { - check_infer( - r#" - struct Foo(Vec); - - 
macro_rules! foo { - ($($item:expr),*) => { - { - Foo(vec![$($item,)*]) - } - }; - } - - fn main() { - let x = foo!(1,2); - } - "#, - expect![[r#" - !0..17 '{Foo(v...,2,])}': Foo - !1..4 'Foo': Foo({unknown}) -> Foo - !1..16 'Foo(vec![1,2,])': Foo - !5..15 'vec![1,2,]': {unknown} - 155..181 '{ ...,2); }': () - 165..166 'x': Foo - "#]], - ); -} - -#[test] -fn infer_legacy_textual_scoped_macros_expanded() { - check_infer( - r#" - struct Foo(Vec); - - #[macro_use] - mod m { - macro_rules! foo { - ($($item:expr),*) => { - { - Foo(vec![$($item,)*]) - } - }; - } - } - - fn main() { - let x = foo!(1,2); - let y = crate::foo!(1,2); - } - "#, - expect![[r#" - !0..17 '{Foo(v...,2,])}': Foo - !1..4 'Foo': Foo({unknown}) -> Foo - !1..16 'Foo(vec![1,2,])': Foo - !5..15 'vec![1,2,]': {unknown} - 194..250 '{ ...,2); }': () - 204..205 'x': Foo - 227..228 'y': {unknown} - 231..247 'crate:...!(1,2)': {unknown} - "#]], - ); -} - -#[test] -fn infer_path_qualified_macros_expanded() { - check_infer( - r#" - #[macro_export] - macro_rules! foo { - () => { 42i32 } - } - - mod m { - pub use super::foo as bar; - } - - fn main() { - let x = crate::foo!(); - let y = m::bar!(); - } - "#, - expect![[r#" - !0..5 '42i32': i32 - !0..5 '42i32': i32 - 110..163 '{ ...!(); }': () - 120..121 'x': i32 - 147..148 'y': i32 - "#]], - ); -} - -#[test] -fn expr_macro_expanded_in_various_places() { - check_infer( - r#" - macro_rules! 
spam { - () => (1isize); - } - - fn spam() { - spam!(); - (spam!()); - spam!().spam(spam!()); - for _ in spam!() {} - || spam!(); - while spam!() {} - break spam!(); - return spam!(); - match spam!() { - _ if spam!() => spam!(), - } - spam!()(spam!()); - Spam { spam: spam!() }; - spam!()[spam!()]; - await spam!(); - spam!() as usize; - &spam!(); - -spam!(); - spam!()..spam!(); - spam!() + spam!(); - } - "#, - expect![[r#" - !0..6 '1isize': isize - !0..6 '1isize': isize - !0..6 '1isize': isize - !0..6 '1isize': isize - !0..6 '1isize': isize - !0..6 '1isize': isize - !0..6 '1isize': isize - !0..6 '1isize': isize - !0..6 '1isize': isize - !0..6 '1isize': isize - !0..6 '1isize': isize - !0..6 '1isize': isize - !0..6 '1isize': isize - !0..6 '1isize': isize - !0..6 '1isize': isize - !0..6 '1isize': isize - !0..6 '1isize': isize - !0..6 '1isize': isize - !0..6 '1isize': isize - !0..6 '1isize': isize - !0..6 '1isize': isize - !0..6 '1isize': isize - !0..6 '1isize': isize - !0..6 '1isize': isize - !0..6 '1isize': isize - 53..456 '{ ...!(); }': () - 87..108 'spam!(...am!())': {unknown} - 114..133 'for _ ...!() {}': () - 118..119 '_': {unknown} - 131..133 '{}': () - 138..148 '|| spam!()': || -> isize - 154..170 'while ...!() {}': () - 168..170 '{}': () - 175..188 'break spam!()': ! - 194..208 'return spam!()': ! - 214..268 'match ... }': isize - 238..239 '_': isize - 273..289 'spam!(...am!())': {unknown} - 295..317 'Spam {...m!() }': {unknown} - 323..339 'spam!(...am!()]': {unknown} - 364..380 'spam!(... usize': usize - 386..394 '&spam!()': &isize - 400..408 '-spam!()': isize - 414..430 'spam!(...pam!()': {unknown} - 436..453 'spam!(...pam!()': isize - "#]], - ); -} - -#[test] -fn infer_type_value_macro_having_same_name() { - check_infer( - r#" - #[macro_export] - macro_rules! 
foo { - () => { - mod foo { - pub use super::foo; - } - }; - ($x:tt) => { - $x - }; - } - - foo!(); - - fn foo() { - let foo = foo::foo!(42i32); - } - "#, - expect![[r#" - !0..5 '42i32': i32 - 170..205 '{ ...32); }': () - 180..183 'foo': i32 - "#]], - ); -} - -#[test] -fn processes_impls_generated_by_macros() { - check_types( - r#" -macro_rules! m { - ($ident:ident) => (impl Trait for $ident {}) -} -trait Trait { fn foo(self) -> u128 {} } -struct S; -m!(S); -fn test() { S.foo(); } - //^ u128 -"#, - ); -} - -#[test] -fn infer_assoc_items_generated_by_macros() { - check_types( - r#" -macro_rules! m { - () => (fn foo(&self) -> u128 {0}) -} -struct S; -impl S { - m!(); -} - -fn test() { S.foo(); } - //^ u128 -"#, - ); -} - -#[test] -fn infer_assoc_items_generated_by_macros_chain() { - check_types( - r#" -macro_rules! m_inner { - () => {fn foo(&self) -> u128 {0}} -} -macro_rules! m { - () => {m_inner!();} -} - -struct S; -impl S { - m!(); -} - -fn test() { S.foo(); } - //^ u128 -"#, - ); -} - -#[test] -fn infer_macro_with_dollar_crate_is_correct_in_expr() { - check_types( - r#" -//- /main.rs crate:main deps:foo -fn test() { - let x = (foo::foo!(1), foo::foo!(2)); - x; -} //^ (i32, usize) - -//- /lib.rs crate:foo -#[macro_export] -macro_rules! foo { - (1) => { $crate::bar!() }; - (2) => { 1 + $crate::baz() }; -} - -#[macro_export] -macro_rules! bar { - () => { 42 } -} - -pub fn baz() -> usize { 31usize } -"#, - ); -} - -#[test] -fn infer_macro_with_dollar_crate_is_correct_in_trait_associate_type() { - check_types( - r#" -//- /main.rs crate:main deps:foo -use foo::Trait; - -fn test() { - let msg = foo::Message(foo::MessageRef); - let r = msg.deref(); - r; - //^ &MessageRef -} - -//- /lib.rs crate:foo -pub struct MessageRef; -pub struct Message(MessageRef); - -pub trait Trait { - type Target; - fn deref(&self) -> &Self::Target; -} - -#[macro_export] -macro_rules! 
expand { - () => { - impl Trait for Message { - type Target = $crate::MessageRef; - fn deref(&self) -> &Self::Target { - &self.0 - } - } - } -} - -expand!(); -"#, - ); -} - -#[test] -fn infer_type_value_non_legacy_macro_use_as() { - check_infer( - r#" - mod m { - macro_rules! _foo { - ($x:ident) => { type $x = u64; } - } - pub(crate) use _foo as foo; - } - - m::foo!(foo); - use foo as bar; - fn f() -> bar { 0 } - fn main() { - let _a = f(); - } - "#, - expect![[r#" - 158..163 '{ 0 }': u64 - 160..161 '0': u64 - 174..196 '{ ...f(); }': () - 184..186 '_a': u64 - 190..191 'f': fn f() -> u64 - 190..193 'f()': u64 - "#]], - ); -} - -#[test] -fn infer_local_macro() { - check_infer( - r#" - fn main() { - macro_rules! foo { - () => { 1usize } - } - let _a = foo!(); - } - "#, - expect![[r#" - !0..6 '1usize': usize - 10..89 '{ ...!(); }': () - 16..65 'macro_... }': {unknown} - 74..76 '_a': usize - "#]], - ); -} - -#[test] -fn infer_local_inner_macros() { - check_types( - r#" -//- /main.rs crate:main deps:foo -fn test() { - let x = foo::foo!(1); - x; -} //^ i32 - -//- /lib.rs crate:foo -#[macro_export(local_inner_macros)] -macro_rules! foo { - (1) => { bar!() }; -} - -#[macro_export] -macro_rules! bar { - () => { 42 } -} - -"#, - ); -} - -#[test] -fn infer_builtin_macros_line() { - check_infer( - r#" - #[rustc_builtin_macro] - macro_rules! line {() => {}} - - fn main() { - let x = line!(); - } - "#, - expect![[r#" - !0..1 '0': i32 - 63..87 '{ ...!(); }': () - 73..74 'x': i32 - "#]], - ); -} - -#[test] -fn infer_builtin_macros_file() { - check_infer( - r#" - #[rustc_builtin_macro] - macro_rules! file {() => {}} - - fn main() { - let x = file!(); - } - "#, - expect![[r#" - !0..2 '""': &str - 63..87 '{ ...!(); }': () - 73..74 'x': &str - "#]], - ); -} - -#[test] -fn infer_builtin_macros_column() { - check_infer( - r#" - #[rustc_builtin_macro] - macro_rules! 
column {() => {}} - - fn main() { - let x = column!(); - } - "#, - expect![[r#" - !0..1 '0': i32 - 65..91 '{ ...!(); }': () - 75..76 'x': i32 - "#]], - ); -} - -#[test] -fn infer_builtin_macros_concat() { - check_infer( - r#" - #[rustc_builtin_macro] - macro_rules! concat {() => {}} - - fn main() { - let x = concat!("hello", concat!("world", "!")); - } - "#, - expect![[r#" - !0..13 '"helloworld!"': &str - 65..121 '{ ...")); }': () - 75..76 'x': &str - "#]], - ); -} - -#[test] -fn infer_builtin_macros_include() { - check_types( - r#" -//- /main.rs -#[rustc_builtin_macro] -macro_rules! include {() => {}} - -include!("foo.rs"); - -fn main() { - bar(); -} //^ u32 - -//- /foo.rs -fn bar() -> u32 {0} -"#, - ); -} - -#[test] -#[ignore] -fn include_accidentally_quadratic() { - let file = project_dir().join("crates/ra_syntax/test_data/accidentally_quadratic"); - let big_file = fs::read_to_string(file).unwrap(); - let big_file = vec![big_file; 10].join("\n"); - - let fixture = r#" -//- /main.rs -#[rustc_builtin_macro] -macro_rules! include {() => {}} - -include!("foo.rs"); - -fn main() { - RegisterBlock { }; - //^ RegisterBlock -} - "#; - let fixture = format!("{}\n//- /foo.rs\n{}", fixture, big_file); - check_types(&fixture); -} - -#[test] -fn infer_builtin_macros_include_concat() { - check_types( - r#" -//- /main.rs -#[rustc_builtin_macro] -macro_rules! include {() => {}} - -#[rustc_builtin_macro] -macro_rules! concat {() => {}} - -include!(concat!("f", "oo.rs")); - -fn main() { - bar(); -} //^ u32 - -//- /foo.rs -fn bar() -> u32 {0} -"#, - ); -} - -#[test] -fn infer_builtin_macros_include_concat_with_bad_env_should_failed() { - check_types( - r#" -//- /main.rs -#[rustc_builtin_macro] -macro_rules! include {() => {}} - -#[rustc_builtin_macro] -macro_rules! concat {() => {}} - -#[rustc_builtin_macro] -macro_rules! 
env {() => {}} - -include!(concat!(env!("OUT_DIR"), "/foo.rs")); - -fn main() { - bar(); -} //^ {unknown} - -//- /foo.rs -fn bar() -> u32 {0} -"#, - ); -} - -#[test] -fn infer_builtin_macros_include_itself_should_failed() { - check_types( - r#" -#[rustc_builtin_macro] -macro_rules! include {() => {}} - -include!("main.rs"); - -fn main() { - 0 -} //^ i32 -"#, - ); -} - -#[test] -fn infer_builtin_macros_concat_with_lazy() { - check_infer( - r#" - macro_rules! hello {() => {"hello"}} - - #[rustc_builtin_macro] - macro_rules! concat {() => {}} - - fn main() { - let x = concat!(hello!(), concat!("world", "!")); - } - "#, - expect![[r#" - !0..13 '"helloworld!"': &str - 103..160 '{ ...")); }': () - 113..114 'x': &str - "#]], - ); -} - -#[test] -fn infer_builtin_macros_env() { - check_infer( - r#" - //- /main.rs env:foo=bar - #[rustc_builtin_macro] - macro_rules! env {() => {}} - - fn main() { - let x = env!("foo"); - } - "#, - expect![[r#" - !0..22 '"__RA_...TED__"': &str - 62..90 '{ ...o"); }': () - 72..73 'x': &str - "#]], - ); -} - -#[test] -fn infer_derive_clone_simple() { - check_types( - r#" -//- /main.rs crate:main deps:core -#[derive(Clone)] -struct S; -fn test() { - S.clone(); -} //^ S - -//- /lib.rs crate:core -#[prelude_import] -use clone::*; -mod clone { - trait Clone { - fn clone(&self) -> Self; - } -} -"#, - ); -} - -#[test] -fn infer_derive_clone_in_core() { - check_types( - r#" -//- /lib.rs crate:core -#[prelude_import] -use clone::*; -mod clone { - trait Clone { - fn clone(&self) -> Self; - } -} -#[derive(Clone)] -pub struct S; - -//- /main.rs crate:main deps:core -use core::S; -fn test() { - S.clone(); -} //^ S -"#, - ); -} - -#[test] -fn infer_derive_clone_with_params() { - check_types( - r#" -//- /main.rs crate:main deps:core -#[derive(Clone)] -struct S; -#[derive(Clone)] -struct Wrapper(T); -struct NonClone; -fn test() { - (Wrapper(S).clone(), Wrapper(NonClone).clone()); - //^ (Wrapper, {unknown}) -} - -//- /lib.rs crate:core -#[prelude_import] -use 
clone::*; -mod clone { - trait Clone { - fn clone(&self) -> Self; - } -} -"#, - ); -} - -#[test] -fn infer_custom_derive_simple() { - // FIXME: this test current now do nothing - check_types( - r#" -//- /main.rs crate:main -use foo::Foo; - -#[derive(Foo)] -struct S{} - -fn test() { - S{}; -} //^ S -"#, - ); -} - -#[test] -fn macro_in_arm() { - check_infer( - r#" - macro_rules! unit { - () => { () }; - } - - fn main() { - let x = match () { - unit!() => 92u32, - }; - } - "#, - expect![[r#" - 51..110 '{ ... }; }': () - 61..62 'x': u32 - 65..107 'match ... }': u32 - 71..73 '()': () - 84..91 'unit!()': () - 95..100 '92u32': u32 - "#]], - ); -} diff --git a/crates/ra_hir_ty/src/tests/simple.rs b/crates/ra_hir_ty/src/tests/simple.rs deleted file mode 100644 index 5a7cf9455b..0000000000 --- a/crates/ra_hir_ty/src/tests/simple.rs +++ /dev/null @@ -1,2218 +0,0 @@ -use expect::expect; - -use super::{check_infer, check_types}; - -#[test] -fn infer_box() { - check_types( - r#" -//- /main.rs crate:main deps:std -fn test() { - let x = box 1; - let t = (x, box x, box &1, box [1]); - t; -} //^ (Box, Box>, Box<&i32>, Box<[i32; _]>) - -//- /std.rs crate:std -#[prelude_import] use prelude::*; -mod prelude {} - -mod boxed { - #[lang = "owned_box"] - pub struct Box { - inner: *mut T, - } -} -"#, - ); -} - -#[test] -fn infer_adt_self() { - check_types( - r#" -enum Nat { Succ(Self), Demo(Nat), Zero } - -fn test() { - let foo: Nat = Nat::Zero; - if let Nat::Succ(x) = foo { - x - } //^ Nat -} -"#, - ); -} - -#[test] -fn self_in_struct_lit() { - check_infer( - r#" - //- /main.rs - struct S { x: T } - - impl S { - fn foo() { - Self { x: 1 }; - } - } - "#, - expect![[r#" - 49..79 '{ ... 
}': () - 59..72 'Self { x: 1 }': S - 69..70 '1': u32 - "#]], - ); -} - -#[test] -fn type_alias_in_struct_lit() { - check_infer( - r#" - //- /main.rs - struct S { x: T } - - type SS = S; - - fn foo() { - SS { x: 1 }; - } - "#, - expect![[r#" - 50..70 '{ ...1 }; }': () - 56..67 'SS { x: 1 }': S - 64..65 '1': u32 - "#]], - ); -} - -#[test] -fn infer_ranges() { - check_types( - r#" -//- /main.rs crate:main deps:core -fn test() { - let a = ..; - let b = 1..; - let c = ..2u32; - let d = 1..2usize; - let e = ..=10; - let f = 'a'..='z'; - - let t = (a, b, c, d, e, f); - t; -} //^ (RangeFull, RangeFrom, RangeTo, Range, RangeToInclusive, RangeInclusive) - -//- /core.rs crate:core -#[prelude_import] use prelude::*; -mod prelude {} - -pub mod ops { - pub struct Range { - pub start: Idx, - pub end: Idx, - } - pub struct RangeFrom { - pub start: Idx, - } - struct RangeFull; - pub struct RangeInclusive { - start: Idx, - end: Idx, - is_empty: u8, - } - pub struct RangeTo { - pub end: Idx, - } - pub struct RangeToInclusive { - pub end: Idx, - } -} -"#, - ); -} - -#[test] -fn infer_while_let() { - check_types( - r#" -enum Option { Some(T), None } - -fn test() { - let foo: Option = None; - while let Option::Some(x) = foo { - x - } //^ f32 -} -"#, - ); -} - -#[test] -fn infer_basics() { - check_infer( - r#" - fn test(a: u32, b: isize, c: !, d: &str) { - a; - b; - c; - d; - 1usize; - 1isize; - "test"; - 1.0f32; - }"#, - expect![[r#" - 8..9 'a': u32 - 16..17 'b': isize - 26..27 'c': ! - 32..33 'd': &str - 41..120 '{ ...f32; }': () - 47..48 'a': u32 - 54..55 'b': isize - 61..62 'c': ! 
- 68..69 'd': &str - 75..81 '1usize': usize - 87..93 '1isize': isize - 99..105 '"test"': &str - 111..117 '1.0f32': f32 - "#]], - ); -} - -#[test] -fn infer_let() { - check_infer( - r#" - fn test() { - let a = 1isize; - let b: usize = 1; - let c = b; - let d: u32; - let e; - let f: i32 = e; - } - "#, - expect![[r#" - 10..117 '{ ...= e; }': () - 20..21 'a': isize - 24..30 '1isize': isize - 40..41 'b': usize - 51..52 '1': usize - 62..63 'c': usize - 66..67 'b': usize - 77..78 'd': u32 - 93..94 'e': i32 - 104..105 'f': i32 - 113..114 'e': i32 - "#]], - ); -} - -#[test] -fn infer_paths() { - check_infer( - r#" - fn a() -> u32 { 1 } - - mod b { - fn c() -> u32 { 1 } - } - - fn test() { - a(); - b::c(); - } - "#, - expect![[r#" - 14..19 '{ 1 }': u32 - 16..17 '1': u32 - 47..52 '{ 1 }': u32 - 49..50 '1': u32 - 66..90 '{ ...c(); }': () - 72..73 'a': fn a() -> u32 - 72..75 'a()': u32 - 81..85 'b::c': fn c() -> u32 - 81..87 'b::c()': u32 - "#]], - ); -} - -#[test] -fn infer_path_type() { - check_infer( - r#" - struct S; - - impl S { - fn foo() -> i32 { 1 } - } - - fn test() { - S::foo(); - ::foo(); - } - "#, - expect![[r#" - 40..45 '{ 1 }': i32 - 42..43 '1': i32 - 59..92 '{ ...o(); }': () - 65..71 'S::foo': fn foo() -> i32 - 65..73 'S::foo()': i32 - 79..87 '::foo': fn foo() -> i32 - 79..89 '::foo()': i32 - "#]], - ); -} - -#[test] -fn infer_struct() { - check_infer( - r#" - struct A { - b: B, - c: C, - } - struct B; - struct C(usize); - - fn test() { - let c = C(1); - B; - let a: A = A { b: B, c: C(1) }; - a.b; - a.c; - } - "#, - expect![[r#" - 71..153 '{ ...a.c; }': () - 81..82 'c': C - 85..86 'C': C(usize) -> C - 85..89 'C(1)': C - 87..88 '1': usize - 95..96 'B': B - 106..107 'a': A - 113..132 'A { b:...C(1) }': A - 120..121 'B': B - 126..127 'C': C(usize) -> C - 126..130 'C(1)': C - 128..129 '1': usize - 138..139 'a': A - 138..141 'a.b': B - 147..148 'a': A - 147..150 'a.c': C - "#]], - ); -} - -#[test] -fn infer_enum() { - check_infer( - r#" - enum E { - V1 { field: u32 }, 
- V2 - } - fn test() { - E::V1 { field: 1 }; - E::V2; - }"#, - expect![[r#" - 51..89 '{ ...:V2; }': () - 57..75 'E::V1 ...d: 1 }': E - 72..73 '1': u32 - 81..86 'E::V2': E - "#]], - ); -} - -#[test] -fn infer_union() { - check_infer( - r#" - union MyUnion { - foo: u32, - bar: f32, - } - - fn test() { - let u = MyUnion { foo: 0 }; - unsafe { baz(u); } - let u = MyUnion { bar: 0.0 }; - unsafe { baz(u); } - } - - unsafe fn baz(u: MyUnion) { - let inner = u.foo; - let inner = u.bar; - } - "#, - expect![[r#" - 57..172 '{ ...); } }': () - 67..68 'u': MyUnion - 71..89 'MyUnio...o: 0 }': MyUnion - 86..87 '0': u32 - 95..113 'unsafe...(u); }': () - 102..113 '{ baz(u); }': () - 104..107 'baz': fn baz(MyUnion) - 104..110 'baz(u)': () - 108..109 'u': MyUnion - 122..123 'u': MyUnion - 126..146 'MyUnio... 0.0 }': MyUnion - 141..144 '0.0': f32 - 152..170 'unsafe...(u); }': () - 159..170 '{ baz(u); }': () - 161..164 'baz': fn baz(MyUnion) - 161..167 'baz(u)': () - 165..166 'u': MyUnion - 188..189 'u': MyUnion - 200..249 '{ ...bar; }': () - 210..215 'inner': u32 - 218..219 'u': MyUnion - 218..223 'u.foo': u32 - 233..238 'inner': f32 - 241..242 'u': MyUnion - 241..246 'u.bar': f32 - "#]], - ); -} - -#[test] -fn infer_refs() { - check_infer( - r#" - fn test(a: &u32, b: &mut u32, c: *const u32, d: *mut u32) { - a; - *a; - &a; - &mut a; - b; - *b; - &b; - c; - *c; - d; - *d; - } - "#, - expect![[r#" - 8..9 'a': &u32 - 17..18 'b': &mut u32 - 30..31 'c': *const u32 - 45..46 'd': *mut u32 - 58..149 '{ ... 
*d; }': () - 64..65 'a': &u32 - 71..73 '*a': u32 - 72..73 'a': &u32 - 79..81 '&a': &&u32 - 80..81 'a': &u32 - 87..93 '&mut a': &mut &u32 - 92..93 'a': &u32 - 99..100 'b': &mut u32 - 106..108 '*b': u32 - 107..108 'b': &mut u32 - 114..116 '&b': &&mut u32 - 115..116 'b': &mut u32 - 122..123 'c': *const u32 - 129..131 '*c': u32 - 130..131 'c': *const u32 - 137..138 'd': *mut u32 - 144..146 '*d': u32 - 145..146 'd': *mut u32 - "#]], - ); -} - -#[test] -fn infer_raw_ref() { - check_infer( - r#" - fn test(a: i32) { - &raw mut a; - &raw const a; - } - "#, - expect![[r#" - 8..9 'a': i32 - 16..53 '{ ...t a; }': () - 22..32 '&raw mut a': *mut i32 - 31..32 'a': i32 - 38..50 '&raw const a': *const i32 - 49..50 'a': i32 - "#]], - ); -} - -#[test] -fn infer_literals() { - check_infer( - r##" - fn test() { - 5i32; - 5f32; - 5f64; - "hello"; - b"bytes"; - 'c'; - b'b'; - 3.14; - 5000; - false; - true; - r#" - //! doc - // non-doc - mod foo {} - "#; - br#"yolo"#; - } - "##, - expect![[r##" - 10..216 '{ ...o"#; }': () - 16..20 '5i32': i32 - 26..30 '5f32': f32 - 36..40 '5f64': f64 - 46..53 '"hello"': &str - 59..67 'b"bytes"': &[u8; _] - 73..76 ''c'': char - 82..86 'b'b'': u8 - 92..96 '3.14': f64 - 102..106 '5000': i32 - 112..117 'false': bool - 123..127 'true': bool - 133..197 'r#" ... 
"#': &str - 203..213 'br#"yolo"#': &[u8; _] - "##]], - ); -} - -#[test] -fn infer_unary_op() { - check_infer( - r#" - enum SomeType {} - - fn test(x: SomeType) { - let b = false; - let c = !b; - let a = 100; - let d: i128 = -a; - let e = -100; - let f = !!!true; - let g = !42; - let h = !10u32; - let j = !a; - -3.14; - !3; - -x; - !x; - -"hello"; - !"hello"; - } - "#, - expect![[r#" - 26..27 'x': SomeType - 39..271 '{ ...lo"; }': () - 49..50 'b': bool - 53..58 'false': bool - 68..69 'c': bool - 72..74 '!b': bool - 73..74 'b': bool - 84..85 'a': i128 - 88..91 '100': i128 - 101..102 'd': i128 - 111..113 '-a': i128 - 112..113 'a': i128 - 123..124 'e': i32 - 127..131 '-100': i32 - 128..131 '100': i32 - 141..142 'f': bool - 145..152 '!!!true': bool - 146..152 '!!true': bool - 147..152 '!true': bool - 148..152 'true': bool - 162..163 'g': i32 - 166..169 '!42': i32 - 167..169 '42': i32 - 179..180 'h': u32 - 183..189 '!10u32': u32 - 184..189 '10u32': u32 - 199..200 'j': i128 - 203..205 '!a': i128 - 204..205 'a': i128 - 211..216 '-3.14': f64 - 212..216 '3.14': f64 - 222..224 '!3': i32 - 223..224 '3': i32 - 230..232 '-x': {unknown} - 231..232 'x': SomeType - 238..240 '!x': {unknown} - 239..240 'x': SomeType - 246..254 '-"hello"': {unknown} - 247..254 '"hello"': &str - 260..268 '!"hello"': {unknown} - 261..268 '"hello"': &str - "#]], - ); -} - -#[test] -fn infer_backwards() { - check_infer( - r#" - fn takes_u32(x: u32) {} - - struct S { i32_field: i32 } - - fn test() -> &mut &f64 { - let a = unknown_function(); - takes_u32(a); - let b = unknown_function(); - S { i32_field: b }; - let c = unknown_function(); - &mut &c - } - "#, - expect![[r#" - 13..14 'x': u32 - 21..23 '{}': () - 77..230 '{ ...t &c }': &mut &f64 - 87..88 'a': u32 - 91..107 'unknow...nction': {unknown} - 91..109 'unknow...tion()': u32 - 115..124 'takes_u32': fn takes_u32(u32) - 115..127 'takes_u32(a)': () - 125..126 'a': u32 - 137..138 'b': i32 - 141..157 'unknow...nction': {unknown} - 141..159 
'unknow...tion()': i32 - 165..183 'S { i3...d: b }': S - 180..181 'b': i32 - 193..194 'c': f64 - 197..213 'unknow...nction': {unknown} - 197..215 'unknow...tion()': f64 - 221..228 '&mut &c': &mut &f64 - 226..228 '&c': &f64 - 227..228 'c': f64 - "#]], - ); -} - -#[test] -fn infer_self() { - check_infer( - r#" - struct S; - - impl S { - fn test(&self) { - self; - } - fn test2(self: &Self) { - self; - } - fn test3() -> Self { - S {} - } - fn test4() -> Self { - Self {} - } - } - "#, - expect![[r#" - 33..37 'self': &S - 39..60 '{ ... }': () - 49..53 'self': &S - 74..78 'self': &S - 87..108 '{ ... }': () - 97..101 'self': &S - 132..152 '{ ... }': S - 142..146 'S {}': S - 176..199 '{ ... }': S - 186..193 'Self {}': S - "#]], - ); -} - -#[test] -fn infer_self_as_path() { - check_infer( - r#" - struct S1; - struct S2(isize); - enum E { - V1, - V2(u32), - } - - impl S1 { - fn test() { - Self; - } - } - impl S2 { - fn test() { - Self(1); - } - } - impl E { - fn test() { - Self::V1; - Self::V2(1); - } - } - "#, - expect![[r#" - 86..107 '{ ... }': () - 96..100 'Self': S1 - 134..158 '{ ... }': () - 144..148 'Self': S2(isize) -> S2 - 144..151 'Self(1)': S2 - 149..150 '1': isize - 184..230 '{ ... }': () - 194..202 'Self::V1': E - 212..220 'Self::V2': V2(u32) -> E - 212..223 'Self::V2(1)': E - 221..222 '1': u32 - "#]], - ); -} - -#[test] -fn infer_binary_op() { - check_infer( - r#" - fn f(x: bool) -> i32 { - 0i32 - } - - fn test() -> bool { - let x = a && b; - let y = true || false; - let z = x == y; - let t = x != y; - let minus_forty: isize = -40isize; - let h = minus_forty <= CONST_2; - let c = f(z || y) + 5; - let d = b; - let g = minus_forty ^= i; - let ten: usize = 10; - let ten_is_eleven = ten == some_num; - - ten < 3 - } - "#, - expect![[r#" - 5..6 'x': bool - 21..33 '{ 0i32 }': i32 - 27..31 '0i32': i32 - 53..369 '{ ... 
< 3 }': bool - 63..64 'x': bool - 67..68 'a': bool - 67..73 'a && b': bool - 72..73 'b': bool - 83..84 'y': bool - 87..91 'true': bool - 87..100 'true || false': bool - 95..100 'false': bool - 110..111 'z': bool - 114..115 'x': bool - 114..120 'x == y': bool - 119..120 'y': bool - 130..131 't': bool - 134..135 'x': bool - 134..140 'x != y': bool - 139..140 'y': bool - 150..161 'minus_forty': isize - 171..179 '-40isize': isize - 172..179 '40isize': isize - 189..190 'h': bool - 193..204 'minus_forty': isize - 193..215 'minus_...ONST_2': bool - 208..215 'CONST_2': isize - 225..226 'c': i32 - 229..230 'f': fn f(bool) -> i32 - 229..238 'f(z || y)': i32 - 229..242 'f(z || y) + 5': i32 - 231..232 'z': bool - 231..237 'z || y': bool - 236..237 'y': bool - 241..242 '5': i32 - 252..253 'd': {unknown} - 256..257 'b': {unknown} - 267..268 'g': () - 271..282 'minus_forty': isize - 271..287 'minus_...y ^= i': () - 286..287 'i': isize - 297..300 'ten': usize - 310..312 '10': usize - 322..335 'ten_is_eleven': bool - 338..341 'ten': usize - 338..353 'ten == some_num': bool - 345..353 'some_num': usize - 360..363 'ten': usize - 360..367 'ten < 3': bool - 366..367 '3': usize - "#]], - ); -} - -#[test] -fn infer_shift_op() { - check_infer( - r#" - fn test() { - 1u32 << 5u8; - 1u32 >> 5u8; - } - "#, - expect![[r#" - 10..47 '{ ...5u8; }': () - 16..20 '1u32': u32 - 16..27 '1u32 << 5u8': u32 - 24..27 '5u8': u8 - 33..37 '1u32': u32 - 33..44 '1u32 >> 5u8': u32 - 41..44 '5u8': u8 - "#]], - ); -} - -#[test] -fn infer_field_autoderef() { - check_infer( - r#" - struct A { - b: B, - } - struct B; - - fn test1(a: A) { - let a1 = a; - a1.b; - let a2 = &a; - a2.b; - let a3 = &mut a; - a3.b; - let a4 = &&&&&&&a; - a4.b; - let a5 = &mut &&mut &&mut a; - a5.b; - } - - fn test2(a1: *const A, a2: *mut A) { - a1.b; - a2.b; - } - "#, - expect![[r#" - 43..44 'a': A - 49..212 '{ ...5.b; }': () - 59..61 'a1': A - 64..65 'a': A - 71..73 'a1': A - 71..75 'a1.b': B - 85..87 'a2': &A - 90..92 '&a': &A - 91..92 
'a': A - 98..100 'a2': &A - 98..102 'a2.b': B - 112..114 'a3': &mut A - 117..123 '&mut a': &mut A - 122..123 'a': A - 129..131 'a3': &mut A - 129..133 'a3.b': B - 143..145 'a4': &&&&&&&A - 148..156 '&&&&&&&a': &&&&&&&A - 149..156 '&&&&&&a': &&&&&&A - 150..156 '&&&&&a': &&&&&A - 151..156 '&&&&a': &&&&A - 152..156 '&&&a': &&&A - 153..156 '&&a': &&A - 154..156 '&a': &A - 155..156 'a': A - 162..164 'a4': &&&&&&&A - 162..166 'a4.b': B - 176..178 'a5': &mut &&mut &&mut A - 181..199 '&mut &...&mut a': &mut &&mut &&mut A - 186..199 '&&mut &&mut a': &&mut &&mut A - 187..199 '&mut &&mut a': &mut &&mut A - 192..199 '&&mut a': &&mut A - 193..199 '&mut a': &mut A - 198..199 'a': A - 205..207 'a5': &mut &&mut &&mut A - 205..209 'a5.b': B - 223..225 'a1': *const A - 237..239 'a2': *mut A - 249..272 '{ ...2.b; }': () - 255..257 'a1': *const A - 255..259 'a1.b': B - 265..267 'a2': *mut A - 265..269 'a2.b': B - "#]], - ); -} - -#[test] -fn infer_argument_autoderef() { - check_infer( - r#" - #[lang = "deref"] - pub trait Deref { - type Target; - fn deref(&self) -> &Self::Target; - } - - struct A(T); - - impl A { - fn foo(&self) -> &T { - &self.0 - } - } - - struct B(T); - - impl Deref for B { - type Target = T; - fn deref(&self) -> &Self::Target { - &self.0 - } - } - - fn test() { - let t = A::foo(&&B(B(A(42)))); - } - "#, - expect![[r#" - 67..71 'self': &Self - 138..142 'self': &A - 150..173 '{ ... }': &T - 160..167 '&self.0': &T - 161..165 'self': &A - 161..167 'self.0': T - 254..258 'self': &B - 277..300 '{ ... 
}': &T - 287..294 '&self.0': &T - 288..292 'self': &B - 288..294 'self.0': T - 314..352 '{ ...))); }': () - 324..325 't': &i32 - 328..334 'A::foo': fn foo(&A) -> &i32 - 328..349 'A::foo...42))))': &i32 - 335..348 '&&B(B(A(42)))': &&B>> - 336..348 '&B(B(A(42)))': &B>> - 337..338 'B': B>>(B>) -> B>> - 337..348 'B(B(A(42)))': B>> - 339..340 'B': B>(A) -> B> - 339..347 'B(A(42))': B> - 341..342 'A': A(i32) -> A - 341..346 'A(42)': A - 343..345 '42': i32 - "#]], - ); -} - -#[test] -fn infer_method_argument_autoderef() { - check_infer( - r#" - #[lang = "deref"] - pub trait Deref { - type Target; - fn deref(&self) -> &Self::Target; - } - - struct A(*mut T); - - impl A { - fn foo(&self, x: &A) -> &T { - &*x.0 - } - } - - struct B(T); - - impl Deref for B { - type Target = T; - fn deref(&self) -> &Self::Target { - &self.0 - } - } - - fn test(a: A) { - let t = A(0 as *mut _).foo(&&B(B(a))); - } - "#, - expect![[r#" - 67..71 'self': &Self - 143..147 'self': &A - 149..150 'x': &A - 165..186 '{ ... }': &T - 175..180 '&*x.0': &T - 176..180 '*x.0': T - 177..178 'x': &A - 177..180 'x.0': *mut T - 267..271 'self': &B - 290..313 '{ ... }': &T - 300..307 '&self.0': &T - 301..305 'self': &B - 301..307 'self.0': T - 325..326 'a': A - 336..382 '{ ...))); }': () - 346..347 't': &i32 - 350..351 'A': A(*mut i32) -> A - 350..364 'A(0 as *mut _)': A - 350..379 'A(0 as...B(a)))': &i32 - 352..353 '0': i32 - 352..363 '0 as *mut _': *mut i32 - 369..378 '&&B(B(a))': &&B>> - 370..378 '&B(B(a))': &B>> - 371..372 'B': B>>(B>) -> B>> - 371..378 'B(B(a))': B>> - 373..374 'B': B>(A) -> B> - 373..377 'B(a)': B> - 375..376 'a': A - "#]], - ); -} - -#[test] -fn infer_in_elseif() { - check_infer( - r#" - struct Foo { field: i32 } - fn main(foo: Foo) { - if true { - - } else if false { - foo.field - } - } - "#, - expect![[r#" - 34..37 'foo': Foo - 44..108 '{ ... } }': () - 50..106 'if tru... }': () - 53..57 'true': bool - 58..66 '{ }': () - 72..106 'if fal... }': i32 - 75..80 'false': bool - 81..106 '{ ... 
}': i32 - 91..94 'foo': Foo - 91..100 'foo.field': i32 - "#]], - ) -} - -#[test] -fn infer_if_match_with_return() { - check_infer( - r#" - fn foo() { - let _x1 = if true { - 1 - } else { - return; - }; - let _x2 = if true { - 2 - } else { - return - }; - let _x3 = match true { - true => 3, - _ => { - return; - } - }; - let _x4 = match true { - true => 4, - _ => return - }; - }"#, - expect![[r#" - 9..322 '{ ... }; }': () - 19..22 '_x1': i32 - 25..79 'if tru... }': i32 - 28..32 'true': bool - 33..50 '{ ... }': i32 - 43..44 '1': i32 - 56..79 '{ ... }': i32 - 66..72 'return': ! - 89..92 '_x2': i32 - 95..148 'if tru... }': i32 - 98..102 'true': bool - 103..120 '{ ... }': i32 - 113..114 '2': i32 - 126..148 '{ ... }': ! - 136..142 'return': ! - 158..161 '_x3': i32 - 164..246 'match ... }': i32 - 170..174 'true': bool - 185..189 'true': bool - 185..189 'true': bool - 193..194 '3': i32 - 204..205 '_': bool - 209..240 '{ ... }': i32 - 223..229 'return': ! - 256..259 '_x4': i32 - 262..319 'match ... }': i32 - 268..272 'true': bool - 283..287 'true': bool - 283..287 'true': bool - 291..292 '4': i32 - 302..303 '_': bool - 307..313 'return': ! 
- "#]], - ) -} - -#[test] -fn infer_inherent_method() { - check_infer( - r#" - struct A; - - impl A { - fn foo(self, x: u32) -> i32 {} - } - - mod b { - impl super::A { - fn bar(&self, x: u64) -> i64 {} - } - } - - fn test(a: A) { - a.foo(1); - (&a).bar(1); - a.bar(1); - } - "#, - expect![[r#" - 31..35 'self': A - 37..38 'x': u32 - 52..54 '{}': () - 102..106 'self': &A - 108..109 'x': u64 - 123..125 '{}': () - 143..144 'a': A - 149..197 '{ ...(1); }': () - 155..156 'a': A - 155..163 'a.foo(1)': i32 - 161..162 '1': u32 - 169..180 '(&a).bar(1)': i64 - 170..172 '&a': &A - 171..172 'a': A - 178..179 '1': u64 - 186..187 'a': A - 186..194 'a.bar(1)': i64 - 192..193 '1': u64 - "#]], - ); -} - -#[test] -fn infer_inherent_method_str() { - check_infer( - r#" - #[lang = "str"] - impl str { - fn foo(&self) -> i32 {} - } - - fn test() { - "foo".foo(); - } - "#, - expect![[r#" - 39..43 'self': &str - 52..54 '{}': () - 68..88 '{ ...o(); }': () - 74..79 '"foo"': &str - 74..85 '"foo".foo()': i32 - "#]], - ); -} - -#[test] -fn infer_tuple() { - check_infer( - r#" - fn test(x: &str, y: isize) { - let a: (u32, &str) = (1, "a"); - let b = (a, x); - let c = (y, x); - let d = (c, x); - let e = (1, "e"); - let f = (e, "d"); - } - "#, - expect![[r#" - 8..9 'x': &str - 17..18 'y': isize - 27..169 '{ ...d"); }': () - 37..38 'a': (u32, &str) - 54..62 '(1, "a")': (u32, &str) - 55..56 '1': u32 - 58..61 '"a"': &str - 72..73 'b': ((u32, &str), &str) - 76..82 '(a, x)': ((u32, &str), &str) - 77..78 'a': (u32, &str) - 80..81 'x': &str - 92..93 'c': (isize, &str) - 96..102 '(y, x)': (isize, &str) - 97..98 'y': isize - 100..101 'x': &str - 112..113 'd': ((isize, &str), &str) - 116..122 '(c, x)': ((isize, &str), &str) - 117..118 'c': (isize, &str) - 120..121 'x': &str - 132..133 'e': (i32, &str) - 136..144 '(1, "e")': (i32, &str) - 137..138 '1': i32 - 140..143 '"e"': &str - 154..155 'f': ((i32, &str), &str) - 158..166 '(e, "d")': ((i32, &str), &str) - 159..160 'e': (i32, &str) - 162..165 '"d"': &str - 
"#]], - ); -} - -#[test] -fn infer_array() { - check_infer( - r#" - fn test(x: &str, y: isize) { - let a = [x]; - let b = [a, a]; - let c = [b, b]; - - let d = [y, 1, 2, 3]; - let d = [1, y, 2, 3]; - let e = [y]; - let f = [d, d]; - let g = [e, e]; - - let h = [1, 2]; - let i = ["a", "b"]; - - let b = [a, ["b"]]; - let x: [u8; 0] = []; - } - "#, - expect![[r#" - 8..9 'x': &str - 17..18 'y': isize - 27..292 '{ ... []; }': () - 37..38 'a': [&str; _] - 41..44 '[x]': [&str; _] - 42..43 'x': &str - 54..55 'b': [[&str; _]; _] - 58..64 '[a, a]': [[&str; _]; _] - 59..60 'a': [&str; _] - 62..63 'a': [&str; _] - 74..75 'c': [[[&str; _]; _]; _] - 78..84 '[b, b]': [[[&str; _]; _]; _] - 79..80 'b': [[&str; _]; _] - 82..83 'b': [[&str; _]; _] - 95..96 'd': [isize; _] - 99..111 '[y, 1, 2, 3]': [isize; _] - 100..101 'y': isize - 103..104 '1': isize - 106..107 '2': isize - 109..110 '3': isize - 121..122 'd': [isize; _] - 125..137 '[1, y, 2, 3]': [isize; _] - 126..127 '1': isize - 129..130 'y': isize - 132..133 '2': isize - 135..136 '3': isize - 147..148 'e': [isize; _] - 151..154 '[y]': [isize; _] - 152..153 'y': isize - 164..165 'f': [[isize; _]; _] - 168..174 '[d, d]': [[isize; _]; _] - 169..170 'd': [isize; _] - 172..173 'd': [isize; _] - 184..185 'g': [[isize; _]; _] - 188..194 '[e, e]': [[isize; _]; _] - 189..190 'e': [isize; _] - 192..193 'e': [isize; _] - 205..206 'h': [i32; _] - 209..215 '[1, 2]': [i32; _] - 210..211 '1': i32 - 213..214 '2': i32 - 225..226 'i': [&str; _] - 229..239 '["a", "b"]': [&str; _] - 230..233 '"a"': &str - 235..238 '"b"': &str - 250..251 'b': [[&str; _]; _] - 254..264 '[a, ["b"]]': [[&str; _]; _] - 255..256 'a': [&str; _] - 258..263 '["b"]': [&str; _] - 259..262 '"b"': &str - 274..275 'x': [u8; _] - 287..289 '[]': [u8; _] - "#]], - ); -} - -#[test] -fn infer_struct_generics() { - check_infer( - r#" - struct A { - x: T, - } - - fn test(a1: A, i: i32) { - a1.x; - let a2 = A { x: i }; - a2.x; - let a3 = A:: { x: 1 }; - a3.x; - } - "#, - expect![[r#" - 
35..37 'a1': A - 47..48 'i': i32 - 55..146 '{ ...3.x; }': () - 61..63 'a1': A - 61..65 'a1.x': u32 - 75..77 'a2': A - 80..90 'A { x: i }': A - 87..88 'i': i32 - 96..98 'a2': A - 96..100 'a2.x': i32 - 110..112 'a3': A - 115..133 'A:: - 130..131 '1': i128 - 139..141 'a3': A - 139..143 'a3.x': i128 - "#]], - ); -} - -#[test] -fn infer_tuple_struct_generics() { - check_infer( - r#" - struct A(T); - enum Option { Some(T), None } - use Option::*; - - fn test() { - A(42); - A(42u128); - Some("x"); - Option::Some("x"); - None; - let x: Option = None; - } - "#, - expect![[r#" - 75..183 '{ ...one; }': () - 81..82 'A': A(i32) -> A - 81..86 'A(42)': A - 83..85 '42': i32 - 92..93 'A': A(u128) -> A - 92..101 'A(42u128)': A - 94..100 '42u128': u128 - 107..111 'Some': Some<&str>(&str) -> Option<&str> - 107..116 'Some("x")': Option<&str> - 112..115 '"x"': &str - 122..134 'Option::Some': Some<&str>(&str) -> Option<&str> - 122..139 'Option...e("x")': Option<&str> - 135..138 '"x"': &str - 145..149 'None': Option<{unknown}> - 159..160 'x': Option - 176..180 'None': Option - "#]], - ); -} - -#[test] -fn infer_function_generics() { - check_infer( - r#" - fn id(t: T) -> T { t } - - fn test() { - id(1u32); - id::(1); - let x: u64 = id(1); - } - "#, - expect![[r#" - 9..10 't': T - 20..25 '{ t }': T - 22..23 't': T - 37..97 '{ ...(1); }': () - 43..45 'id': fn id(u32) -> u32 - 43..51 'id(1u32)': u32 - 46..50 '1u32': u32 - 57..67 'id::': fn id(i128) -> i128 - 57..70 'id::(1)': i128 - 68..69 '1': i128 - 80..81 'x': u64 - 89..91 'id': fn id(u64) -> u64 - 89..94 'id(1)': u64 - 92..93 '1': u64 - "#]], - ); -} - -#[test] -fn infer_impl_generics_basic() { - check_infer( - r#" - struct A { - x: T1, - y: T2, - } - impl A { - fn x(self) -> X { - self.x - } - fn y(self) -> Y { - self.y - } - fn z(self, t: T) -> (X, Y, T) { - (self.x, self.y, t) - } - } - - fn test() -> i128 { - let a = A { x: 1u64, y: 1i64 }; - a.x(); - a.y(); - a.z(1i128); - a.z::(1); - } - "#, - expect![[r#" - 73..77 'self': A - 
84..106 '{ ... }': X - 94..98 'self': A - 94..100 'self.x': X - 116..120 'self': A - 127..149 '{ ... }': Y - 137..141 'self': A - 137..143 'self.y': Y - 162..166 'self': A - 168..169 't': T - 187..222 '{ ... }': (X, Y, T) - 197..216 '(self.....y, t)': (X, Y, T) - 198..202 'self': A - 198..204 'self.x': X - 206..210 'self': A - 206..212 'self.y': Y - 214..215 't': T - 244..341 '{ ...(1); }': () - 254..255 'a': A - 258..280 'A { x:...1i64 }': A - 265..269 '1u64': u64 - 274..278 '1i64': i64 - 286..287 'a': A - 286..291 'a.x()': u64 - 297..298 'a': A - 297..302 'a.y()': i64 - 308..309 'a': A - 308..318 'a.z(1i128)': (u64, i64, i128) - 312..317 '1i128': i128 - 324..325 'a': A - 324..338 'a.z::(1)': (u64, i64, u128) - 336..337 '1': u128 - "#]], - ); -} - -#[test] -fn infer_impl_generics_with_autoderef() { - check_infer( - r#" - enum Option { - Some(T), - None, - } - impl Option { - fn as_ref(&self) -> Option<&T> {} - } - fn test(o: Option) { - (&o).as_ref(); - o.as_ref(); - } - "#, - expect![[r#" - 77..81 'self': &Option - 97..99 '{}': () - 110..111 'o': Option - 126..164 '{ ...f(); }': () - 132..145 '(&o).as_ref()': Option<&u32> - 133..135 '&o': &Option - 134..135 'o': Option - 151..152 'o': Option - 151..161 'o.as_ref()': Option<&u32> - "#]], - ); -} - -#[test] -fn infer_generic_chain() { - check_infer( - r#" - struct A { - x: T, - } - impl A { - fn x(self) -> T2 { - self.x - } - } - fn id(t: T) -> T { t } - - fn test() -> i128 { - let x = 1; - let y = id(x); - let a = A { x: id(y) }; - let z = id(a.x); - let b = A { x: z }; - b.x() - } - "#, - expect![[r#" - 52..56 'self': A - 64..86 '{ ... 
}': T2 - 74..78 'self': A - 74..80 'self.x': T2 - 98..99 't': T - 109..114 '{ t }': T - 111..112 't': T - 134..254 '{ ....x() }': i128 - 144..145 'x': i128 - 148..149 '1': i128 - 159..160 'y': i128 - 163..165 'id': fn id(i128) -> i128 - 163..168 'id(x)': i128 - 166..167 'x': i128 - 178..179 'a': A - 182..196 'A { x: id(y) }': A - 189..191 'id': fn id(i128) -> i128 - 189..194 'id(y)': i128 - 192..193 'y': i128 - 206..207 'z': i128 - 210..212 'id': fn id(i128) -> i128 - 210..217 'id(a.x)': i128 - 213..214 'a': A - 213..216 'a.x': i128 - 227..228 'b': A - 231..241 'A { x: z }': A - 238..239 'z': i128 - 247..248 'b': A - 247..252 'b.x()': i128 - "#]], - ); -} - -#[test] -fn infer_associated_const() { - check_infer( - r#" - struct Struct; - - impl Struct { - const FOO: u32 = 1; - } - - enum Enum {} - - impl Enum { - const BAR: u32 = 2; - } - - trait Trait { - const ID: u32; - } - - struct TraitTest; - - impl Trait for TraitTest { - const ID: u32 = 5; - } - - fn test() { - let x = Struct::FOO; - let y = Enum::BAR; - let z = TraitTest::ID; - } - "#, - expect![[r#" - 51..52 '1': u32 - 104..105 '2': u32 - 212..213 '5': u32 - 228..306 '{ ...:ID; }': () - 238..239 'x': u32 - 242..253 'Struct::FOO': u32 - 263..264 'y': u32 - 267..276 'Enum::BAR': u32 - 286..287 'z': u32 - 290..303 'TraitTest::ID': u32 - "#]], - ); -} - -#[test] -fn infer_type_alias() { - check_infer( - r#" - struct A { x: X, y: Y } - type Foo = A; - type Bar = A; - type Baz = A; - fn test(x: Foo, y: Bar<&str>, z: Baz) { - x.x; - x.y; - y.x; - y.y; - z.x; - z.y; - } - "#, - expect![[r#" - 115..116 'x': A - 123..124 'y': A<&str, u128> - 137..138 'z': A - 153..210 '{ ...z.y; }': () - 159..160 'x': A - 159..162 'x.x': u32 - 168..169 'x': A - 168..171 'x.y': i128 - 177..178 'y': A<&str, u128> - 177..180 'y.x': &str - 186..187 'y': A<&str, u128> - 186..189 'y.y': u128 - 195..196 'z': A - 195..198 'z.x': u8 - 204..205 'z': A - 204..207 'z.y': i8 - "#]], - ) -} - -#[test] -fn recursive_type_alias() { - check_infer( - 
r#" - struct A {} - type Foo = Foo; - type Bar = A; - fn test(x: Foo) {} - "#, - expect![[r#" - 58..59 'x': {unknown} - 66..68 '{}': () - "#]], - ) -} - -#[test] -fn infer_type_param() { - check_infer( - r#" - fn id(x: T) -> T { - x - } - - fn clone(x: &T) -> T { - *x - } - - fn test() { - let y = 10u32; - id(y); - let x: bool = clone(z); - id::(1); - } - "#, - expect![[r#" - 9..10 'x': T - 20..29 '{ x }': T - 26..27 'x': T - 43..44 'x': &T - 55..65 '{ *x }': T - 61..63 '*x': T - 62..63 'x': &T - 77..157 '{ ...(1); }': () - 87..88 'y': u32 - 91..96 '10u32': u32 - 102..104 'id': fn id(u32) -> u32 - 102..107 'id(y)': u32 - 105..106 'y': u32 - 117..118 'x': bool - 127..132 'clone': fn clone(&bool) -> bool - 127..135 'clone(z)': bool - 133..134 'z': &bool - 141..151 'id::': fn id(i128) -> i128 - 141..154 'id::(1)': i128 - 152..153 '1': i128 - "#]], - ); -} - -#[test] -fn infer_const() { - check_infer( - r#" - struct Foo; - impl Foo { const ASSOC_CONST: u32 = 0; } - const GLOBAL_CONST: u32 = 101; - fn test() { - const LOCAL_CONST: u32 = 99; - let x = LOCAL_CONST; - let z = GLOBAL_CONST; - let id = Foo::ASSOC_CONST; - } - "#, - expect![[r#" - 48..49 '0': u32 - 79..82 '101': u32 - 94..212 '{ ...NST; }': () - 137..138 'x': u32 - 141..152 'LOCAL_CONST': u32 - 162..163 'z': u32 - 166..178 'GLOBAL_CONST': u32 - 188..190 'id': u32 - 193..209 'Foo::A..._CONST': u32 - 125..127 '99': u32 - "#]], - ); -} - -#[test] -fn infer_static() { - check_infer( - r#" - static GLOBAL_STATIC: u32 = 101; - static mut GLOBAL_STATIC_MUT: u32 = 101; - fn test() { - static LOCAL_STATIC: u32 = 99; - static mut LOCAL_STATIC_MUT: u32 = 99; - let x = LOCAL_STATIC; - let y = LOCAL_STATIC_MUT; - let z = GLOBAL_STATIC; - let w = GLOBAL_STATIC_MUT; - } - "#, - expect![[r#" - 28..31 '101': u32 - 69..72 '101': u32 - 84..279 '{ ...MUT; }': () - 172..173 'x': u32 - 176..188 'LOCAL_STATIC': u32 - 198..199 'y': u32 - 202..218 'LOCAL_...IC_MUT': u32 - 228..229 'z': u32 - 232..245 'GLOBAL_STATIC': u32 - 255..256 
'w': u32 - 259..276 'GLOBAL...IC_MUT': u32 - 117..119 '99': u32 - 160..162 '99': u32 - "#]], - ); -} - -#[test] -fn shadowing_primitive() { - check_types( - r#" -struct i32; -struct Foo; - -impl i32 { fn foo(&self) -> Foo { Foo } } - -fn main() { - let x: i32 = i32; - x.foo(); - //^ Foo -}"#, - ); -} - -#[test] -fn not_shadowing_primitive_by_module() { - check_types( - r#" -//- /str.rs -fn foo() {} - -//- /main.rs -mod str; -fn foo() -> &'static str { "" } - -fn main() { - foo(); - //^ &str -}"#, - ); -} - -#[test] -fn not_shadowing_module_by_primitive() { - check_types( - r#" -//- /str.rs -fn foo() -> u32 {0} - -//- /main.rs -mod str; -fn foo() -> &'static str { "" } - -fn main() { - str::foo(); - //^ u32 -}"#, - ); -} - -// This test is actually testing the shadowing behavior within ra_hir_def. It -// lives here because the testing infrastructure in ra_hir_def isn't currently -// capable of asserting the necessary conditions. -#[test] -fn should_be_shadowing_imports() { - check_types( - r#" -mod a { - pub fn foo() -> i8 {0} - pub struct foo { a: i8 } -} -mod b { pub fn foo () -> u8 {0} } -mod c { pub struct foo { a: u8 } } -mod d { - pub use super::a::*; - pub use super::c::foo; - pub use super::b::foo; -} - -fn main() { - d::foo(); - //^ u8 - d::foo{a:0}; - //^ u8 -}"#, - ); -} - -#[test] -fn closure_return() { - check_infer( - r#" - fn foo() -> u32 { - let x = || -> usize { return 1; }; - } - "#, - expect![[r#" - 16..58 '{ ...; }; }': () - 26..27 'x': || -> usize - 30..55 '|| -> ...n 1; }': || -> usize - 42..55 '{ return 1; }': usize - 44..52 'return 1': ! - 51..52 '1': usize - "#]], - ); -} - -#[test] -fn closure_return_unit() { - check_infer( - r#" - fn foo() -> u32 { - let x = || { return; }; - } - "#, - expect![[r#" - 16..47 '{ ...; }; }': () - 26..27 'x': || -> () - 30..44 '|| { return; }': || -> () - 33..44 '{ return; }': () - 35..41 'return': ! 
- "#]], - ); -} - -#[test] -fn closure_return_inferred() { - check_infer( - r#" - fn foo() -> u32 { - let x = || { "test" }; - } - "#, - expect![[r#" - 16..46 '{ ..." }; }': () - 26..27 'x': || -> &str - 30..43 '|| { "test" }': || -> &str - 33..43 '{ "test" }': &str - 35..41 '"test"': &str - "#]], - ); -} - -#[test] -fn fn_pointer_return() { - check_infer( - r#" - struct Vtable { - method: fn(), - } - - fn main() { - let vtable = Vtable { method: || {} }; - let m = vtable.method; - } - "#, - expect![[r#" - 47..120 '{ ...hod; }': () - 57..63 'vtable': Vtable - 66..90 'Vtable...| {} }': Vtable - 83..88 '|| {}': || -> () - 86..88 '{}': () - 100..101 'm': fn() - 104..110 'vtable': Vtable - 104..117 'vtable.method': fn() - "#]], - ); -} - -#[test] -fn effects_smoke_test() { - check_infer( - r#" - fn main() { - let x = unsafe { 92 }; - let y = async { async { () }.await }; - let z = try { () }; - let t = 'a: { 92 }; - } - "#, - expect![[r#" - 10..130 '{ ...2 }; }': () - 20..21 'x': i32 - 24..37 'unsafe { 92 }': i32 - 31..37 '{ 92 }': i32 - 33..35 '92': i32 - 47..48 'y': {unknown} - 57..79 '{ asyn...wait }': {unknown} - 59..77 'async ....await': {unknown} - 65..71 '{ () }': () - 67..69 '()': () - 89..90 'z': {unknown} - 93..103 'try { () }': {unknown} - 97..103 '{ () }': () - 99..101 '()': () - 113..114 't': i32 - 121..127 '{ 92 }': i32 - 123..125 '92': i32 - "#]], - ) -} - -#[test] -fn infer_generic_from_later_assignment() { - check_infer( - r#" - enum Option { Some(T), None } - use Option::*; - - fn test() { - let mut end = None; - loop { - end = Some(true); - } - } - "#, - expect![[r#" - 59..129 '{ ... } }': () - 69..76 'mut end': Option - 79..83 'None': Option - 89..127 'loop {... }': ! - 94..127 '{ ... 
}': () - 104..107 'end': Option - 104..120 'end = ...(true)': () - 110..114 'Some': Some(bool) -> Option - 110..120 'Some(true)': Option - 115..119 'true': bool - "#]], - ); -} - -#[test] -fn infer_loop_break_with_val() { - check_infer( - r#" - enum Option { Some(T), None } - use Option::*; - - fn test() { - let x = loop { - if false { - break None; - } - - break Some(true); - }; - } - "#, - expect![[r#" - 59..168 '{ ... }; }': () - 69..70 'x': Option - 73..165 'loop {... }': Option - 78..165 '{ ... }': () - 88..132 'if fal... }': () - 91..96 'false': bool - 97..132 '{ ... }': () - 111..121 'break None': ! - 117..121 'None': Option - 142..158 'break ...(true)': ! - 148..152 'Some': Some(bool) -> Option - 148..158 'Some(true)': Option - 153..157 'true': bool - "#]], - ); -} - -#[test] -fn infer_loop_break_without_val() { - check_infer( - r#" - enum Option { Some(T), None } - use Option::*; - - fn test() { - let x = loop { - if false { - break; - } - }; - } - "#, - expect![[r#" - 59..136 '{ ... }; }': () - 69..70 'x': () - 73..133 'loop {... }': () - 78..133 '{ ... }': () - 88..127 'if fal... }': () - 91..96 'false': bool - 97..127 '{ ... }': () - 111..116 'break': ! - "#]], - ); -} - -#[test] -fn infer_labelled_break_with_val() { - check_infer( - r#" - fn foo() { - let _x = || 'outer: loop { - let inner = 'inner: loop { - let i = Default::default(); - if (break 'outer i) { - loop { break 'inner 5i8; }; - } else if true { - break 'inner 6; - } - break 7; - }; - break inner < 8; - }; - } - "#, - expect![[r#" - 9..335 '{ ... }; }': () - 19..21 '_x': || -> bool - 24..332 '|| 'ou... }': || -> bool - 27..332 ''outer... }': bool - 40..332 '{ ... }': () - 54..59 'inner': i8 - 62..300 ''inner... }': i8 - 75..300 '{ ... }': () - 93..94 'i': bool - 97..113 'Defaul...efault': {unknown} - 97..115 'Defaul...ault()': bool - 129..269 'if (br... }': () - 133..147 'break 'outer i': ! - 146..147 'i': bool - 149..208 '{ ... }': () - 167..193 'loop {...5i8; }': ! 
- 172..193 '{ brea...5i8; }': () - 174..190 'break ...er 5i8': ! - 187..190 '5i8': i8 - 214..269 'if tru... }': () - 217..221 'true': bool - 222..269 '{ ... }': () - 240..254 'break 'inner 6': ! - 253..254 '6': i8 - 282..289 'break 7': ! - 288..289 '7': i8 - 310..325 'break inner < 8': ! - 316..321 'inner': i8 - 316..325 'inner < 8': bool - 324..325 '8': i8 - "#]], - ); -} - -#[test] -fn generic_default() { - check_infer( - r#" - struct Thing { t: T } - enum OtherThing { - One { t: T }, - Two(T), - } - - fn test(t1: Thing, t2: OtherThing, t3: Thing, t4: OtherThing) { - t1.t; - t3.t; - match t2 { - OtherThing::One { t } => { t; }, - OtherThing::Two(t) => { t; }, - } - match t4 { - OtherThing::One { t } => { t; }, - OtherThing::Two(t) => { t; }, - } - } - "#, - expect![[r#" - 97..99 't1': Thing<()> - 108..110 't2': OtherThing<()> - 124..126 't3': Thing - 140..142 't4': OtherThing - 161..384 '{ ... } }': () - 167..169 't1': Thing<()> - 167..171 't1.t': () - 177..179 't3': Thing - 177..181 't3.t': i32 - 187..282 'match ... }': () - 193..195 't2': OtherThing<()> - 206..227 'OtherT... { t }': OtherThing<()> - 224..225 't': () - 231..237 '{ t; }': () - 233..234 't': () - 247..265 'OtherT...Two(t)': OtherThing<()> - 263..264 't': () - 269..275 '{ t; }': () - 271..272 't': () - 287..382 'match ... }': () - 293..295 't4': OtherThing - 306..327 'OtherT... 
{ t }': OtherThing - 324..325 't': i32 - 331..337 '{ t; }': () - 333..334 't': i32 - 347..365 'OtherT...Two(t)': OtherThing - 363..364 't': i32 - 369..375 '{ t; }': () - 371..372 't': i32 - "#]], - ); -} - -#[test] -fn generic_default_in_struct_literal() { - check_infer( - r#" - struct Thing { t: T } - enum OtherThing { - One { t: T }, - Two(T), - } - - fn test() { - let x = Thing { t: loop {} }; - let y = Thing { t: () }; - let z = Thing { t: 1i32 }; - if let Thing { t } = z { - t; - } - - let a = OtherThing::One { t: 1i32 }; - let b = OtherThing::Two(1i32); - } - "#, - expect![[r#" - 99..319 '{ ...32); }': () - 109..110 'x': Thing - 113..133 'Thing ...p {} }': Thing - 124..131 'loop {}': ! - 129..131 '{}': () - 143..144 'y': Thing<()> - 147..162 'Thing { t: () }': Thing<()> - 158..160 '()': () - 172..173 'z': Thing - 176..193 'Thing ...1i32 }': Thing - 187..191 '1i32': i32 - 199..240 'if let... }': () - 206..217 'Thing { t }': Thing - 214..215 't': i32 - 220..221 'z': Thing - 222..240 '{ ... 
}': () - 232..233 't': i32 - 250..251 'a': OtherThing - 254..281 'OtherT...1i32 }': OtherThing - 275..279 '1i32': i32 - 291..292 'b': OtherThing - 295..310 'OtherThing::Two': Two(i32) -> OtherThing - 295..316 'OtherT...(1i32)': OtherThing - 311..315 '1i32': i32 - "#]], - ); -} - -#[test] -fn generic_default_depending_on_other_type_arg() { - // FIXME: the {unknown} is a bug - check_infer( - r#" - struct Thing T> { t: T } - - fn test(t1: Thing, t2: Thing) { - t1; - t2; - Thing::<_> { t: 1u32 }; - } - "#, - expect![[r#" - 56..58 't1': Thing u32> - 72..74 't2': Thing u128> - 83..130 '{ ...2 }; }': () - 89..91 't1': Thing u32> - 97..99 't2': Thing u128> - 105..127 'Thing:...1u32 }': Thing {unknown}> - 121..125 '1u32': u32 - "#]], - ); -} - -#[test] -fn generic_default_depending_on_other_type_arg_forward() { - // the {unknown} here is intentional, as defaults are not allowed to - // refer to type parameters coming later - check_infer( - r#" - struct Thing T, T = u128> { t: T } - - fn test(t1: Thing) { - t1; - } - "#, - expect![[r#" - 56..58 't1': Thing {unknown}, u128> - 67..78 '{ t1; }': () - 73..75 't1': Thing {unknown}, u128> - "#]], - ); -} diff --git a/crates/ra_hir_ty/src/traits.rs b/crates/ra_hir_ty/src/traits.rs deleted file mode 100644 index 3f6d2cf352..0000000000 --- a/crates/ra_hir_ty/src/traits.rs +++ /dev/null @@ -1,273 +0,0 @@ -//! Trait solving using Chalk. -use std::sync::Arc; - -use chalk_ir::cast::Cast; -use chalk_solve::Solver; -use hir_def::{lang_item::LangItemTarget, TraitId}; -use ra_db::CrateId; -use ra_prof::profile; - -use crate::{db::HirDatabase, DebruijnIndex, Substs}; - -use super::{Canonical, GenericPredicate, HirDisplay, ProjectionTy, TraitRef, Ty, TypeWalk}; - -use self::chalk::{from_chalk, Interner, ToChalk}; - -pub(crate) mod chalk; - -// This controls the maximum size of types Chalk considers. If we set this too -// high, we can run into slow edge cases; if we set it too low, Chalk won't -// find some solutions. 
-// FIXME this is currently hardcoded in the recursive solver -// const CHALK_SOLVER_MAX_SIZE: usize = 10; - -/// This controls how much 'time' we give the Chalk solver before giving up. -const CHALK_SOLVER_FUEL: i32 = 100; - -#[derive(Debug, Copy, Clone)] -struct ChalkContext<'a> { - db: &'a dyn HirDatabase, - krate: CrateId, -} - -fn create_chalk_solver() -> chalk_recursive::RecursiveSolver { - let overflow_depth = 100; - let caching_enabled = true; - chalk_recursive::RecursiveSolver::new(overflow_depth, caching_enabled) -} - -/// A set of clauses that we assume to be true. E.g. if we are inside this function: -/// ```rust -/// fn foo(t: T) {} -/// ``` -/// we assume that `T: Default`. -#[derive(Clone, Debug, PartialEq, Eq, Hash)] -pub struct TraitEnvironment { - pub predicates: Vec, -} - -impl TraitEnvironment { - /// Returns trait refs with the given self type which are supposed to hold - /// in this trait env. E.g. if we are in `foo()`, this will - /// find that `T: SomeTrait` if we call it for `T`. - pub(crate) fn trait_predicates_for_self_ty<'a>( - &'a self, - ty: &'a Ty, - ) -> impl Iterator + 'a { - self.predicates.iter().filter_map(move |pred| match pred { - GenericPredicate::Implemented(tr) if tr.self_ty() == ty => Some(tr), - _ => None, - }) - } -} - -/// Something (usually a goal), along with an environment. -#[derive(Clone, Debug, PartialEq, Eq, Hash)] -pub struct InEnvironment { - pub environment: Arc, - pub value: T, -} - -impl InEnvironment { - pub fn new(environment: Arc, value: T) -> InEnvironment { - InEnvironment { environment, value } - } -} - -/// Something that needs to be proven (by Chalk) during type checking, e.g. that -/// a certain type implements a certain trait. Proving the Obligation might -/// result in additional information about inference variables. 
-#[derive(Clone, Debug, PartialEq, Eq, Hash)] -pub enum Obligation { - /// Prove that a certain type implements a trait (the type is the `Self` type - /// parameter to the `TraitRef`). - Trait(TraitRef), - Projection(ProjectionPredicate), -} - -impl Obligation { - pub fn from_predicate(predicate: GenericPredicate) -> Option { - match predicate { - GenericPredicate::Implemented(trait_ref) => Some(Obligation::Trait(trait_ref)), - GenericPredicate::Projection(projection_pred) => { - Some(Obligation::Projection(projection_pred)) - } - GenericPredicate::Error => None, - } - } -} - -#[derive(Clone, Debug, PartialEq, Eq, Hash)] -pub struct ProjectionPredicate { - pub projection_ty: ProjectionTy, - pub ty: Ty, -} - -impl TypeWalk for ProjectionPredicate { - fn walk(&self, f: &mut impl FnMut(&Ty)) { - self.projection_ty.walk(f); - self.ty.walk(f); - } - - fn walk_mut_binders( - &mut self, - f: &mut impl FnMut(&mut Ty, DebruijnIndex), - binders: DebruijnIndex, - ) { - self.projection_ty.walk_mut_binders(f, binders); - self.ty.walk_mut_binders(f, binders); - } -} - -/// Solve a trait goal using Chalk. -pub(crate) fn trait_solve_query( - db: &dyn HirDatabase, - krate: CrateId, - goal: Canonical>, -) -> Option { - let _p = profile("trait_solve_query").detail(|| match &goal.value.value { - Obligation::Trait(it) => db.trait_data(it.trait_).name.to_string(), - Obligation::Projection(_) => "projection".to_string(), - }); - log::info!("trait_solve_query({})", goal.value.value.display(db)); - - if let Obligation::Projection(pred) = &goal.value.value { - if let Ty::Bound(_) = &pred.projection_ty.parameters[0] { - // Hack: don't ask Chalk to normalize with an unknown self type, it'll say that's impossible - return Some(Solution::Ambig(Guidance::Unknown)); - } - } - - let canonical = goal.to_chalk(db).cast(&Interner); - - // We currently don't deal with universes (I think / hope they're not yet - // relevant for our use cases?) 
- let u_canonical = chalk_ir::UCanonical { canonical, universes: 1 }; - let solution = solve(db, krate, &u_canonical); - solution.map(|solution| solution_from_chalk(db, solution)) -} - -fn solve( - db: &dyn HirDatabase, - krate: CrateId, - goal: &chalk_ir::UCanonical>>, -) -> Option> { - let context = ChalkContext { db, krate }; - log::debug!("solve goal: {:?}", goal); - let mut solver = create_chalk_solver(); - - let fuel = std::cell::Cell::new(CHALK_SOLVER_FUEL); - - let should_continue = || { - context.db.check_canceled(); - let remaining = fuel.get(); - fuel.set(remaining - 1); - if remaining == 0 { - log::debug!("fuel exhausted"); - } - remaining > 0 - }; - let mut solve = || { - let solution = solver.solve_limited(&context, goal, should_continue); - log::debug!("solve({:?}) => {:?}", goal, solution); - solution - }; - // don't set the TLS for Chalk unless Chalk debugging is active, to make - // extra sure we only use it for debugging - let solution = - if is_chalk_debug() { chalk::tls::set_current_program(db, solve) } else { solve() }; - - solution -} - -fn is_chalk_debug() -> bool { - std::env::var("CHALK_DEBUG").is_ok() -} - -fn solution_from_chalk( - db: &dyn HirDatabase, - solution: chalk_solve::Solution, -) -> Solution { - let convert_subst = |subst: chalk_ir::Canonical>| { - let result = from_chalk(db, subst); - SolutionVariables(result) - }; - match solution { - chalk_solve::Solution::Unique(constr_subst) => { - let subst = chalk_ir::Canonical { - value: constr_subst.value.subst, - binders: constr_subst.binders, - }; - Solution::Unique(convert_subst(subst)) - } - chalk_solve::Solution::Ambig(chalk_solve::Guidance::Definite(subst)) => { - Solution::Ambig(Guidance::Definite(convert_subst(subst))) - } - chalk_solve::Solution::Ambig(chalk_solve::Guidance::Suggested(subst)) => { - Solution::Ambig(Guidance::Suggested(convert_subst(subst))) - } - chalk_solve::Solution::Ambig(chalk_solve::Guidance::Unknown) => { - Solution::Ambig(Guidance::Unknown) - } - } -} 
- -#[derive(Clone, Debug, PartialEq, Eq)] -pub struct SolutionVariables(pub Canonical); - -#[derive(Clone, Debug, PartialEq, Eq)] -/// A (possible) solution for a proposed goal. -pub enum Solution { - /// The goal indeed holds, and there is a unique value for all existential - /// variables. - Unique(SolutionVariables), - - /// The goal may be provable in multiple ways, but regardless we may have some guidance - /// for type inference. In this case, we don't return any lifetime - /// constraints, since we have not "committed" to any particular solution - /// yet. - Ambig(Guidance), -} - -#[derive(Clone, Debug, PartialEq, Eq)] -/// When a goal holds ambiguously (e.g., because there are multiple possible -/// solutions), we issue a set of *guidance* back to type inference. -pub enum Guidance { - /// The existential variables *must* have the given values if the goal is - /// ever to hold, but that alone isn't enough to guarantee the goal will - /// actually hold. - Definite(SolutionVariables), - - /// There are multiple plausible values for the existentials, but the ones - /// here are suggested as the preferred choice heuristically. These should - /// be used for inference fallback only. 
- Suggested(SolutionVariables), - - /// There's no useful information to feed back to type inference - Unknown, -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub enum FnTrait { - FnOnce, - FnMut, - Fn, -} - -impl FnTrait { - fn lang_item_name(self) -> &'static str { - match self { - FnTrait::FnOnce => "fn_once", - FnTrait::FnMut => "fn_mut", - FnTrait::Fn => "fn", - } - } - - pub fn get_id(&self, db: &dyn HirDatabase, krate: CrateId) -> Option { - let target = db.lang_item(krate, self.lang_item_name().into())?; - match target { - LangItemTarget::TraitId(t) => Some(t), - _ => None, - } - } -} diff --git a/crates/ra_hir_ty/src/traits/chalk.rs b/crates/ra_hir_ty/src/traits/chalk.rs deleted file mode 100644 index 1c70653644..0000000000 --- a/crates/ra_hir_ty/src/traits/chalk.rs +++ /dev/null @@ -1,586 +0,0 @@ -//! Conversion code from/to Chalk. -use std::sync::Arc; - -use log::debug; - -use chalk_ir::{fold::shift::Shift, CanonicalVarKinds, GenericArg, TypeName}; -use chalk_solve::rust_ir::{self, OpaqueTyDatumBound, WellKnownTrait}; - -use hir_def::{ - lang_item::{lang_attr, LangItemTarget}, - AssocContainerId, AssocItemId, HasModule, Lookup, TypeAliasId, -}; -use ra_db::{salsa::InternKey, CrateId}; - -use super::ChalkContext; -use crate::{ - db::HirDatabase, - display::HirDisplay, - method_resolution::{TyFingerprint, ALL_FLOAT_FPS, ALL_INT_FPS}, - utils::generics, - CallableDefId, DebruijnIndex, FnSig, GenericPredicate, Substs, Ty, TypeCtor, -}; -use mapping::{ - convert_where_clauses, generic_predicate_to_inline_bound, make_binders, TypeAliasAsValue, -}; - -pub use self::interner::*; - -pub(super) mod tls; -mod interner; -mod mapping; - -pub(super) trait ToChalk { - type Chalk; - fn to_chalk(self, db: &dyn HirDatabase) -> Self::Chalk; - fn from_chalk(db: &dyn HirDatabase, chalk: Self::Chalk) -> Self; -} - -pub(super) fn from_chalk(db: &dyn HirDatabase, chalk: ChalkT) -> T -where - T: ToChalk, -{ - T::from_chalk(db, chalk) -} - -impl<'a> 
chalk_solve::RustIrDatabase for ChalkContext<'a> { - fn associated_ty_data(&self, id: AssocTypeId) -> Arc { - self.db.associated_ty_data(id) - } - fn trait_datum(&self, trait_id: TraitId) -> Arc { - self.db.trait_datum(self.krate, trait_id) - } - fn adt_datum(&self, struct_id: AdtId) -> Arc { - self.db.struct_datum(self.krate, struct_id) - } - fn adt_repr(&self, _struct_id: AdtId) -> rust_ir::AdtRepr { - rust_ir::AdtRepr { repr_c: false, repr_packed: false } - } - fn impl_datum(&self, impl_id: ImplId) -> Arc { - self.db.impl_datum(self.krate, impl_id) - } - - fn fn_def_datum( - &self, - fn_def_id: chalk_ir::FnDefId, - ) -> Arc> { - self.db.fn_def_datum(self.krate, fn_def_id) - } - - fn impls_for_trait( - &self, - trait_id: TraitId, - parameters: &[GenericArg], - binders: &CanonicalVarKinds, - ) -> Vec { - debug!("impls_for_trait {:?}", trait_id); - let trait_: hir_def::TraitId = from_chalk(self.db, trait_id); - - let ty: Ty = from_chalk(self.db, parameters[0].assert_ty_ref(&Interner).clone()); - - fn binder_kind(ty: &Ty, binders: &CanonicalVarKinds) -> Option { - if let Ty::Bound(bv) = ty { - let binders = binders.as_slice(&Interner); - if bv.debruijn == DebruijnIndex::INNERMOST { - if let chalk_ir::VariableKind::Ty(tk) = binders[bv.index].kind { - return Some(tk); - } - } - } - None - } - - let self_ty_fp = TyFingerprint::for_impl(&ty); - let fps: &[TyFingerprint] = match binder_kind(&ty, binders) { - Some(chalk_ir::TyKind::Integer) => &ALL_INT_FPS, - Some(chalk_ir::TyKind::Float) => &ALL_FLOAT_FPS, - _ => self_ty_fp.as_ref().map(std::slice::from_ref).unwrap_or(&[]), - }; - - // Note: Since we're using impls_for_trait, only impls where the trait - // can be resolved should ever reach Chalk. `impl_datum` relies on that - // and will panic if the trait can't be resolved. 
- let in_deps = self.db.trait_impls_in_deps(self.krate); - let in_self = self.db.trait_impls_in_crate(self.krate); - let impl_maps = [in_deps, in_self]; - - let id_to_chalk = |id: hir_def::ImplId| id.to_chalk(self.db); - - let result: Vec<_> = if fps.is_empty() { - debug!("Unrestricted search for {:?} impls...", trait_); - impl_maps - .iter() - .flat_map(|crate_impl_defs| crate_impl_defs.for_trait(trait_).map(id_to_chalk)) - .collect() - } else { - impl_maps - .iter() - .flat_map(|crate_impl_defs| { - fps.iter().flat_map(move |fp| { - crate_impl_defs.for_trait_and_self_ty(trait_, *fp).map(id_to_chalk) - }) - }) - .collect() - }; - - debug!("impls_for_trait returned {} impls", result.len()); - result - } - fn impl_provided_for(&self, auto_trait_id: TraitId, struct_id: AdtId) -> bool { - debug!("impl_provided_for {:?}, {:?}", auto_trait_id, struct_id); - false // FIXME - } - fn associated_ty_value(&self, id: AssociatedTyValueId) -> Arc { - self.db.associated_ty_value(self.krate, id) - } - - fn custom_clauses(&self) -> Vec> { - vec![] - } - fn local_impls_to_coherence_check(&self, _trait_id: TraitId) -> Vec { - // We don't do coherence checking (yet) - unimplemented!() - } - fn interner(&self) -> &Interner { - &Interner - } - fn well_known_trait_id( - &self, - well_known_trait: rust_ir::WellKnownTrait, - ) -> Option> { - let lang_attr = lang_attr_from_well_known_trait(well_known_trait); - let trait_ = match self.db.lang_item(self.krate, lang_attr.into()) { - Some(LangItemTarget::TraitId(trait_)) => trait_, - _ => return None, - }; - Some(trait_.to_chalk(self.db)) - } - - fn program_clauses_for_env( - &self, - environment: &chalk_ir::Environment, - ) -> chalk_ir::ProgramClauses { - self.db.program_clauses_for_chalk_env(self.krate, environment.clone()) - } - - fn opaque_ty_data(&self, id: chalk_ir::OpaqueTyId) -> Arc { - let interned_id = crate::db::InternedOpaqueTyId::from(id); - let full_id = self.db.lookup_intern_impl_trait_id(interned_id); - let (func, idx) = match 
full_id { - crate::OpaqueTyId::ReturnTypeImplTrait(func, idx) => (func, idx), - }; - let datas = - self.db.return_type_impl_traits(func).expect("impl trait id without impl traits"); - let data = &datas.value.impl_traits[idx as usize]; - let bound = OpaqueTyDatumBound { - bounds: make_binders( - data.bounds - .value - .iter() - .cloned() - .filter(|b| !b.is_error()) - .map(|b| b.to_chalk(self.db)) - .collect(), - 1, - ), - where_clauses: make_binders(vec![], 0), - }; - let num_vars = datas.num_binders; - Arc::new(OpaqueTyDatum { opaque_ty_id: id, bound: make_binders(bound, num_vars) }) - } - - fn hidden_opaque_type(&self, _id: chalk_ir::OpaqueTyId) -> chalk_ir::Ty { - // FIXME: actually provide the hidden type; it is relevant for auto traits - Ty::Unknown.to_chalk(self.db) - } - - fn is_object_safe(&self, _trait_id: chalk_ir::TraitId) -> bool { - // FIXME: implement actual object safety - true - } - - fn closure_kind( - &self, - _closure_id: chalk_ir::ClosureId, - _substs: &chalk_ir::Substitution, - ) -> rust_ir::ClosureKind { - // Fn is the closure kind that implements all three traits - rust_ir::ClosureKind::Fn - } - fn closure_inputs_and_output( - &self, - _closure_id: chalk_ir::ClosureId, - substs: &chalk_ir::Substitution, - ) -> chalk_ir::Binders> { - let sig_ty: Ty = - from_chalk(self.db, substs.at(&Interner, 0).assert_ty_ref(&Interner).clone()); - let sig = FnSig::from_fn_ptr_substs( - &sig_ty.substs().expect("first closure param should be fn ptr"), - false, - ); - let io = rust_ir::FnDefInputsAndOutputDatum { - argument_types: sig.params().iter().map(|ty| ty.clone().to_chalk(self.db)).collect(), - return_type: sig.ret().clone().to_chalk(self.db), - }; - make_binders(io.shifted_in(&Interner), 0) - } - fn closure_upvars( - &self, - _closure_id: chalk_ir::ClosureId, - _substs: &chalk_ir::Substitution, - ) -> chalk_ir::Binders> { - let ty = Ty::unit().to_chalk(self.db); - make_binders(ty, 0) - } - fn closure_fn_substitution( - &self, - _closure_id: 
chalk_ir::ClosureId, - _substs: &chalk_ir::Substitution, - ) -> chalk_ir::Substitution { - Substs::empty().to_chalk(self.db) - } - - fn trait_name(&self, _trait_id: chalk_ir::TraitId) -> String { - unimplemented!() - } - fn adt_name(&self, _struct_id: chalk_ir::AdtId) -> String { - unimplemented!() - } - fn assoc_type_name(&self, _assoc_ty_id: chalk_ir::AssocTypeId) -> String { - unimplemented!() - } - fn opaque_type_name(&self, _opaque_ty_id: chalk_ir::OpaqueTyId) -> String { - unimplemented!() - } - fn fn_def_name(&self, _fn_def_id: chalk_ir::FnDefId) -> String { - unimplemented!() - } -} - -pub(crate) fn program_clauses_for_chalk_env_query( - db: &dyn HirDatabase, - krate: CrateId, - environment: chalk_ir::Environment, -) -> chalk_ir::ProgramClauses { - chalk_solve::program_clauses_for_env(&ChalkContext { db, krate }, &environment) -} - -pub(crate) fn associated_ty_data_query( - db: &dyn HirDatabase, - id: AssocTypeId, -) -> Arc { - debug!("associated_ty_data {:?}", id); - let type_alias: TypeAliasId = from_chalk(db, id); - let trait_ = match type_alias.lookup(db.upcast()).container { - AssocContainerId::TraitId(t) => t, - _ => panic!("associated type not in trait"), - }; - - // Lower bounds -- we could/should maybe move this to a separate query in `lower` - let type_alias_data = db.type_alias_data(type_alias); - let generic_params = generics(db.upcast(), type_alias.into()); - let bound_vars = Substs::bound_vars(&generic_params, DebruijnIndex::INNERMOST); - let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db.upcast()); - let ctx = crate::TyLoweringContext::new(db, &resolver) - .with_type_param_mode(crate::lower::TypeParamLoweringMode::Variable); - let self_ty = Ty::Bound(crate::BoundVar::new(crate::DebruijnIndex::INNERMOST, 0)); - let bounds = type_alias_data - .bounds - .iter() - .flat_map(|bound| GenericPredicate::from_type_bound(&ctx, bound, self_ty.clone())) - .filter_map(|pred| generic_predicate_to_inline_bound(db, &pred, &self_ty)) - 
.map(|bound| make_binders(bound.shifted_in(&Interner), 0)) - .collect(); - - let where_clauses = convert_where_clauses(db, type_alias.into(), &bound_vars); - let bound_data = rust_ir::AssociatedTyDatumBound { bounds, where_clauses }; - let datum = AssociatedTyDatum { - trait_id: trait_.to_chalk(db), - id, - name: type_alias, - binders: make_binders(bound_data, generic_params.len()), - }; - Arc::new(datum) -} - -pub(crate) fn trait_datum_query( - db: &dyn HirDatabase, - krate: CrateId, - trait_id: TraitId, -) -> Arc { - debug!("trait_datum {:?}", trait_id); - let trait_: hir_def::TraitId = from_chalk(db, trait_id); - let trait_data = db.trait_data(trait_); - debug!("trait {:?} = {:?}", trait_id, trait_data.name); - let generic_params = generics(db.upcast(), trait_.into()); - let bound_vars = Substs::bound_vars(&generic_params, DebruijnIndex::INNERMOST); - let flags = rust_ir::TraitFlags { - auto: trait_data.auto, - upstream: trait_.lookup(db.upcast()).container.module(db.upcast()).krate != krate, - non_enumerable: true, - coinductive: false, // only relevant for Chalk testing - // FIXME: set these flags correctly - marker: false, - fundamental: false, - }; - let where_clauses = convert_where_clauses(db, trait_.into(), &bound_vars); - let associated_ty_ids = - trait_data.associated_types().map(|type_alias| type_alias.to_chalk(db)).collect(); - let trait_datum_bound = rust_ir::TraitDatumBound { where_clauses }; - let well_known = - lang_attr(db.upcast(), trait_).and_then(|name| well_known_trait_from_lang_attr(&name)); - let trait_datum = TraitDatum { - id: trait_id, - binders: make_binders(trait_datum_bound, bound_vars.len()), - flags, - associated_ty_ids, - well_known, - }; - Arc::new(trait_datum) -} - -fn well_known_trait_from_lang_attr(name: &str) -> Option { - Some(match name { - "sized" => WellKnownTrait::Sized, - "copy" => WellKnownTrait::Copy, - "clone" => WellKnownTrait::Clone, - "drop" => WellKnownTrait::Drop, - "fn_once" => WellKnownTrait::FnOnce, - "fn_mut" 
=> WellKnownTrait::FnMut, - "fn" => WellKnownTrait::Fn, - "unsize" => WellKnownTrait::Unsize, - _ => return None, - }) -} - -fn lang_attr_from_well_known_trait(attr: WellKnownTrait) -> &'static str { - match attr { - WellKnownTrait::Sized => "sized", - WellKnownTrait::Copy => "copy", - WellKnownTrait::Clone => "clone", - WellKnownTrait::Drop => "drop", - WellKnownTrait::FnOnce => "fn_once", - WellKnownTrait::FnMut => "fn_mut", - WellKnownTrait::Fn => "fn", - WellKnownTrait::Unsize => "unsize", - } -} - -pub(crate) fn struct_datum_query( - db: &dyn HirDatabase, - krate: CrateId, - struct_id: AdtId, -) -> Arc { - debug!("struct_datum {:?}", struct_id); - let type_ctor: TypeCtor = from_chalk(db, TypeName::Adt(struct_id)); - debug!("struct {:?} = {:?}", struct_id, type_ctor); - let num_params = type_ctor.num_ty_params(db); - let upstream = type_ctor.krate(db) != Some(krate); - let where_clauses = type_ctor - .as_generic_def() - .map(|generic_def| { - let generic_params = generics(db.upcast(), generic_def); - let bound_vars = Substs::bound_vars(&generic_params, DebruijnIndex::INNERMOST); - convert_where_clauses(db, generic_def, &bound_vars) - }) - .unwrap_or_else(Vec::new); - let flags = rust_ir::AdtFlags { - upstream, - // FIXME set fundamental and phantom_data flags correctly - fundamental: false, - phantom_data: false, - }; - // FIXME provide enum variants properly (for auto traits) - let variant = rust_ir::AdtVariantDatum { - fields: Vec::new(), // FIXME add fields (only relevant for auto traits), - }; - let struct_datum_bound = rust_ir::AdtDatumBound { variants: vec![variant], where_clauses }; - let struct_datum = StructDatum { - // FIXME set ADT kind - kind: rust_ir::AdtKind::Struct, - id: struct_id, - binders: make_binders(struct_datum_bound, num_params), - flags, - }; - Arc::new(struct_datum) -} - -pub(crate) fn impl_datum_query( - db: &dyn HirDatabase, - krate: CrateId, - impl_id: ImplId, -) -> Arc { - let _p = ra_prof::profile("impl_datum"); - 
debug!("impl_datum {:?}", impl_id); - let impl_: hir_def::ImplId = from_chalk(db, impl_id); - impl_def_datum(db, krate, impl_id, impl_) -} - -fn impl_def_datum( - db: &dyn HirDatabase, - krate: CrateId, - chalk_id: ImplId, - impl_id: hir_def::ImplId, -) -> Arc { - let trait_ref = db - .impl_trait(impl_id) - // ImplIds for impls where the trait ref can't be resolved should never reach Chalk - .expect("invalid impl passed to Chalk") - .value; - let impl_data = db.impl_data(impl_id); - - let generic_params = generics(db.upcast(), impl_id.into()); - let bound_vars = Substs::bound_vars(&generic_params, DebruijnIndex::INNERMOST); - let trait_ = trait_ref.trait_; - let impl_type = if impl_id.lookup(db.upcast()).container.module(db.upcast()).krate == krate { - rust_ir::ImplType::Local - } else { - rust_ir::ImplType::External - }; - let where_clauses = convert_where_clauses(db, impl_id.into(), &bound_vars); - let negative = impl_data.is_negative; - debug!( - "impl {:?}: {}{} where {:?}", - chalk_id, - if negative { "!" 
} else { "" }, - trait_ref.display(db), - where_clauses - ); - let trait_ref = trait_ref.to_chalk(db); - - let polarity = if negative { rust_ir::Polarity::Negative } else { rust_ir::Polarity::Positive }; - - let impl_datum_bound = rust_ir::ImplDatumBound { trait_ref, where_clauses }; - let trait_data = db.trait_data(trait_); - let associated_ty_value_ids = impl_data - .items - .iter() - .filter_map(|item| match item { - AssocItemId::TypeAliasId(type_alias) => Some(*type_alias), - _ => None, - }) - .filter(|&type_alias| { - // don't include associated types that don't exist in the trait - let name = &db.type_alias_data(type_alias).name; - trait_data.associated_type_by_name(name).is_some() - }) - .map(|type_alias| TypeAliasAsValue(type_alias).to_chalk(db)) - .collect(); - debug!("impl_datum: {:?}", impl_datum_bound); - let impl_datum = ImplDatum { - binders: make_binders(impl_datum_bound, bound_vars.len()), - impl_type, - polarity, - associated_ty_value_ids, - }; - Arc::new(impl_datum) -} - -pub(crate) fn associated_ty_value_query( - db: &dyn HirDatabase, - krate: CrateId, - id: AssociatedTyValueId, -) -> Arc { - let type_alias: TypeAliasAsValue = from_chalk(db, id); - type_alias_associated_ty_value(db, krate, type_alias.0) -} - -fn type_alias_associated_ty_value( - db: &dyn HirDatabase, - _krate: CrateId, - type_alias: TypeAliasId, -) -> Arc { - let type_alias_data = db.type_alias_data(type_alias); - let impl_id = match type_alias.lookup(db.upcast()).container { - AssocContainerId::ImplId(it) => it, - _ => panic!("assoc ty value should be in impl"), - }; - - let trait_ref = db.impl_trait(impl_id).expect("assoc ty value should not exist").value; // we don't return any assoc ty values if the impl'd trait can't be resolved - - let assoc_ty = db - .trait_data(trait_ref.trait_) - .associated_type_by_name(&type_alias_data.name) - .expect("assoc ty value should not exist"); // validated when building the impl data as well - let ty = db.ty(type_alias.into()); - let 
value_bound = rust_ir::AssociatedTyValueBound { ty: ty.value.to_chalk(db) }; - let value = rust_ir::AssociatedTyValue { - impl_id: impl_id.to_chalk(db), - associated_ty_id: assoc_ty.to_chalk(db), - value: make_binders(value_bound, ty.num_binders), - }; - Arc::new(value) -} - -pub(crate) fn fn_def_datum_query( - db: &dyn HirDatabase, - _krate: CrateId, - fn_def_id: FnDefId, -) -> Arc { - let callable_def: CallableDefId = from_chalk(db, fn_def_id); - let generic_params = generics(db.upcast(), callable_def.into()); - let sig = db.callable_item_signature(callable_def); - let bound_vars = Substs::bound_vars(&generic_params, DebruijnIndex::INNERMOST); - let where_clauses = convert_where_clauses(db, callable_def.into(), &bound_vars); - let bound = rust_ir::FnDefDatumBound { - // Note: Chalk doesn't actually use this information yet as far as I am aware, but we provide it anyway - inputs_and_output: make_binders( - rust_ir::FnDefInputsAndOutputDatum { - argument_types: sig - .value - .params() - .iter() - .map(|ty| ty.clone().to_chalk(db)) - .collect(), - return_type: sig.value.ret().clone().to_chalk(db), - } - .shifted_in(&Interner), - 0, - ), - where_clauses, - }; - let datum = FnDefDatum { - id: fn_def_id, - abi: (), - safety: chalk_ir::Safety::Safe, - variadic: sig.value.is_varargs, - binders: make_binders(bound, sig.num_binders), - }; - Arc::new(datum) -} - -impl From for crate::db::InternedCallableDefId { - fn from(fn_def_id: FnDefId) -> Self { - InternKey::from_intern_id(fn_def_id.0) - } -} - -impl From for FnDefId { - fn from(callable_def_id: crate::db::InternedCallableDefId) -> Self { - chalk_ir::FnDefId(callable_def_id.as_intern_id()) - } -} - -impl From for crate::db::InternedOpaqueTyId { - fn from(id: OpaqueTyId) -> Self { - InternKey::from_intern_id(id.0) - } -} - -impl From for OpaqueTyId { - fn from(id: crate::db::InternedOpaqueTyId) -> Self { - chalk_ir::OpaqueTyId(id.as_intern_id()) - } -} - -impl From> for crate::db::ClosureId { - fn from(id: 
chalk_ir::ClosureId) -> Self { - Self::from_intern_id(id.0) - } -} - -impl From for chalk_ir::ClosureId { - fn from(id: crate::db::ClosureId) -> Self { - chalk_ir::ClosureId(id.as_intern_id()) - } -} diff --git a/crates/ra_hir_ty/src/traits/chalk/interner.rs b/crates/ra_hir_ty/src/traits/chalk/interner.rs deleted file mode 100644 index 8d4c51a8ff..0000000000 --- a/crates/ra_hir_ty/src/traits/chalk/interner.rs +++ /dev/null @@ -1,383 +0,0 @@ -//! Implementation of the Chalk `Interner` trait, which allows customizing the -//! representation of the various objects Chalk deals with (types, goals etc.). - -use super::tls; -use chalk_ir::{GenericArg, Goal, GoalData}; -use hir_def::TypeAliasId; -use ra_db::salsa::InternId; -use std::{fmt, sync::Arc}; - -#[derive(Debug, Copy, Clone, Hash, PartialOrd, Ord, PartialEq, Eq)] -pub struct Interner; - -pub type AssocTypeId = chalk_ir::AssocTypeId; -pub type AssociatedTyDatum = chalk_solve::rust_ir::AssociatedTyDatum; -pub type TraitId = chalk_ir::TraitId; -pub type TraitDatum = chalk_solve::rust_ir::TraitDatum; -pub type AdtId = chalk_ir::AdtId; -pub type StructDatum = chalk_solve::rust_ir::AdtDatum; -pub type ImplId = chalk_ir::ImplId; -pub type ImplDatum = chalk_solve::rust_ir::ImplDatum; -pub type AssociatedTyValueId = chalk_solve::rust_ir::AssociatedTyValueId; -pub type AssociatedTyValue = chalk_solve::rust_ir::AssociatedTyValue; -pub type FnDefId = chalk_ir::FnDefId; -pub type FnDefDatum = chalk_solve::rust_ir::FnDefDatum; -pub type OpaqueTyId = chalk_ir::OpaqueTyId; -pub type OpaqueTyDatum = chalk_solve::rust_ir::OpaqueTyDatum; - -impl chalk_ir::interner::Interner for Interner { - type InternedType = Box>; // FIXME use Arc? 
- type InternedLifetime = chalk_ir::LifetimeData; - type InternedConst = Arc>; - type InternedConcreteConst = (); - type InternedGenericArg = chalk_ir::GenericArgData; - type InternedGoal = Arc>; - type InternedGoals = Vec>; - type InternedSubstitution = Vec>; - type InternedProgramClause = chalk_ir::ProgramClauseData; - type InternedProgramClauses = Arc<[chalk_ir::ProgramClause]>; - type InternedQuantifiedWhereClauses = Vec>; - type InternedVariableKinds = Vec>; - type InternedCanonicalVarKinds = Vec>; - type InternedConstraints = Vec>>; - type DefId = InternId; - type InternedAdtId = hir_def::AdtId; - type Identifier = TypeAliasId; - type FnAbi = (); - - fn debug_adt_id(type_kind_id: AdtId, fmt: &mut fmt::Formatter<'_>) -> Option { - tls::with_current_program(|prog| Some(prog?.debug_struct_id(type_kind_id, fmt))) - } - - fn debug_trait_id(type_kind_id: TraitId, fmt: &mut fmt::Formatter<'_>) -> Option { - tls::with_current_program(|prog| Some(prog?.debug_trait_id(type_kind_id, fmt))) - } - - fn debug_assoc_type_id(id: AssocTypeId, fmt: &mut fmt::Formatter<'_>) -> Option { - tls::with_current_program(|prog| Some(prog?.debug_assoc_type_id(id, fmt))) - } - - fn debug_alias( - alias: &chalk_ir::AliasTy, - fmt: &mut fmt::Formatter<'_>, - ) -> Option { - tls::with_current_program(|prog| Some(prog?.debug_alias(alias, fmt))) - } - - fn debug_projection_ty( - proj: &chalk_ir::ProjectionTy, - fmt: &mut fmt::Formatter<'_>, - ) -> Option { - tls::with_current_program(|prog| Some(prog?.debug_projection_ty(proj, fmt))) - } - - fn debug_opaque_ty( - opaque_ty: &chalk_ir::OpaqueTy, - fmt: &mut fmt::Formatter<'_>, - ) -> Option { - tls::with_current_program(|prog| Some(prog?.debug_opaque_ty(opaque_ty, fmt))) - } - - fn debug_opaque_ty_id( - opaque_ty_id: chalk_ir::OpaqueTyId, - fmt: &mut fmt::Formatter<'_>, - ) -> Option { - tls::with_current_program(|prog| Some(prog?.debug_opaque_ty_id(opaque_ty_id, fmt))) - } - - fn debug_ty(ty: &chalk_ir::Ty, fmt: &mut fmt::Formatter<'_>) -> 
Option { - tls::with_current_program(|prog| Some(prog?.debug_ty(ty, fmt))) - } - - fn debug_lifetime( - lifetime: &chalk_ir::Lifetime, - fmt: &mut fmt::Formatter<'_>, - ) -> Option { - tls::with_current_program(|prog| Some(prog?.debug_lifetime(lifetime, fmt))) - } - - fn debug_generic_arg( - parameter: &GenericArg, - fmt: &mut fmt::Formatter<'_>, - ) -> Option { - tls::with_current_program(|prog| Some(prog?.debug_generic_arg(parameter, fmt))) - } - - fn debug_goal(goal: &Goal, fmt: &mut fmt::Formatter<'_>) -> Option { - tls::with_current_program(|prog| Some(prog?.debug_goal(goal, fmt))) - } - - fn debug_goals( - goals: &chalk_ir::Goals, - fmt: &mut fmt::Formatter<'_>, - ) -> Option { - tls::with_current_program(|prog| Some(prog?.debug_goals(goals, fmt))) - } - - fn debug_program_clause_implication( - pci: &chalk_ir::ProgramClauseImplication, - fmt: &mut fmt::Formatter<'_>, - ) -> Option { - tls::with_current_program(|prog| Some(prog?.debug_program_clause_implication(pci, fmt))) - } - - fn debug_application_ty( - application_ty: &chalk_ir::ApplicationTy, - fmt: &mut fmt::Formatter<'_>, - ) -> Option { - tls::with_current_program(|prog| Some(prog?.debug_application_ty(application_ty, fmt))) - } - - fn debug_substitution( - substitution: &chalk_ir::Substitution, - fmt: &mut fmt::Formatter<'_>, - ) -> Option { - tls::with_current_program(|prog| Some(prog?.debug_substitution(substitution, fmt))) - } - - fn debug_separator_trait_ref( - separator_trait_ref: &chalk_ir::SeparatorTraitRef, - fmt: &mut fmt::Formatter<'_>, - ) -> Option { - tls::with_current_program(|prog| { - Some(prog?.debug_separator_trait_ref(separator_trait_ref, fmt)) - }) - } - - fn debug_fn_def_id( - fn_def_id: chalk_ir::FnDefId, - fmt: &mut fmt::Formatter<'_>, - ) -> Option { - tls::with_current_program(|prog| Some(prog?.debug_fn_def_id(fn_def_id, fmt))) - } - fn debug_const( - constant: &chalk_ir::Const, - fmt: &mut fmt::Formatter<'_>, - ) -> Option { - tls::with_current_program(|prog| 
Some(prog?.debug_const(constant, fmt))) - } - fn debug_variable_kinds( - variable_kinds: &chalk_ir::VariableKinds, - fmt: &mut fmt::Formatter<'_>, - ) -> Option { - tls::with_current_program(|prog| Some(prog?.debug_variable_kinds(variable_kinds, fmt))) - } - fn debug_variable_kinds_with_angles( - variable_kinds: &chalk_ir::VariableKinds, - fmt: &mut fmt::Formatter<'_>, - ) -> Option { - tls::with_current_program(|prog| { - Some(prog?.debug_variable_kinds_with_angles(variable_kinds, fmt)) - }) - } - fn debug_canonical_var_kinds( - canonical_var_kinds: &chalk_ir::CanonicalVarKinds, - fmt: &mut fmt::Formatter<'_>, - ) -> Option { - tls::with_current_program(|prog| { - Some(prog?.debug_canonical_var_kinds(canonical_var_kinds, fmt)) - }) - } - fn debug_program_clause( - clause: &chalk_ir::ProgramClause, - fmt: &mut fmt::Formatter<'_>, - ) -> Option { - tls::with_current_program(|prog| Some(prog?.debug_program_clause(clause, fmt))) - } - fn debug_program_clauses( - clauses: &chalk_ir::ProgramClauses, - fmt: &mut fmt::Formatter<'_>, - ) -> Option { - tls::with_current_program(|prog| Some(prog?.debug_program_clauses(clauses, fmt))) - } - fn debug_quantified_where_clauses( - clauses: &chalk_ir::QuantifiedWhereClauses, - fmt: &mut fmt::Formatter<'_>, - ) -> Option { - tls::with_current_program(|prog| Some(prog?.debug_quantified_where_clauses(clauses, fmt))) - } - - fn intern_ty(&self, ty: chalk_ir::TyData) -> Box> { - Box::new(ty) - } - - fn ty_data<'a>(&self, ty: &'a Box>) -> &'a chalk_ir::TyData { - ty - } - - fn intern_lifetime( - &self, - lifetime: chalk_ir::LifetimeData, - ) -> chalk_ir::LifetimeData { - lifetime - } - - fn lifetime_data<'a>( - &self, - lifetime: &'a chalk_ir::LifetimeData, - ) -> &'a chalk_ir::LifetimeData { - lifetime - } - - fn intern_const(&self, constant: chalk_ir::ConstData) -> Arc> { - Arc::new(constant) - } - - fn const_data<'a>( - &self, - constant: &'a Arc>, - ) -> &'a chalk_ir::ConstData { - constant - } - - fn const_eq(&self, _ty: &Box>, 
_c1: &(), _c2: &()) -> bool { - true - } - - fn intern_generic_arg( - &self, - parameter: chalk_ir::GenericArgData, - ) -> chalk_ir::GenericArgData { - parameter - } - - fn generic_arg_data<'a>( - &self, - parameter: &'a chalk_ir::GenericArgData, - ) -> &'a chalk_ir::GenericArgData { - parameter - } - - fn intern_goal(&self, goal: GoalData) -> Arc> { - Arc::new(goal) - } - - fn intern_goals( - &self, - data: impl IntoIterator, E>>, - ) -> Result { - data.into_iter().collect() - } - - fn goal_data<'a>(&self, goal: &'a Arc>) -> &'a GoalData { - goal - } - - fn goals_data<'a>(&self, goals: &'a Vec>) -> &'a [Goal] { - goals - } - - fn intern_substitution( - &self, - data: impl IntoIterator, E>>, - ) -> Result>, E> { - data.into_iter().collect() - } - - fn substitution_data<'a>( - &self, - substitution: &'a Vec>, - ) -> &'a [GenericArg] { - substitution - } - - fn intern_program_clause( - &self, - data: chalk_ir::ProgramClauseData, - ) -> chalk_ir::ProgramClauseData { - data - } - - fn program_clause_data<'a>( - &self, - clause: &'a chalk_ir::ProgramClauseData, - ) -> &'a chalk_ir::ProgramClauseData { - clause - } - - fn intern_program_clauses( - &self, - data: impl IntoIterator, E>>, - ) -> Result]>, E> { - data.into_iter().collect() - } - - fn program_clauses_data<'a>( - &self, - clauses: &'a Arc<[chalk_ir::ProgramClause]>, - ) -> &'a [chalk_ir::ProgramClause] { - &clauses - } - - fn intern_quantified_where_clauses( - &self, - data: impl IntoIterator, E>>, - ) -> Result { - data.into_iter().collect() - } - - fn quantified_where_clauses_data<'a>( - &self, - clauses: &'a Self::InternedQuantifiedWhereClauses, - ) -> &'a [chalk_ir::QuantifiedWhereClause] { - clauses - } - - fn intern_generic_arg_kinds( - &self, - data: impl IntoIterator, E>>, - ) -> Result { - data.into_iter().collect() - } - - fn variable_kinds_data<'a>( - &self, - parameter_kinds: &'a Self::InternedVariableKinds, - ) -> &'a [chalk_ir::VariableKind] { - ¶meter_kinds - } - - fn intern_canonical_var_kinds( 
- &self, - data: impl IntoIterator, E>>, - ) -> Result { - data.into_iter().collect() - } - - fn canonical_var_kinds_data<'a>( - &self, - canonical_var_kinds: &'a Self::InternedCanonicalVarKinds, - ) -> &'a [chalk_ir::CanonicalVarKind] { - &canonical_var_kinds - } - - fn intern_constraints( - &self, - data: impl IntoIterator>, E>>, - ) -> Result { - data.into_iter().collect() - } - - fn constraints_data<'a>( - &self, - constraints: &'a Self::InternedConstraints, - ) -> &'a [chalk_ir::InEnvironment>] { - constraints - } - fn debug_closure_id( - _fn_def_id: chalk_ir::ClosureId, - _fmt: &mut fmt::Formatter<'_>, - ) -> Option { - None - } - fn debug_constraints( - _clauses: &chalk_ir::Constraints, - _fmt: &mut fmt::Formatter<'_>, - ) -> Option { - None - } -} - -impl chalk_ir::interner::HasInterner for Interner { - type Interner = Self; -} diff --git a/crates/ra_hir_ty/src/traits/chalk/mapping.rs b/crates/ra_hir_ty/src/traits/chalk/mapping.rs deleted file mode 100644 index b3e92993d2..0000000000 --- a/crates/ra_hir_ty/src/traits/chalk/mapping.rs +++ /dev/null @@ -1,787 +0,0 @@ -//! This module contains the implementations of the `ToChalk` trait, which -//! handles conversion between our data types and their corresponding types in -//! Chalk (in both directions); plus some helper functions for more specialized -//! conversions. 
- -use chalk_ir::{ - cast::Cast, fold::shift::Shift, interner::HasInterner, PlaceholderIndex, Scalar, TypeName, - UniverseIndex, -}; -use chalk_solve::rust_ir; - -use hir_def::{type_ref::Mutability, AssocContainerId, GenericDefId, Lookup, TypeAliasId}; -use ra_db::salsa::InternKey; - -use crate::{ - db::HirDatabase, - primitive::{FloatBitness, FloatTy, IntBitness, IntTy, Signedness}, - traits::{Canonical, Obligation}, - ApplicationTy, CallableDefId, GenericPredicate, InEnvironment, OpaqueTy, OpaqueTyId, - ProjectionPredicate, ProjectionTy, Substs, TraitEnvironment, TraitRef, Ty, TyKind, TypeCtor, -}; - -use super::interner::*; -use super::*; - -impl ToChalk for Ty { - type Chalk = chalk_ir::Ty; - fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::Ty { - match self { - Ty::Apply(apply_ty) => match apply_ty.ctor { - TypeCtor::Ref(m) => ref_to_chalk(db, m, apply_ty.parameters), - TypeCtor::Array => array_to_chalk(db, apply_ty.parameters), - TypeCtor::FnPtr { num_args: _, is_varargs } => { - let substitution = apply_ty.parameters.to_chalk(db).shifted_in(&Interner); - chalk_ir::TyData::Function(chalk_ir::FnPointer { - num_binders: 0, - abi: (), - safety: chalk_ir::Safety::Safe, - variadic: is_varargs, - substitution, - }) - .intern(&Interner) - } - _ => { - let name = apply_ty.ctor.to_chalk(db); - let substitution = apply_ty.parameters.to_chalk(db); - chalk_ir::ApplicationTy { name, substitution }.cast(&Interner).intern(&Interner) - } - }, - Ty::Projection(proj_ty) => { - let associated_ty_id = proj_ty.associated_ty.to_chalk(db); - let substitution = proj_ty.parameters.to_chalk(db); - chalk_ir::AliasTy::Projection(chalk_ir::ProjectionTy { - associated_ty_id, - substitution, - }) - .cast(&Interner) - .intern(&Interner) - } - Ty::Placeholder(id) => { - let interned_id = db.intern_type_param_id(id); - PlaceholderIndex { - ui: UniverseIndex::ROOT, - idx: interned_id.as_intern_id().as_usize(), - } - .to_ty::(&Interner) - } - Ty::Bound(idx) => 
chalk_ir::TyData::BoundVar(idx).intern(&Interner), - Ty::Infer(_infer_ty) => panic!("uncanonicalized infer ty"), - Ty::Dyn(predicates) => { - let where_clauses = chalk_ir::QuantifiedWhereClauses::from_iter( - &Interner, - predicates.iter().filter(|p| !p.is_error()).cloned().map(|p| p.to_chalk(db)), - ); - let bounded_ty = chalk_ir::DynTy { - bounds: make_binders(where_clauses, 1), - lifetime: FAKE_PLACEHOLDER.to_lifetime(&Interner), - }; - chalk_ir::TyData::Dyn(bounded_ty).intern(&Interner) - } - Ty::Opaque(opaque_ty) => { - let opaque_ty_id = opaque_ty.opaque_ty_id.to_chalk(db); - let substitution = opaque_ty.parameters.to_chalk(db); - chalk_ir::TyData::Alias(chalk_ir::AliasTy::Opaque(chalk_ir::OpaqueTy { - opaque_ty_id, - substitution, - })) - .intern(&Interner) - } - Ty::Unknown => { - let substitution = chalk_ir::Substitution::empty(&Interner); - let name = TypeName::Error; - chalk_ir::ApplicationTy { name, substitution }.cast(&Interner).intern(&Interner) - } - } - } - fn from_chalk(db: &dyn HirDatabase, chalk: chalk_ir::Ty) -> Self { - match chalk.data(&Interner).clone() { - chalk_ir::TyData::Apply(apply_ty) => match apply_ty.name { - TypeName::Error => Ty::Unknown, - TypeName::Ref(m) => ref_from_chalk(db, m, apply_ty.substitution), - TypeName::Array => array_from_chalk(db, apply_ty.substitution), - _ => { - let ctor = from_chalk(db, apply_ty.name); - let parameters = from_chalk(db, apply_ty.substitution); - Ty::Apply(ApplicationTy { ctor, parameters }) - } - }, - chalk_ir::TyData::Placeholder(idx) => { - assert_eq!(idx.ui, UniverseIndex::ROOT); - let interned_id = crate::db::GlobalTypeParamId::from_intern_id( - crate::salsa::InternId::from(idx.idx), - ); - Ty::Placeholder(db.lookup_intern_type_param_id(interned_id)) - } - chalk_ir::TyData::Alias(chalk_ir::AliasTy::Projection(proj)) => { - let associated_ty = from_chalk(db, proj.associated_ty_id); - let parameters = from_chalk(db, proj.substitution); - Ty::Projection(ProjectionTy { associated_ty, parameters }) 
- } - chalk_ir::TyData::Alias(chalk_ir::AliasTy::Opaque(opaque_ty)) => { - let impl_trait_id = from_chalk(db, opaque_ty.opaque_ty_id); - let parameters = from_chalk(db, opaque_ty.substitution); - Ty::Opaque(OpaqueTy { opaque_ty_id: impl_trait_id, parameters }) - } - chalk_ir::TyData::Function(chalk_ir::FnPointer { - num_binders, - variadic, - substitution, - .. - }) => { - assert_eq!(num_binders, 0); - let parameters: Substs = from_chalk( - db, - substitution.shifted_out(&Interner).expect("fn ptr should have no binders"), - ); - Ty::Apply(ApplicationTy { - ctor: TypeCtor::FnPtr { - num_args: (parameters.len() - 1) as u16, - is_varargs: variadic, - }, - parameters, - }) - } - chalk_ir::TyData::BoundVar(idx) => Ty::Bound(idx), - chalk_ir::TyData::InferenceVar(_iv, _kind) => Ty::Unknown, - chalk_ir::TyData::Dyn(where_clauses) => { - assert_eq!(where_clauses.bounds.binders.len(&Interner), 1); - let predicates = where_clauses - .bounds - .skip_binders() - .iter(&Interner) - .map(|c| from_chalk(db, c.clone())) - .collect(); - Ty::Dyn(predicates) - } - } - } -} - -const FAKE_PLACEHOLDER: PlaceholderIndex = - PlaceholderIndex { ui: UniverseIndex::ROOT, idx: usize::MAX }; - -/// We currently don't model lifetimes, but Chalk does. So, we have to insert a -/// fake lifetime here, because Chalks built-in logic may expect it to be there. -fn ref_to_chalk( - db: &dyn HirDatabase, - mutability: Mutability, - subst: Substs, -) -> chalk_ir::Ty { - let arg = subst[0].clone().to_chalk(db); - let lifetime = FAKE_PLACEHOLDER.to_lifetime(&Interner); - chalk_ir::ApplicationTy { - name: TypeName::Ref(mutability.to_chalk(db)), - substitution: chalk_ir::Substitution::from_iter( - &Interner, - vec![lifetime.cast(&Interner), arg.cast(&Interner)], - ), - } - .intern(&Interner) -} - -/// Here we remove the lifetime from the type we got from Chalk. 
-fn ref_from_chalk( - db: &dyn HirDatabase, - mutability: chalk_ir::Mutability, - subst: chalk_ir::Substitution, -) -> Ty { - let tys = subst - .iter(&Interner) - .filter_map(|p| Some(from_chalk(db, p.ty(&Interner)?.clone()))) - .collect(); - Ty::apply(TypeCtor::Ref(from_chalk(db, mutability)), Substs(tys)) -} - -/// We currently don't model constants, but Chalk does. So, we have to insert a -/// fake constant here, because Chalks built-in logic may expect it to be there. -fn array_to_chalk(db: &dyn HirDatabase, subst: Substs) -> chalk_ir::Ty { - let arg = subst[0].clone().to_chalk(db); - let usize_ty = chalk_ir::ApplicationTy { - name: TypeName::Scalar(Scalar::Uint(chalk_ir::UintTy::Usize)), - substitution: chalk_ir::Substitution::empty(&Interner), - } - .intern(&Interner); - let const_ = FAKE_PLACEHOLDER.to_const(&Interner, usize_ty); - chalk_ir::ApplicationTy { - name: TypeName::Array, - substitution: chalk_ir::Substitution::from_iter( - &Interner, - vec![arg.cast(&Interner), const_.cast(&Interner)], - ), - } - .intern(&Interner) -} - -/// Here we remove the const from the type we got from Chalk. 
-fn array_from_chalk(db: &dyn HirDatabase, subst: chalk_ir::Substitution) -> Ty { - let tys = subst - .iter(&Interner) - .filter_map(|p| Some(from_chalk(db, p.ty(&Interner)?.clone()))) - .collect(); - Ty::apply(TypeCtor::Array, Substs(tys)) -} - -impl ToChalk for Substs { - type Chalk = chalk_ir::Substitution; - - fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::Substitution { - chalk_ir::Substitution::from_iter(&Interner, self.iter().map(|ty| ty.clone().to_chalk(db))) - } - - fn from_chalk(db: &dyn HirDatabase, parameters: chalk_ir::Substitution) -> Substs { - let tys = parameters - .iter(&Interner) - .map(|p| match p.ty(&Interner) { - Some(ty) => from_chalk(db, ty.clone()), - None => unimplemented!(), - }) - .collect(); - Substs(tys) - } -} - -impl ToChalk for TraitRef { - type Chalk = chalk_ir::TraitRef; - - fn to_chalk(self: TraitRef, db: &dyn HirDatabase) -> chalk_ir::TraitRef { - let trait_id = self.trait_.to_chalk(db); - let substitution = self.substs.to_chalk(db); - chalk_ir::TraitRef { trait_id, substitution } - } - - fn from_chalk(db: &dyn HirDatabase, trait_ref: chalk_ir::TraitRef) -> Self { - let trait_ = from_chalk(db, trait_ref.trait_id); - let substs = from_chalk(db, trait_ref.substitution); - TraitRef { trait_, substs } - } -} - -impl ToChalk for hir_def::TraitId { - type Chalk = TraitId; - - fn to_chalk(self, _db: &dyn HirDatabase) -> TraitId { - chalk_ir::TraitId(self.as_intern_id()) - } - - fn from_chalk(_db: &dyn HirDatabase, trait_id: TraitId) -> hir_def::TraitId { - InternKey::from_intern_id(trait_id.0) - } -} - -impl ToChalk for OpaqueTyId { - type Chalk = chalk_ir::OpaqueTyId; - - fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::OpaqueTyId { - db.intern_impl_trait_id(self).into() - } - - fn from_chalk( - db: &dyn HirDatabase, - opaque_ty_id: chalk_ir::OpaqueTyId, - ) -> OpaqueTyId { - db.lookup_intern_impl_trait_id(opaque_ty_id.into()) - } -} - -impl ToChalk for TypeCtor { - type Chalk = TypeName; - - fn to_chalk(self, db: &dyn 
HirDatabase) -> TypeName { - match self { - TypeCtor::AssociatedType(type_alias) => { - let type_id = type_alias.to_chalk(db); - TypeName::AssociatedType(type_id) - } - - TypeCtor::OpaqueType(impl_trait_id) => { - let id = impl_trait_id.to_chalk(db); - TypeName::OpaqueType(id) - } - - TypeCtor::Bool => TypeName::Scalar(Scalar::Bool), - TypeCtor::Char => TypeName::Scalar(Scalar::Char), - TypeCtor::Int(int_ty) => TypeName::Scalar(int_ty_to_chalk(int_ty)), - TypeCtor::Float(FloatTy { bitness: FloatBitness::X32 }) => { - TypeName::Scalar(Scalar::Float(chalk_ir::FloatTy::F32)) - } - TypeCtor::Float(FloatTy { bitness: FloatBitness::X64 }) => { - TypeName::Scalar(Scalar::Float(chalk_ir::FloatTy::F64)) - } - - TypeCtor::Tuple { cardinality } => TypeName::Tuple(cardinality.into()), - TypeCtor::RawPtr(mutability) => TypeName::Raw(mutability.to_chalk(db)), - TypeCtor::Slice => TypeName::Slice, - TypeCtor::Array => TypeName::Array, - TypeCtor::Ref(mutability) => TypeName::Ref(mutability.to_chalk(db)), - TypeCtor::Str => TypeName::Str, - TypeCtor::FnDef(callable_def) => { - let id = callable_def.to_chalk(db); - TypeName::FnDef(id) - } - TypeCtor::Never => TypeName::Never, - - TypeCtor::Closure { def, expr } => { - let closure_id = db.intern_closure((def, expr)); - TypeName::Closure(closure_id.into()) - } - - TypeCtor::Adt(adt_id) => TypeName::Adt(chalk_ir::AdtId(adt_id)), - - TypeCtor::FnPtr { .. 
} => { - // This should not be reached, since Chalk doesn't represent - // function pointers with TypeName - unreachable!() - } - } - } - - fn from_chalk(db: &dyn HirDatabase, type_name: TypeName) -> TypeCtor { - match type_name { - TypeName::Adt(struct_id) => TypeCtor::Adt(struct_id.0), - TypeName::AssociatedType(type_id) => TypeCtor::AssociatedType(from_chalk(db, type_id)), - TypeName::OpaqueType(opaque_type_id) => { - TypeCtor::OpaqueType(from_chalk(db, opaque_type_id)) - } - - TypeName::Scalar(Scalar::Bool) => TypeCtor::Bool, - TypeName::Scalar(Scalar::Char) => TypeCtor::Char, - TypeName::Scalar(Scalar::Int(int_ty)) => TypeCtor::Int(IntTy { - signedness: Signedness::Signed, - bitness: bitness_from_chalk_int(int_ty), - }), - TypeName::Scalar(Scalar::Uint(uint_ty)) => TypeCtor::Int(IntTy { - signedness: Signedness::Unsigned, - bitness: bitness_from_chalk_uint(uint_ty), - }), - TypeName::Scalar(Scalar::Float(chalk_ir::FloatTy::F32)) => { - TypeCtor::Float(FloatTy { bitness: FloatBitness::X32 }) - } - TypeName::Scalar(Scalar::Float(chalk_ir::FloatTy::F64)) => { - TypeCtor::Float(FloatTy { bitness: FloatBitness::X64 }) - } - TypeName::Tuple(cardinality) => TypeCtor::Tuple { cardinality: cardinality as u16 }, - TypeName::Raw(mutability) => TypeCtor::RawPtr(from_chalk(db, mutability)), - TypeName::Slice => TypeCtor::Slice, - TypeName::Ref(mutability) => TypeCtor::Ref(from_chalk(db, mutability)), - TypeName::Str => TypeCtor::Str, - TypeName::Never => TypeCtor::Never, - - TypeName::FnDef(fn_def_id) => { - let callable_def = from_chalk(db, fn_def_id); - TypeCtor::FnDef(callable_def) - } - TypeName::Array => TypeCtor::Array, - - TypeName::Closure(id) => { - let id: crate::db::ClosureId = id.into(); - let (def, expr) = db.lookup_intern_closure(id); - TypeCtor::Closure { def, expr } - } - - TypeName::Error => { - // this should not be reached, since we don't represent TypeName::Error with TypeCtor - unreachable!() - } - } - } -} - -fn bitness_from_chalk_uint(uint_ty: 
chalk_ir::UintTy) -> IntBitness { - use chalk_ir::UintTy; - - match uint_ty { - UintTy::Usize => IntBitness::Xsize, - UintTy::U8 => IntBitness::X8, - UintTy::U16 => IntBitness::X16, - UintTy::U32 => IntBitness::X32, - UintTy::U64 => IntBitness::X64, - UintTy::U128 => IntBitness::X128, - } -} - -fn bitness_from_chalk_int(int_ty: chalk_ir::IntTy) -> IntBitness { - use chalk_ir::IntTy; - - match int_ty { - IntTy::Isize => IntBitness::Xsize, - IntTy::I8 => IntBitness::X8, - IntTy::I16 => IntBitness::X16, - IntTy::I32 => IntBitness::X32, - IntTy::I64 => IntBitness::X64, - IntTy::I128 => IntBitness::X128, - } -} - -fn int_ty_to_chalk(int_ty: IntTy) -> Scalar { - use chalk_ir::{IntTy, UintTy}; - - match int_ty.signedness { - Signedness::Signed => Scalar::Int(match int_ty.bitness { - IntBitness::Xsize => IntTy::Isize, - IntBitness::X8 => IntTy::I8, - IntBitness::X16 => IntTy::I16, - IntBitness::X32 => IntTy::I32, - IntBitness::X64 => IntTy::I64, - IntBitness::X128 => IntTy::I128, - }), - Signedness::Unsigned => Scalar::Uint(match int_ty.bitness { - IntBitness::Xsize => UintTy::Usize, - IntBitness::X8 => UintTy::U8, - IntBitness::X16 => UintTy::U16, - IntBitness::X32 => UintTy::U32, - IntBitness::X64 => UintTy::U64, - IntBitness::X128 => UintTy::U128, - }), - } -} - -impl ToChalk for Mutability { - type Chalk = chalk_ir::Mutability; - fn to_chalk(self, _db: &dyn HirDatabase) -> Self::Chalk { - match self { - Mutability::Shared => chalk_ir::Mutability::Not, - Mutability::Mut => chalk_ir::Mutability::Mut, - } - } - fn from_chalk(_db: &dyn HirDatabase, chalk: Self::Chalk) -> Self { - match chalk { - chalk_ir::Mutability::Mut => Mutability::Mut, - chalk_ir::Mutability::Not => Mutability::Shared, - } - } -} - -impl ToChalk for hir_def::ImplId { - type Chalk = ImplId; - - fn to_chalk(self, _db: &dyn HirDatabase) -> ImplId { - chalk_ir::ImplId(self.as_intern_id()) - } - - fn from_chalk(_db: &dyn HirDatabase, impl_id: ImplId) -> hir_def::ImplId { - 
InternKey::from_intern_id(impl_id.0) - } -} - -impl ToChalk for CallableDefId { - type Chalk = FnDefId; - - fn to_chalk(self, db: &dyn HirDatabase) -> FnDefId { - db.intern_callable_def(self).into() - } - - fn from_chalk(db: &dyn HirDatabase, fn_def_id: FnDefId) -> CallableDefId { - db.lookup_intern_callable_def(fn_def_id.into()) - } -} - -impl ToChalk for TypeAliasId { - type Chalk = AssocTypeId; - - fn to_chalk(self, _db: &dyn HirDatabase) -> AssocTypeId { - chalk_ir::AssocTypeId(self.as_intern_id()) - } - - fn from_chalk(_db: &dyn HirDatabase, type_alias_id: AssocTypeId) -> TypeAliasId { - InternKey::from_intern_id(type_alias_id.0) - } -} - -pub struct TypeAliasAsValue(pub TypeAliasId); - -impl ToChalk for TypeAliasAsValue { - type Chalk = AssociatedTyValueId; - - fn to_chalk(self, _db: &dyn HirDatabase) -> AssociatedTyValueId { - rust_ir::AssociatedTyValueId(self.0.as_intern_id()) - } - - fn from_chalk( - _db: &dyn HirDatabase, - assoc_ty_value_id: AssociatedTyValueId, - ) -> TypeAliasAsValue { - TypeAliasAsValue(TypeAliasId::from_intern_id(assoc_ty_value_id.0)) - } -} - -impl ToChalk for GenericPredicate { - type Chalk = chalk_ir::QuantifiedWhereClause; - - fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::QuantifiedWhereClause { - match self { - GenericPredicate::Implemented(trait_ref) => { - let chalk_trait_ref = trait_ref.to_chalk(db); - let chalk_trait_ref = chalk_trait_ref.shifted_in(&Interner); - make_binders(chalk_ir::WhereClause::Implemented(chalk_trait_ref), 0) - } - GenericPredicate::Projection(projection_pred) => { - let ty = projection_pred.ty.to_chalk(db).shifted_in(&Interner); - let projection = projection_pred.projection_ty.to_chalk(db).shifted_in(&Interner); - let alias = chalk_ir::AliasTy::Projection(projection); - make_binders(chalk_ir::WhereClause::AliasEq(chalk_ir::AliasEq { alias, ty }), 0) - } - GenericPredicate::Error => panic!("tried passing GenericPredicate::Error to Chalk"), - } - } - - fn from_chalk( - db: &dyn HirDatabase, - 
where_clause: chalk_ir::QuantifiedWhereClause, - ) -> GenericPredicate { - // we don't produce any where clauses with binders and can't currently deal with them - match where_clause - .skip_binders() - .shifted_out(&Interner) - .expect("unexpected bound vars in where clause") - { - chalk_ir::WhereClause::Implemented(tr) => { - GenericPredicate::Implemented(from_chalk(db, tr)) - } - chalk_ir::WhereClause::AliasEq(projection_eq) => { - let projection_ty = from_chalk( - db, - match projection_eq.alias { - chalk_ir::AliasTy::Projection(p) => p, - _ => unimplemented!(), - }, - ); - let ty = from_chalk(db, projection_eq.ty); - GenericPredicate::Projection(ProjectionPredicate { projection_ty, ty }) - } - - chalk_ir::WhereClause::LifetimeOutlives(_) => { - // we shouldn't get these from Chalk - panic!("encountered LifetimeOutlives from Chalk") - } - - chalk_ir::WhereClause::TypeOutlives(_) => { - // we shouldn't get these from Chalk - panic!("encountered TypeOutlives from Chalk") - } - } - } -} - -impl ToChalk for ProjectionTy { - type Chalk = chalk_ir::ProjectionTy; - - fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::ProjectionTy { - chalk_ir::ProjectionTy { - associated_ty_id: self.associated_ty.to_chalk(db), - substitution: self.parameters.to_chalk(db), - } - } - - fn from_chalk( - db: &dyn HirDatabase, - projection_ty: chalk_ir::ProjectionTy, - ) -> ProjectionTy { - ProjectionTy { - associated_ty: from_chalk(db, projection_ty.associated_ty_id), - parameters: from_chalk(db, projection_ty.substitution), - } - } -} - -impl ToChalk for ProjectionPredicate { - type Chalk = chalk_ir::AliasEq; - - fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::AliasEq { - chalk_ir::AliasEq { - alias: chalk_ir::AliasTy::Projection(self.projection_ty.to_chalk(db)), - ty: self.ty.to_chalk(db), - } - } - - fn from_chalk(_db: &dyn HirDatabase, _normalize: chalk_ir::AliasEq) -> Self { - unimplemented!() - } -} - -impl ToChalk for Obligation { - type Chalk = chalk_ir::DomainGoal; - - fn 
to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::DomainGoal { - match self { - Obligation::Trait(tr) => tr.to_chalk(db).cast(&Interner), - Obligation::Projection(pr) => pr.to_chalk(db).cast(&Interner), - } - } - - fn from_chalk(_db: &dyn HirDatabase, _goal: chalk_ir::DomainGoal) -> Self { - unimplemented!() - } -} - -impl ToChalk for Canonical -where - T: ToChalk, - T::Chalk: HasInterner, -{ - type Chalk = chalk_ir::Canonical; - - fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::Canonical { - let kinds = self - .kinds - .iter() - .map(|k| match k { - TyKind::General => chalk_ir::TyKind::General, - TyKind::Integer => chalk_ir::TyKind::Integer, - TyKind::Float => chalk_ir::TyKind::Float, - }) - .map(|tk| { - chalk_ir::CanonicalVarKind::new( - chalk_ir::VariableKind::Ty(tk), - chalk_ir::UniverseIndex::ROOT, - ) - }); - let value = self.value.to_chalk(db); - chalk_ir::Canonical { - value, - binders: chalk_ir::CanonicalVarKinds::from_iter(&Interner, kinds), - } - } - - fn from_chalk(db: &dyn HirDatabase, canonical: chalk_ir::Canonical) -> Canonical { - let kinds = canonical - .binders - .iter(&Interner) - .map(|k| match k.kind { - chalk_ir::VariableKind::Ty(tk) => match tk { - chalk_ir::TyKind::General => TyKind::General, - chalk_ir::TyKind::Integer => TyKind::Integer, - chalk_ir::TyKind::Float => TyKind::Float, - }, - chalk_ir::VariableKind::Lifetime => panic!("unexpected lifetime from Chalk"), - chalk_ir::VariableKind::Const(_) => panic!("unexpected const from Chalk"), - }) - .collect(); - Canonical { kinds, value: from_chalk(db, canonical.value) } - } -} - -impl ToChalk for Arc { - type Chalk = chalk_ir::Environment; - - fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::Environment { - let mut clauses = Vec::new(); - for pred in &self.predicates { - if pred.is_error() { - // for env, we just ignore errors - continue; - } - let program_clause: chalk_ir::ProgramClause = - pred.clone().to_chalk(db).cast(&Interner); - 
clauses.push(program_clause.into_from_env_clause(&Interner)); - } - chalk_ir::Environment::new(&Interner).add_clauses(&Interner, clauses) - } - - fn from_chalk( - _db: &dyn HirDatabase, - _env: chalk_ir::Environment, - ) -> Arc { - unimplemented!() - } -} - -impl ToChalk for InEnvironment -where - T::Chalk: chalk_ir::interner::HasInterner, -{ - type Chalk = chalk_ir::InEnvironment; - - fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::InEnvironment { - chalk_ir::InEnvironment { - environment: self.environment.to_chalk(db), - goal: self.value.to_chalk(db), - } - } - - fn from_chalk( - db: &dyn HirDatabase, - in_env: chalk_ir::InEnvironment, - ) -> InEnvironment { - InEnvironment { - environment: from_chalk(db, in_env.environment), - value: from_chalk(db, in_env.goal), - } - } -} - -pub(super) fn make_binders(value: T, num_vars: usize) -> chalk_ir::Binders -where - T: HasInterner, -{ - chalk_ir::Binders::new( - chalk_ir::VariableKinds::from_iter( - &Interner, - std::iter::repeat(chalk_ir::VariableKind::Ty(chalk_ir::TyKind::General)).take(num_vars), - ), - value, - ) -} - -pub(super) fn convert_where_clauses( - db: &dyn HirDatabase, - def: GenericDefId, - substs: &Substs, -) -> Vec> { - let generic_predicates = db.generic_predicates(def); - let mut result = Vec::with_capacity(generic_predicates.len()); - for pred in generic_predicates.iter() { - if pred.value.is_error() { - // skip errored predicates completely - continue; - } - result.push(pred.clone().subst(substs).to_chalk(db)); - } - result -} - -pub(super) fn generic_predicate_to_inline_bound( - db: &dyn HirDatabase, - pred: &GenericPredicate, - self_ty: &Ty, -) -> Option> { - // An InlineBound is like a GenericPredicate, except the self type is left out. - // We don't have a special type for this, but Chalk does. 
- match pred { - GenericPredicate::Implemented(trait_ref) => { - if &trait_ref.substs[0] != self_ty { - // we can only convert predicates back to type bounds if they - // have the expected self type - return None; - } - let args_no_self = trait_ref.substs[1..] - .iter() - .map(|ty| ty.clone().to_chalk(db).cast(&Interner)) - .collect(); - let trait_bound = - rust_ir::TraitBound { trait_id: trait_ref.trait_.to_chalk(db), args_no_self }; - Some(rust_ir::InlineBound::TraitBound(trait_bound)) - } - GenericPredicate::Projection(proj) => { - if &proj.projection_ty.parameters[0] != self_ty { - return None; - } - let trait_ = match proj.projection_ty.associated_ty.lookup(db.upcast()).container { - AssocContainerId::TraitId(t) => t, - _ => panic!("associated type not in trait"), - }; - let args_no_self = proj.projection_ty.parameters[1..] - .iter() - .map(|ty| ty.clone().to_chalk(db).cast(&Interner)) - .collect(); - let alias_eq_bound = rust_ir::AliasEqBound { - value: proj.ty.clone().to_chalk(db), - trait_bound: rust_ir::TraitBound { trait_id: trait_.to_chalk(db), args_no_self }, - associated_ty_id: proj.projection_ty.associated_ty.to_chalk(db), - parameters: Vec::new(), // FIXME we don't support generic associated types yet - }; - Some(rust_ir::InlineBound::AliasEqBound(alias_eq_bound)) - } - GenericPredicate::Error => None, - } -} diff --git a/crates/ra_ide/Cargo.toml b/crates/ra_ide/Cargo.toml deleted file mode 100644 index f4181c4eb8..0000000000 --- a/crates/ra_ide/Cargo.toml +++ /dev/null @@ -1,40 +0,0 @@ -[package] -edition = "2018" -name = "ra_ide" -version = "0.1.0" -authors = ["rust-analyzer developers"] -license = "MIT OR Apache-2.0" - -[lib] -doctest = false - -[features] -wasm = [] - -[dependencies] -either = "1.5.3" -indexmap = "1.3.2" -itertools = "0.9.0" -log = "0.4.8" -rustc-hash = "1.1.0" -oorandom = "11.1.2" - -stdx = { path = "../stdx" } - -ra_syntax = { path = "../ra_syntax" } -ra_text_edit = { path = "../ra_text_edit" } -ra_db = { path = "../ra_db" } 
-ra_ide_db = { path = "../ra_ide_db" } -ra_cfg = { path = "../ra_cfg" } -ra_fmt = { path = "../ra_fmt" } -ra_prof = { path = "../ra_prof" } -test_utils = { path = "../test_utils" } -ra_assists = { path = "../ra_assists" } -ra_ssr = { path = "../ra_ssr" } - -# ra_ide should depend only on the top-level `hir` package. if you need -# something from some `hir_xxx` subpackage, reexport the API via `hir`. -hir = { path = "../ra_hir", package = "ra_hir" } - -[dev-dependencies] -expect = { path = "../expect" } diff --git a/crates/ra_ide/src/call_hierarchy.rs b/crates/ra_ide/src/call_hierarchy.rs deleted file mode 100644 index 1fcaf4a32b..0000000000 --- a/crates/ra_ide/src/call_hierarchy.rs +++ /dev/null @@ -1,393 +0,0 @@ -//! Entry point for call-hierarchy - -use indexmap::IndexMap; - -use hir::Semantics; -use ra_ide_db::RootDatabase; -use ra_syntax::{ast, match_ast, AstNode, TextRange}; - -use crate::{ - call_info::FnCallNode, display::ToNav, goto_definition, references, FilePosition, - NavigationTarget, RangeInfo, -}; - -#[derive(Debug, Clone)] -pub struct CallItem { - pub target: NavigationTarget, - pub ranges: Vec, -} - -impl CallItem { - #[cfg(test)] - pub(crate) fn assert_match(&self, expected: &str) { - let actual = self.debug_render(); - test_utils::assert_eq_text!(expected.trim(), actual.trim(),); - } - - #[cfg(test)] - pub(crate) fn debug_render(&self) -> String { - format!("{} : {:?}", self.target.debug_render(), self.ranges) - } -} - -pub(crate) fn call_hierarchy( - db: &RootDatabase, - position: FilePosition, -) -> Option>> { - goto_definition::goto_definition(db, position) -} - -pub(crate) fn incoming_calls(db: &RootDatabase, position: FilePosition) -> Option> { - let sema = Semantics::new(db); - - // 1. Find all refs - // 2. Loop through refs and determine unique fndef. This will become our `from: CallHierarchyItem,` in the reply. - // 3. Add ranges relative to the start of the fndef. 
- let refs = references::find_all_refs(&sema, position, None)?; - - let mut calls = CallLocations::default(); - - for reference in refs.info.references() { - let file_id = reference.file_range.file_id; - let file = sema.parse(file_id); - let file = file.syntax(); - let token = file.token_at_offset(reference.file_range.range.start()).next()?; - let token = sema.descend_into_macros(token); - let syntax = token.parent(); - - // This target is the containing function - if let Some(nav) = syntax.ancestors().find_map(|node| { - match_ast! { - match node { - ast::Fn(it) => { - let def = sema.to_def(&it)?; - Some(def.to_nav(sema.db)) - }, - _ => None, - } - } - }) { - let relative_range = reference.file_range.range; - calls.add(&nav, relative_range); - } - } - - Some(calls.into_items()) -} - -pub(crate) fn outgoing_calls(db: &RootDatabase, position: FilePosition) -> Option> { - let sema = Semantics::new(db); - let file_id = position.file_id; - let file = sema.parse(file_id); - let file = file.syntax(); - let token = file.token_at_offset(position.offset).next()?; - let token = sema.descend_into_macros(token); - let syntax = token.parent(); - - let mut calls = CallLocations::default(); - - syntax - .descendants() - .filter_map(|node| FnCallNode::with_node_exact(&node)) - .filter_map(|call_node| { - let name_ref = call_node.name_ref()?; - - if let Some(func_target) = match &call_node { - FnCallNode::CallExpr(expr) => { - //FIXME: Type::as_callable is broken - let callable = sema.type_of_expr(&expr.expr()?)?.as_callable(db)?; - match callable.kind() { - hir::CallableKind::Function(it) => { - let fn_def: hir::Function = it.into(); - let nav = fn_def.to_nav(db); - Some(nav) - } - _ => None, - } - } - FnCallNode::MethodCallExpr(expr) => { - let function = sema.resolve_method_call(&expr)?; - Some(function.to_nav(db)) - } - } { - Some((func_target, name_ref.syntax().text_range())) - } else { - None - } - }) - .for_each(|(nav, range)| calls.add(&nav, range)); - - 
Some(calls.into_items()) -} - -#[derive(Default)] -struct CallLocations { - funcs: IndexMap>, -} - -impl CallLocations { - fn add(&mut self, target: &NavigationTarget, range: TextRange) { - self.funcs.entry(target.clone()).or_default().push(range); - } - - fn into_items(self) -> Vec { - self.funcs.into_iter().map(|(target, ranges)| CallItem { target, ranges }).collect() - } -} - -#[cfg(test)] -mod tests { - use ra_db::FilePosition; - - use crate::mock_analysis::analysis_and_position; - - fn check_hierarchy( - ra_fixture: &str, - expected: &str, - expected_incoming: &[&str], - expected_outgoing: &[&str], - ) { - let (analysis, pos) = analysis_and_position(ra_fixture); - - let mut navs = analysis.call_hierarchy(pos).unwrap().unwrap().info; - assert_eq!(navs.len(), 1); - let nav = navs.pop().unwrap(); - nav.assert_match(expected); - - let item_pos = - FilePosition { file_id: nav.file_id, offset: nav.focus_or_full_range().start() }; - let incoming_calls = analysis.incoming_calls(item_pos).unwrap().unwrap(); - assert_eq!(incoming_calls.len(), expected_incoming.len()); - - for call in 0..incoming_calls.len() { - incoming_calls[call].assert_match(expected_incoming[call]); - } - - let outgoing_calls = analysis.outgoing_calls(item_pos).unwrap().unwrap(); - assert_eq!(outgoing_calls.len(), expected_outgoing.len()); - - for call in 0..outgoing_calls.len() { - outgoing_calls[call].assert_match(expected_outgoing[call]); - } - } - - #[test] - fn test_call_hierarchy_on_ref() { - check_hierarchy( - r#" -//- /lib.rs -fn callee() {} -fn caller() { - call<|>ee(); -} -"#, - "callee FN FileId(1) 0..14 3..9", - &["caller FN FileId(1) 15..44 18..24 : [33..39]"], - &[], - ); - } - - #[test] - fn test_call_hierarchy_on_def() { - check_hierarchy( - r#" -//- /lib.rs -fn call<|>ee() {} -fn caller() { - callee(); -} -"#, - "callee FN FileId(1) 0..14 3..9", - &["caller FN FileId(1) 15..44 18..24 : [33..39]"], - &[], - ); - } - - #[test] - fn test_call_hierarchy_in_same_fn() { - check_hierarchy( 
- r#" -//- /lib.rs -fn callee() {} -fn caller() { - call<|>ee(); - callee(); -} -"#, - "callee FN FileId(1) 0..14 3..9", - &["caller FN FileId(1) 15..58 18..24 : [33..39, 47..53]"], - &[], - ); - } - - #[test] - fn test_call_hierarchy_in_different_fn() { - check_hierarchy( - r#" -//- /lib.rs -fn callee() {} -fn caller1() { - call<|>ee(); -} - -fn caller2() { - callee(); -} -"#, - "callee FN FileId(1) 0..14 3..9", - &[ - "caller1 FN FileId(1) 15..45 18..25 : [34..40]", - "caller2 FN FileId(1) 47..77 50..57 : [66..72]", - ], - &[], - ); - } - - #[test] - fn test_call_hierarchy_in_tests_mod() { - check_hierarchy( - r#" -//- /lib.rs cfg:test -fn callee() {} -fn caller1() { - call<|>ee(); -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_caller() { - callee(); - } -} -"#, - "callee FN FileId(1) 0..14 3..9", - &[ - "caller1 FN FileId(1) 15..45 18..25 : [34..40]", - "test_caller FN FileId(1) 95..149 110..121 : [134..140]", - ], - &[], - ); - } - - #[test] - fn test_call_hierarchy_in_different_files() { - check_hierarchy( - r#" -//- /lib.rs -mod foo; -use foo::callee; - -fn caller() { - call<|>ee(); -} - -//- /foo/mod.rs -pub fn callee() {} -"#, - "callee FN FileId(2) 0..18 7..13", - &["caller FN FileId(1) 27..56 30..36 : [45..51]"], - &[], - ); - } - - #[test] - fn test_call_hierarchy_outgoing() { - check_hierarchy( - r#" -//- /lib.rs -fn callee() {} -fn call<|>er() { - callee(); - callee(); -} -"#, - "caller FN FileId(1) 15..58 18..24", - &[], - &["callee FN FileId(1) 0..14 3..9 : [33..39, 47..53]"], - ); - } - - #[test] - fn test_call_hierarchy_outgoing_in_different_files() { - check_hierarchy( - r#" -//- /lib.rs -mod foo; -use foo::callee; - -fn call<|>er() { - callee(); -} - -//- /foo/mod.rs -pub fn callee() {} -"#, - "caller FN FileId(1) 27..56 30..36", - &[], - &["callee FN FileId(2) 0..18 7..13 : [45..51]"], - ); - } - - #[test] - fn test_call_hierarchy_incoming_outgoing() { - check_hierarchy( - r#" -//- /lib.rs -fn caller1() { - call<|>er2(); 
-} - -fn caller2() { - caller3(); -} - -fn caller3() { - -} -"#, - "caller2 FN FileId(1) 33..64 36..43", - &["caller1 FN FileId(1) 0..31 3..10 : [19..26]"], - &["caller3 FN FileId(1) 66..83 69..76 : [52..59]"], - ); - } - - #[test] - fn test_call_hierarchy_issue_5103() { - check_hierarchy( - r#" -fn a() { - b() -} - -fn b() {} - -fn main() { - a<|>() -} -"#, - "a FN FileId(1) 0..18 3..4", - &["main FN FileId(1) 31..52 34..38 : [47..48]"], - &["b FN FileId(1) 20..29 23..24 : [13..14]"], - ); - - check_hierarchy( - r#" -fn a() { - b<|>() -} - -fn b() {} - -fn main() { - a() -} -"#, - "b FN FileId(1) 20..29 23..24", - &["a FN FileId(1) 0..18 3..4 : [13..14]"], - &[], - ); - } -} diff --git a/crates/ra_ide/src/call_info.rs b/crates/ra_ide/src/call_info.rs deleted file mode 100644 index ff602202f2..0000000000 --- a/crates/ra_ide/src/call_info.rs +++ /dev/null @@ -1,742 +0,0 @@ -//! FIXME: write short doc here -use either::Either; -use hir::{Docs, HirDisplay, Semantics, Type}; -use ra_ide_db::RootDatabase; -use ra_syntax::{ - ast::{self, ArgListOwner}, - match_ast, AstNode, SyntaxNode, SyntaxToken, TextRange, TextSize, -}; -use stdx::format_to; -use test_utils::mark; - -use crate::FilePosition; - -/// Contains information about a call site. Specifically the -/// `FunctionSignature`and current parameter. 
-#[derive(Debug)] -pub struct CallInfo { - pub doc: Option, - pub signature: String, - pub active_parameter: Option, - parameters: Vec, -} - -impl CallInfo { - pub fn parameter_labels(&self) -> impl Iterator + '_ { - self.parameters.iter().map(move |&it| &self.signature[it]) - } - pub fn parameter_ranges(&self) -> &[TextRange] { - &self.parameters - } - fn push_param(&mut self, param: &str) { - if !self.signature.ends_with('(') { - self.signature.push_str(", "); - } - let start = TextSize::of(&self.signature); - self.signature.push_str(param); - let end = TextSize::of(&self.signature); - self.parameters.push(TextRange::new(start, end)) - } -} - -/// Computes parameter information for the given call expression. -pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option { - let sema = Semantics::new(db); - let file = sema.parse(position.file_id); - let file = file.syntax(); - let token = file.token_at_offset(position.offset).next()?; - let token = sema.descend_into_macros(token); - - let (callable, active_parameter) = call_info_impl(&sema, token)?; - - let mut res = - CallInfo { doc: None, signature: String::new(), parameters: vec![], active_parameter }; - - match callable.kind() { - hir::CallableKind::Function(func) => { - res.doc = func.docs(db).map(|it| it.as_str().to_string()); - format_to!(res.signature, "fn {}", func.name(db)); - } - hir::CallableKind::TupleStruct(strukt) => { - res.doc = strukt.docs(db).map(|it| it.as_str().to_string()); - format_to!(res.signature, "struct {}", strukt.name(db)); - } - hir::CallableKind::TupleEnumVariant(variant) => { - res.doc = variant.docs(db).map(|it| it.as_str().to_string()); - format_to!( - res.signature, - "enum {}::{}", - variant.parent_enum(db).name(db), - variant.name(db) - ); - } - hir::CallableKind::Closure => (), - } - - res.signature.push('('); - { - if let Some(self_param) = callable.receiver_param(db) { - format_to!(res.signature, "{}", self_param) - } - let mut buf = String::new(); - for 
(pat, ty) in callable.params(db) { - buf.clear(); - if let Some(pat) = pat { - match pat { - Either::Left(_self) => format_to!(buf, "self: "), - Either::Right(pat) => format_to!(buf, "{}: ", pat), - } - } - format_to!(buf, "{}", ty.display(db)); - res.push_param(&buf); - } - } - res.signature.push(')'); - - match callable.kind() { - hir::CallableKind::Function(_) | hir::CallableKind::Closure => { - let ret_type = callable.return_type(); - if !ret_type.is_unit() { - format_to!(res.signature, " -> {}", ret_type.display(db)); - } - } - hir::CallableKind::TupleStruct(_) | hir::CallableKind::TupleEnumVariant(_) => {} - } - Some(res) -} - -fn call_info_impl( - sema: &Semantics, - token: SyntaxToken, -) -> Option<(hir::Callable, Option)> { - // Find the calling expression and it's NameRef - let calling_node = FnCallNode::with_node(&token.parent())?; - - let callable = match &calling_node { - FnCallNode::CallExpr(call) => sema.type_of_expr(&call.expr()?)?.as_callable(sema.db)?, - FnCallNode::MethodCallExpr(call) => sema.resolve_method_call_as_callable(call)?, - }; - let active_param = if let Some(arg_list) = calling_node.arg_list() { - // Number of arguments specified at the call site - let num_args_at_callsite = arg_list.args().count(); - - let arg_list_range = arg_list.syntax().text_range(); - if !arg_list_range.contains_inclusive(token.text_range().start()) { - mark::hit!(call_info_bad_offset); - return None; - } - let param = std::cmp::min( - num_args_at_callsite, - arg_list - .args() - .take_while(|arg| arg.syntax().text_range().end() <= token.text_range().start()) - .count(), - ); - - Some(param) - } else { - None - }; - Some((callable, active_param)) -} - -#[derive(Debug)] -pub(crate) struct ActiveParameter { - pub(crate) ty: Type, - pub(crate) name: String, -} - -impl ActiveParameter { - pub(crate) fn at(db: &RootDatabase, position: FilePosition) -> Option { - let sema = Semantics::new(db); - let file = sema.parse(position.file_id); - let file = file.syntax(); - 
let token = file.token_at_offset(position.offset).next()?; - let token = sema.descend_into_macros(token); - Self::at_token(&sema, token) - } - - pub(crate) fn at_token(sema: &Semantics, token: SyntaxToken) -> Option { - let (signature, active_parameter) = call_info_impl(&sema, token)?; - - let idx = active_parameter?; - let mut params = signature.params(sema.db); - if !(idx < params.len()) { - mark::hit!(too_many_arguments); - return None; - } - let (pat, ty) = params.swap_remove(idx); - let name = pat?.to_string(); - Some(ActiveParameter { ty, name }) - } -} - -#[derive(Debug)] -pub(crate) enum FnCallNode { - CallExpr(ast::CallExpr), - MethodCallExpr(ast::MethodCallExpr), -} - -impl FnCallNode { - fn with_node(syntax: &SyntaxNode) -> Option { - syntax.ancestors().find_map(|node| { - match_ast! { - match node { - ast::CallExpr(it) => Some(FnCallNode::CallExpr(it)), - ast::MethodCallExpr(it) => { - let arg_list = it.arg_list()?; - if !arg_list.syntax().text_range().contains_range(syntax.text_range()) { - return None; - } - Some(FnCallNode::MethodCallExpr(it)) - }, - _ => None, - } - } - }) - } - - pub(crate) fn with_node_exact(node: &SyntaxNode) -> Option { - match_ast! { - match node { - ast::CallExpr(it) => Some(FnCallNode::CallExpr(it)), - ast::MethodCallExpr(it) => Some(FnCallNode::MethodCallExpr(it)), - _ => None, - } - } - } - - pub(crate) fn name_ref(&self) -> Option { - match self { - FnCallNode::CallExpr(call_expr) => Some(match call_expr.expr()? 
{ - ast::Expr::PathExpr(path_expr) => path_expr.path()?.segment()?.name_ref()?, - _ => return None, - }), - - FnCallNode::MethodCallExpr(call_expr) => { - call_expr.syntax().children().filter_map(ast::NameRef::cast).next() - } - } - } - - fn arg_list(&self) -> Option { - match self { - FnCallNode::CallExpr(expr) => expr.arg_list(), - FnCallNode::MethodCallExpr(expr) => expr.arg_list(), - } - } -} - -#[cfg(test)] -mod tests { - use expect::{expect, Expect}; - use test_utils::mark; - - use crate::mock_analysis::analysis_and_position; - - fn check(ra_fixture: &str, expect: Expect) { - let (analysis, position) = analysis_and_position(ra_fixture); - let call_info = analysis.call_info(position).unwrap(); - let actual = match call_info { - Some(call_info) => { - let docs = match &call_info.doc { - None => "".to_string(), - Some(docs) => format!("{}\n------\n", docs.as_str()), - }; - let params = call_info - .parameter_labels() - .enumerate() - .map(|(i, param)| { - if Some(i) == call_info.active_parameter { - format!("<{}>", param) - } else { - param.to_string() - } - }) - .collect::>() - .join(", "); - format!("{}{}\n({})\n", docs, call_info.signature, params) - } - None => String::new(), - }; - expect.assert_eq(&actual); - } - - #[test] - fn test_fn_signature_two_args() { - check( - r#" -fn foo(x: u32, y: u32) -> u32 {x + y} -fn bar() { foo(<|>3, ); } -"#, - expect![[r#" - fn foo(x: u32, y: u32) -> u32 - (, y: u32) - "#]], - ); - check( - r#" -fn foo(x: u32, y: u32) -> u32 {x + y} -fn bar() { foo(3<|>, ); } -"#, - expect![[r#" - fn foo(x: u32, y: u32) -> u32 - (, y: u32) - "#]], - ); - check( - r#" -fn foo(x: u32, y: u32) -> u32 {x + y} -fn bar() { foo(3,<|> ); } -"#, - expect![[r#" - fn foo(x: u32, y: u32) -> u32 - (x: u32, ) - "#]], - ); - check( - r#" -fn foo(x: u32, y: u32) -> u32 {x + y} -fn bar() { foo(3, <|>); } -"#, - expect![[r#" - fn foo(x: u32, y: u32) -> u32 - (x: u32, ) - "#]], - ); - } - - #[test] - fn test_fn_signature_two_args_empty() { - check( - r#" 
-fn foo(x: u32, y: u32) -> u32 {x + y} -fn bar() { foo(<|>); } -"#, - expect![[r#" - fn foo(x: u32, y: u32) -> u32 - (, y: u32) - "#]], - ); - } - - #[test] - fn test_fn_signature_two_args_first_generics() { - check( - r#" -fn foo(x: T, y: U) -> u32 - where T: Copy + Display, U: Debug -{ x + y } - -fn bar() { foo(<|>3, ); } -"#, - expect![[r#" - fn foo(x: i32, y: {unknown}) -> u32 - (, y: {unknown}) - "#]], - ); - } - - #[test] - fn test_fn_signature_no_params() { - check( - r#" -fn foo() -> T where T: Copy + Display {} -fn bar() { foo(<|>); } -"#, - expect![[r#" - fn foo() -> {unknown} - () - "#]], - ); - } - - #[test] - fn test_fn_signature_for_impl() { - check( - r#" -struct F; -impl F { pub fn new() { } } -fn bar() { - let _ : F = F::new(<|>); -} -"#, - expect![[r#" - fn new() - () - "#]], - ); - } - - #[test] - fn test_fn_signature_for_method_self() { - check( - r#" -struct S; -impl S { pub fn do_it(&self) {} } - -fn bar() { - let s: S = S; - s.do_it(<|>); -} -"#, - expect![[r#" - fn do_it(&self) - () - "#]], - ); - } - - #[test] - fn test_fn_signature_for_method_with_arg() { - check( - r#" -struct S; -impl S { - fn foo(&self, x: i32) {} -} - -fn main() { S.foo(<|>); } -"#, - expect![[r#" - fn foo(&self, x: i32) - () - "#]], - ); - } - - #[test] - fn test_fn_signature_for_method_with_arg_as_assoc_fn() { - check( - r#" -struct S; -impl S { - fn foo(&self, x: i32) {} -} - -fn main() { S::foo(<|>); } -"#, - expect![[r#" - fn foo(self: &S, x: i32) - (, x: i32) - "#]], - ); - } - - #[test] - fn test_fn_signature_with_docs_simple() { - check( - r#" -/// test -// non-doc-comment -fn foo(j: u32) -> u32 { - j -} - -fn bar() { - let _ = foo(<|>); -} -"#, - expect![[r#" - test - ------ - fn foo(j: u32) -> u32 - () - "#]], - ); - } - - #[test] - fn test_fn_signature_with_docs() { - check( - r#" -/// Adds one to the number given. 
-/// -/// # Examples -/// -/// ``` -/// let five = 5; -/// -/// assert_eq!(6, my_crate::add_one(5)); -/// ``` -pub fn add_one(x: i32) -> i32 { - x + 1 -} - -pub fn do() { - add_one(<|> -}"#, - expect![[r##" - Adds one to the number given. - - # Examples - - ``` - let five = 5; - - assert_eq!(6, my_crate::add_one(5)); - ``` - ------ - fn add_one(x: i32) -> i32 - () - "##]], - ); - } - - #[test] - fn test_fn_signature_with_docs_impl() { - check( - r#" -struct addr; -impl addr { - /// Adds one to the number given. - /// - /// # Examples - /// - /// ``` - /// let five = 5; - /// - /// assert_eq!(6, my_crate::add_one(5)); - /// ``` - pub fn add_one(x: i32) -> i32 { - x + 1 - } -} - -pub fn do_it() { - addr {}; - addr::add_one(<|>); -} -"#, - expect![[r##" - Adds one to the number given. - - # Examples - - ``` - let five = 5; - - assert_eq!(6, my_crate::add_one(5)); - ``` - ------ - fn add_one(x: i32) -> i32 - () - "##]], - ); - } - - #[test] - fn test_fn_signature_with_docs_from_actix() { - check( - r#" -struct WriteHandler; - -impl WriteHandler { - /// Method is called when writer emits error. - /// - /// If this method returns `ErrorAction::Continue` writer processing - /// continues otherwise stream processing stops. - fn error(&mut self, err: E, ctx: &mut Self::Context) -> Running { - Running::Stop - } - - /// Method is called when writer finishes. - /// - /// By default this method stops actor's `Context`. - fn finished(&mut self, ctx: &mut Self::Context) { - ctx.stop() - } -} - -pub fn foo(mut r: WriteHandler<()>) { - r.finished(<|>); -} -"#, - expect![[r#" - Method is called when writer finishes. - - By default this method stops actor's `Context`. 
- ------ - fn finished(&mut self, ctx: &mut {unknown}) - () - "#]], - ); - } - - #[test] - fn call_info_bad_offset() { - mark::check!(call_info_bad_offset); - check( - r#" -fn foo(x: u32, y: u32) -> u32 {x + y} -fn bar() { foo <|> (3, ); } -"#, - expect![[""]], - ); - } - - #[test] - fn test_nested_method_in_lambda() { - check( - r#" -struct Foo; -impl Foo { fn bar(&self, _: u32) { } } - -fn bar(_: u32) { } - -fn main() { - let foo = Foo; - std::thread::spawn(move || foo.bar(<|>)); -} -"#, - expect![[r#" - fn bar(&self, _: u32) - (<_: u32>) - "#]], - ); - } - - #[test] - fn works_for_tuple_structs() { - check( - r#" -/// A cool tuple struct -struct S(u32, i32); -fn main() { - let s = S(0, <|>); -} -"#, - expect![[r#" - A cool tuple struct - ------ - struct S(u32, i32) - (u32, ) - "#]], - ); - } - - #[test] - fn generic_struct() { - check( - r#" -struct S(T); -fn main() { - let s = S(<|>); -} -"#, - expect![[r#" - struct S({unknown}) - (<{unknown}>) - "#]], - ); - } - - #[test] - fn works_for_enum_variants() { - check( - r#" -enum E { - /// A Variant - A(i32), - /// Another - B, - /// And C - C { a: i32, b: i32 } -} - -fn main() { - let a = E::A(<|>); -} -"#, - expect![[r#" - A Variant - ------ - enum E::A(i32) - () - "#]], - ); - } - - #[test] - fn cant_call_struct_record() { - check( - r#" -struct S { x: u32, y: i32 } -fn main() { - let s = S(<|>); -} -"#, - expect![[""]], - ); - } - - #[test] - fn cant_call_enum_record() { - check( - r#" -enum E { - /// A Variant - A(i32), - /// Another - B, - /// And C - C { a: i32, b: i32 } -} - -fn main() { - let a = E::C(<|>); -} -"#, - expect![[""]], - ); - } - - #[test] - fn fn_signature_for_call_in_macro() { - check( - r#" -macro_rules! id { ($($tt:tt)*) => { $($tt)* } } -fn foo() { } -id! 
{ - fn bar() { foo(<|>); } -} -"#, - expect![[r#" - fn foo() - () - "#]], - ); - } - - #[test] - fn call_info_for_lambdas() { - check( - r#" -struct S; -fn foo(s: S) -> i32 { 92 } -fn main() { - (|s| foo(s))(<|>) -} - "#, - expect![[r#" - (S) -> i32 - () - "#]], - ) - } - - #[test] - fn call_info_for_fn_ptr() { - check( - r#" -fn main(f: fn(i32, f64) -> char) { - f(0, <|>) -} - "#, - expect![[r#" - (i32, f64) -> char - (i32, ) - "#]], - ) - } -} diff --git a/crates/ra_ide/src/completion.rs b/crates/ra_ide/src/completion.rs deleted file mode 100644 index 9c33a5a43b..0000000000 --- a/crates/ra_ide/src/completion.rs +++ /dev/null @@ -1,211 +0,0 @@ -mod completion_config; -mod completion_item; -mod completion_context; -mod presentation; -mod patterns; -#[cfg(test)] -mod test_utils; - -mod complete_attribute; -mod complete_dot; -mod complete_record; -mod complete_pattern; -mod complete_fn_param; -mod complete_keyword; -mod complete_snippet; -mod complete_qualified_path; -mod complete_unqualified_path; -mod complete_postfix; -mod complete_macro_in_item_position; -mod complete_trait_impl; -mod unstable_feature_descriptor; -use ra_ide_db::RootDatabase; - -use crate::{ - completion::{ - completion_context::CompletionContext, - completion_item::{CompletionKind, Completions}, - }, - FilePosition, -}; - -//FIXME: cyclic imports caused by xtask generation, this should be better -use crate::completion::{ - complete_attribute::LintCompletion, unstable_feature_descriptor::UNSTABLE_FEATURE_DESCRIPTOR, -}; - -pub use crate::completion::{ - completion_config::CompletionConfig, - completion_item::{CompletionItem, CompletionItemKind, CompletionScore, InsertTextFormat}, -}; - -//FIXME: split the following feature into fine-grained features. 
- -// Feature: Magic Completions -// -// In addition to usual reference completion, rust-analyzer provides some ✨magic✨ -// completions as well: -// -// Keywords like `if`, `else` `while`, `loop` are completed with braces, and cursor -// is placed at the appropriate position. Even though `if` is easy to type, you -// still want to complete it, to get ` { }` for free! `return` is inserted with a -// space or `;` depending on the return type of the function. -// -// When completing a function call, `()` are automatically inserted. If a function -// takes arguments, the cursor is positioned inside the parenthesis. -// -// There are postfix completions, which can be triggered by typing something like -// `foo().if`. The word after `.` determines postfix completion. Possible variants are: -// -// - `expr.if` -> `if expr {}` or `if let ... {}` for `Option` or `Result` -// - `expr.match` -> `match expr {}` -// - `expr.while` -> `while expr {}` or `while let ... {}` for `Option` or `Result` -// - `expr.ref` -> `&expr` -// - `expr.refm` -> `&mut expr` -// - `expr.not` -> `!expr` -// - `expr.dbg` -> `dbg!(expr)` -// -// There also snippet completions: -// -// .Expressions -// - `pd` -> `eprintln!(" = {:?}", );` -// - `ppd` -> `eprintln!(" = {:#?}", );` -// -// .Items -// - `tfn` -> `#[test] fn feature(){}` -// - `tmod` -> -// ```rust -// #[cfg(test)] -// mod tests { -// use super::*; -// -// #[test] -// fn test_name() {} -// } -// ``` - -/// Main entry point for completion. We run completion as a two-phase process. -/// -/// First, we look at the position and collect a so-called `CompletionContext. -/// This is a somewhat messy process, because, during completion, syntax tree is -/// incomplete and can look really weird. -/// -/// Once the context is collected, we run a series of completion routines which -/// look at the context and produce completion items. 
One subtlety about this -/// phase is that completion engine should not filter by the substring which is -/// already present, it should give all possible variants for the identifier at -/// the caret. In other words, for -/// -/// ```no-run -/// fn f() { -/// let foo = 92; -/// let _ = bar<|> -/// } -/// ``` -/// -/// `foo` *should* be present among the completion variants. Filtering by -/// identifier prefix/fuzzy match should be done higher in the stack, together -/// with ordering of completions (currently this is done by the client). -pub(crate) fn completions( - db: &RootDatabase, - config: &CompletionConfig, - position: FilePosition, -) -> Option { - let ctx = CompletionContext::new(db, position, config)?; - - let mut acc = Completions::default(); - complete_attribute::complete_attribute(&mut acc, &ctx); - complete_fn_param::complete_fn_param(&mut acc, &ctx); - complete_keyword::complete_expr_keyword(&mut acc, &ctx); - complete_keyword::complete_use_tree_keyword(&mut acc, &ctx); - complete_snippet::complete_expr_snippet(&mut acc, &ctx); - complete_snippet::complete_item_snippet(&mut acc, &ctx); - complete_qualified_path::complete_qualified_path(&mut acc, &ctx); - complete_unqualified_path::complete_unqualified_path(&mut acc, &ctx); - complete_dot::complete_dot(&mut acc, &ctx); - complete_record::complete_record(&mut acc, &ctx); - complete_pattern::complete_pattern(&mut acc, &ctx); - complete_postfix::complete_postfix(&mut acc, &ctx); - complete_macro_in_item_position::complete_macro_in_item_position(&mut acc, &ctx); - complete_trait_impl::complete_trait_impl(&mut acc, &ctx); - - Some(acc) -} - -#[cfg(test)] -mod tests { - use crate::completion::completion_config::CompletionConfig; - use crate::mock_analysis::analysis_and_position; - - struct DetailAndDocumentation<'a> { - detail: &'a str, - documentation: &'a str, - } - - fn check_detail_and_documentation(ra_fixture: &str, expected: DetailAndDocumentation) { - let (analysis, position) = 
analysis_and_position(ra_fixture); - let config = CompletionConfig::default(); - let completions = analysis.completions(&config, position).unwrap().unwrap(); - for item in completions { - if item.detail() == Some(expected.detail) { - let opt = item.documentation(); - let doc = opt.as_ref().map(|it| it.as_str()); - assert_eq!(doc, Some(expected.documentation)); - return; - } - } - panic!("completion detail not found: {}", expected.detail) - } - - #[test] - fn test_completion_detail_from_macro_generated_struct_fn_doc_attr() { - check_detail_and_documentation( - r#" - //- /lib.rs - macro_rules! bar { - () => { - struct Bar; - impl Bar { - #[doc = "Do the foo"] - fn foo(&self) {} - } - } - } - - bar!(); - - fn foo() { - let bar = Bar; - bar.fo<|>; - } - "#, - DetailAndDocumentation { detail: "fn foo(&self)", documentation: "Do the foo" }, - ); - } - - #[test] - fn test_completion_detail_from_macro_generated_struct_fn_doc_comment() { - check_detail_and_documentation( - r#" - //- /lib.rs - macro_rules! bar { - () => { - struct Bar; - impl Bar { - /// Do the foo - fn foo(&self) {} - } - } - } - - bar!(); - - fn foo() { - let bar = Bar; - bar.fo<|>; - } - "#, - DetailAndDocumentation { detail: "fn foo(&self)", documentation: " Do the foo" }, - ); - } -} diff --git a/crates/ra_ide/src/completion/complete_attribute.rs b/crates/ra_ide/src/completion/complete_attribute.rs deleted file mode 100644 index f2782d4b96..0000000000 --- a/crates/ra_ide/src/completion/complete_attribute.rs +++ /dev/null @@ -1,654 +0,0 @@ -//! Completion for attributes -//! -//! This module uses a bit of static metadata to provide completions -//! for built-in attributes. 
- -use ra_syntax::{ast, AstNode, SyntaxKind}; -use rustc_hash::FxHashSet; - -use crate::completion::{ - completion_context::CompletionContext, - completion_item::{CompletionItem, CompletionItemKind, CompletionKind, Completions}, -}; - -use crate::completion::UNSTABLE_FEATURE_DESCRIPTOR; - -pub(super) fn complete_attribute(acc: &mut Completions, ctx: &CompletionContext) -> Option<()> { - let attribute = ctx.attribute_under_caret.as_ref()?; - match (attribute.path(), attribute.token_tree()) { - (Some(path), Some(token_tree)) if path.to_string() == "derive" => { - complete_derive(acc, ctx, token_tree) - } - (Some(path), Some(token_tree)) if path.to_string() == "feature" => { - complete_lint(acc, ctx, token_tree, UNSTABLE_FEATURE_DESCRIPTOR); - } - (Some(path), Some(token_tree)) - if ["allow", "warn", "deny", "forbid"] - .iter() - .any(|lint_level| lint_level == &path.to_string()) => - { - complete_lint(acc, ctx, token_tree, DEFAULT_LINT_COMPLETIONS) - } - (_, Some(_token_tree)) => {} - _ => complete_attribute_start(acc, ctx, attribute), - } - Some(()) -} - -fn complete_attribute_start(acc: &mut Completions, ctx: &CompletionContext, attribute: &ast::Attr) { - for attr_completion in ATTRIBUTES { - let mut item = CompletionItem::new( - CompletionKind::Attribute, - ctx.source_range(), - attr_completion.label, - ) - .kind(CompletionItemKind::Attribute); - - if let Some(lookup) = attr_completion.lookup { - item = item.lookup_by(lookup); - } - - match (attr_completion.snippet, ctx.config.snippet_cap) { - (Some(snippet), Some(cap)) => { - item = item.insert_snippet(cap, snippet); - } - _ => {} - } - - if attribute.kind() == ast::AttrKind::Inner || !attr_completion.prefer_inner { - acc.add(item); - } - } -} - -struct AttrCompletion { - label: &'static str, - lookup: Option<&'static str>, - snippet: Option<&'static str>, - prefer_inner: bool, -} - -impl AttrCompletion { - const fn prefer_inner(self) -> AttrCompletion { - AttrCompletion { prefer_inner: true, ..self } - } -} - 
-const fn attr( - label: &'static str, - lookup: Option<&'static str>, - snippet: Option<&'static str>, -) -> AttrCompletion { - AttrCompletion { label, lookup, snippet, prefer_inner: false } -} - -const ATTRIBUTES: &[AttrCompletion] = &[ - attr("allow(…)", Some("allow"), Some("allow(${0:lint})")), - attr("cfg_attr(…)", Some("cfg_attr"), Some("cfg_attr(${1:predicate}, ${0:attr})")), - attr("cfg(…)", Some("cfg"), Some("cfg(${0:predicate})")), - attr("deny(…)", Some("deny"), Some("deny(${0:lint})")), - attr(r#"deprecated = "…""#, Some("deprecated"), Some(r#"deprecated = "${0:reason}""#)), - attr("derive(…)", Some("derive"), Some(r#"derive(${0:Debug})"#)), - attr(r#"doc = "…""#, Some("doc"), Some(r#"doc = "${0:docs}""#)), - attr("feature(…)", Some("feature"), Some("feature(${0:lint})")).prefer_inner(), - attr("forbid(…)", Some("forbid"), Some("forbid(${0:lint})")), - // FIXME: resolve through macro resolution? - attr("global_allocator", None, None).prefer_inner(), - attr(r#"ignore = "…""#, Some("ignore"), Some(r#"ignore = "${0:reason}""#)), - attr("inline(…)", Some("inline"), Some("inline(${0:lint})")), - attr(r#"link_name = "…""#, Some("link_name"), Some(r#"link_name = "${0:symbol_name}""#)), - attr("link", None, None), - attr("macro_export", None, None), - attr("macro_use", None, None), - attr(r#"must_use = "…""#, Some("must_use"), Some(r#"must_use = "${0:reason}""#)), - attr("no_mangle", None, None), - attr("no_std", None, None).prefer_inner(), - attr("non_exhaustive", None, None), - attr("panic_handler", None, None).prefer_inner(), - attr("path = \"…\"", Some("path"), Some("path =\"${0:path}\"")), - attr("proc_macro", None, None), - attr("proc_macro_attribute", None, None), - attr("proc_macro_derive(…)", Some("proc_macro_derive"), Some("proc_macro_derive(${0:Trait})")), - attr("recursion_limit = …", Some("recursion_limit"), Some("recursion_limit = ${0:128}")) - .prefer_inner(), - attr("repr(…)", Some("repr"), Some("repr(${0:C})")), - attr( - "should_panic(…)", - 
Some("should_panic"), - Some(r#"should_panic(expected = "${0:reason}")"#), - ), - attr( - r#"target_feature = "…""#, - Some("target_feature"), - Some("target_feature = \"${0:feature}\""), - ), - attr("test", None, None), - attr("used", None, None), - attr("warn(…)", Some("warn"), Some("warn(${0:lint})")), - attr( - r#"windows_subsystem = "…""#, - Some("windows_subsystem"), - Some(r#"windows_subsystem = "${0:subsystem}""#), - ) - .prefer_inner(), -]; - -fn complete_derive(acc: &mut Completions, ctx: &CompletionContext, derive_input: ast::TokenTree) { - if let Ok(existing_derives) = parse_comma_sep_input(derive_input) { - for derive_completion in DEFAULT_DERIVE_COMPLETIONS - .into_iter() - .filter(|completion| !existing_derives.contains(completion.label)) - { - let mut label = derive_completion.label.to_owned(); - for dependency in derive_completion - .dependencies - .into_iter() - .filter(|&&dependency| !existing_derives.contains(dependency)) - { - label.push_str(", "); - label.push_str(dependency); - } - acc.add( - CompletionItem::new(CompletionKind::Attribute, ctx.source_range(), label) - .kind(CompletionItemKind::Attribute), - ); - } - - for custom_derive_name in get_derive_names_in_scope(ctx).difference(&existing_derives) { - acc.add( - CompletionItem::new( - CompletionKind::Attribute, - ctx.source_range(), - custom_derive_name, - ) - .kind(CompletionItemKind::Attribute), - ); - } - } -} - -fn complete_lint( - acc: &mut Completions, - ctx: &CompletionContext, - derive_input: ast::TokenTree, - lints_completions: &[LintCompletion], -) { - if let Ok(existing_lints) = parse_comma_sep_input(derive_input) { - for lint_completion in lints_completions - .into_iter() - .filter(|completion| !existing_lints.contains(completion.label)) - { - acc.add( - CompletionItem::new( - CompletionKind::Attribute, - ctx.source_range(), - lint_completion.label, - ) - .kind(CompletionItemKind::Attribute) - .detail(lint_completion.description), - ); - } - } -} - -fn 
parse_comma_sep_input(derive_input: ast::TokenTree) -> Result, ()> { - match (derive_input.left_delimiter_token(), derive_input.right_delimiter_token()) { - (Some(left_paren), Some(right_paren)) - if left_paren.kind() == SyntaxKind::L_PAREN - && right_paren.kind() == SyntaxKind::R_PAREN => - { - let mut input_derives = FxHashSet::default(); - let mut current_derive = String::new(); - for token in derive_input - .syntax() - .children_with_tokens() - .filter_map(|token| token.into_token()) - .skip_while(|token| token != &left_paren) - .skip(1) - .take_while(|token| token != &right_paren) - { - if SyntaxKind::COMMA == token.kind() { - if !current_derive.is_empty() { - input_derives.insert(current_derive); - current_derive = String::new(); - } - } else { - current_derive.push_str(token.to_string().trim()); - } - } - - if !current_derive.is_empty() { - input_derives.insert(current_derive); - } - Ok(input_derives) - } - _ => Err(()), - } -} - -fn get_derive_names_in_scope(ctx: &CompletionContext) -> FxHashSet { - let mut result = FxHashSet::default(); - ctx.scope.process_all_names(&mut |name, scope_def| { - if let hir::ScopeDef::MacroDef(mac) = scope_def { - if mac.is_derive_macro() { - result.insert(name.to_string()); - } - } - }); - result -} - -struct DeriveCompletion { - label: &'static str, - dependencies: &'static [&'static str], -} - -/// Standard Rust derives and the information about their dependencies -/// (the dependencies are needed so that the main derive don't break the compilation when added) -#[rustfmt::skip] -const DEFAULT_DERIVE_COMPLETIONS: &[DeriveCompletion] = &[ - DeriveCompletion { label: "Clone", dependencies: &[] }, - DeriveCompletion { label: "Copy", dependencies: &["Clone"] }, - DeriveCompletion { label: "Debug", dependencies: &[] }, - DeriveCompletion { label: "Default", dependencies: &[] }, - DeriveCompletion { label: "Hash", dependencies: &[] }, - DeriveCompletion { label: "PartialEq", dependencies: &[] }, - DeriveCompletion { label: "Eq", 
dependencies: &["PartialEq"] }, - DeriveCompletion { label: "PartialOrd", dependencies: &["PartialEq"] }, - DeriveCompletion { label: "Ord", dependencies: &["PartialOrd", "Eq", "PartialEq"] }, -]; - -pub(crate) struct LintCompletion { - pub label: &'static str, - pub description: &'static str, -} - -#[rustfmt::skip] -const DEFAULT_LINT_COMPLETIONS: &[LintCompletion] = &[ - LintCompletion { label: "absolute_paths_not_starting_with_crate", description: r#"fully qualified paths that start with a module name instead of `crate`, `self`, or an extern crate name"# }, - LintCompletion { label: "anonymous_parameters", description: r#"detects anonymous parameters"# }, - LintCompletion { label: "box_pointers", description: r#"use of owned (Box type) heap memory"# }, - LintCompletion { label: "deprecated_in_future", description: r#"detects use of items that will be deprecated in a future version"# }, - LintCompletion { label: "elided_lifetimes_in_paths", description: r#"hidden lifetime parameters in types are deprecated"# }, - LintCompletion { label: "explicit_outlives_requirements", description: r#"outlives requirements can be inferred"# }, - LintCompletion { label: "indirect_structural_match", description: r#"pattern with const indirectly referencing non-structural-match type"# }, - LintCompletion { label: "keyword_idents", description: r#"detects edition keywords being used as an identifier"# }, - LintCompletion { label: "macro_use_extern_crate", description: r#"the `#[macro_use]` attribute is now deprecated in favor of using macros via the module system"# }, - LintCompletion { label: "meta_variable_misuse", description: r#"possible meta-variable misuse at macro definition"# }, - LintCompletion { label: "missing_copy_implementations", description: r#"detects potentially-forgotten implementations of `Copy`"# }, - LintCompletion { label: "missing_crate_level_docs", description: r#"detects crates with no crate-level documentation"# }, - LintCompletion { label: 
"missing_debug_implementations", description: r#"detects missing implementations of Debug"# }, - LintCompletion { label: "missing_docs", description: r#"detects missing documentation for public members"# }, - LintCompletion { label: "missing_doc_code_examples", description: r#"detects publicly-exported items without code samples in their documentation"# }, - LintCompletion { label: "non_ascii_idents", description: r#"detects non-ASCII identifiers"# }, - LintCompletion { label: "private_doc_tests", description: r#"detects code samples in docs of private items not documented by rustdoc"# }, - LintCompletion { label: "single_use_lifetimes", description: r#"detects lifetime parameters that are only used once"# }, - LintCompletion { label: "trivial_casts", description: r#"detects trivial casts which could be removed"# }, - LintCompletion { label: "trivial_numeric_casts", description: r#"detects trivial casts of numeric types which could be removed"# }, - LintCompletion { label: "unaligned_references", description: r#"detects unaligned references to fields of packed structs"# }, - LintCompletion { label: "unreachable_pub", description: r#"`pub` items not reachable from crate root"# }, - LintCompletion { label: "unsafe_code", description: r#"usage of `unsafe` code"# }, - LintCompletion { label: "unsafe_op_in_unsafe_fn", description: r#"unsafe operations in unsafe functions without an explicit unsafe block are deprecated"# }, - LintCompletion { label: "unstable_features", description: r#"enabling unstable features (deprecated. 
do not use)"# }, - LintCompletion { label: "unused_crate_dependencies", description: r#"crate dependencies that are never used"# }, - LintCompletion { label: "unused_extern_crates", description: r#"extern crates that are never used"# }, - LintCompletion { label: "unused_import_braces", description: r#"unnecessary braces around an imported item"# }, - LintCompletion { label: "unused_lifetimes", description: r#"detects lifetime parameters that are never used"# }, - LintCompletion { label: "unused_qualifications", description: r#"detects unnecessarily qualified names"# }, - LintCompletion { label: "unused_results", description: r#"unused result of an expression in a statement"# }, - LintCompletion { label: "variant_size_differences", description: r#"detects enums with widely varying variant sizes"# }, - LintCompletion { label: "array_into_iter", description: r#"detects calling `into_iter` on arrays"# }, - LintCompletion { label: "asm_sub_register", description: r#"using only a subset of a register for inline asm inputs"# }, - LintCompletion { label: "bare_trait_objects", description: r#"suggest using `dyn Trait` for trait objects"# }, - LintCompletion { label: "bindings_with_variant_name", description: r#"detects pattern bindings with the same name as one of the matched variants"# }, - LintCompletion { label: "cenum_impl_drop_cast", description: r#"a C-like enum implementing Drop is cast"# }, - LintCompletion { label: "clashing_extern_declarations", description: r#"detects when an extern fn has been declared with the same name but different types"# }, - LintCompletion { label: "coherence_leak_check", description: r#"distinct impls distinguished only by the leak-check code"# }, - LintCompletion { label: "confusable_idents", description: r#"detects visually confusable pairs between identifiers"# }, - LintCompletion { label: "dead_code", description: r#"detect unused, unexported items"# }, - LintCompletion { label: "deprecated", description: r#"detects use of deprecated 
items"# }, - LintCompletion { label: "ellipsis_inclusive_range_patterns", description: r#"`...` range patterns are deprecated"# }, - LintCompletion { label: "exported_private_dependencies", description: r#"public interface leaks type from a private dependency"# }, - LintCompletion { label: "illegal_floating_point_literal_pattern", description: r#"floating-point literals cannot be used in patterns"# }, - LintCompletion { label: "improper_ctypes", description: r#"proper use of libc types in foreign modules"# }, - LintCompletion { label: "improper_ctypes_definitions", description: r#"proper use of libc types in foreign item definitions"# }, - LintCompletion { label: "incomplete_features", description: r#"incomplete features that may function improperly in some or all cases"# }, - LintCompletion { label: "inline_no_sanitize", description: r#"detects incompatible use of `#[inline(always)]` and `#[no_sanitize(...)]`"# }, - LintCompletion { label: "intra_doc_link_resolution_failure", description: r#"failures in resolving intra-doc link targets"# }, - LintCompletion { label: "invalid_codeblock_attributes", description: r#"codeblock attribute looks a lot like a known one"# }, - LintCompletion { label: "invalid_value", description: r#"an invalid value is being created (such as a NULL reference)"# }, - LintCompletion { label: "irrefutable_let_patterns", description: r#"detects irrefutable patterns in if-let and while-let statements"# }, - LintCompletion { label: "late_bound_lifetime_arguments", description: r#"detects generic lifetime arguments in path segments with late bound lifetime parameters"# }, - LintCompletion { label: "mixed_script_confusables", description: r#"detects Unicode scripts whose mixed script confusables codepoints are solely used"# }, - LintCompletion { label: "mutable_borrow_reservation_conflict", description: r#"reservation of a two-phased borrow conflicts with other shared borrows"# }, - LintCompletion { label: "non_camel_case_types", description: 
r#"types, variants, traits and type parameters should have camel case names"# }, - LintCompletion { label: "non_shorthand_field_patterns", description: r#"using `Struct { x: x }` instead of `Struct { x }` in a pattern"# }, - LintCompletion { label: "non_snake_case", description: r#"variables, methods, functions, lifetime parameters and modules should have snake case names"# }, - LintCompletion { label: "non_upper_case_globals", description: r#"static constants should have uppercase identifiers"# }, - LintCompletion { label: "no_mangle_generic_items", description: r#"generic items must be mangled"# }, - LintCompletion { label: "overlapping_patterns", description: r#"detects overlapping patterns"# }, - LintCompletion { label: "path_statements", description: r#"path statements with no effect"# }, - LintCompletion { label: "private_in_public", description: r#"detect private items in public interfaces not caught by the old implementation"# }, - LintCompletion { label: "proc_macro_derive_resolution_fallback", description: r#"detects proc macro derives using inaccessible names from parent modules"# }, - LintCompletion { label: "redundant_semicolons", description: r#"detects unnecessary trailing semicolons"# }, - LintCompletion { label: "renamed_and_removed_lints", description: r#"lints that have been renamed or removed"# }, - LintCompletion { label: "safe_packed_borrows", description: r#"safe borrows of fields of packed structs were erroneously allowed"# }, - LintCompletion { label: "stable_features", description: r#"stable features found in `#[feature]` directive"# }, - LintCompletion { label: "trivial_bounds", description: r#"these bounds don't depend on an type parameters"# }, - LintCompletion { label: "type_alias_bounds", description: r#"bounds in type aliases are not enforced"# }, - LintCompletion { label: "tyvar_behind_raw_pointer", description: r#"raw pointer to an inference variable"# }, - LintCompletion { label: "uncommon_codepoints", description: r#"detects 
uncommon Unicode codepoints in identifiers"# }, - LintCompletion { label: "unconditional_recursion", description: r#"functions that cannot return without calling themselves"# }, - LintCompletion { label: "unknown_lints", description: r#"unrecognized lint attribute"# }, - LintCompletion { label: "unnameable_test_items", description: r#"detects an item that cannot be named being marked as `#[test_case]`"# }, - LintCompletion { label: "unreachable_code", description: r#"detects unreachable code paths"# }, - LintCompletion { label: "unreachable_patterns", description: r#"detects unreachable patterns"# }, - LintCompletion { label: "unstable_name_collisions", description: r#"detects name collision with an existing but unstable method"# }, - LintCompletion { label: "unused_allocation", description: r#"detects unnecessary allocations that can be eliminated"# }, - LintCompletion { label: "unused_assignments", description: r#"detect assignments that will never be read"# }, - LintCompletion { label: "unused_attributes", description: r#"detects attributes that were not used by the compiler"# }, - LintCompletion { label: "unused_braces", description: r#"unnecessary braces around an expression"# }, - LintCompletion { label: "unused_comparisons", description: r#"comparisons made useless by limits of the types involved"# }, - LintCompletion { label: "unused_doc_comments", description: r#"detects doc comments that aren't used by rustdoc"# }, - LintCompletion { label: "unused_features", description: r#"unused features found in crate-level `#[feature]` directives"# }, - LintCompletion { label: "unused_imports", description: r#"imports that are never used"# }, - LintCompletion { label: "unused_labels", description: r#"detects labels that are never used"# }, - LintCompletion { label: "unused_macros", description: r#"detects macros that were not used"# }, - LintCompletion { label: "unused_must_use", description: r#"unused result of a type flagged as `#[must_use]`"# }, - LintCompletion { 
label: "unused_mut", description: r#"detect mut variables which don't need to be mutable"# }, - LintCompletion { label: "unused_parens", description: r#"`if`, `match`, `while` and `return` do not need parentheses"# }, - LintCompletion { label: "unused_unsafe", description: r#"unnecessary use of an `unsafe` block"# }, - LintCompletion { label: "unused_variables", description: r#"detect variables which are not used in any way"# }, - LintCompletion { label: "warnings", description: r#"mass-change the level for lints which produce warnings"# }, - LintCompletion { label: "where_clauses_object_safety", description: r#"checks the object safety of where clauses"# }, - LintCompletion { label: "while_true", description: r#"suggest using `loop { }` instead of `while true { }`"# }, - LintCompletion { label: "ambiguous_associated_items", description: r#"ambiguous associated items"# }, - LintCompletion { label: "arithmetic_overflow", description: r#"arithmetic operation overflows"# }, - LintCompletion { label: "conflicting_repr_hints", description: r#"conflicts between `#[repr(..)]` hints that were previously accepted and used in practice"# }, - LintCompletion { label: "const_err", description: r#"constant evaluation detected erroneous expression"# }, - LintCompletion { label: "ill_formed_attribute_input", description: r#"ill-formed attribute inputs that were previously accepted and used in practice"# }, - LintCompletion { label: "incomplete_include", description: r#"trailing content in included file"# }, - LintCompletion { label: "invalid_type_param_default", description: r#"type parameter default erroneously allowed in invalid location"# }, - LintCompletion { label: "macro_expanded_macro_exports_accessed_by_absolute_paths", description: r#"macro-expanded `macro_export` macros from the current crate cannot be referred to by absolute paths"# }, - LintCompletion { label: "missing_fragment_specifier", description: r#"detects missing fragment specifiers in unused `macro_rules!` 
patterns"# }, - LintCompletion { label: "mutable_transmutes", description: r#"mutating transmuted &mut T from &T may cause undefined behavior"# }, - LintCompletion { label: "no_mangle_const_items", description: r#"const items will not have their symbols exported"# }, - LintCompletion { label: "order_dependent_trait_objects", description: r#"trait-object types were treated as different depending on marker-trait order"# }, - LintCompletion { label: "overflowing_literals", description: r#"literal out of range for its type"# }, - LintCompletion { label: "patterns_in_fns_without_body", description: r#"patterns in functions without body were erroneously allowed"# }, - LintCompletion { label: "pub_use_of_private_extern_crate", description: r#"detect public re-exports of private extern crates"# }, - LintCompletion { label: "soft_unstable", description: r#"a feature gate that doesn't break dependent crates"# }, - LintCompletion { label: "unconditional_panic", description: r#"operation will cause a panic at runtime"# }, - LintCompletion { label: "unknown_crate_types", description: r#"unknown crate type found in `#[crate_type]` directive"# }, -]; - -#[cfg(test)] -mod tests { - use expect::{expect, Expect}; - - use crate::completion::{test_utils::completion_list, CompletionKind}; - - fn check(ra_fixture: &str, expect: Expect) { - let actual = completion_list(ra_fixture, CompletionKind::Attribute); - expect.assert_eq(&actual); - } - - #[test] - fn empty_derive_completion() { - check( - r#" -#[derive(<|>)] -struct Test {} - "#, - expect![[r#" - at Clone - at Copy, Clone - at Debug - at Default - at Eq, PartialEq - at Hash - at Ord, PartialOrd, Eq, PartialEq - at PartialEq - at PartialOrd, PartialEq - "#]], - ); - } - - #[test] - fn empty_lint_completion() { - check( - r#"#[allow(<|>)]"#, - expect![[r#" - at absolute_paths_not_starting_with_crate fully qualified paths that start with a module name instead of `crate`, `self`, or an extern crate name - at ambiguous_associated_items 
ambiguous associated items - at anonymous_parameters detects anonymous parameters - at arithmetic_overflow arithmetic operation overflows - at array_into_iter detects calling `into_iter` on arrays - at asm_sub_register using only a subset of a register for inline asm inputs - at bare_trait_objects suggest using `dyn Trait` for trait objects - at bindings_with_variant_name detects pattern bindings with the same name as one of the matched variants - at box_pointers use of owned (Box type) heap memory - at cenum_impl_drop_cast a C-like enum implementing Drop is cast - at clashing_extern_declarations detects when an extern fn has been declared with the same name but different types - at coherence_leak_check distinct impls distinguished only by the leak-check code - at conflicting_repr_hints conflicts between `#[repr(..)]` hints that were previously accepted and used in practice - at confusable_idents detects visually confusable pairs between identifiers - at const_err constant evaluation detected erroneous expression - at dead_code detect unused, unexported items - at deprecated detects use of deprecated items - at deprecated_in_future detects use of items that will be deprecated in a future version - at elided_lifetimes_in_paths hidden lifetime parameters in types are deprecated - at ellipsis_inclusive_range_patterns `...` range patterns are deprecated - at explicit_outlives_requirements outlives requirements can be inferred - at exported_private_dependencies public interface leaks type from a private dependency - at ill_formed_attribute_input ill-formed attribute inputs that were previously accepted and used in practice - at illegal_floating_point_literal_pattern floating-point literals cannot be used in patterns - at improper_ctypes proper use of libc types in foreign modules - at improper_ctypes_definitions proper use of libc types in foreign item definitions - at incomplete_features incomplete features that may function improperly in some or all cases - at 
incomplete_include trailing content in included file - at indirect_structural_match pattern with const indirectly referencing non-structural-match type - at inline_no_sanitize detects incompatible use of `#[inline(always)]` and `#[no_sanitize(...)]` - at intra_doc_link_resolution_failure failures in resolving intra-doc link targets - at invalid_codeblock_attributes codeblock attribute looks a lot like a known one - at invalid_type_param_default type parameter default erroneously allowed in invalid location - at invalid_value an invalid value is being created (such as a NULL reference) - at irrefutable_let_patterns detects irrefutable patterns in if-let and while-let statements - at keyword_idents detects edition keywords being used as an identifier - at late_bound_lifetime_arguments detects generic lifetime arguments in path segments with late bound lifetime parameters - at macro_expanded_macro_exports_accessed_by_absolute_paths macro-expanded `macro_export` macros from the current crate cannot be referred to by absolute paths - at macro_use_extern_crate the `#[macro_use]` attribute is now deprecated in favor of using macros via the module system - at meta_variable_misuse possible meta-variable misuse at macro definition - at missing_copy_implementations detects potentially-forgotten implementations of `Copy` - at missing_crate_level_docs detects crates with no crate-level documentation - at missing_debug_implementations detects missing implementations of Debug - at missing_doc_code_examples detects publicly-exported items without code samples in their documentation - at missing_docs detects missing documentation for public members - at missing_fragment_specifier detects missing fragment specifiers in unused `macro_rules!` patterns - at mixed_script_confusables detects Unicode scripts whose mixed script confusables codepoints are solely used - at mutable_borrow_reservation_conflict reservation of a two-phased borrow conflicts with other shared borrows - at 
mutable_transmutes mutating transmuted &mut T from &T may cause undefined behavior - at no_mangle_const_items const items will not have their symbols exported - at no_mangle_generic_items generic items must be mangled - at non_ascii_idents detects non-ASCII identifiers - at non_camel_case_types types, variants, traits and type parameters should have camel case names - at non_shorthand_field_patterns using `Struct { x: x }` instead of `Struct { x }` in a pattern - at non_snake_case variables, methods, functions, lifetime parameters and modules should have snake case names - at non_upper_case_globals static constants should have uppercase identifiers - at order_dependent_trait_objects trait-object types were treated as different depending on marker-trait order - at overflowing_literals literal out of range for its type - at overlapping_patterns detects overlapping patterns - at path_statements path statements with no effect - at patterns_in_fns_without_body patterns in functions without body were erroneously allowed - at private_doc_tests detects code samples in docs of private items not documented by rustdoc - at private_in_public detect private items in public interfaces not caught by the old implementation - at proc_macro_derive_resolution_fallback detects proc macro derives using inaccessible names from parent modules - at pub_use_of_private_extern_crate detect public re-exports of private extern crates - at redundant_semicolons detects unnecessary trailing semicolons - at renamed_and_removed_lints lints that have been renamed or removed - at safe_packed_borrows safe borrows of fields of packed structs were erroneously allowed - at single_use_lifetimes detects lifetime parameters that are only used once - at soft_unstable a feature gate that doesn't break dependent crates - at stable_features stable features found in `#[feature]` directive - at trivial_bounds these bounds don't depend on an type parameters - at trivial_casts detects trivial casts which could be 
removed - at trivial_numeric_casts detects trivial casts of numeric types which could be removed - at type_alias_bounds bounds in type aliases are not enforced - at tyvar_behind_raw_pointer raw pointer to an inference variable - at unaligned_references detects unaligned references to fields of packed structs - at uncommon_codepoints detects uncommon Unicode codepoints in identifiers - at unconditional_panic operation will cause a panic at runtime - at unconditional_recursion functions that cannot return without calling themselves - at unknown_crate_types unknown crate type found in `#[crate_type]` directive - at unknown_lints unrecognized lint attribute - at unnameable_test_items detects an item that cannot be named being marked as `#[test_case]` - at unreachable_code detects unreachable code paths - at unreachable_patterns detects unreachable patterns - at unreachable_pub `pub` items not reachable from crate root - at unsafe_code usage of `unsafe` code - at unsafe_op_in_unsafe_fn unsafe operations in unsafe functions without an explicit unsafe block are deprecated - at unstable_features enabling unstable features (deprecated. 
do not use) - at unstable_name_collisions detects name collision with an existing but unstable method - at unused_allocation detects unnecessary allocations that can be eliminated - at unused_assignments detect assignments that will never be read - at unused_attributes detects attributes that were not used by the compiler - at unused_braces unnecessary braces around an expression - at unused_comparisons comparisons made useless by limits of the types involved - at unused_crate_dependencies crate dependencies that are never used - at unused_doc_comments detects doc comments that aren't used by rustdoc - at unused_extern_crates extern crates that are never used - at unused_features unused features found in crate-level `#[feature]` directives - at unused_import_braces unnecessary braces around an imported item - at unused_imports imports that are never used - at unused_labels detects labels that are never used - at unused_lifetimes detects lifetime parameters that are never used - at unused_macros detects macros that were not used - at unused_must_use unused result of a type flagged as `#[must_use]` - at unused_mut detect mut variables which don't need to be mutable - at unused_parens `if`, `match`, `while` and `return` do not need parentheses - at unused_qualifications detects unnecessarily qualified names - at unused_results unused result of an expression in a statement - at unused_unsafe unnecessary use of an `unsafe` block - at unused_variables detect variables which are not used in any way - at variant_size_differences detects enums with widely varying variant sizes - at warnings mass-change the level for lints which produce warnings - at where_clauses_object_safety checks the object safety of where clauses - at while_true suggest using `loop { }` instead of `while true { }` - "#]], - ) - } - - #[test] - fn no_completion_for_incorrect_derive() { - check( - r#" -#[derive{<|>)] -struct Test {} -"#, - expect![[r#""#]], - ) - } - - #[test] - fn 
derive_with_input_completion() { - check( - r#" -#[derive(serde::Serialize, PartialEq, <|>)] -struct Test {} -"#, - expect![[r#" - at Clone - at Copy, Clone - at Debug - at Default - at Eq - at Hash - at Ord, PartialOrd, Eq - at PartialOrd - "#]], - ) - } - - #[test] - fn test_attribute_completion() { - check( - r#"#[<|>]"#, - expect![[r#" - at allow(…) - at cfg(…) - at cfg_attr(…) - at deny(…) - at deprecated = "…" - at derive(…) - at doc = "…" - at forbid(…) - at ignore = "…" - at inline(…) - at link - at link_name = "…" - at macro_export - at macro_use - at must_use = "…" - at no_mangle - at non_exhaustive - at path = "…" - at proc_macro - at proc_macro_attribute - at proc_macro_derive(…) - at repr(…) - at should_panic(…) - at target_feature = "…" - at test - at used - at warn(…) - "#]], - ) - } - - #[test] - fn test_attribute_completion_inside_nested_attr() { - check(r#"#[cfg(<|>)]"#, expect![[]]) - } - - #[test] - fn test_inner_attribute_completion() { - check( - r"#![<|>]", - expect![[r#" - at allow(…) - at cfg(…) - at cfg_attr(…) - at deny(…) - at deprecated = "…" - at derive(…) - at doc = "…" - at feature(…) - at forbid(…) - at global_allocator - at ignore = "…" - at inline(…) - at link - at link_name = "…" - at macro_export - at macro_use - at must_use = "…" - at no_mangle - at no_std - at non_exhaustive - at panic_handler - at path = "…" - at proc_macro - at proc_macro_attribute - at proc_macro_derive(…) - at recursion_limit = … - at repr(…) - at should_panic(…) - at target_feature = "…" - at test - at used - at warn(…) - at windows_subsystem = "…" - "#]], - ); - } -} diff --git a/crates/ra_ide/src/completion/complete_fn_param.rs b/crates/ra_ide/src/completion/complete_fn_param.rs deleted file mode 100644 index 4063342572..0000000000 --- a/crates/ra_ide/src/completion/complete_fn_param.rs +++ /dev/null @@ -1,135 +0,0 @@ -//! See `complete_fn_param`. 
- -use ra_syntax::{ - ast::{self, ModuleItemOwner}, - match_ast, AstNode, -}; -use rustc_hash::FxHashMap; - -use crate::completion::{CompletionContext, CompletionItem, CompletionKind, Completions}; - -/// Complete repeated parameters, both name and type. For example, if all -/// functions in a file have a `spam: &mut Spam` parameter, a completion with -/// `spam: &mut Spam` insert text/label and `spam` lookup string will be -/// suggested. -pub(super) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext) { - if !ctx.is_param { - return; - } - - let mut params = FxHashMap::default(); - - let me = ctx.token.ancestors().find_map(ast::Fn::cast); - let mut process_fn = |func: ast::Fn| { - if Some(&func) == me.as_ref() { - return; - } - func.param_list().into_iter().flat_map(|it| it.params()).for_each(|param| { - let text = param.syntax().text().to_string(); - params.entry(text).or_insert(param); - }) - }; - - for node in ctx.token.parent().ancestors() { - match_ast! { - match node { - ast::SourceFile(it) => it.items().filter_map(|item| match item { - ast::Item::Fn(it) => Some(it), - _ => None, - }).for_each(&mut process_fn), - ast::ItemList(it) => it.items().filter_map(|item| match item { - ast::Item::Fn(it) => Some(it), - _ => None, - }).for_each(&mut process_fn), - ast::AssocItemList(it) => it.assoc_items().filter_map(|item| match item { - ast::AssocItem::Fn(it) => Some(it), - _ => None, - }).for_each(&mut process_fn), - _ => continue, - } - }; - } - - params - .into_iter() - .filter_map(|(label, param)| { - let lookup = param.pat()?.syntax().text().to_string(); - Some((label, lookup)) - }) - .for_each(|(label, lookup)| { - CompletionItem::new(CompletionKind::Magic, ctx.source_range(), label) - .kind(crate::CompletionItemKind::Binding) - .lookup_by(lookup) - .add_to(acc) - }); -} - -#[cfg(test)] -mod tests { - use expect::{expect, Expect}; - - use crate::completion::{test_utils::completion_list, CompletionKind}; - - fn check(ra_fixture: &str, expect: 
Expect) { - let actual = completion_list(ra_fixture, CompletionKind::Magic); - expect.assert_eq(&actual); - } - - #[test] - fn test_param_completion_last_param() { - check( - r#" -fn foo(file_id: FileId) {} -fn bar(file_id: FileId) {} -fn baz(file<|>) {} -"#, - expect![[r#" - bn file_id: FileId - "#]], - ); - } - - #[test] - fn test_param_completion_nth_param() { - check( - r#" -fn foo(file_id: FileId) {} -fn baz(file<|>, x: i32) {} -"#, - expect![[r#" - bn file_id: FileId - "#]], - ); - } - - #[test] - fn test_param_completion_trait_param() { - check( - r#" -pub(crate) trait SourceRoot { - pub fn contains(&self, file_id: FileId) -> bool; - pub fn module_map(&self) -> &ModuleMap; - pub fn lines(&self, file_id: FileId) -> &LineIndex; - pub fn syntax(&self, file<|>) -} -"#, - expect![[r#" - bn file_id: FileId - "#]], - ); - } - - #[test] - fn completes_param_in_inner_function() { - check( - r#" -fn outer(text: String) { - fn inner(<|>) -} -"#, - expect![[r#" - bn text: String - "#]], - ) - } -} diff --git a/crates/ra_ide/src/completion/complete_keyword.rs b/crates/ra_ide/src/completion/complete_keyword.rs deleted file mode 100644 index b62064797b..0000000000 --- a/crates/ra_ide/src/completion/complete_keyword.rs +++ /dev/null @@ -1,536 +0,0 @@ -//! 
FIXME: write short doc here - -use ra_syntax::{ast, SyntaxKind}; -use test_utils::mark; - -use crate::completion::{ - CompletionContext, CompletionItem, CompletionItemKind, CompletionKind, Completions, -}; - -pub(super) fn complete_use_tree_keyword(acc: &mut Completions, ctx: &CompletionContext) { - // complete keyword "crate" in use stmt - let source_range = ctx.source_range(); - match (ctx.use_item_syntax.as_ref(), ctx.path_prefix.as_ref()) { - (Some(_), None) => { - CompletionItem::new(CompletionKind::Keyword, source_range, "crate::") - .kind(CompletionItemKind::Keyword) - .insert_text("crate::") - .add_to(acc); - CompletionItem::new(CompletionKind::Keyword, source_range, "self") - .kind(CompletionItemKind::Keyword) - .add_to(acc); - CompletionItem::new(CompletionKind::Keyword, source_range, "super::") - .kind(CompletionItemKind::Keyword) - .insert_text("super::") - .add_to(acc); - } - (Some(_), Some(_)) => { - CompletionItem::new(CompletionKind::Keyword, source_range, "self") - .kind(CompletionItemKind::Keyword) - .add_to(acc); - CompletionItem::new(CompletionKind::Keyword, source_range, "super::") - .kind(CompletionItemKind::Keyword) - .insert_text("super::") - .add_to(acc); - } - _ => {} - } - - // Suggest .await syntax for types that implement Future trait - if let Some(receiver) = &ctx.dot_receiver { - if let Some(ty) = ctx.sema.type_of_expr(receiver) { - if ty.impls_future(ctx.db) { - CompletionItem::new(CompletionKind::Keyword, ctx.source_range(), "await") - .kind(CompletionItemKind::Keyword) - .detail("expr.await") - .insert_text("await") - .add_to(acc); - } - }; - } -} - -pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionContext) { - if ctx.token.kind() == SyntaxKind::COMMENT { - mark::hit!(no_keyword_completion_in_comments); - return; - } - - let has_trait_or_impl_parent = ctx.has_impl_parent || ctx.has_trait_parent; - if ctx.trait_as_prev_sibling || ctx.impl_as_prev_sibling { - add_keyword(ctx, acc, "where", "where "); - 
return; - } - if ctx.unsafe_is_prev { - if ctx.has_item_list_or_source_file_parent || ctx.block_expr_parent { - add_keyword(ctx, acc, "fn", "fn $0() {}") - } - - if (ctx.has_item_list_or_source_file_parent) || ctx.block_expr_parent { - add_keyword(ctx, acc, "trait", "trait $0 {}"); - add_keyword(ctx, acc, "impl", "impl $0 {}"); - } - - return; - } - if ctx.has_item_list_or_source_file_parent || has_trait_or_impl_parent || ctx.block_expr_parent - { - add_keyword(ctx, acc, "fn", "fn $0() {}"); - } - if (ctx.has_item_list_or_source_file_parent) || ctx.block_expr_parent { - add_keyword(ctx, acc, "use", "use "); - add_keyword(ctx, acc, "impl", "impl $0 {}"); - add_keyword(ctx, acc, "trait", "trait $0 {}"); - } - - if ctx.has_item_list_or_source_file_parent { - add_keyword(ctx, acc, "enum", "enum $0 {}"); - add_keyword(ctx, acc, "struct", "struct $0"); - add_keyword(ctx, acc, "union", "union $0 {}"); - } - - if ctx.is_expr { - add_keyword(ctx, acc, "match", "match $0 {}"); - add_keyword(ctx, acc, "while", "while $0 {}"); - add_keyword(ctx, acc, "loop", "loop {$0}"); - add_keyword(ctx, acc, "if", "if "); - add_keyword(ctx, acc, "if let", "if let "); - } - - if ctx.if_is_prev || ctx.block_expr_parent { - add_keyword(ctx, acc, "let", "let "); - } - - if ctx.after_if { - add_keyword(ctx, acc, "else", "else {$0}"); - add_keyword(ctx, acc, "else if", "else if $0 {}"); - } - if (ctx.has_item_list_or_source_file_parent) || ctx.block_expr_parent { - add_keyword(ctx, acc, "mod", "mod $0 {}"); - } - if ctx.bind_pat_parent || ctx.ref_pat_parent { - add_keyword(ctx, acc, "mut", "mut "); - } - if ctx.has_item_list_or_source_file_parent || has_trait_or_impl_parent || ctx.block_expr_parent - { - add_keyword(ctx, acc, "const", "const "); - add_keyword(ctx, acc, "type", "type "); - } - if (ctx.has_item_list_or_source_file_parent) || ctx.block_expr_parent { - add_keyword(ctx, acc, "static", "static "); - }; - if (ctx.has_item_list_or_source_file_parent) || ctx.block_expr_parent { - 
add_keyword(ctx, acc, "extern", "extern "); - } - if ctx.has_item_list_or_source_file_parent - || has_trait_or_impl_parent - || ctx.block_expr_parent - || ctx.is_match_arm - { - add_keyword(ctx, acc, "unsafe", "unsafe "); - } - if ctx.in_loop_body { - if ctx.can_be_stmt { - add_keyword(ctx, acc, "continue", "continue;"); - add_keyword(ctx, acc, "break", "break;"); - } else { - add_keyword(ctx, acc, "continue", "continue"); - add_keyword(ctx, acc, "break", "break"); - } - } - if ctx.has_item_list_or_source_file_parent || ctx.has_impl_parent { - add_keyword(ctx, acc, "pub", "pub ") - } - - if !ctx.is_trivial_path { - return; - } - let fn_def = match &ctx.function_syntax { - Some(it) => it, - None => return, - }; - acc.add_all(complete_return(ctx, &fn_def, ctx.can_be_stmt)); -} - -fn keyword(ctx: &CompletionContext, kw: &str, snippet: &str) -> CompletionItem { - let res = CompletionItem::new(CompletionKind::Keyword, ctx.source_range(), kw) - .kind(CompletionItemKind::Keyword); - - match ctx.config.snippet_cap { - Some(cap) => res.insert_snippet(cap, snippet), - _ => res.insert_text(if snippet.contains('$') { kw } else { snippet }), - } - .build() -} - -fn add_keyword(ctx: &CompletionContext, acc: &mut Completions, kw: &str, snippet: &str) { - acc.add(keyword(ctx, kw, snippet)); -} - -fn complete_return( - ctx: &CompletionContext, - fn_def: &ast::Fn, - can_be_stmt: bool, -) -> Option { - let snip = match (can_be_stmt, fn_def.ret_type().is_some()) { - (true, true) => "return $0;", - (true, false) => "return;", - (false, true) => "return $0", - (false, false) => "return", - }; - Some(keyword(ctx, "return", snip)) -} - -#[cfg(test)] -mod tests { - use expect::{expect, Expect}; - - use crate::completion::{ - test_utils::{check_edit, completion_list}, - CompletionKind, - }; - use test_utils::mark; - - fn check(ra_fixture: &str, expect: Expect) { - let actual = completion_list(ra_fixture, CompletionKind::Keyword); - expect.assert_eq(&actual) - } - - #[test] - fn 
test_keywords_in_use_stmt() { - check( - r"use <|>", - expect![[r#" - kw crate:: - kw self - kw super:: - "#]], - ); - - check( - r"use a::<|>", - expect![[r#" - kw self - kw super:: - "#]], - ); - - check( - r"use a::{b, <|>}", - expect![[r#" - kw self - kw super:: - "#]], - ); - } - - #[test] - fn test_keywords_at_source_file_level() { - check( - r"m<|>", - expect![[r#" - kw const - kw enum - kw extern - kw fn - kw impl - kw mod - kw pub - kw static - kw struct - kw trait - kw type - kw union - kw unsafe - kw use - "#]], - ); - } - - #[test] - fn test_keywords_in_function() { - check( - r"fn quux() { <|> }", - expect![[r#" - kw const - kw extern - kw fn - kw if - kw if let - kw impl - kw let - kw loop - kw match - kw mod - kw return - kw static - kw trait - kw type - kw unsafe - kw use - kw while - "#]], - ); - } - - #[test] - fn test_keywords_inside_block() { - check( - r"fn quux() { if true { <|> } }", - expect![[r#" - kw const - kw extern - kw fn - kw if - kw if let - kw impl - kw let - kw loop - kw match - kw mod - kw return - kw static - kw trait - kw type - kw unsafe - kw use - kw while - "#]], - ); - } - - #[test] - fn test_keywords_after_if() { - check( - r#"fn quux() { if true { () } <|> }"#, - expect![[r#" - kw const - kw else - kw else if - kw extern - kw fn - kw if - kw if let - kw impl - kw let - kw loop - kw match - kw mod - kw return - kw static - kw trait - kw type - kw unsafe - kw use - kw while - "#]], - ); - check_edit( - "else", - r#"fn quux() { if true { () } <|> }"#, - r#"fn quux() { if true { () } else {$0} }"#, - ); - } - - #[test] - fn test_keywords_in_match_arm() { - check( - r#" -fn quux() -> i32 { - match () { () => <|> } -} -"#, - expect![[r#" - kw if - kw if let - kw loop - kw match - kw return - kw unsafe - kw while - "#]], - ); - } - - #[test] - fn test_keywords_in_trait_def() { - check( - r"trait My { <|> }", - expect![[r#" - kw const - kw fn - kw type - kw unsafe - "#]], - ); - } - - #[test] - fn test_keywords_in_impl_def() { - 
check( - r"impl My { <|> }", - expect![[r#" - kw const - kw fn - kw pub - kw type - kw unsafe - "#]], - ); - } - - #[test] - fn test_keywords_in_loop() { - check( - r"fn my() { loop { <|> } }", - expect![[r#" - kw break - kw const - kw continue - kw extern - kw fn - kw if - kw if let - kw impl - kw let - kw loop - kw match - kw mod - kw return - kw static - kw trait - kw type - kw unsafe - kw use - kw while - "#]], - ); - } - - #[test] - fn test_keywords_after_unsafe_in_item_list() { - check( - r"unsafe <|>", - expect![[r#" - kw fn - kw impl - kw trait - "#]], - ); - } - - #[test] - fn test_keywords_after_unsafe_in_block_expr() { - check( - r"fn my_fn() { unsafe <|> }", - expect![[r#" - kw fn - kw impl - kw trait - "#]], - ); - } - - #[test] - fn test_mut_in_ref_and_in_fn_parameters_list() { - check( - r"fn my_fn(&<|>) {}", - expect![[r#" - kw mut - "#]], - ); - check( - r"fn my_fn(<|>) {}", - expect![[r#" - kw mut - "#]], - ); - check( - r"fn my_fn() { let &<|> }", - expect![[r#" - kw mut - "#]], - ); - } - - #[test] - fn test_where_keyword() { - check( - r"trait A <|>", - expect![[r#" - kw where - "#]], - ); - check( - r"impl A <|>", - expect![[r#" - kw where - "#]], - ); - } - - #[test] - fn no_keyword_completion_in_comments() { - mark::check!(no_keyword_completion_in_comments); - check( - r#" -fn test() { - let x = 2; // A comment<|> -} -"#, - expect![[""]], - ); - check( - r#" -/* -Some multi-line comment<|> -*/ -"#, - expect![[""]], - ); - check( - r#" -/// Some doc comment -/// let test<|> = 1 -"#, - expect![[""]], - ); - } - - #[test] - fn test_completion_await_impls_future() { - check( - r#" -//- /main.rs -use std::future::*; -struct A {} -impl Future for A {} -fn foo(a: A) { a.<|> } - -//- /std/lib.rs -pub mod future { - #[lang = "future_trait"] - pub trait Future {} -} -"#, - expect![[r#" - kw await expr.await - "#]], - ) - } - - #[test] - fn after_let() { - check( - r#"fn main() { let _ = <|> }"#, - expect![[r#" - kw if - kw if let - kw loop - kw match 
- kw return - kw while - "#]], - ) - } -} diff --git a/crates/ra_ide/src/completion/complete_postfix.rs b/crates/ra_ide/src/completion/complete_postfix.rs deleted file mode 100644 index 8735b90103..0000000000 --- a/crates/ra_ide/src/completion/complete_postfix.rs +++ /dev/null @@ -1,378 +0,0 @@ -//! FIXME: write short doc here -use ra_assists::utils::TryEnum; -use ra_syntax::{ - ast::{self, AstNode}, - TextRange, TextSize, -}; -use ra_text_edit::TextEdit; - -use crate::{ - completion::{ - completion_config::SnippetCap, - completion_context::CompletionContext, - completion_item::{Builder, CompletionKind, Completions}, - }, - CompletionItem, CompletionItemKind, -}; - -pub(super) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) { - if !ctx.config.enable_postfix_completions { - return; - } - - let dot_receiver = match &ctx.dot_receiver { - Some(it) => it, - None => return, - }; - - let receiver_text = - get_receiver_text(dot_receiver, ctx.dot_receiver_is_ambiguous_float_literal); - - let receiver_ty = match ctx.sema.type_of_expr(&dot_receiver) { - Some(it) => it, - None => return, - }; - - let cap = match ctx.config.snippet_cap { - Some(it) => it, - None => return, - }; - let try_enum = TryEnum::from_ty(&ctx.sema, &receiver_ty); - if let Some(try_enum) = &try_enum { - match try_enum { - TryEnum::Result => { - postfix_snippet( - ctx, - cap, - &dot_receiver, - "ifl", - "if let Ok {}", - &format!("if let Ok($1) = {} {{\n $0\n}}", receiver_text), - ) - .add_to(acc); - - postfix_snippet( - ctx, - cap, - &dot_receiver, - "while", - "while let Ok {}", - &format!("while let Ok($1) = {} {{\n $0\n}}", receiver_text), - ) - .add_to(acc); - } - TryEnum::Option => { - postfix_snippet( - ctx, - cap, - &dot_receiver, - "ifl", - "if let Some {}", - &format!("if let Some($1) = {} {{\n $0\n}}", receiver_text), - ) - .add_to(acc); - - postfix_snippet( - ctx, - cap, - &dot_receiver, - "while", - "while let Some {}", - &format!("while let Some($1) = {} {{\n $0\n}}", 
receiver_text), - ) - .add_to(acc); - } - } - } else if receiver_ty.is_bool() || receiver_ty.is_unknown() { - postfix_snippet( - ctx, - cap, - &dot_receiver, - "if", - "if expr {}", - &format!("if {} {{\n $0\n}}", receiver_text), - ) - .add_to(acc); - postfix_snippet( - ctx, - cap, - &dot_receiver, - "while", - "while expr {}", - &format!("while {} {{\n $0\n}}", receiver_text), - ) - .add_to(acc); - postfix_snippet(ctx, cap, &dot_receiver, "not", "!expr", &format!("!{}", receiver_text)) - .add_to(acc); - } - - postfix_snippet(ctx, cap, &dot_receiver, "ref", "&expr", &format!("&{}", receiver_text)) - .add_to(acc); - postfix_snippet( - ctx, - cap, - &dot_receiver, - "refm", - "&mut expr", - &format!("&mut {}", receiver_text), - ) - .add_to(acc); - - // The rest of the postfix completions create an expression that moves an argument, - // so it's better to consider references now to avoid breaking the compilation - let dot_receiver = include_references(dot_receiver); - let receiver_text = - get_receiver_text(&dot_receiver, ctx.dot_receiver_is_ambiguous_float_literal); - - match try_enum { - Some(try_enum) => match try_enum { - TryEnum::Result => { - postfix_snippet( - ctx, - cap, - &dot_receiver, - "match", - "match expr {}", - &format!("match {} {{\n Ok(${{1:_}}) => {{$2}},\n Err(${{3:_}}) => {{$0}},\n}}", receiver_text), - ) - .add_to(acc); - } - TryEnum::Option => { - postfix_snippet( - ctx, - cap, - &dot_receiver, - "match", - "match expr {}", - &format!( - "match {} {{\n Some(${{1:_}}) => {{$2}},\n None => {{$0}},\n}}", - receiver_text - ), - ) - .add_to(acc); - } - }, - None => { - postfix_snippet( - ctx, - cap, - &dot_receiver, - "match", - "match expr {}", - &format!("match {} {{\n ${{1:_}} => {{$0}},\n}}", receiver_text), - ) - .add_to(acc); - } - } - - postfix_snippet( - ctx, - cap, - &dot_receiver, - "box", - "Box::new(expr)", - &format!("Box::new({})", receiver_text), - ) - .add_to(acc); - - postfix_snippet( - ctx, - cap, - &dot_receiver, - "dbg", - 
"dbg!(expr)", - &format!("dbg!({})", receiver_text), - ) - .add_to(acc); - - postfix_snippet( - ctx, - cap, - &dot_receiver, - "call", - "function(expr)", - &format!("${{1}}({})", receiver_text), - ) - .add_to(acc); -} - -fn get_receiver_text(receiver: &ast::Expr, receiver_is_ambiguous_float_literal: bool) -> String { - if receiver_is_ambiguous_float_literal { - let text = receiver.syntax().text(); - let without_dot = ..text.len() - TextSize::of('.'); - text.slice(without_dot).to_string() - } else { - receiver.to_string() - } -} - -fn include_references(initial_element: &ast::Expr) -> ast::Expr { - let mut resulting_element = initial_element.clone(); - while let Some(parent_ref_element) = - resulting_element.syntax().parent().and_then(ast::RefExpr::cast) - { - resulting_element = ast::Expr::from(parent_ref_element); - } - resulting_element -} - -fn postfix_snippet( - ctx: &CompletionContext, - cap: SnippetCap, - receiver: &ast::Expr, - label: &str, - detail: &str, - snippet: &str, -) -> Builder { - let edit = { - let receiver_syntax = receiver.syntax(); - let receiver_range = ctx.sema.original_range(receiver_syntax).range; - let delete_range = TextRange::new(receiver_range.start(), ctx.source_range().end()); - TextEdit::replace(delete_range, snippet.to_string()) - }; - CompletionItem::new(CompletionKind::Postfix, ctx.source_range(), label) - .detail(detail) - .kind(CompletionItemKind::Snippet) - .snippet_edit(cap, edit) -} - -#[cfg(test)] -mod tests { - use expect::{expect, Expect}; - - use crate::completion::{ - test_utils::{check_edit, completion_list}, - CompletionKind, - }; - - fn check(ra_fixture: &str, expect: Expect) { - let actual = completion_list(ra_fixture, CompletionKind::Postfix); - expect.assert_eq(&actual) - } - - #[test] - fn postfix_completion_works_for_trivial_path_expression() { - check( - r#" -fn main() { - let bar = true; - bar.<|> -} -"#, - expect![[r#" - sn box Box::new(expr) - sn call function(expr) - sn dbg dbg!(expr) - sn if if expr {} - 
sn match match expr {} - sn not !expr - sn ref &expr - sn refm &mut expr - sn while while expr {} - "#]], - ); - } - - #[test] - fn postfix_type_filtering() { - check( - r#" -fn main() { - let bar: u8 = 12; - bar.<|> -} -"#, - expect![[r#" - sn box Box::new(expr) - sn call function(expr) - sn dbg dbg!(expr) - sn match match expr {} - sn ref &expr - sn refm &mut expr - "#]], - ) - } - - #[test] - fn option_iflet() { - check_edit( - "ifl", - r#" -enum Option { Some(T), None } - -fn main() { - let bar = Option::Some(true); - bar.<|> -} -"#, - r#" -enum Option { Some(T), None } - -fn main() { - let bar = Option::Some(true); - if let Some($1) = bar { - $0 -} -} -"#, - ); - } - - #[test] - fn result_match() { - check_edit( - "match", - r#" -enum Result { Ok(T), Err(E) } - -fn main() { - let bar = Result::Ok(true); - bar.<|> -} -"#, - r#" -enum Result { Ok(T), Err(E) } - -fn main() { - let bar = Result::Ok(true); - match bar { - Ok(${1:_}) => {$2}, - Err(${3:_}) => {$0}, -} -} -"#, - ); - } - - #[test] - fn postfix_completion_works_for_ambiguous_float_literal() { - check_edit("refm", r#"fn main() { 42.<|> }"#, r#"fn main() { &mut 42 }"#) - } - - #[test] - fn works_in_simple_macro() { - check_edit( - "dbg", - r#" -macro_rules! m { ($e:expr) => { $e } } -fn main() { - let bar: u8 = 12; - m!(bar.d<|>) -} -"#, - r#" -macro_rules! m { ($e:expr) => { $e } } -fn main() { - let bar: u8 = 12; - m!(dbg!(bar)) -} -"#, - ); - } - - #[test] - fn postfix_completion_for_references() { - check_edit("dbg", r#"fn main() { &&42.<|> }"#, r#"fn main() { dbg!(&&42) }"#); - check_edit("refm", r#"fn main() { &&42.<|> }"#, r#"fn main() { &&&mut 42 }"#); - } -} diff --git a/crates/ra_ide/src/completion/complete_qualified_path.rs b/crates/ra_ide/src/completion/complete_qualified_path.rs deleted file mode 100644 index b08f5b9b45..0000000000 --- a/crates/ra_ide/src/completion/complete_qualified_path.rs +++ /dev/null @@ -1,733 +0,0 @@ -//! Completion of paths, i.e. `some::prefix::<|>`. 
- -use hir::{Adt, HasVisibility, PathResolution, ScopeDef}; -use ra_syntax::AstNode; -use rustc_hash::FxHashSet; -use test_utils::mark; - -use crate::completion::{CompletionContext, Completions}; - -pub(super) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionContext) { - let path = match &ctx.path_prefix { - Some(path) => path.clone(), - None => return, - }; - - if ctx.attribute_under_caret.is_some() { - return; - } - - let context_module = ctx.scope.module(); - - let resolution = match ctx.scope.resolve_hir_path_qualifier(&path) { - Some(res) => res, - None => return, - }; - - // Add associated types on type parameters and `Self`. - resolution.assoc_type_shorthand_candidates(ctx.db, |alias| { - acc.add_type_alias(ctx, alias); - None::<()> - }); - - match resolution { - PathResolution::Def(hir::ModuleDef::Module(module)) => { - let module_scope = module.scope(ctx.db, context_module); - for (name, def) in module_scope { - if ctx.use_item_syntax.is_some() { - if let ScopeDef::Unknown = def { - if let Some(name_ref) = ctx.name_ref_syntax.as_ref() { - if name_ref.syntax().text() == name.to_string().as_str() { - // for `use self::foo<|>`, don't suggest `foo` as a completion - mark::hit!(dont_complete_current_use); - continue; - } - } - } - } - - acc.add_resolution(ctx, name.to_string(), &def); - } - } - PathResolution::Def(def @ hir::ModuleDef::Adt(_)) - | PathResolution::Def(def @ hir::ModuleDef::TypeAlias(_)) => { - if let hir::ModuleDef::Adt(Adt::Enum(e)) = def { - for variant in e.variants(ctx.db) { - acc.add_enum_variant(ctx, variant, None); - } - } - let ty = match def { - hir::ModuleDef::Adt(adt) => adt.ty(ctx.db), - hir::ModuleDef::TypeAlias(a) => a.ty(ctx.db), - _ => unreachable!(), - }; - - // XXX: For parity with Rust bug #22519, this does not complete Ty::AssocType. 
- // (where AssocType is defined on a trait, not an inherent impl) - - let krate = ctx.krate; - if let Some(krate) = krate { - let traits_in_scope = ctx.scope.traits_in_scope(); - ty.iterate_path_candidates(ctx.db, krate, &traits_in_scope, None, |_ty, item| { - if context_module.map_or(false, |m| !item.is_visible_from(ctx.db, m)) { - return None; - } - match item { - hir::AssocItem::Function(func) => { - acc.add_function(ctx, func, None); - } - hir::AssocItem::Const(ct) => acc.add_const(ctx, ct), - hir::AssocItem::TypeAlias(ty) => acc.add_type_alias(ctx, ty), - } - None::<()> - }); - - // Iterate assoc types separately - ty.iterate_assoc_items(ctx.db, krate, |item| { - if context_module.map_or(false, |m| !item.is_visible_from(ctx.db, m)) { - return None; - } - match item { - hir::AssocItem::Function(_) | hir::AssocItem::Const(_) => {} - hir::AssocItem::TypeAlias(ty) => acc.add_type_alias(ctx, ty), - } - None::<()> - }); - } - } - PathResolution::Def(hir::ModuleDef::Trait(t)) => { - // Handles `Trait::assoc` as well as `::assoc`. 
- for item in t.items(ctx.db) { - if context_module.map_or(false, |m| !item.is_visible_from(ctx.db, m)) { - continue; - } - match item { - hir::AssocItem::Function(func) => { - acc.add_function(ctx, func, None); - } - hir::AssocItem::Const(ct) => acc.add_const(ctx, ct), - hir::AssocItem::TypeAlias(ty) => acc.add_type_alias(ctx, ty), - } - } - } - PathResolution::TypeParam(_) | PathResolution::SelfType(_) => { - if let Some(krate) = ctx.krate { - let ty = match resolution { - PathResolution::TypeParam(param) => param.ty(ctx.db), - PathResolution::SelfType(impl_def) => impl_def.target_ty(ctx.db), - _ => return, - }; - - let traits_in_scope = ctx.scope.traits_in_scope(); - let mut seen = FxHashSet::default(); - ty.iterate_path_candidates(ctx.db, krate, &traits_in_scope, None, |_ty, item| { - if context_module.map_or(false, |m| !item.is_visible_from(ctx.db, m)) { - return None; - } - - // We might iterate candidates of a trait multiple times here, so deduplicate - // them. - if seen.insert(item) { - match item { - hir::AssocItem::Function(func) => { - acc.add_function(ctx, func, None); - } - hir::AssocItem::Const(ct) => acc.add_const(ctx, ct), - hir::AssocItem::TypeAlias(ty) => acc.add_type_alias(ctx, ty), - } - } - None::<()> - }); - } - } - _ => {} - } -} - -#[cfg(test)] -mod tests { - use expect::{expect, Expect}; - use test_utils::mark; - - use crate::completion::{ - test_utils::{check_edit, completion_list}, - CompletionKind, - }; - - fn check(ra_fixture: &str, expect: Expect) { - let actual = completion_list(ra_fixture, CompletionKind::Reference); - expect.assert_eq(&actual); - } - - fn check_builtin(ra_fixture: &str, expect: Expect) { - let actual = completion_list(ra_fixture, CompletionKind::BuiltinType); - expect.assert_eq(&actual); - } - - #[test] - fn dont_complete_current_use() { - mark::check!(dont_complete_current_use); - check(r#"use self::foo<|>;"#, expect![[""]]); - } - - #[test] - fn dont_complete_current_use_in_braces_with_glob() { - check( - r#" 
-mod foo { pub struct S; } -use self::{foo::*, bar<|>}; -"#, - expect![[r#" - st S - md foo - "#]], - ); - } - - #[test] - fn dont_complete_primitive_in_use() { - check_builtin(r#"use self::<|>;"#, expect![[""]]); - } - - #[test] - fn dont_complete_primitive_in_module_scope() { - check_builtin(r#"fn foo() { self::<|> }"#, expect![[""]]); - } - - #[test] - fn completes_primitives() { - check_builtin( - r#"fn main() { let _: <|> = 92; }"#, - expect![[r#" - bt bool - bt char - bt f32 - bt f64 - bt i128 - bt i16 - bt i32 - bt i64 - bt i8 - bt isize - bt str - bt u128 - bt u16 - bt u32 - bt u64 - bt u8 - bt usize - "#]], - ); - } - - #[test] - fn completes_mod_with_same_name_as_function() { - check( - r#" -use self::my::<|>; - -mod my { pub struct Bar; } -fn my() {} -"#, - expect![[r#" - st Bar - "#]], - ); - } - - #[test] - fn filters_visibility() { - check( - r#" -use self::my::<|>; - -mod my { - struct Bar; - pub struct Foo; - pub use Bar as PublicBar; -} -"#, - expect![[r#" - st Foo - st PublicBar - "#]], - ); - } - - #[test] - fn completes_use_item_starting_with_self() { - check( - r#" -use self::m::<|>; - -mod m { pub struct Bar; } -"#, - expect![[r#" - st Bar - "#]], - ); - } - - #[test] - fn completes_use_item_starting_with_crate() { - check( - r#" -//- /lib.rs -mod foo; -struct Spam; -//- /foo.rs -use crate::Sp<|> -"#, - expect![[r#" - st Spam - md foo - "#]], - ); - } - - #[test] - fn completes_nested_use_tree() { - check( - r#" -//- /lib.rs -mod foo; -struct Spam; -//- /foo.rs -use crate::{Sp<|>}; -"#, - expect![[r#" - st Spam - md foo - "#]], - ); - } - - #[test] - fn completes_deeply_nested_use_tree() { - check( - r#" -//- /lib.rs -mod foo; -pub mod bar { - pub mod baz { - pub struct Spam; - } -} -//- /foo.rs -use crate::{bar::{baz::Sp<|>}}; -"#, - expect![[r#" - st Spam - "#]], - ); - } - - #[test] - fn completes_enum_variant() { - check( - r#" -enum E { Foo, Bar(i32) } -fn foo() { let _ = E::<|> } -"#, - expect![[r#" - ev Bar(…) (i32) - ev Foo () - "#]], 
- ); - } - - #[test] - fn completes_struct_associated_items() { - check( - r#" -//- /lib.rs -struct S; - -impl S { - fn a() {} - fn b(&self) {} - const C: i32 = 42; - type T = i32; -} - -fn foo() { let _ = S::<|> } -"#, - expect![[r#" - ct C const C: i32 = 42; - ta T type T = i32; - fn a() fn a() - me b() fn b(&self) - "#]], - ); - } - - #[test] - fn associated_item_visibility() { - check( - r#" -struct S; - -mod m { - impl super::S { - pub(super) fn public_method() { } - fn private_method() { } - pub(super) type PublicType = u32; - type PrivateType = u32; - pub(super) const PUBLIC_CONST: u32 = 1; - const PRIVATE_CONST: u32 = 1; - } -} - -fn foo() { let _ = S::<|> } -"#, - expect![[r#" - ct PUBLIC_CONST pub(super) const PUBLIC_CONST: u32 = 1; - ta PublicType pub(super) type PublicType = u32; - fn public_method() pub(super) fn public_method() - "#]], - ); - } - - #[test] - fn completes_enum_associated_method() { - check( - r#" -enum E {}; -impl E { fn m() { } } - -fn foo() { let _ = E::<|> } - "#, - expect![[r#" - fn m() fn m() - "#]], - ); - } - - #[test] - fn completes_union_associated_method() { - check( - r#" -union U {}; -impl U { fn m() { } } - -fn foo() { let _ = U::<|> } -"#, - expect![[r#" - fn m() fn m() - "#]], - ); - } - - #[test] - fn completes_use_paths_across_crates() { - check( - r#" -//- /main.rs -use foo::<|>; - -//- /foo/lib.rs -pub mod bar { pub struct S; } -"#, - expect![[r#" - md bar - "#]], - ); - } - - #[test] - fn completes_trait_associated_method_1() { - check( - r#" -trait Trait { fn m(); } - -fn foo() { let _ = Trait::<|> } -"#, - expect![[r#" - fn m() fn m() - "#]], - ); - } - - #[test] - fn completes_trait_associated_method_2() { - check( - r#" -trait Trait { fn m(); } - -struct S; -impl Trait for S {} - -fn foo() { let _ = S::<|> } -"#, - expect![[r#" - fn m() fn m() - "#]], - ); - } - - #[test] - fn completes_trait_associated_method_3() { - check( - r#" -trait Trait { fn m(); } - -struct S; -impl Trait for S {} - -fn foo() { let _ = 
::<|> } -"#, - expect![[r#" - fn m() fn m() - "#]], - ); - } - - #[test] - fn completes_ty_param_assoc_ty() { - check( - r#" -trait Super { - type Ty; - const CONST: u8; - fn func() {} - fn method(&self) {} -} - -trait Sub: Super { - type SubTy; - const C2: (); - fn subfunc() {} - fn submethod(&self) {} -} - -fn foo() { T::<|> } -"#, - expect![[r#" - ct C2 const C2: (); - ct CONST const CONST: u8; - ta SubTy type SubTy; - ta Ty type Ty; - fn func() fn func() - me method() fn method(&self) - fn subfunc() fn subfunc() - me submethod() fn submethod(&self) - "#]], - ); - } - - #[test] - fn completes_self_param_assoc_ty() { - check( - r#" -trait Super { - type Ty; - const CONST: u8 = 0; - fn func() {} - fn method(&self) {} -} - -trait Sub: Super { - type SubTy; - const C2: () = (); - fn subfunc() {} - fn submethod(&self) {} -} - -struct Wrap(T); -impl Super for Wrap {} -impl Sub for Wrap { - fn subfunc() { - // Should be able to assume `Self: Sub + Super` - Self::<|> - } -} -"#, - expect![[r#" - ct C2 const C2: () = (); - ct CONST const CONST: u8 = 0; - ta SubTy type SubTy; - ta Ty type Ty; - fn func() fn func() - me method() fn method(&self) - fn subfunc() fn subfunc() - me submethod() fn submethod(&self) - "#]], - ); - } - - #[test] - fn completes_type_alias() { - check( - r#" -struct S; -impl S { fn foo() {} } -type T = S; -impl T { fn bar() {} } - -fn main() { T::<|>; } -"#, - expect![[r#" - fn bar() fn bar() - fn foo() fn foo() - "#]], - ); - } - - #[test] - fn completes_qualified_macros() { - check( - r#" -#[macro_export] -macro_rules! foo { () => {} } - -fn main() { let _ = crate::<|> } - "#, - expect![[r##" - ma foo!(…) #[macro_export] - macro_rules! 
foo - fn main() fn main() - "##]], - ); - } - - #[test] - fn test_super_super_completion() { - check( - r#" -mod a { - const A: usize = 0; - mod b { - const B: usize = 0; - mod c { use super::super::<|> } - } -} -"#, - expect![[r#" - ct A - md b - "#]], - ); - } - - #[test] - fn completes_reexported_items_under_correct_name() { - check( - r#" -fn foo() { self::m::<|> } - -mod m { - pub use super::p::wrong_fn as right_fn; - pub use super::p::WRONG_CONST as RIGHT_CONST; - pub use super::p::WrongType as RightType; -} -mod p { - fn wrong_fn() {} - const WRONG_CONST: u32 = 1; - struct WrongType {}; -} -"#, - expect![[r#" - ct RIGHT_CONST - st RightType - fn right_fn() fn wrong_fn() - "#]], - ); - - check_edit( - "RightType", - r#" -fn foo() { self::m::<|> } - -mod m { - pub use super::p::wrong_fn as right_fn; - pub use super::p::WRONG_CONST as RIGHT_CONST; - pub use super::p::WrongType as RightType; -} -mod p { - fn wrong_fn() {} - const WRONG_CONST: u32 = 1; - struct WrongType {}; -} -"#, - r#" -fn foo() { self::m::RightType } - -mod m { - pub use super::p::wrong_fn as right_fn; - pub use super::p::WRONG_CONST as RIGHT_CONST; - pub use super::p::WrongType as RightType; -} -mod p { - fn wrong_fn() {} - const WRONG_CONST: u32 = 1; - struct WrongType {}; -} -"#, - ); - } - - #[test] - fn completes_in_simple_macro_call() { - check( - r#" -macro_rules! 
m { ($e:expr) => { $e } } -fn main() { m!(self::f<|>); } -fn foo() {} -"#, - expect![[r#" - fn foo() fn foo() - fn main() fn main() - "#]], - ); - } - - #[test] - fn function_mod_share_name() { - check( - r#" -fn foo() { self::m::<|> } - -mod m { - pub mod z {} - pub fn z() {} -} -"#, - expect![[r#" - md z - fn z() pub fn z() - "#]], - ); - } - - #[test] - fn completes_hashmap_new() { - check( - r#" -struct RandomState; -struct HashMap {} - -impl HashMap { - pub fn new() -> HashMap { } -} -fn foo() { - HashMap::<|> -} -"#, - expect![[r#" - fn new() pub fn new() -> HashMap - "#]], - ); - } - - #[test] - fn dont_complete_attr() { - check( - r#" -mod foo { pub struct Foo; } -#[foo::<|>] -fn f() {} -"#, - expect![[""]], - ); - } -} diff --git a/crates/ra_ide/src/completion/complete_snippet.rs b/crates/ra_ide/src/completion/complete_snippet.rs deleted file mode 100644 index 28d8f78768..0000000000 --- a/crates/ra_ide/src/completion/complete_snippet.rs +++ /dev/null @@ -1,116 +0,0 @@ -//! FIXME: write short doc here - -use crate::completion::{ - completion_config::SnippetCap, completion_item::Builder, CompletionContext, CompletionItem, - CompletionItemKind, CompletionKind, Completions, -}; - -fn snippet(ctx: &CompletionContext, cap: SnippetCap, label: &str, snippet: &str) -> Builder { - CompletionItem::new(CompletionKind::Snippet, ctx.source_range(), label) - .insert_snippet(cap, snippet) - .kind(CompletionItemKind::Snippet) -} - -pub(super) fn complete_expr_snippet(acc: &mut Completions, ctx: &CompletionContext) { - if !(ctx.is_trivial_path && ctx.function_syntax.is_some()) { - return; - } - let cap = match ctx.config.snippet_cap { - Some(it) => it, - None => return, - }; - - snippet(ctx, cap, "pd", "eprintln!(\"$0 = {:?}\", $0);").add_to(acc); - snippet(ctx, cap, "ppd", "eprintln!(\"$0 = {:#?}\", $0);").add_to(acc); -} - -pub(super) fn complete_item_snippet(acc: &mut Completions, ctx: &CompletionContext) { - if !ctx.is_new_item { - return; - } - let cap = match 
ctx.config.snippet_cap { - Some(it) => it, - None => return, - }; - - snippet( - ctx, - cap, - "Test module", - "\ -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn ${1:test_name}() { - $0 - } -}", - ) - .lookup_by("tmod") - .add_to(acc); - - snippet( - ctx, - cap, - "Test function", - "\ -#[test] -fn ${1:feature}() { - $0 -}", - ) - .lookup_by("tfn") - .add_to(acc); - - snippet(ctx, cap, "macro_rules", "macro_rules! $1 {\n\t($2) => {\n\t\t$0\n\t};\n}").add_to(acc); - snippet(ctx, cap, "pub(crate)", "pub(crate) $0").add_to(acc); -} - -#[cfg(test)] -mod tests { - use expect::{expect, Expect}; - - use crate::completion::{test_utils::completion_list, CompletionKind}; - - fn check(ra_fixture: &str, expect: Expect) { - let actual = completion_list(ra_fixture, CompletionKind::Snippet); - expect.assert_eq(&actual) - } - - #[test] - fn completes_snippets_in_expressions() { - check( - r#"fn foo(x: i32) { <|> }"#, - expect![[r#" - sn pd - sn ppd - "#]], - ); - } - - #[test] - fn should_not_complete_snippets_in_path() { - check(r#"fn foo(x: i32) { ::foo<|> }"#, expect![[""]]); - check(r#"fn foo(x: i32) { ::<|> }"#, expect![[""]]); - } - - #[test] - fn completes_snippets_in_items() { - check( - r#" -#[cfg(test)] -mod tests { - <|> -} -"#, - expect![[r#" - sn Test function - sn Test module - sn macro_rules - sn pub(crate) - "#]], - ) - } -} diff --git a/crates/ra_ide/src/completion/complete_trait_impl.rs b/crates/ra_ide/src/completion/complete_trait_impl.rs deleted file mode 100644 index d9a0ef167d..0000000000 --- a/crates/ra_ide/src/completion/complete_trait_impl.rs +++ /dev/null @@ -1,488 +0,0 @@ -//! Completion for associated items in a trait implementation. -//! -//! This module adds the completion items related to implementing associated -//! items within a `impl Trait for Struct` block. The current context node -//! must be within either a `FN`, `TYPE_ALIAS`, or `CONST` node -//! and an direct child of an `IMPL`. -//! -//! # Examples -//! -//! 
Considering the following trait `impl`: -//! -//! ```ignore -//! trait SomeTrait { -//! fn foo(); -//! } -//! -//! impl SomeTrait for () { -//! fn f<|> -//! } -//! ``` -//! -//! may result in the completion of the following method: -//! -//! ```ignore -//! # trait SomeTrait { -//! # fn foo(); -//! # } -//! -//! impl SomeTrait for () { -//! fn foo() {}<|> -//! } -//! ``` - -use hir::{self, Docs, HasSource}; -use ra_assists::utils::get_missing_assoc_items; -use ra_syntax::{ - ast::{self, edit, Impl}, - AstNode, SyntaxKind, SyntaxNode, TextRange, T, -}; -use ra_text_edit::TextEdit; - -use crate::{ - completion::{ - CompletionContext, CompletionItem, CompletionItemKind, CompletionKind, Completions, - }, - display::function_declaration, -}; - -pub(crate) fn complete_trait_impl(acc: &mut Completions, ctx: &CompletionContext) { - if let Some((trigger, impl_def)) = completion_match(ctx) { - match trigger.kind() { - SyntaxKind::NAME_REF => get_missing_assoc_items(&ctx.sema, &impl_def) - .into_iter() - .for_each(|item| match item { - hir::AssocItem::Function(fn_item) => { - add_function_impl(&trigger, acc, ctx, fn_item) - } - hir::AssocItem::TypeAlias(type_item) => { - add_type_alias_impl(&trigger, acc, ctx, type_item) - } - hir::AssocItem::Const(const_item) => { - add_const_impl(&trigger, acc, ctx, const_item) - } - }), - - SyntaxKind::FN => { - for missing_fn in get_missing_assoc_items(&ctx.sema, &impl_def) - .into_iter() - .filter_map(|item| match item { - hir::AssocItem::Function(fn_item) => Some(fn_item), - _ => None, - }) - { - add_function_impl(&trigger, acc, ctx, missing_fn); - } - } - - SyntaxKind::TYPE_ALIAS => { - for missing_fn in get_missing_assoc_items(&ctx.sema, &impl_def) - .into_iter() - .filter_map(|item| match item { - hir::AssocItem::TypeAlias(type_item) => Some(type_item), - _ => None, - }) - { - add_type_alias_impl(&trigger, acc, ctx, missing_fn); - } - } - - SyntaxKind::CONST => { - for missing_fn in get_missing_assoc_items(&ctx.sema, &impl_def) - 
.into_iter() - .filter_map(|item| match item { - hir::AssocItem::Const(const_item) => Some(const_item), - _ => None, - }) - { - add_const_impl(&trigger, acc, ctx, missing_fn); - } - } - - _ => {} - } - } -} - -fn completion_match(ctx: &CompletionContext) -> Option<(SyntaxNode, Impl)> { - let (trigger, impl_def_offset) = ctx.token.ancestors().find_map(|p| match p.kind() { - SyntaxKind::FN | SyntaxKind::TYPE_ALIAS | SyntaxKind::CONST | SyntaxKind::BLOCK_EXPR => { - Some((p, 2)) - } - SyntaxKind::NAME_REF => Some((p, 5)), - _ => None, - })?; - let impl_def = (0..impl_def_offset - 1) - .try_fold(trigger.parent()?, |t, _| t.parent()) - .and_then(ast::Impl::cast)?; - Some((trigger, impl_def)) -} - -fn add_function_impl( - fn_def_node: &SyntaxNode, - acc: &mut Completions, - ctx: &CompletionContext, - func: hir::Function, -) { - let fn_name = func.name(ctx.db).to_string(); - - let label = if !func.params(ctx.db).is_empty() { - format!("fn {}(..)", fn_name) - } else { - format!("fn {}()", fn_name) - }; - - let builder = CompletionItem::new(CompletionKind::Magic, ctx.source_range(), label) - .lookup_by(fn_name) - .set_documentation(func.docs(ctx.db)); - - let completion_kind = if func.has_self_param(ctx.db) { - CompletionItemKind::Method - } else { - CompletionItemKind::Function - }; - let range = TextRange::new(fn_def_node.text_range().start(), ctx.source_range().end()); - - let function_decl = function_declaration(&func.source(ctx.db).value); - match ctx.config.snippet_cap { - Some(cap) => { - let snippet = format!("{} {{\n $0\n}}", function_decl); - builder.snippet_edit(cap, TextEdit::replace(range, snippet)) - } - None => { - let header = format!("{} {{", function_decl); - builder.text_edit(TextEdit::replace(range, header)) - } - } - .kind(completion_kind) - .add_to(acc); -} - -fn add_type_alias_impl( - type_def_node: &SyntaxNode, - acc: &mut Completions, - ctx: &CompletionContext, - type_alias: hir::TypeAlias, -) { - let alias_name = 
type_alias.name(ctx.db).to_string(); - - let snippet = format!("type {} = ", alias_name); - - let range = TextRange::new(type_def_node.text_range().start(), ctx.source_range().end()); - - CompletionItem::new(CompletionKind::Magic, ctx.source_range(), snippet.clone()) - .text_edit(TextEdit::replace(range, snippet)) - .lookup_by(alias_name) - .kind(CompletionItemKind::TypeAlias) - .set_documentation(type_alias.docs(ctx.db)) - .add_to(acc); -} - -fn add_const_impl( - const_def_node: &SyntaxNode, - acc: &mut Completions, - ctx: &CompletionContext, - const_: hir::Const, -) { - let const_name = const_.name(ctx.db).map(|n| n.to_string()); - - if let Some(const_name) = const_name { - let snippet = make_const_compl_syntax(&const_.source(ctx.db).value); - - let range = TextRange::new(const_def_node.text_range().start(), ctx.source_range().end()); - - CompletionItem::new(CompletionKind::Magic, ctx.source_range(), snippet.clone()) - .text_edit(TextEdit::replace(range, snippet)) - .lookup_by(const_name) - .kind(CompletionItemKind::Const) - .set_documentation(const_.docs(ctx.db)) - .add_to(acc); - } -} - -fn make_const_compl_syntax(const_: &ast::Const) -> String { - let const_ = edit::remove_attrs_and_docs(const_); - - let const_start = const_.syntax().text_range().start(); - let const_end = const_.syntax().text_range().end(); - - let start = - const_.syntax().first_child_or_token().map_or(const_start, |f| f.text_range().start()); - - let end = const_ - .syntax() - .children_with_tokens() - .find(|s| s.kind() == T![;] || s.kind() == T![=]) - .map_or(const_end, |f| f.text_range().start()); - - let len = end - start; - let range = TextRange::new(0.into(), len); - - let syntax = const_.syntax().text().slice(range).to_string(); - - format!("{} = ", syntax.trim_end()) -} - -#[cfg(test)] -mod tests { - use expect::{expect, Expect}; - - use crate::completion::{ - test_utils::{check_edit, completion_list}, - CompletionKind, - }; - - fn check(ra_fixture: &str, expect: Expect) { - let 
actual = completion_list(ra_fixture, CompletionKind::Magic); - expect.assert_eq(&actual) - } - - #[test] - fn name_ref_function_type_const() { - check( - r#" -trait Test { - type TestType; - const TEST_CONST: u16; - fn test(); -} -struct T; - -impl Test for T { - t<|> -} -"#, - expect![[" -ct const TEST_CONST: u16 = \n\ -fn fn test() -ta type TestType = \n\ - "]], - ); - } - - #[test] - fn no_nested_fn_completions() { - check( - r" -trait Test { - fn test(); - fn test2(); -} -struct T; - -impl Test for T { - fn test() { - t<|> - } -} -", - expect![[""]], - ); - } - - #[test] - fn name_ref_single_function() { - check_edit( - "test", - r#" -trait Test { - fn test(); -} -struct T; - -impl Test for T { - t<|> -} -"#, - r#" -trait Test { - fn test(); -} -struct T; - -impl Test for T { - fn test() { - $0 -} -} -"#, - ); - } - - #[test] - fn single_function() { - check_edit( - "test", - r#" -trait Test { - fn test(); -} -struct T; - -impl Test for T { - fn t<|> -} -"#, - r#" -trait Test { - fn test(); -} -struct T; - -impl Test for T { - fn test() { - $0 -} -} -"#, - ); - } - - #[test] - fn hide_implemented_fn() { - check( - r#" -trait Test { - fn foo(); - fn foo_bar(); -} -struct T; - -impl Test for T { - fn foo() {} - fn f<|> -} -"#, - expect![[r#" - fn fn foo_bar() - "#]], - ); - } - - #[test] - fn generic_fn() { - check_edit( - "foo", - r#" -trait Test { - fn foo(); -} -struct T; - -impl Test for T { - fn f<|> -} -"#, - r#" -trait Test { - fn foo(); -} -struct T; - -impl Test for T { - fn foo() { - $0 -} -} -"#, - ); - check_edit( - "foo", - r#" -trait Test { - fn foo() where T: Into; -} -struct T; - -impl Test for T { - fn f<|> -} -"#, - r#" -trait Test { - fn foo() where T: Into; -} -struct T; - -impl Test for T { - fn foo() -where T: Into { - $0 -} -} -"#, - ); - } - - #[test] - fn associated_type() { - check_edit( - "SomeType", - r#" -trait Test { - type SomeType; -} - -impl Test for () { - type S<|> -} -"#, - " -trait Test { - type SomeType; -} - -impl Test for 
() { - type SomeType = \n\ -} -", - ); - } - - #[test] - fn associated_const() { - check_edit( - "SOME_CONST", - r#" -trait Test { - const SOME_CONST: u16; -} - -impl Test for () { - const S<|> -} -"#, - " -trait Test { - const SOME_CONST: u16; -} - -impl Test for () { - const SOME_CONST: u16 = \n\ -} -", - ); - - check_edit( - "SOME_CONST", - r#" -trait Test { - const SOME_CONST: u16 = 92; -} - -impl Test for () { - const S<|> -} -"#, - " -trait Test { - const SOME_CONST: u16 = 92; -} - -impl Test for () { - const SOME_CONST: u16 = \n\ -} -", - ); - } -} diff --git a/crates/ra_ide/src/completion/complete_unqualified_path.rs b/crates/ra_ide/src/completion/complete_unqualified_path.rs deleted file mode 100644 index bd9551f358..0000000000 --- a/crates/ra_ide/src/completion/complete_unqualified_path.rs +++ /dev/null @@ -1,658 +0,0 @@ -//! Completion of names from the current scope, e.g. locals and imported items. - -use hir::{Adt, ModuleDef, ScopeDef, Type}; -use ra_syntax::AstNode; -use test_utils::mark; - -use crate::completion::{CompletionContext, Completions}; - -pub(super) fn complete_unqualified_path(acc: &mut Completions, ctx: &CompletionContext) { - if !(ctx.is_trivial_path || ctx.is_pat_binding_or_const) { - return; - } - if ctx.record_lit_syntax.is_some() - || ctx.record_pat_syntax.is_some() - || ctx.attribute_under_caret.is_some() - { - return; - } - - if let Some(ty) = &ctx.expected_type { - complete_enum_variants(acc, ctx, ty); - } - - if ctx.is_pat_binding_or_const { - return; - } - - ctx.scope.process_all_names(&mut |name, res| { - if ctx.use_item_syntax.is_some() { - if let (ScopeDef::Unknown, Some(name_ref)) = (&res, &ctx.name_ref_syntax) { - if name_ref.syntax().text() == name.to_string().as_str() { - mark::hit!(self_fulfilling_completion); - return; - } - } - } - acc.add_resolution(ctx, name.to_string(), &res) - }); -} - -fn complete_enum_variants(acc: &mut Completions, ctx: &CompletionContext, ty: &Type) { - if let Some(Adt::Enum(enum_data)) = 
ty.as_adt() { - let variants = enum_data.variants(ctx.db); - - let module = if let Some(module) = ctx.scope.module() { - // Compute path from the completion site if available. - module - } else { - // Otherwise fall back to the enum's definition site. - enum_data.module(ctx.db) - }; - - for variant in variants { - if let Some(path) = module.find_use_path(ctx.db, ModuleDef::from(variant)) { - // Variants with trivial paths are already added by the existing completion logic, - // so we should avoid adding these twice - if path.segments.len() > 1 { - acc.add_qualified_enum_variant(ctx, variant, path); - } - } - } - } -} - -#[cfg(test)] -mod tests { - use expect::{expect, Expect}; - use test_utils::mark; - - use crate::completion::{ - test_utils::{check_edit, completion_list}, - CompletionKind, - }; - - fn check(ra_fixture: &str, expect: Expect) { - let actual = completion_list(ra_fixture, CompletionKind::Reference); - expect.assert_eq(&actual) - } - - #[test] - fn self_fulfilling_completion() { - mark::check!(self_fulfilling_completion); - check( - r#" -use foo<|> -use std::collections; -"#, - expect![[r#" - ?? 
collections - "#]], - ); - } - - #[test] - fn bind_pat_and_path_ignore_at() { - check( - r#" -enum Enum { A, B } -fn quux(x: Option) { - match x { - None => (), - Some(en<|> @ Enum::A) => (), - } -} -"#, - expect![[""]], - ); - } - - #[test] - fn bind_pat_and_path_ignore_ref() { - check( - r#" -enum Enum { A, B } -fn quux(x: Option) { - match x { - None => (), - Some(ref en<|>) => (), - } -} -"#, - expect![[""]], - ); - } - - #[test] - fn bind_pat_and_path() { - check( - r#" -enum Enum { A, B } -fn quux(x: Option) { - match x { - None => (), - Some(En<|>) => (), - } -} -"#, - expect![[r#" - en Enum - "#]], - ); - } - - #[test] - fn completes_bindings_from_let() { - check( - r#" -fn quux(x: i32) { - let y = 92; - 1 + <|>; - let z = (); -} -"#, - expect![[r#" - fn quux(…) fn quux(x: i32) - bn x i32 - bn y i32 - "#]], - ); - } - - #[test] - fn completes_bindings_from_if_let() { - check( - r#" -fn quux() { - if let Some(x) = foo() { - let y = 92; - }; - if let Some(a) = bar() { - let b = 62; - 1 + <|> - } -} -"#, - expect![[r#" - bn a - bn b i32 - fn quux() fn quux() - "#]], - ); - } - - #[test] - fn completes_bindings_from_for() { - check( - r#" -fn quux() { - for x in &[1, 2, 3] { <|> } -} -"#, - expect![[r#" - fn quux() fn quux() - bn x - "#]], - ); - } - - #[test] - fn completes_if_prefix_is_keyword() { - mark::check!(completes_if_prefix_is_keyword); - check_edit( - "wherewolf", - r#" -fn main() { - let wherewolf = 92; - drop(where<|>) -} -"#, - r#" -fn main() { - let wherewolf = 92; - drop(wherewolf) -} -"#, - ) - } - - #[test] - fn completes_generic_params() { - check( - r#"fn quux() { <|> }"#, - expect![[r#" - tp T - fn quux() fn quux() - "#]], - ); - } - - #[test] - fn completes_generic_params_in_struct() { - check( - r#"struct S { x: <|>}"#, - expect![[r#" - st S<…> - tp Self - tp T - "#]], - ); - } - - #[test] - fn completes_self_in_enum() { - check( - r#"enum X { Y(<|>) }"#, - expect![[r#" - tp Self - en X - "#]], - ); - } - - #[test] - fn 
completes_module_items() { - check( - r#" -struct S; -enum E {} -fn quux() { <|> } -"#, - expect![[r#" - en E - st S - fn quux() fn quux() - "#]], - ); - } - - #[test] - fn completes_extern_prelude() { - check( - r#" -//- /lib.rs -use <|>; - -//- /other_crate/lib.rs -// nothing here -"#, - expect![[r#" - md other_crate - "#]], - ); - } - - #[test] - fn completes_module_items_in_nested_modules() { - check( - r#" -struct Foo; -mod m { - struct Bar; - fn quux() { <|> } -} -"#, - expect![[r#" - st Bar - fn quux() fn quux() - "#]], - ); - } - - #[test] - fn completes_return_type() { - check( - r#" -struct Foo; -fn x() -> <|> -"#, - expect![[r#" - st Foo - fn x() fn x() - "#]], - ); - } - - #[test] - fn dont_show_both_completions_for_shadowing() { - check( - r#" -fn foo() { - let bar = 92; - { - let bar = 62; - drop(<|>) - } -} -"#, - // FIXME: should be only one bar here - expect![[r#" - bn bar i32 - bn bar i32 - fn foo() fn foo() - "#]], - ); - } - - #[test] - fn completes_self_in_methods() { - check( - r#"impl S { fn foo(&self) { <|> } }"#, - expect![[r#" - tp Self - bn self &{unknown} - "#]], - ); - } - - #[test] - fn completes_prelude() { - check( - r#" -//- /main.rs -fn foo() { let x: <|> } - -//- /std/lib.rs -#[prelude_import] -use prelude::*; - -mod prelude { struct Option; } -"#, - expect![[r#" - st Option - fn foo() fn foo() - md std - "#]], - ); - } - - #[test] - fn completes_std_prelude_if_core_is_defined() { - check( - r#" -//- /main.rs -fn foo() { let x: <|> } - -//- /core/lib.rs -#[prelude_import] -use prelude::*; - -mod prelude { struct Option; } - -//- /std/lib.rs -#[prelude_import] -use prelude::*; - -mod prelude { struct String; } -"#, - expect![[r#" - st String - md core - fn foo() fn foo() - md std - "#]], - ); - } - - #[test] - fn completes_macros_as_value() { - check( - r#" -macro_rules! foo { () => {} } - -#[macro_use] -mod m1 { - macro_rules! bar { () => {} } -} - -mod m2 { - macro_rules! nope { () => {} } - - #[macro_export] - macro_rules! 
baz { () => {} } -} - -fn main() { let v = <|> } -"#, - expect![[r##" - ma bar!(…) macro_rules! bar - ma baz!(…) #[macro_export] - macro_rules! baz - ma foo!(…) macro_rules! foo - md m1 - md m2 - fn main() fn main() - "##]], - ); - } - - #[test] - fn completes_both_macro_and_value() { - check( - r#" -macro_rules! foo { () => {} } -fn foo() { <|> } -"#, - expect![[r#" - ma foo!(…) macro_rules! foo - fn foo() fn foo() - "#]], - ); - } - - #[test] - fn completes_macros_as_type() { - check( - r#" -macro_rules! foo { () => {} } -fn main() { let x: <|> } -"#, - expect![[r#" - ma foo!(…) macro_rules! foo - fn main() fn main() - "#]], - ); - } - - #[test] - fn completes_macros_as_stmt() { - check( - r#" -macro_rules! foo { () => {} } -fn main() { <|> } -"#, - expect![[r#" - ma foo!(…) macro_rules! foo - fn main() fn main() - "#]], - ); - } - - #[test] - fn completes_local_item() { - check( - r#" -fn main() { - return f<|>; - fn frobnicate() {} -} -"#, - expect![[r#" - fn frobnicate() fn frobnicate() - fn main() fn main() - "#]], - ); - } - - #[test] - fn completes_in_simple_macro_1() { - check( - r#" -macro_rules! m { ($e:expr) => { $e } } -fn quux(x: i32) { - let y = 92; - m!(<|>); -} -"#, - expect![[r#" - ma m!(…) macro_rules! m - fn quux(…) fn quux(x: i32) - bn x i32 - bn y i32 - "#]], - ); - } - - #[test] - fn completes_in_simple_macro_2() { - check( - r" -macro_rules! m { ($e:expr) => { $e } } -fn quux(x: i32) { - let y = 92; - m!(x<|>); -} -", - expect![[r#" - ma m!(…) macro_rules! m - fn quux(…) fn quux(x: i32) - bn x i32 - bn y i32 - "#]], - ); - } - - #[test] - fn completes_in_simple_macro_without_closing_parens() { - check( - r#" -macro_rules! m { ($e:expr) => { $e } } -fn quux(x: i32) { - let y = 92; - m!(x<|> -} -"#, - expect![[r#" - ma m!(…) macro_rules! m - fn quux(…) fn quux(x: i32) - bn x i32 - bn y i32 - "#]], - ); - } - - #[test] - fn completes_unresolved_uses() { - check( - r#" -use spam::Quux; - -fn main() { <|> } -"#, - expect![[r#" - ?? 
Quux - fn main() fn main() - "#]], - ); - } - #[test] - fn completes_enum_variant_matcharm() { - check( - r#" -enum Foo { Bar, Baz, Quux } - -fn main() { - let foo = Foo::Quux; - match foo { Qu<|> } -} -"#, - expect![[r#" - en Foo - ev Foo::Bar () - ev Foo::Baz () - ev Foo::Quux () - "#]], - ) - } - - #[test] - fn completes_enum_variant_iflet() { - check( - r#" -enum Foo { Bar, Baz, Quux } - -fn main() { - let foo = Foo::Quux; - if let Qu<|> = foo { } -} -"#, - expect![[r#" - en Foo - ev Foo::Bar () - ev Foo::Baz () - ev Foo::Quux () - "#]], - ) - } - - #[test] - fn completes_enum_variant_basic_expr() { - check( - r#" -enum Foo { Bar, Baz, Quux } -fn main() { let foo: Foo = Q<|> } -"#, - expect![[r#" - en Foo - ev Foo::Bar () - ev Foo::Baz () - ev Foo::Quux () - fn main() fn main() - "#]], - ) - } - - #[test] - fn completes_enum_variant_from_module() { - check( - r#" -mod m { pub enum E { V } } -fn f() -> m::E { V<|> } -"#, - expect![[r#" - fn f() fn f() -> m::E - md m - ev m::E::V () - "#]], - ) - } - - #[test] - fn dont_complete_attr() { - check( - r#" -struct Foo; -#[<|>] -fn f() {} -"#, - expect![[""]], - ) - } - - #[test] - fn completes_type_or_trait_in_impl_block() { - check( - r#" -trait MyTrait {} -struct MyStruct {} - -impl My<|> -"#, - expect![[r#" - st MyStruct - tt MyTrait - tp Self - "#]], - ) - } -} diff --git a/crates/ra_ide/src/completion/completion_context.rs b/crates/ra_ide/src/completion/completion_context.rs deleted file mode 100644 index 6b03b30bb5..0000000000 --- a/crates/ra_ide/src/completion/completion_context.rs +++ /dev/null @@ -1,465 +0,0 @@ -//! 
FIXME: write short doc here - -use hir::{Semantics, SemanticsScope, Type}; -use ra_db::SourceDatabase; -use ra_ide_db::RootDatabase; -use ra_syntax::{ - algo::{find_covering_element, find_node_at_offset}, - ast, match_ast, AstNode, NodeOrToken, - SyntaxKind::*, - SyntaxNode, SyntaxToken, TextRange, TextSize, -}; -use ra_text_edit::Indel; - -use super::patterns::{ - has_bind_pat_parent, has_block_expr_parent, has_impl_as_prev_sibling, has_impl_parent, - has_item_list_or_source_file_parent, has_ref_parent, has_trait_as_prev_sibling, - has_trait_parent, if_is_prev, is_in_loop_body, is_match_arm, unsafe_is_prev, -}; -use crate::{call_info::ActiveParameter, completion::CompletionConfig, FilePosition}; -use test_utils::mark; - -/// `CompletionContext` is created early during completion to figure out, where -/// exactly is the cursor, syntax-wise. -#[derive(Debug)] -pub(crate) struct CompletionContext<'a> { - pub(super) sema: Semantics<'a, RootDatabase>, - pub(super) scope: SemanticsScope<'a>, - pub(super) db: &'a RootDatabase, - pub(super) config: &'a CompletionConfig, - pub(super) offset: TextSize, - /// The token before the cursor, in the original file. - pub(super) original_token: SyntaxToken, - /// The token before the cursor, in the macro-expanded file. - pub(super) token: SyntaxToken, - pub(super) krate: Option, - pub(super) expected_type: Option, - pub(super) name_ref_syntax: Option, - pub(super) function_syntax: Option, - pub(super) use_item_syntax: Option, - pub(super) record_lit_syntax: Option, - pub(super) record_pat_syntax: Option, - pub(super) record_field_syntax: Option, - pub(super) impl_def: Option, - /// FIXME: `ActiveParameter` is string-based, which is very very wrong - pub(super) active_parameter: Option, - pub(super) is_param: bool, - /// If a name-binding or reference to a const in a pattern. - /// Irrefutable patterns (like let) are excluded. - pub(super) is_pat_binding_or_const: bool, - /// A single-indent path, like `foo`. 
`::foo` should not be considered a trivial path. - pub(super) is_trivial_path: bool, - /// If not a trivial path, the prefix (qualifier). - pub(super) path_prefix: Option, - pub(super) after_if: bool, - /// `true` if we are a statement or a last expr in the block. - pub(super) can_be_stmt: bool, - /// `true` if we expect an expression at the cursor position. - pub(super) is_expr: bool, - /// Something is typed at the "top" level, in module or impl/trait. - pub(super) is_new_item: bool, - /// The receiver if this is a field or method access, i.e. writing something.<|> - pub(super) dot_receiver: Option, - pub(super) dot_receiver_is_ambiguous_float_literal: bool, - /// If this is a call (method or function) in particular, i.e. the () are already there. - pub(super) is_call: bool, - /// Like `is_call`, but for tuple patterns. - pub(super) is_pattern_call: bool, - /// If this is a macro call, i.e. the () are already there. - pub(super) is_macro_call: bool, - pub(super) is_path_type: bool, - pub(super) has_type_args: bool, - pub(super) attribute_under_caret: Option, - pub(super) unsafe_is_prev: bool, - pub(super) if_is_prev: bool, - pub(super) block_expr_parent: bool, - pub(super) bind_pat_parent: bool, - pub(super) ref_pat_parent: bool, - pub(super) in_loop_body: bool, - pub(super) has_trait_parent: bool, - pub(super) has_impl_parent: bool, - pub(super) trait_as_prev_sibling: bool, - pub(super) impl_as_prev_sibling: bool, - pub(super) is_match_arm: bool, - pub(super) has_item_list_or_source_file_parent: bool, -} - -impl<'a> CompletionContext<'a> { - pub(super) fn new( - db: &'a RootDatabase, - position: FilePosition, - config: &'a CompletionConfig, - ) -> Option> { - let sema = Semantics::new(db); - - let original_file = sema.parse(position.file_id); - - // Insert a fake ident to get a valid parse tree. We will use this file - // to determine context, though the original_file will be used for - // actual completion. 
- let file_with_fake_ident = { - let parse = db.parse(position.file_id); - let edit = Indel::insert(position.offset, "intellijRulezz".to_string()); - parse.reparse(&edit).tree() - }; - let fake_ident_token = - file_with_fake_ident.syntax().token_at_offset(position.offset).right_biased().unwrap(); - - let krate = sema.to_module_def(position.file_id).map(|m| m.krate()); - let original_token = - original_file.syntax().token_at_offset(position.offset).left_biased()?; - let token = sema.descend_into_macros(original_token.clone()); - let scope = sema.scope_at_offset(&token.parent(), position.offset); - let mut ctx = CompletionContext { - sema, - scope, - db, - config, - original_token, - token, - offset: position.offset, - krate, - expected_type: None, - name_ref_syntax: None, - function_syntax: None, - use_item_syntax: None, - record_lit_syntax: None, - record_pat_syntax: None, - record_field_syntax: None, - impl_def: None, - active_parameter: ActiveParameter::at(db, position), - is_param: false, - is_pat_binding_or_const: false, - is_trivial_path: false, - path_prefix: None, - after_if: false, - can_be_stmt: false, - is_expr: false, - is_new_item: false, - dot_receiver: None, - is_call: false, - is_pattern_call: false, - is_macro_call: false, - is_path_type: false, - has_type_args: false, - dot_receiver_is_ambiguous_float_literal: false, - attribute_under_caret: None, - unsafe_is_prev: false, - in_loop_body: false, - ref_pat_parent: false, - bind_pat_parent: false, - block_expr_parent: false, - has_trait_parent: false, - has_impl_parent: false, - trait_as_prev_sibling: false, - impl_as_prev_sibling: false, - if_is_prev: false, - is_match_arm: false, - has_item_list_or_source_file_parent: false, - }; - - let mut original_file = original_file.syntax().clone(); - let mut hypothetical_file = file_with_fake_ident.syntax().clone(); - let mut offset = position.offset; - let mut fake_ident_token = fake_ident_token; - - // Are we inside a macro call? 
- while let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = ( - find_node_at_offset::(&original_file, offset), - find_node_at_offset::(&hypothetical_file, offset), - ) { - if actual_macro_call.path().as_ref().map(|s| s.syntax().text()) - != macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text()) - { - break; - } - let hypothetical_args = match macro_call_with_fake_ident.token_tree() { - Some(tt) => tt, - None => break, - }; - if let (Some(actual_expansion), Some(hypothetical_expansion)) = ( - ctx.sema.expand(&actual_macro_call), - ctx.sema.expand_hypothetical( - &actual_macro_call, - &hypothetical_args, - fake_ident_token, - ), - ) { - let new_offset = hypothetical_expansion.1.text_range().start(); - if new_offset > actual_expansion.text_range().end() { - break; - } - original_file = actual_expansion; - hypothetical_file = hypothetical_expansion.0; - fake_ident_token = hypothetical_expansion.1; - offset = new_offset; - } else { - break; - } - } - ctx.fill_keyword_patterns(&hypothetical_file, offset); - ctx.fill(&original_file, hypothetical_file, offset); - Some(ctx) - } - - // The range of the identifier that is being completed. 
- pub(crate) fn source_range(&self) -> TextRange { - // check kind of macro-expanded token, but use range of original token - if self.token.kind() == IDENT || self.token.kind().is_keyword() { - mark::hit!(completes_if_prefix_is_keyword); - self.original_token.text_range() - } else { - TextRange::empty(self.offset) - } - } - - fn fill_keyword_patterns(&mut self, file_with_fake_ident: &SyntaxNode, offset: TextSize) { - let fake_ident_token = file_with_fake_ident.token_at_offset(offset).right_biased().unwrap(); - let syntax_element = NodeOrToken::Token(fake_ident_token); - self.block_expr_parent = has_block_expr_parent(syntax_element.clone()); - self.unsafe_is_prev = unsafe_is_prev(syntax_element.clone()); - self.if_is_prev = if_is_prev(syntax_element.clone()); - self.bind_pat_parent = has_bind_pat_parent(syntax_element.clone()); - self.ref_pat_parent = has_ref_parent(syntax_element.clone()); - self.in_loop_body = is_in_loop_body(syntax_element.clone()); - self.has_trait_parent = has_trait_parent(syntax_element.clone()); - self.has_impl_parent = has_impl_parent(syntax_element.clone()); - self.impl_as_prev_sibling = has_impl_as_prev_sibling(syntax_element.clone()); - self.trait_as_prev_sibling = has_trait_as_prev_sibling(syntax_element.clone()); - self.is_match_arm = is_match_arm(syntax_element.clone()); - self.has_item_list_or_source_file_parent = - has_item_list_or_source_file_parent(syntax_element); - } - - fn fill( - &mut self, - original_file: &SyntaxNode, - file_with_fake_ident: SyntaxNode, - offset: TextSize, - ) { - // FIXME: this is wrong in at least two cases: - // * when there's no token `foo(<|>)` - // * when there is a token, but it happens to have type of it's own - self.expected_type = self - .token - .ancestors() - .find_map(|node| { - let ty = match_ast! 
{ - match node { - ast::Pat(it) => self.sema.type_of_pat(&it), - ast::Expr(it) => self.sema.type_of_expr(&it), - _ => return None, - } - }; - Some(ty) - }) - .flatten(); - self.attribute_under_caret = find_node_at_offset(&file_with_fake_ident, offset); - - // First, let's try to complete a reference to some declaration. - if let Some(name_ref) = find_node_at_offset::(&file_with_fake_ident, offset) { - // Special case, `trait T { fn foo(i_am_a_name_ref) {} }`. - // See RFC#1685. - if is_node::(name_ref.syntax()) { - self.is_param = true; - return; - } - // FIXME: remove this (V) duplication and make the check more precise - if name_ref.syntax().ancestors().find_map(ast::RecordPatFieldList::cast).is_some() { - self.record_pat_syntax = - self.sema.find_node_at_offset_with_macros(&original_file, offset); - } - self.classify_name_ref(original_file, name_ref, offset); - } - - // Otherwise, see if this is a declaration. We can use heuristics to - // suggest declaration names, see `CompletionKind::Magic`. 
- if let Some(name) = find_node_at_offset::(&file_with_fake_ident, offset) { - if let Some(bind_pat) = name.syntax().ancestors().find_map(ast::IdentPat::cast) { - self.is_pat_binding_or_const = true; - if bind_pat.at_token().is_some() - || bind_pat.ref_token().is_some() - || bind_pat.mut_token().is_some() - { - self.is_pat_binding_or_const = false; - } - if bind_pat.syntax().parent().and_then(ast::RecordPatFieldList::cast).is_some() { - self.is_pat_binding_or_const = false; - } - if let Some(let_stmt) = bind_pat.syntax().ancestors().find_map(ast::LetStmt::cast) { - if let Some(pat) = let_stmt.pat() { - if pat.syntax().text_range().contains_range(bind_pat.syntax().text_range()) - { - self.is_pat_binding_or_const = false; - } - } - } - } - if is_node::(name.syntax()) { - self.is_param = true; - return; - } - // FIXME: remove this (^) duplication and make the check more precise - if name.syntax().ancestors().find_map(ast::RecordPatFieldList::cast).is_some() { - self.record_pat_syntax = - self.sema.find_node_at_offset_with_macros(&original_file, offset); - } - } - } - - fn classify_name_ref( - &mut self, - original_file: &SyntaxNode, - name_ref: ast::NameRef, - offset: TextSize, - ) { - self.name_ref_syntax = - find_node_at_offset(&original_file, name_ref.syntax().text_range().start()); - let name_range = name_ref.syntax().text_range(); - if ast::RecordExprField::for_field_name(&name_ref).is_some() { - self.record_lit_syntax = - self.sema.find_node_at_offset_with_macros(&original_file, offset); - } - - self.impl_def = self - .sema - .ancestors_with_macros(self.token.parent()) - .take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE) - .find_map(ast::Impl::cast); - - let top_node = name_ref - .syntax() - .ancestors() - .take_while(|it| it.text_range() == name_range) - .last() - .unwrap(); - - match top_node.parent().map(|it| it.kind()) { - Some(SOURCE_FILE) | Some(ITEM_LIST) => { - self.is_new_item = true; - return; - } - _ => (), - } - - self.use_item_syntax 
= - self.sema.ancestors_with_macros(self.token.parent()).find_map(ast::Use::cast); - - self.function_syntax = self - .sema - .ancestors_with_macros(self.token.parent()) - .take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE) - .find_map(ast::Fn::cast); - - self.record_field_syntax = self - .sema - .ancestors_with_macros(self.token.parent()) - .take_while(|it| { - it.kind() != SOURCE_FILE && it.kind() != MODULE && it.kind() != CALL_EXPR - }) - .find_map(ast::RecordExprField::cast); - - let parent = match name_ref.syntax().parent() { - Some(it) => it, - None => return, - }; - - if let Some(segment) = ast::PathSegment::cast(parent.clone()) { - let path = segment.parent_path(); - self.is_call = path - .syntax() - .parent() - .and_then(ast::PathExpr::cast) - .and_then(|it| it.syntax().parent().and_then(ast::CallExpr::cast)) - .is_some(); - self.is_macro_call = path.syntax().parent().and_then(ast::MacroCall::cast).is_some(); - self.is_pattern_call = - path.syntax().parent().and_then(ast::TupleStructPat::cast).is_some(); - - self.is_path_type = path.syntax().parent().and_then(ast::PathType::cast).is_some(); - self.has_type_args = segment.generic_arg_list().is_some(); - - #[allow(deprecated)] - if let Some(path) = hir::Path::from_ast(path.clone()) { - if let Some(path_prefix) = path.qualifier() { - self.path_prefix = Some(path_prefix); - return; - } - } - - if path.qualifier().is_none() { - self.is_trivial_path = true; - - // Find either enclosing expr statement (thing with `;`) or a - // block. If block, check that we are the last expr. 
- self.can_be_stmt = name_ref - .syntax() - .ancestors() - .find_map(|node| { - if let Some(stmt) = ast::ExprStmt::cast(node.clone()) { - return Some( - stmt.syntax().text_range() == name_ref.syntax().text_range(), - ); - } - if let Some(block) = ast::BlockExpr::cast(node) { - return Some( - block.expr().map(|e| e.syntax().text_range()) - == Some(name_ref.syntax().text_range()), - ); - } - None - }) - .unwrap_or(false); - self.is_expr = path.syntax().parent().and_then(ast::PathExpr::cast).is_some(); - - if let Some(off) = name_ref.syntax().text_range().start().checked_sub(2.into()) { - if let Some(if_expr) = - self.sema.find_node_at_offset_with_macros::(original_file, off) - { - if if_expr.syntax().text_range().end() - < name_ref.syntax().text_range().start() - { - self.after_if = true; - } - } - } - } - } - if let Some(field_expr) = ast::FieldExpr::cast(parent.clone()) { - // The receiver comes before the point of insertion of the fake - // ident, so it should have the same range in the non-modified file - self.dot_receiver = field_expr - .expr() - .map(|e| e.syntax().text_range()) - .and_then(|r| find_node_with_range(original_file, r)); - self.dot_receiver_is_ambiguous_float_literal = - if let Some(ast::Expr::Literal(l)) = &self.dot_receiver { - match l.kind() { - ast::LiteralKind::FloatNumber { .. 
} => l.token().text().ends_with('.'), - _ => false, - } - } else { - false - } - } - if let Some(method_call_expr) = ast::MethodCallExpr::cast(parent) { - // As above - self.dot_receiver = method_call_expr - .expr() - .map(|e| e.syntax().text_range()) - .and_then(|r| find_node_with_range(original_file, r)); - self.is_call = true; - } - } -} - -fn find_node_with_range(syntax: &SyntaxNode, range: TextRange) -> Option { - find_covering_element(syntax, range).ancestors().find_map(N::cast) -} - -fn is_node(node: &SyntaxNode) -> bool { - match node.ancestors().find_map(N::cast) { - None => false, - Some(n) => n.syntax().text_range() == node.text_range(), - } -} diff --git a/crates/ra_ide/src/completion/completion_item.rs b/crates/ra_ide/src/completion/completion_item.rs deleted file mode 100644 index 7bdda316c4..0000000000 --- a/crates/ra_ide/src/completion/completion_item.rs +++ /dev/null @@ -1,384 +0,0 @@ -//! FIXME: write short doc here - -use std::fmt; - -use hir::Documentation; -use ra_syntax::TextRange; -use ra_text_edit::TextEdit; - -use crate::completion::completion_config::SnippetCap; - -/// `CompletionItem` describes a single completion variant in the editor pop-up. -/// It is basically a POD with various properties. To construct a -/// `CompletionItem`, use `new` method and the `Builder` struct. -pub struct CompletionItem { - /// Used only internally in tests, to check only specific kind of - /// completion (postfix, keyword, reference, etc). - #[allow(unused)] - pub(crate) completion_kind: CompletionKind, - /// Label in the completion pop up which identifies completion. - label: String, - /// Range of identifier that is being completed. - /// - /// It should be used primarily for UI, but we also use this to convert - /// genetic TextEdit into LSP's completion edit (see conv.rs). - /// - /// `source_range` must contain the completion offset. 
`insert_text` should - /// start with what `source_range` points to, or VSCode will filter out the - /// completion silently. - source_range: TextRange, - /// What happens when user selects this item. - /// - /// Typically, replaces `source_range` with new identifier. - text_edit: TextEdit, - insert_text_format: InsertTextFormat, - - /// What item (struct, function, etc) are we completing. - kind: Option, - - /// Lookup is used to check if completion item indeed can complete current - /// ident. - /// - /// That is, in `foo.bar<|>` lookup of `abracadabra` will be accepted (it - /// contains `bar` sub sequence), and `quux` will rejected. - lookup: Option, - - /// Additional info to show in the UI pop up. - detail: Option, - documentation: Option, - - /// Whether this item is marked as deprecated - deprecated: bool, - - /// If completing a function call, ask the editor to show parameter popup - /// after completion. - trigger_call_info: bool, - - /// Score is useful to pre select or display in better order completion items - score: Option, -} - -// We use custom debug for CompletionItem to make snapshot tests more readable. 
-impl fmt::Debug for CompletionItem { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let mut s = f.debug_struct("CompletionItem"); - s.field("label", &self.label()).field("source_range", &self.source_range()); - if self.text_edit().len() == 1 { - let atom = &self.text_edit().iter().next().unwrap(); - s.field("delete", &atom.delete); - s.field("insert", &atom.insert); - } else { - s.field("text_edit", &self.text_edit); - } - if let Some(kind) = self.kind().as_ref() { - s.field("kind", kind); - } - if self.lookup() != self.label() { - s.field("lookup", &self.lookup()); - } - if let Some(detail) = self.detail() { - s.field("detail", &detail); - } - if let Some(documentation) = self.documentation() { - s.field("documentation", &documentation); - } - if self.deprecated { - s.field("deprecated", &true); - } - if let Some(score) = &self.score { - s.field("score", score); - } - if self.trigger_call_info { - s.field("trigger_call_info", &true); - } - s.finish() - } -} - -#[derive(Debug, Clone, Copy, Ord, PartialOrd, Eq, PartialEq)] -pub enum CompletionScore { - /// If only type match - TypeMatch, - /// If type and name match - TypeAndNameMatch, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum CompletionItemKind { - Snippet, - Keyword, - Module, - Function, - BuiltinType, - Struct, - Enum, - EnumVariant, - Binding, - Field, - Static, - Const, - Trait, - TypeAlias, - Method, - TypeParam, - Macro, - Attribute, - UnresolvedReference, -} - -impl CompletionItemKind { - #[cfg(test)] - pub(crate) fn tag(&self) -> &'static str { - match self { - CompletionItemKind::Attribute => "at", - CompletionItemKind::Binding => "bn", - CompletionItemKind::BuiltinType => "bt", - CompletionItemKind::Const => "ct", - CompletionItemKind::Enum => "en", - CompletionItemKind::EnumVariant => "ev", - CompletionItemKind::Field => "fd", - CompletionItemKind::Function => "fn", - CompletionItemKind::Keyword => "kw", - CompletionItemKind::Macro => "ma", - CompletionItemKind::Method => 
"me", - CompletionItemKind::Module => "md", - CompletionItemKind::Snippet => "sn", - CompletionItemKind::Static => "sc", - CompletionItemKind::Struct => "st", - CompletionItemKind::Trait => "tt", - CompletionItemKind::TypeAlias => "ta", - CompletionItemKind::TypeParam => "tp", - CompletionItemKind::UnresolvedReference => "??", - } - } -} - -#[derive(Debug, PartialEq, Eq, Copy, Clone)] -pub(crate) enum CompletionKind { - /// Parser-based keyword completion. - Keyword, - /// Your usual "complete all valid identifiers". - Reference, - /// "Secret sauce" completions. - Magic, - Snippet, - Postfix, - BuiltinType, - Attribute, -} - -#[derive(Debug, PartialEq, Eq, Copy, Clone)] -pub enum InsertTextFormat { - PlainText, - Snippet, -} - -impl CompletionItem { - pub(crate) fn new( - completion_kind: CompletionKind, - source_range: TextRange, - label: impl Into, - ) -> Builder { - let label = label.into(); - Builder { - source_range, - completion_kind, - label, - insert_text: None, - insert_text_format: InsertTextFormat::PlainText, - detail: None, - documentation: None, - lookup: None, - kind: None, - text_edit: None, - deprecated: None, - trigger_call_info: None, - score: None, - } - } - /// What user sees in pop-up in the UI. - pub fn label(&self) -> &str { - &self.label - } - pub fn source_range(&self) -> TextRange { - self.source_range - } - - pub fn insert_text_format(&self) -> InsertTextFormat { - self.insert_text_format - } - - pub fn text_edit(&self) -> &TextEdit { - &self.text_edit - } - - /// Short one-line additional information, like a type - pub fn detail(&self) -> Option<&str> { - self.detail.as_deref() - } - /// A doc-comment - pub fn documentation(&self) -> Option { - self.documentation.clone() - } - /// What string is used for filtering. 
- pub fn lookup(&self) -> &str { - self.lookup.as_deref().unwrap_or(&self.label) - } - - pub fn kind(&self) -> Option { - self.kind - } - - pub fn deprecated(&self) -> bool { - self.deprecated - } - - pub fn score(&self) -> Option { - self.score - } - - pub fn trigger_call_info(&self) -> bool { - self.trigger_call_info - } -} - -/// A helper to make `CompletionItem`s. -#[must_use] -pub(crate) struct Builder { - source_range: TextRange, - completion_kind: CompletionKind, - label: String, - insert_text: Option, - insert_text_format: InsertTextFormat, - detail: Option, - documentation: Option, - lookup: Option, - kind: Option, - text_edit: Option, - deprecated: Option, - trigger_call_info: Option, - score: Option, -} - -impl Builder { - pub(crate) fn add_to(self, acc: &mut Completions) { - acc.add(self.build()) - } - - pub(crate) fn build(self) -> CompletionItem { - let label = self.label; - let text_edit = match self.text_edit { - Some(it) => it, - None => TextEdit::replace( - self.source_range, - self.insert_text.unwrap_or_else(|| label.clone()), - ), - }; - - CompletionItem { - source_range: self.source_range, - label, - insert_text_format: self.insert_text_format, - text_edit, - detail: self.detail, - documentation: self.documentation, - lookup: self.lookup, - kind: self.kind, - completion_kind: self.completion_kind, - deprecated: self.deprecated.unwrap_or(false), - trigger_call_info: self.trigger_call_info.unwrap_or(false), - score: self.score, - } - } - pub(crate) fn lookup_by(mut self, lookup: impl Into) -> Builder { - self.lookup = Some(lookup.into()); - self - } - pub(crate) fn label(mut self, label: impl Into) -> Builder { - self.label = label.into(); - self - } - pub(crate) fn insert_text(mut self, insert_text: impl Into) -> Builder { - self.insert_text = Some(insert_text.into()); - self - } - pub(crate) fn insert_snippet( - mut self, - _cap: SnippetCap, - snippet: impl Into, - ) -> Builder { - self.insert_text_format = InsertTextFormat::Snippet; - 
self.insert_text(snippet) - } - pub(crate) fn kind(mut self, kind: CompletionItemKind) -> Builder { - self.kind = Some(kind); - self - } - pub(crate) fn text_edit(mut self, edit: TextEdit) -> Builder { - self.text_edit = Some(edit); - self - } - pub(crate) fn snippet_edit(mut self, _cap: SnippetCap, edit: TextEdit) -> Builder { - self.insert_text_format = InsertTextFormat::Snippet; - self.text_edit(edit) - } - #[allow(unused)] - pub(crate) fn detail(self, detail: impl Into) -> Builder { - self.set_detail(Some(detail)) - } - pub(crate) fn set_detail(mut self, detail: Option>) -> Builder { - self.detail = detail.map(Into::into); - self - } - #[allow(unused)] - pub(crate) fn documentation(self, docs: Documentation) -> Builder { - self.set_documentation(Some(docs)) - } - pub(crate) fn set_documentation(mut self, docs: Option) -> Builder { - self.documentation = docs.map(Into::into); - self - } - pub(crate) fn set_deprecated(mut self, deprecated: bool) -> Builder { - self.deprecated = Some(deprecated); - self - } - pub(crate) fn set_score(mut self, score: CompletionScore) -> Builder { - self.score = Some(score); - self - } - pub(crate) fn trigger_call_info(mut self) -> Builder { - self.trigger_call_info = Some(true); - self - } -} - -impl<'a> Into for Builder { - fn into(self) -> CompletionItem { - self.build() - } -} - -/// Represents an in-progress set of completions being built. 
-#[derive(Debug, Default)] -pub(crate) struct Completions { - buf: Vec, -} - -impl Completions { - pub(crate) fn add(&mut self, item: impl Into) { - self.buf.push(item.into()) - } - pub(crate) fn add_all(&mut self, items: I) - where - I: IntoIterator, - I::Item: Into, - { - items.into_iter().for_each(|item| self.add(item.into())) - } -} - -impl Into> for Completions { - fn into(self) -> Vec { - self.buf - } -} diff --git a/crates/ra_ide/src/completion/patterns.rs b/crates/ra_ide/src/completion/patterns.rs deleted file mode 100644 index 7c4feff6d1..0000000000 --- a/crates/ra_ide/src/completion/patterns.rs +++ /dev/null @@ -1,194 +0,0 @@ -//! Patterns telling us certain facts about current syntax element, they are used in completion context - -use ra_syntax::{ - algo::non_trivia_sibling, - ast::{self, LoopBodyOwner}, - match_ast, AstNode, Direction, NodeOrToken, SyntaxElement, - SyntaxKind::*, - SyntaxNode, SyntaxToken, -}; - -#[cfg(test)] -use crate::completion::test_utils::check_pattern_is_applicable; - -pub(crate) fn has_trait_parent(element: SyntaxElement) -> bool { - not_same_range_ancestor(element) - .filter(|it| it.kind() == ASSOC_ITEM_LIST) - .and_then(|it| it.parent()) - .filter(|it| it.kind() == TRAIT) - .is_some() -} -#[test] -fn test_has_trait_parent() { - check_pattern_is_applicable(r"trait A { f<|> }", has_trait_parent); -} - -pub(crate) fn has_impl_parent(element: SyntaxElement) -> bool { - not_same_range_ancestor(element) - .filter(|it| it.kind() == ASSOC_ITEM_LIST) - .and_then(|it| it.parent()) - .filter(|it| it.kind() == IMPL) - .is_some() -} -#[test] -fn test_has_impl_parent() { - check_pattern_is_applicable(r"impl A { f<|> }", has_impl_parent); -} - -pub(crate) fn has_block_expr_parent(element: SyntaxElement) -> bool { - not_same_range_ancestor(element).filter(|it| it.kind() == BLOCK_EXPR).is_some() -} -#[test] -fn test_has_block_expr_parent() { - check_pattern_is_applicable(r"fn my_fn() { let a = 2; f<|> }", has_block_expr_parent); -} - 
-pub(crate) fn has_bind_pat_parent(element: SyntaxElement) -> bool { - element.ancestors().find(|it| it.kind() == IDENT_PAT).is_some() -} -#[test] -fn test_has_bind_pat_parent() { - check_pattern_is_applicable(r"fn my_fn(m<|>) {}", has_bind_pat_parent); - check_pattern_is_applicable(r"fn my_fn() { let m<|> }", has_bind_pat_parent); -} - -pub(crate) fn has_ref_parent(element: SyntaxElement) -> bool { - not_same_range_ancestor(element) - .filter(|it| it.kind() == REF_PAT || it.kind() == REF_EXPR) - .is_some() -} -#[test] -fn test_has_ref_parent() { - check_pattern_is_applicable(r"fn my_fn(&m<|>) {}", has_ref_parent); - check_pattern_is_applicable(r"fn my() { let &m<|> }", has_ref_parent); -} - -pub(crate) fn has_item_list_or_source_file_parent(element: SyntaxElement) -> bool { - let ancestor = not_same_range_ancestor(element); - if !ancestor.is_some() { - return true; - } - ancestor.filter(|it| it.kind() == SOURCE_FILE || it.kind() == ITEM_LIST).is_some() -} -#[test] -fn test_has_item_list_or_source_file_parent() { - check_pattern_is_applicable(r"i<|>", has_item_list_or_source_file_parent); - check_pattern_is_applicable(r"mod foo { f<|> }", has_item_list_or_source_file_parent); -} - -pub(crate) fn is_match_arm(element: SyntaxElement) -> bool { - not_same_range_ancestor(element.clone()).filter(|it| it.kind() == MATCH_ARM).is_some() - && previous_sibling_or_ancestor_sibling(element) - .and_then(|it| it.into_token()) - .filter(|it| it.kind() == FAT_ARROW) - .is_some() -} -#[test] -fn test_is_match_arm() { - check_pattern_is_applicable(r"fn my_fn() { match () { () => m<|> } }", is_match_arm); -} - -pub(crate) fn unsafe_is_prev(element: SyntaxElement) -> bool { - element - .into_token() - .and_then(|it| previous_non_trivia_token(it)) - .filter(|it| it.kind() == UNSAFE_KW) - .is_some() -} -#[test] -fn test_unsafe_is_prev() { - check_pattern_is_applicable(r"unsafe i<|>", unsafe_is_prev); -} - -pub(crate) fn if_is_prev(element: SyntaxElement) -> bool { - element - 
.into_token() - .and_then(|it| previous_non_trivia_token(it)) - .filter(|it| it.kind() == IF_KW) - .is_some() -} -#[test] -fn test_if_is_prev() { - check_pattern_is_applicable(r"if l<|>", if_is_prev); -} - -pub(crate) fn has_trait_as_prev_sibling(element: SyntaxElement) -> bool { - previous_sibling_or_ancestor_sibling(element).filter(|it| it.kind() == TRAIT).is_some() -} -#[test] -fn test_has_trait_as_prev_sibling() { - check_pattern_is_applicable(r"trait A w<|> {}", has_trait_as_prev_sibling); -} - -pub(crate) fn has_impl_as_prev_sibling(element: SyntaxElement) -> bool { - previous_sibling_or_ancestor_sibling(element).filter(|it| it.kind() == IMPL).is_some() -} -#[test] -fn test_has_impl_as_prev_sibling() { - check_pattern_is_applicable(r"impl A w<|> {}", has_impl_as_prev_sibling); -} - -pub(crate) fn is_in_loop_body(element: SyntaxElement) -> bool { - let leaf = match element { - NodeOrToken::Node(node) => node, - NodeOrToken::Token(token) => token.parent(), - }; - for node in leaf.ancestors() { - if node.kind() == FN || node.kind() == CLOSURE_EXPR { - break; - } - let loop_body = match_ast! 
{ - match node { - ast::ForExpr(it) => it.loop_body(), - ast::WhileExpr(it) => it.loop_body(), - ast::LoopExpr(it) => it.loop_body(), - _ => None, - } - }; - if let Some(body) = loop_body { - if body.syntax().text_range().contains_range(leaf.text_range()) { - return true; - } - } - } - false -} - -fn not_same_range_ancestor(element: SyntaxElement) -> Option { - element - .ancestors() - .take_while(|it| it.text_range() == element.text_range()) - .last() - .and_then(|it| it.parent()) -} - -fn previous_non_trivia_token(token: SyntaxToken) -> Option { - let mut token = token.prev_token(); - while let Some(inner) = token.clone() { - if !inner.kind().is_trivia() { - return Some(inner); - } else { - token = inner.prev_token(); - } - } - None -} - -fn previous_sibling_or_ancestor_sibling(element: SyntaxElement) -> Option { - let token_sibling = non_trivia_sibling(element.clone(), Direction::Prev); - if let Some(sibling) = token_sibling { - Some(sibling) - } else { - // if not trying to find first ancestor which has such a sibling - let node = match element { - NodeOrToken::Node(node) => node, - NodeOrToken::Token(token) => token.parent(), - }; - let range = node.text_range(); - let top_node = node.ancestors().take_while(|it| it.text_range() == range).last()?; - let prev_sibling_node = top_node.ancestors().find(|it| { - non_trivia_sibling(NodeOrToken::Node(it.to_owned()), Direction::Prev).is_some() - })?; - non_trivia_sibling(NodeOrToken::Node(prev_sibling_node), Direction::Prev) - } -} diff --git a/crates/ra_ide/src/completion/presentation.rs b/crates/ra_ide/src/completion/presentation.rs deleted file mode 100644 index 9a94ff4767..0000000000 --- a/crates/ra_ide/src/completion/presentation.rs +++ /dev/null @@ -1,1230 +0,0 @@ -//! This modules takes care of rendering various definitions as completion items. -//! It also handles scoring (sorting) completions. 
- -use hir::{Docs, HasAttrs, HasSource, HirDisplay, ModPath, ScopeDef, StructKind, Type}; -use ra_syntax::ast::NameOwner; -use stdx::SepBy; -use test_utils::mark; - -use crate::{ - completion::{ - completion_item::Builder, CompletionContext, CompletionItem, CompletionItemKind, - CompletionKind, Completions, - }, - display::{const_label, function_declaration, macro_label, type_label}, - CompletionScore, RootDatabase, -}; - -impl Completions { - pub(crate) fn add_field(&mut self, ctx: &CompletionContext, field: hir::Field, ty: &Type) { - let is_deprecated = is_deprecated(field, ctx.db); - let name = field.name(ctx.db); - let mut completion_item = - CompletionItem::new(CompletionKind::Reference, ctx.source_range(), name.to_string()) - .kind(CompletionItemKind::Field) - .detail(ty.display(ctx.db).to_string()) - .set_documentation(field.docs(ctx.db)) - .set_deprecated(is_deprecated); - - if let Some(score) = compute_score(ctx, &ty, &name.to_string()) { - completion_item = completion_item.set_score(score); - } - - completion_item.add_to(self); - } - - pub(crate) fn add_tuple_field(&mut self, ctx: &CompletionContext, field: usize, ty: &Type) { - CompletionItem::new(CompletionKind::Reference, ctx.source_range(), field.to_string()) - .kind(CompletionItemKind::Field) - .detail(ty.display(ctx.db).to_string()) - .add_to(self); - } - - pub(crate) fn add_resolution( - &mut self, - ctx: &CompletionContext, - local_name: String, - resolution: &ScopeDef, - ) { - use hir::ModuleDef::*; - - let completion_kind = match resolution { - ScopeDef::ModuleDef(BuiltinType(..)) => CompletionKind::BuiltinType, - _ => CompletionKind::Reference, - }; - - let kind = match resolution { - ScopeDef::ModuleDef(Module(..)) => CompletionItemKind::Module, - ScopeDef::ModuleDef(Function(func)) => { - return self.add_function(ctx, *func, Some(local_name)); - } - ScopeDef::ModuleDef(Adt(hir::Adt::Struct(_))) => CompletionItemKind::Struct, - // FIXME: add CompletionItemKind::Union - 
ScopeDef::ModuleDef(Adt(hir::Adt::Union(_))) => CompletionItemKind::Struct, - ScopeDef::ModuleDef(Adt(hir::Adt::Enum(_))) => CompletionItemKind::Enum, - - ScopeDef::ModuleDef(EnumVariant(var)) => { - return self.add_enum_variant(ctx, *var, Some(local_name)); - } - ScopeDef::ModuleDef(Const(..)) => CompletionItemKind::Const, - ScopeDef::ModuleDef(Static(..)) => CompletionItemKind::Static, - ScopeDef::ModuleDef(Trait(..)) => CompletionItemKind::Trait, - ScopeDef::ModuleDef(TypeAlias(..)) => CompletionItemKind::TypeAlias, - ScopeDef::ModuleDef(BuiltinType(..)) => CompletionItemKind::BuiltinType, - ScopeDef::GenericParam(..) => CompletionItemKind::TypeParam, - ScopeDef::Local(..) => CompletionItemKind::Binding, - // (does this need its own kind?) - ScopeDef::AdtSelfType(..) | ScopeDef::ImplSelfType(..) => CompletionItemKind::TypeParam, - ScopeDef::MacroDef(mac) => { - return self.add_macro(ctx, Some(local_name), *mac); - } - ScopeDef::Unknown => { - return self.add( - CompletionItem::new(CompletionKind::Reference, ctx.source_range(), local_name) - .kind(CompletionItemKind::UnresolvedReference), - ); - } - }; - - let docs = match resolution { - ScopeDef::ModuleDef(Module(it)) => it.docs(ctx.db), - ScopeDef::ModuleDef(Adt(it)) => it.docs(ctx.db), - ScopeDef::ModuleDef(EnumVariant(it)) => it.docs(ctx.db), - ScopeDef::ModuleDef(Const(it)) => it.docs(ctx.db), - ScopeDef::ModuleDef(Static(it)) => it.docs(ctx.db), - ScopeDef::ModuleDef(Trait(it)) => it.docs(ctx.db), - ScopeDef::ModuleDef(TypeAlias(it)) => it.docs(ctx.db), - _ => None, - }; - - let mut completion_item = - CompletionItem::new(completion_kind, ctx.source_range(), local_name.clone()); - if let ScopeDef::Local(local) = resolution { - let ty = local.ty(ctx.db); - if !ty.is_unknown() { - completion_item = completion_item.detail(ty.display(ctx.db).to_string()); - } - }; - - if let ScopeDef::Local(local) = resolution { - if let Some(score) = compute_score(ctx, &local.ty(ctx.db), &local_name) { - completion_item = 
completion_item.set_score(score); - } - } - - // Add `<>` for generic types - if ctx.is_path_type && !ctx.has_type_args && ctx.config.add_call_parenthesis { - if let Some(cap) = ctx.config.snippet_cap { - let has_non_default_type_params = match resolution { - ScopeDef::ModuleDef(Adt(it)) => it.has_non_default_type_params(ctx.db), - ScopeDef::ModuleDef(TypeAlias(it)) => it.has_non_default_type_params(ctx.db), - _ => false, - }; - if has_non_default_type_params { - mark::hit!(inserts_angle_brackets_for_generics); - completion_item = completion_item - .lookup_by(local_name.clone()) - .label(format!("{}<…>", local_name)) - .insert_snippet(cap, format!("{}<$0>", local_name)); - } - } - } - - completion_item.kind(kind).set_documentation(docs).add_to(self) - } - - pub(crate) fn add_macro( - &mut self, - ctx: &CompletionContext, - name: Option, - macro_: hir::MacroDef, - ) { - // FIXME: Currently proc-macro do not have ast-node, - // such that it does not have source - if macro_.is_proc_macro() { - return; - } - - let name = match name { - Some(it) => it, - None => return, - }; - - let ast_node = macro_.source(ctx.db).value; - let detail = macro_label(&ast_node); - - let docs = macro_.docs(ctx.db); - - let mut builder = CompletionItem::new( - CompletionKind::Reference, - ctx.source_range(), - &format!("{}!", name), - ) - .kind(CompletionItemKind::Macro) - .set_documentation(docs.clone()) - .set_deprecated(is_deprecated(macro_, ctx.db)) - .detail(detail); - - let needs_bang = ctx.use_item_syntax.is_none() && !ctx.is_macro_call; - builder = match ctx.config.snippet_cap { - Some(cap) if needs_bang => { - let docs = docs.as_ref().map_or("", |s| s.as_str()); - let (bra, ket) = guess_macro_braces(&name, docs); - builder - .insert_snippet(cap, format!("{}!{}$0{}", name, bra, ket)) - .label(format!("{}!{}…{}", name, bra, ket)) - .lookup_by(format!("{}!", name)) - } - None if needs_bang => builder.insert_text(format!("{}!", name)), - _ => { - 
mark::hit!(dont_insert_macro_call_parens_unncessary); - builder.insert_text(name) - } - }; - - self.add(builder); - } - - pub(crate) fn add_function( - &mut self, - ctx: &CompletionContext, - func: hir::Function, - local_name: Option, - ) { - let has_self_param = func.has_self_param(ctx.db); - - let name = local_name.unwrap_or_else(|| func.name(ctx.db).to_string()); - let ast_node = func.source(ctx.db).value; - - let mut builder = - CompletionItem::new(CompletionKind::Reference, ctx.source_range(), name.clone()) - .kind(if has_self_param { - CompletionItemKind::Method - } else { - CompletionItemKind::Function - }) - .set_documentation(func.docs(ctx.db)) - .set_deprecated(is_deprecated(func, ctx.db)) - .detail(function_declaration(&ast_node)); - - let params = ast_node - .param_list() - .into_iter() - .flat_map(|it| it.params()) - .flat_map(|it| it.pat()) - .map(|pat| pat.to_string().trim_start_matches('_').into()) - .collect(); - - builder = builder.add_call_parens(ctx, name, Params::Named(params)); - - self.add(builder) - } - - pub(crate) fn add_const(&mut self, ctx: &CompletionContext, constant: hir::Const) { - let ast_node = constant.source(ctx.db).value; - let name = match ast_node.name() { - Some(name) => name, - _ => return, - }; - let detail = const_label(&ast_node); - - CompletionItem::new(CompletionKind::Reference, ctx.source_range(), name.text().to_string()) - .kind(CompletionItemKind::Const) - .set_documentation(constant.docs(ctx.db)) - .set_deprecated(is_deprecated(constant, ctx.db)) - .detail(detail) - .add_to(self); - } - - pub(crate) fn add_type_alias(&mut self, ctx: &CompletionContext, type_alias: hir::TypeAlias) { - let type_def = type_alias.source(ctx.db).value; - let name = match type_def.name() { - Some(name) => name, - _ => return, - }; - let detail = type_label(&type_def); - - CompletionItem::new(CompletionKind::Reference, ctx.source_range(), name.text().to_string()) - .kind(CompletionItemKind::TypeAlias) - 
.set_documentation(type_alias.docs(ctx.db)) - .set_deprecated(is_deprecated(type_alias, ctx.db)) - .detail(detail) - .add_to(self); - } - - pub(crate) fn add_qualified_enum_variant( - &mut self, - ctx: &CompletionContext, - variant: hir::EnumVariant, - path: ModPath, - ) { - self.add_enum_variant_impl(ctx, variant, None, Some(path)) - } - - pub(crate) fn add_enum_variant( - &mut self, - ctx: &CompletionContext, - variant: hir::EnumVariant, - local_name: Option, - ) { - self.add_enum_variant_impl(ctx, variant, local_name, None) - } - - fn add_enum_variant_impl( - &mut self, - ctx: &CompletionContext, - variant: hir::EnumVariant, - local_name: Option, - path: Option, - ) { - let is_deprecated = is_deprecated(variant, ctx.db); - let name = local_name.unwrap_or_else(|| variant.name(ctx.db).to_string()); - let qualified_name = match &path { - Some(it) => it.to_string(), - None => name.to_string(), - }; - let detail_types = variant - .fields(ctx.db) - .into_iter() - .map(|field| (field.name(ctx.db), field.signature_ty(ctx.db))); - let variant_kind = variant.kind(ctx.db); - let detail = match variant_kind { - StructKind::Tuple | StructKind::Unit => detail_types - .map(|(_, t)| t.display(ctx.db).to_string()) - .sep_by(", ") - .surround_with("(", ")") - .to_string(), - StructKind::Record => detail_types - .map(|(n, t)| format!("{}: {}", n, t.display(ctx.db).to_string())) - .sep_by(", ") - .surround_with("{ ", " }") - .to_string(), - }; - let mut res = CompletionItem::new( - CompletionKind::Reference, - ctx.source_range(), - qualified_name.clone(), - ) - .kind(CompletionItemKind::EnumVariant) - .set_documentation(variant.docs(ctx.db)) - .set_deprecated(is_deprecated) - .detail(detail); - - if path.is_some() { - res = res.lookup_by(name); - } - - if variant_kind == StructKind::Tuple { - mark::hit!(inserts_parens_for_tuple_enums); - let params = Params::Anonymous(variant.fields(ctx.db).len()); - res = res.add_call_parens(ctx, qualified_name, params) - } - - res.add_to(self); - 
} -} - -pub(crate) fn compute_score( - ctx: &CompletionContext, - ty: &Type, - name: &str, -) -> Option { - let (active_name, active_type) = if let Some(record_field) = &ctx.record_field_syntax { - mark::hit!(record_field_type_match); - let (struct_field, _local) = ctx.sema.resolve_record_field(record_field)?; - (struct_field.name(ctx.db).to_string(), struct_field.signature_ty(ctx.db)) - } else if let Some(active_parameter) = &ctx.active_parameter { - mark::hit!(active_param_type_match); - (active_parameter.name.clone(), active_parameter.ty.clone()) - } else { - return None; - }; - - // Compute score - // For the same type - if &active_type != ty { - return None; - } - - let mut res = CompletionScore::TypeMatch; - - // If same type + same name then go top position - if active_name == name { - res = CompletionScore::TypeAndNameMatch - } - - Some(res) -} - -enum Params { - Named(Vec), - Anonymous(usize), -} - -impl Params { - fn len(&self) -> usize { - match self { - Params::Named(xs) => xs.len(), - Params::Anonymous(len) => *len, - } - } - - fn is_empty(&self) -> bool { - self.len() == 0 - } -} - -impl Builder { - fn add_call_parens(mut self, ctx: &CompletionContext, name: String, params: Params) -> Builder { - if !ctx.config.add_call_parenthesis { - return self; - } - if ctx.use_item_syntax.is_some() { - mark::hit!(no_parens_in_use_item); - return self; - } - if ctx.is_pattern_call { - mark::hit!(dont_duplicate_pattern_parens); - return self; - } - if ctx.is_call { - return self; - } - - // Don't add parentheses if the expected type is some function reference. - if let Some(ty) = &ctx.expected_type { - if ty.is_fn() { - mark::hit!(no_call_parens_if_fn_ptr_needed); - return self; - } - } - - let cap = match ctx.config.snippet_cap { - Some(it) => it, - None => return self, - }; - // If not an import, add parenthesis automatically. 
- mark::hit!(inserts_parens_for_function_calls); - - let (snippet, label) = if params.is_empty() { - (format!("{}()$0", name), format!("{}()", name)) - } else { - self = self.trigger_call_info(); - let snippet = match (ctx.config.add_call_argument_snippets, params) { - (true, Params::Named(params)) => { - let function_params_snippet = params - .iter() - .enumerate() - .map(|(index, param_name)| format!("${{{}:{}}}", index + 1, param_name)) - .sep_by(", "); - format!("{}({})$0", name, function_params_snippet) - } - _ => { - mark::hit!(suppress_arg_snippets); - format!("{}($0)", name) - } - }; - - (snippet, format!("{}(…)", name)) - }; - self.lookup_by(name).label(label).insert_snippet(cap, snippet) - } -} - -fn is_deprecated(node: impl HasAttrs, db: &RootDatabase) -> bool { - node.attrs(db).by_key("deprecated").exists() -} - -fn guess_macro_braces(macro_name: &str, docs: &str) -> (&'static str, &'static str) { - let mut votes = [0, 0, 0]; - for (idx, s) in docs.match_indices(¯o_name) { - let (before, after) = (&docs[..idx], &docs[idx + s.len()..]); - // Ensure to match the full word - if after.starts_with('!') - && !before.ends_with(|c: char| c == '_' || c.is_ascii_alphanumeric()) - { - // It may have spaces before the braces like `foo! {}` - match after[1..].chars().find(|&c| !c.is_whitespace()) { - Some('{') => votes[0] += 1, - Some('[') => votes[1] += 1, - Some('(') => votes[2] += 1, - _ => {} - } - } - } - - // Insert a space before `{}`. - // We prefer the last one when some votes equal. 
- let (_vote, (bra, ket)) = votes - .iter() - .zip(&[(" {", "}"), ("[", "]"), ("(", ")")]) - .max_by_key(|&(&vote, _)| vote) - .unwrap(); - (*bra, *ket) -} - -#[cfg(test)] -mod tests { - use std::cmp::Reverse; - - use expect::{expect, Expect}; - use test_utils::mark; - - use crate::{ - completion::{ - test_utils::{ - check_edit, check_edit_with_config, do_completion, get_all_completion_items, - }, - CompletionConfig, CompletionKind, - }, - CompletionScore, - }; - - fn check(ra_fixture: &str, expect: Expect) { - let actual = do_completion(ra_fixture, CompletionKind::Reference); - expect.assert_debug_eq(&actual); - } - - fn check_scores(ra_fixture: &str, expect: Expect) { - fn display_score(score: Option) -> &'static str { - match score { - Some(CompletionScore::TypeMatch) => "[type]", - Some(CompletionScore::TypeAndNameMatch) => "[type+name]", - None => "[]".into(), - } - } - - let mut completions = get_all_completion_items(CompletionConfig::default(), ra_fixture); - completions.sort_by_key(|it| (Reverse(it.score()), it.label().to_string())); - let actual = completions - .into_iter() - .filter(|it| it.completion_kind == CompletionKind::Reference) - .map(|it| { - let tag = it.kind().unwrap().tag(); - let score = display_score(it.score()); - format!("{} {} {}\n", tag, it.label(), score) - }) - .collect::(); - expect.assert_eq(&actual); - } - - #[test] - fn enum_detail_includes_record_fields() { - check( - r#" -enum Foo { Foo { x: i32, y: i32 } } - -fn main() { Foo::Fo<|> } -"#, - expect![[r#" - [ - CompletionItem { - label: "Foo", - source_range: 54..56, - delete: 54..56, - insert: "Foo", - kind: EnumVariant, - detail: "{ x: i32, y: i32 }", - }, - ] - "#]], - ); - } - - #[test] - fn enum_detail_doesnt_include_tuple_fields() { - check( - r#" -enum Foo { Foo (i32, i32) } - -fn main() { Foo::Fo<|> } -"#, - expect![[r#" - [ - CompletionItem { - label: "Foo(…)", - source_range: 46..48, - delete: 46..48, - insert: "Foo($0)", - kind: EnumVariant, - lookup: "Foo", - detail: 
"(i32, i32)", - trigger_call_info: true, - }, - ] - "#]], - ); - } - - #[test] - fn enum_detail_just_parentheses_for_unit() { - check( - r#" -enum Foo { Foo } - -fn main() { Foo::Fo<|> } -"#, - expect![[r#" - [ - CompletionItem { - label: "Foo", - source_range: 35..37, - delete: 35..37, - insert: "Foo", - kind: EnumVariant, - detail: "()", - }, - ] - "#]], - ); - } - - #[test] - fn sets_deprecated_flag_in_completion_items() { - check( - r#" -#[deprecated] -fn something_deprecated() {} -#[deprecated(since = "1.0.0")] -fn something_else_deprecated() {} - -fn main() { som<|> } -"#, - expect![[r#" - [ - CompletionItem { - label: "main()", - source_range: 121..124, - delete: 121..124, - insert: "main()$0", - kind: Function, - lookup: "main", - detail: "fn main()", - }, - CompletionItem { - label: "something_deprecated()", - source_range: 121..124, - delete: 121..124, - insert: "something_deprecated()$0", - kind: Function, - lookup: "something_deprecated", - detail: "fn something_deprecated()", - deprecated: true, - }, - CompletionItem { - label: "something_else_deprecated()", - source_range: 121..124, - delete: 121..124, - insert: "something_else_deprecated()$0", - kind: Function, - lookup: "something_else_deprecated", - detail: "fn something_else_deprecated()", - deprecated: true, - }, - ] - "#]], - ); - - check( - r#" -struct A { #[deprecated] the_field: u32 } -fn foo() { A { the<|> } } -"#, - expect![[r#" - [ - CompletionItem { - label: "the_field", - source_range: 57..60, - delete: 57..60, - insert: "the_field", - kind: Field, - detail: "u32", - deprecated: true, - }, - ] - "#]], - ); - } - - #[test] - fn renders_docs() { - check( - r#" -struct S { - /// Field docs - foo: -} -impl S { - /// Method docs - fn bar(self) { self.<|> } -}"#, - expect![[r#" - [ - CompletionItem { - label: "bar()", - source_range: 94..94, - delete: 94..94, - insert: "bar()$0", - kind: Method, - lookup: "bar", - detail: "fn bar(self)", - documentation: Documentation( - "Method docs", - ), - 
}, - CompletionItem { - label: "foo", - source_range: 94..94, - delete: 94..94, - insert: "foo", - kind: Field, - detail: "{unknown}", - documentation: Documentation( - "Field docs", - ), - }, - ] - "#]], - ); - - check( - r#" -use self::my<|>; - -/// mod docs -mod my { } - -/// enum docs -enum E { - /// variant docs - V -} -use self::E::*; -"#, - expect![[r#" - [ - CompletionItem { - label: "E", - source_range: 10..12, - delete: 10..12, - insert: "E", - kind: Enum, - documentation: Documentation( - "enum docs", - ), - }, - CompletionItem { - label: "V", - source_range: 10..12, - delete: 10..12, - insert: "V", - kind: EnumVariant, - detail: "()", - documentation: Documentation( - "variant docs", - ), - }, - CompletionItem { - label: "my", - source_range: 10..12, - delete: 10..12, - insert: "my", - kind: Module, - documentation: Documentation( - "mod docs", - ), - }, - ] - "#]], - ) - } - - #[test] - fn dont_render_attrs() { - check( - r#" -struct S; -impl S { - #[inline] - fn the_method(&self) { } -} -fn foo(s: S) { s.<|> } -"#, - expect![[r#" - [ - CompletionItem { - label: "the_method()", - source_range: 81..81, - delete: 81..81, - insert: "the_method()$0", - kind: Method, - lookup: "the_method", - detail: "fn the_method(&self)", - }, - ] - "#]], - ) - } - - #[test] - fn inserts_parens_for_function_calls() { - mark::check!(inserts_parens_for_function_calls); - check_edit( - "no_args", - r#" -fn no_args() {} -fn main() { no_<|> } -"#, - r#" -fn no_args() {} -fn main() { no_args()$0 } -"#, - ); - - check_edit( - "with_args", - r#" -fn with_args(x: i32, y: String) {} -fn main() { with_<|> } -"#, - r#" -fn with_args(x: i32, y: String) {} -fn main() { with_args(${1:x}, ${2:y})$0 } -"#, - ); - - check_edit( - "foo", - r#" -struct S; -impl S { - fn foo(&self) {} -} -fn bar(s: &S) { s.f<|> } -"#, - r#" -struct S; -impl S { - fn foo(&self) {} -} -fn bar(s: &S) { s.foo()$0 } -"#, - ); - - check_edit( - "foo", - r#" -struct S {} -impl S { - fn foo(&self, x: i32) {} -} -fn 
bar(s: &S) { - s.f<|> -} -"#, - r#" -struct S {} -impl S { - fn foo(&self, x: i32) {} -} -fn bar(s: &S) { - s.foo(${1:x})$0 -} -"#, - ); - } - - #[test] - fn suppress_arg_snippets() { - mark::check!(suppress_arg_snippets); - check_edit_with_config( - CompletionConfig { add_call_argument_snippets: false, ..CompletionConfig::default() }, - "with_args", - r#" -fn with_args(x: i32, y: String) {} -fn main() { with_<|> } -"#, - r#" -fn with_args(x: i32, y: String) {} -fn main() { with_args($0) } -"#, - ); - } - - #[test] - fn strips_underscores_from_args() { - check_edit( - "foo", - r#" -fn foo(_foo: i32, ___bar: bool, ho_ge_: String) {} -fn main() { f<|> } -"#, - r#" -fn foo(_foo: i32, ___bar: bool, ho_ge_: String) {} -fn main() { foo(${1:foo}, ${2:bar}, ${3:ho_ge_})$0 } -"#, - ); - } - - #[test] - fn inserts_parens_for_tuple_enums() { - mark::check!(inserts_parens_for_tuple_enums); - check_edit( - "Some", - r#" -enum Option { Some(T), None } -use Option::*; -fn main() -> Option { - Som<|> -} -"#, - r#" -enum Option { Some(T), None } -use Option::*; -fn main() -> Option { - Some($0) -} -"#, - ); - check_edit( - "Some", - r#" -enum Option { Some(T), None } -use Option::*; -fn main(value: Option) { - match value { - Som<|> - } -} -"#, - r#" -enum Option { Some(T), None } -use Option::*; -fn main(value: Option) { - match value { - Some($0) - } -} -"#, - ); - } - - #[test] - fn dont_duplicate_pattern_parens() { - mark::check!(dont_duplicate_pattern_parens); - check_edit( - "Var", - r#" -enum E { Var(i32) } -fn main() { - match E::Var(92) { - E::<|>(92) => (), - } -} -"#, - r#" -enum E { Var(i32) } -fn main() { - match E::Var(92) { - E::Var(92) => (), - } -} -"#, - ); - } - - #[test] - fn no_call_parens_if_fn_ptr_needed() { - mark::check!(no_call_parens_if_fn_ptr_needed); - check_edit( - "foo", - r#" -fn foo(foo: u8, bar: u8) {} -struct ManualVtable { f: fn(u8, u8) } - -fn main() -> ManualVtable { - ManualVtable { f: f<|> } -} -"#, - r#" -fn foo(foo: u8, bar: u8) {} -struct 
ManualVtable { f: fn(u8, u8) } - -fn main() -> ManualVtable { - ManualVtable { f: foo } -} -"#, - ); - } - - #[test] - fn no_parens_in_use_item() { - mark::check!(no_parens_in_use_item); - check_edit( - "foo", - r#" -mod m { pub fn foo() {} } -use crate::m::f<|>; -"#, - r#" -mod m { pub fn foo() {} } -use crate::m::foo; -"#, - ); - } - - #[test] - fn no_parens_in_call() { - check_edit( - "foo", - r#" -fn foo(x: i32) {} -fn main() { f<|>(); } -"#, - r#" -fn foo(x: i32) {} -fn main() { foo(); } -"#, - ); - check_edit( - "foo", - r#" -struct Foo; -impl Foo { fn foo(&self){} } -fn f(foo: &Foo) { foo.f<|>(); } -"#, - r#" -struct Foo; -impl Foo { fn foo(&self){} } -fn f(foo: &Foo) { foo.foo(); } -"#, - ); - } - - #[test] - fn inserts_angle_brackets_for_generics() { - mark::check!(inserts_angle_brackets_for_generics); - check_edit( - "Vec", - r#" -struct Vec {} -fn foo(xs: Ve<|>) -"#, - r#" -struct Vec {} -fn foo(xs: Vec<$0>) -"#, - ); - check_edit( - "Vec", - r#" -type Vec = (T,); -fn foo(xs: Ve<|>) -"#, - r#" -type Vec = (T,); -fn foo(xs: Vec<$0>) -"#, - ); - check_edit( - "Vec", - r#" -struct Vec {} -fn foo(xs: Ve<|>) -"#, - r#" -struct Vec {} -fn foo(xs: Vec) -"#, - ); - check_edit( - "Vec", - r#" -struct Vec {} -fn foo(xs: Ve<|>) -"#, - r#" -struct Vec {} -fn foo(xs: Vec) -"#, - ); - } - - #[test] - fn dont_insert_macro_call_parens_unncessary() { - mark::check!(dont_insert_macro_call_parens_unncessary); - check_edit( - "frobnicate!", - r#" -//- /main.rs -use foo::<|>; -//- /foo/lib.rs -#[macro_export] -macro_rules frobnicate { () => () } -"#, - r#" -use foo::frobnicate; -"#, - ); - - check_edit( - "frobnicate!", - r#" -macro_rules frobnicate { () => () } -fn main() { frob<|>!(); } -"#, - r#" -macro_rules frobnicate { () => () } -fn main() { frobnicate!(); } -"#, - ); - } - - #[test] - fn active_param_score() { - mark::check!(active_param_type_match); - check_scores( - r#" -struct S { foo: i64, bar: u32, baz: u32 } -fn test(bar: u32) { } -fn foo(s: S) { test(s.<|>) } 
-"#, - expect![[r#" - fd bar [type+name] - fd baz [type] - fd foo [] - "#]], - ); - } - - #[test] - fn record_field_scores() { - mark::check!(record_field_type_match); - check_scores( - r#" -struct A { foo: i64, bar: u32, baz: u32 } -struct B { x: (), y: f32, bar: u32 } -fn foo(a: A) { B { bar: a.<|> }; } -"#, - expect![[r#" - fd bar [type+name] - fd baz [type] - fd foo [] - "#]], - ) - } - - #[test] - fn record_field_and_call_scores() { - check_scores( - r#" -struct A { foo: i64, bar: u32, baz: u32 } -struct B { x: (), y: f32, bar: u32 } -fn f(foo: i64) { } -fn foo(a: A) { B { bar: f(a.<|>) }; } -"#, - expect![[r#" - fd foo [type+name] - fd bar [] - fd baz [] - "#]], - ); - check_scores( - r#" -struct A { foo: i64, bar: u32, baz: u32 } -struct B { x: (), y: f32, bar: u32 } -fn f(foo: i64) { } -fn foo(a: A) { f(B { bar: a.<|> }); } -"#, - expect![[r#" - fd bar [type+name] - fd baz [type] - fd foo [] - "#]], - ); - } - - #[test] - fn prioritize_exact_ref_match() { - check_scores( - r#" -struct WorldSnapshot { _f: () }; -fn go(world: &WorldSnapshot) { go(w<|>) } -"#, - expect![[r#" - bn world [type+name] - st WorldSnapshot [] - fn go(…) [] - "#]], - ); - } - - #[test] - fn too_many_arguments() { - mark::check!(too_many_arguments); - check_scores( - r#" -struct Foo; -fn f(foo: &Foo) { f(foo, w<|>) } -"#, - expect![[r#" - st Foo [] - fn f(…) [] - bn foo [] - "#]], - ); - } - - #[test] - fn guesses_macro_braces() { - check_edit( - "vec!", - r#" -/// Creates a [`Vec`] containing the arguments. -/// -/// ``` -/// let v = vec![1, 2, 3]; -/// assert_eq!(v[0], 1); -/// assert_eq!(v[1], 2); -/// assert_eq!(v[2], 3); -/// ``` -macro_rules! vec { () => {} } - -fn fn main() { v<|> } -"#, - r#" -/// Creates a [`Vec`] containing the arguments. -/// -/// ``` -/// let v = vec![1, 2, 3]; -/// assert_eq!(v[0], 1); -/// assert_eq!(v[1], 2); -/// assert_eq!(v[2], 3); -/// ``` -macro_rules! 
vec { () => {} } - -fn fn main() { vec![$0] } -"#, - ); - - check_edit( - "foo!", - r#" -/// Foo -/// -/// Don't call `fooo!()` `fooo!()`, or `_foo![]` `_foo![]`, -/// call as `let _=foo! { hello world };` -macro_rules! foo { () => {} } -fn main() { <|> } -"#, - r#" -/// Foo -/// -/// Don't call `fooo!()` `fooo!()`, or `_foo![]` `_foo![]`, -/// call as `let _=foo! { hello world };` -macro_rules! foo { () => {} } -fn main() { foo! {$0} } -"#, - ) - } -} diff --git a/crates/ra_ide/src/completion/test_utils.rs b/crates/ra_ide/src/completion/test_utils.rs deleted file mode 100644 index 9191777455..0000000000 --- a/crates/ra_ide/src/completion/test_utils.rs +++ /dev/null @@ -1,114 +0,0 @@ -//! Runs completion for testing purposes. - -use hir::Semantics; -use itertools::Itertools; -use ra_syntax::{AstNode, NodeOrToken, SyntaxElement}; -use stdx::{format_to, trim_indent}; -use test_utils::assert_eq_text; - -use crate::{ - completion::{completion_item::CompletionKind, CompletionConfig}, - mock_analysis::analysis_and_position, - CompletionItem, -}; - -pub(crate) fn do_completion(code: &str, kind: CompletionKind) -> Vec { - do_completion_with_config(CompletionConfig::default(), code, kind) -} - -pub(crate) fn do_completion_with_config( - config: CompletionConfig, - code: &str, - kind: CompletionKind, -) -> Vec { - let mut kind_completions: Vec = get_all_completion_items(config, code) - .into_iter() - .filter(|c| c.completion_kind == kind) - .collect(); - kind_completions.sort_by(|l, r| l.label().cmp(r.label())); - kind_completions -} - -pub(crate) fn completion_list(code: &str, kind: CompletionKind) -> String { - completion_list_with_config(CompletionConfig::default(), code, kind) -} - -pub(crate) fn completion_list_with_config( - config: CompletionConfig, - code: &str, - kind: CompletionKind, -) -> String { - let mut kind_completions: Vec = get_all_completion_items(config, code) - .into_iter() - .filter(|c| c.completion_kind == kind) - .collect(); - 
kind_completions.sort_by_key(|c| c.label().to_owned()); - let label_width = kind_completions - .iter() - .map(|it| monospace_width(it.label())) - .max() - .unwrap_or_default() - .min(16); - kind_completions - .into_iter() - .map(|it| { - let tag = it.kind().unwrap().tag(); - let var_name = format!("{} {}", tag, it.label()); - let mut buf = var_name; - if let Some(detail) = it.detail() { - let width = label_width.saturating_sub(monospace_width(it.label())); - format_to!(buf, "{:width$} {}", "", detail, width = width); - } - format_to!(buf, "\n"); - buf - }) - .collect() -} - -fn monospace_width(s: &str) -> usize { - s.chars().count() -} - -pub(crate) fn check_edit(what: &str, ra_fixture_before: &str, ra_fixture_after: &str) { - check_edit_with_config(CompletionConfig::default(), what, ra_fixture_before, ra_fixture_after) -} - -pub(crate) fn check_edit_with_config( - config: CompletionConfig, - what: &str, - ra_fixture_before: &str, - ra_fixture_after: &str, -) { - let ra_fixture_after = trim_indent(ra_fixture_after); - let (analysis, position) = analysis_and_position(ra_fixture_before); - let completions: Vec = - analysis.completions(&config, position).unwrap().unwrap().into(); - let (completion,) = completions - .iter() - .filter(|it| it.lookup() == what) - .collect_tuple() - .unwrap_or_else(|| panic!("can't find {:?} completion in {:#?}", what, completions)); - let mut actual = analysis.file_text(position.file_id).unwrap().to_string(); - completion.text_edit().apply(&mut actual); - assert_eq_text!(&ra_fixture_after, &actual) -} - -pub(crate) fn check_pattern_is_applicable(code: &str, check: fn(SyntaxElement) -> bool) { - let (analysis, pos) = analysis_and_position(code); - analysis - .with_db(|db| { - let sema = Semantics::new(db); - let original_file = sema.parse(pos.file_id); - let token = original_file.syntax().token_at_offset(pos.offset).left_biased().unwrap(); - assert!(check(NodeOrToken::Token(token))); - }) - .unwrap(); -} - -pub(crate) fn 
get_all_completion_items( - config: CompletionConfig, - code: &str, -) -> Vec { - let (analysis, position) = analysis_and_position(code); - analysis.completions(&config, position).unwrap().unwrap().into() -} diff --git a/crates/ra_ide/src/diagnostics.rs b/crates/ra_ide/src/diagnostics.rs deleted file mode 100644 index 73c0b82754..0000000000 --- a/crates/ra_ide/src/diagnostics.rs +++ /dev/null @@ -1,799 +0,0 @@ -//! Collects diagnostics & fixits for a single file. -//! -//! The tricky bit here is that diagnostics are produced by hir in terms of -//! macro-expanded files, but we need to present them to the users in terms of -//! original files. So we need to map the ranges. - -use std::cell::RefCell; - -use hir::{ - diagnostics::{AstDiagnostic, Diagnostic as _, DiagnosticSinkBuilder}, - HasSource, HirDisplay, Semantics, VariantDef, -}; -use itertools::Itertools; -use ra_db::SourceDatabase; -use ra_ide_db::RootDatabase; -use ra_prof::profile; -use ra_syntax::{ - algo, - ast::{self, edit::IndentLevel, make, AstNode}, - SyntaxNode, TextRange, T, -}; -use ra_text_edit::{TextEdit, TextEditBuilder}; - -use crate::{Diagnostic, FileId, FileSystemEdit, Fix, SourceFileEdit}; - -#[derive(Debug, Copy, Clone)] -pub enum Severity { - Error, - WeakWarning, -} - -pub(crate) fn diagnostics( - db: &RootDatabase, - file_id: FileId, - enable_experimental: bool, -) -> Vec { - let _p = profile("diagnostics"); - let sema = Semantics::new(db); - let parse = db.parse(file_id); - let mut res = Vec::new(); - - // [#34344] Only take first 128 errors to prevent slowing down editor/ide, the number 128 is chosen arbitrarily. 
- res.extend(parse.errors().iter().take(128).map(|err| Diagnostic { - range: err.range(), - message: format!("Syntax Error: {}", err), - severity: Severity::Error, - fix: None, - })); - - for node in parse.tree().syntax().descendants() { - check_unnecessary_braces_in_use_statement(&mut res, file_id, &node); - check_struct_shorthand_initialization(&mut res, file_id, &node); - } - let res = RefCell::new(res); - let mut sink = DiagnosticSinkBuilder::new() - .on::(|d| { - let original_file = d.source().file_id.original_file(db); - let fix = Fix::new( - "Create module", - FileSystemEdit::CreateFile { anchor: original_file, dst: d.candidate.clone() } - .into(), - ); - res.borrow_mut().push(Diagnostic { - range: sema.diagnostics_range(d).range, - message: d.message(), - severity: Severity::Error, - fix: Some(fix), - }) - }) - .on::(|d| { - // Note that although we could add a diagnostics to - // fill the missing tuple field, e.g : - // `struct A(usize);` - // `let a = A { 0: () }` - // but it is uncommon usage and it should not be encouraged. 
- let fix = if d.missed_fields.iter().any(|it| it.as_tuple_index().is_some()) { - None - } else { - let mut field_list = d.ast(db); - for f in d.missed_fields.iter() { - let field = make::record_expr_field( - make::name_ref(&f.to_string()), - Some(make::expr_unit()), - ); - field_list = field_list.append_field(&field); - } - - let edit = { - let mut builder = TextEditBuilder::default(); - algo::diff(&d.ast(db).syntax(), &field_list.syntax()) - .into_text_edit(&mut builder); - builder.finish() - }; - Some(Fix::new("Fill struct fields", SourceFileEdit { file_id, edit }.into())) - }; - - res.borrow_mut().push(Diagnostic { - range: sema.diagnostics_range(d).range, - message: d.message(), - severity: Severity::Error, - fix, - }) - }) - .on::(|d| { - let node = d.ast(db); - let replacement = format!("Ok({})", node.syntax()); - let edit = TextEdit::replace(node.syntax().text_range(), replacement); - let source_change = SourceFileEdit { file_id, edit }.into(); - let fix = Fix::new("Wrap with ok", source_change); - res.borrow_mut().push(Diagnostic { - range: sema.diagnostics_range(d).range, - message: d.message(), - severity: Severity::Error, - fix: Some(fix), - }) - }) - .on::(|d| { - res.borrow_mut().push(Diagnostic { - range: sema.diagnostics_range(d).range, - message: d.message(), - severity: Severity::Error, - fix: missing_struct_field_fix(&sema, file_id, d), - }) - }) - // Only collect experimental diagnostics when they're enabled. - .filter(|diag| !diag.is_experimental() || enable_experimental) - // Diagnostics not handled above get no fix and default treatment. 
- .build(|d| { - res.borrow_mut().push(Diagnostic { - message: d.message(), - range: sema.diagnostics_range(d).range, - severity: Severity::Error, - fix: None, - }) - }); - - if let Some(m) = sema.to_module_def(file_id) { - m.diagnostics(db, &mut sink); - }; - drop(sink); - res.into_inner() -} - -fn missing_struct_field_fix( - sema: &Semantics, - usage_file_id: FileId, - d: &hir::diagnostics::NoSuchField, -) -> Option { - let record_expr = sema.ast(d); - - let record_lit = ast::RecordExpr::cast(record_expr.syntax().parent()?.parent()?)?; - let def_id = sema.resolve_variant(record_lit)?; - let module; - let def_file_id; - let record_fields = match VariantDef::from(def_id) { - VariantDef::Struct(s) => { - module = s.module(sema.db); - let source = s.source(sema.db); - def_file_id = source.file_id; - let fields = source.value.field_list()?; - record_field_list(fields)? - } - VariantDef::Union(u) => { - module = u.module(sema.db); - let source = u.source(sema.db); - def_file_id = source.file_id; - source.value.record_field_list()? - } - VariantDef::EnumVariant(e) => { - module = e.module(sema.db); - let source = e.source(sema.db); - def_file_id = source.file_id; - let fields = source.value.field_list()?; - record_field_list(fields)? 
- } - }; - let def_file_id = def_file_id.original_file(sema.db); - - let new_field_type = sema.type_of_expr(&record_expr.expr()?)?; - if new_field_type.is_unknown() { - return None; - } - let new_field = make::record_field( - record_expr.field_name()?, - make::ty(&new_field_type.display_source_code(sema.db, module.into()).ok()?), - ); - - let last_field = record_fields.fields().last()?; - let last_field_syntax = last_field.syntax(); - let indent = IndentLevel::from_node(last_field_syntax); - - let mut new_field = new_field.to_string(); - if usage_file_id != def_file_id { - new_field = format!("pub(crate) {}", new_field); - } - new_field = format!("\n{}{}", indent, new_field); - - let needs_comma = !last_field_syntax.to_string().ends_with(','); - if needs_comma { - new_field = format!(",{}", new_field); - } - - let source_change = SourceFileEdit { - file_id: def_file_id, - edit: TextEdit::insert(last_field_syntax.text_range().end(), new_field), - }; - let fix = Fix::new("Create field", source_change.into()); - return Some(fix); - - fn record_field_list(field_def_list: ast::FieldList) -> Option { - match field_def_list { - ast::FieldList::RecordFieldList(it) => Some(it), - ast::FieldList::TupleFieldList(_) => None, - } - } -} - -fn check_unnecessary_braces_in_use_statement( - acc: &mut Vec, - file_id: FileId, - node: &SyntaxNode, -) -> Option<()> { - let use_tree_list = ast::UseTreeList::cast(node.clone())?; - if let Some((single_use_tree,)) = use_tree_list.use_trees().collect_tuple() { - let range = use_tree_list.syntax().text_range(); - let edit = - text_edit_for_remove_unnecessary_braces_with_self_in_use_statement(&single_use_tree) - .unwrap_or_else(|| { - let to_replace = single_use_tree.syntax().text().to_string(); - let mut edit_builder = TextEditBuilder::default(); - edit_builder.delete(range); - edit_builder.insert(range.start(), to_replace); - edit_builder.finish() - }); - - acc.push(Diagnostic { - range, - message: "Unnecessary braces in use 
statement".to_string(), - severity: Severity::WeakWarning, - fix: Some(Fix::new( - "Remove unnecessary braces", - SourceFileEdit { file_id, edit }.into(), - )), - }); - } - - Some(()) -} - -fn text_edit_for_remove_unnecessary_braces_with_self_in_use_statement( - single_use_tree: &ast::UseTree, -) -> Option { - let use_tree_list_node = single_use_tree.syntax().parent()?; - if single_use_tree.path()?.segment()?.syntax().first_child_or_token()?.kind() == T![self] { - let start = use_tree_list_node.prev_sibling_or_token()?.text_range().start(); - let end = use_tree_list_node.text_range().end(); - let range = TextRange::new(start, end); - return Some(TextEdit::delete(range)); - } - None -} - -fn check_struct_shorthand_initialization( - acc: &mut Vec, - file_id: FileId, - node: &SyntaxNode, -) -> Option<()> { - let record_lit = ast::RecordExpr::cast(node.clone())?; - let record_field_list = record_lit.record_expr_field_list()?; - for record_field in record_field_list.fields() { - if let (Some(name_ref), Some(expr)) = (record_field.name_ref(), record_field.expr()) { - let field_name = name_ref.syntax().text().to_string(); - let field_expr = expr.syntax().text().to_string(); - let field_name_is_tup_index = name_ref.as_tuple_field().is_some(); - if field_name == field_expr && !field_name_is_tup_index { - let mut edit_builder = TextEditBuilder::default(); - edit_builder.delete(record_field.syntax().text_range()); - edit_builder.insert(record_field.syntax().text_range().start(), field_name); - let edit = edit_builder.finish(); - - acc.push(Diagnostic { - range: record_field.syntax().text_range(), - message: "Shorthand struct initialization".to_string(), - severity: Severity::WeakWarning, - fix: Some(Fix::new( - "Use struct shorthand initialization", - SourceFileEdit { file_id, edit }.into(), - )), - }); - } - } - } - Some(()) -} - -#[cfg(test)] -mod tests { - use stdx::trim_indent; - use test_utils::assert_eq_text; - - use crate::mock_analysis::{analysis_and_position, 
single_file, MockAnalysis}; - use expect::{expect, Expect}; - - /// Takes a multi-file input fixture with annotated cursor positions, - /// and checks that: - /// * a diagnostic is produced - /// * this diagnostic touches the input cursor position - /// * that the contents of the file containing the cursor match `after` after the diagnostic fix is applied - fn check_fix(ra_fixture_before: &str, ra_fixture_after: &str) { - let after = trim_indent(ra_fixture_after); - - let (analysis, file_position) = analysis_and_position(ra_fixture_before); - let diagnostic = analysis.diagnostics(file_position.file_id, true).unwrap().pop().unwrap(); - let mut fix = diagnostic.fix.unwrap(); - let edit = fix.source_change.source_file_edits.pop().unwrap().edit; - let target_file_contents = analysis.file_text(file_position.file_id).unwrap(); - let actual = { - let mut actual = target_file_contents.to_string(); - edit.apply(&mut actual); - actual - }; - - assert_eq_text!(&after, &actual); - assert!( - diagnostic.range.start() <= file_position.offset - && diagnostic.range.end() >= file_position.offset, - "diagnostic range {:?} does not touch cursor position {:?}", - diagnostic.range, - file_position.offset - ); - } - - /// Checks that a diagnostic applies to the file containing the `<|>` cursor marker - /// which has a fix that can apply to other files. 
- fn check_apply_diagnostic_fix_in_other_file(ra_fixture_before: &str, ra_fixture_after: &str) { - let ra_fixture_after = &trim_indent(ra_fixture_after); - let (analysis, file_pos) = analysis_and_position(ra_fixture_before); - let current_file_id = file_pos.file_id; - let diagnostic = analysis.diagnostics(current_file_id, true).unwrap().pop().unwrap(); - let mut fix = diagnostic.fix.unwrap(); - let edit = fix.source_change.source_file_edits.pop().unwrap(); - let changed_file_id = edit.file_id; - let before = analysis.file_text(changed_file_id).unwrap(); - let actual = { - let mut actual = before.to_string(); - edit.edit.apply(&mut actual); - actual - }; - assert_eq_text!(ra_fixture_after, &actual); - } - - /// Takes a multi-file input fixture with annotated cursor position and checks that no diagnostics - /// apply to the file containing the cursor. - fn check_no_diagnostics(ra_fixture: &str) { - let mock = MockAnalysis::with_files(ra_fixture); - let files = mock.files().map(|(it, _)| it).collect::>(); - let analysis = mock.analysis(); - let diagnostics = files - .into_iter() - .flat_map(|file_id| analysis.diagnostics(file_id, true).unwrap()) - .collect::>(); - assert_eq!(diagnostics.len(), 0, "unexpected diagnostics:\n{:#?}", diagnostics); - } - - fn check_expect(ra_fixture: &str, expect: Expect) { - let (analysis, file_id) = single_file(ra_fixture); - let diagnostics = analysis.diagnostics(file_id, true).unwrap(); - expect.assert_debug_eq(&diagnostics) - } - - #[test] - fn test_wrap_return_type() { - check_fix( - r#" -//- /main.rs -use core::result::Result::{self, Ok, Err}; - -fn div(x: i32, y: i32) -> Result { - if y == 0 { - return Err(()); - } - x / y<|> -} -//- /core/lib.rs -pub mod result { - pub enum Result { Ok(T), Err(E) } -} -"#, - r#" -use core::result::Result::{self, Ok, Err}; - -fn div(x: i32, y: i32) -> Result { - if y == 0 { - return Err(()); - } - Ok(x / y) -} -"#, - ); - } - - #[test] - fn test_wrap_return_type_handles_generic_functions() { - 
check_fix( - r#" -//- /main.rs -use core::result::Result::{self, Ok, Err}; - -fn div(x: T) -> Result { - if x == 0 { - return Err(7); - } - <|>x -} -//- /core/lib.rs -pub mod result { - pub enum Result { Ok(T), Err(E) } -} -"#, - r#" -use core::result::Result::{self, Ok, Err}; - -fn div(x: T) -> Result { - if x == 0 { - return Err(7); - } - Ok(x) -} -"#, - ); - } - - #[test] - fn test_wrap_return_type_handles_type_aliases() { - check_fix( - r#" -//- /main.rs -use core::result::Result::{self, Ok, Err}; - -type MyResult = Result; - -fn div(x: i32, y: i32) -> MyResult { - if y == 0 { - return Err(()); - } - x <|>/ y -} -//- /core/lib.rs -pub mod result { - pub enum Result { Ok(T), Err(E) } -} -"#, - r#" -use core::result::Result::{self, Ok, Err}; - -type MyResult = Result; - -fn div(x: i32, y: i32) -> MyResult { - if y == 0 { - return Err(()); - } - Ok(x / y) -} -"#, - ); - } - - #[test] - fn test_wrap_return_type_not_applicable_when_expr_type_does_not_match_ok_type() { - check_no_diagnostics( - r#" -//- /main.rs -use core::result::Result::{self, Ok, Err}; - -fn foo() -> Result<(), i32> { 0 } - -//- /core/lib.rs -pub mod result { - pub enum Result { Ok(T), Err(E) } -} -"#, - ); - } - - #[test] - fn test_wrap_return_type_not_applicable_when_return_type_is_not_result() { - check_no_diagnostics( - r#" -//- /main.rs -use core::result::Result::{self, Ok, Err}; - -enum SomeOtherEnum { Ok(i32), Err(String) } - -fn foo() -> SomeOtherEnum { 0 } - -//- /core/lib.rs -pub mod result { - pub enum Result { Ok(T), Err(E) } -} -"#, - ); - } - - #[test] - fn test_fill_struct_fields_empty() { - check_fix( - r#" -struct TestStruct { one: i32, two: i64 } - -fn test_fn() { - let s = TestStruct {<|>}; -} -"#, - r#" -struct TestStruct { one: i32, two: i64 } - -fn test_fn() { - let s = TestStruct { one: (), two: ()}; -} -"#, - ); - } - - #[test] - fn test_fill_struct_fields_self() { - check_fix( - r#" -struct TestStruct { one: i32 } - -impl TestStruct { - fn test_fn() { let s = Self {<|>}; } 
-} -"#, - r#" -struct TestStruct { one: i32 } - -impl TestStruct { - fn test_fn() { let s = Self { one: ()}; } -} -"#, - ); - } - - #[test] - fn test_fill_struct_fields_enum() { - check_fix( - r#" -enum Expr { - Bin { lhs: Box, rhs: Box } -} - -impl Expr { - fn new_bin(lhs: Box, rhs: Box) -> Expr { - Expr::Bin {<|> } - } -} -"#, - r#" -enum Expr { - Bin { lhs: Box, rhs: Box } -} - -impl Expr { - fn new_bin(lhs: Box, rhs: Box) -> Expr { - Expr::Bin { lhs: (), rhs: () } - } -} -"#, - ); - } - - #[test] - fn test_fill_struct_fields_partial() { - check_fix( - r#" -struct TestStruct { one: i32, two: i64 } - -fn test_fn() { - let s = TestStruct{ two: 2<|> }; -} -"#, - r" -struct TestStruct { one: i32, two: i64 } - -fn test_fn() { - let s = TestStruct{ two: 2, one: () }; -} -", - ); - } - - #[test] - fn test_fill_struct_fields_no_diagnostic() { - check_no_diagnostics( - r" - struct TestStruct { one: i32, two: i64 } - - fn test_fn() { - let one = 1; - let s = TestStruct{ one, two: 2 }; - } - ", - ); - } - - #[test] - fn test_fill_struct_fields_no_diagnostic_on_spread() { - check_no_diagnostics( - r" - struct TestStruct { one: i32, two: i64 } - - fn test_fn() { - let one = 1; - let s = TestStruct{ ..a }; - } - ", - ); - } - - #[test] - fn test_unresolved_module_diagnostic() { - check_expect( - r#"mod foo;"#, - expect![[r#" - [ - Diagnostic { - message: "unresolved module", - range: 0..8, - severity: Error, - fix: Some( - Fix { - label: "Create module", - source_change: SourceChange { - source_file_edits: [], - file_system_edits: [ - CreateFile { - anchor: FileId( - 1, - ), - dst: "foo.rs", - }, - ], - is_snippet: false, - }, - }, - ), - }, - ] - "#]], - ); - } - - #[test] - fn range_mapping_out_of_macros() { - // FIXME: this is very wrong, but somewhat tricky to fix. - check_fix( - r#" -fn some() {} -fn items() {} -fn here() {} - -macro_rules! 
id { ($($tt:tt)*) => { $($tt)*}; } - -fn main() { - let _x = id![Foo { a: <|>42 }]; -} - -pub struct Foo { pub a: i32, pub b: i32 } -"#, - r#" -fn {a:42, b: ()} {} -fn items() {} -fn here() {} - -macro_rules! id { ($($tt:tt)*) => { $($tt)*}; } - -fn main() { - let _x = id![Foo { a: 42 }]; -} - -pub struct Foo { pub a: i32, pub b: i32 } -"#, - ); - } - - #[test] - fn test_check_unnecessary_braces_in_use_statement() { - check_no_diagnostics( - r#" -use a; -use a::{c, d::e}; -"#, - ); - check_fix(r#"use {<|>b};"#, r#"use b;"#); - check_fix(r#"use {b<|>};"#, r#"use b;"#); - check_fix(r#"use a::{c<|>};"#, r#"use a::c;"#); - check_fix(r#"use a::{self<|>};"#, r#"use a;"#); - check_fix(r#"use a::{c, d::{e<|>}};"#, r#"use a::{c, d::e};"#); - } - - #[test] - fn test_check_struct_shorthand_initialization() { - check_no_diagnostics( - r#" -struct A { a: &'static str } -fn main() { A { a: "hello" } } -"#, - ); - check_no_diagnostics( - r#" -struct A(usize); -fn main() { A { 0: 0 } } -"#, - ); - - check_fix( - r#" -struct A { a: &'static str } -fn main() { - let a = "haha"; - A { a<|>: a } -} -"#, - r#" -struct A { a: &'static str } -fn main() { - let a = "haha"; - A { a } -} -"#, - ); - - check_fix( - r#" -struct A { a: &'static str, b: &'static str } -fn main() { - let a = "haha"; - let b = "bb"; - A { a<|>: a, b } -} -"#, - r#" -struct A { a: &'static str, b: &'static str } -fn main() { - let a = "haha"; - let b = "bb"; - A { a, b } -} -"#, - ); - } - - #[test] - fn test_add_field_from_usage() { - check_fix( - r" -fn main() { - Foo { bar: 3, baz<|>: false}; -} -struct Foo { - bar: i32 -} -", - r" -fn main() { - Foo { bar: 3, baz: false}; -} -struct Foo { - bar: i32, - baz: bool -} -", - ) - } - - #[test] - fn test_add_field_in_other_file_from_usage() { - check_apply_diagnostic_fix_in_other_file( - r" - //- /main.rs - mod foo; - - fn main() { - <|>foo::Foo { bar: 3, baz: false}; - } - //- /foo.rs - struct Foo { - bar: i32 - } - ", - r" - struct Foo { - bar: i32, - pub(crate) 
baz: bool - } - ", - ) - } -} diff --git a/crates/ra_ide/src/display.rs b/crates/ra_ide/src/display.rs deleted file mode 100644 index fd42aa4352..0000000000 --- a/crates/ra_ide/src/display.rs +++ /dev/null @@ -1,83 +0,0 @@ -//! This module contains utilities for turning SyntaxNodes and HIR types -//! into types that may be used to render in a UI. - -mod navigation_target; -mod short_label; - -use ra_syntax::{ - ast::{self, AstNode, AttrsOwner, GenericParamsOwner, NameOwner}, - SyntaxKind::{ATTR, COMMENT}, -}; - -use ast::VisibilityOwner; -use stdx::format_to; - -pub use navigation_target::NavigationTarget; -pub(crate) use navigation_target::{ToNav, TryToNav}; -pub(crate) use short_label::ShortLabel; - -pub(crate) fn function_declaration(node: &ast::Fn) -> String { - let mut buf = String::new(); - if let Some(vis) = node.visibility() { - format_to!(buf, "{} ", vis); - } - if node.async_token().is_some() { - format_to!(buf, "async "); - } - if node.const_token().is_some() { - format_to!(buf, "const "); - } - if node.unsafe_token().is_some() { - format_to!(buf, "unsafe "); - } - if let Some(abi) = node.abi() { - // Keyword `extern` is included in the string. 
- format_to!(buf, "{} ", abi); - } - if let Some(name) = node.name() { - format_to!(buf, "fn {}", name) - } - if let Some(type_params) = node.generic_param_list() { - format_to!(buf, "{}", type_params); - } - if let Some(param_list) = node.param_list() { - format_to!(buf, "{}", param_list); - } - if let Some(ret_type) = node.ret_type() { - if ret_type.ty().is_some() { - format_to!(buf, " {}", ret_type); - } - } - if let Some(where_clause) = node.where_clause() { - format_to!(buf, "\n{}", where_clause); - } - buf -} - -pub(crate) fn const_label(node: &ast::Const) -> String { - let label: String = node - .syntax() - .children_with_tokens() - .filter(|child| !(child.kind() == COMMENT || child.kind() == ATTR)) - .map(|node| node.to_string()) - .collect(); - - label.trim().to_owned() -} - -pub(crate) fn type_label(node: &ast::TypeAlias) -> String { - let label: String = node - .syntax() - .children_with_tokens() - .filter(|child| !(child.kind() == COMMENT || child.kind() == ATTR)) - .map(|node| node.to_string()) - .collect(); - - label.trim().to_owned() -} - -pub(crate) fn macro_label(node: &ast::MacroCall) -> String { - let name = node.name().map(|name| name.syntax().text().to_string()).unwrap_or_default(); - let vis = if node.has_atom_attr("macro_export") { "#[macro_export]\n" } else { "" }; - format!("{}macro_rules! {}", vis, name) -} diff --git a/crates/ra_ide/src/display/navigation_target.rs b/crates/ra_ide/src/display/navigation_target.rs deleted file mode 100644 index fdbf75abd9..0000000000 --- a/crates/ra_ide/src/display/navigation_target.rs +++ /dev/null @@ -1,491 +0,0 @@ -//! 
FIXME: write short doc here - -use either::Either; -use hir::{original_range, AssocItem, FieldSource, HasSource, InFile, ModuleSource}; -use ra_db::{FileId, SourceDatabase}; -use ra_ide_db::{defs::Definition, RootDatabase}; -use ra_syntax::{ - ast::{self, DocCommentsOwner, NameOwner}, - match_ast, AstNode, SmolStr, - SyntaxKind::{self, IDENT_PAT, TYPE_PARAM}, - TextRange, -}; - -use crate::FileSymbol; - -use super::short_label::ShortLabel; - -/// `NavigationTarget` represents and element in the editor's UI which you can -/// click on to navigate to a particular piece of code. -/// -/// Typically, a `NavigationTarget` corresponds to some element in the source -/// code, like a function or a struct, but this is not strictly required. -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct NavigationTarget { - pub file_id: FileId, - /// Range which encompasses the whole element. - /// - /// Should include body, doc comments, attributes, etc. - /// - /// Clients should use this range to answer "is the cursor inside the - /// element?" question. - pub full_range: TextRange, - /// A "most interesting" range withing the `full_range`. - /// - /// Typically, `full_range` is the whole syntax node, including doc - /// comments, and `focus_range` is the range of the identifier. "Most - /// interesting" range within the full range, typically the range of - /// identifier. - /// - /// Clients should place the cursor on this range when navigating to this target. 
- pub focus_range: Option, - pub name: SmolStr, - pub kind: SyntaxKind, - pub container_name: Option, - pub description: Option, - pub docs: Option, -} - -pub(crate) trait ToNav { - fn to_nav(&self, db: &RootDatabase) -> NavigationTarget; -} - -pub(crate) trait TryToNav { - fn try_to_nav(&self, db: &RootDatabase) -> Option; -} - -impl NavigationTarget { - pub fn focus_or_full_range(&self) -> TextRange { - self.focus_range.unwrap_or(self.full_range) - } - - pub(crate) fn from_module_to_decl(db: &RootDatabase, module: hir::Module) -> NavigationTarget { - let name = module.name(db).map(|it| it.to_string().into()).unwrap_or_default(); - if let Some(src) = module.declaration_source(db) { - let frange = original_range(db, src.as_ref().map(|it| it.syntax())); - let mut res = NavigationTarget::from_syntax( - frange.file_id, - name, - None, - frange.range, - src.value.syntax().kind(), - ); - res.docs = src.value.doc_comment_text(); - res.description = src.value.short_label(); - return res; - } - module.to_nav(db) - } - - #[cfg(test)] - pub(crate) fn assert_match(&self, expected: &str) { - let actual = self.debug_render(); - test_utils::assert_eq_text!(expected.trim(), actual.trim(),); - } - - #[cfg(test)] - pub(crate) fn debug_render(&self) -> String { - let mut buf = - format!("{} {:?} {:?} {:?}", self.name, self.kind, self.file_id, self.full_range); - if let Some(focus_range) = self.focus_range { - buf.push_str(&format!(" {:?}", focus_range)) - } - if let Some(container_name) = &self.container_name { - buf.push_str(&format!(" {}", container_name)) - } - buf - } - - /// Allows `NavigationTarget` to be created from a `NameOwner` - pub(crate) fn from_named( - db: &RootDatabase, - node: InFile<&dyn ast::NameOwner>, - ) -> NavigationTarget { - let name = - node.value.name().map(|it| it.text().clone()).unwrap_or_else(|| SmolStr::new("_")); - let focus_range = - node.value.name().map(|it| original_range(db, node.with_value(it.syntax())).range); - let frange = original_range(db, 
node.map(|it| it.syntax())); - - NavigationTarget::from_syntax( - frange.file_id, - name, - focus_range, - frange.range, - node.value.syntax().kind(), - ) - } - - /// Allows `NavigationTarget` to be created from a `DocCommentsOwner` and a `NameOwner` - pub(crate) fn from_doc_commented( - db: &RootDatabase, - named: InFile<&dyn ast::NameOwner>, - node: InFile<&dyn ast::DocCommentsOwner>, - ) -> NavigationTarget { - let name = - named.value.name().map(|it| it.text().clone()).unwrap_or_else(|| SmolStr::new("_")); - let frange = original_range(db, node.map(|it| it.syntax())); - - NavigationTarget::from_syntax( - frange.file_id, - name, - None, - frange.range, - node.value.syntax().kind(), - ) - } - - fn from_syntax( - file_id: FileId, - name: SmolStr, - focus_range: Option, - full_range: TextRange, - kind: SyntaxKind, - ) -> NavigationTarget { - NavigationTarget { - file_id, - name, - kind, - full_range, - focus_range, - container_name: None, - description: None, - docs: None, - } - } -} - -impl ToNav for FileSymbol { - fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { - NavigationTarget { - file_id: self.file_id, - name: self.name.clone(), - kind: self.kind, - full_range: self.range, - focus_range: self.name_range, - container_name: self.container_name.clone(), - description: description_from_symbol(db, self), - docs: docs_from_symbol(db, self), - } - } -} - -impl TryToNav for Definition { - fn try_to_nav(&self, db: &RootDatabase) -> Option { - match self { - Definition::Macro(it) => Some(it.to_nav(db)), - Definition::Field(it) => Some(it.to_nav(db)), - Definition::ModuleDef(it) => it.try_to_nav(db), - Definition::SelfType(it) => Some(it.to_nav(db)), - Definition::Local(it) => Some(it.to_nav(db)), - Definition::TypeParam(it) => Some(it.to_nav(db)), - } - } -} - -impl TryToNav for hir::ModuleDef { - fn try_to_nav(&self, db: &RootDatabase) -> Option { - let res = match self { - hir::ModuleDef::Module(it) => it.to_nav(db), - hir::ModuleDef::Function(it) => 
it.to_nav(db), - hir::ModuleDef::Adt(it) => it.to_nav(db), - hir::ModuleDef::EnumVariant(it) => it.to_nav(db), - hir::ModuleDef::Const(it) => it.to_nav(db), - hir::ModuleDef::Static(it) => it.to_nav(db), - hir::ModuleDef::Trait(it) => it.to_nav(db), - hir::ModuleDef::TypeAlias(it) => it.to_nav(db), - hir::ModuleDef::BuiltinType(_) => return None, - }; - Some(res) - } -} - -pub(crate) trait ToNavFromAst {} -impl ToNavFromAst for hir::Function {} -impl ToNavFromAst for hir::Const {} -impl ToNavFromAst for hir::Static {} -impl ToNavFromAst for hir::Struct {} -impl ToNavFromAst for hir::Enum {} -impl ToNavFromAst for hir::EnumVariant {} -impl ToNavFromAst for hir::Union {} -impl ToNavFromAst for hir::TypeAlias {} -impl ToNavFromAst for hir::Trait {} - -impl ToNav for D -where - D: HasSource + ToNavFromAst + Copy, - D::Ast: ast::DocCommentsOwner + ast::NameOwner + ShortLabel, -{ - fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { - let src = self.source(db); - let mut res = - NavigationTarget::from_named(db, src.as_ref().map(|it| it as &dyn ast::NameOwner)); - res.docs = src.value.doc_comment_text(); - res.description = src.value.short_label(); - res - } -} - -impl ToNav for hir::Module { - fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { - let src = self.definition_source(db); - let name = self.name(db).map(|it| it.to_string().into()).unwrap_or_default(); - let (syntax, focus) = match &src.value { - ModuleSource::SourceFile(node) => (node.syntax(), None), - ModuleSource::Module(node) => { - (node.syntax(), node.name().map(|it| it.syntax().text_range())) - } - }; - let frange = original_range(db, src.with_value(syntax)); - NavigationTarget::from_syntax(frange.file_id, name, focus, frange.range, syntax.kind()) - } -} - -impl ToNav for hir::ImplDef { - fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { - let src = self.source(db); - let derive_attr = self.is_builtin_derive(db); - let frange = if let Some(item) = &derive_attr { - original_range(db, 
item.syntax()) - } else { - original_range(db, src.as_ref().map(|it| it.syntax())) - }; - let focus_range = if derive_attr.is_some() { - None - } else { - src.value.self_ty().map(|ty| original_range(db, src.with_value(ty.syntax())).range) - }; - - NavigationTarget::from_syntax( - frange.file_id, - "impl".into(), - focus_range, - frange.range, - src.value.syntax().kind(), - ) - } -} - -impl ToNav for hir::Field { - fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { - let src = self.source(db); - - match &src.value { - FieldSource::Named(it) => { - let mut res = NavigationTarget::from_named(db, src.with_value(it)); - res.docs = it.doc_comment_text(); - res.description = it.short_label(); - res - } - FieldSource::Pos(it) => { - let frange = original_range(db, src.with_value(it.syntax())); - NavigationTarget::from_syntax( - frange.file_id, - "".into(), - None, - frange.range, - it.syntax().kind(), - ) - } - } - } -} - -impl ToNav for hir::MacroDef { - fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { - let src = self.source(db); - log::debug!("nav target {:#?}", src.value.syntax()); - let mut res = - NavigationTarget::from_named(db, src.as_ref().map(|it| it as &dyn ast::NameOwner)); - res.docs = src.value.doc_comment_text(); - res - } -} - -impl ToNav for hir::Adt { - fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { - match self { - hir::Adt::Struct(it) => it.to_nav(db), - hir::Adt::Union(it) => it.to_nav(db), - hir::Adt::Enum(it) => it.to_nav(db), - } - } -} - -impl ToNav for hir::AssocItem { - fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { - match self { - AssocItem::Function(it) => it.to_nav(db), - AssocItem::Const(it) => it.to_nav(db), - AssocItem::TypeAlias(it) => it.to_nav(db), - } - } -} - -impl ToNav for hir::Local { - fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { - let src = self.source(db); - let node = match &src.value { - Either::Left(bind_pat) => { - bind_pat.name().map_or_else(|| bind_pat.syntax().clone(), 
|it| it.syntax().clone()) - } - Either::Right(it) => it.syntax().clone(), - }; - let full_range = original_range(db, src.with_value(&node)); - let name = match self.name(db) { - Some(it) => it.to_string().into(), - None => "".into(), - }; - NavigationTarget { - file_id: full_range.file_id, - name, - kind: IDENT_PAT, - full_range: full_range.range, - focus_range: None, - container_name: None, - description: None, - docs: None, - } - } -} - -impl ToNav for hir::TypeParam { - fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { - let src = self.source(db); - let full_range = match &src.value { - Either::Left(it) => it.syntax().text_range(), - Either::Right(it) => it.syntax().text_range(), - }; - let focus_range = match &src.value { - Either::Left(_) => None, - Either::Right(it) => it.name().map(|it| it.syntax().text_range()), - }; - NavigationTarget { - file_id: src.file_id.original_file(db), - name: self.name(db).to_string().into(), - kind: TYPE_PARAM, - full_range, - focus_range, - container_name: None, - description: None, - docs: None, - } - } -} - -pub(crate) fn docs_from_symbol(db: &RootDatabase, symbol: &FileSymbol) -> Option { - let parse = db.parse(symbol.file_id); - let node = symbol.ptr.to_node(parse.tree().syntax()); - - match_ast! { - match node { - ast::Fn(it) => it.doc_comment_text(), - ast::Struct(it) => it.doc_comment_text(), - ast::Enum(it) => it.doc_comment_text(), - ast::Trait(it) => it.doc_comment_text(), - ast::Module(it) => it.doc_comment_text(), - ast::TypeAlias(it) => it.doc_comment_text(), - ast::Const(it) => it.doc_comment_text(), - ast::Static(it) => it.doc_comment_text(), - ast::RecordField(it) => it.doc_comment_text(), - ast::Variant(it) => it.doc_comment_text(), - ast::MacroCall(it) => it.doc_comment_text(), - _ => None, - } - } -} - -/// Get a description of a symbol. -/// -/// e.g. 
`struct Name`, `enum Name`, `fn Name` -pub(crate) fn description_from_symbol(db: &RootDatabase, symbol: &FileSymbol) -> Option { - let parse = db.parse(symbol.file_id); - let node = symbol.ptr.to_node(parse.tree().syntax()); - - match_ast! { - match node { - ast::Fn(it) => it.short_label(), - ast::Struct(it) => it.short_label(), - ast::Enum(it) => it.short_label(), - ast::Trait(it) => it.short_label(), - ast::Module(it) => it.short_label(), - ast::TypeAlias(it) => it.short_label(), - ast::Const(it) => it.short_label(), - ast::Static(it) => it.short_label(), - ast::RecordField(it) => it.short_label(), - ast::Variant(it) => it.short_label(), - _ => None, - } - } -} - -#[cfg(test)] -mod tests { - use expect::expect; - - use crate::{mock_analysis::single_file, Query}; - - #[test] - fn test_nav_for_symbol() { - let (analysis, _) = single_file( - r#" -enum FooInner { } -fn foo() { enum FooInner { } } -"#, - ); - - let navs = analysis.symbol_search(Query::new("FooInner".to_string())).unwrap(); - expect![[r#" - [ - NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 0..17, - focus_range: Some( - 5..13, - ), - name: "FooInner", - kind: ENUM, - container_name: None, - description: Some( - "enum FooInner", - ), - docs: None, - }, - NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 29..46, - focus_range: Some( - 34..42, - ), - name: "FooInner", - kind: ENUM, - container_name: Some( - "foo", - ), - description: Some( - "enum FooInner", - ), - docs: None, - }, - ] - "#]] - .assert_debug_eq(&navs); - } - - #[test] - fn test_world_symbols_are_case_sensitive() { - let (analysis, _) = single_file( - r#" -fn foo() {} -struct Foo; -"#, - ); - - let navs = analysis.symbol_search(Query::new("foo".to_string())).unwrap(); - assert_eq!(navs.len(), 2) - } -} diff --git a/crates/ra_ide/src/display/short_label.rs b/crates/ra_ide/src/display/short_label.rs deleted file mode 100644 index 0fdf8e9a58..0000000000 --- a/crates/ra_ide/src/display/short_label.rs +++ /dev/null @@ 
-1,101 +0,0 @@ -//! FIXME: write short doc here - -use ra_syntax::ast::{self, AstNode, NameOwner, VisibilityOwner}; -use stdx::format_to; - -pub(crate) trait ShortLabel { - fn short_label(&self) -> Option; -} - -impl ShortLabel for ast::Fn { - fn short_label(&self) -> Option { - Some(crate::display::function_declaration(self)) - } -} - -impl ShortLabel for ast::Struct { - fn short_label(&self) -> Option { - short_label_from_node(self, "struct ") - } -} - -impl ShortLabel for ast::Union { - fn short_label(&self) -> Option { - short_label_from_node(self, "union ") - } -} - -impl ShortLabel for ast::Enum { - fn short_label(&self) -> Option { - short_label_from_node(self, "enum ") - } -} - -impl ShortLabel for ast::Trait { - fn short_label(&self) -> Option { - if self.unsafe_token().is_some() { - short_label_from_node(self, "unsafe trait ") - } else { - short_label_from_node(self, "trait ") - } - } -} - -impl ShortLabel for ast::Module { - fn short_label(&self) -> Option { - short_label_from_node(self, "mod ") - } -} - -impl ShortLabel for ast::TypeAlias { - fn short_label(&self) -> Option { - short_label_from_node(self, "type ") - } -} - -impl ShortLabel for ast::Const { - fn short_label(&self) -> Option { - short_label_from_ty(self, self.ty(), "const ") - } -} - -impl ShortLabel for ast::Static { - fn short_label(&self) -> Option { - short_label_from_ty(self, self.ty(), "static ") - } -} - -impl ShortLabel for ast::RecordField { - fn short_label(&self) -> Option { - short_label_from_ty(self, self.ty(), "") - } -} - -impl ShortLabel for ast::Variant { - fn short_label(&self) -> Option { - Some(self.name()?.text().to_string()) - } -} - -fn short_label_from_ty(node: &T, ty: Option, prefix: &str) -> Option -where - T: NameOwner + VisibilityOwner, -{ - let mut buf = short_label_from_node(node, prefix)?; - - if let Some(type_ref) = ty { - format_to!(buf, ": {}", type_ref.syntax()); - } - - Some(buf) -} - -fn short_label_from_node(node: &T, label: &str) -> Option -where - 
T: NameOwner + VisibilityOwner, -{ - let mut buf = node.visibility().map(|v| format!("{} ", v.syntax())).unwrap_or_default(); - buf.push_str(label); - buf.push_str(node.name()?.text().as_str()); - Some(buf) -} diff --git a/crates/ra_ide/src/expand_macro.rs b/crates/ra_ide/src/expand_macro.rs deleted file mode 100644 index 043515f541..0000000000 --- a/crates/ra_ide/src/expand_macro.rs +++ /dev/null @@ -1,283 +0,0 @@ -use hir::Semantics; -use ra_ide_db::RootDatabase; -use ra_syntax::{ - algo::{find_node_at_offset, SyntaxRewriter}, - ast, AstNode, NodeOrToken, SyntaxKind, - SyntaxKind::*, - SyntaxNode, WalkEvent, T, -}; - -use crate::FilePosition; - -pub struct ExpandedMacro { - pub name: String, - pub expansion: String, -} - -// Feature: Expand Macro Recursively -// -// Shows the full macro expansion of the macro at current cursor. -// -// |=== -// | Editor | Action Name -// -// | VS Code | **Rust Analyzer: Expand macro recursively** -// |=== -pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option { - let sema = Semantics::new(db); - let file = sema.parse(position.file_id); - let name_ref = find_node_at_offset::(file.syntax(), position.offset)?; - let mac = name_ref.syntax().ancestors().find_map(ast::MacroCall::cast)?; - - let expanded = expand_macro_recur(&sema, &mac)?; - - // FIXME: - // macro expansion may lose all white space information - // But we hope someday we can use ra_fmt for that - let expansion = insert_whitespaces(expanded); - Some(ExpandedMacro { name: name_ref.text().to_string(), expansion }) -} - -fn expand_macro_recur( - sema: &Semantics, - macro_call: &ast::MacroCall, -) -> Option { - let mut expanded = sema.expand(macro_call)?; - - let children = expanded.descendants().filter_map(ast::MacroCall::cast); - let mut rewriter = SyntaxRewriter::default(); - - for child in children.into_iter() { - if let Some(new_node) = expand_macro_recur(sema, &child) { - // Replace the whole node if it is root - // `replace_descendants` will 
not replace the parent node - // but `SyntaxNode::descendants include itself - if expanded == *child.syntax() { - expanded = new_node; - } else { - rewriter.replace(child.syntax(), &new_node) - } - } - } - - let res = rewriter.rewrite(&expanded); - Some(res) -} - -// FIXME: It would also be cool to share logic here and in the mbe tests, -// which are pretty unreadable at the moment. -fn insert_whitespaces(syn: SyntaxNode) -> String { - let mut res = String::new(); - let mut token_iter = syn - .preorder_with_tokens() - .filter_map(|event| { - if let WalkEvent::Enter(NodeOrToken::Token(token)) = event { - Some(token) - } else { - None - } - }) - .peekable(); - - let mut indent = 0; - let mut last: Option = None; - - while let Some(token) = token_iter.next() { - let mut is_next = |f: fn(SyntaxKind) -> bool, default| -> bool { - token_iter.peek().map(|it| f(it.kind())).unwrap_or(default) - }; - let is_last = - |f: fn(SyntaxKind) -> bool, default| -> bool { last.map(f).unwrap_or(default) }; - - res += &match token.kind() { - k if is_text(k) && is_next(|it| !it.is_punct(), true) => token.text().to_string() + " ", - L_CURLY if is_next(|it| it != R_CURLY, true) => { - indent += 1; - let leading_space = if is_last(is_text, false) { " " } else { "" }; - format!("{}{{\n{}", leading_space, " ".repeat(indent)) - } - R_CURLY if is_last(|it| it != L_CURLY, true) => { - indent = indent.saturating_sub(1); - format!("\n{}}}", " ".repeat(indent)) - } - R_CURLY => format!("}}\n{}", " ".repeat(indent)), - T![;] => format!(";\n{}", " ".repeat(indent)), - T![->] => " -> ".to_string(), - T![=] => " = ".to_string(), - T![=>] => " => ".to_string(), - _ => token.text().to_string(), - }; - - last = Some(token.kind()); - } - - return res; - - fn is_text(k: SyntaxKind) -> bool { - k.is_keyword() || k.is_literal() || k == IDENT - } -} - -#[cfg(test)] -mod tests { - use expect::{expect, Expect}; - - use crate::mock_analysis::analysis_and_position; - - fn check(ra_fixture: &str, expect: Expect) { 
- let (analysis, pos) = analysis_and_position(ra_fixture); - let expansion = analysis.expand_macro(pos).unwrap().unwrap(); - let actual = format!("{}\n{}", expansion.name, expansion.expansion); - expect.assert_eq(&actual); - } - - #[test] - fn macro_expand_recursive_expansion() { - check( - r#" -macro_rules! bar { - () => { fn b() {} } -} -macro_rules! foo { - () => { bar!(); } -} -macro_rules! baz { - () => { foo!(); } -} -f<|>oo!(); -"#, - expect![[r#" - foo - fn b(){} - "#]], - ); - } - - #[test] - fn macro_expand_multiple_lines() { - check( - r#" -macro_rules! foo { - () => { - fn some_thing() -> u32 { - let a = 0; - a + 10 - } - } -} -f<|>oo!(); - "#, - expect![[r#" - foo - fn some_thing() -> u32 { - let a = 0; - a+10 - }"#]], - ); - } - - #[test] - fn macro_expand_match_ast() { - check( - r#" -macro_rules! match_ast { - (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) }; - (match ($node:expr) { - $( ast::$ast:ident($it:ident) => $res:block, )* - _ => $catch_all:expr $(,)? - }) => {{ - $( if let Some($it) = ast::$ast::cast($node.clone()) $res else )* - { $catch_all } - }}; -} - -fn main() { - mat<|>ch_ast! { - match container { - ast::TraitDef(it) => {}, - ast::ImplDef(it) => {}, - _ => { continue }, - } - } -} -"#, - expect![[r#" - match_ast - { - if let Some(it) = ast::TraitDef::cast(container.clone()){} - else if let Some(it) = ast::ImplDef::cast(container.clone()){} - else { - { - continue - } - } - }"#]], - ); - } - - #[test] - fn macro_expand_match_ast_inside_let_statement() { - check( - r#" -macro_rules! match_ast { - (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) }; - (match ($node:expr) {}) => {{}}; -} - -fn main() { - let p = f(|it| { - let res = mat<|>ch_ast! { match c {}}; - Some(res) - })?; -} -"#, - expect![[r#" - match_ast - {} - "#]], - ); - } - - #[test] - fn macro_expand_inner_macro_fail_to_expand() { - check( - r#" -macro_rules! bar { - (BAD) => {}; -} -macro_rules! 
foo { - () => {bar!()}; -} - -fn main() { - let res = fo<|>o!(); -} -"#, - expect![[r#" - foo - "#]], - ); - } - - #[test] - fn macro_expand_with_dollar_crate() { - check( - r#" -#[macro_export] -macro_rules! bar { - () => {0}; -} -macro_rules! foo { - () => {$crate::bar!()}; -} - -fn main() { - let res = fo<|>o!(); -} -"#, - expect![[r#" - foo - 0 "#]], - ); - } -} diff --git a/crates/ra_ide/src/extend_selection.rs b/crates/ra_ide/src/extend_selection.rs deleted file mode 100644 index 7230a0ff9e..0000000000 --- a/crates/ra_ide/src/extend_selection.rs +++ /dev/null @@ -1,654 +0,0 @@ -use std::iter::successors; - -use hir::Semantics; -use ra_ide_db::RootDatabase; -use ra_syntax::{ - algo::{self, find_covering_element, skip_trivia_token}, - ast::{self, AstNode, AstToken}, - Direction, NodeOrToken, - SyntaxKind::{self, *}, - SyntaxNode, SyntaxToken, TextRange, TextSize, TokenAtOffset, T, -}; - -use crate::FileRange; - -// Feature: Extend Selection -// -// Extends the current selection to the encompassing syntactic construct -// (expression, statement, item, module, etc). It works with multiple cursors. 
-// -// |=== -// | Editor | Shortcut -// -// | VS Code | kbd:[Ctrl+Shift+→] -// |=== -pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange { - let sema = Semantics::new(db); - let src = sema.parse(frange.file_id); - try_extend_selection(&sema, src.syntax(), frange).unwrap_or(frange.range) -} - -fn try_extend_selection( - sema: &Semantics, - root: &SyntaxNode, - frange: FileRange, -) -> Option { - let range = frange.range; - - let string_kinds = [COMMENT, STRING, RAW_STRING, BYTE_STRING, RAW_BYTE_STRING]; - let list_kinds = [ - RECORD_PAT_FIELD_LIST, - MATCH_ARM_LIST, - RECORD_FIELD_LIST, - TUPLE_FIELD_LIST, - RECORD_EXPR_FIELD_LIST, - VARIANT_LIST, - USE_TREE_LIST, - GENERIC_PARAM_LIST, - GENERIC_ARG_LIST, - TYPE_BOUND_LIST, - PARAM_LIST, - ARG_LIST, - ARRAY_EXPR, - TUPLE_EXPR, - TUPLE_TYPE, - TUPLE_PAT, - WHERE_CLAUSE, - ]; - - if range.is_empty() { - let offset = range.start(); - let mut leaves = root.token_at_offset(offset); - if leaves.clone().all(|it| it.kind() == WHITESPACE) { - return Some(extend_ws(root, leaves.next()?, offset)); - } - let leaf_range = match leaves { - TokenAtOffset::None => return None, - TokenAtOffset::Single(l) => { - if string_kinds.contains(&l.kind()) { - extend_single_word_in_comment_or_string(&l, offset) - .unwrap_or_else(|| l.text_range()) - } else { - l.text_range() - } - } - TokenAtOffset::Between(l, r) => pick_best(l, r).text_range(), - }; - return Some(leaf_range); - }; - let node = match find_covering_element(root, range) { - NodeOrToken::Token(token) => { - if token.text_range() != range { - return Some(token.text_range()); - } - if let Some(comment) = ast::Comment::cast(token.clone()) { - if let Some(range) = extend_comments(comment) { - return Some(range); - } - } - token.parent() - } - NodeOrToken::Node(node) => node, - }; - - // if we are in single token_tree, we maybe live in macro or attr - if node.kind() == TOKEN_TREE { - if let Some(macro_call) = 
node.ancestors().find_map(ast::MacroCall::cast) { - if let Some(range) = extend_tokens_from_range(sema, macro_call, range) { - return Some(range); - } - } - } - - if node.text_range() != range { - return Some(node.text_range()); - } - - let node = shallowest_node(&node); - - if node.parent().map(|n| list_kinds.contains(&n.kind())) == Some(true) { - if let Some(range) = extend_list_item(&node) { - return Some(range); - } - } - - node.parent().map(|it| it.text_range()) -} - -fn extend_tokens_from_range( - sema: &Semantics, - macro_call: ast::MacroCall, - original_range: TextRange, -) -> Option { - let src = find_covering_element(¯o_call.syntax(), original_range); - let (first_token, last_token) = match src { - NodeOrToken::Node(it) => (it.first_token()?, it.last_token()?), - NodeOrToken::Token(it) => (it.clone(), it), - }; - - let mut first_token = skip_trivia_token(first_token, Direction::Next)?; - let mut last_token = skip_trivia_token(last_token, Direction::Prev)?; - - while !original_range.contains_range(first_token.text_range()) { - first_token = skip_trivia_token(first_token.next_token()?, Direction::Next)?; - } - while !original_range.contains_range(last_token.text_range()) { - last_token = skip_trivia_token(last_token.prev_token()?, Direction::Prev)?; - } - - // compute original mapped token range - let extended = { - let fst_expanded = sema.descend_into_macros(first_token.clone()); - let lst_expanded = sema.descend_into_macros(last_token.clone()); - let mut lca = algo::least_common_ancestor(&fst_expanded.parent(), &lst_expanded.parent())?; - lca = shallowest_node(&lca); - if lca.first_token() == Some(fst_expanded) && lca.last_token() == Some(lst_expanded) { - lca = lca.parent()?; - } - lca - }; - - // Compute parent node range - let validate = |token: &SyntaxToken| { - let expanded = sema.descend_into_macros(token.clone()); - algo::least_common_ancestor(&extended, &expanded.parent()).as_ref() == Some(&extended) - }; - - // Find the first and last text range 
under expanded parent - let first = successors(Some(first_token), |token| { - let token = token.prev_token()?; - skip_trivia_token(token, Direction::Prev) - }) - .take_while(validate) - .last()?; - - let last = successors(Some(last_token), |token| { - let token = token.next_token()?; - skip_trivia_token(token, Direction::Next) - }) - .take_while(validate) - .last()?; - - let range = first.text_range().cover(last.text_range()); - if range.contains_range(original_range) && original_range != range { - Some(range) - } else { - None - } -} - -/// Find the shallowest node with same range, which allows us to traverse siblings. -fn shallowest_node(node: &SyntaxNode) -> SyntaxNode { - node.ancestors().take_while(|n| n.text_range() == node.text_range()).last().unwrap() -} - -fn extend_single_word_in_comment_or_string( - leaf: &SyntaxToken, - offset: TextSize, -) -> Option { - let text: &str = leaf.text(); - let cursor_position: u32 = (offset - leaf.text_range().start()).into(); - - let (before, after) = text.split_at(cursor_position as usize); - - fn non_word_char(c: char) -> bool { - !(c.is_alphanumeric() || c == '_') - } - - let start_idx = before.rfind(non_word_char)? 
as u32; - let end_idx = after.find(non_word_char).unwrap_or_else(|| after.len()) as u32; - - let from: TextSize = (start_idx + 1).into(); - let to: TextSize = (cursor_position + end_idx).into(); - - let range = TextRange::new(from, to); - if range.is_empty() { - None - } else { - Some(range + leaf.text_range().start()) - } -} - -fn extend_ws(root: &SyntaxNode, ws: SyntaxToken, offset: TextSize) -> TextRange { - let ws_text = ws.text(); - let suffix = TextRange::new(offset, ws.text_range().end()) - ws.text_range().start(); - let prefix = TextRange::new(ws.text_range().start(), offset) - ws.text_range().start(); - let ws_suffix = &ws_text.as_str()[suffix]; - let ws_prefix = &ws_text.as_str()[prefix]; - if ws_text.contains('\n') && !ws_suffix.contains('\n') { - if let Some(node) = ws.next_sibling_or_token() { - let start = match ws_prefix.rfind('\n') { - Some(idx) => ws.text_range().start() + TextSize::from((idx + 1) as u32), - None => node.text_range().start(), - }; - let end = if root.text().char_at(node.text_range().end()) == Some('\n') { - node.text_range().end() + TextSize::of('\n') - } else { - node.text_range().end() - }; - return TextRange::new(start, end); - } - } - ws.text_range() -} - -fn pick_best(l: SyntaxToken, r: SyntaxToken) -> SyntaxToken { - return if priority(&r) > priority(&l) { r } else { l }; - fn priority(n: &SyntaxToken) -> usize { - match n.kind() { - WHITESPACE => 0, - IDENT | T![self] | T![super] | T![crate] | LIFETIME => 2, - _ => 1, - } - } -} - -/// Extend list item selection to include nearby delimiter and whitespace. 
-fn extend_list_item(node: &SyntaxNode) -> Option { - fn is_single_line_ws(node: &SyntaxToken) -> bool { - node.kind() == WHITESPACE && !node.text().contains('\n') - } - - fn nearby_delimiter( - delimiter_kind: SyntaxKind, - node: &SyntaxNode, - dir: Direction, - ) -> Option { - node.siblings_with_tokens(dir) - .skip(1) - .skip_while(|node| match node { - NodeOrToken::Node(_) => false, - NodeOrToken::Token(it) => is_single_line_ws(it), - }) - .next() - .and_then(|it| it.into_token()) - .filter(|node| node.kind() == delimiter_kind) - } - - let delimiter = match node.kind() { - TYPE_BOUND => T![+], - _ => T![,], - }; - - if let Some(delimiter_node) = nearby_delimiter(delimiter, node, Direction::Next) { - // Include any following whitespace when delimiter is after list item. - let final_node = delimiter_node - .next_sibling_or_token() - .and_then(|it| it.into_token()) - .filter(|node| is_single_line_ws(node)) - .unwrap_or(delimiter_node); - - return Some(TextRange::new(node.text_range().start(), final_node.text_range().end())); - } - if let Some(delimiter_node) = nearby_delimiter(delimiter, node, Direction::Prev) { - return Some(TextRange::new(delimiter_node.text_range().start(), node.text_range().end())); - } - - None -} - -fn extend_comments(comment: ast::Comment) -> Option { - let prev = adj_comments(&comment, Direction::Prev); - let next = adj_comments(&comment, Direction::Next); - if prev != next { - Some(TextRange::new(prev.syntax().text_range().start(), next.syntax().text_range().end())) - } else { - None - } -} - -fn adj_comments(comment: &ast::Comment, dir: Direction) -> ast::Comment { - let mut res = comment.clone(); - for element in comment.syntax().siblings_with_tokens(dir) { - let token = match element.as_token() { - None => break, - Some(token) => token, - }; - if let Some(c) = ast::Comment::cast(token.clone()) { - res = c - } else if token.kind() != WHITESPACE || token.text().contains("\n\n") { - break; - } - } - res -} - -#[cfg(test)] -mod tests { - 
use crate::mock_analysis::analysis_and_position; - - use super::*; - - fn do_check(before: &str, afters: &[&str]) { - let (analysis, position) = analysis_and_position(&before); - let before = analysis.file_text(position.file_id).unwrap(); - let range = TextRange::empty(position.offset); - let mut frange = FileRange { file_id: position.file_id, range }; - - for &after in afters { - frange.range = analysis.extend_selection(frange).unwrap(); - let actual = &before[frange.range]; - assert_eq!(after, actual); - } - } - - #[test] - fn test_extend_selection_arith() { - do_check(r#"fn foo() { <|>1 + 1 }"#, &["1", "1 + 1", "{ 1 + 1 }"]); - } - - #[test] - fn test_extend_selection_list() { - do_check(r#"fn foo(<|>x: i32) {}"#, &["x", "x: i32"]); - do_check(r#"fn foo(<|>x: i32, y: i32) {}"#, &["x", "x: i32", "x: i32, "]); - do_check(r#"fn foo(<|>x: i32,y: i32) {}"#, &["x", "x: i32", "x: i32,", "(x: i32,y: i32)"]); - do_check(r#"fn foo(x: i32, <|>y: i32) {}"#, &["y", "y: i32", ", y: i32"]); - do_check(r#"fn foo(x: i32, <|>y: i32, ) {}"#, &["y", "y: i32", "y: i32, "]); - do_check(r#"fn foo(x: i32,<|>y: i32) {}"#, &["y", "y: i32", ",y: i32"]); - - do_check(r#"const FOO: [usize; 2] = [ 22<|> , 33];"#, &["22", "22 , "]); - do_check(r#"const FOO: [usize; 2] = [ 22 , 33<|>];"#, &["33", ", 33"]); - do_check(r#"const FOO: [usize; 2] = [ 22 , 33<|> ,];"#, &["33", "33 ,", "[ 22 , 33 ,]"]); - - do_check(r#"fn main() { (1, 2<|>) }"#, &["2", ", 2", "(1, 2)"]); - - do_check( - r#" -const FOO: [usize; 2] = [ - 22, - <|>33, -]"#, - &["33", "33,"], - ); - - do_check( - r#" -const FOO: [usize; 2] = [ - 22 - , 33<|>, -]"#, - &["33", "33,"], - ); - } - - #[test] - fn test_extend_selection_start_of_the_line() { - do_check( - r#" -impl S { -<|> fn foo() { - - } -}"#, - &[" fn foo() {\n\n }\n"], - ); - } - - #[test] - fn test_extend_selection_doc_comments() { - do_check( - r#" -struct A; - -/// bla -/// bla -struct B { - <|> -} - "#, - &["\n \n", "{\n \n}", "/// bla\n/// bla\nstruct B {\n \n}"], - ) 
- } - - #[test] - fn test_extend_selection_comments() { - do_check( - r#" -fn bar(){} - -// fn foo() { -// 1 + <|>1 -// } - -// fn foo(){} - "#, - &["1", "// 1 + 1", "// fn foo() {\n// 1 + 1\n// }"], - ); - - do_check( - r#" -// #[derive(Debug, Clone, Copy, PartialEq, Eq)] -// pub enum Direction { -// <|> Next, -// Prev -// } -"#, - &[ - "// Next,", - "// #[derive(Debug, Clone, Copy, PartialEq, Eq)]\n// pub enum Direction {\n// Next,\n// Prev\n// }", - ], - ); - - do_check( - r#" -/* -foo -_bar1<|>*/ -"#, - &["_bar1", "/*\nfoo\n_bar1*/"], - ); - - do_check(r#"//!<|>foo_2 bar"#, &["foo_2", "//!foo_2 bar"]); - - do_check(r#"/<|>/foo bar"#, &["//foo bar"]); - } - - #[test] - fn test_extend_selection_prefer_idents() { - do_check( - r#" -fn main() { foo<|>+bar;} -"#, - &["foo", "foo+bar"], - ); - do_check( - r#" -fn main() { foo+<|>bar;} -"#, - &["bar", "foo+bar"], - ); - } - - #[test] - fn test_extend_selection_prefer_lifetimes() { - do_check(r#"fn foo<<|>'a>() {}"#, &["'a", "<'a>"]); - do_check(r#"fn foo<'a<|>>() {}"#, &["'a", "<'a>"]); - } - - #[test] - fn test_extend_selection_select_first_word() { - do_check(r#"// foo bar b<|>az quxx"#, &["baz", "// foo bar baz quxx"]); - do_check( - r#" -impl S { -fn foo() { -// hel<|>lo world -} -} -"#, - &["hello", "// hello world"], - ); - } - - #[test] - fn test_extend_selection_string() { - do_check( - r#" -fn bar(){} - -" fn f<|>oo() {" -"#, - &["foo", "\" fn foo() {\""], - ); - } - - #[test] - fn test_extend_trait_bounds_list_in_where_clause() { - do_check( - r#" -fn foo() - where - R: req::Request + 'static, - R::Params: DeserializeOwned<|> + panic::UnwindSafe + 'static, - R::Result: Serialize + 'static, -"#, - &[ - "DeserializeOwned", - "DeserializeOwned + ", - "DeserializeOwned + panic::UnwindSafe + 'static", - "R::Params: DeserializeOwned + panic::UnwindSafe + 'static", - "R::Params: DeserializeOwned + panic::UnwindSafe + 'static,", - ], - ); - do_check(r#"fn foo() where T: <|>Copy"#, &["Copy"]); - do_check(r#"fn foo() 
where T: <|>Copy + Display"#, &["Copy", "Copy + "]); - do_check(r#"fn foo() where T: <|>Copy +Display"#, &["Copy", "Copy +"]); - do_check(r#"fn foo() where T: <|>Copy+Display"#, &["Copy", "Copy+"]); - do_check(r#"fn foo() where T: Copy + <|>Display"#, &["Display", "+ Display"]); - do_check(r#"fn foo() where T: Copy + <|>Display + Sync"#, &["Display", "Display + "]); - do_check(r#"fn foo() where T: Copy +<|>Display"#, &["Display", "+Display"]); - } - - #[test] - fn test_extend_trait_bounds_list_inline() { - do_check(r#"fn fooCopy>() {}"#, &["Copy"]); - do_check(r#"fn fooCopy + Display>() {}"#, &["Copy", "Copy + "]); - do_check(r#"fn fooCopy +Display>() {}"#, &["Copy", "Copy +"]); - do_check(r#"fn fooCopy+Display>() {}"#, &["Copy", "Copy+"]); - do_check(r#"fn fooDisplay>() {}"#, &["Display", "+ Display"]); - do_check(r#"fn fooDisplay + Sync>() {}"#, &["Display", "Display + "]); - do_check(r#"fn fooDisplay>() {}"#, &["Display", "+Display"]); - do_check( - r#"fn foo + Display, U: Copy>() {}"#, - &[ - "Copy", - "Copy + ", - "Copy + Display", - "T: Copy + Display", - "T: Copy + Display, ", - "", - ], - ); - } - - #[test] - fn test_extend_selection_on_tuple_in_type() { - do_check( - r#"fn main() { let _: (krate, <|>_crate_def_map, module_id) = (); }"#, - &["_crate_def_map", "_crate_def_map, ", "(krate, _crate_def_map, module_id)"], - ); - // white space variations - do_check( - r#"fn main() { let _: (krate,<|>_crate_def_map,module_id) = (); }"#, - &["_crate_def_map", "_crate_def_map,", "(krate,_crate_def_map,module_id)"], - ); - do_check( - r#" -fn main() { let _: ( - krate, - _crate<|>_def_map, - module_id -) = (); }"#, - &[ - "_crate_def_map", - "_crate_def_map,", - "(\n krate,\n _crate_def_map,\n module_id\n)", - ], - ); - } - - #[test] - fn test_extend_selection_on_tuple_in_rvalue() { - do_check( - r#"fn main() { let var = (krate, _crate_def_map<|>, module_id); }"#, - &["_crate_def_map", "_crate_def_map, ", "(krate, _crate_def_map, module_id)"], - ); - // white space 
variations - do_check( - r#"fn main() { let var = (krate,_crate<|>_def_map,module_id); }"#, - &["_crate_def_map", "_crate_def_map,", "(krate,_crate_def_map,module_id)"], - ); - do_check( - r#" -fn main() { let var = ( - krate, - _crate_def_map<|>, - module_id -); }"#, - &[ - "_crate_def_map", - "_crate_def_map,", - "(\n krate,\n _crate_def_map,\n module_id\n)", - ], - ); - } - - #[test] - fn test_extend_selection_on_tuple_pat() { - do_check( - r#"fn main() { let (krate, _crate_def_map<|>, module_id) = var; }"#, - &["_crate_def_map", "_crate_def_map, ", "(krate, _crate_def_map, module_id)"], - ); - // white space variations - do_check( - r#"fn main() { let (krate,_crate<|>_def_map,module_id) = var; }"#, - &["_crate_def_map", "_crate_def_map,", "(krate,_crate_def_map,module_id)"], - ); - do_check( - r#" -fn main() { let ( - krate, - _crate_def_map<|>, - module_id -) = var; }"#, - &[ - "_crate_def_map", - "_crate_def_map,", - "(\n krate,\n _crate_def_map,\n module_id\n)", - ], - ); - } - - #[test] - fn extend_selection_inside_macros() { - do_check( - r#"macro_rules! foo { ($item:item) => {$item} } - foo!{fn hello(na<|>me:usize){}}"#, - &[ - "name", - "name:usize", - "(name:usize)", - "fn hello(name:usize){}", - "{fn hello(name:usize){}}", - "foo!{fn hello(name:usize){}}", - ], - ); - } - - #[test] - fn extend_selection_inside_recur_macros() { - do_check( - r#" macro_rules! foo2 { ($item:item) => {$item} } - macro_rules! 
foo { ($item:item) => {foo2!($item);} } - foo!{fn hello(na<|>me:usize){}}"#, - &[ - "name", - "name:usize", - "(name:usize)", - "fn hello(name:usize){}", - "{fn hello(name:usize){}}", - "foo!{fn hello(name:usize){}}", - ], - ); - } -} diff --git a/crates/ra_ide/src/file_structure.rs b/crates/ra_ide/src/file_structure.rs deleted file mode 100644 index 87cab45037..0000000000 --- a/crates/ra_ide/src/file_structure.rs +++ /dev/null @@ -1,431 +0,0 @@ -use ra_syntax::{ - ast::{self, AttrsOwner, GenericParamsOwner, NameOwner}, - match_ast, AstNode, SourceFile, SyntaxKind, SyntaxNode, TextRange, WalkEvent, -}; - -#[derive(Debug, Clone)] -pub struct StructureNode { - pub parent: Option, - pub label: String, - pub navigation_range: TextRange, - pub node_range: TextRange, - pub kind: SyntaxKind, - pub detail: Option, - pub deprecated: bool, -} - -// Feature: File Structure -// -// Provides a tree of the symbols defined in the file. Can be used to -// -// * fuzzy search symbol in a file (super useful) -// * draw breadcrumbs to describe the context around the cursor -// * draw outline of the file -// -// |=== -// | Editor | Shortcut -// -// | VS Code | kbd:[Ctrl+Shift+O] -// |=== -pub fn file_structure(file: &SourceFile) -> Vec { - let mut res = Vec::new(); - let mut stack = Vec::new(); - - for event in file.syntax().preorder() { - match event { - WalkEvent::Enter(node) => { - if let Some(mut symbol) = structure_node(&node) { - symbol.parent = stack.last().copied(); - stack.push(res.len()); - res.push(symbol); - } - } - WalkEvent::Leave(node) => { - if structure_node(&node).is_some() { - stack.pop().unwrap(); - } - } - } - } - res -} - -fn structure_node(node: &SyntaxNode) -> Option { - fn decl(node: N) -> Option { - decl_with_detail(&node, None) - } - - fn decl_with_type_ref( - node: &N, - type_ref: Option, - ) -> Option { - let detail = type_ref.map(|type_ref| { - let mut detail = String::new(); - collapse_ws(type_ref.syntax(), &mut detail); - detail - }); - 
decl_with_detail(node, detail) - } - - fn decl_with_detail( - node: &N, - detail: Option, - ) -> Option { - let name = node.name()?; - - Some(StructureNode { - parent: None, - label: name.text().to_string(), - navigation_range: name.syntax().text_range(), - node_range: node.syntax().text_range(), - kind: node.syntax().kind(), - detail, - deprecated: node.attrs().filter_map(|x| x.simple_name()).any(|x| x == "deprecated"), - }) - } - - fn collapse_ws(node: &SyntaxNode, output: &mut String) { - let mut can_insert_ws = false; - node.text().for_each_chunk(|chunk| { - for line in chunk.lines() { - let line = line.trim(); - if line.is_empty() { - if can_insert_ws { - output.push(' '); - can_insert_ws = false; - } - } else { - output.push_str(line); - can_insert_ws = true; - } - } - }) - } - - match_ast! { - match node { - ast::Fn(it) => { - let mut detail = String::from("fn"); - if let Some(type_param_list) = it.generic_param_list() { - collapse_ws(type_param_list.syntax(), &mut detail); - } - if let Some(param_list) = it.param_list() { - collapse_ws(param_list.syntax(), &mut detail); - } - if let Some(ret_type) = it.ret_type() { - detail.push_str(" "); - collapse_ws(ret_type.syntax(), &mut detail); - } - - decl_with_detail(&it, Some(detail)) - }, - ast::Struct(it) => decl(it), - ast::Union(it) => decl(it), - ast::Enum(it) => decl(it), - ast::Variant(it) => decl(it), - ast::Trait(it) => decl(it), - ast::Module(it) => decl(it), - ast::TypeAlias(it) => decl_with_type_ref(&it, it.ty()), - ast::RecordField(it) => decl_with_type_ref(&it, it.ty()), - ast::Const(it) => decl_with_type_ref(&it, it.ty()), - ast::Static(it) => decl_with_type_ref(&it, it.ty()), - ast::Impl(it) => { - let target_type = it.self_ty()?; - let target_trait = it.trait_(); - let label = match target_trait { - None => format!("impl {}", target_type.syntax().text()), - Some(t) => { - format!("impl {} for {}", t.syntax().text(), target_type.syntax().text(),) - } - }; - - let node = StructureNode { - parent: 
None, - label, - navigation_range: target_type.syntax().text_range(), - node_range: it.syntax().text_range(), - kind: it.syntax().kind(), - detail: None, - deprecated: false, - }; - Some(node) - }, - ast::MacroCall(it) => { - match it.path().and_then(|it| it.segment()).and_then(|it| it.name_ref()) { - Some(path_segment) if path_segment.text() == "macro_rules" - => decl(it), - _ => None, - } - }, - _ => None, - } - } -} - -#[cfg(test)] -mod tests { - use expect::{expect, Expect}; - - use super::*; - - fn check(ra_fixture: &str, expect: Expect) { - let file = SourceFile::parse(ra_fixture).ok().unwrap(); - let structure = file_structure(&file); - expect.assert_debug_eq(&structure) - } - - #[test] - fn test_file_structure() { - check( - r#" -struct Foo { - x: i32 -} - -mod m { - fn bar1() {} - fn bar2(t: T) -> T {} - fn bar3(a: A, - b: B) -> Vec< - u32 - > {} -} - -enum E { X, Y(i32) } -type T = (); -static S: i32 = 92; -const C: i32 = 92; - -impl E {} - -impl fmt::Debug for E {} - -macro_rules! mc { - () => {} -} - -#[macro_export] -macro_rules! mcexp { - () => {} -} - -/// Doc comment -macro_rules! 
mcexp { - () => {} -} - -#[deprecated] -fn obsolete() {} - -#[deprecated(note = "for awhile")] -fn very_obsolete() {} -"#, - expect![[r#" - [ - StructureNode { - parent: None, - label: "Foo", - navigation_range: 8..11, - node_range: 1..26, - kind: STRUCT, - detail: None, - deprecated: false, - }, - StructureNode { - parent: Some( - 0, - ), - label: "x", - navigation_range: 18..19, - node_range: 18..24, - kind: RECORD_FIELD, - detail: Some( - "i32", - ), - deprecated: false, - }, - StructureNode { - parent: None, - label: "m", - navigation_range: 32..33, - node_range: 28..158, - kind: MODULE, - detail: None, - deprecated: false, - }, - StructureNode { - parent: Some( - 2, - ), - label: "bar1", - navigation_range: 43..47, - node_range: 40..52, - kind: FN, - detail: Some( - "fn()", - ), - deprecated: false, - }, - StructureNode { - parent: Some( - 2, - ), - label: "bar2", - navigation_range: 60..64, - node_range: 57..81, - kind: FN, - detail: Some( - "fn(t: T) -> T", - ), - deprecated: false, - }, - StructureNode { - parent: Some( - 2, - ), - label: "bar3", - navigation_range: 89..93, - node_range: 86..156, - kind: FN, - detail: Some( - "fn(a: A, b: B) -> Vec< u32 >", - ), - deprecated: false, - }, - StructureNode { - parent: None, - label: "E", - navigation_range: 165..166, - node_range: 160..180, - kind: ENUM, - detail: None, - deprecated: false, - }, - StructureNode { - parent: Some( - 6, - ), - label: "X", - navigation_range: 169..170, - node_range: 169..170, - kind: VARIANT, - detail: None, - deprecated: false, - }, - StructureNode { - parent: Some( - 6, - ), - label: "Y", - navigation_range: 172..173, - node_range: 172..178, - kind: VARIANT, - detail: None, - deprecated: false, - }, - StructureNode { - parent: None, - label: "T", - navigation_range: 186..187, - node_range: 181..193, - kind: TYPE_ALIAS, - detail: Some( - "()", - ), - deprecated: false, - }, - StructureNode { - parent: None, - label: "S", - navigation_range: 201..202, - node_range: 194..213, - 
kind: STATIC, - detail: Some( - "i32", - ), - deprecated: false, - }, - StructureNode { - parent: None, - label: "C", - navigation_range: 220..221, - node_range: 214..232, - kind: CONST, - detail: Some( - "i32", - ), - deprecated: false, - }, - StructureNode { - parent: None, - label: "impl E", - navigation_range: 239..240, - node_range: 234..243, - kind: IMPL, - detail: None, - deprecated: false, - }, - StructureNode { - parent: None, - label: "impl fmt::Debug for E", - navigation_range: 265..266, - node_range: 245..269, - kind: IMPL, - detail: None, - deprecated: false, - }, - StructureNode { - parent: None, - label: "mc", - navigation_range: 284..286, - node_range: 271..303, - kind: MACRO_CALL, - detail: None, - deprecated: false, - }, - StructureNode { - parent: None, - label: "mcexp", - navigation_range: 334..339, - node_range: 305..356, - kind: MACRO_CALL, - detail: None, - deprecated: false, - }, - StructureNode { - parent: None, - label: "mcexp", - navigation_range: 387..392, - node_range: 358..409, - kind: MACRO_CALL, - detail: None, - deprecated: false, - }, - StructureNode { - parent: None, - label: "obsolete", - navigation_range: 428..436, - node_range: 411..441, - kind: FN, - detail: Some( - "fn()", - ), - deprecated: true, - }, - StructureNode { - parent: None, - label: "very_obsolete", - navigation_range: 481..494, - node_range: 443..499, - kind: FN, - detail: Some( - "fn()", - ), - deprecated: true, - }, - ] - "#]], - ); - } -} diff --git a/crates/ra_ide/src/folding_ranges.rs b/crates/ra_ide/src/folding_ranges.rs deleted file mode 100644 index 0fbc9babd5..0000000000 --- a/crates/ra_ide/src/folding_ranges.rs +++ /dev/null @@ -1,422 +0,0 @@ -//! 
FIXME: write short doc here - -use rustc_hash::FxHashSet; - -use ra_syntax::{ - ast::{self, AstNode, AstToken, VisibilityOwner}, - Direction, NodeOrToken, SourceFile, - SyntaxKind::{self, *}, - SyntaxNode, TextRange, -}; - -#[derive(Debug, PartialEq, Eq)] -pub enum FoldKind { - Comment, - Imports, - Mods, - Block, - ArgList, -} - -#[derive(Debug)] -pub struct Fold { - pub range: TextRange, - pub kind: FoldKind, -} - -pub(crate) fn folding_ranges(file: &SourceFile) -> Vec { - let mut res = vec![]; - let mut visited_comments = FxHashSet::default(); - let mut visited_imports = FxHashSet::default(); - let mut visited_mods = FxHashSet::default(); - - for element in file.syntax().descendants_with_tokens() { - // Fold items that span multiple lines - if let Some(kind) = fold_kind(element.kind()) { - let is_multiline = match &element { - NodeOrToken::Node(node) => node.text().contains_char('\n'), - NodeOrToken::Token(token) => token.text().contains('\n'), - }; - if is_multiline { - res.push(Fold { range: element.text_range(), kind }); - continue; - } - } - - match element { - NodeOrToken::Token(token) => { - // Fold groups of comments - if let Some(comment) = ast::Comment::cast(token) { - if !visited_comments.contains(&comment) { - if let Some(range) = - contiguous_range_for_comment(comment, &mut visited_comments) - { - res.push(Fold { range, kind: FoldKind::Comment }) - } - } - } - } - NodeOrToken::Node(node) => { - // Fold groups of imports - if node.kind() == USE && !visited_imports.contains(&node) { - if let Some(range) = contiguous_range_for_group(&node, &mut visited_imports) { - res.push(Fold { range, kind: FoldKind::Imports }) - } - } - - // Fold groups of mods - if node.kind() == MODULE && !has_visibility(&node) && !visited_mods.contains(&node) - { - if let Some(range) = - contiguous_range_for_group_unless(&node, has_visibility, &mut visited_mods) - { - res.push(Fold { range, kind: FoldKind::Mods }) - } - } - } - } - } - - res -} - -fn fold_kind(kind: SyntaxKind) 
-> Option { - match kind { - COMMENT => Some(FoldKind::Comment), - USE => Some(FoldKind::Imports), - ARG_LIST | PARAM_LIST => Some(FoldKind::ArgList), - ASSOC_ITEM_LIST - | RECORD_FIELD_LIST - | RECORD_PAT_FIELD_LIST - | RECORD_EXPR_FIELD_LIST - | ITEM_LIST - | EXTERN_ITEM_LIST - | USE_TREE_LIST - | BLOCK_EXPR - | MATCH_ARM_LIST - | VARIANT_LIST - | TOKEN_TREE => Some(FoldKind::Block), - _ => None, - } -} - -fn has_visibility(node: &SyntaxNode) -> bool { - ast::Module::cast(node.clone()).and_then(|m| m.visibility()).is_some() -} - -fn contiguous_range_for_group( - first: &SyntaxNode, - visited: &mut FxHashSet, -) -> Option { - contiguous_range_for_group_unless(first, |_| false, visited) -} - -fn contiguous_range_for_group_unless( - first: &SyntaxNode, - unless: impl Fn(&SyntaxNode) -> bool, - visited: &mut FxHashSet, -) -> Option { - visited.insert(first.clone()); - - let mut last = first.clone(); - for element in first.siblings_with_tokens(Direction::Next) { - let node = match element { - NodeOrToken::Token(token) => { - if let Some(ws) = ast::Whitespace::cast(token) { - if !ws.spans_multiple_lines() { - // Ignore whitespace without blank lines - continue; - } - } - // There is a blank line or another token, which means that the - // group ends here - break; - } - NodeOrToken::Node(node) => node, - }; - - // Stop if we find a node that doesn't belong to the group - if node.kind() != first.kind() || unless(&node) { - break; - } - - visited.insert(node.clone()); - last = node; - } - - if first != &last { - Some(TextRange::new(first.text_range().start(), last.text_range().end())) - } else { - // The group consists of only one element, therefore it cannot be folded - None - } -} - -fn contiguous_range_for_comment( - first: ast::Comment, - visited: &mut FxHashSet, -) -> Option { - visited.insert(first.clone()); - - // Only fold comments of the same flavor - let group_kind = first.kind(); - if !group_kind.shape.is_line() { - return None; - } - - let mut last = 
first.clone(); - for element in first.syntax().siblings_with_tokens(Direction::Next) { - match element { - NodeOrToken::Token(token) => { - if let Some(ws) = ast::Whitespace::cast(token.clone()) { - if !ws.spans_multiple_lines() { - // Ignore whitespace without blank lines - continue; - } - } - if let Some(c) = ast::Comment::cast(token) { - if c.kind() == group_kind { - visited.insert(c.clone()); - last = c; - continue; - } - } - // The comment group ends because either: - // * An element of a different kind was reached - // * A comment of a different flavor was reached - break; - } - NodeOrToken::Node(_) => break, - }; - } - - if first != last { - Some(TextRange::new(first.syntax().text_range().start(), last.syntax().text_range().end())) - } else { - // The group consists of only one element, therefore it cannot be folded - None - } -} - -#[cfg(test)] -mod tests { - use test_utils::extract_tags; - - use super::*; - - fn check(ra_fixture: &str) { - let (ranges, text) = extract_tags(ra_fixture, "fold"); - - let parse = SourceFile::parse(&text); - let folds = folding_ranges(&parse.tree()); - assert_eq!( - folds.len(), - ranges.len(), - "The amount of folds is different than the expected amount" - ); - - for (fold, (range, attr)) in folds.iter().zip(ranges.into_iter()) { - assert_eq!(fold.range.start(), range.start()); - assert_eq!(fold.range.end(), range.end()); - - let kind = match fold.kind { - FoldKind::Comment => "comment", - FoldKind::Imports => "imports", - FoldKind::Mods => "mods", - FoldKind::Block => "block", - FoldKind::ArgList => "arglist", - }; - assert_eq!(kind, &attr.unwrap()); - } - } - - #[test] - fn test_fold_comments() { - check( - r#" -// Hello -// this is a multiline -// comment -// - -// But this is not - -fn main() { - // We should - // also - // fold - // this one. - //! But this one is different - //! 
because it has another flavor - /* As does this - multiline comment */ -}"#, - ); - } - - #[test] - fn test_fold_imports() { - check( - r#" -use std::{ - str, - vec, - io as iop -}; - -fn main() { -}"#, - ); - } - - #[test] - fn test_fold_mods() { - check( - r#" - -pub mod foo; -mod after_pub; -mod after_pub_next; - -mod before_pub; -mod before_pub_next; -pub mod bar; - -mod not_folding_single; -pub mod foobar; -pub not_folding_single_next; - -#[cfg(test)] -mod with_attribute; -mod with_attribute_next; - -fn main() { -}"#, - ); - } - - #[test] - fn test_fold_import_groups() { - check( - r#" -use std::str; -use std::vec; -use std::io as iop; - -use std::mem; -use std::f64; - -use std::collections::HashMap; -// Some random comment -use std::collections::VecDeque; - -fn main() { -}"#, - ); - } - - #[test] - fn test_fold_import_and_groups() { - check( - r#" -use std::str; -use std::vec; -use std::io as iop; - -use std::mem; -use std::f64; - -use std::collections::{ - HashMap, - VecDeque, -}; -// Some random comment - -fn main() { -}"#, - ); - } - - #[test] - fn test_folds_structs() { - check( - r#" -struct Foo { -} -"#, - ); - } - - #[test] - fn test_folds_traits() { - check( - r#" -trait Foo { -} -"#, - ); - } - - #[test] - fn test_folds_macros() { - check( - r#" -macro_rules! 
foo { - ($($tt:tt)*) => { $($tt)* } -} -"#, - ); - } - - #[test] - fn test_fold_match_arms() { - check( - r#" -fn main() { - match 0 { - 0 => 0, - _ => 1, - } -} -"#, - ); - } - - #[test] - fn fold_big_calls() { - check( - r#" -fn main() { - frobnicate( - 1, - 2, - 3, - ) -} -"#, - ) - } - - #[test] - fn fold_record_literals() { - check( - r#" -const _: S = S { - -}; -"#, - ) - } - - #[test] - fn fold_multiline_params() { - check( - r#" -fn foo( - x: i32, - y: String, -) {} -"#, - ) - } -} diff --git a/crates/ra_ide/src/goto_definition.rs b/crates/ra_ide/src/goto_definition.rs deleted file mode 100644 index 4e3f428fae..0000000000 --- a/crates/ra_ide/src/goto_definition.rs +++ /dev/null @@ -1,967 +0,0 @@ -use hir::Semantics; -use ra_ide_db::{ - defs::{classify_name, classify_name_ref, NameClass}, - symbol_index, RootDatabase, -}; -use ra_syntax::{ - ast::{self}, - match_ast, AstNode, - SyntaxKind::*, - SyntaxToken, TokenAtOffset, T, -}; - -use crate::{ - display::{ToNav, TryToNav}, - FilePosition, NavigationTarget, RangeInfo, -}; - -// Feature: Go to Definition -// -// Navigates to the definition of an identifier. -// -// |=== -// | Editor | Shortcut -// -// | VS Code | kbd:[F12] -// |=== -pub(crate) fn goto_definition( - db: &RootDatabase, - position: FilePosition, -) -> Option>> { - let sema = Semantics::new(db); - let file = sema.parse(position.file_id).syntax().clone(); - let original_token = pick_best(file.token_at_offset(position.offset))?; - let token = sema.descend_into_macros(original_token.clone()); - let parent = token.parent(); - - let nav_targets = match_ast! { - match parent { - ast::NameRef(name_ref) => { - reference_definition(&sema, &name_ref).to_vec() - }, - ast::Name(name) => { - let def = match classify_name(&sema, &name)? 
{ - NameClass::Definition(def) | NameClass::ConstReference(def) => def, - NameClass::FieldShorthand { local: _, field } => field, - }; - let nav = def.try_to_nav(sema.db)?; - vec![nav] - }, - _ => return None, - } - }; - - Some(RangeInfo::new(original_token.text_range(), nav_targets)) -} - -fn pick_best(tokens: TokenAtOffset) -> Option { - return tokens.max_by_key(priority); - fn priority(n: &SyntaxToken) -> usize { - match n.kind() { - IDENT | INT_NUMBER | T![self] => 2, - kind if kind.is_trivia() => 0, - _ => 1, - } - } -} - -#[derive(Debug)] -pub(crate) enum ReferenceResult { - Exact(NavigationTarget), - Approximate(Vec), -} - -impl ReferenceResult { - fn to_vec(self) -> Vec { - match self { - ReferenceResult::Exact(target) => vec![target], - ReferenceResult::Approximate(vec) => vec, - } - } -} - -pub(crate) fn reference_definition( - sema: &Semantics, - name_ref: &ast::NameRef, -) -> ReferenceResult { - let name_kind = classify_name_ref(sema, name_ref); - if let Some(def) = name_kind { - let def = def.definition(); - - return match def.try_to_nav(sema.db) { - Some(nav) => ReferenceResult::Exact(nav), - None => ReferenceResult::Approximate(Vec::new()), - }; - } - - // Fallback index based approach: - let navs = symbol_index::index_resolve(sema.db, name_ref) - .into_iter() - .map(|s| s.to_nav(sema.db)) - .collect(); - ReferenceResult::Approximate(navs) -} - -#[cfg(test)] -mod tests { - use ra_db::FileRange; - use ra_syntax::{TextRange, TextSize}; - - use crate::mock_analysis::MockAnalysis; - - fn check(ra_fixture: &str) { - let (mock, position) = MockAnalysis::with_files_and_position(ra_fixture); - let (mut expected, data) = mock.annotation(); - let analysis = mock.analysis(); - match data.as_str() { - "" => (), - "file" => { - expected.range = - TextRange::up_to(TextSize::of(&*analysis.file_text(expected.file_id).unwrap())) - } - data => panic!("bad data: {}", data), - } - - let mut navs = - analysis.goto_definition(position).unwrap().expect("no definition 
found").info; - if navs.len() == 0 { - panic!("unresolved reference") - } - assert_eq!(navs.len(), 1); - - let nav = navs.pop().unwrap(); - assert_eq!(expected, FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() }); - } - - #[test] - fn goto_def_in_items() { - check( - r#" -struct Foo; - //^^^ -enum E { X(Foo<|>) } -"#, - ); - } - - #[test] - fn goto_def_at_start_of_item() { - check( - r#" -struct Foo; - //^^^ -enum E { X(<|>Foo) } -"#, - ); - } - - #[test] - fn goto_definition_resolves_correct_name() { - check( - r#" -//- /lib.rs -use a::Foo; -mod a; -mod b; -enum E { X(Foo<|>) } - -//- /a.rs -struct Foo; - //^^^ -//- /b.rs -struct Foo; -"#, - ); - } - - #[test] - fn goto_def_for_module_declaration() { - check( - r#" -//- /lib.rs -mod <|>foo; - -//- /foo.rs -// empty -//^ file -"#, - ); - - check( - r#" -//- /lib.rs -mod <|>foo; - -//- /foo/mod.rs -// empty -//^ file -"#, - ); - } - - #[test] - fn goto_def_for_macros() { - check( - r#" -macro_rules! foo { () => { () } } - //^^^ -fn bar() { - <|>foo!(); -} -"#, - ); - } - - #[test] - fn goto_def_for_macros_from_other_crates() { - check( - r#" -//- /lib.rs -use foo::foo; -fn bar() { - <|>foo!(); -} - -//- /foo/lib.rs -#[macro_export] -macro_rules! foo { () => { () } } - //^^^ -"#, - ); - } - - #[test] - fn goto_def_for_macros_in_use_tree() { - check( - r#" -//- /lib.rs -use foo::foo<|>; - -//- /foo/lib.rs -#[macro_export] -macro_rules! foo { () => { () } } - //^^^ -"#, - ); - } - - #[test] - fn goto_def_for_macro_defined_fn_with_arg() { - check( - r#" -//- /lib.rs -macro_rules! define_fn { - ($name:ident) => (fn $name() {}) -} - -define_fn!(foo); - //^^^ - -fn bar() { - <|>foo(); -} -"#, - ); - } - - #[test] - fn goto_def_for_macro_defined_fn_no_arg() { - check( - r#" -//- /lib.rs -macro_rules! 
define_fn { - () => (fn foo() {}) -} - - define_fn!(); -//^^^^^^^^^^^^^ - -fn bar() { - <|>foo(); -} -"#, - ); - } - - #[test] - fn goto_definition_works_for_macro_inside_pattern() { - check( - r#" -//- /lib.rs -macro_rules! foo {() => {0}} - //^^^ - -fn bar() { - match (0,1) { - (<|>foo!(), _) => {} - } -} -"#, - ); - } - - #[test] - fn goto_definition_works_for_macro_inside_match_arm_lhs() { - check( - r#" -//- /lib.rs -macro_rules! foo {() => {0}} - //^^^ -fn bar() { - match 0 { - <|>foo!() => {} - } -} -"#, - ); - } - - #[test] - fn goto_def_for_use_alias() { - check( - r#" -//- /lib.rs -use foo as bar<|>; - -//- /foo/lib.rs -// empty -//^ file -"#, - ); - } - - #[test] - fn goto_def_for_use_alias_foo_macro() { - check( - r#" -//- /lib.rs -use foo::foo as bar<|>; - -//- /foo/lib.rs -#[macro_export] -macro_rules! foo { () => { () } } - //^^^ -"#, - ); - } - - #[test] - fn goto_def_for_methods() { - check( - r#" -//- /lib.rs -struct Foo; -impl Foo { - fn frobnicate(&self) { } - //^^^^^^^^^^ -} - -fn bar(foo: &Foo) { - foo.frobnicate<|>(); -} -"#, - ); - } - - #[test] - fn goto_def_for_fields() { - check( - r#" -struct Foo { - spam: u32, -} //^^^^ - -fn bar(foo: &Foo) { - foo.spam<|>; -} -"#, - ); - } - - #[test] - fn goto_def_for_record_fields() { - check( - r#" -//- /lib.rs -struct Foo { - spam: u32, -} //^^^^ - -fn bar() -> Foo { - Foo { - spam<|>: 0, - } -} -"#, - ); - } - - #[test] - fn goto_def_for_record_pat_fields() { - check( - r#" -//- /lib.rs -struct Foo { - spam: u32, -} //^^^^ - -fn bar(foo: Foo) -> Foo { - let Foo { spam<|>: _, } = foo -} -"#, - ); - } - - #[test] - fn goto_def_for_record_fields_macros() { - check( - r" -macro_rules! 
m { () => { 92 };} -struct Foo { spam: u32 } - //^^^^ - -fn bar() -> Foo { - Foo { spam<|>: m!() } -} -", - ); - } - - #[test] - fn goto_for_tuple_fields() { - check( - r#" -struct Foo(u32); - //^^^ - -fn bar() { - let foo = Foo(0); - foo.<|>0; -} -"#, - ); - } - - #[test] - fn goto_def_for_ufcs_inherent_methods() { - check( - r#" -struct Foo; -impl Foo { - fn frobnicate() { } -} //^^^^^^^^^^ - -fn bar(foo: &Foo) { - Foo::frobnicate<|>(); -} -"#, - ); - } - - #[test] - fn goto_def_for_ufcs_trait_methods_through_traits() { - check( - r#" -trait Foo { - fn frobnicate(); -} //^^^^^^^^^^ - -fn bar() { - Foo::frobnicate<|>(); -} -"#, - ); - } - - #[test] - fn goto_def_for_ufcs_trait_methods_through_self() { - check( - r#" -struct Foo; -trait Trait { - fn frobnicate(); -} //^^^^^^^^^^ -impl Trait for Foo {} - -fn bar() { - Foo::frobnicate<|>(); -} -"#, - ); - } - - #[test] - fn goto_definition_on_self() { - check( - r#" -struct Foo; -impl Foo { - //^^^ - pub fn new() -> Self { - Self<|> {} - } -} -"#, - ); - check( - r#" -struct Foo; -impl Foo { - //^^^ - pub fn new() -> Self<|> { - Self {} - } -} -"#, - ); - - check( - r#" -enum Foo { A } -impl Foo { - //^^^ - pub fn new() -> Self<|> { - Foo::A - } -} -"#, - ); - - check( - r#" -enum Foo { A } -impl Foo { - //^^^ - pub fn thing(a: &Self<|>) { - } -} -"#, - ); - } - - #[test] - fn goto_definition_on_self_in_trait_impl() { - check( - r#" -struct Foo; -trait Make { - fn new() -> Self; -} -impl Make for Foo { - //^^^ - fn new() -> Self { - Self<|> {} - } -} -"#, - ); - - check( - r#" -struct Foo; -trait Make { - fn new() -> Self; -} -impl Make for Foo { - //^^^ - fn new() -> Self<|> { - Self {} - } -} -"#, - ); - } - - #[test] - fn goto_def_when_used_on_definition_name_itself() { - check( - r#" -struct Foo<|> { value: u32 } - //^^^ - "#, - ); - - check( - r#" -struct Foo { - field<|>: string, -} //^^^^^ -"#, - ); - - check( - r#" -fn foo_test<|>() { } - //^^^^^^^^ -"#, - ); - - check( - r#" -enum Foo<|> { Variant } - //^^^ 
-"#, - ); - - check( - r#" -enum Foo { - Variant1, - Variant2<|>, - //^^^^^^^^ - Variant3, -} -"#, - ); - - check( - r#" -static INNER<|>: &str = ""; - //^^^^^ -"#, - ); - - check( - r#" -const INNER<|>: &str = ""; - //^^^^^ -"#, - ); - - check( - r#" -type Thing<|> = Option<()>; - //^^^^^ -"#, - ); - - check( - r#" -trait Foo<|> { } - //^^^ -"#, - ); - - check( - r#" -mod bar<|> { } - //^^^ -"#, - ); - } - - #[test] - fn goto_from_macro() { - check( - r#" -macro_rules! id { - ($($tt:tt)*) => { $($tt)* } -} -fn foo() {} - //^^^ -id! { - fn bar() { - fo<|>o(); - } -} -mod confuse_index { fn foo(); } -"#, - ); - } - - #[test] - fn goto_through_format() { - check( - r#" -#[macro_export] -macro_rules! format { - ($($arg:tt)*) => ($crate::fmt::format($crate::__export::format_args!($($arg)*))) -} -#[rustc_builtin_macro] -#[macro_export] -macro_rules! format_args { - ($fmt:expr) => ({ /* compiler built-in */ }); - ($fmt:expr, $($args:tt)*) => ({ /* compiler built-in */ }) -} -pub mod __export { - pub use crate::format_args; - fn foo() {} // for index confusion -} -fn foo() -> i8 {} - //^^^ -fn test() { - format!("{}", fo<|>o()) -} -"#, - ); - } - - #[test] - fn goto_for_type_param() { - check( - r#" -struct Foo { t: <|>T } - //^ -"#, - ); - } - - #[test] - fn goto_within_macro() { - check( - r#" -macro_rules! id { - ($($tt:tt)*) => ($($tt)*) -} - -fn foo() { - let x = 1; - //^ - id!({ - let y = <|>x; - let z = y; - }); -} -"#, - ); - - check( - r#" -macro_rules! id { - ($($tt:tt)*) => ($($tt)*) -} - -fn foo() { - let x = 1; - id!({ - let y = x; - //^ - let z = <|>y; - }); -} -"#, - ); - } - - #[test] - fn goto_def_in_local_fn() { - check( - r#" -fn main() { - fn foo() { - let x = 92; - //^ - <|>x; - } -} -"#, - ); - } - - #[test] - fn goto_def_in_local_macro() { - check( - r#" -fn bar() { - macro_rules! 
foo { () => { () } } - //^^^ - <|>foo!(); -} -"#, - ); - } - - #[test] - fn goto_def_for_field_init_shorthand() { - check( - r#" -struct Foo { x: i32 } -fn main() { - let x = 92; - //^ - Foo { x<|> }; -} -"#, - ) - } - - #[test] - fn goto_def_for_enum_variant_field() { - check( - r#" -enum Foo { - Bar { x: i32 } -} //^ -fn baz(foo: Foo) { - match foo { - Foo::Bar { x<|> } => x - }; -} -"#, - ); - } - - #[test] - fn goto_def_for_enum_variant_self_pattern_const() { - check( - r#" -enum Foo { Bar } - //^^^ -impl Foo { - fn baz(self) { - match self { Self::Bar<|> => {} } - } -} -"#, - ); - } - - #[test] - fn goto_def_for_enum_variant_self_pattern_record() { - check( - r#" -enum Foo { Bar { val: i32 } } - //^^^ -impl Foo { - fn baz(self) -> i32 { - match self { Self::Bar<|> { val } => {} } - } -} -"#, - ); - } - - #[test] - fn goto_def_for_enum_variant_self_expr_const() { - check( - r#" -enum Foo { Bar } - //^^^ -impl Foo { - fn baz(self) { Self::Bar<|>; } -} -"#, - ); - } - - #[test] - fn goto_def_for_enum_variant_self_expr_record() { - check( - r#" -enum Foo { Bar { val: i32 } } - //^^^ -impl Foo { - fn baz(self) { Self::Bar<|> {val: 4}; } -} -"#, - ); - } - - #[test] - fn goto_def_for_type_alias_generic_parameter() { - check( - r#" -type Alias = T<|>; - //^ -"#, - ) - } - - #[test] - fn goto_def_for_macro_container() { - check( - r#" -//- /lib.rs -foo::module<|>::mac!(); - -//- /foo/lib.rs -pub mod module { - //^^^^^^ - #[macro_export] - macro_rules! 
_mac { () => { () } } - pub use crate::_mac as mac; -} -"#, - ); - } - - #[test] - fn goto_def_for_assoc_ty_in_path() { - check( - r#" -trait Iterator { - type Item; - //^^^^ -} - -fn f() -> impl Iterator = u8> {} -"#, - ); - } - - #[test] - fn goto_def_for_assoc_ty_in_path_multiple() { - check( - r#" -trait Iterator { - type A; - //^ - type B; -} - -fn f() -> impl Iterator = u8, B = ()> {} -"#, - ); - check( - r#" -trait Iterator { - type A; - type B; - //^ -} - -fn f() -> impl Iterator = ()> {} -"#, - ); - } - - #[test] - fn goto_def_for_assoc_ty_ufcs() { - check( - r#" -trait Iterator { - type Item; - //^^^^ -} - -fn g() -> <() as Iterator = ()>>::Item {} -"#, - ); - } - - #[test] - fn goto_def_for_assoc_ty_ufcs_multiple() { - check( - r#" -trait Iterator { - type A; - //^ - type B; -} - -fn g() -> <() as Iterator = (), B = u8>>::B {} -"#, - ); - check( - r#" -trait Iterator { - type A; - type B; - //^ -} - -fn g() -> <() as Iterator = u8>>::A {} -"#, - ); - } -} diff --git a/crates/ra_ide/src/goto_implementation.rs b/crates/ra_ide/src/goto_implementation.rs deleted file mode 100644 index 9912b71421..0000000000 --- a/crates/ra_ide/src/goto_implementation.rs +++ /dev/null @@ -1,229 +0,0 @@ -use hir::{Crate, ImplDef, Semantics}; -use ra_ide_db::RootDatabase; -use ra_syntax::{algo::find_node_at_offset, ast, AstNode}; - -use crate::{display::ToNav, FilePosition, NavigationTarget, RangeInfo}; - -// Feature: Go to Implementation -// -// Navigates to the impl block of structs, enums or traits. Also implemented as a code lens. 
-// -// |=== -// | Editor | Shortcut -// -// | VS Code | kbd:[Ctrl+F12] -// |=== -pub(crate) fn goto_implementation( - db: &RootDatabase, - position: FilePosition, -) -> Option>> { - let sema = Semantics::new(db); - let source_file = sema.parse(position.file_id); - let syntax = source_file.syntax().clone(); - - let krate = sema.to_module_def(position.file_id)?.krate(); - - if let Some(nominal_def) = find_node_at_offset::(&syntax, position.offset) { - return Some(RangeInfo::new( - nominal_def.syntax().text_range(), - impls_for_def(&sema, &nominal_def, krate)?, - )); - } else if let Some(trait_def) = find_node_at_offset::(&syntax, position.offset) { - return Some(RangeInfo::new( - trait_def.syntax().text_range(), - impls_for_trait(&sema, &trait_def, krate)?, - )); - } - - None -} - -fn impls_for_def( - sema: &Semantics, - node: &ast::AdtDef, - krate: Crate, -) -> Option> { - let ty = match node { - ast::AdtDef::Struct(def) => sema.to_def(def)?.ty(sema.db), - ast::AdtDef::Enum(def) => sema.to_def(def)?.ty(sema.db), - ast::AdtDef::Union(def) => sema.to_def(def)?.ty(sema.db), - }; - - let impls = ImplDef::all_in_crate(sema.db, krate); - - Some( - impls - .into_iter() - .filter(|impl_def| ty.is_equal_for_find_impls(&impl_def.target_ty(sema.db))) - .map(|imp| imp.to_nav(sema.db)) - .collect(), - ) -} - -fn impls_for_trait( - sema: &Semantics, - node: &ast::Trait, - krate: Crate, -) -> Option> { - let tr = sema.to_def(node)?; - - let impls = ImplDef::for_trait(sema.db, krate, tr); - - Some(impls.into_iter().map(|imp| imp.to_nav(sema.db)).collect()) -} - -#[cfg(test)] -mod tests { - use ra_db::FileRange; - - use crate::mock_analysis::MockAnalysis; - - fn check(ra_fixture: &str) { - let (mock, position) = MockAnalysis::with_files_and_position(ra_fixture); - let annotations = mock.annotations(); - let analysis = mock.analysis(); - - let navs = analysis.goto_implementation(position).unwrap().unwrap().info; - - let key = |frange: &FileRange| (frange.file_id, 
frange.range.start()); - - let mut expected = annotations - .into_iter() - .map(|(range, data)| { - assert!(data.is_empty()); - range - }) - .collect::>(); - expected.sort_by_key(key); - - let mut actual = navs - .into_iter() - .map(|nav| FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() }) - .collect::>(); - actual.sort_by_key(key); - - assert_eq!(expected, actual); - } - - #[test] - fn goto_implementation_works() { - check( - r#" -struct Foo<|>; -impl Foo {} - //^^^ -"#, - ); - } - - #[test] - fn goto_implementation_works_multiple_blocks() { - check( - r#" -struct Foo<|>; -impl Foo {} - //^^^ -impl Foo {} - //^^^ -"#, - ); - } - - #[test] - fn goto_implementation_works_multiple_mods() { - check( - r#" -struct Foo<|>; -mod a { - impl super::Foo {} - //^^^^^^^^^^ -} -mod b { - impl super::Foo {} - //^^^^^^^^^^ -} -"#, - ); - } - - #[test] - fn goto_implementation_works_multiple_files() { - check( - r#" -//- /lib.rs -struct Foo<|>; -mod a; -mod b; -//- /a.rs -impl crate::Foo {} - //^^^^^^^^^^ -//- /b.rs -impl crate::Foo {} - //^^^^^^^^^^ -"#, - ); - } - - #[test] - fn goto_implementation_for_trait() { - check( - r#" -trait T<|> {} -struct Foo; -impl T for Foo {} - //^^^ -"#, - ); - } - - #[test] - fn goto_implementation_for_trait_multiple_files() { - check( - r#" -//- /lib.rs -trait T<|> {}; -struct Foo; -mod a; -mod b; -//- /a.rs -impl crate::T for crate::Foo {} - //^^^^^^^^^^ -//- /b.rs -impl crate::T for crate::Foo {} - //^^^^^^^^^^ - "#, - ); - } - - #[test] - fn goto_implementation_all_impls() { - check( - r#" -//- /lib.rs -trait T {} -struct Foo<|>; -impl Foo {} - //^^^ -impl T for Foo {} - //^^^ -impl T for &Foo {} - //^^^^ -"#, - ); - } - - #[test] - fn goto_implementation_to_builtin_derive() { - check( - r#" - #[derive(Copy)] -//^^^^^^^^^^^^^^^ -struct Foo<|>; - -mod marker { - trait Copy {} -} -"#, - ); - } -} diff --git a/crates/ra_ide/src/goto_type_definition.rs b/crates/ra_ide/src/goto_type_definition.rs deleted file mode 100644 index 
8f52feea6e..0000000000 --- a/crates/ra_ide/src/goto_type_definition.rs +++ /dev/null @@ -1,151 +0,0 @@ -use ra_ide_db::RootDatabase; -use ra_syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxToken, TokenAtOffset, T}; - -use crate::{display::ToNav, FilePosition, NavigationTarget, RangeInfo}; - -// Feature: Go to Type Definition -// -// Navigates to the type of an identifier. -// -// |=== -// | Editor | Action Name -// -// | VS Code | **Go to Type Definition* -// |=== -pub(crate) fn goto_type_definition( - db: &RootDatabase, - position: FilePosition, -) -> Option>> { - let sema = hir::Semantics::new(db); - - let file: ast::SourceFile = sema.parse(position.file_id); - let token: SyntaxToken = pick_best(file.syntax().token_at_offset(position.offset))?; - let token: SyntaxToken = sema.descend_into_macros(token); - - let (ty, node) = sema.ancestors_with_macros(token.parent()).find_map(|node| { - let ty = match_ast! { - match node { - ast::Expr(it) => sema.type_of_expr(&it)?, - ast::Pat(it) => sema.type_of_pat(&it)?, - ast::SelfParam(it) => sema.type_of_self(&it)?, - _ => return None, - } - }; - - Some((ty, node)) - })?; - - let adt_def = ty.autoderef(db).filter_map(|ty| ty.as_adt()).last()?; - - let nav = adt_def.to_nav(db); - Some(RangeInfo::new(node.text_range(), vec![nav])) -} - -fn pick_best(tokens: TokenAtOffset) -> Option { - return tokens.max_by_key(priority); - fn priority(n: &SyntaxToken) -> usize { - match n.kind() { - IDENT | INT_NUMBER | T![self] => 2, - kind if kind.is_trivia() => 0, - _ => 1, - } - } -} - -#[cfg(test)] -mod tests { - use ra_db::FileRange; - - use crate::mock_analysis::MockAnalysis; - - fn check(ra_fixture: &str) { - let (mock, position) = MockAnalysis::with_files_and_position(ra_fixture); - let (expected, data) = mock.annotation(); - assert!(data.is_empty()); - let analysis = mock.analysis(); - - let mut navs = analysis.goto_type_definition(position).unwrap().unwrap().info; - assert_eq!(navs.len(), 1); - let nav = navs.pop().unwrap(); 
- assert_eq!(expected, FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() }); - } - - #[test] - fn goto_type_definition_works_simple() { - check( - r#" -struct Foo; - //^^^ -fn foo() { - let f: Foo; f<|> -} -"#, - ); - } - - #[test] - fn goto_type_definition_works_simple_ref() { - check( - r#" -struct Foo; - //^^^ -fn foo() { - let f: &Foo; f<|> -} -"#, - ); - } - - #[test] - fn goto_type_definition_works_through_macro() { - check( - r#" -macro_rules! id { ($($tt:tt)*) => { $($tt)* } } -struct Foo {} - //^^^ -id! { - fn bar() { let f<|> = Foo {}; } -} -"#, - ); - } - - #[test] - fn goto_type_definition_for_param() { - check( - r#" -struct Foo; - //^^^ -fn foo(<|>f: Foo) {} -"#, - ); - } - - #[test] - fn goto_type_definition_for_tuple_field() { - check( - r#" -struct Foo; - //^^^ -struct Bar(Foo); -fn foo() { - let bar = Bar(Foo); - bar.<|>0; -} -"#, - ); - } - - #[test] - fn goto_def_for_self_param() { - check( - r#" -struct Foo; - //^^^ -impl Foo { - fn f(&self<|>) {} -} -"#, - ) - } -} diff --git a/crates/ra_ide/src/hover.rs b/crates/ra_ide/src/hover.rs deleted file mode 100644 index aa48cb412f..0000000000 --- a/crates/ra_ide/src/hover.rs +++ /dev/null @@ -1,2387 +0,0 @@ -use hir::{ - Adt, AsAssocItem, AssocItemContainer, Documentation, FieldSource, HasSource, HirDisplay, - Module, ModuleDef, ModuleSource, Semantics, -}; -use itertools::Itertools; -use ra_db::SourceDatabase; -use ra_ide_db::{ - defs::{classify_name, classify_name_ref, Definition}, - RootDatabase, -}; -use ra_syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxToken, TokenAtOffset, T}; -use stdx::format_to; -use test_utils::mark; - -use crate::{ - display::{macro_label, ShortLabel, ToNav, TryToNav}, - markup::Markup, - runnables::runnable, - FileId, FilePosition, NavigationTarget, RangeInfo, Runnable, -}; - -#[derive(Clone, Debug, PartialEq, Eq)] -pub struct HoverConfig { - pub implementations: bool, - pub run: bool, - pub debug: bool, - pub goto_type_def: bool, -} - -impl 
Default for HoverConfig { - fn default() -> Self { - Self { implementations: true, run: true, debug: true, goto_type_def: true } - } -} - -impl HoverConfig { - pub const NO_ACTIONS: Self = - Self { implementations: false, run: false, debug: false, goto_type_def: false }; - - pub fn any(&self) -> bool { - self.implementations || self.runnable() || self.goto_type_def - } - - pub fn none(&self) -> bool { - !self.any() - } - - pub fn runnable(&self) -> bool { - self.run || self.debug - } -} - -#[derive(Debug, Clone)] -pub enum HoverAction { - Runnable(Runnable), - Implementaion(FilePosition), - GoToType(Vec), -} - -#[derive(Debug, Clone, Eq, PartialEq)] -pub struct HoverGotoTypeData { - pub mod_path: String, - pub nav: NavigationTarget, -} - -/// Contains the results when hovering over an item -#[derive(Debug, Default)] -pub struct HoverResult { - pub markup: Markup, - pub actions: Vec, -} - -// Feature: Hover -// -// Shows additional information, like type of an expression or documentation for definition when "focusing" code. -// Focusing is usually hovering with a mouse, but can also be triggered with a shortcut. -pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option> { - let sema = Semantics::new(db); - let file = sema.parse(position.file_id).syntax().clone(); - let token = pick_best(file.token_at_offset(position.offset))?; - let token = sema.descend_into_macros(token); - - let mut res = HoverResult::default(); - - let node = token.parent(); - let definition = match_ast! 
{ - match node { - ast::NameRef(name_ref) => classify_name_ref(&sema, &name_ref).map(|d| d.definition()), - ast::Name(name) => classify_name(&sema, &name).map(|d| d.definition()), - _ => None, - } - }; - if let Some(definition) = definition { - if let Some(markup) = hover_for_definition(db, definition) { - res.markup = markup; - if let Some(action) = show_implementations_action(db, definition) { - res.actions.push(action); - } - - if let Some(action) = runnable_action(&sema, definition, position.file_id) { - res.actions.push(action); - } - - if let Some(action) = goto_type_action(db, definition) { - res.actions.push(action); - } - - let range = sema.original_range(&node).range; - return Some(RangeInfo::new(range, res)); - } - } - - let node = token - .ancestors() - .find(|n| ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some())?; - - let ty = match_ast! { - match node { - ast::Expr(it) => sema.type_of_expr(&it)?, - ast::Pat(it) => sema.type_of_pat(&it)?, - // If this node is a MACRO_CALL, it means that `descend_into_macros` failed to resolve. - // (e.g expanding a builtin macro). So we give up here. 
- ast::MacroCall(_it) => return None, - _ => return None, - } - }; - - res.markup = Markup::fenced_block(&ty.display(db)); - let range = sema.original_range(&node).range; - Some(RangeInfo::new(range, res)) -} - -fn show_implementations_action(db: &RootDatabase, def: Definition) -> Option { - fn to_action(nav_target: NavigationTarget) -> HoverAction { - HoverAction::Implementaion(FilePosition { - file_id: nav_target.file_id, - offset: nav_target.focus_or_full_range().start(), - }) - } - - match def { - Definition::ModuleDef(it) => match it { - ModuleDef::Adt(Adt::Struct(it)) => Some(to_action(it.to_nav(db))), - ModuleDef::Adt(Adt::Union(it)) => Some(to_action(it.to_nav(db))), - ModuleDef::Adt(Adt::Enum(it)) => Some(to_action(it.to_nav(db))), - ModuleDef::Trait(it) => Some(to_action(it.to_nav(db))), - _ => None, - }, - _ => None, - } -} - -fn runnable_action( - sema: &Semantics, - def: Definition, - file_id: FileId, -) -> Option { - match def { - Definition::ModuleDef(it) => match it { - ModuleDef::Module(it) => match it.definition_source(sema.db).value { - ModuleSource::Module(it) => runnable(&sema, it.syntax().clone(), file_id) - .map(|it| HoverAction::Runnable(it)), - _ => None, - }, - ModuleDef::Function(it) => { - let src = it.source(sema.db); - if src.file_id != file_id.into() { - mark::hit!(hover_macro_generated_struct_fn_doc_comment); - mark::hit!(hover_macro_generated_struct_fn_doc_attr); - - return None; - } - - runnable(&sema, src.value.syntax().clone(), file_id) - .map(|it| HoverAction::Runnable(it)) - } - _ => None, - }, - _ => None, - } -} - -fn goto_type_action(db: &RootDatabase, def: Definition) -> Option { - match def { - Definition::Local(it) => { - let mut targets: Vec = Vec::new(); - let mut push_new_def = |item: ModuleDef| { - if !targets.contains(&item) { - targets.push(item); - } - }; - - it.ty(db).walk(db, |t| { - if let Some(adt) = t.as_adt() { - push_new_def(adt.into()); - } else if let Some(trait_) = t.as_dyn_trait() { - 
push_new_def(trait_.into()); - } else if let Some(traits) = t.as_impl_traits(db) { - traits.into_iter().for_each(|it| push_new_def(it.into())); - } else if let Some(trait_) = t.as_associated_type_parent_trait(db) { - push_new_def(trait_.into()); - } - }); - - let targets = targets - .into_iter() - .filter_map(|it| { - Some(HoverGotoTypeData { - mod_path: render_path( - db, - it.module(db)?, - it.name(db).map(|name| name.to_string()), - ), - nav: it.try_to_nav(db)?, - }) - }) - .collect(); - - Some(HoverAction::GoToType(targets)) - } - _ => None, - } -} - -fn hover_markup( - docs: Option, - desc: Option, - mod_path: Option, -) -> Option { - match desc { - Some(desc) => { - let mut buf = String::new(); - - if let Some(mod_path) = mod_path { - if !mod_path.is_empty() { - format_to!(buf, "```rust\n{}\n```\n\n", mod_path); - } - } - format_to!(buf, "```rust\n{}\n```", desc); - - if let Some(doc) = docs { - format_to!(buf, "\n___\n\n{}", doc); - } - Some(buf.into()) - } - None => docs.map(Markup::from), - } -} - -fn definition_owner_name(db: &RootDatabase, def: &Definition) -> Option { - match def { - Definition::Field(f) => Some(f.parent_def(db).name(db)), - Definition::Local(l) => l.parent(db).name(db), - Definition::ModuleDef(md) => match md { - ModuleDef::Function(f) => match f.as_assoc_item(db)?.container(db) { - AssocItemContainer::Trait(t) => Some(t.name(db)), - AssocItemContainer::ImplDef(i) => i.target_ty(db).as_adt().map(|adt| adt.name(db)), - }, - ModuleDef::EnumVariant(e) => Some(e.parent_enum(db).name(db)), - _ => None, - }, - Definition::SelfType(i) => i.target_ty(db).as_adt().map(|adt| adt.name(db)), - _ => None, - } - .map(|name| name.to_string()) -} - -fn render_path(db: &RootDatabase, module: Module, item_name: Option) -> String { - let crate_name = - db.crate_graph()[module.krate().into()].display_name.as_ref().map(ToString::to_string); - let module_path = module - .path_to_root(db) - .into_iter() - .rev() - .flat_map(|it| it.name(db).map(|name| 
name.to_string())); - crate_name.into_iter().chain(module_path).chain(item_name).join("::") -} - -fn definition_mod_path(db: &RootDatabase, def: &Definition) -> Option { - def.module(db).map(|module| render_path(db, module, definition_owner_name(db, def))) -} - -fn hover_for_definition(db: &RootDatabase, def: Definition) -> Option { - let mod_path = definition_mod_path(db, &def); - return match def { - Definition::Macro(it) => { - let src = it.source(db); - let docs = Documentation::from_ast(&src.value).map(Into::into); - hover_markup(docs, Some(macro_label(&src.value)), mod_path) - } - Definition::Field(it) => { - let src = it.source(db); - match src.value { - FieldSource::Named(it) => { - let docs = Documentation::from_ast(&it).map(Into::into); - hover_markup(docs, it.short_label(), mod_path) - } - _ => None, - } - } - Definition::ModuleDef(it) => match it { - ModuleDef::Module(it) => match it.definition_source(db).value { - ModuleSource::Module(it) => { - let docs = Documentation::from_ast(&it).map(Into::into); - hover_markup(docs, it.short_label(), mod_path) - } - _ => None, - }, - ModuleDef::Function(it) => from_def_source(db, it, mod_path), - ModuleDef::Adt(Adt::Struct(it)) => from_def_source(db, it, mod_path), - ModuleDef::Adt(Adt::Union(it)) => from_def_source(db, it, mod_path), - ModuleDef::Adt(Adt::Enum(it)) => from_def_source(db, it, mod_path), - ModuleDef::EnumVariant(it) => from_def_source(db, it, mod_path), - ModuleDef::Const(it) => from_def_source(db, it, mod_path), - ModuleDef::Static(it) => from_def_source(db, it, mod_path), - ModuleDef::Trait(it) => from_def_source(db, it, mod_path), - ModuleDef::TypeAlias(it) => from_def_source(db, it, mod_path), - ModuleDef::BuiltinType(it) => return Some(it.to_string().into()), - }, - Definition::Local(it) => return Some(Markup::fenced_block(&it.ty(db).display(db))), - Definition::TypeParam(_) | Definition::SelfType(_) => { - // FIXME: Hover for generic param - None - } - }; - - fn from_def_source(db: 
&RootDatabase, def: D, mod_path: Option) -> Option - where - D: HasSource, - A: ast::DocCommentsOwner + ast::NameOwner + ShortLabel + ast::AttrsOwner, - { - let src = def.source(db); - let docs = Documentation::from_ast(&src.value).map(Into::into); - hover_markup(docs, src.value.short_label(), mod_path) - } -} - -fn pick_best(tokens: TokenAtOffset) -> Option { - return tokens.max_by_key(priority); - fn priority(n: &SyntaxToken) -> usize { - match n.kind() { - IDENT | INT_NUMBER => 3, - T!['('] | T![')'] => 2, - kind if kind.is_trivia() => 0, - _ => 1, - } - } -} - -#[cfg(test)] -mod tests { - use expect::{expect, Expect}; - use ra_db::FileLoader; - - use crate::mock_analysis::analysis_and_position; - - use super::*; - - fn check_hover_no_result(ra_fixture: &str) { - let (analysis, position) = analysis_and_position(ra_fixture); - assert!(analysis.hover(position).unwrap().is_none()); - } - - fn check(ra_fixture: &str, expect: Expect) { - let (analysis, position) = analysis_and_position(ra_fixture); - let hover = analysis.hover(position).unwrap().unwrap(); - - let content = analysis.db.file_text(position.file_id); - let hovered_element = &content[hover.range]; - - let actual = format!("*{}*\n{}\n", hovered_element, hover.info.markup); - expect.assert_eq(&actual) - } - - fn check_actions(ra_fixture: &str, expect: Expect) { - let (analysis, position) = analysis_and_position(ra_fixture); - let hover = analysis.hover(position).unwrap().unwrap(); - expect.assert_debug_eq(&hover.info.actions) - } - - #[test] - fn hover_shows_type_of_an_expression() { - check( - r#" -pub fn foo() -> u32 { 1 } - -fn main() { - let foo_test = foo()<|>; -} -"#, - expect![[r#" - *foo()* - ```rust - u32 - ``` - "#]], - ); - } - - #[test] - fn hover_shows_long_type_of_an_expression() { - check( - r#" -struct Scan { a: A, b: B, c: C } -struct Iter { inner: I } -enum Option { Some(T), None } - -struct OtherStruct { i: T } - -fn scan(a: A, b: B, c: C) -> Iter, B, C>> { - Iter { inner: Scan { a, b, c 
} } -} - -fn main() { - let num: i32 = 55; - let closure = |memo: &mut u32, value: &u32, _another: &mut u32| -> Option { - Option::Some(*memo + value) - }; - let number = 5u32; - let mut iter<|> = scan(OtherStruct { i: num }, closure, number); -} -"#, - expect![[r#" - *iter* - ```rust - Iter>, |&mut u32, &u32, &mut u32| -> Option, u32>> - ``` - "#]], - ); - } - - #[test] - fn hover_shows_fn_signature() { - // Single file with result - check( - r#" -pub fn foo() -> u32 { 1 } - -fn main() { let foo_test = fo<|>o(); } -"#, - expect![[r#" - *foo* - ```rust - pub fn foo() -> u32 - ``` - "#]], - ); - - // Multiple candidates but results are ambiguous. - check( - r#" -//- /a.rs -pub fn foo() -> u32 { 1 } - -//- /b.rs -pub fn foo() -> &str { "" } - -//- /c.rs -pub fn foo(a: u32, b: u32) {} - -//- /main.rs -mod a; -mod b; -mod c; - -fn main() { let foo_test = fo<|>o(); } - "#, - expect![[r#" - *foo* - ```rust - {unknown} - ``` - "#]], - ); - } - - #[test] - fn hover_shows_fn_signature_with_type_params() { - check( - r#" -pub fn foo<'a, T: AsRef>(b: &'a T) -> &'a str { } - -fn main() { let foo_test = fo<|>o(); } - "#, - expect![[r#" - *foo* - ```rust - pub fn foo<'a, T: AsRef>(b: &'a T) -> &'a str - ``` - "#]], - ); - } - - #[test] - fn hover_shows_fn_signature_on_fn_name() { - check( - r#" -pub fn foo<|>(a: u32, b: u32) -> u32 {} - -fn main() { } -"#, - expect![[r#" - *foo* - ```rust - pub fn foo(a: u32, b: u32) -> u32 - ``` - "#]], - ); - } - - #[test] - fn hover_shows_struct_field_info() { - // Hovering over the field when instantiating - check( - r#" -struct Foo { field_a: u32 } - -fn main() { - let foo = Foo { field_a<|>: 0, }; -} -"#, - expect![[r#" - *field_a* - ```rust - Foo - ``` - - ```rust - field_a: u32 - ``` - "#]], - ); - - // Hovering over the field in the definition - check( - r#" -struct Foo { field_a<|>: u32 } - -fn main() { - let foo = Foo { field_a: 0 }; -} -"#, - expect![[r#" - *field_a* - ```rust - Foo - ``` - - ```rust - field_a: u32 - ``` - "#]], - ); 
- } - - #[test] - fn hover_const_static() { - check( - r#"const foo<|>: u32 = 0;"#, - expect![[r#" - *foo* - ```rust - const foo: u32 - ``` - "#]], - ); - check( - r#"static foo<|>: u32 = 0;"#, - expect![[r#" - *foo* - ```rust - static foo: u32 - ``` - "#]], - ); - } - - #[test] - fn hover_default_generic_types() { - check( - r#" -struct Test { k: K, t: T } - -fn main() { - let zz<|> = Test { t: 23u8, k: 33 }; -}"#, - expect![[r#" - *zz* - ```rust - Test - ``` - "#]], - ); - } - - #[test] - fn hover_some() { - check( - r#" -enum Option { Some(T) } -use Option::Some; - -fn main() { So<|>me(12); } -"#, - expect![[r#" - *Some* - ```rust - Option - ``` - - ```rust - Some - ``` - "#]], - ); - - check( - r#" -enum Option { Some(T) } -use Option::Some; - -fn main() { let b<|>ar = Some(12); } -"#, - expect![[r#" - *bar* - ```rust - Option - ``` - "#]], - ); - } - - #[test] - fn hover_enum_variant() { - check( - r#" -enum Option { - /// The None variant - Non<|>e -} -"#, - expect![[r#" - *None* - ```rust - Option - ``` - - ```rust - None - ``` - ___ - - The None variant - "#]], - ); - - check( - r#" -enum Option { - /// The Some variant - Some(T) -} -fn main() { - let s = Option::Som<|>e(12); -} -"#, - expect![[r#" - *Some* - ```rust - Option - ``` - - ```rust - Some - ``` - ___ - - The Some variant - "#]], - ); - } - - #[test] - fn hover_for_local_variable() { - check( - r#"fn func(foo: i32) { fo<|>o; }"#, - expect![[r#" - *foo* - ```rust - i32 - ``` - "#]], - ) - } - - #[test] - fn hover_for_local_variable_pat() { - check( - r#"fn func(fo<|>o: i32) {}"#, - expect![[r#" - *foo* - ```rust - i32 - ``` - "#]], - ) - } - - #[test] - fn hover_local_var_edge() { - check( - r#"fn func(foo: i32) { if true { <|>foo; }; }"#, - expect![[r#" - *foo* - ```rust - i32 - ``` - "#]], - ) - } - - #[test] - fn hover_for_param_edge() { - check( - r#"fn func(<|>foo: i32) {}"#, - expect![[r#" - *foo* - ```rust - i32 - ``` - "#]], - ) - } - - #[test] - fn 
test_hover_infer_associated_method_result() { - check( - r#" -struct Thing { x: u32 } - -impl Thing { - fn new() -> Thing { Thing { x: 0 } } -} - -fn main() { let foo_<|>test = Thing::new(); } - "#, - expect![[r#" - *foo_test* - ```rust - Thing - ``` - "#]], - ) - } - - #[test] - fn test_hover_infer_associated_method_exact() { - check( - r#" -mod wrapper { - struct Thing { x: u32 } - - impl Thing { - fn new() -> Thing { Thing { x: 0 } } - } -} - -fn main() { let foo_test = wrapper::Thing::new<|>(); } -"#, - expect![[r#" - *new* - ```rust - wrapper::Thing - ``` - - ```rust - fn new() -> Thing - ``` - "#]], - ) - } - - #[test] - fn test_hover_infer_associated_const_in_pattern() { - check( - r#" -struct X; -impl X { - const C: u32 = 1; -} - -fn main() { - match 1 { - X::C<|> => {}, - 2 => {}, - _ => {} - }; -} -"#, - expect![[r#" - *C* - ```rust - const C: u32 - ``` - "#]], - ) - } - - #[test] - fn test_hover_self() { - check( - r#" -struct Thing { x: u32 } -impl Thing { - fn new() -> Self { Self<|> { x: 0 } } -} -"#, - expect![[r#" - *Self { x: 0 }* - ```rust - Thing - ``` - "#]], - ) - } /* FIXME: revive these tests - let (analysis, position) = analysis_and_position( - " - struct Thing { x: u32 } - impl Thing { - fn new() -> Self<|> { - Self { x: 0 } - } - } - ", - ); - - let hover = analysis.hover(position).unwrap().unwrap(); - assert_eq!(trim_markup(&hover.info.markup.as_str()), ("Thing")); - - let (analysis, position) = analysis_and_position( - " - enum Thing { A } - impl Thing { - pub fn new() -> Self<|> { - Thing::A - } - } - ", - ); - let hover = analysis.hover(position).unwrap().unwrap(); - assert_eq!(trim_markup(&hover.info.markup.as_str()), ("enum Thing")); - - let (analysis, position) = analysis_and_position( - " - enum Thing { A } - impl Thing { - pub fn thing(a: Self<|>) { - } - } - ", - ); - let hover = analysis.hover(position).unwrap().unwrap(); - assert_eq!(trim_markup(&hover.info.markup.as_str()), ("enum Thing")); - */ - - #[test] - fn 
test_hover_shadowing_pat() { - check( - r#" -fn x() {} - -fn y() { - let x = 0i32; - x<|>; -} -"#, - expect![[r#" - *x* - ```rust - i32 - ``` - "#]], - ) - } - - #[test] - fn test_hover_macro_invocation() { - check( - r#" -macro_rules! foo { () => {} } - -fn f() { fo<|>o!(); } -"#, - expect![[r#" - *foo* - ```rust - macro_rules! foo - ``` - "#]], - ) - } - - #[test] - fn test_hover_tuple_field() { - check( - r#"struct TS(String, i32<|>);"#, - expect![[r#" - *i32* - i32 - "#]], - ) - } - - #[test] - fn test_hover_through_macro() { - check( - r#" -macro_rules! id { ($($tt:tt)*) => { $($tt)* } } -fn foo() {} -id! { - fn bar() { fo<|>o(); } -} -"#, - expect![[r#" - *foo* - ```rust - fn foo() - ``` - "#]], - ); - } - - #[test] - fn test_hover_through_expr_in_macro() { - check( - r#" -macro_rules! id { ($($tt:tt)*) => { $($tt)* } } -fn foo(bar:u32) { let a = id!(ba<|>r); } -"#, - expect![[r#" - *bar* - ```rust - u32 - ``` - "#]], - ); - } - - #[test] - fn test_hover_through_expr_in_macro_recursive() { - check( - r#" -macro_rules! id_deep { ($($tt:tt)*) => { $($tt)* } } -macro_rules! id { ($($tt:tt)*) => { id_deep!($($tt)*) } } -fn foo(bar:u32) { let a = id!(ba<|>r); } -"#, - expect![[r#" - *bar* - ```rust - u32 - ``` - "#]], - ); - } - - #[test] - fn test_hover_through_func_in_macro_recursive() { - check( - r#" -macro_rules! id_deep { ($($tt:tt)*) => { $($tt)* } } -macro_rules! id { ($($tt:tt)*) => { id_deep!($($tt)*) } } -fn bar() -> u32 { 0 } -fn foo() { let a = id!([0u32, bar(<|>)] ); } -"#, - expect![[r#" - *bar()* - ```rust - u32 - ``` - "#]], - ); - } - - #[test] - fn test_hover_through_literal_string_in_macro() { - check( - r#" -macro_rules! arr { ($($tt:tt)*) => { [$($tt)*)] } } -fn foo() { - let mastered_for_itunes = ""; - let _ = arr!("Tr<|>acks", &mastered_for_itunes); -} -"#, - expect![[r#" - *"Tracks"* - ```rust - &str - ``` - "#]], - ); - } - - #[test] - fn test_hover_through_assert_macro() { - check( - r#" -#[rustc_builtin_macro] -macro_rules! 
assert {} - -fn bar() -> bool { true } -fn foo() { - assert!(ba<|>r()); -} -"#, - expect![[r#" - *bar* - ```rust - fn bar() -> bool - ``` - "#]], - ); - } - - #[test] - fn test_hover_through_literal_string_in_builtin_macro() { - check_hover_no_result( - r#" - #[rustc_builtin_macro] - macro_rules! format {} - - fn foo() { - format!("hel<|>lo {}", 0); - } - "#, - ); - } - - #[test] - fn test_hover_non_ascii_space_doc() { - check( - " -/// <- `\u{3000}` here -fn foo() { } - -fn bar() { fo<|>o(); } -", - expect![[r#" - *foo* - ```rust - fn foo() - ``` - ___ - - <- ` ` here - "#]], - ); - } - - #[test] - fn test_hover_function_show_qualifiers() { - check( - r#"async fn foo<|>() {}"#, - expect![[r#" - *foo* - ```rust - async fn foo() - ``` - "#]], - ); - check( - r#"pub const unsafe fn foo<|>() {}"#, - expect![[r#" - *foo* - ```rust - pub const unsafe fn foo() - ``` - "#]], - ); - check( - r#"pub(crate) async unsafe extern "C" fn foo<|>() {}"#, - expect![[r#" - *foo* - ```rust - pub(crate) async unsafe extern "C" fn foo() - ``` - "#]], - ); - } - - #[test] - fn test_hover_trait_show_qualifiers() { - check_actions( - r"unsafe trait foo<|>() {}", - expect![[r#" - [ - Implementaion( - FilePosition { - file_id: FileId( - 1, - ), - offset: 13, - }, - ), - ] - "#]], - ); - } - - #[test] - fn test_hover_mod_with_same_name_as_function() { - check( - r#" -use self::m<|>y::Bar; -mod my { pub struct Bar; } - -fn my() {} -"#, - expect![[r#" - *my* - ```rust - mod my - ``` - "#]], - ); - } - - #[test] - fn test_hover_struct_doc_comment() { - check( - r#" -/// bar docs -struct Bar; - -fn foo() { let bar = Ba<|>r; } -"#, - expect![[r#" - *Bar* - ```rust - struct Bar - ``` - ___ - - bar docs - "#]], - ); - } - - #[test] - fn test_hover_struct_doc_attr() { - check( - r#" -#[doc = "bar docs"] -struct Bar; - -fn foo() { let bar = Ba<|>r; } -"#, - expect![[r#" - *Bar* - ```rust - struct Bar - ``` - ___ - - bar docs - "#]], - ); - } - - #[test] - fn 
test_hover_struct_doc_attr_multiple_and_mixed() { - check( - r#" -/// bar docs 0 -#[doc = "bar docs 1"] -#[doc = "bar docs 2"] -struct Bar; - -fn foo() { let bar = Ba<|>r; } -"#, - expect![[r#" - *Bar* - ```rust - struct Bar - ``` - ___ - - bar docs 0 - - bar docs 1 - - bar docs 2 - "#]], - ); - } - - #[test] - fn test_hover_macro_generated_struct_fn_doc_comment() { - mark::check!(hover_macro_generated_struct_fn_doc_comment); - - check( - r#" -macro_rules! bar { - () => { - struct Bar; - impl Bar { - /// Do the foo - fn foo(&self) {} - } - } -} - -bar!(); - -fn foo() { let bar = Bar; bar.fo<|>o(); } -"#, - expect![[r#" - *foo* - ```rust - Bar - ``` - - ```rust - fn foo(&self) - ``` - ___ - - Do the foo - "#]], - ); - } - - #[test] - fn test_hover_macro_generated_struct_fn_doc_attr() { - mark::check!(hover_macro_generated_struct_fn_doc_attr); - - check( - r#" -macro_rules! bar { - () => { - struct Bar; - impl Bar { - #[doc = "Do the foo"] - fn foo(&self) {} - } - } -} - -bar!(); - -fn foo() { let bar = Bar; bar.fo<|>o(); } -"#, - expect![[r#" - *foo* - ```rust - Bar - ``` - - ```rust - fn foo(&self) - ``` - ___ - - Do the foo - "#]], - ); - } - - #[test] - fn test_hover_trait_has_impl_action() { - check_actions( - r#"trait foo<|>() {}"#, - expect![[r#" - [ - Implementaion( - FilePosition { - file_id: FileId( - 1, - ), - offset: 6, - }, - ), - ] - "#]], - ); - } - - #[test] - fn test_hover_struct_has_impl_action() { - check_actions( - r"struct foo<|>() {}", - expect![[r#" - [ - Implementaion( - FilePosition { - file_id: FileId( - 1, - ), - offset: 7, - }, - ), - ] - "#]], - ); - } - - #[test] - fn test_hover_union_has_impl_action() { - check_actions( - r#"union foo<|>() {}"#, - expect![[r#" - [ - Implementaion( - FilePosition { - file_id: FileId( - 1, - ), - offset: 6, - }, - ), - ] - "#]], - ); - } - - #[test] - fn test_hover_enum_has_impl_action() { - check_actions( - r"enum foo<|>() { A, B }", - expect![[r#" - [ - Implementaion( - FilePosition { - file_id: FileId( 
- 1, - ), - offset: 5, - }, - ), - ] - "#]], - ); - } - - #[test] - fn test_hover_test_has_action() { - check_actions( - r#" -#[test] -fn foo_<|>test() {} -"#, - expect![[r#" - [ - Runnable( - Runnable { - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 0..24, - focus_range: Some( - 11..19, - ), - name: "foo_test", - kind: FN, - container_name: None, - description: None, - docs: None, - }, - kind: Test { - test_id: Path( - "foo_test", - ), - attr: TestAttr { - ignore: false, - }, - }, - cfg_exprs: [], - }, - ), - ] - "#]], - ); - } - - #[test] - fn test_hover_test_mod_has_action() { - check_actions( - r#" -mod tests<|> { - #[test] - fn foo_test() {} -} -"#, - expect![[r#" - [ - Runnable( - Runnable { - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 0..46, - focus_range: Some( - 4..9, - ), - name: "tests", - kind: MODULE, - container_name: None, - description: None, - docs: None, - }, - kind: TestMod { - path: "tests", - }, - cfg_exprs: [], - }, - ), - ] - "#]], - ); - } - - #[test] - fn test_hover_struct_has_goto_type_action() { - check_actions( - r#" -struct S{ f1: u32 } - -fn main() { let s<|>t = S{ f1:0 }; } - "#, - expect![[r#" - [ - GoToType( - [ - HoverGotoTypeData { - mod_path: "S", - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 0..19, - focus_range: Some( - 7..8, - ), - name: "S", - kind: STRUCT, - container_name: None, - description: Some( - "struct S", - ), - docs: None, - }, - }, - ], - ), - ] - "#]], - ); - } - - #[test] - fn test_hover_generic_struct_has_goto_type_actions() { - check_actions( - r#" -struct Arg(u32); -struct S{ f1: T } - -fn main() { let s<|>t = S{ f1:Arg(0) }; } -"#, - expect![[r#" - [ - GoToType( - [ - HoverGotoTypeData { - mod_path: "S", - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 17..37, - focus_range: Some( - 24..25, - ), - name: "S", - kind: STRUCT, - container_name: None, - description: Some( - "struct S", - ), - docs: None, - }, - }, - 
HoverGotoTypeData { - mod_path: "Arg", - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 0..16, - focus_range: Some( - 7..10, - ), - name: "Arg", - kind: STRUCT, - container_name: None, - description: Some( - "struct Arg", - ), - docs: None, - }, - }, - ], - ), - ] - "#]], - ); - } - - #[test] - fn test_hover_generic_struct_has_flattened_goto_type_actions() { - check_actions( - r#" -struct Arg(u32); -struct S{ f1: T } - -fn main() { let s<|>t = S{ f1: S{ f1: Arg(0) } }; } - "#, - expect![[r#" - [ - GoToType( - [ - HoverGotoTypeData { - mod_path: "S", - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 17..37, - focus_range: Some( - 24..25, - ), - name: "S", - kind: STRUCT, - container_name: None, - description: Some( - "struct S", - ), - docs: None, - }, - }, - HoverGotoTypeData { - mod_path: "Arg", - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 0..16, - focus_range: Some( - 7..10, - ), - name: "Arg", - kind: STRUCT, - container_name: None, - description: Some( - "struct Arg", - ), - docs: None, - }, - }, - ], - ), - ] - "#]], - ); - } - - #[test] - fn test_hover_tuple_has_goto_type_actions() { - check_actions( - r#" -struct A(u32); -struct B(u32); -mod M { - pub struct C(u32); -} - -fn main() { let s<|>t = (A(1), B(2), M::C(3) ); } -"#, - expect![[r#" - [ - GoToType( - [ - HoverGotoTypeData { - mod_path: "A", - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 0..14, - focus_range: Some( - 7..8, - ), - name: "A", - kind: STRUCT, - container_name: None, - description: Some( - "struct A", - ), - docs: None, - }, - }, - HoverGotoTypeData { - mod_path: "B", - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 15..29, - focus_range: Some( - 22..23, - ), - name: "B", - kind: STRUCT, - container_name: None, - description: Some( - "struct B", - ), - docs: None, - }, - }, - HoverGotoTypeData { - mod_path: "M::C", - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 
42..60, - focus_range: Some( - 53..54, - ), - name: "C", - kind: STRUCT, - container_name: None, - description: Some( - "pub struct C", - ), - docs: None, - }, - }, - ], - ), - ] - "#]], - ); - } - - #[test] - fn test_hover_return_impl_trait_has_goto_type_action() { - check_actions( - r#" -trait Foo {} -fn foo() -> impl Foo {} - -fn main() { let s<|>t = foo(); } -"#, - expect![[r#" - [ - GoToType( - [ - HoverGotoTypeData { - mod_path: "Foo", - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 0..12, - focus_range: Some( - 6..9, - ), - name: "Foo", - kind: TRAIT, - container_name: None, - description: Some( - "trait Foo", - ), - docs: None, - }, - }, - ], - ), - ] - "#]], - ); - } - - #[test] - fn test_hover_generic_return_impl_trait_has_goto_type_action() { - check_actions( - r#" -trait Foo {} -struct S; -fn foo() -> impl Foo {} - -fn main() { let s<|>t = foo(); } -"#, - expect![[r#" - [ - GoToType( - [ - HoverGotoTypeData { - mod_path: "Foo", - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 0..15, - focus_range: Some( - 6..9, - ), - name: "Foo", - kind: TRAIT, - container_name: None, - description: Some( - "trait Foo", - ), - docs: None, - }, - }, - HoverGotoTypeData { - mod_path: "S", - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 16..25, - focus_range: Some( - 23..24, - ), - name: "S", - kind: STRUCT, - container_name: None, - description: Some( - "struct S", - ), - docs: None, - }, - }, - ], - ), - ] - "#]], - ); - } - - #[test] - fn test_hover_return_impl_traits_has_goto_type_action() { - check_actions( - r#" -trait Foo {} -trait Bar {} -fn foo() -> impl Foo + Bar {} - -fn main() { let s<|>t = foo(); } - "#, - expect![[r#" - [ - GoToType( - [ - HoverGotoTypeData { - mod_path: "Foo", - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 0..12, - focus_range: Some( - 6..9, - ), - name: "Foo", - kind: TRAIT, - container_name: None, - description: Some( - "trait Foo", - ), - docs: None, - }, 
- }, - HoverGotoTypeData { - mod_path: "Bar", - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 13..25, - focus_range: Some( - 19..22, - ), - name: "Bar", - kind: TRAIT, - container_name: None, - description: Some( - "trait Bar", - ), - docs: None, - }, - }, - ], - ), - ] - "#]], - ); - } - - #[test] - fn test_hover_generic_return_impl_traits_has_goto_type_action() { - check_actions( - r#" -trait Foo {} -trait Bar {} -struct S1 {} -struct S2 {} - -fn foo() -> impl Foo + Bar {} - -fn main() { let s<|>t = foo(); } -"#, - expect![[r#" - [ - GoToType( - [ - HoverGotoTypeData { - mod_path: "Foo", - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 0..15, - focus_range: Some( - 6..9, - ), - name: "Foo", - kind: TRAIT, - container_name: None, - description: Some( - "trait Foo", - ), - docs: None, - }, - }, - HoverGotoTypeData { - mod_path: "Bar", - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 16..31, - focus_range: Some( - 22..25, - ), - name: "Bar", - kind: TRAIT, - container_name: None, - description: Some( - "trait Bar", - ), - docs: None, - }, - }, - HoverGotoTypeData { - mod_path: "S1", - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 32..44, - focus_range: Some( - 39..41, - ), - name: "S1", - kind: STRUCT, - container_name: None, - description: Some( - "struct S1", - ), - docs: None, - }, - }, - HoverGotoTypeData { - mod_path: "S2", - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 45..57, - focus_range: Some( - 52..54, - ), - name: "S2", - kind: STRUCT, - container_name: None, - description: Some( - "struct S2", - ), - docs: None, - }, - }, - ], - ), - ] - "#]], - ); - } - - #[test] - fn test_hover_arg_impl_trait_has_goto_type_action() { - check_actions( - r#" -trait Foo {} -fn foo(ar<|>g: &impl Foo) {} -"#, - expect![[r#" - [ - GoToType( - [ - HoverGotoTypeData { - mod_path: "Foo", - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 0..12, - 
focus_range: Some( - 6..9, - ), - name: "Foo", - kind: TRAIT, - container_name: None, - description: Some( - "trait Foo", - ), - docs: None, - }, - }, - ], - ), - ] - "#]], - ); - } - - #[test] - fn test_hover_arg_impl_traits_has_goto_type_action() { - check_actions( - r#" -trait Foo {} -trait Bar {} -struct S{} - -fn foo(ar<|>g: &impl Foo + Bar) {} -"#, - expect![[r#" - [ - GoToType( - [ - HoverGotoTypeData { - mod_path: "Foo", - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 0..12, - focus_range: Some( - 6..9, - ), - name: "Foo", - kind: TRAIT, - container_name: None, - description: Some( - "trait Foo", - ), - docs: None, - }, - }, - HoverGotoTypeData { - mod_path: "Bar", - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 13..28, - focus_range: Some( - 19..22, - ), - name: "Bar", - kind: TRAIT, - container_name: None, - description: Some( - "trait Bar", - ), - docs: None, - }, - }, - HoverGotoTypeData { - mod_path: "S", - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 29..39, - focus_range: Some( - 36..37, - ), - name: "S", - kind: STRUCT, - container_name: None, - description: Some( - "struct S", - ), - docs: None, - }, - }, - ], - ), - ] - "#]], - ); - } - - #[test] - fn test_hover_arg_generic_impl_trait_has_goto_type_action() { - check_actions( - r#" -trait Foo {} -struct S {} -fn foo(ar<|>g: &impl Foo) {} -"#, - expect![[r#" - [ - GoToType( - [ - HoverGotoTypeData { - mod_path: "Foo", - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 0..15, - focus_range: Some( - 6..9, - ), - name: "Foo", - kind: TRAIT, - container_name: None, - description: Some( - "trait Foo", - ), - docs: None, - }, - }, - HoverGotoTypeData { - mod_path: "S", - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 16..27, - focus_range: Some( - 23..24, - ), - name: "S", - kind: STRUCT, - container_name: None, - description: Some( - "struct S", - ), - docs: None, - }, - }, - ], - ), - ] - "#]], - ); - 
} - - #[test] - fn test_hover_dyn_return_has_goto_type_action() { - check_actions( - r#" -trait Foo {} -struct S; -impl Foo for S {} - -struct B{} -fn foo() -> B {} - -fn main() { let s<|>t = foo(); } -"#, - expect![[r#" - [ - GoToType( - [ - HoverGotoTypeData { - mod_path: "B", - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 42..55, - focus_range: Some( - 49..50, - ), - name: "B", - kind: STRUCT, - container_name: None, - description: Some( - "struct B", - ), - docs: None, - }, - }, - HoverGotoTypeData { - mod_path: "Foo", - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 0..12, - focus_range: Some( - 6..9, - ), - name: "Foo", - kind: TRAIT, - container_name: None, - description: Some( - "trait Foo", - ), - docs: None, - }, - }, - ], - ), - ] - "#]], - ); - } - - #[test] - fn test_hover_dyn_arg_has_goto_type_action() { - check_actions( - r#" -trait Foo {} -fn foo(ar<|>g: &dyn Foo) {} -"#, - expect![[r#" - [ - GoToType( - [ - HoverGotoTypeData { - mod_path: "Foo", - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 0..12, - focus_range: Some( - 6..9, - ), - name: "Foo", - kind: TRAIT, - container_name: None, - description: Some( - "trait Foo", - ), - docs: None, - }, - }, - ], - ), - ] - "#]], - ); - } - - #[test] - fn test_hover_generic_dyn_arg_has_goto_type_action() { - check_actions( - r#" -trait Foo {} -struct S {} -fn foo(ar<|>g: &dyn Foo) {} -"#, - expect![[r#" - [ - GoToType( - [ - HoverGotoTypeData { - mod_path: "Foo", - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 0..15, - focus_range: Some( - 6..9, - ), - name: "Foo", - kind: TRAIT, - container_name: None, - description: Some( - "trait Foo", - ), - docs: None, - }, - }, - HoverGotoTypeData { - mod_path: "S", - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 16..27, - focus_range: Some( - 23..24, - ), - name: "S", - kind: STRUCT, - container_name: None, - description: Some( - "struct S", - ), - docs: None, - 
}, - }, - ], - ), - ] - "#]], - ); - } - - #[test] - fn test_hover_goto_type_action_links_order() { - check_actions( - r#" -trait ImplTrait {} -trait DynTrait {} -struct B {} -struct S {} - -fn foo(a<|>rg: &impl ImplTrait>>>) {} - "#, - expect![[r#" - [ - GoToType( - [ - HoverGotoTypeData { - mod_path: "ImplTrait", - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 0..21, - focus_range: Some( - 6..15, - ), - name: "ImplTrait", - kind: TRAIT, - container_name: None, - description: Some( - "trait ImplTrait", - ), - docs: None, - }, - }, - HoverGotoTypeData { - mod_path: "B", - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 43..57, - focus_range: Some( - 50..51, - ), - name: "B", - kind: STRUCT, - container_name: None, - description: Some( - "struct B", - ), - docs: None, - }, - }, - HoverGotoTypeData { - mod_path: "DynTrait", - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 22..42, - focus_range: Some( - 28..36, - ), - name: "DynTrait", - kind: TRAIT, - container_name: None, - description: Some( - "trait DynTrait", - ), - docs: None, - }, - }, - HoverGotoTypeData { - mod_path: "S", - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 58..69, - focus_range: Some( - 65..66, - ), - name: "S", - kind: STRUCT, - container_name: None, - description: Some( - "struct S", - ), - docs: None, - }, - }, - ], - ), - ] - "#]], - ); - } - - #[test] - fn test_hover_associated_type_has_goto_type_action() { - check_actions( - r#" -trait Foo { - type Item; - fn get(self) -> Self::Item {} -} - -struct Bar{} -struct S{} - -impl Foo for S { type Item = Bar; } - -fn test() -> impl Foo { S {} } - -fn main() { let s<|>t = test().get(); } -"#, - expect![[r#" - [ - GoToType( - [ - HoverGotoTypeData { - mod_path: "Foo", - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 0..62, - focus_range: Some( - 6..9, - ), - name: "Foo", - kind: TRAIT, - container_name: None, - description: Some( - "trait Foo", - ), 
- docs: None, - }, - }, - ], - ), - ] - "#]], - ); - } -} diff --git a/crates/ra_ide/src/inlay_hints.rs b/crates/ra_ide/src/inlay_hints.rs deleted file mode 100644 index 1bacead63c..0000000000 --- a/crates/ra_ide/src/inlay_hints.rs +++ /dev/null @@ -1,922 +0,0 @@ -use hir::{Adt, Callable, HirDisplay, Semantics, Type}; -use ra_ide_db::RootDatabase; -use ra_prof::profile; -use ra_syntax::{ - ast::{self, ArgListOwner, AstNode}, - match_ast, Direction, NodeOrToken, SmolStr, SyntaxKind, TextRange, T, -}; -use stdx::to_lower_snake_case; - -use crate::FileId; -use ast::NameOwner; -use either::Either; - -#[derive(Clone, Debug, PartialEq, Eq)] -pub struct InlayHintsConfig { - pub type_hints: bool, - pub parameter_hints: bool, - pub chaining_hints: bool, - pub max_length: Option, -} - -impl Default for InlayHintsConfig { - fn default() -> Self { - Self { type_hints: true, parameter_hints: true, chaining_hints: true, max_length: None } - } -} - -#[derive(Clone, Debug, PartialEq, Eq)] -pub enum InlayKind { - TypeHint, - ParameterHint, - ChainingHint, -} - -#[derive(Debug)] -pub struct InlayHint { - pub range: TextRange, - pub kind: InlayKind, - pub label: SmolStr, -} - -// Feature: Inlay Hints -// -// rust-analyzer shows additional information inline with the source code. -// Editors usually render this using read-only virtual text snippets interspersed with code. -// -// rust-analyzer shows hits for -// -// * types of local variables -// * names of function arguments -// * types of chained expressions -// -// **Note:** VS Code does not have native support for inlay hints https://github.com/microsoft/vscode/issues/16221[yet] and the hints are implemented using decorations. -// This approach has limitations, the caret movement and bracket highlighting near the edges of the hint may be weird: -// https://github.com/rust-analyzer/rust-analyzer/issues/1623[1], https://github.com/rust-analyzer/rust-analyzer/issues/3453[2]. 
-// -// |=== -// | Editor | Action Name -// -// | VS Code | **Rust Analyzer: Toggle inlay hints* -// |=== -pub(crate) fn inlay_hints( - db: &RootDatabase, - file_id: FileId, - config: &InlayHintsConfig, -) -> Vec { - let _p = profile("inlay_hints"); - let sema = Semantics::new(db); - let file = sema.parse(file_id); - - let mut res = Vec::new(); - for node in file.syntax().descendants() { - if let Some(expr) = ast::Expr::cast(node.clone()) { - get_chaining_hints(&mut res, &sema, config, expr); - } - - match_ast! { - match node { - ast::CallExpr(it) => { get_param_name_hints(&mut res, &sema, config, ast::Expr::from(it)); }, - ast::MethodCallExpr(it) => { get_param_name_hints(&mut res, &sema, config, ast::Expr::from(it)); }, - ast::IdentPat(it) => { get_bind_pat_hints(&mut res, &sema, config, it); }, - _ => (), - } - } - } - res -} - -fn get_chaining_hints( - acc: &mut Vec, - sema: &Semantics, - config: &InlayHintsConfig, - expr: ast::Expr, -) -> Option<()> { - if !config.chaining_hints { - return None; - } - - if matches!(expr, ast::Expr::RecordExpr(_)) { - return None; - } - - let mut tokens = expr - .syntax() - .siblings_with_tokens(Direction::Next) - .filter_map(NodeOrToken::into_token) - .filter(|t| match t.kind() { - SyntaxKind::WHITESPACE if !t.text().contains('\n') => false, - SyntaxKind::COMMENT => false, - _ => true, - }); - - // Chaining can be defined as an expression whose next sibling tokens are newline and dot - // Ignoring extra whitespace and comments - let next = tokens.next()?.kind(); - let next_next = tokens.next()?.kind(); - if next == SyntaxKind::WHITESPACE && next_next == T![.] 
{ - let ty = sema.type_of_expr(&expr)?; - if ty.is_unknown() { - return None; - } - if matches!(expr, ast::Expr::PathExpr(_)) { - if let Some(Adt::Struct(st)) = ty.as_adt() { - if st.fields(sema.db).is_empty() { - return None; - } - } - } - let label = ty.display_truncated(sema.db, config.max_length).to_string(); - acc.push(InlayHint { - range: expr.syntax().text_range(), - kind: InlayKind::ChainingHint, - label: label.into(), - }); - } - Some(()) -} - -fn get_param_name_hints( - acc: &mut Vec, - sema: &Semantics, - config: &InlayHintsConfig, - expr: ast::Expr, -) -> Option<()> { - if !config.parameter_hints { - return None; - } - - let args = match &expr { - ast::Expr::CallExpr(expr) => expr.arg_list()?.args(), - ast::Expr::MethodCallExpr(expr) => expr.arg_list()?.args(), - _ => return None, - }; - - let callable = get_callable(sema, &expr)?; - let hints = callable - .params(sema.db) - .into_iter() - .zip(args) - .filter_map(|((param, _ty), arg)| match param? { - Either::Left(self_param) => Some((self_param.to_string(), arg)), - Either::Right(pat) => { - let param_name = match pat { - ast::Pat::IdentPat(it) => it.name()?.to_string(), - it => it.to_string(), - }; - Some((param_name, arg)) - } - }) - .filter(|(param_name, arg)| should_show_param_name_hint(sema, &callable, ¶m_name, &arg)) - .map(|(param_name, arg)| InlayHint { - range: arg.syntax().text_range(), - kind: InlayKind::ParameterHint, - label: param_name.into(), - }); - - acc.extend(hints); - Some(()) -} - -fn get_bind_pat_hints( - acc: &mut Vec, - sema: &Semantics, - config: &InlayHintsConfig, - pat: ast::IdentPat, -) -> Option<()> { - if !config.type_hints { - return None; - } - - let ty = sema.type_of_pat(&pat.clone().into())?; - - if should_not_display_type_hint(sema.db, &pat, &ty) { - return None; - } - - acc.push(InlayHint { - range: pat.syntax().text_range(), - kind: InlayKind::TypeHint, - label: ty.display_truncated(sema.db, config.max_length).to_string().into(), - }); - Some(()) -} - -fn 
pat_is_enum_variant(db: &RootDatabase, bind_pat: &ast::IdentPat, pat_ty: &Type) -> bool { - if let Some(Adt::Enum(enum_data)) = pat_ty.as_adt() { - let pat_text = bind_pat.to_string(); - enum_data - .variants(db) - .into_iter() - .map(|variant| variant.name(db).to_string()) - .any(|enum_name| enum_name == pat_text) - } else { - false - } -} - -fn should_not_display_type_hint( - db: &RootDatabase, - bind_pat: &ast::IdentPat, - pat_ty: &Type, -) -> bool { - if pat_ty.is_unknown() { - return true; - } - - if let Some(Adt::Struct(s)) = pat_ty.as_adt() { - if s.fields(db).is_empty() && s.name(db).to_string() == bind_pat.to_string() { - return true; - } - } - - for node in bind_pat.syntax().ancestors() { - match_ast! { - match node { - ast::LetStmt(it) => { - return it.ty().is_some() - }, - ast::Param(it) => { - return it.ty().is_some() - }, - ast::MatchArm(_it) => { - return pat_is_enum_variant(db, bind_pat, pat_ty); - }, - ast::IfExpr(it) => { - return it.condition().and_then(|condition| condition.pat()).is_some() - && pat_is_enum_variant(db, bind_pat, pat_ty); - }, - ast::WhileExpr(it) => { - return it.condition().and_then(|condition| condition.pat()).is_some() - && pat_is_enum_variant(db, bind_pat, pat_ty); - }, - _ => (), - } - } - } - false -} - -fn should_show_param_name_hint( - sema: &Semantics, - callable: &Callable, - param_name: &str, - argument: &ast::Expr, -) -> bool { - let param_name = param_name.trim_start_matches('_'); - let fn_name = match callable.kind() { - hir::CallableKind::Function(it) => Some(it.name(sema.db).to_string()), - hir::CallableKind::TupleStruct(_) - | hir::CallableKind::TupleEnumVariant(_) - | hir::CallableKind::Closure => None, - }; - if param_name.is_empty() - || Some(param_name) == fn_name.as_ref().map(|s| s.trim_start_matches('_')) - || is_argument_similar_to_param_name(sema, argument, param_name) - || param_name.starts_with("ra_fixture") - { - return false; - } - - // avoid displaying hints for common functions like map, filter, 
etc. - // or other obvious words used in std - !(callable.n_params() == 1 && is_obvious_param(param_name)) -} - -fn is_argument_similar_to_param_name( - sema: &Semantics, - argument: &ast::Expr, - param_name: &str, -) -> bool { - if is_enum_name_similar_to_param_name(sema, argument, param_name) { - return true; - } - match get_string_representation(argument) { - None => false, - Some(repr) => { - let argument_string = repr.trim_start_matches('_'); - argument_string.starts_with(param_name) || argument_string.ends_with(param_name) - } - } -} - -fn is_enum_name_similar_to_param_name( - sema: &Semantics, - argument: &ast::Expr, - param_name: &str, -) -> bool { - match sema.type_of_expr(argument).and_then(|t| t.as_adt()) { - Some(Adt::Enum(e)) => to_lower_snake_case(&e.name(sema.db).to_string()) == param_name, - _ => false, - } -} - -fn get_string_representation(expr: &ast::Expr) -> Option { - match expr { - ast::Expr::MethodCallExpr(method_call_expr) => { - Some(method_call_expr.name_ref()?.to_string()) - } - ast::Expr::RefExpr(ref_expr) => get_string_representation(&ref_expr.expr()?), - _ => Some(expr.to_string()), - } -} - -fn is_obvious_param(param_name: &str) -> bool { - let is_obvious_param_name = - matches!(param_name, "predicate" | "value" | "pat" | "rhs" | "other"); - param_name.len() == 1 || is_obvious_param_name -} - -fn get_callable(sema: &Semantics, expr: &ast::Expr) -> Option { - match expr { - ast::Expr::CallExpr(expr) => sema.type_of_expr(&expr.expr()?)?.as_callable(sema.db), - ast::Expr::MethodCallExpr(expr) => sema.resolve_method_call_as_callable(expr), - _ => None, - } -} - -#[cfg(test)] -mod tests { - use expect::{expect, Expect}; - use test_utils::extract_annotations; - - use crate::{inlay_hints::InlayHintsConfig, mock_analysis::single_file}; - - fn check(ra_fixture: &str) { - check_with_config(InlayHintsConfig::default(), ra_fixture); - } - - fn check_with_config(config: InlayHintsConfig, ra_fixture: &str) { - let (analysis, file_id) = 
single_file(ra_fixture); - let expected = extract_annotations(&*analysis.file_text(file_id).unwrap()); - let inlay_hints = analysis.inlay_hints(file_id, &config).unwrap(); - let actual = - inlay_hints.into_iter().map(|it| (it.range, it.label.to_string())).collect::>(); - assert_eq!(expected, actual, "\nExpected:\n{:#?}\n\nActual:\n{:#?}", expected, actual); - } - - fn check_expect(config: InlayHintsConfig, ra_fixture: &str, expect: Expect) { - let (analysis, file_id) = single_file(ra_fixture); - let inlay_hints = analysis.inlay_hints(file_id, &config).unwrap(); - expect.assert_debug_eq(&inlay_hints) - } - - #[test] - fn param_hints_only() { - check_with_config( - InlayHintsConfig { - parameter_hints: true, - type_hints: false, - chaining_hints: false, - max_length: None, - }, - r#" -fn foo(a: i32, b: i32) -> i32 { a + b } -fn main() { - let _x = foo( - 4, - //^ a - 4, - //^ b - ); -}"#, - ); - } - - #[test] - fn hints_disabled() { - check_with_config( - InlayHintsConfig { - type_hints: false, - parameter_hints: false, - chaining_hints: false, - max_length: None, - }, - r#" -fn foo(a: i32, b: i32) -> i32 { a + b } -fn main() { - let _x = foo(4, 4); -}"#, - ); - } - - #[test] - fn type_hints_only() { - check_with_config( - InlayHintsConfig { - type_hints: true, - parameter_hints: false, - chaining_hints: false, - max_length: None, - }, - r#" -fn foo(a: i32, b: i32) -> i32 { a + b } -fn main() { - let _x = foo(4, 4); - //^^ i32 -}"#, - ); - } - - #[test] - fn default_generic_types_should_not_be_displayed() { - check( - r#" -struct Test { k: K, t: T } - -fn main() { - let zz = Test { t: 23u8, k: 33 }; - //^^ Test - let zz_ref = &zz; - //^^^^^^ &Test - let test = || zz; - //^^^^ || -> Test -}"#, - ); - } - - #[test] - fn let_statement() { - check( - r#" -#[derive(PartialEq)] -enum Option { None, Some(T) } - -#[derive(PartialEq)] -struct Test { a: Option, b: u8 } - -fn main() { - struct InnerStruct {} - - let test = 54; - //^^^^ i32 - let test: i32 = 33; - let mut test = 
33; - //^^^^^^^^ i32 - let _ = 22; - let test = "test"; - //^^^^ &str - let test = InnerStruct {}; - - let test = unresolved(); - - let test = (42, 'a'); - //^^^^ (i32, char) - let (a, (b, (c,)) = (2, (3, (9.2,)); - //^ i32 ^ i32 ^ f64 - let &x = &92; - //^ i32 -}"#, - ); - } - - #[test] - fn closure_parameters() { - check( - r#" -fn main() { - let mut start = 0; - //^^^^^^^^^ i32 - (0..2).for_each(|increment| { start += increment; }); - //^^^^^^^^^ i32 - - let multiply = - //^^^^^^^^ |…| -> i32 - | a, b| a * b - //^ i32 ^ i32 - ; - - let _: i32 = multiply(1, 2); - let multiply_ref = &multiply; - //^^^^^^^^^^^^ &|…| -> i32 - - let return_42 = || 42; - //^^^^^^^^^ || -> i32 -}"#, - ); - } - - #[test] - fn for_expression() { - check( - r#" -fn main() { - let mut start = 0; - //^^^^^^^^^ i32 - for increment in 0..2 { start += increment; } - //^^^^^^^^^ i32 -}"#, - ); - } - - #[test] - fn if_expr() { - check( - r#" -enum Option { None, Some(T) } -use Option::*; - -struct Test { a: Option, b: u8 } - -fn main() { - let test = Some(Test { a: Some(3), b: 1 }); - //^^^^ Option - if let None = &test {}; - if let test = &test {}; - //^^^^ &Option - if let Some(test) = &test {}; - //^^^^ &Test - if let Some(Test { a, b }) = &test {}; - //^ &Option ^ &u8 - if let Some(Test { a: x, b: y }) = &test {}; - //^ &Option ^ &u8 - if let Some(Test { a: Some(x), b: y }) = &test {}; - //^ &u32 ^ &u8 - if let Some(Test { a: None, b: y }) = &test {}; - //^ &u8 - if let Some(Test { b: y, .. 
}) = &test {}; - //^ &u8 - if test == None {} -}"#, - ); - } - - #[test] - fn while_expr() { - check( - r#" -enum Option { None, Some(T) } -use Option::*; - -struct Test { a: Option, b: u8 } - -fn main() { - let test = Some(Test { a: Some(3), b: 1 }); - //^^^^ Option - while let Some(Test { a: Some(x), b: y }) = &test {}; - //^ &u32 ^ &u8 -}"#, - ); - } - - #[test] - fn match_arm_list() { - check( - r#" -enum Option { None, Some(T) } -use Option::*; - -struct Test { a: Option, b: u8 } - -fn main() { - match Some(Test { a: Some(3), b: 1 }) { - None => (), - test => (), - //^^^^ Option - Some(Test { a: Some(x), b: y }) => (), - //^ u32 ^ u8 - _ => {} - } -}"#, - ); - } - - #[test] - fn hint_truncation() { - check_with_config( - InlayHintsConfig { max_length: Some(8), ..Default::default() }, - r#" -struct Smol(T); - -struct VeryLongOuterName(T); - -fn main() { - let a = Smol(0u32); - //^ Smol - let b = VeryLongOuterName(0usize); - //^ VeryLongOuterName<…> - let c = Smol(Smol(0u32)) - //^ Smol> -}"#, - ); - } - - #[test] - fn function_call_parameter_hint() { - check( - r#" -enum Option { None, Some(T) } -use Option::*; - -struct FileId {} -struct SmolStr {} - -struct TextRange {} -struct SyntaxKind {} -struct NavigationTarget {} - -struct Test {} - -impl Test { - fn method(&self, mut param: i32) -> i32 { param * 2 } - - fn from_syntax( - file_id: FileId, - name: SmolStr, - focus_range: Option, - full_range: TextRange, - kind: SyntaxKind, - docs: Option, - ) -> NavigationTarget { - NavigationTarget {} - } -} - -fn test_func(mut foo: i32, bar: i32, msg: &str, _: i32, last: i32) -> i32 { - foo + bar -} - -fn main() { - let not_literal = 1; - //^^^^^^^^^^^ i32 - let _: i32 = test_func(1, 2, "hello", 3, not_literal); - //^ foo ^ bar ^^^^^^^ msg ^^^^^^^^^^^ last - let t: Test = Test {}; - t.method(123); - //^^^ param - Test::method(&t, 3456); - //^^ &self ^^^^ param - Test::from_syntax( - FileId {}, - //^^^^^^^^^ file_id - "impl".into(), - //^^^^^^^^^^^^^ name - None, - 
//^^^^ focus_range - TextRange {}, - //^^^^^^^^^^^^ full_range - SyntaxKind {}, - //^^^^^^^^^^^^^ kind - None, - //^^^^ docs - ); -}"#, - ); - } - - #[test] - fn omitted_parameters_hints_heuristics() { - check_with_config( - InlayHintsConfig { max_length: Some(8), ..Default::default() }, - r#" -fn map(f: i32) {} -fn filter(predicate: i32) {} - -struct TestVarContainer { - test_var: i32, -} - -impl TestVarContainer { - fn test_var(&self) -> i32 { - self.test_var - } -} - -struct Test {} - -impl Test { - fn map(self, f: i32) -> Self { - self - } - - fn filter(self, predicate: i32) -> Self { - self - } - - fn field(self, value: i32) -> Self { - self - } - - fn no_hints_expected(&self, _: i32, test_var: i32) {} - - fn frob(&self, frob: bool) {} -} - -struct Param {} - -fn different_order(param: &Param) {} -fn different_order_mut(param: &mut Param) {} -fn has_underscore(_param: bool) {} -fn enum_matches_param_name(completion_kind: CompletionKind) {} - -fn twiddle(twiddle: bool) {} -fn doo(_doo: bool) {} - -enum CompletionKind { - Keyword, -} - -fn main() { - let container: TestVarContainer = TestVarContainer { test_var: 42 }; - let test: Test = Test {}; - - map(22); - filter(33); - - let test_processed: Test = test.map(1).filter(2).field(3); - - let test_var: i32 = 55; - test_processed.no_hints_expected(22, test_var); - test_processed.no_hints_expected(33, container.test_var); - test_processed.no_hints_expected(44, container.test_var()); - test_processed.frob(false); - - twiddle(true); - doo(true); - - let mut param_begin: Param = Param {}; - different_order(¶m_begin); - different_order(&mut param_begin); - - let param: bool = true; - has_underscore(param); - - enum_matches_param_name(CompletionKind::Keyword); - - let a: f64 = 7.0; - let b: f64 = 4.0; - let _: f64 = a.div_euclid(b); - let _: f64 = a.abs_sub(b); -}"#, - ); - } - - #[test] - fn unit_structs_have_no_type_hints() { - check_with_config( - InlayHintsConfig { max_length: Some(8), ..Default::default() }, - r#" 
-enum Result { Ok(T), Err(E) } -use Result::*; - -struct SyntheticSyntax; - -fn main() { - match Ok(()) { - Ok(_) => (), - Err(SyntheticSyntax) => (), - } -}"#, - ); - } - - #[test] - fn chaining_hints_ignore_comments() { - check_expect( - InlayHintsConfig { - parameter_hints: false, - type_hints: false, - chaining_hints: true, - max_length: None, - }, - r#" -struct A(B); -impl A { fn into_b(self) -> B { self.0 } } -struct B(C); -impl B { fn into_c(self) -> C { self.0 } } -struct C; - -fn main() { - let c = A(B(C)) - .into_b() // This is a comment - .into_c(); -} -"#, - expect![[r#" - [ - InlayHint { - range: 147..172, - kind: ChainingHint, - label: "B", - }, - InlayHint { - range: 147..154, - kind: ChainingHint, - label: "A", - }, - ] - "#]], - ); - } - - #[test] - fn chaining_hints_without_newlines() { - check_with_config( - InlayHintsConfig { - parameter_hints: false, - type_hints: false, - chaining_hints: true, - max_length: None, - }, - r#" -struct A(B); -impl A { fn into_b(self) -> B { self.0 } } -struct B(C); -impl B { fn into_c(self) -> C { self.0 } } -struct C; - -fn main() { - let c = A(B(C)).into_b().into_c(); -}"#, - ); - } - - #[test] - fn struct_access_chaining_hints() { - check_expect( - InlayHintsConfig { - parameter_hints: false, - type_hints: false, - chaining_hints: true, - max_length: None, - }, - r#" -struct A { pub b: B } -struct B { pub c: C } -struct C(pub bool); -struct D; - -impl D { - fn foo(&self) -> i32 { 42 } -} - -fn main() { - let x = A { b: B { c: C(true) } } - .b - .c - .0; - let x = D - .foo(); -}"#, - expect![[r#" - [ - InlayHint { - range: 143..190, - kind: ChainingHint, - label: "C", - }, - InlayHint { - range: 143..179, - kind: ChainingHint, - label: "B", - }, - ] - "#]], - ); - } - - #[test] - fn generic_chaining_hints() { - check_expect( - InlayHintsConfig { - parameter_hints: false, - type_hints: false, - chaining_hints: true, - max_length: None, - }, - r#" -struct A(T); -struct B(T); -struct C(T); -struct X(T, R); - -impl 
A { - fn new(t: T) -> Self { A(t) } - fn into_b(self) -> B { B(self.0) } -} -impl B { - fn into_c(self) -> C { C(self.0) } -} -fn main() { - let c = A::new(X(42, true)) - .into_b() - .into_c(); -} -"#, - expect![[r#" - [ - InlayHint { - range: 246..283, - kind: ChainingHint, - label: "B>", - }, - InlayHint { - range: 246..265, - kind: ChainingHint, - label: "A>", - }, - ] - "#]], - ); - } -} diff --git a/crates/ra_ide/src/join_lines.rs b/crates/ra_ide/src/join_lines.rs deleted file mode 100644 index 6907c09e80..0000000000 --- a/crates/ra_ide/src/join_lines.rs +++ /dev/null @@ -1,750 +0,0 @@ -use itertools::Itertools; -use ra_fmt::{compute_ws, extract_trivial_expression}; -use ra_syntax::{ - algo::{find_covering_element, non_trivia_sibling}, - ast::{self, AstNode, AstToken}, - Direction, NodeOrToken, SourceFile, - SyntaxKind::{self, WHITESPACE}, - SyntaxNode, SyntaxToken, TextRange, TextSize, T, -}; -use ra_text_edit::{TextEdit, TextEditBuilder}; - -// Feature: Join Lines -// -// Join selected lines into one, smartly fixing up whitespace, trailing commas, and braces. 
-// -// |=== -// | Editor | Action Name -// -// | VS Code | **Rust Analyzer: Join lines** -// |=== -pub fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit { - let range = if range.is_empty() { - let syntax = file.syntax(); - let text = syntax.text().slice(range.start()..); - let pos = match text.find_char('\n') { - None => return TextEditBuilder::default().finish(), - Some(pos) => pos, - }; - TextRange::at(range.start() + pos, TextSize::of('\n')) - } else { - range - }; - - let node = match find_covering_element(file.syntax(), range) { - NodeOrToken::Node(node) => node, - NodeOrToken::Token(token) => token.parent(), - }; - let mut edit = TextEditBuilder::default(); - for token in node.descendants_with_tokens().filter_map(|it| it.into_token()) { - let range = match range.intersect(token.text_range()) { - Some(range) => range, - None => continue, - } - token.text_range().start(); - let text = token.text(); - for (pos, _) in text[range].bytes().enumerate().filter(|&(_, b)| b == b'\n') { - let pos: TextSize = (pos as u32).into(); - let off = token.text_range().start() + range.start() + pos; - if !edit.invalidates_offset(off) { - remove_newline(&mut edit, &token, off); - } - } - } - - edit.finish() -} - -fn remove_newline(edit: &mut TextEditBuilder, token: &SyntaxToken, offset: TextSize) { - if token.kind() != WHITESPACE || token.text().bytes().filter(|&b| b == b'\n').count() != 1 { - // The node is either the first or the last in the file - let suff = &token.text()[TextRange::new( - offset - token.text_range().start() + TextSize::of('\n'), - TextSize::of(token.text().as_str()), - )]; - let spaces = suff.bytes().take_while(|&b| b == b' ').count(); - - edit.replace(TextRange::at(offset, ((spaces + 1) as u32).into()), " ".to_string()); - return; - } - - // The node is between two other nodes - let prev = token.prev_sibling_or_token().unwrap(); - let next = token.next_sibling_or_token().unwrap(); - if is_trailing_comma(prev.kind(), next.kind()) { - // Removes: 
trailing comma, newline (incl. surrounding whitespace) - edit.delete(TextRange::new(prev.text_range().start(), token.text_range().end())); - return; - } - if prev.kind() == T![,] && next.kind() == T!['}'] { - // Removes: comma, newline (incl. surrounding whitespace) - let space = if let Some(left) = prev.prev_sibling_or_token() { - compute_ws(left.kind(), next.kind()) - } else { - " " - }; - edit.replace( - TextRange::new(prev.text_range().start(), token.text_range().end()), - space.to_string(), - ); - return; - } - - if let (Some(_), Some(next)) = ( - prev.as_token().cloned().and_then(ast::Comment::cast), - next.as_token().cloned().and_then(ast::Comment::cast), - ) { - // Removes: newline (incl. surrounding whitespace), start of the next comment - edit.delete(TextRange::new( - token.text_range().start(), - next.syntax().text_range().start() + TextSize::of(next.prefix()), - )); - return; - } - - // Special case that turns something like: - // - // ``` - // my_function({<|> - // - // }) - // ``` - // - // into `my_function()` - if join_single_expr_block(edit, token).is_some() { - return; - } - // ditto for - // - // ``` - // use foo::{<|> - // bar - // }; - // ``` - if join_single_use_tree(edit, token).is_some() { - return; - } - - // Remove newline but add a computed amount of whitespace characters - edit.replace(token.text_range(), compute_ws(prev.kind(), next.kind()).to_string()); -} - -fn has_comma_after(node: &SyntaxNode) -> bool { - match non_trivia_sibling(node.clone().into(), Direction::Next) { - Some(n) => n.kind() == T![,], - _ => false, - } -} - -fn join_single_expr_block(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> { - let block_expr = ast::BlockExpr::cast(token.parent())?; - if !block_expr.is_standalone() { - return None; - } - let expr = extract_trivial_expression(&block_expr)?; - - let block_range = block_expr.syntax().text_range(); - let mut buf = expr.syntax().text().to_string(); - - // Match block needs to have a comma after the 
block - if let Some(match_arm) = block_expr.syntax().parent().and_then(ast::MatchArm::cast) { - if !has_comma_after(match_arm.syntax()) { - buf.push(','); - } - } - - edit.replace(block_range, buf); - - Some(()) -} - -fn join_single_use_tree(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> { - let use_tree_list = ast::UseTreeList::cast(token.parent())?; - let (tree,) = use_tree_list.use_trees().collect_tuple()?; - edit.replace(use_tree_list.syntax().text_range(), tree.syntax().text().to_string()); - Some(()) -} - -fn is_trailing_comma(left: SyntaxKind, right: SyntaxKind) -> bool { - matches!((left, right), (T![,], T![')']) | (T![,], T![']'])) -} - -#[cfg(test)] -mod tests { - use ra_syntax::SourceFile; - use test_utils::{add_cursor, assert_eq_text, extract_offset, extract_range}; - - use super::*; - - fn check_join_lines(before: &str, after: &str) { - let (before_cursor_pos, before) = extract_offset(before); - let file = SourceFile::parse(&before).ok().unwrap(); - - let range = TextRange::empty(before_cursor_pos); - let result = join_lines(&file, range); - - let actual = { - let mut actual = before.to_string(); - result.apply(&mut actual); - actual - }; - let actual_cursor_pos = result - .apply_to_offset(before_cursor_pos) - .expect("cursor position is affected by the edit"); - let actual = add_cursor(&actual, actual_cursor_pos); - assert_eq_text!(after, &actual); - } - - #[test] - fn test_join_lines_comma() { - check_join_lines( - r" -fn foo() { - <|>foo(1, - ) -} -", - r" -fn foo() { - <|>foo(1) -} -", - ); - } - - #[test] - fn test_join_lines_lambda_block() { - check_join_lines( - r" -pub fn reparse(&self, edit: &AtomTextEdit) -> File { - <|>self.incremental_reparse(edit).unwrap_or_else(|| { - self.full_reparse(edit) - }) -} -", - r" -pub fn reparse(&self, edit: &AtomTextEdit) -> File { - <|>self.incremental_reparse(edit).unwrap_or_else(|| self.full_reparse(edit)) -} -", - ); - } - - #[test] - fn test_join_lines_block() { - check_join_lines( - r" 
-fn foo() { - foo(<|>{ - 92 - }) -}", - r" -fn foo() { - foo(<|>92) -}", - ); - } - - #[test] - fn test_join_lines_diverging_block() { - let before = r" - fn foo() { - loop { - match x { - 92 => <|>{ - continue; - } - } - } - } - "; - let after = r" - fn foo() { - loop { - match x { - 92 => <|>continue, - } - } - } - "; - check_join_lines(before, after); - } - - #[test] - fn join_lines_adds_comma_for_block_in_match_arm() { - check_join_lines( - r" -fn foo(e: Result) { - match e { - Ok(u) => <|>{ - u.foo() - } - Err(v) => v, - } -}", - r" -fn foo(e: Result) { - match e { - Ok(u) => <|>u.foo(), - Err(v) => v, - } -}", - ); - } - - #[test] - fn join_lines_multiline_in_block() { - check_join_lines( - r" -fn foo() { - match ty { - <|> Some(ty) => { - match ty { - _ => false, - } - } - _ => true, - } -} -", - r" -fn foo() { - match ty { - <|> Some(ty) => match ty { - _ => false, - }, - _ => true, - } -} -", - ); - } - - #[test] - fn join_lines_keeps_comma_for_block_in_match_arm() { - // We already have a comma - check_join_lines( - r" -fn foo(e: Result) { - match e { - Ok(u) => <|>{ - u.foo() - }, - Err(v) => v, - } -}", - r" -fn foo(e: Result) { - match e { - Ok(u) => <|>u.foo(), - Err(v) => v, - } -}", - ); - - // comma with whitespace between brace and , - check_join_lines( - r" -fn foo(e: Result) { - match e { - Ok(u) => <|>{ - u.foo() - } , - Err(v) => v, - } -}", - r" -fn foo(e: Result) { - match e { - Ok(u) => <|>u.foo() , - Err(v) => v, - } -}", - ); - - // comma with newline between brace and , - check_join_lines( - r" -fn foo(e: Result) { - match e { - Ok(u) => <|>{ - u.foo() - } - , - Err(v) => v, - } -}", - r" -fn foo(e: Result) { - match e { - Ok(u) => <|>u.foo() - , - Err(v) => v, - } -}", - ); - } - - #[test] - fn join_lines_keeps_comma_with_single_arg_tuple() { - // A single arg tuple - check_join_lines( - r" -fn foo() { - let x = (<|>{ - 4 - },); -}", - r" -fn foo() { - let x = (<|>4,); -}", - ); - - // single arg tuple with whitespace between brace and 
comma - check_join_lines( - r" -fn foo() { - let x = (<|>{ - 4 - } ,); -}", - r" -fn foo() { - let x = (<|>4 ,); -}", - ); - - // single arg tuple with newline between brace and comma - check_join_lines( - r" -fn foo() { - let x = (<|>{ - 4 - } - ,); -}", - r" -fn foo() { - let x = (<|>4 - ,); -}", - ); - } - - #[test] - fn test_join_lines_use_items_left() { - // No space after the '{' - check_join_lines( - r" -<|>use ra_syntax::{ - TextSize, TextRange, -};", - r" -<|>use ra_syntax::{TextSize, TextRange, -};", - ); - } - - #[test] - fn test_join_lines_use_items_right() { - // No space after the '}' - check_join_lines( - r" -use ra_syntax::{ -<|> TextSize, TextRange -};", - r" -use ra_syntax::{ -<|> TextSize, TextRange};", - ); - } - - #[test] - fn test_join_lines_use_items_right_comma() { - // No space after the '}' - check_join_lines( - r" -use ra_syntax::{ -<|> TextSize, TextRange, -};", - r" -use ra_syntax::{ -<|> TextSize, TextRange};", - ); - } - - #[test] - fn test_join_lines_use_tree() { - check_join_lines( - r" -use ra_syntax::{ - algo::<|>{ - find_token_at_offset, - }, - ast, -};", - r" -use ra_syntax::{ - algo::<|>find_token_at_offset, - ast, -};", - ); - } - - #[test] - fn test_join_lines_normal_comments() { - check_join_lines( - r" -fn foo() { - // Hello<|> - // world! -} -", - r" -fn foo() { - // Hello<|> world! -} -", - ); - } - - #[test] - fn test_join_lines_doc_comments() { - check_join_lines( - r" -fn foo() { - /// Hello<|> - /// world! -} -", - r" -fn foo() { - /// Hello<|> world! -} -", - ); - } - - #[test] - fn test_join_lines_mod_comments() { - check_join_lines( - r" -fn foo() { - //! Hello<|> - //! world! -} -", - r" -fn foo() { - //! Hello<|> world! -} -", - ); - } - - #[test] - fn test_join_lines_multiline_comments_1() { - check_join_lines( - r" -fn foo() { - // Hello<|> - /* world! */ -} -", - r" -fn foo() { - // Hello<|> world! 
*/ -} -", - ); - } - - #[test] - fn test_join_lines_multiline_comments_2() { - check_join_lines( - r" -fn foo() { - // The<|> - /* quick - brown - fox! */ -} -", - r" -fn foo() { - // The<|> quick - brown - fox! */ -} -", - ); - } - - fn check_join_lines_sel(before: &str, after: &str) { - let (sel, before) = extract_range(before); - let parse = SourceFile::parse(&before); - let result = join_lines(&parse.tree(), sel); - let actual = { - let mut actual = before.to_string(); - result.apply(&mut actual); - actual - }; - assert_eq_text!(after, &actual); - } - - #[test] - fn test_join_lines_selection_fn_args() { - check_join_lines_sel( - r" -fn foo() { - <|>foo(1, - 2, - 3, - <|>) -} - ", - r" -fn foo() { - foo(1, 2, 3) -} - ", - ); - } - - #[test] - fn test_join_lines_selection_struct() { - check_join_lines_sel( - r" -struct Foo <|>{ - f: u32, -}<|> - ", - r" -struct Foo { f: u32 } - ", - ); - } - - #[test] - fn test_join_lines_selection_dot_chain() { - check_join_lines_sel( - r" -fn foo() { - join(<|>type_params.type_params() - .filter_map(|it| it.name()) - .map(|it| it.text())<|>) -}", - r" -fn foo() { - join(type_params.type_params().filter_map(|it| it.name()).map(|it| it.text())) -}", - ); - } - - #[test] - fn test_join_lines_selection_lambda_block_body() { - check_join_lines_sel( - r" -pub fn handle_find_matching_brace() { - params.offsets - .map(|offset| <|>{ - world.analysis().matching_brace(&file, offset).unwrap_or(offset) - }<|>) - .collect(); -}", - r" -pub fn handle_find_matching_brace() { - params.offsets - .map(|offset| world.analysis().matching_brace(&file, offset).unwrap_or(offset)) - .collect(); -}", - ); - } - - #[test] - fn test_join_lines_commented_block() { - check_join_lines( - r" -fn main() { - let _ = { - // <|>foo - // bar - 92 - }; -} - ", - r" -fn main() { - let _ = { - // <|>foo bar - 92 - }; -} - ", - ) - } - - #[test] - fn join_lines_mandatory_blocks_block() { - check_join_lines( - r" -<|>fn foo() { - 92 -} - ", - r" -<|>fn foo() { 92 -} - 
", - ); - - check_join_lines( - r" -fn foo() { - <|>if true { - 92 - } -} - ", - r" -fn foo() { - <|>if true { 92 - } -} - ", - ); - - check_join_lines( - r" -fn foo() { - <|>loop { - 92 - } -} - ", - r" -fn foo() { - <|>loop { 92 - } -} - ", - ); - - check_join_lines( - r" -fn foo() { - <|>unsafe { - 92 - } -} - ", - r" -fn foo() { - <|>unsafe { 92 - } -} - ", - ); - } -} diff --git a/crates/ra_ide/src/lib.rs b/crates/ra_ide/src/lib.rs deleted file mode 100644 index 0fede0d879..0000000000 --- a/crates/ra_ide/src/lib.rs +++ /dev/null @@ -1,534 +0,0 @@ -//! ra_ide crate provides "ide-centric" APIs for the rust-analyzer. That is, -//! it generally operates with files and text ranges, and returns results as -//! Strings, suitable for displaying to the human. -//! -//! What powers this API are the `RootDatabase` struct, which defines a `salsa` -//! database, and the `ra_hir` crate, where majority of the analysis happens. -//! However, IDE specific bits of the analysis (most notably completion) happen -//! in this crate. - -// For proving that RootDatabase is RefUnwindSafe. -#![recursion_limit = "128"] - -#[allow(unused)] -macro_rules! 
eprintln { - ($($tt:tt)*) => { stdx::eprintln!($($tt)*) }; -} - -pub mod mock_analysis; - -mod markup; -mod prime_caches; -mod display; - -mod call_hierarchy; -mod call_info; -mod completion; -mod diagnostics; -mod expand_macro; -mod extend_selection; -mod file_structure; -mod folding_ranges; -mod goto_definition; -mod goto_implementation; -mod goto_type_definition; -mod hover; -mod inlay_hints; -mod join_lines; -mod matching_brace; -mod parent_module; -mod references; -mod runnables; -mod ssr; -mod status; -mod syntax_highlighting; -mod syntax_tree; -mod typing; - -use std::sync::Arc; - -use ra_cfg::CfgOptions; -use ra_db::{ - salsa::{self, ParallelDatabase}, - CheckCanceled, Env, FileLoader, FileSet, SourceDatabase, VfsPath, -}; -use ra_ide_db::{ - symbol_index::{self, FileSymbol}, - LineIndexDatabase, -}; -use ra_syntax::{SourceFile, TextRange, TextSize}; - -use crate::display::ToNav; - -pub use crate::{ - call_hierarchy::CallItem, - call_info::CallInfo, - completion::{ - CompletionConfig, CompletionItem, CompletionItemKind, CompletionScore, InsertTextFormat, - }, - diagnostics::Severity, - display::NavigationTarget, - expand_macro::ExpandedMacro, - file_structure::StructureNode, - folding_ranges::{Fold, FoldKind}, - hover::{HoverAction, HoverConfig, HoverGotoTypeData, HoverResult}, - inlay_hints::{InlayHint, InlayHintsConfig, InlayKind}, - markup::Markup, - references::{Declaration, Reference, ReferenceAccess, ReferenceKind, ReferenceSearchResult}, - runnables::{Runnable, RunnableKind, TestId}, - syntax_highlighting::{ - Highlight, HighlightModifier, HighlightModifiers, HighlightTag, HighlightedRange, - }, -}; - -pub use hir::{Documentation, Semantics}; -pub use ra_assists::{Assist, AssistConfig, AssistId, AssistKind, ResolvedAssist}; -pub use ra_db::{ - Canceled, CrateGraph, CrateId, Edition, FileId, FilePosition, FileRange, SourceRoot, - SourceRootId, -}; -pub use ra_ide_db::{ - change::AnalysisChange, - line_index::{LineCol, LineIndex}, - 
search::SearchScope, - source_change::{FileSystemEdit, SourceChange, SourceFileEdit}, - symbol_index::Query, - RootDatabase, -}; -pub use ra_ssr::SsrError; -pub use ra_text_edit::{Indel, TextEdit}; - -pub type Cancelable = Result; - -#[derive(Debug)] -pub struct Diagnostic { - pub message: String, - pub range: TextRange, - pub severity: Severity, - pub fix: Option, -} - -#[derive(Debug)] -pub struct Fix { - pub label: String, - pub source_change: SourceChange, -} - -impl Fix { - pub fn new(label: impl Into, source_change: SourceChange) -> Self { - let label = label.into(); - assert!(label.starts_with(char::is_uppercase) && !label.ends_with('.')); - Self { label, source_change } - } -} - -/// Info associated with a text range. -#[derive(Debug)] -pub struct RangeInfo { - pub range: TextRange, - pub info: T, -} - -impl RangeInfo { - pub fn new(range: TextRange, info: T) -> RangeInfo { - RangeInfo { range, info } - } -} - -/// `AnalysisHost` stores the current state of the world. -#[derive(Debug)] -pub struct AnalysisHost { - db: RootDatabase, -} - -impl AnalysisHost { - pub fn new(lru_capacity: Option) -> AnalysisHost { - AnalysisHost { db: RootDatabase::new(lru_capacity) } - } - - pub fn update_lru_capacity(&mut self, lru_capacity: Option) { - self.db.update_lru_capacity(lru_capacity); - } - - /// Returns a snapshot of the current state, which you can query for - /// semantic information. - pub fn analysis(&self) -> Analysis { - Analysis { db: self.db.snapshot() } - } - - /// Applies changes to the current state of the world. If there are - /// outstanding snapshots, they will be canceled. 
- pub fn apply_change(&mut self, change: AnalysisChange) { - self.db.apply_change(change) - } - - pub fn maybe_collect_garbage(&mut self) { - self.db.maybe_collect_garbage(); - } - - pub fn collect_garbage(&mut self) { - self.db.collect_garbage(); - } - /// NB: this clears the database - pub fn per_query_memory_usage(&mut self) -> Vec<(String, ra_prof::Bytes)> { - self.db.per_query_memory_usage() - } - pub fn request_cancellation(&mut self) { - self.db.request_cancellation(); - } - pub fn raw_database(&self) -> &RootDatabase { - &self.db - } - pub fn raw_database_mut(&mut self) -> &mut RootDatabase { - &mut self.db - } -} - -impl Default for AnalysisHost { - fn default() -> AnalysisHost { - AnalysisHost::new(None) - } -} - -/// Analysis is a snapshot of a world state at a moment in time. It is the main -/// entry point for asking semantic information about the world. When the world -/// state is advanced using `AnalysisHost::apply_change` method, all existing -/// `Analysis` are canceled (most method return `Err(Canceled)`). -#[derive(Debug)] -pub struct Analysis { - db: salsa::Snapshot, -} - -// As a general design guideline, `Analysis` API are intended to be independent -// from the language server protocol. That is, when exposing some functionality -// we should think in terms of "what API makes most sense" and not in terms of -// "what types LSP uses". Although currently LSP is the only consumer of the -// API, the API should in theory be usable as a library, or via a different -// protocol. -impl Analysis { - // Creates an analysis instance for a single file, without any extenal - // dependencies, stdlib support or ability to apply changes. See - // `AnalysisHost` for creating a fully-featured analysis. 
- pub fn from_single_file(text: String) -> (Analysis, FileId) { - let mut host = AnalysisHost::default(); - let file_id = FileId(0); - let mut file_set = FileSet::default(); - file_set.insert(file_id, VfsPath::new_virtual_path("/main.rs".to_string())); - let source_root = SourceRoot::new_local(file_set); - - let mut change = AnalysisChange::new(); - change.set_roots(vec![source_root]); - let mut crate_graph = CrateGraph::default(); - // FIXME: cfg options - // Default to enable test for single file. - let mut cfg_options = CfgOptions::default(); - cfg_options.insert_atom("test".into()); - crate_graph.add_crate_root( - file_id, - Edition::Edition2018, - None, - cfg_options, - Env::default(), - Default::default(), - ); - change.change_file(file_id, Some(Arc::new(text))); - change.set_crate_graph(crate_graph); - host.apply_change(change); - (host.analysis(), file_id) - } - - /// Debug info about the current state of the analysis. - pub fn status(&self) -> Cancelable { - self.with_db(|db| status::status(&*db)) - } - - pub fn prime_caches(&self, files: Vec) -> Cancelable<()> { - self.with_db(|db| prime_caches::prime_caches(db, files)) - } - - /// Gets the text of the source file. - pub fn file_text(&self, file_id: FileId) -> Cancelable> { - self.with_db(|db| db.file_text(file_id)) - } - - /// Gets the syntax tree of the file. - pub fn parse(&self, file_id: FileId) -> Cancelable { - self.with_db(|db| db.parse(file_id).tree()) - } - - /// Gets the file's `LineIndex`: data structure to convert between absolute - /// offsets and line/column representation. - pub fn file_line_index(&self, file_id: FileId) -> Cancelable> { - self.with_db(|db| db.line_index(file_id)) - } - - /// Selects the next syntactic nodes encompassing the range. - pub fn extend_selection(&self, frange: FileRange) -> Cancelable { - self.with_db(|db| extend_selection::extend_selection(db, frange)) - } - - /// Returns position of the matching brace (all types of braces are - /// supported). 
- pub fn matching_brace(&self, position: FilePosition) -> Cancelable> { - self.with_db(|db| { - let parse = db.parse(position.file_id); - let file = parse.tree(); - matching_brace::matching_brace(&file, position.offset) - }) - } - - /// Returns a syntax tree represented as `String`, for debug purposes. - // FIXME: use a better name here. - pub fn syntax_tree( - &self, - file_id: FileId, - text_range: Option, - ) -> Cancelable { - self.with_db(|db| syntax_tree::syntax_tree(&db, file_id, text_range)) - } - - pub fn expand_macro(&self, position: FilePosition) -> Cancelable> { - self.with_db(|db| expand_macro::expand_macro(db, position)) - } - - /// Returns an edit to remove all newlines in the range, cleaning up minor - /// stuff like trailing commas. - pub fn join_lines(&self, frange: FileRange) -> Cancelable { - self.with_db(|db| { - let parse = db.parse(frange.file_id); - join_lines::join_lines(&parse.tree(), frange.range) - }) - } - - /// Returns an edit which should be applied when opening a new line, fixing - /// up minor stuff like continuing the comment. - /// The edit will be a snippet (with `$0`). - pub fn on_enter(&self, position: FilePosition) -> Cancelable> { - self.with_db(|db| typing::on_enter(&db, position)) - } - - /// Returns an edit which should be applied after a character was typed. - /// - /// This is useful for some on-the-fly fixups, like adding `;` to `let =` - /// automatically. - pub fn on_char_typed( - &self, - position: FilePosition, - char_typed: char, - ) -> Cancelable> { - // Fast path to not even parse the file. - if !typing::TRIGGER_CHARS.contains(char_typed) { - return Ok(None); - } - self.with_db(|db| typing::on_char_typed(&db, position, char_typed)) - } - - /// Returns a tree representation of symbols in the file. Useful to draw a - /// file outline. 
- pub fn file_structure(&self, file_id: FileId) -> Cancelable> { - self.with_db(|db| file_structure::file_structure(&db.parse(file_id).tree())) - } - - /// Returns a list of the places in the file where type hints can be displayed. - pub fn inlay_hints( - &self, - file_id: FileId, - config: &InlayHintsConfig, - ) -> Cancelable> { - self.with_db(|db| inlay_hints::inlay_hints(db, file_id, config)) - } - - /// Returns the set of folding ranges. - pub fn folding_ranges(&self, file_id: FileId) -> Cancelable> { - self.with_db(|db| folding_ranges::folding_ranges(&db.parse(file_id).tree())) - } - - /// Fuzzy searches for a symbol. - pub fn symbol_search(&self, query: Query) -> Cancelable> { - self.with_db(|db| { - symbol_index::world_symbols(db, query) - .into_iter() - .map(|s| s.to_nav(db)) - .collect::>() - }) - } - - /// Returns the definitions from the symbol at `position`. - pub fn goto_definition( - &self, - position: FilePosition, - ) -> Cancelable>>> { - self.with_db(|db| goto_definition::goto_definition(db, position)) - } - - /// Returns the impls from the symbol at `position`. - pub fn goto_implementation( - &self, - position: FilePosition, - ) -> Cancelable>>> { - self.with_db(|db| goto_implementation::goto_implementation(db, position)) - } - - /// Returns the type definitions for the symbol at `position`. - pub fn goto_type_definition( - &self, - position: FilePosition, - ) -> Cancelable>>> { - self.with_db(|db| goto_type_definition::goto_type_definition(db, position)) - } - - /// Finds all usages of the reference at point. - pub fn find_all_refs( - &self, - position: FilePosition, - search_scope: Option, - ) -> Cancelable> { - self.with_db(|db| { - references::find_all_refs(&Semantics::new(db), position, search_scope).map(|it| it.info) - }) - } - - /// Returns a short text describing element at position. 
- pub fn hover(&self, position: FilePosition) -> Cancelable>> { - self.with_db(|db| hover::hover(db, position)) - } - - /// Computes parameter information for the given call expression. - pub fn call_info(&self, position: FilePosition) -> Cancelable> { - self.with_db(|db| call_info::call_info(db, position)) - } - - /// Computes call hierarchy candidates for the given file position. - pub fn call_hierarchy( - &self, - position: FilePosition, - ) -> Cancelable>>> { - self.with_db(|db| call_hierarchy::call_hierarchy(db, position)) - } - - /// Computes incoming calls for the given file position. - pub fn incoming_calls(&self, position: FilePosition) -> Cancelable>> { - self.with_db(|db| call_hierarchy::incoming_calls(db, position)) - } - - /// Computes incoming calls for the given file position. - pub fn outgoing_calls(&self, position: FilePosition) -> Cancelable>> { - self.with_db(|db| call_hierarchy::outgoing_calls(db, position)) - } - - /// Returns a `mod name;` declaration which created the current module. - pub fn parent_module(&self, position: FilePosition) -> Cancelable> { - self.with_db(|db| parent_module::parent_module(db, position)) - } - - /// Returns crates this file belongs too. - pub fn crate_for(&self, file_id: FileId) -> Cancelable> { - self.with_db(|db| parent_module::crate_for(db, file_id)) - } - - /// Returns the edition of the given crate. - pub fn crate_edition(&self, crate_id: CrateId) -> Cancelable { - self.with_db(|db| db.crate_graph()[crate_id].edition) - } - - /// Returns the root file of the given crate. - pub fn crate_root(&self, crate_id: CrateId) -> Cancelable { - self.with_db(|db| db.crate_graph()[crate_id].root_file_id) - } - - /// Returns the set of possible targets to run for the current file. 
- pub fn runnables(&self, file_id: FileId) -> Cancelable> { - self.with_db(|db| runnables::runnables(db, file_id)) - } - - /// Computes syntax highlighting for the given file - pub fn highlight(&self, file_id: FileId) -> Cancelable> { - self.with_db(|db| syntax_highlighting::highlight(db, file_id, None, false)) - } - - /// Computes syntax highlighting for the given file range. - pub fn highlight_range(&self, frange: FileRange) -> Cancelable> { - self.with_db(|db| { - syntax_highlighting::highlight(db, frange.file_id, Some(frange.range), false) - }) - } - - /// Computes syntax highlighting for the given file. - pub fn highlight_as_html(&self, file_id: FileId, rainbow: bool) -> Cancelable { - self.with_db(|db| syntax_highlighting::highlight_as_html(db, file_id, rainbow)) - } - - /// Computes completions at the given position. - pub fn completions( - &self, - config: &CompletionConfig, - position: FilePosition, - ) -> Cancelable>> { - self.with_db(|db| completion::completions(db, config, position).map(Into::into)) - } - - /// Computes resolved assists with source changes for the given position. - pub fn resolved_assists( - &self, - config: &AssistConfig, - frange: FileRange, - ) -> Cancelable> { - self.with_db(|db| ra_assists::Assist::resolved(db, config, frange)) - } - - /// Computes unresolved assists (aka code actions aka intentions) for the given - /// position. - pub fn unresolved_assists( - &self, - config: &AssistConfig, - frange: FileRange, - ) -> Cancelable> { - self.with_db(|db| Assist::unresolved(db, config, frange)) - } - - /// Computes the set of diagnostics for the given file. - pub fn diagnostics( - &self, - file_id: FileId, - enable_experimental: bool, - ) -> Cancelable> { - self.with_db(|db| diagnostics::diagnostics(db, file_id, enable_experimental)) - } - - /// Returns the edit required to rename reference at the position to the new - /// name. 
- pub fn rename( - &self, - position: FilePosition, - new_name: &str, - ) -> Cancelable>> { - self.with_db(|db| references::rename(db, position, new_name)) - } - - pub fn structural_search_replace( - &self, - query: &str, - parse_only: bool, - position: FilePosition, - selections: Vec, - ) -> Cancelable> { - self.with_db(|db| { - let edits = ssr::parse_search_replace(query, parse_only, db, position, selections)?; - Ok(SourceChange::from(edits)) - }) - } - - /// Performs an operation on that may be Canceled. - fn with_db T + std::panic::UnwindSafe, T>( - &self, - f: F, - ) -> Cancelable { - self.db.catch_canceled(f) - } -} - -#[test] -fn analysis_is_send() { - fn is_send() {} - is_send::(); -} diff --git a/crates/ra_ide/src/matching_brace.rs b/crates/ra_ide/src/matching_brace.rs deleted file mode 100644 index 742d70c9c0..0000000000 --- a/crates/ra_ide/src/matching_brace.rs +++ /dev/null @@ -1,73 +0,0 @@ -use ra_syntax::{ - ast::{self, AstNode}, - SourceFile, SyntaxKind, TextSize, T, -}; -use test_utils::mark; - -// Feature: Matching Brace -// -// If the cursor is on any brace (`<>(){}[]||`) which is a part of a brace-pair, -// moves cursor to the matching brace. It uses the actual parser to determine -// braces, so it won't confuse generics with comparisons. 
-// -// |=== -// | Editor | Action Name -// -// | VS Code | **Rust Analyzer: Find matching brace** -// |=== -pub fn matching_brace(file: &SourceFile, offset: TextSize) -> Option { - const BRACES: &[SyntaxKind] = - &[T!['{'], T!['}'], T!['['], T![']'], T!['('], T![')'], T![<], T![>], T![|], T![|]]; - let (brace_token, brace_idx) = file - .syntax() - .token_at_offset(offset) - .filter_map(|node| { - let idx = BRACES.iter().position(|&brace| brace == node.kind())?; - Some((node, idx)) - }) - .next()?; - let parent = brace_token.parent(); - if brace_token.kind() == T![|] && !ast::ParamList::can_cast(parent.kind()) { - mark::hit!(pipes_not_braces); - return None; - } - let matching_kind = BRACES[brace_idx ^ 1]; - let matching_node = parent - .children_with_tokens() - .filter_map(|it| it.into_token()) - .find(|node| node.kind() == matching_kind && node != &brace_token)?; - Some(matching_node.text_range().start()) -} - -#[cfg(test)] -mod tests { - use test_utils::{add_cursor, assert_eq_text, extract_offset}; - - use super::*; - - #[test] - fn test_matching_brace() { - fn do_check(before: &str, after: &str) { - let (pos, before) = extract_offset(before); - let parse = SourceFile::parse(&before); - let new_pos = match matching_brace(&parse.tree(), pos) { - None => pos, - Some(pos) => pos, - }; - let actual = add_cursor(&before, new_pos); - assert_eq_text!(after, &actual); - } - - do_check("struct Foo { a: i32, }<|>", "struct Foo <|>{ a: i32, }"); - do_check("fn main() { |x: i32|<|> x * 2;}", "fn main() { <|>|x: i32| x * 2;}"); - do_check("fn main() { <|>|x: i32| x * 2;}", "fn main() { |x: i32<|>| x * 2;}"); - - { - mark::check!(pipes_not_braces); - do_check( - "fn main() { match 92 { 1 | 2 |<|> 3 => 92 } }", - "fn main() { match 92 { 1 | 2 |<|> 3 => 92 } }", - ); - } - } -} diff --git a/crates/ra_ide/src/mock_analysis.rs b/crates/ra_ide/src/mock_analysis.rs deleted file mode 100644 index c7e0f4b58f..0000000000 --- a/crates/ra_ide/src/mock_analysis.rs +++ /dev/null @@ -1,176 
+0,0 @@ -//! FIXME: write short doc here -use std::sync::Arc; - -use ra_cfg::CfgOptions; -use ra_db::{CrateName, FileSet, SourceRoot, VfsPath}; -use test_utils::{ - extract_annotations, extract_range_or_offset, Fixture, RangeOrOffset, CURSOR_MARKER, -}; - -use crate::{ - Analysis, AnalysisChange, AnalysisHost, CrateGraph, Edition, FileId, FilePosition, FileRange, -}; - -/// Mock analysis is used in test to bootstrap an AnalysisHost/Analysis -/// from a set of in-memory files. -#[derive(Debug, Default)] -pub struct MockAnalysis { - files: Vec, -} - -impl MockAnalysis { - /// Creates `MockAnalysis` using a fixture data in the following format: - /// - /// ```not_rust - /// //- /main.rs - /// mod foo; - /// fn main() {} - /// - /// //- /foo.rs - /// struct Baz; - /// ``` - pub fn with_files(ra_fixture: &str) -> MockAnalysis { - let (res, pos) = MockAnalysis::with_fixture(ra_fixture); - assert!(pos.is_none()); - res - } - - /// Same as `with_files`, but requires that a single file contains a `<|>` marker, - /// whose position is also returned. 
- pub fn with_files_and_position(fixture: &str) -> (MockAnalysis, FilePosition) { - let (res, position) = MockAnalysis::with_fixture(fixture); - let (file_id, range_or_offset) = position.expect("expected a marker (<|>)"); - let offset = match range_or_offset { - RangeOrOffset::Range(_) => panic!(), - RangeOrOffset::Offset(it) => it, - }; - (res, FilePosition { file_id, offset }) - } - - fn with_fixture(fixture: &str) -> (MockAnalysis, Option<(FileId, RangeOrOffset)>) { - let mut position = None; - let mut res = MockAnalysis::default(); - for mut entry in Fixture::parse(fixture) { - if entry.text.contains(CURSOR_MARKER) { - assert!(position.is_none(), "only one marker (<|>) per fixture is allowed"); - let (range_or_offset, text) = extract_range_or_offset(&entry.text); - entry.text = text; - let file_id = res.add_file_fixture(entry); - position = Some((file_id, range_or_offset)); - } else { - res.add_file_fixture(entry); - } - } - (res, position) - } - - fn add_file_fixture(&mut self, fixture: Fixture) -> FileId { - let file_id = FileId((self.files.len() + 1) as u32); - self.files.push(fixture); - file_id - } - - pub fn id_of(&self, path: &str) -> FileId { - let (file_id, _) = - self.files().find(|(_, data)| path == data.path).expect("no file in this mock"); - file_id - } - pub fn annotations(&self) -> Vec<(FileRange, String)> { - self.files() - .flat_map(|(file_id, fixture)| { - let annotations = extract_annotations(&fixture.text); - annotations - .into_iter() - .map(move |(range, data)| (FileRange { file_id, range }, data)) - }) - .collect() - } - pub fn files(&self) -> impl Iterator + '_ { - self.files.iter().enumerate().map(|(idx, fixture)| (FileId(idx as u32 + 1), fixture)) - } - pub fn annotation(&self) -> (FileRange, String) { - let mut all = self.annotations(); - assert_eq!(all.len(), 1); - all.pop().unwrap() - } - pub fn analysis_host(self) -> AnalysisHost { - let mut host = AnalysisHost::default(); - let mut change = AnalysisChange::new(); - let mut 
file_set = FileSet::default(); - let mut crate_graph = CrateGraph::default(); - let mut root_crate = None; - for (i, data) in self.files.into_iter().enumerate() { - let path = data.path; - assert!(path.starts_with('/')); - - let mut cfg = CfgOptions::default(); - data.cfg_atoms.iter().for_each(|it| cfg.insert_atom(it.into())); - data.cfg_key_values.iter().for_each(|(k, v)| cfg.insert_key_value(k.into(), v.into())); - let edition: Edition = - data.edition.and_then(|it| it.parse().ok()).unwrap_or(Edition::Edition2018); - - let file_id = FileId(i as u32 + 1); - let env = data.env.into_iter().collect(); - if path == "/lib.rs" || path == "/main.rs" { - root_crate = Some(crate_graph.add_crate_root( - file_id, - edition, - None, - cfg, - env, - Default::default(), - )); - } else if path.ends_with("/lib.rs") { - let base = &path[..path.len() - "/lib.rs".len()]; - let crate_name = &base[base.rfind('/').unwrap() + '/'.len_utf8()..]; - let other_crate = crate_graph.add_crate_root( - file_id, - edition, - Some(crate_name.to_string()), - cfg, - env, - Default::default(), - ); - if let Some(root_crate) = root_crate { - crate_graph - .add_dep(root_crate, CrateName::new(crate_name).unwrap(), other_crate) - .unwrap(); - } - } - let path = VfsPath::new_virtual_path(path.to_string()); - file_set.insert(file_id, path); - change.change_file(file_id, Some(Arc::new(data.text).to_owned())); - } - change.set_crate_graph(crate_graph); - change.set_roots(vec![SourceRoot::new_local(file_set)]); - host.apply_change(change); - host - } - pub fn analysis(self) -> Analysis { - self.analysis_host().analysis() - } -} - -/// Creates analysis from a multi-file fixture, returns positions marked with <|>. -pub fn analysis_and_position(ra_fixture: &str) -> (Analysis, FilePosition) { - let (mock, position) = MockAnalysis::with_files_and_position(ra_fixture); - (mock.analysis(), position) -} - -/// Creates analysis for a single file. 
-pub fn single_file(ra_fixture: &str) -> (Analysis, FileId) { - let mock = MockAnalysis::with_files(ra_fixture); - let file_id = mock.id_of("/main.rs"); - (mock.analysis(), file_id) -} - -/// Creates analysis for a single file, returns range marked with a pair of <|>. -pub fn analysis_and_range(ra_fixture: &str) -> (Analysis, FileRange) { - let (res, position) = MockAnalysis::with_fixture(ra_fixture); - let (file_id, range_or_offset) = position.expect("expected a marker (<|>)"); - let range = match range_or_offset { - RangeOrOffset::Range(it) => it, - RangeOrOffset::Offset(_) => panic!(), - }; - (res.analysis(), FileRange { file_id, range }) -} diff --git a/crates/ra_ide/src/parent_module.rs b/crates/ra_ide/src/parent_module.rs deleted file mode 100644 index e3e0c76392..0000000000 --- a/crates/ra_ide/src/parent_module.rs +++ /dev/null @@ -1,155 +0,0 @@ -use hir::Semantics; -use ra_db::{CrateId, FileId, FilePosition}; -use ra_ide_db::RootDatabase; -use ra_syntax::{ - algo::find_node_at_offset, - ast::{self, AstNode}, -}; -use test_utils::mark; - -use crate::NavigationTarget; - -// Feature: Parent Module -// -// Navigates to the parent module of the current module. -// -// |=== -// | Editor | Action Name -// -// | VS Code | **Rust Analyzer: Locate parent module** -// |=== - -/// This returns `Vec` because a module may be included from several places. We -/// don't handle this case yet though, so the Vec has length at most one. -pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec { - let sema = Semantics::new(db); - let source_file = sema.parse(position.file_id); - - let mut module = find_node_at_offset::(source_file.syntax(), position.offset); - - // If cursor is literally on `mod foo`, go to the grandpa. 
- if let Some(m) = &module { - if !m - .item_list() - .map_or(false, |it| it.syntax().text_range().contains_inclusive(position.offset)) - { - mark::hit!(test_resolve_parent_module_on_module_decl); - module = m.syntax().ancestors().skip(1).find_map(ast::Module::cast); - } - } - - let module = match module { - Some(module) => sema.to_def(&module), - None => sema.to_module_def(position.file_id), - }; - let module = match module { - None => return Vec::new(), - Some(it) => it, - }; - let nav = NavigationTarget::from_module_to_decl(db, module); - vec![nav] -} - -/// Returns `Vec` for the same reason as `parent_module` -pub(crate) fn crate_for(db: &RootDatabase, file_id: FileId) -> Vec { - let sema = Semantics::new(db); - let module = match sema.to_module_def(file_id) { - Some(it) => it, - None => return Vec::new(), - }; - let krate = module.krate(); - vec![krate.into()] -} - -#[cfg(test)] -mod tests { - use ra_cfg::CfgOptions; - use ra_db::Env; - use test_utils::mark; - - use crate::{ - mock_analysis::{analysis_and_position, MockAnalysis}, - AnalysisChange, CrateGraph, - Edition::Edition2018, - }; - - #[test] - fn test_resolve_parent_module() { - let (analysis, pos) = analysis_and_position( - " - //- /lib.rs - mod foo; - //- /foo.rs - <|>// empty - ", - ); - let nav = analysis.parent_module(pos).unwrap().pop().unwrap(); - nav.assert_match("foo MODULE FileId(1) 0..8"); - } - - #[test] - fn test_resolve_parent_module_on_module_decl() { - mark::check!(test_resolve_parent_module_on_module_decl); - let (analysis, pos) = analysis_and_position( - " - //- /lib.rs - mod foo; - - //- /foo.rs - mod <|>bar; - - //- /foo/bar.rs - // empty - ", - ); - let nav = analysis.parent_module(pos).unwrap().pop().unwrap(); - nav.assert_match("foo MODULE FileId(1) 0..8"); - } - - #[test] - fn test_resolve_parent_module_for_inline() { - let (analysis, pos) = analysis_and_position( - " - //- /lib.rs - mod foo { - mod bar { - mod baz { <|> } - } - } - ", - ); - let nav = 
analysis.parent_module(pos).unwrap().pop().unwrap(); - nav.assert_match("baz MODULE FileId(1) 32..44"); - } - - #[test] - fn test_resolve_crate_root() { - let mock = MockAnalysis::with_files( - r#" -//- /bar.rs -mod foo; -//- /foo.rs -// empty -"#, - ); - let root_file = mock.id_of("/bar.rs"); - let mod_file = mock.id_of("/foo.rs"); - let mut host = mock.analysis_host(); - assert!(host.analysis().crate_for(mod_file).unwrap().is_empty()); - - let mut crate_graph = CrateGraph::default(); - let crate_id = crate_graph.add_crate_root( - root_file, - Edition2018, - None, - CfgOptions::default(), - Env::default(), - Default::default(), - ); - let mut change = AnalysisChange::new(); - change.set_crate_graph(crate_graph); - host.apply_change(change); - - assert_eq!(host.analysis().crate_for(mod_file).unwrap(), vec![crate_id]); - } -} diff --git a/crates/ra_ide/src/references.rs b/crates/ra_ide/src/references.rs deleted file mode 100644 index cf456630a5..0000000000 --- a/crates/ra_ide/src/references.rs +++ /dev/null @@ -1,695 +0,0 @@ -//! This module implements a reference search. -//! First, the element at the cursor position must be either an `ast::Name` -//! or `ast::NameRef`. If it's a `ast::NameRef`, at the classification step we -//! try to resolve the direct tree parent of this element, otherwise we -//! already have a definition and just need to get its HIR together with -//! some information that is needed for futher steps of searching. -//! After that, we collect files that might contain references and look -//! for text occurrences of the identifier. If there's an `ast::NameRef` -//! at the index that the match starts at and its tree parent is -//! resolved to the search element definition, we get a reference. 
- -mod rename; - -use hir::Semantics; -use ra_ide_db::{ - defs::{classify_name, classify_name_ref, Definition}, - search::SearchScope, - RootDatabase, -}; -use ra_prof::profile; -use ra_syntax::{ - algo::find_node_at_offset, - ast::{self, NameOwner}, - AstNode, SyntaxKind, SyntaxNode, TextRange, TokenAtOffset, -}; - -use crate::{display::TryToNav, FilePosition, FileRange, NavigationTarget, RangeInfo}; - -pub(crate) use self::rename::rename; - -pub use ra_ide_db::search::{Reference, ReferenceAccess, ReferenceKind}; - -#[derive(Debug, Clone)] -pub struct ReferenceSearchResult { - declaration: Declaration, - references: Vec, -} - -#[derive(Debug, Clone)] -pub struct Declaration { - pub nav: NavigationTarget, - pub kind: ReferenceKind, - pub access: Option, -} - -impl ReferenceSearchResult { - pub fn declaration(&self) -> &Declaration { - &self.declaration - } - - pub fn decl_target(&self) -> &NavigationTarget { - &self.declaration.nav - } - - pub fn references(&self) -> &[Reference] { - &self.references - } - - /// Total number of references - /// At least 1 since all valid references should - /// Have a declaration - pub fn len(&self) -> usize { - self.references.len() + 1 - } -} - -// allow turning ReferenceSearchResult into an iterator -// over References -impl IntoIterator for ReferenceSearchResult { - type Item = Reference; - type IntoIter = std::vec::IntoIter; - - fn into_iter(mut self) -> Self::IntoIter { - let mut v = Vec::with_capacity(self.len()); - v.push(Reference { - file_range: FileRange { - file_id: self.declaration.nav.file_id, - range: self.declaration.nav.focus_or_full_range(), - }, - kind: self.declaration.kind, - access: self.declaration.access, - }); - v.append(&mut self.references); - v.into_iter() - } -} - -pub(crate) fn find_all_refs( - sema: &Semantics, - position: FilePosition, - search_scope: Option, -) -> Option> { - let _p = profile("find_all_refs"); - let syntax = sema.parse(position.file_id).syntax().clone(); - - let (opt_name, 
search_kind) = if let Some(name) = - get_struct_def_name_for_struct_literal_search(&sema, &syntax, position) - { - (Some(name), ReferenceKind::StructLiteral) - } else { - ( - sema.find_node_at_offset_with_descend::(&syntax, position.offset), - ReferenceKind::Other, - ) - }; - - let RangeInfo { range, info: def } = find_name(&sema, &syntax, position, opt_name)?; - - let references = def - .find_usages(sema, search_scope) - .into_iter() - .filter(|r| search_kind == ReferenceKind::Other || search_kind == r.kind) - .collect(); - - let decl_range = def.try_to_nav(sema.db)?.focus_or_full_range(); - - let declaration = Declaration { - nav: def.try_to_nav(sema.db)?, - kind: ReferenceKind::Other, - access: decl_access(&def, &syntax, decl_range), - }; - - Some(RangeInfo::new(range, ReferenceSearchResult { declaration, references })) -} - -fn find_name( - sema: &Semantics, - syntax: &SyntaxNode, - position: FilePosition, - opt_name: Option, -) -> Option> { - if let Some(name) = opt_name { - let def = classify_name(sema, &name)?.definition(); - let range = name.syntax().text_range(); - return Some(RangeInfo::new(range, def)); - } - let name_ref = - sema.find_node_at_offset_with_descend::(&syntax, position.offset)?; - let def = classify_name_ref(sema, &name_ref)?.definition(); - let range = name_ref.syntax().text_range(); - Some(RangeInfo::new(range, def)) -} - -fn decl_access(def: &Definition, syntax: &SyntaxNode, range: TextRange) -> Option { - match def { - Definition::Local(_) | Definition::Field(_) => {} - _ => return None, - }; - - let stmt = find_node_at_offset::(syntax, range.start())?; - if stmt.initializer().is_some() { - let pat = stmt.pat()?; - if let ast::Pat::IdentPat(it) = pat { - if it.mut_token().is_some() { - return Some(ReferenceAccess::Write); - } - } - } - - None -} - -fn get_struct_def_name_for_struct_literal_search( - sema: &Semantics, - syntax: &SyntaxNode, - position: FilePosition, -) -> Option { - if let TokenAtOffset::Between(ref left, ref right) = 
syntax.token_at_offset(position.offset) { - if right.kind() != SyntaxKind::L_CURLY && right.kind() != SyntaxKind::L_PAREN { - return None; - } - if let Some(name) = - sema.find_node_at_offset_with_descend::(&syntax, left.text_range().start()) - { - return name.syntax().ancestors().find_map(ast::Struct::cast).and_then(|l| l.name()); - } - if sema - .find_node_at_offset_with_descend::( - &syntax, - left.text_range().start(), - ) - .is_some() - { - return left.ancestors().find_map(ast::Struct::cast).and_then(|l| l.name()); - } - } - None -} - -#[cfg(test)] -mod tests { - use crate::{ - mock_analysis::{analysis_and_position, MockAnalysis}, - Declaration, Reference, ReferenceSearchResult, SearchScope, - }; - - #[test] - fn test_struct_literal_after_space() { - let refs = get_all_refs( - r#" -struct Foo <|>{ - a: i32, -} -impl Foo { - fn f() -> i32 { 42 } -} -fn main() { - let f: Foo; - f = Foo {a: Foo::f()}; -} -"#, - ); - check_result( - refs, - "Foo STRUCT FileId(1) 0..26 7..10 Other", - &["FileId(1) 101..104 StructLiteral"], - ); - } - - #[test] - fn test_struct_literal_before_space() { - let refs = get_all_refs( - r#" -struct Foo<|> {} - fn main() { - let f: Foo; - f = Foo {}; -} -"#, - ); - check_result( - refs, - "Foo STRUCT FileId(1) 0..13 7..10 Other", - &["FileId(1) 41..44 Other", "FileId(1) 54..57 StructLiteral"], - ); - } - - #[test] - fn test_struct_literal_with_generic_type() { - let refs = get_all_refs( - r#" -struct Foo <|>{} - fn main() { - let f: Foo::; - f = Foo {}; -} -"#, - ); - check_result( - refs, - "Foo STRUCT FileId(1) 0..16 7..10 Other", - &["FileId(1) 64..67 StructLiteral"], - ); - } - - #[test] - fn test_struct_literal_for_tuple() { - let refs = get_all_refs( - r#" -struct Foo<|>(i32); - -fn main() { - let f: Foo; - f = Foo(1); -} -"#, - ); - check_result( - refs, - "Foo STRUCT FileId(1) 0..16 7..10 Other", - &["FileId(1) 54..57 StructLiteral"], - ); - } - - #[test] - fn test_find_all_refs_for_local() { - let refs = get_all_refs( - r#" -fn 
main() { - let mut i = 1; - let j = 1; - i = i<|> + j; - - { - i = 0; - } - - i = 5; -}"#, - ); - check_result( - refs, - "i IDENT_PAT FileId(1) 24..25 Other Write", - &[ - "FileId(1) 50..51 Other Write", - "FileId(1) 54..55 Other Read", - "FileId(1) 76..77 Other Write", - "FileId(1) 94..95 Other Write", - ], - ); - } - - #[test] - fn search_filters_by_range() { - let refs = get_all_refs( - r#" -fn foo() { - let spam<|> = 92; - spam + spam -} -fn bar() { - let spam = 92; - spam + spam -} -"#, - ); - check_result( - refs, - "spam IDENT_PAT FileId(1) 19..23 Other", - &["FileId(1) 34..38 Other Read", "FileId(1) 41..45 Other Read"], - ); - } - - #[test] - fn test_find_all_refs_for_param_inside() { - let refs = get_all_refs( - r#" -fn foo(i : u32) -> u32 { - i<|> -} -"#, - ); - check_result(refs, "i IDENT_PAT FileId(1) 7..8 Other", &["FileId(1) 29..30 Other Read"]); - } - - #[test] - fn test_find_all_refs_for_fn_param() { - let refs = get_all_refs( - r#" -fn foo(i<|> : u32) -> u32 { - i -} -"#, - ); - check_result(refs, "i IDENT_PAT FileId(1) 7..8 Other", &["FileId(1) 29..30 Other Read"]); - } - - #[test] - fn test_find_all_refs_field_name() { - let refs = get_all_refs( - r#" -//- /lib.rs -struct Foo { - pub spam<|>: u32, -} - -fn main(s: Foo) { - let f = s.spam; -} -"#, - ); - check_result( - refs, - "spam RECORD_FIELD FileId(1) 17..30 21..25 Other", - &["FileId(1) 67..71 Other Read"], - ); - } - - #[test] - fn test_find_all_refs_impl_item_name() { - let refs = get_all_refs( - r#" -struct Foo; -impl Foo { - fn f<|>(&self) { } -} -"#, - ); - check_result(refs, "f FN FileId(1) 27..43 30..31 Other", &[]); - } - - #[test] - fn test_find_all_refs_enum_var_name() { - let refs = get_all_refs( - r#" -enum Foo { - A, - B<|>, - C, -} -"#, - ); - check_result(refs, "B VARIANT FileId(1) 22..23 22..23 Other", &[]); - } - - #[test] - fn test_find_all_refs_two_modules() { - let (analysis, pos) = analysis_and_position( - r#" -//- /lib.rs -pub mod foo; -pub mod bar; - -fn f() { - let i 
= foo::Foo { n: 5 }; -} - -//- /foo.rs -use crate::bar; - -pub struct Foo { - pub n: u32, -} - -fn f() { - let i = bar::Bar { n: 5 }; -} - -//- /bar.rs -use crate::foo; - -pub struct Bar { - pub n: u32, -} - -fn f() { - let i = foo::Foo<|> { n: 5 }; -} -"#, - ); - let refs = analysis.find_all_refs(pos, None).unwrap().unwrap(); - check_result( - refs, - "Foo STRUCT FileId(2) 17..51 28..31 Other", - &["FileId(1) 53..56 StructLiteral", "FileId(3) 79..82 StructLiteral"], - ); - } - - // `mod foo;` is not in the results because `foo` is an `ast::Name`. - // So, there are two references: the first one is a definition of the `foo` module, - // which is the whole `foo.rs`, and the second one is in `use foo::Foo`. - #[test] - fn test_find_all_refs_decl_module() { - let (analysis, pos) = analysis_and_position( - r#" -//- /lib.rs -mod foo<|>; - -use foo::Foo; - -fn f() { - let i = Foo { n: 5 }; -} - -//- /foo.rs -pub struct Foo { - pub n: u32, -} -"#, - ); - let refs = analysis.find_all_refs(pos, None).unwrap().unwrap(); - check_result(refs, "foo SOURCE_FILE FileId(2) 0..35 Other", &["FileId(1) 14..17 Other"]); - } - - #[test] - fn test_find_all_refs_super_mod_vis() { - let (analysis, pos) = analysis_and_position( - r#" -//- /lib.rs -mod foo; - -//- /foo.rs -mod some; -use some::Foo; - -fn f() { - let i = Foo { n: 5 }; -} - -//- /foo/some.rs -pub(super) struct Foo<|> { - pub n: u32, -} -"#, - ); - let refs = analysis.find_all_refs(pos, None).unwrap().unwrap(); - check_result( - refs, - "Foo STRUCT FileId(3) 0..41 18..21 Other", - &["FileId(2) 20..23 Other", "FileId(2) 47..50 StructLiteral"], - ); - } - - #[test] - fn test_find_all_refs_with_scope() { - let code = r#" - //- /lib.rs - mod foo; - mod bar; - - pub fn quux<|>() {} - - //- /foo.rs - fn f() { super::quux(); } - - //- /bar.rs - fn f() { super::quux(); } - "#; - - let (mock, pos) = MockAnalysis::with_files_and_position(code); - let bar = mock.id_of("/bar.rs"); - let analysis = mock.analysis(); - - let refs = 
analysis.find_all_refs(pos, None).unwrap().unwrap(); - check_result( - refs, - "quux FN FileId(1) 19..35 26..30 Other", - &["FileId(2) 16..20 StructLiteral", "FileId(3) 16..20 StructLiteral"], - ); - - let refs = - analysis.find_all_refs(pos, Some(SearchScope::single_file(bar))).unwrap().unwrap(); - check_result( - refs, - "quux FN FileId(1) 19..35 26..30 Other", - &["FileId(3) 16..20 StructLiteral"], - ); - } - - #[test] - fn test_find_all_refs_macro_def() { - let refs = get_all_refs( - r#" -#[macro_export] -macro_rules! m1<|> { () => (()) } - -fn foo() { - m1(); - m1(); -} -"#, - ); - check_result( - refs, - "m1 MACRO_CALL FileId(1) 0..46 29..31 Other", - &["FileId(1) 63..65 StructLiteral", "FileId(1) 73..75 StructLiteral"], - ); - } - - #[test] - fn test_basic_highlight_read_write() { - let refs = get_all_refs( - r#" -fn foo() { - let mut i<|> = 0; - i = i + 1; -} -"#, - ); - check_result( - refs, - "i IDENT_PAT FileId(1) 23..24 Other Write", - &["FileId(1) 34..35 Other Write", "FileId(1) 38..39 Other Read"], - ); - } - - #[test] - fn test_basic_highlight_field_read_write() { - let refs = get_all_refs( - r#" -struct S { - f: u32, -} - -fn foo() { - let mut s = S{f: 0}; - s.f<|> = 0; -} -"#, - ); - check_result( - refs, - "f RECORD_FIELD FileId(1) 15..21 15..16 Other", - &["FileId(1) 55..56 Other Read", "FileId(1) 68..69 Other Write"], - ); - } - - #[test] - fn test_basic_highlight_decl_no_write() { - let refs = get_all_refs( - r#" -fn foo() { - let i<|>; - i = 1; -} -"#, - ); - check_result(refs, "i IDENT_PAT FileId(1) 19..20 Other", &["FileId(1) 26..27 Other Write"]); - } - - #[test] - fn test_find_struct_function_refs_outside_module() { - let refs = get_all_refs( - r#" -mod foo { - pub struct Foo; - - impl Foo { - pub fn new<|>() -> Foo { - Foo - } - } -} - -fn main() { - let _f = foo::Foo::new(); -} -"#, - ); - check_result( - refs, - "new FN FileId(1) 54..101 61..64 Other", - &["FileId(1) 146..149 StructLiteral"], - ); - } - - #[test] - fn 
test_find_all_refs_nested_module() { - let code = r#" - //- /lib.rs - mod foo { - mod bar; - } - - fn f<|>() {} - - //- /foo/bar.rs - use crate::f; - - fn g() { - f(); - } - "#; - - let (analysis, pos) = analysis_and_position(code); - let refs = analysis.find_all_refs(pos, None).unwrap().unwrap(); - check_result( - refs, - "f FN FileId(1) 26..35 29..30 Other", - &["FileId(2) 11..12 Other", "FileId(2) 28..29 StructLiteral"], - ); - } - - fn get_all_refs(ra_fixture: &str) -> ReferenceSearchResult { - let (analysis, position) = analysis_and_position(ra_fixture); - analysis.find_all_refs(position, None).unwrap().unwrap() - } - - fn check_result(res: ReferenceSearchResult, expected_decl: &str, expected_refs: &[&str]) { - res.declaration().assert_match(expected_decl); - assert_eq!(res.references.len(), expected_refs.len()); - res.references() - .iter() - .enumerate() - .for_each(|(i, r)| ref_assert_match(r, expected_refs[i])); - } - - impl Declaration { - fn debug_render(&self) -> String { - let mut s = format!("{} {:?}", self.nav.debug_render(), self.kind); - if let Some(access) = self.access { - s.push_str(&format!(" {:?}", access)); - } - s - } - - fn assert_match(&self, expected: &str) { - let actual = self.debug_render(); - test_utils::assert_eq_text!(expected.trim(), actual.trim(),); - } - } - - fn ref_debug_render(r: &Reference) -> String { - let mut s = format!("{:?} {:?} {:?}", r.file_range.file_id, r.file_range.range, r.kind); - if let Some(access) = r.access { - s.push_str(&format!(" {:?}", access)); - } - s - } - - fn ref_assert_match(r: &Reference, expected: &str) { - let actual = ref_debug_render(r); - test_utils::assert_eq_text!(expected.trim(), actual.trim(),); - } -} diff --git a/crates/ra_ide/src/references/rename.rs b/crates/ra_ide/src/references/rename.rs deleted file mode 100644 index c8d80fcf7c..0000000000 --- a/crates/ra_ide/src/references/rename.rs +++ /dev/null @@ -1,1010 +0,0 @@ -//! 
FIXME: write short doc here - -use hir::{Module, ModuleDef, ModuleSource, Semantics}; -use ra_db::SourceDatabaseExt; -use ra_ide_db::{ - defs::{classify_name, classify_name_ref, Definition, NameClass, NameRefClass}, - RootDatabase, -}; -use ra_syntax::{ - algo::find_node_at_offset, - ast::{self, NameOwner}, - lex_single_valid_syntax_kind, match_ast, AstNode, SyntaxKind, SyntaxNode, SyntaxToken, -}; -use ra_text_edit::TextEdit; -use std::convert::TryInto; -use test_utils::mark; - -use crate::{ - references::find_all_refs, FilePosition, FileSystemEdit, RangeInfo, Reference, ReferenceKind, - SourceChange, SourceFileEdit, TextRange, TextSize, -}; - -pub(crate) fn rename( - db: &RootDatabase, - position: FilePosition, - new_name: &str, -) -> Option> { - let sema = Semantics::new(db); - - match lex_single_valid_syntax_kind(new_name)? { - SyntaxKind::IDENT | SyntaxKind::UNDERSCORE => (), - SyntaxKind::SELF_KW => return rename_to_self(&sema, position), - _ => return None, - } - - let source_file = sema.parse(position.file_id); - let syntax = source_file.syntax(); - if let Some(module) = find_module_at_offset(&sema, position, syntax) { - rename_mod(&sema, position, module, new_name) - } else if let Some(self_token) = - syntax.token_at_offset(position.offset).find(|t| t.kind() == SyntaxKind::SELF_KW) - { - rename_self_to_param(&sema, position, self_token, new_name) - } else { - rename_reference(&sema, position, new_name) - } -} - -fn find_module_at_offset( - sema: &Semantics, - position: FilePosition, - syntax: &SyntaxNode, -) -> Option { - let ident = syntax.token_at_offset(position.offset).find(|t| t.kind() == SyntaxKind::IDENT)?; - - let module = match_ast! { - match (ident.parent()) { - ast::NameRef(name_ref) => { - match classify_name_ref(sema, &name_ref)? { - NameRefClass::Definition(Definition::ModuleDef(ModuleDef::Module(module))) => module, - _ => return None, - } - }, - ast::Name(name) => { - match classify_name(&sema, &name)? 
{ - NameClass::Definition(Definition::ModuleDef(ModuleDef::Module(module))) => module, - _ => return None, - } - }, - _ => return None, - } - }; - - Some(module) -} - -fn source_edit_from_reference(reference: Reference, new_name: &str) -> SourceFileEdit { - let mut replacement_text = String::new(); - let file_id = reference.file_range.file_id; - let range = match reference.kind { - ReferenceKind::FieldShorthandForField => { - mark::hit!(test_rename_struct_field_for_shorthand); - replacement_text.push_str(new_name); - replacement_text.push_str(": "); - TextRange::new(reference.file_range.range.start(), reference.file_range.range.start()) - } - ReferenceKind::FieldShorthandForLocal => { - mark::hit!(test_rename_local_for_field_shorthand); - replacement_text.push_str(": "); - replacement_text.push_str(new_name); - TextRange::new(reference.file_range.range.end(), reference.file_range.range.end()) - } - _ => { - replacement_text.push_str(new_name); - reference.file_range.range - } - }; - SourceFileEdit { file_id, edit: TextEdit::replace(range, replacement_text) } -} - -fn rename_mod( - sema: &Semantics, - position: FilePosition, - module: Module, - new_name: &str, -) -> Option> { - let mut source_file_edits = Vec::new(); - let mut file_system_edits = Vec::new(); - - let src = module.definition_source(sema.db); - let file_id = src.file_id.original_file(sema.db); - match src.value { - ModuleSource::SourceFile(..) => { - // mod is defined in path/to/dir/mod.rs - let dst = if module.is_mod_rs(sema.db) { - format!("../{}/mod.rs", new_name) - } else { - format!("{}.rs", new_name) - }; - let move_file = FileSystemEdit::MoveFile { src: file_id, anchor: file_id, dst }; - file_system_edits.push(move_file); - } - ModuleSource::Module(..) 
=> {} - } - - if let Some(src) = module.declaration_source(sema.db) { - let file_id = src.file_id.original_file(sema.db); - let name = src.value.name()?; - let edit = SourceFileEdit { - file_id, - edit: TextEdit::replace(name.syntax().text_range(), new_name.into()), - }; - source_file_edits.push(edit); - } - - let RangeInfo { range, info: refs } = find_all_refs(sema, position, None)?; - let ref_edits = refs - .references - .into_iter() - .map(|reference| source_edit_from_reference(reference, new_name)); - source_file_edits.extend(ref_edits); - - Some(RangeInfo::new(range, SourceChange::from_edits(source_file_edits, file_system_edits))) -} - -fn rename_to_self( - sema: &Semantics, - position: FilePosition, -) -> Option> { - let source_file = sema.parse(position.file_id); - let syn = source_file.syntax(); - - let fn_def = find_node_at_offset::(syn, position.offset)?; - let params = fn_def.param_list()?; - if params.self_param().is_some() { - return None; // method already has self param - } - let first_param = params.params().next()?; - let mutable = match first_param.ty() { - Some(ast::Type::RefType(rt)) => rt.mut_token().is_some(), - _ => return None, // not renaming other types - }; - - let RangeInfo { range, info: refs } = find_all_refs(sema, position, None)?; - - let param_range = first_param.syntax().text_range(); - let (param_ref, usages): (Vec, Vec) = refs - .into_iter() - .partition(|reference| param_range.intersect(reference.file_range.range).is_some()); - - if param_ref.is_empty() { - return None; - } - - let mut edits = usages - .into_iter() - .map(|reference| source_edit_from_reference(reference, "self")) - .collect::>(); - - edits.push(SourceFileEdit { - file_id: position.file_id, - edit: TextEdit::replace( - param_range, - String::from(if mutable { "&mut self" } else { "&self" }), - ), - }); - - Some(RangeInfo::new(range, SourceChange::from(edits))) -} - -fn text_edit_from_self_param( - syn: &SyntaxNode, - self_param: &ast::SelfParam, - new_name: &str, 
-) -> Option { - fn target_type_name(impl_def: &ast::Impl) -> Option { - if let Some(ast::Type::PathType(p)) = impl_def.self_ty() { - return Some(p.path()?.segment()?.name_ref()?.text().to_string()); - } - None - } - - let impl_def = find_node_at_offset::(syn, self_param.syntax().text_range().start())?; - let type_name = target_type_name(&impl_def)?; - - let mut replacement_text = String::from(new_name); - replacement_text.push_str(": "); - replacement_text.push_str(self_param.mut_token().map_or("&", |_| "&mut ")); - replacement_text.push_str(type_name.as_str()); - - Some(TextEdit::replace(self_param.syntax().text_range(), replacement_text)) -} - -fn rename_self_to_param( - sema: &Semantics, - position: FilePosition, - self_token: SyntaxToken, - new_name: &str, -) -> Option> { - let source_file = sema.parse(position.file_id); - let syn = source_file.syntax(); - - let text = sema.db.file_text(position.file_id); - let fn_def = find_node_at_offset::(syn, position.offset)?; - let search_range = fn_def.syntax().text_range(); - - let mut edits: Vec = vec![]; - - for (idx, _) in text.match_indices("self") { - let offset: TextSize = idx.try_into().unwrap(); - if !search_range.contains_inclusive(offset) { - continue; - } - if let Some(ref usage) = - syn.token_at_offset(offset).find(|t| t.kind() == SyntaxKind::SELF_KW) - { - let edit = if let Some(ref self_param) = ast::SelfParam::cast(usage.parent()) { - text_edit_from_self_param(syn, self_param, new_name)? 
- } else { - TextEdit::replace(usage.text_range(), String::from(new_name)) - }; - edits.push(SourceFileEdit { file_id: position.file_id, edit }); - } - } - - let range = ast::SelfParam::cast(self_token.parent()) - .map_or(self_token.text_range(), |p| p.syntax().text_range()); - - Some(RangeInfo::new(range, SourceChange::from(edits))) -} - -fn rename_reference( - sema: &Semantics, - position: FilePosition, - new_name: &str, -) -> Option> { - let RangeInfo { range, info: refs } = find_all_refs(sema, position, None)?; - - let edit = refs - .into_iter() - .map(|reference| source_edit_from_reference(reference, new_name)) - .collect::>(); - - if edit.is_empty() { - return None; - } - - Some(RangeInfo::new(range, SourceChange::from(edit))) -} - -#[cfg(test)] -mod tests { - use expect::{expect, Expect}; - use ra_text_edit::TextEditBuilder; - use stdx::trim_indent; - use test_utils::{assert_eq_text, mark}; - - use crate::{mock_analysis::analysis_and_position, FileId}; - - fn check(new_name: &str, ra_fixture_before: &str, ra_fixture_after: &str) { - let ra_fixture_after = &trim_indent(ra_fixture_after); - let (analysis, position) = analysis_and_position(ra_fixture_before); - let source_change = analysis.rename(position, new_name).unwrap(); - let mut text_edit_builder = TextEditBuilder::default(); - let mut file_id: Option = None; - if let Some(change) = source_change { - for edit in change.info.source_file_edits { - file_id = Some(edit.file_id); - for indel in edit.edit.into_iter() { - text_edit_builder.replace(indel.delete, indel.insert); - } - } - } - let mut result = analysis.file_text(file_id.unwrap()).unwrap().to_string(); - text_edit_builder.finish().apply(&mut result); - assert_eq_text!(ra_fixture_after, &*result); - } - - fn check_expect(new_name: &str, ra_fixture: &str, expect: Expect) { - let (analysis, position) = analysis_and_position(ra_fixture); - let source_change = analysis.rename(position, new_name).unwrap().unwrap(); - expect.assert_debug_eq(&source_change) 
- } - - #[test] - fn test_rename_to_underscore() { - check("_", r#"fn main() { let i<|> = 1; }"#, r#"fn main() { let _ = 1; }"#); - } - - #[test] - fn test_rename_to_raw_identifier() { - check("r#fn", r#"fn main() { let i<|> = 1; }"#, r#"fn main() { let r#fn = 1; }"#); - } - - #[test] - fn test_rename_to_invalid_identifier() { - let (analysis, position) = analysis_and_position(r#"fn main() { let i<|> = 1; }"#); - let new_name = "invalid!"; - let source_change = analysis.rename(position, new_name).unwrap(); - assert!(source_change.is_none()); - } - - #[test] - fn test_rename_for_local() { - check( - "k", - r#" -fn main() { - let mut i = 1; - let j = 1; - i = i<|> + j; - - { i = 0; } - - i = 5; -} -"#, - r#" -fn main() { - let mut k = 1; - let j = 1; - k = k + j; - - { k = 0; } - - k = 5; -} -"#, - ); - } - - #[test] - fn test_rename_for_macro_args() { - check( - "b", - r#" -macro_rules! foo {($i:ident) => {$i} } -fn main() { - let a<|> = "test"; - foo!(a); -} -"#, - r#" -macro_rules! foo {($i:ident) => {$i} } -fn main() { - let b = "test"; - foo!(b); -} -"#, - ); - } - - #[test] - fn test_rename_for_macro_args_rev() { - check( - "b", - r#" -macro_rules! foo {($i:ident) => {$i} } -fn main() { - let a = "test"; - foo!(a<|>); -} -"#, - r#" -macro_rules! foo {($i:ident) => {$i} } -fn main() { - let b = "test"; - foo!(b); -} -"#, - ); - } - - #[test] - fn test_rename_for_macro_define_fn() { - check( - "bar", - r#" -macro_rules! define_fn {($id:ident) => { fn $id{} }} -define_fn!(foo); -fn main() { - fo<|>o(); -} -"#, - r#" -macro_rules! define_fn {($id:ident) => { fn $id{} }} -define_fn!(bar); -fn main() { - bar(); -} -"#, - ); - } - - #[test] - fn test_rename_for_macro_define_fn_rev() { - check( - "bar", - r#" -macro_rules! define_fn {($id:ident) => { fn $id{} }} -define_fn!(fo<|>o); -fn main() { - foo(); -} -"#, - r#" -macro_rules! 
define_fn {($id:ident) => { fn $id{} }} -define_fn!(bar); -fn main() { - bar(); -} -"#, - ); - } - - #[test] - fn test_rename_for_param_inside() { - check("j", r#"fn foo(i : u32) -> u32 { i<|> }"#, r#"fn foo(j : u32) -> u32 { j }"#); - } - - #[test] - fn test_rename_refs_for_fn_param() { - check("j", r#"fn foo(i<|> : u32) -> u32 { i }"#, r#"fn foo(j : u32) -> u32 { j }"#); - } - - #[test] - fn test_rename_for_mut_param() { - check("j", r#"fn foo(mut i<|> : u32) -> u32 { i }"#, r#"fn foo(mut j : u32) -> u32 { j }"#); - } - - #[test] - fn test_rename_struct_field() { - check( - "j", - r#" -struct Foo { i<|>: i32 } - -impl Foo { - fn new(i: i32) -> Self { - Self { i: i } - } -} -"#, - r#" -struct Foo { j: i32 } - -impl Foo { - fn new(i: i32) -> Self { - Self { j: i } - } -} -"#, - ); - } - - #[test] - fn test_rename_struct_field_for_shorthand() { - mark::check!(test_rename_struct_field_for_shorthand); - check( - "j", - r#" -struct Foo { i<|>: i32 } - -impl Foo { - fn new(i: i32) -> Self { - Self { i } - } -} -"#, - r#" -struct Foo { j: i32 } - -impl Foo { - fn new(i: i32) -> Self { - Self { j: i } - } -} -"#, - ); - } - - #[test] - fn test_rename_local_for_field_shorthand() { - mark::check!(test_rename_local_for_field_shorthand); - check( - "j", - r#" -struct Foo { i: i32 } - -impl Foo { - fn new(i<|>: i32) -> Self { - Self { i } - } -} -"#, - r#" -struct Foo { i: i32 } - -impl Foo { - fn new(j: i32) -> Self { - Self { i: j } - } -} -"#, - ); - } - - #[test] - fn test_field_shorthand_correct_struct() { - check( - "j", - r#" -struct Foo { i<|>: i32 } -struct Bar { i: i32 } - -impl Bar { - fn new(i: i32) -> Self { - Self { i } - } -} -"#, - r#" -struct Foo { j: i32 } -struct Bar { i: i32 } - -impl Bar { - fn new(i: i32) -> Self { - Self { i } - } -} -"#, - ); - } - - #[test] - fn test_shadow_local_for_struct_shorthand() { - check( - "j", - r#" -struct Foo { i: i32 } - -fn baz(i<|>: i32) -> Self { - let x = Foo { i }; - { - let i = 0; - Foo { i } - } -} -"#, - r#" 
-struct Foo { i: i32 } - -fn baz(j: i32) -> Self { - let x = Foo { i: j }; - { - let i = 0; - Foo { i } - } -} -"#, - ); - } - - #[test] - fn test_rename_mod() { - check_expect( - "foo2", - r#" -//- /lib.rs -mod bar; - -//- /bar.rs -mod foo<|>; - -//- /bar/foo.rs -// empty -"#, - expect![[r#" - RangeInfo { - range: 4..7, - info: SourceChange { - source_file_edits: [ - SourceFileEdit { - file_id: FileId( - 2, - ), - edit: TextEdit { - indels: [ - Indel { - insert: "foo2", - delete: 4..7, - }, - ], - }, - }, - ], - file_system_edits: [ - MoveFile { - src: FileId( - 3, - ), - anchor: FileId( - 3, - ), - dst: "foo2.rs", - }, - ], - is_snippet: false, - }, - } - "#]], - ); - } - - #[test] - fn test_rename_mod_in_use_tree() { - check_expect( - "quux", - r#" -//- /main.rs -pub mod foo; -pub mod bar; -fn main() {} - -//- /foo.rs -pub struct FooContent; - -//- /bar.rs -use crate::foo<|>::FooContent; -"#, - expect![[r#" - RangeInfo { - range: 11..14, - info: SourceChange { - source_file_edits: [ - SourceFileEdit { - file_id: FileId( - 1, - ), - edit: TextEdit { - indels: [ - Indel { - insert: "quux", - delete: 8..11, - }, - ], - }, - }, - SourceFileEdit { - file_id: FileId( - 3, - ), - edit: TextEdit { - indels: [ - Indel { - insert: "quux", - delete: 11..14, - }, - ], - }, - }, - ], - file_system_edits: [ - MoveFile { - src: FileId( - 2, - ), - anchor: FileId( - 2, - ), - dst: "quux.rs", - }, - ], - is_snippet: false, - }, - } - "#]], - ); - } - - #[test] - fn test_rename_mod_in_dir() { - check_expect( - "foo2", - r#" -//- /lib.rs -mod fo<|>o; -//- /foo/mod.rs -// emtpy -"#, - expect![[r#" - RangeInfo { - range: 4..7, - info: SourceChange { - source_file_edits: [ - SourceFileEdit { - file_id: FileId( - 1, - ), - edit: TextEdit { - indels: [ - Indel { - insert: "foo2", - delete: 4..7, - }, - ], - }, - }, - ], - file_system_edits: [ - MoveFile { - src: FileId( - 2, - ), - anchor: FileId( - 2, - ), - dst: "../foo2/mod.rs", - }, - ], - is_snippet: false, - }, - } - "#]], - ); - 
} - - #[test] - fn test_rename_unusually_nested_mod() { - check_expect( - "bar", - r#" -//- /lib.rs -mod outer { mod fo<|>o; } - -//- /outer/foo.rs -// emtpy -"#, - expect![[r#" - RangeInfo { - range: 16..19, - info: SourceChange { - source_file_edits: [ - SourceFileEdit { - file_id: FileId( - 1, - ), - edit: TextEdit { - indels: [ - Indel { - insert: "bar", - delete: 16..19, - }, - ], - }, - }, - ], - file_system_edits: [ - MoveFile { - src: FileId( - 2, - ), - anchor: FileId( - 2, - ), - dst: "bar.rs", - }, - ], - is_snippet: false, - }, - } - "#]], - ); - } - - #[test] - fn test_module_rename_in_path() { - check( - "baz", - r#" -mod <|>foo { pub fn bar() {} } - -fn main() { foo::bar(); } -"#, - r#" -mod baz { pub fn bar() {} } - -fn main() { baz::bar(); } -"#, - ); - } - - #[test] - fn test_rename_mod_filename_and_path() { - check_expect( - "foo2", - r#" -//- /lib.rs -mod bar; -fn f() { - bar::foo::fun() -} - -//- /bar.rs -pub mod foo<|>; - -//- /bar/foo.rs -// pub fn fun() {} -"#, - expect![[r#" - RangeInfo { - range: 8..11, - info: SourceChange { - source_file_edits: [ - SourceFileEdit { - file_id: FileId( - 2, - ), - edit: TextEdit { - indels: [ - Indel { - insert: "foo2", - delete: 8..11, - }, - ], - }, - }, - SourceFileEdit { - file_id: FileId( - 1, - ), - edit: TextEdit { - indels: [ - Indel { - insert: "foo2", - delete: 27..30, - }, - ], - }, - }, - ], - file_system_edits: [ - MoveFile { - src: FileId( - 3, - ), - anchor: FileId( - 3, - ), - dst: "foo2.rs", - }, - ], - is_snippet: false, - }, - } - "#]], - ); - } - - #[test] - fn test_enum_variant_from_module_1() { - check( - "Baz", - r#" -mod foo { - pub enum Foo { Bar<|> } -} - -fn func(f: foo::Foo) { - match f { - foo::Foo::Bar => {} - } -} -"#, - r#" -mod foo { - pub enum Foo { Baz } -} - -fn func(f: foo::Foo) { - match f { - foo::Foo::Baz => {} - } -} -"#, - ); - } - - #[test] - fn test_enum_variant_from_module_2() { - check( - "baz", - r#" -mod foo { - pub struct Foo { pub bar<|>: uint } -} - -fn 
foo(f: foo::Foo) { - let _ = f.bar; -} -"#, - r#" -mod foo { - pub struct Foo { pub baz: uint } -} - -fn foo(f: foo::Foo) { - let _ = f.baz; -} -"#, - ); - } - - #[test] - fn test_parameter_to_self() { - check( - "self", - r#" -struct Foo { i: i32 } - -impl Foo { - fn f(foo<|>: &mut Foo) -> i32 { - foo.i - } -} -"#, - r#" -struct Foo { i: i32 } - -impl Foo { - fn f(&mut self) -> i32 { - self.i - } -} -"#, - ); - } - - #[test] - fn test_self_to_parameter() { - check( - "foo", - r#" -struct Foo { i: i32 } - -impl Foo { - fn f(&mut <|>self) -> i32 { - self.i - } -} -"#, - r#" -struct Foo { i: i32 } - -impl Foo { - fn f(foo: &mut Foo) -> i32 { - foo.i - } -} -"#, - ); - } - - #[test] - fn test_self_in_path_to_parameter() { - check( - "foo", - r#" -struct Foo { i: i32 } - -impl Foo { - fn f(&self) -> i32 { - let self_var = 1; - self<|>.i - } -} -"#, - r#" -struct Foo { i: i32 } - -impl Foo { - fn f(foo: &Foo) -> i32 { - let self_var = 1; - foo.i - } -} -"#, - ); - } -} diff --git a/crates/ra_ide/src/runnables.rs b/crates/ra_ide/src/runnables.rs deleted file mode 100644 index 3b7162b841..0000000000 --- a/crates/ra_ide/src/runnables.rs +++ /dev/null @@ -1,883 +0,0 @@ -use std::fmt; - -use hir::{AsAssocItem, Attrs, HirFileId, InFile, Semantics}; -use itertools::Itertools; -use ra_cfg::CfgExpr; -use ra_ide_db::RootDatabase; -use ra_syntax::{ - ast::{self, AstNode, AttrsOwner, DocCommentsOwner, ModuleItemOwner, NameOwner}, - match_ast, SyntaxNode, -}; - -use crate::{display::ToNav, FileId, NavigationTarget}; - -#[derive(Debug, Clone)] -pub struct Runnable { - pub nav: NavigationTarget, - pub kind: RunnableKind, - pub cfg_exprs: Vec, -} - -#[derive(Debug, Clone)] -pub enum TestId { - Name(String), - Path(String), -} - -impl fmt::Display for TestId { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - TestId::Name(name) => write!(f, "{}", name), - TestId::Path(path) => write!(f, "{}", path), - } - } -} - -#[derive(Debug, Clone)] -pub enum RunnableKind { - 
Test { test_id: TestId, attr: TestAttr }, - TestMod { path: String }, - Bench { test_id: TestId }, - DocTest { test_id: TestId }, - Bin, -} - -#[derive(Debug, Eq, PartialEq)] -pub struct RunnableAction { - pub run_title: &'static str, - pub debugee: bool, -} - -const TEST: RunnableAction = RunnableAction { run_title: "▶\u{fe0e} Run Test", debugee: true }; -const DOCTEST: RunnableAction = - RunnableAction { run_title: "▶\u{fe0e} Run Doctest", debugee: false }; -const BENCH: RunnableAction = RunnableAction { run_title: "▶\u{fe0e} Run Bench", debugee: true }; -const BIN: RunnableAction = RunnableAction { run_title: "▶\u{fe0e} Run", debugee: true }; - -impl Runnable { - // test package::module::testname - pub fn label(&self, target: Option) -> String { - match &self.kind { - RunnableKind::Test { test_id, .. } => format!("test {}", test_id), - RunnableKind::TestMod { path } => format!("test-mod {}", path), - RunnableKind::Bench { test_id } => format!("bench {}", test_id), - RunnableKind::DocTest { test_id, .. } => format!("doctest {}", test_id), - RunnableKind::Bin => { - target.map_or_else(|| "run binary".to_string(), |t| format!("run {}", t)) - } - } - } - - pub fn action(&self) -> &'static RunnableAction { - match &self.kind { - RunnableKind::Test { .. } | RunnableKind::TestMod { .. } => &TEST, - RunnableKind::DocTest { .. } => &DOCTEST, - RunnableKind::Bench { .. } => &BENCH, - RunnableKind::Bin => &BIN, - } - } -} - -// Feature: Run -// -// Shows a popup suggesting to run a test/benchmark/binary **at the current cursor -// location**. Super useful for repeatedly running just a single test. Do bind this -// to a shortcut! 
-// -// |=== -// | Editor | Action Name -// -// | VS Code | **Rust Analyzer: Run** -// |=== -pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec { - let sema = Semantics::new(db); - let source_file = sema.parse(file_id); - source_file.syntax().descendants().filter_map(|i| runnable(&sema, i, file_id)).collect() -} - -pub(crate) fn runnable( - sema: &Semantics, - item: SyntaxNode, - file_id: FileId, -) -> Option { - match_ast! { - match item { - ast::Fn(it) => runnable_fn(sema, it, file_id), - ast::Module(it) => runnable_mod(sema, it, file_id), - _ => None, - } - } -} - -fn runnable_fn( - sema: &Semantics, - fn_def: ast::Fn, - file_id: FileId, -) -> Option { - let name_string = fn_def.name()?.text().to_string(); - - let kind = if name_string == "main" { - RunnableKind::Bin - } else { - let test_id = match sema.to_def(&fn_def).map(|def| def.module(sema.db)) { - Some(module) => { - let def = sema.to_def(&fn_def)?; - let impl_trait_name = def.as_assoc_item(sema.db).and_then(|assoc_item| { - match assoc_item.container(sema.db) { - hir::AssocItemContainer::Trait(trait_item) => { - Some(trait_item.name(sema.db).to_string()) - } - hir::AssocItemContainer::ImplDef(impl_def) => impl_def - .target_ty(sema.db) - .as_adt() - .map(|adt| adt.name(sema.db).to_string()), - } - }); - - let path_iter = module - .path_to_root(sema.db) - .into_iter() - .rev() - .filter_map(|it| it.name(sema.db)) - .map(|name| name.to_string()); - - let path = if let Some(impl_trait_name) = impl_trait_name { - path_iter - .chain(std::iter::once(impl_trait_name)) - .chain(std::iter::once(name_string)) - .join("::") - } else { - path_iter.chain(std::iter::once(name_string)).join("::") - }; - - TestId::Path(path) - } - None => TestId::Name(name_string), - }; - - if has_test_related_attribute(&fn_def) { - let attr = TestAttr::from_fn(&fn_def); - RunnableKind::Test { test_id, attr } - } else if fn_def.has_atom_attr("bench") { - RunnableKind::Bench { test_id } - } else if has_doc_test(&fn_def) { 
- RunnableKind::DocTest { test_id } - } else { - return None; - } - }; - - let attrs = Attrs::from_attrs_owner(sema.db, InFile::new(HirFileId::from(file_id), &fn_def)); - let cfg_exprs = attrs.cfg().collect(); - - let nav = if let RunnableKind::DocTest { .. } = kind { - NavigationTarget::from_doc_commented( - sema.db, - InFile::new(file_id.into(), &fn_def), - InFile::new(file_id.into(), &fn_def), - ) - } else { - NavigationTarget::from_named(sema.db, InFile::new(file_id.into(), &fn_def)) - }; - Some(Runnable { nav, kind, cfg_exprs }) -} - -#[derive(Debug, Copy, Clone)] -pub struct TestAttr { - pub ignore: bool, -} - -impl TestAttr { - fn from_fn(fn_def: &ast::Fn) -> TestAttr { - let ignore = fn_def - .attrs() - .filter_map(|attr| attr.simple_name()) - .any(|attribute_text| attribute_text == "ignore"); - TestAttr { ignore } - } -} - -/// This is a method with a heuristics to support test methods annotated with custom test annotations, such as -/// `#[test_case(...)]`, `#[tokio::test]` and similar. -/// Also a regular `#[test]` annotation is supported. -/// -/// It may produce false positives, for example, `#[wasm_bindgen_test]` requires a different command to run the test, -/// but it's better than not to have the runnables for the tests at all. 
-fn has_test_related_attribute(fn_def: &ast::Fn) -> bool { - fn_def - .attrs() - .filter_map(|attr| attr.path()) - .map(|path| path.syntax().to_string().to_lowercase()) - .any(|attribute_text| attribute_text.contains("test")) -} - -fn has_doc_test(fn_def: &ast::Fn) -> bool { - fn_def.doc_comment_text().map_or(false, |comment| comment.contains("```")) -} - -fn runnable_mod( - sema: &Semantics, - module: ast::Module, - file_id: FileId, -) -> Option { - if !has_test_function_or_multiple_test_submodules(&module) { - return None; - } - let module_def = sema.to_def(&module)?; - - let path = module_def - .path_to_root(sema.db) - .into_iter() - .rev() - .filter_map(|it| it.name(sema.db)) - .join("::"); - - let attrs = Attrs::from_attrs_owner(sema.db, InFile::new(HirFileId::from(file_id), &module)); - let cfg_exprs = attrs.cfg().collect(); - let nav = module_def.to_nav(sema.db); - Some(Runnable { nav, kind: RunnableKind::TestMod { path }, cfg_exprs }) -} - -// We could create runnables for modules with number_of_test_submodules > 0, -// but that bloats the runnables for no real benefit, since all tests can be run by the submodule already -fn has_test_function_or_multiple_test_submodules(module: &ast::Module) -> bool { - if let Some(item_list) = module.item_list() { - let mut number_of_test_submodules = 0; - - for item in item_list.items() { - match item { - ast::Item::Fn(f) => { - if has_test_related_attribute(&f) { - return true; - } - } - ast::Item::Module(submodule) => { - if has_test_function_or_multiple_test_submodules(&submodule) { - number_of_test_submodules += 1; - } - } - _ => (), - } - } - - number_of_test_submodules > 1 - } else { - false - } -} - -#[cfg(test)] -mod tests { - use expect::{expect, Expect}; - - use crate::mock_analysis::analysis_and_position; - - use super::{RunnableAction, BENCH, BIN, DOCTEST, TEST}; - - fn check( - ra_fixture: &str, - // FIXME: fold this into `expect` as well - actions: &[&RunnableAction], - expect: Expect, - ) { - let (analysis, 
position) = analysis_and_position(ra_fixture); - let runnables = analysis.runnables(position.file_id).unwrap(); - expect.assert_debug_eq(&runnables); - assert_eq!( - actions, - runnables.into_iter().map(|it| it.action()).collect::>().as_slice() - ); - } - - #[test] - fn test_runnables() { - check( - r#" -//- /lib.rs -<|> -fn main() {} - -#[test] -fn test_foo() {} - -#[test] -#[ignore] -fn test_foo() {} - -#[bench] -fn bench() {} -"#, - &[&BIN, &TEST, &TEST, &BENCH], - expect![[r#" - [ - Runnable { - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 1..13, - focus_range: Some( - 4..8, - ), - name: "main", - kind: FN, - container_name: None, - description: None, - docs: None, - }, - kind: Bin, - cfg_exprs: [], - }, - Runnable { - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 15..39, - focus_range: Some( - 26..34, - ), - name: "test_foo", - kind: FN, - container_name: None, - description: None, - docs: None, - }, - kind: Test { - test_id: Path( - "test_foo", - ), - attr: TestAttr { - ignore: false, - }, - }, - cfg_exprs: [], - }, - Runnable { - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 41..75, - focus_range: Some( - 62..70, - ), - name: "test_foo", - kind: FN, - container_name: None, - description: None, - docs: None, - }, - kind: Test { - test_id: Path( - "test_foo", - ), - attr: TestAttr { - ignore: true, - }, - }, - cfg_exprs: [], - }, - Runnable { - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 77..99, - focus_range: Some( - 89..94, - ), - name: "bench", - kind: FN, - container_name: None, - description: None, - docs: None, - }, - kind: Bench { - test_id: Path( - "bench", - ), - }, - cfg_exprs: [], - }, - ] - "#]], - ); - } - - #[test] - fn test_runnables_doc_test() { - check( - r#" -//- /lib.rs -<|> -fn main() {} - -/// ``` -/// let x = 5; -/// ``` -fn foo() {} -"#, - &[&BIN, &DOCTEST], - expect![[r#" - [ - Runnable { - nav: NavigationTarget { - file_id: FileId( - 1, - ), - 
full_range: 1..13, - focus_range: Some( - 4..8, - ), - name: "main", - kind: FN, - container_name: None, - description: None, - docs: None, - }, - kind: Bin, - cfg_exprs: [], - }, - Runnable { - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 15..57, - focus_range: None, - name: "foo", - kind: FN, - container_name: None, - description: None, - docs: None, - }, - kind: DocTest { - test_id: Path( - "foo", - ), - }, - cfg_exprs: [], - }, - ] - "#]], - ); - } - - #[test] - fn test_runnables_doc_test_in_impl() { - check( - r#" -//- /lib.rs -<|> -fn main() {} - -struct Data; -impl Data { - /// ``` - /// let x = 5; - /// ``` - fn foo() {} -} -"#, - &[&BIN, &DOCTEST], - expect![[r#" - [ - Runnable { - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 1..13, - focus_range: Some( - 4..8, - ), - name: "main", - kind: FN, - container_name: None, - description: None, - docs: None, - }, - kind: Bin, - cfg_exprs: [], - }, - Runnable { - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 44..98, - focus_range: None, - name: "foo", - kind: FN, - container_name: None, - description: None, - docs: None, - }, - kind: DocTest { - test_id: Path( - "Data::foo", - ), - }, - cfg_exprs: [], - }, - ] - "#]], - ); - } - - #[test] - fn test_runnables_module() { - check( - r#" -//- /lib.rs -<|> -mod test_mod { - #[test] - fn test_foo1() {} -} -"#, - &[&TEST, &TEST], - expect![[r#" - [ - Runnable { - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 1..51, - focus_range: Some( - 5..13, - ), - name: "test_mod", - kind: MODULE, - container_name: None, - description: None, - docs: None, - }, - kind: TestMod { - path: "test_mod", - }, - cfg_exprs: [], - }, - Runnable { - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 20..49, - focus_range: Some( - 35..44, - ), - name: "test_foo1", - kind: FN, - container_name: None, - description: None, - docs: None, - }, - kind: Test { - test_id: Path( - "test_mod::test_foo1", 
- ), - attr: TestAttr { - ignore: false, - }, - }, - cfg_exprs: [], - }, - ] - "#]], - ); - } - - #[test] - fn only_modules_with_test_functions_or_more_than_one_test_submodule_have_runners() { - check( - r#" -//- /lib.rs -<|> -mod root_tests { - mod nested_tests_0 { - mod nested_tests_1 { - #[test] - fn nested_test_11() {} - - #[test] - fn nested_test_12() {} - } - - mod nested_tests_2 { - #[test] - fn nested_test_2() {} - } - - mod nested_tests_3 {} - } - - mod nested_tests_4 {} -} -"#, - &[&TEST, &TEST, &TEST, &TEST, &TEST, &TEST], - expect![[r#" - [ - Runnable { - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 22..323, - focus_range: Some( - 26..40, - ), - name: "nested_tests_0", - kind: MODULE, - container_name: None, - description: None, - docs: None, - }, - kind: TestMod { - path: "root_tests::nested_tests_0", - }, - cfg_exprs: [], - }, - Runnable { - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 51..192, - focus_range: Some( - 55..69, - ), - name: "nested_tests_1", - kind: MODULE, - container_name: None, - description: None, - docs: None, - }, - kind: TestMod { - path: "root_tests::nested_tests_0::nested_tests_1", - }, - cfg_exprs: [], - }, - Runnable { - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 84..126, - focus_range: Some( - 107..121, - ), - name: "nested_test_11", - kind: FN, - container_name: None, - description: None, - docs: None, - }, - kind: Test { - test_id: Path( - "root_tests::nested_tests_0::nested_tests_1::nested_test_11", - ), - attr: TestAttr { - ignore: false, - }, - }, - cfg_exprs: [], - }, - Runnable { - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 140..182, - focus_range: Some( - 163..177, - ), - name: "nested_test_12", - kind: FN, - container_name: None, - description: None, - docs: None, - }, - kind: Test { - test_id: Path( - "root_tests::nested_tests_0::nested_tests_1::nested_test_12", - ), - attr: TestAttr { - ignore: false, - }, - }, - cfg_exprs: 
[], - }, - Runnable { - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 202..286, - focus_range: Some( - 206..220, - ), - name: "nested_tests_2", - kind: MODULE, - container_name: None, - description: None, - docs: None, - }, - kind: TestMod { - path: "root_tests::nested_tests_0::nested_tests_2", - }, - cfg_exprs: [], - }, - Runnable { - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 235..276, - focus_range: Some( - 258..271, - ), - name: "nested_test_2", - kind: FN, - container_name: None, - description: None, - docs: None, - }, - kind: Test { - test_id: Path( - "root_tests::nested_tests_0::nested_tests_2::nested_test_2", - ), - attr: TestAttr { - ignore: false, - }, - }, - cfg_exprs: [], - }, - ] - "#]], - ); - } - - #[test] - fn test_runnables_with_feature() { - check( - r#" -//- /lib.rs crate:foo cfg:feature=foo -<|> -#[test] -#[cfg(feature = "foo")] -fn test_foo1() {} -"#, - &[&TEST], - expect![[r#" - [ - Runnable { - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 1..50, - focus_range: Some( - 36..45, - ), - name: "test_foo1", - kind: FN, - container_name: None, - description: None, - docs: None, - }, - kind: Test { - test_id: Path( - "test_foo1", - ), - attr: TestAttr { - ignore: false, - }, - }, - cfg_exprs: [ - KeyValue { - key: "feature", - value: "foo", - }, - ], - }, - ] - "#]], - ); - } - - #[test] - fn test_runnables_with_features() { - check( - r#" -//- /lib.rs crate:foo cfg:feature=foo,feature=bar -<|> -#[test] -#[cfg(all(feature = "foo", feature = "bar"))] -fn test_foo1() {} -"#, - &[&TEST], - expect![[r#" - [ - Runnable { - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 1..72, - focus_range: Some( - 58..67, - ), - name: "test_foo1", - kind: FN, - container_name: None, - description: None, - docs: None, - }, - kind: Test { - test_id: Path( - "test_foo1", - ), - attr: TestAttr { - ignore: false, - }, - }, - cfg_exprs: [ - All( - [ - KeyValue { - key: "feature", - value: 
"foo", - }, - KeyValue { - key: "feature", - value: "bar", - }, - ], - ), - ], - }, - ] - "#]], - ); - } - - #[test] - fn test_runnables_no_test_function_in_module() { - check( - r#" -//- /lib.rs -<|> -mod test_mod { - fn foo1() {} -} -"#, - &[], - expect![[r#" - [] - "#]], - ); - } -} diff --git a/crates/ra_ide/src/ssr.rs b/crates/ra_ide/src/ssr.rs deleted file mode 100644 index 4348b43beb..0000000000 --- a/crates/ra_ide/src/ssr.rs +++ /dev/null @@ -1,72 +0,0 @@ -use ra_db::{FilePosition, FileRange}; -use ra_ide_db::RootDatabase; - -use crate::SourceFileEdit; -use ra_ssr::{MatchFinder, SsrError, SsrRule}; - -// Feature: Structural Search and Replace -// -// Search and replace with named wildcards that will match any expression, type, path, pattern or item. -// The syntax for a structural search replace command is ` ==>> `. -// A `$` placeholder in the search pattern will match any AST node and `$` will reference it in the replacement. -// Within a macro call, a placeholder will match up until whatever token follows the placeholder. -// -// All paths in both the search pattern and the replacement template must resolve in the context -// in which this command is invoked. Paths in the search pattern will then match the code if they -// resolve to the same item, even if they're written differently. For example if we invoke the -// command in the module `foo` with a pattern of `Bar`, then code in the parent module that refers -// to `foo::Bar` will match. -// -// Paths in the replacement template will be rendered appropriately for the context in which the -// replacement occurs. For example if our replacement template is `foo::Bar` and we match some -// code in the `foo` module, we'll insert just `Bar`. -// -// Method calls should generally be written in UFCS form. e.g. `foo::Bar::baz($s, $a)` will match -// `$s.baz($a)`, provided the method call `baz` resolves to the method `foo::Bar::baz`. 
-// -// The scope of the search / replace will be restricted to the current selection if any, otherwise -// it will apply to the whole workspace. -// -// Placeholders may be given constraints by writing them as `${::...}`. -// -// Supported constraints: -// -// |=== -// | Constraint | Restricts placeholder -// -// | kind(literal) | Is a literal (e.g. `42` or `"forty two"`) -// | not(a) | Negates the constraint `a` -// |=== -// -// Available via the command `rust-analyzer.ssr`. -// -// ```rust -// // Using structural search replace command [foo($a, $b) ==>> ($a).foo($b)] -// -// // BEFORE -// String::from(foo(y + 5, z)) -// -// // AFTER -// String::from((y + 5).foo(z)) -// ``` -// -// |=== -// | Editor | Action Name -// -// | VS Code | **Rust Analyzer: Structural Search Replace** -// |=== -pub fn parse_search_replace( - rule: &str, - parse_only: bool, - db: &RootDatabase, - resolve_context: FilePosition, - selections: Vec, -) -> Result, SsrError> { - let rule: SsrRule = rule.parse()?; - let mut match_finder = MatchFinder::in_context(db, resolve_context, selections); - match_finder.add_rule(rule)?; - if parse_only { - return Ok(Vec::new()); - } - Ok(match_finder.edits()) -} diff --git a/crates/ra_ide/src/status.rs b/crates/ra_ide/src/status.rs deleted file mode 100644 index 08e6f69cb3..0000000000 --- a/crates/ra_ide/src/status.rs +++ /dev/null @@ -1,145 +0,0 @@ -use std::{fmt, iter::FromIterator, sync::Arc}; - -use hir::MacroFile; -use ra_db::{ - salsa::debug::{DebugQueryTable, TableEntry}, - FileTextQuery, SourceRootId, -}; -use ra_ide_db::{ - symbol_index::{LibrarySymbolsQuery, SymbolIndex}, - RootDatabase, -}; -use ra_prof::{memory_usage, Bytes}; -use ra_syntax::{ast, Parse, SyntaxNode}; -use rustc_hash::FxHashMap; - -use crate::FileId; - -fn syntax_tree_stats(db: &RootDatabase) -> SyntaxTreeStats { - ra_db::ParseQuery.in_db(db).entries::() -} -fn macro_syntax_tree_stats(db: &RootDatabase) -> SyntaxTreeStats { - hir::db::ParseMacroQuery.in_db(db).entries::() -} - 
-// Feature: Status -// -// Shows internal statistic about memory usage of rust-analyzer. -// -// |=== -// | Editor | Action Name -// -// | VS Code | **Rust Analyzer: Status** -// |=== -pub(crate) fn status(db: &RootDatabase) -> String { - let files_stats = FileTextQuery.in_db(db).entries::(); - let syntax_tree_stats = syntax_tree_stats(db); - let macro_syntax_tree_stats = macro_syntax_tree_stats(db); - let symbols_stats = LibrarySymbolsQuery.in_db(db).entries::(); - format!( - "{}\n{}\n{}\n{} (macros)\n\n\nmemory:\n{}\ngc {:?} seconds ago", - files_stats, - symbols_stats, - syntax_tree_stats, - macro_syntax_tree_stats, - memory_usage(), - db.last_gc.elapsed().as_secs(), - ) -} - -#[derive(Default)] -struct FilesStats { - total: usize, - size: Bytes, -} - -impl fmt::Display for FilesStats { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - write!(fmt, "{} ({}) files", self.total, self.size) - } -} - -impl FromIterator>> for FilesStats { - fn from_iter(iter: T) -> FilesStats - where - T: IntoIterator>>, - { - let mut res = FilesStats::default(); - for entry in iter { - res.total += 1; - res.size += entry.value.unwrap().len(); - } - res - } -} - -#[derive(Default)] -pub(crate) struct SyntaxTreeStats { - total: usize, - pub(crate) retained: usize, -} - -impl fmt::Display for SyntaxTreeStats { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - write!(fmt, "{} trees, {} retained", self.total, self.retained) - } -} - -impl FromIterator>> for SyntaxTreeStats { - fn from_iter(iter: T) -> SyntaxTreeStats - where - T: IntoIterator>>, - { - let mut res = SyntaxTreeStats::default(); - for entry in iter { - res.total += 1; - res.retained += entry.value.is_some() as usize; - } - res - } -} - -impl FromIterator, M)>>> for SyntaxTreeStats { - fn from_iter(iter: T) -> SyntaxTreeStats - where - T: IntoIterator, M)>>>, - { - let mut res = SyntaxTreeStats::default(); - for entry in iter { - res.total += 1; - res.retained += entry.value.is_some() as usize; - } - 
res - } -} - -#[derive(Default)] -struct LibrarySymbolsStats { - total: usize, - size: Bytes, -} - -impl fmt::Display for LibrarySymbolsStats { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - write!(fmt, "{} ({}) symbols", self.total, self.size) - } -} - -impl FromIterator>>> - for LibrarySymbolsStats -{ - fn from_iter(iter: T) -> LibrarySymbolsStats - where - T: IntoIterator>>>, - { - let mut res = LibrarySymbolsStats::default(); - for entry in iter { - let value = entry.value.unwrap(); - for symbols in value.values() { - res.total += symbols.len(); - res.size += symbols.memory_size(); - } - } - res - } -} diff --git a/crates/ra_ide/src/syntax_highlighting.rs b/crates/ra_ide/src/syntax_highlighting.rs deleted file mode 100644 index f71b804fe9..0000000000 --- a/crates/ra_ide/src/syntax_highlighting.rs +++ /dev/null @@ -1,822 +0,0 @@ -mod tags; -mod html; -mod injection; -#[cfg(test)] -mod tests; - -use hir::{Name, Semantics, VariantDef}; -use ra_ide_db::{ - defs::{classify_name, classify_name_ref, Definition, NameClass, NameRefClass}, - RootDatabase, -}; -use ra_prof::profile; -use ra_syntax::{ - ast::{self, HasFormatSpecifier}, - AstNode, AstToken, Direction, NodeOrToken, SyntaxElement, - SyntaxKind::*, - TextRange, WalkEvent, T, -}; -use rustc_hash::FxHashMap; - -use crate::FileId; - -use ast::FormatSpecifier; -pub(crate) use html::highlight_as_html; -pub use tags::{Highlight, HighlightModifier, HighlightModifiers, HighlightTag}; - -#[derive(Debug, Clone)] -pub struct HighlightedRange { - pub range: TextRange, - pub highlight: Highlight, - pub binding_hash: Option, -} - -// Feature: Semantic Syntax Highlighting -// -// rust-analyzer highlights the code semantically. -// For example, `bar` in `foo::Bar` might be colored differently depending on whether `Bar` is an enum or a trait. -// rust-analyzer does not specify colors directly, instead it assigns tag (like `struct`) and a set of modifiers (like `declaration`) to each token. 
-// It's up to the client to map those to specific colors. -// -// The general rule is that a reference to an entity gets colored the same way as the entity itself. -// We also give special modifier for `mut` and `&mut` local variables. -pub(crate) fn highlight( - db: &RootDatabase, - file_id: FileId, - range_to_highlight: Option, - syntactic_name_ref_highlighting: bool, -) -> Vec { - let _p = profile("highlight"); - let sema = Semantics::new(db); - - // Determine the root based on the given range. - let (root, range_to_highlight) = { - let source_file = sema.parse(file_id); - match range_to_highlight { - Some(range) => { - let node = match source_file.syntax().covering_element(range) { - NodeOrToken::Node(it) => it, - NodeOrToken::Token(it) => it.parent(), - }; - (node, range) - } - None => (source_file.syntax().clone(), source_file.syntax().text_range()), - } - }; - - let mut bindings_shadow_count: FxHashMap = FxHashMap::default(); - // We use a stack for the DFS traversal below. - // When we leave a node, the we use it to flatten the highlighted ranges. - let mut stack = HighlightedRangeStack::new(); - - let mut current_macro_call: Option = None; - let mut format_string: Option = None; - - // Walk all nodes, keeping track of whether we are inside a macro or not. - // If in macro, expand it first and highlight the expanded code. 
- for event in root.preorder_with_tokens() { - match &event { - WalkEvent::Enter(_) => stack.push(), - WalkEvent::Leave(_) => stack.pop(), - }; - - let event_range = match &event { - WalkEvent::Enter(it) => it.text_range(), - WalkEvent::Leave(it) => it.text_range(), - }; - - // Element outside of the viewport, no need to highlight - if range_to_highlight.intersect(event_range).is_none() { - continue; - } - - // Track "inside macro" state - match event.clone().map(|it| it.into_node().and_then(ast::MacroCall::cast)) { - WalkEvent::Enter(Some(mc)) => { - current_macro_call = Some(mc.clone()); - if let Some(range) = macro_call_range(&mc) { - stack.add(HighlightedRange { - range, - highlight: HighlightTag::Macro.into(), - binding_hash: None, - }); - } - if let Some(name) = mc.is_macro_rules() { - if let Some((highlight, binding_hash)) = highlight_element( - &sema, - &mut bindings_shadow_count, - syntactic_name_ref_highlighting, - name.syntax().clone().into(), - ) { - stack.add(HighlightedRange { - range: name.syntax().text_range(), - highlight, - binding_hash, - }); - } - } - continue; - } - WalkEvent::Leave(Some(mc)) => { - assert!(current_macro_call == Some(mc)); - current_macro_call = None; - format_string = None; - } - _ => (), - } - - // Check for Rust code in documentation - match &event { - WalkEvent::Leave(NodeOrToken::Node(node)) => { - if let Some((doctest, range_mapping, new_comments)) = - injection::extract_doc_comments(node) - { - injection::highlight_doc_comment( - doctest, - range_mapping, - new_comments, - &mut stack, - ); - } - } - _ => (), - } - - let element = match event { - WalkEvent::Enter(it) => it, - WalkEvent::Leave(_) => continue, - }; - - let range = element.text_range(); - - let element_to_highlight = if current_macro_call.is_some() && element.kind() != COMMENT { - // Inside a macro -- expand it first - let token = match element.clone().into_token() { - Some(it) if it.parent().kind() == TOKEN_TREE => it, - _ => continue, - }; - let token = 
sema.descend_into_macros(token.clone()); - let parent = token.parent(); - - // Check if macro takes a format string and remember it for highlighting later. - // The macros that accept a format string expand to a compiler builtin macros - // `format_args` and `format_args_nl`. - if let Some(name) = parent - .parent() - .and_then(ast::MacroCall::cast) - .and_then(|mc| mc.path()) - .and_then(|p| p.segment()) - .and_then(|s| s.name_ref()) - { - match name.text().as_str() { - "format_args" | "format_args_nl" => { - format_string = parent - .children_with_tokens() - .filter(|t| t.kind() != WHITESPACE) - .nth(1) - .filter(|e| { - ast::String::can_cast(e.kind()) - || ast::RawString::can_cast(e.kind()) - }) - } - _ => {} - } - } - - // We only care Name and Name_ref - match (token.kind(), parent.kind()) { - (IDENT, NAME) | (IDENT, NAME_REF) => parent.into(), - _ => token.into(), - } - } else { - element.clone() - }; - - if let Some(token) = element.as_token().cloned().and_then(ast::RawString::cast) { - let expanded = element_to_highlight.as_token().unwrap().clone(); - if injection::highlight_injection(&mut stack, &sema, token, expanded).is_some() { - continue; - } - } - - let is_format_string = format_string.as_ref() == Some(&element_to_highlight); - - if let Some((highlight, binding_hash)) = highlight_element( - &sema, - &mut bindings_shadow_count, - syntactic_name_ref_highlighting, - element_to_highlight.clone(), - ) { - stack.add(HighlightedRange { range, highlight, binding_hash }); - if let Some(string) = - element_to_highlight.as_token().cloned().and_then(ast::String::cast) - { - if is_format_string { - stack.push(); - string.lex_format_specifier(|piece_range, kind| { - if let Some(highlight) = highlight_format_specifier(kind) { - stack.add(HighlightedRange { - range: piece_range + range.start(), - highlight: highlight.into(), - binding_hash: None, - }); - } - }); - stack.pop(); - } - // Highlight escape sequences - if let Some(char_ranges) = string.char_ranges() { - 
stack.push(); - for (piece_range, _) in char_ranges.iter().filter(|(_, char)| char.is_ok()) { - if string.text()[piece_range.start().into()..].starts_with('\\') { - stack.add(HighlightedRange { - range: piece_range + range.start(), - highlight: HighlightTag::EscapeSequence.into(), - binding_hash: None, - }); - } - } - stack.pop_and_inject(None); - } - } else if let Some(string) = - element_to_highlight.as_token().cloned().and_then(ast::RawString::cast) - { - if is_format_string { - stack.push(); - string.lex_format_specifier(|piece_range, kind| { - if let Some(highlight) = highlight_format_specifier(kind) { - stack.add(HighlightedRange { - range: piece_range + range.start(), - highlight: highlight.into(), - binding_hash: None, - }); - } - }); - stack.pop(); - } - } - } - } - - stack.flattened() -} - -#[derive(Debug)] -struct HighlightedRangeStack { - stack: Vec>, -} - -/// We use a stack to implement the flattening logic for the highlighted -/// syntax ranges. -impl HighlightedRangeStack { - fn new() -> Self { - Self { stack: vec![Vec::new()] } - } - - fn push(&mut self) { - self.stack.push(Vec::new()); - } - - /// Flattens the highlighted ranges. 
- /// - /// For example `#[cfg(feature = "foo")]` contains the nested ranges: - /// 1) parent-range: Attribute [0, 23) - /// 2) child-range: String [16, 21) - /// - /// The following code implements the flattening, for our example this results to: - /// `[Attribute [0, 16), String [16, 21), Attribute [21, 23)]` - fn pop(&mut self) { - let children = self.stack.pop().unwrap(); - let prev = self.stack.last_mut().unwrap(); - let needs_flattening = !children.is_empty() - && !prev.is_empty() - && prev.last().unwrap().range.contains_range(children.first().unwrap().range); - if !needs_flattening { - prev.extend(children); - } else { - let mut parent = prev.pop().unwrap(); - for ele in children { - assert!(parent.range.contains_range(ele.range)); - - let cloned = Self::intersect(&mut parent, &ele); - if !parent.range.is_empty() { - prev.push(parent); - } - prev.push(ele); - parent = cloned; - } - if !parent.range.is_empty() { - prev.push(parent); - } - } - } - - /// Intersects the `HighlightedRange` `parent` with `child`. - /// `parent` is mutated in place, becoming the range before `child`. - /// Returns the range (of the same type as `parent`) *after* `child`. - fn intersect(parent: &mut HighlightedRange, child: &HighlightedRange) -> HighlightedRange { - assert!(parent.range.contains_range(child.range)); - - let mut cloned = parent.clone(); - parent.range = TextRange::new(parent.range.start(), child.range.start()); - cloned.range = TextRange::new(child.range.end(), cloned.range.end()); - - cloned - } - - /// Remove the `HighlightRange` of `parent` that's currently covered by `child`. 
- fn intersect_partial(parent: &mut HighlightedRange, child: &HighlightedRange) { - assert!( - parent.range.start() <= child.range.start() - && parent.range.end() >= child.range.start() - && child.range.end() > parent.range.end() - ); - - parent.range = TextRange::new(parent.range.start(), child.range.start()); - } - - /// Similar to `pop`, but can modify arbitrary prior ranges (where `pop`) - /// can only modify the last range currently on the stack. - /// Can be used to do injections that span multiple ranges, like the - /// doctest injection below. - /// If `overwrite_parent` is non-optional, the highlighting of the parent range - /// is overwritten with the argument. - /// - /// Note that `pop` can be simulated by `pop_and_inject(false)` but the - /// latter is computationally more expensive. - fn pop_and_inject(&mut self, overwrite_parent: Option) { - let mut children = self.stack.pop().unwrap(); - let prev = self.stack.last_mut().unwrap(); - children.sort_by_key(|range| range.range.start()); - prev.sort_by_key(|range| range.range.start()); - - for child in children { - if let Some(idx) = - prev.iter().position(|parent| parent.range.contains_range(child.range)) - { - if let Some(tag) = overwrite_parent { - prev[idx].highlight = tag; - } - - let cloned = Self::intersect(&mut prev[idx], &child); - let insert_idx = if prev[idx].range.is_empty() { - prev.remove(idx); - idx - } else { - idx + 1 - }; - prev.insert(insert_idx, child); - if !cloned.range.is_empty() { - prev.insert(insert_idx + 1, cloned); - } - } else { - let maybe_idx = - prev.iter().position(|parent| parent.range.contains(child.range.start())); - match (overwrite_parent, maybe_idx) { - (Some(_), Some(idx)) => { - Self::intersect_partial(&mut prev[idx], &child); - let insert_idx = if prev[idx].range.is_empty() { - prev.remove(idx); - idx - } else { - idx + 1 - }; - prev.insert(insert_idx, child); - } - (_, None) => { - let idx = prev - .binary_search_by_key(&child.range.start(), |range| 
range.range.start()) - .unwrap_or_else(|x| x); - prev.insert(idx, child); - } - _ => { - unreachable!("child range should be completely contained in parent range"); - } - } - } - } - } - - fn add(&mut self, range: HighlightedRange) { - self.stack - .last_mut() - .expect("during DFS traversal, the stack must not be empty") - .push(range) - } - - fn flattened(mut self) -> Vec { - assert_eq!( - self.stack.len(), - 1, - "after DFS traversal, the stack should only contain a single element" - ); - let mut res = self.stack.pop().unwrap(); - res.sort_by_key(|range| range.range.start()); - // Check that ranges are sorted and disjoint - assert!(res - .iter() - .zip(res.iter().skip(1)) - .all(|(left, right)| left.range.end() <= right.range.start())); - res - } -} - -fn highlight_format_specifier(kind: FormatSpecifier) -> Option { - Some(match kind { - FormatSpecifier::Open - | FormatSpecifier::Close - | FormatSpecifier::Colon - | FormatSpecifier::Fill - | FormatSpecifier::Align - | FormatSpecifier::Sign - | FormatSpecifier::NumberSign - | FormatSpecifier::DollarSign - | FormatSpecifier::Dot - | FormatSpecifier::Asterisk - | FormatSpecifier::QuestionMark => HighlightTag::FormatSpecifier, - FormatSpecifier::Integer | FormatSpecifier::Zero => HighlightTag::NumericLiteral, - FormatSpecifier::Identifier => HighlightTag::Local, - }) -} - -fn macro_call_range(macro_call: &ast::MacroCall) -> Option { - let path = macro_call.path()?; - let name_ref = path.segment()?.name_ref()?; - - let range_start = name_ref.syntax().text_range().start(); - let mut range_end = name_ref.syntax().text_range().end(); - for sibling in path.syntax().siblings_with_tokens(Direction::Next) { - match sibling.kind() { - T![!] 
| IDENT => range_end = sibling.text_range().end(), - _ => (), - } - } - - Some(TextRange::new(range_start, range_end)) -} - -fn is_possibly_unsafe(name_ref: &ast::NameRef) -> bool { - name_ref - .syntax() - .parent() - .and_then(|parent| { - ast::FieldExpr::cast(parent.clone()) - .map(|_| true) - .or_else(|| ast::RecordPatField::cast(parent).map(|_| true)) - }) - .unwrap_or(false) -} - -fn highlight_element( - sema: &Semantics, - bindings_shadow_count: &mut FxHashMap, - syntactic_name_ref_highlighting: bool, - element: SyntaxElement, -) -> Option<(Highlight, Option)> { - let db = sema.db; - let mut binding_hash = None; - let highlight: Highlight = match element.kind() { - FN => { - bindings_shadow_count.clear(); - return None; - } - - // Highlight definitions depending on the "type" of the definition. - NAME => { - let name = element.into_node().and_then(ast::Name::cast).unwrap(); - let name_kind = classify_name(sema, &name); - - if let Some(NameClass::Definition(Definition::Local(local))) = &name_kind { - if let Some(name) = local.name(db) { - let shadow_count = bindings_shadow_count.entry(name.clone()).or_default(); - *shadow_count += 1; - binding_hash = Some(calc_binding_hash(&name, *shadow_count)) - } - }; - - match name_kind { - Some(NameClass::Definition(def)) => { - highlight_name(db, def, false) | HighlightModifier::Definition - } - Some(NameClass::ConstReference(def)) => highlight_name(db, def, false), - Some(NameClass::FieldShorthand { field, .. 
}) => { - let mut h = HighlightTag::Field.into(); - if let Definition::Field(field) = field { - if let VariantDef::Union(_) = field.parent_def(db) { - h |= HighlightModifier::Unsafe; - } - } - - h - } - None => highlight_name_by_syntax(name) | HighlightModifier::Definition, - } - } - - // Highlight references like the definitions they resolve to - NAME_REF if element.ancestors().any(|it| it.kind() == ATTR) => { - Highlight::from(HighlightTag::Function) | HighlightModifier::Attribute - } - NAME_REF => { - let name_ref = element.into_node().and_then(ast::NameRef::cast).unwrap(); - let possibly_unsafe = is_possibly_unsafe(&name_ref); - match classify_name_ref(sema, &name_ref) { - Some(name_kind) => match name_kind { - NameRefClass::Definition(def) => { - if let Definition::Local(local) = &def { - if let Some(name) = local.name(db) { - let shadow_count = - bindings_shadow_count.entry(name.clone()).or_default(); - binding_hash = Some(calc_binding_hash(&name, *shadow_count)) - } - }; - highlight_name(db, def, possibly_unsafe) - } - NameRefClass::FieldShorthand { .. 
} => HighlightTag::Field.into(), - }, - None if syntactic_name_ref_highlighting => { - highlight_name_ref_by_syntax(name_ref, sema) - } - None => HighlightTag::UnresolvedReference.into(), - } - } - - // Simple token-based highlighting - COMMENT => { - let comment = element.into_token().and_then(ast::Comment::cast)?; - let h = HighlightTag::Comment; - match comment.kind().doc { - Some(_) => h | HighlightModifier::Documentation, - None => h.into(), - } - } - STRING | RAW_STRING | RAW_BYTE_STRING | BYTE_STRING => HighlightTag::StringLiteral.into(), - ATTR => HighlightTag::Attribute.into(), - INT_NUMBER | FLOAT_NUMBER => HighlightTag::NumericLiteral.into(), - BYTE => HighlightTag::ByteLiteral.into(), - CHAR => HighlightTag::CharLiteral.into(), - QUESTION => Highlight::new(HighlightTag::Operator) | HighlightModifier::ControlFlow, - LIFETIME => { - let h = Highlight::new(HighlightTag::Lifetime); - match element.parent().map(|it| it.kind()) { - Some(LIFETIME_PARAM) | Some(LABEL) => h | HighlightModifier::Definition, - _ => h, - } - } - p if p.is_punct() => match p { - T![::] | T![->] | T![=>] | T![&] | T![..] | T![=] | T![@] => { - HighlightTag::Operator.into() - } - T![!] 
if element.parent().and_then(ast::MacroCall::cast).is_some() => { - HighlightTag::Macro.into() - } - T![*] if element.parent().and_then(ast::PtrType::cast).is_some() => { - HighlightTag::Keyword.into() - } - T![*] if element.parent().and_then(ast::PrefixExpr::cast).is_some() => { - let prefix_expr = element.parent().and_then(ast::PrefixExpr::cast)?; - - let expr = prefix_expr.expr()?; - let ty = sema.type_of_expr(&expr)?; - if ty.is_raw_ptr() { - HighlightTag::Operator | HighlightModifier::Unsafe - } else if let Some(ast::PrefixOp::Deref) = prefix_expr.op_kind() { - HighlightTag::Operator.into() - } else { - HighlightTag::Punctuation.into() - } - } - T![-] if element.parent().and_then(ast::PrefixExpr::cast).is_some() => { - HighlightTag::NumericLiteral.into() - } - _ if element.parent().and_then(ast::PrefixExpr::cast).is_some() => { - HighlightTag::Operator.into() - } - _ if element.parent().and_then(ast::BinExpr::cast).is_some() => { - HighlightTag::Operator.into() - } - _ if element.parent().and_then(ast::RangeExpr::cast).is_some() => { - HighlightTag::Operator.into() - } - _ if element.parent().and_then(ast::RangePat::cast).is_some() => { - HighlightTag::Operator.into() - } - _ if element.parent().and_then(ast::RestPat::cast).is_some() => { - HighlightTag::Operator.into() - } - _ if element.parent().and_then(ast::Attr::cast).is_some() => { - HighlightTag::Attribute.into() - } - _ => HighlightTag::Punctuation.into(), - }, - - k if k.is_keyword() => { - let h = Highlight::new(HighlightTag::Keyword); - match k { - T![break] - | T![continue] - | T![else] - | T![if] - | T![loop] - | T![match] - | T![return] - | T![while] - | T![in] => h | HighlightModifier::ControlFlow, - T![for] if !is_child_of_impl(&element) => h | HighlightModifier::ControlFlow, - T![unsafe] => h | HighlightModifier::Unsafe, - T![true] | T![false] => HighlightTag::BoolLiteral.into(), - T![self] => { - let self_param_is_mut = element - .parent() - .and_then(ast::SelfParam::cast) - .and_then(|p| 
p.mut_token()) - .is_some(); - // closure to enforce lazyness - let self_path = || { - sema.resolve_path(&element.parent()?.parent().and_then(ast::Path::cast)?) - }; - if self_param_is_mut - || matches!(self_path(), - Some(hir::PathResolution::Local(local)) - if local.is_self(db) - && (local.is_mut(db) || local.ty(db).is_mutable_reference()) - ) - { - HighlightTag::SelfKeyword | HighlightModifier::Mutable - } else { - HighlightTag::SelfKeyword.into() - } - } - _ => h, - } - } - - _ => return None, - }; - - return Some((highlight, binding_hash)); - - fn calc_binding_hash(name: &Name, shadow_count: u32) -> u64 { - fn hash(x: T) -> u64 { - use std::{collections::hash_map::DefaultHasher, hash::Hasher}; - - let mut hasher = DefaultHasher::new(); - x.hash(&mut hasher); - hasher.finish() - } - - hash((name, shadow_count)) - } -} - -fn is_child_of_impl(element: &SyntaxElement) -> bool { - match element.parent() { - Some(e) => e.kind() == IMPL, - _ => false, - } -} - -fn highlight_name(db: &RootDatabase, def: Definition, possibly_unsafe: bool) -> Highlight { - match def { - Definition::Macro(_) => HighlightTag::Macro, - Definition::Field(field) => { - let mut h = HighlightTag::Field.into(); - if possibly_unsafe { - if let VariantDef::Union(_) = field.parent_def(db) { - h |= HighlightModifier::Unsafe; - } - } - - return h; - } - Definition::ModuleDef(def) => match def { - hir::ModuleDef::Module(_) => HighlightTag::Module, - hir::ModuleDef::Function(func) => { - let mut h = HighlightTag::Function.into(); - if func.is_unsafe(db) { - h |= HighlightModifier::Unsafe; - } - return h; - } - hir::ModuleDef::Adt(hir::Adt::Struct(_)) => HighlightTag::Struct, - hir::ModuleDef::Adt(hir::Adt::Enum(_)) => HighlightTag::Enum, - hir::ModuleDef::Adt(hir::Adt::Union(_)) => HighlightTag::Union, - hir::ModuleDef::EnumVariant(_) => HighlightTag::EnumVariant, - hir::ModuleDef::Const(_) => HighlightTag::Constant, - hir::ModuleDef::Trait(_) => HighlightTag::Trait, - hir::ModuleDef::TypeAlias(_) => 
HighlightTag::TypeAlias, - hir::ModuleDef::BuiltinType(_) => HighlightTag::BuiltinType, - hir::ModuleDef::Static(s) => { - let mut h = Highlight::new(HighlightTag::Static); - if s.is_mut(db) { - h |= HighlightModifier::Mutable; - h |= HighlightModifier::Unsafe; - } - return h; - } - }, - Definition::SelfType(_) => HighlightTag::SelfType, - Definition::TypeParam(_) => HighlightTag::TypeParam, - Definition::Local(local) => { - let tag = - if local.is_param(db) { HighlightTag::ValueParam } else { HighlightTag::Local }; - let mut h = Highlight::new(tag); - if local.is_mut(db) || local.ty(db).is_mutable_reference() { - h |= HighlightModifier::Mutable; - } - return h; - } - } - .into() -} - -fn highlight_name_by_syntax(name: ast::Name) -> Highlight { - let default = HighlightTag::UnresolvedReference; - - let parent = match name.syntax().parent() { - Some(it) => it, - _ => return default.into(), - }; - - let tag = match parent.kind() { - STRUCT => HighlightTag::Struct, - ENUM => HighlightTag::Enum, - UNION => HighlightTag::Union, - TRAIT => HighlightTag::Trait, - TYPE_ALIAS => HighlightTag::TypeAlias, - TYPE_PARAM => HighlightTag::TypeParam, - RECORD_FIELD => HighlightTag::Field, - MODULE => HighlightTag::Module, - FN => HighlightTag::Function, - CONST => HighlightTag::Constant, - STATIC => HighlightTag::Static, - VARIANT => HighlightTag::EnumVariant, - IDENT_PAT => HighlightTag::Local, - _ => default, - }; - - tag.into() -} - -fn highlight_name_ref_by_syntax(name: ast::NameRef, sema: &Semantics) -> Highlight { - let default = HighlightTag::UnresolvedReference; - - let parent = match name.syntax().parent() { - Some(it) => it, - _ => return default.into(), - }; - - let tag = match parent.kind() { - METHOD_CALL_EXPR => HighlightTag::Function, - FIELD_EXPR => { - let h = HighlightTag::Field; - let is_union = ast::FieldExpr::cast(parent) - .and_then(|field_expr| { - let field = sema.resolve_field(&field_expr)?; - Some(if let VariantDef::Union(_) = field.parent_def(sema.db) { 
- true - } else { - false - }) - }) - .unwrap_or(false); - return if is_union { h | HighlightModifier::Unsafe } else { h.into() }; - } - PATH_SEGMENT => { - let path = match parent.parent().and_then(ast::Path::cast) { - Some(it) => it, - _ => return default.into(), - }; - let expr = match path.syntax().parent().and_then(ast::PathExpr::cast) { - Some(it) => it, - _ => { - // within path, decide whether it is module or adt by checking for uppercase name - return if name.text().chars().next().unwrap_or_default().is_uppercase() { - HighlightTag::Struct - } else { - HighlightTag::Module - } - .into(); - } - }; - let parent = match expr.syntax().parent() { - Some(it) => it, - None => return default.into(), - }; - - match parent.kind() { - CALL_EXPR => HighlightTag::Function, - _ => { - if name.text().chars().next().unwrap_or_default().is_uppercase() { - HighlightTag::Struct - } else { - HighlightTag::Constant - } - } - } - } - _ => default, - }; - - tag.into() -} diff --git a/crates/ra_ide/src/syntax_highlighting/html.rs b/crates/ra_ide/src/syntax_highlighting/html.rs deleted file mode 100644 index a5e7d28676..0000000000 --- a/crates/ra_ide/src/syntax_highlighting/html.rs +++ /dev/null @@ -1,97 +0,0 @@ -//! Renders a bit of code as HTML. - -use oorandom::Rand32; -use ra_db::SourceDatabase; -use ra_syntax::{AstNode, TextRange, TextSize}; - -use crate::{syntax_highlighting::highlight, FileId, RootDatabase}; - -pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: bool) -> String { - let parse = db.parse(file_id); - - fn rainbowify(seed: u64) -> String { - let mut rng = Rand32::new(seed); - format!( - "hsl({h},{s}%,{l}%)", - h = rng.rand_range(0..361), - s = rng.rand_range(42..99), - l = rng.rand_range(40..91), - ) - } - - let ranges = highlight(db, file_id, None, false); - let text = parse.tree().syntax().to_string(); - let mut prev_pos = TextSize::from(0); - let mut buf = String::new(); - buf.push_str(&STYLE); - buf.push_str("
");
-    for range in &ranges {
-        if range.range.start() > prev_pos {
-            let curr = &text[TextRange::new(prev_pos, range.range.start())];
-            let text = html_escape(curr);
-            buf.push_str(&text);
-        }
-        let curr = &text[TextRange::new(range.range.start(), range.range.end())];
-
-        let class = range.highlight.to_string().replace('.', " ");
-        let color = match (rainbow, range.binding_hash) {
-            (true, Some(hash)) => {
-                format!(" data-binding-hash=\"{}\" style=\"color: {};\"", hash, rainbowify(hash))
-            }
-            _ => "".into(),
-        };
-        buf.push_str(&format!("{}", class, color, html_escape(curr)));
-
-        prev_pos = range.range.end();
-    }
-    // Add the remaining (non-highlighted) text
-    let curr = &text[TextRange::new(prev_pos, TextSize::of(&text))];
-    let text = html_escape(curr);
-    buf.push_str(&text);
-    buf.push_str("
"); - buf -} - -//FIXME: like, real html escaping -fn html_escape(text: &str) -> String { - text.replace("<", "<").replace(">", ">") -} - -const STYLE: &str = " - -"; diff --git a/crates/ra_ide/src/syntax_highlighting/injection.rs b/crates/ra_ide/src/syntax_highlighting/injection.rs deleted file mode 100644 index 8665b480fd..0000000000 --- a/crates/ra_ide/src/syntax_highlighting/injection.rs +++ /dev/null @@ -1,188 +0,0 @@ -//! Syntax highlighting injections such as highlighting of documentation tests. - -use std::{collections::BTreeMap, convert::TryFrom}; - -use ast::{HasQuotes, HasStringValue}; -use hir::Semantics; -use ra_syntax::{ast, AstToken, SyntaxNode, SyntaxToken, TextRange, TextSize}; -use stdx::SepBy; - -use crate::{ - call_info::ActiveParameter, Analysis, Highlight, HighlightModifier, HighlightTag, - HighlightedRange, RootDatabase, -}; - -use super::HighlightedRangeStack; - -pub(super) fn highlight_injection( - acc: &mut HighlightedRangeStack, - sema: &Semantics, - literal: ast::RawString, - expanded: SyntaxToken, -) -> Option<()> { - let active_parameter = ActiveParameter::at_token(&sema, expanded)?; - if !active_parameter.name.starts_with("ra_fixture") { - return None; - } - let value = literal.value()?; - let (analysis, tmp_file_id) = Analysis::from_single_file(value.into_owned()); - - if let Some(range) = literal.open_quote_text_range() { - acc.add(HighlightedRange { - range, - highlight: HighlightTag::StringLiteral.into(), - binding_hash: None, - }) - } - - for mut h in analysis.highlight(tmp_file_id).unwrap() { - if let Some(r) = literal.map_range_up(h.range) { - h.range = r; - acc.add(h) - } - } - - if let Some(range) = literal.close_quote_text_range() { - acc.add(HighlightedRange { - range, - highlight: HighlightTag::StringLiteral.into(), - binding_hash: None, - }) - } - - Some(()) -} - -/// Mapping from extracted documentation code to original code -type RangesMap = BTreeMap; - -const RUSTDOC_FENCE: &'static str = "```"; -const 
RUSTDOC_FENCE_TOKENS: &[&'static str] = - &["", "rust", "should_panic", "ignore", "no_run", "compile_fail", "edition2015", "edition2018"]; - -/// Extracts Rust code from documentation comments as well as a mapping from -/// the extracted source code back to the original source ranges. -/// Lastly, a vector of new comment highlight ranges (spanning only the -/// comment prefix) is returned which is used in the syntax highlighting -/// injection to replace the previous (line-spanning) comment ranges. -pub(super) fn extract_doc_comments( - node: &SyntaxNode, -) -> Option<(String, RangesMap, Vec)> { - // wrap the doctest into function body to get correct syntax highlighting - let prefix = "fn doctest() {\n"; - let suffix = "}\n"; - // Mapping from extracted documentation code to original code - let mut range_mapping: RangesMap = BTreeMap::new(); - let mut line_start = TextSize::try_from(prefix.len()).unwrap(); - let mut is_codeblock = false; - let mut is_doctest = false; - // Replace the original, line-spanning comment ranges by new, only comment-prefix - // spanning comment ranges. 
- let mut new_comments = Vec::new(); - let doctest = node - .children_with_tokens() - .filter_map(|el| el.into_token().and_then(ast::Comment::cast)) - .filter(|comment| comment.kind().doc.is_some()) - .filter(|comment| { - if let Some(idx) = comment.text().find(RUSTDOC_FENCE) { - is_codeblock = !is_codeblock; - // Check whether code is rust by inspecting fence guards - let guards = &comment.text()[idx + RUSTDOC_FENCE.len()..]; - let is_rust = - guards.split(',').all(|sub| RUSTDOC_FENCE_TOKENS.contains(&sub.trim())); - is_doctest = is_codeblock && is_rust; - false - } else { - is_doctest - } - }) - .map(|comment| { - let prefix_len = comment.prefix().len(); - let line: &str = comment.text().as_str(); - let range = comment.syntax().text_range(); - - // whitespace after comment is ignored - let pos = if let Some(ws) = line.chars().nth(prefix_len).filter(|c| c.is_whitespace()) { - prefix_len + ws.len_utf8() - } else { - prefix_len - }; - - // lines marked with `#` should be ignored in output, we skip the `#` char - let pos = if let Some(ws) = line.chars().nth(pos).filter(|&c| c == '#') { - pos + ws.len_utf8() - } else { - pos - }; - - range_mapping.insert(line_start, range.start() + TextSize::try_from(pos).unwrap()); - new_comments.push(HighlightedRange { - range: TextRange::new( - range.start(), - range.start() + TextSize::try_from(pos).unwrap(), - ), - highlight: HighlightTag::Comment | HighlightModifier::Documentation, - binding_hash: None, - }); - line_start += range.len() - TextSize::try_from(pos).unwrap(); - line_start += TextSize::try_from('\n'.len_utf8()).unwrap(); - - line[pos..].to_owned() - }) - .sep_by("\n") - .to_string(); - - if doctest.is_empty() { - return None; - } - - let doctest = format!("{}{}{}", prefix, doctest, suffix); - Some((doctest, range_mapping, new_comments)) -} - -/// Injection of syntax highlighting of doctests. 
-pub(super) fn highlight_doc_comment( - text: String, - range_mapping: RangesMap, - new_comments: Vec, - stack: &mut HighlightedRangeStack, -) { - let (analysis, tmp_file_id) = Analysis::from_single_file(text); - - stack.push(); - for mut h in analysis.with_db(|db| super::highlight(db, tmp_file_id, None, true)).unwrap() { - // Determine start offset and end offset in case of multi-line ranges - let mut start_offset = None; - let mut end_offset = None; - for (line_start, orig_line_start) in range_mapping.range(..h.range.end()).rev() { - // It's possible for orig_line_start - line_start to be negative. Add h.range.start() - // here and remove it from the end range after the loop below so that the values are - // always non-negative. - let offset = h.range.start() + orig_line_start - line_start; - if line_start <= &h.range.start() { - start_offset.get_or_insert(offset); - break; - } else { - end_offset.get_or_insert(offset); - } - } - if let Some(start_offset) = start_offset { - h.range = TextRange::new( - start_offset, - h.range.end() + end_offset.unwrap_or(start_offset) - h.range.start(), - ); - - h.highlight |= HighlightModifier::Injected; - stack.add(h); - } - } - - // Inject the comment prefix highlight ranges - stack.push(); - for comment in new_comments { - stack.add(comment); - } - stack.pop_and_inject(None); - stack - .pop_and_inject(Some(Highlight::from(HighlightTag::Generic) | HighlightModifier::Injected)); -} diff --git a/crates/ra_ide/src/syntax_highlighting/tests.rs b/crates/ra_ide/src/syntax_highlighting/tests.rs deleted file mode 100644 index 730efff0de..0000000000 --- a/crates/ra_ide/src/syntax_highlighting/tests.rs +++ /dev/null @@ -1,401 +0,0 @@ -use std::fs; - -use expect::{expect_file, ExpectFile}; -use test_utils::project_dir; - -use crate::{mock_analysis::single_file, FileRange, TextRange}; - -#[test] -fn test_highlighting() { - check_highlighting( - r#" -use inner::{self as inner_mod}; -mod inner {} - -#[derive(Clone, Debug)] -struct Foo { - 
pub x: i32, - pub y: i32, -} - -trait Bar { - fn bar(&self) -> i32; -} - -impl Bar for Foo { - fn bar(&self) -> i32 { - self.x - } -} - -impl Foo { - fn baz(mut self) -> i32 { - self.x - } - - fn qux(&mut self) { - self.x = 0; - } -} - -static mut STATIC_MUT: i32 = 0; - -fn foo<'a, T>() -> T { - foo::<'a, i32>() -} - -macro_rules! def_fn { - ($($tt:tt)*) => {$($tt)*} -} - -def_fn! { - fn bar() -> u32 { - 100 - } -} - -macro_rules! noop { - ($expr:expr) => { - $expr - } -} - -// comment -fn main() { - println!("Hello, {}!", 92); - - let mut vec = Vec::new(); - if true { - let x = 92; - vec.push(Foo { x, y: 1 }); - } - unsafe { - vec.set_len(0); - STATIC_MUT = 1; - } - - for e in vec { - // Do nothing - } - - noop!(noop!(1)); - - let mut x = 42; - let y = &mut x; - let z = &y; - - let Foo { x: z, y } = Foo { x: z, y }; - - y; -} - -enum Option { - Some(T), - None, -} -use Option::*; - -impl Option { - fn and(self, other: Option) -> Option<(T, U)> { - match other { - None => unimplemented!(), - Nope => Nope, - } - } -} -"# - .trim(), - expect_file!["crates/ra_ide/test_data/highlighting.html"], - false, - ); -} - -#[test] -fn test_rainbow_highlighting() { - check_highlighting( - r#" -fn main() { - let hello = "hello"; - let x = hello.to_string(); - let y = hello.to_string(); - - let x = "other color please!"; - let y = x.to_string(); -} - -fn bar() { - let mut hello = "hello"; -} -"# - .trim(), - expect_file!["crates/ra_ide/test_data/rainbow_highlighting.html"], - true, - ); -} - -#[test] -fn accidentally_quadratic() { - let file = project_dir().join("crates/ra_syntax/test_data/accidentally_quadratic"); - let src = fs::read_to_string(file).unwrap(); - - let (analysis, file_id) = single_file(&src); - - // let t = std::time::Instant::now(); - let _ = analysis.highlight(file_id).unwrap(); - // eprintln!("elapsed: {:?}", t.elapsed()); -} - -#[test] -fn test_ranges() { - let (analysis, file_id) = single_file( - r#" -#[derive(Clone, Debug)] -struct Foo { - pub x: i32, - pub 
y: i32, -} -"#, - ); - - // The "x" - let highlights = &analysis - .highlight_range(FileRange { file_id, range: TextRange::at(45.into(), 1.into()) }) - .unwrap(); - - assert_eq!(&highlights[0].highlight.to_string(), "field.declaration"); -} - -#[test] -fn test_flattening() { - check_highlighting( - r##" -fn fixture(ra_fixture: &str) {} - -fn main() { - fixture(r#" - trait Foo { - fn foo() { - println!("2 + 2 = {}", 4); - } - }"# - ); -}"## - .trim(), - expect_file!["crates/ra_ide/test_data/highlight_injection.html"], - false, - ); -} - -#[test] -fn ranges_sorted() { - let (analysis, file_id) = single_file( - r#" -#[foo(bar = "bar")] -macro_rules! test {} -}"# - .trim(), - ); - let _ = analysis.highlight(file_id).unwrap(); -} - -#[test] -fn test_string_highlighting() { - // The format string detection is based on macro-expansion, - // thus, we have to copy the macro definition from `std` - check_highlighting( - r#" -macro_rules! println { - ($($arg:tt)*) => ({ - $crate::io::_print($crate::format_args_nl!($($arg)*)); - }) -} -#[rustc_builtin_macro] -macro_rules! format_args_nl { - ($fmt:expr) => {{ /* compiler built-in */ }}; - ($fmt:expr, $($args:tt)*) => {{ /* compiler built-in */ }}; -} - -fn main() { - // from https://doc.rust-lang.org/std/fmt/index.html - println!("Hello"); // => "Hello" - println!("Hello, {}!", "world"); // => "Hello, world!" 
- println!("The number is {}", 1); // => "The number is 1" - println!("{:?}", (3, 4)); // => "(3, 4)" - println!("{value}", value=4); // => "4" - println!("{} {}", 1, 2); // => "1 2" - println!("{:04}", 42); // => "0042" with leading zerosV - println!("{1} {} {0} {}", 1, 2); // => "2 1 1 2" - println!("{argument}", argument = "test"); // => "test" - println!("{name} {}", 1, name = 2); // => "2 1" - println!("{a} {c} {b}", a="a", b='b', c=3); // => "a 3 b" - println!("{{{}}}", 2); // => "{2}" - println!("Hello {:5}!", "x"); - println!("Hello {:1$}!", "x", 5); - println!("Hello {1:0$}!", 5, "x"); - println!("Hello {:width$}!", "x", width = 5); - println!("Hello {:<5}!", "x"); - println!("Hello {:-<5}!", "x"); - println!("Hello {:^5}!", "x"); - println!("Hello {:>5}!", "x"); - println!("Hello {:+}!", 5); - println!("{:#x}!", 27); - println!("Hello {:05}!", 5); - println!("Hello {:05}!", -5); - println!("{:#010x}!", 27); - println!("Hello {0} is {1:.5}", "x", 0.01); - println!("Hello {1} is {2:.0$}", 5, "x", 0.01); - println!("Hello {0} is {2:.1$}", "x", 5, 0.01); - println!("Hello {} is {:.*}", "x", 5, 0.01); - println!("Hello {} is {2:.*}", "x", 5, 0.01); - println!("Hello {} is {number:.prec$}", "x", prec = 5, number = 0.01); - println!("{}, `{name:.*}` has 3 fractional digits", "Hello", 3, name=1234.56); - println!("{}, `{name:.*}` has 3 characters", "Hello", 3, name="1234.56"); - println!("{}, `{name:>8.*}` has 3 right-aligned characters", "Hello", 3, name="1234.56"); - println!("Hello {{}}"); - println!("{{ Hello"); - - println!(r"Hello, {}!", "world"); - - // escape sequences - println!("Hello\nWorld"); - println!("\u{48}\x65\x6C\x6C\x6F World"); - - println!("{\x41}", A = 92); - println!("{ничоси}", ничоси = 92); -}"# - .trim(), - expect_file!["crates/ra_ide/test_data/highlight_strings.html"], - false, - ); -} - -#[test] -fn test_unsafe_highlighting() { - check_highlighting( - r#" -unsafe fn unsafe_fn() {} - -union Union { - a: u32, - b: f32, -} - -struct 
HasUnsafeFn; - -impl HasUnsafeFn { - unsafe fn unsafe_method(&self) {} -} - -struct TypeForStaticMut { - a: u8 -} - -static mut global_mut: TypeForStaticMut = TypeForStaticMut { a: 0 }; - -fn main() { - let x = &5 as *const usize; - let u = Union { b: 0 }; - unsafe { - unsafe_fn(); - let b = u.b; - match u { - Union { b: 0 } => (), - Union { a } => (), - } - HasUnsafeFn.unsafe_method(); - let y = *(x); - let z = -x; - let a = global_mut.a; - } -} -"# - .trim(), - expect_file!["crates/ra_ide/test_data/highlight_unsafe.html"], - false, - ); -} - -#[test] -fn test_highlight_doctest() { - check_highlighting( - r#" -/// ``` -/// let _ = "early doctests should not go boom"; -/// ``` -struct Foo { - bar: bool, -} - -impl Foo { - pub const bar: bool = true; - - /// Constructs a new `Foo`. - /// - /// # Examples - /// - /// ``` - /// # #![allow(unused_mut)] - /// let mut foo: Foo = Foo::new(); - /// ``` - pub const fn new() -> Foo { - Foo { bar: true } - } - - /// `bar` method on `Foo`. - /// - /// # Examples - /// - /// ``` - /// use x::y; - /// - /// let foo = Foo::new(); - /// - /// // calls bar on foo - /// assert!(foo.bar()); - /// - /// let bar = foo.bar || Foo::bar; - /// - /// /* multi-line - /// comment */ - /// - /// let multi_line_string = "Foo - /// bar - /// "; - /// - /// ``` - /// - /// ```rust,no_run - /// let foobar = Foo::new().bar(); - /// ``` - /// - /// ```sh - /// echo 1 - /// ``` - pub fn foo(&self) -> bool { - true - } -} - -/// ``` -/// noop!(1); -/// ``` -macro_rules! noop { - ($expr:expr) => { - $expr - } -} -"# - .trim(), - expect_file!["crates/ra_ide/test_data/highlight_doctest.html"], - false, - ); -} - -/// Highlights the code given by the `ra_fixture` argument, renders the -/// result as HTML, and compares it with the HTML file given as `snapshot`. -/// Note that the `snapshot` file is overwritten by the rendered HTML. 
-fn check_highlighting(ra_fixture: &str, expect: ExpectFile, rainbow: bool) { - let (analysis, file_id) = single_file(ra_fixture); - let actual_html = &analysis.highlight_as_html(file_id, rainbow).unwrap(); - expect.assert_eq(actual_html) -} diff --git a/crates/ra_ide/src/syntax_tree.rs b/crates/ra_ide/src/syntax_tree.rs deleted file mode 100644 index 07217e8087..0000000000 --- a/crates/ra_ide/src/syntax_tree.rs +++ /dev/null @@ -1,359 +0,0 @@ -use ra_db::{FileId, SourceDatabase}; -use ra_ide_db::RootDatabase; -use ra_syntax::{ - algo, AstNode, NodeOrToken, SourceFile, - SyntaxKind::{RAW_STRING, STRING}, - SyntaxToken, TextRange, TextSize, -}; - -// Feature: Show Syntax Tree -// -// Shows the parse tree of the current file. It exists mostly for debugging -// rust-analyzer itself. -// -// |=== -// | Editor | Action Name -// -// | VS Code | **Rust Analyzer: Show Syntax Tree** -// |=== -pub(crate) fn syntax_tree( - db: &RootDatabase, - file_id: FileId, - text_range: Option, -) -> String { - let parse = db.parse(file_id); - if let Some(text_range) = text_range { - let node = match algo::find_covering_element(parse.tree().syntax(), text_range) { - NodeOrToken::Node(node) => node, - NodeOrToken::Token(token) => { - if let Some(tree) = syntax_tree_for_string(&token, text_range) { - return tree; - } - token.parent() - } - }; - - format!("{:#?}", node) - } else { - format!("{:#?}", parse.tree().syntax()) - } -} - -/// Attempts parsing the selected contents of a string literal -/// as rust syntax and returns its syntax tree -fn syntax_tree_for_string(token: &SyntaxToken, text_range: TextRange) -> Option { - // When the range is inside a string - // we'll attempt parsing it as rust syntax - // to provide the syntax tree of the contents of the string - match token.kind() { - STRING | RAW_STRING => syntax_tree_for_token(token, text_range), - _ => None, - } -} - -fn syntax_tree_for_token(node: &SyntaxToken, text_range: TextRange) -> Option { - // Range of the full node - let 
node_range = node.text_range(); - let text = node.text().to_string(); - - // We start at some point inside the node - // Either we have selected the whole string - // or our selection is inside it - let start = text_range.start() - node_range.start(); - - // how many characters we have selected - let len = text_range.len(); - - let node_len = node_range.len(); - - let start = start; - - // We want to cap our length - let len = len.min(node_len); - - // Ensure our slice is inside the actual string - let end = - if start + len < TextSize::of(&text) { start + len } else { TextSize::of(&text) - start }; - - let text = &text[TextRange::new(start, end)]; - - // Remove possible extra string quotes from the start - // and the end of the string - let text = text - .trim_start_matches('r') - .trim_start_matches('#') - .trim_start_matches('"') - .trim_end_matches('#') - .trim_end_matches('"') - .trim() - // Remove custom markers - .replace("<|>", ""); - - let parsed = SourceFile::parse(&text); - - // If the "file" parsed without errors, - // return its syntax - if parsed.errors().is_empty() { - return Some(format!("{:#?}", parsed.tree().syntax())); - } - - None -} - -#[cfg(test)] -mod tests { - use test_utils::assert_eq_text; - - use crate::mock_analysis::{analysis_and_range, single_file}; - - #[test] - fn test_syntax_tree_without_range() { - // Basic syntax - let (analysis, file_id) = single_file(r#"fn foo() {}"#); - let syn = analysis.syntax_tree(file_id, None).unwrap(); - - assert_eq_text!( - syn.trim(), - r#" -SOURCE_FILE@0..11 - FN@0..11 - FN_KW@0..2 "fn" - WHITESPACE@2..3 " " - NAME@3..6 - IDENT@3..6 "foo" - PARAM_LIST@6..8 - L_PAREN@6..7 "(" - R_PAREN@7..8 ")" - WHITESPACE@8..9 " " - BLOCK_EXPR@9..11 - L_CURLY@9..10 "{" - R_CURLY@10..11 "}" -"# - .trim() - ); - - let (analysis, file_id) = single_file( - r#" -fn test() { - assert!(" - fn foo() { - } - ", ""); -}"# - .trim(), - ); - let syn = analysis.syntax_tree(file_id, None).unwrap(); - - assert_eq_text!( - 
syn.trim(), - r#" -SOURCE_FILE@0..60 - FN@0..60 - FN_KW@0..2 "fn" - WHITESPACE@2..3 " " - NAME@3..7 - IDENT@3..7 "test" - PARAM_LIST@7..9 - L_PAREN@7..8 "(" - R_PAREN@8..9 ")" - WHITESPACE@9..10 " " - BLOCK_EXPR@10..60 - L_CURLY@10..11 "{" - WHITESPACE@11..16 "\n " - EXPR_STMT@16..58 - MACRO_CALL@16..57 - PATH@16..22 - PATH_SEGMENT@16..22 - NAME_REF@16..22 - IDENT@16..22 "assert" - BANG@22..23 "!" - TOKEN_TREE@23..57 - L_PAREN@23..24 "(" - STRING@24..52 "\"\n fn foo() {\n ..." - COMMA@52..53 "," - WHITESPACE@53..54 " " - STRING@54..56 "\"\"" - R_PAREN@56..57 ")" - SEMICOLON@57..58 ";" - WHITESPACE@58..59 "\n" - R_CURLY@59..60 "}" -"# - .trim() - ); - } - - #[test] - fn test_syntax_tree_with_range() { - let (analysis, range) = analysis_and_range(r#"<|>fn foo() {}<|>"#.trim()); - let syn = analysis.syntax_tree(range.file_id, Some(range.range)).unwrap(); - - assert_eq_text!( - syn.trim(), - r#" -FN@0..11 - FN_KW@0..2 "fn" - WHITESPACE@2..3 " " - NAME@3..6 - IDENT@3..6 "foo" - PARAM_LIST@6..8 - L_PAREN@6..7 "(" - R_PAREN@7..8 ")" - WHITESPACE@8..9 " " - BLOCK_EXPR@9..11 - L_CURLY@9..10 "{" - R_CURLY@10..11 "}" -"# - .trim() - ); - - let (analysis, range) = analysis_and_range( - r#"fn test() { - <|>assert!(" - fn foo() { - } - ", "");<|> -}"# - .trim(), - ); - let syn = analysis.syntax_tree(range.file_id, Some(range.range)).unwrap(); - - assert_eq_text!( - syn.trim(), - r#" -EXPR_STMT@16..58 - MACRO_CALL@16..57 - PATH@16..22 - PATH_SEGMENT@16..22 - NAME_REF@16..22 - IDENT@16..22 "assert" - BANG@22..23 "!" - TOKEN_TREE@23..57 - L_PAREN@23..24 "(" - STRING@24..52 "\"\n fn foo() {\n ..." 
- COMMA@52..53 "," - WHITESPACE@53..54 " " - STRING@54..56 "\"\"" - R_PAREN@56..57 ")" - SEMICOLON@57..58 ";" -"# - .trim() - ); - } - - #[test] - fn test_syntax_tree_inside_string() { - let (analysis, range) = analysis_and_range( - r#"fn test() { - assert!(" -<|>fn foo() { -}<|> -fn bar() { -} - ", ""); -}"# - .trim(), - ); - let syn = analysis.syntax_tree(range.file_id, Some(range.range)).unwrap(); - assert_eq_text!( - syn.trim(), - r#" -SOURCE_FILE@0..12 - FN@0..12 - FN_KW@0..2 "fn" - WHITESPACE@2..3 " " - NAME@3..6 - IDENT@3..6 "foo" - PARAM_LIST@6..8 - L_PAREN@6..7 "(" - R_PAREN@7..8 ")" - WHITESPACE@8..9 " " - BLOCK_EXPR@9..12 - L_CURLY@9..10 "{" - WHITESPACE@10..11 "\n" - R_CURLY@11..12 "}" -"# - .trim() - ); - - // With a raw string - let (analysis, range) = analysis_and_range( - r###"fn test() { - assert!(r#" -<|>fn foo() { -}<|> -fn bar() { -} - "#, ""); -}"### - .trim(), - ); - let syn = analysis.syntax_tree(range.file_id, Some(range.range)).unwrap(); - assert_eq_text!( - syn.trim(), - r#" -SOURCE_FILE@0..12 - FN@0..12 - FN_KW@0..2 "fn" - WHITESPACE@2..3 " " - NAME@3..6 - IDENT@3..6 "foo" - PARAM_LIST@6..8 - L_PAREN@6..7 "(" - R_PAREN@7..8 ")" - WHITESPACE@8..9 " " - BLOCK_EXPR@9..12 - L_CURLY@9..10 "{" - WHITESPACE@10..11 "\n" - R_CURLY@11..12 "}" -"# - .trim() - ); - - // With a raw string - let (analysis, range) = analysis_and_range( - r###"fn test() { - assert!(r<|>#" -fn foo() { -} -fn bar() { -}"<|>#, ""); -}"### - .trim(), - ); - let syn = analysis.syntax_tree(range.file_id, Some(range.range)).unwrap(); - assert_eq_text!( - syn.trim(), - r#" -SOURCE_FILE@0..25 - FN@0..12 - FN_KW@0..2 "fn" - WHITESPACE@2..3 " " - NAME@3..6 - IDENT@3..6 "foo" - PARAM_LIST@6..8 - L_PAREN@6..7 "(" - R_PAREN@7..8 ")" - WHITESPACE@8..9 " " - BLOCK_EXPR@9..12 - L_CURLY@9..10 "{" - WHITESPACE@10..11 "\n" - R_CURLY@11..12 "}" - WHITESPACE@12..13 "\n" - FN@13..25 - FN_KW@13..15 "fn" - WHITESPACE@15..16 " " - NAME@16..19 - IDENT@16..19 "bar" - PARAM_LIST@19..21 - 
L_PAREN@19..20 "(" - R_PAREN@20..21 ")" - WHITESPACE@21..22 " " - BLOCK_EXPR@22..25 - L_CURLY@22..23 "{" - WHITESPACE@23..24 "\n" - R_CURLY@24..25 "}" -"# - .trim() - ); - } -} diff --git a/crates/ra_ide/src/typing.rs b/crates/ra_ide/src/typing.rs deleted file mode 100644 index d3ce744b44..0000000000 --- a/crates/ra_ide/src/typing.rs +++ /dev/null @@ -1,365 +0,0 @@ -//! This module handles auto-magic editing actions applied together with users -//! edits. For example, if the user typed -//! -//! ```text -//! foo -//! .bar() -//! .baz() -//! | // <- cursor is here -//! ``` -//! -//! and types `.` next, we want to indent the dot. -//! -//! Language server executes such typing assists synchronously. That is, they -//! block user's typing and should be pretty fast for this reason! - -mod on_enter; - -use ra_db::{FilePosition, SourceDatabase}; -use ra_fmt::leading_indent; -use ra_ide_db::{source_change::SourceFileEdit, RootDatabase}; -use ra_syntax::{ - algo::find_node_at_offset, - ast::{self, AstToken}, - AstNode, SourceFile, - SyntaxKind::{FIELD_EXPR, METHOD_CALL_EXPR}, - TextRange, TextSize, -}; - -use ra_text_edit::TextEdit; - -use crate::SourceChange; - -pub(crate) use on_enter::on_enter; - -pub(crate) const TRIGGER_CHARS: &str = ".=>"; - -// Feature: On Typing Assists -// -// Some features trigger on typing certain characters: -// -// - typing `let =` tries to smartly add `;` if `=` is followed by an existing expression -// - typing `.` in a chain method call auto-indents -pub(crate) fn on_char_typed( - db: &RootDatabase, - position: FilePosition, - char_typed: char, -) -> Option { - assert!(TRIGGER_CHARS.contains(char_typed)); - let file = &db.parse(position.file_id).tree(); - assert_eq!(file.syntax().text().char_at(position.offset), Some(char_typed)); - let edit = on_char_typed_inner(file, position.offset, char_typed)?; - Some(SourceFileEdit { file_id: position.file_id, edit }.into()) -} - -fn on_char_typed_inner(file: &SourceFile, offset: TextSize, char_typed: 
char) -> Option { - assert!(TRIGGER_CHARS.contains(char_typed)); - match char_typed { - '.' => on_dot_typed(file, offset), - '=' => on_eq_typed(file, offset), - '>' => on_arrow_typed(file, offset), - _ => unreachable!(), - } -} - -/// Returns an edit which should be applied after `=` was typed. Primarily, -/// this works when adding `let =`. -// FIXME: use a snippet completion instead of this hack here. -fn on_eq_typed(file: &SourceFile, offset: TextSize) -> Option { - assert_eq!(file.syntax().text().char_at(offset), Some('=')); - let let_stmt: ast::LetStmt = find_node_at_offset(file.syntax(), offset)?; - if let_stmt.semicolon_token().is_some() { - return None; - } - if let Some(expr) = let_stmt.initializer() { - let expr_range = expr.syntax().text_range(); - if expr_range.contains(offset) && offset != expr_range.start() { - return None; - } - if file.syntax().text().slice(offset..expr_range.start()).contains_char('\n') { - return None; - } - } else { - return None; - } - let offset = let_stmt.syntax().text_range().end(); - Some(TextEdit::insert(offset, ";".to_string())) -} - -/// Returns an edit which should be applied when a dot ('.') is typed on a blank line, indenting the line appropriately. -fn on_dot_typed(file: &SourceFile, offset: TextSize) -> Option { - assert_eq!(file.syntax().text().char_at(offset), Some('.')); - let whitespace = - file.syntax().token_at_offset(offset).left_biased().and_then(ast::Whitespace::cast)?; - - let current_indent = { - let text = whitespace.text(); - let newline = text.rfind('\n')?; - &text[newline + 1..] 
- }; - let current_indent_len = TextSize::of(current_indent); - - let parent = whitespace.syntax().parent(); - // Make sure dot is a part of call chain - if !matches!(parent.kind(), FIELD_EXPR | METHOD_CALL_EXPR) { - return None; - } - let prev_indent = leading_indent(&parent)?; - let target_indent = format!(" {}", prev_indent); - let target_indent_len = TextSize::of(&target_indent); - if current_indent_len == target_indent_len { - return None; - } - - Some(TextEdit::replace(TextRange::new(offset - current_indent_len, offset), target_indent)) -} - -/// Adds a space after an arrow when `fn foo() { ... }` is turned into `fn foo() -> { ... }` -fn on_arrow_typed(file: &SourceFile, offset: TextSize) -> Option { - let file_text = file.syntax().text(); - assert_eq!(file_text.char_at(offset), Some('>')); - let after_arrow = offset + TextSize::of('>'); - if file_text.char_at(after_arrow) != Some('{') { - return None; - } - if find_node_at_offset::(file.syntax(), offset).is_none() { - return None; - } - - Some(TextEdit::insert(after_arrow, " ".to_string())) -} - -#[cfg(test)] -mod tests { - use test_utils::{assert_eq_text, extract_offset}; - - use super::*; - - fn do_type_char(char_typed: char, before: &str) -> Option { - let (offset, before) = extract_offset(before); - let edit = TextEdit::insert(offset, char_typed.to_string()); - let mut before = before.to_string(); - edit.apply(&mut before); - let parse = SourceFile::parse(&before); - on_char_typed_inner(&parse.tree(), offset, char_typed).map(|it| { - it.apply(&mut before); - before.to_string() - }) - } - - fn type_char(char_typed: char, ra_fixture_before: &str, ra_fixture_after: &str) { - let actual = do_type_char(char_typed, ra_fixture_before) - .unwrap_or_else(|| panic!("typing `{}` did nothing", char_typed)); - - assert_eq_text!(ra_fixture_after, &actual); - } - - fn type_char_noop(char_typed: char, before: &str) { - let file_change = do_type_char(char_typed, before); - assert!(file_change.is_none()) - } - - #[test] - 
fn test_on_eq_typed() { - // do_check(r" - // fn foo() { - // let foo =<|> - // } - // ", r" - // fn foo() { - // let foo =; - // } - // "); - type_char( - '=', - r" -fn foo() { - let foo <|> 1 + 1 -} -", - r" -fn foo() { - let foo = 1 + 1; -} -", - ); - // do_check(r" - // fn foo() { - // let foo =<|> - // let bar = 1; - // } - // ", r" - // fn foo() { - // let foo =; - // let bar = 1; - // } - // "); - } - - #[test] - fn indents_new_chain_call() { - type_char( - '.', - r" - fn main() { - xs.foo() - <|> - } - ", - r" - fn main() { - xs.foo() - . - } - ", - ); - type_char_noop( - '.', - r" - fn main() { - xs.foo() - <|> - } - ", - ) - } - - #[test] - fn indents_new_chain_call_with_semi() { - type_char( - '.', - r" - fn main() { - xs.foo() - <|>; - } - ", - r" - fn main() { - xs.foo() - .; - } - ", - ); - type_char_noop( - '.', - r" - fn main() { - xs.foo() - <|>; - } - ", - ) - } - - #[test] - fn indents_new_chain_call_with_let() { - type_char( - '.', - r#" -fn main() { - let _ = foo - <|> - bar() -} -"#, - r#" -fn main() { - let _ = foo - . - bar() -} -"#, - ); - } - - #[test] - fn indents_continued_chain_call() { - type_char( - '.', - r" - fn main() { - xs.foo() - .first() - <|> - } - ", - r" - fn main() { - xs.foo() - .first() - . - } - ", - ); - type_char_noop( - '.', - r" - fn main() { - xs.foo() - .first() - <|> - } - ", - ); - } - - #[test] - fn indents_middle_of_chain_call() { - type_char( - '.', - r" - fn source_impl() { - let var = enum_defvariant_list().unwrap() - <|> - .nth(92) - .unwrap(); - } - ", - r" - fn source_impl() { - let var = enum_defvariant_list().unwrap() - . 
- .nth(92) - .unwrap(); - } - ", - ); - type_char_noop( - '.', - r" - fn source_impl() { - let var = enum_defvariant_list().unwrap() - <|> - .nth(92) - .unwrap(); - } - ", - ); - } - - #[test] - fn dont_indent_freestanding_dot() { - type_char_noop( - '.', - r" - fn main() { - <|> - } - ", - ); - type_char_noop( - '.', - r" - fn main() { - <|> - } - ", - ); - } - - #[test] - fn adds_space_after_return_type() { - type_char('>', "fn foo() -<|>{ 92 }", "fn foo() -> { 92 }") - } -} diff --git a/crates/ra_ide/src/typing/on_enter.rs b/crates/ra_ide/src/typing/on_enter.rs deleted file mode 100644 index 143b1ae413..0000000000 --- a/crates/ra_ide/src/typing/on_enter.rs +++ /dev/null @@ -1,256 +0,0 @@ -//! Handles the `Enter` key press. At the momently, this only continues -//! comments, but should handle indent some time in the future as well. - -use ra_db::{FilePosition, SourceDatabase}; -use ra_ide_db::RootDatabase; -use ra_syntax::{ - ast::{self, AstToken}, - AstNode, SmolStr, SourceFile, - SyntaxKind::*, - SyntaxToken, TextRange, TextSize, TokenAtOffset, -}; -use ra_text_edit::TextEdit; -use test_utils::mark; - -// Feature: On Enter -// -// rust-analyzer can override kbd:[Enter] key to make it smarter: -// -// - kbd:[Enter] inside triple-slash comments automatically inserts `///` -// - kbd:[Enter] in the middle or after a trailing space in `//` inserts `//` -// -// This action needs to be assigned to shortcut explicitly. 
-// -// VS Code:: -// -// Add the following to `keybindings.json`: -// [source,json] -// ---- -// { -// "key": "Enter", -// "command": "rust-analyzer.onEnter", -// "when": "editorTextFocus && !suggestWidgetVisible && editorLangId == rust" -// } -// ---- -pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option { - let parse = db.parse(position.file_id); - let file = parse.tree(); - let comment = file - .syntax() - .token_at_offset(position.offset) - .left_biased() - .and_then(ast::Comment::cast)?; - - if comment.kind().shape.is_block() { - return None; - } - - let prefix = comment.prefix(); - let comment_range = comment.syntax().text_range(); - if position.offset < comment_range.start() + TextSize::of(prefix) { - return None; - } - - let mut remove_last_space = false; - // Continuing single-line non-doc comments (like this one :) ) is annoying - if prefix == "//" && comment_range.end() == position.offset { - if comment.text().ends_with(' ') { - mark::hit!(continues_end_of_line_comment_with_space); - remove_last_space = true; - } else if !followed_by_comment(&comment) { - return None; - } - } - - let indent = node_indent(&file, comment.syntax())?; - let inserted = format!("\n{}{} $0", indent, prefix); - let delete = if remove_last_space { - TextRange::new(position.offset - TextSize::of(' '), position.offset) - } else { - TextRange::empty(position.offset) - }; - let edit = TextEdit::replace(delete, inserted); - Some(edit) -} - -fn followed_by_comment(comment: &ast::Comment) -> bool { - let ws = match comment.syntax().next_token().and_then(ast::Whitespace::cast) { - Some(it) => it, - None => return false, - }; - if ws.spans_multiple_lines() { - return false; - } - ws.syntax().next_token().and_then(ast::Comment::cast).is_some() -} - -fn node_indent(file: &SourceFile, token: &SyntaxToken) -> Option { - let ws = match file.syntax().token_at_offset(token.text_range().start()) { - TokenAtOffset::Between(l, r) => { - assert!(r == *token); - l - } - 
TokenAtOffset::Single(n) => { - assert!(n == *token); - return Some("".into()); - } - TokenAtOffset::None => unreachable!(), - }; - if ws.kind() != WHITESPACE { - return None; - } - let text = ws.text(); - let pos = text.rfind('\n').map(|it| it + 1).unwrap_or(0); - Some(text[pos..].into()) -} - -#[cfg(test)] -mod tests { - use stdx::trim_indent; - use test_utils::{assert_eq_text, mark}; - - use crate::mock_analysis::analysis_and_position; - - fn apply_on_enter(before: &str) -> Option { - let (analysis, position) = analysis_and_position(&before); - let result = analysis.on_enter(position).unwrap()?; - - let mut actual = analysis.file_text(position.file_id).unwrap().to_string(); - result.apply(&mut actual); - Some(actual) - } - - fn do_check(ra_fixture_before: &str, ra_fixture_after: &str) { - let ra_fixture_after = &trim_indent(ra_fixture_after); - let actual = apply_on_enter(ra_fixture_before).unwrap(); - assert_eq_text!(ra_fixture_after, &actual); - } - - fn do_check_noop(ra_fixture_text: &str) { - assert!(apply_on_enter(ra_fixture_text).is_none()) - } - - #[test] - fn continues_doc_comment() { - do_check( - r" -/// Some docs<|> -fn foo() { -} -", - r" -/// Some docs -/// $0 -fn foo() { -} -", - ); - - do_check( - r" -impl S { - /// Some<|> docs. - fn foo() {} -} -", - r" -impl S { - /// Some - /// $0 docs. - fn foo() {} -} -", - ); - - do_check( - r" -///<|> Some docs -fn foo() { -} -", - r" -/// -/// $0 Some docs -fn foo() { -} -", - ); - } - - #[test] - fn does_not_continue_before_doc_comment() { - do_check_noop(r"<|>//! 
docz"); - } - - #[test] - fn continues_code_comment_in_the_middle_of_line() { - do_check( - r" -fn main() { - // Fix<|> me - let x = 1 + 1; -} -", - r" -fn main() { - // Fix - // $0 me - let x = 1 + 1; -} -", - ); - } - - #[test] - fn continues_code_comment_in_the_middle_several_lines() { - do_check( - r" -fn main() { - // Fix<|> - // me - let x = 1 + 1; -} -", - r" -fn main() { - // Fix - // $0 - // me - let x = 1 + 1; -} -", - ); - } - - #[test] - fn does_not_continue_end_of_line_comment() { - do_check_noop( - r" -fn main() { - // Fix me<|> - let x = 1 + 1; -} -", - ); - } - - #[test] - fn continues_end_of_line_comment_with_space() { - mark::check!(continues_end_of_line_comment_with_space); - do_check( - r#" -fn main() { - // Fix me <|> - let x = 1 + 1; -} -"#, - r#" -fn main() { - // Fix me - // $0 - let x = 1 + 1; -} -"#, - ); - } -} diff --git a/crates/ra_ide/test_data/highlight_unsafe.html b/crates/ra_ide/test_data/highlight_unsafe.html deleted file mode 100644 index 79409fe816..0000000000 --- a/crates/ra_ide/test_data/highlight_unsafe.html +++ /dev/null @@ -1,72 +0,0 @@ - - -
unsafe fn unsafe_fn() {}
-
-union Union {
-    a: u32,
-    b: f32,
-}
-
-struct HasUnsafeFn;
-
-impl HasUnsafeFn {
-    unsafe fn unsafe_method(&self) {}
-}
-
-struct TypeForStaticMut {
-    a: u8
-}
-
-static mut global_mut: TypeForStaticMut = TypeForStaticMut { a: 0 };
-
-fn main() {
-    let x = &5 as *const usize;
-    let u = Union { b: 0 };
-    unsafe {
-        unsafe_fn();
-        let b = u.b;
-        match u {
-            Union { b: 0 } => (),
-            Union { a } => (),
-        }
-        HasUnsafeFn.unsafe_method();
-        let y = *(x);
-        let z = -x;
-        let a = global_mut.a;
-    }
-}
\ No newline at end of file diff --git a/crates/ra_ide_db/Cargo.toml b/crates/ra_ide_db/Cargo.toml deleted file mode 100644 index 2716a38cc9..0000000000 --- a/crates/ra_ide_db/Cargo.toml +++ /dev/null @@ -1,32 +0,0 @@ -[package] -edition = "2018" -name = "ra_ide_db" -version = "0.1.0" -authors = ["rust-analyzer developers"] -license = "MIT OR Apache-2.0" - -[lib] -doctest = false - -[features] -wasm = [] - -[dependencies] -log = "0.4.8" -rayon = "1.3.0" -fst = { version = "0.4", default-features = false } -rustc-hash = "1.1.0" -once_cell = "1.3.1" -either = "1.5.3" - -stdx = { path = "../stdx" } - -ra_syntax = { path = "../ra_syntax" } -ra_text_edit = { path = "../ra_text_edit" } -ra_db = { path = "../ra_db" } -ra_prof = { path = "../ra_prof" } -test_utils = { path = "../test_utils" } - -# ra_ide should depend only on the top-level `hir` package. if you need -# something from some `hir_xxx` subpackage, reexport the API via `hir`. -hir = { path = "../ra_hir", package = "ra_hir" } diff --git a/crates/ra_ide_db/src/change.rs b/crates/ra_ide_db/src/change.rs deleted file mode 100644 index b13df8b855..0000000000 --- a/crates/ra_ide_db/src/change.rs +++ /dev/null @@ -1,318 +0,0 @@ -//! Defines a unit of change that can applied to a state of IDE to get the next -//! state. Changes are transactional. 
- -use std::{fmt, sync::Arc, time}; - -use ra_db::{ - salsa::{Database, Durability, SweepStrategy}, - CrateGraph, FileId, SourceDatabase, SourceDatabaseExt, SourceRoot, SourceRootId, -}; -use ra_prof::{memory_usage, profile, Bytes}; -use rustc_hash::FxHashSet; - -use crate::{symbol_index::SymbolsDatabase, RootDatabase}; - -#[derive(Default)] -pub struct AnalysisChange { - roots: Option>, - files_changed: Vec<(FileId, Option>)>, - crate_graph: Option, -} - -impl fmt::Debug for AnalysisChange { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - let mut d = fmt.debug_struct("AnalysisChange"); - if let Some(roots) = &self.roots { - d.field("roots", roots); - } - if !self.files_changed.is_empty() { - d.field("files_changed", &self.files_changed.len()); - } - if self.crate_graph.is_some() { - d.field("crate_graph", &self.crate_graph); - } - d.finish() - } -} - -impl AnalysisChange { - pub fn new() -> AnalysisChange { - AnalysisChange::default() - } - - pub fn set_roots(&mut self, roots: Vec) { - self.roots = Some(roots); - } - - pub fn change_file(&mut self, file_id: FileId, new_text: Option>) { - self.files_changed.push((file_id, new_text)) - } - - pub fn set_crate_graph(&mut self, graph: CrateGraph) { - self.crate_graph = Some(graph); - } -} - -#[derive(Debug)] -struct AddFile { - file_id: FileId, - path: String, - text: Arc, -} - -#[derive(Debug)] -struct RemoveFile { - file_id: FileId, - path: String, -} - -#[derive(Default)] -struct RootChange { - added: Vec, - removed: Vec, -} - -impl fmt::Debug for RootChange { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - fmt.debug_struct("AnalysisChange") - .field("added", &self.added.len()) - .field("removed", &self.removed.len()) - .finish() - } -} - -const GC_COOLDOWN: time::Duration = time::Duration::from_millis(100); - -impl RootDatabase { - pub fn request_cancellation(&mut self) { - let _p = profile("RootDatabase::request_cancellation"); - self.salsa_runtime_mut().synthetic_write(Durability::LOW); 
- } - - pub fn apply_change(&mut self, change: AnalysisChange) { - let _p = profile("RootDatabase::apply_change"); - self.request_cancellation(); - log::info!("apply_change {:?}", change); - if let Some(roots) = change.roots { - let mut local_roots = FxHashSet::default(); - let mut library_roots = FxHashSet::default(); - for (idx, root) in roots.into_iter().enumerate() { - let root_id = SourceRootId(idx as u32); - let durability = durability(&root); - if root.is_library { - library_roots.insert(root_id); - } else { - local_roots.insert(root_id); - } - for file_id in root.iter() { - self.set_file_source_root_with_durability(file_id, root_id, durability); - } - self.set_source_root_with_durability(root_id, Arc::new(root), durability); - } - self.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH); - self.set_library_roots_with_durability(Arc::new(library_roots), Durability::HIGH); - } - - for (file_id, text) in change.files_changed { - let source_root_id = self.file_source_root(file_id); - let source_root = self.source_root(source_root_id); - let durability = durability(&source_root); - // XXX: can't actually remove the file, just reset the text - let text = text.unwrap_or_default(); - self.set_file_text_with_durability(file_id, text, durability) - } - if let Some(crate_graph) = change.crate_graph { - self.set_crate_graph_with_durability(Arc::new(crate_graph), Durability::HIGH) - } - } - - pub fn maybe_collect_garbage(&mut self) { - if cfg!(feature = "wasm") { - return; - } - - if self.last_gc_check.elapsed() > GC_COOLDOWN { - self.last_gc_check = crate::wasm_shims::Instant::now(); - } - } - - pub fn collect_garbage(&mut self) { - if cfg!(feature = "wasm") { - return; - } - - let _p = profile("RootDatabase::collect_garbage"); - self.last_gc = crate::wasm_shims::Instant::now(); - - let sweep = SweepStrategy::default().discard_values().sweep_all_revisions(); - - ra_db::ParseQuery.in_db(self).sweep(sweep); - 
hir::db::ParseMacroQuery.in_db(self).sweep(sweep); - - // Macros do take significant space, but less then the syntax trees - // self.query(hir::db::MacroDefQuery).sweep(sweep); - // self.query(hir::db::MacroArgTextQuery).sweep(sweep); - // self.query(hir::db::MacroExpandQuery).sweep(sweep); - - hir::db::AstIdMapQuery.in_db(self).sweep(sweep); - - hir::db::BodyWithSourceMapQuery.in_db(self).sweep(sweep); - - hir::db::ExprScopesQuery.in_db(self).sweep(sweep); - hir::db::InferQueryQuery.in_db(self).sweep(sweep); - hir::db::BodyQuery.in_db(self).sweep(sweep); - } - - // Feature: Memory Usage - // - // Clears rust-analyzer's internal database and prints memory usage statistics. - // - // |=== - // | Editor | Action Name - // - // | VS Code | **Rust Analyzer: Memory Usage (Clears Database)** - // |=== - pub fn per_query_memory_usage(&mut self) -> Vec<(String, Bytes)> { - let mut acc: Vec<(String, Bytes)> = vec![]; - let sweep = SweepStrategy::default().discard_values().sweep_all_revisions(); - macro_rules! 
sweep_each_query { - ($($q:path)*) => {$( - let before = memory_usage().allocated; - $q.in_db(self).sweep(sweep); - let after = memory_usage().allocated; - let q: $q = Default::default(); - let name = format!("{:?}", q); - acc.push((name, before - after)); - - let before = memory_usage().allocated; - $q.in_db(self).sweep(sweep.discard_everything()); - let after = memory_usage().allocated; - let q: $q = Default::default(); - let name = format!("{:?} (deps)", q); - acc.push((name, before - after)); - - let before = memory_usage().allocated; - $q.in_db(self).purge(); - let after = memory_usage().allocated; - let q: $q = Default::default(); - let name = format!("{:?} (purge)", q); - acc.push((name, before - after)); - )*} - } - sweep_each_query![ - // SourceDatabase - ra_db::ParseQuery - ra_db::CrateGraphQuery - - // SourceDatabaseExt - ra_db::FileTextQuery - ra_db::FileSourceRootQuery - ra_db::SourceRootQuery - ra_db::SourceRootCratesQuery - - // AstDatabase - hir::db::AstIdMapQuery - hir::db::MacroArgTextQuery - hir::db::MacroDefQuery - hir::db::ParseMacroQuery - hir::db::MacroExpandQuery - - // DefDatabase - hir::db::ItemTreeQuery - hir::db::CrateDefMapQueryQuery - hir::db::StructDataQuery - hir::db::UnionDataQuery - hir::db::EnumDataQuery - hir::db::ImplDataQuery - hir::db::TraitDataQuery - hir::db::TypeAliasDataQuery - hir::db::FunctionDataQuery - hir::db::ConstDataQuery - hir::db::StaticDataQuery - hir::db::BodyWithSourceMapQuery - hir::db::BodyQuery - hir::db::ExprScopesQuery - hir::db::GenericParamsQuery - hir::db::AttrsQuery - hir::db::ModuleLangItemsQuery - hir::db::CrateLangItemsQuery - hir::db::LangItemQuery - hir::db::DocumentationQuery - hir::db::ImportMapQuery - - // HirDatabase - hir::db::InferQueryQuery - hir::db::TyQuery - hir::db::ValueTyQuery - hir::db::ImplSelfTyQuery - hir::db::ImplTraitQuery - hir::db::FieldTypesQuery - hir::db::CallableItemSignatureQuery - hir::db::GenericPredicatesForParamQuery - hir::db::GenericPredicatesQuery - 
hir::db::GenericDefaultsQuery - hir::db::InherentImplsInCrateQuery - hir::db::TraitImplsInCrateQuery - hir::db::TraitImplsInDepsQuery - hir::db::AssociatedTyDataQuery - hir::db::AssociatedTyDataQuery - hir::db::TraitDatumQuery - hir::db::StructDatumQuery - hir::db::ImplDatumQuery - hir::db::FnDefDatumQuery - hir::db::ReturnTypeImplTraitsQuery - hir::db::InternCallableDefQuery - hir::db::InternTypeParamIdQuery - hir::db::InternImplTraitIdQuery - hir::db::InternClosureQuery - hir::db::AssociatedTyValueQuery - hir::db::TraitSolveQuery - - // SymbolsDatabase - crate::symbol_index::FileSymbolsQuery - crate::symbol_index::LibrarySymbolsQuery - crate::symbol_index::LocalRootsQuery - crate::symbol_index::LibraryRootsQuery - - // LineIndexDatabase - crate::LineIndexQuery - ]; - - // To collect interned data, we need to bump the revision counter by performing a synthetic - // write. - // We do this after collecting the non-interned queries to correctly attribute memory used - // by interned data. - self.salsa_runtime_mut().synthetic_write(Durability::HIGH); - - sweep_each_query![ - // AstDatabase - hir::db::InternMacroQuery - hir::db::InternEagerExpansionQuery - - // InternDatabase - hir::db::InternFunctionQuery - hir::db::InternStructQuery - hir::db::InternUnionQuery - hir::db::InternEnumQuery - hir::db::InternConstQuery - hir::db::InternStaticQuery - hir::db::InternTraitQuery - hir::db::InternTypeAliasQuery - hir::db::InternImplQuery - - // HirDatabase - hir::db::InternTypeParamIdQuery - ]; - - acc.sort_by_key(|it| std::cmp::Reverse(it.1)); - acc - } -} - -fn durability(source_root: &SourceRoot) -> Durability { - if source_root.is_library { - Durability::HIGH - } else { - Durability::LOW - } -} diff --git a/crates/ra_ide_db/src/defs.rs b/crates/ra_ide_db/src/defs.rs deleted file mode 100644 index b51000b03f..0000000000 --- a/crates/ra_ide_db/src/defs.rs +++ /dev/null @@ -1,333 +0,0 @@ -//! 
`NameDefinition` keeps information about the element we want to search references for. -//! The element is represented by `NameKind`. It's located inside some `container` and -//! has a `visibility`, which defines a search scope. -//! Note that the reference search is possible for not all of the classified items. - -// FIXME: this badly needs rename/rewrite (matklad, 2020-02-06). - -use hir::{ - Field, HasVisibility, ImplDef, Local, MacroDef, Module, ModuleDef, Name, PathResolution, - Semantics, TypeParam, Visibility, -}; -use ra_prof::profile; -use ra_syntax::{ - ast::{self, AstNode}, - match_ast, SyntaxNode, -}; - -use crate::RootDatabase; - -// FIXME: a more precise name would probably be `Symbol`? -#[derive(Debug, PartialEq, Eq, Copy, Clone)] -pub enum Definition { - Macro(MacroDef), - Field(Field), - ModuleDef(ModuleDef), - SelfType(ImplDef), - Local(Local), - TypeParam(TypeParam), -} - -impl Definition { - pub fn module(&self, db: &RootDatabase) -> Option { - match self { - Definition::Macro(it) => it.module(db), - Definition::Field(it) => Some(it.parent_def(db).module(db)), - Definition::ModuleDef(it) => it.module(db), - Definition::SelfType(it) => Some(it.module(db)), - Definition::Local(it) => Some(it.module(db)), - Definition::TypeParam(it) => Some(it.module(db)), - } - } - - pub fn visibility(&self, db: &RootDatabase) -> Option { - match self { - Definition::Macro(_) => None, - Definition::Field(sf) => Some(sf.visibility(db)), - Definition::ModuleDef(def) => def.definition_visibility(db), - Definition::SelfType(_) => None, - Definition::Local(_) => None, - Definition::TypeParam(_) => None, - } - } - - pub fn name(&self, db: &RootDatabase) -> Option { - let name = match self { - Definition::Macro(it) => it.name(db)?, - Definition::Field(it) => it.name(db), - Definition::ModuleDef(def) => match def { - hir::ModuleDef::Module(it) => it.name(db)?, - hir::ModuleDef::Function(it) => it.name(db), - hir::ModuleDef::Adt(def) => match def { - hir::Adt::Struct(it) 
=> it.name(db), - hir::Adt::Union(it) => it.name(db), - hir::Adt::Enum(it) => it.name(db), - }, - hir::ModuleDef::EnumVariant(it) => it.name(db), - hir::ModuleDef::Const(it) => it.name(db)?, - hir::ModuleDef::Static(it) => it.name(db)?, - hir::ModuleDef::Trait(it) => it.name(db), - hir::ModuleDef::TypeAlias(it) => it.name(db), - hir::ModuleDef::BuiltinType(_) => return None, - }, - Definition::SelfType(_) => return None, - Definition::Local(it) => it.name(db)?, - Definition::TypeParam(it) => it.name(db), - }; - Some(name) - } -} - -#[derive(Debug)] -pub enum NameClass { - Definition(Definition), - /// `None` in `if let None = Some(82) {}` - ConstReference(Definition), - FieldShorthand { - local: Local, - field: Definition, - }, -} - -impl NameClass { - pub fn into_definition(self) -> Option { - match self { - NameClass::Definition(it) => Some(it), - NameClass::ConstReference(_) => None, - NameClass::FieldShorthand { local, field: _ } => Some(Definition::Local(local)), - } - } - - pub fn definition(self) -> Definition { - match self { - NameClass::Definition(it) | NameClass::ConstReference(it) => it, - NameClass::FieldShorthand { local: _, field } => field, - } - } -} - -pub fn classify_name(sema: &Semantics, name: &ast::Name) -> Option { - let _p = profile("classify_name"); - - let parent = name.syntax().parent()?; - - if let Some(bind_pat) = ast::IdentPat::cast(parent.clone()) { - if let Some(def) = sema.resolve_bind_pat_to_const(&bind_pat) { - return Some(NameClass::ConstReference(Definition::ModuleDef(def))); - } - } - - match_ast! { - match parent { - ast::Rename(it) => { - let use_tree = it.syntax().parent().and_then(ast::UseTree::cast)?; - let path = use_tree.path()?; - let path_segment = path.segment()?; - let name_ref_class = path_segment - .name_ref() - // The rename might be from a `self` token, so fallback to the name higher - // in the use tree. 
- .or_else(||{ - if path_segment.self_token().is_none() { - return None; - } - - let use_tree = use_tree - .syntax() - .parent() - .as_ref() - // Skip over UseTreeList - .and_then(SyntaxNode::parent) - .and_then(ast::UseTree::cast)?; - let path = use_tree.path()?; - let path_segment = path.segment()?; - path_segment.name_ref() - }) - .and_then(|name_ref| classify_name_ref(sema, &name_ref))?; - - Some(NameClass::Definition(name_ref_class.definition())) - }, - ast::IdentPat(it) => { - let local = sema.to_def(&it)?; - - if let Some(record_field_pat) = it.syntax().parent().and_then(ast::RecordPatField::cast) { - if record_field_pat.name_ref().is_none() { - if let Some(field) = sema.resolve_record_field_pat(&record_field_pat) { - let field = Definition::Field(field); - return Some(NameClass::FieldShorthand { local, field }); - } - } - } - - Some(NameClass::Definition(Definition::Local(local))) - }, - ast::RecordField(it) => { - let field: hir::Field = sema.to_def(&it)?; - Some(NameClass::Definition(Definition::Field(field))) - }, - ast::Module(it) => { - let def = sema.to_def(&it)?; - Some(NameClass::Definition(Definition::ModuleDef(def.into()))) - }, - ast::Struct(it) => { - let def: hir::Struct = sema.to_def(&it)?; - Some(NameClass::Definition(Definition::ModuleDef(def.into()))) - }, - ast::Union(it) => { - let def: hir::Union = sema.to_def(&it)?; - Some(NameClass::Definition(Definition::ModuleDef(def.into()))) - }, - ast::Enum(it) => { - let def: hir::Enum = sema.to_def(&it)?; - Some(NameClass::Definition(Definition::ModuleDef(def.into()))) - }, - ast::Trait(it) => { - let def: hir::Trait = sema.to_def(&it)?; - Some(NameClass::Definition(Definition::ModuleDef(def.into()))) - }, - ast::Static(it) => { - let def: hir::Static = sema.to_def(&it)?; - Some(NameClass::Definition(Definition::ModuleDef(def.into()))) - }, - ast::Variant(it) => { - let def: hir::EnumVariant = sema.to_def(&it)?; - Some(NameClass::Definition(Definition::ModuleDef(def.into()))) - }, - ast::Fn(it) 
=> { - let def: hir::Function = sema.to_def(&it)?; - Some(NameClass::Definition(Definition::ModuleDef(def.into()))) - }, - ast::Const(it) => { - let def: hir::Const = sema.to_def(&it)?; - Some(NameClass::Definition(Definition::ModuleDef(def.into()))) - }, - ast::TypeAlias(it) => { - let def: hir::TypeAlias = sema.to_def(&it)?; - Some(NameClass::Definition(Definition::ModuleDef(def.into()))) - }, - ast::MacroCall(it) => { - let def = sema.to_def(&it)?; - Some(NameClass::Definition(Definition::Macro(def))) - }, - ast::TypeParam(it) => { - let def = sema.to_def(&it)?; - Some(NameClass::Definition(Definition::TypeParam(def))) - }, - _ => None, - } - } -} - -#[derive(Debug)] -pub enum NameRefClass { - Definition(Definition), - FieldShorthand { local: Local, field: Definition }, -} - -impl NameRefClass { - pub fn definition(self) -> Definition { - match self { - NameRefClass::Definition(def) => def, - NameRefClass::FieldShorthand { local, field: _ } => Definition::Local(local), - } - } -} - -// Note: we don't have unit-tests for this rather important function. -// It is primarily exercised via goto definition tests in `ra_ide`. 
-pub fn classify_name_ref( - sema: &Semantics, - name_ref: &ast::NameRef, -) -> Option { - let _p = profile("classify_name_ref"); - - let parent = name_ref.syntax().parent()?; - - if let Some(method_call) = ast::MethodCallExpr::cast(parent.clone()) { - if let Some(func) = sema.resolve_method_call(&method_call) { - return Some(NameRefClass::Definition(Definition::ModuleDef(func.into()))); - } - } - - if let Some(field_expr) = ast::FieldExpr::cast(parent.clone()) { - if let Some(field) = sema.resolve_field(&field_expr) { - return Some(NameRefClass::Definition(Definition::Field(field))); - } - } - - if let Some(record_field) = ast::RecordExprField::for_field_name(name_ref) { - if let Some((field, local)) = sema.resolve_record_field(&record_field) { - let field = Definition::Field(field); - let res = match local { - None => NameRefClass::Definition(field), - Some(local) => NameRefClass::FieldShorthand { field, local }, - }; - return Some(res); - } - } - - if let Some(record_field_pat) = ast::RecordPatField::cast(parent.clone()) { - if let Some(field) = sema.resolve_record_field_pat(&record_field_pat) { - let field = Definition::Field(field); - return Some(NameRefClass::Definition(field)); - } - } - - if ast::AssocTypeArg::cast(parent.clone()).is_some() { - // `Trait` - // ^^^^^ - let path = name_ref.syntax().ancestors().find_map(ast::Path::cast)?; - let resolved = sema.resolve_path(&path)?; - if let PathResolution::Def(ModuleDef::Trait(tr)) = resolved { - if let Some(ty) = tr - .items(sema.db) - .iter() - .filter_map(|assoc| match assoc { - hir::AssocItem::TypeAlias(it) => Some(*it), - _ => None, - }) - .find(|alias| alias.name(sema.db).to_string() == **name_ref.text()) - { - return Some(NameRefClass::Definition(Definition::ModuleDef( - ModuleDef::TypeAlias(ty), - ))); - } - } - } - - if let Some(macro_call) = parent.ancestors().find_map(ast::MacroCall::cast) { - if let Some(path) = macro_call.path() { - if path.qualifier().is_none() { - // Only use this to resolve 
single-segment macro calls like `foo!()`. Multi-segment - // paths are handled below (allowing `log<|>::info!` to resolve to the log crate). - if let Some(macro_def) = sema.resolve_macro_call(¯o_call) { - return Some(NameRefClass::Definition(Definition::Macro(macro_def))); - } - } - } - } - - let path = name_ref.syntax().ancestors().find_map(ast::Path::cast)?; - let resolved = sema.resolve_path(&path)?; - Some(NameRefClass::Definition(resolved.into())) -} - -impl From for Definition { - fn from(path_resolution: PathResolution) -> Self { - match path_resolution { - PathResolution::Def(def) => Definition::ModuleDef(def), - PathResolution::AssocItem(item) => { - let def = match item { - hir::AssocItem::Function(it) => it.into(), - hir::AssocItem::Const(it) => it.into(), - hir::AssocItem::TypeAlias(it) => it.into(), - }; - Definition::ModuleDef(def) - } - PathResolution::Local(local) => Definition::Local(local), - PathResolution::TypeParam(par) => Definition::TypeParam(par), - PathResolution::Macro(def) => Definition::Macro(def), - PathResolution::SelfType(impl_def) => Definition::SelfType(impl_def), - } - } -} diff --git a/crates/ra_ide_db/src/imports_locator.rs b/crates/ra_ide_db/src/imports_locator.rs deleted file mode 100644 index 1fba71ff85..0000000000 --- a/crates/ra_ide_db/src/imports_locator.rs +++ /dev/null @@ -1,65 +0,0 @@ -//! This module contains an import search funcionality that is provided to the ra_assists module. -//! Later, this should be moved away to a separate crate that is accessible from the ra_assists module. 
- -use hir::{Crate, MacroDef, ModuleDef, Semantics}; -use ra_prof::profile; -use ra_syntax::{ast, AstNode, SyntaxKind::NAME}; - -use crate::{ - defs::{classify_name, Definition}, - symbol_index::{self, FileSymbol, Query}, - RootDatabase, -}; -use either::Either; -use rustc_hash::FxHashSet; - -pub fn find_imports<'a>( - sema: &Semantics<'a, RootDatabase>, - krate: Crate, - name_to_import: &str, -) -> Vec> { - let _p = profile("search_for_imports"); - let db = sema.db; - - // Query dependencies first. - let mut candidates: FxHashSet<_> = - krate.query_external_importables(db, name_to_import).collect(); - - // Query the local crate using the symbol index. - let local_results = { - let mut query = Query::new(name_to_import.to_string()); - query.exact(); - query.limit(40); - symbol_index::crate_symbols(db, krate.into(), query) - }; - - candidates.extend( - local_results - .into_iter() - .filter_map(|import_candidate| get_name_definition(sema, &import_candidate)) - .filter_map(|name_definition_to_import| match name_definition_to_import { - Definition::ModuleDef(module_def) => Some(Either::Left(module_def)), - Definition::Macro(macro_def) => Some(Either::Right(macro_def)), - _ => None, - }), - ); - - candidates.into_iter().collect() -} - -fn get_name_definition<'a>( - sema: &Semantics<'a, RootDatabase>, - import_candidate: &FileSymbol, -) -> Option { - let _p = profile("get_name_definition"); - let file_id = import_candidate.file_id; - - let candidate_node = import_candidate.ptr.to_node(sema.parse(file_id).syntax()); - let candidate_name_node = if candidate_node.kind() != NAME { - candidate_node.children().find(|it| it.kind() == NAME)? - } else { - candidate_node - }; - let name = ast::Name::cast(candidate_name_node)?; - classify_name(sema, &name)?.into_definition() -} diff --git a/crates/ra_ide_db/src/lib.rs b/crates/ra_ide_db/src/lib.rs deleted file mode 100644 index 6900cac73e..0000000000 --- a/crates/ra_ide_db/src/lib.rs +++ /dev/null @@ -1,139 +0,0 @@ -//! 
This crate defines the core datastructure representing IDE state -- `RootDatabase`. -//! -//! It is mainly a `HirDatabase` for semantic analysis, plus a `SymbolsDatabase`, for fuzzy search. - -pub mod line_index; -pub mod symbol_index; -pub mod change; -pub mod defs; -pub mod search; -pub mod imports_locator; -pub mod source_change; -mod wasm_shims; - -use std::{fmt, sync::Arc}; - -use hir::db::{AstDatabase, DefDatabase, HirDatabase}; -use ra_db::{ - salsa::{self, Durability}, - Canceled, CheckCanceled, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, - Upcast, -}; -use rustc_hash::FxHashSet; - -use crate::{line_index::LineIndex, symbol_index::SymbolsDatabase}; - -#[salsa::database( - ra_db::SourceDatabaseStorage, - ra_db::SourceDatabaseExtStorage, - LineIndexDatabaseStorage, - symbol_index::SymbolsDatabaseStorage, - hir::db::InternDatabaseStorage, - hir::db::AstDatabaseStorage, - hir::db::DefDatabaseStorage, - hir::db::HirDatabaseStorage -)] -pub struct RootDatabase { - storage: salsa::Storage, - pub last_gc: crate::wasm_shims::Instant, - pub last_gc_check: crate::wasm_shims::Instant, -} - -impl fmt::Debug for RootDatabase { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("RootDatabase").finish() - } -} - -impl Upcast for RootDatabase { - fn upcast(&self) -> &(dyn AstDatabase + 'static) { - &*self - } -} - -impl Upcast for RootDatabase { - fn upcast(&self) -> &(dyn DefDatabase + 'static) { - &*self - } -} - -impl Upcast for RootDatabase { - fn upcast(&self) -> &(dyn HirDatabase + 'static) { - &*self - } -} - -impl FileLoader for RootDatabase { - fn file_text(&self, file_id: FileId) -> Arc { - FileLoaderDelegate(self).file_text(file_id) - } - fn resolve_path(&self, anchor: FileId, path: &str) -> Option { - FileLoaderDelegate(self).resolve_path(anchor, path) - } - fn relevant_crates(&self, file_id: FileId) -> Arc> { - FileLoaderDelegate(self).relevant_crates(file_id) - } -} - -impl salsa::Database for RootDatabase { - fn 
on_propagated_panic(&self) -> ! { - Canceled::throw() - } - fn salsa_event(&self, event: salsa::Event) { - match event.kind { - salsa::EventKind::DidValidateMemoizedValue { .. } - | salsa::EventKind::WillExecute { .. } => { - self.check_canceled(); - } - _ => (), - } - } -} - -impl Default for RootDatabase { - fn default() -> RootDatabase { - RootDatabase::new(None) - } -} - -impl RootDatabase { - pub fn new(lru_capacity: Option) -> RootDatabase { - let mut db = RootDatabase { - storage: salsa::Storage::default(), - last_gc: crate::wasm_shims::Instant::now(), - last_gc_check: crate::wasm_shims::Instant::now(), - }; - db.set_crate_graph_with_durability(Default::default(), Durability::HIGH); - db.set_local_roots_with_durability(Default::default(), Durability::HIGH); - db.set_library_roots_with_durability(Default::default(), Durability::HIGH); - db.update_lru_capacity(lru_capacity); - db - } - - pub fn update_lru_capacity(&mut self, lru_capacity: Option) { - let lru_capacity = lru_capacity.unwrap_or(ra_db::DEFAULT_LRU_CAP); - ra_db::ParseQuery.in_db_mut(self).set_lru_capacity(lru_capacity); - hir::db::ParseMacroQuery.in_db_mut(self).set_lru_capacity(lru_capacity); - hir::db::MacroExpandQuery.in_db_mut(self).set_lru_capacity(lru_capacity); - } -} - -impl salsa::ParallelDatabase for RootDatabase { - fn snapshot(&self) -> salsa::Snapshot { - salsa::Snapshot::new(RootDatabase { - storage: self.storage.snapshot(), - last_gc: self.last_gc, - last_gc_check: self.last_gc_check, - }) - } -} - -#[salsa::query_group(LineIndexDatabaseStorage)] -pub trait LineIndexDatabase: ra_db::SourceDatabase + CheckCanceled { - fn line_index(&self, file_id: FileId) -> Arc; -} - -fn line_index(db: &dyn LineIndexDatabase, file_id: FileId) -> Arc { - let text = db.file_text(file_id); - Arc::new(LineIndex::new(&*text)) -} diff --git a/crates/ra_ide_db/src/line_index.rs b/crates/ra_ide_db/src/line_index.rs deleted file mode 100644 index 2ab662098a..0000000000 --- 
a/crates/ra_ide_db/src/line_index.rs +++ /dev/null @@ -1,281 +0,0 @@ -//! `LineIndex` maps flat `TextSize` offsets into `(Line, Column)` -//! representation. -use std::iter; - -use ra_syntax::{TextRange, TextSize}; -use rustc_hash::FxHashMap; -use stdx::partition_point; - -#[derive(Clone, Debug, PartialEq, Eq)] -pub struct LineIndex { - /// Offset the the beginning of each line, zero-based - pub(crate) newlines: Vec, - /// List of non-ASCII characters on each line - pub(crate) utf16_lines: FxHashMap>, -} - -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] -pub struct LineCol { - /// Zero-based - pub line: u32, - /// Zero-based - pub col_utf16: u32, -} - -#[derive(Clone, Debug, Hash, PartialEq, Eq)] -pub(crate) struct Utf16Char { - /// Start offset of a character inside a line, zero-based - pub(crate) start: TextSize, - /// End offset of a character inside a line, zero-based - pub(crate) end: TextSize, -} - -impl Utf16Char { - /// Returns the length in 8-bit UTF-8 code units. - fn len(&self) -> TextSize { - self.end - self.start - } - - /// Returns the length in 16-bit UTF-16 code units. 
- fn len_utf16(&self) -> usize { - if self.len() == TextSize::from(4) { - 2 - } else { - 1 - } - } -} - -impl LineIndex { - pub fn new(text: &str) -> LineIndex { - let mut utf16_lines = FxHashMap::default(); - let mut utf16_chars = Vec::new(); - - let mut newlines = vec![0.into()]; - let mut curr_row = 0.into(); - let mut curr_col = 0.into(); - let mut line = 0; - for c in text.chars() { - let c_len = TextSize::of(c); - curr_row += c_len; - if c == '\n' { - newlines.push(curr_row); - - // Save any utf-16 characters seen in the previous line - if !utf16_chars.is_empty() { - utf16_lines.insert(line, utf16_chars); - utf16_chars = Vec::new(); - } - - // Prepare for processing the next line - curr_col = 0.into(); - line += 1; - continue; - } - - if !c.is_ascii() { - utf16_chars.push(Utf16Char { start: curr_col, end: curr_col + c_len }); - } - - curr_col += c_len; - } - - // Save any utf-16 characters seen in the last line - if !utf16_chars.is_empty() { - utf16_lines.insert(line, utf16_chars); - } - - LineIndex { newlines, utf16_lines } - } - - pub fn line_col(&self, offset: TextSize) -> LineCol { - let line = partition_point(&self.newlines, |&it| it <= offset) - 1; - let line_start_offset = self.newlines[line]; - let col = offset - line_start_offset; - - LineCol { line: line as u32, col_utf16: self.utf8_to_utf16_col(line as u32, col) as u32 } - } - - pub fn offset(&self, line_col: LineCol) -> TextSize { - //FIXME: return Result - let col = self.utf16_to_utf8_col(line_col.line, line_col.col_utf16); - self.newlines[line_col.line as usize] + col - } - - pub fn lines(&self, range: TextRange) -> impl Iterator + '_ { - let lo = partition_point(&self.newlines, |&it| it < range.start()); - let hi = partition_point(&self.newlines, |&it| it <= range.end()); - let all = iter::once(range.start()) - .chain(self.newlines[lo..hi].iter().copied()) - .chain(iter::once(range.end())); - - all.clone() - .zip(all.skip(1)) - .map(|(lo, hi)| TextRange::new(lo, hi)) - .filter(|it| 
!it.is_empty()) - } - - fn utf8_to_utf16_col(&self, line: u32, col: TextSize) -> usize { - let mut res: usize = col.into(); - if let Some(utf16_chars) = self.utf16_lines.get(&line) { - for c in utf16_chars { - if c.end <= col { - res -= usize::from(c.len()) - c.len_utf16(); - } else { - // From here on, all utf16 characters come *after* the character we are mapping, - // so we don't need to take them into account - break; - } - } - } - res - } - - fn utf16_to_utf8_col(&self, line: u32, mut col: u32) -> TextSize { - if let Some(utf16_chars) = self.utf16_lines.get(&line) { - for c in utf16_chars { - if col > u32::from(c.start) { - col += u32::from(c.len()) - c.len_utf16() as u32; - } else { - // From here on, all utf16 characters come *after* the character we are mapping, - // so we don't need to take them into account - break; - } - } - } - - col.into() - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_line_index() { - let text = "hello\nworld"; - let index = LineIndex::new(text); - assert_eq!(index.line_col(0.into()), LineCol { line: 0, col_utf16: 0 }); - assert_eq!(index.line_col(1.into()), LineCol { line: 0, col_utf16: 1 }); - assert_eq!(index.line_col(5.into()), LineCol { line: 0, col_utf16: 5 }); - assert_eq!(index.line_col(6.into()), LineCol { line: 1, col_utf16: 0 }); - assert_eq!(index.line_col(7.into()), LineCol { line: 1, col_utf16: 1 }); - assert_eq!(index.line_col(8.into()), LineCol { line: 1, col_utf16: 2 }); - assert_eq!(index.line_col(10.into()), LineCol { line: 1, col_utf16: 4 }); - assert_eq!(index.line_col(11.into()), LineCol { line: 1, col_utf16: 5 }); - assert_eq!(index.line_col(12.into()), LineCol { line: 1, col_utf16: 6 }); - - let text = "\nhello\nworld"; - let index = LineIndex::new(text); - assert_eq!(index.line_col(0.into()), LineCol { line: 0, col_utf16: 0 }); - assert_eq!(index.line_col(1.into()), LineCol { line: 1, col_utf16: 0 }); - assert_eq!(index.line_col(2.into()), LineCol { line: 1, col_utf16: 1 }); - 
assert_eq!(index.line_col(6.into()), LineCol { line: 1, col_utf16: 5 }); - assert_eq!(index.line_col(7.into()), LineCol { line: 2, col_utf16: 0 }); - } - - #[test] - fn test_char_len() { - assert_eq!('メ'.len_utf8(), 3); - assert_eq!('メ'.len_utf16(), 1); - } - - #[test] - fn test_empty_index() { - let col_index = LineIndex::new( - " -const C: char = 'x'; -", - ); - assert_eq!(col_index.utf16_lines.len(), 0); - } - - #[test] - fn test_single_char() { - let col_index = LineIndex::new( - " -const C: char = 'メ'; -", - ); - - assert_eq!(col_index.utf16_lines.len(), 1); - assert_eq!(col_index.utf16_lines[&1].len(), 1); - assert_eq!(col_index.utf16_lines[&1][0], Utf16Char { start: 17.into(), end: 20.into() }); - - // UTF-8 to UTF-16, no changes - assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15); - - // UTF-8 to UTF-16 - assert_eq!(col_index.utf8_to_utf16_col(1, 22.into()), 20); - - // UTF-16 to UTF-8, no changes - assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextSize::from(15)); - - // UTF-16 to UTF-8 - assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextSize::from(21)); - - let col_index = LineIndex::new("a𐐏b"); - assert_eq!(col_index.utf16_to_utf8_col(0, 3), TextSize::from(5)); - } - - #[test] - fn test_string() { - let col_index = LineIndex::new( - " -const C: char = \"メ メ\"; -", - ); - - assert_eq!(col_index.utf16_lines.len(), 1); - assert_eq!(col_index.utf16_lines[&1].len(), 2); - assert_eq!(col_index.utf16_lines[&1][0], Utf16Char { start: 17.into(), end: 20.into() }); - assert_eq!(col_index.utf16_lines[&1][1], Utf16Char { start: 21.into(), end: 24.into() }); - - // UTF-8 to UTF-16 - assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15); - - assert_eq!(col_index.utf8_to_utf16_col(1, 21.into()), 19); - assert_eq!(col_index.utf8_to_utf16_col(1, 25.into()), 21); - - assert!(col_index.utf8_to_utf16_col(2, 15.into()) == 15); - - // UTF-16 to UTF-8 - assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextSize::from(15)); - - // メ UTF-8: 0xE3 0x83 0xA1, UTF-16: 
0x30E1 - assert_eq!(col_index.utf16_to_utf8_col(1, 17), TextSize::from(17)); // first メ at 17..20 - assert_eq!(col_index.utf16_to_utf8_col(1, 18), TextSize::from(20)); // space - assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextSize::from(21)); // second メ at 21..24 - - assert_eq!(col_index.utf16_to_utf8_col(2, 15), TextSize::from(15)); - } - - #[test] - fn test_splitlines() { - fn r(lo: u32, hi: u32) -> TextRange { - TextRange::new(lo.into(), hi.into()) - } - - let text = "a\nbb\nccc\n"; - let line_index = LineIndex::new(text); - - let actual = line_index.lines(r(0, 9)).collect::>(); - let expected = vec![r(0, 2), r(2, 5), r(5, 9)]; - assert_eq!(actual, expected); - - let text = ""; - let line_index = LineIndex::new(text); - - let actual = line_index.lines(r(0, 0)).collect::>(); - let expected = vec![]; - assert_eq!(actual, expected); - - let text = "\n"; - let line_index = LineIndex::new(text); - - let actual = line_index.lines(r(0, 1)).collect::>(); - let expected = vec![r(0, 1)]; - assert_eq!(actual, expected) - } -} diff --git a/crates/ra_ide_db/src/search.rs b/crates/ra_ide_db/src/search.rs deleted file mode 100644 index 0b862b449f..0000000000 --- a/crates/ra_ide_db/src/search.rs +++ /dev/null @@ -1,323 +0,0 @@ -//! Implementation of find-usages functionality. -//! -//! It is based on the standard ide trick: first, we run a fast text search to -//! get a super-set of matches. Then, we we confirm each match using precise -//! name resolution. 
- -use std::{convert::TryInto, mem}; - -use hir::{DefWithBody, HasSource, Module, ModuleSource, Semantics, Visibility}; -use once_cell::unsync::Lazy; -use ra_db::{FileId, FileRange, SourceDatabaseExt}; -use ra_prof::profile; -use ra_syntax::{ast, match_ast, AstNode, TextRange, TextSize}; -use rustc_hash::FxHashMap; - -use crate::{ - defs::{classify_name_ref, Definition, NameRefClass}, - RootDatabase, -}; - -#[derive(Debug, Clone)] -pub struct Reference { - pub file_range: FileRange, - pub kind: ReferenceKind, - pub access: Option, -} - -#[derive(Debug, Clone, PartialEq)] -pub enum ReferenceKind { - FieldShorthandForField, - FieldShorthandForLocal, - StructLiteral, - Other, -} - -#[derive(Debug, Copy, Clone, PartialEq)] -pub enum ReferenceAccess { - Read, - Write, -} - -/// Generally, `search_scope` returns files that might contain references for the element. -/// For `pub(crate)` things it's a crate, for `pub` things it's a crate and dependant crates. -/// In some cases, the location of the references is known to within a `TextRange`, -/// e.g. for things like local variables. 
-pub struct SearchScope { - entries: FxHashMap>, -} - -impl SearchScope { - fn new(entries: FxHashMap>) -> SearchScope { - SearchScope { entries } - } - - pub fn empty() -> SearchScope { - SearchScope::new(FxHashMap::default()) - } - - pub fn single_file(file: FileId) -> SearchScope { - SearchScope::new(std::iter::once((file, None)).collect()) - } - - pub fn files(files: &[FileId]) -> SearchScope { - SearchScope::new(files.iter().map(|f| (*f, None)).collect()) - } - - pub fn intersection(&self, other: &SearchScope) -> SearchScope { - let (mut small, mut large) = (&self.entries, &other.entries); - if small.len() > large.len() { - mem::swap(&mut small, &mut large) - } - - let res = small - .iter() - .filter_map(|(file_id, r1)| { - let r2 = large.get(file_id)?; - let r = intersect_ranges(*r1, *r2)?; - Some((*file_id, r)) - }) - .collect(); - - return SearchScope::new(res); - - fn intersect_ranges( - r1: Option, - r2: Option, - ) -> Option> { - match (r1, r2) { - (None, r) | (r, None) => Some(r), - (Some(r1), Some(r2)) => { - let r = r1.intersect(r2)?; - Some(Some(r)) - } - } - } - } -} - -impl IntoIterator for SearchScope { - type Item = (FileId, Option); - type IntoIter = std::collections::hash_map::IntoIter>; - - fn into_iter(self) -> Self::IntoIter { - self.entries.into_iter() - } -} - -impl Definition { - fn search_scope(&self, db: &RootDatabase) -> SearchScope { - let _p = profile("search_scope"); - let module = match self.module(db) { - Some(it) => it, - None => return SearchScope::empty(), - }; - let module_src = module.definition_source(db); - let file_id = module_src.file_id.original_file(db); - - if let Definition::Local(var) = self { - let range = match var.parent(db) { - DefWithBody::Function(f) => f.source(db).value.syntax().text_range(), - DefWithBody::Const(c) => c.source(db).value.syntax().text_range(), - DefWithBody::Static(s) => s.source(db).value.syntax().text_range(), - }; - let mut res = FxHashMap::default(); - res.insert(file_id, Some(range)); - 
return SearchScope::new(res); - } - - let vis = self.visibility(db); - - if let Some(Visibility::Module(module)) = vis.and_then(|it| it.into()) { - let module: Module = module.into(); - let mut res = FxHashMap::default(); - - let mut to_visit = vec![module]; - let mut is_first = true; - while let Some(module) = to_visit.pop() { - let src = module.definition_source(db); - let file_id = src.file_id.original_file(db); - match src.value { - ModuleSource::Module(m) => { - if is_first { - let range = Some(m.syntax().text_range()); - res.insert(file_id, range); - } else { - // We have already added the enclosing file to the search scope, - // so do nothing. - } - } - ModuleSource::SourceFile(_) => { - res.insert(file_id, None); - } - }; - is_first = false; - to_visit.extend(module.children(db)); - } - - return SearchScope::new(res); - } - - if let Some(Visibility::Public) = vis { - let source_root_id = db.file_source_root(file_id); - let source_root = db.source_root(source_root_id); - let mut res = source_root.iter().map(|id| (id, None)).collect::>(); - - let krate = module.krate(); - for rev_dep in krate.reverse_dependencies(db) { - let root_file = rev_dep.root_file(db); - let source_root_id = db.file_source_root(root_file); - let source_root = db.source_root(source_root_id); - res.extend(source_root.iter().map(|id| (id, None))); - } - return SearchScope::new(res); - } - - let mut res = FxHashMap::default(); - let range = match module_src.value { - ModuleSource::Module(m) => Some(m.syntax().text_range()), - ModuleSource::SourceFile(_) => None, - }; - res.insert(file_id, range); - SearchScope::new(res) - } - - pub fn find_usages( - &self, - sema: &Semantics, - search_scope: Option, - ) -> Vec { - let _p = profile("Definition::find_usages"); - - let search_scope = { - let base = self.search_scope(sema.db); - match search_scope { - None => base, - Some(scope) => base.intersection(&scope), - } - }; - - let name = match self.name(sema.db) { - None => return Vec::new(), - 
Some(it) => it.to_string(), - }; - - let pat = name.as_str(); - let mut refs = vec![]; - - for (file_id, search_range) in search_scope { - let text = sema.db.file_text(file_id); - let search_range = - search_range.unwrap_or(TextRange::up_to(TextSize::of(text.as_str()))); - - let tree = Lazy::new(|| sema.parse(file_id).syntax().clone()); - - for (idx, _) in text.match_indices(pat) { - let offset: TextSize = idx.try_into().unwrap(); - if !search_range.contains_inclusive(offset) { - continue; - } - - let name_ref: ast::NameRef = - if let Some(name_ref) = sema.find_node_at_offset_with_descend(&tree, offset) { - name_ref - } else { - continue; - }; - - match classify_name_ref(&sema, &name_ref) { - Some(NameRefClass::Definition(def)) if &def == self => { - let kind = if is_record_lit_name_ref(&name_ref) - || is_call_expr_name_ref(&name_ref) - { - ReferenceKind::StructLiteral - } else { - ReferenceKind::Other - }; - - let file_range = sema.original_range(name_ref.syntax()); - refs.push(Reference { - file_range, - kind, - access: reference_access(&def, &name_ref), - }); - } - Some(NameRefClass::FieldShorthand { local, field }) => { - match self { - Definition::Field(_) if &field == self => refs.push(Reference { - file_range: sema.original_range(name_ref.syntax()), - kind: ReferenceKind::FieldShorthandForField, - access: reference_access(&field, &name_ref), - }), - Definition::Local(l) if &local == l => refs.push(Reference { - file_range: sema.original_range(name_ref.syntax()), - kind: ReferenceKind::FieldShorthandForLocal, - access: reference_access(&Definition::Local(local), &name_ref), - }), - - _ => {} // not a usage - }; - } - _ => {} // not a usage - } - } - } - refs - } -} - -fn reference_access(def: &Definition, name_ref: &ast::NameRef) -> Option { - // Only Locals and Fields have accesses for now. - match def { - Definition::Local(_) | Definition::Field(_) => {} - _ => return None, - }; - - let mode = name_ref.syntax().ancestors().find_map(|node| { - match_ast! 
{ - match (node) { - ast::BinExpr(expr) => { - if expr.op_kind()?.is_assignment() { - // If the variable or field ends on the LHS's end then it's a Write (covers fields and locals). - // FIXME: This is not terribly accurate. - if let Some(lhs) = expr.lhs() { - if lhs.syntax().text_range().end() == name_ref.syntax().text_range().end() { - return Some(ReferenceAccess::Write); - } - } - } - Some(ReferenceAccess::Read) - }, - _ => None - } - } - }); - - // Default Locals and Fields to read - mode.or(Some(ReferenceAccess::Read)) -} - -fn is_call_expr_name_ref(name_ref: &ast::NameRef) -> bool { - name_ref - .syntax() - .ancestors() - .find_map(ast::CallExpr::cast) - .and_then(|c| match c.expr()? { - ast::Expr::PathExpr(p) => { - Some(p.path()?.segment()?.name_ref().as_ref() == Some(name_ref)) - } - _ => None, - }) - .unwrap_or(false) -} - -fn is_record_lit_name_ref(name_ref: &ast::NameRef) -> bool { - name_ref - .syntax() - .ancestors() - .find_map(ast::RecordExpr::cast) - .and_then(|l| l.path()) - .and_then(|p| p.segment()) - .map(|p| p.name_ref().as_ref() == Some(name_ref)) - .unwrap_or(false) -} diff --git a/crates/ra_ide_db/src/source_change.rs b/crates/ra_ide_db/src/source_change.rs deleted file mode 100644 index abb83f4213..0000000000 --- a/crates/ra_ide_db/src/source_change.rs +++ /dev/null @@ -1,59 +0,0 @@ -//! This modules defines type to represent changes to the source code, that flow -//! from the server to the client. -//! -//! It can be viewed as a dual for `AnalysisChange`. - -use ra_db::FileId; -use ra_text_edit::TextEdit; - -#[derive(Default, Debug, Clone)] -pub struct SourceChange { - pub source_file_edits: Vec, - pub file_system_edits: Vec, - pub is_snippet: bool, -} - -impl SourceChange { - /// Creates a new SourceChange with the given label - /// from the edits. 
- pub fn from_edits( - source_file_edits: Vec, - file_system_edits: Vec, - ) -> Self { - SourceChange { source_file_edits, file_system_edits, is_snippet: false } - } -} - -#[derive(Debug, Clone)] -pub struct SourceFileEdit { - pub file_id: FileId, - pub edit: TextEdit, -} - -impl From for SourceChange { - fn from(edit: SourceFileEdit) -> SourceChange { - vec![edit].into() - } -} - -impl From> for SourceChange { - fn from(source_file_edits: Vec) -> SourceChange { - SourceChange { source_file_edits, file_system_edits: Vec::new(), is_snippet: false } - } -} - -#[derive(Debug, Clone)] -pub enum FileSystemEdit { - CreateFile { anchor: FileId, dst: String }, - MoveFile { src: FileId, anchor: FileId, dst: String }, -} - -impl From for SourceChange { - fn from(edit: FileSystemEdit) -> SourceChange { - SourceChange { - source_file_edits: Vec::new(), - file_system_edits: vec![edit], - is_snippet: false, - } - } -} diff --git a/crates/ra_ide_db/src/symbol_index.rs b/crates/ra_ide_db/src/symbol_index.rs deleted file mode 100644 index 35a2c5be3b..0000000000 --- a/crates/ra_ide_db/src/symbol_index.rs +++ /dev/null @@ -1,430 +0,0 @@ -//! This module handles fuzzy-searching of functions, structs and other symbols -//! by name across the whole workspace and dependencies. -//! -//! It works by building an incrementally-updated text-search index of all -//! symbols. The backbone of the index is the **awesome** `fst` crate by -//! @BurntSushi. -//! -//! In a nutshell, you give a set of strings to `fst`, and it builds a -//! finite state machine describing this set of strings. The strings which -//! could fuzzy-match a pattern can also be described by a finite state machine. -//! What is freaking cool is that you can now traverse both state machines in -//! lock-step to enumerate the strings which are both in the input set and -//! fuzz-match the query. Or, more formally, given two languages described by -//! 
FSTs, one can build a product FST which describes the intersection of the -//! languages. -//! -//! `fst` does not support cheap updating of the index, but it supports unioning -//! of state machines. So, to account for changing source code, we build an FST -//! for each library (which is assumed to never change) and an FST for each Rust -//! file in the current workspace, and run a query against the union of all -//! those FSTs. - -use std::{ - cmp::Ordering, - fmt, - hash::{Hash, Hasher}, - mem, - sync::Arc, -}; - -use fst::{self, Streamer}; -use hir::db::DefDatabase; -use ra_db::{ - salsa::{self, ParallelDatabase}, - CrateId, FileId, SourceDatabaseExt, SourceRootId, -}; -use ra_prof::profile; -use ra_syntax::{ - ast::{self, NameOwner}, - match_ast, AstNode, Parse, SmolStr, SourceFile, - SyntaxKind::{self, *}, - SyntaxNode, SyntaxNodePtr, TextRange, WalkEvent, -}; -use rayon::prelude::*; -use rustc_hash::{FxHashMap, FxHashSet}; - -use crate::RootDatabase; - -#[derive(Debug)] -pub struct Query { - query: String, - lowercased: String, - only_types: bool, - libs: bool, - exact: bool, - limit: usize, -} - -impl Query { - pub fn new(query: String) -> Query { - let lowercased = query.to_lowercase(); - Query { - query, - lowercased, - only_types: false, - libs: false, - exact: false, - limit: usize::max_value(), - } - } - - pub fn only_types(&mut self) { - self.only_types = true; - } - - pub fn libs(&mut self) { - self.libs = true; - } - - pub fn exact(&mut self) { - self.exact = true; - } - - pub fn limit(&mut self, limit: usize) { - self.limit = limit - } -} - -#[salsa::query_group(SymbolsDatabaseStorage)] -pub trait SymbolsDatabase: hir::db::HirDatabase + SourceDatabaseExt { - fn file_symbols(&self, file_id: FileId) -> Arc; - fn library_symbols(&self) -> Arc>; - /// The set of "local" (that is, from the current workspace) roots. - /// Files in local roots are assumed to change frequently. 
- #[salsa::input] - fn local_roots(&self) -> Arc>; - /// The set of roots for crates.io libraries. - /// Files in libraries are assumed to never change. - #[salsa::input] - fn library_roots(&self) -> Arc>; -} - -fn library_symbols(db: &dyn SymbolsDatabase) -> Arc> { - let _p = profile("library_symbols"); - - let roots = db.library_roots(); - let res = roots - .iter() - .map(|&root_id| { - let root = db.source_root(root_id); - let files = root - .iter() - .map(|it| (it, SourceDatabaseExt::file_text(db, it))) - .collect::>(); - let symbol_index = SymbolIndex::for_files( - files.into_par_iter().map(|(file, text)| (file, SourceFile::parse(&text))), - ); - (root_id, symbol_index) - }) - .collect(); - Arc::new(res) -} - -fn file_symbols(db: &dyn SymbolsDatabase, file_id: FileId) -> Arc { - db.check_canceled(); - let parse = db.parse(file_id); - - let symbols = source_file_to_file_symbols(&parse.tree(), file_id); - - // FIXME: add macros here - - Arc::new(SymbolIndex::new(symbols)) -} - -/// Need to wrap Snapshot to provide `Clone` impl for `map_with` -struct Snap(DB); -impl Clone for Snap> { - fn clone(&self) -> Snap> { - Snap(self.0.snapshot()) - } -} - -// Feature: Workspace Symbol -// -// Uses fuzzy-search to find types, modules and functions by name across your -// project and dependencies. This is **the** most useful feature, which improves code -// navigation tremendously. It mostly works on top of the built-in LSP -// functionality, however `#` and `*` symbols can be used to narrow down the -// search. Specifically, -// -// - `Foo` searches for `Foo` type in the current workspace -// - `foo#` searches for `foo` function in the current workspace -// - `Foo*` searches for `Foo` type among dependencies, including `stdlib` -// - `foo#*` searches for `foo` function among dependencies -// -// That is, `#` switches from "types" to all symbols, `*` switches from the current -// workspace to dependencies. 
-// -// |=== -// | Editor | Shortcut -// -// | VS Code | kbd:[Ctrl+T] -// |=== -pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec { - let _p = ra_prof::profile("world_symbols").detail(|| query.query.clone()); - - let tmp1; - let tmp2; - let buf: Vec<&SymbolIndex> = if query.libs { - tmp1 = db.library_symbols(); - tmp1.values().collect() - } else { - let mut files = Vec::new(); - for &root in db.local_roots().iter() { - let sr = db.source_root(root); - files.extend(sr.iter()) - } - - let snap = Snap(db.snapshot()); - tmp2 = files - .par_iter() - .map_with(snap, |db, &file_id| db.0.file_symbols(file_id)) - .collect::>(); - tmp2.iter().map(|it| &**it).collect() - }; - query.search(&buf) -} - -pub fn crate_symbols(db: &RootDatabase, krate: CrateId, query: Query) -> Vec { - // FIXME(#4842): This now depends on CrateDefMap, why not build the entire symbol index from - // that instead? - - let def_map = db.crate_def_map(krate); - let mut files = Vec::new(); - let mut modules = vec![def_map.root]; - while let Some(module) = modules.pop() { - let data = &def_map[module]; - files.extend(data.origin.file_id()); - modules.extend(data.children.values()); - } - - let snap = Snap(db.snapshot()); - - let buf = files - .par_iter() - .map_with(snap, |db, &file_id| db.0.file_symbols(file_id)) - .collect::>(); - let buf = buf.iter().map(|it| &**it).collect::>(); - - query.search(&buf) -} - -pub fn index_resolve(db: &RootDatabase, name_ref: &ast::NameRef) -> Vec { - let name = name_ref.text(); - let mut query = Query::new(name.to_string()); - query.exact(); - query.limit(4); - world_symbols(db, query) -} - -#[derive(Default)] -pub struct SymbolIndex { - symbols: Vec, - map: fst::Map>, -} - -impl fmt::Debug for SymbolIndex { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_struct("SymbolIndex").field("n_symbols", &self.symbols.len()).finish() - } -} - -impl PartialEq for SymbolIndex { - fn eq(&self, other: &SymbolIndex) -> bool { - self.symbols == 
other.symbols - } -} - -impl Eq for SymbolIndex {} - -impl Hash for SymbolIndex { - fn hash(&self, hasher: &mut H) { - self.symbols.hash(hasher) - } -} - -impl SymbolIndex { - fn new(mut symbols: Vec) -> SymbolIndex { - fn cmp(lhs: &FileSymbol, rhs: &FileSymbol) -> Ordering { - let lhs_chars = lhs.name.chars().map(|c| c.to_ascii_lowercase()); - let rhs_chars = rhs.name.chars().map(|c| c.to_ascii_lowercase()); - lhs_chars.cmp(rhs_chars) - } - - symbols.par_sort_by(cmp); - - let mut builder = fst::MapBuilder::memory(); - - let mut last_batch_start = 0; - - for idx in 0..symbols.len() { - if let Some(next_symbol) = symbols.get(idx + 1) { - if cmp(&symbols[last_batch_start], next_symbol) == Ordering::Equal { - continue; - } - } - - let start = last_batch_start; - let end = idx + 1; - last_batch_start = end; - - let key = symbols[start].name.as_str().to_ascii_lowercase(); - let value = SymbolIndex::range_to_map_value(start, end); - - builder.insert(key, value).unwrap(); - } - - let map = fst::Map::new(builder.into_inner().unwrap()).unwrap(); - SymbolIndex { symbols, map } - } - - pub fn len(&self) -> usize { - self.symbols.len() - } - - pub fn memory_size(&self) -> usize { - self.map.as_fst().size() + self.symbols.len() * mem::size_of::() - } - - pub(crate) fn for_files( - files: impl ParallelIterator)>, - ) -> SymbolIndex { - let symbols = files - .flat_map(|(file_id, file)| source_file_to_file_symbols(&file.tree(), file_id)) - .collect::>(); - SymbolIndex::new(symbols) - } - - fn range_to_map_value(start: usize, end: usize) -> u64 { - debug_assert![start <= (std::u32::MAX as usize)]; - debug_assert![end <= (std::u32::MAX as usize)]; - - ((start as u64) << 32) | end as u64 - } - - fn map_value_to_range(value: u64) -> (usize, usize) { - let end = value as u32 as usize; - let start = (value >> 32) as usize; - (start, end) - } -} - -impl Query { - pub(crate) fn search(self, indices: &[&SymbolIndex]) -> Vec { - let mut op = fst::map::OpBuilder::new(); - for file_symbols in 
indices.iter() { - let automaton = fst::automaton::Subsequence::new(&self.lowercased); - op = op.add(file_symbols.map.search(automaton)) - } - let mut stream = op.union(); - let mut res = Vec::new(); - while let Some((_, indexed_values)) = stream.next() { - for indexed_value in indexed_values { - let symbol_index = &indices[indexed_value.index]; - let (start, end) = SymbolIndex::map_value_to_range(indexed_value.value); - - for symbol in &symbol_index.symbols[start..end] { - if self.only_types && !is_type(symbol.kind) { - continue; - } - if self.exact && symbol.name != self.query { - continue; - } - - res.push(symbol.clone()); - if res.len() >= self.limit { - return res; - } - } - } - } - res - } -} - -fn is_type(kind: SyntaxKind) -> bool { - matches!(kind, STRUCT | ENUM | TRAIT | TYPE_ALIAS) -} - -/// The actual data that is stored in the index. It should be as compact as -/// possible. -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct FileSymbol { - pub file_id: FileId, - pub name: SmolStr, - pub kind: SyntaxKind, - pub range: TextRange, - pub ptr: SyntaxNodePtr, - pub name_range: Option, - pub container_name: Option, -} - -fn source_file_to_file_symbols(source_file: &SourceFile, file_id: FileId) -> Vec { - let mut symbols = Vec::new(); - let mut stack = Vec::new(); - - for event in source_file.syntax().preorder() { - match event { - WalkEvent::Enter(node) => { - if let Some(mut symbol) = to_file_symbol(&node, file_id) { - symbol.container_name = stack.last().cloned(); - - stack.push(symbol.name.clone()); - symbols.push(symbol); - } - } - - WalkEvent::Leave(node) => { - if to_symbol(&node).is_some() { - stack.pop(); - } - } - } - } - - symbols -} - -fn to_symbol(node: &SyntaxNode) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> { - fn decl(node: N) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> { - let name = node.name()?; - let name_range = name.syntax().text_range(); - let name = name.text().clone(); - let ptr = SyntaxNodePtr::new(node.syntax()); - - 
Some((name, ptr, name_range)) - } - match_ast! { - match node { - ast::Fn(it) => decl(it), - ast::Struct(it) => decl(it), - ast::Enum(it) => decl(it), - ast::Trait(it) => decl(it), - ast::Module(it) => decl(it), - ast::TypeAlias(it) => decl(it), - ast::Const(it) => decl(it), - ast::Static(it) => decl(it), - ast::MacroCall(it) => { - if it.is_macro_rules().is_some() { - decl(it) - } else { - None - } - }, - _ => None, - } - } -} - -fn to_file_symbol(node: &SyntaxNode, file_id: FileId) -> Option { - to_symbol(node).map(move |(name, ptr, name_range)| FileSymbol { - name, - kind: node.kind(), - range: node.text_range(), - ptr, - file_id, - name_range: Some(name_range), - container_name: None, - }) -} diff --git a/crates/ra_mbe/Cargo.toml b/crates/ra_mbe/Cargo.toml deleted file mode 100644 index a26746a194..0000000000 --- a/crates/ra_mbe/Cargo.toml +++ /dev/null @@ -1,20 +0,0 @@ -[package] -edition = "2018" -name = "ra_mbe" -version = "0.1.0" -authors = ["rust-analyzer developers"] -license = "MIT OR Apache-2.0" - -[lib] -doctest = false - -[dependencies] -ra_syntax = { path = "../ra_syntax" } -ra_parser = { path = "../ra_parser" } -tt = { path = "../ra_tt", package = "ra_tt" } -rustc-hash = "1.1.0" -smallvec = "1.2.0" -log = "0.4.8" - -[dev-dependencies] -test_utils = { path = "../test_utils" } diff --git a/crates/ra_mbe/src/lib.rs b/crates/ra_mbe/src/lib.rs deleted file mode 100644 index dec7ba22ea..0000000000 --- a/crates/ra_mbe/src/lib.rs +++ /dev/null @@ -1,278 +0,0 @@ -//! `mbe` (short for Macro By Example) crate contains code for handling -//! `macro_rules` macros. It uses `TokenTree` (from `ra_tt` package) as the -//! interface, although it contains some code to bridge `SyntaxNode`s and -//! `TokenTree`s as well! 
- -mod parser; -mod mbe_expander; -mod syntax_bridge; -mod tt_iter; -mod subtree_source; - -#[cfg(test)] -mod tests; - -pub use tt::{Delimiter, Punct}; - -use crate::{ - parser::{parse_pattern, Op}, - tt_iter::TtIter, -}; - -#[derive(Debug, PartialEq, Eq)] -pub enum ParseError { - Expected(String), - RepetitionEmtpyTokenTree, -} - -#[derive(Debug, PartialEq, Eq, Clone)] -pub enum ExpandError { - NoMatchingRule, - UnexpectedToken, - BindingError(String), - ConversionError, - InvalidRepeat, - ProcMacroError(tt::ExpansionError), -} - -impl From for ExpandError { - fn from(it: tt::ExpansionError) -> Self { - ExpandError::ProcMacroError(it) - } -} - -pub use crate::syntax_bridge::{ - ast_to_token_tree, parse_to_token_tree, syntax_node_to_token_tree, token_tree_to_syntax_node, - TokenMap, -}; - -/// This struct contains AST for a single `macro_rules` definition. What might -/// be very confusing is that AST has almost exactly the same shape as -/// `tt::TokenTree`, but there's a crucial difference: in macro rules, `$ident` -/// and `$()*` have special meaning (see `Var` and `Repeat` data structures) -#[derive(Clone, Debug, PartialEq, Eq)] -pub struct MacroRules { - rules: Vec, - /// Highest id of the token we have in TokenMap - shift: Shift, -} - -#[derive(Clone, Debug, PartialEq, Eq)] -struct Rule { - lhs: tt::Subtree, - rhs: tt::Subtree, -} - -#[derive(Clone, Copy, Debug, PartialEq, Eq)] -struct Shift(u32); - -impl Shift { - fn new(tt: &tt::Subtree) -> Shift { - // Note that TokenId is started from zero, - // We have to add 1 to prevent duplication. 
- let value = max_id(tt).map_or(0, |it| it + 1); - return Shift(value); - - // Find the max token id inside a subtree - fn max_id(subtree: &tt::Subtree) -> Option { - subtree - .token_trees - .iter() - .filter_map(|tt| match tt { - tt::TokenTree::Subtree(subtree) => { - let tree_id = max_id(subtree); - match subtree.delimiter { - Some(it) if it.id != tt::TokenId::unspecified() => { - Some(tree_id.map_or(it.id.0, |t| t.max(it.id.0))) - } - _ => tree_id, - } - } - tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) - if ident.id != tt::TokenId::unspecified() => - { - Some(ident.id.0) - } - _ => None, - }) - .max() - } - } - - /// Shift given TokenTree token id - fn shift_all(self, tt: &mut tt::Subtree) { - for t in tt.token_trees.iter_mut() { - match t { - tt::TokenTree::Leaf(leaf) => match leaf { - tt::Leaf::Ident(ident) => ident.id = self.shift(ident.id), - tt::Leaf::Punct(punct) => punct.id = self.shift(punct.id), - tt::Leaf::Literal(lit) => lit.id = self.shift(lit.id), - }, - tt::TokenTree::Subtree(tt) => { - if let Some(it) = tt.delimiter.as_mut() { - it.id = self.shift(it.id); - }; - self.shift_all(tt) - } - } - } - } - - fn shift(self, id: tt::TokenId) -> tt::TokenId { - if id == tt::TokenId::unspecified() { - return id; - } - tt::TokenId(id.0 + self.0) - } - - fn unshift(self, id: tt::TokenId) -> Option { - id.0.checked_sub(self.0).map(tt::TokenId) - } -} - -#[derive(Debug, Eq, PartialEq)] -pub enum Origin { - Def, - Call, -} - -impl MacroRules { - pub fn parse(tt: &tt::Subtree) -> Result { - // Note: this parsing can be implemented using mbe machinery itself, by - // matching against `$($lhs:tt => $rhs:tt);*` pattern, but implementing - // manually seems easier. 
- let mut src = TtIter::new(tt); - let mut rules = Vec::new(); - while src.len() > 0 { - let rule = Rule::parse(&mut src)?; - rules.push(rule); - if let Err(()) = src.expect_char(';') { - if src.len() > 0 { - return Err(ParseError::Expected("expected `:`".to_string())); - } - break; - } - } - - for rule in rules.iter() { - validate(&rule.lhs)?; - } - - Ok(MacroRules { rules, shift: Shift::new(tt) }) - } - - pub fn expand(&self, tt: &tt::Subtree) -> ExpandResult { - // apply shift - let mut tt = tt.clone(); - self.shift.shift_all(&mut tt); - mbe_expander::expand(self, &tt) - } - - pub fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId { - self.shift.shift(id) - } - - pub fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, Origin) { - match self.shift.unshift(id) { - Some(id) => (id, Origin::Call), - None => (id, Origin::Def), - } - } -} - -impl Rule { - fn parse(src: &mut TtIter) -> Result { - let mut lhs = src - .expect_subtree() - .map_err(|()| ParseError::Expected("expected subtree".to_string()))? - .clone(); - lhs.delimiter = None; - src.expect_char('=').map_err(|()| ParseError::Expected("expected `=`".to_string()))?; - src.expect_char('>').map_err(|()| ParseError::Expected("expected `>`".to_string()))?; - let mut rhs = src - .expect_subtree() - .map_err(|()| ParseError::Expected("expected subtree".to_string()))? - .clone(); - rhs.delimiter = None; - Ok(crate::Rule { lhs, rhs }) - } -} - -fn to_parse_error(e: ExpandError) -> ParseError { - let msg = match e { - ExpandError::InvalidRepeat => "invalid repeat".to_string(), - _ => "invalid macro definition".to_string(), - }; - ParseError::Expected(msg) -} - -fn validate(pattern: &tt::Subtree) -> Result<(), ParseError> { - for op in parse_pattern(pattern) { - let op = op.map_err(to_parse_error)?; - - match op { - Op::TokenTree(tt::TokenTree::Subtree(subtree)) => validate(subtree)?, - Op::Repeat { subtree, separator, .. 
} => { - // Checks that no repetition which could match an empty token - // https://github.com/rust-lang/rust/blob/a58b1ed44f5e06976de2bdc4d7dc81c36a96934f/src/librustc_expand/mbe/macro_rules.rs#L558 - - if separator.is_none() { - if parse_pattern(subtree).all(|child_op| { - match child_op.map_err(to_parse_error) { - Ok(Op::Var { kind, .. }) => { - // vis is optional - if kind.map_or(false, |it| it == "vis") { - return true; - } - } - Ok(Op::Repeat { kind, .. }) => { - return matches!( - kind, - parser::RepeatKind::ZeroOrMore | parser::RepeatKind::ZeroOrOne - ) - } - _ => {} - } - false - }) { - return Err(ParseError::RepetitionEmtpyTokenTree); - } - } - validate(subtree)? - } - _ => (), - } - } - Ok(()) -} - -#[derive(Debug)] -pub struct ExpandResult(pub T, pub Option); - -impl ExpandResult { - pub fn ok(t: T) -> ExpandResult { - ExpandResult(t, None) - } - - pub fn only_err(err: ExpandError) -> ExpandResult - where - T: Default, - { - ExpandResult(Default::default(), Some(err)) - } - - pub fn map(self, f: impl FnOnce(T) -> U) -> ExpandResult { - ExpandResult(f(self.0), self.1) - } - - pub fn result(self) -> Result { - self.1.map(Err).unwrap_or(Ok(self.0)) - } -} - -impl From> for ExpandResult { - fn from(result: Result) -> ExpandResult { - result - .map_or_else(|e| ExpandResult(Default::default(), Some(e)), |it| ExpandResult(it, None)) - } -} diff --git a/crates/ra_mbe/src/mbe_expander.rs b/crates/ra_mbe/src/mbe_expander.rs deleted file mode 100644 index b1eacf1242..0000000000 --- a/crates/ra_mbe/src/mbe_expander.rs +++ /dev/null @@ -1,180 +0,0 @@ -//! This module takes a (parsed) definition of `macro_rules` invocation, a -//! `tt::TokenTree` representing an argument of macro invocation, and produces a -//! `tt::TokenTree` for the result of the expansion. 
- -mod matcher; -mod transcriber; - -use ra_syntax::SmolStr; -use rustc_hash::FxHashMap; - -use crate::{ExpandError, ExpandResult}; - -pub(crate) fn expand(rules: &crate::MacroRules, input: &tt::Subtree) -> ExpandResult { - expand_rules(&rules.rules, input) -} - -fn expand_rules(rules: &[crate::Rule], input: &tt::Subtree) -> ExpandResult { - let mut match_: Option<(matcher::Match, &crate::Rule)> = None; - for rule in rules { - let new_match = match matcher::match_(&rule.lhs, input) { - Ok(m) => m, - Err(_e) => { - // error in pattern parsing - continue; - } - }; - if new_match.err.is_none() { - // If we find a rule that applies without errors, we're done. - // Unconditionally returning the transcription here makes the - // `test_repeat_bad_var` test fail. - let ExpandResult(res, transcribe_err) = - transcriber::transcribe(&rule.rhs, &new_match.bindings); - if transcribe_err.is_none() { - return ExpandResult::ok(res); - } - } - // Use the rule if we matched more tokens, or had fewer errors - if let Some((prev_match, _)) = &match_ { - if (new_match.unmatched_tts, new_match.err_count) - < (prev_match.unmatched_tts, prev_match.err_count) - { - match_ = Some((new_match, rule)); - } - } else { - match_ = Some((new_match, rule)); - } - } - if let Some((match_, rule)) = match_ { - // if we got here, there was no match without errors - let ExpandResult(result, transcribe_err) = - transcriber::transcribe(&rule.rhs, &match_.bindings); - ExpandResult(result, match_.err.or(transcribe_err)) - } else { - ExpandResult(tt::Subtree::default(), Some(ExpandError::NoMatchingRule)) - } -} - -/// The actual algorithm for expansion is not too hard, but is pretty tricky. -/// `Bindings` structure is the key to understanding what we are doing here. -/// -/// On the high level, it stores mapping from meta variables to the bits of -/// syntax it should be substituted with. For example, if `$e:expr` is matched -/// with `1 + 1` by macro_rules, the `Binding` will store `$e -> 1 + 1`. 
-/// -/// The tricky bit is dealing with repetitions (`$()*`). Consider this example: -/// -/// ```not_rust -/// macro_rules! foo { -/// ($($ i:ident $($ e:expr),*);*) => { -/// $(fn $ i() { $($ e);*; })* -/// } -/// } -/// foo! { foo 1,2,3; bar 4,5,6 } -/// ``` -/// -/// Here, the `$i` meta variable is matched first with `foo` and then with -/// `bar`, and `$e` is matched in turn with `1`, `2`, `3`, `4`, `5`, `6`. -/// -/// To represent such "multi-mappings", we use a recursive structures: we map -/// variables not to values, but to *lists* of values or other lists (that is, -/// to the trees). -/// -/// For the above example, the bindings would store -/// -/// ```not_rust -/// i -> [foo, bar] -/// e -> [[1, 2, 3], [4, 5, 6]] -/// ``` -/// -/// We construct `Bindings` in the `match_lhs`. The interesting case is -/// `TokenTree::Repeat`, where we use `push_nested` to create the desired -/// nesting structure. -/// -/// The other side of the puzzle is `expand_subtree`, where we use the bindings -/// to substitute meta variables in the output template. When expanding, we -/// maintain a `nesting` stack of indices which tells us which occurrence from -/// the `Bindings` we should take. We push to the stack when we enter a -/// repetition. -/// -/// In other words, `Bindings` is a *multi* mapping from `SmolStr` to -/// `tt::TokenTree`, where the index to select a particular `TokenTree` among -/// many is not a plain `usize`, but an `&[usize]`. -#[derive(Debug, Default)] -struct Bindings { - inner: FxHashMap, -} - -#[derive(Debug)] -enum Binding { - Fragment(Fragment), - Nested(Vec), - Empty, -} - -#[derive(Debug, Clone)] -enum Fragment { - /// token fragments are just copy-pasted into the output - Tokens(tt::TokenTree), - /// Ast fragments are inserted with fake delimiters, so as to make things - /// like `$i * 2` where `$i = 1 + 1` work as expectd. 
- Ast(tt::TokenTree), -} - -#[cfg(test)] -mod tests { - use ra_syntax::{ast, AstNode}; - - use super::*; - use crate::ast_to_token_tree; - - #[test] - fn test_expand_rule() { - assert_err( - "($($i:ident);*) => ($i)", - "foo!{a}", - ExpandError::BindingError(String::from( - "expected simple binding, found nested binding `i`", - )), - ); - - // FIXME: - // Add an err test case for ($($i:ident)) => ($()) - } - - fn assert_err(macro_body: &str, invocation: &str, err: ExpandError) { - assert_eq!(expand_first(&create_rules(&format_macro(macro_body)), invocation).1, Some(err)); - } - - fn format_macro(macro_body: &str) -> String { - format!( - " - macro_rules! foo {{ - {} - }} -", - macro_body - ) - } - - fn create_rules(macro_definition: &str) -> crate::MacroRules { - let source_file = ast::SourceFile::parse(macro_definition).ok().unwrap(); - let macro_definition = - source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); - - let (definition_tt, _) = - ast_to_token_tree(¯o_definition.token_tree().unwrap()).unwrap(); - crate::MacroRules::parse(&definition_tt).unwrap() - } - - fn expand_first(rules: &crate::MacroRules, invocation: &str) -> ExpandResult { - let source_file = ast::SourceFile::parse(invocation).ok().unwrap(); - let macro_invocation = - source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); - - let (invocation_tt, _) = - ast_to_token_tree(¯o_invocation.token_tree().unwrap()).unwrap(); - - expand_rules(&rules.rules, &invocation_tt) - } -} diff --git a/crates/ra_mbe/src/mbe_expander/matcher.rs b/crates/ra_mbe/src/mbe_expander/matcher.rs deleted file mode 100644 index f9e515b811..0000000000 --- a/crates/ra_mbe/src/mbe_expander/matcher.rs +++ /dev/null @@ -1,477 +0,0 @@ -//! 
FIXME: write short doc here - -use crate::{ - mbe_expander::{Binding, Bindings, Fragment}, - parser::{parse_pattern, Op, RepeatKind, Separator}, - subtree_source::SubtreeTokenSource, - tt_iter::TtIter, - ExpandError, -}; - -use super::ExpandResult; -use ra_parser::{FragmentKind::*, TreeSink}; -use ra_syntax::{SmolStr, SyntaxKind}; -use tt::buffer::{Cursor, TokenBuffer}; - -impl Bindings { - fn push_optional(&mut self, name: &SmolStr) { - // FIXME: Do we have a better way to represent an empty token ? - // Insert an empty subtree for empty token - let tt = tt::Subtree::default().into(); - self.inner.insert(name.clone(), Binding::Fragment(Fragment::Tokens(tt))); - } - - fn push_empty(&mut self, name: &SmolStr) { - self.inner.insert(name.clone(), Binding::Empty); - } - - fn push_nested(&mut self, idx: usize, nested: Bindings) -> Result<(), ExpandError> { - for (key, value) in nested.inner { - if !self.inner.contains_key(&key) { - self.inner.insert(key.clone(), Binding::Nested(Vec::new())); - } - match self.inner.get_mut(&key) { - Some(Binding::Nested(it)) => { - // insert empty nested bindings before this one - while it.len() < idx { - it.push(Binding::Nested(vec![])); - } - it.push(value); - } - _ => { - return Err(ExpandError::BindingError(format!( - "could not find binding `{}`", - key - ))); - } - } - } - Ok(()) - } -} - -macro_rules! err { - () => { - ExpandError::BindingError(format!("")) - }; - ($($tt:tt)*) => { - ExpandError::BindingError(format!($($tt)*)) - }; -} - -#[derive(Debug, Default)] -pub(super) struct Match { - pub bindings: Bindings, - /// We currently just keep the first error and count the rest to compare matches. - pub err: Option, - pub err_count: usize, - /// How many top-level token trees were left to match. 
- pub unmatched_tts: usize, -} - -impl Match { - pub fn add_err(&mut self, err: ExpandError) { - let prev_err = self.err.take(); - self.err = prev_err.or(Some(err)); - self.err_count += 1; - } -} - -// General note: These functions have two channels to return errors, a `Result` -// return value and the `&mut Match`. The returned Result is for pattern parsing -// errors; if a branch of the macro definition doesn't parse, it doesn't make -// sense to try using it. Matching errors are added to the `Match`. It might -// make sense to make pattern parsing a separate step? - -pub(super) fn match_(pattern: &tt::Subtree, src: &tt::Subtree) -> Result { - assert!(pattern.delimiter == None); - - let mut res = Match::default(); - let mut src = TtIter::new(src); - - match_subtree(&mut res, pattern, &mut src)?; - - if src.len() > 0 { - res.unmatched_tts += src.len(); - res.add_err(err!("leftover tokens")); - } - - Ok(res) -} - -fn match_subtree( - res: &mut Match, - pattern: &tt::Subtree, - src: &mut TtIter, -) -> Result<(), ExpandError> { - for op in parse_pattern(pattern) { - match op? { - Op::TokenTree(tt::TokenTree::Leaf(lhs)) => { - let rhs = match src.expect_leaf() { - Ok(l) => l, - Err(()) => { - res.add_err(err!("expected leaf: `{}`", lhs)); - continue; - } - }; - match (lhs, rhs) { - ( - tt::Leaf::Punct(tt::Punct { char: lhs, .. }), - tt::Leaf::Punct(tt::Punct { char: rhs, .. }), - ) if lhs == rhs => (), - ( - tt::Leaf::Ident(tt::Ident { text: lhs, .. }), - tt::Leaf::Ident(tt::Ident { text: rhs, .. }), - ) if lhs == rhs => (), - ( - tt::Leaf::Literal(tt::Literal { text: lhs, .. }), - tt::Leaf::Literal(tt::Literal { text: rhs, .. 
}), - ) if lhs == rhs => (), - _ => { - res.add_err(ExpandError::UnexpectedToken); - } - } - } - Op::TokenTree(tt::TokenTree::Subtree(lhs)) => { - let rhs = match src.expect_subtree() { - Ok(s) => s, - Err(()) => { - res.add_err(err!("expected subtree")); - continue; - } - }; - if lhs.delimiter_kind() != rhs.delimiter_kind() { - res.add_err(err!("mismatched delimiter")); - continue; - } - let mut src = TtIter::new(rhs); - match_subtree(res, lhs, &mut src)?; - if src.len() > 0 { - res.add_err(err!("leftover tokens")); - } - } - Op::Var { name, kind } => { - let kind = match kind { - Some(k) => k, - None => { - res.add_err(ExpandError::UnexpectedToken); - continue; - } - }; - let ExpandResult(matched, match_err) = match_meta_var(kind.as_str(), src); - match matched { - Some(fragment) => { - res.bindings.inner.insert(name.clone(), Binding::Fragment(fragment)); - } - None if match_err.is_none() => res.bindings.push_optional(name), - _ => {} - } - if let Some(err) = match_err { - res.add_err(err); - } - } - Op::Repeat { subtree, kind, separator } => { - match_repeat(res, subtree, kind, separator, src)?; - } - } - } - Ok(()) -} - -impl<'a> TtIter<'a> { - fn eat_separator(&mut self, separator: &Separator) -> bool { - let mut fork = self.clone(); - let ok = match separator { - Separator::Ident(lhs) => match fork.expect_ident() { - Ok(rhs) => rhs.text == lhs.text, - _ => false, - }, - Separator::Literal(lhs) => match fork.expect_literal() { - Ok(rhs) => match rhs { - tt::Leaf::Literal(rhs) => rhs.text == lhs.text, - tt::Leaf::Ident(rhs) => rhs.text == lhs.text, - tt::Leaf::Punct(_) => false, - }, - _ => false, - }, - Separator::Puncts(lhss) => lhss.iter().all(|lhs| match fork.expect_punct() { - Ok(rhs) => rhs.char == lhs.char, - _ => false, - }), - }; - if ok { - *self = fork; - } - ok - } - - pub(crate) fn expect_tt(&mut self) -> Result { - match self.peek_n(0) { - Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) if punct.char == '\'' => { - return self.expect_lifetime(); 
- } - _ => (), - } - - let tt = self.next().ok_or_else(|| ())?.clone(); - let punct = match tt { - tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if punct.spacing == tt::Spacing::Joint => { - punct - } - _ => return Ok(tt), - }; - - let (second, third) = match (self.peek_n(0), self.peek_n(1)) { - ( - Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p2))), - Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p3))), - ) if p2.spacing == tt::Spacing::Joint => (p2.char, Some(p3.char)), - (Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p2))), _) => (p2.char, None), - _ => return Ok(tt), - }; - - match (punct.char, second, third) { - ('.', '.', Some('.')) - | ('.', '.', Some('=')) - | ('<', '<', Some('=')) - | ('>', '>', Some('=')) => { - let tt2 = self.next().unwrap().clone(); - let tt3 = self.next().unwrap().clone(); - Ok(tt::Subtree { delimiter: None, token_trees: vec![tt, tt2, tt3] }.into()) - } - ('-', '=', None) - | ('-', '>', None) - | (':', ':', None) - | ('!', '=', None) - | ('.', '.', None) - | ('*', '=', None) - | ('/', '=', None) - | ('&', '&', None) - | ('&', '=', None) - | ('%', '=', None) - | ('^', '=', None) - | ('+', '=', None) - | ('<', '<', None) - | ('<', '=', None) - | ('=', '=', None) - | ('=', '>', None) - | ('>', '=', None) - | ('>', '>', None) - | ('|', '=', None) - | ('|', '|', None) => { - let tt2 = self.next().unwrap().clone(); - Ok(tt::Subtree { delimiter: None, token_trees: vec![tt, tt2] }.into()) - } - _ => Ok(tt), - } - } - - pub(crate) fn expect_lifetime(&mut self) -> Result { - let punct = self.expect_punct()?; - if punct.char != '\'' { - return Err(()); - } - let ident = self.expect_ident()?; - - Ok(tt::Subtree { - delimiter: None, - token_trees: vec![ - tt::Leaf::Punct(punct.clone()).into(), - tt::Leaf::Ident(ident.clone()).into(), - ], - } - .into()) - } - - pub(crate) fn expect_fragment( - &mut self, - fragment_kind: ra_parser::FragmentKind, - ) -> ExpandResult> { - pub(crate) struct OffsetTokenSink<'a> { - pub(crate) cursor: Cursor<'a>, - pub(crate) error: 
bool, - } - - impl<'a> TreeSink for OffsetTokenSink<'a> { - fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) { - if kind == SyntaxKind::LIFETIME { - n_tokens = 2; - } - for _ in 0..n_tokens { - self.cursor = self.cursor.bump_subtree(); - } - } - fn start_node(&mut self, _kind: SyntaxKind) {} - fn finish_node(&mut self) {} - fn error(&mut self, _error: ra_parser::ParseError) { - self.error = true; - } - } - - let buffer = TokenBuffer::new(&self.inner.as_slice()); - let mut src = SubtreeTokenSource::new(&buffer); - let mut sink = OffsetTokenSink { cursor: buffer.begin(), error: false }; - - ra_parser::parse_fragment(&mut src, &mut sink, fragment_kind); - - let mut err = None; - if !sink.cursor.is_root() || sink.error { - err = Some(err!("expected {:?}", fragment_kind)); - } - - let mut curr = buffer.begin(); - let mut res = vec![]; - - if sink.cursor.is_root() { - while curr != sink.cursor { - if let Some(token) = curr.token_tree() { - res.push(token); - } - curr = curr.bump(); - } - } - self.inner = self.inner.as_slice()[res.len()..].iter(); - if res.len() == 0 && err.is_none() { - err = Some(err!("no tokens consumed")); - } - let res = match res.len() { - 1 => Some(res[0].clone()), - 0 => None, - _ => Some(tt::TokenTree::Subtree(tt::Subtree { - delimiter: None, - token_trees: res.into_iter().cloned().collect(), - })), - }; - ExpandResult(res, err) - } - - pub(crate) fn eat_vis(&mut self) -> Option { - let mut fork = self.clone(); - match fork.expect_fragment(Visibility) { - ExpandResult(tt, None) => { - *self = fork; - tt - } - ExpandResult(_, Some(_)) => None, - } - } -} - -pub(super) fn match_repeat( - res: &mut Match, - pattern: &tt::Subtree, - kind: RepeatKind, - separator: Option, - src: &mut TtIter, -) -> Result<(), ExpandError> { - // Dirty hack to make macro-expansion terminate. - // This should be replaced by a propper macro-by-example implementation - let mut limit = 65536; - let mut counter = 0; - - for i in 0.. 
{ - let mut fork = src.clone(); - - if let Some(separator) = &separator { - if i != 0 && !fork.eat_separator(separator) { - break; - } - } - - let mut nested = Match::default(); - match_subtree(&mut nested, pattern, &mut fork)?; - if nested.err.is_none() { - limit -= 1; - if limit == 0 { - log::warn!( - "match_lhs exceeded repeat pattern limit => {:#?}\n{:#?}\n{:#?}\n{:#?}", - pattern, - src, - kind, - separator - ); - break; - } - *src = fork; - - if let Err(err) = res.bindings.push_nested(counter, nested.bindings) { - res.add_err(err); - } - counter += 1; - if counter == 1 { - if let RepeatKind::ZeroOrOne = kind { - break; - } - } - } else { - break; - } - } - - match (kind, counter) { - (RepeatKind::OneOrMore, 0) => { - res.add_err(ExpandError::UnexpectedToken); - } - (_, 0) => { - // Collect all empty variables in subtrees - let mut vars = Vec::new(); - collect_vars(&mut vars, pattern)?; - for var in vars { - res.bindings.push_empty(&var) - } - } - _ => (), - } - Ok(()) -} - -fn match_meta_var(kind: &str, input: &mut TtIter) -> ExpandResult> { - let fragment = match kind { - "path" => Path, - "expr" => Expr, - "ty" => Type, - "pat" => Pattern, - "stmt" => Statement, - "block" => Block, - "meta" => MetaItem, - "item" => Item, - _ => { - let tt_result = match kind { - "ident" => input - .expect_ident() - .map(|ident| Some(tt::Leaf::from(ident.clone()).into())) - .map_err(|()| err!("expected ident")), - "tt" => input.expect_tt().map(Some).map_err(|()| err!()), - "lifetime" => input - .expect_lifetime() - .map(|tt| Some(tt)) - .map_err(|()| err!("expected lifetime")), - "literal" => input - .expect_literal() - .map(|literal| Some(tt::Leaf::from(literal.clone()).into())) - .map_err(|()| err!()), - // `vis` is optional - "vis" => match input.eat_vis() { - Some(vis) => Ok(Some(vis)), - None => Ok(None), - }, - _ => Err(ExpandError::UnexpectedToken), - }; - return tt_result.map(|it| it.map(Fragment::Tokens)).into(); - } - }; - let result = 
input.expect_fragment(fragment); - result.map(|tt| if kind == "expr" { tt.map(Fragment::Ast) } else { tt.map(Fragment::Tokens) }) -} - -fn collect_vars(buf: &mut Vec, pattern: &tt::Subtree) -> Result<(), ExpandError> { - for op in parse_pattern(pattern) { - match op? { - Op::Var { name, .. } => buf.push(name.clone()), - Op::TokenTree(tt::TokenTree::Leaf(_)) => (), - Op::TokenTree(tt::TokenTree::Subtree(subtree)) => collect_vars(buf, subtree)?, - Op::Repeat { subtree, .. } => collect_vars(buf, subtree)?, - } - } - Ok(()) -} diff --git a/crates/ra_mbe/src/mbe_expander/transcriber.rs b/crates/ra_mbe/src/mbe_expander/transcriber.rs deleted file mode 100644 index 7c9bb4d00e..0000000000 --- a/crates/ra_mbe/src/mbe_expander/transcriber.rs +++ /dev/null @@ -1,254 +0,0 @@ -//! Transcriber takes a template, like `fn $ident() {}`, a set of bindings like -//! `$ident => foo`, interpolates variables in the template, to get `fn foo() {}` - -use ra_syntax::SmolStr; - -use super::ExpandResult; -use crate::{ - mbe_expander::{Binding, Bindings, Fragment}, - parser::{parse_template, Op, RepeatKind, Separator}, - ExpandError, -}; - -impl Bindings { - fn contains(&self, name: &str) -> bool { - self.inner.contains_key(name) - } - - fn get(&self, name: &str, nesting: &mut [NestingState]) -> Result<&Fragment, ExpandError> { - let mut b = self.inner.get(name).ok_or_else(|| { - ExpandError::BindingError(format!("could not find binding `{}`", name)) - })?; - for nesting_state in nesting.iter_mut() { - nesting_state.hit = true; - b = match b { - Binding::Fragment(_) => break, - Binding::Nested(bs) => bs.get(nesting_state.idx).ok_or_else(|| { - nesting_state.at_end = true; - ExpandError::BindingError(format!("could not find nested binding `{}`", name)) - })?, - Binding::Empty => { - nesting_state.at_end = true; - return Err(ExpandError::BindingError(format!( - "could not find empty binding `{}`", - name - ))); - } - }; - } - match b { - Binding::Fragment(it) => Ok(it), - Binding::Nested(_) => 
Err(ExpandError::BindingError(format!( - "expected simple binding, found nested binding `{}`", - name - ))), - Binding::Empty => Err(ExpandError::BindingError(format!( - "expected simple binding, found empty binding `{}`", - name - ))), - } - } -} - -pub(super) fn transcribe(template: &tt::Subtree, bindings: &Bindings) -> ExpandResult { - assert!(template.delimiter == None); - let mut ctx = ExpandCtx { bindings: &bindings, nesting: Vec::new() }; - let mut arena: Vec = Vec::new(); - expand_subtree(&mut ctx, template, &mut arena) -} - -#[derive(Debug)] -struct NestingState { - idx: usize, - /// `hit` is currently necessary to tell `expand_repeat` if it should stop - /// because there is no variable in use by the current repetition - hit: bool, - /// `at_end` is currently necessary to tell `expand_repeat` if it should stop - /// because there is no more value avaible for the current repetition - at_end: bool, -} - -#[derive(Debug)] -struct ExpandCtx<'a> { - bindings: &'a Bindings, - nesting: Vec, -} - -fn expand_subtree( - ctx: &mut ExpandCtx, - template: &tt::Subtree, - arena: &mut Vec, -) -> ExpandResult { - // remember how many elements are in the arena now - when returning, we want to drain exactly how many elements we added. 
This way, the recursive uses of the arena get their own "view" of the arena, but will reuse the allocation - let start_elements = arena.len(); - let mut err = None; - for op in parse_template(template) { - let op = match op { - Ok(op) => op, - Err(e) => { - err = Some(e); - break; - } - }; - match op { - Op::TokenTree(tt @ tt::TokenTree::Leaf(..)) => arena.push(tt.clone()), - Op::TokenTree(tt::TokenTree::Subtree(tt)) => { - let ExpandResult(tt, e) = expand_subtree(ctx, tt, arena); - err = err.or(e); - arena.push(tt.into()); - } - Op::Var { name, kind: _ } => { - let ExpandResult(fragment, e) = expand_var(ctx, name); - err = err.or(e); - push_fragment(arena, fragment); - } - Op::Repeat { subtree, kind, separator } => { - let ExpandResult(fragment, e) = expand_repeat(ctx, subtree, kind, separator, arena); - err = err.or(e); - push_fragment(arena, fragment) - } - } - } - // drain the elements added in this instance of expand_subtree - let tts = arena.drain(start_elements..arena.len()).collect(); - ExpandResult(tt::Subtree { delimiter: template.delimiter, token_trees: tts }, err) -} - -fn expand_var(ctx: &mut ExpandCtx, v: &SmolStr) -> ExpandResult { - if v == "crate" { - // We simply produce identifier `$crate` here. And it will be resolved when lowering ast to Path. - let tt = - tt::Leaf::from(tt::Ident { text: "$crate".into(), id: tt::TokenId::unspecified() }) - .into(); - ExpandResult::ok(Fragment::Tokens(tt)) - } else if !ctx.bindings.contains(v) { - // Note that it is possible to have a `$var` inside a macro which is not bound. - // For example: - // ``` - // macro_rules! foo { - // ($a:ident, $b:ident, $c:tt) => { - // macro_rules! 
bar { - // ($bi:ident) => { - // fn $bi() -> u8 {$c} - // } - // } - // } - // ``` - // We just treat it a normal tokens - let tt = tt::Subtree { - delimiter: None, - token_trees: vec![ - tt::Leaf::from(tt::Punct { - char: '$', - spacing: tt::Spacing::Alone, - id: tt::TokenId::unspecified(), - }) - .into(), - tt::Leaf::from(tt::Ident { text: v.clone(), id: tt::TokenId::unspecified() }) - .into(), - ], - } - .into(); - ExpandResult::ok(Fragment::Tokens(tt)) - } else { - ctx.bindings.get(&v, &mut ctx.nesting).map_or_else( - |e| ExpandResult(Fragment::Tokens(tt::TokenTree::empty()), Some(e)), - |b| ExpandResult::ok(b.clone()), - ) - } -} - -fn expand_repeat( - ctx: &mut ExpandCtx, - template: &tt::Subtree, - kind: RepeatKind, - separator: Option, - arena: &mut Vec, -) -> ExpandResult { - let mut buf: Vec = Vec::new(); - ctx.nesting.push(NestingState { idx: 0, at_end: false, hit: false }); - // Dirty hack to make macro-expansion terminate. - // This should be replaced by a proper macro-by-example implementation - let limit = 65536; - let mut has_seps = 0; - let mut counter = 0; - - loop { - let ExpandResult(mut t, e) = expand_subtree(ctx, template, arena); - let nesting_state = ctx.nesting.last_mut().unwrap(); - if nesting_state.at_end || !nesting_state.hit { - break; - } - nesting_state.idx += 1; - nesting_state.hit = false; - - counter += 1; - if counter == limit { - log::warn!( - "expand_tt excced in repeat pattern exceed limit => {:#?}\n{:#?}", - template, - ctx - ); - break; - } - - if e.is_some() { - continue; - } - - t.delimiter = None; - push_subtree(&mut buf, t); - - if let Some(ref sep) = separator { - match sep { - Separator::Ident(ident) => { - has_seps = 1; - buf.push(tt::Leaf::from(ident.clone()).into()); - } - Separator::Literal(lit) => { - has_seps = 1; - buf.push(tt::Leaf::from(lit.clone()).into()); - } - - Separator::Puncts(puncts) => { - has_seps = puncts.len(); - for punct in puncts { - buf.push(tt::Leaf::from(*punct).into()); - } - } - } - } - - if 
RepeatKind::ZeroOrOne == kind { - break; - } - } - - ctx.nesting.pop().unwrap(); - for _ in 0..has_seps { - buf.pop(); - } - - // Check if it is a single token subtree without any delimiter - // e.g {Delimiter:None> ['>'] /Delimiter:None>} - let tt = tt::Subtree { delimiter: None, token_trees: buf }.into(); - - if RepeatKind::OneOrMore == kind && counter == 0 { - return ExpandResult(Fragment::Tokens(tt), Some(ExpandError::UnexpectedToken)); - } - ExpandResult::ok(Fragment::Tokens(tt)) -} - -fn push_fragment(buf: &mut Vec, fragment: Fragment) { - match fragment { - Fragment::Tokens(tt::TokenTree::Subtree(tt)) => push_subtree(buf, tt), - Fragment::Tokens(tt) | Fragment::Ast(tt) => buf.push(tt), - } -} - -fn push_subtree(buf: &mut Vec, tt: tt::Subtree) { - match tt.delimiter { - None => buf.extend(tt.token_trees), - _ => buf.push(tt.into()), - } -} diff --git a/crates/ra_mbe/src/parser.rs b/crates/ra_mbe/src/parser.rs deleted file mode 100644 index 1e5dafbdf9..0000000000 --- a/crates/ra_mbe/src/parser.rs +++ /dev/null @@ -1,184 +0,0 @@ -//! Parser recognizes special macro syntax, `$var` and `$(repeat)*`, in token -//! trees. - -use ra_syntax::SmolStr; -use smallvec::SmallVec; - -use crate::{tt_iter::TtIter, ExpandError}; - -#[derive(Debug)] -pub(crate) enum Op<'a> { - Var { name: &'a SmolStr, kind: Option<&'a SmolStr> }, - Repeat { subtree: &'a tt::Subtree, kind: RepeatKind, separator: Option }, - TokenTree(&'a tt::TokenTree), -} - -#[derive(Clone, Debug, PartialEq, Eq)] -pub(crate) enum RepeatKind { - ZeroOrMore, - OneOrMore, - ZeroOrOne, -} - -#[derive(Clone, Debug, Eq)] -pub(crate) enum Separator { - Literal(tt::Literal), - Ident(tt::Ident), - Puncts(SmallVec<[tt::Punct; 3]>), -} - -// Note that when we compare a Separator, we just care about its textual value. 
-impl PartialEq for Separator { - fn eq(&self, other: &Separator) -> bool { - use Separator::*; - - match (self, other) { - (Ident(ref a), Ident(ref b)) => a.text == b.text, - (Literal(ref a), Literal(ref b)) => a.text == b.text, - (Puncts(ref a), Puncts(ref b)) if a.len() == b.len() => { - let a_iter = a.iter().map(|a| a.char); - let b_iter = b.iter().map(|b| b.char); - a_iter.eq(b_iter) - } - _ => false, - } - } -} - -pub(crate) fn parse_template( - template: &tt::Subtree, -) -> impl Iterator, ExpandError>> { - parse_inner(template, Mode::Template) -} - -pub(crate) fn parse_pattern( - pattern: &tt::Subtree, -) -> impl Iterator, ExpandError>> { - parse_inner(pattern, Mode::Pattern) -} - -#[derive(Clone, Copy)] -enum Mode { - Pattern, - Template, -} - -fn parse_inner(src: &tt::Subtree, mode: Mode) -> impl Iterator, ExpandError>> { - let mut src = TtIter::new(src); - std::iter::from_fn(move || { - let first = src.next()?; - Some(next_op(first, &mut src, mode)) - }) -} - -macro_rules! err { - ($($tt:tt)*) => { - ExpandError::UnexpectedToken - }; -} - -macro_rules! bail { - ($($tt:tt)*) => { - return Err(err!($($tt)*)) - }; -} - -fn next_op<'a>( - first: &'a tt::TokenTree, - src: &mut TtIter<'a>, - mode: Mode, -) -> Result, ExpandError> { - let res = match first { - tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '$', .. })) => { - // Note that the '$' itself is a valid token inside macro_rules. - let second = match src.next() { - None => return Ok(Op::TokenTree(first)), - Some(it) => it, - }; - match second { - tt::TokenTree::Subtree(subtree) => { - let (separator, kind) = parse_repeat(src)?; - Op::Repeat { subtree, separator, kind } - } - tt::TokenTree::Leaf(leaf) => match leaf { - tt::Leaf::Punct(..) 
=> return Err(ExpandError::UnexpectedToken), - tt::Leaf::Ident(ident) => { - let name = &ident.text; - let kind = eat_fragment_kind(src, mode)?; - Op::Var { name, kind } - } - tt::Leaf::Literal(lit) => { - if is_boolean_literal(lit) { - let name = &lit.text; - let kind = eat_fragment_kind(src, mode)?; - Op::Var { name, kind } - } else { - bail!("bad var 2"); - } - } - }, - } - } - tt => Op::TokenTree(tt), - }; - Ok(res) -} - -fn eat_fragment_kind<'a>( - src: &mut TtIter<'a>, - mode: Mode, -) -> Result, ExpandError> { - if let Mode::Pattern = mode { - src.expect_char(':').map_err(|()| err!("bad fragment specifier 1"))?; - let ident = src.expect_ident().map_err(|()| err!("bad fragment specifier 1"))?; - return Ok(Some(&ident.text)); - }; - Ok(None) -} - -fn is_boolean_literal(lit: &tt::Literal) -> bool { - matches!(lit.text.as_str(), "true" | "false") -} - -fn parse_repeat(src: &mut TtIter) -> Result<(Option, RepeatKind), ExpandError> { - let mut separator = Separator::Puncts(SmallVec::new()); - for tt in src { - let tt = match tt { - tt::TokenTree::Leaf(leaf) => leaf, - tt::TokenTree::Subtree(_) => return Err(ExpandError::InvalidRepeat), - }; - let has_sep = match &separator { - Separator::Puncts(puncts) => !puncts.is_empty(), - _ => true, - }; - match tt { - tt::Leaf::Ident(_) | tt::Leaf::Literal(_) if has_sep => { - return Err(ExpandError::InvalidRepeat) - } - tt::Leaf::Ident(ident) => separator = Separator::Ident(ident.clone()), - tt::Leaf::Literal(lit) => separator = Separator::Literal(lit.clone()), - tt::Leaf::Punct(punct) => { - let repeat_kind = match punct.char { - '*' => RepeatKind::ZeroOrMore, - '+' => RepeatKind::OneOrMore, - '?' 
=> RepeatKind::ZeroOrOne, - _ => { - match &mut separator { - Separator::Puncts(puncts) => { - if puncts.len() == 3 { - return Err(ExpandError::InvalidRepeat); - } - puncts.push(punct.clone()) - } - _ => return Err(ExpandError::InvalidRepeat), - } - continue; - } - }; - let separator = if has_sep { Some(separator) } else { None }; - return Ok((separator, repeat_kind)); - } - } - } - Err(ExpandError::InvalidRepeat) -} diff --git a/crates/ra_mbe/src/subtree_source.rs b/crates/ra_mbe/src/subtree_source.rs deleted file mode 100644 index d7866452dd..0000000000 --- a/crates/ra_mbe/src/subtree_source.rs +++ /dev/null @@ -1,197 +0,0 @@ -//! FIXME: write short doc here - -use ra_parser::{Token, TokenSource}; -use ra_syntax::{lex_single_syntax_kind, SmolStr, SyntaxKind, SyntaxKind::*, T}; -use std::cell::{Cell, Ref, RefCell}; -use tt::buffer::{Cursor, TokenBuffer}; - -#[derive(Debug, Clone, Eq, PartialEq)] -struct TtToken { - pub kind: SyntaxKind, - pub is_joint_to_next: bool, - pub text: SmolStr, -} - -pub(crate) struct SubtreeTokenSource<'a> { - cached_cursor: Cell>, - cached: RefCell>>, - curr: (Token, usize), -} - -impl<'a> SubtreeTokenSource<'a> { - // Helper function used in test - #[cfg(test)] - pub fn text(&self) -> SmolStr { - match *self.get(self.curr.1) { - Some(ref tt) => tt.text.clone(), - _ => SmolStr::new(""), - } - } -} - -impl<'a> SubtreeTokenSource<'a> { - pub fn new(buffer: &'a TokenBuffer) -> SubtreeTokenSource<'a> { - let cursor = buffer.begin(); - - let mut res = SubtreeTokenSource { - curr: (Token { kind: EOF, is_jointed_to_next: false }, 0), - cached_cursor: Cell::new(cursor), - cached: RefCell::new(Vec::with_capacity(10)), - }; - res.curr = (res.mk_token(0), 0); - res - } - - fn mk_token(&self, pos: usize) -> Token { - match *self.get(pos) { - Some(ref tt) => Token { kind: tt.kind, is_jointed_to_next: tt.is_joint_to_next }, - None => Token { kind: EOF, is_jointed_to_next: false }, - } - } - - fn get(&self, pos: usize) -> Ref> { - fn is_lifetime(c: 
Cursor) -> Option<(Cursor, SmolStr)> { - let tkn = c.token_tree(); - - if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = tkn { - if punct.char == '\'' { - let next = c.bump(); - if let Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) = next.token_tree() { - let res_cursor = next.bump(); - let text = SmolStr::new("'".to_string() + &ident.to_string()); - - return Some((res_cursor, text)); - } else { - panic!("Next token must be ident : {:#?}", next.token_tree()); - } - } - } - - None - } - - if pos < self.cached.borrow().len() { - return Ref::map(self.cached.borrow(), |c| &c[pos]); - } - - { - let mut cached = self.cached.borrow_mut(); - while pos >= cached.len() { - let cursor = self.cached_cursor.get(); - if cursor.eof() { - cached.push(None); - continue; - } - - if let Some((curr, text)) = is_lifetime(cursor) { - cached.push(Some(TtToken { kind: LIFETIME, is_joint_to_next: false, text })); - self.cached_cursor.set(curr); - continue; - } - - match cursor.token_tree() { - Some(tt::TokenTree::Leaf(leaf)) => { - cached.push(Some(convert_leaf(&leaf))); - self.cached_cursor.set(cursor.bump()); - } - Some(tt::TokenTree::Subtree(subtree)) => { - self.cached_cursor.set(cursor.subtree().unwrap()); - cached.push(Some(convert_delim(subtree.delimiter_kind(), false))); - } - None => { - if let Some(subtree) = cursor.end() { - cached.push(Some(convert_delim(subtree.delimiter_kind(), true))); - self.cached_cursor.set(cursor.bump()); - } - } - } - } - } - - Ref::map(self.cached.borrow(), |c| &c[pos]) - } -} - -impl<'a> TokenSource for SubtreeTokenSource<'a> { - fn current(&self) -> Token { - self.curr.0 - } - - /// Lookahead n token - fn lookahead_nth(&self, n: usize) -> Token { - self.mk_token(self.curr.1 + n) - } - - /// bump cursor to next token - fn bump(&mut self) { - if self.current().kind == EOF { - return; - } - - self.curr = (self.mk_token(self.curr.1 + 1), self.curr.1 + 1); - } - - /// Is the current token a specified keyword? 
- fn is_keyword(&self, kw: &str) -> bool { - match *self.get(self.curr.1) { - Some(ref t) => t.text == *kw, - _ => false, - } - } -} - -fn convert_delim(d: Option, closing: bool) -> TtToken { - let (kinds, texts) = match d { - Some(tt::DelimiterKind::Parenthesis) => ([T!['('], T![')']], "()"), - Some(tt::DelimiterKind::Brace) => ([T!['{'], T!['}']], "{}"), - Some(tt::DelimiterKind::Bracket) => ([T!['['], T![']']], "[]"), - None => ([L_DOLLAR, R_DOLLAR], ""), - }; - - let idx = closing as usize; - let kind = kinds[idx]; - let text = if !texts.is_empty() { &texts[idx..texts.len() - (1 - idx)] } else { "" }; - TtToken { kind, is_joint_to_next: false, text: SmolStr::new(text) } -} - -fn convert_literal(l: &tt::Literal) -> TtToken { - let kind = lex_single_syntax_kind(&l.text) - .map(|(kind, _error)| kind) - .filter(|kind| kind.is_literal()) - .unwrap_or_else(|| panic!("Fail to convert given literal {:#?}", &l)); - - TtToken { kind, is_joint_to_next: false, text: l.text.clone() } -} - -fn convert_ident(ident: &tt::Ident) -> TtToken { - let kind = match ident.text.as_ref() { - "true" => T![true], - "false" => T![false], - i if i.starts_with('\'') => LIFETIME, - _ => SyntaxKind::from_keyword(ident.text.as_str()).unwrap_or(IDENT), - }; - - TtToken { kind, is_joint_to_next: false, text: ident.text.clone() } -} - -fn convert_punct(p: tt::Punct) -> TtToken { - let kind = match SyntaxKind::from_char(p.char) { - None => panic!("{:#?} is not a valid punct", p), - Some(kind) => kind, - }; - - let text = { - let mut buf = [0u8; 4]; - let s: &str = p.char.encode_utf8(&mut buf); - SmolStr::new(s) - }; - TtToken { kind, is_joint_to_next: p.spacing == tt::Spacing::Joint, text } -} - -fn convert_leaf(leaf: &tt::Leaf) -> TtToken { - match leaf { - tt::Leaf::Literal(l) => convert_literal(l), - tt::Leaf::Ident(ident) => convert_ident(ident), - tt::Leaf::Punct(punct) => convert_punct(*punct), - } -} diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs 
deleted file mode 100644 index 5fc48507ff..0000000000 --- a/crates/ra_mbe/src/syntax_bridge.rs +++ /dev/null @@ -1,832 +0,0 @@ -//! FIXME: write short doc here - -use ra_parser::{FragmentKind, ParseError, TreeSink}; -use ra_syntax::{ - ast::{self, make::tokens::doc_comment}, - tokenize, AstToken, Parse, SmolStr, SyntaxKind, - SyntaxKind::*, - SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, Token as RawToken, T, -}; -use rustc_hash::FxHashMap; -use tt::buffer::{Cursor, TokenBuffer}; - -use crate::subtree_source::SubtreeTokenSource; -use crate::ExpandError; - -#[derive(Debug, PartialEq, Eq, Clone, Copy)] -pub enum TokenTextRange { - Token(TextRange), - Delimiter(TextRange, TextRange), -} - -impl TokenTextRange { - pub fn by_kind(self, kind: SyntaxKind) -> Option { - match self { - TokenTextRange::Token(it) => Some(it), - TokenTextRange::Delimiter(open, close) => match kind { - T!['{'] | T!['('] | T!['['] => Some(open), - T!['}'] | T![')'] | T![']'] => Some(close), - _ => None, - }, - } - } -} - -/// Maps `tt::TokenId` to the relative range of the original token. -#[derive(Debug, PartialEq, Eq, Clone, Default)] -pub struct TokenMap { - /// Maps `tt::TokenId` to the *relative* source range. - entries: Vec<(tt::TokenId, TokenTextRange)>, -} - -/// Convert the syntax tree (what user has written) to a `TokenTree` (what macro -/// will consume). -pub fn ast_to_token_tree(ast: &impl ast::AstNode) -> Option<(tt::Subtree, TokenMap)> { - syntax_node_to_token_tree(ast.syntax()) -} - -/// Convert the syntax node to a `TokenTree` (what macro -/// will consume). 
-pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> Option<(tt::Subtree, TokenMap)> { - let global_offset = node.text_range().start(); - let mut c = Convertor::new(node, global_offset); - let subtree = c.go()?; - Some((subtree, c.id_alloc.map)) -} - -// The following items are what `rustc` macro can be parsed into : -// link: https://github.com/rust-lang/rust/blob/9ebf47851a357faa4cd97f4b1dc7835f6376e639/src/libsyntax/ext/expand.rs#L141 -// * Expr(P) -> token_tree_to_expr -// * Pat(P) -> token_tree_to_pat -// * Ty(P) -> token_tree_to_ty -// * Stmts(SmallVec<[ast::Stmt; 1]>) -> token_tree_to_stmts -// * Items(SmallVec<[P; 1]>) -> token_tree_to_items -// -// * TraitItems(SmallVec<[ast::TraitItem; 1]>) -// * AssocItems(SmallVec<[ast::AssocItem; 1]>) -// * ForeignItems(SmallVec<[ast::ForeignItem; 1]> - -pub fn token_tree_to_syntax_node( - tt: &tt::Subtree, - fragment_kind: FragmentKind, -) -> Result<(Parse, TokenMap), ExpandError> { - let tmp; - let tokens = match tt { - tt::Subtree { delimiter: None, token_trees } => token_trees.as_slice(), - _ => { - tmp = [tt.clone().into()]; - &tmp[..] 
- } - }; - let buffer = TokenBuffer::new(&tokens); - let mut token_source = SubtreeTokenSource::new(&buffer); - let mut tree_sink = TtTreeSink::new(buffer.begin()); - ra_parser::parse_fragment(&mut token_source, &mut tree_sink, fragment_kind); - if tree_sink.roots.len() != 1 { - return Err(ExpandError::ConversionError); - } - //FIXME: would be cool to report errors - let (parse, range_map) = tree_sink.finish(); - Ok((parse, range_map)) -} - -/// Convert a string to a `TokenTree` -pub fn parse_to_token_tree(text: &str) -> Option<(tt::Subtree, TokenMap)> { - let (tokens, errors) = tokenize(text); - if !errors.is_empty() { - return None; - } - - let mut conv = RawConvertor { - text, - offset: TextSize::default(), - inner: tokens.iter(), - id_alloc: TokenIdAlloc { - map: Default::default(), - global_offset: TextSize::default(), - next_id: 0, - }, - }; - - let subtree = conv.go()?; - Some((subtree, conv.id_alloc.map)) -} - -impl TokenMap { - pub fn token_by_range(&self, relative_range: TextRange) -> Option { - let &(token_id, _) = self.entries.iter().find(|(_, range)| match range { - TokenTextRange::Token(it) => *it == relative_range, - TokenTextRange::Delimiter(open, close) => { - *open == relative_range || *close == relative_range - } - })?; - Some(token_id) - } - - pub fn range_by_token(&self, token_id: tt::TokenId) -> Option { - let &(_, range) = self.entries.iter().find(|(tid, _)| *tid == token_id)?; - Some(range) - } - - fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) { - self.entries.push((token_id, TokenTextRange::Token(relative_range))); - } - - fn insert_delim( - &mut self, - token_id: tt::TokenId, - open_relative_range: TextRange, - close_relative_range: TextRange, - ) -> usize { - let res = self.entries.len(); - self.entries - .push((token_id, TokenTextRange::Delimiter(open_relative_range, close_relative_range))); - res - } - - fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) { - let (_, token_text_range) = 
&mut self.entries[idx]; - if let TokenTextRange::Delimiter(dim, _) = token_text_range { - *token_text_range = TokenTextRange::Delimiter(*dim, close_relative_range); - } - } - - fn remove_delim(&mut self, idx: usize) { - // FIXME: This could be accidently quadratic - self.entries.remove(idx); - } -} - -/// Returns the textual content of a doc comment block as a quoted string -/// That is, strips leading `///` (or `/**`, etc) -/// and strips the ending `*/` -/// And then quote the string, which is needed to convert to `tt::Literal` -fn doc_comment_text(comment: &ast::Comment) -> SmolStr { - let prefix_len = comment.prefix().len(); - let mut text = &comment.text()[prefix_len..]; - - // Remove ending "*/" - if comment.kind().shape == ast::CommentShape::Block { - text = &text[0..text.len() - 2]; - } - - // Quote the string - // Note that `tt::Literal` expect an escaped string - let text = format!("{:?}", text.escape_default().to_string()); - text.into() -} - -fn convert_doc_comment(token: &ra_syntax::SyntaxToken) -> Option> { - let comment = ast::Comment::cast(token.clone())?; - let doc = comment.kind().doc?; - - // Make `doc="\" Comments\"" - let mut meta_tkns = Vec::new(); - meta_tkns.push(mk_ident("doc")); - meta_tkns.push(mk_punct('=')); - meta_tkns.push(mk_doc_literal(&comment)); - - // Make `#![]` - let mut token_trees = Vec::new(); - token_trees.push(mk_punct('#')); - if let ast::CommentPlacement::Inner = doc { - token_trees.push(mk_punct('!')); - } - token_trees.push(tt::TokenTree::from(tt::Subtree { - delimiter: Some(tt::Delimiter { - kind: tt::DelimiterKind::Bracket, - id: tt::TokenId::unspecified(), - }), - token_trees: meta_tkns, - })); - - return Some(token_trees); - - // Helper functions - fn mk_ident(s: &str) -> tt::TokenTree { - tt::TokenTree::from(tt::Leaf::from(tt::Ident { - text: s.into(), - id: tt::TokenId::unspecified(), - })) - } - - fn mk_punct(c: char) -> tt::TokenTree { - tt::TokenTree::from(tt::Leaf::from(tt::Punct { - char: c, - spacing: 
tt::Spacing::Alone, - id: tt::TokenId::unspecified(), - })) - } - - fn mk_doc_literal(comment: &ast::Comment) -> tt::TokenTree { - let lit = tt::Literal { text: doc_comment_text(comment), id: tt::TokenId::unspecified() }; - - tt::TokenTree::from(tt::Leaf::from(lit)) - } -} - -struct TokenIdAlloc { - map: TokenMap, - global_offset: TextSize, - next_id: u32, -} - -impl TokenIdAlloc { - fn alloc(&mut self, absolute_range: TextRange) -> tt::TokenId { - let relative_range = absolute_range - self.global_offset; - let token_id = tt::TokenId(self.next_id); - self.next_id += 1; - self.map.insert(token_id, relative_range); - token_id - } - - fn open_delim(&mut self, open_abs_range: TextRange) -> (tt::TokenId, usize) { - let token_id = tt::TokenId(self.next_id); - self.next_id += 1; - let idx = self.map.insert_delim( - token_id, - open_abs_range - self.global_offset, - open_abs_range - self.global_offset, - ); - (token_id, idx) - } - - fn close_delim(&mut self, idx: usize, close_abs_range: Option) { - match close_abs_range { - None => { - self.map.remove_delim(idx); - } - Some(close) => { - self.map.update_close_delim(idx, close - self.global_offset); - } - } - } -} - -/// A Raw Token (straightly from lexer) convertor -struct RawConvertor<'a> { - text: &'a str, - offset: TextSize, - id_alloc: TokenIdAlloc, - inner: std::slice::Iter<'a, RawToken>, -} - -trait SrcToken: std::fmt::Debug { - fn kind(&self) -> SyntaxKind; - - fn to_char(&self) -> Option; - - fn to_text(&self) -> SmolStr; -} - -trait TokenConvertor { - type Token: SrcToken; - - fn go(&mut self) -> Option { - let mut subtree = tt::Subtree::default(); - subtree.delimiter = None; - while self.peek().is_some() { - self.collect_leaf(&mut subtree.token_trees); - } - if subtree.token_trees.is_empty() { - return None; - } - if subtree.token_trees.len() == 1 { - if let tt::TokenTree::Subtree(first) = &subtree.token_trees[0] { - return Some(first.clone()); - } - } - Some(subtree) - } - - fn collect_leaf(&mut self, result: 
&mut Vec) { - let (token, range) = match self.bump() { - None => return, - Some(it) => it, - }; - - let k: SyntaxKind = token.kind(); - if k == COMMENT { - if let Some(tokens) = self.convert_doc_comment(&token) { - result.extend(tokens); - } - return; - } - - result.push(if k.is_punct() { - assert_eq!(range.len(), TextSize::of('.')); - let delim = match k { - T!['('] => Some((tt::DelimiterKind::Parenthesis, T![')'])), - T!['{'] => Some((tt::DelimiterKind::Brace, T!['}'])), - T!['['] => Some((tt::DelimiterKind::Bracket, T![']'])), - _ => None, - }; - - if let Some((kind, closed)) = delim { - let mut subtree = tt::Subtree::default(); - let (id, idx) = self.id_alloc().open_delim(range); - subtree.delimiter = Some(tt::Delimiter { kind, id }); - - while self.peek().map(|it| it.kind() != closed).unwrap_or(false) { - self.collect_leaf(&mut subtree.token_trees); - } - let last_range = match self.bump() { - None => { - // For error resilience, we insert an char punct for the opening delim here - self.id_alloc().close_delim(idx, None); - let leaf: tt::Leaf = tt::Punct { - id: self.id_alloc().alloc(range), - char: token.to_char().unwrap(), - spacing: tt::Spacing::Alone, - } - .into(); - result.push(leaf.into()); - result.extend(subtree.token_trees); - return; - } - Some(it) => it.1, - }; - self.id_alloc().close_delim(idx, Some(last_range)); - subtree.into() - } else { - let spacing = match self.peek() { - Some(next) - if next.kind().is_trivia() - || next.kind() == T!['['] - || next.kind() == T!['{'] - || next.kind() == T!['('] => - { - tt::Spacing::Alone - } - Some(next) if next.kind().is_punct() => tt::Spacing::Joint, - _ => tt::Spacing::Alone, - }; - let char = match token.to_char() { - Some(c) => c, - None => { - panic!("Token from lexer must be single char: token = {:#?}", token); - } - }; - tt::Leaf::from(tt::Punct { char, spacing, id: self.id_alloc().alloc(range) }).into() - } - } else { - macro_rules! 
make_leaf { - ($i:ident) => { - tt::$i { id: self.id_alloc().alloc(range), text: token.to_text() }.into() - }; - } - let leaf: tt::Leaf = match k { - T![true] | T![false] => make_leaf!(Ident), - IDENT => make_leaf!(Ident), - k if k.is_keyword() => make_leaf!(Ident), - k if k.is_literal() => make_leaf!(Literal), - LIFETIME => { - let char_unit = TextSize::of('\''); - let r = TextRange::at(range.start(), char_unit); - let apostrophe = tt::Leaf::from(tt::Punct { - char: '\'', - spacing: tt::Spacing::Joint, - id: self.id_alloc().alloc(r), - }); - result.push(apostrophe.into()); - - let r = TextRange::at(range.start() + char_unit, range.len() - char_unit); - let ident = tt::Leaf::from(tt::Ident { - text: SmolStr::new(&token.to_text()[1..]), - id: self.id_alloc().alloc(r), - }); - result.push(ident.into()); - return; - } - _ => return, - }; - - leaf.into() - }); - } - - fn convert_doc_comment(&self, token: &Self::Token) -> Option>; - - fn bump(&mut self) -> Option<(Self::Token, TextRange)>; - - fn peek(&self) -> Option; - - fn id_alloc(&mut self) -> &mut TokenIdAlloc; -} - -impl<'a> SrcToken for (RawToken, &'a str) { - fn kind(&self) -> SyntaxKind { - self.0.kind - } - - fn to_char(&self) -> Option { - self.1.chars().next() - } - - fn to_text(&self) -> SmolStr { - self.1.into() - } -} - -impl RawConvertor<'_> {} - -impl<'a> TokenConvertor for RawConvertor<'a> { - type Token = (RawToken, &'a str); - - fn convert_doc_comment(&self, token: &Self::Token) -> Option> { - convert_doc_comment(&doc_comment(token.1)) - } - - fn bump(&mut self) -> Option<(Self::Token, TextRange)> { - let token = self.inner.next()?; - let range = TextRange::at(self.offset, token.len); - self.offset += token.len; - - Some(((*token, &self.text[range]), range)) - } - - fn peek(&self) -> Option { - let token = self.inner.as_slice().get(0).cloned(); - - token.map(|it| { - let range = TextRange::at(self.offset, it.len); - (it, &self.text[range]) - }) - } - - fn id_alloc(&mut self) -> &mut TokenIdAlloc { - 
&mut self.id_alloc - } -} - -struct Convertor { - id_alloc: TokenIdAlloc, - current: Option, - range: TextRange, - punct_offset: Option<(SyntaxToken, TextSize)>, -} - -impl Convertor { - fn new(node: &SyntaxNode, global_offset: TextSize) -> Convertor { - Convertor { - id_alloc: { TokenIdAlloc { map: TokenMap::default(), global_offset, next_id: 0 } }, - current: node.first_token(), - range: node.text_range(), - punct_offset: None, - } - } -} - -#[derive(Debug)] -enum SynToken { - Ordiniary(SyntaxToken), - Punch(SyntaxToken, TextSize), -} - -impl SynToken { - fn token(&self) -> &SyntaxToken { - match self { - SynToken::Ordiniary(it) => it, - SynToken::Punch(it, _) => it, - } - } -} - -impl SrcToken for SynToken { - fn kind(&self) -> SyntaxKind { - self.token().kind() - } - fn to_char(&self) -> Option { - match self { - SynToken::Ordiniary(_) => None, - SynToken::Punch(it, i) => it.text().chars().nth((*i).into()), - } - } - fn to_text(&self) -> SmolStr { - self.token().text().clone() - } -} - -impl TokenConvertor for Convertor { - type Token = SynToken; - fn convert_doc_comment(&self, token: &Self::Token) -> Option> { - convert_doc_comment(token.token()) - } - - fn bump(&mut self) -> Option<(Self::Token, TextRange)> { - if let Some((punct, offset)) = self.punct_offset.clone() { - if usize::from(offset) + 1 < punct.text().len() { - let offset = offset + TextSize::of('.'); - let range = punct.text_range(); - self.punct_offset = Some((punct.clone(), offset)); - let range = TextRange::at(range.start() + offset, TextSize::of('.')); - return Some((SynToken::Punch(punct, offset), range)); - } - } - - let curr = self.current.clone()?; - if !&self.range.contains_range(curr.text_range()) { - return None; - } - self.current = curr.next_token(); - - let token = if curr.kind().is_punct() { - let range = curr.text_range(); - let range = TextRange::at(range.start(), TextSize::of('.')); - self.punct_offset = Some((curr.clone(), 0.into())); - (SynToken::Punch(curr, 0.into()), range) - 
} else { - self.punct_offset = None; - let range = curr.text_range(); - (SynToken::Ordiniary(curr), range) - }; - - Some(token) - } - - fn peek(&self) -> Option { - if let Some((punct, mut offset)) = self.punct_offset.clone() { - offset = offset + TextSize::of('.'); - if usize::from(offset) < punct.text().len() { - return Some(SynToken::Punch(punct, offset)); - } - } - - let curr = self.current.clone()?; - if !self.range.contains_range(curr.text_range()) { - return None; - } - - let token = if curr.kind().is_punct() { - SynToken::Punch(curr, 0.into()) - } else { - SynToken::Ordiniary(curr) - }; - Some(token) - } - - fn id_alloc(&mut self) -> &mut TokenIdAlloc { - &mut self.id_alloc - } -} - -struct TtTreeSink<'a> { - buf: String, - cursor: Cursor<'a>, - open_delims: FxHashMap, - text_pos: TextSize, - inner: SyntaxTreeBuilder, - token_map: TokenMap, - - // Number of roots - // Use for detect ill-form tree which is not single root - roots: smallvec::SmallVec<[usize; 1]>, -} - -impl<'a> TtTreeSink<'a> { - fn new(cursor: Cursor<'a>) -> Self { - TtTreeSink { - buf: String::new(), - cursor, - open_delims: FxHashMap::default(), - text_pos: 0.into(), - inner: SyntaxTreeBuilder::default(), - roots: smallvec::SmallVec::new(), - token_map: TokenMap::default(), - } - } - - fn finish(self) -> (Parse, TokenMap) { - (self.inner.finish(), self.token_map) - } -} - -fn delim_to_str(d: Option, closing: bool) -> SmolStr { - let texts = match d { - Some(tt::DelimiterKind::Parenthesis) => "()", - Some(tt::DelimiterKind::Brace) => "{}", - Some(tt::DelimiterKind::Bracket) => "[]", - None => return "".into(), - }; - - let idx = closing as usize; - let text = &texts[idx..texts.len() - (1 - idx)]; - text.into() -} - -impl<'a> TreeSink for TtTreeSink<'a> { - fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) { - if kind == L_DOLLAR || kind == R_DOLLAR { - self.cursor = self.cursor.bump_subtree(); - return; - } - if kind == LIFETIME { - n_tokens = 2; - } - - let mut last = self.cursor; - 
for _ in 0..n_tokens { - if self.cursor.eof() { - break; - } - last = self.cursor; - let text: SmolStr = match self.cursor.token_tree() { - Some(tt::TokenTree::Leaf(leaf)) => { - // Mark the range if needed - let (text, id) = match leaf { - tt::Leaf::Ident(ident) => (ident.text.clone(), ident.id), - tt::Leaf::Punct(punct) => { - (SmolStr::new_inline_from_ascii(1, &[punct.char as u8]), punct.id) - } - tt::Leaf::Literal(lit) => (lit.text.clone(), lit.id), - }; - let range = TextRange::at(self.text_pos, TextSize::of(text.as_str())); - self.token_map.insert(id, range); - self.cursor = self.cursor.bump(); - text - } - Some(tt::TokenTree::Subtree(subtree)) => { - self.cursor = self.cursor.subtree().unwrap(); - if let Some(id) = subtree.delimiter.map(|it| it.id) { - self.open_delims.insert(id, self.text_pos); - } - delim_to_str(subtree.delimiter_kind(), false) - } - None => { - if let Some(parent) = self.cursor.end() { - self.cursor = self.cursor.bump(); - if let Some(id) = parent.delimiter.map(|it| it.id) { - if let Some(open_delim) = self.open_delims.get(&id) { - let open_range = TextRange::at(*open_delim, TextSize::of('(')); - let close_range = TextRange::at(self.text_pos, TextSize::of('(')); - self.token_map.insert_delim(id, open_range, close_range); - } - } - delim_to_str(parent.delimiter_kind(), true) - } else { - continue; - } - } - }; - self.buf += &text; - self.text_pos += TextSize::of(text.as_str()); - } - - let text = SmolStr::new(self.buf.as_str()); - self.buf.clear(); - self.inner.token(kind, text); - - // Add whitespace between adjoint puncts - let next = last.bump(); - if let ( - Some(tt::TokenTree::Leaf(tt::Leaf::Punct(curr))), - Some(tt::TokenTree::Leaf(tt::Leaf::Punct(_))), - ) = (last.token_tree(), next.token_tree()) - { - // Note: We always assume the semi-colon would be the last token in - // other parts of RA such that we don't add whitespace here. 
- if curr.spacing == tt::Spacing::Alone && curr.char != ';' { - self.inner.token(WHITESPACE, " ".into()); - self.text_pos += TextSize::of(' '); - } - } - } - - fn start_node(&mut self, kind: SyntaxKind) { - self.inner.start_node(kind); - - match self.roots.last_mut() { - None | Some(0) => self.roots.push(1), - Some(ref mut n) => **n += 1, - }; - } - - fn finish_node(&mut self) { - self.inner.finish_node(); - *self.roots.last_mut().unwrap() -= 1; - } - - fn error(&mut self, error: ParseError) { - self.inner.error(error, self.text_pos) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::tests::parse_macro; - use ra_parser::TokenSource; - use ra_syntax::{ - algo::{insert_children, InsertPosition}, - ast::AstNode, - }; - - #[test] - fn convert_tt_token_source() { - let expansion = parse_macro( - r#" - macro_rules! literals { - ($i:ident) => { - { - let a = 'c'; - let c = 1000; - let f = 12E+99_f64; - let s = "rust1"; - } - } - } - "#, - ) - .expand_tt("literals!(foo);"); - let tts = &[expansion.into()]; - let buffer = tt::buffer::TokenBuffer::new(tts); - let mut tt_src = SubtreeTokenSource::new(&buffer); - let mut tokens = vec![]; - while tt_src.current().kind != EOF { - tokens.push((tt_src.current().kind, tt_src.text())); - tt_src.bump(); - } - - // [${] - // [let] [a] [=] ['c'] [;] - assert_eq!(tokens[2 + 3].1, "'c'"); - assert_eq!(tokens[2 + 3].0, CHAR); - // [let] [c] [=] [1000] [;] - assert_eq!(tokens[2 + 5 + 3].1, "1000"); - assert_eq!(tokens[2 + 5 + 3].0, INT_NUMBER); - // [let] [f] [=] [12E+99_f64] [;] - assert_eq!(tokens[2 + 10 + 3].1, "12E+99_f64"); - assert_eq!(tokens[2 + 10 + 3].0, FLOAT_NUMBER); - - // [let] [s] [=] ["rust1"] [;] - assert_eq!(tokens[2 + 15 + 3].1, "\"rust1\""); - assert_eq!(tokens[2 + 15 + 3].0, STRING); - } - - #[test] - fn stmts_token_trees_to_expr_is_err() { - let expansion = parse_macro( - r#" - macro_rules! 
stmts { - () => { - let a = 0; - let b = 0; - let c = 0; - let d = 0; - } - } - "#, - ) - .expand_tt("stmts!();"); - assert!(token_tree_to_syntax_node(&expansion, FragmentKind::Expr).is_err()); - } - - #[test] - fn test_token_tree_last_child_is_white_space() { - let source_file = ast::SourceFile::parse("f!({} );").ok().unwrap(); - let macro_call = source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); - let token_tree = macro_call.token_tree().unwrap(); - - // Token Tree now is : - // TokenTree - // - T!['('] - // - TokenTree - // - T!['{'] - // - T!['}'] - // - WHITE_SPACE - // - T![')'] - - let rbrace = - token_tree.syntax().descendants_with_tokens().find(|it| it.kind() == T!['}']).unwrap(); - let space = token_tree - .syntax() - .descendants_with_tokens() - .find(|it| it.kind() == SyntaxKind::WHITESPACE) - .unwrap(); - - // reorder th white space, such that the white is inside the inner token-tree. - let token_tree = insert_children( - &rbrace.parent().unwrap(), - InsertPosition::Last, - std::iter::once(space), - ); - - // Token Tree now is : - // TokenTree - // - T!['{'] - // - T!['}'] - // - WHITE_SPACE - let token_tree = ast::TokenTree::cast(token_tree).unwrap(); - let tt = ast_to_token_tree(&token_tree).unwrap().0; - - assert_eq!(tt.delimiter_kind(), Some(tt::DelimiterKind::Brace)); - } - - #[test] - fn test_token_tree_multi_char_punct() { - let source_file = ast::SourceFile::parse("struct Foo { a: x::Y }").ok().unwrap(); - let struct_def = source_file.syntax().descendants().find_map(ast::Struct::cast).unwrap(); - let tt = ast_to_token_tree(&struct_def).unwrap().0; - token_tree_to_syntax_node(&tt, FragmentKind::Item).unwrap(); - } -} diff --git a/crates/ra_mbe/src/tests.rs b/crates/ra_mbe/src/tests.rs deleted file mode 100644 index 286983d60b..0000000000 --- a/crates/ra_mbe/src/tests.rs +++ /dev/null @@ -1,1897 +0,0 @@ -use std::fmt::Write; - -use ra_parser::FragmentKind; -use ra_syntax::{ast, AstNode, NodeOrToken, SyntaxKind::IDENT, 
SyntaxNode, WalkEvent, T}; -use test_utils::assert_eq_text; - -use super::*; - -mod rule_parsing { - use ra_syntax::{ast, AstNode}; - - use super::*; - use crate::ast_to_token_tree; - - #[test] - fn test_valid_arms() { - fn check(macro_body: &str) { - let m = parse_macro_arm(macro_body); - m.unwrap(); - } - - check("($i:ident) => ()"); - check("($($i:ident)*) => ($_)"); - check("($($true:ident)*) => ($true)"); - check("($($false:ident)*) => ($false)"); - check("($) => ($)"); - } - - #[test] - fn test_invalid_arms() { - fn check(macro_body: &str, err: &str) { - let m = parse_macro_arm(macro_body); - assert_eq!(m, Err(ParseError::Expected(String::from(err)))); - } - - check("invalid", "expected subtree"); - - check("$i:ident => ()", "expected subtree"); - check("($i:ident) ()", "expected `=`"); - check("($($i:ident)_) => ()", "invalid repeat"); - - check("($i) => ($i)", "invalid macro definition"); - check("($i:) => ($i)", "invalid macro definition"); - } - - fn parse_macro_arm(arm_definition: &str) -> Result { - let macro_definition = format!(" macro_rules! m {{ {} }} ", arm_definition); - let source_file = ast::SourceFile::parse(¯o_definition).ok().unwrap(); - let macro_definition = - source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); - - let (definition_tt, _) = - ast_to_token_tree(¯o_definition.token_tree().unwrap()).unwrap(); - crate::MacroRules::parse(&definition_tt) - } -} - -// Good first issue (although a slightly challenging one): -// -// * Pick a random test from here -// https://github.com/intellij-rust/intellij-rust/blob/c4e9feee4ad46e7953b1948c112533360b6087bb/src/test/kotlin/org/rust/lang/core/macros/RsMacroExpansionTest.kt -// * Port the test to rust and add it to this module -// * Make it pass :-) - -#[test] -fn test_token_id_shift() { - let expansion = parse_macro( - r#" -macro_rules! 
foobar { - ($e:ident) => { foo bar $e } -} -"#, - ) - .expand_tt("foobar!(baz);"); - - fn get_id(t: &tt::TokenTree) -> Option { - if let tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) = t { - return Some(ident.id.0); - } - None - } - - assert_eq!(expansion.token_trees.len(), 3); - // {($e:ident) => { foo bar $e }} - // 012345 67 8 9 T 12 - assert_eq!(get_id(&expansion.token_trees[0]), Some(9)); - assert_eq!(get_id(&expansion.token_trees[1]), Some(10)); - - // The input args of macro call include parentheses: - // (baz) - // So baz should be 12+1+1 - assert_eq!(get_id(&expansion.token_trees[2]), Some(14)); -} - -#[test] -fn test_token_map() { - let expanded = parse_macro( - r#" -macro_rules! foobar { - ($e:ident) => { fn $e() {} } -} -"#, - ) - .expand_tt("foobar!(baz);"); - - let (node, token_map) = token_tree_to_syntax_node(&expanded, FragmentKind::Items).unwrap(); - let content = node.syntax_node().to_string(); - - let get_text = |id, kind| -> String { - content[token_map.range_by_token(id).unwrap().by_kind(kind).unwrap()].to_string() - }; - - assert_eq!(expanded.token_trees.len(), 4); - // {($e:ident) => { fn $e() {} }} - // 012345 67 8 9 T12 3 - - assert_eq!(get_text(tt::TokenId(9), IDENT), "fn"); - assert_eq!(get_text(tt::TokenId(12), T!['(']), "("); - assert_eq!(get_text(tt::TokenId(13), T!['{']), "{"); -} - -#[test] -fn test_convert_tt() { - parse_macro(r#" -macro_rules! impl_froms { - ($e:ident: $($v:ident),*) => { - $( - impl From<$v> for $e { - fn from(it: $v) -> $e { - $e::$v(it) - } - } - )* - } -} -"#) - .assert_expand_tt( - "impl_froms!(TokenTree: Leaf, Subtree);", - "impl From for TokenTree {fn from (it : Leaf) -> TokenTree {TokenTree ::Leaf (it)}} \ - impl From for TokenTree {fn from (it : Subtree) -> TokenTree {TokenTree ::Subtree (it)}}" - ); -} - -#[test] -fn test_convert_tt2() { - parse_macro( - r#" -macro_rules! 
impl_froms { - ($e:ident: $($v:ident),*) => { - $( - impl From<$v> for $e { - fn from(it: $v) -> $e { - $e::$v(it) - } - } - )* - } -} -"#, - ) - .assert_expand( - "impl_froms!(TokenTree: Leaf, Subtree);", - r#" -SUBTREE $ - IDENT impl 20 - IDENT From 21 - PUNCH < [joint] 22 - IDENT Leaf 53 - PUNCH > [alone] 25 - IDENT for 26 - IDENT TokenTree 51 - SUBTREE {} 29 - IDENT fn 30 - IDENT from 31 - SUBTREE () 32 - IDENT it 33 - PUNCH : [alone] 34 - IDENT Leaf 53 - PUNCH - [joint] 37 - PUNCH > [alone] 38 - IDENT TokenTree 51 - SUBTREE {} 41 - IDENT TokenTree 51 - PUNCH : [joint] 44 - PUNCH : [joint] 45 - IDENT Leaf 53 - SUBTREE () 48 - IDENT it 49 - IDENT impl 20 - IDENT From 21 - PUNCH < [joint] 22 - IDENT Subtree 55 - PUNCH > [alone] 25 - IDENT for 26 - IDENT TokenTree 51 - SUBTREE {} 29 - IDENT fn 30 - IDENT from 31 - SUBTREE () 32 - IDENT it 33 - PUNCH : [alone] 34 - IDENT Subtree 55 - PUNCH - [joint] 37 - PUNCH > [alone] 38 - IDENT TokenTree 51 - SUBTREE {} 41 - IDENT TokenTree 51 - PUNCH : [joint] 44 - PUNCH : [joint] 45 - IDENT Subtree 55 - SUBTREE () 48 - IDENT it 49 -"#, - ); -} - -#[test] -fn test_lifetime_split() { - parse_macro( - r#" -macro_rules! foo { - ($($t:tt)*) => { $($t)*} -} -"#, - ) - .assert_expand( - r#"foo!(static bar: &'static str = "hello";);"#, - r#" -SUBTREE $ - IDENT static 17 - IDENT bar 18 - PUNCH : [alone] 19 - PUNCH & [alone] 20 - PUNCH ' [joint] 21 - IDENT static 22 - IDENT str 23 - PUNCH = [alone] 24 - LITERAL "hello" 25 - PUNCH ; [joint] 26 -"#, - ); -} - -#[test] -fn test_expr_order() { - let expanded = parse_macro( - r#" - macro_rules! foo { - ($ i:expr) => { - fn bar() { $ i * 2; } - } - } -"#, - ) - .expand_items("foo! 
{ 1 + 1}"); - - let dump = format!("{:#?}", expanded); - assert_eq_text!( - dump.trim(), - r#"MACRO_ITEMS@0..15 - FN@0..15 - FN_KW@0..2 "fn" - NAME@2..5 - IDENT@2..5 "bar" - PARAM_LIST@5..7 - L_PAREN@5..6 "(" - R_PAREN@6..7 ")" - BLOCK_EXPR@7..15 - L_CURLY@7..8 "{" - EXPR_STMT@8..14 - BIN_EXPR@8..13 - BIN_EXPR@8..11 - LITERAL@8..9 - INT_NUMBER@8..9 "1" - PLUS@9..10 "+" - LITERAL@10..11 - INT_NUMBER@10..11 "1" - STAR@11..12 "*" - LITERAL@12..13 - INT_NUMBER@12..13 "2" - SEMICOLON@13..14 ";" - R_CURLY@14..15 "}""#, - ); -} - -#[test] -fn test_fail_match_pattern_by_first_token() { - parse_macro( - r#" - macro_rules! foo { - ($ i:ident) => ( - mod $ i {} - ); - (= $ i:ident) => ( - fn $ i() {} - ); - (+ $ i:ident) => ( - struct $ i; - ) - } -"#, - ) - .assert_expand_items("foo! { foo }", "mod foo {}") - .assert_expand_items("foo! { = bar }", "fn bar () {}") - .assert_expand_items("foo! { + Baz }", "struct Baz ;"); -} - -#[test] -fn test_fail_match_pattern_by_last_token() { - parse_macro( - r#" - macro_rules! foo { - ($ i:ident) => ( - mod $ i {} - ); - ($ i:ident =) => ( - fn $ i() {} - ); - ($ i:ident +) => ( - struct $ i; - ) - } -"#, - ) - .assert_expand_items("foo! { foo }", "mod foo {}") - .assert_expand_items("foo! { bar = }", "fn bar () {}") - .assert_expand_items("foo! { Baz + }", "struct Baz ;"); -} - -#[test] -fn test_fail_match_pattern_by_word_token() { - parse_macro( - r#" - macro_rules! foo { - ($ i:ident) => ( - mod $ i {} - ); - (spam $ i:ident) => ( - fn $ i() {} - ); - (eggs $ i:ident) => ( - struct $ i; - ) - } -"#, - ) - .assert_expand_items("foo! { foo }", "mod foo {}") - .assert_expand_items("foo! { spam bar }", "fn bar () {}") - .assert_expand_items("foo! { eggs Baz }", "struct Baz ;"); -} - -#[test] -fn test_match_group_pattern_by_separator_token() { - parse_macro( - r#" - macro_rules! 
foo { - ($ ($ i:ident),*) => ($ ( - mod $ i {} - )*); - ($ ($ i:ident)#*) => ($ ( - fn $ i() {} - )*); - ($ i:ident ,# $ j:ident) => ( - struct $ i; - struct $ j; - ) - } -"#, - ) - .assert_expand_items("foo! { foo, bar }", "mod foo {} mod bar {}") - .assert_expand_items("foo! { foo# bar }", "fn foo () {} fn bar () {}") - .assert_expand_items("foo! { Foo,# Bar }", "struct Foo ; struct Bar ;"); -} - -#[test] -fn test_match_group_pattern_with_multiple_defs() { - parse_macro( - r#" - macro_rules! foo { - ($ ($ i:ident),*) => ( struct Bar { $ ( - fn $ i {} - )*} ); - } -"#, - ) - .assert_expand_items("foo! { foo, bar }", "struct Bar {fn foo {} fn bar {}}"); -} - -#[test] -fn test_match_group_pattern_with_multiple_statement() { - parse_macro( - r#" - macro_rules! foo { - ($ ($ i:ident),*) => ( fn baz { $ ( - $ i (); - )*} ); - } -"#, - ) - .assert_expand_items("foo! { foo, bar }", "fn baz {foo () ; bar () ;}"); -} - -#[test] -fn test_match_group_pattern_with_multiple_statement_without_semi() { - parse_macro( - r#" - macro_rules! foo { - ($ ($ i:ident),*) => ( fn baz { $ ( - $i() - );*} ); - } -"#, - ) - .assert_expand_items("foo! { foo, bar }", "fn baz {foo () ;bar ()}"); -} - -#[test] -fn test_match_group_empty_fixed_token() { - parse_macro( - r#" - macro_rules! foo { - ($ ($ i:ident)* #abc) => ( fn baz { $ ( - $ i (); - )*} ); - } -"#, - ) - .assert_expand_items("foo! {#abc}", "fn baz {}"); -} - -#[test] -fn test_match_group_in_subtree() { - parse_macro( - r#" - macro_rules! foo { - (fn $name:ident {$($i:ident)*} ) => ( fn $name() { $ ( - $ i (); - )*} ); - }"#, - ) - .assert_expand_items("foo! {fn baz {a b} }", "fn baz () {a () ; b () ;}"); -} - -#[test] -fn test_match_group_with_multichar_sep() { - parse_macro( - r#" - macro_rules! foo { - (fn $name:ident {$($i:literal)*} ) => ( fn $name() -> bool { $($i)&&*} ); - }"#, - ) - .assert_expand_items("foo! 
(fn baz {true true} );", "fn baz () -> bool {true &&true}"); -} - -#[test] -fn test_match_group_zero_match() { - parse_macro( - r#" - macro_rules! foo { - ( $($i:ident)* ) => (); - }"#, - ) - .assert_expand_items("foo! ();", ""); -} - -#[test] -fn test_match_group_in_group() { - parse_macro( - r#" - macro_rules! foo { - { $( ( $($i:ident)* ) )* } => ( $( ( $($i)* ) )* ); - }"#, - ) - .assert_expand_items("foo! ( (a b) );", "(a b)"); -} - -#[test] -fn test_expand_to_item_list() { - let tree = parse_macro( - " - macro_rules! structs { - ($($i:ident),*) => { - $(struct $i { field: u32 } )* - } - } - ", - ) - .expand_items("structs!(Foo, Bar);"); - assert_eq!( - format!("{:#?}", tree).trim(), - r#" -MACRO_ITEMS@0..40 - STRUCT@0..20 - STRUCT_KW@0..6 "struct" - NAME@6..9 - IDENT@6..9 "Foo" - RECORD_FIELD_LIST@9..20 - L_CURLY@9..10 "{" - RECORD_FIELD@10..19 - NAME@10..15 - IDENT@10..15 "field" - COLON@15..16 ":" - PATH_TYPE@16..19 - PATH@16..19 - PATH_SEGMENT@16..19 - NAME_REF@16..19 - IDENT@16..19 "u32" - R_CURLY@19..20 "}" - STRUCT@20..40 - STRUCT_KW@20..26 "struct" - NAME@26..29 - IDENT@26..29 "Bar" - RECORD_FIELD_LIST@29..40 - L_CURLY@29..30 "{" - RECORD_FIELD@30..39 - NAME@30..35 - IDENT@30..35 "field" - COLON@35..36 ":" - PATH_TYPE@36..39 - PATH@36..39 - PATH_SEGMENT@36..39 - NAME_REF@36..39 - IDENT@36..39 "u32" - R_CURLY@39..40 "}""# - .trim() - ); -} - -fn to_subtree(tt: &tt::TokenTree) -> &tt::Subtree { - if let tt::TokenTree::Subtree(subtree) = tt { - return &subtree; - } - unreachable!("It is not a subtree"); -} -fn to_literal(tt: &tt::TokenTree) -> &tt::Literal { - if let tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) = tt { - return lit; - } - unreachable!("It is not a literal"); -} - -fn to_punct(tt: &tt::TokenTree) -> &tt::Punct { - if let tt::TokenTree::Leaf(tt::Leaf::Punct(lit)) = tt { - return lit; - } - unreachable!("It is not a Punct"); -} - -#[test] -fn test_expand_literals_to_token_tree() { - let expansion = parse_macro( - r#" - macro_rules! 
literals { - ($i:ident) => { - { - let a = 'c'; - let c = 1000; - let f = 12E+99_f64; - let s = "rust1"; - } - } - } - "#, - ) - .expand_tt("literals!(foo);"); - let stm_tokens = &to_subtree(&expansion.token_trees[0]).token_trees; - - // [let] [a] [=] ['c'] [;] - assert_eq!(to_literal(&stm_tokens[3]).text, "'c'"); - // [let] [c] [=] [1000] [;] - assert_eq!(to_literal(&stm_tokens[5 + 3]).text, "1000"); - // [let] [f] [=] [12E+99_f64] [;] - assert_eq!(to_literal(&stm_tokens[10 + 3]).text, "12E+99_f64"); - // [let] [s] [=] ["rust1"] [;] - assert_eq!(to_literal(&stm_tokens[15 + 3]).text, "\"rust1\""); -} - -#[test] -fn test_attr_to_token_tree() { - let expansion = parse_to_token_tree_by_syntax( - r#" - #[derive(Copy)] - struct Foo; - "#, - ); - - assert_eq!(to_punct(&expansion.token_trees[0]).char, '#'); - assert_eq!( - to_subtree(&expansion.token_trees[1]).delimiter_kind(), - Some(tt::DelimiterKind::Bracket) - ); -} - -#[test] -fn test_two_idents() { - parse_macro( - r#" - macro_rules! foo { - ($ i:ident, $ j:ident) => { - fn foo() { let a = $ i; let b = $j; } - } - } -"#, - ) - .assert_expand_items("foo! { foo, bar }", "fn foo () {let a = foo ; let b = bar ;}"); -} - -#[test] -fn test_tt_to_stmts() { - let stmts = parse_macro( - r#" - macro_rules! 
foo { - () => { - let a = 0; - a = 10 + 1; - a - } - } -"#, - ) - .expand_statements("foo!{}"); - - assert_eq!( - format!("{:#?}", stmts).trim(), - r#"MACRO_STMTS@0..15 - LET_STMT@0..7 - LET_KW@0..3 "let" - IDENT_PAT@3..4 - NAME@3..4 - IDENT@3..4 "a" - EQ@4..5 "=" - LITERAL@5..6 - INT_NUMBER@5..6 "0" - SEMICOLON@6..7 ";" - EXPR_STMT@7..14 - BIN_EXPR@7..13 - PATH_EXPR@7..8 - PATH@7..8 - PATH_SEGMENT@7..8 - NAME_REF@7..8 - IDENT@7..8 "a" - EQ@8..9 "=" - BIN_EXPR@9..13 - LITERAL@9..11 - INT_NUMBER@9..11 "10" - PLUS@11..12 "+" - LITERAL@12..13 - INT_NUMBER@12..13 "1" - SEMICOLON@13..14 ";" - EXPR_STMT@14..15 - PATH_EXPR@14..15 - PATH@14..15 - PATH_SEGMENT@14..15 - NAME_REF@14..15 - IDENT@14..15 "a""#, - ); -} - -#[test] -fn test_match_literal() { - parse_macro( - r#" - macro_rules! foo { - ('(') => { - fn foo() {} - } - } -"#, - ) - .assert_expand_items("foo! ['('];", "fn foo () {}"); -} - -// The following tests are port from intellij-rust directly -// https://github.com/intellij-rust/intellij-rust/blob/c4e9feee4ad46e7953b1948c112533360b6087bb/src/test/kotlin/org/rust/lang/core/macros/RsMacroExpansionTest.kt - -#[test] -fn test_path() { - parse_macro( - r#" - macro_rules! foo { - ($ i:path) => { - fn foo() { let a = $ i; } - } - } -"#, - ) - .assert_expand_items("foo! { foo }", "fn foo () {let a = foo ;}") - .assert_expand_items( - "foo! { bar::::baz:: }", - "fn foo () {let a = bar ::< u8 >:: baz ::< u8 > ;}", - ); -} - -#[test] -fn test_two_paths() { - parse_macro( - r#" - macro_rules! foo { - ($ i:path, $ j:path) => { - fn foo() { let a = $ i; let b = $j; } - } - } -"#, - ) - .assert_expand_items("foo! { foo, bar }", "fn foo () {let a = foo ; let b = bar ;}"); -} - -#[test] -fn test_path_with_path() { - parse_macro( - r#" - macro_rules! foo { - ($ i:path) => { - fn foo() { let a = $ i :: bar; } - } - } -"#, - ) - .assert_expand_items("foo! { foo }", "fn foo () {let a = foo :: bar ;}"); -} - -#[test] -fn test_expr() { - parse_macro( - r#" - macro_rules! 
foo { - ($ i:expr) => { - fn bar() { $ i; } - } - } -"#, - ) - .assert_expand_items( - "foo! { 2 + 2 * baz(3).quux() }", - "fn bar () {2 + 2 * baz (3) . quux () ;}", - ); -} - -#[test] -fn test_last_expr() { - parse_macro( - r#" - macro_rules! vec { - ($($item:expr),*) => { - { - let mut v = Vec::new(); - $( - v.push($item); - )* - v - } - }; - } -"#, - ) - .assert_expand_items( - "vec!(1,2,3);", - "{let mut v = Vec :: new () ; v . push (1) ; v . push (2) ; v . push (3) ; v}", - ); -} - -#[test] -fn test_ty() { - parse_macro( - r#" - macro_rules! foo { - ($ i:ty) => ( - fn bar() -> $ i { unimplemented!() } - ) - } -"#, - ) - .assert_expand_items("foo! { Baz }", "fn bar () -> Baz < u8 > {unimplemented ! ()}"); -} - -#[test] -fn test_ty_with_complex_type() { - parse_macro( - r#" - macro_rules! foo { - ($ i:ty) => ( - fn bar() -> $ i { unimplemented!() } - ) - } -"#, - ) - // Reference lifetime struct with generic type - .assert_expand_items( - "foo! { &'a Baz }", - "fn bar () -> & 'a Baz < u8 > {unimplemented ! ()}", - ) - // extern "Rust" func type - .assert_expand_items( - r#"foo! { extern "Rust" fn() -> Ret }"#, - r#"fn bar () -> extern "Rust" fn () -> Ret {unimplemented ! ()}"#, - ); -} - -#[test] -fn test_pat_() { - parse_macro( - r#" - macro_rules! foo { - ($ i:pat) => { fn foo() { let $ i; } } - } -"#, - ) - .assert_expand_items("foo! { (a, b) }", "fn foo () {let (a , b) ;}"); -} - -#[test] -fn test_stmt() { - parse_macro( - r#" - macro_rules! foo { - ($ i:stmt) => ( - fn bar() { $ i; } - ) - } -"#, - ) - .assert_expand_items("foo! { 2 }", "fn bar () {2 ;}") - .assert_expand_items("foo! { let a = 0 }", "fn bar () {let a = 0 ;}"); -} - -#[test] -fn test_single_item() { - parse_macro( - r#" - macro_rules! foo { - ($ i:item) => ( - $ i - ) - } -"#, - ) - .assert_expand_items("foo! {mod c {}}", "mod c {}"); -} - -#[test] -fn test_all_items() { - parse_macro( - r#" - macro_rules! foo { - ($ ($ i:item)*) => ($ ( - $ i - )*) - } -"#, - ). 
- assert_expand_items( - r#" - foo! { - extern crate a; - mod b; - mod c {} - use d; - const E: i32 = 0; - static F: i32 = 0; - impl G {} - struct H; - enum I { Foo } - trait J {} - fn h() {} - extern {} - type T = u8; - } -"#, - r#"extern crate a ; mod b ; mod c {} use d ; const E : i32 = 0 ; static F : i32 = 0 ; impl G {} struct H ; enum I {Foo} trait J {} fn h () {} extern {} type T = u8 ;"#, - ); -} - -#[test] -fn test_block() { - parse_macro( - r#" - macro_rules! foo { - ($ i:block) => { fn foo() $ i } - } -"#, - ) - .assert_expand_statements("foo! { { 1; } }", "fn foo () {1 ;}"); -} - -#[test] -fn test_meta() { - parse_macro( - r#" - macro_rules! foo { - ($ i:meta) => ( - #[$ i] - fn bar() {} - ) - } -"#, - ) - .assert_expand_items( - r#"foo! { cfg(target_os = "windows") }"#, - r#"# [cfg (target_os = "windows")] fn bar () {}"#, - ); -} - -#[test] -fn test_meta_doc_comments() { - parse_macro( - r#" - macro_rules! foo { - ($(#[$ i:meta])+) => ( - $(#[$ i])+ - fn bar() {} - ) - } -"#, - ). - assert_expand_items( - r#"foo! { - /// Single Line Doc 1 - /** - MultiLines Doc - */ - }"#, - "# [doc = \" Single Line Doc 1\"] # [doc = \"\\\\n MultiLines Doc\\\\n \"] fn bar () {}", - ); -} - -#[test] -fn test_tt_block() { - parse_macro( - r#" - macro_rules! foo { - ($ i:tt) => { fn foo() $ i } - } - "#, - ) - .assert_expand_items(r#"foo! { { 1; } }"#, r#"fn foo () {1 ;}"#); -} - -#[test] -fn test_tt_group() { - parse_macro( - r#" - macro_rules! foo { - ($($ i:tt)*) => { $($ i)* } - } - "#, - ) - .assert_expand_items(r#"foo! { fn foo() {} }"#, r#"fn foo () {}"#); -} - -#[test] -fn test_tt_composite() { - parse_macro( - r#" - macro_rules! foo { - ($i:tt) => { 0 } - } - "#, - ) - .assert_expand_items(r#"foo! { => }"#, r#"0"#); -} - -#[test] -fn test_tt_composite2() { - let node = parse_macro( - r#" - macro_rules! 
foo { - ($($tt:tt)*) => { abs!(=> $($tt)*) } - } - "#, - ) - .expand_items(r#"foo!{#}"#); - - let res = format!("{:#?}", &node); - assert_eq_text!( - res.trim(), - r###"MACRO_ITEMS@0..10 - MACRO_CALL@0..10 - PATH@0..3 - PATH_SEGMENT@0..3 - NAME_REF@0..3 - IDENT@0..3 "abs" - BANG@3..4 "!" - TOKEN_TREE@4..10 - L_PAREN@4..5 "(" - EQ@5..6 "=" - R_ANGLE@6..7 ">" - WHITESPACE@7..8 " " - POUND@8..9 "#" - R_PAREN@9..10 ")""### - ); -} - -#[test] -fn test_lifetime() { - parse_macro( - r#" - macro_rules! foo { - ($ lt:lifetime) => { struct Ref<$ lt>{ s: &$ lt str } } - } -"#, - ) - .assert_expand_items(r#"foo!{'a}"#, r#"struct Ref <'a > {s : &'a str}"#); -} - -#[test] -fn test_literal() { - parse_macro( - r#" - macro_rules! foo { - ($ type:ty $ lit:literal) => { const VALUE: $ type = $ lit;}; - } -"#, - ) - .assert_expand_items(r#"foo!(u8 0);"#, r#"const VALUE : u8 = 0 ;"#); -} - -#[test] -fn test_boolean_is_ident() { - parse_macro( - r#" - macro_rules! foo { - ($lit0:literal, $lit1:literal) => { const VALUE: (bool,bool) = ($lit0,$lit1); }; - } -"#, - ) - .assert_expand( - r#"foo!(true,false);"#, - r#" -SUBTREE $ - IDENT const 14 - IDENT VALUE 15 - PUNCH : [alone] 16 - SUBTREE () 17 - IDENT bool 18 - PUNCH , [alone] 19 - IDENT bool 20 - PUNCH = [alone] 21 - SUBTREE () 22 - IDENT true 29 - PUNCH , [joint] 25 - IDENT false 31 - PUNCH ; [alone] 28 -"#, - ); -} - -#[test] -fn test_vis() { - parse_macro( - r#" - macro_rules! foo { - ($ vis:vis $ name:ident) => { $ vis fn $ name() {}}; - } -"#, - ) - .assert_expand_items(r#"foo!(pub foo);"#, r#"pub fn foo () {}"#) - // test optional cases - .assert_expand_items(r#"foo!(foo);"#, r#"fn foo () {}"#); -} - -#[test] -fn test_inner_macro_rules() { - parse_macro( - r#" -macro_rules! foo { - ($a:ident, $b:ident, $c:tt) => { - - macro_rules! bar { - ($bi:ident) => { - fn $bi() -> u8 {$c} - } - } - - bar!($a); - fn $b() -> u8 {$c} - } -} -"#, - ). - assert_expand_items( - r#"foo!(x,y, 1);"#, - r#"macro_rules ! 
bar {($ bi : ident) => {fn $ bi () -> u8 {1}}} bar ! (x) ; fn y () -> u8 {1}"#, - ); -} - -// The following tests are based on real world situations -#[test] -fn test_vec() { - let fixture = parse_macro( - r#" - macro_rules! vec { - ($($item:expr),*) => { - { - let mut v = Vec::new(); - $( - v.push($item); - )* - v - } - }; -} -"#, - ); - fixture - .assert_expand_items(r#"vec!();"#, r#"{let mut v = Vec :: new () ; v}"#) - .assert_expand_items( - r#"vec![1u32,2];"#, - r#"{let mut v = Vec :: new () ; v . push (1u32) ; v . push (2) ; v}"#, - ); - - let tree = fixture.expand_expr(r#"vec![1u32,2];"#); - - assert_eq!( - format!("{:#?}", tree).trim(), - r#"BLOCK_EXPR@0..45 - L_CURLY@0..1 "{" - LET_STMT@1..20 - LET_KW@1..4 "let" - IDENT_PAT@4..8 - MUT_KW@4..7 "mut" - NAME@7..8 - IDENT@7..8 "v" - EQ@8..9 "=" - CALL_EXPR@9..19 - PATH_EXPR@9..17 - PATH@9..17 - PATH@9..12 - PATH_SEGMENT@9..12 - NAME_REF@9..12 - IDENT@9..12 "Vec" - COLON2@12..14 "::" - PATH_SEGMENT@14..17 - NAME_REF@14..17 - IDENT@14..17 "new" - ARG_LIST@17..19 - L_PAREN@17..18 "(" - R_PAREN@18..19 ")" - SEMICOLON@19..20 ";" - EXPR_STMT@20..33 - METHOD_CALL_EXPR@20..32 - PATH_EXPR@20..21 - PATH@20..21 - PATH_SEGMENT@20..21 - NAME_REF@20..21 - IDENT@20..21 "v" - DOT@21..22 "." - NAME_REF@22..26 - IDENT@22..26 "push" - ARG_LIST@26..32 - L_PAREN@26..27 "(" - LITERAL@27..31 - INT_NUMBER@27..31 "1u32" - R_PAREN@31..32 ")" - SEMICOLON@32..33 ";" - EXPR_STMT@33..43 - METHOD_CALL_EXPR@33..42 - PATH_EXPR@33..34 - PATH@33..34 - PATH_SEGMENT@33..34 - NAME_REF@33..34 - IDENT@33..34 "v" - DOT@34..35 "." 
- NAME_REF@35..39 - IDENT@35..39 "push" - ARG_LIST@39..42 - L_PAREN@39..40 "(" - LITERAL@40..41 - INT_NUMBER@40..41 "2" - R_PAREN@41..42 ")" - SEMICOLON@42..43 ";" - PATH_EXPR@43..44 - PATH@43..44 - PATH_SEGMENT@43..44 - NAME_REF@43..44 - IDENT@43..44 "v" - R_CURLY@44..45 "}""# - ); -} - -#[test] -fn test_winapi_struct() { - // from https://github.com/retep998/winapi-rs/blob/a7ef2bca086aae76cf6c4ce4c2552988ed9798ad/src/macros.rs#L366 - - parse_macro( - r#" -macro_rules! STRUCT { - ($(#[$attrs:meta])* struct $name:ident { - $($field:ident: $ftype:ty,)+ - }) => ( - #[repr(C)] #[derive(Copy)] $(#[$attrs])* - pub struct $name { - $(pub $field: $ftype,)+ - } - impl Clone for $name { - #[inline] - fn clone(&self) -> $name { *self } - } - #[cfg(feature = "impl-default")] - impl Default for $name { - #[inline] - fn default() -> $name { unsafe { $crate::_core::mem::zeroed() } } - } - ); -} -"#, - ). - // from https://github.com/retep998/winapi-rs/blob/a7ef2bca086aae76cf6c4ce4c2552988ed9798ad/src/shared/d3d9caps.rs - assert_expand_items(r#"STRUCT!{struct D3DVSHADERCAPS2_0 {Caps: u8,}}"#, - "# [repr (C)] # [derive (Copy)] pub struct D3DVSHADERCAPS2_0 {pub Caps : u8 ,} impl Clone for D3DVSHADERCAPS2_0 {# [inline] fn clone (& self) -> D3DVSHADERCAPS2_0 {* self}} # [cfg (feature = \"impl-default\")] impl Default for D3DVSHADERCAPS2_0 {# [inline] fn default () -> D3DVSHADERCAPS2_0 {unsafe {$crate :: _core :: mem :: zeroed ()}}}" - ) - .assert_expand_items(r#"STRUCT!{#[cfg_attr(target_arch = "x86", repr(packed))] struct D3DCONTENTPROTECTIONCAPS {Caps : u8 ,}}"#, - "# [repr (C)] # [derive (Copy)] # [cfg_attr (target_arch = \"x86\" , repr (packed))] pub struct D3DCONTENTPROTECTIONCAPS {pub Caps : u8 ,} impl Clone for D3DCONTENTPROTECTIONCAPS {# [inline] fn clone (& self) -> D3DCONTENTPROTECTIONCAPS {* self}} # [cfg (feature = \"impl-default\")] impl Default for D3DCONTENTPROTECTIONCAPS {# [inline] fn default () -> D3DCONTENTPROTECTIONCAPS {unsafe {$crate :: _core :: mem :: zeroed 
()}}}" - ); -} - -#[test] -fn test_int_base() { - parse_macro( - r#" -macro_rules! int_base { - ($Trait:ident for $T:ident as $U:ident -> $Radix:ident) => { - #[stable(feature = "rust1", since = "1.0.0")] - impl fmt::$Trait for $T { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - $Radix.fmt_int(*self as $U, f) - } - } - } -} -"#, - ).assert_expand_items(r#" int_base!{Binary for isize as usize -> Binary}"#, - "# [stable (feature = \"rust1\" , since = \"1.0.0\")] impl fmt ::Binary for isize {fn fmt (& self , f : & mut fmt :: Formatter < \'_ >) -> fmt :: Result {Binary . fmt_int (* self as usize , f)}}" - ); -} - -#[test] -fn test_generate_pattern_iterators() { - // from https://github.com/rust-lang/rust/blob/316a391dcb7d66dc25f1f9a4ec9d368ef7615005/src/libcore/str/mod.rs - parse_macro( - r#" -macro_rules! generate_pattern_iterators { - { double ended; with $(#[$common_stability_attribute:meta])*, - $forward_iterator:ident, - $reverse_iterator:ident, $iterty:ty - } => { - fn foo(){} - } -} -"#, - ).assert_expand_items( - r#"generate_pattern_iterators ! ( double ended ; with # [ stable ( feature = "rust1" , since = "1.0.0" ) ] , Split , RSplit , & 'a str );"#, - "fn foo () {}", - ); -} - -#[test] -fn test_impl_fn_for_zst() { - // from https://github.com/rust-lang/rust/blob/5d20ff4d2718c820632b38c1e49d4de648a9810b/src/libcore/internal_macros.rs - parse_macro( - r#" -macro_rules! impl_fn_for_zst { - { $( $( #[$attr: meta] )* - struct $Name: ident impl$( <$( $lifetime : lifetime ),+> )? Fn = - |$( $arg: ident: $ArgTy: ty ),*| -> $ReturnTy: ty -$body: block; )+ - } => { - $( - $( #[$attr] )* - struct $Name; - - impl $( <$( $lifetime ),+> )? Fn<($( $ArgTy, )*)> for $Name { - #[inline] - extern "rust-call" fn call(&self, ($( $arg, )*): ($( $ArgTy, )*)) -> $ReturnTy { - $body - } - } - - impl $( <$( $lifetime ),+> )? 
FnMut<($( $ArgTy, )*)> for $Name { - #[inline] - extern "rust-call" fn call_mut( - &mut self, - ($( $arg, )*): ($( $ArgTy, )*) - ) -> $ReturnTy { - Fn::call(&*self, ($( $arg, )*)) - } - } - - impl $( <$( $lifetime ),+> )? FnOnce<($( $ArgTy, )*)> for $Name { - type Output = $ReturnTy; - - #[inline] - extern "rust-call" fn call_once(self, ($( $arg, )*): ($( $ArgTy, )*)) -> $ReturnTy { - Fn::call(&self, ($( $arg, )*)) - } - } - )+ -} - } -"#, - ).assert_expand_items(r#" -impl_fn_for_zst ! { - # [ derive ( Clone ) ] - struct CharEscapeDebugContinue impl Fn = | c : char | -> char :: EscapeDebug { - c . escape_debug_ext ( false ) - } ; - - # [ derive ( Clone ) ] - struct CharEscapeUnicode impl Fn = | c : char | -> char :: EscapeUnicode { - c . escape_unicode ( ) - } ; - # [ derive ( Clone ) ] - struct CharEscapeDefault impl Fn = | c : char | -> char :: EscapeDefault { - c . escape_default ( ) - } ; - } -"#, - "# [derive (Clone)] struct CharEscapeDebugContinue ; impl Fn < (char ,) > for CharEscapeDebugContinue {# [inline] extern \"rust-call\" fn call (& self , (c ,) : (char ,)) -> char :: EscapeDebug {{c . escape_debug_ext (false)}}} impl FnMut < (char ,) > for CharEscapeDebugContinue {# [inline] extern \"rust-call\" fn call_mut (& mut self , (c ,) : (char ,)) -> char :: EscapeDebug {Fn :: call (&* self , (c ,))}} impl FnOnce < (char ,) > for CharEscapeDebugContinue {type Output = char :: EscapeDebug ; # [inline] extern \"rust-call\" fn call_once (self , (c ,) : (char ,)) -> char :: EscapeDebug {Fn :: call (& self , (c ,))}} # [derive (Clone)] struct CharEscapeUnicode ; impl Fn < (char ,) > for CharEscapeUnicode {# [inline] extern \"rust-call\" fn call (& self , (c ,) : (char ,)) -> char :: EscapeUnicode {{c . 
escape_unicode ()}}} impl FnMut < (char ,) > for CharEscapeUnicode {# [inline] extern \"rust-call\" fn call_mut (& mut self , (c ,) : (char ,)) -> char :: EscapeUnicode {Fn :: call (&* self , (c ,))}} impl FnOnce < (char ,) > for CharEscapeUnicode {type Output = char :: EscapeUnicode ; # [inline] extern \"rust-call\" fn call_once (self , (c ,) : (char ,)) -> char :: EscapeUnicode {Fn :: call (& self , (c ,))}} # [derive (Clone)] struct CharEscapeDefault ; impl Fn < (char ,) > for CharEscapeDefault {# [inline] extern \"rust-call\" fn call (& self , (c ,) : (char ,)) -> char :: EscapeDefault {{c . escape_default ()}}} impl FnMut < (char ,) > for CharEscapeDefault {# [inline] extern \"rust-call\" fn call_mut (& mut self , (c ,) : (char ,)) -> char :: EscapeDefault {Fn :: call (&* self , (c ,))}} impl FnOnce < (char ,) > for CharEscapeDefault {type Output = char :: EscapeDefault ; # [inline] extern \"rust-call\" fn call_once (self , (c ,) : (char ,)) -> char :: EscapeDefault {Fn :: call (& self , (c ,))}}" - ); -} - -#[test] -fn test_impl_nonzero_fmt() { - // from https://github.com/rust-lang/rust/blob/316a391dcb7d66dc25f1f9a4ec9d368ef7615005/src/libcore/num/mod.rs#L12 - parse_macro( - r#" - macro_rules! impl_nonzero_fmt { - ( #[$stability: meta] ( $( $Trait: ident ),+ ) for $Ty: ident ) => { - fn foo () {} - } - } -"#, - ).assert_expand_items( - r#"impl_nonzero_fmt! { # [stable(feature= "nonzero",since="1.28.0")] (Debug,Display,Binary,Octal,LowerHex,UpperHex) for NonZeroU8}"#, - "fn foo () {}", - ); -} - -#[test] -fn test_cfg_if_items() { - // from https://github.com/rust-lang/rust/blob/33fe1131cadba69d317156847be9a402b89f11bb/src/libstd/macros.rs#L986 - parse_macro( - r#" - macro_rules! __cfg_if_items { - (($($not:meta,)*) ; ) => {}; - (($($not:meta,)*) ; ( ($($m:meta),*) ($($it:item)*) ), $($rest:tt)*) => { - __cfg_if_items! { ($($not,)* $($m,)*) ; $($rest)* } - } - } -"#, - ).assert_expand_items( - r#"__cfg_if_items ! 
{ ( rustdoc , ) ; ( ( ) ( # [ cfg ( any ( target_os = "redox" , unix ) ) ] # [ stable ( feature = "rust1" , since = "1.0.0" ) ] pub use sys :: ext as unix ; # [ cfg ( windows ) ] # [ stable ( feature = "rust1" , since = "1.0.0" ) ] pub use sys :: ext as windows ; # [ cfg ( any ( target_os = "linux" , target_os = "l4re" ) ) ] pub mod linux ; ) ) , }"#, - "__cfg_if_items ! {(rustdoc ,) ;}", - ); -} - -#[test] -fn test_cfg_if_main() { - // from https://github.com/rust-lang/rust/blob/3d211248393686e0f73851fc7548f6605220fbe1/src/libpanic_unwind/macros.rs#L9 - parse_macro( - r#" - macro_rules! cfg_if { - ($( - if #[cfg($($meta:meta),*)] { $($it:item)* } - ) else * else { - $($it2:item)* - }) => { - __cfg_if_items! { - () ; - $( ( ($($meta),*) ($($it)*) ), )* - ( () ($($it2)*) ), - } - }; - - // Internal macro to Apply a cfg attribute to a list of items - (@__apply $m:meta, $($it:item)*) => { - $(#[$m] $it)* - }; - } -"#, - ).assert_expand_items(r#" -cfg_if ! { - if # [ cfg ( target_env = "msvc" ) ] { - // no extra unwinder support needed - } else if # [ cfg ( all ( target_arch = "wasm32" , not ( target_os = "emscripten" ) ) ) ] { - // no unwinder on the system! - } else { - mod libunwind ; - pub use libunwind :: * ; - } - } -"#, - "__cfg_if_items ! {() ; ((target_env = \"msvc\") ()) , ((all (target_arch = \"wasm32\" , not (target_os = \"emscripten\"))) ()) , (() (mod libunwind ; pub use libunwind :: * ;)) ,}" - ).assert_expand_items( - r#" -cfg_if ! { @ __apply cfg ( all ( not ( any ( not ( any ( target_os = "solaris" , target_os = "illumos" ) ) ) ) ) ) , } -"#, - "", - ); -} - -#[test] -fn test_proptest_arbitrary() { - // from https://github.com/AltSysrq/proptest/blob/d1c4b049337d2f75dd6f49a095115f7c532e5129/proptest/src/arbitrary/macros.rs#L16 - parse_macro( - r#" -macro_rules! 
arbitrary { - ([$($bounds : tt)*] $typ: ty, $strat: ty, $params: ty; - $args: ident => $logic: expr) => { - impl<$($bounds)*> $crate::arbitrary::Arbitrary for $typ { - type Parameters = $params; - type Strategy = $strat; - fn arbitrary_with($args: Self::Parameters) -> Self::Strategy { - $logic - } - } - }; - -}"#, - ).assert_expand_items(r#"arbitrary ! ( [ A : Arbitrary ] - Vec < A > , - VecStrategy < A :: Strategy > , - RangedParams1 < A :: Parameters > ; - args => { let product_unpack ! [ range , a ] = args ; vec ( any_with :: < A > ( a ) , range ) } - ) ;"#, - "impl
$crate :: arbitrary :: Arbitrary for Vec < A > {type Parameters = RangedParams1 < A :: Parameters > ; type Strategy = VecStrategy < A :: Strategy > ; fn arbitrary_with (args : Self :: Parameters) -> Self :: Strategy {{let product_unpack ! [range , a] = args ; vec (any_with :: < A > (a) , range)}}}" - ); -} - -#[test] -fn test_old_ridl() { - // This is from winapi 2.8, which do not have a link from github - // - let expanded = parse_macro( - r#" -#[macro_export] -macro_rules! RIDL { - (interface $interface:ident ($vtbl:ident) : $pinterface:ident ($pvtbl:ident) - {$( - fn $method:ident(&mut self $(,$p:ident : $t:ty)*) -> $rtr:ty - ),+} - ) => { - impl $interface { - $(pub unsafe fn $method(&mut self) -> $rtr { - ((*self.lpVtbl).$method)(self $(,$p)*) - })+ - } - }; -}"#, - ).expand_tt(r#" - RIDL!{interface ID3D11Asynchronous(ID3D11AsynchronousVtbl): ID3D11DeviceChild(ID3D11DeviceChildVtbl) { - fn GetDataSize(&mut self) -> UINT - }}"#); - - assert_eq!(expanded.to_string(), "impl ID3D11Asynchronous {pub unsafe fn GetDataSize (& mut self) -> UINT {((* self . lpVtbl) .GetDataSize) (self)}}"); -} - -#[test] -fn test_quick_error() { - let expanded = parse_macro( - r#" -macro_rules! quick_error { - - (SORT [enum $name:ident $( #[$meta:meta] )*] - items [$($( #[$imeta:meta] )* - => $iitem:ident: $imode:tt [$( $ivar:ident: $ityp:ty ),*] - {$( $ifuncs:tt )*} )* ] - buf [ ] - queue [ ] - ) => { - quick_error!(ENUMINITION [enum $name $( #[$meta] )*] - body [] - queue [$( - $( #[$imeta] )* - => - $iitem: $imode [$( $ivar: $ityp ),*] - )*] - ); -}; - -} -"#, - ) - .expand_tt( - r#" -quick_error ! (SORT [enum Wrapped # [derive (Debug)]] items [ - => One : UNIT [] {} - => Two : TUPLE [s :String] {display ("two: {}" , s) from ()} - ] buf [] queue []) ; -"#, - ); - - assert_eq!(expanded.to_string(), "quick_error ! 
(ENUMINITION [enum Wrapped # [derive (Debug)]] body [] queue [=> One : UNIT [] => Two : TUPLE [s : String]]) ;"); -} - -#[test] -fn test_empty_repeat_vars_in_empty_repeat_vars() { - parse_macro( - r#" -macro_rules! delegate_impl { - ([$self_type:ident, $self_wrap:ty, $self_map:ident] - pub trait $name:ident $(: $sup:ident)* $(+ $more_sup:ident)* { - - // "Escaped" associated types. Stripped before making the `trait` - // itself, but forwarded when delegating impls. - $( - @escape [type $assoc_name_ext:ident] - // Associated types. Forwarded. - )* - $( - @section type - $( - $(#[$_assoc_attr:meta])* - type $assoc_name:ident $(: $assoc_bound:ty)*; - )+ - )* - // Methods. Forwarded. Using $self_map!(self) around the self argument. - // Methods must use receiver `self` or explicit type like `self: &Self` - // &self and &mut self are _not_ supported. - $( - @section self - $( - $(#[$_method_attr:meta])* - fn $method_name:ident(self $(: $self_selftype:ty)* $(,$marg:ident : $marg_ty:ty)*) -> $mret:ty; - )+ - )* - // Arbitrary tail that is ignored when forwarding. - $( - @section nodelegate - $($tail:tt)* - )* - }) => { - impl<> $name for $self_wrap where $self_type: $name { - $( - $( - fn $method_name(self $(: $self_selftype)* $(,$marg: $marg_ty)*) -> $mret { - $self_map!(self).$method_name($($marg),*) - } - )* - )* - } - } -} -"#, - ).assert_expand_items( - r#"delegate_impl ! {[G , & 'a mut G , deref] pub trait Data : GraphBase {@ section type type NodeWeight ;}}"#, - "impl <> Data for & \'a mut G where G : Data {}", - ); -} - -#[test] -fn expr_interpolation() { - let expanded = parse_macro( - r#" - macro_rules! 
id { - ($expr:expr) => { - map($expr) - } - } - "#, - ) - .expand_expr("id!(x + foo);"); - - assert_eq!(expanded.to_string(), "map(x+foo)"); -} - -pub(crate) struct MacroFixture { - rules: MacroRules, -} - -impl MacroFixture { - pub(crate) fn expand_tt(&self, invocation: &str) -> tt::Subtree { - self.try_expand_tt(invocation).unwrap() - } - - fn try_expand_tt(&self, invocation: &str) -> Result { - let source_file = ast::SourceFile::parse(invocation).tree(); - let macro_invocation = - source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); - - let (invocation_tt, _) = ast_to_token_tree(¯o_invocation.token_tree().unwrap()) - .ok_or_else(|| ExpandError::ConversionError)?; - - self.rules.expand(&invocation_tt).result() - } - - fn assert_expand_err(&self, invocation: &str, err: &ExpandError) { - assert_eq!(self.try_expand_tt(invocation).as_ref(), Err(err)); - } - - fn expand_items(&self, invocation: &str) -> SyntaxNode { - let expanded = self.expand_tt(invocation); - token_tree_to_syntax_node(&expanded, FragmentKind::Items).unwrap().0.syntax_node() - } - - fn expand_statements(&self, invocation: &str) -> SyntaxNode { - let expanded = self.expand_tt(invocation); - token_tree_to_syntax_node(&expanded, FragmentKind::Statements).unwrap().0.syntax_node() - } - - fn expand_expr(&self, invocation: &str) -> SyntaxNode { - let expanded = self.expand_tt(invocation); - token_tree_to_syntax_node(&expanded, FragmentKind::Expr).unwrap().0.syntax_node() - } - - fn assert_expand_tt(&self, invocation: &str, expected: &str) { - let expansion = self.expand_tt(invocation); - assert_eq!(expansion.to_string(), expected); - } - - fn assert_expand(&self, invocation: &str, expected: &str) { - let expansion = self.expand_tt(invocation); - let actual = format!("{:?}", expansion); - test_utils::assert_eq_text!(&actual.trim(), &expected.trim()); - } - - fn assert_expand_items(&self, invocation: &str, expected: &str) -> &MacroFixture { - self.assert_expansion(FragmentKind::Items, 
invocation, expected); - self - } - - fn assert_expand_statements(&self, invocation: &str, expected: &str) -> &MacroFixture { - self.assert_expansion(FragmentKind::Statements, invocation, expected); - self - } - - fn assert_expansion(&self, kind: FragmentKind, invocation: &str, expected: &str) { - let expanded = self.expand_tt(invocation); - assert_eq!(expanded.to_string(), expected); - - let expected = expected.replace("$crate", "C_C__C"); - - // wrap the given text to a macro call - let expected = { - let wrapped = format!("wrap_macro!( {} )", expected); - let wrapped = ast::SourceFile::parse(&wrapped); - let wrapped = - wrapped.tree().syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - let mut wrapped = ast_to_token_tree(&wrapped).unwrap().0; - wrapped.delimiter = None; - wrapped - }; - - let expanded_tree = token_tree_to_syntax_node(&expanded, kind).unwrap().0.syntax_node(); - let expanded_tree = debug_dump_ignore_spaces(&expanded_tree).trim().to_string(); - - let expected_tree = token_tree_to_syntax_node(&expected, kind).unwrap().0.syntax_node(); - let expected_tree = debug_dump_ignore_spaces(&expected_tree).trim().to_string(); - - let expected_tree = expected_tree.replace("C_C__C", "$crate"); - assert_eq!( - expanded_tree, expected_tree, - "\nleft:\n{}\nright:\n{}", - expanded_tree, expected_tree, - ); - } -} - -fn parse_macro_to_tt(ra_fixture: &str) -> tt::Subtree { - let source_file = ast::SourceFile::parse(ra_fixture).ok().unwrap(); - let macro_definition = - source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); - - let (definition_tt, _) = ast_to_token_tree(¯o_definition.token_tree().unwrap()).unwrap(); - - let parsed = parse_to_token_tree( - &ra_fixture[macro_definition.token_tree().unwrap().syntax().text_range()], - ) - .unwrap() - .0; - assert_eq!(definition_tt, parsed); - - definition_tt -} - -pub(crate) fn parse_macro(ra_fixture: &str) -> MacroFixture { - let definition_tt = parse_macro_to_tt(ra_fixture); - let 
rules = MacroRules::parse(&definition_tt).unwrap(); - MacroFixture { rules } -} - -pub(crate) fn parse_macro_error(ra_fixture: &str) -> ParseError { - let definition_tt = parse_macro_to_tt(ra_fixture); - - match MacroRules::parse(&definition_tt) { - Ok(_) => panic!("Expect error"), - Err(err) => err, - } -} - -pub(crate) fn parse_to_token_tree_by_syntax(ra_fixture: &str) -> tt::Subtree { - let source_file = ast::SourceFile::parse(ra_fixture).ok().unwrap(); - let tt = syntax_node_to_token_tree(source_file.syntax()).unwrap().0; - - let parsed = parse_to_token_tree(ra_fixture).unwrap().0; - assert_eq!(tt, parsed); - - parsed -} - -fn debug_dump_ignore_spaces(node: &ra_syntax::SyntaxNode) -> String { - let mut level = 0; - let mut buf = String::new(); - macro_rules! indent { - () => { - for _ in 0..level { - buf.push_str(" "); - } - }; - } - - for event in node.preorder_with_tokens() { - match event { - WalkEvent::Enter(element) => { - match element { - NodeOrToken::Node(node) => { - indent!(); - writeln!(buf, "{:?}", node.kind()).unwrap(); - } - NodeOrToken::Token(token) => match token.kind() { - ra_syntax::SyntaxKind::WHITESPACE => {} - _ => { - indent!(); - writeln!(buf, "{:?}", token.kind()).unwrap(); - } - }, - } - level += 1; - } - WalkEvent::Leave(_) => level -= 1, - } - } - - buf -} - -#[test] -fn test_issue_2520() { - let macro_fixture = parse_macro( - r#" - macro_rules! my_macro { - { - ( $( - $( [] $sname:ident : $stype:ty )? - $( [$expr:expr] $nname:ident : $ntype:ty )? - ),* ) - } => { - Test { - $( - $( $sname, )? - )* - } - }; - } - "#, - ); - - macro_fixture.assert_expand_items( - r#"my_macro ! { - ([] p1 : u32 , [|_| S0K0] s : S0K0 , [] k0 : i32) - }"#, - "Test {p1 , k0 ,}", - ); -} - -#[test] -fn test_issue_3861() { - let macro_fixture = parse_macro( - r#" - macro_rules! 
rgb_color { - ($p:expr, $t: ty) => { - pub fn new() { - let _ = 0 as $t << $p; - } - }; - } - "#, - ); - - macro_fixture.expand_items(r#"rgb_color!(8 + 8, u32);"#); -} - -#[test] -fn test_repeat_bad_var() { - // FIXME: the second rule of the macro should be removed and an error about - // `$( $c )+` raised - parse_macro( - r#" - macro_rules! foo { - ($( $b:ident )+) => { - $( $c )+ - }; - ($( $b:ident )+) => { - $( $b )+ - } - } - "#, - ) - .assert_expand_items("foo!(b0 b1);", "b0 b1"); -} - -#[test] -fn test_no_space_after_semi_colon() { - let expanded = parse_macro( - r#" - macro_rules! with_std { ($($i:item)*) => ($(#[cfg(feature = "std")]$i)*) } - "#, - ) - .expand_items(r#"with_std! {mod m;mod f;}"#); - - let dump = format!("{:#?}", expanded); - assert_eq_text!( - dump.trim(), - r###"MACRO_ITEMS@0..52 - MODULE@0..26 - ATTR@0..21 - POUND@0..1 "#" - L_BRACK@1..2 "[" - PATH@2..5 - PATH_SEGMENT@2..5 - NAME_REF@2..5 - IDENT@2..5 "cfg" - TOKEN_TREE@5..20 - L_PAREN@5..6 "(" - IDENT@6..13 "feature" - EQ@13..14 "=" - STRING@14..19 "\"std\"" - R_PAREN@19..20 ")" - R_BRACK@20..21 "]" - MOD_KW@21..24 "mod" - NAME@24..25 - IDENT@24..25 "m" - SEMICOLON@25..26 ";" - MODULE@26..52 - ATTR@26..47 - POUND@26..27 "#" - L_BRACK@27..28 "[" - PATH@28..31 - PATH_SEGMENT@28..31 - NAME_REF@28..31 - IDENT@28..31 "cfg" - TOKEN_TREE@31..46 - L_PAREN@31..32 "(" - IDENT@32..39 "feature" - EQ@39..40 "=" - STRING@40..45 "\"std\"" - R_PAREN@45..46 ")" - R_BRACK@46..47 "]" - MOD_KW@47..50 "mod" - NAME@50..51 - IDENT@50..51 "f" - SEMICOLON@51..52 ";""###, - ); -} - -// https://github.com/rust-lang/rust/blob/master/src/test/ui/issues/issue-57597.rs -#[test] -fn test_rustc_issue_57597() { - fn test_error(fixture: &str) { - assert_eq!(parse_macro_error(fixture), ParseError::RepetitionEmtpyTokenTree); - } - - test_error("macro_rules! foo { ($($($i:ident)?)+) => {}; }"); - test_error("macro_rules! foo { ($($($i:ident)?)*) => {}; }"); - test_error("macro_rules! foo { ($($($i:ident)?)?) 
=> {}; }"); - test_error("macro_rules! foo { ($($($($i:ident)?)?)?) => {}; }"); - test_error("macro_rules! foo { ($($($($i:ident)*)?)?) => {}; }"); - test_error("macro_rules! foo { ($($($($i:ident)?)*)?) => {}; }"); - test_error("macro_rules! foo { ($($($($i:ident)?)?)*) => {}; }"); - test_error("macro_rules! foo { ($($($($i:ident)*)*)?) => {}; }"); - test_error("macro_rules! foo { ($($($($i:ident)?)*)*) => {}; }"); - test_error("macro_rules! foo { ($($($($i:ident)?)*)+) => {}; }"); - test_error("macro_rules! foo { ($($($($i:ident)+)?)*) => {}; }"); - test_error("macro_rules! foo { ($($($($i:ident)+)*)?) => {}; }"); -} - -#[test] -fn test_expand_bad_literal() { - parse_macro( - r#" - macro_rules! foo { ($i:literal) => {}; } - "#, - ) - .assert_expand_err(r#"foo!(&k");"#, &ExpandError::BindingError("".into())); -} - -#[test] -fn test_empty_comments() { - parse_macro( - r#" - macro_rules! one_arg_macro { ($fmt:expr) => (); } - "#, - ) - .assert_expand_err( - r#"one_arg_macro!(/**/)"#, - &ExpandError::BindingError("expected Expr".into()), - ); -} diff --git a/crates/ra_parser/Cargo.toml b/crates/ra_parser/Cargo.toml deleted file mode 100644 index 72ec3e4d9a..0000000000 --- a/crates/ra_parser/Cargo.toml +++ /dev/null @@ -1,13 +0,0 @@ -[package] -edition = "2018" -name = "ra_parser" -version = "0.1.0" -authors = ["rust-analyzer developers"] -publish = false -license = "MIT OR Apache-2.0" - -[lib] -doctest = false - -[dependencies] -drop_bomb = "0.1.4" diff --git a/crates/ra_parser/src/grammar.rs b/crates/ra_parser/src/grammar.rs deleted file mode 100644 index c2e1d701e2..0000000000 --- a/crates/ra_parser/src/grammar.rs +++ /dev/null @@ -1,293 +0,0 @@ -//! This is the actual "grammar" of the Rust language. -//! -//! Each function in this module and its children corresponds -//! to a production of the formal grammar. Submodules roughly -//! correspond to different *areas* of the grammar. By convention, -//! each submodule starts with `use super::*` import and exports -//! 
"public" productions via `pub(super)`. -//! -//! See docs for `Parser` to learn about API, available to the grammar, -//! and see docs for `Event` to learn how this actually manages to -//! produce parse trees. -//! -//! Code in this module also contains inline tests, which start with -//! `// test name-of-the-test` comment and look like this: -//! -//! ``` -//! // test function_with_zero_parameters -//! // fn foo() {} -//! ``` -//! -//! After adding a new inline-test, run `cargo xtask codegen` to -//! extract it as a standalone text-fixture into -//! `crates/ra_syntax/test_data/parser/`, and run `cargo test` once to -//! create the "gold" value. -//! -//! Coding convention: rules like `where_clause` always produce either a -//! node or an error, rules like `opt_where_clause` may produce nothing. -//! Non-opt rules typically start with `assert!(p.at(FIRST_TOKEN))`, the -//! caller is responsible for branching on the first token. -mod attributes; -mod expressions; -mod items; -mod params; -mod paths; -mod patterns; -mod type_args; -mod type_params; -mod types; - -use crate::{ - parser::{CompletedMarker, Marker, Parser}, - SyntaxKind::{self, *}, - TokenSet, -}; - -pub(crate) fn root(p: &mut Parser) { - let m = p.start(); - p.eat(SHEBANG); - items::mod_contents(p, false); - m.complete(p, SOURCE_FILE); -} - -/// Various pieces of syntax that can be parsed by macros by example -pub(crate) mod fragments { - use super::*; - - pub(crate) use super::{ - expressions::block_expr, paths::type_path as path, patterns::pattern, types::type_, - }; - - pub(crate) fn expr(p: &mut Parser) { - let _ = expressions::expr(p); - } - - pub(crate) fn stmt(p: &mut Parser) { - expressions::stmt(p, expressions::StmtWithSemi::No) - } - - pub(crate) fn opt_visibility(p: &mut Parser) { - let _ = super::opt_visibility(p); - } - - // Parse a meta item , which excluded [], e.g : #[ MetaItem ] - pub(crate) fn meta_item(p: &mut Parser) { - fn is_delimiter(p: &mut Parser) -> bool { - 
matches!(p.current(), T!['{'] | T!['('] | T!['[']) - } - - if is_delimiter(p) { - items::token_tree(p); - return; - } - - let m = p.start(); - while !p.at(EOF) { - if is_delimiter(p) { - items::token_tree(p); - break; - } else { - // https://doc.rust-lang.org/reference/attributes.html - // https://doc.rust-lang.org/reference/paths.html#simple-paths - // The start of an meta must be a simple path - match p.current() { - IDENT | T![::] | T![super] | T![self] | T![crate] => p.bump_any(), - T![=] => { - p.bump_any(); - match p.current() { - c if c.is_literal() => p.bump_any(), - T![true] | T![false] => p.bump_any(), - _ => {} - } - break; - } - _ => break, - } - } - } - - m.complete(p, TOKEN_TREE); - } - - pub(crate) fn item(p: &mut Parser) { - items::item_or_macro(p, true, items::ItemFlavor::Mod) - } - - pub(crate) fn macro_items(p: &mut Parser) { - let m = p.start(); - items::mod_contents(p, false); - m.complete(p, MACRO_ITEMS); - } - - pub(crate) fn macro_stmts(p: &mut Parser) { - let m = p.start(); - - while !p.at(EOF) { - if p.at(T![;]) { - p.bump(T![;]); - continue; - } - - expressions::stmt(p, expressions::StmtWithSemi::Optional); - } - - m.complete(p, MACRO_STMTS); - } -} - -pub(crate) fn reparser( - node: SyntaxKind, - first_child: Option, - parent: Option, -) -> Option { - let res = match node { - BLOCK_EXPR => expressions::block_expr, - RECORD_FIELD_LIST => items::record_field_def_list, - RECORD_EXPR_FIELD_LIST => items::record_field_list, - VARIANT_LIST => items::enum_variant_list, - MATCH_ARM_LIST => items::match_arm_list, - USE_TREE_LIST => items::use_tree_list, - EXTERN_ITEM_LIST => items::extern_item_list, - TOKEN_TREE if first_child? == T!['{'] => items::token_tree, - ASSOC_ITEM_LIST => match parent? 
{ - IMPL => items::impl_item_list, - TRAIT => items::trait_item_list, - _ => return None, - }, - ITEM_LIST => items::mod_item_list, - _ => return None, - }; - Some(res) -} - -#[derive(Clone, Copy, PartialEq, Eq)] -enum BlockLike { - Block, - NotBlock, -} - -impl BlockLike { - fn is_block(self) -> bool { - self == BlockLike::Block - } -} - -fn opt_visibility(p: &mut Parser) -> bool { - match p.current() { - T![pub] => { - let m = p.start(); - p.bump(T![pub]); - if p.at(T!['(']) { - match p.nth(1) { - // test crate_visibility - // pub(crate) struct S; - // pub(self) struct S; - // pub(self) struct S; - // pub(self) struct S; - T![crate] | T![self] | T![super] => { - p.bump_any(); - p.bump_any(); - p.expect(T![')']); - } - T![in] => { - p.bump_any(); - p.bump_any(); - paths::use_path(p); - p.expect(T![')']); - } - _ => (), - } - } - m.complete(p, VISIBILITY); - } - // test crate_keyword_vis - // crate fn main() { } - // struct S { crate field: u32 } - // struct T(crate u32); - // - // test crate_keyword_path - // fn foo() { crate::foo(); } - T![crate] if !p.nth_at(1, T![::]) => { - let m = p.start(); - p.bump(T![crate]); - m.complete(p, VISIBILITY); - } - _ => return false, - } - true -} - -fn opt_alias(p: &mut Parser) { - if p.at(T![as]) { - let m = p.start(); - p.bump(T![as]); - if !p.eat(T![_]) { - name(p); - } - m.complete(p, RENAME); - } -} - -fn abi(p: &mut Parser) { - assert!(p.at(T![extern])); - let abi = p.start(); - p.bump(T![extern]); - match p.current() { - STRING | RAW_STRING => p.bump_any(), - _ => (), - } - abi.complete(p, ABI); -} - -fn opt_fn_ret_type(p: &mut Parser) -> bool { - if p.at(T![->]) { - let m = p.start(); - p.bump(T![->]); - types::type_no_bounds(p); - m.complete(p, RET_TYPE); - true - } else { - false - } -} - -fn name_r(p: &mut Parser, recovery: TokenSet) { - if p.at(IDENT) { - let m = p.start(); - p.bump(IDENT); - m.complete(p, NAME); - } else { - p.err_recover("expected a name", recovery); - } -} - -fn name(p: &mut Parser) { - 
name_r(p, TokenSet::EMPTY) -} - -fn name_ref(p: &mut Parser) { - if p.at(IDENT) { - let m = p.start(); - p.bump(IDENT); - m.complete(p, NAME_REF); - } else { - p.err_and_bump("expected identifier"); - } -} - -fn name_ref_or_index(p: &mut Parser) { - assert!(p.at(IDENT) || p.at(INT_NUMBER)); - let m = p.start(); - p.bump_any(); - m.complete(p, NAME_REF); -} - -fn error_block(p: &mut Parser, message: &str) { - assert!(p.at(T!['{'])); - let m = p.start(); - p.error(message); - p.bump(T!['{']); - expressions::expr_block_contents(p); - p.eat(T!['}']); - m.complete(p, ERROR); -} diff --git a/crates/ra_parser/src/grammar/attributes.rs b/crates/ra_parser/src/grammar/attributes.rs deleted file mode 100644 index f3158ade30..0000000000 --- a/crates/ra_parser/src/grammar/attributes.rs +++ /dev/null @@ -1,48 +0,0 @@ -//! FIXME: write short doc here - -use super::*; - -pub(super) fn inner_attributes(p: &mut Parser) { - while p.at(T![#]) && p.nth(1) == T![!] { - attribute(p, true) - } -} - -pub(super) fn outer_attributes(p: &mut Parser) { - while p.at(T![#]) { - attribute(p, false) - } -} - -fn attribute(p: &mut Parser, inner: bool) { - let attr = p.start(); - assert!(p.at(T![#])); - p.bump(T![#]); - - if inner { - assert!(p.at(T![!])); - p.bump(T![!]); - } - - if p.eat(T!['[']) { - paths::use_path(p); - - match p.current() { - T![=] => { - p.bump(T![=]); - if expressions::literal(p).is_none() { - p.error("expected literal"); - } - } - T!['('] | T!['['] | T!['{'] => items::token_tree(p), - _ => {} - } - - if !p.eat(T![']']) { - p.error("expected `]`"); - } - } else { - p.error("expected `[`"); - } - attr.complete(p, ATTR); -} diff --git a/crates/ra_parser/src/grammar/expressions.rs b/crates/ra_parser/src/grammar/expressions.rs deleted file mode 100644 index e1c25a838f..0000000000 --- a/crates/ra_parser/src/grammar/expressions.rs +++ /dev/null @@ -1,652 +0,0 @@ -//! 
FIXME: write short doc here - -mod atom; - -pub(crate) use self::atom::{block_expr, match_arm_list}; -pub(super) use self::atom::{literal, LITERAL_FIRST}; -use super::*; - -pub(super) enum StmtWithSemi { - Yes, - No, - Optional, -} - -const EXPR_FIRST: TokenSet = LHS_FIRST; - -pub(super) fn expr(p: &mut Parser) -> (Option, BlockLike) { - let r = Restrictions { forbid_structs: false, prefer_stmt: false }; - expr_bp(p, r, 1) -} - -pub(super) fn expr_with_attrs(p: &mut Parser) -> bool { - let m = p.start(); - let has_attrs = p.at(T![#]); - attributes::outer_attributes(p); - - let (cm, _block_like) = expr(p); - let success = cm.is_some(); - - match (has_attrs, cm) { - (true, Some(cm)) => { - let kind = cm.kind(); - cm.undo_completion(p).abandon(p); - m.complete(p, kind); - } - _ => m.abandon(p), - } - - success -} - -pub(super) fn expr_stmt(p: &mut Parser) -> (Option, BlockLike) { - let r = Restrictions { forbid_structs: false, prefer_stmt: true }; - expr_bp(p, r, 1) -} - -fn expr_no_struct(p: &mut Parser) { - let r = Restrictions { forbid_structs: true, prefer_stmt: false }; - expr_bp(p, r, 1); -} - -fn is_expr_stmt_attr_allowed(kind: SyntaxKind) -> bool { - let forbid = matches!(kind, BIN_EXPR | RANGE_EXPR); - !forbid -} - -pub(super) fn stmt(p: &mut Parser, with_semi: StmtWithSemi) { - let m = p.start(); - // test attr_on_expr_stmt - // fn foo() { - // #[A] foo(); - // #[B] bar!{} - // #[C] #[D] {} - // #[D] return (); - // } - let has_attrs = p.at(T![#]); - attributes::outer_attributes(p); - - if p.at(T![let]) { - let_stmt(p, m, with_semi); - return; - } - - // test block_items - // fn a() { fn b() {} } - let m = match items::maybe_item(p, m, items::ItemFlavor::Mod) { - Ok(()) => return, - Err(m) => m, - }; - - let (cm, blocklike) = expr_stmt(p); - let kind = cm.as_ref().map(|cm| cm.kind()).unwrap_or(ERROR); - - if has_attrs && !is_expr_stmt_attr_allowed(kind) { - // test_err attr_on_expr_not_allowed - // fn foo() { - // #[A] 1 + 2; - // #[B] if true {}; - // } - 
p.error(format!("attributes are not allowed on {:?}", kind)); - } - - if p.at(T!['}']) { - // test attr_on_last_expr_in_block - // fn foo() { - // { #[A] bar!()? } - // #[B] &() - // } - if let Some(cm) = cm { - cm.undo_completion(p).abandon(p); - m.complete(p, kind); - } else { - m.abandon(p); - } - } else { - // test no_semi_after_block - // fn foo() { - // if true {} - // loop {} - // match () {} - // while true {} - // for _ in () {} - // {} - // {} - // macro_rules! test { - // () => {} - // } - // test!{} - // } - - match with_semi { - StmtWithSemi::Yes => { - if blocklike.is_block() { - p.eat(T![;]); - } else { - p.expect(T![;]); - } - } - StmtWithSemi::No => {} - StmtWithSemi::Optional => { - if p.at(T![;]) { - p.eat(T![;]); - } - } - } - - m.complete(p, EXPR_STMT); - } - - // test let_stmt - // fn foo() { - // let a; - // let b: i32; - // let c = 92; - // let d: i32 = 92; - // let e: !; - // let _: ! = {}; - // let f = #[attr]||{}; - // } - fn let_stmt(p: &mut Parser, m: Marker, with_semi: StmtWithSemi) { - assert!(p.at(T![let])); - p.bump(T![let]); - patterns::pattern(p); - if p.at(T![:]) { - types::ascription(p); - } - if p.eat(T![=]) { - expressions::expr_with_attrs(p); - } - - match with_semi { - StmtWithSemi::Yes => { - p.expect(T![;]); - } - StmtWithSemi::No => {} - StmtWithSemi::Optional => { - if p.at(T![;]) { - p.eat(T![;]); - } - } - } - m.complete(p, LET_STMT); - } -} - -pub(super) fn expr_block_contents(p: &mut Parser) { - // This is checked by a validator - attributes::inner_attributes(p); - - while !p.at(EOF) && !p.at(T!['}']) { - // test nocontentexpr - // fn foo(){ - // ;;;some_expr();;;;{;;;};;;;Ok(()) - // } - - // test nocontentexpr_after_item - // fn simple_function() { - // enum LocalEnum { - // One, - // Two, - // }; - // fn f() {}; - // struct S {}; - // } - - if p.at(T![;]) { - p.bump(T![;]); - continue; - } - - stmt(p, StmtWithSemi::Yes) - } -} - -#[derive(Clone, Copy)] -struct Restrictions { - forbid_structs: bool, - prefer_stmt: 
bool, -} - -/// Binding powers of operators for a Pratt parser. -/// -/// See https://www.oilshell.org/blog/2016/11/03.html -#[rustfmt::skip] -fn current_op(p: &Parser) -> (u8, SyntaxKind) { - const NOT_AN_OP: (u8, SyntaxKind) = (0, T![@]); - match p.current() { - T![|] if p.at(T![||]) => (3, T![||]), - T![|] if p.at(T![|=]) => (1, T![|=]), - T![|] => (6, T![|]), - T![>] if p.at(T![>>=]) => (1, T![>>=]), - T![>] if p.at(T![>>]) => (9, T![>>]), - T![>] if p.at(T![>=]) => (5, T![>=]), - T![>] => (5, T![>]), - T![=] if p.at(T![=>]) => NOT_AN_OP, - T![=] if p.at(T![==]) => (5, T![==]), - T![=] => (1, T![=]), - T![<] if p.at(T![<=]) => (5, T![<=]), - T![<] if p.at(T![<<=]) => (1, T![<<=]), - T![<] if p.at(T![<<]) => (9, T![<<]), - T![<] => (5, T![<]), - T![+] if p.at(T![+=]) => (1, T![+=]), - T![+] => (10, T![+]), - T![^] if p.at(T![^=]) => (1, T![^=]), - T![^] => (7, T![^]), - T![%] if p.at(T![%=]) => (1, T![%=]), - T![%] => (11, T![%]), - T![&] if p.at(T![&=]) => (1, T![&=]), - T![&] if p.at(T![&&]) => (4, T![&&]), - T![&] => (8, T![&]), - T![/] if p.at(T![/=]) => (1, T![/=]), - T![/] => (11, T![/]), - T![*] if p.at(T![*=]) => (1, T![*=]), - T![*] => (11, T![*]), - T![.] if p.at(T![..=]) => (2, T![..=]), - T![.] if p.at(T![..]) => (2, T![..]), - T![!] if p.at(T![!=]) => (5, T![!=]), - T![-] if p.at(T![-=]) => (1, T![-=]), - T![-] => (10, T![-]), - T![as] => (12, T![as]), - - _ => NOT_AN_OP - } -} - -// Parses expression with binding power of at least bp. 
-fn expr_bp(p: &mut Parser, mut r: Restrictions, bp: u8) -> (Option, BlockLike) { - let mut lhs = match lhs(p, r) { - Some((lhs, blocklike)) => { - // test stmt_bin_expr_ambiguity - // fn foo() { - // let _ = {1} & 2; - // {1} &2; - // } - if r.prefer_stmt && blocklike.is_block() { - return (Some(lhs), BlockLike::Block); - } - lhs - } - None => return (None, BlockLike::NotBlock), - }; - - loop { - let is_range = p.at(T![..]) || p.at(T![..=]); - let (op_bp, op) = current_op(p); - if op_bp < bp { - break; - } - // test as_precedence - // fn foo() { - // let _ = &1 as *const i32; - // } - if p.at(T![as]) { - lhs = cast_expr(p, lhs); - continue; - } - let m = lhs.precede(p); - p.bump(op); - - // test binop_resets_statementness - // fn foo() { - // v = {1}&2; - // } - r = Restrictions { prefer_stmt: false, ..r }; - - if is_range { - // test postfix_range - // fn foo() { - // let x = 1..; - // match 1.. { _ => () }; - // match a.b()..S { _ => () }; - // } - let has_trailing_expression = - p.at_ts(EXPR_FIRST) && !(r.forbid_structs && p.at(T!['{'])); - if !has_trailing_expression { - // no RHS - lhs = m.complete(p, RANGE_EXPR); - break; - } - } - - expr_bp(p, Restrictions { prefer_stmt: false, ..r }, op_bp + 1); - lhs = m.complete(p, if is_range { RANGE_EXPR } else { BIN_EXPR }); - } - (Some(lhs), BlockLike::NotBlock) -} - -const LHS_FIRST: TokenSet = - atom::ATOM_EXPR_FIRST.union(token_set![T![&], T![*], T![!], T![.], T![-]]); - -fn lhs(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> { - let m; - let kind = match p.current() { - // test ref_expr - // fn foo() { - // // reference operator - // let _ = &1; - // let _ = &mut &f(); - // let _ = &raw; - // let _ = &raw.0; - // // raw reference operator - // let _ = &raw mut foo; - // let _ = &raw const foo; - // } - T![&] => { - m = p.start(); - p.bump(T![&]); - if p.at(IDENT) - && p.at_contextual_kw("raw") - && (p.nth_at(1, T![mut]) || p.nth_at(1, T![const])) - { - p.bump_remap(T![raw]); - 
p.bump_any(); - } else { - p.eat(T![mut]); - } - REF_EXPR - } - // test unary_expr - // fn foo() { - // **&1; - // !!true; - // --1; - // } - T![*] | T![!] | T![-] => { - m = p.start(); - p.bump_any(); - PREFIX_EXPR - } - _ => { - // test full_range_expr - // fn foo() { xs[..]; } - for &op in [T![..=], T![..]].iter() { - if p.at(op) { - m = p.start(); - p.bump(op); - if p.at_ts(EXPR_FIRST) && !(r.forbid_structs && p.at(T!['{'])) { - expr_bp(p, r, 2); - } - return Some((m.complete(p, RANGE_EXPR), BlockLike::NotBlock)); - } - } - - // test expression_after_block - // fn foo() { - // let mut p = F{x: 5}; - // {p}.x = 10; - // } - // - let (lhs, blocklike) = atom::atom_expr(p, r)?; - return Some(postfix_expr(p, lhs, blocklike, !(r.prefer_stmt && blocklike.is_block()))); - } - }; - // parse the interior of the unary expression - expr_bp(p, r, 255); - Some((m.complete(p, kind), BlockLike::NotBlock)) -} - -fn postfix_expr( - p: &mut Parser, - mut lhs: CompletedMarker, - // Calls are disallowed if the type is a block and we prefer statements because the call cannot be disambiguated from a tuple - // E.g. `while true {break}();` is parsed as - // `while true {break}; ();` - mut block_like: BlockLike, - mut allow_calls: bool, -) -> (CompletedMarker, BlockLike) { - loop { - lhs = match p.current() { - // test stmt_postfix_expr_ambiguity - // fn foo() { - // match () { - // _ => {} - // () => {} - // [] => {} - // } - // } - T!['('] if allow_calls => call_expr(p, lhs), - T!['['] if allow_calls => index_expr(p, lhs), - T![.] => match postfix_dot_expr(p, lhs) { - Ok(it) => it, - Err(it) => { - lhs = it; - break; - } - }, - T![?] 
=> try_expr(p, lhs), - _ => break, - }; - allow_calls = true; - block_like = BlockLike::NotBlock; - } - return (lhs, block_like); - - fn postfix_dot_expr( - p: &mut Parser, - lhs: CompletedMarker, - ) -> Result { - assert!(p.at(T![.])); - if p.nth(1) == IDENT && (p.nth(2) == T!['('] || p.nth_at(2, T![::])) { - return Ok(method_call_expr(p, lhs)); - } - - // test await_expr - // fn foo() { - // x.await; - // x.0.await; - // x.0().await?.hello(); - // } - if p.nth(1) == T![await] { - let m = lhs.precede(p); - p.bump(T![.]); - p.bump(T![await]); - return Ok(m.complete(p, AWAIT_EXPR)); - } - - if p.at(T![..=]) || p.at(T![..]) { - return Err(lhs); - } - - Ok(field_expr(p, lhs)) - } -} - -// test call_expr -// fn foo() { -// let _ = f(); -// let _ = f()(1)(1, 2,); -// let _ = f(::func()); -// f(::func()); -// } -fn call_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { - assert!(p.at(T!['('])); - let m = lhs.precede(p); - arg_list(p); - m.complete(p, CALL_EXPR) -} - -// test index_expr -// fn foo() { -// x[1][2]; -// } -fn index_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { - assert!(p.at(T!['['])); - let m = lhs.precede(p); - p.bump(T!['[']); - expr(p); - p.expect(T![']']); - m.complete(p, INDEX_EXPR) -} - -// test method_call_expr -// fn foo() { -// x.foo(); -// y.bar::(1, 2,); -// } -fn method_call_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { - assert!(p.at(T![.]) && p.nth(1) == IDENT && (p.nth(2) == T!['('] || p.nth_at(2, T![::]))); - let m = lhs.precede(p); - p.bump_any(); - name_ref(p); - type_args::opt_type_arg_list(p, true); - if p.at(T!['(']) { - arg_list(p); - } - m.complete(p, METHOD_CALL_EXPR) -} - -// test field_expr -// fn foo() { -// x.foo; -// x.0.bar; -// x.0(); -// } - -// test_err bad_tuple_index_expr -// fn foo() { -// x.0.; -// x.1i32; -// x.0x01; -// } -#[allow(clippy::if_same_then_else)] -fn field_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { - assert!(p.at(T![.])); - let m = 
lhs.precede(p); - p.bump(T![.]); - if p.at(IDENT) || p.at(INT_NUMBER) { - name_ref_or_index(p) - } else if p.at(FLOAT_NUMBER) { - // FIXME: How to recover and instead parse INT + T![.]? - p.bump_any(); - } else { - p.error("expected field name or number") - } - m.complete(p, FIELD_EXPR) -} - -// test try_expr -// fn foo() { -// x?; -// } -fn try_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { - assert!(p.at(T![?])); - let m = lhs.precede(p); - p.bump(T![?]); - m.complete(p, TRY_EXPR) -} - -// test cast_expr -// fn foo() { -// 82 as i32; -// 81 as i8 + 1; -// 79 as i16 - 1; -// 0x36 as u8 <= 0x37; -// } -fn cast_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { - assert!(p.at(T![as])); - let m = lhs.precede(p); - p.bump(T![as]); - // Use type_no_bounds(), because cast expressions are not - // allowed to have bounds. - types::type_no_bounds(p); - m.complete(p, CAST_EXPR) -} - -fn arg_list(p: &mut Parser) { - assert!(p.at(T!['('])); - let m = p.start(); - p.bump(T!['(']); - while !p.at(T![')']) && !p.at(EOF) { - // test arg_with_attr - // fn main() { - // foo(#[attr] 92) - // } - if !expr_with_attrs(p) { - break; - } - if !p.at(T![')']) && !p.expect(T![,]) { - break; - } - } - p.eat(T![')']); - m.complete(p, ARG_LIST); -} - -// test path_expr -// fn foo() { -// let _ = a; -// let _ = a::b; -// let _ = ::a::; -// let _ = format!(); -// } -fn path_expr(p: &mut Parser, r: Restrictions) -> (CompletedMarker, BlockLike) { - assert!(paths::is_path_start(p)); - let m = p.start(); - paths::expr_path(p); - match p.current() { - T!['{'] if !r.forbid_structs => { - record_field_list(p); - (m.complete(p, RECORD_EXPR), BlockLike::NotBlock) - } - T![!] 
if !p.at(T![!=]) => { - let block_like = items::macro_call_after_excl(p); - (m.complete(p, MACRO_CALL), block_like) - } - _ => (m.complete(p, PATH_EXPR), BlockLike::NotBlock), - } -} - -// test record_lit -// fn foo() { -// S {}; -// S { x, y: 32, }; -// S { x, y: 32, ..Default::default() }; -// TupleStruct { 0: 1 }; -// } -pub(crate) fn record_field_list(p: &mut Parser) { - assert!(p.at(T!['{'])); - let m = p.start(); - p.bump(T!['{']); - while !p.at(EOF) && !p.at(T!['}']) { - let m = p.start(); - // test record_literal_field_with_attr - // fn main() { - // S { #[cfg(test)] field: 1 } - // } - attributes::outer_attributes(p); - - match p.current() { - IDENT | INT_NUMBER => { - // test_err record_literal_before_ellipsis_recovery - // fn main() { - // S { field ..S::default() } - // } - if p.nth_at(1, T![:]) || p.nth_at(1, T![..]) { - name_ref_or_index(p); - p.expect(T![:]); - } - expr(p); - m.complete(p, RECORD_EXPR_FIELD); - } - T![.] if p.at(T![..]) => { - m.abandon(p); - p.bump(T![..]); - expr(p); - } - T!['{'] => { - error_block(p, "expected a field"); - m.abandon(p); - } - _ => { - p.err_and_bump("expected identifier"); - m.abandon(p); - } - } - if !p.at(T!['}']) { - p.expect(T![,]); - } - } - p.expect(T!['}']); - m.complete(p, RECORD_EXPR_FIELD_LIST); -} diff --git a/crates/ra_parser/src/grammar/expressions/atom.rs b/crates/ra_parser/src/grammar/expressions/atom.rs deleted file mode 100644 index 0b01d3bc64..0000000000 --- a/crates/ra_parser/src/grammar/expressions/atom.rs +++ /dev/null @@ -1,611 +0,0 @@ -//! 
FIXME: write short doc here - -use super::*; - -// test expr_literals -// fn foo() { -// let _ = true; -// let _ = false; -// let _ = 1; -// let _ = 2.0; -// let _ = b'a'; -// let _ = 'b'; -// let _ = "c"; -// let _ = r"d"; -// let _ = b"e"; -// let _ = br"f"; -// } -pub(crate) const LITERAL_FIRST: TokenSet = token_set![ - TRUE_KW, - FALSE_KW, - INT_NUMBER, - FLOAT_NUMBER, - BYTE, - CHAR, - STRING, - RAW_STRING, - BYTE_STRING, - RAW_BYTE_STRING -]; - -pub(crate) fn literal(p: &mut Parser) -> Option { - if !p.at_ts(LITERAL_FIRST) { - return None; - } - let m = p.start(); - p.bump_any(); - Some(m.complete(p, LITERAL)) -} - -// E.g. for after the break in `if break {}`, this should not match -pub(super) const ATOM_EXPR_FIRST: TokenSet = - LITERAL_FIRST.union(paths::PATH_FIRST).union(token_set![ - T!['('], - T!['{'], - T!['['], - L_DOLLAR, - T![|], - T![move], - T![box], - T![if], - T![while], - T![match], - T![unsafe], - T![return], - T![break], - T![continue], - T![async], - T![try], - T![loop], - T![for], - LIFETIME, - ]); - -const EXPR_RECOVERY_SET: TokenSet = token_set![LET_KW, R_DOLLAR]; - -pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> { - if let Some(m) = literal(p) { - return Some((m, BlockLike::NotBlock)); - } - if paths::is_path_start(p) { - return Some(path_expr(p, r)); - } - let la = p.nth(1); - let done = match p.current() { - T!['('] => tuple_expr(p), - T!['['] => array_expr(p), - L_DOLLAR => meta_var_expr(p), - T![|] => lambda_expr(p), - T![move] if la == T![|] => lambda_expr(p), - T![async] if la == T![|] || (la == T![move] && p.nth(2) == T![|]) => lambda_expr(p), - T![if] => if_expr(p), - - T![loop] => loop_expr(p, None), - T![box] => box_expr(p, None), - T![for] => for_expr(p, None), - T![while] => while_expr(p, None), - T![try] => try_block_expr(p, None), - LIFETIME if la == T![:] => { - let m = p.start(); - label(p); - match p.current() { - T![loop] => loop_expr(p, Some(m)), - T![for] => for_expr(p, 
Some(m)), - T![while] => while_expr(p, Some(m)), - // test labeled_block - // fn f() { 'label: {}; } - T!['{'] => { - block_expr(p); - m.complete(p, EFFECT_EXPR) - } - _ => { - // test_err misplaced_label_err - // fn main() { - // 'loop: impl - // } - p.error("expected a loop"); - m.complete(p, ERROR); - return None; - } - } - } - T![async] if la == T!['{'] || (la == T![move] && p.nth(2) == T!['{']) => { - let m = p.start(); - p.bump(T![async]); - p.eat(T![move]); - block_expr(p); - m.complete(p, EFFECT_EXPR) - } - T![match] => match_expr(p), - // test unsafe_block - // fn f() { unsafe { } } - T![unsafe] if la == T!['{'] => { - let m = p.start(); - p.bump(T![unsafe]); - block_expr(p); - m.complete(p, EFFECT_EXPR) - } - T!['{'] => { - // test for_range_from - // fn foo() { - // for x in 0 .. { - // break; - // } - // } - block_expr_unchecked(p) - } - T![return] => return_expr(p), - T![continue] => continue_expr(p), - T![break] => break_expr(p, r), - _ => { - p.err_recover("expected expression", EXPR_RECOVERY_SET); - return None; - } - }; - let blocklike = match done.kind() { - IF_EXPR | WHILE_EXPR | FOR_EXPR | LOOP_EXPR | MATCH_EXPR | BLOCK_EXPR | EFFECT_EXPR => { - BlockLike::Block - } - _ => BlockLike::NotBlock, - }; - Some((done, blocklike)) -} - -// test tuple_expr -// fn foo() { -// (); -// (1); -// (1,); -// } -fn tuple_expr(p: &mut Parser) -> CompletedMarker { - assert!(p.at(T!['('])); - let m = p.start(); - p.expect(T!['(']); - - let mut saw_comma = false; - let mut saw_expr = false; - while !p.at(EOF) && !p.at(T![')']) { - saw_expr = true; - if !p.at_ts(EXPR_FIRST) { - p.error("expected expression"); - break; - } - expr(p); - if !p.at(T![')']) { - saw_comma = true; - p.expect(T![,]); - } - } - p.expect(T![')']); - m.complete(p, if saw_expr && !saw_comma { PAREN_EXPR } else { TUPLE_EXPR }) -} - -// test array_expr -// fn foo() { -// []; -// [1]; -// [1, 2,]; -// [1; 2]; -// } -fn array_expr(p: &mut Parser) -> CompletedMarker { - assert!(p.at(T!['['])); - let 
m = p.start(); - - let mut n_exprs = 0u32; - let mut has_semi = false; - - p.bump(T!['[']); - while !p.at(EOF) && !p.at(T![']']) { - n_exprs += 1; - - // test array_attrs - // const A: &[i64] = &[1, #[cfg(test)] 2]; - if !expr_with_attrs(p) { - break; - } - - if n_exprs == 1 && p.eat(T![;]) { - has_semi = true; - continue; - } - - if has_semi || !p.at(T![']']) && !p.expect(T![,]) { - break; - } - } - p.expect(T![']']); - - m.complete(p, ARRAY_EXPR) -} - -// test lambda_expr -// fn foo() { -// || (); -// || -> i32 { 92 }; -// |x| x; -// move |x: i32,| x; -// async || {}; -// move || {}; -// async move || {}; -// } -fn lambda_expr(p: &mut Parser) -> CompletedMarker { - assert!( - p.at(T![|]) - || (p.at(T![move]) && p.nth(1) == T![|]) - || (p.at(T![async]) && p.nth(1) == T![|]) - || (p.at(T![async]) && p.nth(1) == T![move] && p.nth(2) == T![|]) - ); - let m = p.start(); - p.eat(T![async]); - p.eat(T![move]); - params::param_list_closure(p); - if opt_fn_ret_type(p) { - // test lambda_ret_block - // fn main() { || -> i32 { 92 }(); } - block_expr(p); - } else { - if p.at_ts(EXPR_FIRST) { - expr(p); - } else { - p.error("expected expression"); - } - } - m.complete(p, CLOSURE_EXPR) -} - -// test if_expr -// fn foo() { -// if true {}; -// if true {} else {}; -// if true {} else if false {} else {}; -// if S {}; -// if { true } { } else { }; -// } -fn if_expr(p: &mut Parser) -> CompletedMarker { - assert!(p.at(T![if])); - let m = p.start(); - p.bump(T![if]); - cond(p); - block_expr(p); - if p.at(T![else]) { - p.bump(T![else]); - if p.at(T![if]) { - if_expr(p); - } else { - block_expr(p); - } - } - m.complete(p, IF_EXPR) -} - -// test label -// fn foo() { -// 'a: loop {} -// 'b: while true {} -// 'c: for x in () {} -// } -fn label(p: &mut Parser) { - assert!(p.at(LIFETIME) && p.nth(1) == T![:]); - let m = p.start(); - p.bump(LIFETIME); - p.bump_any(); - m.complete(p, LABEL); -} - -// test loop_expr -// fn foo() { -// loop {}; -// } -fn loop_expr(p: &mut Parser, m: Option) -> 
CompletedMarker { - assert!(p.at(T![loop])); - let m = m.unwrap_or_else(|| p.start()); - p.bump(T![loop]); - block_expr(p); - m.complete(p, LOOP_EXPR) -} - -// test while_expr -// fn foo() { -// while true {}; -// while let Some(x) = it.next() {}; -// while { true } {}; -// } -fn while_expr(p: &mut Parser, m: Option) -> CompletedMarker { - assert!(p.at(T![while])); - let m = m.unwrap_or_else(|| p.start()); - p.bump(T![while]); - cond(p); - block_expr(p); - m.complete(p, WHILE_EXPR) -} - -// test for_expr -// fn foo() { -// for x in [] {}; -// } -fn for_expr(p: &mut Parser, m: Option) -> CompletedMarker { - assert!(p.at(T![for])); - let m = m.unwrap_or_else(|| p.start()); - p.bump(T![for]); - patterns::pattern(p); - p.expect(T![in]); - expr_no_struct(p); - block_expr(p); - m.complete(p, FOR_EXPR) -} - -// test cond -// fn foo() { if let Some(_) = None {} } -// fn bar() { -// if let Some(_) | Some(_) = None {} -// if let | Some(_) = None {} -// while let Some(_) | Some(_) = None {} -// while let | Some(_) = None {} -// } -fn cond(p: &mut Parser) { - let m = p.start(); - if p.eat(T![let]) { - patterns::pattern_top(p); - p.expect(T![=]); - } - expr_no_struct(p); - m.complete(p, CONDITION); -} - -// test match_expr -// fn foo() { -// match () { }; -// match S {}; -// match { } { _ => () }; -// match { S {} } {}; -// } -fn match_expr(p: &mut Parser) -> CompletedMarker { - assert!(p.at(T![match])); - let m = p.start(); - p.bump(T![match]); - expr_no_struct(p); - if p.at(T!['{']) { - match_arm_list(p); - } else { - p.error("expected `{`") - } - m.complete(p, MATCH_EXPR) -} - -pub(crate) fn match_arm_list(p: &mut Parser) { - assert!(p.at(T!['{'])); - let m = p.start(); - p.eat(T!['{']); - - // test match_arms_inner_attribute - // fn foo() { - // match () { - // #![doc("Inner attribute")] - // #![doc("Can be")] - // #![doc("Stacked")] - // _ => (), - // } - // } - attributes::inner_attributes(p); - - while !p.at(EOF) && !p.at(T!['}']) { - if p.at(T!['{']) { - error_block(p, 
"expected match arm"); - continue; - } - - // test match_arms_commas - // fn foo() { - // match () { - // _ => (), - // _ => {} - // _ => () - // } - // } - if match_arm(p).is_block() { - p.eat(T![,]); - } else if !p.at(T!['}']) { - p.expect(T![,]); - } - } - p.expect(T!['}']); - m.complete(p, MATCH_ARM_LIST); -} - -// test match_arm -// fn foo() { -// match () { -// _ => (), -// _ if Test > Test{field: 0} => (), -// X | Y if Z => (), -// | X | Y if Z => (), -// | X => (), -// }; -// } -fn match_arm(p: &mut Parser) -> BlockLike { - let m = p.start(); - // test match_arms_outer_attributes - // fn foo() { - // match () { - // #[cfg(feature = "some")] - // _ => (), - // #[cfg(feature = "other")] - // _ => (), - // #[cfg(feature = "many")] - // #[cfg(feature = "attributes")] - // #[cfg(feature = "before")] - // _ => (), - // } - // } - attributes::outer_attributes(p); - - patterns::pattern_top_r(p, TokenSet::EMPTY); - if p.at(T![if]) { - match_guard(p); - } - p.expect(T![=>]); - let blocklike = expr_stmt(p).1; - m.complete(p, MATCH_ARM); - blocklike -} - -// test match_guard -// fn foo() { -// match () { -// _ if foo => (), -// } -// } -fn match_guard(p: &mut Parser) -> CompletedMarker { - assert!(p.at(T![if])); - let m = p.start(); - p.bump(T![if]); - expr(p); - m.complete(p, MATCH_GUARD) -} - -// test block -// fn a() {} -// fn b() { let _ = 1; } -// fn c() { 1; 2; } -// fn d() { 1; 2 } -pub(crate) fn block_expr(p: &mut Parser) { - if !p.at(T!['{']) { - p.error("expected a block"); - return; - } - block_expr_unchecked(p); -} - -fn block_expr_unchecked(p: &mut Parser) -> CompletedMarker { - assert!(p.at(T!['{'])); - let m = p.start(); - p.bump(T!['{']); - expr_block_contents(p); - p.expect(T!['}']); - m.complete(p, BLOCK_EXPR) -} - -// test return_expr -// fn foo() { -// return; -// return 92; -// } -fn return_expr(p: &mut Parser) -> CompletedMarker { - assert!(p.at(T![return])); - let m = p.start(); - p.bump(T![return]); - if p.at_ts(EXPR_FIRST) { - expr(p); - } - 
m.complete(p, RETURN_EXPR) -} - -// test continue_expr -// fn foo() { -// loop { -// continue; -// continue 'l; -// } -// } -fn continue_expr(p: &mut Parser) -> CompletedMarker { - assert!(p.at(T![continue])); - let m = p.start(); - p.bump(T![continue]); - p.eat(LIFETIME); - m.complete(p, CONTINUE_EXPR) -} - -// test break_expr -// fn foo() { -// loop { -// break; -// break 'l; -// break 92; -// break 'l 92; -// } -// } -fn break_expr(p: &mut Parser, r: Restrictions) -> CompletedMarker { - assert!(p.at(T![break])); - let m = p.start(); - p.bump(T![break]); - p.eat(LIFETIME); - // test break_ambiguity - // fn foo(){ - // if break {} - // while break {} - // for i in break {} - // match break {} - // } - if p.at_ts(EXPR_FIRST) && !(r.forbid_structs && p.at(T!['{'])) { - expr(p); - } - m.complete(p, BREAK_EXPR) -} - -// test try_block_expr -// fn foo() { -// let _ = try {}; -// } -fn try_block_expr(p: &mut Parser, m: Option) -> CompletedMarker { - assert!(p.at(T![try])); - let m = m.unwrap_or_else(|| p.start()); - // Special-case `try!` as macro. 
- // This is a hack until we do proper edition support - if p.nth_at(1, T![!]) { - // test try_macro_fallback - // fn foo() { try!(Ok(())); } - let path = p.start(); - let path_segment = p.start(); - let name_ref = p.start(); - p.bump_remap(IDENT); - name_ref.complete(p, NAME_REF); - path_segment.complete(p, PATH_SEGMENT); - path.complete(p, PATH); - let _block_like = items::macro_call_after_excl(p); - return m.complete(p, MACRO_CALL); - } - - p.bump(T![try]); - block_expr(p); - m.complete(p, EFFECT_EXPR) -} - -// test box_expr -// fn foo() { -// let x = box 1i32; -// let y = (box 1i32, box 2i32); -// let z = Foo(box 1i32, box 2i32); -// } -fn box_expr(p: &mut Parser, m: Option) -> CompletedMarker { - assert!(p.at(T![box])); - let m = m.unwrap_or_else(|| p.start()); - p.bump(T![box]); - if p.at_ts(EXPR_FIRST) { - expr(p); - } - m.complete(p, BOX_EXPR) -} - -/// Expression from `$var` macro expansion, wrapped in dollars -fn meta_var_expr(p: &mut Parser) -> CompletedMarker { - assert!(p.at(L_DOLLAR)); - let m = p.start(); - p.bump(L_DOLLAR); - let (completed, _is_block) = - expr_bp(p, Restrictions { forbid_structs: false, prefer_stmt: false }, 1); - - match (completed, p.current()) { - (Some(it), R_DOLLAR) => { - p.bump(R_DOLLAR); - m.abandon(p); - it - } - _ => { - while !p.at(R_DOLLAR) { - p.bump_any() - } - p.bump(R_DOLLAR); - m.complete(p, ERROR) - } - } -} diff --git a/crates/ra_parser/src/grammar/items.rs b/crates/ra_parser/src/grammar/items.rs deleted file mode 100644 index cca524ceaa..0000000000 --- a/crates/ra_parser/src/grammar/items.rs +++ /dev/null @@ -1,492 +0,0 @@ -//! FIXME: write short doc here - -mod consts; -mod adt; -mod traits; -mod use_item; - -pub(crate) use self::{ - adt::{enum_variant_list, record_field_def_list}, - expressions::{match_arm_list, record_field_list}, - traits::{impl_item_list, trait_item_list}, - use_item::use_tree_list, -}; -use super::*; - -// test mod_contents -// fn foo() {} -// macro_rules! 
foo {} -// foo::bar!(); -// super::baz! {} -// struct S; -pub(super) fn mod_contents(p: &mut Parser, stop_on_r_curly: bool) { - attributes::inner_attributes(p); - while !(stop_on_r_curly && p.at(T!['}']) || p.at(EOF)) { - item_or_macro(p, stop_on_r_curly, ItemFlavor::Mod) - } -} - -pub(super) enum ItemFlavor { - Mod, - Trait, -} - -pub(super) const ITEM_RECOVERY_SET: TokenSet = token_set![ - FN_KW, STRUCT_KW, ENUM_KW, IMPL_KW, TRAIT_KW, CONST_KW, STATIC_KW, LET_KW, MOD_KW, PUB_KW, - CRATE_KW, USE_KW, MACRO_KW -]; - -pub(super) fn item_or_macro(p: &mut Parser, stop_on_r_curly: bool, flavor: ItemFlavor) { - let m = p.start(); - attributes::outer_attributes(p); - let m = match maybe_item(p, m, flavor) { - Ok(()) => { - if p.at(T![;]) { - p.err_and_bump( - "expected item, found `;`\n\ - consider removing this semicolon", - ); - } - return; - } - Err(m) => m, - }; - if paths::is_use_path_start(p) { - match macro_call(p) { - BlockLike::Block => (), - BlockLike::NotBlock => { - p.expect(T![;]); - } - } - m.complete(p, MACRO_CALL); - } else { - m.abandon(p); - if p.at(T!['{']) { - error_block(p, "expected an item"); - } else if p.at(T!['}']) && !stop_on_r_curly { - let e = p.start(); - p.error("unmatched `}`"); - p.bump(T!['}']); - e.complete(p, ERROR); - } else if !p.at(EOF) && !p.at(T!['}']) { - p.err_and_bump("expected an item"); - } else { - p.error("expected an item"); - } - } -} - -pub(super) fn maybe_item(p: &mut Parser, m: Marker, flavor: ItemFlavor) -> Result<(), Marker> { - // test_err pub_expr - // fn foo() { pub 92; } - let has_visibility = opt_visibility(p); - - let m = match items_without_modifiers(p, m) { - Ok(()) => return Ok(()), - Err(m) => m, - }; - - let mut has_mods = false; - - // modifiers - has_mods |= p.eat(T![const]); - - // test_err async_without_semicolon - // fn foo() { let _ = async {} } - if p.at(T![async]) && p.nth(1) != T!['{'] && p.nth(1) != T![move] && p.nth(1) != T![|] { - p.eat(T![async]); - has_mods = true; - } - - // test_err 
unsafe_block_in_mod - // fn foo(){} unsafe { } fn bar(){} - if p.at(T![unsafe]) && p.nth(1) != T!['{'] { - p.eat(T![unsafe]); - has_mods = true; - } - - if p.at(T![extern]) { - has_mods = true; - abi(p); - } - if p.at(IDENT) && p.at_contextual_kw("auto") && p.nth(1) == T![trait] { - p.bump_remap(T![auto]); - has_mods = true; - } - - if p.at(IDENT) - && p.at_contextual_kw("default") - && (match p.nth(1) { - T![impl] => true, - T![unsafe] => { - // test default_unsafe_impl - // default unsafe impl Foo {} - - // test default_unsafe_fn - // impl T for Foo { - // default unsafe fn foo() {} - // } - if p.nth(2) == T![impl] || p.nth(2) == T![fn] { - p.bump_remap(T![default]); - p.bump(T![unsafe]); - has_mods = true; - } - false - } - T![fn] | T![type] | T![const] => { - if let ItemFlavor::Mod = flavor { - true - } else { - false - } - } - _ => false, - }) - { - p.bump_remap(T![default]); - has_mods = true; - } - if p.at(IDENT) && p.at_contextual_kw("existential") && p.nth(1) == T![type] { - p.bump_remap(T![existential]); - has_mods = true; - } - - // items - match p.current() { - // test async_fn - // async fn foo() {} - - // test extern_fn - // extern fn foo() {} - - // test const_fn - // const fn foo() {} - - // test const_unsafe_fn - // const unsafe fn foo() {} - - // test unsafe_extern_fn - // unsafe extern "C" fn foo() {} - - // test unsafe_fn - // unsafe fn foo() {} - - // test combined_fns - // async unsafe fn foo() {} - // const unsafe fn bar() {} - - // test_err wrong_order_fns - // unsafe async fn foo() {} - // unsafe const fn bar() {} - T![fn] => { - fn_def(p); - m.complete(p, FN); - } - - // test unsafe_trait - // unsafe trait T {} - - // test auto_trait - // auto trait T {} - - // test unsafe_auto_trait - // unsafe auto trait T {} - T![trait] => { - traits::trait_def(p); - m.complete(p, TRAIT); - } - - // test unsafe_impl - // unsafe impl Foo {} - - // test default_impl - // default impl Foo {} - - // test_err default_fn_type - // trait T { - // default type 
T = Bar; - // default fn foo() {} - // } - - // test default_fn_type - // impl T for Foo { - // default type T = Bar; - // default fn foo() {} - // } - T![const] => { - consts::const_def(p, m); - } - - // test unsafe_default_impl - // unsafe default impl Foo {} - T![impl] => { - traits::impl_def(p); - m.complete(p, IMPL); - } - - // test existential_type - // existential type Foo: Fn() -> usize; - T![type] => { - type_def(p, m); - } - _ => { - if !has_visibility && !has_mods { - return Err(m); - } else { - if has_mods { - p.error("expected existential, fn, trait or impl"); - } else { - p.error("expected an item"); - } - m.complete(p, ERROR); - } - } - } - Ok(()) -} - -fn items_without_modifiers(p: &mut Parser, m: Marker) -> Result<(), Marker> { - let la = p.nth(1); - match p.current() { - // test extern_crate - // extern crate foo; - T![extern] if la == T![crate] => extern_crate_item(p, m), - T![type] => { - type_def(p, m); - } - T![mod] => mod_item(p, m), - T![struct] => { - // test struct_items - // struct Foo; - // struct Foo {} - // struct Foo(); - // struct Foo(String, usize); - // struct Foo { - // a: i32, - // b: f32, - // } - adt::struct_def(p, m); - } - // test pub_macro_def - // pub macro m($:ident) {} - T![macro] => { - macro_def(p, m); - } - IDENT if p.at_contextual_kw("union") && p.nth(1) == IDENT => { - // test union_items - // union Foo {} - // union Foo { - // a: i32, - // b: f32, - // } - adt::union_def(p, m); - } - T![enum] => adt::enum_def(p, m), - T![use] => use_item::use_item(p, m), - T![const] if (la == IDENT || la == T![_] || la == T![mut]) => consts::const_def(p, m), - T![static] => consts::static_def(p, m), - // test extern_block - // extern {} - T![extern] - if la == T!['{'] || ((la == STRING || la == RAW_STRING) && p.nth(2) == T!['{']) => - { - abi(p); - extern_item_list(p); - m.complete(p, EXTERN_BLOCK); - } - _ => return Err(m), - }; - Ok(()) -} - -fn extern_crate_item(p: &mut Parser, m: Marker) { - assert!(p.at(T![extern])); - 
p.bump(T![extern]); - assert!(p.at(T![crate])); - p.bump(T![crate]); - - if p.at(T![self]) { - p.bump(T![self]); - } else { - name_ref(p); - } - - opt_alias(p); - p.expect(T![;]); - m.complete(p, EXTERN_CRATE); -} - -pub(crate) fn extern_item_list(p: &mut Parser) { - assert!(p.at(T!['{'])); - let m = p.start(); - p.bump(T!['{']); - mod_contents(p, true); - p.expect(T!['}']); - m.complete(p, EXTERN_ITEM_LIST); -} - -fn fn_def(p: &mut Parser) { - assert!(p.at(T![fn])); - p.bump(T![fn]); - - name_r(p, ITEM_RECOVERY_SET); - // test function_type_params - // fn foo(){} - type_params::opt_type_param_list(p); - - if p.at(T!['(']) { - params::param_list_fn_def(p); - } else { - p.error("expected function arguments"); - } - // test function_ret_type - // fn foo() {} - // fn bar() -> () {} - opt_fn_ret_type(p); - - // test function_where_clause - // fn foo() where T: Copy {} - type_params::opt_where_clause(p); - - // test fn_decl - // trait T { fn foo(); } - if p.at(T![;]) { - p.bump(T![;]); - } else { - expressions::block_expr(p) - } -} - -// test type_item -// type Foo = Bar; -fn type_def(p: &mut Parser, m: Marker) { - assert!(p.at(T![type])); - p.bump(T![type]); - - name(p); - - // test type_item_type_params - // type Result = (); - type_params::opt_type_param_list(p); - - if p.at(T![:]) { - type_params::bounds(p); - } - - // test type_item_where_clause - // type Foo where Foo: Copy = (); - type_params::opt_where_clause(p); - if p.eat(T![=]) { - types::type_(p); - } - p.expect(T![;]); - m.complete(p, TYPE_ALIAS); -} - -pub(crate) fn mod_item(p: &mut Parser, m: Marker) { - assert!(p.at(T![mod])); - p.bump(T![mod]); - - name(p); - if p.at(T!['{']) { - mod_item_list(p); - } else if !p.eat(T![;]) { - p.error("expected `;` or `{`"); - } - m.complete(p, MODULE); -} - -pub(crate) fn mod_item_list(p: &mut Parser) { - assert!(p.at(T!['{'])); - let m = p.start(); - p.bump(T!['{']); - mod_contents(p, true); - p.expect(T!['}']); - m.complete(p, ITEM_LIST); -} - -// test macro_def -// 
macro m { ($i:ident) => {} } -// macro m($i:ident) {} -fn macro_def(p: &mut Parser, m: Marker) { - p.expect(T![macro]); - name_r(p, ITEM_RECOVERY_SET); - if p.at(T!['{']) { - token_tree(p); - } else if !p.at(T!['(']) { - p.error("unmatched `(`"); - } else { - let m = p.start(); - token_tree(p); - match p.current() { - T!['{'] | T!['['] | T!['('] => token_tree(p), - _ => p.error("expected `{`, `[`, `(`"), - } - m.complete(p, TOKEN_TREE); - } - - m.complete(p, MACRO_DEF); -} - -fn macro_call(p: &mut Parser) -> BlockLike { - assert!(paths::is_use_path_start(p)); - paths::use_path(p); - macro_call_after_excl(p) -} - -pub(super) fn macro_call_after_excl(p: &mut Parser) -> BlockLike { - p.expect(T![!]); - if p.at(IDENT) { - name(p); - } - // Special-case `macro_rules! try`. - // This is a hack until we do proper edition support - - // test try_macro_rules - // macro_rules! try { () => {} } - if p.at(T![try]) { - let m = p.start(); - p.bump_remap(IDENT); - m.complete(p, NAME); - } - - match p.current() { - T!['{'] => { - token_tree(p); - BlockLike::Block - } - T!['('] | T!['['] => { - token_tree(p); - BlockLike::NotBlock - } - _ => { - p.error("expected `{`, `[`, `(`"); - BlockLike::NotBlock - } - } -} - -pub(crate) fn token_tree(p: &mut Parser) { - let closing_paren_kind = match p.current() { - T!['{'] => T!['}'], - T!['('] => T![')'], - T!['['] => T![']'], - _ => unreachable!(), - }; - let m = p.start(); - p.bump_any(); - while !p.at(EOF) && !p.at(closing_paren_kind) { - match p.current() { - T!['{'] | T!['('] | T!['['] => token_tree(p), - T!['}'] => { - p.error("unmatched `}`"); - m.complete(p, TOKEN_TREE); - return; - } - T![')'] | T![']'] => p.err_and_bump("unmatched brace"), - _ => p.bump_any(), - } - } - p.expect(closing_paren_kind); - m.complete(p, TOKEN_TREE); -} diff --git a/crates/ra_parser/src/grammar/items/adt.rs b/crates/ra_parser/src/grammar/items/adt.rs deleted file mode 100644 index addfb59d4b..0000000000 --- a/crates/ra_parser/src/grammar/items/adt.rs 
+++ /dev/null @@ -1,178 +0,0 @@ -//! FIXME: write short doc here - -use super::*; - -pub(super) fn struct_def(p: &mut Parser, m: Marker) { - assert!(p.at(T![struct])); - p.bump(T![struct]); - struct_or_union(p, m, T![struct], STRUCT); -} - -pub(super) fn union_def(p: &mut Parser, m: Marker) { - assert!(p.at_contextual_kw("union")); - p.bump_remap(T![union]); - struct_or_union(p, m, T![union], UNION); -} - -fn struct_or_union(p: &mut Parser, m: Marker, kw: SyntaxKind, def: SyntaxKind) { - name_r(p, ITEM_RECOVERY_SET); - type_params::opt_type_param_list(p); - match p.current() { - T![where] => { - type_params::opt_where_clause(p); - match p.current() { - T![;] => { - p.bump(T![;]); - } - T!['{'] => record_field_def_list(p), - _ => { - //FIXME: special case `(` error message - p.error("expected `;` or `{`"); - } - } - } - T![;] if kw == T![struct] => { - p.bump(T![;]); - } - T!['{'] => record_field_def_list(p), - T!['('] if kw == T![struct] => { - tuple_field_def_list(p); - // test tuple_struct_where - // struct Test(T) where T: Clone; - // struct Test(T); - type_params::opt_where_clause(p); - p.expect(T![;]); - } - _ if kw == T![struct] => { - p.error("expected `;`, `{`, or `(`"); - } - _ => { - p.error("expected `{`"); - } - } - m.complete(p, def); -} - -pub(super) fn enum_def(p: &mut Parser, m: Marker) { - assert!(p.at(T![enum])); - p.bump(T![enum]); - name_r(p, ITEM_RECOVERY_SET); - type_params::opt_type_param_list(p); - type_params::opt_where_clause(p); - if p.at(T!['{']) { - enum_variant_list(p); - } else { - p.error("expected `{`") - } - m.complete(p, ENUM); -} - -pub(crate) fn enum_variant_list(p: &mut Parser) { - assert!(p.at(T!['{'])); - let m = p.start(); - p.bump(T!['{']); - while !p.at(EOF) && !p.at(T!['}']) { - if p.at(T!['{']) { - error_block(p, "expected enum variant"); - continue; - } - let var = p.start(); - attributes::outer_attributes(p); - if p.at(IDENT) { - name(p); - match p.current() { - T!['{'] => record_field_def_list(p), - T!['('] => 
tuple_field_def_list(p), - _ => (), - } - - // test variant_discriminant - // enum E { X(i32) = 10 } - if p.eat(T![=]) { - expressions::expr(p); - } - var.complete(p, VARIANT); - } else { - var.abandon(p); - p.err_and_bump("expected enum variant"); - } - if !p.at(T!['}']) { - p.expect(T![,]); - } - } - p.expect(T!['}']); - m.complete(p, VARIANT_LIST); -} - -pub(crate) fn record_field_def_list(p: &mut Parser) { - assert!(p.at(T!['{'])); - let m = p.start(); - p.bump(T!['{']); - while !p.at(T!['}']) && !p.at(EOF) { - if p.at(T!['{']) { - error_block(p, "expected field"); - continue; - } - record_field_def(p); - if !p.at(T!['}']) { - p.expect(T![,]); - } - } - p.expect(T!['}']); - m.complete(p, RECORD_FIELD_LIST); - - fn record_field_def(p: &mut Parser) { - let m = p.start(); - // test record_field_attrs - // struct S { - // #[serde(with = "url_serde")] - // pub uri: Uri, - // } - attributes::outer_attributes(p); - opt_visibility(p); - if p.at(IDENT) { - name(p); - p.expect(T![:]); - types::type_(p); - m.complete(p, RECORD_FIELD); - } else { - m.abandon(p); - p.err_and_bump("expected field declaration"); - } - } -} - -fn tuple_field_def_list(p: &mut Parser) { - assert!(p.at(T!['('])); - let m = p.start(); - if !p.expect(T!['(']) { - return; - } - while !p.at(T![')']) && !p.at(EOF) { - let m = p.start(); - // test tuple_field_attrs - // struct S ( - // #[serde(with = "url_serde")] - // pub Uri, - // ); - // - // enum S { - // Uri(#[serde(with = "url_serde")] Uri), - // } - attributes::outer_attributes(p); - opt_visibility(p); - if !p.at_ts(types::TYPE_FIRST) { - p.error("expected a type"); - m.complete(p, ERROR); - break; - } - types::type_(p); - m.complete(p, TUPLE_FIELD); - - if !p.at(T![')']) { - p.expect(T![,]); - } - } - p.expect(T![')']); - m.complete(p, TUPLE_FIELD_LIST); -} diff --git a/crates/ra_parser/src/grammar/items/consts.rs b/crates/ra_parser/src/grammar/items/consts.rs deleted file mode 100644 index 35ad766dce..0000000000 --- 
a/crates/ra_parser/src/grammar/items/consts.rs +++ /dev/null @@ -1,33 +0,0 @@ -//! FIXME: write short doc here - -use super::*; - -pub(super) fn static_def(p: &mut Parser, m: Marker) { - const_or_static(p, m, T![static], STATIC) -} - -pub(super) fn const_def(p: &mut Parser, m: Marker) { - const_or_static(p, m, T![const], CONST) -} - -fn const_or_static(p: &mut Parser, m: Marker, kw: SyntaxKind, def: SyntaxKind) { - assert!(p.at(kw)); - p.bump(kw); - p.eat(T![mut]); // FIXME: validator to forbid const mut - - // Allow `_` in place of an identifier in a `const`. - let is_const_underscore = kw == T![const] && p.eat(T![_]); - if !is_const_underscore { - name(p); - } - - // test_err static_underscore - // static _: i32 = 5; - - types::ascription(p); - if p.eat(T![=]) { - expressions::expr(p); - } - p.expect(T![;]); - m.complete(p, def); -} diff --git a/crates/ra_parser/src/grammar/items/traits.rs b/crates/ra_parser/src/grammar/items/traits.rs deleted file mode 100644 index ef9c8ff5b0..0000000000 --- a/crates/ra_parser/src/grammar/items/traits.rs +++ /dev/null @@ -1,153 +0,0 @@ -//! 
FIXME: write short doc here - -use super::*; - -// test trait_item -// trait T: Hash + Clone where U: Copy {} -// trait X: Hash + Clone where U: Copy {} -pub(super) fn trait_def(p: &mut Parser) { - assert!(p.at(T![trait])); - p.bump(T![trait]); - name_r(p, ITEM_RECOVERY_SET); - type_params::opt_type_param_list(p); - // test trait_alias - // trait Z = T; - // trait Z = T where U: Copy; - // trait Z = where Self: T; - if p.eat(T![=]) { - type_params::bounds_without_colon(p); - type_params::opt_where_clause(p); - p.expect(T![;]); - return; - } - if p.at(T![:]) { - type_params::bounds(p); - } - type_params::opt_where_clause(p); - if p.at(T!['{']) { - trait_item_list(p); - } else { - p.error("expected `{`"); - } -} - -// test trait_item_list -// impl F { -// type A: Clone; -// const B: i32; -// fn foo() {} -// fn bar(&self); -// } -pub(crate) fn trait_item_list(p: &mut Parser) { - assert!(p.at(T!['{'])); - let m = p.start(); - p.bump(T!['{']); - while !p.at(EOF) && !p.at(T!['}']) { - if p.at(T!['{']) { - error_block(p, "expected an item"); - continue; - } - item_or_macro(p, true, ItemFlavor::Trait); - } - p.expect(T!['}']); - m.complete(p, ASSOC_ITEM_LIST); -} - -// test impl_def -// impl Foo {} -pub(super) fn impl_def(p: &mut Parser) { - assert!(p.at(T![impl])); - p.bump(T![impl]); - if choose_type_params_over_qpath(p) { - type_params::opt_type_param_list(p); - } - - // FIXME: never type - // impl ! {} - - // test impl_def_neg - // impl !Send for X {} - p.eat(T![!]); - impl_type(p); - if p.eat(T![for]) { - impl_type(p); - } - type_params::opt_where_clause(p); - if p.at(T!['{']) { - impl_item_list(p); - } else { - p.error("expected `{`"); - } -} - -// test impl_item_list -// impl F { -// type A = i32; -// const B: i32 = 92; -// fn foo() {} -// fn bar(&self) {} -// } -pub(crate) fn impl_item_list(p: &mut Parser) { - assert!(p.at(T!['{'])); - let m = p.start(); - p.bump(T!['{']); - // test impl_inner_attributes - // enum F{} - // impl F { - // //! 
This is a doc comment - // #![doc("This is also a doc comment")] - // } - attributes::inner_attributes(p); - - while !p.at(EOF) && !p.at(T!['}']) { - if p.at(T!['{']) { - error_block(p, "expected an item"); - continue; - } - item_or_macro(p, true, ItemFlavor::Mod); - } - p.expect(T!['}']); - m.complete(p, ASSOC_ITEM_LIST); -} - -// test impl_type_params -// impl Bar {} -fn choose_type_params_over_qpath(p: &Parser) -> bool { - // There's an ambiguity between generic parameters and qualified paths in impls. - // If we see `<` it may start both, so we have to inspect some following tokens. - // The following combinations can only start generics, - // but not qualified paths (with one exception): - // `<` `>` - empty generic parameters - // `<` `#` - generic parameters with attributes - // `<` `const` - const generic parameters - // `<` (LIFETIME|IDENT) `>` - single generic parameter - // `<` (LIFETIME|IDENT) `,` - first generic parameter in a list - // `<` (LIFETIME|IDENT) `:` - generic parameter with bounds - // `<` (LIFETIME|IDENT) `=` - generic parameter with a default - // The only truly ambiguous case is - // `<` IDENT `>` `::` IDENT ... - // we disambiguate it in favor of generics (`impl ::absolute::Path { ... }`) - // because this is what almost always expected in practice, qualified paths in impls - // (`impl ::AssocTy { ... }`) aren't even allowed by type checker at the moment. 
- if !p.at(T![<]) { - return false; - } - if p.nth(1) == T![#] || p.nth(1) == T![>] || p.nth(1) == CONST_KW { - return true; - } - (p.nth(1) == LIFETIME || p.nth(1) == IDENT) - && (p.nth(2) == T![>] || p.nth(2) == T![,] || p.nth(2) == T![:] || p.nth(2) == T![=]) -} - -// test_err impl_type -// impl Type {} -// impl Trait1 for T {} -// impl impl NotType {} -// impl Trait2 for impl NotType {} -pub(crate) fn impl_type(p: &mut Parser) { - if p.at(T![impl]) { - p.error("expected trait or type"); - return; - } - types::type_(p); -} diff --git a/crates/ra_parser/src/grammar/items/use_item.rs b/crates/ra_parser/src/grammar/items/use_item.rs deleted file mode 100644 index 8e836a77e1..0000000000 --- a/crates/ra_parser/src/grammar/items/use_item.rs +++ /dev/null @@ -1,132 +0,0 @@ -//! FIXME: write short doc here - -use super::*; - -pub(super) fn use_item(p: &mut Parser, m: Marker) { - assert!(p.at(T![use])); - p.bump(T![use]); - use_tree(p, true); - p.expect(T![;]); - m.complete(p, USE); -} - -/// Parse a use 'tree', such as `some::path` in `use some::path;` -/// Note that this is called both by `use_item` and `use_tree_list`, -/// so handles both `some::path::{inner::path}` and `inner::path` in -/// `use some::path::{inner::path};` -fn use_tree(p: &mut Parser, top_level: bool) { - let m = p.start(); - match p.current() { - // Finish the use_tree for cases of e.g. - // `use some::path::{self, *};` or `use *;` - // This does not handle cases such as `use some::path::*` - // N.B. in Rust 2015 `use *;` imports all from crate root - // however in Rust 2018 `use *;` errors: ('cannot glob-import all possible crates') - // FIXME: Add this error (if not out of scope) - - // test use_star - // use *; - // use ::*; - // use some::path::{*}; - // use some::path::{::*}; - T![*] => p.bump(T![*]), - T![:] if p.at(T![::]) && p.nth(2) == T![*] => { - // Parse `use ::*;`, which imports all from the crate root in Rust 2015 - // This is invalid inside a use_tree_list, (e.g. 
`use some::path::{::*}`) - // but still parses and errors later: ('crate root in paths can only be used in start position') - // FIXME: Add this error (if not out of scope) - // In Rust 2018, it is always invalid (see above) - p.bump(T![::]); - p.bump(T![*]); - } - // Open a use tree list - // Handles cases such as `use {some::path};` or `{inner::path}` in - // `use some::path::{{inner::path}, other::path}` - - // test use_tree_list - // use {crate::path::from::root, or::path::from::crate_name}; // Rust 2018 (with a crate named `or`) - // use {path::from::root}; // Rust 2015 - // use ::{some::arbritrary::path}; // Rust 2015 - // use ::{{{root::export}}}; // Nonsensical but perfectly legal nesting - T!['{'] => { - use_tree_list(p); - } - T![:] if p.at(T![::]) && p.nth(2) == T!['{'] => { - p.bump(T![::]); - use_tree_list(p); - } - // Parse a 'standard' path. - // Also handles aliases (e.g. `use something as something_else`) - - // test use_path - // use ::crate_name; // Rust 2018 - All flavours - // use crate_name; // Rust 2018 - Anchored paths - // use item_in_scope_or_crate_name; // Rust 2018 - Uniform Paths - // - // use self::module::Item; - // use crate::Item; - // use self::some::Struct; - // use crate_name::some_item; - _ if paths::is_use_path_start(p) => { - paths::use_path(p); - match p.current() { - T![as] => { - // test use_alias - // use some::path as some_name; - // use some::{ - // other::path as some_other_name, - // different::path as different_name, - // yet::another::path, - // running::out::of::synonyms::for_::different::* - // }; - // use Trait as _; - opt_alias(p); - } - T![:] if p.at(T![::]) => { - p.bump(T![::]); - match p.current() { - T![*] => { - p.bump(T![*]); - } - // test use_tree_list_after_path - // use crate::{Item}; - // use self::{Item}; - T!['{'] => use_tree_list(p), - _ => { - // is this unreachable? 
- p.error("expected `{` or `*`"); - } - } - } - _ => (), - } - } - _ => { - m.abandon(p); - let msg = "expected one of `*`, `::`, `{`, `self`, `super` or an identifier"; - if top_level { - p.err_recover(msg, ITEM_RECOVERY_SET); - } else { - // if we are parsing a nested tree, we have to eat a token to - // main balanced `{}` - p.err_and_bump(msg); - } - return; - } - } - m.complete(p, USE_TREE); -} - -pub(crate) fn use_tree_list(p: &mut Parser) { - assert!(p.at(T!['{'])); - let m = p.start(); - p.bump(T!['{']); - while !p.at(EOF) && !p.at(T!['}']) { - use_tree(p, false); - if !p.at(T!['}']) { - p.expect(T![,]); - } - } - p.expect(T!['}']); - m.complete(p, USE_TREE_LIST); -} diff --git a/crates/ra_parser/src/grammar/params.rs b/crates/ra_parser/src/grammar/params.rs deleted file mode 100644 index f0da173cc1..0000000000 --- a/crates/ra_parser/src/grammar/params.rs +++ /dev/null @@ -1,188 +0,0 @@ -//! FIXME: write short doc here - -use super::*; - -// test param_list -// fn a() {} -// fn b(x: i32) {} -// fn c(x: i32, ) {} -// fn d(x: i32, y: ()) {} -pub(super) fn param_list_fn_def(p: &mut Parser) { - list_(p, Flavor::FnDef) -} - -// test param_list_opt_patterns -// fn foo)>(){} -pub(super) fn param_list_fn_trait(p: &mut Parser) { - list_(p, Flavor::FnTrait) -} - -pub(super) fn param_list_fn_ptr(p: &mut Parser) { - list_(p, Flavor::FnPointer) -} - -pub(super) fn param_list_closure(p: &mut Parser) { - list_(p, Flavor::Closure) -} - -#[derive(Debug, Clone, Copy)] -enum Flavor { - FnDef, // Includes trait fn params; omitted param idents are not supported - FnTrait, // Params for `Fn(...)`/`FnMut(...)`/`FnOnce(...)` annotations - FnPointer, - Closure, -} - -fn list_(p: &mut Parser, flavor: Flavor) { - use Flavor::*; - - let (bra, ket) = match flavor { - Closure => (T![|], T![|]), - FnDef | FnTrait | FnPointer => (T!['('], T![')']), - }; - - let m = p.start(); - p.bump(bra); - - if let FnDef = flavor { - // test self_param_outer_attr - // fn f(#[must_use] self) {} - 
attributes::outer_attributes(p); - opt_self_param(p); - } - - while !p.at(EOF) && !p.at(ket) { - // test param_outer_arg - // fn f(#[attr1] pat: Type) {} - attributes::outer_attributes(p); - - if !p.at_ts(VALUE_PARAMETER_FIRST) { - p.error("expected value parameter"); - break; - } - let param = value_parameter(p, flavor); - if !p.at(ket) { - p.expect(T![,]); - } - if let Variadic(true) = param { - break; - } - } - - p.expect(ket); - m.complete(p, PARAM_LIST); -} - -const VALUE_PARAMETER_FIRST: TokenSet = patterns::PATTERN_FIRST.union(types::TYPE_FIRST); - -struct Variadic(bool); - -fn value_parameter(p: &mut Parser, flavor: Flavor) -> Variadic { - let mut res = Variadic(false); - let m = p.start(); - match flavor { - // test param_list_vararg - // extern "C" { fn printf(format: *const i8, ...) -> i32; } - Flavor::FnDef | Flavor::FnPointer if p.eat(T![...]) => res = Variadic(true), - - // test fn_def_param - // fn foo((x, y): (i32, i32)) {} - Flavor::FnDef => { - patterns::pattern(p); - if variadic_param(p) { - res = Variadic(true) - } else { - types::ascription(p); - } - } - // test value_parameters_no_patterns - // type F = Box; - Flavor::FnTrait => { - types::type_(p); - } - // test fn_pointer_param_ident_path - // type Foo = fn(Bar::Baz); - // type Qux = fn(baz: Bar::Baz); - - // test fn_pointer_unnamed_arg - // type Foo = fn(_: bar); - Flavor::FnPointer => { - if (p.at(IDENT) || p.at(UNDERSCORE)) && p.nth(1) == T![:] && !p.nth_at(1, T![::]) { - patterns::pattern_single(p); - if variadic_param(p) { - res = Variadic(true) - } else { - types::ascription(p); - } - } else { - types::type_(p); - } - } - // test closure_params - // fn main() { - // let foo = |bar, baz: Baz, qux: Qux::Quux| (); - // } - Flavor::Closure => { - patterns::pattern_single(p); - if p.at(T![:]) && !p.at(T![::]) { - types::ascription(p); - } - } - } - m.complete(p, PARAM); - res -} - -fn variadic_param(p: &mut Parser) -> bool { - if p.at(T![:]) && p.nth_at(1, T![...]) { - p.bump(T![:]); - 
p.bump(T![...]); - true - } else { - false - } -} - -// test self_param -// impl S { -// fn a(self) {} -// fn b(&self,) {} -// fn c(&'a self,) {} -// fn d(&'a mut self, x: i32) {} -// fn e(mut self) {} -// } -fn opt_self_param(p: &mut Parser) { - let m; - if p.at(T![self]) || p.at(T![mut]) && p.nth(1) == T![self] { - m = p.start(); - p.eat(T![mut]); - p.eat(T![self]); - // test arb_self_types - // impl S { - // fn a(self: &Self) {} - // fn b(mut self: Box) {} - // } - if p.at(T![:]) { - types::ascription(p); - } - } else { - let la1 = p.nth(1); - let la2 = p.nth(2); - let la3 = p.nth(3); - let n_toks = match (p.current(), la1, la2, la3) { - (T![&], T![self], _, _) => 2, - (T![&], T![mut], T![self], _) => 3, - (T![&], LIFETIME, T![self], _) => 3, - (T![&], LIFETIME, T![mut], T![self]) => 4, - _ => return, - }; - m = p.start(); - for _ in 0..n_toks { - p.bump_any(); - } - } - m.complete(p, SELF_PARAM); - if !p.at(T![')']) { - p.expect(T![,]); - } -} diff --git a/crates/ra_parser/src/grammar/paths.rs b/crates/ra_parser/src/grammar/paths.rs deleted file mode 100644 index b503af1dc9..0000000000 --- a/crates/ra_parser/src/grammar/paths.rs +++ /dev/null @@ -1,115 +0,0 @@ -//! 
FIXME: write short doc here - -use super::*; - -pub(super) const PATH_FIRST: TokenSet = - token_set![IDENT, T![self], T![super], T![crate], T![:], T![<]]; - -pub(super) fn is_path_start(p: &Parser) -> bool { - is_use_path_start(p) || p.at(T![<]) -} - -pub(super) fn is_use_path_start(p: &Parser) -> bool { - match p.current() { - IDENT | T![self] | T![super] | T![crate] => true, - T![:] if p.at(T![::]) => true, - _ => false, - } -} - -pub(super) fn use_path(p: &mut Parser) { - path(p, Mode::Use) -} - -pub(crate) fn type_path(p: &mut Parser) { - path(p, Mode::Type) -} - -pub(super) fn expr_path(p: &mut Parser) { - path(p, Mode::Expr) -} - -#[derive(Clone, Copy, Eq, PartialEq)] -enum Mode { - Use, - Type, - Expr, -} - -fn path(p: &mut Parser, mode: Mode) { - let path = p.start(); - path_segment(p, mode, true); - let mut qual = path.complete(p, PATH); - loop { - let use_tree = matches!(p.nth(2), T![*] | T!['{']); - if p.at(T![::]) && !use_tree { - let path = qual.precede(p); - p.bump(T![::]); - path_segment(p, mode, false); - let path = path.complete(p, PATH); - qual = path; - } else { - break; - } - } -} - -fn path_segment(p: &mut Parser, mode: Mode, first: bool) { - let m = p.start(); - // test qual_paths - // type X = ::Output; - // fn foo() { ::default(); } - if first && p.eat(T![<]) { - types::type_(p); - if p.eat(T![as]) { - if is_use_path_start(p) { - types::path_type(p); - } else { - p.error("expected a trait"); - } - } - p.expect(T![>]); - } else { - let mut empty = true; - if first { - p.eat(T![::]); - empty = false; - } - match p.current() { - IDENT => { - name_ref(p); - opt_path_type_args(p, mode); - } - // test crate_path - // use crate::foo; - T![self] | T![super] | T![crate] => p.bump_any(), - _ => { - p.err_recover("expected identifier", items::ITEM_RECOVERY_SET); - if empty { - // test_err empty_segment - // use crate::; - m.abandon(p); - return; - } - } - }; - } - m.complete(p, PATH_SEGMENT); -} - -fn opt_path_type_args(p: &mut Parser, mode: Mode) { - 
match mode { - Mode::Use => {} - Mode::Type => { - // test path_fn_trait_args - // type F = Box ()>; - if p.at(T!['(']) { - params::param_list_fn_trait(p); - opt_fn_ret_type(p); - } else { - type_args::opt_type_arg_list(p, false) - } - } - Mode::Expr => type_args::opt_type_arg_list(p, true), - } -} diff --git a/crates/ra_parser/src/grammar/patterns.rs b/crates/ra_parser/src/grammar/patterns.rs deleted file mode 100644 index 716bdc9784..0000000000 --- a/crates/ra_parser/src/grammar/patterns.rs +++ /dev/null @@ -1,379 +0,0 @@ -//! FIXME: write short doc here - -use super::*; - -pub(super) const PATTERN_FIRST: TokenSet = expressions::LITERAL_FIRST - .union(paths::PATH_FIRST) - .union(token_set![T![box], T![ref], T![mut], T!['('], T!['['], T![&], T![_], T![-], T![.]]); - -pub(crate) fn pattern(p: &mut Parser) { - pattern_r(p, PAT_RECOVERY_SET); -} - -/// Parses a pattern list separated by pipes `|` -pub(super) fn pattern_top(p: &mut Parser) { - pattern_top_r(p, PAT_RECOVERY_SET) -} - -pub(crate) fn pattern_single(p: &mut Parser) { - pattern_single_r(p, PAT_RECOVERY_SET); -} - -/// Parses a pattern list separated by pipes `|` -/// using the given `recovery_set` -pub(super) fn pattern_top_r(p: &mut Parser, recovery_set: TokenSet) { - p.eat(T![|]); - pattern_r(p, recovery_set); -} - -/// Parses a pattern list separated by pipes `|`, with no leading `|`,using the -/// given `recovery_set` -// test or_pattern -// fn main() { -// match () { -// (_ | _) => (), -// &(_ | _) => (), -// (_ | _,) => (), -// [_ | _,] => (), -// } -// } -fn pattern_r(p: &mut Parser, recovery_set: TokenSet) { - let m = p.start(); - pattern_single_r(p, recovery_set); - - if !p.at(T![|]) { - m.abandon(p); - return; - } - while p.eat(T![|]) { - pattern_single_r(p, recovery_set); - } - m.complete(p, OR_PAT); -} - -fn pattern_single_r(p: &mut Parser, recovery_set: TokenSet) { - if let Some(lhs) = atom_pat(p, recovery_set) { - // test range_pat - // fn main() { - // match 92 { - // 0 ... 
100 => (), - // 101 ..= 200 => (), - // 200 .. 301=> (), - // } - // } - for &range_op in [T![...], T![..=], T![..]].iter() { - if p.at(range_op) { - let m = lhs.precede(p); - p.bump(range_op); - atom_pat(p, recovery_set); - m.complete(p, RANGE_PAT); - return; - } - } - } -} - -const PAT_RECOVERY_SET: TokenSet = - token_set![LET_KW, IF_KW, WHILE_KW, LOOP_KW, MATCH_KW, R_PAREN, COMMA]; - -fn atom_pat(p: &mut Parser, recovery_set: TokenSet) -> Option { - let m = match p.nth(0) { - T![box] => box_pat(p), - T![ref] | T![mut] => bind_pat(p, true), - IDENT => match p.nth(1) { - // Checks the token after an IDENT to see if a pattern is a path (Struct { .. }) or macro - // (T![x]). - T!['('] | T!['{'] | T![!] => path_or_macro_pat(p), - T![:] if p.nth_at(1, T![::]) => path_or_macro_pat(p), - _ => bind_pat(p, true), - }, - - // test type_path_in_pattern - // fn main() { let <_>::Foo = (); } - _ if paths::is_path_start(p) => path_or_macro_pat(p), - _ if is_literal_pat_start(p) => literal_pat(p), - - T![.] if p.at(T![..]) => dot_dot_pat(p), - T![_] => placeholder_pat(p), - T![&] => ref_pat(p), - T!['('] => tuple_pat(p), - T!['['] => slice_pat(p), - - _ => { - p.err_recover("expected pattern", recovery_set); - return None; - } - }; - - Some(m) -} - -fn is_literal_pat_start(p: &Parser) -> bool { - p.at(T![-]) && (p.nth(1) == INT_NUMBER || p.nth(1) == FLOAT_NUMBER) - || p.at_ts(expressions::LITERAL_FIRST) -} - -// test literal_pattern -// fn main() { -// match () { -// -1 => (), -// 92 => (), -// 'c' => (), -// "hello" => (), -// } -// } -fn literal_pat(p: &mut Parser) -> CompletedMarker { - assert!(is_literal_pat_start(p)); - let m = p.start(); - if p.at(T![-]) { - p.bump(T![-]); - } - expressions::literal(p); - m.complete(p, LITERAL_PAT) -} - -// test path_part -// fn foo() { -// let foo::Bar = (); -// let ::Bar = (); -// let Bar { .. } = (); -// let Bar(..) 
= (); -// } -fn path_or_macro_pat(p: &mut Parser) -> CompletedMarker { - assert!(paths::is_path_start(p)); - let m = p.start(); - paths::expr_path(p); - let kind = match p.current() { - T!['('] => { - tuple_pat_fields(p); - TUPLE_STRUCT_PAT - } - T!['{'] => { - record_field_pat_list(p); - RECORD_PAT - } - // test marco_pat - // fn main() { - // let m!(x) = 0; - // } - T![!] => { - items::macro_call_after_excl(p); - return m.complete(p, MACRO_CALL).precede(p).complete(p, MACRO_PAT); - } - _ => PATH_PAT, - }; - m.complete(p, kind) -} - -// test tuple_pat_fields -// fn foo() { -// let S() = (); -// let S(_) = (); -// let S(_,) = (); -// let S(_, .. , x) = (); -// } -fn tuple_pat_fields(p: &mut Parser) { - assert!(p.at(T!['('])); - p.bump(T!['(']); - pat_list(p, T![')']); - p.expect(T![')']); -} - -// test record_field_pat_list -// fn foo() { -// let S {} = (); -// let S { f, ref mut g } = (); -// let S { h: _, ..} = (); -// let S { h: _, } = (); -// } -fn record_field_pat_list(p: &mut Parser) { - assert!(p.at(T!['{'])); - let m = p.start(); - p.bump(T!['{']); - while !p.at(EOF) && !p.at(T!['}']) { - match p.current() { - // A trailing `..` is *not* treated as a REST_PAT. - T![.] 
if p.at(T![..]) => p.bump(T![..]), - T!['{'] => error_block(p, "expected ident"), - - c => { - let m = p.start(); - match c { - // test record_field_pat - // fn foo() { - // let S { 0: 1 } = (); - // let S { x: 1 } = (); - // } - IDENT | INT_NUMBER if p.nth(1) == T![:] => { - name_ref_or_index(p); - p.bump(T![:]); - pattern(p); - } - T![box] => { - // FIXME: not all box patterns should be allowed - box_pat(p); - } - _ => { - bind_pat(p, false); - } - } - m.complete(p, RECORD_PAT_FIELD); - } - } - if !p.at(T!['}']) { - p.expect(T![,]); - } - } - p.expect(T!['}']); - m.complete(p, RECORD_PAT_FIELD_LIST); -} - -// test placeholder_pat -// fn main() { let _ = (); } -fn placeholder_pat(p: &mut Parser) -> CompletedMarker { - assert!(p.at(T![_])); - let m = p.start(); - p.bump(T![_]); - m.complete(p, WILDCARD_PAT) -} - -// test dot_dot_pat -// fn main() { -// let .. = (); -// // -// // Tuples -// // -// let (a, ..) = (); -// let (a, ..,) = (); -// let Tuple(a, ..) = (); -// let Tuple(a, ..,) = (); -// let (.., ..) = (); -// let Tuple(.., ..) = (); -// let (.., a, ..) = (); -// let Tuple(.., a, ..) = (); -// // -// // Slices -// // -// let [..] = (); -// let [head, ..] = (); -// let [head, tail @ ..] = (); -// let [head, .., cons] = (); -// let [head, mid @ .., cons] = (); -// let [head, .., .., cons] = (); -// let [head, .., mid, tail @ ..] = (); -// let [head, .., mid, .., cons] = (); -// } -fn dot_dot_pat(p: &mut Parser) -> CompletedMarker { - assert!(p.at(T![..])); - let m = p.start(); - p.bump(T![..]); - m.complete(p, REST_PAT) -} - -// test ref_pat -// fn main() { -// let &a = (); -// let &mut b = (); -// } -fn ref_pat(p: &mut Parser) -> CompletedMarker { - assert!(p.at(T![&])); - let m = p.start(); - p.bump(T![&]); - p.eat(T![mut]); - pattern_single(p); - m.complete(p, REF_PAT) -} - -// test tuple_pat -// fn main() { -// let (a, b, ..) = (); -// let (a,) = (); -// let (..) 
= (); -// let () = (); -// } -fn tuple_pat(p: &mut Parser) -> CompletedMarker { - assert!(p.at(T!['('])); - let m = p.start(); - p.bump(T!['(']); - let mut has_comma = false; - let mut has_pat = false; - let mut has_rest = false; - while !p.at(EOF) && !p.at(T![')']) { - has_pat = true; - if !p.at_ts(PATTERN_FIRST) { - p.error("expected a pattern"); - break; - } - has_rest |= p.at(T![..]); - - pattern(p); - if !p.at(T![')']) { - has_comma = true; - p.expect(T![,]); - } - } - p.expect(T![')']); - - m.complete(p, if !has_comma && !has_rest && has_pat { PAREN_PAT } else { TUPLE_PAT }) -} - -// test slice_pat -// fn main() { -// let [a, b, ..] = []; -// } -fn slice_pat(p: &mut Parser) -> CompletedMarker { - assert!(p.at(T!['['])); - let m = p.start(); - p.bump(T!['[']); - pat_list(p, T![']']); - p.expect(T![']']); - m.complete(p, SLICE_PAT) -} - -fn pat_list(p: &mut Parser, ket: SyntaxKind) { - while !p.at(EOF) && !p.at(ket) { - if !p.at_ts(PATTERN_FIRST) { - p.error("expected a pattern"); - break; - } - - pattern(p); - if !p.at(ket) { - p.expect(T![,]); - } - } -} - -// test bind_pat -// fn main() { -// let a = (); -// let mut b = (); -// let ref c = (); -// let ref mut d = (); -// let e @ _ = (); -// let ref mut f @ g @ _ = (); -// } -fn bind_pat(p: &mut Parser, with_at: bool) -> CompletedMarker { - let m = p.start(); - p.eat(T![ref]); - p.eat(T![mut]); - name(p); - if with_at && p.eat(T![@]) { - pattern_single(p); - } - m.complete(p, IDENT_PAT) -} - -// test box_pat -// fn main() { -// let box i = (); -// let box Outer { box i, j: box Inner(box &x) } = (); -// let box ref mut i = (); -// } -fn box_pat(p: &mut Parser) -> CompletedMarker { - assert!(p.at(T![box])); - let m = p.start(); - p.bump(T![box]); - pattern_single(p); - m.complete(p, BOX_PAT) -} diff --git a/crates/ra_parser/src/grammar/type_args.rs b/crates/ra_parser/src/grammar/type_args.rs deleted file mode 100644 index aef7cd6fbb..0000000000 --- a/crates/ra_parser/src/grammar/type_args.rs +++ /dev/null @@ 
-1,63 +0,0 @@ -//! FIXME: write short doc here - -use super::*; - -pub(super) fn opt_type_arg_list(p: &mut Parser, colon_colon_required: bool) { - let m; - if p.at(T![::]) && p.nth(2) == T![<] { - m = p.start(); - p.bump(T![::]); - p.bump(T![<]); - } else if !colon_colon_required && p.at(T![<]) && p.nth(1) != T![=] { - m = p.start(); - p.bump(T![<]); - } else { - return; - } - - while !p.at(EOF) && !p.at(T![>]) { - type_arg(p); - if !p.at(T![>]) && !p.expect(T![,]) { - break; - } - } - p.expect(T![>]); - m.complete(p, GENERIC_ARG_LIST); -} - -// test type_arg -// type A = B<'static, i32, 1, { 2 }, Item=u64>; -fn type_arg(p: &mut Parser) { - let m = p.start(); - match p.current() { - LIFETIME => { - p.bump(LIFETIME); - m.complete(p, LIFETIME_ARG); - } - // test associated_type_bounds - // fn print_all>(printables: T) {} - IDENT if p.nth(1) == T![:] && p.nth(2) != T![:] => { - name_ref(p); - type_params::bounds(p); - m.complete(p, ASSOC_TYPE_ARG); - } - IDENT if p.nth(1) == T![=] => { - name_ref(p); - p.bump_any(); - types::type_(p); - m.complete(p, ASSOC_TYPE_ARG); - } - T!['{'] => { - expressions::block_expr(p); - m.complete(p, CONST_ARG); - } - k if k.is_literal() => { - expressions::literal(p); - m.complete(p, CONST_ARG); - } - _ => { - types::type_(p); - m.complete(p, TYPE_ARG); - } - } -} diff --git a/crates/ra_parser/src/grammar/type_params.rs b/crates/ra_parser/src/grammar/type_params.rs deleted file mode 100644 index 90dabb4c0f..0000000000 --- a/crates/ra_parser/src/grammar/type_params.rs +++ /dev/null @@ -1,209 +0,0 @@ -//! 
FIXME: write short doc here - -use super::*; - -pub(super) fn opt_type_param_list(p: &mut Parser) { - if !p.at(T![<]) { - return; - } - type_param_list(p); -} - -fn type_param_list(p: &mut Parser) { - assert!(p.at(T![<])); - let m = p.start(); - p.bump(T![<]); - - while !p.at(EOF) && !p.at(T![>]) { - let m = p.start(); - - // test generic_lifetime_type_attribute - // fn foo<#[derive(Lifetime)] 'a, #[derive(Type)] T>(_: &'a T) { - // } - attributes::outer_attributes(p); - - match p.current() { - LIFETIME => lifetime_param(p, m), - IDENT => type_param(p, m), - CONST_KW => type_const_param(p, m), - _ => { - m.abandon(p); - p.err_and_bump("expected type parameter") - } - } - if !p.at(T![>]) && !p.expect(T![,]) { - break; - } - } - p.expect(T![>]); - m.complete(p, GENERIC_PARAM_LIST); -} - -fn lifetime_param(p: &mut Parser, m: Marker) { - assert!(p.at(LIFETIME)); - p.bump(LIFETIME); - if p.at(T![:]) { - lifetime_bounds(p); - } - m.complete(p, LIFETIME_PARAM); -} - -fn type_param(p: &mut Parser, m: Marker) { - assert!(p.at(IDENT)); - name(p); - if p.at(T![:]) { - bounds(p); - } - // test type_param_default - // struct S; - if p.at(T![=]) { - p.bump(T![=]); - types::type_(p) - } - m.complete(p, TYPE_PARAM); -} - -// test const_param -// struct S; -fn type_const_param(p: &mut Parser, m: Marker) { - assert!(p.at(CONST_KW)); - p.bump(T![const]); - name(p); - types::ascription(p); - m.complete(p, CONST_PARAM); -} - -// test type_param_bounds -// struct S; -pub(super) fn bounds(p: &mut Parser) { - assert!(p.at(T![:])); - p.bump(T![:]); - bounds_without_colon(p); -} - -fn lifetime_bounds(p: &mut Parser) { - assert!(p.at(T![:])); - p.bump(T![:]); - while p.at(LIFETIME) { - p.bump(LIFETIME); - if !p.eat(T![+]) { - break; - } - } -} - -pub(super) fn bounds_without_colon_m(p: &mut Parser, marker: Marker) -> CompletedMarker { - while type_bound(p) { - if !p.eat(T![+]) { - break; - } - } - - marker.complete(p, TYPE_BOUND_LIST) -} - -pub(super) fn bounds_without_colon(p: &mut Parser) 
{ - let m = p.start(); - bounds_without_colon_m(p, m); -} - -fn type_bound(p: &mut Parser) -> bool { - let m = p.start(); - let has_paren = p.eat(T!['(']); - p.eat(T![?]); - match p.current() { - LIFETIME => p.bump(LIFETIME), - T![for] => types::for_type(p), - _ if paths::is_use_path_start(p) => types::path_type_(p, false), - _ => { - m.abandon(p); - return false; - } - } - if has_paren { - p.expect(T![')']); - } - m.complete(p, TYPE_BOUND); - - true -} - -// test where_clause -// fn foo() -// where -// 'a: 'b + 'c, -// T: Clone + Copy + 'static, -// Iterator::Item: 'a, -// ::Item: 'a -// {} -pub(super) fn opt_where_clause(p: &mut Parser) { - if !p.at(T![where]) { - return; - } - let m = p.start(); - p.bump(T![where]); - - while is_where_predicate(p) { - where_predicate(p); - - let comma = p.eat(T![,]); - - if is_where_clause_end(p) { - break; - } - - if !comma { - p.error("expected comma"); - } - } - - m.complete(p, WHERE_CLAUSE); -} - -fn is_where_predicate(p: &mut Parser) -> bool { - match p.current() { - LIFETIME => true, - T![impl] => false, - token => types::TYPE_FIRST.contains(token), - } -} - -fn is_where_clause_end(p: &mut Parser) -> bool { - matches!(p.current(), T!['{'] | T![;] | T![=]) -} - -fn where_predicate(p: &mut Parser) { - let m = p.start(); - match p.current() { - LIFETIME => { - p.bump(LIFETIME); - if p.at(T![:]) { - bounds(p); - } else { - p.error("expected colon"); - } - } - T![impl] => { - p.error("expected lifetime or type"); - } - _ => { - // test where_pred_for - // fn for_trait() - // where - // for<'a> F: Fn(&'a str) - // { } - if p.at(T![for]) { - types::for_binder(p); - } - - types::type_(p); - - if p.at(T![:]) { - bounds(p); - } else { - p.error("expected colon"); - } - } - } - m.complete(p, WHERE_PRED); -} diff --git a/crates/ra_parser/src/grammar/types.rs b/crates/ra_parser/src/grammar/types.rs deleted file mode 100644 index 0aa173a52b..0000000000 --- a/crates/ra_parser/src/grammar/types.rs +++ /dev/null @@ -1,324 +0,0 @@ -//! 
FIXME: write short doc here - -use super::*; - -pub(super) const TYPE_FIRST: TokenSet = paths::PATH_FIRST.union(token_set![ - T!['('], - T!['['], - T![<], - T![!], - T![*], - T![&], - T![_], - T![fn], - T![unsafe], - T![extern], - T![for], - T![impl], - T![dyn], -]); - -const TYPE_RECOVERY_SET: TokenSet = token_set![R_PAREN, COMMA, L_DOLLAR]; - -pub(crate) fn type_(p: &mut Parser) { - type_with_bounds_cond(p, true); -} - -pub(super) fn type_no_bounds(p: &mut Parser) { - type_with_bounds_cond(p, false); -} - -fn type_with_bounds_cond(p: &mut Parser, allow_bounds: bool) { - match p.current() { - T!['('] => paren_or_tuple_type(p), - T![!] => never_type(p), - T![*] => pointer_type(p), - T!['['] => array_or_slice_type(p), - T![&] => reference_type(p), - T![_] => placeholder_type(p), - T![fn] | T![unsafe] | T![extern] => fn_pointer_type(p), - T![for] => for_type(p), - T![impl] => impl_trait_type(p), - T![dyn] => dyn_trait_type(p), - // Some path types are not allowed to have bounds (no plus) - T![<] => path_type_(p, allow_bounds), - _ if paths::is_use_path_start(p) => path_or_macro_type_(p, allow_bounds), - _ => { - p.err_recover("expected type", TYPE_RECOVERY_SET); - } - } -} - -pub(super) fn ascription(p: &mut Parser) { - p.expect(T![:]); - type_(p) -} - -fn paren_or_tuple_type(p: &mut Parser) { - assert!(p.at(T!['('])); - let m = p.start(); - p.bump(T!['(']); - let mut n_types: u32 = 0; - let mut trailing_comma: bool = false; - while !p.at(EOF) && !p.at(T![')']) { - n_types += 1; - type_(p); - if p.eat(T![,]) { - trailing_comma = true; - } else { - trailing_comma = false; - break; - } - } - p.expect(T![')']); - - let kind = if n_types == 1 && !trailing_comma { - // test paren_type - // type T = (i32); - PAREN_TYPE - } else { - // test unit_type - // type T = (); - - // test singleton_tuple_type - // type T = (i32,); - TUPLE_TYPE - }; - m.complete(p, kind); -} - -// test never_type -// type Never = !; -fn never_type(p: &mut Parser) { - assert!(p.at(T![!])); - let m = 
p.start(); - p.bump(T![!]); - m.complete(p, NEVER_TYPE); -} - -fn pointer_type(p: &mut Parser) { - assert!(p.at(T![*])); - let m = p.start(); - p.bump(T![*]); - - match p.current() { - // test pointer_type_mut - // type M = *mut (); - // type C = *mut (); - T![mut] | T![const] => p.bump_any(), - _ => { - // test_err pointer_type_no_mutability - // type T = *(); - p.error( - "expected mut or const in raw pointer type \ - (use `*mut T` or `*const T` as appropriate)", - ); - } - }; - - type_no_bounds(p); - m.complete(p, PTR_TYPE); -} - -fn array_or_slice_type(p: &mut Parser) { - assert!(p.at(T!['['])); - let m = p.start(); - p.bump(T!['[']); - - type_(p); - let kind = match p.current() { - // test slice_type - // type T = [()]; - T![']'] => { - p.bump(T![']']); - SLICE_TYPE - } - - // test array_type - // type T = [(); 92]; - T![;] => { - p.bump(T![;]); - expressions::expr(p); - p.expect(T![']']); - ARRAY_TYPE - } - // test_err array_type_missing_semi - // type T = [() 92]; - _ => { - p.error("expected `;` or `]`"); - SLICE_TYPE - } - }; - m.complete(p, kind); -} - -// test reference_type; -// type A = &(); -// type B = &'static (); -// type C = &mut (); -fn reference_type(p: &mut Parser) { - assert!(p.at(T![&])); - let m = p.start(); - p.bump(T![&]); - p.eat(LIFETIME); - p.eat(T![mut]); - type_no_bounds(p); - m.complete(p, REF_TYPE); -} - -// test placeholder_type -// type Placeholder = _; -fn placeholder_type(p: &mut Parser) { - assert!(p.at(T![_])); - let m = p.start(); - p.bump(T![_]); - m.complete(p, INFER_TYPE); -} - -// test fn_pointer_type -// type A = fn(); -// type B = unsafe fn(); -// type C = unsafe extern "C" fn(); -// type D = extern "C" fn ( u8 , ... 
) -> u8; -fn fn_pointer_type(p: &mut Parser) { - let m = p.start(); - p.eat(T![unsafe]); - if p.at(T![extern]) { - abi(p); - } - // test_err fn_pointer_type_missing_fn - // type F = unsafe (); - if !p.eat(T![fn]) { - m.abandon(p); - p.error("expected `fn`"); - return; - } - if p.at(T!['(']) { - params::param_list_fn_ptr(p); - } else { - p.error("expected parameters") - } - // test fn_pointer_type_with_ret - // type F = fn() -> (); - opt_fn_ret_type(p); - m.complete(p, FN_PTR_TYPE); -} - -pub(super) fn for_binder(p: &mut Parser) { - assert!(p.at(T![for])); - p.bump(T![for]); - if p.at(T![<]) { - type_params::opt_type_param_list(p); - } else { - p.error("expected `<`"); - } -} - -// test for_type -// type A = for<'a> fn() -> (); -// type B = for<'a> unsafe extern "C" fn(&'a ()) -> (); -// type Obj = for<'a> PartialEq<&'a i32>; -pub(super) fn for_type(p: &mut Parser) { - assert!(p.at(T![for])); - let m = p.start(); - for_binder(p); - match p.current() { - T![fn] | T![unsafe] | T![extern] => {} - // OK: legacy trait object format - _ if paths::is_use_path_start(p) => {} - _ => { - p.error("expected a function pointer or path"); - } - } - type_no_bounds(p); - m.complete(p, FOR_TYPE); -} - -// test impl_trait_type -// type A = impl Iterator> + 'a; -fn impl_trait_type(p: &mut Parser) { - assert!(p.at(T![impl])); - let m = p.start(); - p.bump(T![impl]); - type_params::bounds_without_colon(p); - m.complete(p, IMPL_TRAIT_TYPE); -} - -// test dyn_trait_type -// type A = dyn Iterator> + 'a; -fn dyn_trait_type(p: &mut Parser) { - assert!(p.at(T![dyn])); - let m = p.start(); - p.bump(T![dyn]); - type_params::bounds_without_colon(p); - m.complete(p, DYN_TRAIT_TYPE); -} - -// test path_type -// type A = Foo; -// type B = ::Foo; -// type C = self::Foo; -// type D = super::Foo; -pub(super) fn path_type(p: &mut Parser) { - path_type_(p, true) -} - -// test macro_call_type -// type A = foo!(); -// type B = crate::foo!(); -fn path_or_macro_type_(p: &mut Parser, allow_bounds: bool) { - 
assert!(paths::is_path_start(p)); - let m = p.start(); - paths::type_path(p); - - let kind = if p.at(T![!]) && !p.at(T![!=]) { - items::macro_call_after_excl(p); - MACRO_CALL - } else { - PATH_TYPE - }; - - let path = m.complete(p, kind); - - if allow_bounds { - opt_path_type_bounds_as_dyn_trait_type(p, path); - } -} - -pub(super) fn path_type_(p: &mut Parser, allow_bounds: bool) { - assert!(paths::is_path_start(p)); - let m = p.start(); - paths::type_path(p); - - // test path_type_with_bounds - // fn foo() -> Box {} - // fn foo() -> Box {} - let path = m.complete(p, PATH_TYPE); - if allow_bounds { - opt_path_type_bounds_as_dyn_trait_type(p, path); - } -} - -/// This turns a parsed PATH_TYPE optionally into a DYN_TRAIT_TYPE -/// with a TYPE_BOUND_LIST -fn opt_path_type_bounds_as_dyn_trait_type(p: &mut Parser, path_type_marker: CompletedMarker) { - if !p.at(T![+]) { - return; - } - - // First create a TYPE_BOUND from the completed PATH_TYPE - let m = path_type_marker.precede(p).complete(p, TYPE_BOUND); - - // Next setup a marker for the TYPE_BOUND_LIST - let m = m.precede(p); - - // This gets consumed here so it gets properly set - // in the TYPE_BOUND_LIST - p.eat(T![+]); - - // Parse rest of the bounds into the TYPE_BOUND_LIST - let m = type_params::bounds_without_colon_m(p, m); - - // Finally precede everything with DYN_TRAIT_TYPE - m.precede(p).complete(p, DYN_TRAIT_TYPE); -} diff --git a/crates/ra_parser/src/lib.rs b/crates/ra_parser/src/lib.rs deleted file mode 100644 index eeb8ad66bd..0000000000 --- a/crates/ra_parser/src/lib.rs +++ /dev/null @@ -1,149 +0,0 @@ -//! The Rust parser. -//! -//! The parser doesn't know about concrete representation of tokens and syntax -//! trees. Abstract `TokenSource` and `TreeSink` traits are used instead. As a -//! consequence, this crates does not contain a lexer. -//! -//! The `Parser` struct from the `parser` module is a cursor into the sequence -//! of tokens. 
Parsing routines use `Parser` to inspect current state and -//! advance the parsing. -//! -//! The actual parsing happens in the `grammar` module. -//! -//! Tests for this crate live in `ra_syntax` crate. - -#[macro_use] -mod token_set; -#[macro_use] -mod syntax_kind; -mod event; -mod parser; -mod grammar; - -pub(crate) use token_set::TokenSet; - -pub use syntax_kind::SyntaxKind; - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct ParseError(pub Box); - -/// `TokenSource` abstracts the source of the tokens parser operates on. -/// -/// Hopefully this will allow us to treat text and token trees in the same way! -pub trait TokenSource { - fn current(&self) -> Token; - - /// Lookahead n token - fn lookahead_nth(&self, n: usize) -> Token; - - /// bump cursor to next token - fn bump(&mut self); - - /// Is the current token a specified keyword? - fn is_keyword(&self, kw: &str) -> bool; -} - -/// `Token` abstracts the cursor of `TokenSource` operates on. -#[derive(Debug, Copy, Clone, Eq, PartialEq)] -pub struct Token { - /// What is the current token? - pub kind: SyntaxKind, - - /// Is the current token joined to the next one (`> >` vs `>>`). - pub is_jointed_to_next: bool, -} - -/// `TreeSink` abstracts details of a particular syntax tree implementation. -pub trait TreeSink { - /// Adds new token to the current branch. - fn token(&mut self, kind: SyntaxKind, n_tokens: u8); - - /// Start new branch and make it current. - fn start_node(&mut self, kind: SyntaxKind); - - /// Finish current branch and restore previous - /// branch as current. - fn finish_node(&mut self); - - fn error(&mut self, error: ParseError); -} - -fn parse_from_tokens(token_source: &mut dyn TokenSource, tree_sink: &mut dyn TreeSink, f: F) -where - F: FnOnce(&mut parser::Parser), -{ - let mut p = parser::Parser::new(token_source); - f(&mut p); - let events = p.finish(); - event::process(tree_sink, events); -} - -/// Parse given tokens into the given sink as a rust file. 
-pub fn parse(token_source: &mut dyn TokenSource, tree_sink: &mut dyn TreeSink) { - parse_from_tokens(token_source, tree_sink, grammar::root); -} - -#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] -pub enum FragmentKind { - Path, - Expr, - Statement, - Type, - Pattern, - Item, - Block, - Visibility, - MetaItem, - - // These kinds are used when parsing the result of expansion - // FIXME: use separate fragment kinds for macro inputs and outputs? - Items, - Statements, -} - -pub fn parse_fragment( - token_source: &mut dyn TokenSource, - tree_sink: &mut dyn TreeSink, - fragment_kind: FragmentKind, -) { - let parser: fn(&'_ mut parser::Parser) = match fragment_kind { - FragmentKind::Path => grammar::fragments::path, - FragmentKind::Expr => grammar::fragments::expr, - FragmentKind::Type => grammar::fragments::type_, - FragmentKind::Pattern => grammar::fragments::pattern, - FragmentKind::Item => grammar::fragments::item, - FragmentKind::Block => grammar::fragments::block_expr, - FragmentKind::Visibility => grammar::fragments::opt_visibility, - FragmentKind::MetaItem => grammar::fragments::meta_item, - FragmentKind::Statement => grammar::fragments::stmt, - FragmentKind::Items => grammar::fragments::macro_items, - FragmentKind::Statements => grammar::fragments::macro_stmts, - }; - parse_from_tokens(token_source, tree_sink, parser) -} - -/// A parsing function for a specific braced-block. -pub struct Reparser(fn(&mut parser::Parser)); - -impl Reparser { - /// If the node is a braced block, return the corresponding `Reparser`. - pub fn for_node( - node: SyntaxKind, - first_child: Option, - parent: Option, - ) -> Option { - grammar::reparser(node, first_child, parent).map(Reparser) - } - - /// Re-parse given tokens using this `Reparser`. - /// - /// Tokens must start with `{`, end with `}` and form a valid brace - /// sequence. 
- pub fn parse(self, token_source: &mut dyn TokenSource, tree_sink: &mut dyn TreeSink) { - let Reparser(r) = self; - let mut p = parser::Parser::new(token_source); - r(&mut p); - let events = p.finish(); - event::process(tree_sink, events); - } -} diff --git a/crates/ra_parser/src/parser.rs b/crates/ra_parser/src/parser.rs deleted file mode 100644 index d797f2cc96..0000000000 --- a/crates/ra_parser/src/parser.rs +++ /dev/null @@ -1,350 +0,0 @@ -//! FIXME: write short doc here - -use std::cell::Cell; - -use drop_bomb::DropBomb; - -use crate::{ - event::Event, - ParseError, - SyntaxKind::{self, EOF, ERROR, TOMBSTONE}, - TokenSet, TokenSource, T, -}; - -/// `Parser` struct provides the low-level API for -/// navigating through the stream of tokens and -/// constructing the parse tree. The actual parsing -/// happens in the `grammar` module. -/// -/// However, the result of this `Parser` is not a real -/// tree, but rather a flat stream of events of the form -/// "start expression, consume number literal, -/// finish expression". See `Event` docs for more. -pub(crate) struct Parser<'t> { - token_source: &'t mut dyn TokenSource, - events: Vec, - steps: Cell, -} - -impl<'t> Parser<'t> { - pub(super) fn new(token_source: &'t mut dyn TokenSource) -> Parser<'t> { - Parser { token_source, events: Vec::new(), steps: Cell::new(0) } - } - - pub(crate) fn finish(self) -> Vec { - self.events - } - - /// Returns the kind of the current token. - /// If parser has already reached the end of input, - /// the special `EOF` kind is returned. - pub(crate) fn current(&self) -> SyntaxKind { - self.nth(0) - } - - /// Lookahead operation: returns the kind of the next nth - /// token. - pub(crate) fn nth(&self, n: usize) -> SyntaxKind { - assert!(n <= 3); - - let steps = self.steps.get(); - assert!(steps <= 10_000_000, "the parser seems stuck"); - self.steps.set(steps + 1); - - self.token_source.lookahead_nth(n).kind - } - - /// Checks if the current token is `kind`. 
- pub(crate) fn at(&self, kind: SyntaxKind) -> bool { - self.nth_at(0, kind) - } - - pub(crate) fn nth_at(&self, n: usize, kind: SyntaxKind) -> bool { - match kind { - T![-=] => self.at_composite2(n, T![-], T![=]), - T![->] => self.at_composite2(n, T![-], T![>]), - T![::] => self.at_composite2(n, T![:], T![:]), - T![!=] => self.at_composite2(n, T![!], T![=]), - T![..] => self.at_composite2(n, T![.], T![.]), - T![*=] => self.at_composite2(n, T![*], T![=]), - T![/=] => self.at_composite2(n, T![/], T![=]), - T![&&] => self.at_composite2(n, T![&], T![&]), - T![&=] => self.at_composite2(n, T![&], T![=]), - T![%=] => self.at_composite2(n, T![%], T![=]), - T![^=] => self.at_composite2(n, T![^], T![=]), - T![+=] => self.at_composite2(n, T![+], T![=]), - T![<<] => self.at_composite2(n, T![<], T![<]), - T![<=] => self.at_composite2(n, T![<], T![=]), - T![==] => self.at_composite2(n, T![=], T![=]), - T![=>] => self.at_composite2(n, T![=], T![>]), - T![>=] => self.at_composite2(n, T![>], T![=]), - T![>>] => self.at_composite2(n, T![>], T![>]), - T![|=] => self.at_composite2(n, T![|], T![=]), - T![||] => self.at_composite2(n, T![|], T![|]), - - T![...] => self.at_composite3(n, T![.], T![.], T![.]), - T![..=] => self.at_composite3(n, T![.], T![.], T![=]), - T![<<=] => self.at_composite3(n, T![<], T![<], T![=]), - T![>>=] => self.at_composite3(n, T![>], T![>], T![=]), - - _ => self.token_source.lookahead_nth(n).kind == kind, - } - } - - /// Consume the next token if `kind` matches. - pub(crate) fn eat(&mut self, kind: SyntaxKind) -> bool { - if !self.at(kind) { - return false; - } - let n_raw_tokens = match kind { - T![-=] - | T![->] - | T![::] - | T![!=] - | T![..] - | T![*=] - | T![/=] - | T![&&] - | T![&=] - | T![%=] - | T![^=] - | T![+=] - | T![<<] - | T![<=] - | T![==] - | T![=>] - | T![>=] - | T![>>] - | T![|=] - | T![||] => 2, - - T![...] 
| T![..=] | T![<<=] | T![>>=] => 3, - _ => 1, - }; - self.do_bump(kind, n_raw_tokens); - true - } - - fn at_composite2(&self, n: usize, k1: SyntaxKind, k2: SyntaxKind) -> bool { - let t1 = self.token_source.lookahead_nth(n); - if t1.kind != k1 || !t1.is_jointed_to_next { - return false; - } - let t2 = self.token_source.lookahead_nth(n + 1); - t2.kind == k2 - } - - fn at_composite3(&self, n: usize, k1: SyntaxKind, k2: SyntaxKind, k3: SyntaxKind) -> bool { - let t1 = self.token_source.lookahead_nth(n); - if t1.kind != k1 || !t1.is_jointed_to_next { - return false; - } - let t2 = self.token_source.lookahead_nth(n + 1); - if t2.kind != k2 || !t2.is_jointed_to_next { - return false; - } - let t3 = self.token_source.lookahead_nth(n + 2); - t3.kind == k3 - } - - /// Checks if the current token is in `kinds`. - pub(crate) fn at_ts(&self, kinds: TokenSet) -> bool { - kinds.contains(self.current()) - } - - /// Checks if the current token is contextual keyword with text `t`. - pub(crate) fn at_contextual_kw(&self, kw: &str) -> bool { - self.token_source.is_keyword(kw) - } - - /// Starts a new node in the syntax tree. All nodes and tokens - /// consumed between the `start` and the corresponding `Marker::complete` - /// belong to the same node. - pub(crate) fn start(&mut self) -> Marker { - let pos = self.events.len() as u32; - self.push_event(Event::tombstone()); - Marker::new(pos) - } - - /// Consume the next token if `kind` matches. - pub(crate) fn bump(&mut self, kind: SyntaxKind) { - assert!(self.eat(kind)); - } - - /// Advances the parser by one token - pub(crate) fn bump_any(&mut self) { - let kind = self.nth(0); - if kind == EOF { - return; - } - self.do_bump(kind, 1) - } - - /// Advances the parser by one token, remapping its kind. - /// This is useful to create contextual keywords from - /// identifiers. 
For example, the lexer creates an `union` - /// *identifier* token, but the parser remaps it to the - /// `union` keyword, and keyword is what ends up in the - /// final tree. - pub(crate) fn bump_remap(&mut self, kind: SyntaxKind) { - if self.nth(0) == EOF { - // FIXME: panic!? - return; - } - self.do_bump(kind, 1); - } - - /// Emit error with the `message` - /// FIXME: this should be much more fancy and support - /// structured errors with spans and notes, like rustc - /// does. - pub(crate) fn error>(&mut self, message: T) { - let msg = ParseError(Box::new(message.into())); - self.push_event(Event::Error { msg }) - } - - /// Consume the next token if it is `kind` or emit an error - /// otherwise. - pub(crate) fn expect(&mut self, kind: SyntaxKind) -> bool { - if self.eat(kind) { - return true; - } - self.error(format!("expected {:?}", kind)); - false - } - - /// Create an error node and consume the next token. - pub(crate) fn err_and_bump(&mut self, message: &str) { - self.err_recover(message, TokenSet::EMPTY); - } - - /// Create an error node and consume the next token. - pub(crate) fn err_recover(&mut self, message: &str, recovery: TokenSet) { - match self.current() { - T!['{'] | T!['}'] => { - self.error(message); - return; - } - _ => (), - } - - if self.at_ts(recovery) { - self.error(message); - return; - } - - let m = self.start(); - self.error(message); - self.bump_any(); - m.complete(self, ERROR); - } - - fn do_bump(&mut self, kind: SyntaxKind, n_raw_tokens: u8) { - for _ in 0..n_raw_tokens { - self.token_source.bump(); - } - - self.push_event(Event::Token { kind, n_raw_tokens }); - } - - fn push_event(&mut self, event: Event) { - self.events.push(event) - } -} - -/// See `Parser::start`. 
-pub(crate) struct Marker { - pos: u32, - bomb: DropBomb, -} - -impl Marker { - fn new(pos: u32) -> Marker { - Marker { pos, bomb: DropBomb::new("Marker must be either completed or abandoned") } - } - - /// Finishes the syntax tree node and assigns `kind` to it, - /// and mark the create a `CompletedMarker` for possible future - /// operation like `.precede()` to deal with forward_parent. - pub(crate) fn complete(mut self, p: &mut Parser, kind: SyntaxKind) -> CompletedMarker { - self.bomb.defuse(); - let idx = self.pos as usize; - match p.events[idx] { - Event::Start { kind: ref mut slot, .. } => { - *slot = kind; - } - _ => unreachable!(), - } - let finish_pos = p.events.len() as u32; - p.push_event(Event::Finish); - CompletedMarker::new(self.pos, finish_pos, kind) - } - - /// Abandons the syntax tree node. All its children - /// are attached to its parent instead. - pub(crate) fn abandon(mut self, p: &mut Parser) { - self.bomb.defuse(); - let idx = self.pos as usize; - if idx == p.events.len() - 1 { - match p.events.pop() { - Some(Event::Start { kind: TOMBSTONE, forward_parent: None }) => (), - _ => unreachable!(), - } - } - } -} - -pub(crate) struct CompletedMarker { - start_pos: u32, - finish_pos: u32, - kind: SyntaxKind, -} - -impl CompletedMarker { - fn new(start_pos: u32, finish_pos: u32, kind: SyntaxKind) -> Self { - CompletedMarker { start_pos, finish_pos, kind } - } - - /// This method allows to create a new node which starts - /// *before* the current one. That is, parser could start - /// node `A`, then complete it, and then after parsing the - /// whole `A`, decide that it should have started some node - /// `B` before starting `A`. `precede` allows to do exactly - /// that. See also docs about `forward_parent` in `Event::Start`. - /// - /// Given completed events `[START, FINISH]` and its corresponding - /// `CompletedMarker(pos: 0, _)`. 
- /// Append a new `START` events as `[START, FINISH, NEWSTART]`, - /// then mark `NEWSTART` as `START`'s parent with saving its relative - /// distance to `NEWSTART` into forward_parent(=2 in this case); - pub(crate) fn precede(self, p: &mut Parser) -> Marker { - let new_pos = p.start(); - let idx = self.start_pos as usize; - match p.events[idx] { - Event::Start { ref mut forward_parent, .. } => { - *forward_parent = Some(new_pos.pos - self.start_pos); - } - _ => unreachable!(), - } - new_pos - } - - /// Undo this completion and turns into a `Marker` - pub(crate) fn undo_completion(self, p: &mut Parser) -> Marker { - let start_idx = self.start_pos as usize; - let finish_idx = self.finish_pos as usize; - match p.events[start_idx] { - Event::Start { ref mut kind, forward_parent: None } => *kind = TOMBSTONE, - _ => unreachable!(), - } - match p.events[finish_idx] { - ref mut slot @ Event::Finish => *slot = Event::tombstone(), - _ => unreachable!(), - } - Marker::new(self.start_pos) - } - - pub(crate) fn kind(&self) -> SyntaxKind { - self.kind - } -} diff --git a/crates/ra_proc_macro/Cargo.toml b/crates/ra_proc_macro/Cargo.toml deleted file mode 100644 index c4b6e9e7ba..0000000000 --- a/crates/ra_proc_macro/Cargo.toml +++ /dev/null @@ -1,18 +0,0 @@ -[package] -edition = "2018" -name = "ra_proc_macro" -version = "0.1.0" -authors = ["rust-analyzer developers"] -publish = false -license = "MIT OR Apache-2.0" - -[lib] -doctest = false - -[dependencies] -ra_tt = { path = "../ra_tt" } -serde = { version = "1.0", features = ["derive"] } -serde_json = "1.0" -log = "0.4.8" -crossbeam-channel = "0.4.0" -jod-thread = "0.1.1" diff --git a/crates/ra_proc_macro/src/lib.rs b/crates/ra_proc_macro/src/lib.rs deleted file mode 100644 index 004943b9e0..0000000000 --- a/crates/ra_proc_macro/src/lib.rs +++ /dev/null @@ -1,112 +0,0 @@ -//! Client-side Proc-Macro crate -//! -//! We separate proc-macro expanding logic to an extern program to allow -//! different implementations (e.g. 
wasm or dylib loading). And this crate -//! is used to provide basic infrastructure for communication between two -//! processes: Client (RA itself), Server (the external program) - -mod rpc; -mod process; -pub mod msg; - -use process::{ProcMacroProcessSrv, ProcMacroProcessThread}; -use ra_tt::{SmolStr, Subtree}; -use std::{ - ffi::OsStr, - io, - path::{Path, PathBuf}, - sync::Arc, -}; - -pub use rpc::{ExpansionResult, ExpansionTask, ListMacrosResult, ListMacrosTask, ProcMacroKind}; - -#[derive(Debug, Clone)] -pub struct ProcMacroProcessExpander { - process: Arc, - dylib_path: PathBuf, - name: SmolStr, -} - -impl Eq for ProcMacroProcessExpander {} -impl PartialEq for ProcMacroProcessExpander { - fn eq(&self, other: &Self) -> bool { - self.name == other.name - && self.dylib_path == other.dylib_path - && Arc::ptr_eq(&self.process, &other.process) - } -} - -impl ra_tt::TokenExpander for ProcMacroProcessExpander { - fn expand( - &self, - subtree: &Subtree, - _attr: Option<&Subtree>, - ) -> Result { - self.process.custom_derive(&self.dylib_path, subtree, &self.name) - } -} - -#[derive(Debug)] -enum ProcMacroClientKind { - Process { process: Arc, thread: ProcMacroProcessThread }, - Dummy, -} - -#[derive(Debug)] -pub struct ProcMacroClient { - kind: ProcMacroClientKind, -} - -impl ProcMacroClient { - pub fn extern_process( - process_path: PathBuf, - args: impl IntoIterator>, - ) -> io::Result { - let (thread, process) = ProcMacroProcessSrv::run(process_path, args)?; - Ok(ProcMacroClient { - kind: ProcMacroClientKind::Process { process: Arc::new(process), thread }, - }) - } - - pub fn dummy() -> ProcMacroClient { - ProcMacroClient { kind: ProcMacroClientKind::Dummy } - } - - pub fn by_dylib_path( - &self, - dylib_path: &Path, - ) -> Vec<(SmolStr, Arc)> { - match &self.kind { - ProcMacroClientKind::Dummy => vec![], - ProcMacroClientKind::Process { process, .. 
} => { - let macros = match process.find_proc_macros(dylib_path) { - Err(err) => { - eprintln!("Failed to find proc macros. Error: {:#?}", err); - return vec![]; - } - Ok(macros) => macros, - }; - - macros - .into_iter() - .filter_map(|(name, kind)| { - // FIXME: Support custom derive only for now. - match kind { - ProcMacroKind::CustomDerive => { - let name = SmolStr::new(&name); - let expander: Arc = - Arc::new(ProcMacroProcessExpander { - process: process.clone(), - name: name.clone(), - dylib_path: dylib_path.into(), - }); - Some((name, expander)) - } - _ => None, - } - }) - .collect() - } - } - } -} diff --git a/crates/ra_proc_macro/src/msg.rs b/crates/ra_proc_macro/src/msg.rs deleted file mode 100644 index 95d9b8804e..0000000000 --- a/crates/ra_proc_macro/src/msg.rs +++ /dev/null @@ -1,88 +0,0 @@ -//! Defines messages for cross-process message passing based on `ndjson` wire protocol - -use std::{ - convert::TryFrom, - io::{self, BufRead, Write}, -}; - -use crate::{ - rpc::{ListMacrosResult, ListMacrosTask}, - ExpansionResult, ExpansionTask, -}; -use serde::{de::DeserializeOwned, Deserialize, Serialize}; - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub enum Request { - ListMacro(ListMacrosTask), - ExpansionMacro(ExpansionTask), -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub enum Response { - Error(ResponseError), - ListMacro(ListMacrosResult), - ExpansionMacro(ExpansionResult), -} - -macro_rules! 
impl_try_from_response { - ($ty:ty, $tag:ident) => { - impl TryFrom for $ty { - type Error = &'static str; - fn try_from(value: Response) -> Result { - match value { - Response::$tag(res) => Ok(res), - _ => Err(concat!("Failed to convert response to ", stringify!($tag))), - } - } - } - }; -} - -impl_try_from_response!(ListMacrosResult, ListMacro); -impl_try_from_response!(ExpansionResult, ExpansionMacro); - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct ResponseError { - pub code: ErrorCode, - pub message: String, -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub enum ErrorCode { - ServerErrorEnd, - ExpansionError, -} - -pub trait Message: Serialize + DeserializeOwned { - fn read(inp: &mut impl BufRead) -> io::Result> { - Ok(match read_json(inp)? { - None => None, - Some(text) => Some(serde_json::from_str(&text)?), - }) - } - fn write(self, out: &mut impl Write) -> io::Result<()> { - let text = serde_json::to_string(&self)?; - write_json(out, &text) - } -} - -impl Message for Request {} -impl Message for Response {} - -fn read_json(inp: &mut impl BufRead) -> io::Result> { - let mut buf = String::new(); - inp.read_line(&mut buf)?; - buf.pop(); // Remove traling '\n' - Ok(match buf.len() { - 0 => None, - _ => Some(buf), - }) -} - -fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> { - log::debug!("> {}", msg); - out.write_all(msg.as_bytes())?; - out.write_all(b"\n")?; - out.flush()?; - Ok(()) -} diff --git a/crates/ra_proc_macro/src/process.rs b/crates/ra_proc_macro/src/process.rs deleted file mode 100644 index 5bcdacb487..0000000000 --- a/crates/ra_proc_macro/src/process.rs +++ /dev/null @@ -1,203 +0,0 @@ -//! 
Handle process life-time and message passing for proc-macro client - -use crossbeam_channel::{bounded, Receiver, Sender}; -use ra_tt::Subtree; - -use crate::msg::{ErrorCode, Message, Request, Response, ResponseError}; -use crate::rpc::{ExpansionResult, ExpansionTask, ListMacrosResult, ListMacrosTask, ProcMacroKind}; - -use io::{BufRead, BufReader}; -use std::{ - convert::{TryFrom, TryInto}, - ffi::{OsStr, OsString}, - io::{self, Write}, - path::{Path, PathBuf}, - process::{Child, Command, Stdio}, - sync::{Arc, Weak}, -}; - -#[derive(Debug, Default)] -pub(crate) struct ProcMacroProcessSrv { - inner: Option>>, -} - -#[derive(Debug)] -pub(crate) struct ProcMacroProcessThread { - // XXX: drop order is significant - sender: Arc>, - handle: jod_thread::JoinHandle<()>, -} - -impl ProcMacroProcessSrv { - pub fn run( - process_path: PathBuf, - args: impl IntoIterator>, - ) -> io::Result<(ProcMacroProcessThread, ProcMacroProcessSrv)> { - let process = Process::run(process_path, args)?; - - let (task_tx, task_rx) = bounded(0); - let handle = jod_thread::spawn(move || { - client_loop(task_rx, process); - }); - - let task_tx = Arc::new(task_tx); - let srv = ProcMacroProcessSrv { inner: Some(Arc::downgrade(&task_tx)) }; - let thread = ProcMacroProcessThread { handle, sender: task_tx }; - - Ok((thread, srv)) - } - - pub fn find_proc_macros( - &self, - dylib_path: &Path, - ) -> Result, ra_tt::ExpansionError> { - let task = ListMacrosTask { lib: dylib_path.to_path_buf() }; - - let result: ListMacrosResult = self.send_task(Request::ListMacro(task))?; - Ok(result.macros) - } - - pub fn custom_derive( - &self, - dylib_path: &Path, - subtree: &Subtree, - derive_name: &str, - ) -> Result { - let task = ExpansionTask { - macro_body: subtree.clone(), - macro_name: derive_name.to_string(), - attributes: None, - lib: dylib_path.to_path_buf(), - }; - - let result: ExpansionResult = self.send_task(Request::ExpansionMacro(task))?; - Ok(result.expansion) - } - - pub fn send_task(&self, req: 
Request) -> Result - where - R: TryFrom, - { - let sender = match &self.inner { - None => return Err(ra_tt::ExpansionError::Unknown("No sender is found.".to_string())), - Some(it) => it, - }; - - let (result_tx, result_rx) = bounded(0); - let sender = match sender.upgrade() { - None => { - return Err(ra_tt::ExpansionError::Unknown("Proc macro process is closed.".into())) - } - Some(it) => it, - }; - sender.send(Task { req: req.into(), result_tx }).unwrap(); - let res = result_rx - .recv() - .map_err(|_| ra_tt::ExpansionError::Unknown("Proc macro thread is closed.".into()))?; - - match res { - Some(Response::Error(err)) => { - return Err(ra_tt::ExpansionError::ExpansionError(err.message)); - } - Some(res) => Ok(res.try_into().map_err(|err| { - ra_tt::ExpansionError::Unknown(format!( - "Fail to get response, reason : {:#?} ", - err - )) - })?), - None => Err(ra_tt::ExpansionError::Unknown("Empty result".into())), - } - } -} - -fn client_loop(task_rx: Receiver, mut process: Process) { - let (mut stdin, mut stdout) = match process.stdio() { - None => return, - Some(it) => it, - }; - - for task in task_rx { - let Task { req, result_tx } = task; - - match send_request(&mut stdin, &mut stdout, req) { - Ok(res) => result_tx.send(res).unwrap(), - Err(_err) => { - let res = Response::Error(ResponseError { - code: ErrorCode::ServerErrorEnd, - message: "Server closed".into(), - }); - result_tx.send(res.into()).unwrap(); - // Restart the process - if process.restart().is_err() { - break; - } - let stdio = match process.stdio() { - None => break, - Some(it) => it, - }; - stdin = stdio.0; - stdout = stdio.1; - } - } - } -} - -struct Task { - req: Request, - result_tx: Sender>, -} - -struct Process { - path: PathBuf, - args: Vec, - child: Child, -} - -impl Drop for Process { - fn drop(&mut self) { - let _ = self.child.kill(); - } -} - -impl Process { - fn run( - path: PathBuf, - args: impl IntoIterator>, - ) -> io::Result { - let args = args.into_iter().map(|s| 
s.as_ref().into()).collect(); - let child = mk_child(&path, &args)?; - Ok(Process { path, args, child }) - } - - fn restart(&mut self) -> io::Result<()> { - let _ = self.child.kill(); - self.child = mk_child(&self.path, &self.args)?; - Ok(()) - } - - fn stdio(&mut self) -> Option<(impl Write, impl BufRead)> { - let stdin = self.child.stdin.take()?; - let stdout = self.child.stdout.take()?; - let read = BufReader::new(stdout); - - Some((stdin, read)) - } -} - -fn mk_child(path: &Path, args: impl IntoIterator>) -> io::Result { - Command::new(&path) - .args(args) - .stdin(Stdio::piped()) - .stdout(Stdio::piped()) - .stderr(Stdio::inherit()) - .spawn() -} - -fn send_request( - mut writer: &mut impl Write, - mut reader: &mut impl BufRead, - req: Request, -) -> io::Result> { - req.write(&mut writer)?; - Ok(Response::read(&mut reader)?) -} diff --git a/crates/ra_proc_macro/src/rpc.rs b/crates/ra_proc_macro/src/rpc.rs deleted file mode 100644 index 4ce4859263..0000000000 --- a/crates/ra_proc_macro/src/rpc.rs +++ /dev/null @@ -1,266 +0,0 @@ -//! Data struture serialization related stuff for RPC -//! -//! Defines all necessary rpc serialization data structures, -//! which includes `ra_tt` related data and some task messages. -//! Although adding `Serialize` and `Deserialize` traits to `ra_tt` directly seems -//! to be much easier, we deliberately duplicate `ra_tt` structs with `#[serde(with = "XXDef")]` -//! for separation of code responsibility. 
- -use ra_tt::{ - Delimiter, DelimiterKind, Ident, Leaf, Literal, Punct, SmolStr, Spacing, Subtree, TokenId, - TokenTree, -}; -use serde::{Deserialize, Serialize}; -use std::path::PathBuf; - -#[derive(Clone, Eq, PartialEq, Debug, Serialize, Deserialize)] -pub struct ListMacrosTask { - pub lib: PathBuf, -} - -#[derive(Clone, Eq, PartialEq, Debug, Serialize, Deserialize)] -pub enum ProcMacroKind { - CustomDerive, - FuncLike, - Attr, -} - -#[derive(Clone, Eq, PartialEq, Debug, Default, Serialize, Deserialize)] -pub struct ListMacrosResult { - pub macros: Vec<(String, ProcMacroKind)>, -} - -#[derive(Clone, Eq, PartialEq, Debug, Serialize, Deserialize)] -pub struct ExpansionTask { - /// Argument of macro call. - /// - /// In custom derive this will be a struct or enum; in attribute-like macro - underlying - /// item; in function-like macro - the macro body. - #[serde(with = "SubtreeDef")] - pub macro_body: Subtree, - - /// Name of macro to expand. - /// - /// In custom derive this is the name of the derived trait (`Serialize`, `Getters`, etc.). - /// In attribute-like and function-like macros - single name of macro itself (`show_streams`). - pub macro_name: String, - - /// Possible attributes for the attribute-like macros. 
- #[serde(with = "opt_subtree_def")] - pub attributes: Option, - - pub lib: PathBuf, -} - -#[derive(Clone, Eq, PartialEq, Debug, Default, Serialize, Deserialize)] -pub struct ExpansionResult { - #[serde(with = "SubtreeDef")] - pub expansion: Subtree, -} - -#[derive(Serialize, Deserialize)] -#[serde(remote = "DelimiterKind")] -enum DelimiterKindDef { - Parenthesis, - Brace, - Bracket, -} - -#[derive(Serialize, Deserialize)] -#[serde(remote = "TokenId")] -struct TokenIdDef(u32); - -#[derive(Serialize, Deserialize)] -#[serde(remote = "Delimiter")] -struct DelimiterDef { - #[serde(with = "TokenIdDef")] - pub id: TokenId, - #[serde(with = "DelimiterKindDef")] - pub kind: DelimiterKind, -} - -#[derive(Serialize, Deserialize)] -#[serde(remote = "Subtree")] -struct SubtreeDef { - #[serde(default, with = "opt_delimiter_def")] - pub delimiter: Option, - #[serde(with = "vec_token_tree")] - pub token_trees: Vec, -} - -#[derive(Serialize, Deserialize)] -#[serde(remote = "TokenTree")] -enum TokenTreeDef { - #[serde(with = "LeafDef")] - Leaf(Leaf), - #[serde(with = "SubtreeDef")] - Subtree(Subtree), -} - -#[derive(Serialize, Deserialize)] -#[serde(remote = "Leaf")] -enum LeafDef { - #[serde(with = "LiteralDef")] - Literal(Literal), - #[serde(with = "PunctDef")] - Punct(Punct), - #[serde(with = "IdentDef")] - Ident(Ident), -} - -#[derive(Serialize, Deserialize)] -#[serde(remote = "Literal")] -struct LiteralDef { - pub text: SmolStr, - #[serde(with = "TokenIdDef")] - pub id: TokenId, -} - -#[derive(Serialize, Deserialize)] -#[serde(remote = "Punct")] -struct PunctDef { - pub char: char, - #[serde(with = "SpacingDef")] - pub spacing: Spacing, - #[serde(with = "TokenIdDef")] - pub id: TokenId, -} - -#[derive(Serialize, Deserialize)] -#[serde(remote = "Spacing")] -enum SpacingDef { - Alone, - Joint, -} - -#[derive(Serialize, Deserialize)] -#[serde(remote = "Ident")] -struct IdentDef { - pub text: SmolStr, - #[serde(with = "TokenIdDef")] - pub id: TokenId, -} - -mod opt_delimiter_def { 
- use super::{Delimiter, DelimiterDef}; - use serde::{Deserialize, Deserializer, Serialize, Serializer}; - - pub fn serialize(value: &Option, serializer: S) -> Result - where - S: Serializer, - { - #[derive(Serialize)] - struct Helper<'a>(#[serde(with = "DelimiterDef")] &'a Delimiter); - value.as_ref().map(Helper).serialize(serializer) - } - - pub fn deserialize<'de, D>(deserializer: D) -> Result, D::Error> - where - D: Deserializer<'de>, - { - #[derive(Deserialize)] - struct Helper(#[serde(with = "DelimiterDef")] Delimiter); - let helper = Option::deserialize(deserializer)?; - Ok(helper.map(|Helper(external)| external)) - } -} - -mod opt_subtree_def { - use super::{Subtree, SubtreeDef}; - use serde::{Deserialize, Deserializer, Serialize, Serializer}; - - pub fn serialize(value: &Option, serializer: S) -> Result - where - S: Serializer, - { - #[derive(Serialize)] - struct Helper<'a>(#[serde(with = "SubtreeDef")] &'a Subtree); - value.as_ref().map(Helper).serialize(serializer) - } - - pub fn deserialize<'de, D>(deserializer: D) -> Result, D::Error> - where - D: Deserializer<'de>, - { - #[derive(Deserialize)] - struct Helper(#[serde(with = "SubtreeDef")] Subtree); - let helper = Option::deserialize(deserializer)?; - Ok(helper.map(|Helper(external)| external)) - } -} - -mod vec_token_tree { - use super::{TokenTree, TokenTreeDef}; - use serde::{ser::SerializeSeq, Deserialize, Deserializer, Serialize, Serializer}; - - pub fn serialize(value: &Vec, serializer: S) -> Result - where - S: Serializer, - { - #[derive(Serialize)] - struct Helper<'a>(#[serde(with = "TokenTreeDef")] &'a TokenTree); - - let items: Vec<_> = value.iter().map(Helper).collect(); - let mut seq = serializer.serialize_seq(Some(items.len()))?; - for element in items { - seq.serialize_element(&element)?; - } - seq.end() - } - - pub fn deserialize<'de, D>(deserializer: D) -> Result, D::Error> - where - D: Deserializer<'de>, - { - #[derive(Deserialize)] - struct Helper(#[serde(with = "TokenTreeDef")] 
TokenTree); - - let helper = Vec::deserialize(deserializer)?; - Ok(helper.into_iter().map(|Helper(external)| external).collect()) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - fn fixture_token_tree() -> Subtree { - let mut subtree = Subtree::default(); - subtree - .token_trees - .push(TokenTree::Leaf(Ident { text: "struct".into(), id: TokenId(0) }.into())); - subtree - .token_trees - .push(TokenTree::Leaf(Ident { text: "Foo".into(), id: TokenId(1) }.into())); - subtree.token_trees.push(TokenTree::Subtree( - Subtree { - delimiter: Some(Delimiter { id: TokenId(2), kind: DelimiterKind::Brace }), - token_trees: vec![], - } - .into(), - )); - subtree - } - - #[test] - fn test_proc_macro_rpc_works() { - let tt = fixture_token_tree(); - let task = ExpansionTask { - macro_body: tt.clone(), - macro_name: Default::default(), - attributes: None, - lib: Default::default(), - }; - - let json = serde_json::to_string(&task).unwrap(); - let back: ExpansionTask = serde_json::from_str(&json).unwrap(); - - assert_eq!(task.macro_body, back.macro_body); - - let result = ExpansionResult { expansion: tt.clone() }; - let json = serde_json::to_string(&result).unwrap(); - let back: ExpansionResult = serde_json::from_str(&json).unwrap(); - - assert_eq!(result, back); - } -} diff --git a/crates/ra_proc_macro_srv/Cargo.toml b/crates/ra_proc_macro_srv/Cargo.toml deleted file mode 100644 index bc119a6c71..0000000000 --- a/crates/ra_proc_macro_srv/Cargo.toml +++ /dev/null @@ -1,26 +0,0 @@ -[package] -edition = "2018" -name = "ra_proc_macro_srv" -version = "0.1.0" -authors = ["rust-analyzer developers"] -publish = false -license = "MIT OR Apache-2.0" - -[lib] -doctest = false - -[dependencies] -ra_tt = { path = "../ra_tt" } -ra_mbe = { path = "../ra_mbe" } -ra_proc_macro = { path = "../ra_proc_macro" } -goblin = "0.2.1" -libloading = "0.6.0" -memmap = "0.7" -test_utils = { path = "../test_utils" } - -[dev-dependencies] -cargo_metadata = "0.11.1" -difference = "2.0.0" -# used as proc 
macro test target -serde_derive = "1.0.106" -ra_toolchain = { path = "../ra_toolchain" } diff --git a/crates/ra_proc_macro_srv/src/cli.rs b/crates/ra_proc_macro_srv/src/cli.rs deleted file mode 100644 index 1437794c9e..0000000000 --- a/crates/ra_proc_macro_srv/src/cli.rs +++ /dev/null @@ -1,39 +0,0 @@ -//! Driver for proc macro server - -use crate::ProcMacroSrv; -use ra_proc_macro::msg::{self, Message}; -use std::io; - -pub fn run() -> io::Result<()> { - let mut srv = ProcMacroSrv::default(); - - while let Some(req) = read_request()? { - let res = match req { - msg::Request::ListMacro(task) => srv.list_macros(&task).map(msg::Response::ListMacro), - msg::Request::ExpansionMacro(task) => { - srv.expand(&task).map(msg::Response::ExpansionMacro) - } - }; - - let msg = res.unwrap_or_else(|err| { - msg::Response::Error(msg::ResponseError { - code: msg::ErrorCode::ExpansionError, - message: err, - }) - }); - - if let Err(err) = write_response(msg) { - eprintln!("Write message error: {}", err); - } - } - - Ok(()) -} - -fn read_request() -> io::Result> { - msg::Request::read(&mut io::stdin().lock()) -} - -fn write_response(msg: msg::Response) -> io::Result<()> { - msg.write(&mut io::stdout().lock()) -} diff --git a/crates/ra_proc_macro_srv/src/dylib.rs b/crates/ra_proc_macro_srv/src/dylib.rs deleted file mode 100644 index 1addbbd54f..0000000000 --- a/crates/ra_proc_macro_srv/src/dylib.rs +++ /dev/null @@ -1,224 +0,0 @@ -//! 
Handles dynamic library loading for proc macro - -use crate::{proc_macro::bridge, rustc_server::TokenStream}; -use std::fs::File; -use std::path::{Path, PathBuf}; - -use goblin::{mach::Mach, Object}; -use libloading::Library; -use memmap::Mmap; -use ra_proc_macro::ProcMacroKind; -use std::io; - -const NEW_REGISTRAR_SYMBOL: &str = "_rustc_proc_macro_decls_"; - -fn invalid_data_err(e: impl Into>) -> io::Error { - io::Error::new(io::ErrorKind::InvalidData, e) -} - -fn is_derive_registrar_symbol(symbol: &str) -> bool { - symbol.contains(NEW_REGISTRAR_SYMBOL) -} - -fn find_registrar_symbol(file: &Path) -> io::Result> { - let file = File::open(file)?; - let buffer = unsafe { Mmap::map(&file)? }; - let object = Object::parse(&buffer).map_err(invalid_data_err)?; - - let name = match object { - Object::Elf(elf) => { - let symbols = elf.dynstrtab.to_vec().map_err(invalid_data_err)?; - symbols.into_iter().find(|s| is_derive_registrar_symbol(s)).map(&str::to_owned) - } - Object::PE(pe) => pe - .exports - .iter() - .flat_map(|s| s.name) - .find(|s| is_derive_registrar_symbol(s)) - .map(&str::to_owned), - Object::Mach(Mach::Binary(binary)) => { - let exports = binary.exports().map_err(invalid_data_err)?; - exports - .iter() - .map(|s| { - // In macos doc: - // https://developer.apple.com/library/archive/documentation/System/Conceptual/ManPages_iPhoneOS/man3/dlsym.3.html - // Unlike other dyld API's, the symbol name passed to dlsym() must NOT be - // prepended with an underscore. - if s.name.starts_with('_') { - &s.name[1..] - } else { - &s.name - } - }) - .find(|s| is_derive_registrar_symbol(s)) - .map(&str::to_owned) - } - _ => return Ok(None), - }; - return Ok(name); -} - -/// Loads dynamic library in platform dependent manner. -/// -/// For unix, you have to use RTLD_DEEPBIND flag to escape problems described -/// [here](https://github.com/fedochet/rust-proc-macro-panic-inside-panic-expample) -/// and [here](https://github.com/rust-lang/rust/issues/60593). 
-/// -/// Usage of RTLD_DEEPBIND -/// [here](https://github.com/fedochet/rust-proc-macro-panic-inside-panic-expample/issues/1) -/// -/// It seems that on Windows that behaviour is default, so we do nothing in that case. -#[cfg(windows)] -fn load_library(file: &Path) -> Result { - Library::new(file) -} - -#[cfg(unix)] -fn load_library(file: &Path) -> Result { - use libloading::os::unix::Library as UnixLibrary; - use std::os::raw::c_int; - - const RTLD_NOW: c_int = 0x00002; - const RTLD_DEEPBIND: c_int = 0x00008; - - UnixLibrary::open(Some(file), RTLD_NOW | RTLD_DEEPBIND).map(|lib| lib.into()) -} - -struct ProcMacroLibraryLibloading { - // Hold the dylib to prevent it from unloading - _lib: Library, - exported_macros: Vec, -} - -impl ProcMacroLibraryLibloading { - fn open(file: &Path) -> io::Result { - let symbol_name = find_registrar_symbol(file)?.ok_or_else(|| { - invalid_data_err(format!("Cannot find registrar symbol in file {}", file.display())) - })?; - - let lib = load_library(file).map_err(invalid_data_err)?; - let exported_macros = { - let macros: libloading::Symbol<&&[bridge::client::ProcMacro]> = - unsafe { lib.get(symbol_name.as_bytes()) }.map_err(invalid_data_err)?; - macros.to_vec() - }; - - Ok(ProcMacroLibraryLibloading { _lib: lib, exported_macros }) - } -} - -pub struct Expander { - inner: ProcMacroLibraryLibloading, -} - -impl Expander { - pub fn new(lib: &Path) -> io::Result { - // Some libraries for dynamic loading require canonicalized path even when it is - // already absolute - let lib = lib.canonicalize()?; - - let lib = ensure_file_with_lock_free_access(&lib)?; - - let library = ProcMacroLibraryLibloading::open(&lib)?; - - Ok(Expander { inner: library }) - } - - pub fn expand( - &self, - macro_name: &str, - macro_body: &ra_tt::Subtree, - attributes: Option<&ra_tt::Subtree>, - ) -> Result { - let parsed_body = TokenStream::with_subtree(macro_body.clone()); - - let parsed_attributes = attributes - .map_or(crate::rustc_server::TokenStream::new(), 
|attr| { - TokenStream::with_subtree(attr.clone()) - }); - - for proc_macro in &self.inner.exported_macros { - match proc_macro { - bridge::client::ProcMacro::CustomDerive { trait_name, client, .. } - if *trait_name == macro_name => - { - let res = client.run( - &crate::proc_macro::bridge::server::SameThread, - crate::rustc_server::Rustc::default(), - parsed_body, - ); - return res.map(|it| it.subtree); - } - bridge::client::ProcMacro::Bang { name, client } if *name == macro_name => { - let res = client.run( - &crate::proc_macro::bridge::server::SameThread, - crate::rustc_server::Rustc::default(), - parsed_body, - ); - return res.map(|it| it.subtree); - } - bridge::client::ProcMacro::Attr { name, client } if *name == macro_name => { - let res = client.run( - &crate::proc_macro::bridge::server::SameThread, - crate::rustc_server::Rustc::default(), - parsed_attributes, - parsed_body, - ); - return res.map(|it| it.subtree); - } - _ => continue, - } - } - - Err(bridge::PanicMessage::String("Nothing to expand".to_string())) - } - - pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> { - self.inner - .exported_macros - .iter() - .map(|proc_macro| match proc_macro { - bridge::client::ProcMacro::CustomDerive { trait_name, .. } => { - (trait_name.to_string(), ProcMacroKind::CustomDerive) - } - bridge::client::ProcMacro::Bang { name, .. } => { - (name.to_string(), ProcMacroKind::FuncLike) - } - bridge::client::ProcMacro::Attr { name, .. 
} => { - (name.to_string(), ProcMacroKind::Attr) - } - }) - .collect() - } -} - -/// Copy the dylib to temp directory to prevent locking in Windows -#[cfg(windows)] -fn ensure_file_with_lock_free_access(path: &Path) -> io::Result { - use std::{ffi::OsString, time::SystemTime}; - - let mut to = std::env::temp_dir(); - - let file_name = path.file_name().ok_or_else(|| { - io::Error::new( - io::ErrorKind::InvalidInput, - format!("File path is invalid: {}", path.display()), - ) - })?; - - // generate a time deps unique number - let t = SystemTime::now().duration_since(std::time::UNIX_EPOCH).expect("Time went backwards"); - - let mut unique_name = OsString::from(t.as_millis().to_string()); - unique_name.push(file_name); - - to.push(unique_name); - std::fs::copy(path, &to).unwrap(); - Ok(to) -} - -#[cfg(unix)] -fn ensure_file_with_lock_free_access(path: &Path) -> io::Result { - Ok(path.to_path_buf()) -} diff --git a/crates/ra_proc_macro_srv/src/lib.rs b/crates/ra_proc_macro_srv/src/lib.rs deleted file mode 100644 index 922bb84bbf..0000000000 --- a/crates/ra_proc_macro_srv/src/lib.rs +++ /dev/null @@ -1,69 +0,0 @@ -//! RA Proc Macro Server -//! -//! This library is able to call compiled Rust custom derive dynamic libraries on arbitrary code. -//! The general idea here is based on https://github.com/fedochet/rust-proc-macro-expander. -//! -//! But we adapt it to better fit RA needs: -//! -//! * We use `ra_tt` for proc-macro `TokenStream` server, it is easier to manipulate and interact with -//! RA than `proc-macro2` token stream. -//! * By **copying** the whole rustc `lib_proc_macro` code, we are able to build this with `stable` -//! rustc rather than `unstable`. 
(Although in gerenal ABI compatibility is still an issue) - -#[allow(dead_code)] -#[doc(hidden)] -mod proc_macro; - -#[doc(hidden)] -mod rustc_server; - -mod dylib; - -use proc_macro::bridge::client::TokenStream; -use ra_proc_macro::{ExpansionResult, ExpansionTask, ListMacrosResult, ListMacrosTask}; -use std::{ - collections::{hash_map::Entry, HashMap}, - fs, - path::{Path, PathBuf}, - time::SystemTime, -}; - -#[derive(Default)] -pub(crate) struct ProcMacroSrv { - expanders: HashMap<(PathBuf, SystemTime), dylib::Expander>, -} - -impl ProcMacroSrv { - pub fn expand(&mut self, task: &ExpansionTask) -> Result { - let expander = self.expander(&task.lib)?; - match expander.expand(&task.macro_name, &task.macro_body, task.attributes.as_ref()) { - Ok(expansion) => Ok(ExpansionResult { expansion }), - Err(msg) => { - Err(format!("Cannot perform expansion for {}: error {:?}", &task.macro_name, msg)) - } - } - } - - pub fn list_macros(&mut self, task: &ListMacrosTask) -> Result { - let expander = self.expander(&task.lib)?; - Ok(ListMacrosResult { macros: expander.list_macros() }) - } - - fn expander(&mut self, path: &Path) -> Result<&dylib::Expander, String> { - let time = fs::metadata(path).and_then(|it| it.modified()).map_err(|err| { - format!("Failed to get file metadata for {}: {:?}", path.display(), err) - })?; - - Ok(match self.expanders.entry((path.to_path_buf(), time)) { - Entry::Vacant(v) => v.insert(dylib::Expander::new(path).map_err(|err| { - format!("Cannot create expander for {}: {:?}", path.display(), err) - })?), - Entry::Occupied(e) => e.into_mut(), - }) - } -} - -pub mod cli; - -#[cfg(test)] -mod tests; diff --git a/crates/ra_proc_macro_srv/src/rustc_server.rs b/crates/ra_proc_macro_srv/src/rustc_server.rs deleted file mode 100644 index cc32d5a6dd..0000000000 --- a/crates/ra_proc_macro_srv/src/rustc_server.rs +++ /dev/null @@ -1,704 +0,0 @@ -//! Rustc proc-macro server implementation with ra_tt -//! -//! 
Based on idea from https://github.com/fedochet/rust-proc-macro-expander -//! The lib-proc-macro server backend is `TokenStream`-agnostic, such that -//! we could provide any TokenStream implementation. -//! The original idea from fedochet is using proc-macro2 as backend, -//! we use ra_tt instead for better intergation with RA. -//! -//! FIXME: No span and source file information is implemented yet - -use crate::proc_macro::bridge::{self, server}; -use ra_tt as tt; - -use std::collections::{Bound, HashMap}; -use std::hash::Hash; -use std::iter::FromIterator; -use std::str::FromStr; -use std::{ascii, vec::IntoIter}; - -type Group = tt::Subtree; -type TokenTree = tt::TokenTree; -type Punct = tt::Punct; -type Spacing = tt::Spacing; -type Literal = tt::Literal; -type Span = tt::TokenId; - -#[derive(Debug, Clone)] -pub struct TokenStream { - pub subtree: tt::Subtree, -} - -impl TokenStream { - pub fn new() -> Self { - TokenStream { subtree: Default::default() } - } - - pub fn with_subtree(subtree: tt::Subtree) -> Self { - TokenStream { subtree } - } - - pub fn is_empty(&self) -> bool { - self.subtree.token_trees.is_empty() - } -} - -/// Creates a token stream containing a single token tree. -impl From for TokenStream { - fn from(tree: TokenTree) -> TokenStream { - TokenStream { subtree: tt::Subtree { delimiter: None, token_trees: vec![tree] } } - } -} - -/// Collects a number of token trees into a single stream. -impl FromIterator for TokenStream { - fn from_iter>(trees: I) -> Self { - trees.into_iter().map(TokenStream::from).collect() - } -} - -/// A "flattening" operation on token streams, collects token trees -/// from multiple token streams into a single stream. 
-impl FromIterator for TokenStream { - fn from_iter>(streams: I) -> Self { - let mut builder = TokenStreamBuilder::new(); - streams.into_iter().for_each(|stream| builder.push(stream)); - builder.build() - } -} - -impl Extend for TokenStream { - fn extend>(&mut self, trees: I) { - self.extend(trees.into_iter().map(TokenStream::from)); - } -} - -impl Extend for TokenStream { - fn extend>(&mut self, streams: I) { - for item in streams { - for tkn in item { - match tkn { - tt::TokenTree::Subtree(subtree) if subtree.delimiter.is_none() => { - self.subtree.token_trees.extend(subtree.token_trees); - } - _ => { - self.subtree.token_trees.push(tkn); - } - } - } - } - } -} - -type Level = crate::proc_macro::Level; -type LineColumn = crate::proc_macro::LineColumn; -type SourceFile = crate::proc_macro::SourceFile; - -/// A structure representing a diagnostic message and associated children -/// messages. -#[derive(Clone, Debug)] -pub struct Diagnostic { - level: Level, - message: String, - spans: Vec, - children: Vec, -} - -impl Diagnostic { - /// Creates a new diagnostic with the given `level` and `message`. 
- pub fn new>(level: Level, message: T) -> Diagnostic { - Diagnostic { level, message: message.into(), spans: vec![], children: vec![] } - } -} - -// Rustc Server Ident has to be `Copyable` -// We use a stub here for bypassing -#[derive(Hash, Eq, PartialEq, Copy, Clone)] -pub struct IdentId(u32); - -#[derive(Clone, Hash, Eq, PartialEq)] -struct IdentData(tt::Ident); - -#[derive(Default)] -struct IdentInterner { - idents: HashMap, - ident_data: Vec, -} - -impl IdentInterner { - fn intern(&mut self, data: &IdentData) -> u32 { - if let Some(index) = self.idents.get(data) { - return *index; - } - - let index = self.idents.len() as u32; - self.ident_data.push(data.clone()); - self.idents.insert(data.clone(), index); - index - } - - fn get(&self, index: u32) -> &IdentData { - &self.ident_data[index as usize] - } - - #[allow(unused)] - fn get_mut(&mut self, index: u32) -> &mut IdentData { - self.ident_data.get_mut(index as usize).expect("Should be consistent") - } -} - -pub struct TokenStreamBuilder { - acc: TokenStream, -} - -/// Public implementation details for the `TokenStream` type, such as iterators. -pub mod token_stream { - use super::{tt, TokenStream, TokenTree}; - use std::str::FromStr; - - /// An iterator over `TokenStream`'s `TokenTree`s. - /// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups, - /// and returns whole groups as token trees. - impl IntoIterator for TokenStream { - type Item = TokenTree; - type IntoIter = super::IntoIter; - - fn into_iter(self) -> Self::IntoIter { - self.subtree.token_trees.into_iter() - } - } - - type LexError = String; - - /// Attempts to break the string into tokens and parse those tokens into a token stream. - /// May fail for a number of reasons, for example, if the string contains unbalanced delimiters - /// or characters not existing in the language. - /// All tokens in the parsed stream get `Span::call_site()` spans. 
- /// - /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to - /// change these errors into `LexError`s later. - impl FromStr for TokenStream { - type Err = LexError; - - fn from_str(src: &str) -> Result { - let (subtree, _token_map) = - ra_mbe::parse_to_token_tree(src).ok_or("Failed to parse from mbe")?; - - let tt: tt::TokenTree = subtree.into(); - Ok(tt.into()) - } - } - - impl ToString for TokenStream { - fn to_string(&self) -> String { - let tt = self.subtree.clone().into(); - to_text(&tt) - } - } - - fn to_text(tkn: &tt::TokenTree) -> String { - match tkn { - tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => ident.text.clone().into(), - tt::TokenTree::Leaf(tt::Leaf::Literal(literal)) => literal.text.clone().into(), - tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => format!("{}", punct.char), - tt::TokenTree::Subtree(subtree) => { - let content = subtree - .token_trees - .iter() - .map(|tkn| { - let s = to_text(tkn); - if let tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) = tkn { - if punct.spacing == tt::Spacing::Alone { - return s + " "; - } - } - s - }) - .collect::>() - .concat(); - let (open, close) = match subtree.delimiter.map(|it| it.kind) { - None => ("", ""), - Some(tt::DelimiterKind::Brace) => ("{", "}"), - Some(tt::DelimiterKind::Parenthesis) => ("(", ")"), - Some(tt::DelimiterKind::Bracket) => ("[", "]"), - }; - format!("{}{}{}", open, content, close) - } - } - } -} - -impl TokenStreamBuilder { - fn new() -> TokenStreamBuilder { - TokenStreamBuilder { acc: TokenStream::new() } - } - - fn push(&mut self, stream: TokenStream) { - self.acc.extend(stream.into_iter()) - } - - fn build(self) -> TokenStream { - self.acc - } -} - -#[derive(Clone)] -pub struct TokenStreamIter { - trees: IntoIter, -} - -#[derive(Default)] -pub struct Rustc { - ident_interner: IdentInterner, - // FIXME: store span information here. 
-} - -impl server::Types for Rustc { - type TokenStream = TokenStream; - type TokenStreamBuilder = TokenStreamBuilder; - type TokenStreamIter = TokenStreamIter; - type Group = Group; - type Punct = Punct; - type Ident = IdentId; - type Literal = Literal; - type SourceFile = SourceFile; - type Diagnostic = Diagnostic; - type Span = Span; - type MultiSpan = Vec; -} - -impl server::TokenStream for Rustc { - fn new(&mut self) -> Self::TokenStream { - Self::TokenStream::new() - } - - fn is_empty(&mut self, stream: &Self::TokenStream) -> bool { - stream.is_empty() - } - fn from_str(&mut self, src: &str) -> Self::TokenStream { - Self::TokenStream::from_str(src).expect("cannot parse string") - } - fn to_string(&mut self, stream: &Self::TokenStream) -> String { - stream.to_string() - } - fn from_token_tree( - &mut self, - tree: bridge::TokenTree, - ) -> Self::TokenStream { - match tree { - bridge::TokenTree::Group(group) => { - let tree = TokenTree::from(group); - Self::TokenStream::from_iter(vec![tree]) - } - - bridge::TokenTree::Ident(IdentId(index)) => { - let IdentData(ident) = self.ident_interner.get(index).clone(); - let ident: tt::Ident = ident; - let leaf = tt::Leaf::from(ident); - let tree = TokenTree::from(leaf); - Self::TokenStream::from_iter(vec![tree]) - } - - bridge::TokenTree::Literal(literal) => { - let leaf = tt::Leaf::from(literal); - let tree = TokenTree::from(leaf); - Self::TokenStream::from_iter(vec![tree]) - } - - bridge::TokenTree::Punct(p) => { - let leaf = tt::Leaf::from(p); - let tree = TokenTree::from(leaf); - Self::TokenStream::from_iter(vec![tree]) - } - } - } - - fn into_iter(&mut self, stream: Self::TokenStream) -> Self::TokenStreamIter { - let trees: Vec = stream.into_iter().collect(); - TokenStreamIter { trees: trees.into_iter() } - } -} - -impl server::TokenStreamBuilder for Rustc { - fn new(&mut self) -> Self::TokenStreamBuilder { - Self::TokenStreamBuilder::new() - } - fn push(&mut self, builder: &mut Self::TokenStreamBuilder, stream: 
Self::TokenStream) { - builder.push(stream) - } - fn build(&mut self, builder: Self::TokenStreamBuilder) -> Self::TokenStream { - builder.build() - } -} - -impl server::TokenStreamIter for Rustc { - fn next( - &mut self, - iter: &mut Self::TokenStreamIter, - ) -> Option> { - iter.trees.next().map(|tree| match tree { - TokenTree::Subtree(group) => bridge::TokenTree::Group(group), - TokenTree::Leaf(tt::Leaf::Ident(ident)) => { - bridge::TokenTree::Ident(IdentId(self.ident_interner.intern(&IdentData(ident)))) - } - TokenTree::Leaf(tt::Leaf::Literal(literal)) => bridge::TokenTree::Literal(literal), - TokenTree::Leaf(tt::Leaf::Punct(punct)) => bridge::TokenTree::Punct(punct), - }) - } -} - -fn delim_to_internal(d: bridge::Delimiter) -> Option { - let kind = match d { - bridge::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis, - bridge::Delimiter::Brace => tt::DelimiterKind::Brace, - bridge::Delimiter::Bracket => tt::DelimiterKind::Bracket, - bridge::Delimiter::None => return None, - }; - Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind }) -} - -fn delim_to_external(d: Option) -> bridge::Delimiter { - match d.map(|it| it.kind) { - Some(tt::DelimiterKind::Parenthesis) => bridge::Delimiter::Parenthesis, - Some(tt::DelimiterKind::Brace) => bridge::Delimiter::Brace, - Some(tt::DelimiterKind::Bracket) => bridge::Delimiter::Bracket, - None => bridge::Delimiter::None, - } -} - -fn spacing_to_internal(spacing: bridge::Spacing) -> Spacing { - match spacing { - bridge::Spacing::Alone => Spacing::Alone, - bridge::Spacing::Joint => Spacing::Joint, - } -} - -fn spacing_to_external(spacing: Spacing) -> bridge::Spacing { - match spacing { - Spacing::Alone => bridge::Spacing::Alone, - Spacing::Joint => bridge::Spacing::Joint, - } -} - -impl server::Group for Rustc { - fn new(&mut self, delimiter: bridge::Delimiter, stream: Self::TokenStream) -> Self::Group { - Self::Group { - delimiter: delim_to_internal(delimiter), - token_trees: stream.subtree.token_trees, - } - } - 
fn delimiter(&mut self, group: &Self::Group) -> bridge::Delimiter { - delim_to_external(group.delimiter) - } - - // NOTE: Return value of do not include delimiter - fn stream(&mut self, group: &Self::Group) -> Self::TokenStream { - TokenStream { - subtree: tt::Subtree { delimiter: None, token_trees: group.token_trees.clone() }, - } - } - - fn span(&mut self, group: &Self::Group) -> Self::Span { - group.delimiter.map(|it| it.id).unwrap_or_else(|| tt::TokenId::unspecified()) - } - - fn set_span(&mut self, _group: &mut Self::Group, _span: Self::Span) { - // FIXME handle span - } - - fn span_open(&mut self, _group: &Self::Group) -> Self::Span { - // FIXME handle span - // MySpan(self.span_interner.intern(&MySpanData(group.span_open()))) - tt::TokenId::unspecified() - } - - fn span_close(&mut self, _group: &Self::Group) -> Self::Span { - // FIXME handle span - tt::TokenId::unspecified() - } -} - -impl server::Punct for Rustc { - fn new(&mut self, ch: char, spacing: bridge::Spacing) -> Self::Punct { - tt::Punct { - char: ch, - spacing: spacing_to_internal(spacing), - id: tt::TokenId::unspecified(), - } - } - fn as_char(&mut self, punct: Self::Punct) -> char { - punct.char - } - fn spacing(&mut self, punct: Self::Punct) -> bridge::Spacing { - spacing_to_external(punct.spacing) - } - fn span(&mut self, _punct: Self::Punct) -> Self::Span { - // FIXME handle span - tt::TokenId::unspecified() - } - fn with_span(&mut self, punct: Self::Punct, _span: Self::Span) -> Self::Punct { - // FIXME handle span - punct - } -} - -impl server::Ident for Rustc { - fn new(&mut self, string: &str, _span: Self::Span, _is_raw: bool) -> Self::Ident { - IdentId( - self.ident_interner.intern(&IdentData(tt::Ident { - text: string.into(), - id: tt::TokenId::unspecified(), - })), - ) - } - - fn span(&mut self, _ident: Self::Ident) -> Self::Span { - // FIXME handle span - tt::TokenId::unspecified() - } - fn with_span(&mut self, ident: Self::Ident, _span: Self::Span) -> Self::Ident { - // FIXME handle 
span - ident - } -} - -impl server::Literal for Rustc { - fn debug_kind(&mut self, _literal: &Self::Literal) -> String { - // r-a: debug_kind and suffix are unsupported; corresponding client code has been changed to not call these. - // They must still be present to be ABI-compatible and work with upstream proc_macro. - "".to_owned() - } - fn symbol(&mut self, literal: &Self::Literal) -> String { - literal.text.to_string() - } - fn suffix(&mut self, _literal: &Self::Literal) -> Option { - None - } - - fn integer(&mut self, n: &str) -> Self::Literal { - let n: i128 = n.parse().unwrap(); - Literal { text: n.to_string().into(), id: tt::TokenId::unspecified() } - } - - fn typed_integer(&mut self, n: &str, kind: &str) -> Self::Literal { - macro_rules! def_suffixed_integer { - ($kind:ident, $($ty:ty),*) => { - match $kind { - $( - stringify!($ty) => { - let n: $ty = n.parse().unwrap(); - format!(concat!("{}", stringify!($ty)), n) - } - )* - _ => unimplemented!("unknown args for typed_integer: n {}, kind {}", n, $kind), - } - } - } - - let text = - def_suffixed_integer! 
{kind, u8, u16, u32, u64, u128, usize, i8, i16, i32, i64, i128}; - - Literal { text: text.into(), id: tt::TokenId::unspecified() } - } - - fn float(&mut self, n: &str) -> Self::Literal { - let n: f64 = n.parse().unwrap(); - let mut text = f64::to_string(&n); - if !text.contains('.') { - text += ".0" - } - Literal { text: text.into(), id: tt::TokenId::unspecified() } - } - - fn f32(&mut self, n: &str) -> Self::Literal { - let n: f32 = n.parse().unwrap(); - let text = format!("{}f32", n); - Literal { text: text.into(), id: tt::TokenId::unspecified() } - } - - fn f64(&mut self, n: &str) -> Self::Literal { - let n: f64 = n.parse().unwrap(); - let text = format!("{}f64", n); - Literal { text: text.into(), id: tt::TokenId::unspecified() } - } - - fn string(&mut self, string: &str) -> Self::Literal { - let mut escaped = String::new(); - for ch in string.chars() { - escaped.extend(ch.escape_debug()); - } - Literal { text: format!("\"{}\"", escaped).into(), id: tt::TokenId::unspecified() } - } - - fn character(&mut self, ch: char) -> Self::Literal { - Literal { text: format!("'{}'", ch).into(), id: tt::TokenId::unspecified() } - } - - fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal { - let string = bytes - .iter() - .cloned() - .flat_map(ascii::escape_default) - .map(Into::::into) - .collect::(); - - Literal { text: format!("b\"{}\"", string).into(), id: tt::TokenId::unspecified() } - } - - fn span(&mut self, literal: &Self::Literal) -> Self::Span { - literal.id - } - - fn set_span(&mut self, _literal: &mut Self::Literal, _span: Self::Span) { - // FIXME handle span - } - - fn subspan( - &mut self, - _literal: &Self::Literal, - _start: Bound, - _end: Bound, - ) -> Option { - // FIXME handle span - None - } -} - -impl server::SourceFile for Rustc { - fn eq(&mut self, file1: &Self::SourceFile, file2: &Self::SourceFile) -> bool { - file1.eq(file2) - } - fn path(&mut self, file: &Self::SourceFile) -> String { - String::from( - file.path().to_str().expect("non-UTF8 file 
path in `proc_macro::SourceFile::path`"), - ) - } - fn is_real(&mut self, file: &Self::SourceFile) -> bool { - file.is_real() - } -} - -impl server::Diagnostic for Rustc { - fn new(&mut self, level: Level, msg: &str, spans: Self::MultiSpan) -> Self::Diagnostic { - let mut diag = Diagnostic::new(level, msg); - diag.spans = spans; - diag - } - - fn sub( - &mut self, - _diag: &mut Self::Diagnostic, - _level: Level, - _msg: &str, - _spans: Self::MultiSpan, - ) { - // FIXME handle diagnostic - // - } - - fn emit(&mut self, _diag: Self::Diagnostic) { - // FIXME handle diagnostic - // diag.emit() - } -} - -impl server::Span for Rustc { - fn debug(&mut self, span: Self::Span) -> String { - format!("{:?}", span.0) - } - fn def_site(&mut self) -> Self::Span { - // MySpan(self.span_interner.intern(&MySpanData(Span::def_site()))) - // FIXME handle span - tt::TokenId::unspecified() - } - fn call_site(&mut self) -> Self::Span { - // MySpan(self.span_interner.intern(&MySpanData(Span::call_site()))) - // FIXME handle span - tt::TokenId::unspecified() - } - fn source_file(&mut self, _span: Self::Span) -> Self::SourceFile { - // let MySpanData(span) = self.span_interner.get(span.0); - unimplemented!() - } - - /// Recent feature, not yet in the proc_macro - /// - /// See PR: - /// https://github.com/rust-lang/rust/pull/55780 - fn source_text(&mut self, _span: Self::Span) -> Option { - None - } - - fn parent(&mut self, _span: Self::Span) -> Option { - // FIXME handle span - None - } - fn source(&mut self, span: Self::Span) -> Self::Span { - // FIXME handle span - span - } - fn start(&mut self, _span: Self::Span) -> LineColumn { - // FIXME handle span - LineColumn { line: 0, column: 0 } - } - fn end(&mut self, _span: Self::Span) -> LineColumn { - // FIXME handle span - LineColumn { line: 0, column: 0 } - } - fn join(&mut self, _first: Self::Span, _second: Self::Span) -> Option { - None - } - fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span { - // FIXME handle 
span - tt::TokenId::unspecified() - } - - fn mixed_site(&mut self) -> Self::Span { - // FIXME handle span - tt::TokenId::unspecified() - } -} - -impl server::MultiSpan for Rustc { - fn new(&mut self) -> Self::MultiSpan { - // FIXME handle span - vec![] - } - - fn push(&mut self, other: &mut Self::MultiSpan, span: Self::Span) { - //TODP - other.push(span) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::proc_macro::bridge::server::Literal; - - #[test] - fn test_rustc_server_literals() { - let mut srv = Rustc { ident_interner: IdentInterner::default() }; - assert_eq!(srv.integer("1234").text, "1234"); - - assert_eq!(srv.typed_integer("12", "u8").text, "12u8"); - assert_eq!(srv.typed_integer("255", "u16").text, "255u16"); - assert_eq!(srv.typed_integer("1234", "u32").text, "1234u32"); - assert_eq!(srv.typed_integer("15846685", "u64").text, "15846685u64"); - assert_eq!(srv.typed_integer("15846685258", "u128").text, "15846685258u128"); - assert_eq!(srv.typed_integer("156788984", "usize").text, "156788984usize"); - assert_eq!(srv.typed_integer("127", "i8").text, "127i8"); - assert_eq!(srv.typed_integer("255", "i16").text, "255i16"); - assert_eq!(srv.typed_integer("1234", "i32").text, "1234i32"); - assert_eq!(srv.typed_integer("15846685", "i64").text, "15846685i64"); - assert_eq!(srv.typed_integer("15846685258", "i128").text, "15846685258i128"); - assert_eq!(srv.float("0").text, "0.0"); - assert_eq!(srv.float("15684.5867").text, "15684.5867"); - assert_eq!(srv.f32("15684.58").text, "15684.58f32"); - assert_eq!(srv.f64("15684.58").text, "15684.58f64"); - - assert_eq!(srv.string("hello_world").text, "\"hello_world\""); - assert_eq!(srv.character('c').text, "'c'"); - assert_eq!(srv.byte_string(b"1234586\x88").text, "b\"1234586\\x88\""); - } -} diff --git a/crates/ra_proc_macro_srv/src/tests/utils.rs b/crates/ra_proc_macro_srv/src/tests/utils.rs deleted file mode 100644 index dcb00671ff..0000000000 --- a/crates/ra_proc_macro_srv/src/tests/utils.rs +++ 
/dev/null @@ -1,64 +0,0 @@ -//! utils used in proc-macro tests - -use crate::dylib; -use crate::ProcMacroSrv; -use ra_proc_macro::ListMacrosTask; -use std::str::FromStr; -use test_utils::assert_eq_text; - -mod fixtures { - use cargo_metadata::Message; - use std::process::Command; - - // Use current project metadata to get the proc-macro dylib path - pub fn dylib_path(crate_name: &str, version: &str) -> std::path::PathBuf { - let command = Command::new(ra_toolchain::cargo()) - .args(&["check", "--message-format", "json"]) - .output() - .unwrap() - .stdout; - - for message in Message::parse_stream(command.as_slice()) { - match message.unwrap() { - Message::CompilerArtifact(artifact) => { - if artifact.target.kind.contains(&"proc-macro".to_string()) { - let repr = format!("{} {}", crate_name, version); - if artifact.package_id.repr.starts_with(&repr) { - return artifact.filenames[0].clone(); - } - } - } - _ => (), // Unknown message - } - } - - panic!("No proc-macro dylib for {} found!", crate_name); - } -} - -fn parse_string(code: &str) -> Option { - Some(crate::rustc_server::TokenStream::from_str(code).unwrap()) -} - -pub fn assert_expand( - crate_name: &str, - macro_name: &str, - version: &str, - ra_fixture: &str, - expect: &str, -) { - let path = fixtures::dylib_path(crate_name, version); - let expander = dylib::Expander::new(&path).unwrap(); - let fixture = parse_string(ra_fixture).unwrap(); - - let res = expander.expand(macro_name, &fixture.subtree, None).unwrap(); - assert_eq_text!(&format!("{:?}", res), &expect.trim()); -} - -pub fn list(crate_name: &str, version: &str) -> Vec { - let path = fixtures::dylib_path(crate_name, version); - let task = ListMacrosTask { lib: path }; - let mut srv = ProcMacroSrv::default(); - let res = srv.list_macros(&task).unwrap(); - res.macros.into_iter().map(|(name, kind)| format!("{} [{:?}]", name, kind)).collect() -} diff --git a/crates/ra_prof/Cargo.toml b/crates/ra_prof/Cargo.toml deleted file mode 100644 index 
c82b9f76d4..0000000000 --- a/crates/ra_prof/Cargo.toml +++ /dev/null @@ -1,27 +0,0 @@ -[package] -edition = "2018" -name = "ra_prof" -version = "0.1.0" -authors = ["rust-analyzer developers"] -publish = false -license = "MIT OR Apache-2.0" - -[lib] -doctest = false - -[dependencies] -ra_arena = { path = "../ra_arena" } -once_cell = "1.3.1" -backtrace = { version = "0.3.44", optional = true } -cfg-if = "0.1.10" -libc = "0.2.73" - -[target.'cfg(target_os = "linux")'.dependencies] -perf-event = "0.4" - -[features] -cpu_profiler = [] - -# Uncomment to enable for the whole crate graph -# default = [ "backtrace" ] -# default = [ "cpu_profiler" ] diff --git a/crates/ra_prof/src/hprof.rs b/crates/ra_prof/src/hprof.rs deleted file mode 100644 index a3f5321fb3..0000000000 --- a/crates/ra_prof/src/hprof.rs +++ /dev/null @@ -1,243 +0,0 @@ -//! Simple hierarchical profiler -use once_cell::sync::Lazy; -use std::{ - cell::RefCell, - collections::{BTreeMap, HashSet}, - io::{stderr, Write}, - sync::{ - atomic::{AtomicBool, Ordering}, - RwLock, - }, - time::{Duration, Instant}, -}; - -use crate::tree::{Idx, Tree}; - -/// Filtering syntax -/// env RA_PROFILE=* // dump everything -/// env RA_PROFILE=foo|bar|baz // enabled only selected entries -/// env RA_PROFILE=*@3>10 // dump everything, up to depth 3, if it takes more than 10 ms -pub fn init() { - let spec = std::env::var("RA_PROFILE").unwrap_or_default(); - init_from(&spec); -} - -pub fn init_from(spec: &str) { - let filter = if spec.is_empty() { Filter::disabled() } else { Filter::from_spec(spec) }; - filter.install(); -} - -pub type Label = &'static str; - -/// This function starts a profiling scope in the current execution stack with a given description. -/// It returns a `Profile` struct that measures elapsed time between this method invocation and `Profile` struct drop. -/// It supports nested profiling scopes in case when this function is invoked multiple times at the execution stack. 
-/// In this case the profiling information will be nested at the output. -/// Profiling information is being printed in the stderr. -/// -/// # Example -/// ``` -/// use ra_prof::{profile, set_filter, Filter}; -/// -/// let f = Filter::from_spec("profile1|profile2@2"); -/// set_filter(f); -/// profiling_function1(); -/// -/// fn profiling_function1() { -/// let _p = profile("profile1"); -/// profiling_function2(); -/// } -/// -/// fn profiling_function2() { -/// let _p = profile("profile2"); -/// } -/// ``` -/// This will print in the stderr the following: -/// ```text -/// 0ms - profile -/// 0ms - profile2 -/// ``` -pub fn profile(label: Label) -> Profiler { - assert!(!label.is_empty()); - - if PROFILING_ENABLED.load(Ordering::Relaxed) - && PROFILE_STACK.with(|stack| stack.borrow_mut().push(label)) - { - Profiler(Some(ProfilerImpl { label, detail: None })) - } else { - Profiler(None) - } -} - -pub struct Profiler(Option); - -struct ProfilerImpl { - label: Label, - detail: Option, -} - -impl Profiler { - pub fn detail(mut self, detail: impl FnOnce() -> String) -> Profiler { - if let Some(profiler) = &mut self.0 { - profiler.detail = Some(detail()) - } - self - } -} - -impl Drop for ProfilerImpl { - fn drop(&mut self) { - PROFILE_STACK.with(|it| it.borrow_mut().pop(self.label, self.detail.take())); - } -} - -static PROFILING_ENABLED: AtomicBool = AtomicBool::new(false); -static FILTER: Lazy> = Lazy::new(Default::default); -thread_local!(static PROFILE_STACK: RefCell = RefCell::new(ProfileStack::new())); - -#[derive(Default, Clone, Debug)] -struct Filter { - depth: usize, - allowed: HashSet, - longer_than: Duration, - version: usize, -} - -impl Filter { - fn disabled() -> Filter { - Filter::default() - } - - fn from_spec(mut spec: &str) -> Filter { - let longer_than = if let Some(idx) = spec.rfind('>') { - let longer_than = spec[idx + 1..].parse().expect("invalid profile longer_than"); - spec = &spec[..idx]; - Duration::from_millis(longer_than) - } else { - 
Duration::new(0, 0) - }; - - let depth = if let Some(idx) = spec.rfind('@') { - let depth: usize = spec[idx + 1..].parse().expect("invalid profile depth"); - spec = &spec[..idx]; - depth - } else { - 999 - }; - let allowed = - if spec == "*" { HashSet::new() } else { spec.split('|').map(String::from).collect() }; - Filter { depth, allowed, longer_than, version: 0 } - } - - fn install(mut self) { - PROFILING_ENABLED.store(self.depth > 0, Ordering::SeqCst); - let mut old = FILTER.write().unwrap(); - self.version = old.version + 1; - *old = self; - } -} - -struct ProfileStack { - starts: Vec, - filter: Filter, - messages: Tree, -} - -#[derive(Default)] -struct Message { - duration: Duration, - label: Label, - detail: Option, -} - -impl ProfileStack { - fn new() -> ProfileStack { - ProfileStack { starts: Vec::new(), messages: Tree::default(), filter: Default::default() } - } - - fn push(&mut self, label: Label) -> bool { - if self.starts.is_empty() { - if let Ok(f) = FILTER.try_read() { - if f.version > self.filter.version { - self.filter = f.clone(); - } - }; - } - if self.starts.len() > self.filter.depth { - return false; - } - let allowed = &self.filter.allowed; - if self.starts.is_empty() && !allowed.is_empty() && !allowed.contains(label) { - return false; - } - - self.starts.push(Instant::now()); - self.messages.start(); - true - } - - pub fn pop(&mut self, label: Label, detail: Option) { - let start = self.starts.pop().unwrap(); - let duration = start.elapsed(); - self.messages.finish(Message { duration, label, detail }); - if self.starts.is_empty() { - let longer_than = self.filter.longer_than; - // Convert to millis for comparison to avoid problems with rounding - // (otherwise we could print `0ms` despite user's `>0` filter when - // `duration` is just a few nanos). 
- if duration.as_millis() > longer_than.as_millis() { - if let Some(root) = self.messages.root() { - print(&self.messages, root, 0, longer_than, &mut stderr().lock()); - } - } - self.messages.clear(); - } - } -} - -fn print( - tree: &Tree, - curr: Idx, - level: u32, - longer_than: Duration, - out: &mut impl Write, -) { - let current_indent = " ".repeat(level as usize); - let detail = tree[curr].detail.as_ref().map(|it| format!(" @ {}", it)).unwrap_or_default(); - writeln!( - out, - "{}{:5}ms - {}{}", - current_indent, - tree[curr].duration.as_millis(), - tree[curr].label, - detail, - ) - .expect("printing profiling info"); - - let mut accounted_for = Duration::default(); - let mut short_children = BTreeMap::new(); // Use `BTreeMap` to get deterministic output. - for child in tree.children(curr) { - accounted_for += tree[child].duration; - - if tree[child].duration.as_millis() > longer_than.as_millis() { - print(tree, child, level + 1, longer_than, out) - } else { - let (total_duration, cnt) = - short_children.entry(tree[child].label).or_insert((Duration::default(), 0)); - *total_duration += tree[child].duration; - *cnt += 1; - } - } - - for (child_msg, (duration, count)) in short_children.iter() { - let millis = duration.as_millis(); - writeln!(out, " {}{:5}ms - {} ({} calls)", current_indent, millis, child_msg, count) - .expect("printing profiling info"); - } - - let unaccounted = tree[curr].duration - accounted_for; - if tree.children(curr).next().is_some() && unaccounted > longer_than { - writeln!(out, " {}{:5}ms - ???", current_indent, unaccounted.as_millis()) - .expect("printing profiling info"); - } -} diff --git a/crates/ra_prof/src/lib.rs b/crates/ra_prof/src/lib.rs deleted file mode 100644 index eb50965ae6..0000000000 --- a/crates/ra_prof/src/lib.rs +++ /dev/null @@ -1,109 +0,0 @@ -//! A collection of tools for profiling rust-analyzer. 
- -mod stop_watch; -mod memory_usage; -#[cfg(feature = "cpu_profiler")] -mod google_cpu_profiler; -mod hprof; -mod tree; - -use std::cell::RefCell; - -pub use crate::{ - hprof::{init, init_from, profile}, - memory_usage::{Bytes, MemoryUsage}, - stop_watch::{StopWatch, StopWatchSpan}, -}; - -/// Prints backtrace to stderr, useful for debugging. -#[cfg(feature = "backtrace")] -pub fn print_backtrace() { - let bt = backtrace::Backtrace::new(); - eprintln!("{:?}", bt); -} -#[cfg(not(feature = "backtrace"))] -pub fn print_backtrace() { - eprintln!( - r#"enable the backtrace feature: - ra_prof = {{ path = "../ra_prof", features = [ "backtrace"] }} -"# - ); -} - -thread_local!(static IN_SCOPE: RefCell = RefCell::new(false)); - -/// Allows to check if the current code is withing some dynamic scope, can be -/// useful during debugging to figure out why a function is called. -pub struct Scope { - prev: bool, -} - -impl Scope { - #[must_use] - pub fn enter() -> Scope { - let prev = IN_SCOPE.with(|slot| std::mem::replace(&mut *slot.borrow_mut(), true)); - Scope { prev } - } - pub fn is_active() -> bool { - IN_SCOPE.with(|slot| *slot.borrow()) - } -} - -impl Drop for Scope { - fn drop(&mut self) { - IN_SCOPE.with(|slot| *slot.borrow_mut() = self.prev); - } -} - -/// A wrapper around google_cpu_profiler. -/// -/// Usage: -/// 1. Install gpref_tools (https://github.com/gperftools/gperftools), probably packaged with your Linux distro. -/// 2. Build with `cpu_profiler` feature. -/// 3. Tun the code, the *raw* output would be in the `./out.profile` file. -/// 4. Install pprof for visualization (https://github.com/google/pprof). -/// 5. Bump sampling frequency to once per ms: `export CPUPROFILE_FREQUENCY=1000` -/// 6. Use something like `pprof -svg target/release/rust-analyzer ./out.profile` to see the results. 
-/// -/// For example, here's how I run profiling on NixOS: -/// -/// ```bash -/// $ nix-shell -p gperftools --run \ -/// 'cargo run --release -p rust-analyzer -- parse < ~/projects/rustbench/parser.rs > /dev/null' -/// ``` -/// -/// See this diff for how to profile completions: -/// -/// https://github.com/rust-analyzer/rust-analyzer/pull/5306 -#[derive(Debug)] -pub struct CpuProfiler { - _private: (), -} - -#[must_use] -pub fn cpu_profiler() -> CpuProfiler { - #[cfg(feature = "cpu_profiler")] - { - google_cpu_profiler::start("./out.profile".as_ref()) - } - - #[cfg(not(feature = "cpu_profiler"))] - { - eprintln!("cpu_profiler feature is disabled") - } - - CpuProfiler { _private: () } -} - -impl Drop for CpuProfiler { - fn drop(&mut self) { - #[cfg(feature = "cpu_profiler")] - { - google_cpu_profiler::stop() - } - } -} - -pub fn memory_usage() -> MemoryUsage { - MemoryUsage::current() -} diff --git a/crates/ra_prof/src/memory_usage.rs b/crates/ra_prof/src/memory_usage.rs deleted file mode 100644 index c2ecbd33cf..0000000000 --- a/crates/ra_prof/src/memory_usage.rs +++ /dev/null @@ -1,75 +0,0 @@ -//! FIXME: write short doc here -use std::fmt; - -use cfg_if::cfg_if; - -#[derive(Copy, Clone)] -pub struct MemoryUsage { - pub allocated: Bytes, -} - -impl fmt::Display for MemoryUsage { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - write!(fmt, "{}", self.allocated) - } -} - -impl std::ops::Sub for MemoryUsage { - type Output = MemoryUsage; - fn sub(self, rhs: MemoryUsage) -> MemoryUsage { - MemoryUsage { allocated: self.allocated - rhs.allocated } - } -} - -impl MemoryUsage { - pub fn current() -> MemoryUsage { - cfg_if! { - if #[cfg(target_os = "linux")] { - // Note: This is incredibly slow. 
- let alloc = unsafe { libc::mallinfo() }.uordblks as isize; - MemoryUsage { allocated: Bytes(alloc) } - } else { - MemoryUsage { allocated: Bytes(0) } - } - } - } -} - -#[derive(Default, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)] -pub struct Bytes(isize); - -impl Bytes { - pub fn megabytes(self) -> isize { - self.0 / 1024 / 1024 - } -} - -impl fmt::Display for Bytes { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let bytes = self.0; - let mut value = bytes; - let mut suffix = "b"; - if value.abs() > 4096 { - value /= 1024; - suffix = "kb"; - if value.abs() > 4096 { - value /= 1024; - suffix = "mb"; - } - } - f.pad(&format!("{}{}", value, suffix)) - } -} - -impl std::ops::AddAssign for Bytes { - fn add_assign(&mut self, x: usize) { - self.0 += x as isize; - } -} - -impl std::ops::Sub for Bytes { - type Output = Bytes; - fn sub(self, rhs: Bytes) -> Bytes { - Bytes(self.0 - rhs.0) - } -} diff --git a/crates/ra_prof/src/tree.rs b/crates/ra_prof/src/tree.rs deleted file mode 100644 index 9ea5b5db8e..0000000000 --- a/crates/ra_prof/src/tree.rs +++ /dev/null @@ -1,84 +0,0 @@ -//! A simple tree implementation which tries to not allocate all over the place. 
-use std::ops; - -use ra_arena::Arena; - -#[derive(Default)] -pub struct Tree { - nodes: Arena>, - current_path: Vec<(Idx, Option>)>, -} - -pub type Idx = ra_arena::Idx>; - -impl Tree { - pub fn start(&mut self) - where - T: Default, - { - let me = self.nodes.alloc(Node::new(T::default())); - if let Some((parent, last_child)) = self.current_path.last_mut() { - let slot = match *last_child { - Some(last_child) => &mut self.nodes[last_child].next_sibling, - None => &mut self.nodes[*parent].first_child, - }; - let prev = slot.replace(me); - assert!(prev.is_none()); - *last_child = Some(me); - } - - self.current_path.push((me, None)); - } - - pub fn finish(&mut self, data: T) { - let (me, _last_child) = self.current_path.pop().unwrap(); - self.nodes[me].data = data; - } - - pub fn root(&self) -> Option> { - self.nodes.iter().next().map(|(idx, _)| idx) - } - - pub fn children(&self, idx: Idx) -> impl Iterator> + '_ { - NodeIter { nodes: &self.nodes, next: self.nodes[idx].first_child } - } - pub fn clear(&mut self) { - self.nodes.clear(); - self.current_path.clear(); - } -} - -impl ops::Index> for Tree { - type Output = T; - fn index(&self, index: Idx) -> &T { - &self.nodes[index].data - } -} - -pub struct Node { - data: T, - first_child: Option>, - next_sibling: Option>, -} - -impl Node { - fn new(data: T) -> Node { - Node { data, first_child: None, next_sibling: None } - } -} - -struct NodeIter<'a, T> { - nodes: &'a Arena>, - next: Option>, -} - -impl<'a, T> Iterator for NodeIter<'a, T> { - type Item = Idx; - - fn next(&mut self) -> Option> { - self.next.map(|next| { - self.next = self.nodes[next].next_sibling; - next - }) - } -} diff --git a/crates/ra_project_model/Cargo.toml b/crates/ra_project_model/Cargo.toml deleted file mode 100644 index 99adea8e44..0000000000 --- a/crates/ra_project_model/Cargo.toml +++ /dev/null @@ -1,28 +0,0 @@ -[package] -edition = "2018" -name = "ra_project_model" -version = "0.1.0" -authors = ["rust-analyzer developers"] -license = "MIT OR 
Apache-2.0" - -[lib] -doctest = false - -[dependencies] -log = "0.4.8" -rustc-hash = "1.1.0" - -cargo_metadata = "0.11.1" - -ra_arena = { path = "../ra_arena" } -ra_cfg = { path = "../ra_cfg" } -ra_db = { path = "../ra_db" } -ra_toolchain = { path = "../ra_toolchain" } -ra_proc_macro = { path = "../ra_proc_macro" } -paths = { path = "../paths" } -stdx = { path = "../stdx" } - -serde = { version = "1.0.106", features = ["derive"] } -serde_json = "1.0.48" - -anyhow = "1.0.26" diff --git a/crates/ra_project_model/src/cargo_workspace.rs b/crates/ra_project_model/src/cargo_workspace.rs deleted file mode 100644 index 10513542e2..0000000000 --- a/crates/ra_project_model/src/cargo_workspace.rs +++ /dev/null @@ -1,362 +0,0 @@ -//! FIXME: write short doc here - -use std::{ - ffi::OsStr, - ops, - path::{Path, PathBuf}, - process::Command, -}; - -use anyhow::{Context, Result}; -use cargo_metadata::{BuildScript, CargoOpt, Message, MetadataCommand, PackageId}; -use paths::{AbsPath, AbsPathBuf}; -use ra_arena::{Arena, Idx}; -use ra_db::Edition; -use rustc_hash::FxHashMap; - -use crate::cfg_flag::CfgFlag; - -/// `CargoWorkspace` represents the logical structure of, well, a Cargo -/// workspace. It pretty closely mirrors `cargo metadata` output. -/// -/// Note that internally, rust analyzer uses a different structure: -/// `CrateGraph`. `CrateGraph` is lower-level: it knows only about the crates, -/// while this knows about `Packages` & `Targets`: purely cargo-related -/// concepts. -/// -/// We use absolute paths here, `cargo metadata` guarantees to always produce -/// abs paths. 
-#[derive(Debug, Clone, Eq, PartialEq)] -pub struct CargoWorkspace { - packages: Arena, - targets: Arena, - workspace_root: AbsPathBuf, -} - -impl ops::Index for CargoWorkspace { - type Output = PackageData; - fn index(&self, index: Package) -> &PackageData { - &self.packages[index] - } -} - -impl ops::Index for CargoWorkspace { - type Output = TargetData; - fn index(&self, index: Target) -> &TargetData { - &self.targets[index] - } -} - -#[derive(Default, Clone, Debug, PartialEq, Eq)] -pub struct CargoConfig { - /// Do not activate the `default` feature. - pub no_default_features: bool, - - /// Activate all available features - pub all_features: bool, - - /// List of features to activate. - /// This will be ignored if `cargo_all_features` is true. - pub features: Vec, - - /// Runs cargo check on launch to figure out the correct values of OUT_DIR - pub load_out_dirs_from_check: bool, - - /// rustc target - pub target: Option, -} - -pub type Package = Idx; - -pub type Target = Idx; - -#[derive(Debug, Clone, Eq, PartialEq)] -pub struct PackageData { - pub version: String, - pub name: String, - pub manifest: AbsPathBuf, - pub targets: Vec, - pub is_member: bool, - pub dependencies: Vec, - pub edition: Edition, - pub features: Vec, - pub cfgs: Vec, - pub out_dir: Option, - pub proc_macro_dylib_path: Option, -} - -#[derive(Debug, Clone, Eq, PartialEq)] -pub struct PackageDependency { - pub pkg: Package, - pub name: String, -} - -#[derive(Debug, Clone, Eq, PartialEq)] -pub struct TargetData { - pub package: Package, - pub name: String, - pub root: AbsPathBuf, - pub kind: TargetKind, - pub is_proc_macro: bool, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum TargetKind { - Bin, - /// Any kind of Cargo lib crate-type (dylib, rlib, proc-macro, ...). 
- Lib, - Example, - Test, - Bench, - Other, -} - -impl TargetKind { - fn new(kinds: &[String]) -> TargetKind { - for kind in kinds { - return match kind.as_str() { - "bin" => TargetKind::Bin, - "test" => TargetKind::Test, - "bench" => TargetKind::Bench, - "example" => TargetKind::Example, - "proc-macro" => TargetKind::Lib, - _ if kind.contains("lib") => TargetKind::Lib, - _ => continue, - }; - } - TargetKind::Other - } -} - -impl PackageData { - pub fn root(&self) -> &AbsPath { - self.manifest.parent().unwrap() - } -} - -impl CargoWorkspace { - pub fn from_cargo_metadata( - cargo_toml: &AbsPath, - cargo_features: &CargoConfig, - ) -> Result { - let mut meta = MetadataCommand::new(); - meta.cargo_path(ra_toolchain::cargo()); - meta.manifest_path(cargo_toml.to_path_buf()); - if cargo_features.all_features { - meta.features(CargoOpt::AllFeatures); - } else { - if cargo_features.no_default_features { - // FIXME: `NoDefaultFeatures` is mutual exclusive with `SomeFeatures` - // https://github.com/oli-obk/cargo_metadata/issues/79 - meta.features(CargoOpt::NoDefaultFeatures); - } - if !cargo_features.features.is_empty() { - meta.features(CargoOpt::SomeFeatures(cargo_features.features.clone())); - } - } - if let Some(parent) = cargo_toml.parent() { - meta.current_dir(parent.to_path_buf()); - } - if let Some(target) = cargo_features.target.as_ref() { - meta.other_options(vec![String::from("--filter-platform"), target.clone()]); - } - let mut meta = meta.exec().with_context(|| { - format!("Failed to run `cargo metadata --manifest-path {}`", cargo_toml.display()) - })?; - - let mut out_dir_by_id = FxHashMap::default(); - let mut cfgs = FxHashMap::default(); - let mut proc_macro_dylib_paths = FxHashMap::default(); - if cargo_features.load_out_dirs_from_check { - let resources = load_extern_resources(cargo_toml, cargo_features)?; - out_dir_by_id = resources.out_dirs; - cfgs = resources.cfgs; - proc_macro_dylib_paths = resources.proc_dylib_paths; - } - - let mut pkg_by_id = 
FxHashMap::default(); - let mut packages = Arena::default(); - let mut targets = Arena::default(); - - let ws_members = &meta.workspace_members; - - meta.packages.sort_by(|a, b| a.id.cmp(&b.id)); - for meta_pkg in meta.packages { - let cargo_metadata::Package { id, edition, name, manifest_path, version, .. } = - meta_pkg; - let is_member = ws_members.contains(&id); - let edition = edition - .parse::() - .with_context(|| format!("Failed to parse edition {}", edition))?; - let pkg = packages.alloc(PackageData { - name, - version: version.to_string(), - manifest: AbsPathBuf::assert(manifest_path), - targets: Vec::new(), - is_member, - edition, - dependencies: Vec::new(), - features: Vec::new(), - cfgs: cfgs.get(&id).cloned().unwrap_or_default(), - out_dir: out_dir_by_id.get(&id).cloned(), - proc_macro_dylib_path: proc_macro_dylib_paths.get(&id).cloned(), - }); - let pkg_data = &mut packages[pkg]; - pkg_by_id.insert(id, pkg); - for meta_tgt in meta_pkg.targets { - let is_proc_macro = meta_tgt.kind.as_slice() == ["proc-macro"]; - let tgt = targets.alloc(TargetData { - package: pkg, - name: meta_tgt.name, - root: AbsPathBuf::assert(meta_tgt.src_path.clone()), - kind: TargetKind::new(meta_tgt.kind.as_slice()), - is_proc_macro, - }); - pkg_data.targets.push(tgt); - } - } - let resolve = meta.resolve.expect("metadata executed with deps"); - for mut node in resolve.nodes { - let source = match pkg_by_id.get(&node.id) { - Some(&src) => src, - // FIXME: replace this and a similar branch below with `.unwrap`, once - // https://github.com/rust-lang/cargo/issues/7841 - // is fixed and hits stable (around 1.43-is probably?). 
- None => { - log::error!("Node id do not match in cargo metadata, ignoring {}", node.id); - continue; - } - }; - node.deps.sort_by(|a, b| a.pkg.cmp(&b.pkg)); - for dep_node in node.deps { - let pkg = match pkg_by_id.get(&dep_node.pkg) { - Some(&pkg) => pkg, - None => { - log::error!( - "Dep node id do not match in cargo metadata, ignoring {}", - dep_node.pkg - ); - continue; - } - }; - let dep = PackageDependency { name: dep_node.name, pkg }; - packages[source].dependencies.push(dep); - } - packages[source].features.extend(node.features); - } - - let workspace_root = AbsPathBuf::assert(meta.workspace_root); - Ok(CargoWorkspace { packages, targets, workspace_root: workspace_root }) - } - - pub fn packages<'a>(&'a self) -> impl Iterator + ExactSizeIterator + 'a { - self.packages.iter().map(|(id, _pkg)| id) - } - - pub fn target_by_root(&self, root: &AbsPath) -> Option { - self.packages() - .filter_map(|pkg| self[pkg].targets.iter().find(|&&it| &self[it].root == root)) - .next() - .copied() - } - - pub fn workspace_root(&self) -> &AbsPath { - &self.workspace_root - } - - pub fn package_flag(&self, package: &PackageData) -> String { - if self.is_unique(&*package.name) { - package.name.clone() - } else { - format!("{}:{}", package.name, package.version) - } - } - - fn is_unique(&self, name: &str) -> bool { - self.packages.iter().filter(|(_, v)| v.name == name).count() == 1 - } -} - -#[derive(Debug, Clone, Default)] -pub struct ExternResources { - out_dirs: FxHashMap, - proc_dylib_paths: FxHashMap, - cfgs: FxHashMap>, -} - -pub fn load_extern_resources( - cargo_toml: &Path, - cargo_features: &CargoConfig, -) -> Result { - let mut cmd = Command::new(ra_toolchain::cargo()); - cmd.args(&["check", "--message-format=json", "--manifest-path"]).arg(cargo_toml); - if cargo_features.all_features { - cmd.arg("--all-features"); - } else { - if cargo_features.no_default_features { - // FIXME: `NoDefaultFeatures` is mutual exclusive with `SomeFeatures` - // 
https://github.com/oli-obk/cargo_metadata/issues/79 - cmd.arg("--no-default-features"); - } - if !cargo_features.features.is_empty() { - cmd.arg("--features"); - cmd.arg(cargo_features.features.join(" ")); - } - } - - let output = cmd.output()?; - - let mut res = ExternResources::default(); - - for message in cargo_metadata::Message::parse_stream(output.stdout.as_slice()) { - if let Ok(message) = message { - match message { - Message::BuildScriptExecuted(BuildScript { package_id, out_dir, cfgs, .. }) => { - let cfgs = { - let mut acc = Vec::new(); - for cfg in cfgs { - match cfg.parse::() { - Ok(it) => acc.push(it), - Err(err) => { - anyhow::bail!("invalid cfg from cargo-metadata: {}", err) - } - }; - } - acc - }; - // cargo_metadata crate returns default (empty) path for - // older cargos, which is not absolute, so work around that. - if out_dir != PathBuf::default() { - let out_dir = AbsPathBuf::assert(out_dir); - res.out_dirs.insert(package_id.clone(), out_dir); - res.cfgs.insert(package_id, cfgs); - } - } - Message::CompilerArtifact(message) => { - if message.target.kind.contains(&"proc-macro".to_string()) { - let package_id = message.package_id; - // Skip rmeta file - if let Some(filename) = message.filenames.iter().find(|name| is_dylib(name)) - { - let filename = AbsPathBuf::assert(filename.clone()); - res.proc_dylib_paths.insert(package_id, filename); - } - } - } - Message::CompilerMessage(_) => (), - Message::Unknown => (), - Message::BuildFinished(_) => {} - Message::TextLine(_) => {} - } - } - } - Ok(res) -} - -// FIXME: File a better way to know if it is a dylib -fn is_dylib(path: &Path) -> bool { - match path.extension().and_then(OsStr::to_str).map(|it| it.to_string().to_lowercase()) { - None => false, - Some(ext) => matches!(ext.as_str(), "dll" | "dylib" | "so"), - } -} diff --git a/crates/ra_project_model/src/cfg_flag.rs b/crates/ra_project_model/src/cfg_flag.rs deleted file mode 100644 index bd50056c68..0000000000 --- 
a/crates/ra_project_model/src/cfg_flag.rs +++ /dev/null @@ -1,51 +0,0 @@ -//! Parsing of CfgFlags as command line arguments, as in -//! -//! rustc main.rs --cfg foo --cfg 'feature="bar"' -use std::str::FromStr; - -use ra_cfg::CfgOptions; -use stdx::split_once; - -#[derive(Clone, Eq, PartialEq, Debug)] -pub enum CfgFlag { - Atom(String), - KeyValue { key: String, value: String }, -} - -impl FromStr for CfgFlag { - type Err = String; - fn from_str(s: &str) -> Result { - let res = match split_once(s, '=') { - Some((key, value)) => { - if !(value.starts_with('"') && value.ends_with('"')) { - return Err(format!("Invalid cfg ({:?}), value should be in quotes", s)); - } - let key = key.to_string(); - let value = value[1..value.len() - 1].to_string(); - CfgFlag::KeyValue { key, value } - } - None => CfgFlag::Atom(s.into()), - }; - Ok(res) - } -} - -impl<'de> serde::Deserialize<'de> for CfgFlag { - fn deserialize(deserializer: D) -> Result - where - D: serde::Deserializer<'de>, - { - String::deserialize(deserializer)?.parse().map_err(serde::de::Error::custom) - } -} - -impl Extend for CfgOptions { - fn extend>(&mut self, iter: T) { - for cfg_flag in iter { - match cfg_flag { - CfgFlag::Atom(it) => self.insert_atom(it.into()), - CfgFlag::KeyValue { key, value } => self.insert_key_value(key.into(), value.into()), - } - } - } -} diff --git a/crates/ra_project_model/src/lib.rs b/crates/ra_project_model/src/lib.rs deleted file mode 100644 index 300e751355..0000000000 --- a/crates/ra_project_model/src/lib.rs +++ /dev/null @@ -1,544 +0,0 @@ -//! 
FIXME: write short doc here - -mod cargo_workspace; -mod project_json; -mod sysroot; -mod cfg_flag; - -use std::{ - fs::{self, read_dir, ReadDir}, - io, - process::Command, -}; - -use anyhow::{bail, Context, Result}; -use paths::{AbsPath, AbsPathBuf}; -use ra_cfg::CfgOptions; -use ra_db::{CrateGraph, CrateId, CrateName, Edition, Env, FileId}; -use rustc_hash::{FxHashMap, FxHashSet}; - -use crate::cfg_flag::CfgFlag; - -pub use crate::{ - cargo_workspace::{CargoConfig, CargoWorkspace, Package, Target, TargetKind}, - project_json::{ProjectJson, ProjectJsonData}, - sysroot::Sysroot, -}; - -pub use ra_proc_macro::ProcMacroClient; - -#[derive(Debug, Clone, Eq, PartialEq)] -pub enum ProjectWorkspace { - /// Project workspace was discovered by running `cargo metadata` and `rustc --print sysroot`. - Cargo { cargo: CargoWorkspace, sysroot: Sysroot }, - /// Project workspace was manually specified using a `rust-project.json` file. - Json { project: ProjectJson }, -} - -/// `PackageRoot` describes a package root folder. -/// Which may be an external dependency, or a member of -/// the current workspace. 
-#[derive(Debug, Clone, Eq, PartialEq, Hash)] -pub struct PackageRoot { - /// Is a member of the current workspace - pub is_member: bool, - pub include: Vec, - pub exclude: Vec, -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash, Ord, PartialOrd)] -pub enum ProjectManifest { - ProjectJson(AbsPathBuf), - CargoToml(AbsPathBuf), -} - -impl ProjectManifest { - pub fn from_manifest_file(path: AbsPathBuf) -> Result { - if path.ends_with("rust-project.json") { - return Ok(ProjectManifest::ProjectJson(path)); - } - if path.ends_with("Cargo.toml") { - return Ok(ProjectManifest::CargoToml(path)); - } - bail!("project root must point to Cargo.toml or rust-project.json: {}", path.display()) - } - - pub fn discover_single(path: &AbsPath) -> Result { - let mut candidates = ProjectManifest::discover(path)?; - let res = match candidates.pop() { - None => bail!("no projects"), - Some(it) => it, - }; - - if !candidates.is_empty() { - bail!("more than one project") - } - Ok(res) - } - - pub fn discover(path: &AbsPath) -> io::Result> { - if let Some(project_json) = find_in_parent_dirs(path, "rust-project.json") { - return Ok(vec![ProjectManifest::ProjectJson(project_json)]); - } - return find_cargo_toml(path) - .map(|paths| paths.into_iter().map(ProjectManifest::CargoToml).collect()); - - fn find_cargo_toml(path: &AbsPath) -> io::Result> { - match find_in_parent_dirs(path, "Cargo.toml") { - Some(it) => Ok(vec![it]), - None => Ok(find_cargo_toml_in_child_dir(read_dir(path)?)), - } - } - - fn find_in_parent_dirs(path: &AbsPath, target_file_name: &str) -> Option { - if path.ends_with(target_file_name) { - return Some(path.to_path_buf()); - } - - let mut curr = Some(path); - - while let Some(path) = curr { - let candidate = path.join(target_file_name); - if candidate.exists() { - return Some(candidate); - } - curr = path.parent(); - } - - None - } - - fn find_cargo_toml_in_child_dir(entities: ReadDir) -> Vec { - // Only one level down to avoid cycles the easy way and stop a runaway scan 
with large projects - entities - .filter_map(Result::ok) - .map(|it| it.path().join("Cargo.toml")) - .filter(|it| it.exists()) - .map(AbsPathBuf::assert) - .collect() - } - } - - pub fn discover_all(paths: &[impl AsRef]) -> Vec { - let mut res = paths - .iter() - .filter_map(|it| ProjectManifest::discover(it.as_ref()).ok()) - .flatten() - .collect::>() - .into_iter() - .collect::>(); - res.sort(); - res - } -} - -impl ProjectWorkspace { - pub fn load( - manifest: ProjectManifest, - cargo_config: &CargoConfig, - with_sysroot: bool, - ) -> Result { - let res = match manifest { - ProjectManifest::ProjectJson(project_json) => { - let file = fs::read_to_string(&project_json).with_context(|| { - format!("Failed to read json file {}", project_json.display()) - })?; - let data = serde_json::from_str(&file).with_context(|| { - format!("Failed to deserialize json file {}", project_json.display()) - })?; - let project_location = project_json.parent().unwrap().to_path_buf(); - let project = ProjectJson::new(&project_location, data); - ProjectWorkspace::Json { project } - } - ProjectManifest::CargoToml(cargo_toml) => { - let cargo = CargoWorkspace::from_cargo_metadata(&cargo_toml, cargo_config) - .with_context(|| { - format!( - "Failed to read Cargo metadata from Cargo.toml file {}", - cargo_toml.display() - ) - })?; - let sysroot = if with_sysroot { - Sysroot::discover(&cargo_toml).with_context(|| { - format!( - "Failed to find sysroot for Cargo.toml file {}. Is rust-src installed?", - cargo_toml.display() - ) - })? 
- } else { - Sysroot::default() - }; - ProjectWorkspace::Cargo { cargo, sysroot } - } - }; - - Ok(res) - } - - /// Returns the roots for the current `ProjectWorkspace` - /// The return type contains the path and whether or not - /// the root is a member of the current workspace - pub fn to_roots(&self) -> Vec { - match self { - ProjectWorkspace::Json { project } => project - .crates - .iter() - .map(|krate| PackageRoot { - is_member: krate.is_workspace_member, - include: krate.include.clone(), - exclude: krate.exclude.clone(), - }) - .collect::>() - .into_iter() - .collect::>(), - ProjectWorkspace::Cargo { cargo, sysroot } => cargo - .packages() - .map(|pkg| { - let is_member = cargo[pkg].is_member; - let pkg_root = cargo[pkg].root().to_path_buf(); - - let mut include = vec![pkg_root.clone()]; - include.extend(cargo[pkg].out_dir.clone()); - - let mut exclude = vec![pkg_root.join(".git")]; - if is_member { - exclude.push(pkg_root.join("target")); - } else { - exclude.push(pkg_root.join("tests")); - exclude.push(pkg_root.join("examples")); - exclude.push(pkg_root.join("benches")); - } - PackageRoot { is_member, include, exclude } - }) - .chain(sysroot.crates().map(|krate| PackageRoot { - is_member: false, - include: vec![sysroot[krate].root_dir().to_path_buf()], - exclude: Vec::new(), - })) - .collect(), - } - } - - pub fn proc_macro_dylib_paths(&self) -> Vec { - match self { - ProjectWorkspace::Json { project } => project - .crates - .iter() - .filter_map(|krate| krate.proc_macro_dylib_path.as_ref()) - .cloned() - .collect(), - ProjectWorkspace::Cargo { cargo, sysroot: _sysroot } => cargo - .packages() - .filter_map(|pkg| cargo[pkg].proc_macro_dylib_path.as_ref()) - .cloned() - .collect(), - } - } - - pub fn n_packages(&self) -> usize { - match self { - ProjectWorkspace::Json { project, .. 
} => project.crates.len(), - ProjectWorkspace::Cargo { cargo, sysroot } => { - cargo.packages().len() + sysroot.crates().len() - } - } - } - - pub fn to_crate_graph( - &self, - target: Option<&str>, - proc_macro_client: &ProcMacroClient, - load: &mut dyn FnMut(&AbsPath) -> Option, - ) -> CrateGraph { - let mut crate_graph = CrateGraph::default(); - match self { - ProjectWorkspace::Json { project } => { - let mut cfg_cache: FxHashMap, Vec> = FxHashMap::default(); - let crates: FxHashMap<_, _> = project - .crates - .iter() - .enumerate() - .filter_map(|(seq_index, krate)| { - let file_path = &krate.root_module; - let file_id = load(&file_path)?; - - let env = krate.env.clone().into_iter().collect(); - let proc_macro = krate - .proc_macro_dylib_path - .clone() - .map(|it| proc_macro_client.by_dylib_path(&it)); - - let target = krate.target.as_deref().or(target); - let target_cfgs = cfg_cache - .entry(target) - .or_insert_with(|| get_rustc_cfg_options(target)); - - let mut cfg_options = CfgOptions::default(); - cfg_options.extend(target_cfgs.iter().chain(krate.cfg.iter()).cloned()); - - // FIXME: No crate name in json definition such that we cannot add OUT_DIR to env - Some(( - CrateId(seq_index as u32), - crate_graph.add_crate_root( - file_id, - krate.edition, - // FIXME json definitions can store the crate name - None, - cfg_options, - env, - proc_macro.unwrap_or_default(), - ), - )) - }) - .collect(); - - for (id, krate) in project.crates.iter().enumerate() { - for dep in &krate.deps { - let from_crate_id = CrateId(id as u32); - let to_crate_id = dep.crate_id; - if let (Some(&from), Some(&to)) = - (crates.get(&from_crate_id), crates.get(&to_crate_id)) - { - if crate_graph.add_dep(from, dep.name.clone(), to).is_err() { - log::error!( - "cyclic dependency {:?} -> {:?}", - from_crate_id, - to_crate_id - ); - } - } - } - } - } - ProjectWorkspace::Cargo { cargo, sysroot } => { - let mut cfg_options = CfgOptions::default(); - 
cfg_options.extend(get_rustc_cfg_options(target)); - - let sysroot_crates: FxHashMap<_, _> = sysroot - .crates() - .filter_map(|krate| { - let file_id = load(&sysroot[krate].root)?; - - let env = Env::default(); - let proc_macro = vec![]; - let name = sysroot[krate].name.clone(); - let crate_id = crate_graph.add_crate_root( - file_id, - Edition::Edition2018, - Some(name), - cfg_options.clone(), - env, - proc_macro, - ); - Some((krate, crate_id)) - }) - .collect(); - - for from in sysroot.crates() { - for &to in sysroot[from].deps.iter() { - let name = &sysroot[to].name; - if let (Some(&from), Some(&to)) = - (sysroot_crates.get(&from), sysroot_crates.get(&to)) - { - if crate_graph.add_dep(from, CrateName::new(name).unwrap(), to).is_err() - { - log::error!("cyclic dependency between sysroot crates") - } - } - } - } - - let libcore = sysroot.core().and_then(|it| sysroot_crates.get(&it).copied()); - let liballoc = sysroot.alloc().and_then(|it| sysroot_crates.get(&it).copied()); - let libstd = sysroot.std().and_then(|it| sysroot_crates.get(&it).copied()); - let libproc_macro = - sysroot.proc_macro().and_then(|it| sysroot_crates.get(&it).copied()); - - let mut pkg_to_lib_crate = FxHashMap::default(); - let mut pkg_crates = FxHashMap::default(); - - // Add test cfg for non-sysroot crates - cfg_options.insert_atom("test".into()); - cfg_options.insert_atom("debug_assertions".into()); - - // Next, create crates for each package, target pair - for pkg in cargo.packages() { - let mut lib_tgt = None; - for &tgt in cargo[pkg].targets.iter() { - let root = cargo[tgt].root.as_path(); - if let Some(file_id) = load(root) { - let edition = cargo[pkg].edition; - let cfg_options = { - let mut opts = cfg_options.clone(); - for feature in cargo[pkg].features.iter() { - opts.insert_key_value("feature".into(), feature.into()); - } - opts.extend(cargo[pkg].cfgs.iter().cloned()); - opts - }; - let mut env = Env::default(); - if let Some(out_dir) = &cargo[pkg].out_dir { - // NOTE: cargo and 
rustc seem to hide non-UTF-8 strings from env! and option_env!() - if let Some(out_dir) = out_dir.to_str().map(|s| s.to_owned()) { - env.set("OUT_DIR", out_dir); - } - } - let proc_macro = cargo[pkg] - .proc_macro_dylib_path - .as_ref() - .map(|it| proc_macro_client.by_dylib_path(&it)) - .unwrap_or_default(); - - let crate_id = crate_graph.add_crate_root( - file_id, - edition, - Some(cargo[pkg].name.clone()), - cfg_options, - env, - proc_macro.clone(), - ); - if cargo[tgt].kind == TargetKind::Lib { - lib_tgt = Some((crate_id, cargo[tgt].name.clone())); - pkg_to_lib_crate.insert(pkg, crate_id); - } - if cargo[tgt].is_proc_macro { - if let Some(proc_macro) = libproc_macro { - if crate_graph - .add_dep( - crate_id, - CrateName::new("proc_macro").unwrap(), - proc_macro, - ) - .is_err() - { - log::error!( - "cyclic dependency on proc_macro for {}", - &cargo[pkg].name - ) - } - } - } - - pkg_crates.entry(pkg).or_insert_with(Vec::new).push(crate_id); - } - } - - // Set deps to the core, std and to the lib target of the current package - for &from in pkg_crates.get(&pkg).into_iter().flatten() { - if let Some((to, name)) = lib_tgt.clone() { - if to != from - && crate_graph - .add_dep( - from, - // For root projects with dashes in their name, - // cargo metadata does not do any normalization, - // so we do it ourselves currently - CrateName::normalize_dashes(&name), - to, - ) - .is_err() - { - { - log::error!( - "cyclic dependency between targets of {}", - &cargo[pkg].name - ) - } - } - } - // core is added as a dependency before std in order to - // mimic rustcs dependency order - if let Some(core) = libcore { - if crate_graph - .add_dep(from, CrateName::new("core").unwrap(), core) - .is_err() - { - log::error!("cyclic dependency on core for {}", &cargo[pkg].name) - } - } - if let Some(alloc) = liballoc { - if crate_graph - .add_dep(from, CrateName::new("alloc").unwrap(), alloc) - .is_err() - { - log::error!("cyclic dependency on alloc for {}", &cargo[pkg].name) - } - } - 
if let Some(std) = libstd { - if crate_graph - .add_dep(from, CrateName::new("std").unwrap(), std) - .is_err() - { - log::error!("cyclic dependency on std for {}", &cargo[pkg].name) - } - } - } - } - - // Now add a dep edge from all targets of upstream to the lib - // target of downstream. - for pkg in cargo.packages() { - for dep in cargo[pkg].dependencies.iter() { - if let Some(&to) = pkg_to_lib_crate.get(&dep.pkg) { - for &from in pkg_crates.get(&pkg).into_iter().flatten() { - if crate_graph - .add_dep(from, CrateName::new(&dep.name).unwrap(), to) - .is_err() - { - log::error!( - "cyclic dependency {} -> {}", - &cargo[pkg].name, - &cargo[dep.pkg].name - ) - } - } - } - } - } - } - } - crate_graph - } -} - -fn get_rustc_cfg_options(target: Option<&str>) -> Vec { - let mut res = Vec::new(); - - // Some nightly-only cfgs, which are required for stdlib - res.push(CfgFlag::Atom("target_thread_local".into())); - for &ty in ["8", "16", "32", "64", "cas", "ptr"].iter() { - for &key in ["target_has_atomic", "target_has_atomic_load_store"].iter() { - res.push(CfgFlag::KeyValue { key: key.to_string(), value: ty.into() }); - } - } - - let rustc_cfgs = { - let mut cmd = Command::new(ra_toolchain::rustc()); - cmd.args(&["--print", "cfg", "-O"]); - if let Some(target) = target { - cmd.args(&["--target", target]); - } - utf8_stdout(cmd) - }; - - match rustc_cfgs { - Ok(rustc_cfgs) => res.extend(rustc_cfgs.lines().map(|it| it.parse().unwrap())), - Err(e) => log::error!("failed to get rustc cfgs: {:#}", e), - } - - res -} - -fn utf8_stdout(mut cmd: Command) -> Result { - let output = cmd.output().with_context(|| format!("{:?} failed", cmd))?; - if !output.status.success() { - match String::from_utf8(output.stderr) { - Ok(stderr) if !stderr.is_empty() => { - bail!("{:?} failed, {}\nstderr:\n{}", cmd, output.status, stderr) - } - _ => bail!("{:?} failed, {}", cmd, output.status), - } - } - let stdout = String::from_utf8(output.stdout)?; - Ok(stdout) -} diff --git 
a/crates/ra_project_model/src/project_json.rs b/crates/ra_project_model/src/project_json.rs deleted file mode 100644 index e3f3163f6a..0000000000 --- a/crates/ra_project_model/src/project_json.rs +++ /dev/null @@ -1,143 +0,0 @@ -//! FIXME: write short doc here - -use std::path::PathBuf; - -use paths::{AbsPath, AbsPathBuf}; -use ra_db::{CrateId, CrateName, Dependency, Edition}; -use rustc_hash::FxHashMap; -use serde::{de, Deserialize}; - -use crate::cfg_flag::CfgFlag; - -/// Roots and crates that compose this Rust project. -#[derive(Clone, Debug, Eq, PartialEq)] -pub struct ProjectJson { - pub(crate) crates: Vec, -} - -/// A crate points to the root module of a crate and lists the dependencies of the crate. This is -/// useful in creating the crate graph. -#[derive(Clone, Debug, Eq, PartialEq)] -pub struct Crate { - pub(crate) root_module: AbsPathBuf, - pub(crate) edition: Edition, - pub(crate) deps: Vec, - pub(crate) cfg: Vec, - pub(crate) target: Option, - pub(crate) env: FxHashMap, - pub(crate) proc_macro_dylib_path: Option, - pub(crate) is_workspace_member: bool, - pub(crate) include: Vec, - pub(crate) exclude: Vec, -} - -impl ProjectJson { - pub fn new(base: &AbsPath, data: ProjectJsonData) -> ProjectJson { - ProjectJson { - crates: data - .crates - .into_iter() - .map(|crate_data| { - let is_workspace_member = crate_data.is_workspace_member.unwrap_or_else(|| { - crate_data.root_module.is_relative() - && !crate_data.root_module.starts_with("..") - || crate_data.root_module.starts_with(base) - }); - let root_module = base.join(crate_data.root_module); - let (include, exclude) = match crate_data.source { - Some(src) => { - let absolutize = |dirs: Vec| { - dirs.into_iter().map(|it| base.join(it)).collect::>() - }; - (absolutize(src.include_dirs), absolutize(src.exclude_dirs)) - } - None => (vec![root_module.parent().unwrap().to_path_buf()], Vec::new()), - }; - - Crate { - root_module, - edition: crate_data.edition.into(), - deps: crate_data - .deps - .into_iter() 
- .map(|dep_data| Dependency { - crate_id: CrateId(dep_data.krate as u32), - name: dep_data.name, - }) - .collect::>(), - cfg: crate_data.cfg, - target: crate_data.target, - env: crate_data.env, - proc_macro_dylib_path: crate_data - .proc_macro_dylib_path - .map(|it| base.join(it)), - is_workspace_member, - include, - exclude, - } - }) - .collect::>(), - } - } -} - -#[derive(Deserialize)] -pub struct ProjectJsonData { - crates: Vec, -} - -#[derive(Deserialize)] -struct CrateData { - root_module: PathBuf, - edition: EditionData, - deps: Vec, - #[serde(default)] - cfg: Vec, - target: Option, - #[serde(default)] - env: FxHashMap, - proc_macro_dylib_path: Option, - is_workspace_member: Option, - source: Option, -} - -#[derive(Deserialize)] -#[serde(rename = "edition")] -enum EditionData { - #[serde(rename = "2015")] - Edition2015, - #[serde(rename = "2018")] - Edition2018, -} - -impl From for Edition { - fn from(data: EditionData) -> Self { - match data { - EditionData::Edition2015 => Edition::Edition2015, - EditionData::Edition2018 => Edition::Edition2018, - } - } -} - -#[derive(Deserialize)] -struct DepData { - /// Identifies a crate by position in the crates array. - #[serde(rename = "crate")] - krate: usize, - #[serde(deserialize_with = "deserialize_crate_name")] - name: CrateName, -} - -#[derive(Deserialize)] -struct CrateSource { - include_dirs: Vec, - exclude_dirs: Vec, -} - -fn deserialize_crate_name<'de, D>(de: D) -> Result -where - D: de::Deserializer<'de>, -{ - let name = String::deserialize(de)?; - CrateName::new(&name).map_err(|err| de::Error::custom(format!("invalid crate name: {:?}", err))) -} diff --git a/crates/ra_project_model/src/sysroot.rs b/crates/ra_project_model/src/sysroot.rs deleted file mode 100644 index a10ade3757..0000000000 --- a/crates/ra_project_model/src/sysroot.rs +++ /dev/null @@ -1,173 +0,0 @@ -//! 
FIXME: write short doc here - -use std::{convert::TryFrom, env, ops, path::Path, process::Command}; - -use anyhow::{bail, format_err, Result}; -use paths::{AbsPath, AbsPathBuf}; -use ra_arena::{Arena, Idx}; - -use crate::utf8_stdout; - -#[derive(Default, Debug, Clone, Eq, PartialEq)] -pub struct Sysroot { - crates: Arena, -} - -pub type SysrootCrate = Idx; - -#[derive(Debug, Clone, Eq, PartialEq)] -pub struct SysrootCrateData { - pub name: String, - pub root: AbsPathBuf, - pub deps: Vec, -} - -impl ops::Index for Sysroot { - type Output = SysrootCrateData; - fn index(&self, index: SysrootCrate) -> &SysrootCrateData { - &self.crates[index] - } -} - -impl Sysroot { - pub fn core(&self) -> Option { - self.by_name("core") - } - - pub fn alloc(&self) -> Option { - self.by_name("alloc") - } - - pub fn std(&self) -> Option { - self.by_name("std") - } - - pub fn proc_macro(&self) -> Option { - self.by_name("proc_macro") - } - - pub fn crates<'a>(&'a self) -> impl Iterator + ExactSizeIterator + 'a { - self.crates.iter().map(|(id, _data)| id) - } - - pub fn discover(cargo_toml: &AbsPath) -> Result { - let src = get_or_install_rust_src(cargo_toml)?; - let mut sysroot = Sysroot { crates: Arena::default() }; - for name in SYSROOT_CRATES.trim().lines() { - // FIXME: remove this path when 1.47 comes out - // https://github.com/rust-lang/rust/pull/73265 - let root = src.join(format!("lib{}", name)).join("lib.rs"); - if root.exists() { - sysroot.crates.alloc(SysrootCrateData { - name: name.into(), - root, - deps: Vec::new(), - }); - } else { - let root = src.join(name).join("src/lib.rs"); - if root.exists() { - sysroot.crates.alloc(SysrootCrateData { - name: name.into(), - root, - deps: Vec::new(), - }); - } - } - } - if let Some(std) = sysroot.std() { - for dep in STD_DEPS.trim().lines() { - if let Some(dep) = sysroot.by_name(dep) { - sysroot.crates[std].deps.push(dep) - } - } - } - if let Some(alloc) = sysroot.alloc() { - if let Some(core) = sysroot.core() { - 
sysroot.crates[alloc].deps.push(core); - } - } - Ok(sysroot) - } - - fn by_name(&self, name: &str) -> Option { - self.crates.iter().find(|(_id, data)| data.name == name).map(|(id, _data)| id) - } -} - -fn get_or_install_rust_src(cargo_toml: &AbsPath) -> Result { - if let Ok(path) = env::var("RUST_SRC_PATH") { - let path = AbsPathBuf::try_from(path.as_str()) - .map_err(|path| format_err!("RUST_SRC_PATH must be absolute: {}", path.display()))?; - return Ok(path); - } - let current_dir = cargo_toml.parent().unwrap(); - let mut rustc = Command::new(ra_toolchain::rustc()); - rustc.current_dir(current_dir).args(&["--print", "sysroot"]); - let stdout = utf8_stdout(rustc)?; - let sysroot_path = AbsPath::assert(Path::new(stdout.trim())); - let mut src = get_rust_src(sysroot_path); - if src.is_none() { - let mut rustup = Command::new(ra_toolchain::rustup()); - rustup.current_dir(current_dir).args(&["component", "add", "rust-src"]); - utf8_stdout(rustup)?; - src = get_rust_src(sysroot_path); - } - match src { - Some(r) => Ok(r), - None => bail!( - "can't load standard library from sysroot\n\ - {}\n\ - (discovered via `rustc --print sysroot`)\n\ - try running `rustup component add rust-src` or set `RUST_SRC_PATH`", - sysroot_path.display(), - ), - } -} - -fn get_rust_src(sysroot_path: &AbsPath) -> Option { - // try the new path first since the old one still exists - let mut src_path = sysroot_path.join("lib/rustlib/src/rust/library"); - if !src_path.exists() { - // FIXME: remove this path when 1.47 comes out - // https://github.com/rust-lang/rust/pull/73265 - src_path = sysroot_path.join("lib/rustlib/src/rust/src"); - } - if src_path.exists() { - Some(src_path) - } else { - None - } -} - -impl SysrootCrateData { - pub fn root_dir(&self) -> &AbsPath { - self.root.parent().unwrap() - } -} - -const SYSROOT_CRATES: &str = " -alloc -core -panic_abort -panic_unwind -proc_macro -profiler_builtins -rtstartup -std -stdarch -term -test -unwind"; - -const STD_DEPS: &str = " -alloc -core 
-panic_abort -panic_unwind -profiler_builtins -rtstartup -proc_macro -stdarch -term -test -unwind"; diff --git a/crates/ra_ssr/Cargo.toml b/crates/ra_ssr/Cargo.toml deleted file mode 100644 index 84e4b171e1..0000000000 --- a/crates/ra_ssr/Cargo.toml +++ /dev/null @@ -1,23 +0,0 @@ -[package] -edition = "2018" -name = "ra_ssr" -version = "0.1.0" -authors = ["rust-analyzer developers"] -license = "MIT OR Apache-2.0" -description = "Structural search and replace of Rust code" -repository = "https://github.com/rust-analyzer/rust-analyzer" - -[lib] -doctest = false - -[dependencies] -ra_text_edit = { path = "../ra_text_edit" } -ra_syntax = { path = "../ra_syntax" } -ra_db = { path = "../ra_db" } -ra_ide_db = { path = "../ra_ide_db" } -hir = { path = "../ra_hir", package = "ra_hir" } -rustc-hash = "1.1.0" -test_utils = { path = "../test_utils" } - -[dev-dependencies] -expect = { path = "../expect" } diff --git a/crates/ra_ssr/src/lib.rs b/crates/ra_ssr/src/lib.rs deleted file mode 100644 index c780b460a7..0000000000 --- a/crates/ra_ssr/src/lib.rs +++ /dev/null @@ -1,286 +0,0 @@ -//! Structural Search Replace -//! -//! Allows searching the AST for code that matches one or more patterns and then replacing that code -//! based on a template. - -mod matching; -mod nester; -mod parsing; -mod replacing; -mod resolving; -mod search; -#[macro_use] -mod errors; -#[cfg(test)] -mod tests; - -use crate::errors::bail; -pub use crate::errors::SsrError; -pub use crate::matching::Match; -use crate::matching::MatchFailureReason; -use hir::Semantics; -use ra_db::{FileId, FilePosition, FileRange}; -use ra_ide_db::source_change::SourceFileEdit; -use ra_syntax::{ast, AstNode, SyntaxNode, TextRange}; -use resolving::ResolvedRule; -use rustc_hash::FxHashMap; - -// A structured search replace rule. Create by calling `parse` on a str. -#[derive(Debug)] -pub struct SsrRule { - /// A structured pattern that we're searching for. - pattern: parsing::RawPattern, - /// What we'll replace it with. 
- template: parsing::RawPattern, - parsed_rules: Vec, -} - -#[derive(Debug)] -pub struct SsrPattern { - raw: parsing::RawPattern, - parsed_rules: Vec, -} - -#[derive(Debug, Default)] -pub struct SsrMatches { - pub matches: Vec, -} - -/// Searches a crate for pattern matches and possibly replaces them with something else. -pub struct MatchFinder<'db> { - /// Our source of information about the user's code. - sema: Semantics<'db, ra_ide_db::RootDatabase>, - rules: Vec, - resolution_scope: resolving::ResolutionScope<'db>, - restrict_ranges: Vec, -} - -impl<'db> MatchFinder<'db> { - /// Constructs a new instance where names will be looked up as if they appeared at - /// `lookup_context`. - pub fn in_context( - db: &'db ra_ide_db::RootDatabase, - lookup_context: FilePosition, - mut restrict_ranges: Vec, - ) -> MatchFinder<'db> { - restrict_ranges.retain(|range| !range.range.is_empty()); - let sema = Semantics::new(db); - let resolution_scope = resolving::ResolutionScope::new(&sema, lookup_context); - MatchFinder { sema, rules: Vec::new(), resolution_scope, restrict_ranges } - } - - /// Constructs an instance using the start of the first file in `db` as the lookup context. - pub fn at_first_file(db: &'db ra_ide_db::RootDatabase) -> Result, SsrError> { - use ra_db::SourceDatabaseExt; - use ra_ide_db::symbol_index::SymbolsDatabase; - if let Some(first_file_id) = db - .local_roots() - .iter() - .next() - .and_then(|root| db.source_root(root.clone()).iter().next()) - { - Ok(MatchFinder::in_context( - db, - FilePosition { file_id: first_file_id, offset: 0.into() }, - vec![], - )) - } else { - bail!("No files to search"); - } - } - - /// Adds a rule to be applied. The order in which rules are added matters. Earlier rules take - /// precedence. If a node is matched by an earlier rule, then later rules won't be permitted to - /// match to it. 
- pub fn add_rule(&mut self, rule: SsrRule) -> Result<(), SsrError> { - for parsed_rule in rule.parsed_rules { - self.rules.push(ResolvedRule::new( - parsed_rule, - &self.resolution_scope, - self.rules.len(), - )?); - } - Ok(()) - } - - /// Finds matches for all added rules and returns edits for all found matches. - pub fn edits(&self) -> Vec { - use ra_db::SourceDatabaseExt; - let mut matches_by_file = FxHashMap::default(); - for m in self.matches().matches { - matches_by_file - .entry(m.range.file_id) - .or_insert_with(|| SsrMatches::default()) - .matches - .push(m); - } - let mut edits = vec![]; - for (file_id, matches) in matches_by_file { - let edit = - replacing::matches_to_edit(&matches, &self.sema.db.file_text(file_id), &self.rules); - edits.push(SourceFileEdit { file_id, edit }); - } - edits - } - - /// Adds a search pattern. For use if you intend to only call `find_matches_in_file`. If you - /// intend to do replacement, use `add_rule` instead. - pub fn add_search_pattern(&mut self, pattern: SsrPattern) -> Result<(), SsrError> { - for parsed_rule in pattern.parsed_rules { - self.rules.push(ResolvedRule::new( - parsed_rule, - &self.resolution_scope, - self.rules.len(), - )?); - } - Ok(()) - } - - /// Returns matches for all added rules. - pub fn matches(&self) -> SsrMatches { - let mut matches = Vec::new(); - let mut usage_cache = search::UsageCache::default(); - for rule in &self.rules { - self.find_matches_for_rule(rule, &mut usage_cache, &mut matches); - } - nester::nest_and_remove_collisions(matches, &self.sema) - } - - /// Finds all nodes in `file_id` whose text is exactly equal to `snippet` and attempts to match - /// them, while recording reasons why they don't match. This API is useful for command - /// line-based debugging where providing a range is difficult. 
- pub fn debug_where_text_equal(&self, file_id: FileId, snippet: &str) -> Vec { - use ra_db::SourceDatabaseExt; - let file = self.sema.parse(file_id); - let mut res = Vec::new(); - let file_text = self.sema.db.file_text(file_id); - let mut remaining_text = file_text.as_str(); - let mut base = 0; - let len = snippet.len() as u32; - while let Some(offset) = remaining_text.find(snippet) { - let start = base + offset as u32; - let end = start + len; - self.output_debug_for_nodes_at_range( - file.syntax(), - FileRange { file_id, range: TextRange::new(start.into(), end.into()) }, - &None, - &mut res, - ); - remaining_text = &remaining_text[offset + snippet.len()..]; - base = end; - } - res - } - - fn output_debug_for_nodes_at_range( - &self, - node: &SyntaxNode, - range: FileRange, - restrict_range: &Option, - out: &mut Vec, - ) { - for node in node.children() { - let node_range = self.sema.original_range(&node); - if node_range.file_id != range.file_id || !node_range.range.contains_range(range.range) - { - continue; - } - if node_range.range == range.range { - for rule in &self.rules { - // For now we ignore rules that have a different kind than our node, otherwise - // we get lots of noise. If at some point we add support for restricting rules - // to a particular kind of thing (e.g. only match type references), then we can - // relax this. We special-case expressions, since function calls can match - // method calls. 
- if rule.pattern.node.kind() != node.kind() - && !(ast::Expr::can_cast(rule.pattern.node.kind()) - && ast::Expr::can_cast(node.kind())) - { - continue; - } - out.push(MatchDebugInfo { - matched: matching::get_match(true, rule, &node, restrict_range, &self.sema) - .map_err(|e| MatchFailureReason { - reason: e.reason.unwrap_or_else(|| { - "Match failed, but no reason was given".to_owned() - }), - }), - pattern: rule.pattern.node.clone(), - node: node.clone(), - }); - } - } else if let Some(macro_call) = ast::MacroCall::cast(node.clone()) { - if let Some(expanded) = self.sema.expand(&macro_call) { - if let Some(tt) = macro_call.token_tree() { - self.output_debug_for_nodes_at_range( - &expanded, - range, - &Some(self.sema.original_range(tt.syntax())), - out, - ); - } - } - } - self.output_debug_for_nodes_at_range(&node, range, restrict_range, out); - } - } -} - -pub struct MatchDebugInfo { - node: SyntaxNode, - /// Our search pattern parsed as an expression or item, etc - pattern: SyntaxNode, - matched: Result<(), MatchFailureReason>, -} - -impl std::fmt::Debug for MatchDebugInfo { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match &self.matched { - Ok(_) => writeln!(f, "Node matched")?, - Err(reason) => writeln!(f, "Node failed to match because: {}", reason.reason)?, - } - writeln!( - f, - "============ AST ===========\n\ - {:#?}", - self.node - )?; - writeln!(f, "========= PATTERN ==========")?; - writeln!(f, "{:#?}", self.pattern)?; - writeln!(f, "============================")?; - Ok(()) - } -} - -impl SsrMatches { - /// Returns `self` with any nested matches removed and made into top-level matches. 
- pub fn flattened(self) -> SsrMatches { - let mut out = SsrMatches::default(); - self.flatten_into(&mut out); - out - } - - fn flatten_into(self, out: &mut SsrMatches) { - for mut m in self.matches { - for p in m.placeholder_values.values_mut() { - std::mem::replace(&mut p.inner_matches, SsrMatches::default()).flatten_into(out); - } - out.matches.push(m); - } - } -} - -impl Match { - pub fn matched_text(&self) -> String { - self.matched_node.text().to_string() - } -} - -impl std::error::Error for SsrError {} - -#[cfg(test)] -impl MatchDebugInfo { - pub(crate) fn match_failure_reason(&self) -> Option<&str> { - self.matched.as_ref().err().map(|r| r.reason.as_str()) - } -} diff --git a/crates/ra_ssr/src/matching.rs b/crates/ra_ssr/src/matching.rs deleted file mode 100644 index 0f72fea691..0000000000 --- a/crates/ra_ssr/src/matching.rs +++ /dev/null @@ -1,732 +0,0 @@ -//! This module is responsible for matching a search pattern against a node in the AST. In the -//! process of matching, placeholder values are recorded. - -use crate::{ - parsing::{Constraint, NodeKind, Placeholder}, - resolving::{ResolvedPattern, ResolvedRule}, - SsrMatches, -}; -use hir::Semantics; -use ra_db::FileRange; -use ra_syntax::ast::{AstNode, AstToken}; -use ra_syntax::{ast, SyntaxElement, SyntaxElementChildren, SyntaxKind, SyntaxNode, SyntaxToken}; -use rustc_hash::FxHashMap; -use std::{cell::Cell, iter::Peekable}; -use test_utils::mark; - -// Creates a match error. If we're currently attempting to match some code that we thought we were -// going to match, as indicated by the --debug-snippet flag, then populate the reason field. -macro_rules! 
match_error { - ($e:expr) => {{ - MatchFailed { - reason: if recording_match_fail_reasons() { - Some(format!("{}", $e)) - } else { - None - } - } - }}; - ($fmt:expr, $($arg:tt)+) => {{ - MatchFailed { - reason: if recording_match_fail_reasons() { - Some(format!($fmt, $($arg)+)) - } else { - None - } - } - }}; -} - -// Fails the current match attempt, recording the supplied reason if we're recording match fail reasons. -macro_rules! fail_match { - ($($args:tt)*) => {return Err(match_error!($($args)*))}; -} - -/// Information about a match that was found. -#[derive(Debug)] -pub struct Match { - pub(crate) range: FileRange, - pub(crate) matched_node: SyntaxNode, - pub(crate) placeholder_values: FxHashMap, - pub(crate) ignored_comments: Vec, - pub(crate) rule_index: usize, - /// The depth of matched_node. - pub(crate) depth: usize, - // Each path in the template rendered for the module in which the match was found. - pub(crate) rendered_template_paths: FxHashMap, -} - -/// Represents a `$var` in an SSR query. -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub(crate) struct Var(pub String); - -/// Information about a placeholder bound in a match. -#[derive(Debug)] -pub(crate) struct PlaceholderMatch { - /// The node that the placeholder matched to. If set, then we'll search for further matches - /// within this node. It isn't set when we match tokens within a macro call's token tree. - pub(crate) node: Option, - pub(crate) range: FileRange, - /// More matches, found within `node`. - pub(crate) inner_matches: SsrMatches, -} - -#[derive(Debug)] -pub(crate) struct MatchFailureReason { - pub(crate) reason: String, -} - -/// An "error" indicating that matching failed. Use the fail_match! macro to create and return this. -#[derive(Clone)] -pub(crate) struct MatchFailed { - /// The reason why we failed to match. Only present when debug_active true in call to - /// `get_match`. 
- pub(crate) reason: Option, -} - -/// Checks if `code` matches the search pattern found in `search_scope`, returning information about -/// the match, if it does. Since we only do matching in this module and searching is done by the -/// parent module, we don't populate nested matches. -pub(crate) fn get_match( - debug_active: bool, - rule: &ResolvedRule, - code: &SyntaxNode, - restrict_range: &Option, - sema: &Semantics, -) -> Result { - record_match_fails_reasons_scope(debug_active, || { - Matcher::try_match(rule, code, restrict_range, sema) - }) -} - -/// Checks if our search pattern matches a particular node of the AST. -struct Matcher<'db, 'sema> { - sema: &'sema Semantics<'db, ra_ide_db::RootDatabase>, - /// If any placeholders come from anywhere outside of this range, then the match will be - /// rejected. - restrict_range: Option, - rule: &'sema ResolvedRule, -} - -/// Which phase of matching we're currently performing. We do two phases because most attempted -/// matches will fail and it means we can defer more expensive checks to the second phase. -enum Phase<'a> { - /// On the first phase, we perform cheap checks. No state is mutated and nothing is recorded. - First, - /// On the second phase, we construct the `Match`. Things like what placeholders bind to is - /// recorded. - Second(&'a mut Match), -} - -impl<'db, 'sema> Matcher<'db, 'sema> { - fn try_match( - rule: &ResolvedRule, - code: &SyntaxNode, - restrict_range: &Option, - sema: &'sema Semantics<'db, ra_ide_db::RootDatabase>, - ) -> Result { - let match_state = Matcher { sema, restrict_range: restrict_range.clone(), rule }; - // First pass at matching, where we check that node types and idents match. 
- match_state.attempt_match_node(&mut Phase::First, &rule.pattern.node, code)?; - match_state.validate_range(&sema.original_range(code))?; - let mut the_match = Match { - range: sema.original_range(code), - matched_node: code.clone(), - placeholder_values: FxHashMap::default(), - ignored_comments: Vec::new(), - rule_index: rule.index, - depth: 0, - rendered_template_paths: FxHashMap::default(), - }; - // Second matching pass, where we record placeholder matches, ignored comments and maybe do - // any other more expensive checks that we didn't want to do on the first pass. - match_state.attempt_match_node( - &mut Phase::Second(&mut the_match), - &rule.pattern.node, - code, - )?; - the_match.depth = sema.ancestors_with_macros(the_match.matched_node.clone()).count(); - if let Some(template) = &rule.template { - the_match.render_template_paths(template, sema)?; - } - Ok(the_match) - } - - /// Checks that `range` is within the permitted range if any. This is applicable when we're - /// processing a macro expansion and we want to fail the match if we're working with a node that - /// didn't originate from the token tree of the macro call. - fn validate_range(&self, range: &FileRange) -> Result<(), MatchFailed> { - if let Some(restrict_range) = &self.restrict_range { - if restrict_range.file_id != range.file_id - || !restrict_range.range.contains_range(range.range) - { - fail_match!("Node originated from a macro"); - } - } - Ok(()) - } - - fn attempt_match_node( - &self, - phase: &mut Phase, - pattern: &SyntaxNode, - code: &SyntaxNode, - ) -> Result<(), MatchFailed> { - // Handle placeholders. 
- if let Some(placeholder) = self.get_placeholder(&SyntaxElement::Node(pattern.clone())) { - for constraint in &placeholder.constraints { - self.check_constraint(constraint, code)?; - } - if let Phase::Second(matches_out) = phase { - let original_range = self.sema.original_range(code); - // We validated the range for the node when we started the match, so the placeholder - // probably can't fail range validation, but just to be safe... - self.validate_range(&original_range)?; - matches_out.placeholder_values.insert( - Var(placeholder.ident.to_string()), - PlaceholderMatch::new(code, original_range), - ); - } - return Ok(()); - } - // We allow a UFCS call to match a method call, provided they resolve to the same function. - if let Some(pattern_function) = self.rule.pattern.ufcs_function_calls.get(pattern) { - if let (Some(pattern), Some(code)) = - (ast::CallExpr::cast(pattern.clone()), ast::MethodCallExpr::cast(code.clone())) - { - return self.attempt_match_ufcs(phase, &pattern, &code, *pattern_function); - } - } - if pattern.kind() != code.kind() { - fail_match!( - "Pattern had `{}` ({:?}), code had `{}` ({:?})", - pattern.text(), - pattern.kind(), - code.text(), - code.kind() - ); - } - // Some kinds of nodes have special handling. For everything else, we fall back to default - // matching. 
- match code.kind() { - SyntaxKind::RECORD_EXPR_FIELD_LIST => { - self.attempt_match_record_field_list(phase, pattern, code) - } - SyntaxKind::TOKEN_TREE => self.attempt_match_token_tree(phase, pattern, code), - SyntaxKind::PATH => self.attempt_match_path(phase, pattern, code), - _ => self.attempt_match_node_children(phase, pattern, code), - } - } - - fn attempt_match_node_children( - &self, - phase: &mut Phase, - pattern: &SyntaxNode, - code: &SyntaxNode, - ) -> Result<(), MatchFailed> { - self.attempt_match_sequences( - phase, - PatternIterator::new(pattern), - code.children_with_tokens(), - ) - } - - fn attempt_match_sequences( - &self, - phase: &mut Phase, - pattern_it: PatternIterator, - mut code_it: SyntaxElementChildren, - ) -> Result<(), MatchFailed> { - let mut pattern_it = pattern_it.peekable(); - loop { - match phase.next_non_trivial(&mut code_it) { - None => { - if let Some(p) = pattern_it.next() { - fail_match!("Part of the pattern was unmatched: {:?}", p); - } - return Ok(()); - } - Some(SyntaxElement::Token(c)) => { - self.attempt_match_token(phase, &mut pattern_it, &c)?; - } - Some(SyntaxElement::Node(c)) => match pattern_it.next() { - Some(SyntaxElement::Node(p)) => { - self.attempt_match_node(phase, &p, &c)?; - } - Some(p) => fail_match!("Pattern wanted '{}', code has {}", p, c.text()), - None => fail_match!("Pattern reached end, code has {}", c.text()), - }, - } - } - } - - fn attempt_match_token( - &self, - phase: &mut Phase, - pattern: &mut Peekable, - code: &ra_syntax::SyntaxToken, - ) -> Result<(), MatchFailed> { - phase.record_ignored_comments(code); - // Ignore whitespace and comments. - if code.kind().is_trivia() { - return Ok(()); - } - if let Some(SyntaxElement::Token(p)) = pattern.peek() { - // If the code has a comma and the pattern is about to close something, then accept the - // comma without advancing the pattern. i.e. ignore trailing commas. 
- if code.kind() == SyntaxKind::COMMA && is_closing_token(p.kind()) { - return Ok(()); - } - // Conversely, if the pattern has a comma and the code doesn't, skip that part of the - // pattern and continue to match the code. - if p.kind() == SyntaxKind::COMMA && is_closing_token(code.kind()) { - pattern.next(); - } - } - // Consume an element from the pattern and make sure it matches. - match pattern.next() { - Some(SyntaxElement::Token(p)) => { - if p.kind() != code.kind() || p.text() != code.text() { - fail_match!( - "Pattern wanted token '{}' ({:?}), but code had token '{}' ({:?})", - p.text(), - p.kind(), - code.text(), - code.kind() - ) - } - } - Some(SyntaxElement::Node(p)) => { - // Not sure if this is actually reachable. - fail_match!( - "Pattern wanted {:?}, but code had token '{}' ({:?})", - p, - code.text(), - code.kind() - ); - } - None => { - fail_match!("Pattern exhausted, while code remains: `{}`", code.text()); - } - } - Ok(()) - } - - fn check_constraint( - &self, - constraint: &Constraint, - code: &SyntaxNode, - ) -> Result<(), MatchFailed> { - match constraint { - Constraint::Kind(kind) => { - kind.matches(code)?; - } - Constraint::Not(sub) => { - if self.check_constraint(&*sub, code).is_ok() { - fail_match!("Constraint {:?} failed for '{}'", constraint, code.text()); - } - } - } - Ok(()) - } - - /// Paths are matched based on whether they refer to the same thing, even if they're written - /// differently. 
- fn attempt_match_path( - &self, - phase: &mut Phase, - pattern: &SyntaxNode, - code: &SyntaxNode, - ) -> Result<(), MatchFailed> { - if let Some(pattern_resolved) = self.rule.pattern.resolved_paths.get(pattern) { - let pattern_path = ast::Path::cast(pattern.clone()).unwrap(); - let code_path = ast::Path::cast(code.clone()).unwrap(); - if let (Some(pattern_segment), Some(code_segment)) = - (pattern_path.segment(), code_path.segment()) - { - // Match everything within the segment except for the name-ref, which is handled - // separately via comparing what the path resolves to below. - self.attempt_match_opt( - phase, - pattern_segment.generic_arg_list(), - code_segment.generic_arg_list(), - )?; - self.attempt_match_opt( - phase, - pattern_segment.param_list(), - code_segment.param_list(), - )?; - } - if matches!(phase, Phase::Second(_)) { - let resolution = self - .sema - .resolve_path(&code_path) - .ok_or_else(|| match_error!("Failed to resolve path `{}`", code.text()))?; - if pattern_resolved.resolution != resolution { - fail_match!("Pattern had path `{}` code had `{}`", pattern.text(), code.text()); - } - } - } else { - return self.attempt_match_node_children(phase, pattern, code); - } - Ok(()) - } - - fn attempt_match_opt( - &self, - phase: &mut Phase, - pattern: Option, - code: Option, - ) -> Result<(), MatchFailed> { - match (pattern, code) { - (Some(p), Some(c)) => self.attempt_match_node(phase, &p.syntax(), &c.syntax()), - (None, None) => Ok(()), - (Some(p), None) => fail_match!("Pattern `{}` had nothing to match", p.syntax().text()), - (None, Some(c)) => { - fail_match!("Nothing in pattern to match code `{}`", c.syntax().text()) - } - } - } - - /// We want to allow the records to match in any order, so we have special matching logic for - /// them. - fn attempt_match_record_field_list( - &self, - phase: &mut Phase, - pattern: &SyntaxNode, - code: &SyntaxNode, - ) -> Result<(), MatchFailed> { - // Build a map keyed by field name. 
- let mut fields_by_name = FxHashMap::default(); - for child in code.children() { - if let Some(record) = ast::RecordExprField::cast(child.clone()) { - if let Some(name) = record.field_name() { - fields_by_name.insert(name.text().clone(), child.clone()); - } - } - } - for p in pattern.children_with_tokens() { - if let SyntaxElement::Node(p) = p { - if let Some(name_element) = p.first_child_or_token() { - if self.get_placeholder(&name_element).is_some() { - // If the pattern is using placeholders for field names then order - // independence doesn't make sense. Fall back to regular ordered - // matching. - return self.attempt_match_node_children(phase, pattern, code); - } - if let Some(ident) = only_ident(name_element) { - let code_record = fields_by_name.remove(ident.text()).ok_or_else(|| { - match_error!( - "Placeholder has record field '{}', but code doesn't", - ident - ) - })?; - self.attempt_match_node(phase, &p, &code_record)?; - } - } - } - } - if let Some(unmatched_fields) = fields_by_name.keys().next() { - fail_match!( - "{} field(s) of a record literal failed to match, starting with {}", - fields_by_name.len(), - unmatched_fields - ); - } - Ok(()) - } - - /// Outside of token trees, a placeholder can only match a single AST node, whereas in a token - /// tree it can match a sequence of tokens. Note, that this code will only be used when the - /// pattern matches the macro invocation. For matches within the macro call, we'll already have - /// expanded the macro. 
- fn attempt_match_token_tree( - &self, - phase: &mut Phase, - pattern: &SyntaxNode, - code: &ra_syntax::SyntaxNode, - ) -> Result<(), MatchFailed> { - let mut pattern = PatternIterator::new(pattern).peekable(); - let mut children = code.children_with_tokens(); - while let Some(child) = children.next() { - if let Some(placeholder) = pattern.peek().and_then(|p| self.get_placeholder(p)) { - pattern.next(); - let next_pattern_token = pattern - .peek() - .and_then(|p| match p { - SyntaxElement::Token(t) => Some(t.clone()), - SyntaxElement::Node(n) => n.first_token(), - }) - .map(|p| p.text().to_string()); - let first_matched_token = child.clone(); - let mut last_matched_token = child; - // Read code tokens util we reach one equal to the next token from our pattern - // or we reach the end of the token tree. - while let Some(next) = children.next() { - match &next { - SyntaxElement::Token(t) => { - if Some(t.to_string()) == next_pattern_token { - pattern.next(); - break; - } - } - SyntaxElement::Node(n) => { - if let Some(first_token) = n.first_token() { - if Some(first_token.to_string()) == next_pattern_token { - if let Some(SyntaxElement::Node(p)) = pattern.next() { - // We have a subtree that starts with the next token in our pattern. - self.attempt_match_token_tree(phase, &p, &n)?; - break; - } - } - } - } - }; - last_matched_token = next; - } - if let Phase::Second(match_out) = phase { - match_out.placeholder_values.insert( - Var(placeholder.ident.to_string()), - PlaceholderMatch::from_range(FileRange { - file_id: self.sema.original_range(code).file_id, - range: first_matched_token - .text_range() - .cover(last_matched_token.text_range()), - }), - ); - } - continue; - } - // Match literal (non-placeholder) tokens. 
- match child { - SyntaxElement::Token(token) => { - self.attempt_match_token(phase, &mut pattern, &token)?; - } - SyntaxElement::Node(node) => match pattern.next() { - Some(SyntaxElement::Node(p)) => { - self.attempt_match_token_tree(phase, &p, &node)?; - } - Some(SyntaxElement::Token(p)) => fail_match!( - "Pattern has token '{}', code has subtree '{}'", - p.text(), - node.text() - ), - None => fail_match!("Pattern has nothing, code has '{}'", node.text()), - }, - } - } - if let Some(p) = pattern.next() { - fail_match!("Reached end of token tree in code, but pattern still has {:?}", p); - } - Ok(()) - } - - fn attempt_match_ufcs( - &self, - phase: &mut Phase, - pattern: &ast::CallExpr, - code: &ast::MethodCallExpr, - pattern_function: hir::Function, - ) -> Result<(), MatchFailed> { - use ast::ArgListOwner; - let code_resolved_function = self - .sema - .resolve_method_call(code) - .ok_or_else(|| match_error!("Failed to resolve method call"))?; - if pattern_function != code_resolved_function { - fail_match!("Method call resolved to a different function"); - } - // Check arguments. - let mut pattern_args = pattern - .arg_list() - .ok_or_else(|| match_error!("Pattern function call has no args"))? 
- .args(); - self.attempt_match_opt(phase, pattern_args.next(), code.expr())?; - let mut code_args = - code.arg_list().ok_or_else(|| match_error!("Code method call has no args"))?.args(); - loop { - match (pattern_args.next(), code_args.next()) { - (None, None) => return Ok(()), - (p, c) => self.attempt_match_opt(phase, p, c)?, - } - } - } - - fn get_placeholder(&self, element: &SyntaxElement) -> Option<&Placeholder> { - only_ident(element.clone()).and_then(|ident| self.rule.get_placeholder(&ident)) - } -} - -impl Match { - fn render_template_paths( - &mut self, - template: &ResolvedPattern, - sema: &Semantics, - ) -> Result<(), MatchFailed> { - let module = sema - .scope(&self.matched_node) - .module() - .ok_or_else(|| match_error!("Matched node isn't in a module"))?; - for (path, resolved_path) in &template.resolved_paths { - if let hir::PathResolution::Def(module_def) = resolved_path.resolution { - let mod_path = module.find_use_path(sema.db, module_def).ok_or_else(|| { - match_error!("Failed to render template path `{}` at match location") - })?; - self.rendered_template_paths.insert(path.clone(), mod_path); - } - } - Ok(()) - } -} - -impl Phase<'_> { - fn next_non_trivial(&mut self, code_it: &mut SyntaxElementChildren) -> Option { - loop { - let c = code_it.next(); - if let Some(SyntaxElement::Token(t)) = &c { - self.record_ignored_comments(t); - if t.kind().is_trivia() { - continue; - } - } - return c; - } - } - - fn record_ignored_comments(&mut self, token: &SyntaxToken) { - if token.kind() == SyntaxKind::COMMENT { - if let Phase::Second(match_out) = self { - if let Some(comment) = ast::Comment::cast(token.clone()) { - match_out.ignored_comments.push(comment); - } - } - } - } -} - -fn is_closing_token(kind: SyntaxKind) -> bool { - kind == SyntaxKind::R_PAREN || kind == SyntaxKind::R_CURLY || kind == SyntaxKind::R_BRACK -} - -pub(crate) fn record_match_fails_reasons_scope(debug_active: bool, f: F) -> T -where - F: Fn() -> T, -{ - 
RECORDING_MATCH_FAIL_REASONS.with(|c| c.set(debug_active)); - let res = f(); - RECORDING_MATCH_FAIL_REASONS.with(|c| c.set(false)); - res -} - -// For performance reasons, we don't want to record the reason why every match fails, only the bit -// of code that the user indicated they thought would match. We use a thread local to indicate when -// we are trying to match that bit of code. This saves us having to pass a boolean into all the bits -// of code that can make the decision to not match. -thread_local! { - pub static RECORDING_MATCH_FAIL_REASONS: Cell = Cell::new(false); -} - -fn recording_match_fail_reasons() -> bool { - RECORDING_MATCH_FAIL_REASONS.with(|c| c.get()) -} - -impl PlaceholderMatch { - fn new(node: &SyntaxNode, range: FileRange) -> Self { - Self { node: Some(node.clone()), range, inner_matches: SsrMatches::default() } - } - - fn from_range(range: FileRange) -> Self { - Self { node: None, range, inner_matches: SsrMatches::default() } - } -} - -impl NodeKind { - fn matches(&self, node: &SyntaxNode) -> Result<(), MatchFailed> { - let ok = match self { - Self::Literal => { - mark::hit!(literal_constraint); - ast::Literal::can_cast(node.kind()) - } - }; - if !ok { - fail_match!("Code '{}' isn't of kind {:?}", node.text(), self); - } - Ok(()) - } -} - -// If `node` contains nothing but an ident then return it, otherwise return None. 
-fn only_ident(element: SyntaxElement) -> Option { - match element { - SyntaxElement::Token(t) => { - if t.kind() == SyntaxKind::IDENT { - return Some(t); - } - } - SyntaxElement::Node(n) => { - let mut children = n.children_with_tokens(); - if let (Some(only_child), None) = (children.next(), children.next()) { - return only_ident(only_child); - } - } - } - None -} - -struct PatternIterator { - iter: SyntaxElementChildren, -} - -impl Iterator for PatternIterator { - type Item = SyntaxElement; - - fn next(&mut self) -> Option { - while let Some(element) = self.iter.next() { - if !element.kind().is_trivia() { - return Some(element); - } - } - None - } -} - -impl PatternIterator { - fn new(parent: &SyntaxNode) -> Self { - Self { iter: parent.children_with_tokens() } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::{MatchFinder, SsrRule}; - - #[test] - fn parse_match_replace() { - let rule: SsrRule = "foo($x) ==>> bar($x)".parse().unwrap(); - let input = "fn foo() {} fn bar() {} fn main() { foo(1+2); }"; - - let (db, position, selections) = crate::tests::single_file(input); - let mut match_finder = MatchFinder::in_context(&db, position, selections); - match_finder.add_rule(rule).unwrap(); - let matches = match_finder.matches(); - assert_eq!(matches.matches.len(), 1); - assert_eq!(matches.matches[0].matched_node.text(), "foo(1+2)"); - assert_eq!(matches.matches[0].placeholder_values.len(), 1); - assert_eq!( - matches.matches[0].placeholder_values[&Var("x".to_string())] - .node - .as_ref() - .unwrap() - .text(), - "1+2" - ); - - let edits = match_finder.edits(); - assert_eq!(edits.len(), 1); - let edit = &edits[0]; - let mut after = input.to_string(); - edit.edit.apply(&mut after); - assert_eq!(after, "fn foo() {} fn bar() {} fn main() { bar(1+2); }"); - } -} diff --git a/crates/ra_ssr/src/nester.rs b/crates/ra_ssr/src/nester.rs deleted file mode 100644 index b3e20579bd..0000000000 --- a/crates/ra_ssr/src/nester.rs +++ /dev/null @@ -1,98 +0,0 @@ -//! 
Converts a flat collection of matches into a nested form suitable for replacement. When there -//! are multiple matches for a node, or that overlap, priority is given to the earlier rule. Nested -//! matches are only permitted if the inner match is contained entirely within a placeholder of an -//! outer match. -//! -//! For example, if our search pattern is `foo(foo($a))` and the code had `foo(foo(foo(foo(42))))`, -//! then we'll get 3 matches, however only the outermost and innermost matches can be accepted. The -//! middle match would take the second `foo` from the outer match. - -use crate::{Match, SsrMatches}; -use ra_syntax::SyntaxNode; -use rustc_hash::FxHashMap; - -pub(crate) fn nest_and_remove_collisions( - mut matches: Vec, - sema: &hir::Semantics, -) -> SsrMatches { - // We sort the matches by depth then by rule index. Sorting by depth means that by the time we - // see a match, any parent matches or conflicting matches will have already been seen. Sorting - // by rule_index means that if there are two matches for the same node, the rule added first - // will take precedence. - matches.sort_by(|a, b| a.depth.cmp(&b.depth).then_with(|| a.rule_index.cmp(&b.rule_index))); - let mut collector = MatchCollector::default(); - for m in matches { - collector.add_match(m, sema); - } - collector.into() -} - -#[derive(Default)] -struct MatchCollector { - matches_by_node: FxHashMap, -} - -impl MatchCollector { - /// Attempts to add `m` to matches. If it conflicts with an existing match, it is discarded. If - /// it is entirely within the a placeholder of an existing match, then it is added as a child - /// match of the existing match. 
- fn add_match(&mut self, m: Match, sema: &hir::Semantics) { - let matched_node = m.matched_node.clone(); - if let Some(existing) = self.matches_by_node.get_mut(&matched_node) { - try_add_sub_match(m, existing, sema); - return; - } - for ancestor in sema.ancestors_with_macros(m.matched_node.clone()) { - if let Some(existing) = self.matches_by_node.get_mut(&ancestor) { - try_add_sub_match(m, existing, sema); - return; - } - } - self.matches_by_node.insert(matched_node, m); - } -} - -/// Attempts to add `m` as a sub-match of `existing`. -fn try_add_sub_match( - m: Match, - existing: &mut Match, - sema: &hir::Semantics, -) { - for p in existing.placeholder_values.values_mut() { - // Note, no need to check if p.range.file is equal to m.range.file, since we - // already know we're within `existing`. - if p.range.range.contains_range(m.range.range) { - // Convert the inner matches in `p` into a temporary MatchCollector. When - // we're done, we then convert it back into an SsrMatches. If we expected - // lots of inner matches, it might be worthwhile keeping a MatchCollector - // around for each placeholder match. However we expect most placeholder - // will have 0 and a few will have 1. More than that should hopefully be - // exceptional. - let mut collector = MatchCollector::default(); - for m in std::mem::replace(&mut p.inner_matches.matches, Vec::new()) { - collector.matches_by_node.insert(m.matched_node.clone(), m); - } - collector.add_match(m, sema); - p.inner_matches = collector.into(); - break; - } - } -} - -impl From for SsrMatches { - fn from(mut match_collector: MatchCollector) -> Self { - let mut matches = SsrMatches::default(); - for (_, m) in match_collector.matches_by_node.drain() { - matches.matches.push(m); - } - matches.matches.sort_by(|a, b| { - // Order matches by file_id then by start range. This should be sufficient since ranges - // shouldn't be overlapping. 
- a.range - .file_id - .cmp(&b.range.file_id) - .then_with(|| a.range.range.start().cmp(&b.range.range.start())) - }); - matches - } -} diff --git a/crates/ra_ssr/src/parsing.rs b/crates/ra_ssr/src/parsing.rs deleted file mode 100644 index f455eb5b7e..0000000000 --- a/crates/ra_ssr/src/parsing.rs +++ /dev/null @@ -1,389 +0,0 @@ -//! This file contains code for parsing SSR rules, which look something like `foo($a) ==>> bar($b)`. -//! We first split everything before and after the separator `==>>`. Next, both the search pattern -//! and the replacement template get tokenized by the Rust tokenizer. Tokens are then searched for -//! placeholders, which start with `$`. For replacement templates, this is the final form. For -//! search patterns, we go further and parse the pattern as each kind of thing that we can match. -//! e.g. expressions, type references etc. - -use crate::errors::bail; -use crate::{SsrError, SsrPattern, SsrRule}; -use ra_syntax::{ast, AstNode, SmolStr, SyntaxKind, SyntaxNode, T}; -use rustc_hash::{FxHashMap, FxHashSet}; -use std::str::FromStr; -use test_utils::mark; - -#[derive(Debug)] -pub(crate) struct ParsedRule { - pub(crate) placeholders_by_stand_in: FxHashMap, - pub(crate) pattern: SyntaxNode, - pub(crate) template: Option, -} - -#[derive(Debug)] -pub(crate) struct RawPattern { - tokens: Vec, -} - -// Part of a search or replace pattern. -#[derive(Clone, Debug, PartialEq, Eq)] -pub(crate) enum PatternElement { - Token(Token), - Placeholder(Placeholder), -} - -#[derive(Clone, Debug, PartialEq, Eq)] -pub(crate) struct Placeholder { - /// The name of this placeholder. e.g. for "$a", this would be "a" - pub(crate) ident: SmolStr, - /// A unique name used in place of this placeholder when we parse the pattern as Rust code. 
- stand_in_name: String, - pub(crate) constraints: Vec, -} - -#[derive(Clone, Debug, PartialEq, Eq)] -pub(crate) enum Constraint { - Kind(NodeKind), - Not(Box), -} - -#[derive(Clone, Debug, PartialEq, Eq)] -pub(crate) enum NodeKind { - Literal, -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub(crate) struct Token { - kind: SyntaxKind, - pub(crate) text: SmolStr, -} - -impl ParsedRule { - fn new( - pattern: &RawPattern, - template: Option<&RawPattern>, - ) -> Result, SsrError> { - let raw_pattern = pattern.as_rust_code(); - let raw_template = template.map(|t| t.as_rust_code()); - let raw_template = raw_template.as_ref().map(|s| s.as_str()); - let mut builder = RuleBuilder { - placeholders_by_stand_in: pattern.placeholders_by_stand_in(), - rules: Vec::new(), - }; - builder.try_add(ast::Expr::parse(&raw_pattern), raw_template.map(ast::Expr::parse)); - builder.try_add(ast::Type::parse(&raw_pattern), raw_template.map(ast::Type::parse)); - builder.try_add(ast::Item::parse(&raw_pattern), raw_template.map(ast::Item::parse)); - builder.try_add(ast::Path::parse(&raw_pattern), raw_template.map(ast::Path::parse)); - builder.try_add(ast::Pat::parse(&raw_pattern), raw_template.map(ast::Pat::parse)); - builder.build() - } -} - -struct RuleBuilder { - placeholders_by_stand_in: FxHashMap, - rules: Vec, -} - -impl RuleBuilder { - fn try_add(&mut self, pattern: Result, template: Option>) { - match (pattern, template) { - (Ok(pattern), Some(Ok(template))) => self.rules.push(ParsedRule { - placeholders_by_stand_in: self.placeholders_by_stand_in.clone(), - pattern: pattern.syntax().clone(), - template: Some(template.syntax().clone()), - }), - (Ok(pattern), None) => self.rules.push(ParsedRule { - placeholders_by_stand_in: self.placeholders_by_stand_in.clone(), - pattern: pattern.syntax().clone(), - template: None, - }), - _ => {} - } - } - - fn build(mut self) -> Result, SsrError> { - if self.rules.is_empty() { - bail!("Not a valid Rust expression, type, item, path or pattern"); - } - // 
If any rules contain paths, then we reject any rules that don't contain paths. Allowing a - // mix leads to strange semantics, since the path-based rules only match things where the - // path refers to semantically the same thing, whereas the non-path-based rules could match - // anything. Specifically, if we have a rule like `foo ==>> bar` we only want to match the - // `foo` that is in the current scope, not any `foo`. However "foo" can be parsed as a - // pattern (IDENT_PAT -> NAME -> IDENT). Allowing such a rule through would result in - // renaming everything called `foo` to `bar`. It'd also be slow, since without a path, we'd - // have to use the slow-scan search mechanism. - if self.rules.iter().any(|rule| contains_path(&rule.pattern)) { - let old_len = self.rules.len(); - self.rules.retain(|rule| contains_path(&rule.pattern)); - if self.rules.len() < old_len { - mark::hit!(pattern_is_a_single_segment_path); - } - } - Ok(self.rules) - } -} - -/// Returns whether there are any paths in `node`. -fn contains_path(node: &SyntaxNode) -> bool { - node.kind() == SyntaxKind::PATH - || node.descendants().any(|node| node.kind() == SyntaxKind::PATH) -} - -impl FromStr for SsrRule { - type Err = SsrError; - - fn from_str(query: &str) -> Result { - let mut it = query.split("==>>"); - let pattern = it.next().expect("at least empty string").trim(); - let template = it - .next() - .ok_or_else(|| SsrError("Cannot find delimiter `==>>`".into()))? 
- .trim() - .to_string(); - if it.next().is_some() { - return Err(SsrError("More than one delimiter found".into())); - } - let raw_pattern = pattern.parse()?; - let raw_template = template.parse()?; - let parsed_rules = ParsedRule::new(&raw_pattern, Some(&raw_template))?; - let rule = SsrRule { pattern: raw_pattern, template: raw_template, parsed_rules }; - validate_rule(&rule)?; - Ok(rule) - } -} - -impl FromStr for RawPattern { - type Err = SsrError; - - fn from_str(pattern_str: &str) -> Result { - Ok(RawPattern { tokens: parse_pattern(pattern_str)? }) - } -} - -impl RawPattern { - /// Returns this search pattern as Rust source code that we can feed to the Rust parser. - fn as_rust_code(&self) -> String { - let mut res = String::new(); - for t in &self.tokens { - res.push_str(match t { - PatternElement::Token(token) => token.text.as_str(), - PatternElement::Placeholder(placeholder) => placeholder.stand_in_name.as_str(), - }); - } - res - } - - pub(crate) fn placeholders_by_stand_in(&self) -> FxHashMap { - let mut res = FxHashMap::default(); - for t in &self.tokens { - if let PatternElement::Placeholder(placeholder) = t { - res.insert(SmolStr::new(placeholder.stand_in_name.clone()), placeholder.clone()); - } - } - res - } -} - -impl FromStr for SsrPattern { - type Err = SsrError; - - fn from_str(pattern_str: &str) -> Result { - let raw_pattern = pattern_str.parse()?; - let parsed_rules = ParsedRule::new(&raw_pattern, None)?; - Ok(SsrPattern { raw: raw_pattern, parsed_rules }) - } -} - -/// Returns `pattern_str`, parsed as a search or replace pattern. If `remove_whitespace` is true, -/// then any whitespace tokens will be removed, which we do for the search pattern, but not for the -/// replace pattern. 
-fn parse_pattern(pattern_str: &str) -> Result, SsrError> { - let mut res = Vec::new(); - let mut placeholder_names = FxHashSet::default(); - let mut tokens = tokenize(pattern_str)?.into_iter(); - while let Some(token) = tokens.next() { - if token.kind == T![$] { - let placeholder = parse_placeholder(&mut tokens)?; - if !placeholder_names.insert(placeholder.ident.clone()) { - bail!("Name `{}` repeats more than once", placeholder.ident); - } - res.push(PatternElement::Placeholder(placeholder)); - } else { - res.push(PatternElement::Token(token)); - } - } - Ok(res) -} - -/// Checks for errors in a rule. e.g. the replace pattern referencing placeholders that the search -/// pattern didn't define. -fn validate_rule(rule: &SsrRule) -> Result<(), SsrError> { - let mut defined_placeholders = FxHashSet::default(); - for p in &rule.pattern.tokens { - if let PatternElement::Placeholder(placeholder) = p { - defined_placeholders.insert(&placeholder.ident); - } - } - let mut undefined = Vec::new(); - for p in &rule.template.tokens { - if let PatternElement::Placeholder(placeholder) = p { - if !defined_placeholders.contains(&placeholder.ident) { - undefined.push(format!("${}", placeholder.ident)); - } - if !placeholder.constraints.is_empty() { - bail!("Replacement placeholders cannot have constraints"); - } - } - } - if !undefined.is_empty() { - bail!("Replacement contains undefined placeholders: {}", undefined.join(", ")); - } - Ok(()) -} - -fn tokenize(source: &str) -> Result, SsrError> { - let mut start = 0; - let (raw_tokens, errors) = ra_syntax::tokenize(source); - if let Some(first_error) = errors.first() { - bail!("Failed to parse pattern: {}", first_error); - } - let mut tokens: Vec = Vec::new(); - for raw_token in raw_tokens { - let token_len = usize::from(raw_token.len); - tokens.push(Token { - kind: raw_token.kind, - text: SmolStr::new(&source[start..start + token_len]), - }); - start += token_len; - } - Ok(tokens) -} - -fn parse_placeholder(tokens: &mut 
std::vec::IntoIter) -> Result { - let mut name = None; - let mut constraints = Vec::new(); - if let Some(token) = tokens.next() { - match token.kind { - SyntaxKind::IDENT => { - name = Some(token.text); - } - T!['{'] => { - let token = - tokens.next().ok_or_else(|| SsrError::new("Unexpected end of placeholder"))?; - if token.kind == SyntaxKind::IDENT { - name = Some(token.text); - } - loop { - let token = tokens - .next() - .ok_or_else(|| SsrError::new("Placeholder is missing closing brace '}'"))?; - match token.kind { - T![:] => { - constraints.push(parse_constraint(tokens)?); - } - T!['}'] => break, - _ => bail!("Unexpected token while parsing placeholder: '{}'", token.text), - } - } - } - _ => { - bail!("Placeholders should either be $name or ${{name:constraints}}"); - } - } - } - let name = name.ok_or_else(|| SsrError::new("Placeholder ($) with no name"))?; - Ok(Placeholder::new(name, constraints)) -} - -fn parse_constraint(tokens: &mut std::vec::IntoIter) -> Result { - let constraint_type = tokens - .next() - .ok_or_else(|| SsrError::new("Found end of placeholder while looking for a constraint"))? 
- .text - .to_string(); - match constraint_type.as_str() { - "kind" => { - expect_token(tokens, "(")?; - let t = tokens.next().ok_or_else(|| { - SsrError::new("Unexpected end of constraint while looking for kind") - })?; - if t.kind != SyntaxKind::IDENT { - bail!("Expected ident, found {:?} while parsing kind constraint", t.kind); - } - expect_token(tokens, ")")?; - Ok(Constraint::Kind(NodeKind::from(&t.text)?)) - } - "not" => { - expect_token(tokens, "(")?; - let sub = parse_constraint(tokens)?; - expect_token(tokens, ")")?; - Ok(Constraint::Not(Box::new(sub))) - } - x => bail!("Unsupported constraint type '{}'", x), - } -} - -fn expect_token(tokens: &mut std::vec::IntoIter, expected: &str) -> Result<(), SsrError> { - if let Some(t) = tokens.next() { - if t.text == expected { - return Ok(()); - } - bail!("Expected {} found {}", expected, t.text); - } - bail!("Expected {} found end of stream", expected); -} - -impl NodeKind { - fn from(name: &SmolStr) -> Result { - Ok(match name.as_str() { - "literal" => NodeKind::Literal, - _ => bail!("Unknown node kind '{}'", name), - }) - } -} - -impl Placeholder { - fn new(name: SmolStr, constraints: Vec) -> Self { - Self { stand_in_name: format!("__placeholder_{}", name), constraints, ident: name } - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn parser_happy_case() { - fn token(kind: SyntaxKind, text: &str) -> PatternElement { - PatternElement::Token(Token { kind, text: SmolStr::new(text) }) - } - fn placeholder(name: &str) -> PatternElement { - PatternElement::Placeholder(Placeholder::new(SmolStr::new(name), Vec::new())) - } - let result: SsrRule = "foo($a, $b) ==>> bar($b, $a)".parse().unwrap(); - assert_eq!( - result.pattern.tokens, - vec![ - token(SyntaxKind::IDENT, "foo"), - token(T!['('], "("), - placeholder("a"), - token(T![,], ","), - token(SyntaxKind::WHITESPACE, " "), - placeholder("b"), - token(T![')'], ")"), - ] - ); - assert_eq!( - result.template.tokens, - vec![ - token(SyntaxKind::IDENT, 
"bar"), - token(T!['('], "("), - placeholder("b"), - token(T![,], ","), - token(SyntaxKind::WHITESPACE, " "), - placeholder("a"), - token(T![')'], ")"), - ] - ); - } -} diff --git a/crates/ra_ssr/src/replacing.rs b/crates/ra_ssr/src/replacing.rs deleted file mode 100644 index 0943244ff9..0000000000 --- a/crates/ra_ssr/src/replacing.rs +++ /dev/null @@ -1,194 +0,0 @@ -//! Code for applying replacement templates for matches that have previously been found. - -use crate::matching::Var; -use crate::{resolving::ResolvedRule, Match, SsrMatches}; -use ra_syntax::ast::{self, AstToken}; -use ra_syntax::{SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, TextRange, TextSize}; -use ra_text_edit::TextEdit; -use rustc_hash::{FxHashMap, FxHashSet}; - -/// Returns a text edit that will replace each match in `matches` with its corresponding replacement -/// template. Placeholders in the template will have been substituted with whatever they matched to -/// in the original code. -pub(crate) fn matches_to_edit( - matches: &SsrMatches, - file_src: &str, - rules: &[ResolvedRule], -) -> TextEdit { - matches_to_edit_at_offset(matches, file_src, 0.into(), rules) -} - -fn matches_to_edit_at_offset( - matches: &SsrMatches, - file_src: &str, - relative_start: TextSize, - rules: &[ResolvedRule], -) -> TextEdit { - let mut edit_builder = ra_text_edit::TextEditBuilder::default(); - for m in &matches.matches { - edit_builder.replace( - m.range.range.checked_sub(relative_start).unwrap(), - render_replace(m, file_src, rules), - ); - } - edit_builder.finish() -} - -struct ReplacementRenderer<'a> { - match_info: &'a Match, - file_src: &'a str, - rules: &'a [ResolvedRule], - rule: &'a ResolvedRule, - out: String, - // Map from a range within `out` to a token in `template` that represents a placeholder. This is - // used to validate that the generated source code doesn't split any placeholder expansions (see - // below). 
- placeholder_tokens_by_range: FxHashMap, - // Which placeholder tokens need to be wrapped in parenthesis in order to ensure that when `out` - // is parsed, placeholders don't get split. e.g. if a template of `$a.to_string()` results in `1 - // + 2.to_string()` then the placeholder value `1 + 2` was split and needs parenthesis. - placeholder_tokens_requiring_parenthesis: FxHashSet, -} - -fn render_replace(match_info: &Match, file_src: &str, rules: &[ResolvedRule]) -> String { - let rule = &rules[match_info.rule_index]; - let template = rule - .template - .as_ref() - .expect("You called MatchFinder::edits after calling MatchFinder::add_search_pattern"); - let mut renderer = ReplacementRenderer { - match_info, - file_src, - rules, - rule, - out: String::new(), - placeholder_tokens_requiring_parenthesis: FxHashSet::default(), - placeholder_tokens_by_range: FxHashMap::default(), - }; - renderer.render_node(&template.node); - renderer.maybe_rerender_with_extra_parenthesis(&template.node); - for comment in &match_info.ignored_comments { - renderer.out.push_str(&comment.syntax().to_string()); - } - renderer.out -} - -impl ReplacementRenderer<'_> { - fn render_node_children(&mut self, node: &SyntaxNode) { - for node_or_token in node.children_with_tokens() { - self.render_node_or_token(&node_or_token); - } - } - - fn render_node_or_token(&mut self, node_or_token: &SyntaxElement) { - match node_or_token { - SyntaxElement::Token(token) => { - self.render_token(&token); - } - SyntaxElement::Node(child_node) => { - self.render_node(&child_node); - } - } - } - - fn render_node(&mut self, node: &SyntaxNode) { - use ra_syntax::ast::AstNode; - if let Some(mod_path) = self.match_info.rendered_template_paths.get(&node) { - self.out.push_str(&mod_path.to_string()); - // Emit everything except for the segment's name-ref, since we already effectively - // emitted that as part of `mod_path`. 
- if let Some(path) = ast::Path::cast(node.clone()) { - if let Some(segment) = path.segment() { - for node_or_token in segment.syntax().children_with_tokens() { - if node_or_token.kind() != SyntaxKind::NAME_REF { - self.render_node_or_token(&node_or_token); - } - } - } - } - } else { - self.render_node_children(&node); - } - } - - fn render_token(&mut self, token: &SyntaxToken) { - if let Some(placeholder) = self.rule.get_placeholder(&token) { - if let Some(placeholder_value) = - self.match_info.placeholder_values.get(&Var(placeholder.ident.to_string())) - { - let range = &placeholder_value.range.range; - let mut matched_text = - self.file_src[usize::from(range.start())..usize::from(range.end())].to_owned(); - let edit = matches_to_edit_at_offset( - &placeholder_value.inner_matches, - self.file_src, - range.start(), - self.rules, - ); - let needs_parenthesis = - self.placeholder_tokens_requiring_parenthesis.contains(token); - edit.apply(&mut matched_text); - if needs_parenthesis { - self.out.push('('); - } - self.placeholder_tokens_by_range.insert( - TextRange::new( - TextSize::of(&self.out), - TextSize::of(&self.out) + TextSize::of(&matched_text), - ), - token.clone(), - ); - self.out.push_str(&matched_text); - if needs_parenthesis { - self.out.push(')'); - } - } else { - // We validated that all placeholder references were valid before we - // started, so this shouldn't happen. - panic!( - "Internal error: replacement referenced unknown placeholder {}", - placeholder.ident - ); - } - } else { - self.out.push_str(token.text().as_str()); - } - } - - // Checks if the resulting code, when parsed doesn't split any placeholders due to different - // order of operations between the search pattern and the replacement template. If any do, then - // we rerender the template and wrap the problematic placeholders with parenthesis. 
- fn maybe_rerender_with_extra_parenthesis(&mut self, template: &SyntaxNode) { - if let Some(node) = parse_as_kind(&self.out, template.kind()) { - self.remove_node_ranges(node); - if self.placeholder_tokens_by_range.is_empty() { - return; - } - self.placeholder_tokens_requiring_parenthesis = - self.placeholder_tokens_by_range.values().cloned().collect(); - self.out.clear(); - self.render_node(template); - } - } - - fn remove_node_ranges(&mut self, node: SyntaxNode) { - self.placeholder_tokens_by_range.remove(&node.text_range()); - for child in node.children() { - self.remove_node_ranges(child); - } - } -} - -fn parse_as_kind(code: &str, kind: SyntaxKind) -> Option { - use ra_syntax::ast::AstNode; - if ast::Expr::can_cast(kind) { - if let Ok(expr) = ast::Expr::parse(code) { - return Some(expr.syntax().clone()); - } - } else if ast::Item::can_cast(kind) { - if let Ok(item) = ast::Item::parse(code) { - return Some(item.syntax().clone()); - } - } - None -} diff --git a/crates/ra_ssr/src/resolving.rs b/crates/ra_ssr/src/resolving.rs deleted file mode 100644 index df60048eb2..0000000000 --- a/crates/ra_ssr/src/resolving.rs +++ /dev/null @@ -1,251 +0,0 @@ -//! This module is responsible for resolving paths within rules. - -use crate::errors::error; -use crate::{parsing, SsrError}; -use parsing::Placeholder; -use ra_db::FilePosition; -use ra_syntax::{ast, SmolStr, SyntaxKind, SyntaxNode, SyntaxToken}; -use rustc_hash::{FxHashMap, FxHashSet}; -use test_utils::mark; - -pub(crate) struct ResolutionScope<'db> { - scope: hir::SemanticsScope<'db>, - hygiene: hir::Hygiene, - node: SyntaxNode, -} - -pub(crate) struct ResolvedRule { - pub(crate) pattern: ResolvedPattern, - pub(crate) template: Option, - pub(crate) index: usize, -} - -pub(crate) struct ResolvedPattern { - pub(crate) placeholders_by_stand_in: FxHashMap, - pub(crate) node: SyntaxNode, - // Paths in `node` that we've resolved. 
- pub(crate) resolved_paths: FxHashMap, - pub(crate) ufcs_function_calls: FxHashMap, - pub(crate) contains_self: bool, -} - -pub(crate) struct ResolvedPath { - pub(crate) resolution: hir::PathResolution, - /// The depth of the ast::Path that was resolved within the pattern. - pub(crate) depth: u32, -} - -impl ResolvedRule { - pub(crate) fn new( - rule: parsing::ParsedRule, - resolution_scope: &ResolutionScope, - index: usize, - ) -> Result { - let resolver = - Resolver { resolution_scope, placeholders_by_stand_in: rule.placeholders_by_stand_in }; - let resolved_template = if let Some(template) = rule.template { - Some(resolver.resolve_pattern_tree(template)?) - } else { - None - }; - Ok(ResolvedRule { - pattern: resolver.resolve_pattern_tree(rule.pattern)?, - template: resolved_template, - index, - }) - } - - pub(crate) fn get_placeholder(&self, token: &SyntaxToken) -> Option<&Placeholder> { - if token.kind() != SyntaxKind::IDENT { - return None; - } - self.pattern.placeholders_by_stand_in.get(token.text()) - } -} - -struct Resolver<'a, 'db> { - resolution_scope: &'a ResolutionScope<'db>, - placeholders_by_stand_in: FxHashMap, -} - -impl Resolver<'_, '_> { - fn resolve_pattern_tree(&self, pattern: SyntaxNode) -> Result { - use ra_syntax::{SyntaxElement, T}; - let mut resolved_paths = FxHashMap::default(); - self.resolve(pattern.clone(), 0, &mut resolved_paths)?; - let ufcs_function_calls = resolved_paths - .iter() - .filter_map(|(path_node, resolved)| { - if let Some(grandparent) = path_node.parent().and_then(|parent| parent.parent()) { - if grandparent.kind() == SyntaxKind::CALL_EXPR { - if let hir::PathResolution::AssocItem(hir::AssocItem::Function(function)) = - &resolved.resolution - { - return Some((grandparent, *function)); - } - } - } - None - }) - .collect(); - let contains_self = - pattern.descendants_with_tokens().any(|node_or_token| match node_or_token { - SyntaxElement::Token(t) => t.kind() == T![self], - _ => false, - }); - Ok(ResolvedPattern { - node: 
pattern, - resolved_paths, - placeholders_by_stand_in: self.placeholders_by_stand_in.clone(), - ufcs_function_calls, - contains_self, - }) - } - - fn resolve( - &self, - node: SyntaxNode, - depth: u32, - resolved_paths: &mut FxHashMap, - ) -> Result<(), SsrError> { - use ra_syntax::ast::AstNode; - if let Some(path) = ast::Path::cast(node.clone()) { - if is_self(&path) { - // Self cannot be resolved like other paths. - return Ok(()); - } - // Check if this is an appropriate place in the path to resolve. If the path is - // something like `a::B::::c` then we want to resolve `a::B`. If the path contains - // a placeholder. e.g. `a::$b::c` then we want to resolve `a`. - if !path_contains_type_arguments(path.qualifier()) - && !self.path_contains_placeholder(&path) - { - let resolution = self - .resolution_scope - .resolve_path(&path) - .ok_or_else(|| error!("Failed to resolve path `{}`", node.text()))?; - resolved_paths.insert(node, ResolvedPath { resolution, depth }); - return Ok(()); - } - } - for node in node.children() { - self.resolve(node, depth + 1, resolved_paths)?; - } - Ok(()) - } - - /// Returns whether `path` contains a placeholder, but ignores any placeholders within type - /// arguments. - fn path_contains_placeholder(&self, path: &ast::Path) -> bool { - if let Some(segment) = path.segment() { - if let Some(name_ref) = segment.name_ref() { - if self.placeholders_by_stand_in.contains_key(name_ref.text()) { - return true; - } - } - } - if let Some(qualifier) = path.qualifier() { - return self.path_contains_placeholder(&qualifier); - } - false - } -} - -impl<'db> ResolutionScope<'db> { - pub(crate) fn new( - sema: &hir::Semantics<'db, ra_ide_db::RootDatabase>, - resolve_context: FilePosition, - ) -> ResolutionScope<'db> { - use ra_syntax::ast::AstNode; - let file = sema.parse(resolve_context.file_id); - // Find a node at the requested position, falling back to the whole file. 
- let node = file - .syntax() - .token_at_offset(resolve_context.offset) - .left_biased() - .map(|token| token.parent()) - .unwrap_or_else(|| file.syntax().clone()); - let node = pick_node_for_resolution(node); - let scope = sema.scope(&node); - ResolutionScope { - scope, - hygiene: hir::Hygiene::new(sema.db, resolve_context.file_id.into()), - node, - } - } - - /// Returns the function in which SSR was invoked, if any. - pub(crate) fn current_function(&self) -> Option { - self.node.ancestors().find(|node| node.kind() == SyntaxKind::FN).map(|node| node.clone()) - } - - fn resolve_path(&self, path: &ast::Path) -> Option { - let hir_path = hir::Path::from_src(path.clone(), &self.hygiene)?; - // First try resolving the whole path. This will work for things like - // `std::collections::HashMap`, but will fail for things like - // `std::collections::HashMap::new`. - if let Some(resolution) = self.scope.resolve_hir_path(&hir_path) { - return Some(resolution); - } - // Resolution failed, try resolving the qualifier (e.g. `std::collections::HashMap` and if - // that succeeds, then iterate through the candidates on the resolved type with the provided - // name. - let resolved_qualifier = self.scope.resolve_hir_path_qualifier(&hir_path.qualifier()?)?; - if let hir::PathResolution::Def(hir::ModuleDef::Adt(adt)) = resolved_qualifier { - adt.ty(self.scope.db).iterate_path_candidates( - self.scope.db, - self.scope.module()?.krate(), - &FxHashSet::default(), - Some(hir_path.segments().last()?.name), - |_ty, assoc_item| Some(hir::PathResolution::AssocItem(assoc_item)), - ) - } else { - None - } - } -} - -fn is_self(path: &ast::Path) -> bool { - path.segment().map(|segment| segment.self_token().is_some()).unwrap_or(false) -} - -/// Returns a suitable node for resolving paths in the current scope. If we create a scope based on -/// a statement node, then we can't resolve local variables that were defined in the current scope -/// (only in parent scopes). 
So we find another node, ideally a child of the statement where local -/// variable resolution is permitted. -fn pick_node_for_resolution(node: SyntaxNode) -> SyntaxNode { - match node.kind() { - SyntaxKind::EXPR_STMT => { - if let Some(n) = node.first_child() { - mark::hit!(cursor_after_semicolon); - return n; - } - } - SyntaxKind::LET_STMT | SyntaxKind::IDENT_PAT => { - if let Some(next) = node.next_sibling() { - return pick_node_for_resolution(next); - } - } - SyntaxKind::NAME => { - if let Some(parent) = node.parent() { - return pick_node_for_resolution(parent); - } - } - _ => {} - } - node -} - -/// Returns whether `path` or any of its qualifiers contains type arguments. -fn path_contains_type_arguments(path: Option) -> bool { - if let Some(path) = path { - if let Some(segment) = path.segment() { - if segment.generic_arg_list().is_some() { - mark::hit!(type_arguments_within_path); - return true; - } - } - return path_contains_type_arguments(path.qualifier()); - } - false -} diff --git a/crates/ra_ssr/src/search.rs b/crates/ra_ssr/src/search.rs deleted file mode 100644 index 85ffa2ac23..0000000000 --- a/crates/ra_ssr/src/search.rs +++ /dev/null @@ -1,282 +0,0 @@ -//! Searching for matches. - -use crate::{ - matching, - resolving::{ResolvedPath, ResolvedPattern, ResolvedRule}, - Match, MatchFinder, -}; -use ra_db::{FileId, FileRange}; -use ra_ide_db::{ - defs::Definition, - search::{Reference, SearchScope}, -}; -use ra_syntax::{ast, AstNode, SyntaxKind, SyntaxNode}; -use rustc_hash::FxHashSet; -use test_utils::mark; - -/// A cache for the results of find_usages. This is for when we have multiple patterns that have the -/// same path. e.g. if the pattern was `foo::Bar` that can parse as a path, an expression, a type -/// and as a pattern. In each, the usages of `foo::Bar` are the same and we'd like to avoid finding -/// them more than once. 
-#[derive(Default)] -pub(crate) struct UsageCache { - usages: Vec<(Definition, Vec)>, -} - -impl<'db> MatchFinder<'db> { - /// Adds all matches for `rule` to `matches_out`. Matches may overlap in ways that make - /// replacement impossible, so further processing is required in order to properly nest matches - /// and remove overlapping matches. This is done in the `nesting` module. - pub(crate) fn find_matches_for_rule( - &self, - rule: &ResolvedRule, - usage_cache: &mut UsageCache, - matches_out: &mut Vec, - ) { - if rule.pattern.contains_self { - // If the pattern contains `self` we restrict the scope of the search to just the - // current method. No other method can reference the same `self`. This makes the - // behavior of `self` consistent with other variables. - if let Some(current_function) = self.resolution_scope.current_function() { - self.slow_scan_node(¤t_function, rule, &None, matches_out); - } - return; - } - if pick_path_for_usages(&rule.pattern).is_none() { - self.slow_scan(rule, matches_out); - return; - } - self.find_matches_for_pattern_tree(rule, &rule.pattern, usage_cache, matches_out); - } - - fn find_matches_for_pattern_tree( - &self, - rule: &ResolvedRule, - pattern: &ResolvedPattern, - usage_cache: &mut UsageCache, - matches_out: &mut Vec, - ) { - if let Some(resolved_path) = pick_path_for_usages(pattern) { - let definition: Definition = resolved_path.resolution.clone().into(); - for reference in self.find_usages(usage_cache, definition) { - if let Some(node_to_match) = self.find_node_to_match(resolved_path, reference) { - if !is_search_permitted_ancestors(&node_to_match) { - mark::hit!(use_declaration_with_braces); - continue; - } - self.try_add_match(rule, &node_to_match, &None, matches_out); - } - } - } - } - - fn find_node_to_match( - &self, - resolved_path: &ResolvedPath, - reference: &Reference, - ) -> Option { - let file = self.sema.parse(reference.file_range.file_id); - let depth = resolved_path.depth as usize; - let offset = 
reference.file_range.range.start(); - if let Some(path) = - self.sema.find_node_at_offset_with_descend::(file.syntax(), offset) - { - self.sema.ancestors_with_macros(path.syntax().clone()).skip(depth).next() - } else if let Some(path) = - self.sema.find_node_at_offset_with_descend::(file.syntax(), offset) - { - // If the pattern contained a path and we found a reference to that path that wasn't - // itself a path, but was a method call, then we need to adjust how far up to try - // matching by how deep the path was within a CallExpr. The structure would have been - // CallExpr, PathExpr, Path - i.e. a depth offset of 2. We don't need to check if the - // path was part of a CallExpr because if it wasn't then all that will happen is we'll - // fail to match, which is the desired behavior. - const PATH_DEPTH_IN_CALL_EXPR: usize = 2; - if depth < PATH_DEPTH_IN_CALL_EXPR { - return None; - } - self.sema - .ancestors_with_macros(path.syntax().clone()) - .skip(depth - PATH_DEPTH_IN_CALL_EXPR) - .next() - } else { - None - } - } - - fn find_usages<'a>( - &self, - usage_cache: &'a mut UsageCache, - definition: Definition, - ) -> &'a [Reference] { - // Logically if a lookup succeeds we should just return it. Unfortunately returning it would - // extend the lifetime of the borrow, then we wouldn't be able to do the insertion on a - // cache miss. This is a limitation of NLL and is fixed with Polonius. For now we do two - // lookups in the case of a cache hit. - if usage_cache.find(&definition).is_none() { - let usages = definition.find_usages(&self.sema, Some(self.search_scope())); - usage_cache.usages.push((definition, usages)); - return &usage_cache.usages.last().unwrap().1; - } - usage_cache.find(&definition).unwrap() - } - - /// Returns the scope within which we want to search. We don't want un unrestricted search - /// scope, since we don't want to find references in external dependencies. 
- fn search_scope(&self) -> SearchScope { - // FIXME: We should ideally have a test that checks that we edit local roots and not library - // roots. This probably would require some changes to fixtures, since currently everything - // seems to get put into a single source root. - let mut files = Vec::new(); - self.search_files_do(|file_id| { - files.push(file_id); - }); - SearchScope::files(&files) - } - - fn slow_scan(&self, rule: &ResolvedRule, matches_out: &mut Vec) { - self.search_files_do(|file_id| { - let file = self.sema.parse(file_id); - let code = file.syntax(); - self.slow_scan_node(code, rule, &None, matches_out); - }) - } - - fn search_files_do(&self, mut callback: impl FnMut(FileId)) { - if self.restrict_ranges.is_empty() { - // Unrestricted search. - use ra_db::SourceDatabaseExt; - use ra_ide_db::symbol_index::SymbolsDatabase; - for &root in self.sema.db.local_roots().iter() { - let sr = self.sema.db.source_root(root); - for file_id in sr.iter() { - callback(file_id); - } - } - } else { - // Search is restricted, deduplicate file IDs (generally only one). - let mut files = FxHashSet::default(); - for range in &self.restrict_ranges { - if files.insert(range.file_id) { - callback(range.file_id); - } - } - } - } - - fn slow_scan_node( - &self, - code: &SyntaxNode, - rule: &ResolvedRule, - restrict_range: &Option, - matches_out: &mut Vec, - ) { - if !is_search_permitted(code) { - return; - } - self.try_add_match(rule, &code, restrict_range, matches_out); - // If we've got a macro call, we already tried matching it pre-expansion, which is the only - // way to match the whole macro, now try expanding it and matching the expansion. - if let Some(macro_call) = ast::MacroCall::cast(code.clone()) { - if let Some(expanded) = self.sema.expand(¯o_call) { - if let Some(tt) = macro_call.token_tree() { - // When matching within a macro expansion, we only want to allow matches of - // nodes that originated entirely from within the token tree of the macro call. 
- // i.e. we don't want to match something that came from the macro itself. - self.slow_scan_node( - &expanded, - rule, - &Some(self.sema.original_range(tt.syntax())), - matches_out, - ); - } - } - } - for child in code.children() { - self.slow_scan_node(&child, rule, restrict_range, matches_out); - } - } - - fn try_add_match( - &self, - rule: &ResolvedRule, - code: &SyntaxNode, - restrict_range: &Option, - matches_out: &mut Vec, - ) { - if !self.within_range_restrictions(code) { - mark::hit!(replace_nonpath_within_selection); - return; - } - if let Ok(m) = matching::get_match(false, rule, code, restrict_range, &self.sema) { - matches_out.push(m); - } - } - - /// Returns whether `code` is within one of our range restrictions if we have any. No range - /// restrictions is considered unrestricted and always returns true. - fn within_range_restrictions(&self, code: &SyntaxNode) -> bool { - if self.restrict_ranges.is_empty() { - // There is no range restriction. - return true; - } - let node_range = self.sema.original_range(code); - for range in &self.restrict_ranges { - if range.file_id == node_range.file_id && range.range.contains_range(node_range.range) { - return true; - } - } - false - } -} - -/// Returns whether we support matching within `node` and all of its ancestors. -fn is_search_permitted_ancestors(node: &SyntaxNode) -> bool { - if let Some(parent) = node.parent() { - if !is_search_permitted_ancestors(&parent) { - return false; - } - } - is_search_permitted(node) -} - -/// Returns whether we support matching within this kind of node. -fn is_search_permitted(node: &SyntaxNode) -> bool { - // FIXME: Properly handle use declarations. At the moment, if our search pattern is `foo::bar` - // and the code is `use foo::{baz, bar}`, we'll match `bar`, since it resolves to `foo::bar`. - // However we'll then replace just the part we matched `bar`. We probably need to instead remove - // `bar` and insert a new use declaration. 
- node.kind() != SyntaxKind::USE -} - -impl UsageCache { - fn find(&mut self, definition: &Definition) -> Option<&[Reference]> { - // We expect a very small number of cache entries (generally 1), so a linear scan should be - // fast enough and avoids the need to implement Hash for Definition. - for (d, refs) in &self.usages { - if d == definition { - return Some(refs); - } - } - None - } -} - -/// Returns a path that's suitable for path resolution. We exclude builtin types, since they aren't -/// something that we can find references to. We then somewhat arbitrarily pick the path that is the -/// longest as this is hopefully more likely to be less common, making it faster to find. -fn pick_path_for_usages(pattern: &ResolvedPattern) -> Option<&ResolvedPath> { - // FIXME: Take the scope of the resolved path into account. e.g. if there are any paths that are - // private to the current module, then we definitely would want to pick them over say a path - // from std. Possibly we should go further than this and intersect the search scopes for all - // resolved paths then search only in that scope. 
- pattern - .resolved_paths - .iter() - .filter(|(_, p)| { - !matches!(p.resolution, hir::PathResolution::Def(hir::ModuleDef::BuiltinType(_))) - }) - .map(|(node, resolved)| (node.text().len(), resolved)) - .max_by(|(a, _), (b, _)| a.cmp(b)) - .map(|(_, resolved)| resolved) -} diff --git a/crates/ra_ssr/src/tests.rs b/crates/ra_ssr/src/tests.rs deleted file mode 100644 index d483640df1..0000000000 --- a/crates/ra_ssr/src/tests.rs +++ /dev/null @@ -1,1081 +0,0 @@ -use crate::{MatchFinder, SsrRule}; -use expect::{expect, Expect}; -use ra_db::{salsa::Durability, FileId, FilePosition, FileRange, SourceDatabaseExt}; -use rustc_hash::FxHashSet; -use std::sync::Arc; -use test_utils::{mark, RangeOrOffset}; - -fn parse_error_text(query: &str) -> String { - format!("{}", query.parse::().unwrap_err()) -} - -#[test] -fn parser_empty_query() { - assert_eq!(parse_error_text(""), "Parse error: Cannot find delimiter `==>>`"); -} - -#[test] -fn parser_no_delimiter() { - assert_eq!(parse_error_text("foo()"), "Parse error: Cannot find delimiter `==>>`"); -} - -#[test] -fn parser_two_delimiters() { - assert_eq!( - parse_error_text("foo() ==>> a ==>> b "), - "Parse error: More than one delimiter found" - ); -} - -#[test] -fn parser_repeated_name() { - assert_eq!( - parse_error_text("foo($a, $a) ==>>"), - "Parse error: Name `a` repeats more than once" - ); -} - -#[test] -fn parser_invalid_pattern() { - assert_eq!( - parse_error_text(" ==>> ()"), - "Parse error: Not a valid Rust expression, type, item, path or pattern" - ); -} - -#[test] -fn parser_invalid_template() { - assert_eq!( - parse_error_text("() ==>> )"), - "Parse error: Not a valid Rust expression, type, item, path or pattern" - ); -} - -#[test] -fn parser_undefined_placeholder_in_replacement() { - assert_eq!( - parse_error_text("42 ==>> $a"), - "Parse error: Replacement contains undefined placeholders: $a" - ); -} - -/// `code` may optionally contain a cursor marker `<|>`. 
If it doesn't, then the position will be -/// the start of the file. If there's a second cursor marker, then we'll return a single range. -pub(crate) fn single_file(code: &str) -> (ra_ide_db::RootDatabase, FilePosition, Vec) { - use ra_db::fixture::WithFixture; - use ra_ide_db::symbol_index::SymbolsDatabase; - let (mut db, file_id, range_or_offset) = if code.contains(test_utils::CURSOR_MARKER) { - ra_ide_db::RootDatabase::with_range_or_offset(code) - } else { - let (db, file_id) = ra_ide_db::RootDatabase::with_single_file(code); - (db, file_id, RangeOrOffset::Offset(0.into())) - }; - let selections; - let position; - match range_or_offset { - RangeOrOffset::Range(range) => { - position = FilePosition { file_id, offset: range.start() }; - selections = vec![FileRange { file_id, range: range }]; - } - RangeOrOffset::Offset(offset) => { - position = FilePosition { file_id, offset }; - selections = vec![]; - } - } - let mut local_roots = FxHashSet::default(); - local_roots.insert(ra_db::fixture::WORKSPACE); - db.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH); - (db, position, selections) -} - -fn assert_ssr_transform(rule: &str, input: &str, expected: Expect) { - assert_ssr_transforms(&[rule], input, expected); -} - -fn assert_ssr_transforms(rules: &[&str], input: &str, expected: Expect) { - let (db, position, selections) = single_file(input); - let mut match_finder = MatchFinder::in_context(&db, position, selections); - for rule in rules { - let rule: SsrRule = rule.parse().unwrap(); - match_finder.add_rule(rule).unwrap(); - } - let edits = match_finder.edits(); - if edits.is_empty() { - panic!("No edits were made"); - } - assert_eq!(edits[0].file_id, position.file_id); - // Note, db.file_text is not necessarily the same as `input`, since fixture parsing alters - // stuff. 
- let mut actual = db.file_text(position.file_id).to_string(); - edits[0].edit.apply(&mut actual); - expected.assert_eq(&actual); -} - -fn print_match_debug_info(match_finder: &MatchFinder, file_id: FileId, snippet: &str) { - let debug_info = match_finder.debug_where_text_equal(file_id, snippet); - println!( - "Match debug info: {} nodes had text exactly equal to '{}'", - debug_info.len(), - snippet - ); - for (index, d) in debug_info.iter().enumerate() { - println!("Node #{}\n{:#?}\n", index, d); - } -} - -fn assert_matches(pattern: &str, code: &str, expected: &[&str]) { - let (db, position, selections) = single_file(code); - let mut match_finder = MatchFinder::in_context(&db, position, selections); - match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap(); - let matched_strings: Vec = - match_finder.matches().flattened().matches.iter().map(|m| m.matched_text()).collect(); - if matched_strings != expected && !expected.is_empty() { - print_match_debug_info(&match_finder, position.file_id, &expected[0]); - } - assert_eq!(matched_strings, expected); -} - -fn assert_no_match(pattern: &str, code: &str) { - let (db, position, selections) = single_file(code); - let mut match_finder = MatchFinder::in_context(&db, position, selections); - match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap(); - let matches = match_finder.matches().flattened().matches; - if !matches.is_empty() { - print_match_debug_info(&match_finder, position.file_id, &matches[0].matched_text()); - panic!("Got {} matches when we expected none: {:#?}", matches.len(), matches); - } -} - -fn assert_match_failure_reason(pattern: &str, code: &str, snippet: &str, expected_reason: &str) { - let (db, position, selections) = single_file(code); - let mut match_finder = MatchFinder::in_context(&db, position, selections); - match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap(); - let mut reasons = Vec::new(); - for d in match_finder.debug_where_text_equal(position.file_id, snippet) 
{ - if let Some(reason) = d.match_failure_reason() { - reasons.push(reason.to_owned()); - } - } - assert_eq!(reasons, vec![expected_reason]); -} - -#[test] -fn ssr_function_to_method() { - assert_ssr_transform( - "my_function($a, $b) ==>> ($a).my_method($b)", - "fn my_function() {} fn main() { loop { my_function( other_func(x, y), z + w) } }", - expect![["fn my_function() {} fn main() { loop { (other_func(x, y)).my_method(z + w) } }"]], - ) -} - -#[test] -fn ssr_nested_function() { - assert_ssr_transform( - "foo($a, $b, $c) ==>> bar($c, baz($a, $b))", - r#" - //- /lib.rs crate:foo - fn foo() {} - fn bar() {} - fn baz() {} - fn main { foo (x + value.method(b), x+y-z, true && false) } - "#, - expect![[r#" - fn foo() {} - fn bar() {} - fn baz() {} - fn main { bar(true && false, baz(x + value.method(b), x+y-z)) } - "#]], - ) -} - -#[test] -fn ssr_expected_spacing() { - assert_ssr_transform( - "foo($x) + bar() ==>> bar($x)", - "fn foo() {} fn bar() {} fn main() { foo(5) + bar() }", - expect![["fn foo() {} fn bar() {} fn main() { bar(5) }"]], - ); -} - -#[test] -fn ssr_with_extra_space() { - assert_ssr_transform( - "foo($x ) + bar() ==>> bar($x)", - "fn foo() {} fn bar() {} fn main() { foo( 5 ) +bar( ) }", - expect![["fn foo() {} fn bar() {} fn main() { bar(5) }"]], - ); -} - -#[test] -fn ssr_keeps_nested_comment() { - assert_ssr_transform( - "foo($x) ==>> bar($x)", - "fn foo() {} fn bar() {} fn main() { foo(other(5 /* using 5 */)) }", - expect![["fn foo() {} fn bar() {} fn main() { bar(other(5 /* using 5 */)) }"]], - ) -} - -#[test] -fn ssr_keeps_comment() { - assert_ssr_transform( - "foo($x) ==>> bar($x)", - "fn foo() {} fn bar() {} fn main() { foo(5 /* using 5 */) }", - expect![["fn foo() {} fn bar() {} fn main() { bar(5)/* using 5 */ }"]], - ) -} - -#[test] -fn ssr_struct_lit() { - assert_ssr_transform( - "Foo{a: $a, b: $b} ==>> Foo::new($a, $b)", - r#" - struct Foo() {} - impl Foo { fn new() {} } - fn main() { Foo{b:2, a:1} } - "#, - expect![[r#" - struct Foo() {} - 
impl Foo { fn new() {} } - fn main() { Foo::new(1, 2) } - "#]], - ) -} - -#[test] -fn ignores_whitespace() { - assert_matches("1+2", "fn f() -> i32 {1 + 2}", &["1 + 2"]); - assert_matches("1 + 2", "fn f() -> i32 {1+2}", &["1+2"]); -} - -#[test] -fn no_match() { - assert_no_match("1 + 3", "fn f() -> i32 {1 + 2}"); -} - -#[test] -fn match_fn_definition() { - assert_matches("fn $a($b: $t) {$c}", "fn f(a: i32) {bar()}", &["fn f(a: i32) {bar()}"]); -} - -#[test] -fn match_struct_definition() { - let code = r#" - struct Option {} - struct Bar {} - struct Foo {name: Option}"#; - assert_matches("struct $n {$f: Option}", code, &["struct Foo {name: Option}"]); -} - -#[test] -fn match_expr() { - let code = r#" - fn foo() {} - fn f() -> i32 {foo(40 + 2, 42)}"#; - assert_matches("foo($a, $b)", code, &["foo(40 + 2, 42)"]); - assert_no_match("foo($a, $b, $c)", code); - assert_no_match("foo($a)", code); -} - -#[test] -fn match_nested_method_calls() { - assert_matches( - "$a.z().z().z()", - "fn f() {h().i().j().z().z().z().d().e()}", - &["h().i().j().z().z().z()"], - ); -} - -// Make sure that our node matching semantics don't differ within macro calls. -#[test] -fn match_nested_method_calls_with_macro_call() { - assert_matches( - "$a.z().z().z()", - r#" - macro_rules! m1 { ($a:expr) => {$a}; } - fn f() {m1!(h().i().j().z().z().z().d().e())}"#, - &["h().i().j().z().z().z()"], - ); -} - -#[test] -fn match_complex_expr() { - let code = r#" - fn foo() {} fn bar() {} - fn f() -> i32 {foo(bar(40, 2), 42)}"#; - assert_matches("foo($a, $b)", code, &["foo(bar(40, 2), 42)"]); - assert_no_match("foo($a, $b, $c)", code); - assert_no_match("foo($a)", code); - assert_matches("bar($a, $b)", code, &["bar(40, 2)"]); -} - -// Trailing commas in the code should be ignored. -#[test] -fn match_with_trailing_commas() { - // Code has comma, pattern doesn't. 
- assert_matches("foo($a, $b)", "fn foo() {} fn f() {foo(1, 2,);}", &["foo(1, 2,)"]); - assert_matches("Foo{$a, $b}", "struct Foo {} fn f() {Foo{1, 2,};}", &["Foo{1, 2,}"]); - - // Pattern has comma, code doesn't. - assert_matches("foo($a, $b,)", "fn foo() {} fn f() {foo(1, 2);}", &["foo(1, 2)"]); - assert_matches("Foo{$a, $b,}", "struct Foo {} fn f() {Foo{1, 2};}", &["Foo{1, 2}"]); -} - -#[test] -fn match_type() { - assert_matches("i32", "fn f() -> i32 {1 + 2}", &["i32"]); - assert_matches( - "Option<$a>", - "struct Option {} fn f() -> Option {42}", - &["Option"], - ); - assert_no_match( - "Option<$a>", - "struct Option {} struct Result {} fn f() -> Result {42}", - ); -} - -#[test] -fn match_struct_instantiation() { - let code = r#" - struct Foo {bar: i32, baz: i32} - fn f() {Foo {bar: 1, baz: 2}}"#; - assert_matches("Foo {bar: 1, baz: 2}", code, &["Foo {bar: 1, baz: 2}"]); - // Now with placeholders for all parts of the struct. - assert_matches("Foo {$a: $b, $c: $d}", code, &["Foo {bar: 1, baz: 2}"]); - assert_matches("Foo {}", "struct Foo {} fn f() {Foo {}}", &["Foo {}"]); -} - -#[test] -fn match_path() { - let code = r#" - mod foo { - pub fn bar() {} - } - fn f() {foo::bar(42)}"#; - assert_matches("foo::bar", code, &["foo::bar"]); - assert_matches("$a::bar", code, &["foo::bar"]); - assert_matches("foo::$b", code, &["foo::bar"]); -} - -#[test] -fn match_pattern() { - assert_matches("Some($a)", "struct Some(); fn f() {if let Some(x) = foo() {}}", &["Some(x)"]); -} - -// If our pattern has a full path, e.g. a::b::c() and the code has c(), but c resolves to -// a::b::c, then we should match. 
-#[test] -fn match_fully_qualified_fn_path() { - let code = r#" - mod a { - pub mod b { - pub fn c(_: i32) {} - } - } - use a::b::c; - fn f1() { - c(42); - } - "#; - assert_matches("a::b::c($a)", code, &["c(42)"]); -} - -#[test] -fn match_resolved_type_name() { - let code = r#" - mod m1 { - pub mod m2 { - pub trait Foo {} - } - } - mod m3 { - trait Foo {} - fn f1(f: Option<&dyn Foo>) {} - } - mod m4 { - use crate::m1::m2::Foo; - fn f1(f: Option<&dyn Foo>) {} - } - "#; - assert_matches("m1::m2::Foo<$t>", code, &["Foo"]); -} - -#[test] -fn type_arguments_within_path() { - mark::check!(type_arguments_within_path); - let code = r#" - mod foo { - pub struct Bar {t: T} - impl Bar { - pub fn baz() {} - } - } - fn f1() {foo::Bar::::baz();} - "#; - assert_no_match("foo::Bar::::baz()", code); - assert_matches("foo::Bar::::baz()", code, &["foo::Bar::::baz()"]); -} - -#[test] -fn literal_constraint() { - mark::check!(literal_constraint); - let code = r#" - enum Option { Some(T), None } - use Option::Some; - fn f1() { - let x1 = Some(42); - let x2 = Some("foo"); - let x3 = Some(x1); - let x4 = Some(40 + 2); - let x5 = Some(true); - } - "#; - assert_matches("Some(${a:kind(literal)})", code, &["Some(42)", "Some(\"foo\")", "Some(true)"]); - assert_matches("Some(${a:not(kind(literal))})", code, &["Some(x1)", "Some(40 + 2)"]); -} - -#[test] -fn match_reordered_struct_instantiation() { - assert_matches( - "Foo {aa: 1, b: 2, ccc: 3}", - "struct Foo {} fn f() {Foo {b: 2, ccc: 3, aa: 1}}", - &["Foo {b: 2, ccc: 3, aa: 1}"], - ); - assert_no_match("Foo {a: 1}", "struct Foo {} fn f() {Foo {b: 1}}"); - assert_no_match("Foo {a: 1}", "struct Foo {} fn f() {Foo {a: 2}}"); - assert_no_match("Foo {a: 1, b: 2}", "struct Foo {} fn f() {Foo {a: 1}}"); - assert_no_match("Foo {a: 1, b: 2}", "struct Foo {} fn f() {Foo {b: 2}}"); - assert_no_match("Foo {a: 1, }", "struct Foo {} fn f() {Foo {a: 1, b: 2}}"); - assert_no_match("Foo {a: 1, z: 9}", "struct Foo {} fn f() {Foo {a: 1}}"); -} - -#[test] -fn 
match_macro_invocation() { - assert_matches( - "foo!($a)", - "macro_rules! foo {() => {}} fn() {foo(foo!(foo()))}", - &["foo!(foo())"], - ); - assert_matches( - "foo!(41, $a, 43)", - "macro_rules! foo {() => {}} fn() {foo!(41, 42, 43)}", - &["foo!(41, 42, 43)"], - ); - assert_no_match("foo!(50, $a, 43)", "macro_rules! foo {() => {}} fn() {foo!(41, 42, 43}"); - assert_no_match("foo!(41, $a, 50)", "macro_rules! foo {() => {}} fn() {foo!(41, 42, 43}"); - assert_matches( - "foo!($a())", - "macro_rules! foo {() => {}} fn() {foo!(bar())}", - &["foo!(bar())"], - ); -} - -// When matching within a macro expansion, we only allow matches of nodes that originated from -// the macro call, not from the macro definition. -#[test] -fn no_match_expression_from_macro() { - assert_no_match( - "$a.clone()", - r#" - macro_rules! m1 { - () => {42.clone()} - } - fn f1() {m1!()} - "#, - ); -} - -// We definitely don't want to allow matching of an expression that part originates from the -// macro call `42` and part from the macro definition `.clone()`. -#[test] -fn no_match_split_expression() { - assert_no_match( - "$a.clone()", - r#" - macro_rules! m1 { - ($x:expr) => {$x.clone()} - } - fn f1() {m1!(42)} - "#, - ); -} - -#[test] -fn replace_function_call() { - // This test also makes sure that we ignore empty-ranges. 
- assert_ssr_transform( - "foo() ==>> bar()", - "fn foo() {<|><|>} fn bar() {} fn f1() {foo(); foo();}", - expect![["fn foo() {} fn bar() {} fn f1() {bar(); bar();}"]], - ); -} - -#[test] -fn replace_function_call_with_placeholders() { - assert_ssr_transform( - "foo($a, $b) ==>> bar($b, $a)", - "fn foo() {} fn bar() {} fn f1() {foo(5, 42)}", - expect![["fn foo() {} fn bar() {} fn f1() {bar(42, 5)}"]], - ); -} - -#[test] -fn replace_nested_function_calls() { - assert_ssr_transform( - "foo($a) ==>> bar($a)", - "fn foo() {} fn bar() {} fn f1() {foo(foo(42))}", - expect![["fn foo() {} fn bar() {} fn f1() {bar(bar(42))}"]], - ); -} - -#[test] -fn replace_associated_function_call() { - assert_ssr_transform( - "Foo::new() ==>> Bar::new()", - r#" - struct Foo {} - impl Foo { fn new() {} } - struct Bar {} - impl Bar { fn new() {} } - fn f1() {Foo::new();} - "#, - expect![[r#" - struct Foo {} - impl Foo { fn new() {} } - struct Bar {} - impl Bar { fn new() {} } - fn f1() {Bar::new();} - "#]], - ); -} - -#[test] -fn replace_path_in_different_contexts() { - // Note the <|> inside module a::b which marks the point where the rule is interpreted. We - // replace foo with bar, but both need different path qualifiers in different contexts. In f4, - // foo is unqualified because of a use statement, however the replacement needs to be fully - // qualified. 
- assert_ssr_transform( - "c::foo() ==>> c::bar()", - r#" - mod a { - pub mod b {<|> - pub mod c { - pub fn foo() {} - pub fn bar() {} - fn f1() { foo() } - } - fn f2() { c::foo() } - } - fn f3() { b::c::foo() } - } - use a::b::c::foo; - fn f4() { foo() } - "#, - expect![[r#" - mod a { - pub mod b { - pub mod c { - pub fn foo() {} - pub fn bar() {} - fn f1() { bar() } - } - fn f2() { c::bar() } - } - fn f3() { b::c::bar() } - } - use a::b::c::foo; - fn f4() { a::b::c::bar() } - "#]], - ); -} - -#[test] -fn replace_associated_function_with_generics() { - assert_ssr_transform( - "c::Foo::<$a>::new() ==>> d::Bar::<$a>::default()", - r#" - mod c { - pub struct Foo {v: T} - impl Foo { pub fn new() {} } - fn f1() { - Foo::::new(); - } - } - mod d { - pub struct Bar {v: T} - impl Bar { pub fn default() {} } - fn f1() { - super::c::Foo::::new(); - } - } - "#, - expect![[r#" - mod c { - pub struct Foo {v: T} - impl Foo { pub fn new() {} } - fn f1() { - crate::d::Bar::::default(); - } - } - mod d { - pub struct Bar {v: T} - impl Bar { pub fn default() {} } - fn f1() { - Bar::::default(); - } - } - "#]], - ); -} - -#[test] -fn replace_type() { - assert_ssr_transform( - "Result<(), $a> ==>> Option<$a>", - "struct Result {} struct Option {} fn f1() -> Result<(), Vec> {foo()}", - expect![[ - "struct Result {} struct Option {} fn f1() -> Option> {foo()}" - ]], - ); -} - -#[test] -fn replace_macro_invocations() { - assert_ssr_transform( - "try!($a) ==>> $a?", - "macro_rules! try {() => {}} fn f1() -> Result<(), E> {bar(try!(foo()));}", - expect![["macro_rules! try {() => {}} fn f1() -> Result<(), E> {bar(foo()?);}"]], - ); - assert_ssr_transform( - "foo!($a($b)) ==>> foo($b, $a)", - "macro_rules! foo {() => {}} fn f1() {foo!(abc(def() + 2));}", - expect![["macro_rules! 
foo {() => {}} fn f1() {foo(def() + 2, abc);}"]], - ); -} - -#[test] -fn replace_binary_op() { - assert_ssr_transform( - "$a + $b ==>> $b + $a", - "fn f() {2 * 3 + 4 * 5}", - expect![["fn f() {4 * 5 + 2 * 3}"]], - ); - assert_ssr_transform( - "$a + $b ==>> $b + $a", - "fn f() {1 + 2 + 3 + 4}", - expect![[r#"fn f() {4 + (3 + (2 + 1))}"#]], - ); -} - -#[test] -fn match_binary_op() { - assert_matches("$a + $b", "fn f() {1 + 2 + 3 + 4}", &["1 + 2", "1 + 2 + 3", "1 + 2 + 3 + 4"]); -} - -#[test] -fn multiple_rules() { - assert_ssr_transforms( - &["$a + 1 ==>> add_one($a)", "$a + $b ==>> add($a, $b)"], - "fn add() {} fn add_one() {} fn f() -> i32 {3 + 2 + 1}", - expect![["fn add() {} fn add_one() {} fn f() -> i32 {add_one(add(3, 2))}"]], - ) -} - -#[test] -fn multiple_rules_with_nested_matches() { - assert_ssr_transforms( - &["foo1($a) ==>> bar1($a)", "foo2($a) ==>> bar2($a)"], - r#" - fn foo1() {} fn foo2() {} fn bar1() {} fn bar2() {} - fn f() {foo1(foo2(foo1(foo2(foo1(42)))))} - "#, - expect![[r#" - fn foo1() {} fn foo2() {} fn bar1() {} fn bar2() {} - fn f() {bar1(bar2(bar1(bar2(bar1(42)))))} - "#]], - ) -} - -#[test] -fn match_within_macro_invocation() { - let code = r#" - macro_rules! foo { - ($a:stmt; $b:expr) => { - $b - }; - } - struct A {} - impl A { - fn bar() {} - } - fn f1() { - let aaa = A {}; - foo!(macro_ignores_this(); aaa.bar()); - } - "#; - assert_matches("$a.bar()", code, &["aaa.bar()"]); -} - -#[test] -fn replace_within_macro_expansion() { - assert_ssr_transform( - "$a.foo() ==>> bar($a)", - r#" - macro_rules! macro1 { - ($a:expr) => {$a} - } - fn bar() {} - fn f() {macro1!(5.x().foo().o2())} - "#, - expect![[r#" - macro_rules! macro1 { - ($a:expr) => {$a} - } - fn bar() {} - fn f() {macro1!(bar(5.x()).o2())} - "#]], - ) -} - -#[test] -fn replace_outside_and_within_macro_expansion() { - assert_ssr_transform( - "foo($a) ==>> bar($a)", - r#" - fn foo() {} fn bar() {} - macro_rules! 
macro1 { - ($a:expr) => {$a} - } - fn f() {foo(foo(macro1!(foo(foo(42)))))} - "#, - expect![[r#" - fn foo() {} fn bar() {} - macro_rules! macro1 { - ($a:expr) => {$a} - } - fn f() {bar(bar(macro1!(bar(bar(42)))))} - "#]], - ) -} - -#[test] -fn preserves_whitespace_within_macro_expansion() { - assert_ssr_transform( - "$a + $b ==>> $b - $a", - r#" - macro_rules! macro1 { - ($a:expr) => {$a} - } - fn f() {macro1!(1 * 2 + 3 + 4} - "#, - expect![[r#" - macro_rules! macro1 { - ($a:expr) => {$a} - } - fn f() {macro1!(4 - (3 - 1 * 2)} - "#]], - ) -} - -#[test] -fn add_parenthesis_when_necessary() { - assert_ssr_transform( - "foo($a) ==>> $a.to_string()", - r#" - fn foo(_: i32) {} - fn bar3(v: i32) { - foo(1 + 2); - foo(-v); - } - "#, - expect![[r#" - fn foo(_: i32) {} - fn bar3(v: i32) { - (1 + 2).to_string(); - (-v).to_string(); - } - "#]], - ) -} - -#[test] -fn match_failure_reasons() { - let code = r#" - fn bar() {} - macro_rules! foo { - ($a:expr) => { - 1 + $a + 2 - }; - } - fn f1() { - bar(1, 2); - foo!(5 + 43.to_string() + 5); - } - "#; - assert_match_failure_reason( - "bar($a, 3)", - code, - "bar(1, 2)", - r#"Pattern wanted token '3' (INT_NUMBER), but code had token '2' (INT_NUMBER)"#, - ); - assert_match_failure_reason( - "42.to_string()", - code, - "43.to_string()", - r#"Pattern wanted token '42' (INT_NUMBER), but code had token '43' (INT_NUMBER)"#, - ); -} - -#[test] -fn overlapping_possible_matches() { - // There are three possible matches here, however the middle one, `foo(foo(foo(42)))` shouldn't - // match because it overlaps with the outer match. The inner match is permitted since it's is - // contained entirely within the placeholder of the outer match. - assert_matches( - "foo(foo($a))", - "fn foo() {} fn main() {foo(foo(foo(foo(42))))}", - &["foo(foo(42))", "foo(foo(foo(foo(42))))"], - ); -} - -#[test] -fn use_declaration_with_braces() { - // It would be OK for a path rule to match and alter a use declaration. We shouldn't mess it up - // though. 
In particular, we must not change `use foo::{baz, bar}` to `use foo::{baz, - // foo2::bar2}`. - mark::check!(use_declaration_with_braces); - assert_ssr_transform( - "foo::bar ==>> foo2::bar2", - r#" - mod foo { pub fn bar() {} pub fn baz() {} } - mod foo2 { pub fn bar2() {} } - use foo::{baz, bar}; - fn main() { bar() } - "#, - expect![[" - mod foo { pub fn bar() {} pub fn baz() {} } - mod foo2 { pub fn bar2() {} } - use foo::{baz, bar}; - fn main() { foo2::bar2() } - "]], - ) -} - -#[test] -fn ufcs_matches_method_call() { - let code = r#" - struct Foo {} - impl Foo { - fn new(_: i32) -> Foo { Foo {} } - fn do_stuff(&self, _: i32) {} - } - struct Bar {} - impl Bar { - fn new(_: i32) -> Bar { Bar {} } - fn do_stuff(&self, v: i32) {} - } - fn main() { - let b = Bar {}; - let f = Foo {}; - b.do_stuff(1); - f.do_stuff(2); - Foo::new(4).do_stuff(3); - // Too many / too few args - should never match - f.do_stuff(2, 10); - f.do_stuff(); - } - "#; - assert_matches("Foo::do_stuff($a, $b)", code, &["f.do_stuff(2)", "Foo::new(4).do_stuff(3)"]); - // The arguments needs special handling in the case of a function call matching a method call - // and the first argument is different. 
- assert_matches("Foo::do_stuff($a, 2)", code, &["f.do_stuff(2)"]); - assert_matches("Foo::do_stuff(Foo::new(4), $b)", code, &["Foo::new(4).do_stuff(3)"]); - - assert_ssr_transform( - "Foo::do_stuff(Foo::new($a), $b) ==>> Bar::new($b).do_stuff($a)", - code, - expect![[r#" - struct Foo {} - impl Foo { - fn new(_: i32) -> Foo { Foo {} } - fn do_stuff(&self, _: i32) {} - } - struct Bar {} - impl Bar { - fn new(_: i32) -> Bar { Bar {} } - fn do_stuff(&self, v: i32) {} - } - fn main() { - let b = Bar {}; - let f = Foo {}; - b.do_stuff(1); - f.do_stuff(2); - Bar::new(3).do_stuff(4); - // Too many / too few args - should never match - f.do_stuff(2, 10); - f.do_stuff(); - } - "#]], - ); -} - -#[test] -fn pattern_is_a_single_segment_path() { - mark::check!(pattern_is_a_single_segment_path); - // The first function should not be altered because the `foo` in scope at the cursor position is - // a different `foo`. This case is special because "foo" can be parsed as a pattern (IDENT_PAT -> - // NAME -> IDENT), which contains no path. If we're not careful we'll end up matching the `foo` - // in `let foo` from the first function. Whether we should match the `let foo` in the second - // function is less clear. At the moment, we don't. Doing so sounds like a rename operation, - // which isn't really what SSR is for, especially since the replacement `bar` must be able to be - // resolved, which means if we rename `foo` we'll get a name collision. - assert_ssr_transform( - "foo ==>> bar", - r#" - fn f1() -> i32 { - let foo = 1; - let bar = 2; - foo - } - fn f1() -> i32 { - let foo = 1; - let bar = 2; - foo<|> - } - "#, - expect![[r#" - fn f1() -> i32 { - let foo = 1; - let bar = 2; - foo - } - fn f1() -> i32 { - let foo = 1; - let bar = 2; - bar - } - "#]], - ); -} - -#[test] -fn replace_local_variable_reference() { - // The pattern references a local variable `foo` in the block containing the cursor. 
We should - // only replace references to this variable `foo`, not other variables that just happen to have - // the same name. - mark::check!(cursor_after_semicolon); - assert_ssr_transform( - "foo + $a ==>> $a - foo", - r#" - fn bar1() -> i32 { - let mut res = 0; - let foo = 5; - res += foo + 1; - let foo = 10; - res += foo + 2;<|> - res += foo + 3; - let foo = 15; - res += foo + 4; - res - } - "#, - expect![[r#" - fn bar1() -> i32 { - let mut res = 0; - let foo = 5; - res += foo + 1; - let foo = 10; - res += 2 - foo; - res += 3 - foo; - let foo = 15; - res += foo + 4; - res - } - "#]], - ) -} - -#[test] -fn replace_path_within_selection() { - assert_ssr_transform( - "foo ==>> bar", - r#" - fn main() { - let foo = 41; - let bar = 42; - do_stuff(foo); - do_stuff(foo);<|> - do_stuff(foo); - do_stuff(foo);<|> - do_stuff(foo); - }"#, - expect![[r#" - fn main() { - let foo = 41; - let bar = 42; - do_stuff(foo); - do_stuff(foo); - do_stuff(bar); - do_stuff(bar); - do_stuff(foo); - }"#]], - ); -} - -#[test] -fn replace_nonpath_within_selection() { - mark::check!(replace_nonpath_within_selection); - assert_ssr_transform( - "$a + $b ==>> $b * $a", - r#" - fn main() { - let v = 1 + 2;<|> - let v2 = 3 + 3; - let v3 = 4 + 5;<|> - let v4 = 6 + 7; - }"#, - expect![[r#" - fn main() { - let v = 1 + 2; - let v2 = 3 * 3; - let v3 = 5 * 4; - let v4 = 6 + 7; - }"#]], - ); -} - -#[test] -fn replace_self() { - // `foo(self)` occurs twice in the code, however only the first occurrence is the `self` that's - // in scope where the rule is invoked. 
- assert_ssr_transform( - "foo(self) ==>> bar(self)", - r#" - struct S1 {} - fn foo(_: &S1) {} - fn bar(_: &S1) {} - impl S1 { - fn f1(&self) { - foo(self)<|> - } - fn f2(&self) { - foo(self) - } - } - "#, - expect![[r#" - struct S1 {} - fn foo(_: &S1) {} - fn bar(_: &S1) {} - impl S1 { - fn f1(&self) { - bar(self) - } - fn f2(&self) { - foo(self) - } - } - "#]], - ); -} diff --git a/crates/ra_syntax/Cargo.toml b/crates/ra_syntax/Cargo.toml deleted file mode 100644 index fc4d7aa048..0000000000 --- a/crates/ra_syntax/Cargo.toml +++ /dev/null @@ -1,36 +0,0 @@ -[package] -edition = "2018" -name = "ra_syntax" -version = "0.1.0" -authors = ["rust-analyzer developers"] -license = "MIT OR Apache-2.0" -description = "Comment and whitespace preserving parser for the Rust language" -repository = "https://github.com/rust-analyzer/rust-analyzer" - -[lib] -doctest = false - -[dependencies] -itertools = "0.9.0" -rowan = "0.10.0" -rustc_lexer = { version = "671.0.0", package = "rustc-ap-rustc_lexer" } -rustc-hash = "1.1.0" -arrayvec = "0.5.1" -once_cell = "1.3.1" - -stdx = { path = "../stdx" } - -ra_text_edit = { path = "../ra_text_edit" } -ra_parser = { path = "../ra_parser" } - -# This crate transitively depends on `smol_str` via `rowan`. 
-# ideally, `serde` should be enabled by `rust-analyzer`, but we enable it here -# to reduce number of compilations -smol_str = { version = "0.1.15", features = ["serde"] } -serde = { version = "1.0.106", features = ["derive"] } - -[dev-dependencies] -test_utils = { path = "../test_utils" } -expect = { path = "../expect" } -walkdir = "2.3.1" -rayon = "1" diff --git a/crates/ra_syntax/fuzz/Cargo.toml b/crates/ra_syntax/fuzz/Cargo.toml deleted file mode 100644 index 613ad2857d..0000000000 --- a/crates/ra_syntax/fuzz/Cargo.toml +++ /dev/null @@ -1,27 +0,0 @@ - -[package] -name = "ra_syntax-fuzz" -version = "0.0.1" -authors = ["rust-analyzer developers"] -publish = false -edition = "2018" - -[package.metadata] -cargo-fuzz = true - -[dependencies] -ra_syntax = { path = ".." } -ra_text_edit = { path = "../../ra_text_edit" } -libfuzzer-sys = { git = "https://github.com/rust-fuzz/libfuzzer-sys.git" } - -# Prevent this from interfering with workspaces -[workspace] -members = ["."] - -[[bin]] -name = "parser" -path = "fuzz_targets/parser.rs" - -[[bin]] -name = "reparse" -path = "fuzz_targets/reparse.rs" diff --git a/crates/ra_syntax/fuzz/fuzz_targets/parser.rs b/crates/ra_syntax/fuzz/fuzz_targets/parser.rs deleted file mode 100644 index 7bc4ef30d0..0000000000 --- a/crates/ra_syntax/fuzz/fuzz_targets/parser.rs +++ /dev/null @@ -1,11 +0,0 @@ -//! FIXME: write short doc here - -#![no_main] -use libfuzzer_sys::fuzz_target; -use ra_syntax::fuzz::check_parser; - -fuzz_target!(|data: &[u8]| { - if let Ok(text) = std::str::from_utf8(data) { - check_parser(text) - } -}); diff --git a/crates/ra_syntax/fuzz/fuzz_targets/reparse.rs b/crates/ra_syntax/fuzz/fuzz_targets/reparse.rs deleted file mode 100644 index 16598f5f11..0000000000 --- a/crates/ra_syntax/fuzz/fuzz_targets/reparse.rs +++ /dev/null @@ -1,11 +0,0 @@ -//! 
FIXME: write short doc here - -#![no_main] -use libfuzzer_sys::fuzz_target; -use ra_syntax::fuzz::CheckReparse; - -fuzz_target!(|data: &[u8]| { - if let Some(check) = CheckReparse::from_data(data) { - check.run(); - } -}); diff --git a/crates/ra_syntax/src/algo.rs b/crates/ra_syntax/src/algo.rs deleted file mode 100644 index 26b3c813a1..0000000000 --- a/crates/ra_syntax/src/algo.rs +++ /dev/null @@ -1,406 +0,0 @@ -//! FIXME: write short doc here - -use std::{ - fmt, - ops::{self, RangeInclusive}, -}; - -use itertools::Itertools; -use ra_text_edit::TextEditBuilder; -use rustc_hash::FxHashMap; - -use crate::{ - AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxNodePtr, - SyntaxToken, TextRange, TextSize, -}; - -/// Returns ancestors of the node at the offset, sorted by length. This should -/// do the right thing at an edge, e.g. when searching for expressions at `{ -/// <|>foo }` we will get the name reference instead of the whole block, which -/// we would get if we just did `find_token_at_offset(...).flat_map(|t| -/// t.parent().ancestors())`. -pub fn ancestors_at_offset( - node: &SyntaxNode, - offset: TextSize, -) -> impl Iterator { - node.token_at_offset(offset) - .map(|token| token.parent().ancestors()) - .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len()) -} - -/// Finds a node of specific Ast type at offset. Note that this is slightly -/// imprecise: if the cursor is strictly between two nodes of the desired type, -/// as in -/// -/// ```no-run -/// struct Foo {}|struct Bar; -/// ``` -/// -/// then the shorter node will be silently preferred. 
-pub fn find_node_at_offset(syntax: &SyntaxNode, offset: TextSize) -> Option { - ancestors_at_offset(syntax, offset).find_map(N::cast) -} - -pub fn find_node_at_range(syntax: &SyntaxNode, range: TextRange) -> Option { - find_covering_element(syntax, range).ancestors().find_map(N::cast) -} - -/// Skip to next non `trivia` token -pub fn skip_trivia_token(mut token: SyntaxToken, direction: Direction) -> Option { - while token.kind().is_trivia() { - token = match direction { - Direction::Next => token.next_token()?, - Direction::Prev => token.prev_token()?, - } - } - Some(token) -} - -/// Finds the first sibling in the given direction which is not `trivia` -pub fn non_trivia_sibling(element: SyntaxElement, direction: Direction) -> Option { - return match element { - NodeOrToken::Node(node) => node.siblings_with_tokens(direction).skip(1).find(not_trivia), - NodeOrToken::Token(token) => token.siblings_with_tokens(direction).skip(1).find(not_trivia), - }; - - fn not_trivia(element: &SyntaxElement) -> bool { - match element { - NodeOrToken::Node(_) => true, - NodeOrToken::Token(token) => !token.kind().is_trivia(), - } - } -} - -pub fn find_covering_element(root: &SyntaxNode, range: TextRange) -> SyntaxElement { - root.covering_element(range) -} - -pub fn least_common_ancestor(u: &SyntaxNode, v: &SyntaxNode) -> Option { - if u == v { - return Some(u.clone()); - } - - let u_depth = u.ancestors().count(); - let v_depth = v.ancestors().count(); - let keep = u_depth.min(v_depth); - - let u_candidates = u.ancestors().skip(u_depth - keep); - let v_canidates = v.ancestors().skip(v_depth - keep); - let (res, _) = u_candidates.zip(v_canidates).find(|(x, y)| x == y)?; - Some(res) -} - -pub fn neighbor(me: &T, direction: Direction) -> Option { - me.syntax().siblings(direction).skip(1).find_map(T::cast) -} - -pub fn has_errors(node: &SyntaxNode) -> bool { - node.children().any(|it| it.kind() == SyntaxKind::ERROR) -} - -#[derive(Debug, PartialEq, Eq, Clone, Copy)] -pub enum 
InsertPosition { - First, - Last, - Before(T), - After(T), -} - -pub struct TreeDiff { - replacements: FxHashMap, -} - -impl TreeDiff { - pub fn into_text_edit(&self, builder: &mut TextEditBuilder) { - for (from, to) in self.replacements.iter() { - builder.replace(from.text_range(), to.to_string()) - } - } - - pub fn is_empty(&self) -> bool { - self.replacements.is_empty() - } -} - -/// Finds minimal the diff, which, applied to `from`, will result in `to`. -/// -/// Specifically, returns a map whose keys are descendants of `from` and values -/// are descendants of `to`, such that `replace_descendants(from, map) == to`. -/// -/// A trivial solution is a singleton map `{ from: to }`, but this function -/// tries to find a more fine-grained diff. -pub fn diff(from: &SyntaxNode, to: &SyntaxNode) -> TreeDiff { - let mut buf = FxHashMap::default(); - // FIXME: this is both horrible inefficient and gives larger than - // necessary diff. I bet there's a cool algorithm to diff trees properly. - go(&mut buf, from.clone().into(), to.clone().into()); - return TreeDiff { replacements: buf }; - - fn go( - buf: &mut FxHashMap, - lhs: SyntaxElement, - rhs: SyntaxElement, - ) { - if lhs.kind() == rhs.kind() - && lhs.text_range().len() == rhs.text_range().len() - && match (&lhs, &rhs) { - (NodeOrToken::Node(lhs), NodeOrToken::Node(rhs)) => { - lhs.green() == rhs.green() || lhs.text() == rhs.text() - } - (NodeOrToken::Token(lhs), NodeOrToken::Token(rhs)) => lhs.text() == rhs.text(), - _ => false, - } - { - return; - } - if let (Some(lhs), Some(rhs)) = (lhs.as_node(), rhs.as_node()) { - if lhs.children_with_tokens().count() == rhs.children_with_tokens().count() { - for (lhs, rhs) in lhs.children_with_tokens().zip(rhs.children_with_tokens()) { - go(buf, lhs, rhs) - } - return; - } - } - buf.insert(lhs, rhs); - } -} - -/// Adds specified children (tokens or nodes) to the current node at the -/// specific position. 
-/// -/// This is a type-unsafe low-level editing API, if you need to use it, -/// prefer to create a type-safe abstraction on top of it instead. -pub fn insert_children( - parent: &SyntaxNode, - position: InsertPosition, - to_insert: impl IntoIterator, -) -> SyntaxNode { - let mut to_insert = to_insert.into_iter(); - _insert_children(parent, position, &mut to_insert) -} - -fn _insert_children( - parent: &SyntaxNode, - position: InsertPosition, - to_insert: &mut dyn Iterator, -) -> SyntaxNode { - let mut delta = TextSize::default(); - let to_insert = to_insert.map(|element| { - delta += element.text_range().len(); - to_green_element(element) - }); - - let mut old_children = parent.green().children().map(|it| match it { - NodeOrToken::Token(it) => NodeOrToken::Token(it.clone()), - NodeOrToken::Node(it) => NodeOrToken::Node(it.clone()), - }); - - let new_children = match &position { - InsertPosition::First => to_insert.chain(old_children).collect::>(), - InsertPosition::Last => old_children.chain(to_insert).collect::>(), - InsertPosition::Before(anchor) | InsertPosition::After(anchor) => { - let take_anchor = if let InsertPosition::After(_) = position { 1 } else { 0 }; - let split_at = position_of_child(parent, anchor.clone()) + take_anchor; - let before = old_children.by_ref().take(split_at).collect::>(); - before.into_iter().chain(to_insert).chain(old_children).collect::>() - } - }; - - with_children(parent, new_children) -} - -/// Replaces all nodes in `to_delete` with nodes from `to_insert` -/// -/// This is a type-unsafe low-level editing API, if you need to use it, -/// prefer to create a type-safe abstraction on top of it instead. 
-pub fn replace_children( - parent: &SyntaxNode, - to_delete: RangeInclusive, - to_insert: impl IntoIterator, -) -> SyntaxNode { - let mut to_insert = to_insert.into_iter(); - _replace_children(parent, to_delete, &mut to_insert) -} - -fn _replace_children( - parent: &SyntaxNode, - to_delete: RangeInclusive, - to_insert: &mut dyn Iterator, -) -> SyntaxNode { - let start = position_of_child(parent, to_delete.start().clone()); - let end = position_of_child(parent, to_delete.end().clone()); - let mut old_children = parent.green().children().map(|it| match it { - NodeOrToken::Token(it) => NodeOrToken::Token(it.clone()), - NodeOrToken::Node(it) => NodeOrToken::Node(it.clone()), - }); - - let before = old_children.by_ref().take(start).collect::>(); - let new_children = before - .into_iter() - .chain(to_insert.map(to_green_element)) - .chain(old_children.skip(end + 1 - start)) - .collect::>(); - with_children(parent, new_children) -} - -#[derive(Default)] -pub struct SyntaxRewriter<'a> { - f: Option Option + 'a>>, - //FIXME: add debug_assertions that all elements are in fact from the same file. 
- replacements: FxHashMap, -} - -impl fmt::Debug for SyntaxRewriter<'_> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("SyntaxRewriter").field("replacements", &self.replacements).finish() - } -} - -impl<'a> SyntaxRewriter<'a> { - pub fn from_fn(f: impl Fn(&SyntaxElement) -> Option + 'a) -> SyntaxRewriter<'a> { - SyntaxRewriter { f: Some(Box::new(f)), replacements: FxHashMap::default() } - } - pub fn delete>(&mut self, what: &T) { - let what = what.clone().into(); - let replacement = Replacement::Delete; - self.replacements.insert(what, replacement); - } - pub fn replace>(&mut self, what: &T, with: &T) { - let what = what.clone().into(); - let replacement = Replacement::Single(with.clone().into()); - self.replacements.insert(what, replacement); - } - pub fn replace_with_many>( - &mut self, - what: &T, - with: Vec, - ) { - let what = what.clone().into(); - let replacement = Replacement::Many(with); - self.replacements.insert(what, replacement); - } - pub fn replace_ast(&mut self, what: &T, with: &T) { - self.replace(what.syntax(), with.syntax()) - } - - pub fn rewrite(&self, node: &SyntaxNode) -> SyntaxNode { - if self.f.is_none() && self.replacements.is_empty() { - return node.clone(); - } - self.rewrite_children(node) - } - - pub fn rewrite_ast(self, node: &N) -> N { - N::cast(self.rewrite(node.syntax())).unwrap() - } - - /// Returns a node that encompasses all replacements to be done by this rewriter. - /// - /// Passing the returned node to `rewrite` will apply all replacements queued up in `self`. - /// - /// Returns `None` when there are no replacements. - pub fn rewrite_root(&self) -> Option { - assert!(self.f.is_none()); - self.replacements - .keys() - .map(|element| match element { - SyntaxElement::Node(it) => it.clone(), - SyntaxElement::Token(it) => it.parent(), - }) - // If we only have one replacement, we must return its parent node, since `rewrite` does - // not replace the node passed to it. 
- .map(|it| it.parent().unwrap_or(it)) - .fold1(|a, b| least_common_ancestor(&a, &b).unwrap()) - } - - fn replacement(&self, element: &SyntaxElement) -> Option { - if let Some(f) = &self.f { - assert!(self.replacements.is_empty()); - return f(element).map(Replacement::Single); - } - self.replacements.get(element).cloned() - } - - fn rewrite_children(&self, node: &SyntaxNode) -> SyntaxNode { - // FIXME: this could be made much faster. - let mut new_children = Vec::new(); - for child in node.children_with_tokens() { - self.rewrite_self(&mut new_children, &child); - } - with_children(node, new_children) - } - - fn rewrite_self( - &self, - acc: &mut Vec>, - element: &SyntaxElement, - ) { - if let Some(replacement) = self.replacement(&element) { - match replacement { - Replacement::Single(NodeOrToken::Node(it)) => { - acc.push(NodeOrToken::Node(it.green().clone())) - } - Replacement::Single(NodeOrToken::Token(it)) => { - acc.push(NodeOrToken::Token(it.green().clone())) - } - Replacement::Many(replacements) => { - acc.extend(replacements.iter().map(|it| match it { - NodeOrToken::Node(it) => NodeOrToken::Node(it.green().clone()), - NodeOrToken::Token(it) => NodeOrToken::Token(it.green().clone()), - })) - } - Replacement::Delete => (), - }; - return; - } - let res = match element { - NodeOrToken::Token(it) => NodeOrToken::Token(it.green().clone()), - NodeOrToken::Node(it) => NodeOrToken::Node(self.rewrite_children(it).green().clone()), - }; - acc.push(res) - } -} - -impl ops::AddAssign for SyntaxRewriter<'_> { - fn add_assign(&mut self, rhs: SyntaxRewriter) { - assert!(rhs.f.is_none()); - self.replacements.extend(rhs.replacements) - } -} - -#[derive(Clone, Debug)] -enum Replacement { - Delete, - Single(SyntaxElement), - Many(Vec), -} - -fn with_children( - parent: &SyntaxNode, - new_children: Vec>, -) -> SyntaxNode { - let len = new_children.iter().map(|it| it.text_len()).sum::(); - let new_node = rowan::GreenNode::new(rowan::SyntaxKind(parent.kind() as u16), 
new_children); - let new_root_node = parent.replace_with(new_node); - let new_root_node = SyntaxNode::new_root(new_root_node); - - // FIXME: use a more elegant way to re-fetch the node (#1185), make - // `range` private afterwards - let mut ptr = SyntaxNodePtr::new(parent); - ptr.range = TextRange::at(ptr.range.start(), len); - ptr.to_node(&new_root_node) -} - -fn position_of_child(parent: &SyntaxNode, child: SyntaxElement) -> usize { - parent - .children_with_tokens() - .position(|it| it == child) - .expect("element is not a child of current element") -} - -fn to_green_element(element: SyntaxElement) -> NodeOrToken { - match element { - NodeOrToken::Node(it) => it.green().clone().into(), - NodeOrToken::Token(it) => it.green().clone().into(), - } -} diff --git a/crates/ra_syntax/src/ast/edit.rs b/crates/ra_syntax/src/ast/edit.rs deleted file mode 100644 index 5ed123f91f..0000000000 --- a/crates/ra_syntax/src/ast/edit.rs +++ /dev/null @@ -1,642 +0,0 @@ -//! This module contains functions for editing syntax trees. As the trees are -//! immutable, all function here return a fresh copy of the tree, instead of -//! doing an in-place modification. 
-use std::{ - fmt, iter, - ops::{self, RangeInclusive}, -}; - -use arrayvec::ArrayVec; - -use crate::{ - algo::{self, neighbor, SyntaxRewriter}, - ast::{ - self, - make::{self, tokens}, - AstNode, TypeBoundsOwner, - }, - AstToken, Direction, InsertPosition, SmolStr, SyntaxElement, SyntaxKind, - SyntaxKind::{ATTR, COMMENT, WHITESPACE}, - SyntaxNode, SyntaxToken, T, -}; - -impl ast::BinExpr { - #[must_use] - pub fn replace_op(&self, op: SyntaxKind) -> Option { - let op_node: SyntaxElement = self.op_details()?.0.into(); - let to_insert: Option = Some(make::token(op).into()); - Some(self.replace_children(single_node(op_node), to_insert)) - } -} - -impl ast::Fn { - #[must_use] - pub fn with_body(&self, body: ast::BlockExpr) -> ast::Fn { - let mut to_insert: ArrayVec<[SyntaxElement; 2]> = ArrayVec::new(); - let old_body_or_semi: SyntaxElement = if let Some(old_body) = self.body() { - old_body.syntax().clone().into() - } else if let Some(semi) = self.semicolon_token() { - to_insert.push(make::tokens::single_space().into()); - semi.into() - } else { - to_insert.push(make::tokens::single_space().into()); - to_insert.push(body.syntax().clone().into()); - return self.insert_children(InsertPosition::Last, to_insert); - }; - to_insert.push(body.syntax().clone().into()); - self.replace_children(single_node(old_body_or_semi), to_insert) - } -} - -fn make_multiline(node: N) -> N -where - N: AstNode + Clone, -{ - let l_curly = match node.syntax().children_with_tokens().find(|it| it.kind() == T!['{']) { - Some(it) => it, - None => return node, - }; - let sibling = match l_curly.next_sibling_or_token() { - Some(it) => it, - None => return node, - }; - let existing_ws = match sibling.as_token() { - None => None, - Some(tok) if tok.kind() != WHITESPACE => None, - Some(ws) => { - if ws.text().contains('\n') { - return node; - } - Some(ws.clone()) - } - }; - - let indent = leading_indent(node.syntax()).unwrap_or_default(); - let ws = tokens::WsBuilder::new(&format!("\n{}", indent)); - 
let to_insert = iter::once(ws.ws().into()); - match existing_ws { - None => node.insert_children(InsertPosition::After(l_curly), to_insert), - Some(ws) => node.replace_children(single_node(ws), to_insert), - } -} - -impl ast::AssocItemList { - #[must_use] - pub fn append_items( - &self, - items: impl IntoIterator, - ) -> ast::AssocItemList { - let mut res = self.clone(); - if !self.syntax().text().contains_char('\n') { - res = make_multiline(res); - } - items.into_iter().for_each(|it| res = res.append_item(it)); - res - } - - #[must_use] - pub fn append_item(&self, item: ast::AssocItem) -> ast::AssocItemList { - let (indent, position) = match self.assoc_items().last() { - Some(it) => ( - leading_indent(it.syntax()).unwrap_or_default().to_string(), - InsertPosition::After(it.syntax().clone().into()), - ), - None => match self.l_curly_token() { - Some(it) => ( - " ".to_string() + &leading_indent(self.syntax()).unwrap_or_default(), - InsertPosition::After(it.into()), - ), - None => return self.clone(), - }, - }; - let ws = tokens::WsBuilder::new(&format!("\n{}", indent)); - let to_insert: ArrayVec<[SyntaxElement; 2]> = - [ws.ws().into(), item.syntax().clone().into()].into(); - self.insert_children(position, to_insert) - } -} - -impl ast::RecordExprFieldList { - #[must_use] - pub fn append_field(&self, field: &ast::RecordExprField) -> ast::RecordExprFieldList { - self.insert_field(InsertPosition::Last, field) - } - - #[must_use] - pub fn insert_field( - &self, - position: InsertPosition<&'_ ast::RecordExprField>, - field: &ast::RecordExprField, - ) -> ast::RecordExprFieldList { - let is_multiline = self.syntax().text().contains_char('\n'); - let ws; - let space = if is_multiline { - ws = tokens::WsBuilder::new(&format!( - "\n{} ", - leading_indent(self.syntax()).unwrap_or_default() - )); - ws.ws() - } else { - tokens::single_space() - }; - - let mut to_insert: ArrayVec<[SyntaxElement; 4]> = ArrayVec::new(); - to_insert.push(space.into()); - 
to_insert.push(field.syntax().clone().into()); - to_insert.push(make::token(T![,]).into()); - - macro_rules! after_l_curly { - () => {{ - let anchor = match self.l_curly_token() { - Some(it) => it.into(), - None => return self.clone(), - }; - InsertPosition::After(anchor) - }}; - } - - macro_rules! after_field { - ($anchor:expr) => { - if let Some(comma) = $anchor - .syntax() - .siblings_with_tokens(Direction::Next) - .find(|it| it.kind() == T![,]) - { - InsertPosition::After(comma) - } else { - to_insert.insert(0, make::token(T![,]).into()); - InsertPosition::After($anchor.syntax().clone().into()) - } - }; - }; - - let position = match position { - InsertPosition::First => after_l_curly!(), - InsertPosition::Last => { - if !is_multiline { - // don't insert comma before curly - to_insert.pop(); - } - match self.fields().last() { - Some(it) => after_field!(it), - None => after_l_curly!(), - } - } - InsertPosition::Before(anchor) => { - InsertPosition::Before(anchor.syntax().clone().into()) - } - InsertPosition::After(anchor) => after_field!(anchor), - }; - - self.insert_children(position, to_insert) - } -} - -impl ast::TypeAlias { - #[must_use] - pub fn remove_bounds(&self) -> ast::TypeAlias { - let colon = match self.colon_token() { - Some(it) => it, - None => return self.clone(), - }; - let end = match self.type_bound_list() { - Some(it) => it.syntax().clone().into(), - None => colon.clone().into(), - }; - self.replace_children(colon.into()..=end, iter::empty()) - } -} - -impl ast::TypeParam { - #[must_use] - pub fn remove_bounds(&self) -> ast::TypeParam { - let colon = match self.colon_token() { - Some(it) => it, - None => return self.clone(), - }; - let end = match self.type_bound_list() { - Some(it) => it.syntax().clone().into(), - None => colon.clone().into(), - }; - self.replace_children(colon.into()..=end, iter::empty()) - } -} - -impl ast::Path { - #[must_use] - pub fn with_segment(&self, segment: ast::PathSegment) -> ast::Path { - if let Some(old) = 
self.segment() { - return self.replace_children( - single_node(old.syntax().clone()), - iter::once(segment.syntax().clone().into()), - ); - } - self.clone() - } -} - -impl ast::PathSegment { - #[must_use] - pub fn with_type_args(&self, type_args: ast::GenericArgList) -> ast::PathSegment { - self._with_type_args(type_args, false) - } - - #[must_use] - pub fn with_turbo_fish(&self, type_args: ast::GenericArgList) -> ast::PathSegment { - self._with_type_args(type_args, true) - } - - fn _with_type_args(&self, type_args: ast::GenericArgList, turbo: bool) -> ast::PathSegment { - if let Some(old) = self.generic_arg_list() { - return self.replace_children( - single_node(old.syntax().clone()), - iter::once(type_args.syntax().clone().into()), - ); - } - let mut to_insert: ArrayVec<[SyntaxElement; 2]> = ArrayVec::new(); - if turbo { - to_insert.push(make::token(T![::]).into()); - } - to_insert.push(type_args.syntax().clone().into()); - self.insert_children(InsertPosition::Last, to_insert) - } -} - -impl ast::Use { - #[must_use] - pub fn with_use_tree(&self, use_tree: ast::UseTree) -> ast::Use { - if let Some(old) = self.use_tree() { - return self.replace_descendant(old, use_tree); - } - self.clone() - } - - pub fn remove(&self) -> SyntaxRewriter<'static> { - let mut res = SyntaxRewriter::default(); - res.delete(self.syntax()); - let next_ws = self - .syntax() - .next_sibling_or_token() - .and_then(|it| it.into_token()) - .and_then(ast::Whitespace::cast); - if let Some(next_ws) = next_ws { - let ws_text = next_ws.syntax().text(); - if ws_text.starts_with('\n') { - let rest = &ws_text[1..]; - if rest.is_empty() { - res.delete(next_ws.syntax()) - } else { - res.replace(next_ws.syntax(), &make::tokens::whitespace(rest)); - } - } - } - res - } -} - -impl ast::UseTree { - #[must_use] - pub fn with_path(&self, path: ast::Path) -> ast::UseTree { - if let Some(old) = self.path() { - return self.replace_descendant(old, path); - } - self.clone() - } - - #[must_use] - pub fn 
with_use_tree_list(&self, use_tree_list: ast::UseTreeList) -> ast::UseTree { - if let Some(old) = self.use_tree_list() { - return self.replace_descendant(old, use_tree_list); - } - self.clone() - } - - #[must_use] - pub fn split_prefix(&self, prefix: &ast::Path) -> ast::UseTree { - let suffix = match split_path_prefix(&prefix) { - Some(it) => it, - None => return self.clone(), - }; - let use_tree = make::use_tree( - suffix, - self.use_tree_list(), - self.rename(), - self.star_token().is_some(), - ); - let nested = make::use_tree_list(iter::once(use_tree)); - return make::use_tree(prefix.clone(), Some(nested), None, false); - - fn split_path_prefix(prefix: &ast::Path) -> Option { - let parent = prefix.parent_path()?; - let segment = parent.segment()?; - if algo::has_errors(segment.syntax()) { - return None; - } - let mut res = make::path_unqualified(segment); - for p in iter::successors(parent.parent_path(), |it| it.parent_path()) { - res = make::path_qualified(res, p.segment()?); - } - Some(res) - } - } - - pub fn remove(&self) -> SyntaxRewriter<'static> { - let mut res = SyntaxRewriter::default(); - res.delete(self.syntax()); - for &dir in [Direction::Next, Direction::Prev].iter() { - if let Some(nb) = neighbor(self, dir) { - self.syntax() - .siblings_with_tokens(dir) - .skip(1) - .take_while(|it| it.as_node() != Some(nb.syntax())) - .for_each(|el| res.delete(&el)); - return res; - } - } - res - } -} - -impl ast::MatchArmList { - #[must_use] - pub fn append_arms(&self, items: impl IntoIterator) -> ast::MatchArmList { - let mut res = self.clone(); - res = res.strip_if_only_whitespace(); - if !res.syntax().text().contains_char('\n') { - res = make_multiline(res); - } - items.into_iter().for_each(|it| res = res.append_arm(it)); - res - } - - fn strip_if_only_whitespace(&self) -> ast::MatchArmList { - let mut iter = self.syntax().children_with_tokens().skip_while(|it| it.kind() != T!['{']); - iter.next(); // Eat the curly - let mut inner = iter.take_while(|it| 
it.kind() != T!['}']); - if !inner.clone().all(|it| it.kind() == WHITESPACE) { - return self.clone(); - } - let start = match inner.next() { - Some(s) => s, - None => return self.clone(), - }; - let end = match inner.last() { - Some(s) => s, - None => start.clone(), - }; - self.replace_children(start..=end, &mut iter::empty()) - } - - #[must_use] - pub fn remove_placeholder(&self) -> ast::MatchArmList { - let placeholder = - self.arms().find(|arm| matches!(arm.pat(), Some(ast::Pat::WildcardPat(_)))); - if let Some(placeholder) = placeholder { - self.remove_arm(&placeholder) - } else { - self.clone() - } - } - - #[must_use] - fn remove_arm(&self, arm: &ast::MatchArm) -> ast::MatchArmList { - let start = arm.syntax().clone(); - let end = if let Some(comma) = start - .siblings_with_tokens(Direction::Next) - .skip(1) - .skip_while(|it| it.kind().is_trivia()) - .next() - .filter(|it| it.kind() == T![,]) - { - comma - } else { - start.clone().into() - }; - self.replace_children(start.into()..=end, None) - } - - #[must_use] - pub fn append_arm(&self, item: ast::MatchArm) -> ast::MatchArmList { - let r_curly = match self.syntax().children_with_tokens().find(|it| it.kind() == T!['}']) { - Some(t) => t, - None => return self.clone(), - }; - let position = InsertPosition::Before(r_curly.into()); - let arm_ws = tokens::WsBuilder::new(" "); - let match_indent = &leading_indent(self.syntax()).unwrap_or_default(); - let match_ws = tokens::WsBuilder::new(&format!("\n{}", match_indent)); - let to_insert: ArrayVec<[SyntaxElement; 3]> = - [arm_ws.ws().into(), item.syntax().clone().into(), match_ws.ws().into()].into(); - self.insert_children(position, to_insert) - } -} - -#[must_use] -pub fn remove_attrs_and_docs(node: &N) -> N { - N::cast(remove_attrs_and_docs_inner(node.syntax().clone())).unwrap() -} - -fn remove_attrs_and_docs_inner(mut node: SyntaxNode) -> SyntaxNode { - while let Some(start) = - node.children_with_tokens().find(|it| it.kind() == ATTR || it.kind() == COMMENT) - { 
- let end = match &start.next_sibling_or_token() { - Some(el) if el.kind() == WHITESPACE => el.clone(), - Some(_) | None => start.clone(), - }; - node = algo::replace_children(&node, start..=end, &mut iter::empty()); - } - node -} - -#[derive(Debug, Clone, Copy)] -pub struct IndentLevel(pub u8); - -impl From for IndentLevel { - fn from(level: u8) -> IndentLevel { - IndentLevel(level) - } -} - -impl fmt::Display for IndentLevel { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let spaces = " "; - let buf; - let len = self.0 as usize * 4; - let indent = if len <= spaces.len() { - &spaces[..len] - } else { - buf = iter::repeat(' ').take(len).collect::(); - &buf - }; - fmt::Display::fmt(indent, f) - } -} - -impl ops::Add for IndentLevel { - type Output = IndentLevel; - fn add(self, rhs: u8) -> IndentLevel { - IndentLevel(self.0 + rhs) - } -} - -impl IndentLevel { - pub fn from_node(node: &SyntaxNode) -> IndentLevel { - let first_token = match node.first_token() { - Some(it) => it, - None => return IndentLevel(0), - }; - for ws in prev_tokens(first_token).filter_map(ast::Whitespace::cast) { - let text = ws.syntax().text(); - if let Some(pos) = text.rfind('\n') { - let level = text[pos + 1..].chars().count() / 4; - return IndentLevel(level as u8); - } - } - IndentLevel(0) - } - - /// XXX: this intentionally doesn't change the indent of the very first token. - /// Ie, in something like - /// ``` - /// fn foo() { - /// 92 - /// } - /// ``` - /// if you indent the block, the `{` token would stay put. 
- fn increase_indent(self, node: SyntaxNode) -> SyntaxNode { - let mut rewriter = SyntaxRewriter::default(); - node.descendants_with_tokens() - .filter_map(|el| el.into_token()) - .filter_map(ast::Whitespace::cast) - .filter(|ws| { - let text = ws.syntax().text(); - text.contains('\n') - }) - .for_each(|ws| { - let new_ws = make::tokens::whitespace(&format!("{}{}", ws.syntax(), self,)); - rewriter.replace(ws.syntax(), &new_ws) - }); - rewriter.rewrite(&node) - } - - fn decrease_indent(self, node: SyntaxNode) -> SyntaxNode { - let mut rewriter = SyntaxRewriter::default(); - node.descendants_with_tokens() - .filter_map(|el| el.into_token()) - .filter_map(ast::Whitespace::cast) - .filter(|ws| { - let text = ws.syntax().text(); - text.contains('\n') - }) - .for_each(|ws| { - let new_ws = make::tokens::whitespace( - &ws.syntax().text().replace(&format!("\n{}", self), "\n"), - ); - rewriter.replace(ws.syntax(), &new_ws) - }); - rewriter.rewrite(&node) - } -} - -// FIXME: replace usages with IndentLevel above -fn leading_indent(node: &SyntaxNode) -> Option { - for token in prev_tokens(node.first_token()?) 
{ - if let Some(ws) = ast::Whitespace::cast(token.clone()) { - let ws_text = ws.text(); - if let Some(pos) = ws_text.rfind('\n') { - return Some(ws_text[pos + 1..].into()); - } - } - if token.text().contains('\n') { - break; - } - } - None -} - -fn prev_tokens(token: SyntaxToken) -> impl Iterator { - iter::successors(Some(token), |token| token.prev_token()) -} - -pub trait AstNodeEdit: AstNode + Clone + Sized { - #[must_use] - fn insert_children( - &self, - position: InsertPosition, - to_insert: impl IntoIterator, - ) -> Self { - let new_syntax = algo::insert_children(self.syntax(), position, to_insert); - Self::cast(new_syntax).unwrap() - } - - #[must_use] - fn replace_children( - &self, - to_replace: RangeInclusive, - to_insert: impl IntoIterator, - ) -> Self { - let new_syntax = algo::replace_children(self.syntax(), to_replace, to_insert); - Self::cast(new_syntax).unwrap() - } - - #[must_use] - fn replace_descendant(&self, old: D, new: D) -> Self { - self.replace_descendants(iter::once((old, new))) - } - - #[must_use] - fn replace_descendants( - &self, - replacement_map: impl IntoIterator, - ) -> Self { - let mut rewriter = SyntaxRewriter::default(); - for (from, to) in replacement_map { - rewriter.replace(from.syntax(), to.syntax()) - } - rewriter.rewrite_ast(self) - } - #[must_use] - fn indent(&self, level: IndentLevel) -> Self { - Self::cast(level.increase_indent(self.syntax().clone())).unwrap() - } - #[must_use] - fn dedent(&self, level: IndentLevel) -> Self { - Self::cast(level.decrease_indent(self.syntax().clone())).unwrap() - } - #[must_use] - fn reset_indent(&self) -> Self { - let level = IndentLevel::from_node(self.syntax()); - self.dedent(level) - } -} - -impl AstNodeEdit for N {} - -fn single_node(element: impl Into) -> RangeInclusive { - let element = element.into(); - element.clone()..=element -} - -#[test] -fn test_increase_indent() { - let arm_list = { - let arm = make::match_arm(iter::once(make::wildcard_pat().into()), make::expr_unit()); - 
make::match_arm_list(vec![arm.clone(), arm]) - }; - assert_eq!( - arm_list.syntax().to_string(), - "{ - _ => (), - _ => (), -}" - ); - let indented = arm_list.indent(IndentLevel(2)); - assert_eq!( - indented.syntax().to_string(), - "{ - _ => (), - _ => (), - }" - ); -} diff --git a/crates/ra_syntax/src/ast/make.rs b/crates/ra_syntax/src/ast/make.rs deleted file mode 100644 index 254a37fe3e..0000000000 --- a/crates/ra_syntax/src/ast/make.rs +++ /dev/null @@ -1,392 +0,0 @@ -//! This module contains free-standing functions for creating AST fragments out -//! of smaller pieces. -//! -//! Note that all functions here intended to be stupid constructors, which just -//! assemble a finish node from immediate children. If you want to do something -//! smarter than that, it probably doesn't belong in this module. -use itertools::Itertools; -use stdx::format_to; - -use crate::{ast, AstNode, SourceFile, SyntaxKind, SyntaxNode, SyntaxToken}; - -pub fn name(text: &str) -> ast::Name { - ast_from_text(&format!("mod {};", text)) -} - -pub fn name_ref(text: &str) -> ast::NameRef { - ast_from_text(&format!("fn f() {{ {}; }}", text)) -} - -pub fn ty(text: &str) -> ast::Type { - ast_from_text(&format!("impl {} for D {{}};", text)) -} - -pub fn path_segment(name_ref: ast::NameRef) -> ast::PathSegment { - ast_from_text(&format!("use {};", name_ref)) -} -pub fn path_unqualified(segment: ast::PathSegment) -> ast::Path { - path_from_text(&format!("use {}", segment)) -} -pub fn path_qualified(qual: ast::Path, segment: ast::PathSegment) -> ast::Path { - path_from_text(&format!("{}::{}", qual, segment)) -} -pub fn path_from_text(text: &str) -> ast::Path { - ast_from_text(text) -} - -pub fn use_tree( - path: ast::Path, - use_tree_list: Option, - alias: Option, - add_star: bool, -) -> ast::UseTree { - let mut buf = "use ".to_string(); - buf += &path.syntax().to_string(); - if let Some(use_tree_list) = use_tree_list { - format_to!(buf, "::{}", use_tree_list); - } - if add_star { - buf += "::*"; - 
} - - if let Some(alias) = alias { - format_to!(buf, " {}", alias); - } - ast_from_text(&buf) -} - -pub fn use_tree_list(use_trees: impl IntoIterator) -> ast::UseTreeList { - let use_trees = use_trees.into_iter().map(|it| it.syntax().clone()).join(", "); - ast_from_text(&format!("use {{{}}};", use_trees)) -} - -pub fn use_(use_tree: ast::UseTree) -> ast::Use { - ast_from_text(&format!("use {};", use_tree)) -} - -pub fn record_expr_field(name: ast::NameRef, expr: Option) -> ast::RecordExprField { - return match expr { - Some(expr) => from_text(&format!("{}: {}", name, expr)), - None => from_text(&name.to_string()), - }; - - fn from_text(text: &str) -> ast::RecordExprField { - ast_from_text(&format!("fn f() {{ S {{ {}, }} }}", text)) - } -} - -pub fn record_field(name: ast::NameRef, ty: ast::Type) -> ast::RecordField { - ast_from_text(&format!("struct S {{ {}: {}, }}", name, ty)) -} - -pub fn block_expr( - stmts: impl IntoIterator, - tail_expr: Option, -) -> ast::BlockExpr { - let mut buf = "{\n".to_string(); - for stmt in stmts.into_iter() { - format_to!(buf, " {}\n", stmt); - } - if let Some(tail_expr) = tail_expr { - format_to!(buf, " {}\n", tail_expr) - } - buf += "}"; - ast_from_text(&format!("fn f() {}", buf)) -} - -pub fn expr_unit() -> ast::Expr { - expr_from_text("()") -} -pub fn expr_empty_block() -> ast::Expr { - expr_from_text("{}") -} -pub fn expr_unimplemented() -> ast::Expr { - expr_from_text("unimplemented!()") -} -pub fn expr_unreachable() -> ast::Expr { - expr_from_text("unreachable!()") -} -pub fn expr_todo() -> ast::Expr { - expr_from_text("todo!()") -} -pub fn expr_path(path: ast::Path) -> ast::Expr { - expr_from_text(&path.to_string()) -} -pub fn expr_continue() -> ast::Expr { - expr_from_text("continue") -} -pub fn expr_break() -> ast::Expr { - expr_from_text("break") -} -pub fn expr_return() -> ast::Expr { - expr_from_text("return") -} -pub fn expr_match(expr: ast::Expr, match_arm_list: ast::MatchArmList) -> ast::Expr { - 
expr_from_text(&format!("match {} {}", expr, match_arm_list)) -} -pub fn expr_if(condition: ast::Condition, then_branch: ast::BlockExpr) -> ast::Expr { - expr_from_text(&format!("if {} {}", condition, then_branch)) -} -pub fn expr_prefix(op: SyntaxKind, expr: ast::Expr) -> ast::Expr { - let token = token(op); - expr_from_text(&format!("{}{}", token, expr)) -} -fn expr_from_text(text: &str) -> ast::Expr { - ast_from_text(&format!("const C: () = {};", text)) -} - -pub fn try_expr_from_text(text: &str) -> Option { - try_ast_from_text(&format!("const C: () = {};", text)) -} - -pub fn condition(expr: ast::Expr, pattern: Option) -> ast::Condition { - match pattern { - None => ast_from_text(&format!("const _: () = while {} {{}};", expr)), - Some(pattern) => { - ast_from_text(&format!("const _: () = while let {} = {} {{}};", pattern, expr)) - } - } -} - -pub fn ident_pat(name: ast::Name) -> ast::IdentPat { - return from_text(name.text()); - - fn from_text(text: &str) -> ast::IdentPat { - ast_from_text(&format!("fn f({}: ())", text)) - } -} - -pub fn wildcard_pat() -> ast::WildcardPat { - return from_text("_"); - - fn from_text(text: &str) -> ast::WildcardPat { - ast_from_text(&format!("fn f({}: ())", text)) - } -} - -/// Creates a tuple of patterns from an interator of patterns. 
-/// -/// Invariant: `pats` must be length > 1 -/// -/// FIXME handle `pats` length == 1 -pub fn tuple_pat(pats: impl IntoIterator) -> ast::TuplePat { - let pats_str = pats.into_iter().map(|p| p.to_string()).join(", "); - return from_text(&format!("({})", pats_str)); - - fn from_text(text: &str) -> ast::TuplePat { - ast_from_text(&format!("fn f({}: ())", text)) - } -} - -pub fn tuple_struct_pat( - path: ast::Path, - pats: impl IntoIterator, -) -> ast::TupleStructPat { - let pats_str = pats.into_iter().join(", "); - return from_text(&format!("{}({})", path, pats_str)); - - fn from_text(text: &str) -> ast::TupleStructPat { - ast_from_text(&format!("fn f({}: ())", text)) - } -} - -pub fn record_pat(path: ast::Path, pats: impl IntoIterator) -> ast::RecordPat { - let pats_str = pats.into_iter().join(", "); - return from_text(&format!("{} {{ {} }}", path, pats_str)); - - fn from_text(text: &str) -> ast::RecordPat { - ast_from_text(&format!("fn f({}: ())", text)) - } -} - -/// Returns a `BindPat` if the path has just one segment, a `PathPat` otherwise. 
-pub fn path_pat(path: ast::Path) -> ast::Pat { - return from_text(&path.to_string()); - fn from_text(text: &str) -> ast::Pat { - ast_from_text(&format!("fn f({}: ())", text)) - } -} - -pub fn match_arm(pats: impl IntoIterator, expr: ast::Expr) -> ast::MatchArm { - let pats_str = pats.into_iter().join(" | "); - return from_text(&format!("{} => {}", pats_str, expr)); - - fn from_text(text: &str) -> ast::MatchArm { - ast_from_text(&format!("fn f() {{ match () {{{}}} }}", text)) - } -} - -pub fn match_arm_list(arms: impl IntoIterator) -> ast::MatchArmList { - let arms_str = arms - .into_iter() - .map(|arm| { - let needs_comma = arm.expr().map_or(true, |it| !it.is_block_like()); - let comma = if needs_comma { "," } else { "" }; - format!(" {}{}\n", arm.syntax(), comma) - }) - .collect::(); - return from_text(&arms_str); - - fn from_text(text: &str) -> ast::MatchArmList { - ast_from_text(&format!("fn f() {{ match () {{\n{}}} }}", text)) - } -} - -pub fn where_pred( - path: ast::Path, - bounds: impl IntoIterator, -) -> ast::WherePred { - let bounds = bounds.into_iter().join(" + "); - return from_text(&format!("{}: {}", path, bounds)); - - fn from_text(text: &str) -> ast::WherePred { - ast_from_text(&format!("fn f() where {} {{ }}", text)) - } -} - -pub fn where_clause(preds: impl IntoIterator) -> ast::WhereClause { - let preds = preds.into_iter().join(", "); - return from_text(preds.as_str()); - - fn from_text(text: &str) -> ast::WhereClause { - ast_from_text(&format!("fn f() where {} {{ }}", text)) - } -} - -pub fn let_stmt(pattern: ast::Pat, initializer: Option) -> ast::LetStmt { - let text = match initializer { - Some(it) => format!("let {} = {};", pattern, it), - None => format!("let {};", pattern), - }; - ast_from_text(&format!("fn f() {{ {} }}", text)) -} -pub fn expr_stmt(expr: ast::Expr) -> ast::ExprStmt { - let semi = if expr.is_block_like() { "" } else { ";" }; - ast_from_text(&format!("fn f() {{ {}{} (); }}", expr, semi)) -} - -pub fn token(kind: SyntaxKind) 
-> SyntaxToken { - tokens::SOURCE_FILE - .tree() - .syntax() - .descendants_with_tokens() - .filter_map(|it| it.into_token()) - .find(|it| it.kind() == kind) - .unwrap_or_else(|| panic!("unhandled token: {:?}", kind)) -} - -pub fn param(name: String, ty: String) -> ast::Param { - ast_from_text(&format!("fn f({}: {}) {{ }}", name, ty)) -} - -pub fn param_list(pats: impl IntoIterator) -> ast::ParamList { - let args = pats.into_iter().join(", "); - ast_from_text(&format!("fn f({}) {{ }}", args)) -} - -pub fn visibility_pub_crate() -> ast::Visibility { - ast_from_text("pub(crate) struct S") -} - -pub fn fn_( - visibility: Option, - fn_name: ast::Name, - type_params: Option, - params: ast::ParamList, - body: ast::BlockExpr, -) -> ast::Fn { - let type_params = - if let Some(type_params) = type_params { format!("<{}>", type_params) } else { "".into() }; - let visibility = match visibility { - None => String::new(), - Some(it) => format!("{} ", it), - }; - ast_from_text(&format!("{}fn {}{}{} {}", visibility, fn_name, type_params, params, body)) -} - -fn ast_from_text(text: &str) -> N { - let parse = SourceFile::parse(text); - let node = match parse.tree().syntax().descendants().find_map(N::cast) { - Some(it) => it, - None => { - panic!("Failed to make ast node `{}` from text {}", std::any::type_name::(), text) - } - }; - let node = node.syntax().clone(); - let node = unroot(node); - let node = N::cast(node).unwrap(); - assert_eq!(node.syntax().text_range().start(), 0.into()); - node -} - -fn try_ast_from_text(text: &str) -> Option { - let parse = SourceFile::parse(text); - let node = parse.tree().syntax().descendants().find_map(N::cast)?; - let node = node.syntax().clone(); - let node = unroot(node); - let node = N::cast(node).unwrap(); - assert_eq!(node.syntax().text_range().start(), 0.into()); - Some(node) -} - -fn unroot(n: SyntaxNode) -> SyntaxNode { - SyntaxNode::new_root(n.green().clone()) -} - -pub mod tokens { - use once_cell::sync::Lazy; - - use crate::{ast, 
AstNode, Parse, SourceFile, SyntaxKind::*, SyntaxToken}; - - pub(super) static SOURCE_FILE: Lazy> = - Lazy::new(|| SourceFile::parse("const C: <()>::Item = (1 != 1, 2 == 2, !true)\n;")); - - pub fn single_space() -> SyntaxToken { - SOURCE_FILE - .tree() - .syntax() - .descendants_with_tokens() - .filter_map(|it| it.into_token()) - .find(|it| it.kind() == WHITESPACE && it.text().as_str() == " ") - .unwrap() - } - - pub fn whitespace(text: &str) -> SyntaxToken { - assert!(text.trim().is_empty()); - let sf = SourceFile::parse(text).ok().unwrap(); - sf.syntax().first_child_or_token().unwrap().into_token().unwrap() - } - - pub fn doc_comment(text: &str) -> SyntaxToken { - assert!(!text.trim().is_empty()); - let sf = SourceFile::parse(text).ok().unwrap(); - sf.syntax().first_child_or_token().unwrap().into_token().unwrap() - } - - pub fn literal(text: &str) -> SyntaxToken { - assert_eq!(text.trim(), text); - let lit: ast::Literal = super::ast_from_text(&format!("fn f() {{ let _ = {}; }}", text)); - lit.syntax().first_child_or_token().unwrap().into_token().unwrap() - } - - pub fn single_newline() -> SyntaxToken { - SOURCE_FILE - .tree() - .syntax() - .descendants_with_tokens() - .filter_map(|it| it.into_token()) - .find(|it| it.kind() == WHITESPACE && it.text().as_str() == "\n") - .unwrap() - } - - pub struct WsBuilder(SourceFile); - - impl WsBuilder { - pub fn new(text: &str) -> WsBuilder { - WsBuilder(SourceFile::parse(text).ok().unwrap()) - } - pub fn ws(&self) -> SyntaxToken { - self.0.syntax().first_child_or_token().unwrap().into_token().unwrap() - } - } -} diff --git a/crates/ra_syntax/src/ast/node_ext.rs b/crates/ra_syntax/src/ast/node_ext.rs deleted file mode 100644 index 733e978772..0000000000 --- a/crates/ra_syntax/src/ast/node_ext.rs +++ /dev/null @@ -1,485 +0,0 @@ -//! Various extension methods to ast Nodes, which are hard to code-generate. -//! Extensions for various expressions live in a sibling `expr_extensions` module. 
- -use std::fmt; - -use itertools::Itertools; -use ra_parser::SyntaxKind; - -use crate::{ - ast::{self, support, AstNode, NameOwner, SyntaxNode}, - SmolStr, SyntaxElement, SyntaxToken, T, -}; - -impl ast::Name { - pub fn text(&self) -> &SmolStr { - text_of_first_token(self.syntax()) - } -} - -impl ast::NameRef { - pub fn text(&self) -> &SmolStr { - text_of_first_token(self.syntax()) - } - - pub fn as_tuple_field(&self) -> Option { - self.text().parse().ok() - } -} - -fn text_of_first_token(node: &SyntaxNode) -> &SmolStr { - node.green().children().next().and_then(|it| it.into_token()).unwrap().text() -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum AttrKind { - Inner, - Outer, -} - -impl ast::Attr { - pub fn as_simple_atom(&self) -> Option { - if self.eq_token().is_some() || self.token_tree().is_some() { - return None; - } - self.simple_name() - } - - pub fn as_simple_call(&self) -> Option<(SmolStr, ast::TokenTree)> { - let tt = self.token_tree()?; - Some((self.simple_name()?, tt)) - } - - pub fn as_simple_key_value(&self) -> Option<(SmolStr, SmolStr)> { - let lit = self.literal()?; - let key = self.simple_name()?; - // FIXME: escape? raw string? 
- let value = lit.syntax().first_token()?.text().trim_matches('"').into(); - Some((key, value)) - } - - pub fn simple_name(&self) -> Option { - let path = self.path()?; - match (path.segment(), path.qualifier()) { - (Some(segment), None) => Some(segment.syntax().first_token()?.text().clone()), - _ => None, - } - } - - pub fn kind(&self) -> AttrKind { - let first_token = self.syntax().first_token(); - let first_token_kind = first_token.as_ref().map(SyntaxToken::kind); - let second_token_kind = - first_token.and_then(|token| token.next_token()).as_ref().map(SyntaxToken::kind); - - match (first_token_kind, second_token_kind) { - (Some(SyntaxKind::POUND), Some(T![!])) => AttrKind::Inner, - _ => AttrKind::Outer, - } - } -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum PathSegmentKind { - Name(ast::NameRef), - Type { type_ref: Option, trait_ref: Option }, - SelfKw, - SuperKw, - CrateKw, -} - -impl ast::PathSegment { - pub fn parent_path(&self) -> ast::Path { - self.syntax() - .parent() - .and_then(ast::Path::cast) - .expect("segments are always nested in paths") - } - - pub fn kind(&self) -> Option { - let res = if let Some(name_ref) = self.name_ref() { - PathSegmentKind::Name(name_ref) - } else { - match self.syntax().first_child_or_token()?.kind() { - T![self] => PathSegmentKind::SelfKw, - T![super] => PathSegmentKind::SuperKw, - T![crate] => PathSegmentKind::CrateKw, - T![<] => { - // or - // T is any TypeRef, Trait has to be a PathType - let mut type_refs = - self.syntax().children().filter(|node| ast::Type::can_cast(node.kind())); - let type_ref = type_refs.next().and_then(ast::Type::cast); - let trait_ref = type_refs.next().and_then(ast::PathType::cast); - PathSegmentKind::Type { type_ref, trait_ref } - } - _ => return None, - } - }; - Some(res) - } -} - -impl ast::Path { - pub fn parent_path(&self) -> Option { - self.syntax().parent().and_then(ast::Path::cast) - } -} - -impl ast::UseTreeList { - pub fn parent_use_tree(&self) -> ast::UseTree { - self.syntax() 
- .parent() - .and_then(ast::UseTree::cast) - .expect("UseTreeLists are always nested in UseTrees") - } -} - -impl ast::Impl { - pub fn self_ty(&self) -> Option { - match self.target() { - (Some(t), None) | (_, Some(t)) => Some(t), - _ => None, - } - } - - pub fn trait_(&self) -> Option { - match self.target() { - (Some(t), Some(_)) => Some(t), - _ => None, - } - } - - fn target(&self) -> (Option, Option) { - let mut types = support::children(self.syntax()); - let first = types.next(); - let second = types.next(); - (first, second) - } -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum StructKind { - Record(ast::RecordFieldList), - Tuple(ast::TupleFieldList), - Unit, -} - -impl StructKind { - fn from_node(node: &N) -> StructKind { - if let Some(nfdl) = support::child::(node.syntax()) { - StructKind::Record(nfdl) - } else if let Some(pfl) = support::child::(node.syntax()) { - StructKind::Tuple(pfl) - } else { - StructKind::Unit - } - } -} - -impl ast::Struct { - pub fn kind(&self) -> StructKind { - StructKind::from_node(self) - } -} - -impl ast::RecordExprField { - pub fn for_field_name(field_name: &ast::NameRef) -> Option { - let candidate = - field_name.syntax().parent().and_then(ast::RecordExprField::cast).or_else(|| { - field_name.syntax().ancestors().nth(4).and_then(ast::RecordExprField::cast) - })?; - if candidate.field_name().as_ref() == Some(field_name) { - Some(candidate) - } else { - None - } - } - - /// Deals with field init shorthand - pub fn field_name(&self) -> Option { - if let Some(name_ref) = self.name_ref() { - return Some(name_ref); - } - if let Some(ast::Expr::PathExpr(expr)) = self.expr() { - let path = expr.path()?; - let segment = path.segment()?; - let name_ref = segment.name_ref()?; - if path.qualifier().is_none() { - return Some(name_ref); - } - } - None - } -} - -pub enum NameOrNameRef { - Name(ast::Name), - NameRef(ast::NameRef), -} - -impl fmt::Display for NameOrNameRef { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - 
match self { - NameOrNameRef::Name(it) => fmt::Display::fmt(it, f), - NameOrNameRef::NameRef(it) => fmt::Display::fmt(it, f), - } - } -} - -impl ast::RecordPatField { - /// Deals with field init shorthand - pub fn field_name(&self) -> Option { - if let Some(name_ref) = self.name_ref() { - return Some(NameOrNameRef::NameRef(name_ref)); - } - if let Some(ast::Pat::IdentPat(pat)) = self.pat() { - let name = pat.name()?; - return Some(NameOrNameRef::Name(name)); - } - None - } -} - -impl ast::Variant { - pub fn parent_enum(&self) -> ast::Enum { - self.syntax() - .parent() - .and_then(|it| it.parent()) - .and_then(ast::Enum::cast) - .expect("EnumVariants are always nested in Enums") - } - pub fn kind(&self) -> StructKind { - StructKind::from_node(self) - } -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum FieldKind { - Name(ast::NameRef), - Index(SyntaxToken), -} - -impl ast::FieldExpr { - pub fn index_token(&self) -> Option { - self.syntax - .children_with_tokens() - // FIXME: Accepting floats here to reject them in validation later - .find(|c| c.kind() == SyntaxKind::INT_NUMBER || c.kind() == SyntaxKind::FLOAT_NUMBER) - .as_ref() - .and_then(SyntaxElement::as_token) - .cloned() - } - - pub fn field_access(&self) -> Option { - if let Some(nr) = self.name_ref() { - Some(FieldKind::Name(nr)) - } else if let Some(tok) = self.index_token() { - Some(FieldKind::Index(tok)) - } else { - None - } - } -} - -pub struct SlicePatComponents { - pub prefix: Vec, - pub slice: Option, - pub suffix: Vec, -} - -impl ast::SlicePat { - pub fn components(&self) -> SlicePatComponents { - let mut args = self.pats().peekable(); - let prefix = args - .peeking_take_while(|p| match p { - ast::Pat::RestPat(_) => false, - ast::Pat::IdentPat(bp) => match bp.pat() { - Some(ast::Pat::RestPat(_)) => false, - _ => true, - }, - ast::Pat::RefPat(rp) => match rp.pat() { - Some(ast::Pat::RestPat(_)) => false, - Some(ast::Pat::IdentPat(bp)) => match bp.pat() { - Some(ast::Pat::RestPat(_)) => false, - _ 
=> true, - }, - _ => true, - }, - _ => true, - }) - .collect(); - let slice = args.next(); - let suffix = args.collect(); - - SlicePatComponents { prefix, slice, suffix } - } -} - -#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] -pub enum SelfParamKind { - /// self - Owned, - /// &self - Ref, - /// &mut self - MutRef, -} - -impl ast::SelfParam { - pub fn kind(&self) -> SelfParamKind { - if self.amp_token().is_some() { - if self.mut_token().is_some() { - SelfParamKind::MutRef - } else { - SelfParamKind::Ref - } - } else { - SelfParamKind::Owned - } - } -} - -#[derive(Clone, Debug, PartialEq, Eq, Hash)] -pub enum TypeBoundKind { - /// Trait - PathType(ast::PathType), - /// for<'a> ... - ForType(ast::ForType), - /// 'a - Lifetime(SyntaxToken), -} - -impl ast::TypeBound { - pub fn kind(&self) -> TypeBoundKind { - if let Some(path_type) = support::children(self.syntax()).next() { - TypeBoundKind::PathType(path_type) - } else if let Some(for_type) = support::children(self.syntax()).next() { - TypeBoundKind::ForType(for_type) - } else if let Some(lifetime) = self.lifetime_token() { - TypeBoundKind::Lifetime(lifetime) - } else { - unreachable!() - } - } -} - -pub enum VisibilityKind { - In(ast::Path), - PubCrate, - PubSuper, - PubSelf, - Pub, -} - -impl ast::Visibility { - pub fn kind(&self) -> VisibilityKind { - if let Some(path) = support::children(self.syntax()).next() { - VisibilityKind::In(path) - } else if self.crate_token().is_some() { - VisibilityKind::PubCrate - } else if self.super_token().is_some() { - VisibilityKind::PubSuper - } else if self.self_token().is_some() { - VisibilityKind::PubSelf - } else { - VisibilityKind::Pub - } - } -} - -impl ast::MacroCall { - pub fn is_macro_rules(&self) -> Option { - let name_ref = self.path()?.segment()?.name_ref()?; - if name_ref.text() == "macro_rules" { - self.name() - } else { - None - } - } - - pub fn is_bang(&self) -> bool { - self.is_macro_rules().is_none() - } -} - -impl ast::LifetimeParam { - pub fn 
lifetime_bounds(&self) -> impl Iterator { - self.syntax() - .children_with_tokens() - .filter_map(|it| it.into_token()) - .skip_while(|x| x.kind() != T![:]) - .filter(|it| it.kind() == T![lifetime]) - } -} - -impl ast::RangePat { - pub fn start(&self) -> Option { - self.syntax() - .children_with_tokens() - .take_while(|it| !(it.kind() == T![..] || it.kind() == T![..=])) - .filter_map(|it| it.into_node()) - .find_map(ast::Pat::cast) - } - - pub fn end(&self) -> Option { - self.syntax() - .children_with_tokens() - .skip_while(|it| !(it.kind() == T![..] || it.kind() == T![..=])) - .filter_map(|it| it.into_node()) - .find_map(ast::Pat::cast) - } -} - -impl ast::TokenTree { - pub fn left_delimiter_token(&self) -> Option { - self.syntax() - .first_child_or_token()? - .into_token() - .filter(|it| matches!(it.kind(), T!['{'] | T!['('] | T!['['])) - } - - pub fn right_delimiter_token(&self) -> Option { - self.syntax() - .last_child_or_token()? - .into_token() - .filter(|it| matches!(it.kind(), T!['}'] | T![')'] | T![']'])) - } -} - -impl ast::GenericParamList { - pub fn lifetime_params(&self) -> impl Iterator { - self.generic_params().filter_map(|param| match param { - ast::GenericParam::LifetimeParam(it) => Some(it), - ast::GenericParam::TypeParam(_) | ast::GenericParam::ConstParam(_) => None, - }) - } - pub fn type_params(&self) -> impl Iterator { - self.generic_params().filter_map(|param| match param { - ast::GenericParam::TypeParam(it) => Some(it), - ast::GenericParam::LifetimeParam(_) | ast::GenericParam::ConstParam(_) => None, - }) - } - pub fn const_params(&self) -> impl Iterator { - self.generic_params().filter_map(|param| match param { - ast::GenericParam::ConstParam(it) => Some(it), - ast::GenericParam::TypeParam(_) | ast::GenericParam::LifetimeParam(_) => None, - }) - } -} - -impl ast::DocCommentsOwner for ast::SourceFile {} -impl ast::DocCommentsOwner for ast::Fn {} -impl ast::DocCommentsOwner for ast::Struct {} -impl ast::DocCommentsOwner for ast::Union {} 
-impl ast::DocCommentsOwner for ast::RecordField {} -impl ast::DocCommentsOwner for ast::TupleField {} -impl ast::DocCommentsOwner for ast::Enum {} -impl ast::DocCommentsOwner for ast::Variant {} -impl ast::DocCommentsOwner for ast::Trait {} -impl ast::DocCommentsOwner for ast::Module {} -impl ast::DocCommentsOwner for ast::Static {} -impl ast::DocCommentsOwner for ast::Const {} -impl ast::DocCommentsOwner for ast::TypeAlias {} -impl ast::DocCommentsOwner for ast::Impl {} -impl ast::DocCommentsOwner for ast::MacroCall {} diff --git a/crates/ra_syntax/src/ast/traits.rs b/crates/ra_syntax/src/ast/traits.rs deleted file mode 100644 index 3a56b1674c..0000000000 --- a/crates/ra_syntax/src/ast/traits.rs +++ /dev/null @@ -1,142 +0,0 @@ -//! Various traits that are implemented by ast nodes. -//! -//! The implementations are usually trivial, and live in generated.rs -use stdx::SepBy; - -use crate::{ - ast::{self, support, AstChildren, AstNode, AstToken}, - syntax_node::SyntaxElementChildren, - SyntaxToken, T, -}; - -pub trait NameOwner: AstNode { - fn name(&self) -> Option { - support::child(self.syntax()) - } -} - -pub trait VisibilityOwner: AstNode { - fn visibility(&self) -> Option { - support::child(self.syntax()) - } -} - -pub trait LoopBodyOwner: AstNode { - fn loop_body(&self) -> Option { - support::child(self.syntax()) - } - - fn label(&self) -> Option { - support::child(self.syntax()) - } -} - -pub trait ArgListOwner: AstNode { - fn arg_list(&self) -> Option { - support::child(self.syntax()) - } -} - -pub trait ModuleItemOwner: AstNode { - fn items(&self) -> AstChildren { - support::children(self.syntax()) - } -} - -pub trait GenericParamsOwner: AstNode { - fn generic_param_list(&self) -> Option { - support::child(self.syntax()) - } - - fn where_clause(&self) -> Option { - support::child(self.syntax()) - } -} - -pub trait TypeBoundsOwner: AstNode { - fn type_bound_list(&self) -> Option { - support::child(self.syntax()) - } - - fn colon_token(&self) -> Option { - 
support::token(self.syntax(), T![:]) - } -} - -pub trait AttrsOwner: AstNode { - fn attrs(&self) -> AstChildren { - support::children(self.syntax()) - } - fn has_atom_attr(&self, atom: &str) -> bool { - self.attrs().filter_map(|x| x.as_simple_atom()).any(|x| x == atom) - } -} - -pub trait DocCommentsOwner: AstNode { - fn doc_comments(&self) -> CommentIter { - CommentIter { iter: self.syntax().children_with_tokens() } - } - - fn doc_comment_text(&self) -> Option { - self.doc_comments().doc_comment_text() - } -} - -impl CommentIter { - pub fn from_syntax_node(syntax_node: &ast::SyntaxNode) -> CommentIter { - CommentIter { iter: syntax_node.children_with_tokens() } - } - - /// Returns the textual content of a doc comment block as a single string. - /// That is, strips leading `///` (+ optional 1 character of whitespace), - /// trailing `*/`, trailing whitespace and then joins the lines. - pub fn doc_comment_text(self) -> Option { - let mut has_comments = false; - let docs = self - .filter(|comment| comment.kind().doc.is_some()) - .map(|comment| { - has_comments = true; - let prefix_len = comment.prefix().len(); - - let line: &str = comment.text().as_str(); - - // Determine if the prefix or prefix + 1 char is stripped - let pos = - if let Some(ws) = line.chars().nth(prefix_len).filter(|c| c.is_whitespace()) { - prefix_len + ws.len_utf8() - } else { - prefix_len - }; - - let end = if comment.kind().shape.is_block() && line.ends_with("*/") { - line.len() - 2 - } else { - line.len() - }; - - // Note that we do not trim the end of the line here - // since whitespace can have special meaning at the end - // of a line in markdown. 
- line[pos..end].to_owned() - }) - .sep_by("\n") - .to_string(); - - if has_comments { - Some(docs) - } else { - None - } - } -} - -pub struct CommentIter { - iter: SyntaxElementChildren, -} - -impl Iterator for CommentIter { - type Item = ast::Comment; - fn next(&mut self) -> Option { - self.iter.by_ref().find_map(|el| el.into_token().and_then(ast::Comment::cast)) - } -} diff --git a/crates/ra_syntax/src/fuzz.rs b/crates/ra_syntax/src/fuzz.rs deleted file mode 100644 index 39f9b12ab2..0000000000 --- a/crates/ra_syntax/src/fuzz.rs +++ /dev/null @@ -1,73 +0,0 @@ -//! FIXME: write short doc here - -use std::{ - convert::TryInto, - str::{self, FromStr}, -}; - -use ra_text_edit::Indel; - -use crate::{validation, AstNode, SourceFile, TextRange}; - -fn check_file_invariants(file: &SourceFile) { - let root = file.syntax(); - validation::validate_block_structure(root); -} - -pub fn check_parser(text: &str) { - let file = SourceFile::parse(text); - check_file_invariants(&file.tree()); -} - -#[derive(Debug, Clone)] -pub struct CheckReparse { - text: String, - edit: Indel, - edited_text: String, -} - -impl CheckReparse { - pub fn from_data(data: &[u8]) -> Option { - const PREFIX: &str = "fn main(){\n\t"; - const SUFFIX: &str = "\n}"; - - let data = str::from_utf8(data).ok()?; - let mut lines = data.lines(); - let delete_start = usize::from_str(lines.next()?).ok()? 
+ PREFIX.len(); - let delete_len = usize::from_str(lines.next()?).ok()?; - let insert = lines.next()?.to_string(); - let text = lines.collect::>().join("\n"); - let text = format!("{}{}{}", PREFIX, text, SUFFIX); - text.get(delete_start..delete_start.checked_add(delete_len)?)?; // make sure delete is a valid range - let delete = - TextRange::at(delete_start.try_into().unwrap(), delete_len.try_into().unwrap()); - let edited_text = - format!("{}{}{}", &text[..delete_start], &insert, &text[delete_start + delete_len..]); - let edit = Indel { delete, insert }; - Some(CheckReparse { text, edit, edited_text }) - } - - pub fn run(&self) { - let parse = SourceFile::parse(&self.text); - let new_parse = parse.reparse(&self.edit); - check_file_invariants(&new_parse.tree()); - assert_eq!(&new_parse.tree().syntax().text().to_string(), &self.edited_text); - let full_reparse = SourceFile::parse(&self.edited_text); - for (a, b) in - new_parse.tree().syntax().descendants().zip(full_reparse.tree().syntax().descendants()) - { - if (a.kind(), a.text_range()) != (b.kind(), b.text_range()) { - eprint!("original:\n{:#?}", parse.tree().syntax()); - eprint!("reparsed:\n{:#?}", new_parse.tree().syntax()); - eprint!("full reparse:\n{:#?}", full_reparse.tree().syntax()); - assert_eq!( - format!("{:?}", a), - format!("{:?}", b), - "different syntax tree produced by the full reparse" - ); - } - } - // FIXME - // assert_eq!(new_file.errors(), full_reparse.errors()); - } -} diff --git a/crates/ra_syntax/src/lib.rs b/crates/ra_syntax/src/lib.rs deleted file mode 100644 index 8a4d453862..0000000000 --- a/crates/ra_syntax/src/lib.rs +++ /dev/null @@ -1,388 +0,0 @@ -//! Syntax Tree library used throughout the rust analyzer. -//! -//! Properties: -//! - easy and fast incremental re-parsing -//! - graceful handling of errors -//! - full-fidelity representation (*any* text can be precisely represented as -//! a syntax tree) -//! -//! For more information, see the [RFC]. 
Current implementation is inspired by -//! the [Swift] one. -//! -//! The most interesting modules here are `syntax_node` (which defines concrete -//! syntax tree) and `ast` (which defines abstract syntax tree on top of the -//! CST). The actual parser live in a separate `ra_parser` crate, though the -//! lexer lives in this crate. -//! -//! See `api_walkthrough` test in this file for a quick API tour! -//! -//! [RFC]: -//! [Swift]: - -#[allow(unused)] -macro_rules! eprintln { - ($($tt:tt)*) => { stdx::eprintln!($($tt)*) }; -} - -mod syntax_node; -mod syntax_error; -mod parsing; -mod validation; -mod ptr; -#[cfg(test)] -mod tests; - -pub mod algo; -pub mod ast; -#[doc(hidden)] -pub mod fuzz; - -use std::{marker::PhantomData, sync::Arc}; - -use ra_text_edit::Indel; -use stdx::format_to; - -pub use crate::{ - algo::InsertPosition, - ast::{AstNode, AstToken}, - parsing::{lex_single_syntax_kind, lex_single_valid_syntax_kind, tokenize, Token}, - ptr::{AstPtr, SyntaxNodePtr}, - syntax_error::SyntaxError, - syntax_node::{ - Direction, GreenNode, NodeOrToken, SyntaxElement, SyntaxElementChildren, SyntaxNode, - SyntaxNodeChildren, SyntaxToken, SyntaxTreeBuilder, - }, -}; -pub use ra_parser::{SyntaxKind, T}; -pub use rowan::{SmolStr, SyntaxText, TextRange, TextSize, TokenAtOffset, WalkEvent}; - -/// `Parse` is the result of the parsing: a syntax tree and a collection of -/// errors. -/// -/// Note that we always produce a syntax tree, even for completely invalid -/// files. 
-#[derive(Debug, PartialEq, Eq)] -pub struct Parse { - green: GreenNode, - errors: Arc>, - _ty: PhantomData T>, -} - -impl Clone for Parse { - fn clone(&self) -> Parse { - Parse { green: self.green.clone(), errors: self.errors.clone(), _ty: PhantomData } - } -} - -impl Parse { - fn new(green: GreenNode, errors: Vec) -> Parse { - Parse { green, errors: Arc::new(errors), _ty: PhantomData } - } - - pub fn syntax_node(&self) -> SyntaxNode { - SyntaxNode::new_root(self.green.clone()) - } -} - -impl Parse { - pub fn to_syntax(self) -> Parse { - Parse { green: self.green, errors: self.errors, _ty: PhantomData } - } - - pub fn tree(&self) -> T { - T::cast(self.syntax_node()).unwrap() - } - - pub fn errors(&self) -> &[SyntaxError] { - &*self.errors - } - - pub fn ok(self) -> Result>> { - if self.errors.is_empty() { - Ok(self.tree()) - } else { - Err(self.errors) - } - } -} - -impl Parse { - pub fn cast(self) -> Option> { - if N::cast(self.syntax_node()).is_some() { - Some(Parse { green: self.green, errors: self.errors, _ty: PhantomData }) - } else { - None - } - } -} - -impl Parse { - pub fn debug_dump(&self) -> String { - let mut buf = format!("{:#?}", self.tree().syntax()); - for err in self.errors.iter() { - format_to!(buf, "error {:?}: {}\n", err.range(), err); - } - buf - } - - pub fn reparse(&self, indel: &Indel) -> Parse { - self.incremental_reparse(indel).unwrap_or_else(|| self.full_reparse(indel)) - } - - fn incremental_reparse(&self, indel: &Indel) -> Option> { - // FIXME: validation errors are not handled here - parsing::incremental_reparse(self.tree().syntax(), indel, self.errors.to_vec()).map( - |(green_node, errors, _reparsed_range)| Parse { - green: green_node, - errors: Arc::new(errors), - _ty: PhantomData, - }, - ) - } - - fn full_reparse(&self, indel: &Indel) -> Parse { - let mut text = self.tree().syntax().text().to_string(); - indel.apply(&mut text); - SourceFile::parse(&text) - } -} - -/// `SourceFile` represents a parse tree for a single Rust file. 
-pub use crate::ast::SourceFile; - -impl SourceFile { - pub fn parse(text: &str) -> Parse { - let (green, mut errors) = parsing::parse_text(text); - let root = SyntaxNode::new_root(green.clone()); - - if cfg!(debug_assertions) { - validation::validate_block_structure(&root); - } - - errors.extend(validation::validate(&root)); - - assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE); - Parse { green, errors: Arc::new(errors), _ty: PhantomData } - } -} - -impl ast::Path { - /// Returns `text`, parsed as a path, but only if it has no errors. - pub fn parse(text: &str) -> Result { - parsing::parse_text_fragment(text, ra_parser::FragmentKind::Path) - } -} - -impl ast::Pat { - /// Returns `text`, parsed as a pattern, but only if it has no errors. - pub fn parse(text: &str) -> Result { - parsing::parse_text_fragment(text, ra_parser::FragmentKind::Pattern) - } -} - -impl ast::Expr { - /// Returns `text`, parsed as an expression, but only if it has no errors. - pub fn parse(text: &str) -> Result { - parsing::parse_text_fragment(text, ra_parser::FragmentKind::Expr) - } -} - -impl ast::Item { - /// Returns `text`, parsed as an item, but only if it has no errors. - pub fn parse(text: &str) -> Result { - parsing::parse_text_fragment(text, ra_parser::FragmentKind::Item) - } -} - -impl ast::Type { - /// Returns `text`, parsed as an type reference, but only if it has no errors. - pub fn parse(text: &str) -> Result { - parsing::parse_text_fragment(text, ra_parser::FragmentKind::Type) - } -} - -/// Matches a `SyntaxNode` against an `ast` type. -/// -/// # Example: -/// -/// ```ignore -/// match_ast! { -/// match node { -/// ast::CallExpr(it) => { ... }, -/// ast::MethodCallExpr(it) => { ... }, -/// ast::MacroCall(it) => { ... }, -/// _ => None, -/// } -/// } -/// ``` -#[macro_export] -macro_rules! 
match_ast { - (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) }; - - (match ($node:expr) { - $( ast::$ast:ident($it:ident) => $res:expr, )* - _ => $catch_all:expr $(,)? - }) => {{ - $( if let Some($it) = ast::$ast::cast($node.clone()) { $res } else )* - { $catch_all } - }}; -} - -/// This test does not assert anything and instead just shows off the crate's -/// API. -#[test] -fn api_walkthrough() { - use ast::{ModuleItemOwner, NameOwner}; - - let source_code = " - fn foo() { - 1 + 1 - } - "; - // `SourceFile` is the main entry point. - // - // The `parse` method returns a `Parse` -- a pair of syntax tree and a list - // of errors. That is, syntax tree is constructed even in presence of errors. - let parse = SourceFile::parse(source_code); - assert!(parse.errors().is_empty()); - - // The `tree` method returns an owned syntax node of type `SourceFile`. - // Owned nodes are cheap: inside, they are `Rc` handles to the underling data. - let file: SourceFile = parse.tree(); - - // `SourceFile` is the root of the syntax tree. We can iterate file's items. - // Let's fetch the `foo` function. - let mut func = None; - for item in file.items() { - match item { - ast::Item::Fn(f) => func = Some(f), - _ => unreachable!(), - } - } - let func: ast::Fn = func.unwrap(); - - // Each AST node has a bunch of getters for children. All getters return - // `Option`s though, to account for incomplete code. Some getters are common - // for several kinds of node. In this case, a trait like `ast::NameOwner` - // usually exists. By convention, all ast types should be used with `ast::` - // qualifier. - let name: Option = func.name(); - let name = name.unwrap(); - assert_eq!(name.text(), "foo"); - - // Let's get the `1 + 1` expression! - let body: ast::BlockExpr = func.body().unwrap(); - let expr: ast::Expr = body.expr().unwrap(); - - // Enums are used to group related ast nodes together, and can be used for - // matching. 
However, because there are no public fields, it's possible to - // match only the top level enum: that is the price we pay for increased API - // flexibility - let bin_expr: &ast::BinExpr = match &expr { - ast::Expr::BinExpr(e) => e, - _ => unreachable!(), - }; - - // Besides the "typed" AST API, there's an untyped CST one as well. - // To switch from AST to CST, call `.syntax()` method: - let expr_syntax: &SyntaxNode = expr.syntax(); - - // Note how `expr` and `bin_expr` are in fact the same node underneath: - assert!(expr_syntax == bin_expr.syntax()); - - // To go from CST to AST, `AstNode::cast` function is used: - let _expr: ast::Expr = match ast::Expr::cast(expr_syntax.clone()) { - Some(e) => e, - None => unreachable!(), - }; - - // The two properties each syntax node has is a `SyntaxKind`: - assert_eq!(expr_syntax.kind(), SyntaxKind::BIN_EXPR); - - // And text range: - assert_eq!(expr_syntax.text_range(), TextRange::new(32.into(), 37.into())); - - // You can get node's text as a `SyntaxText` object, which will traverse the - // tree collecting token's text: - let text: SyntaxText = expr_syntax.text(); - assert_eq!(text.to_string(), "1 + 1"); - - // There's a bunch of traversal methods on `SyntaxNode`: - assert_eq!(expr_syntax.parent().as_ref(), Some(body.syntax())); - assert_eq!(body.syntax().first_child_or_token().map(|it| it.kind()), Some(T!['{'])); - assert_eq!( - expr_syntax.next_sibling_or_token().map(|it| it.kind()), - Some(SyntaxKind::WHITESPACE) - ); - - // As well as some iterator helpers: - let f = expr_syntax.ancestors().find_map(ast::Fn::cast); - assert_eq!(f, Some(func)); - assert!(expr_syntax.siblings_with_tokens(Direction::Next).any(|it| it.kind() == T!['}'])); - assert_eq!( - expr_syntax.descendants_with_tokens().count(), - 8, // 5 tokens `1`, ` `, `+`, ` `, `!` - // 2 child literal expressions: `1`, `1` - // 1 the node itself: `1 + 1` - ); - - // There's also a `preorder` method with a more fine-grained iteration control: - let mut buf = 
String::new(); - let mut indent = 0; - for event in expr_syntax.preorder_with_tokens() { - match event { - WalkEvent::Enter(node) => { - let text = match &node { - NodeOrToken::Node(it) => it.text().to_string(), - NodeOrToken::Token(it) => it.text().to_string(), - }; - format_to!(buf, "{:indent$}{:?} {:?}\n", " ", text, node.kind(), indent = indent); - indent += 2; - } - WalkEvent::Leave(_) => indent -= 2, - } - } - assert_eq!(indent, 0); - assert_eq!( - buf.trim(), - r#" -"1 + 1" BIN_EXPR - "1" LITERAL - "1" INT_NUMBER - " " WHITESPACE - "+" PLUS - " " WHITESPACE - "1" LITERAL - "1" INT_NUMBER -"# - .trim() - ); - - // To recursively process the tree, there are three approaches: - // 1. explicitly call getter methods on AST nodes. - // 2. use descendants and `AstNode::cast`. - // 3. use descendants and `match_ast!`. - // - // Here's how the first one looks like: - let exprs_cast: Vec = file - .syntax() - .descendants() - .filter_map(ast::Expr::cast) - .map(|expr| expr.syntax().text().to_string()) - .collect(); - - // An alternative is to use a macro. - let mut exprs_visit = Vec::new(); - for node in file.syntax().descendants() { - match_ast! { - match node { - ast::Expr(it) => { - let res = it.syntax().text().to_string(); - exprs_visit.push(res); - }, - _ => (), - } - } - } - assert_eq!(exprs_cast, exprs_visit); -} diff --git a/crates/ra_syntax/src/parsing.rs b/crates/ra_syntax/src/parsing.rs deleted file mode 100644 index 0ed3c20ef9..0000000000 --- a/crates/ra_syntax/src/parsing.rs +++ /dev/null @@ -1,59 +0,0 @@ -//! Lexing, bridging to ra_parser (which does the actual parsing) and -//! incremental reparsing. 
- -mod lexer; -mod text_token_source; -mod text_tree_sink; -mod reparsing; - -use crate::{syntax_node::GreenNode, AstNode, SyntaxError, SyntaxNode}; -use text_token_source::TextTokenSource; -use text_tree_sink::TextTreeSink; - -pub use lexer::*; - -pub(crate) use self::reparsing::incremental_reparse; -use ra_parser::SyntaxKind; - -pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec) { - let (tokens, lexer_errors) = tokenize(&text); - - let mut token_source = TextTokenSource::new(text, &tokens); - let mut tree_sink = TextTreeSink::new(text, &tokens); - - ra_parser::parse(&mut token_source, &mut tree_sink); - - let (tree, mut parser_errors) = tree_sink.finish(); - parser_errors.extend(lexer_errors); - - (tree, parser_errors) -} - -/// Returns `text` parsed as a `T` provided there are no parse errors. -pub(crate) fn parse_text_fragment( - text: &str, - fragment_kind: ra_parser::FragmentKind, -) -> Result { - let (tokens, lexer_errors) = tokenize(&text); - if !lexer_errors.is_empty() { - return Err(()); - } - - let mut token_source = TextTokenSource::new(text, &tokens); - let mut tree_sink = TextTreeSink::new(text, &tokens); - - // TextTreeSink assumes that there's at least some root node to which it can attach errors and - // tokens. We arbitrarily give it a SourceFile. - use ra_parser::TreeSink; - tree_sink.start_node(SyntaxKind::SOURCE_FILE); - ra_parser::parse_fragment(&mut token_source, &mut tree_sink, fragment_kind); - tree_sink.finish_node(); - - let (tree, parser_errors) = tree_sink.finish(); - use ra_parser::TokenSource; - if !parser_errors.is_empty() || token_source.current().kind != SyntaxKind::EOF { - return Err(()); - } - - SyntaxNode::new_root(tree).first_child().and_then(T::cast).ok_or(()) -} diff --git a/crates/ra_syntax/src/parsing/reparsing.rs b/crates/ra_syntax/src/parsing/reparsing.rs deleted file mode 100644 index ed5a42ea38..0000000000 --- a/crates/ra_syntax/src/parsing/reparsing.rs +++ /dev/null @@ -1,455 +0,0 @@ -//! 
Implementation of incremental re-parsing. -//! -//! We use two simple strategies for this: -//! - if the edit modifies only a single token (like changing an identifier's -//! letter), we replace only this token. -//! - otherwise, we search for the nearest `{}` block which contains the edit -//! and try to parse only this block. - -use ra_parser::Reparser; -use ra_text_edit::Indel; - -use crate::{ - algo, - parsing::{ - lexer::{lex_single_syntax_kind, tokenize, Token}, - text_token_source::TextTokenSource, - text_tree_sink::TextTreeSink, - }, - syntax_node::{GreenNode, GreenToken, NodeOrToken, SyntaxElement, SyntaxNode}, - SyntaxError, - SyntaxKind::*, - TextRange, TextSize, T, -}; - -pub(crate) fn incremental_reparse( - node: &SyntaxNode, - edit: &Indel, - errors: Vec, -) -> Option<(GreenNode, Vec, TextRange)> { - if let Some((green, new_errors, old_range)) = reparse_token(node, &edit) { - return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range)); - } - - if let Some((green, new_errors, old_range)) = reparse_block(node, &edit) { - return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range)); - } - None -} - -fn reparse_token<'node>( - root: &'node SyntaxNode, - edit: &Indel, -) -> Option<(GreenNode, Vec, TextRange)> { - let prev_token = algo::find_covering_element(root, edit.delete).as_token()?.clone(); - let prev_token_kind = prev_token.kind(); - match prev_token_kind { - WHITESPACE | COMMENT | IDENT | STRING | RAW_STRING => { - if prev_token_kind == WHITESPACE || prev_token_kind == COMMENT { - // removing a new line may extends previous token - let deleted_range = edit.delete - prev_token.text_range().start(); - if prev_token.text()[deleted_range].contains('\n') { - return None; - } - } - - let mut new_text = get_text_after_edit(prev_token.clone().into(), &edit); - let (new_token_kind, new_err) = lex_single_syntax_kind(&new_text)?; - - if new_token_kind != prev_token_kind - || (new_token_kind == IDENT && 
is_contextual_kw(&new_text)) - { - return None; - } - - // Check that edited token is not a part of the bigger token. - // E.g. if for source code `bruh"str"` the user removed `ruh`, then - // `b` no longer remains an identifier, but becomes a part of byte string literal - if let Some(next_char) = root.text().char_at(prev_token.text_range().end()) { - new_text.push(next_char); - let token_with_next_char = lex_single_syntax_kind(&new_text); - if let Some((_kind, _error)) = token_with_next_char { - return None; - } - new_text.pop(); - } - - let new_token = - GreenToken::new(rowan::SyntaxKind(prev_token_kind.into()), new_text.into()); - Some(( - prev_token.replace_with(new_token), - new_err.into_iter().collect(), - prev_token.text_range(), - )) - } - _ => None, - } -} - -fn reparse_block<'node>( - root: &'node SyntaxNode, - edit: &Indel, -) -> Option<(GreenNode, Vec, TextRange)> { - let (node, reparser) = find_reparsable_node(root, edit.delete)?; - let text = get_text_after_edit(node.clone().into(), edit); - - let (tokens, new_lexer_errors) = tokenize(&text); - if !is_balanced(&tokens) { - return None; - } - - let mut token_source = TextTokenSource::new(&text, &tokens); - let mut tree_sink = TextTreeSink::new(&text, &tokens); - reparser.parse(&mut token_source, &mut tree_sink); - - let (green, mut new_parser_errors) = tree_sink.finish(); - new_parser_errors.extend(new_lexer_errors); - - Some((node.replace_with(green), new_parser_errors, node.text_range())) -} - -fn get_text_after_edit(element: SyntaxElement, edit: &Indel) -> String { - let edit = Indel::replace(edit.delete - element.text_range().start(), edit.insert.clone()); - - let mut text = match element { - NodeOrToken::Token(token) => token.text().to_string(), - NodeOrToken::Node(node) => node.text().to_string(), - }; - edit.apply(&mut text); - text -} - -fn is_contextual_kw(text: &str) -> bool { - matches!(text, "auto" | "default" | "union") -} - -fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> 
Option<(SyntaxNode, Reparser)> { - let node = algo::find_covering_element(node, range); - - let mut ancestors = match node { - NodeOrToken::Token(it) => it.parent().ancestors(), - NodeOrToken::Node(it) => it.ancestors(), - }; - ancestors.find_map(|node| { - let first_child = node.first_child_or_token().map(|it| it.kind()); - let parent = node.parent().map(|it| it.kind()); - Reparser::for_node(node.kind(), first_child, parent).map(|r| (node, r)) - }) -} - -fn is_balanced(tokens: &[Token]) -> bool { - if tokens.is_empty() - || tokens.first().unwrap().kind != T!['{'] - || tokens.last().unwrap().kind != T!['}'] - { - return false; - } - let mut balance = 0usize; - for t in &tokens[1..tokens.len() - 1] { - match t.kind { - T!['{'] => balance += 1, - T!['}'] => { - balance = match balance.checked_sub(1) { - Some(b) => b, - None => return false, - } - } - _ => (), - } - } - balance == 0 -} - -fn merge_errors( - old_errors: Vec, - new_errors: Vec, - range_before_reparse: TextRange, - edit: &Indel, -) -> Vec { - let mut res = Vec::new(); - - for old_err in old_errors { - let old_err_range = old_err.range(); - if old_err_range.end() <= range_before_reparse.start() { - res.push(old_err); - } else if old_err_range.start() >= range_before_reparse.end() { - let inserted_len = TextSize::of(&edit.insert); - res.push(old_err.with_range((old_err_range + inserted_len) - edit.delete.len())); - // Note: extra parens are intentional to prevent uint underflow, HWAB (here was a bug) - } - } - res.extend(new_errors.into_iter().map(|new_err| { - // fighting borrow checker with a variable ;) - let offseted_range = new_err.range() + range_before_reparse.start(); - new_err.with_range(offseted_range) - })); - res -} - -#[cfg(test)] -mod tests { - use test_utils::{assert_eq_text, extract_range}; - - use super::*; - use crate::{AstNode, Parse, SourceFile}; - - fn do_check(before: &str, replace_with: &str, reparsed_len: u32) { - let (range, before) = extract_range(before); - let edit = 
Indel::replace(range, replace_with.to_owned()); - let after = { - let mut after = before.clone(); - edit.apply(&mut after); - after - }; - - let fully_reparsed = SourceFile::parse(&after); - let incrementally_reparsed: Parse = { - let before = SourceFile::parse(&before); - let (green, new_errors, range) = - incremental_reparse(before.tree().syntax(), &edit, before.errors.to_vec()).unwrap(); - assert_eq!(range.len(), reparsed_len.into(), "reparsed fragment has wrong length"); - Parse::new(green, new_errors) - }; - - assert_eq_text!( - &format!("{:#?}", fully_reparsed.tree().syntax()), - &format!("{:#?}", incrementally_reparsed.tree().syntax()), - ); - assert_eq!(fully_reparsed.errors(), incrementally_reparsed.errors()); - } - - #[test] // FIXME: some test here actually test token reparsing - fn reparse_block_tests() { - do_check( - r" -fn foo() { - let x = foo + <|>bar<|> -} -", - "baz", - 3, - ); - do_check( - r" -fn foo() { - let x = foo<|> + bar<|> -} -", - "baz", - 25, - ); - do_check( - r" -struct Foo { - f: foo<|><|> -} -", - ",\n g: (),", - 14, - ); - do_check( - r" -fn foo { - let; - 1 + 1; - <|>92<|>; -} -", - "62", - 31, // FIXME: reparse only int literal here - ); - do_check( - r" -mod foo { - fn <|><|> -} -", - "bar", - 11, - ); - - do_check( - r" -trait Foo { - type <|>Foo<|>; -} -", - "Output", - 3, - ); - do_check( - r" -impl IntoIterator for Foo { - f<|><|> -} -", - "n next(", - 9, - ); - do_check(r"use a::b::{foo,<|>,bar<|>};", "baz", 10); - do_check( - r" -pub enum A { - Foo<|><|> -} -", - "\nBar;\n", - 11, - ); - do_check( - r" -foo!{a, b<|><|> d} -", - ", c[3]", - 8, - ); - do_check( - r" -fn foo() { - vec![<|><|>] -} -", - "123", - 14, - ); - do_check( - r" -extern { - fn<|>;<|> -} -", - " exit(code: c_int)", - 11, - ); - } - - #[test] - fn reparse_token_tests() { - do_check( - r"<|><|> -fn foo() -> i32 { 1 } -", - "\n\n\n \n", - 1, - ); - do_check( - r" -fn foo() -> <|><|> {} -", - " \n", - 2, - ); - do_check( - r" -fn <|>foo<|>() -> i32 { 1 } 
-", - "bar", - 3, - ); - do_check( - r" -fn foo<|><|>foo() { } -", - "bar", - 6, - ); - do_check( - r" -fn foo /* <|><|> */ () {} -", - "some comment", - 6, - ); - do_check( - r" -fn baz <|><|> () {} -", - " \t\t\n\n", - 2, - ); - do_check( - r" -fn baz <|><|> () {} -", - " \t\t\n\n", - 2, - ); - do_check( - r" -/// foo <|><|>omment -mod { } -", - "c", - 14, - ); - do_check( - r#" -fn -> &str { "Hello<|><|>" } -"#, - ", world", - 7, - ); - do_check( - r#" -fn -> &str { // "Hello<|><|>" -"#, - ", world", - 10, - ); - do_check( - r##" -fn -> &str { r#"Hello<|><|>"# -"##, - ", world", - 10, - ); - do_check( - r" -#[derive(<|>Copy<|>)] -enum Foo { - -} -", - "Clone", - 4, - ); - } - - #[test] - fn reparse_str_token_with_error_unchanged() { - do_check(r#""<|>Unclosed<|> string literal"#, "Still unclosed", 24); - } - - #[test] - fn reparse_str_token_with_error_fixed() { - do_check(r#""unterinated<|><|>"#, "\"", 12); - } - - #[test] - fn reparse_block_with_error_in_middle_unchanged() { - do_check( - r#"fn main() { - if {} - 32 + 4<|><|> - return - if {} - }"#, - "23", - 105, - ) - } - - #[test] - fn reparse_block_with_error_in_middle_fixed() { - do_check( - r#"fn main() { - if {} - 32 + 4<|><|> - return - if {} - }"#, - ";", - 105, - ) - } -} diff --git a/crates/ra_syntax/src/parsing/text_token_source.rs b/crates/ra_syntax/src/parsing/text_token_source.rs deleted file mode 100644 index 97aa3e7951..0000000000 --- a/crates/ra_syntax/src/parsing/text_token_source.rs +++ /dev/null @@ -1,84 +0,0 @@ -//! See `TextTokenSource` docs. - -use ra_parser::TokenSource; - -use crate::{parsing::lexer::Token, SyntaxKind::EOF, TextRange, TextSize}; - -/// Implementation of `ra_parser::TokenSource` that takes tokens from source code text. 
-pub(crate) struct TextTokenSource<'t> { - text: &'t str, - /// token and its start position (non-whitespace/comment tokens) - /// ```non-rust - /// struct Foo; - /// ^------^--^- - /// | | \________ - /// | \____ \ - /// | \ | - /// (struct, 0) (Foo, 7) (;, 10) - /// ``` - /// `[(struct, 0), (Foo, 7), (;, 10)]` - token_offset_pairs: Vec<(Token, TextSize)>, - - /// Current token and position - curr: (ra_parser::Token, usize), -} - -impl<'t> TokenSource for TextTokenSource<'t> { - fn current(&self) -> ra_parser::Token { - self.curr.0 - } - - fn lookahead_nth(&self, n: usize) -> ra_parser::Token { - mk_token(self.curr.1 + n, &self.token_offset_pairs) - } - - fn bump(&mut self) { - if self.curr.0.kind == EOF { - return; - } - - let pos = self.curr.1 + 1; - self.curr = (mk_token(pos, &self.token_offset_pairs), pos); - } - - fn is_keyword(&self, kw: &str) -> bool { - self.token_offset_pairs - .get(self.curr.1) - .map(|(token, offset)| &self.text[TextRange::at(*offset, token.len)] == kw) - .unwrap_or(false) - } -} - -fn mk_token(pos: usize, token_offset_pairs: &[(Token, TextSize)]) -> ra_parser::Token { - let (kind, is_jointed_to_next) = match token_offset_pairs.get(pos) { - Some((token, offset)) => ( - token.kind, - token_offset_pairs - .get(pos + 1) - .map(|(_, next_offset)| offset + token.len == *next_offset) - .unwrap_or(false), - ), - None => (EOF, false), - }; - ra_parser::Token { kind, is_jointed_to_next } -} - -impl<'t> TextTokenSource<'t> { - /// Generate input from tokens(expect comment and whitespace). 
- pub fn new(text: &'t str, raw_tokens: &'t [Token]) -> TextTokenSource<'t> { - let token_offset_pairs: Vec<_> = raw_tokens - .iter() - .filter_map({ - let mut len = 0.into(); - move |token| { - let pair = if token.kind.is_trivia() { None } else { Some((*token, len)) }; - len += token.len; - pair - } - }) - .collect(); - - let first = mk_token(0, &token_offset_pairs); - TextTokenSource { text, token_offset_pairs, curr: (first, 0) } - } -} diff --git a/crates/ra_syntax/src/parsing/text_tree_sink.rs b/crates/ra_syntax/src/parsing/text_tree_sink.rs deleted file mode 100644 index 6d1828d203..0000000000 --- a/crates/ra_syntax/src/parsing/text_tree_sink.rs +++ /dev/null @@ -1,183 +0,0 @@ -//! FIXME: write short doc here - -use std::mem; - -use ra_parser::{ParseError, TreeSink}; - -use crate::{ - parsing::Token, - syntax_node::GreenNode, - SmolStr, SyntaxError, - SyntaxKind::{self, *}, - SyntaxTreeBuilder, TextRange, TextSize, -}; - -/// Bridges the parser with our specific syntax tree representation. -/// -/// `TextTreeSink` also handles attachment of trivia (whitespace) to nodes. 
-pub(crate) struct TextTreeSink<'a> { - text: &'a str, - tokens: &'a [Token], - text_pos: TextSize, - token_pos: usize, - state: State, - inner: SyntaxTreeBuilder, -} - -enum State { - PendingStart, - Normal, - PendingFinish, -} - -impl<'a> TreeSink for TextTreeSink<'a> { - fn token(&mut self, kind: SyntaxKind, n_tokens: u8) { - match mem::replace(&mut self.state, State::Normal) { - State::PendingStart => unreachable!(), - State::PendingFinish => self.inner.finish_node(), - State::Normal => (), - } - self.eat_trivias(); - let n_tokens = n_tokens as usize; - let len = self.tokens[self.token_pos..self.token_pos + n_tokens] - .iter() - .map(|it| it.len) - .sum::(); - self.do_token(kind, len, n_tokens); - } - - fn start_node(&mut self, kind: SyntaxKind) { - match mem::replace(&mut self.state, State::Normal) { - State::PendingStart => { - self.inner.start_node(kind); - // No need to attach trivias to previous node: there is no - // previous node. - return; - } - State::PendingFinish => self.inner.finish_node(), - State::Normal => (), - } - - let n_trivias = - self.tokens[self.token_pos..].iter().take_while(|it| it.kind.is_trivia()).count(); - let leading_trivias = &self.tokens[self.token_pos..self.token_pos + n_trivias]; - let mut trivia_end = - self.text_pos + leading_trivias.iter().map(|it| it.len).sum::(); - - let n_attached_trivias = { - let leading_trivias = leading_trivias.iter().rev().map(|it| { - let next_end = trivia_end - it.len; - let range = TextRange::new(next_end, trivia_end); - trivia_end = next_end; - (it.kind, &self.text[range]) - }); - n_attached_trivias(kind, leading_trivias) - }; - self.eat_n_trivias(n_trivias - n_attached_trivias); - self.inner.start_node(kind); - self.eat_n_trivias(n_attached_trivias); - } - - fn finish_node(&mut self) { - match mem::replace(&mut self.state, State::PendingFinish) { - State::PendingStart => unreachable!(), - State::PendingFinish => self.inner.finish_node(), - State::Normal => (), - } - } - - fn error(&mut self, 
error: ParseError) { - self.inner.error(error, self.text_pos) - } -} - -impl<'a> TextTreeSink<'a> { - pub(super) fn new(text: &'a str, tokens: &'a [Token]) -> Self { - Self { - text, - tokens, - text_pos: 0.into(), - token_pos: 0, - state: State::PendingStart, - inner: SyntaxTreeBuilder::default(), - } - } - - pub(super) fn finish(mut self) -> (GreenNode, Vec) { - match mem::replace(&mut self.state, State::Normal) { - State::PendingFinish => { - self.eat_trivias(); - self.inner.finish_node() - } - State::PendingStart | State::Normal => unreachable!(), - } - - self.inner.finish_raw() - } - - fn eat_trivias(&mut self) { - while let Some(&token) = self.tokens.get(self.token_pos) { - if !token.kind.is_trivia() { - break; - } - self.do_token(token.kind, token.len, 1); - } - } - - fn eat_n_trivias(&mut self, n: usize) { - for _ in 0..n { - let token = self.tokens[self.token_pos]; - assert!(token.kind.is_trivia()); - self.do_token(token.kind, token.len, 1); - } - } - - fn do_token(&mut self, kind: SyntaxKind, len: TextSize, n_tokens: usize) { - let range = TextRange::at(self.text_pos, len); - let text: SmolStr = self.text[range].into(); - self.text_pos += len; - self.token_pos += n_tokens; - self.inner.token(kind, text); - } -} - -fn n_attached_trivias<'a>( - kind: SyntaxKind, - trivias: impl Iterator, -) -> usize { - match kind { - MACRO_CALL | CONST | TYPE_ALIAS | STRUCT | ENUM | VARIANT | FN | TRAIT | MODULE - | RECORD_FIELD | STATIC => { - let mut res = 0; - let mut trivias = trivias.enumerate().peekable(); - - while let Some((i, (kind, text))) = trivias.next() { - match kind { - WHITESPACE => { - if text.contains("\n\n") { - // we check whether the next token is a doc-comment - // and skip the whitespace in this case - if let Some((peek_kind, peek_text)) = - trivias.peek().map(|(_, pair)| pair) - { - if *peek_kind == COMMENT - && peek_text.starts_with("///") - && !peek_text.starts_with("////") - { - continue; - } - } - break; - } - } - COMMENT => { - res = i + 1; - } 
- _ => (), - } - } - res - } - _ => 0, - } -} diff --git a/crates/ra_syntax/src/syntax_node.rs b/crates/ra_syntax/src/syntax_node.rs deleted file mode 100644 index a7dbdba7b1..0000000000 --- a/crates/ra_syntax/src/syntax_node.rs +++ /dev/null @@ -1,77 +0,0 @@ -//! This module defines Concrete Syntax Tree (CST), used by rust-analyzer. -//! -//! The CST includes comments and whitespace, provides a single node type, -//! `SyntaxNode`, and a basic traversal API (parent, children, siblings). -//! -//! The *real* implementation is in the (language-agnostic) `rowan` crate, this -//! module just wraps its API. - -use rowan::{GreenNodeBuilder, Language}; - -use crate::{Parse, SmolStr, SyntaxError, SyntaxKind, TextSize}; - -pub use rowan::GreenNode; - -pub(crate) use rowan::GreenToken; - -#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub enum RustLanguage {} -impl Language for RustLanguage { - type Kind = SyntaxKind; - - fn kind_from_raw(raw: rowan::SyntaxKind) -> SyntaxKind { - SyntaxKind::from(raw.0) - } - - fn kind_to_raw(kind: SyntaxKind) -> rowan::SyntaxKind { - rowan::SyntaxKind(kind.into()) - } -} - -pub type SyntaxNode = rowan::SyntaxNode; -pub type SyntaxToken = rowan::SyntaxToken; -pub type SyntaxElement = rowan::SyntaxElement; -pub type SyntaxNodeChildren = rowan::SyntaxNodeChildren; -pub type SyntaxElementChildren = rowan::SyntaxElementChildren; - -pub use rowan::{Direction, NodeOrToken}; - -#[derive(Default)] -pub struct SyntaxTreeBuilder { - errors: Vec, - inner: GreenNodeBuilder<'static>, -} - -impl SyntaxTreeBuilder { - pub(crate) fn finish_raw(self) -> (GreenNode, Vec) { - let green = self.inner.finish(); - (green, self.errors) - } - - pub fn finish(self) -> Parse { - let (green, errors) = self.finish_raw(); - if cfg!(debug_assertions) { - let node = SyntaxNode::new_root(green.clone()); - crate::validation::validate_block_structure(&node); - } - Parse::new(green, errors) - } - - pub fn token(&mut self, kind: SyntaxKind, text: SmolStr) 
{ - let kind = RustLanguage::kind_to_raw(kind); - self.inner.token(kind, text) - } - - pub fn start_node(&mut self, kind: SyntaxKind) { - let kind = RustLanguage::kind_to_raw(kind); - self.inner.start_node(kind) - } - - pub fn finish_node(&mut self) { - self.inner.finish_node() - } - - pub fn error(&mut self, error: ra_parser::ParseError, text_pos: TextSize) { - self.errors.push(SyntaxError::new_at_offset(*error.0, text_pos)) - } -} diff --git a/crates/ra_syntax/src/tests.rs b/crates/ra_syntax/src/tests.rs deleted file mode 100644 index 00b957f434..0000000000 --- a/crates/ra_syntax/src/tests.rs +++ /dev/null @@ -1,280 +0,0 @@ -use std::{ - fmt::Write, - fs, - path::{Path, PathBuf}, -}; - -use expect::expect_file; -use rayon::prelude::*; -use test_utils::project_dir; - -use crate::{fuzz, tokenize, SourceFile, SyntaxError, TextRange, TextSize, Token}; - -#[test] -fn lexer_tests() { - // FIXME: - // * Add tests for unicode escapes in byte-character and [raw]-byte-string literals - // * Add tests for unescape errors - - dir_tests(&test_data_dir(), &["lexer/ok"], "txt", |text, path| { - let (tokens, errors) = tokenize(text); - assert_errors_are_absent(&errors, path); - dump_tokens_and_errors(&tokens, &errors, text) - }); - dir_tests(&test_data_dir(), &["lexer/err"], "txt", |text, path| { - let (tokens, errors) = tokenize(text); - assert_errors_are_present(&errors, path); - dump_tokens_and_errors(&tokens, &errors, text) - }); -} - -#[test] -fn parse_smoke_test() { - let code = r##" -fn main() { - println!("Hello, world!") -} - "##; - - let parse = SourceFile::parse(code); - // eprintln!("{:#?}", parse.syntax_node()); - assert!(parse.ok().is_ok()); -} - -#[test] -fn parser_tests() { - dir_tests(&test_data_dir(), &["parser/inline/ok", "parser/ok"], "rast", |text, path| { - let parse = SourceFile::parse(text); - let errors = parse.errors(); - assert_errors_are_absent(&errors, path); - parse.debug_dump() - }); - dir_tests(&test_data_dir(), &["parser/err", 
"parser/inline/err"], "rast", |text, path| { - let parse = SourceFile::parse(text); - let errors = parse.errors(); - assert_errors_are_present(&errors, path); - parse.debug_dump() - }); -} - -#[test] -fn expr_parser_tests() { - fragment_parser_dir_test( - &["parser/fragments/expr/ok"], - &["parser/fragments/expr/err"], - crate::ast::Expr::parse, - ); -} - -#[test] -fn path_parser_tests() { - fragment_parser_dir_test( - &["parser/fragments/path/ok"], - &["parser/fragments/path/err"], - crate::ast::Path::parse, - ); -} - -#[test] -fn pattern_parser_tests() { - fragment_parser_dir_test( - &["parser/fragments/pattern/ok"], - &["parser/fragments/pattern/err"], - crate::ast::Pat::parse, - ); -} - -#[test] -fn item_parser_tests() { - fragment_parser_dir_test( - &["parser/fragments/item/ok"], - &["parser/fragments/item/err"], - crate::ast::Item::parse, - ); -} - -#[test] -fn type_parser_tests() { - fragment_parser_dir_test( - &["parser/fragments/type/ok"], - &["parser/fragments/type/err"], - crate::ast::Type::parse, - ); -} - -#[test] -fn parser_fuzz_tests() { - for (_, text) in collect_rust_files(&test_data_dir(), &["parser/fuzz-failures"]) { - fuzz::check_parser(&text) - } -} - -#[test] -fn reparse_fuzz_tests() { - for (_, text) in collect_rust_files(&test_data_dir(), &["reparse/fuzz-failures"]) { - let check = fuzz::CheckReparse::from_data(text.as_bytes()).unwrap(); - println!("{:?}", check); - check.run(); - } -} - -/// Test that Rust-analyzer can parse and validate the rust-analyzer -/// FIXME: Use this as a benchmark -#[test] -fn self_hosting_parsing() { - let dir = project_dir().join("crates"); - let files = walkdir::WalkDir::new(dir) - .into_iter() - .filter_entry(|entry| { - // Get all files which are not in the crates/ra_syntax/test_data folder - !entry.path().components().any(|component| component.as_os_str() == "test_data") - }) - .map(|e| e.unwrap()) - .filter(|entry| { - // Get all `.rs ` files - !entry.path().is_dir() && 
(entry.path().extension().unwrap_or_default() == "rs") - }) - .map(|entry| entry.into_path()) - .collect::>(); - assert!( - files.len() > 100, - "self_hosting_parsing found too few files - is it running in the right directory?" - ); - - let errors = files - .into_par_iter() - .filter_map(|file| { - let text = read_text(&file); - match SourceFile::parse(&text).ok() { - Ok(_) => None, - Err(err) => Some((file, err)), - } - }) - .collect::>(); - - if !errors.is_empty() { - let errors = errors - .into_iter() - .map(|(path, err)| format!("{}: {:?}\n", path.display(), err)) - .collect::(); - panic!("Parsing errors:\n{}\n", errors); - } -} - -fn test_data_dir() -> PathBuf { - project_dir().join("crates/ra_syntax/test_data") -} - -fn assert_errors_are_present(errors: &[SyntaxError], path: &Path) { - assert!(!errors.is_empty(), "There should be errors in the file {:?}", path.display()); -} -fn assert_errors_are_absent(errors: &[SyntaxError], path: &Path) { - assert_eq!( - errors, - &[] as &[SyntaxError], - "There should be no errors in the file {:?}", - path.display(), - ); -} - -fn dump_tokens_and_errors(tokens: &[Token], errors: &[SyntaxError], text: &str) -> String { - let mut acc = String::new(); - let mut offset: TextSize = 0.into(); - for token in tokens { - let token_len = token.len; - let token_text = &text[TextRange::at(offset, token.len)]; - offset += token.len; - writeln!(acc, "{:?} {:?} {:?}", token.kind, token_len, token_text).unwrap(); - } - for err in errors { - writeln!(acc, "> error{:?} token({:?}) msg({})", err.range(), &text[err.range()], err) - .unwrap(); - } - acc -} - -fn fragment_parser_dir_test(ok_paths: &[&str], err_paths: &[&str], f: F) -where - T: crate::AstNode, - F: Fn(&str) -> Result, -{ - dir_tests(&test_data_dir(), ok_paths, "rast", |text, path| { - if let Ok(node) = f(text) { - format!("{:#?}", crate::ast::AstNode::syntax(&node)) - } else { - panic!("Failed to parse '{:?}'", path); - } - }); - dir_tests(&test_data_dir(), err_paths, "rast", 
|text, path| { - if let Ok(_) = f(text) { - panic!("'{:?}' successfully parsed when it should have errored", path); - } else { - "ERROR\n".to_owned() - } - }); -} - -/// Calls callback `f` with input code and file paths for each `.rs` file in `test_data_dir` -/// subdirectories defined by `paths`. -/// -/// If the content of the matching output file differs from the output of `f()` -/// the test will fail. -/// -/// If there is no matching output file it will be created and filled with the -/// output of `f()`, but the test will fail. -fn dir_tests(test_data_dir: &Path, paths: &[&str], outfile_extension: &str, f: F) -where - F: Fn(&str, &Path) -> String, -{ - for (path, input_code) in collect_rust_files(test_data_dir, paths) { - let actual = f(&input_code, &path); - let path = path.with_extension(outfile_extension); - expect_file![path].assert_eq(&actual) - } -} - -/// Collects all `.rs` files from `dir` subdirectories defined by `paths`. -fn collect_rust_files(root_dir: &Path, paths: &[&str]) -> Vec<(PathBuf, String)> { - paths - .iter() - .flat_map(|path| { - let path = root_dir.to_owned().join(path); - rust_files_in_dir(&path).into_iter() - }) - .map(|path| { - let text = read_text(&path); - (path, text) - }) - .collect() -} - -/// Collects paths to all `.rs` files from `dir` in a sorted `Vec`. -fn rust_files_in_dir(dir: &Path) -> Vec { - let mut acc = Vec::new(); - for file in fs::read_dir(&dir).unwrap() { - let file = file.unwrap(); - let path = file.path(); - if path.extension().unwrap_or_default() == "rs" { - acc.push(path); - } - } - acc.sort(); - acc -} - -/// Read file and normalize newlines. -/// -/// `rustc` seems to always normalize `\r\n` newlines to `\n`: -/// -/// ``` -/// let s = " -/// "; -/// assert_eq!(s.as_bytes(), &[10]); -/// ``` -/// -/// so this should always be correct. 
-fn read_text(path: &Path) -> String { - fs::read_to_string(path) - .unwrap_or_else(|_| panic!("File at {:?} should be valid", path)) - .replace("\r\n", "\n") -} diff --git a/crates/ra_syntax/test_data/parser/err/0043_default_const.rast b/crates/ra_syntax/test_data/parser/err/0043_default_const.rast deleted file mode 100644 index 51ad2a846f..0000000000 --- a/crates/ra_syntax/test_data/parser/err/0043_default_const.rast +++ /dev/null @@ -1,40 +0,0 @@ -SOURCE_FILE@0..39 - TRAIT@0..38 - TRAIT_KW@0..5 "trait" - WHITESPACE@5..6 " " - NAME@6..7 - IDENT@6..7 "T" - WHITESPACE@7..8 " " - ASSOC_ITEM_LIST@8..38 - L_CURLY@8..9 "{" - WHITESPACE@9..12 "\n " - MACRO_CALL@12..19 - PATH@12..19 - PATH_SEGMENT@12..19 - NAME_REF@12..19 - IDENT@12..19 "default" - WHITESPACE@19..20 " " - CONST@20..36 - CONST_KW@20..25 "const" - WHITESPACE@25..26 " " - NAME@26..27 - IDENT@26..27 "f" - COLON@27..28 ":" - WHITESPACE@28..29 " " - PATH_TYPE@29..31 - PATH@29..31 - PATH_SEGMENT@29..31 - NAME_REF@29..31 - IDENT@29..31 "u8" - WHITESPACE@31..32 " " - EQ@32..33 "=" - WHITESPACE@33..34 " " - LITERAL@34..35 - INT_NUMBER@34..35 "0" - SEMICOLON@35..36 ";" - WHITESPACE@36..37 "\n" - R_CURLY@37..38 "}" - WHITESPACE@38..39 "\n" -error 19..19: expected BANG -error 19..19: expected `{`, `[`, `(` -error 19..19: expected SEMICOLON diff --git a/crates/ra_syntax/test_data/parser/err/0043_default_const.rs b/crates/ra_syntax/test_data/parser/err/0043_default_const.rs deleted file mode 100644 index 80f15474a5..0000000000 --- a/crates/ra_syntax/test_data/parser/err/0043_default_const.rs +++ /dev/null @@ -1,3 +0,0 @@ -trait T { - default const f: u8 = 0; -} diff --git a/crates/ra_syntax/test_data/parser/fuzz-failures/0001.rs b/crates/ra_syntax/test_data/parser/fuzz-failures/0001.rs deleted file mode 100644 index 099cc5f84a..0000000000 --- a/crates/ra_syntax/test_data/parser/fuzz-failures/0001.rs +++ /dev/null @@ -1,106 +0,0 @@ -use ra_syntax::{ - File, TextRange, SyntaxNodeRef, TextUnit, - SyntaxKind::*, - 
algo::{find_leaf_at_offset, LeafAtOffset, find_covering_node, ancestors, Direction, siblings}, -}; - -pub fn extend_selection(file: &File, range: TextRange) -> Option { - let syntax = file.syntax(); - extend(syntax.borrowed(), range) -} - -pub(crate) fn extend(root: SyntaxNodeRef, range: TextRange) -> Option { - if range.is_empty() { - let offset = range.start(); - let mut leaves = find_leaf_at_offset(root, offset); - if leaves.clone().all(|it| it.kind() == WHITESPACE) { - return Some(extend_ws(root, leaves.next()?, offset)); - } - let leaf = match leaves { - LeafAtOffset::None => return None, - LeafAtOffset::Single(l) => l, - LeafAtOffset::Between(l, r) => pick_best(l, r), - }; - return Some(leaf.range()); - }; - let node = find_covering_node(root, range); - if node.kind() == COMMENT && range == node.range() { - if let Some(range) = extend_comments(node) { - return Some(range); - } - } - - match ancestors(node).skip_while(|n| n.range() == range).next() { - None => None, - Some(parent) => Some(parent.range()), - } -} - -fn extend_ws(root: SyntaxNodeRef, ws: SyntaxNodeRef, offset: TextUnit) -> TextRange { - let ws_text = ws.leaf_text().unwrap(); - let suffix = TextRange::from_to(offset, ws.range().end()) - ws.range().start(); - let prefix = TextRange::from_to(ws.range().start(), offset) - ws.range().start(); - let ws_suffix = &ws_text.as_str()[suffix]; - let ws_prefix = &ws_text.as_str()[prefix]; - if ws_text.contains("\n") && !ws_suffix.contains("\n") { - if let Some(node) = ws.next_sibling() { - let start = match ws_prefix.rfind('\n') { - Some(idx) => ws.range().start() + TextUnit::from((idx + 1) as u32), - None => node.range().start() - }; - let end = if root.text().char_at(node.range().end()) == Some('\n') { - node.range().end() + TextUnit::of_char('\n') - } else { - node.range().end() - }; - return TextRange::from_to(start, end); - } - } - ws.range() -} - -fn pick_best<'a>(l: SyntaxNodeRef<'a>, r: Syntd[axNodeRef<'a>) -> SyntaxNodeRef<'a> { - return if 
priority(r) > priority(l) { r } else { l }; - fn priority(n: SyntaxNodeRef) -> usize { - match n.kind() { - WHITESPACE => 0, - IDENT | SELF_KW | SUPER_KW | CRATE_KW => 2, - _ => 1, - } - } -} - -fn extend_comments(node: SyntaxNodeRef) -> Option { - let left = adj_com[ments(node, Direction::Backward); - let right = adj_comments(node, Direction::Forward); - if left != right { - Some(TextRange::from_to( - left.range().start(), - right.range().end(), - )) - } else { - None - } -} - -fn adj_comments(node: SyntaxNodeRef, dir: Direction) -> SyntaxNodeRef { - let mut res = node; - for node in siblings(node, dir) { - match node.kind() { - COMMENT => res = node, - WHITESPACE if !node.leaf_text().unwrap().as_str().contains("\n\n") => (), - _ => break - } - } - res -} - -#[cfg(test)] -mod tests { - use super::*; - use test_utils::extract_offset; - - fn do_check(before: &str, afters: &[&str]) { - let (cursor, before) = extract_offset(before); - let file = File::parse(&before); - let mut range = TextRange::of diff --git a/crates/ra_syntax/test_data/parser/inline/err/0014_default_fn_type.rast b/crates/ra_syntax/test_data/parser/inline/err/0014_default_fn_type.rast deleted file mode 100644 index acd72094b9..0000000000 --- a/crates/ra_syntax/test_data/parser/inline/err/0014_default_fn_type.rast +++ /dev/null @@ -1,58 +0,0 @@ -SOURCE_FILE@0..62 - TRAIT@0..61 - TRAIT_KW@0..5 "trait" - WHITESPACE@5..6 " " - NAME@6..7 - IDENT@6..7 "T" - WHITESPACE@7..8 " " - ASSOC_ITEM_LIST@8..61 - L_CURLY@8..9 "{" - WHITESPACE@9..14 "\n " - MACRO_CALL@14..21 - PATH@14..21 - PATH_SEGMENT@14..21 - NAME_REF@14..21 - IDENT@14..21 "default" - WHITESPACE@21..22 " " - TYPE_ALIAS@22..35 - TYPE_KW@22..26 "type" - WHITESPACE@26..27 " " - NAME@27..28 - IDENT@27..28 "T" - WHITESPACE@28..29 " " - EQ@29..30 "=" - WHITESPACE@30..31 " " - PATH_TYPE@31..34 - PATH@31..34 - PATH_SEGMENT@31..34 - NAME_REF@31..34 - IDENT@31..34 "Bar" - SEMICOLON@34..35 ";" - WHITESPACE@35..40 "\n " - MACRO_CALL@40..47 - PATH@40..47 - 
PATH_SEGMENT@40..47 - NAME_REF@40..47 - IDENT@40..47 "default" - WHITESPACE@47..48 " " - FN@48..59 - FN_KW@48..50 "fn" - WHITESPACE@50..51 " " - NAME@51..54 - IDENT@51..54 "foo" - PARAM_LIST@54..56 - L_PAREN@54..55 "(" - R_PAREN@55..56 ")" - WHITESPACE@56..57 " " - BLOCK_EXPR@57..59 - L_CURLY@57..58 "{" - R_CURLY@58..59 "}" - WHITESPACE@59..60 "\n" - R_CURLY@60..61 "}" - WHITESPACE@61..62 "\n" -error 21..21: expected BANG -error 21..21: expected `{`, `[`, `(` -error 21..21: expected SEMICOLON -error 47..47: expected BANG -error 47..47: expected `{`, `[`, `(` -error 47..47: expected SEMICOLON diff --git a/crates/ra_syntax/test_data/parser/inline/err/0014_default_fn_type.rs b/crates/ra_syntax/test_data/parser/inline/err/0014_default_fn_type.rs deleted file mode 100644 index 15ba8f4a85..0000000000 --- a/crates/ra_syntax/test_data/parser/inline/err/0014_default_fn_type.rs +++ /dev/null @@ -1,4 +0,0 @@ -trait T { - default type T = Bar; - default fn foo() {} -} diff --git a/crates/ra_syntax/test_data/parser/inline/err/0015_empty_segment.rast b/crates/ra_syntax/test_data/parser/inline/err/0015_empty_segment.rast deleted file mode 100644 index 2f59d0606c..0000000000 --- a/crates/ra_syntax/test_data/parser/inline/err/0015_empty_segment.rast +++ /dev/null @@ -1,15 +0,0 @@ -SOURCE_FILE@0..13 - USE@0..12 - USE_KW@0..3 "use" - WHITESPACE@3..4 " " - USE_TREE@4..12 - PATH@4..12 - PATH@4..9 - PATH_SEGMENT@4..9 - CRATE_KW@4..9 "crate" - COLON2@9..11 "::" - ERROR@11..12 - SEMICOLON@11..12 ";" - WHITESPACE@12..13 "\n" -error 11..11: expected identifier -error 12..12: expected SEMICOLON diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0001_trait_item_list.rast b/crates/ra_syntax/test_data/parser/inline/ok/0001_trait_item_list.rast deleted file mode 100644 index c7289e4008..0000000000 --- a/crates/ra_syntax/test_data/parser/inline/ok/0001_trait_item_list.rast +++ /dev/null @@ -1,71 +0,0 @@ -SOURCE_FILE@0..83 - IMPL@0..82 - IMPL_KW@0..4 "impl" - WHITESPACE@4..5 " " - 
PATH_TYPE@5..6 - PATH@5..6 - PATH_SEGMENT@5..6 - NAME_REF@5..6 - IDENT@5..6 "F" - WHITESPACE@6..7 " " - ASSOC_ITEM_LIST@7..82 - L_CURLY@7..8 "{" - WHITESPACE@8..13 "\n " - TYPE_ALIAS@13..27 - TYPE_KW@13..17 "type" - WHITESPACE@17..18 " " - NAME@18..19 - IDENT@18..19 "A" - COLON@19..20 ":" - WHITESPACE@20..21 " " - TYPE_BOUND_LIST@21..26 - TYPE_BOUND@21..26 - PATH_TYPE@21..26 - PATH@21..26 - PATH_SEGMENT@21..26 - NAME_REF@21..26 - IDENT@21..26 "Clone" - SEMICOLON@26..27 ";" - WHITESPACE@27..32 "\n " - CONST@32..45 - CONST_KW@32..37 "const" - WHITESPACE@37..38 " " - NAME@38..39 - IDENT@38..39 "B" - COLON@39..40 ":" - WHITESPACE@40..41 " " - PATH_TYPE@41..44 - PATH@41..44 - PATH_SEGMENT@41..44 - NAME_REF@41..44 - IDENT@41..44 "i32" - SEMICOLON@44..45 ";" - WHITESPACE@45..50 "\n " - FN@50..61 - FN_KW@50..52 "fn" - WHITESPACE@52..53 " " - NAME@53..56 - IDENT@53..56 "foo" - PARAM_LIST@56..58 - L_PAREN@56..57 "(" - R_PAREN@57..58 ")" - WHITESPACE@58..59 " " - BLOCK_EXPR@59..61 - L_CURLY@59..60 "{" - R_CURLY@60..61 "}" - WHITESPACE@61..66 "\n " - FN@66..80 - FN_KW@66..68 "fn" - WHITESPACE@68..69 " " - NAME@69..72 - IDENT@69..72 "bar" - PARAM_LIST@72..79 - L_PAREN@72..73 "(" - SELF_PARAM@73..78 - AMP@73..74 "&" - SELF_KW@74..78 "self" - R_PAREN@78..79 ")" - SEMICOLON@79..80 ";" - WHITESPACE@80..81 "\n" - R_CURLY@81..82 "}" - WHITESPACE@82..83 "\n" diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0001_trait_item_list.rs b/crates/ra_syntax/test_data/parser/inline/ok/0001_trait_item_list.rs deleted file mode 100644 index a5ec3239f8..0000000000 --- a/crates/ra_syntax/test_data/parser/inline/ok/0001_trait_item_list.rs +++ /dev/null @@ -1,6 +0,0 @@ -impl F { - type A: Clone; - const B: i32; - fn foo() {} - fn bar(&self); -} diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0016_unsafe_trait.rast b/crates/ra_syntax/test_data/parser/inline/ok/0016_unsafe_trait.rast deleted file mode 100644 index 625ab4c2d9..0000000000 --- 
a/crates/ra_syntax/test_data/parser/inline/ok/0016_unsafe_trait.rast +++ /dev/null @@ -1,13 +0,0 @@ -SOURCE_FILE@0..18 - TRAIT@0..17 - UNSAFE_KW@0..6 "unsafe" - WHITESPACE@6..7 " " - TRAIT_KW@7..12 "trait" - WHITESPACE@12..13 " " - NAME@13..14 - IDENT@13..14 "T" - WHITESPACE@14..15 " " - ASSOC_ITEM_LIST@15..17 - L_CURLY@15..16 "{" - R_CURLY@16..17 "}" - WHITESPACE@17..18 "\n" diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0016_unsafe_trait.rs b/crates/ra_syntax/test_data/parser/inline/ok/0016_unsafe_trait.rs deleted file mode 100644 index 04e021550d..0000000000 --- a/crates/ra_syntax/test_data/parser/inline/ok/0016_unsafe_trait.rs +++ /dev/null @@ -1 +0,0 @@ -unsafe trait T {} diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0036_unsafe_extern_fn.rast b/crates/ra_syntax/test_data/parser/inline/ok/0036_unsafe_extern_fn.rast deleted file mode 100644 index 293b1d64c7..0000000000 --- a/crates/ra_syntax/test_data/parser/inline/ok/0036_unsafe_extern_fn.rast +++ /dev/null @@ -1,21 +0,0 @@ -SOURCE_FILE@0..30 - FN@0..29 - UNSAFE_KW@0..6 "unsafe" - WHITESPACE@6..7 " " - ABI@7..17 - EXTERN_KW@7..13 "extern" - WHITESPACE@13..14 " " - STRING@14..17 "\"C\"" - WHITESPACE@17..18 " " - FN_KW@18..20 "fn" - WHITESPACE@20..21 " " - NAME@21..24 - IDENT@21..24 "foo" - PARAM_LIST@24..26 - L_PAREN@24..25 "(" - R_PAREN@25..26 ")" - WHITESPACE@26..27 " " - BLOCK_EXPR@27..29 - L_CURLY@27..28 "{" - R_CURLY@28..29 "}" - WHITESPACE@29..30 "\n" diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0036_unsafe_extern_fn.rs b/crates/ra_syntax/test_data/parser/inline/ok/0036_unsafe_extern_fn.rs deleted file mode 100644 index 1295c2cd22..0000000000 --- a/crates/ra_syntax/test_data/parser/inline/ok/0036_unsafe_extern_fn.rs +++ /dev/null @@ -1 +0,0 @@ -unsafe extern "C" fn foo() {} diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0047_unsafe_default_impl.rast b/crates/ra_syntax/test_data/parser/inline/ok/0047_unsafe_default_impl.rast deleted file mode 100644 index 
d6dfa83b70..0000000000 --- a/crates/ra_syntax/test_data/parser/inline/ok/0047_unsafe_default_impl.rast +++ /dev/null @@ -1,18 +0,0 @@ -SOURCE_FILE@0..27 - IMPL@0..26 - UNSAFE_KW@0..6 "unsafe" - WHITESPACE@6..7 " " - DEFAULT_KW@7..14 "default" - WHITESPACE@14..15 " " - IMPL_KW@15..19 "impl" - WHITESPACE@19..20 " " - PATH_TYPE@20..23 - PATH@20..23 - PATH_SEGMENT@20..23 - NAME_REF@20..23 - IDENT@20..23 "Foo" - WHITESPACE@23..24 " " - ASSOC_ITEM_LIST@24..26 - L_CURLY@24..25 "{" - R_CURLY@25..26 "}" - WHITESPACE@26..27 "\n" diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0047_unsafe_default_impl.rs b/crates/ra_syntax/test_data/parser/inline/ok/0047_unsafe_default_impl.rs deleted file mode 100644 index 9cd6c57bd8..0000000000 --- a/crates/ra_syntax/test_data/parser/inline/ok/0047_unsafe_default_impl.rs +++ /dev/null @@ -1 +0,0 @@ -unsafe default impl Foo {} diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0057_const_fn.rast b/crates/ra_syntax/test_data/parser/inline/ok/0057_const_fn.rast deleted file mode 100644 index 97548a5eeb..0000000000 --- a/crates/ra_syntax/test_data/parser/inline/ok/0057_const_fn.rast +++ /dev/null @@ -1,16 +0,0 @@ -SOURCE_FILE@0..18 - FN@0..17 - CONST_KW@0..5 "const" - WHITESPACE@5..6 " " - FN_KW@6..8 "fn" - WHITESPACE@8..9 " " - NAME@9..12 - IDENT@9..12 "foo" - PARAM_LIST@12..14 - L_PAREN@12..13 "(" - R_PAREN@13..14 ")" - WHITESPACE@14..15 " " - BLOCK_EXPR@15..17 - L_CURLY@15..16 "{" - R_CURLY@16..17 "}" - WHITESPACE@17..18 "\n" diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0057_const_fn.rs b/crates/ra_syntax/test_data/parser/inline/ok/0057_const_fn.rs deleted file mode 100644 index 8c84d9cd7c..0000000000 --- a/crates/ra_syntax/test_data/parser/inline/ok/0057_const_fn.rs +++ /dev/null @@ -1 +0,0 @@ -const fn foo() {} diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0087_unsafe_impl.rast b/crates/ra_syntax/test_data/parser/inline/ok/0087_unsafe_impl.rast deleted file mode 100644 index 43c09affed..0000000000 --- 
a/crates/ra_syntax/test_data/parser/inline/ok/0087_unsafe_impl.rast +++ /dev/null @@ -1,16 +0,0 @@ -SOURCE_FILE@0..19 - IMPL@0..18 - UNSAFE_KW@0..6 "unsafe" - WHITESPACE@6..7 " " - IMPL_KW@7..11 "impl" - WHITESPACE@11..12 " " - PATH_TYPE@12..15 - PATH@12..15 - PATH_SEGMENT@12..15 - NAME_REF@12..15 - IDENT@12..15 "Foo" - WHITESPACE@15..16 " " - ASSOC_ITEM_LIST@16..18 - L_CURLY@16..17 "{" - R_CURLY@17..18 "}" - WHITESPACE@18..19 "\n" diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0087_unsafe_impl.rs b/crates/ra_syntax/test_data/parser/inline/ok/0087_unsafe_impl.rs deleted file mode 100644 index 41055f41d9..0000000000 --- a/crates/ra_syntax/test_data/parser/inline/ok/0087_unsafe_impl.rs +++ /dev/null @@ -1 +0,0 @@ -unsafe impl Foo {} diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0089_extern_fn.rast b/crates/ra_syntax/test_data/parser/inline/ok/0089_extern_fn.rast deleted file mode 100644 index 405b6a259f..0000000000 --- a/crates/ra_syntax/test_data/parser/inline/ok/0089_extern_fn.rast +++ /dev/null @@ -1,17 +0,0 @@ -SOURCE_FILE@0..19 - FN@0..18 - ABI@0..6 - EXTERN_KW@0..6 "extern" - WHITESPACE@6..7 " " - FN_KW@7..9 "fn" - WHITESPACE@9..10 " " - NAME@10..13 - IDENT@10..13 "foo" - PARAM_LIST@13..15 - L_PAREN@13..14 "(" - R_PAREN@14..15 ")" - WHITESPACE@15..16 " " - BLOCK_EXPR@16..18 - L_CURLY@16..17 "{" - R_CURLY@17..18 "}" - WHITESPACE@18..19 "\n" diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0089_extern_fn.rs b/crates/ra_syntax/test_data/parser/inline/ok/0089_extern_fn.rs deleted file mode 100644 index 394a049f0f..0000000000 --- a/crates/ra_syntax/test_data/parser/inline/ok/0089_extern_fn.rs +++ /dev/null @@ -1 +0,0 @@ -extern fn foo() {} diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0091_auto_trait.rast b/crates/ra_syntax/test_data/parser/inline/ok/0091_auto_trait.rast deleted file mode 100644 index 0cac9ac431..0000000000 --- a/crates/ra_syntax/test_data/parser/inline/ok/0091_auto_trait.rast +++ /dev/null @@ -1,13 +0,0 @@ 
-SOURCE_FILE@0..16 - TRAIT@0..15 - AUTO_KW@0..4 "auto" - WHITESPACE@4..5 " " - TRAIT_KW@5..10 "trait" - WHITESPACE@10..11 " " - NAME@11..12 - IDENT@11..12 "T" - WHITESPACE@12..13 " " - ASSOC_ITEM_LIST@13..15 - L_CURLY@13..14 "{" - R_CURLY@14..15 "}" - WHITESPACE@15..16 "\n" diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0091_auto_trait.rs b/crates/ra_syntax/test_data/parser/inline/ok/0091_auto_trait.rs deleted file mode 100644 index 72adf60351..0000000000 --- a/crates/ra_syntax/test_data/parser/inline/ok/0091_auto_trait.rs +++ /dev/null @@ -1 +0,0 @@ -auto trait T {} diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0094_unsafe_auto_trait.rast b/crates/ra_syntax/test_data/parser/inline/ok/0094_unsafe_auto_trait.rast deleted file mode 100644 index 0ef11c6825..0000000000 --- a/crates/ra_syntax/test_data/parser/inline/ok/0094_unsafe_auto_trait.rast +++ /dev/null @@ -1,15 +0,0 @@ -SOURCE_FILE@0..23 - TRAIT@0..22 - UNSAFE_KW@0..6 "unsafe" - WHITESPACE@6..7 " " - AUTO_KW@7..11 "auto" - WHITESPACE@11..12 " " - TRAIT_KW@12..17 "trait" - WHITESPACE@17..18 " " - NAME@18..19 - IDENT@18..19 "T" - WHITESPACE@19..20 " " - ASSOC_ITEM_LIST@20..22 - L_CURLY@20..21 "{" - R_CURLY@21..22 "}" - WHITESPACE@22..23 "\n" diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0094_unsafe_auto_trait.rs b/crates/ra_syntax/test_data/parser/inline/ok/0094_unsafe_auto_trait.rs deleted file mode 100644 index 03d29f3241..0000000000 --- a/crates/ra_syntax/test_data/parser/inline/ok/0094_unsafe_auto_trait.rs +++ /dev/null @@ -1 +0,0 @@ -unsafe auto trait T {} diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0097_default_impl.rast b/crates/ra_syntax/test_data/parser/inline/ok/0097_default_impl.rast deleted file mode 100644 index 0a1b21d6e6..0000000000 --- a/crates/ra_syntax/test_data/parser/inline/ok/0097_default_impl.rast +++ /dev/null @@ -1,16 +0,0 @@ -SOURCE_FILE@0..20 - IMPL@0..19 - DEFAULT_KW@0..7 "default" - WHITESPACE@7..8 " " - IMPL_KW@8..12 "impl" - 
WHITESPACE@12..13 " " - PATH_TYPE@13..16 - PATH@13..16 - PATH_SEGMENT@13..16 - NAME_REF@13..16 - IDENT@13..16 "Foo" - WHITESPACE@16..17 " " - ASSOC_ITEM_LIST@17..19 - L_CURLY@17..18 "{" - R_CURLY@18..19 "}" - WHITESPACE@19..20 "\n" diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0097_default_impl.rs b/crates/ra_syntax/test_data/parser/inline/ok/0097_default_impl.rs deleted file mode 100644 index ef6aa84a29..0000000000 --- a/crates/ra_syntax/test_data/parser/inline/ok/0097_default_impl.rs +++ /dev/null @@ -1 +0,0 @@ -default impl Foo {} diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0098_const_unsafe_fn.rast b/crates/ra_syntax/test_data/parser/inline/ok/0098_const_unsafe_fn.rast deleted file mode 100644 index 32a77ba490..0000000000 --- a/crates/ra_syntax/test_data/parser/inline/ok/0098_const_unsafe_fn.rast +++ /dev/null @@ -1,18 +0,0 @@ -SOURCE_FILE@0..25 - FN@0..24 - CONST_KW@0..5 "const" - WHITESPACE@5..6 " " - UNSAFE_KW@6..12 "unsafe" - WHITESPACE@12..13 " " - FN_KW@13..15 "fn" - WHITESPACE@15..16 " " - NAME@16..19 - IDENT@16..19 "foo" - PARAM_LIST@19..21 - L_PAREN@19..20 "(" - R_PAREN@20..21 ")" - WHITESPACE@21..22 " " - BLOCK_EXPR@22..24 - L_CURLY@22..23 "{" - R_CURLY@23..24 "}" - WHITESPACE@24..25 "\n" diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0098_const_unsafe_fn.rs b/crates/ra_syntax/test_data/parser/inline/ok/0098_const_unsafe_fn.rs deleted file mode 100644 index 31a1e435f5..0000000000 --- a/crates/ra_syntax/test_data/parser/inline/ok/0098_const_unsafe_fn.rs +++ /dev/null @@ -1 +0,0 @@ -const unsafe fn foo() {} diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0101_unsafe_fn.rast b/crates/ra_syntax/test_data/parser/inline/ok/0101_unsafe_fn.rast deleted file mode 100644 index 73c94e5d43..0000000000 --- a/crates/ra_syntax/test_data/parser/inline/ok/0101_unsafe_fn.rast +++ /dev/null @@ -1,16 +0,0 @@ -SOURCE_FILE@0..19 - FN@0..18 - UNSAFE_KW@0..6 "unsafe" - WHITESPACE@6..7 " " - FN_KW@7..9 "fn" - WHITESPACE@9..10 " " - 
NAME@10..13 - IDENT@10..13 "foo" - PARAM_LIST@13..15 - L_PAREN@13..14 "(" - R_PAREN@14..15 ")" - WHITESPACE@15..16 " " - BLOCK_EXPR@16..18 - L_CURLY@16..17 "{" - R_CURLY@17..18 "}" - WHITESPACE@18..19 "\n" diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0101_unsafe_fn.rs b/crates/ra_syntax/test_data/parser/inline/ok/0101_unsafe_fn.rs deleted file mode 100644 index 33cfc4cd7a..0000000000 --- a/crates/ra_syntax/test_data/parser/inline/ok/0101_unsafe_fn.rs +++ /dev/null @@ -1 +0,0 @@ -unsafe fn foo() {} diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0124_async_fn.rast b/crates/ra_syntax/test_data/parser/inline/ok/0124_async_fn.rast deleted file mode 100644 index a7df188bd6..0000000000 --- a/crates/ra_syntax/test_data/parser/inline/ok/0124_async_fn.rast +++ /dev/null @@ -1,16 +0,0 @@ -SOURCE_FILE@0..18 - FN@0..17 - ASYNC_KW@0..5 "async" - WHITESPACE@5..6 " " - FN_KW@6..8 "fn" - WHITESPACE@8..9 " " - NAME@9..12 - IDENT@9..12 "foo" - PARAM_LIST@12..14 - L_PAREN@12..13 "(" - R_PAREN@13..14 ")" - WHITESPACE@14..15 " " - BLOCK_EXPR@15..17 - L_CURLY@15..16 "{" - R_CURLY@16..17 "}" - WHITESPACE@17..18 "\n" diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0124_async_fn.rs b/crates/ra_syntax/test_data/parser/inline/ok/0124_async_fn.rs deleted file mode 100644 index f4adcb62b3..0000000000 --- a/crates/ra_syntax/test_data/parser/inline/ok/0124_async_fn.rs +++ /dev/null @@ -1 +0,0 @@ -async fn foo() {} diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0128_combined_fns.rast b/crates/ra_syntax/test_data/parser/inline/ok/0128_combined_fns.rast deleted file mode 100644 index 98a20f36d6..0000000000 --- a/crates/ra_syntax/test_data/parser/inline/ok/0128_combined_fns.rast +++ /dev/null @@ -1,35 +0,0 @@ -SOURCE_FILE@0..50 - FN@0..24 - ASYNC_KW@0..5 "async" - WHITESPACE@5..6 " " - UNSAFE_KW@6..12 "unsafe" - WHITESPACE@12..13 " " - FN_KW@13..15 "fn" - WHITESPACE@15..16 " " - NAME@16..19 - IDENT@16..19 "foo" - PARAM_LIST@19..21 - L_PAREN@19..20 "(" - 
R_PAREN@20..21 ")" - WHITESPACE@21..22 " " - BLOCK_EXPR@22..24 - L_CURLY@22..23 "{" - R_CURLY@23..24 "}" - WHITESPACE@24..25 "\n" - FN@25..49 - CONST_KW@25..30 "const" - WHITESPACE@30..31 " " - UNSAFE_KW@31..37 "unsafe" - WHITESPACE@37..38 " " - FN_KW@38..40 "fn" - WHITESPACE@40..41 " " - NAME@41..44 - IDENT@41..44 "bar" - PARAM_LIST@44..46 - L_PAREN@44..45 "(" - R_PAREN@45..46 ")" - WHITESPACE@46..47 " " - BLOCK_EXPR@47..49 - L_CURLY@47..48 "{" - R_CURLY@48..49 "}" - WHITESPACE@49..50 "\n" diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0128_combined_fns.rs b/crates/ra_syntax/test_data/parser/inline/ok/0128_combined_fns.rs deleted file mode 100644 index 1262871453..0000000000 --- a/crates/ra_syntax/test_data/parser/inline/ok/0128_combined_fns.rs +++ /dev/null @@ -1,2 +0,0 @@ -async unsafe fn foo() {} -const unsafe fn bar() {} diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0132_default_fn_type.rast b/crates/ra_syntax/test_data/parser/inline/ok/0132_default_fn_type.rast deleted file mode 100644 index b8d26a53a5..0000000000 --- a/crates/ra_syntax/test_data/parser/inline/ok/0132_default_fn_type.rast +++ /dev/null @@ -1,55 +0,0 @@ -SOURCE_FILE@0..69 - IMPL@0..68 - IMPL_KW@0..4 "impl" - WHITESPACE@4..5 " " - PATH_TYPE@5..6 - PATH@5..6 - PATH_SEGMENT@5..6 - NAME_REF@5..6 - IDENT@5..6 "T" - WHITESPACE@6..7 " " - FOR_KW@7..10 "for" - WHITESPACE@10..11 " " - PATH_TYPE@11..14 - PATH@11..14 - PATH_SEGMENT@11..14 - NAME_REF@11..14 - IDENT@11..14 "Foo" - WHITESPACE@14..15 " " - ASSOC_ITEM_LIST@15..68 - L_CURLY@15..16 "{" - WHITESPACE@16..21 "\n " - TYPE_ALIAS@21..42 - DEFAULT_KW@21..28 "default" - WHITESPACE@28..29 " " - TYPE_KW@29..33 "type" - WHITESPACE@33..34 " " - NAME@34..35 - IDENT@34..35 "T" - WHITESPACE@35..36 " " - EQ@36..37 "=" - WHITESPACE@37..38 " " - PATH_TYPE@38..41 - PATH@38..41 - PATH_SEGMENT@38..41 - NAME_REF@38..41 - IDENT@38..41 "Bar" - SEMICOLON@41..42 ";" - WHITESPACE@42..47 "\n " - FN@47..66 - DEFAULT_KW@47..54 "default" - 
WHITESPACE@54..55 " " - FN_KW@55..57 "fn" - WHITESPACE@57..58 " " - NAME@58..61 - IDENT@58..61 "foo" - PARAM_LIST@61..63 - L_PAREN@61..62 "(" - R_PAREN@62..63 ")" - WHITESPACE@63..64 " " - BLOCK_EXPR@64..66 - L_CURLY@64..65 "{" - R_CURLY@65..66 "}" - WHITESPACE@66..67 "\n" - R_CURLY@67..68 "}" - WHITESPACE@68..69 "\n" diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0132_default_fn_type.rs b/crates/ra_syntax/test_data/parser/inline/ok/0132_default_fn_type.rs deleted file mode 100644 index 8f5d611139..0000000000 --- a/crates/ra_syntax/test_data/parser/inline/ok/0132_default_fn_type.rs +++ /dev/null @@ -1,4 +0,0 @@ -impl T for Foo { - default type T = Bar; - default fn foo() {} -} diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0163_default_unsafe_fn.rast b/crates/ra_syntax/test_data/parser/inline/ok/0163_default_unsafe_fn.rast deleted file mode 100644 index 1269621dc2..0000000000 --- a/crates/ra_syntax/test_data/parser/inline/ok/0163_default_unsafe_fn.rast +++ /dev/null @@ -1,40 +0,0 @@ -SOURCE_FILE@0..50 - IMPL@0..49 - IMPL_KW@0..4 "impl" - WHITESPACE@4..5 " " - PATH_TYPE@5..6 - PATH@5..6 - PATH_SEGMENT@5..6 - NAME_REF@5..6 - IDENT@5..6 "T" - WHITESPACE@6..7 " " - FOR_KW@7..10 "for" - WHITESPACE@10..11 " " - PATH_TYPE@11..14 - PATH@11..14 - PATH_SEGMENT@11..14 - NAME_REF@11..14 - IDENT@11..14 "Foo" - WHITESPACE@14..15 " " - ASSOC_ITEM_LIST@15..49 - L_CURLY@15..16 "{" - WHITESPACE@16..21 "\n " - FN@21..47 - DEFAULT_KW@21..28 "default" - WHITESPACE@28..29 " " - UNSAFE_KW@29..35 "unsafe" - WHITESPACE@35..36 " " - FN_KW@36..38 "fn" - WHITESPACE@38..39 " " - NAME@39..42 - IDENT@39..42 "foo" - PARAM_LIST@42..44 - L_PAREN@42..43 "(" - R_PAREN@43..44 ")" - WHITESPACE@44..45 " " - BLOCK_EXPR@45..47 - L_CURLY@45..46 "{" - R_CURLY@46..47 "}" - WHITESPACE@47..48 "\n" - R_CURLY@48..49 "}" - WHITESPACE@49..50 "\n" diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0163_default_unsafe_fn.rs 
b/crates/ra_syntax/test_data/parser/inline/ok/0163_default_unsafe_fn.rs deleted file mode 100644 index 12926cd8a1..0000000000 --- a/crates/ra_syntax/test_data/parser/inline/ok/0163_default_unsafe_fn.rs +++ /dev/null @@ -1,3 +0,0 @@ -impl T for Foo { - default unsafe fn foo() {} -} diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0163_default_unsafe_impl.rast b/crates/ra_syntax/test_data/parser/inline/ok/0163_default_unsafe_impl.rast deleted file mode 100644 index 6bfe925af2..0000000000 --- a/crates/ra_syntax/test_data/parser/inline/ok/0163_default_unsafe_impl.rast +++ /dev/null @@ -1,18 +0,0 @@ -SOURCE_FILE@0..27 - IMPL@0..26 - DEFAULT_KW@0..7 "default" - WHITESPACE@7..8 " " - UNSAFE_KW@8..14 "unsafe" - WHITESPACE@14..15 " " - IMPL_KW@15..19 "impl" - WHITESPACE@19..20 " " - PATH_TYPE@20..23 - PATH@20..23 - PATH_SEGMENT@20..23 - NAME_REF@20..23 - IDENT@20..23 "Foo" - WHITESPACE@23..24 " " - ASSOC_ITEM_LIST@24..26 - L_CURLY@24..25 "{" - R_CURLY@25..26 "}" - WHITESPACE@26..27 "\n" diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0163_default_unsafe_impl.rs b/crates/ra_syntax/test_data/parser/inline/ok/0163_default_unsafe_impl.rs deleted file mode 100644 index ba0998ff4d..0000000000 --- a/crates/ra_syntax/test_data/parser/inline/ok/0163_default_unsafe_impl.rs +++ /dev/null @@ -1 +0,0 @@ -default unsafe impl Foo {} diff --git a/crates/ra_syntax/test_data/parser/ok/0021_extern_fn.rast b/crates/ra_syntax/test_data/parser/ok/0021_extern_fn.rast deleted file mode 100644 index 5524efaafe..0000000000 --- a/crates/ra_syntax/test_data/parser/ok/0021_extern_fn.rast +++ /dev/null @@ -1,56 +0,0 @@ -SOURCE_FILE@0..71 - FN@0..19 - ABI@0..6 - EXTERN_KW@0..6 "extern" - WHITESPACE@6..7 " " - FN_KW@7..9 "fn" - WHITESPACE@9..10 " " - NAME@10..13 - IDENT@10..13 "foo" - PARAM_LIST@13..15 - L_PAREN@13..14 "(" - R_PAREN@14..15 ")" - WHITESPACE@15..16 " " - BLOCK_EXPR@16..19 - L_CURLY@16..17 "{" - WHITESPACE@17..18 "\n" - R_CURLY@18..19 "}" - WHITESPACE@19..21 "\n\n" - 
FN@21..44 - ABI@21..31 - EXTERN_KW@21..27 "extern" - WHITESPACE@27..28 " " - STRING@28..31 "\"C\"" - WHITESPACE@31..32 " " - FN_KW@32..34 "fn" - WHITESPACE@34..35 " " - NAME@35..38 - IDENT@35..38 "bar" - PARAM_LIST@38..40 - L_PAREN@38..39 "(" - R_PAREN@39..40 ")" - WHITESPACE@40..41 " " - BLOCK_EXPR@41..44 - L_CURLY@41..42 "{" - WHITESPACE@42..43 "\n" - R_CURLY@43..44 "}" - WHITESPACE@44..46 "\n\n" - FN@46..70 - ABI@46..57 - EXTERN_KW@46..52 "extern" - WHITESPACE@52..53 " " - RAW_STRING@53..57 "r\"D\"" - WHITESPACE@57..58 " " - FN_KW@58..60 "fn" - WHITESPACE@60..61 " " - NAME@61..64 - IDENT@61..64 "baz" - PARAM_LIST@64..66 - L_PAREN@64..65 "(" - R_PAREN@65..66 ")" - WHITESPACE@66..67 " " - BLOCK_EXPR@67..70 - L_CURLY@67..68 "{" - WHITESPACE@68..69 "\n" - R_CURLY@69..70 "}" - WHITESPACE@70..71 "\n" diff --git a/crates/ra_syntax/test_data/parser/ok/0021_extern_fn.rs b/crates/ra_syntax/test_data/parser/ok/0021_extern_fn.rs deleted file mode 100644 index e929eef741..0000000000 --- a/crates/ra_syntax/test_data/parser/ok/0021_extern_fn.rs +++ /dev/null @@ -1,8 +0,0 @@ -extern fn foo() { -} - -extern "C" fn bar() { -} - -extern r"D" fn baz() { -} diff --git a/crates/ra_syntax/test_data/parser/ok/0066_default_const.rast b/crates/ra_syntax/test_data/parser/ok/0066_default_const.rast deleted file mode 100644 index 6246a31a66..0000000000 --- a/crates/ra_syntax/test_data/parser/ok/0066_default_const.rast +++ /dev/null @@ -1,44 +0,0 @@ -SOURCE_FILE@0..46 - IMPL@0..45 - IMPL_KW@0..4 "impl" - WHITESPACE@4..5 " " - PATH_TYPE@5..6 - PATH@5..6 - PATH_SEGMENT@5..6 - NAME_REF@5..6 - IDENT@5..6 "T" - WHITESPACE@6..7 " " - FOR_KW@7..10 "for" - WHITESPACE@10..11 " " - PATH_TYPE@11..14 - PATH@11..14 - PATH_SEGMENT@11..14 - NAME_REF@11..14 - IDENT@11..14 "Foo" - WHITESPACE@14..15 " " - ASSOC_ITEM_LIST@15..45 - L_CURLY@15..16 "{" - WHITESPACE@16..19 "\n " - CONST@19..43 - DEFAULT_KW@19..26 "default" - WHITESPACE@26..27 " " - CONST_KW@27..32 "const" - WHITESPACE@32..33 " " - NAME@33..34 - 
IDENT@33..34 "f" - COLON@34..35 ":" - WHITESPACE@35..36 " " - PATH_TYPE@36..38 - PATH@36..38 - PATH_SEGMENT@36..38 - NAME_REF@36..38 - IDENT@36..38 "u8" - WHITESPACE@38..39 " " - EQ@39..40 "=" - WHITESPACE@40..41 " " - LITERAL@41..42 - INT_NUMBER@41..42 "0" - SEMICOLON@42..43 ";" - WHITESPACE@43..44 "\n" - R_CURLY@44..45 "}" - WHITESPACE@45..46 "\n" diff --git a/crates/ra_syntax/test_data/parser/ok/0066_default_const.rs b/crates/ra_syntax/test_data/parser/ok/0066_default_const.rs deleted file mode 100644 index dfb3b92dca..0000000000 --- a/crates/ra_syntax/test_data/parser/ok/0066_default_const.rs +++ /dev/null @@ -1,3 +0,0 @@ -impl T for Foo { - default const f: u8 = 0; -} diff --git a/crates/ra_text_edit/Cargo.toml b/crates/ra_text_edit/Cargo.toml deleted file mode 100644 index dbb2233504..0000000000 --- a/crates/ra_text_edit/Cargo.toml +++ /dev/null @@ -1,13 +0,0 @@ -[package] -edition = "2018" -name = "ra_text_edit" -version = "0.1.0" -authors = ["rust-analyzer developers"] -publish = false -license = "MIT OR Apache-2.0" - -[lib] -doctest = false - -[dependencies] -text-size = "1.0.0" diff --git a/crates/ra_text_edit/src/lib.rs b/crates/ra_text_edit/src/lib.rs deleted file mode 100644 index 25554f583e..0000000000 --- a/crates/ra_text_edit/src/lib.rs +++ /dev/null @@ -1,170 +0,0 @@ -//! Representation of a `TextEdit`. -//! -//! `rust-analyzer` never mutates text itself and only sends diffs to clients, -//! so `TextEdit` is the ultimate representation of the work done by -//! rust-analyzer. 
-use std::{slice, vec}; - -pub use text_size::{TextRange, TextSize}; - -/// `InsertDelete` -- a single "atomic" change to text -/// -/// Must not overlap with other `InDel`s -#[derive(Debug, Clone)] -pub struct Indel { - pub insert: String, - /// Refers to offsets in the original text - pub delete: TextRange, -} - -#[derive(Default, Debug, Clone)] -pub struct TextEdit { - indels: Vec, -} - -#[derive(Debug, Default, Clone)] -pub struct TextEditBuilder { - indels: Vec, -} - -impl Indel { - pub fn insert(offset: TextSize, text: String) -> Indel { - Indel::replace(TextRange::empty(offset), text) - } - pub fn delete(range: TextRange) -> Indel { - Indel::replace(range, String::new()) - } - pub fn replace(range: TextRange, replace_with: String) -> Indel { - Indel { delete: range, insert: replace_with } - } - - pub fn apply(&self, text: &mut String) { - let start: usize = self.delete.start().into(); - let end: usize = self.delete.end().into(); - text.replace_range(start..end, &self.insert); - } -} - -impl TextEdit { - pub fn insert(offset: TextSize, text: String) -> TextEdit { - let mut builder = TextEditBuilder::default(); - builder.insert(offset, text); - builder.finish() - } - - pub fn delete(range: TextRange) -> TextEdit { - let mut builder = TextEditBuilder::default(); - builder.delete(range); - builder.finish() - } - - pub fn replace(range: TextRange, replace_with: String) -> TextEdit { - let mut builder = TextEditBuilder::default(); - builder.replace(range, replace_with); - builder.finish() - } - - pub fn len(&self) -> usize { - self.indels.len() - } - - pub fn is_empty(&self) -> bool { - self.indels.is_empty() - } - - pub fn iter(&self) -> slice::Iter<'_, Indel> { - self.indels.iter() - } - - pub fn into_iter(self) -> vec::IntoIter { - self.indels.into_iter() - } - - pub fn apply(&self, text: &mut String) { - match self.len() { - 0 => return, - 1 => { - self.indels[0].apply(text); - return; - } - _ => (), - } - - let mut total_len = TextSize::of(&*text); - for 
indel in self.indels.iter() { - total_len += TextSize::of(&indel.insert); - total_len -= indel.delete.end() - indel.delete.start(); - } - let mut buf = String::with_capacity(total_len.into()); - let mut prev = 0; - for indel in self.indels.iter() { - let start: usize = indel.delete.start().into(); - let end: usize = indel.delete.end().into(); - if start > prev { - buf.push_str(&text[prev..start]); - } - buf.push_str(&indel.insert); - prev = end; - } - buf.push_str(&text[prev..text.len()]); - assert_eq!(TextSize::of(&buf), total_len); - - // FIXME: figure out a way to mutate the text in-place or reuse the - // memory in some other way - *text = buf - } - - pub fn union(&mut self, other: TextEdit) -> Result<(), TextEdit> { - // FIXME: can be done without allocating intermediate vector - let mut all = self.iter().chain(other.iter()).collect::>(); - if !check_disjoint(&mut all) { - return Err(other); - } - self.indels.extend(other.indels); - assert!(check_disjoint(&mut self.indels)); - Ok(()) - } - - pub fn apply_to_offset(&self, offset: TextSize) -> Option { - let mut res = offset; - for indel in self.indels.iter() { - if indel.delete.start() >= offset { - break; - } - if offset < indel.delete.end() { - return None; - } - res += TextSize::of(&indel.insert); - res -= indel.delete.len(); - } - Some(res) - } -} - -impl TextEditBuilder { - pub fn replace(&mut self, range: TextRange, replace_with: String) { - self.indels.push(Indel::replace(range, replace_with)) - } - pub fn delete(&mut self, range: TextRange) { - self.indels.push(Indel::delete(range)) - } - pub fn insert(&mut self, offset: TextSize, text: String) { - self.indels.push(Indel::insert(offset, text)) - } - pub fn finish(self) -> TextEdit { - let mut indels = self.indels; - assert!(check_disjoint(&mut indels)); - TextEdit { indels } - } - pub fn invalidates_offset(&self, offset: TextSize) -> bool { - self.indels.iter().any(|indel| indel.delete.contains_inclusive(offset)) - } -} - -fn check_disjoint(indels: &mut 
[impl std::borrow::Borrow]) -> bool { - indels.sort_by_key(|indel| (indel.borrow().delete.start(), indel.borrow().delete.end())); - indels - .iter() - .zip(indels.iter().skip(1)) - .all(|(l, r)| l.borrow().delete.end() <= r.borrow().delete.start()) -} diff --git a/crates/ra_toolchain/Cargo.toml b/crates/ra_toolchain/Cargo.toml deleted file mode 100644 index 84b748c0a4..0000000000 --- a/crates/ra_toolchain/Cargo.toml +++ /dev/null @@ -1,12 +0,0 @@ -[package] -edition = "2018" -name = "ra_toolchain" -version = "0.1.0" -authors = ["rust-analyzer developers"] -license = "MIT OR Apache-2.0" - -[lib] -doctest = false - -[dependencies] -home = "0.5.3" diff --git a/crates/ra_toolchain/src/lib.rs b/crates/ra_toolchain/src/lib.rs deleted file mode 100644 index 9916e52c48..0000000000 --- a/crates/ra_toolchain/src/lib.rs +++ /dev/null @@ -1,68 +0,0 @@ -//! This crate contains a single public function -//! [`get_path_for_executable`](fn.get_path_for_executable.html). -//! See docs there for more information. -use std::{env, iter, path::PathBuf}; - -pub fn cargo() -> PathBuf { - get_path_for_executable("cargo") -} - -pub fn rustc() -> PathBuf { - get_path_for_executable("rustc") -} - -pub fn rustup() -> PathBuf { - get_path_for_executable("rustup") -} - -pub fn rustfmt() -> PathBuf { - get_path_for_executable("rustfmt") -} - -/// Return a `PathBuf` to use for the given executable. -/// -/// E.g., `get_path_for_executable("cargo")` may return just `cargo` if that -/// gives a valid Cargo executable; or it may return a full path to a valid -/// Cargo. 
-fn get_path_for_executable(executable_name: &'static str) -> PathBuf { - // The current implementation checks three places for an executable to use: - // 1) Appropriate environment variable (erroring if this is set but not a usable executable) - // example: for cargo, this checks $CARGO environment variable; for rustc, $RUSTC; etc - // 2) `` - // example: for cargo, this tries just `cargo`, which will succeed if `cargo` is on the $PATH - // 3) `~/.cargo/bin/` - // example: for cargo, this tries ~/.cargo/bin/cargo - // It seems that this is a reasonable place to try for cargo, rustc, and rustup - let env_var = executable_name.to_ascii_uppercase(); - if let Some(path) = env::var_os(&env_var) { - return path.into(); - } - - if lookup_in_path(executable_name) { - return executable_name.into(); - } - - if let Some(mut path) = home::home_dir() { - path.push(".cargo"); - path.push("bin"); - path.push(executable_name); - if let Some(path) = probe(path) { - return path; - } - } - - executable_name.into() -} - -fn lookup_in_path(exec: &str) -> bool { - let paths = env::var_os("PATH").unwrap_or_default(); - env::split_paths(&paths).map(|path| path.join(exec)).find_map(probe).is_some() -} - -fn probe(path: PathBuf) -> Option { - let with_extension = match env::consts::EXE_EXTENSION { - "" => None, - it => Some(path.with_extension(it)), - }; - iter::once(path).chain(with_extension).find(|it| it.is_file()) -} diff --git a/crates/ra_tt/Cargo.toml b/crates/ra_tt/Cargo.toml deleted file mode 100644 index 3c45248c3e..0000000000 --- a/crates/ra_tt/Cargo.toml +++ /dev/null @@ -1,15 +0,0 @@ -[package] -edition = "2018" -name = "ra_tt" -version = "0.1.0" -authors = ["rust-analyzer developers"] -license = "MIT OR Apache-2.0" - -[lib] -doctest = false - -[dependencies] -stdx = { path = "../stdx" } -# ideally, `serde` should be enabled by `rust-analyzer`, but we enable it here -# to reduce number of compilations -smol_str = { version = "0.1.15", features = ["serde"] } diff --git 
a/crates/ra_tt/src/lib.rs b/crates/ra_tt/src/lib.rs deleted file mode 100644 index 8faf1cc679..0000000000 --- a/crates/ra_tt/src/lib.rs +++ /dev/null @@ -1,246 +0,0 @@ -//! `tt` crate defines a `TokenTree` data structure: this is the interface (both -//! input and output) of macros. It closely mirrors `proc_macro` crate's -//! `TokenTree`. -use std::{ - fmt::{self, Debug}, - panic::RefUnwindSafe, -}; - -use stdx::impl_from; - -pub use smol_str::SmolStr; - -/// Represents identity of the token. -/// -/// For hygiene purposes, we need to track which expanded tokens originated from -/// which source tokens. We do it by assigning an distinct identity to each -/// source token and making sure that identities are preserved during macro -/// expansion. -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct TokenId(pub u32); - -impl TokenId { - pub const fn unspecified() -> TokenId { - TokenId(!0) - } -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum TokenTree { - Leaf(Leaf), - Subtree(Subtree), -} -impl_from!(Leaf, Subtree for TokenTree); - -impl TokenTree { - pub fn empty() -> Self { - TokenTree::Subtree(Subtree::default()) - } -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum Leaf { - Literal(Literal), - Punct(Punct), - Ident(Ident), -} -impl_from!(Literal, Punct, Ident for Leaf); - -#[derive(Clone, PartialEq, Eq, Hash, Default)] -pub struct Subtree { - pub delimiter: Option, - pub token_trees: Vec, -} - -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] -pub struct Delimiter { - pub id: TokenId, - pub kind: DelimiterKind, -} - -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] -pub enum DelimiterKind { - Parenthesis, - Brace, - Bracket, -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct Literal { - pub text: SmolStr, - pub id: TokenId, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct Punct { - pub char: char, - pub spacing: Spacing, - pub id: TokenId, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, 
Hash)] -pub enum Spacing { - Alone, - Joint, -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct Ident { - pub text: SmolStr, - pub id: TokenId, -} - -fn print_debug_subtree(f: &mut fmt::Formatter<'_>, subtree: &Subtree, level: usize) -> fmt::Result { - let align = std::iter::repeat(" ").take(level).collect::(); - - let aux = match subtree.delimiter.map(|it| (it.kind, it.id.0)) { - None => "$".to_string(), - Some((DelimiterKind::Parenthesis, id)) => format!("() {}", id), - Some((DelimiterKind::Brace, id)) => format!("{{}} {}", id), - Some((DelimiterKind::Bracket, id)) => format!("[] {}", id), - }; - - if subtree.token_trees.is_empty() { - write!(f, "{}SUBTREE {}", align, aux)?; - } else { - writeln!(f, "{}SUBTREE {}", align, aux)?; - for (idx, child) in subtree.token_trees.iter().enumerate() { - print_debug_token(f, child, level + 1)?; - if idx != subtree.token_trees.len() - 1 { - writeln!(f, "")?; - } - } - } - - Ok(()) -} - -fn print_debug_token(f: &mut fmt::Formatter<'_>, tkn: &TokenTree, level: usize) -> fmt::Result { - let align = std::iter::repeat(" ").take(level).collect::(); - - match tkn { - TokenTree::Leaf(leaf) => match leaf { - Leaf::Literal(lit) => write!(f, "{}LITERAL {} {}", align, lit.text, lit.id.0)?, - Leaf::Punct(punct) => write!( - f, - "{}PUNCH {} [{}] {}", - align, - punct.char, - if punct.spacing == Spacing::Alone { "alone" } else { "joint" }, - punct.id.0 - )?, - Leaf::Ident(ident) => write!(f, "{}IDENT {} {}", align, ident.text, ident.id.0)?, - }, - TokenTree::Subtree(subtree) => { - print_debug_subtree(f, subtree, level)?; - } - } - - Ok(()) -} - -impl Debug for Subtree { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - print_debug_subtree(f, self, 0) - } -} - -impl fmt::Display for TokenTree { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - TokenTree::Leaf(it) => fmt::Display::fmt(it, f), - TokenTree::Subtree(it) => fmt::Display::fmt(it, f), - } - } -} - -impl fmt::Display for Subtree { - 
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let (l, r) = match self.delimiter_kind() { - Some(DelimiterKind::Parenthesis) => ("(", ")"), - Some(DelimiterKind::Brace) => ("{", "}"), - Some(DelimiterKind::Bracket) => ("[", "]"), - None => ("", ""), - }; - f.write_str(l)?; - let mut needs_space = false; - for tt in self.token_trees.iter() { - if needs_space { - f.write_str(" ")?; - } - needs_space = true; - match tt { - TokenTree::Leaf(Leaf::Punct(p)) => { - needs_space = p.spacing == Spacing::Alone; - fmt::Display::fmt(p, f)? - } - tt => fmt::Display::fmt(tt, f)?, - } - } - f.write_str(r)?; - Ok(()) - } -} - -impl fmt::Display for Leaf { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - Leaf::Ident(it) => fmt::Display::fmt(it, f), - Leaf::Literal(it) => fmt::Display::fmt(it, f), - Leaf::Punct(it) => fmt::Display::fmt(it, f), - } - } -} - -impl fmt::Display for Ident { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&self.text, f) - } -} - -impl fmt::Display for Literal { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&self.text, f) - } -} - -impl fmt::Display for Punct { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&self.char, f) - } -} - -impl Subtree { - /// Count the number of tokens recursively - pub fn count(&self) -> usize { - let children_count = self - .token_trees - .iter() - .map(|c| match c { - TokenTree::Subtree(c) => c.count(), - _ => 0, - }) - .sum::(); - - self.token_trees.len() + children_count - } - - pub fn delimiter_kind(&self) -> Option { - self.delimiter.map(|it| it.kind) - } -} - -pub mod buffer; - -#[derive(Debug, PartialEq, Eq, Clone)] -pub enum ExpansionError { - IOError(String), - JsonError(String), - Unknown(String), - ExpansionError(String), -} - -pub trait TokenExpander: Debug + Send + Sync + RefUnwindSafe { - fn expand(&self, subtree: &Subtree, attrs: Option<&Subtree>) - -> Result; -} diff --git 
a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml index 02c1371ac0..c7c1eda0fd 100644 --- a/crates/rust-analyzer/Cargo.toml +++ b/crates/rust-analyzer/Cargo.toml @@ -1,10 +1,10 @@ [package] -edition = "2018" name = "rust-analyzer" -version = "0.1.0" +version = "0.0.0" +license = "MIT OR Apache-2.0" authors = ["rust-analyzer developers"] autobins = false -license = "MIT OR Apache-2.0" +edition = "2018" [lib] doctest = false @@ -30,29 +30,28 @@ serde_json = "1.0.48" threadpool = "1.7.1" rayon = "1.3.1" mimalloc = { version = "0.1.19", default-features = false, optional = true } +lsp-server = "0.3.3" stdx = { path = "../stdx" } - -lsp-server = "0.3.3" flycheck = { path = "../flycheck" } -ra_ide = { path = "../ra_ide" } -ra_prof = { path = "../ra_prof" } -ra_project_model = { path = "../ra_project_model" } -ra_syntax = { path = "../ra_syntax" } -ra_text_edit = { path = "../ra_text_edit" } +ide = { path = "../ide" } +profile = { path = "../profile" } +project_model = { path = "../project_model" } +syntax = { path = "../syntax" } +text_edit = { path = "../text_edit" } vfs = { path = "../vfs" } vfs-notify = { path = "../vfs-notify" } -ra_cfg = { path = "../ra_cfg" } -ra_toolchain = { path = "../ra_toolchain" } +cfg = { path = "../cfg" } +toolchain = { path = "../toolchain" } # This should only be used in CLI -ra_db = { path = "../ra_db" } -ra_ide_db = { path = "../ra_ide_db" } -ra_ssr = { path = "../ra_ssr" } -hir = { path = "../ra_hir", package = "ra_hir" } -hir_def = { path = "../ra_hir_def", package = "ra_hir_def" } -hir_ty = { path = "../ra_hir_ty", package = "ra_hir_ty" } -ra_proc_macro_srv = { path = "../ra_proc_macro_srv" } +base_db = { path = "../base_db" } +ide_db = { path = "../ide_db" } +ssr = { path = "../ssr" } +hir = { path = "../hir" } +hir_def = { path = "../hir_def" } +hir_ty = { path = "../hir_ty" } +proc_macro_srv = { path = "../proc_macro_srv" } [target.'cfg(windows)'.dependencies] winapi = "0.3.8" @@ -60,5 +59,5 @@ winapi = "0.3.8" 
[dev-dependencies] expect = { path = "../expect" } test_utils = { path = "../test_utils" } -mbe = { path = "../ra_mbe", package = "ra_mbe" } -tt = { path = "../ra_tt", package = "ra_tt" } +mbe = { path = "../mbe" } +tt = { path = "../tt" } diff --git a/crates/rust-analyzer/src/bin/args.rs b/crates/rust-analyzer/src/bin/args.rs index d3081e88bd..0bc92431a9 100644 --- a/crates/rust-analyzer/src/bin/args.rs +++ b/crates/rust-analyzer/src/bin/args.rs @@ -7,8 +7,8 @@ use std::{env, fmt::Write, path::PathBuf}; use anyhow::{bail, Result}; use pico_args::Arguments; -use ra_ssr::{SsrPattern, SsrRule}; use rust_analyzer::cli::{AnalysisStatsCmd, BenchCmd, BenchWhat, Position, Verbosity}; +use ssr::{SsrPattern, SsrRule}; use vfs::AbsPathBuf; pub(crate) struct Args { diff --git a/crates/rust-analyzer/src/bin/main.rs b/crates/rust-analyzer/src/bin/main.rs index fc7f8b01d2..bade31ca24 100644 --- a/crates/rust-analyzer/src/bin/main.rs +++ b/crates/rust-analyzer/src/bin/main.rs @@ -6,7 +6,7 @@ mod args; use std::{convert::TryFrom, process}; use lsp_server::Connection; -use ra_project_model::ProjectManifest; +use project_model::ProjectManifest; use rust_analyzer::{ cli, config::{Config, LinkedProject}, @@ -30,7 +30,7 @@ fn try_main() -> Result<()> { let args = args::Args::parse()?; match args.command { args::Command::RunServer => run_server()?, - args::Command::ProcMacro => ra_proc_macro_srv::cli::run()?, + args::Command::ProcMacro => proc_macro_srv::cli::run()?, args::Command::Parse { no_dump } => cli::parse(no_dump)?, args::Command::Symbols => cli::symbols()?, @@ -55,7 +55,7 @@ fn try_main() -> Result<()> { fn setup_logging() -> Result<()> { std::env::set_var("RUST_BACKTRACE", "short"); env_logger::try_init_from_env("RA_LOG")?; - ra_prof::init(); + profile::init(); Ok(()) } diff --git a/crates/rust-analyzer/src/cargo_target_spec.rs b/crates/rust-analyzer/src/cargo_target_spec.rs index 03c41263aa..3041915e18 100644 --- a/crates/rust-analyzer/src/cargo_target_spec.rs +++ 
b/crates/rust-analyzer/src/cargo_target_spec.rs @@ -1,8 +1,8 @@ //! See `CargoTargetSpec` -use ra_cfg::CfgExpr; -use ra_ide::{FileId, RunnableKind, TestId}; -use ra_project_model::{self, TargetKind}; +use cfg::CfgExpr; +use ide::{FileId, RunnableKind, TestId}; +use project_model::{self, TargetKind}; use vfs::AbsPathBuf; use crate::{global_state::GlobalStateSnapshot, Result}; @@ -177,9 +177,9 @@ fn required_features(cfg_expr: &CfgExpr, features: &mut Vec) { mod tests { use super::*; + use cfg::CfgExpr; use mbe::ast_to_token_tree; - use ra_cfg::CfgExpr; - use ra_syntax::{ + use syntax::{ ast::{self, AstNode}, SmolStr, }; diff --git a/crates/rust-analyzer/src/cli.rs b/crates/rust-analyzer/src/cli.rs index 1034d11bd8..6966ee576f 100644 --- a/crates/rust-analyzer/src/cli.rs +++ b/crates/rust-analyzer/src/cli.rs @@ -10,15 +10,16 @@ mod ssr; use std::io::Read; use anyhow::Result; -use ra_ide::Analysis; -use ra_prof::profile; -use ra_syntax::{AstNode, SourceFile}; +use ide::Analysis; +use syntax::{AstNode, SourceFile}; -pub use analysis_bench::{BenchCmd, BenchWhat, Position}; -pub use analysis_stats::AnalysisStatsCmd; -pub use diagnostics::diagnostics; -pub use load_cargo::load_cargo; -pub use ssr::{apply_ssr_rules, search_for_patterns}; +pub use self::{ + analysis_bench::{BenchCmd, BenchWhat, Position}, + analysis_stats::AnalysisStatsCmd, + diagnostics::diagnostics, + load_cargo::load_cargo, + ssr::{apply_ssr_rules, search_for_patterns}, +}; #[derive(Clone, Copy)] pub enum Verbosity { @@ -38,7 +39,7 @@ impl Verbosity { } pub fn parse(no_dump: bool) -> Result<()> { - let _p = profile("parsing"); + let _p = profile::span("parsing"); let file = file()?; if !no_dump { println!("{:#?}", file.syntax()); diff --git a/crates/rust-analyzer/src/cli/analysis_bench.rs b/crates/rust-analyzer/src/cli/analysis_bench.rs index c54ee5f4de..0f614f9e0c 100644 --- a/crates/rust-analyzer/src/cli/analysis_bench.rs +++ b/crates/rust-analyzer/src/cli/analysis_bench.rs @@ -3,11 +3,11 @@ use 
std::{env, path::PathBuf, str::FromStr, sync::Arc, time::Instant}; use anyhow::{bail, format_err, Result}; -use ra_db::{ +use base_db::{ salsa::{Database, Durability}, FileId, }; -use ra_ide::{Analysis, AnalysisChange, AnalysisHost, CompletionConfig, FilePosition, LineCol}; +use ide::{Analysis, AnalysisChange, AnalysisHost, CompletionConfig, FilePosition, LineCol}; use vfs::AbsPathBuf; use crate::{ @@ -52,7 +52,7 @@ impl FromStr for Position { impl BenchCmd { pub fn run(self, verbosity: Verbosity) -> Result<()> { - ra_prof::init(); + profile::init(); let start = Instant::now(); eprint!("loading: "); diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs index 721d41a58f..fb2b2b0001 100644 --- a/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -6,6 +6,10 @@ use std::{ time::{SystemTime, UNIX_EPOCH}, }; +use base_db::{ + salsa::{self, ParallelDatabase}, + SourceDatabaseExt, +}; use hir::{ db::{AstDatabase, DefDatabase, HirDatabase}, original_range, AssocItem, Crate, HasSource, HirDisplay, ModuleDef, @@ -14,14 +18,10 @@ use hir_def::FunctionId; use hir_ty::{Ty, TypeWalk}; use itertools::Itertools; use oorandom::Rand32; -use ra_db::{ - salsa::{self, ParallelDatabase}, - SourceDatabaseExt, -}; -use ra_syntax::AstNode; use rayon::prelude::*; use rustc_hash::FxHashSet; use stdx::format_to; +use syntax::AstNode; use crate::{ cli::{ @@ -29,7 +29,7 @@ use crate::{ }, print_memory_usage, }; -use ra_prof::StopWatch; +use profile::StopWatch; /// Need to wrap Snapshot to provide `Clone` impl for `map_with` struct Snap(DB); @@ -72,7 +72,7 @@ impl AnalysisStatsCmd { shuffle(&mut rng, &mut krates); } for krate in krates { - let module = krate.root_module(db).expect("crate without root module"); + let module = krate.root_module(db); let file_id = module.definition_source(db).file_id; let file_id = file_id.original_file(db); let source_root = db.file_source_root(file_id); diff 
--git a/crates/rust-analyzer/src/cli/diagnostics.rs b/crates/rust-analyzer/src/cli/diagnostics.rs index 4ac8c8772e..3371c4fd30 100644 --- a/crates/rust-analyzer/src/cli/diagnostics.rs +++ b/crates/rust-analyzer/src/cli/diagnostics.rs @@ -6,9 +6,9 @@ use std::path::Path; use anyhow::anyhow; use rustc_hash::FxHashSet; +use base_db::SourceDatabaseExt; use hir::Crate; -use ra_db::SourceDatabaseExt; -use ra_ide::Severity; +use ide::Severity; use crate::cli::{load_cargo::load_cargo, Result}; @@ -28,7 +28,7 @@ pub fn diagnostics( let mut work = Vec::new(); let krates = Crate::all(db); for krate in krates { - let module = krate.root_module(db).expect("crate without root module"); + let module = krate.root_module(db); let file_id = module.definition_source(db).file_id; let file_id = file_id.original_file(db); let source_root = db.file_source_root(file_id); diff --git a/crates/rust-analyzer/src/cli/load_cargo.rs b/crates/rust-analyzer/src/cli/load_cargo.rs index a43bf2244d..c47cf6ef3e 100644 --- a/crates/rust-analyzer/src/cli/load_cargo.rs +++ b/crates/rust-analyzer/src/cli/load_cargo.rs @@ -3,10 +3,10 @@ use std::{path::Path, sync::Arc}; use anyhow::Result; +use base_db::CrateGraph; use crossbeam_channel::{unbounded, Receiver}; -use ra_db::CrateGraph; -use ra_ide::{AnalysisChange, AnalysisHost}; -use ra_project_model::{CargoConfig, ProcMacroClient, ProjectManifest, ProjectWorkspace}; +use ide::{AnalysisChange, AnalysisHost}; +use project_model::{CargoConfig, ProcMacroClient, ProjectManifest, ProjectWorkspace}; use vfs::{loader::Handle, AbsPath, AbsPathBuf}; use crate::reload::{ProjectFolders, SourceRootConfig}; diff --git a/crates/rust-analyzer/src/cli/ssr.rs b/crates/rust-analyzer/src/cli/ssr.rs index 194bec008d..c11e109437 100644 --- a/crates/rust-analyzer/src/cli/ssr.rs +++ b/crates/rust-analyzer/src/cli/ssr.rs @@ -1,10 +1,10 @@ //! Applies structured search replace rules from the command line. 
use crate::cli::{load_cargo::load_cargo, Result}; -use ra_ssr::{MatchFinder, SsrPattern, SsrRule}; +use ssr::{MatchFinder, SsrPattern, SsrRule}; pub fn apply_ssr_rules(rules: Vec) -> Result<()> { - use ra_db::SourceDatabaseExt; + use base_db::SourceDatabaseExt; let (host, vfs) = load_cargo(&std::env::current_dir()?, true, true)?; let db = host.raw_database(); let mut match_finder = MatchFinder::at_first_file(db)?; @@ -26,8 +26,8 @@ pub fn apply_ssr_rules(rules: Vec) -> Result<()> { /// `debug_snippet`. This is intended for debugging and probably isn't in it's current form useful /// for much else. pub fn search_for_patterns(patterns: Vec, debug_snippet: Option) -> Result<()> { - use ra_db::SourceDatabaseExt; - use ra_ide_db::symbol_index::SymbolsDatabase; + use base_db::SourceDatabaseExt; + use ide_db::symbol_index::SymbolsDatabase; let (host, _vfs) = load_cargo(&std::env::current_dir()?, true, true)?; let db = host.raw_database(); let mut match_finder = MatchFinder::at_first_file(db)?; diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index 70b4512d0b..33fb5e9c22 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -10,9 +10,9 @@ use std::{ffi::OsString, path::PathBuf}; use flycheck::FlycheckConfig; +use ide::{AssistConfig, CompletionConfig, HoverConfig, InlayHintsConfig}; use lsp_types::ClientCapabilities; -use ra_ide::{AssistConfig, CompletionConfig, HoverConfig, InlayHintsConfig}; -use ra_project_model::{CargoConfig, ProjectJson, ProjectJsonData, ProjectManifest}; +use project_model::{CargoConfig, ProjectJson, ProjectJsonData, ProjectManifest}; use serde::Deserialize; use vfs::AbsPathBuf; diff --git a/crates/rust-analyzer/src/diagnostics.rs b/crates/rust-analyzer/src/diagnostics.rs index d24c55cee6..108df3eb04 100644 --- a/crates/rust-analyzer/src/diagnostics.rs +++ b/crates/rust-analyzer/src/diagnostics.rs @@ -3,7 +3,7 @@ pub(crate) mod to_proto; use std::{mem, sync::Arc}; -use 
ra_ide::FileId; +use ide::FileId; use rustc_hash::{FxHashMap, FxHashSet}; use crate::lsp_ext; diff --git a/crates/rust-analyzer/src/diagnostics/to_proto.rs b/crates/rust-analyzer/src/diagnostics/to_proto.rs index 97f3313521..6d54081560 100644 --- a/crates/rust-analyzer/src/diagnostics/to_proto.rs +++ b/crates/rust-analyzer/src/diagnostics/to_proto.rs @@ -1116,7 +1116,7 @@ mod tests { fn macro_compiler_error() { check( r##"{ - "rendered": "error: Please register your known path in the path module\n --> crates/ra_hir_def/src/path.rs:265:9\n |\n265 | compile_error!(\"Please register your known path in the path module\")\n | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n | \n ::: crates/ra_hir_def/src/data.rs:80:16\n |\n80 | let path = path![std::future::Future];\n | -------------------------- in this macro invocation\n\n", + "rendered": "error: Please register your known path in the path module\n --> crates/hir_def/src/path.rs:265:9\n |\n265 | compile_error!(\"Please register your known path in the path module\")\n | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n | \n ::: crates/hir_def/src/data.rs:80:16\n |\n80 | let path = path![std::future::Future];\n | -------------------------- in this macro invocation\n\n", "children": [], "code": null, "level": "error", @@ -1134,7 +1134,7 @@ mod tests { "column_end": 2, "column_start": 1, "expansion": null, - "file_name": "crates/ra_hir_def/src/path.rs", + "file_name": "crates/hir_def/src/path.rs", "is_primary": false, "label": null, "line_end": 267, @@ -1227,7 +1227,7 @@ mod tests { "column_end": 2, "column_start": 1, "expansion": null, - "file_name": "crates/ra_hir_def/src/path.rs", + "file_name": "crates/hir_def/src/path.rs", "is_primary": false, "label": null, "line_end": 277, @@ -1284,7 +1284,7 @@ mod tests { "column_end": 42, "column_start": 16, "expansion": null, - "file_name": "crates/ra_hir_def/src/data.rs", + "file_name": "crates/hir_def/src/data.rs", "is_primary": 
false, "label": null, "line_end": 80, @@ -1300,7 +1300,7 @@ mod tests { ] } }, - "file_name": "crates/ra_hir_def/src/path.rs", + "file_name": "crates/hir_def/src/path.rs", "is_primary": false, "label": null, "line_end": 272, @@ -1316,7 +1316,7 @@ mod tests { ] } }, - "file_name": "crates/ra_hir_def/src/path.rs", + "file_name": "crates/hir_def/src/path.rs", "is_primary": true, "label": null, "line_end": 265, diff --git a/crates/rust-analyzer/src/from_proto.rs b/crates/rust-analyzer/src/from_proto.rs index 9f8ce3b991..5b9f52993d 100644 --- a/crates/rust-analyzer/src/from_proto.rs +++ b/crates/rust-analyzer/src/from_proto.rs @@ -1,9 +1,9 @@ //! Conversion lsp_types types to rust-analyzer specific ones. use std::convert::TryFrom; -use ra_db::{FileId, FilePosition, FileRange}; -use ra_ide::{AssistKind, LineCol, LineIndex}; -use ra_syntax::{TextRange, TextSize}; +use base_db::{FileId, FilePosition, FileRange}; +use ide::{AssistKind, LineCol, LineIndex}; +use syntax::{TextRange, TextSize}; use vfs::AbsPathBuf; use crate::{global_state::GlobalStateSnapshot, Result}; diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs index 0e592ac1be..212f98a300 100644 --- a/crates/rust-analyzer/src/global_state.rs +++ b/crates/rust-analyzer/src/global_state.rs @@ -5,13 +5,13 @@ use std::{sync::Arc, time::Instant}; +use base_db::{CrateId, VfsPath}; use crossbeam_channel::{unbounded, Receiver, Sender}; use flycheck::FlycheckHandle; +use ide::{Analysis, AnalysisChange, AnalysisHost, FileId}; use lsp_types::{SemanticTokens, Url}; use parking_lot::{Mutex, RwLock}; -use ra_db::{CrateId, VfsPath}; -use ra_ide::{Analysis, AnalysisChange, AnalysisHost, FileId}; -use ra_project_model::{CargoWorkspace, ProcMacroClient, ProjectWorkspace, Target}; +use project_model::{CargoWorkspace, ProcMacroClient, ProjectWorkspace, Target}; use rustc_hash::FxHashMap; use crate::{ @@ -27,7 +27,6 @@ use crate::{ to_proto::url_from_abs_path, Result, }; -use 
ra_prof::profile; #[derive(Eq, PartialEq, Copy, Clone)] pub(crate) enum Status { @@ -73,6 +72,7 @@ pub(crate) struct GlobalState { pub(crate) mem_docs: FxHashMap, pub(crate) semantic_tokens_cache: Arc>>, pub(crate) vfs: Arc)>>, + pub(crate) shutdown_requested: bool, pub(crate) status: Status, pub(crate) source_root_config: SourceRootConfig, pub(crate) proc_macro_client: ProcMacroClient, @@ -124,6 +124,7 @@ impl GlobalState { mem_docs: FxHashMap::default(), semantic_tokens_cache: Arc::new(Default::default()), vfs: Arc::new(RwLock::new((vfs::Vfs::default(), FxHashMap::default()))), + shutdown_requested: false, status: Status::default(), source_root_config: SourceRootConfig::default(), proc_macro_client: ProcMacroClient::dummy(), @@ -133,7 +134,7 @@ impl GlobalState { } pub(crate) fn process_changes(&mut self) -> bool { - let _p = profile("GlobalState::process_changes"); + let _p = profile::span("GlobalState::process_changes"); let mut fs_changes = Vec::new(); let mut has_fs_changes = false; diff --git a/crates/rust-analyzer/src/handlers.rs b/crates/rust-analyzer/src/handlers.rs index 067259e246..74f73655a4 100644 --- a/crates/rust-analyzer/src/handlers.rs +++ b/crates/rust-analyzer/src/handlers.rs @@ -1,12 +1,16 @@ //! This module is responsible for implementing handlers for Language Server //! Protocol. The majority of requests are fulfilled by calling into the -//! `ra_ide` crate. +//! `ide` crate. 
use std::{ io::Write as _, process::{self, Stdio}, }; +use ide::{ + FileId, FilePosition, FileRange, HoverAction, HoverGotoTypeData, NavigationTarget, Query, + RangeInfo, Runnable, RunnableKind, SearchScope, TextEdit, +}; use lsp_server::ErrorCode; use lsp_types::{ CallHierarchyIncomingCall, CallHierarchyIncomingCallsParams, CallHierarchyItem, @@ -18,16 +22,11 @@ use lsp_types::{ SemanticTokensRangeResult, SemanticTokensResult, SymbolInformation, SymbolTag, TextDocumentIdentifier, Url, WorkspaceEdit, }; -use ra_ide::{ - FileId, FilePosition, FileRange, HoverAction, HoverGotoTypeData, NavigationTarget, Query, - RangeInfo, Runnable, RunnableKind, SearchScope, TextEdit, -}; -use ra_prof::profile; -use ra_project_model::TargetKind; -use ra_syntax::{algo, ast, AstNode, SyntaxKind, TextRange, TextSize}; +use project_model::TargetKind; use serde::{Deserialize, Serialize}; use serde_json::to_value; use stdx::{format_to, split_once}; +use syntax::{algo, ast, AstNode, SyntaxKind, TextRange, TextSize}; use crate::{ cargo_target_spec::CargoTargetSpec, @@ -39,7 +38,7 @@ use crate::{ }; pub(crate) fn handle_analyzer_status(snap: GlobalStateSnapshot, _: ()) -> Result { - let _p = profile("handle_analyzer_status"); + let _p = profile::span("handle_analyzer_status"); let mut buf = String::new(); if snap.workspaces.is_empty() { @@ -64,7 +63,7 @@ pub(crate) fn handle_analyzer_status(snap: GlobalStateSnapshot, _: ()) -> Result } pub(crate) fn handle_memory_usage(state: &mut GlobalState, _: ()) -> Result { - let _p = profile("handle_memory_usage"); + let _p = profile::span("handle_memory_usage"); let mem = state.analysis_host.per_query_memory_usage(); let mut out = String::new(); @@ -78,7 +77,7 @@ pub(crate) fn handle_syntax_tree( snap: GlobalStateSnapshot, params: lsp_ext::SyntaxTreeParams, ) -> Result { - let _p = profile("handle_syntax_tree"); + let _p = profile::span("handle_syntax_tree"); let id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; let line_index = 
snap.analysis.file_line_index(id)?; let text_range = params.range.map(|r| from_proto::text_range(&line_index, r)); @@ -90,7 +89,7 @@ pub(crate) fn handle_expand_macro( snap: GlobalStateSnapshot, params: lsp_ext::ExpandMacroParams, ) -> Result> { - let _p = profile("handle_expand_macro"); + let _p = profile::span("handle_expand_macro"); let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; let line_index = snap.analysis.file_line_index(file_id)?; let offset = from_proto::offset(&line_index, params.position); @@ -103,7 +102,7 @@ pub(crate) fn handle_selection_range( snap: GlobalStateSnapshot, params: lsp_types::SelectionRangeParams, ) -> Result>> { - let _p = profile("handle_selection_range"); + let _p = profile::span("handle_selection_range"); let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; let line_index = snap.analysis.file_line_index(file_id)?; let res: Result> = params @@ -146,7 +145,7 @@ pub(crate) fn handle_matching_brace( snap: GlobalStateSnapshot, params: lsp_ext::MatchingBraceParams, ) -> Result> { - let _p = profile("handle_matching_brace"); + let _p = profile::span("handle_matching_brace"); let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; let line_index = snap.analysis.file_line_index(file_id)?; let res = params @@ -168,7 +167,7 @@ pub(crate) fn handle_join_lines( snap: GlobalStateSnapshot, params: lsp_ext::JoinLinesParams, ) -> Result> { - let _p = profile("handle_join_lines"); + let _p = profile::span("handle_join_lines"); let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; let line_index = snap.analysis.file_line_index(file_id)?; let line_endings = snap.file_line_endings(file_id); @@ -191,7 +190,7 @@ pub(crate) fn handle_on_enter( snap: GlobalStateSnapshot, params: lsp_types::TextDocumentPositionParams, ) -> Result>> { - let _p = profile("handle_on_enter"); + let _p = profile::span("handle_on_enter"); let position = from_proto::file_position(&snap, params)?; let edit = match 
snap.analysis.on_enter(position)? { None => return Ok(None), @@ -208,12 +207,12 @@ pub(crate) fn handle_on_type_formatting( snap: GlobalStateSnapshot, params: lsp_types::DocumentOnTypeFormattingParams, ) -> Result>> { - let _p = profile("handle_on_type_formatting"); + let _p = profile::span("handle_on_type_formatting"); let mut position = from_proto::file_position(&snap, params.text_document_position)?; let line_index = snap.analysis.file_line_index(position.file_id)?; let line_endings = snap.file_line_endings(position.file_id); - // in `ra_ide`, the `on_type` invariant is that + // in `ide`, the `on_type` invariant is that // `text.char_at(position) == typed_char`. position.offset -= TextSize::of('.'); let char_typed = params.ch.chars().next().unwrap_or('\0'); @@ -247,7 +246,7 @@ pub(crate) fn handle_document_symbol( snap: GlobalStateSnapshot, params: lsp_types::DocumentSymbolParams, ) -> Result> { - let _p = profile("handle_document_symbol"); + let _p = profile::span("handle_document_symbol"); let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; let line_index = snap.analysis.file_line_index(file_id)?; @@ -332,7 +331,7 @@ pub(crate) fn handle_workspace_symbol( snap: GlobalStateSnapshot, params: lsp_types::WorkspaceSymbolParams, ) -> Result>> { - let _p = profile("handle_workspace_symbol"); + let _p = profile::span("handle_workspace_symbol"); let all_symbols = params.query.contains('#'); let libs = params.query.contains('*'); let query = { @@ -380,7 +379,7 @@ pub(crate) fn handle_goto_definition( snap: GlobalStateSnapshot, params: lsp_types::GotoDefinitionParams, ) -> Result> { - let _p = profile("handle_goto_definition"); + let _p = profile::span("handle_goto_definition"); let position = from_proto::file_position(&snap, params.text_document_position_params)?; let nav_info = match snap.analysis.goto_definition(position)? 
{ None => return Ok(None), @@ -395,7 +394,7 @@ pub(crate) fn handle_goto_implementation( snap: GlobalStateSnapshot, params: lsp_types::request::GotoImplementationParams, ) -> Result> { - let _p = profile("handle_goto_implementation"); + let _p = profile::span("handle_goto_implementation"); let position = from_proto::file_position(&snap, params.text_document_position_params)?; let nav_info = match snap.analysis.goto_implementation(position)? { None => return Ok(None), @@ -410,7 +409,7 @@ pub(crate) fn handle_goto_type_definition( snap: GlobalStateSnapshot, params: lsp_types::request::GotoTypeDefinitionParams, ) -> Result> { - let _p = profile("handle_goto_type_definition"); + let _p = profile::span("handle_goto_type_definition"); let position = from_proto::file_position(&snap, params.text_document_position_params)?; let nav_info = match snap.analysis.goto_type_definition(position)? { None => return Ok(None), @@ -425,7 +424,7 @@ pub(crate) fn handle_parent_module( snap: GlobalStateSnapshot, params: lsp_types::TextDocumentPositionParams, ) -> Result> { - let _p = profile("handle_parent_module"); + let _p = profile::span("handle_parent_module"); let position = from_proto::file_position(&snap, params)?; let navs = snap.analysis.parent_module(position)?; let res = to_proto::goto_definition_response(&snap, None, navs)?; @@ -436,7 +435,7 @@ pub(crate) fn handle_runnables( snap: GlobalStateSnapshot, params: lsp_ext::RunnablesParams, ) -> Result> { - let _p = profile("handle_runnables"); + let _p = profile::span("handle_runnables"); let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; let line_index = snap.analysis.file_line_index(file_id)?; let offset = params.position.map(|it| from_proto::offset(&line_index, it)); @@ -513,7 +512,7 @@ pub(crate) fn handle_completion( snap: GlobalStateSnapshot, params: lsp_types::CompletionParams, ) -> Result> { - let _p = profile("handle_completion"); + let _p = profile::span("handle_completion"); let position = 
from_proto::file_position(&snap, params.text_document_position)?; let completion_triggered_after_single_colon = { let mut res = false; @@ -555,7 +554,7 @@ pub(crate) fn handle_folding_range( snap: GlobalStateSnapshot, params: FoldingRangeParams, ) -> Result>> { - let _p = profile("handle_folding_range"); + let _p = profile::span("handle_folding_range"); let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; let folds = snap.analysis.folding_ranges(file_id)?; let text = snap.analysis.file_text(file_id)?; @@ -572,7 +571,7 @@ pub(crate) fn handle_signature_help( snap: GlobalStateSnapshot, params: lsp_types::SignatureHelpParams, ) -> Result> { - let _p = profile("handle_signature_help"); + let _p = profile::span("handle_signature_help"); let position = from_proto::file_position(&snap, params.text_document_position_params)?; let call_info = match snap.analysis.call_info(position)? { Some(it) => it, @@ -591,7 +590,7 @@ pub(crate) fn handle_hover( snap: GlobalStateSnapshot, params: lsp_types::HoverParams, ) -> Result> { - let _p = profile("handle_hover"); + let _p = profile::span("handle_hover"); let position = from_proto::file_position(&snap, params.text_document_position_params)?; let info = match snap.analysis.hover(position)? 
{ None => return Ok(None), @@ -614,7 +613,7 @@ pub(crate) fn handle_prepare_rename( snap: GlobalStateSnapshot, params: lsp_types::TextDocumentPositionParams, ) -> Result> { - let _p = profile("handle_prepare_rename"); + let _p = profile::span("handle_prepare_rename"); let position = from_proto::file_position(&snap, params)?; let optional_change = snap.analysis.rename(position, "dummy")?; @@ -632,7 +631,7 @@ pub(crate) fn handle_rename( snap: GlobalStateSnapshot, params: RenameParams, ) -> Result> { - let _p = profile("handle_rename"); + let _p = profile::span("handle_rename"); let position = from_proto::file_position(&snap, params.text_document_position)?; if params.new_name.is_empty() { @@ -656,7 +655,7 @@ pub(crate) fn handle_references( snap: GlobalStateSnapshot, params: lsp_types::ReferenceParams, ) -> Result>> { - let _p = profile("handle_references"); + let _p = profile::span("handle_references"); let position = from_proto::file_position(&snap, params.text_document_position)?; let refs = match snap.analysis.find_all_refs(position, None)? 
{ @@ -683,7 +682,7 @@ pub(crate) fn handle_formatting( snap: GlobalStateSnapshot, params: DocumentFormattingParams, ) -> Result>> { - let _p = profile("handle_formatting"); + let _p = profile::span("handle_formatting"); let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; let file = snap.analysis.file_text(file_id)?; let crate_ids = snap.analysis.crate_for(file_id)?; @@ -693,7 +692,7 @@ pub(crate) fn handle_formatting( let mut rustfmt = match &snap.config.rustfmt { RustfmtConfig::Rustfmt { extra_args } => { - let mut cmd = process::Command::new(ra_toolchain::rustfmt()); + let mut cmd = process::Command::new(toolchain::rustfmt()); cmd.args(extra_args); if let Some(&crate_id) = crate_ids.first() { // Assume all crates are in the same edition @@ -773,12 +772,11 @@ fn handle_fixes( let diagnostics = snap.analysis.diagnostics(file_id, snap.config.experimental_diagnostics)?; - let fixes_from_diagnostics = diagnostics + for fix in diagnostics .into_iter() - .filter_map(|d| Some((d.range, d.fix?))) - .filter(|(diag_range, _fix)| diag_range.intersect(range).is_some()) - .map(|(_range, fix)| fix); - for fix in fixes_from_diagnostics { + .filter_map(|d| d.fix) + .filter(|fix| fix.fix_trigger_range.intersect(range).is_some()) + { let title = fix.label; let edit = to_proto::snippet_workspace_edit(&snap, fix.source_change)?; let action = lsp_ext::CodeAction { @@ -806,7 +804,7 @@ pub(crate) fn handle_code_action( mut snap: GlobalStateSnapshot, params: lsp_types::CodeActionParams, ) -> Result>> { - let _p = profile("handle_code_action"); + let _p = profile::span("handle_code_action"); // We intentionally don't support command-based actions, as those either // requires custom client-code anyway, or requires server-initiated edits. // Server initiated edits break causality, so we avoid those as well. 
@@ -848,7 +846,7 @@ pub(crate) fn handle_resolve_code_action( mut snap: GlobalStateSnapshot, params: lsp_ext::ResolveCodeActionParams, ) -> Result> { - let _p = profile("handle_resolve_code_action"); + let _p = profile::span("handle_resolve_code_action"); let file_id = from_proto::file_id(&snap, ¶ms.code_action_params.text_document.uri)?; let line_index = snap.analysis.file_line_index(file_id)?; let range = from_proto::text_range(&line_index, params.code_action_params.range); @@ -864,7 +862,7 @@ pub(crate) fn handle_resolve_code_action( let (id_string, index) = split_once(¶ms.id, ':').unwrap(); let index = index.parse::().unwrap(); let assist = &assists[index]; - assert!(assist.assist.id.0 == id_string); + assert!(assist.assist.id().0 == id_string); Ok(to_proto::resolved_code_action(&snap, assist.clone())?.edit) } @@ -872,7 +870,7 @@ pub(crate) fn handle_code_lens( snap: GlobalStateSnapshot, params: lsp_types::CodeLensParams, ) -> Result>> { - let _p = profile("handle_code_lens"); + let _p = profile::span("handle_code_lens"); let mut lenses: Vec = Default::default(); if snap.config.lens.none() { @@ -892,7 +890,7 @@ pub(crate) fn handle_code_lens( } let action = runnable.action(); - let range = to_proto::range(&line_index, runnable.nav.focus_or_full_range()); + let range = to_proto::range(&line_index, runnable.nav.full_range); let r = to_proto::runnable(&snap, file_id, runnable)?; if snap.config.lens.run { let lens = CodeLens { @@ -958,7 +956,7 @@ pub(crate) fn handle_code_lens_resolve( snap: GlobalStateSnapshot, code_lens: CodeLens, ) -> Result { - let _p = profile("handle_code_lens_resolve"); + let _p = profile::span("handle_code_lens_resolve"); let data = code_lens.data.unwrap(); let resolve = from_json::>("CodeLensResolveData", data)?; match resolve { @@ -995,7 +993,7 @@ pub(crate) fn handle_document_highlight( snap: GlobalStateSnapshot, params: lsp_types::DocumentHighlightParams, ) -> Result>> { - let _p = profile("handle_document_highlight"); + let _p = 
profile::span("handle_document_highlight"); let position = from_proto::file_position(&snap, params.text_document_position_params)?; let line_index = snap.analysis.file_line_index(position.file_id)?; @@ -1022,7 +1020,7 @@ pub(crate) fn handle_ssr( snap: GlobalStateSnapshot, params: lsp_ext::SsrParams, ) -> Result { - let _p = profile("handle_ssr"); + let _p = profile::span("handle_ssr"); let selections = params .selections .iter() @@ -1042,7 +1040,7 @@ pub(crate) fn publish_diagnostics( snap: &GlobalStateSnapshot, file_id: FileId, ) -> Result> { - let _p = profile("publish_diagnostics"); + let _p = profile::span("publish_diagnostics"); let line_index = snap.analysis.file_line_index(file_id)?; let diagnostics: Vec = snap .analysis @@ -1065,7 +1063,7 @@ pub(crate) fn handle_inlay_hints( snap: GlobalStateSnapshot, params: InlayHintsParams, ) -> Result> { - let _p = profile("handle_inlay_hints"); + let _p = profile::span("handle_inlay_hints"); let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; let line_index = snap.analysis.file_line_index(file_id)?; Ok(snap @@ -1080,7 +1078,7 @@ pub(crate) fn handle_call_hierarchy_prepare( snap: GlobalStateSnapshot, params: CallHierarchyPrepareParams, ) -> Result>> { - let _p = profile("handle_call_hierarchy_prepare"); + let _p = profile::span("handle_call_hierarchy_prepare"); let position = from_proto::file_position(&snap, params.text_document_position_params)?; let nav_info = match snap.analysis.call_hierarchy(position)? 
{ @@ -1102,7 +1100,7 @@ pub(crate) fn handle_call_hierarchy_incoming( snap: GlobalStateSnapshot, params: CallHierarchyIncomingCallsParams, ) -> Result>> { - let _p = profile("handle_call_hierarchy_incoming"); + let _p = profile::span("handle_call_hierarchy_incoming"); let item = params.item; let doc = TextDocumentIdentifier::new(item.uri); @@ -1137,7 +1135,7 @@ pub(crate) fn handle_call_hierarchy_outgoing( snap: GlobalStateSnapshot, params: CallHierarchyOutgoingCallsParams, ) -> Result>> { - let _p = profile("handle_call_hierarchy_outgoing"); + let _p = profile::span("handle_call_hierarchy_outgoing"); let item = params.item; let doc = TextDocumentIdentifier::new(item.uri); @@ -1172,7 +1170,7 @@ pub(crate) fn handle_semantic_tokens( snap: GlobalStateSnapshot, params: SemanticTokensParams, ) -> Result> { - let _p = profile("handle_semantic_tokens"); + let _p = profile::span("handle_semantic_tokens"); let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; let text = snap.analysis.file_text(file_id)?; @@ -1191,7 +1189,7 @@ pub(crate) fn handle_semantic_tokens_edits( snap: GlobalStateSnapshot, params: SemanticTokensEditsParams, ) -> Result> { - let _p = profile("handle_semantic_tokens_edits"); + let _p = profile::span("handle_semantic_tokens_edits"); let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; let text = snap.analysis.file_text(file_id)?; @@ -1221,7 +1219,7 @@ pub(crate) fn handle_semantic_tokens_range( snap: GlobalStateSnapshot, params: SemanticTokensRangeParams, ) -> Result> { - let _p = profile("handle_semantic_tokens_range"); + let _p = profile::span("handle_semantic_tokens_range"); let frange = from_proto::file_range(&snap, params.text_document, params.range)?; let text = snap.analysis.file_text(frange.file_id)?; diff --git a/crates/rust-analyzer/src/lib.rs b/crates/rust-analyzer/src/lib.rs index ed37992cd3..87f72b4974 100644 --- a/crates/rust-analyzer/src/lib.rs +++ b/crates/rust-analyzer/src/lib.rs @@ -1,6 +1,6 @@ //! 
Implementation of the LSP for rust-analyzer. //! -//! This crate takes Rust-specific analysis results from ra_ide and translates +//! This crate takes Rust-specific analysis results from ide and translates //! into LSP types. //! //! It also is the root of all state. `world` module defines the bulk of the @@ -41,7 +41,7 @@ use serde::de::DeserializeOwned; pub type Result> = std::result::Result; pub use crate::{caps::server_capabilities, main_loop::main_loop}; -use ra_ide::AnalysisHost; +use ide::AnalysisHost; use std::fmt; use vfs::Vfs; @@ -74,16 +74,16 @@ impl std::error::Error for LspError {} fn print_memory_usage(mut host: AnalysisHost, vfs: Vfs) { let mut mem = host.per_query_memory_usage(); - let before = ra_prof::memory_usage(); + let before = profile::memory_usage(); drop(vfs); - let vfs = before.allocated - ra_prof::memory_usage().allocated; + let vfs = before.allocated - profile::memory_usage().allocated; mem.push(("VFS".into(), vfs)); - let before = ra_prof::memory_usage(); + let before = profile::memory_usage(); drop(host); - mem.push(("Unaccounted".into(), before.allocated - ra_prof::memory_usage().allocated)); + mem.push(("Unaccounted".into(), before.allocated - profile::memory_usage().allocated)); - mem.push(("Remaining".into(), ra_prof::memory_usage().allocated)); + mem.push(("Remaining".into(), profile::memory_usage().allocated)); for (name, bytes) in mem { eprintln!("{:>8} {}", bytes, name); diff --git a/crates/rust-analyzer/src/lsp_utils.rs b/crates/rust-analyzer/src/lsp_utils.rs index 0bc3ff115d..85c6615714 100644 --- a/crates/rust-analyzer/src/lsp_utils.rs +++ b/crates/rust-analyzer/src/lsp_utils.rs @@ -1,9 +1,9 @@ //! Utilities for LSP-related boilerplate code. 
use std::{error::Error, ops::Range}; +use base_db::Canceled; +use ide::LineIndex; use lsp_server::Notification; -use ra_db::Canceled; -use ra_ide::LineIndex; use crate::{from_proto, global_state::GlobalState}; diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index 438e965e08..66e04653a3 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs @@ -5,12 +5,11 @@ use std::{ time::{Duration, Instant}, }; +use base_db::VfsPath; use crossbeam_channel::{select, Receiver}; +use ide::{Canceled, FileId}; use lsp_server::{Connection, Notification, Request, Response}; use lsp_types::notification::Notification as _; -use ra_db::VfsPath; -use ra_ide::{Canceled, FileId}; -use ra_prof::profile; use crate::{ config::Config, @@ -22,7 +21,7 @@ use crate::{ lsp_utils::{apply_document_changes, is_canceled, notification_is, Progress}, Result, }; -use ra_project_model::ProjectWorkspace; +use project_model::ProjectWorkspace; use vfs::ChangeKind; pub fn main_loop(config: Config, connection: Connection) -> Result<()> { @@ -173,7 +172,7 @@ impl GlobalState { fn handle_event(&mut self, event: Event) -> Result<()> { let loop_start = Instant::now(); // NOTE: don't count blocking select! 
call as a loop-turn time - let _p = profile("GlobalState::handle_event"); + let _p = profile::span("GlobalState::handle_event"); log::info!("handle_event({:?})", event); let queue_count = self.task_pool.handle.len(); @@ -204,7 +203,7 @@ impl GlobalState { self.analysis_host.maybe_collect_garbage(); } Event::Vfs(mut task) => { - let _p = profile("GlobalState::handle_event/vfs"); + let _p = profile::span("GlobalState::handle_event/vfs"); loop { match task { vfs::loader::Message::Loaded { files } => { @@ -337,11 +336,34 @@ impl GlobalState { fn on_request(&mut self, request_received: Instant, req: Request) -> Result<()> { self.register_request(&req, request_received); + if self.shutdown_requested { + self.respond(Response::new_err( + req.id, + lsp_server::ErrorCode::InvalidRequest as i32, + "Shutdown already requested.".to_owned(), + )); + + return Ok(()); + } + + if self.status == Status::Loading && req.method != "shutdown" { + self.respond(lsp_server::Response::new_err( + req.id, + // FIXME: i32 should impl From (from() guarantees lossless conversion) + lsp_server::ErrorCode::ContentModified as i32, + "Rust Analyzer is still loading...".to_owned(), + )); + return Ok(()); + } + RequestDispatcher { req: Some(req), global_state: self } .on_sync::(|s, ()| Ok(s.fetch_workspaces()))? .on_sync::(|s, p| handlers::handle_join_lines(s.snapshot(), p))? .on_sync::(|s, p| handlers::handle_on_enter(s.snapshot(), p))? - .on_sync::(|_, ()| Ok(()))? + .on_sync::(|s, ()| { + s.shutdown_requested = true; + Ok(()) + })? .on_sync::(|s, p| { handlers::handle_selection_range(s.snapshot(), p) })? diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index 1907f2f132..a2cfb4e0d5 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -1,11 +1,10 @@ //! 
Project loading & configuration updates use std::{mem, sync::Arc}; +use base_db::{CrateGraph, SourceRoot, VfsPath}; use flycheck::FlycheckHandle; -use ra_db::{CrateGraph, SourceRoot, VfsPath}; -use ra_ide::AnalysisChange; -use ra_prof::profile; -use ra_project_model::{ProcMacroClient, ProjectWorkspace}; +use ide::AnalysisChange; +use project_model::{ProcMacroClient, ProjectWorkspace}; use vfs::{file_set::FileSetConfig, AbsPath, AbsPathBuf, ChangeKind}; use crate::{ @@ -17,7 +16,7 @@ use crate::{ impl GlobalState { pub(crate) fn update_configuration(&mut self, config: Config) { - let _p = profile("GlobalState::update_configuration"); + let _p = profile::span("GlobalState::update_configuration"); let old_config = mem::replace(&mut self.config, config); if self.config.lru_capacity != old_config.lru_capacity { self.analysis_host.update_lru_capacity(old_config.lru_capacity); @@ -99,14 +98,14 @@ impl GlobalState { .iter() .map(|project| match project { LinkedProject::ProjectManifest(manifest) => { - ra_project_model::ProjectWorkspace::load( + project_model::ProjectWorkspace::load( manifest.clone(), &cargo_config, with_sysroot, ) } LinkedProject::InlineJsonProject(it) => { - Ok(ra_project_model::ProjectWorkspace::Json { project: it.clone() }) + Ok(project_model::ProjectWorkspace::Json { project: it.clone() }) } }) .collect::>(); @@ -115,7 +114,7 @@ impl GlobalState { }); } pub(crate) fn switch_workspaces(&mut self, workspaces: Vec>) { - let _p = profile("GlobalState::switch_workspaces"); + let _p = profile::span("GlobalState::switch_workspaces"); log::info!("reloading projects: {:?}", self.config.linked_projects); let mut has_errors = false; @@ -177,7 +176,7 @@ impl GlobalState { Ok(it) => it, Err(err) => { log::error!( - "Failed to run ra_proc_macro_srv from path {}, error: {:?}", + "Failed to run proc_macro_srv from path {}, error: {:?}", path.display(), err ); @@ -300,7 +299,7 @@ pub(crate) struct SourceRootConfig { impl SourceRootConfig { pub(crate) fn 
partition(&self, vfs: &vfs::Vfs) -> Vec { - let _p = profile("SourceRootConfig::partition"); + let _p = profile::span("SourceRootConfig::partition"); self.fsc .partition(vfs) .into_iter() diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs index 5eba1f1555..8a2cfa2aee 100644 --- a/crates/rust-analyzer/src/to_proto.rs +++ b/crates/rust-analyzer/src/to_proto.rs @@ -4,15 +4,15 @@ use std::{ sync::atomic::{AtomicU32, Ordering}, }; -use itertools::Itertools; -use ra_db::{FileId, FileRange}; -use ra_ide::{ +use base_db::{FileId, FileRange}; +use ide::{ Assist, AssistKind, CallInfo, CompletionItem, CompletionItemKind, Documentation, FileSystemEdit, Fold, FoldKind, Highlight, HighlightModifier, HighlightTag, HighlightedRange, Indel, InlayHint, InlayKind, InsertTextFormat, LineIndex, Markup, NavigationTarget, ReferenceAccess, ResolvedAssist, Runnable, Severity, SourceChange, SourceFileEdit, TextEdit, }; -use ra_syntax::{SyntaxKind, TextRange, TextSize}; +use itertools::Itertools; +use syntax::{SyntaxKind, TextRange, TextSize}; use crate::{ cargo_target_spec::CargoTargetSpec, global_state::GlobalStateSnapshot, @@ -704,10 +704,10 @@ pub(crate) fn unresolved_code_action( index: usize, ) -> Result { let res = lsp_ext::CodeAction { - title: assist.label, - id: Some(format!("{}:{}", assist.id.0.to_owned(), index.to_string())), - group: assist.group.filter(|_| snap.config.client_caps.code_action_group).map(|gr| gr.0), - kind: Some(code_action_kind(assist.id.1)), + title: assist.label(), + id: Some(format!("{}:{}", assist.id().0.to_owned(), index.to_string())), + group: assist.group().filter(|_| snap.config.client_caps.code_action_group).map(|gr| gr.0), + kind: Some(code_action_kind(assist.id().1)), edit: None, is_preferred: None, }; @@ -755,12 +755,13 @@ pub(crate) fn runnable( } pub(crate) fn markup_content(markup: Markup) -> lsp_types::MarkupContent { - lsp_types::MarkupContent { kind: lsp_types::MarkupKind::Markdown, value: markup.into() } + 
let value = crate::markdown::format_docs(markup.as_str()); + lsp_types::MarkupContent { kind: lsp_types::MarkupKind::Markdown, value } } #[cfg(test)] mod tests { - use ra_ide::Analysis; + use ide::Analysis; use super::*; diff --git a/crates/rust-analyzer/test_data/macro_compiler_error.txt b/crates/rust-analyzer/test_data/macro_compiler_error.txt index f695db73ce..89dae7d5a6 100644 --- a/crates/rust-analyzer/test_data/macro_compiler_error.txt +++ b/crates/rust-analyzer/test_data/macro_compiler_error.txt @@ -1,6 +1,6 @@ [ MappedRustDiagnostic { - url: "file:///test/crates/ra_hir_def/src/data.rs", + url: "file:///test/crates/hir_def/src/data.rs", diagnostic: Diagnostic { range: Range { start: Position { @@ -24,7 +24,7 @@ [ DiagnosticRelatedInformation { location: Location { - uri: "file:///test/crates/ra_hir_def/src/path.rs", + uri: "file:///test/crates/hir_def/src/path.rs", range: Range { start: Position { line: 264, diff --git a/crates/rust-analyzer/tests/heavy_tests/support.rs b/crates/rust-analyzer/tests/heavy_tests/support.rs index f242c8165c..5bafeba797 100644 --- a/crates/rust-analyzer/tests/heavy_tests/support.rs +++ b/crates/rust-analyzer/tests/heavy_tests/support.rs @@ -12,7 +12,7 @@ use lsp_types::{ notification::Exit, request::Shutdown, TextDocumentIdentifier, Url, WorkDoneProgress, }; use lsp_types::{ProgressParams, ProgressParamsValue}; -use ra_project_model::ProjectManifest; +use project_model::ProjectManifest; use rust_analyzer::{ config::{ClientCapsConfig, Config, FilesConfig, FilesWatcher, LinkedProject}, main_loop, @@ -62,7 +62,7 @@ impl<'a> Project<'a> { static INIT: Once = Once::new(); INIT.call_once(|| { env_logger::builder().is_test(true).try_init().unwrap(); - ra_prof::init_from(crate::PROFILE); + profile::init_from(crate::PROFILE); }); for entry in Fixture::parse(self.fixture) { diff --git a/crates/ssr/Cargo.toml b/crates/ssr/Cargo.toml new file mode 100644 index 0000000000..56c1f77618 --- /dev/null +++ b/crates/ssr/Cargo.toml @@ -0,0 +1,24 @@ 
+[package] +name = "ssr" +version = "0.0.0" +description = "Structural search and replace of Rust code" +license = "MIT OR Apache-2.0" +repository = "https://github.com/rust-analyzer/rust-analyzer" +authors = ["rust-analyzer developers"] +edition = "2018" + +[lib] +doctest = false + +[dependencies] +rustc-hash = "1.1.0" + +text_edit = { path = "../text_edit" } +syntax = { path = "../syntax" } +base_db = { path = "../base_db" } +ide_db = { path = "../ide_db" } +hir = { path = "../hir" } +test_utils = { path = "../test_utils" } + +[dev-dependencies] +expect = { path = "../expect" } diff --git a/crates/ra_ssr/src/errors.rs b/crates/ssr/src/errors.rs similarity index 100% rename from crates/ra_ssr/src/errors.rs rename to crates/ssr/src/errors.rs diff --git a/crates/ssr/src/lib.rs b/crates/ssr/src/lib.rs new file mode 100644 index 0000000000..292bd5b9a7 --- /dev/null +++ b/crates/ssr/src/lib.rs @@ -0,0 +1,338 @@ +//! Structural Search Replace +//! +//! Allows searching the AST for code that matches one or more patterns and then replacing that code +//! based on a template. + +// Feature: Structural Search and Replace +// +// Search and replace with named wildcards that will match any expression, type, path, pattern or item. +// The syntax for a structural search replace command is ` ==>> `. +// A `$` placeholder in the search pattern will match any AST node and `$` will reference it in the replacement. +// Within a macro call, a placeholder will match up until whatever token follows the placeholder. +// +// All paths in both the search pattern and the replacement template must resolve in the context +// in which this command is invoked. Paths in the search pattern will then match the code if they +// resolve to the same item, even if they're written differently. For example if we invoke the +// command in the module `foo` with a pattern of `Bar`, then code in the parent module that refers +// to `foo::Bar` will match. 
+// +// Paths in the replacement template will be rendered appropriately for the context in which the +// replacement occurs. For example if our replacement template is `foo::Bar` and we match some +// code in the `foo` module, we'll insert just `Bar`. +// +// Inherent method calls should generally be written in UFCS form. e.g. `foo::Bar::baz($s, $a)` will +// match `$s.baz($a)`, provided the method call `baz` resolves to the method `foo::Bar::baz`. +// +// The scope of the search / replace will be restricted to the current selection if any, otherwise +// it will apply to the whole workspace. +// +// Placeholders may be given constraints by writing them as `${::...}`. +// +// Supported constraints: +// +// |=== +// | Constraint | Restricts placeholder +// +// | kind(literal) | Is a literal (e.g. `42` or `"forty two"`) +// | not(a) | Negates the constraint `a` +// |=== +// +// Available via the command `rust-analyzer.ssr`. +// +// ```rust +// // Using structural search replace command [foo($a, $b) ==>> ($a).foo($b)] +// +// // BEFORE +// String::from(foo(y + 5, z)) +// +// // AFTER +// String::from((y + 5).foo(z)) +// ``` +// +// |=== +// | Editor | Action Name +// +// | VS Code | **Rust Analyzer: Structural Search Replace** +// |=== + +mod matching; +mod nester; +mod parsing; +mod replacing; +mod resolving; +mod search; +#[macro_use] +mod errors; +#[cfg(test)] +mod tests; + +use crate::errors::bail; +pub use crate::errors::SsrError; +pub use crate::matching::Match; +use crate::matching::MatchFailureReason; +use base_db::{FileId, FilePosition, FileRange}; +use hir::Semantics; +use ide_db::source_change::SourceFileEdit; +use resolving::ResolvedRule; +use rustc_hash::FxHashMap; +use syntax::{ast, AstNode, SyntaxNode, TextRange}; + +// A structured search replace rule. Create by calling `parse` on a str. +#[derive(Debug)] +pub struct SsrRule { + /// A structured pattern that we're searching for. + pattern: parsing::RawPattern, + /// What we'll replace it with. 
+ template: parsing::RawPattern, + parsed_rules: Vec, +} + +#[derive(Debug)] +pub struct SsrPattern { + raw: parsing::RawPattern, + parsed_rules: Vec, +} + +#[derive(Debug, Default)] +pub struct SsrMatches { + pub matches: Vec, +} + +/// Searches a crate for pattern matches and possibly replaces them with something else. +pub struct MatchFinder<'db> { + /// Our source of information about the user's code. + sema: Semantics<'db, ide_db::RootDatabase>, + rules: Vec, + resolution_scope: resolving::ResolutionScope<'db>, + restrict_ranges: Vec, +} + +impl<'db> MatchFinder<'db> { + /// Constructs a new instance where names will be looked up as if they appeared at + /// `lookup_context`. + pub fn in_context( + db: &'db ide_db::RootDatabase, + lookup_context: FilePosition, + mut restrict_ranges: Vec, + ) -> MatchFinder<'db> { + restrict_ranges.retain(|range| !range.range.is_empty()); + let sema = Semantics::new(db); + let resolution_scope = resolving::ResolutionScope::new(&sema, lookup_context); + MatchFinder { sema, rules: Vec::new(), resolution_scope, restrict_ranges } + } + + /// Constructs an instance using the start of the first file in `db` as the lookup context. + pub fn at_first_file(db: &'db ide_db::RootDatabase) -> Result, SsrError> { + use base_db::SourceDatabaseExt; + use ide_db::symbol_index::SymbolsDatabase; + if let Some(first_file_id) = db + .local_roots() + .iter() + .next() + .and_then(|root| db.source_root(root.clone()).iter().next()) + { + Ok(MatchFinder::in_context( + db, + FilePosition { file_id: first_file_id, offset: 0.into() }, + vec![], + )) + } else { + bail!("No files to search"); + } + } + + /// Adds a rule to be applied. The order in which rules are added matters. Earlier rules take + /// precedence. If a node is matched by an earlier rule, then later rules won't be permitted to + /// match to it. 
+ pub fn add_rule(&mut self, rule: SsrRule) -> Result<(), SsrError> { + for parsed_rule in rule.parsed_rules { + self.rules.push(ResolvedRule::new( + parsed_rule, + &self.resolution_scope, + self.rules.len(), + )?); + } + Ok(()) + } + + /// Finds matches for all added rules and returns edits for all found matches. + pub fn edits(&self) -> Vec { + use base_db::SourceDatabaseExt; + let mut matches_by_file = FxHashMap::default(); + for m in self.matches().matches { + matches_by_file + .entry(m.range.file_id) + .or_insert_with(|| SsrMatches::default()) + .matches + .push(m); + } + let mut edits = vec![]; + for (file_id, matches) in matches_by_file { + let edit = + replacing::matches_to_edit(&matches, &self.sema.db.file_text(file_id), &self.rules); + edits.push(SourceFileEdit { file_id, edit }); + } + edits + } + + /// Adds a search pattern. For use if you intend to only call `find_matches_in_file`. If you + /// intend to do replacement, use `add_rule` instead. + pub fn add_search_pattern(&mut self, pattern: SsrPattern) -> Result<(), SsrError> { + for parsed_rule in pattern.parsed_rules { + self.rules.push(ResolvedRule::new( + parsed_rule, + &self.resolution_scope, + self.rules.len(), + )?); + } + Ok(()) + } + + /// Returns matches for all added rules. + pub fn matches(&self) -> SsrMatches { + let mut matches = Vec::new(); + let mut usage_cache = search::UsageCache::default(); + for rule in &self.rules { + self.find_matches_for_rule(rule, &mut usage_cache, &mut matches); + } + nester::nest_and_remove_collisions(matches, &self.sema) + } + + /// Finds all nodes in `file_id` whose text is exactly equal to `snippet` and attempts to match + /// them, while recording reasons why they don't match. This API is useful for command + /// line-based debugging where providing a range is difficult. 
+ pub fn debug_where_text_equal(&self, file_id: FileId, snippet: &str) -> Vec { + use base_db::SourceDatabaseExt; + let file = self.sema.parse(file_id); + let mut res = Vec::new(); + let file_text = self.sema.db.file_text(file_id); + let mut remaining_text = file_text.as_str(); + let mut base = 0; + let len = snippet.len() as u32; + while let Some(offset) = remaining_text.find(snippet) { + let start = base + offset as u32; + let end = start + len; + self.output_debug_for_nodes_at_range( + file.syntax(), + FileRange { file_id, range: TextRange::new(start.into(), end.into()) }, + &None, + &mut res, + ); + remaining_text = &remaining_text[offset + snippet.len()..]; + base = end; + } + res + } + + fn output_debug_for_nodes_at_range( + &self, + node: &SyntaxNode, + range: FileRange, + restrict_range: &Option, + out: &mut Vec, + ) { + for node in node.children() { + let node_range = self.sema.original_range(&node); + if node_range.file_id != range.file_id || !node_range.range.contains_range(range.range) + { + continue; + } + if node_range.range == range.range { + for rule in &self.rules { + // For now we ignore rules that have a different kind than our node, otherwise + // we get lots of noise. If at some point we add support for restricting rules + // to a particular kind of thing (e.g. only match type references), then we can + // relax this. We special-case expressions, since function calls can match + // method calls. 
+ if rule.pattern.node.kind() != node.kind() + && !(ast::Expr::can_cast(rule.pattern.node.kind()) + && ast::Expr::can_cast(node.kind())) + { + continue; + } + out.push(MatchDebugInfo { + matched: matching::get_match(true, rule, &node, restrict_range, &self.sema) + .map_err(|e| MatchFailureReason { + reason: e.reason.unwrap_or_else(|| { + "Match failed, but no reason was given".to_owned() + }), + }), + pattern: rule.pattern.node.clone(), + node: node.clone(), + }); + } + } else if let Some(macro_call) = ast::MacroCall::cast(node.clone()) { + if let Some(expanded) = self.sema.expand(¯o_call) { + if let Some(tt) = macro_call.token_tree() { + self.output_debug_for_nodes_at_range( + &expanded, + range, + &Some(self.sema.original_range(tt.syntax())), + out, + ); + } + } + } + self.output_debug_for_nodes_at_range(&node, range, restrict_range, out); + } + } +} + +pub struct MatchDebugInfo { + node: SyntaxNode, + /// Our search pattern parsed as an expression or item, etc + pattern: SyntaxNode, + matched: Result, +} + +impl std::fmt::Debug for MatchDebugInfo { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match &self.matched { + Ok(_) => writeln!(f, "Node matched")?, + Err(reason) => writeln!(f, "Node failed to match because: {}", reason.reason)?, + } + writeln!( + f, + "============ AST ===========\n\ + {:#?}", + self.node + )?; + writeln!(f, "========= PATTERN ==========")?; + writeln!(f, "{:#?}", self.pattern)?; + writeln!(f, "============================")?; + Ok(()) + } +} + +impl SsrMatches { + /// Returns `self` with any nested matches removed and made into top-level matches. 
+ pub fn flattened(self) -> SsrMatches { + let mut out = SsrMatches::default(); + self.flatten_into(&mut out); + out + } + + fn flatten_into(self, out: &mut SsrMatches) { + for mut m in self.matches { + for p in m.placeholder_values.values_mut() { + std::mem::replace(&mut p.inner_matches, SsrMatches::default()).flatten_into(out); + } + out.matches.push(m); + } + } +} + +impl Match { + pub fn matched_text(&self) -> String { + self.matched_node.text().to_string() + } +} + +impl std::error::Error for SsrError {} + +#[cfg(test)] +impl MatchDebugInfo { + pub(crate) fn match_failure_reason(&self) -> Option<&str> { + self.matched.as_ref().err().map(|r| r.reason.as_str()) + } +} diff --git a/crates/ssr/src/matching.rs b/crates/ssr/src/matching.rs new file mode 100644 index 0000000000..ffc7202ae5 --- /dev/null +++ b/crates/ssr/src/matching.rs @@ -0,0 +1,777 @@ +//! This module is responsible for matching a search pattern against a node in the AST. In the +//! process of matching, placeholder values are recorded. + +use crate::{ + parsing::{Constraint, NodeKind, Placeholder}, + resolving::{ResolvedPattern, ResolvedRule, UfcsCallInfo}, + SsrMatches, +}; +use base_db::FileRange; +use hir::Semantics; +use rustc_hash::FxHashMap; +use std::{cell::Cell, iter::Peekable}; +use syntax::ast::{AstNode, AstToken}; +use syntax::{ast, SyntaxElement, SyntaxElementChildren, SyntaxKind, SyntaxNode, SyntaxToken}; +use test_utils::mark; + +// Creates a match error. If we're currently attempting to match some code that we thought we were +// going to match, as indicated by the --debug-snippet flag, then populate the reason field. +macro_rules! 
match_error { + ($e:expr) => {{ + MatchFailed { + reason: if recording_match_fail_reasons() { + Some(format!("{}", $e)) + } else { + None + } + } + }}; + ($fmt:expr, $($arg:tt)+) => {{ + MatchFailed { + reason: if recording_match_fail_reasons() { + Some(format!($fmt, $($arg)+)) + } else { + None + } + } + }}; +} + +// Fails the current match attempt, recording the supplied reason if we're recording match fail reasons. +macro_rules! fail_match { + ($($args:tt)*) => {return Err(match_error!($($args)*))}; +} + +/// Information about a match that was found. +#[derive(Debug)] +pub struct Match { + pub(crate) range: FileRange, + pub(crate) matched_node: SyntaxNode, + pub(crate) placeholder_values: FxHashMap, + pub(crate) ignored_comments: Vec, + pub(crate) rule_index: usize, + /// The depth of matched_node. + pub(crate) depth: usize, + // Each path in the template rendered for the module in which the match was found. + pub(crate) rendered_template_paths: FxHashMap, +} + +/// Represents a `$var` in an SSR query. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub(crate) struct Var(pub String); + +/// Information about a placeholder bound in a match. +#[derive(Debug)] +pub(crate) struct PlaceholderMatch { + /// The node that the placeholder matched to. If set, then we'll search for further matches + /// within this node. It isn't set when we match tokens within a macro call's token tree. + pub(crate) node: Option, + pub(crate) range: FileRange, + /// More matches, found within `node`. + pub(crate) inner_matches: SsrMatches, +} + +#[derive(Debug)] +pub(crate) struct MatchFailureReason { + pub(crate) reason: String, +} + +/// An "error" indicating that matching failed. Use the fail_match! macro to create and return this. +#[derive(Clone)] +pub(crate) struct MatchFailed { + /// The reason why we failed to match. Only present when debug_active true in call to + /// `get_match`. 
+ pub(crate) reason: Option, +} + +/// Checks if `code` matches the search pattern found in `search_scope`, returning information about +/// the match, if it does. Since we only do matching in this module and searching is done by the +/// parent module, we don't populate nested matches. +pub(crate) fn get_match( + debug_active: bool, + rule: &ResolvedRule, + code: &SyntaxNode, + restrict_range: &Option, + sema: &Semantics, +) -> Result { + record_match_fails_reasons_scope(debug_active, || { + Matcher::try_match(rule, code, restrict_range, sema) + }) +} + +/// Checks if our search pattern matches a particular node of the AST. +struct Matcher<'db, 'sema> { + sema: &'sema Semantics<'db, ide_db::RootDatabase>, + /// If any placeholders come from anywhere outside of this range, then the match will be + /// rejected. + restrict_range: Option, + rule: &'sema ResolvedRule, +} + +/// Which phase of matching we're currently performing. We do two phases because most attempted +/// matches will fail and it means we can defer more expensive checks to the second phase. +enum Phase<'a> { + /// On the first phase, we perform cheap checks. No state is mutated and nothing is recorded. + First, + /// On the second phase, we construct the `Match`. Things like what placeholders bind to is + /// recorded. + Second(&'a mut Match), +} + +impl<'db, 'sema> Matcher<'db, 'sema> { + fn try_match( + rule: &ResolvedRule, + code: &SyntaxNode, + restrict_range: &Option, + sema: &'sema Semantics<'db, ide_db::RootDatabase>, + ) -> Result { + let match_state = Matcher { sema, restrict_range: restrict_range.clone(), rule }; + // First pass at matching, where we check that node types and idents match. 
+ match_state.attempt_match_node(&mut Phase::First, &rule.pattern.node, code)?; + match_state.validate_range(&sema.original_range(code))?; + let mut the_match = Match { + range: sema.original_range(code), + matched_node: code.clone(), + placeholder_values: FxHashMap::default(), + ignored_comments: Vec::new(), + rule_index: rule.index, + depth: 0, + rendered_template_paths: FxHashMap::default(), + }; + // Second matching pass, where we record placeholder matches, ignored comments and maybe do + // any other more expensive checks that we didn't want to do on the first pass. + match_state.attempt_match_node( + &mut Phase::Second(&mut the_match), + &rule.pattern.node, + code, + )?; + the_match.depth = sema.ancestors_with_macros(the_match.matched_node.clone()).count(); + if let Some(template) = &rule.template { + the_match.render_template_paths(template, sema)?; + } + Ok(the_match) + } + + /// Checks that `range` is within the permitted range if any. This is applicable when we're + /// processing a macro expansion and we want to fail the match if we're working with a node that + /// didn't originate from the token tree of the macro call. + fn validate_range(&self, range: &FileRange) -> Result<(), MatchFailed> { + if let Some(restrict_range) = &self.restrict_range { + if restrict_range.file_id != range.file_id + || !restrict_range.range.contains_range(range.range) + { + fail_match!("Node originated from a macro"); + } + } + Ok(()) + } + + fn attempt_match_node( + &self, + phase: &mut Phase, + pattern: &SyntaxNode, + code: &SyntaxNode, + ) -> Result<(), MatchFailed> { + // Handle placeholders. 
+ if let Some(placeholder) = self.get_placeholder(&SyntaxElement::Node(pattern.clone())) { + for constraint in &placeholder.constraints { + self.check_constraint(constraint, code)?; + } + if let Phase::Second(matches_out) = phase { + let original_range = self.sema.original_range(code); + // We validated the range for the node when we started the match, so the placeholder + // probably can't fail range validation, but just to be safe... + self.validate_range(&original_range)?; + matches_out.placeholder_values.insert( + Var(placeholder.ident.to_string()), + PlaceholderMatch::new(code, original_range), + ); + } + return Ok(()); + } + // We allow a UFCS call to match a method call, provided they resolve to the same function. + if let Some(pattern_ufcs) = self.rule.pattern.ufcs_function_calls.get(pattern) { + if let Some(code) = ast::MethodCallExpr::cast(code.clone()) { + return self.attempt_match_ufcs_to_method_call(phase, pattern_ufcs, &code); + } + if let Some(code) = ast::CallExpr::cast(code.clone()) { + return self.attempt_match_ufcs_to_ufcs(phase, pattern_ufcs, &code); + } + } + if pattern.kind() != code.kind() { + fail_match!( + "Pattern had `{}` ({:?}), code had `{}` ({:?})", + pattern.text(), + pattern.kind(), + code.text(), + code.kind() + ); + } + // Some kinds of nodes have special handling. For everything else, we fall back to default + // matching. 
+ match code.kind() { + SyntaxKind::RECORD_EXPR_FIELD_LIST => { + self.attempt_match_record_field_list(phase, pattern, code) + } + SyntaxKind::TOKEN_TREE => self.attempt_match_token_tree(phase, pattern, code), + SyntaxKind::PATH => self.attempt_match_path(phase, pattern, code), + _ => self.attempt_match_node_children(phase, pattern, code), + } + } + + fn attempt_match_node_children( + &self, + phase: &mut Phase, + pattern: &SyntaxNode, + code: &SyntaxNode, + ) -> Result<(), MatchFailed> { + self.attempt_match_sequences( + phase, + PatternIterator::new(pattern), + code.children_with_tokens(), + ) + } + + fn attempt_match_sequences( + &self, + phase: &mut Phase, + pattern_it: PatternIterator, + mut code_it: SyntaxElementChildren, + ) -> Result<(), MatchFailed> { + let mut pattern_it = pattern_it.peekable(); + loop { + match phase.next_non_trivial(&mut code_it) { + None => { + if let Some(p) = pattern_it.next() { + fail_match!("Part of the pattern was unmatched: {:?}", p); + } + return Ok(()); + } + Some(SyntaxElement::Token(c)) => { + self.attempt_match_token(phase, &mut pattern_it, &c)?; + } + Some(SyntaxElement::Node(c)) => match pattern_it.next() { + Some(SyntaxElement::Node(p)) => { + self.attempt_match_node(phase, &p, &c)?; + } + Some(p) => fail_match!("Pattern wanted '{}', code has {}", p, c.text()), + None => fail_match!("Pattern reached end, code has {}", c.text()), + }, + } + } + } + + fn attempt_match_token( + &self, + phase: &mut Phase, + pattern: &mut Peekable, + code: &syntax::SyntaxToken, + ) -> Result<(), MatchFailed> { + phase.record_ignored_comments(code); + // Ignore whitespace and comments. + if code.kind().is_trivia() { + return Ok(()); + } + if let Some(SyntaxElement::Token(p)) = pattern.peek() { + // If the code has a comma and the pattern is about to close something, then accept the + // comma without advancing the pattern. i.e. ignore trailing commas. 
+ if code.kind() == SyntaxKind::COMMA && is_closing_token(p.kind()) { + return Ok(()); + } + // Conversely, if the pattern has a comma and the code doesn't, skip that part of the + // pattern and continue to match the code. + if p.kind() == SyntaxKind::COMMA && is_closing_token(code.kind()) { + pattern.next(); + } + } + // Consume an element from the pattern and make sure it matches. + match pattern.next() { + Some(SyntaxElement::Token(p)) => { + if p.kind() != code.kind() || p.text() != code.text() { + fail_match!( + "Pattern wanted token '{}' ({:?}), but code had token '{}' ({:?})", + p.text(), + p.kind(), + code.text(), + code.kind() + ) + } + } + Some(SyntaxElement::Node(p)) => { + // Not sure if this is actually reachable. + fail_match!( + "Pattern wanted {:?}, but code had token '{}' ({:?})", + p, + code.text(), + code.kind() + ); + } + None => { + fail_match!("Pattern exhausted, while code remains: `{}`", code.text()); + } + } + Ok(()) + } + + fn check_constraint( + &self, + constraint: &Constraint, + code: &SyntaxNode, + ) -> Result<(), MatchFailed> { + match constraint { + Constraint::Kind(kind) => { + kind.matches(code)?; + } + Constraint::Not(sub) => { + if self.check_constraint(&*sub, code).is_ok() { + fail_match!("Constraint {:?} failed for '{}'", constraint, code.text()); + } + } + } + Ok(()) + } + + /// Paths are matched based on whether they refer to the same thing, even if they're written + /// differently. 
+ fn attempt_match_path( + &self, + phase: &mut Phase, + pattern: &SyntaxNode, + code: &SyntaxNode, + ) -> Result<(), MatchFailed> { + if let Some(pattern_resolved) = self.rule.pattern.resolved_paths.get(pattern) { + let pattern_path = ast::Path::cast(pattern.clone()).unwrap(); + let code_path = ast::Path::cast(code.clone()).unwrap(); + if let (Some(pattern_segment), Some(code_segment)) = + (pattern_path.segment(), code_path.segment()) + { + // Match everything within the segment except for the name-ref, which is handled + // separately via comparing what the path resolves to below. + self.attempt_match_opt( + phase, + pattern_segment.generic_arg_list(), + code_segment.generic_arg_list(), + )?; + self.attempt_match_opt( + phase, + pattern_segment.param_list(), + code_segment.param_list(), + )?; + } + if matches!(phase, Phase::Second(_)) { + let resolution = self + .sema + .resolve_path(&code_path) + .ok_or_else(|| match_error!("Failed to resolve path `{}`", code.text()))?; + if pattern_resolved.resolution != resolution { + fail_match!("Pattern had path `{}` code had `{}`", pattern.text(), code.text()); + } + } + } else { + return self.attempt_match_node_children(phase, pattern, code); + } + Ok(()) + } + + fn attempt_match_opt( + &self, + phase: &mut Phase, + pattern: Option, + code: Option, + ) -> Result<(), MatchFailed> { + match (pattern, code) { + (Some(p), Some(c)) => self.attempt_match_node(phase, &p.syntax(), &c.syntax()), + (None, None) => Ok(()), + (Some(p), None) => fail_match!("Pattern `{}` had nothing to match", p.syntax().text()), + (None, Some(c)) => { + fail_match!("Nothing in pattern to match code `{}`", c.syntax().text()) + } + } + } + + /// We want to allow the records to match in any order, so we have special matching logic for + /// them. + fn attempt_match_record_field_list( + &self, + phase: &mut Phase, + pattern: &SyntaxNode, + code: &SyntaxNode, + ) -> Result<(), MatchFailed> { + // Build a map keyed by field name. 
+ let mut fields_by_name = FxHashMap::default(); + for child in code.children() { + if let Some(record) = ast::RecordExprField::cast(child.clone()) { + if let Some(name) = record.field_name() { + fields_by_name.insert(name.text().clone(), child.clone()); + } + } + } + for p in pattern.children_with_tokens() { + if let SyntaxElement::Node(p) = p { + if let Some(name_element) = p.first_child_or_token() { + if self.get_placeholder(&name_element).is_some() { + // If the pattern is using placeholders for field names then order + // independence doesn't make sense. Fall back to regular ordered + // matching. + return self.attempt_match_node_children(phase, pattern, code); + } + if let Some(ident) = only_ident(name_element) { + let code_record = fields_by_name.remove(ident.text()).ok_or_else(|| { + match_error!( + "Placeholder has record field '{}', but code doesn't", + ident + ) + })?; + self.attempt_match_node(phase, &p, &code_record)?; + } + } + } + } + if let Some(unmatched_fields) = fields_by_name.keys().next() { + fail_match!( + "{} field(s) of a record literal failed to match, starting with {}", + fields_by_name.len(), + unmatched_fields + ); + } + Ok(()) + } + + /// Outside of token trees, a placeholder can only match a single AST node, whereas in a token + /// tree it can match a sequence of tokens. Note, that this code will only be used when the + /// pattern matches the macro invocation. For matches within the macro call, we'll already have + /// expanded the macro. 
+ fn attempt_match_token_tree( + &self, + phase: &mut Phase, + pattern: &SyntaxNode, + code: &syntax::SyntaxNode, + ) -> Result<(), MatchFailed> { + let mut pattern = PatternIterator::new(pattern).peekable(); + let mut children = code.children_with_tokens(); + while let Some(child) = children.next() { + if let Some(placeholder) = pattern.peek().and_then(|p| self.get_placeholder(p)) { + pattern.next(); + let next_pattern_token = pattern + .peek() + .and_then(|p| match p { + SyntaxElement::Token(t) => Some(t.clone()), + SyntaxElement::Node(n) => n.first_token(), + }) + .map(|p| p.text().to_string()); + let first_matched_token = child.clone(); + let mut last_matched_token = child; + // Read code tokens util we reach one equal to the next token from our pattern + // or we reach the end of the token tree. + while let Some(next) = children.next() { + match &next { + SyntaxElement::Token(t) => { + if Some(t.to_string()) == next_pattern_token { + pattern.next(); + break; + } + } + SyntaxElement::Node(n) => { + if let Some(first_token) = n.first_token() { + if Some(first_token.to_string()) == next_pattern_token { + if let Some(SyntaxElement::Node(p)) = pattern.next() { + // We have a subtree that starts with the next token in our pattern. + self.attempt_match_token_tree(phase, &p, &n)?; + break; + } + } + } + } + }; + last_matched_token = next; + } + if let Phase::Second(match_out) = phase { + match_out.placeholder_values.insert( + Var(placeholder.ident.to_string()), + PlaceholderMatch::from_range(FileRange { + file_id: self.sema.original_range(code).file_id, + range: first_matched_token + .text_range() + .cover(last_matched_token.text_range()), + }), + ); + } + continue; + } + // Match literal (non-placeholder) tokens. 
+ match child { + SyntaxElement::Token(token) => { + self.attempt_match_token(phase, &mut pattern, &token)?; + } + SyntaxElement::Node(node) => match pattern.next() { + Some(SyntaxElement::Node(p)) => { + self.attempt_match_token_tree(phase, &p, &node)?; + } + Some(SyntaxElement::Token(p)) => fail_match!( + "Pattern has token '{}', code has subtree '{}'", + p.text(), + node.text() + ), + None => fail_match!("Pattern has nothing, code has '{}'", node.text()), + }, + } + } + if let Some(p) = pattern.next() { + fail_match!("Reached end of token tree in code, but pattern still has {:?}", p); + } + Ok(()) + } + + fn attempt_match_ufcs_to_method_call( + &self, + phase: &mut Phase, + pattern_ufcs: &UfcsCallInfo, + code: &ast::MethodCallExpr, + ) -> Result<(), MatchFailed> { + use ast::ArgListOwner; + let code_resolved_function = self + .sema + .resolve_method_call(code) + .ok_or_else(|| match_error!("Failed to resolve method call"))?; + if pattern_ufcs.function != code_resolved_function { + fail_match!("Method call resolved to a different function"); + } + if code_resolved_function.has_self_param(self.sema.db) { + if let (Some(pattern_type), Some(expr)) = (&pattern_ufcs.qualifier_type, &code.expr()) { + self.check_expr_type(pattern_type, expr)?; + } + } + // Check arguments. + let mut pattern_args = pattern_ufcs + .call_expr + .arg_list() + .ok_or_else(|| match_error!("Pattern function call has no args"))? + .args(); + self.attempt_match_opt(phase, pattern_args.next(), code.expr())?; + let mut code_args = + code.arg_list().ok_or_else(|| match_error!("Code method call has no args"))?.args(); + loop { + match (pattern_args.next(), code_args.next()) { + (None, None) => return Ok(()), + (p, c) => self.attempt_match_opt(phase, p, c)?, + } + } + } + + fn attempt_match_ufcs_to_ufcs( + &self, + phase: &mut Phase, + pattern_ufcs: &UfcsCallInfo, + code: &ast::CallExpr, + ) -> Result<(), MatchFailed> { + use ast::ArgListOwner; + // Check that the first argument is the expected type. 
+ if let (Some(pattern_type), Some(expr)) = ( + &pattern_ufcs.qualifier_type, + &code.arg_list().and_then(|code_args| code_args.args().next()), + ) { + self.check_expr_type(pattern_type, expr)?; + } + self.attempt_match_node_children(phase, pattern_ufcs.call_expr.syntax(), code.syntax()) + } + + fn check_expr_type( + &self, + pattern_type: &hir::Type, + expr: &ast::Expr, + ) -> Result<(), MatchFailed> { + use hir::HirDisplay; + let code_type = self.sema.type_of_expr(&expr).ok_or_else(|| { + match_error!("Failed to get receiver type for `{}`", expr.syntax().text()) + })?; + if !code_type + .autoderef(self.sema.db) + .any(|deref_code_type| *pattern_type == deref_code_type) + { + fail_match!( + "Pattern type `{}` didn't match code type `{}`", + pattern_type.display(self.sema.db), + code_type.display(self.sema.db) + ); + } + Ok(()) + } + + fn get_placeholder(&self, element: &SyntaxElement) -> Option<&Placeholder> { + only_ident(element.clone()).and_then(|ident| self.rule.get_placeholder(&ident)) + } +} + +impl Match { + fn render_template_paths( + &mut self, + template: &ResolvedPattern, + sema: &Semantics, + ) -> Result<(), MatchFailed> { + let module = sema + .scope(&self.matched_node) + .module() + .ok_or_else(|| match_error!("Matched node isn't in a module"))?; + for (path, resolved_path) in &template.resolved_paths { + if let hir::PathResolution::Def(module_def) = resolved_path.resolution { + let mod_path = module.find_use_path(sema.db, module_def).ok_or_else(|| { + match_error!("Failed to render template path `{}` at match location") + })?; + self.rendered_template_paths.insert(path.clone(), mod_path); + } + } + Ok(()) + } +} + +impl Phase<'_> { + fn next_non_trivial(&mut self, code_it: &mut SyntaxElementChildren) -> Option { + loop { + let c = code_it.next(); + if let Some(SyntaxElement::Token(t)) = &c { + self.record_ignored_comments(t); + if t.kind().is_trivia() { + continue; + } + } + return c; + } + } + + fn record_ignored_comments(&mut self, token: 
&SyntaxToken) { + if token.kind() == SyntaxKind::COMMENT { + if let Phase::Second(match_out) = self { + if let Some(comment) = ast::Comment::cast(token.clone()) { + match_out.ignored_comments.push(comment); + } + } + } + } +} + +fn is_closing_token(kind: SyntaxKind) -> bool { + kind == SyntaxKind::R_PAREN || kind == SyntaxKind::R_CURLY || kind == SyntaxKind::R_BRACK +} + +pub(crate) fn record_match_fails_reasons_scope(debug_active: bool, f: F) -> T +where + F: Fn() -> T, +{ + RECORDING_MATCH_FAIL_REASONS.with(|c| c.set(debug_active)); + let res = f(); + RECORDING_MATCH_FAIL_REASONS.with(|c| c.set(false)); + res +} + +// For performance reasons, we don't want to record the reason why every match fails, only the bit +// of code that the user indicated they thought would match. We use a thread local to indicate when +// we are trying to match that bit of code. This saves us having to pass a boolean into all the bits +// of code that can make the decision to not match. +thread_local! { + pub static RECORDING_MATCH_FAIL_REASONS: Cell = Cell::new(false); +} + +fn recording_match_fail_reasons() -> bool { + RECORDING_MATCH_FAIL_REASONS.with(|c| c.get()) +} + +impl PlaceholderMatch { + fn new(node: &SyntaxNode, range: FileRange) -> Self { + Self { node: Some(node.clone()), range, inner_matches: SsrMatches::default() } + } + + fn from_range(range: FileRange) -> Self { + Self { node: None, range, inner_matches: SsrMatches::default() } + } +} + +impl NodeKind { + fn matches(&self, node: &SyntaxNode) -> Result<(), MatchFailed> { + let ok = match self { + Self::Literal => { + mark::hit!(literal_constraint); + ast::Literal::can_cast(node.kind()) + } + }; + if !ok { + fail_match!("Code '{}' isn't of kind {:?}", node.text(), self); + } + Ok(()) + } +} + +// If `node` contains nothing but an ident then return it, otherwise return None. 
+fn only_ident(element: SyntaxElement) -> Option { + match element { + SyntaxElement::Token(t) => { + if t.kind() == SyntaxKind::IDENT { + return Some(t); + } + } + SyntaxElement::Node(n) => { + let mut children = n.children_with_tokens(); + if let (Some(only_child), None) = (children.next(), children.next()) { + return only_ident(only_child); + } + } + } + None +} + +struct PatternIterator { + iter: SyntaxElementChildren, +} + +impl Iterator for PatternIterator { + type Item = SyntaxElement; + + fn next(&mut self) -> Option { + while let Some(element) = self.iter.next() { + if !element.kind().is_trivia() { + return Some(element); + } + } + None + } +} + +impl PatternIterator { + fn new(parent: &SyntaxNode) -> Self { + Self { iter: parent.children_with_tokens() } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::{MatchFinder, SsrRule}; + + #[test] + fn parse_match_replace() { + let rule: SsrRule = "foo($x) ==>> bar($x)".parse().unwrap(); + let input = "fn foo() {} fn bar() {} fn main() { foo(1+2); }"; + + let (db, position, selections) = crate::tests::single_file(input); + let mut match_finder = MatchFinder::in_context(&db, position, selections); + match_finder.add_rule(rule).unwrap(); + let matches = match_finder.matches(); + assert_eq!(matches.matches.len(), 1); + assert_eq!(matches.matches[0].matched_node.text(), "foo(1+2)"); + assert_eq!(matches.matches[0].placeholder_values.len(), 1); + assert_eq!( + matches.matches[0].placeholder_values[&Var("x".to_string())] + .node + .as_ref() + .unwrap() + .text(), + "1+2" + ); + + let edits = match_finder.edits(); + assert_eq!(edits.len(), 1); + let edit = &edits[0]; + let mut after = input.to_string(); + edit.edit.apply(&mut after); + assert_eq!(after, "fn foo() {} fn bar() {} fn main() { bar(1+2); }"); + } +} diff --git a/crates/ssr/src/nester.rs b/crates/ssr/src/nester.rs new file mode 100644 index 0000000000..6ac355dfc2 --- /dev/null +++ b/crates/ssr/src/nester.rs @@ -0,0 +1,94 @@ +//! 
Converts a flat collection of matches into a nested form suitable for replacement. When there +//! are multiple matches for a node, or that overlap, priority is given to the earlier rule. Nested +//! matches are only permitted if the inner match is contained entirely within a placeholder of an +//! outer match. +//! +//! For example, if our search pattern is `foo(foo($a))` and the code had `foo(foo(foo(foo(42))))`, +//! then we'll get 3 matches, however only the outermost and innermost matches can be accepted. The +//! middle match would take the second `foo` from the outer match. + +use crate::{Match, SsrMatches}; +use rustc_hash::FxHashMap; +use syntax::SyntaxNode; + +pub(crate) fn nest_and_remove_collisions( + mut matches: Vec, + sema: &hir::Semantics, +) -> SsrMatches { + // We sort the matches by depth then by rule index. Sorting by depth means that by the time we + // see a match, any parent matches or conflicting matches will have already been seen. Sorting + // by rule_index means that if there are two matches for the same node, the rule added first + // will take precedence. + matches.sort_by(|a, b| a.depth.cmp(&b.depth).then_with(|| a.rule_index.cmp(&b.rule_index))); + let mut collector = MatchCollector::default(); + for m in matches { + collector.add_match(m, sema); + } + collector.into() +} + +#[derive(Default)] +struct MatchCollector { + matches_by_node: FxHashMap, +} + +impl MatchCollector { + /// Attempts to add `m` to matches. If it conflicts with an existing match, it is discarded. If + /// it is entirely within the a placeholder of an existing match, then it is added as a child + /// match of the existing match. 
+ fn add_match(&mut self, m: Match, sema: &hir::Semantics) { + let matched_node = m.matched_node.clone(); + if let Some(existing) = self.matches_by_node.get_mut(&matched_node) { + try_add_sub_match(m, existing, sema); + return; + } + for ancestor in sema.ancestors_with_macros(m.matched_node.clone()) { + if let Some(existing) = self.matches_by_node.get_mut(&ancestor) { + try_add_sub_match(m, existing, sema); + return; + } + } + self.matches_by_node.insert(matched_node, m); + } +} + +/// Attempts to add `m` as a sub-match of `existing`. +fn try_add_sub_match(m: Match, existing: &mut Match, sema: &hir::Semantics) { + for p in existing.placeholder_values.values_mut() { + // Note, no need to check if p.range.file is equal to m.range.file, since we + // already know we're within `existing`. + if p.range.range.contains_range(m.range.range) { + // Convert the inner matches in `p` into a temporary MatchCollector. When + // we're done, we then convert it back into an SsrMatches. If we expected + // lots of inner matches, it might be worthwhile keeping a MatchCollector + // around for each placeholder match. However we expect most placeholder + // will have 0 and a few will have 1. More than that should hopefully be + // exceptional. + let mut collector = MatchCollector::default(); + for m in std::mem::replace(&mut p.inner_matches.matches, Vec::new()) { + collector.matches_by_node.insert(m.matched_node.clone(), m); + } + collector.add_match(m, sema); + p.inner_matches = collector.into(); + break; + } + } +} + +impl From for SsrMatches { + fn from(mut match_collector: MatchCollector) -> Self { + let mut matches = SsrMatches::default(); + for (_, m) in match_collector.matches_by_node.drain() { + matches.matches.push(m); + } + matches.matches.sort_by(|a, b| { + // Order matches by file_id then by start range. This should be sufficient since ranges + // shouldn't be overlapping. 
+ a.range + .file_id + .cmp(&b.range.file_id) + .then_with(|| a.range.range.start().cmp(&b.range.range.start())) + }); + matches + } +} diff --git a/crates/ssr/src/parsing.rs b/crates/ssr/src/parsing.rs new file mode 100644 index 0000000000..9570e96e36 --- /dev/null +++ b/crates/ssr/src/parsing.rs @@ -0,0 +1,389 @@ +//! This file contains code for parsing SSR rules, which look something like `foo($a) ==>> bar($b)`. +//! We first split everything before and after the separator `==>>`. Next, both the search pattern +//! and the replacement template get tokenized by the Rust tokenizer. Tokens are then searched for +//! placeholders, which start with `$`. For replacement templates, this is the final form. For +//! search patterns, we go further and parse the pattern as each kind of thing that we can match. +//! e.g. expressions, type references etc. + +use crate::errors::bail; +use crate::{SsrError, SsrPattern, SsrRule}; +use rustc_hash::{FxHashMap, FxHashSet}; +use std::str::FromStr; +use syntax::{ast, AstNode, SmolStr, SyntaxKind, SyntaxNode, T}; +use test_utils::mark; + +#[derive(Debug)] +pub(crate) struct ParsedRule { + pub(crate) placeholders_by_stand_in: FxHashMap, + pub(crate) pattern: SyntaxNode, + pub(crate) template: Option, +} + +#[derive(Debug)] +pub(crate) struct RawPattern { + tokens: Vec, +} + +// Part of a search or replace pattern. +#[derive(Clone, Debug, PartialEq, Eq)] +pub(crate) enum PatternElement { + Token(Token), + Placeholder(Placeholder), +} + +#[derive(Clone, Debug, PartialEq, Eq)] +pub(crate) struct Placeholder { + /// The name of this placeholder. e.g. for "$a", this would be "a" + pub(crate) ident: SmolStr, + /// A unique name used in place of this placeholder when we parse the pattern as Rust code. 
+ stand_in_name: String, + pub(crate) constraints: Vec, +} + +#[derive(Clone, Debug, PartialEq, Eq)] +pub(crate) enum Constraint { + Kind(NodeKind), + Not(Box), +} + +#[derive(Clone, Debug, PartialEq, Eq)] +pub(crate) enum NodeKind { + Literal, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub(crate) struct Token { + kind: SyntaxKind, + pub(crate) text: SmolStr, +} + +impl ParsedRule { + fn new( + pattern: &RawPattern, + template: Option<&RawPattern>, + ) -> Result, SsrError> { + let raw_pattern = pattern.as_rust_code(); + let raw_template = template.map(|t| t.as_rust_code()); + let raw_template = raw_template.as_ref().map(|s| s.as_str()); + let mut builder = RuleBuilder { + placeholders_by_stand_in: pattern.placeholders_by_stand_in(), + rules: Vec::new(), + }; + builder.try_add(ast::Expr::parse(&raw_pattern), raw_template.map(ast::Expr::parse)); + builder.try_add(ast::Type::parse(&raw_pattern), raw_template.map(ast::Type::parse)); + builder.try_add(ast::Item::parse(&raw_pattern), raw_template.map(ast::Item::parse)); + builder.try_add(ast::Path::parse(&raw_pattern), raw_template.map(ast::Path::parse)); + builder.try_add(ast::Pat::parse(&raw_pattern), raw_template.map(ast::Pat::parse)); + builder.build() + } +} + +struct RuleBuilder { + placeholders_by_stand_in: FxHashMap, + rules: Vec, +} + +impl RuleBuilder { + fn try_add(&mut self, pattern: Result, template: Option>) { + match (pattern, template) { + (Ok(pattern), Some(Ok(template))) => self.rules.push(ParsedRule { + placeholders_by_stand_in: self.placeholders_by_stand_in.clone(), + pattern: pattern.syntax().clone(), + template: Some(template.syntax().clone()), + }), + (Ok(pattern), None) => self.rules.push(ParsedRule { + placeholders_by_stand_in: self.placeholders_by_stand_in.clone(), + pattern: pattern.syntax().clone(), + template: None, + }), + _ => {} + } + } + + fn build(mut self) -> Result, SsrError> { + if self.rules.is_empty() { + bail!("Not a valid Rust expression, type, item, path or pattern"); + } + // 
If any rules contain paths, then we reject any rules that don't contain paths. Allowing a + // mix leads to strange semantics, since the path-based rules only match things where the + // path refers to semantically the same thing, whereas the non-path-based rules could match + // anything. Specifically, if we have a rule like `foo ==>> bar` we only want to match the + // `foo` that is in the current scope, not any `foo`. However "foo" can be parsed as a + // pattern (IDENT_PAT -> NAME -> IDENT). Allowing such a rule through would result in + // renaming everything called `foo` to `bar`. It'd also be slow, since without a path, we'd + // have to use the slow-scan search mechanism. + if self.rules.iter().any(|rule| contains_path(&rule.pattern)) { + let old_len = self.rules.len(); + self.rules.retain(|rule| contains_path(&rule.pattern)); + if self.rules.len() < old_len { + mark::hit!(pattern_is_a_single_segment_path); + } + } + Ok(self.rules) + } +} + +/// Returns whether there are any paths in `node`. +fn contains_path(node: &SyntaxNode) -> bool { + node.kind() == SyntaxKind::PATH + || node.descendants().any(|node| node.kind() == SyntaxKind::PATH) +} + +impl FromStr for SsrRule { + type Err = SsrError; + + fn from_str(query: &str) -> Result { + let mut it = query.split("==>>"); + let pattern = it.next().expect("at least empty string").trim(); + let template = it + .next() + .ok_or_else(|| SsrError("Cannot find delimiter `==>>`".into()))? 
+ .trim() + .to_string(); + if it.next().is_some() { + return Err(SsrError("More than one delimiter found".into())); + } + let raw_pattern = pattern.parse()?; + let raw_template = template.parse()?; + let parsed_rules = ParsedRule::new(&raw_pattern, Some(&raw_template))?; + let rule = SsrRule { pattern: raw_pattern, template: raw_template, parsed_rules }; + validate_rule(&rule)?; + Ok(rule) + } +} + +impl FromStr for RawPattern { + type Err = SsrError; + + fn from_str(pattern_str: &str) -> Result { + Ok(RawPattern { tokens: parse_pattern(pattern_str)? }) + } +} + +impl RawPattern { + /// Returns this search pattern as Rust source code that we can feed to the Rust parser. + fn as_rust_code(&self) -> String { + let mut res = String::new(); + for t in &self.tokens { + res.push_str(match t { + PatternElement::Token(token) => token.text.as_str(), + PatternElement::Placeholder(placeholder) => placeholder.stand_in_name.as_str(), + }); + } + res + } + + pub(crate) fn placeholders_by_stand_in(&self) -> FxHashMap { + let mut res = FxHashMap::default(); + for t in &self.tokens { + if let PatternElement::Placeholder(placeholder) = t { + res.insert(SmolStr::new(placeholder.stand_in_name.clone()), placeholder.clone()); + } + } + res + } +} + +impl FromStr for SsrPattern { + type Err = SsrError; + + fn from_str(pattern_str: &str) -> Result { + let raw_pattern = pattern_str.parse()?; + let parsed_rules = ParsedRule::new(&raw_pattern, None)?; + Ok(SsrPattern { raw: raw_pattern, parsed_rules }) + } +} + +/// Returns `pattern_str`, parsed as a search or replace pattern. If `remove_whitespace` is true, +/// then any whitespace tokens will be removed, which we do for the search pattern, but not for the +/// replace pattern. 
+fn parse_pattern(pattern_str: &str) -> Result, SsrError> { + let mut res = Vec::new(); + let mut placeholder_names = FxHashSet::default(); + let mut tokens = tokenize(pattern_str)?.into_iter(); + while let Some(token) = tokens.next() { + if token.kind == T![$] { + let placeholder = parse_placeholder(&mut tokens)?; + if !placeholder_names.insert(placeholder.ident.clone()) { + bail!("Name `{}` repeats more than once", placeholder.ident); + } + res.push(PatternElement::Placeholder(placeholder)); + } else { + res.push(PatternElement::Token(token)); + } + } + Ok(res) +} + +/// Checks for errors in a rule. e.g. the replace pattern referencing placeholders that the search +/// pattern didn't define. +fn validate_rule(rule: &SsrRule) -> Result<(), SsrError> { + let mut defined_placeholders = FxHashSet::default(); + for p in &rule.pattern.tokens { + if let PatternElement::Placeholder(placeholder) = p { + defined_placeholders.insert(&placeholder.ident); + } + } + let mut undefined = Vec::new(); + for p in &rule.template.tokens { + if let PatternElement::Placeholder(placeholder) = p { + if !defined_placeholders.contains(&placeholder.ident) { + undefined.push(format!("${}", placeholder.ident)); + } + if !placeholder.constraints.is_empty() { + bail!("Replacement placeholders cannot have constraints"); + } + } + } + if !undefined.is_empty() { + bail!("Replacement contains undefined placeholders: {}", undefined.join(", ")); + } + Ok(()) +} + +fn tokenize(source: &str) -> Result, SsrError> { + let mut start = 0; + let (raw_tokens, errors) = syntax::tokenize(source); + if let Some(first_error) = errors.first() { + bail!("Failed to parse pattern: {}", first_error); + } + let mut tokens: Vec = Vec::new(); + for raw_token in raw_tokens { + let token_len = usize::from(raw_token.len); + tokens.push(Token { + kind: raw_token.kind, + text: SmolStr::new(&source[start..start + token_len]), + }); + start += token_len; + } + Ok(tokens) +} + +fn parse_placeholder(tokens: &mut 
std::vec::IntoIter) -> Result { + let mut name = None; + let mut constraints = Vec::new(); + if let Some(token) = tokens.next() { + match token.kind { + SyntaxKind::IDENT => { + name = Some(token.text); + } + T!['{'] => { + let token = + tokens.next().ok_or_else(|| SsrError::new("Unexpected end of placeholder"))?; + if token.kind == SyntaxKind::IDENT { + name = Some(token.text); + } + loop { + let token = tokens + .next() + .ok_or_else(|| SsrError::new("Placeholder is missing closing brace '}'"))?; + match token.kind { + T![:] => { + constraints.push(parse_constraint(tokens)?); + } + T!['}'] => break, + _ => bail!("Unexpected token while parsing placeholder: '{}'", token.text), + } + } + } + _ => { + bail!("Placeholders should either be $name or ${{name:constraints}}"); + } + } + } + let name = name.ok_or_else(|| SsrError::new("Placeholder ($) with no name"))?; + Ok(Placeholder::new(name, constraints)) +} + +fn parse_constraint(tokens: &mut std::vec::IntoIter) -> Result { + let constraint_type = tokens + .next() + .ok_or_else(|| SsrError::new("Found end of placeholder while looking for a constraint"))? 
+ .text + .to_string(); + match constraint_type.as_str() { + "kind" => { + expect_token(tokens, "(")?; + let t = tokens.next().ok_or_else(|| { + SsrError::new("Unexpected end of constraint while looking for kind") + })?; + if t.kind != SyntaxKind::IDENT { + bail!("Expected ident, found {:?} while parsing kind constraint", t.kind); + } + expect_token(tokens, ")")?; + Ok(Constraint::Kind(NodeKind::from(&t.text)?)) + } + "not" => { + expect_token(tokens, "(")?; + let sub = parse_constraint(tokens)?; + expect_token(tokens, ")")?; + Ok(Constraint::Not(Box::new(sub))) + } + x => bail!("Unsupported constraint type '{}'", x), + } +} + +fn expect_token(tokens: &mut std::vec::IntoIter, expected: &str) -> Result<(), SsrError> { + if let Some(t) = tokens.next() { + if t.text == expected { + return Ok(()); + } + bail!("Expected {} found {}", expected, t.text); + } + bail!("Expected {} found end of stream", expected); +} + +impl NodeKind { + fn from(name: &SmolStr) -> Result { + Ok(match name.as_str() { + "literal" => NodeKind::Literal, + _ => bail!("Unknown node kind '{}'", name), + }) + } +} + +impl Placeholder { + fn new(name: SmolStr, constraints: Vec) -> Self { + Self { stand_in_name: format!("__placeholder_{}", name), constraints, ident: name } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn parser_happy_case() { + fn token(kind: SyntaxKind, text: &str) -> PatternElement { + PatternElement::Token(Token { kind, text: SmolStr::new(text) }) + } + fn placeholder(name: &str) -> PatternElement { + PatternElement::Placeholder(Placeholder::new(SmolStr::new(name), Vec::new())) + } + let result: SsrRule = "foo($a, $b) ==>> bar($b, $a)".parse().unwrap(); + assert_eq!( + result.pattern.tokens, + vec![ + token(SyntaxKind::IDENT, "foo"), + token(T!['('], "("), + placeholder("a"), + token(T![,], ","), + token(SyntaxKind::WHITESPACE, " "), + placeholder("b"), + token(T![')'], ")"), + ] + ); + assert_eq!( + result.template.tokens, + vec![ + token(SyntaxKind::IDENT, 
"bar"), + token(T!['('], "("), + placeholder("b"), + token(T![,], ","), + token(SyntaxKind::WHITESPACE, " "), + placeholder("a"), + token(T![')'], ")"), + ] + ); + } +} diff --git a/crates/ssr/src/replacing.rs b/crates/ssr/src/replacing.rs new file mode 100644 index 0000000000..8f8fe6149a --- /dev/null +++ b/crates/ssr/src/replacing.rs @@ -0,0 +1,194 @@ +//! Code for applying replacement templates for matches that have previously been found. + +use crate::matching::Var; +use crate::{resolving::ResolvedRule, Match, SsrMatches}; +use rustc_hash::{FxHashMap, FxHashSet}; +use syntax::ast::{self, AstToken}; +use syntax::{SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, TextRange, TextSize}; +use text_edit::TextEdit; + +/// Returns a text edit that will replace each match in `matches` with its corresponding replacement +/// template. Placeholders in the template will have been substituted with whatever they matched to +/// in the original code. +pub(crate) fn matches_to_edit( + matches: &SsrMatches, + file_src: &str, + rules: &[ResolvedRule], +) -> TextEdit { + matches_to_edit_at_offset(matches, file_src, 0.into(), rules) +} + +fn matches_to_edit_at_offset( + matches: &SsrMatches, + file_src: &str, + relative_start: TextSize, + rules: &[ResolvedRule], +) -> TextEdit { + let mut edit_builder = TextEdit::builder(); + for m in &matches.matches { + edit_builder.replace( + m.range.range.checked_sub(relative_start).unwrap(), + render_replace(m, file_src, rules), + ); + } + edit_builder.finish() +} + +struct ReplacementRenderer<'a> { + match_info: &'a Match, + file_src: &'a str, + rules: &'a [ResolvedRule], + rule: &'a ResolvedRule, + out: String, + // Map from a range within `out` to a token in `template` that represents a placeholder. This is + // used to validate that the generated source code doesn't split any placeholder expansions (see + // below). 
+ placeholder_tokens_by_range: FxHashMap, + // Which placeholder tokens need to be wrapped in parenthesis in order to ensure that when `out` + // is parsed, placeholders don't get split. e.g. if a template of `$a.to_string()` results in `1 + // + 2.to_string()` then the placeholder value `1 + 2` was split and needs parenthesis. + placeholder_tokens_requiring_parenthesis: FxHashSet, +} + +fn render_replace(match_info: &Match, file_src: &str, rules: &[ResolvedRule]) -> String { + let rule = &rules[match_info.rule_index]; + let template = rule + .template + .as_ref() + .expect("You called MatchFinder::edits after calling MatchFinder::add_search_pattern"); + let mut renderer = ReplacementRenderer { + match_info, + file_src, + rules, + rule, + out: String::new(), + placeholder_tokens_requiring_parenthesis: FxHashSet::default(), + placeholder_tokens_by_range: FxHashMap::default(), + }; + renderer.render_node(&template.node); + renderer.maybe_rerender_with_extra_parenthesis(&template.node); + for comment in &match_info.ignored_comments { + renderer.out.push_str(&comment.syntax().to_string()); + } + renderer.out +} + +impl ReplacementRenderer<'_> { + fn render_node_children(&mut self, node: &SyntaxNode) { + for node_or_token in node.children_with_tokens() { + self.render_node_or_token(&node_or_token); + } + } + + fn render_node_or_token(&mut self, node_or_token: &SyntaxElement) { + match node_or_token { + SyntaxElement::Token(token) => { + self.render_token(&token); + } + SyntaxElement::Node(child_node) => { + self.render_node(&child_node); + } + } + } + + fn render_node(&mut self, node: &SyntaxNode) { + use syntax::ast::AstNode; + if let Some(mod_path) = self.match_info.rendered_template_paths.get(&node) { + self.out.push_str(&mod_path.to_string()); + // Emit everything except for the segment's name-ref, since we already effectively + // emitted that as part of `mod_path`. 
+ if let Some(path) = ast::Path::cast(node.clone()) { + if let Some(segment) = path.segment() { + for node_or_token in segment.syntax().children_with_tokens() { + if node_or_token.kind() != SyntaxKind::NAME_REF { + self.render_node_or_token(&node_or_token); + } + } + } + } + } else { + self.render_node_children(&node); + } + } + + fn render_token(&mut self, token: &SyntaxToken) { + if let Some(placeholder) = self.rule.get_placeholder(&token) { + if let Some(placeholder_value) = + self.match_info.placeholder_values.get(&Var(placeholder.ident.to_string())) + { + let range = &placeholder_value.range.range; + let mut matched_text = + self.file_src[usize::from(range.start())..usize::from(range.end())].to_owned(); + let edit = matches_to_edit_at_offset( + &placeholder_value.inner_matches, + self.file_src, + range.start(), + self.rules, + ); + let needs_parenthesis = + self.placeholder_tokens_requiring_parenthesis.contains(token); + edit.apply(&mut matched_text); + if needs_parenthesis { + self.out.push('('); + } + self.placeholder_tokens_by_range.insert( + TextRange::new( + TextSize::of(&self.out), + TextSize::of(&self.out) + TextSize::of(&matched_text), + ), + token.clone(), + ); + self.out.push_str(&matched_text); + if needs_parenthesis { + self.out.push(')'); + } + } else { + // We validated that all placeholder references were valid before we + // started, so this shouldn't happen. + panic!( + "Internal error: replacement referenced unknown placeholder {}", + placeholder.ident + ); + } + } else { + self.out.push_str(token.text().as_str()); + } + } + + // Checks if the resulting code, when parsed doesn't split any placeholders due to different + // order of operations between the search pattern and the replacement template. If any do, then + // we rerender the template and wrap the problematic placeholders with parenthesis. 
+ fn maybe_rerender_with_extra_parenthesis(&mut self, template: &SyntaxNode) { + if let Some(node) = parse_as_kind(&self.out, template.kind()) { + self.remove_node_ranges(node); + if self.placeholder_tokens_by_range.is_empty() { + return; + } + self.placeholder_tokens_requiring_parenthesis = + self.placeholder_tokens_by_range.values().cloned().collect(); + self.out.clear(); + self.render_node(template); + } + } + + fn remove_node_ranges(&mut self, node: SyntaxNode) { + self.placeholder_tokens_by_range.remove(&node.text_range()); + for child in node.children() { + self.remove_node_ranges(child); + } + } +} + +fn parse_as_kind(code: &str, kind: SyntaxKind) -> Option { + use syntax::ast::AstNode; + if ast::Expr::can_cast(kind) { + if let Ok(expr) = ast::Expr::parse(code) { + return Some(expr.syntax().clone()); + } + } else if ast::Item::can_cast(kind) { + if let Ok(item) = ast::Item::parse(code) { + return Some(item.syntax().clone()); + } + } + None +} diff --git a/crates/ssr/src/resolving.rs b/crates/ssr/src/resolving.rs new file mode 100644 index 0000000000..b932132d5b --- /dev/null +++ b/crates/ssr/src/resolving.rs @@ -0,0 +1,301 @@ +//! This module is responsible for resolving paths within rules. + +use crate::errors::error; +use crate::{parsing, SsrError}; +use base_db::FilePosition; +use parsing::Placeholder; +use rustc_hash::FxHashMap; +use syntax::{ast, SmolStr, SyntaxKind, SyntaxNode, SyntaxToken}; +use test_utils::mark; + +pub(crate) struct ResolutionScope<'db> { + scope: hir::SemanticsScope<'db>, + node: SyntaxNode, +} + +pub(crate) struct ResolvedRule { + pub(crate) pattern: ResolvedPattern, + pub(crate) template: Option, + pub(crate) index: usize, +} + +pub(crate) struct ResolvedPattern { + pub(crate) placeholders_by_stand_in: FxHashMap, + pub(crate) node: SyntaxNode, + // Paths in `node` that we've resolved. 
+ pub(crate) resolved_paths: FxHashMap, + pub(crate) ufcs_function_calls: FxHashMap, + pub(crate) contains_self: bool, +} + +pub(crate) struct ResolvedPath { + pub(crate) resolution: hir::PathResolution, + /// The depth of the ast::Path that was resolved within the pattern. + pub(crate) depth: u32, +} + +pub(crate) struct UfcsCallInfo { + pub(crate) call_expr: ast::CallExpr, + pub(crate) function: hir::Function, + pub(crate) qualifier_type: Option, +} + +impl ResolvedRule { + pub(crate) fn new( + rule: parsing::ParsedRule, + resolution_scope: &ResolutionScope, + index: usize, + ) -> Result { + let resolver = + Resolver { resolution_scope, placeholders_by_stand_in: rule.placeholders_by_stand_in }; + let resolved_template = if let Some(template) = rule.template { + Some(resolver.resolve_pattern_tree(template)?) + } else { + None + }; + Ok(ResolvedRule { + pattern: resolver.resolve_pattern_tree(rule.pattern)?, + template: resolved_template, + index, + }) + } + + pub(crate) fn get_placeholder(&self, token: &SyntaxToken) -> Option<&Placeholder> { + if token.kind() != SyntaxKind::IDENT { + return None; + } + self.pattern.placeholders_by_stand_in.get(token.text()) + } +} + +struct Resolver<'a, 'db> { + resolution_scope: &'a ResolutionScope<'db>, + placeholders_by_stand_in: FxHashMap, +} + +impl Resolver<'_, '_> { + fn resolve_pattern_tree(&self, pattern: SyntaxNode) -> Result { + use syntax::ast::AstNode; + use syntax::{SyntaxElement, T}; + let mut resolved_paths = FxHashMap::default(); + self.resolve(pattern.clone(), 0, &mut resolved_paths)?; + let ufcs_function_calls = resolved_paths + .iter() + .filter_map(|(path_node, resolved)| { + if let Some(grandparent) = path_node.parent().and_then(|parent| parent.parent()) { + if let Some(call_expr) = ast::CallExpr::cast(grandparent.clone()) { + if let hir::PathResolution::AssocItem(hir::AssocItem::Function(function)) = + resolved.resolution + { + let qualifier_type = self.resolution_scope.qualifier_type(path_node); + return 
Some(( + grandparent, + UfcsCallInfo { call_expr, function, qualifier_type }, + )); + } + } + } + None + }) + .collect(); + let contains_self = + pattern.descendants_with_tokens().any(|node_or_token| match node_or_token { + SyntaxElement::Token(t) => t.kind() == T![self], + _ => false, + }); + Ok(ResolvedPattern { + node: pattern, + resolved_paths, + placeholders_by_stand_in: self.placeholders_by_stand_in.clone(), + ufcs_function_calls, + contains_self, + }) + } + + fn resolve( + &self, + node: SyntaxNode, + depth: u32, + resolved_paths: &mut FxHashMap, + ) -> Result<(), SsrError> { + use syntax::ast::AstNode; + if let Some(path) = ast::Path::cast(node.clone()) { + if is_self(&path) { + // Self cannot be resolved like other paths. + return Ok(()); + } + // Check if this is an appropriate place in the path to resolve. If the path is + // something like `a::B::::c` then we want to resolve `a::B`. If the path contains + // a placeholder. e.g. `a::$b::c` then we want to resolve `a`. + if !path_contains_type_arguments(path.qualifier()) + && !self.path_contains_placeholder(&path) + { + let resolution = self + .resolution_scope + .resolve_path(&path) + .ok_or_else(|| error!("Failed to resolve path `{}`", node.text()))?; + if self.ok_to_use_path_resolution(&resolution) { + resolved_paths.insert(node, ResolvedPath { resolution, depth }); + return Ok(()); + } + } + } + for node in node.children() { + self.resolve(node, depth + 1, resolved_paths)?; + } + Ok(()) + } + + /// Returns whether `path` contains a placeholder, but ignores any placeholders within type + /// arguments. 
+ fn path_contains_placeholder(&self, path: &ast::Path) -> bool { + if let Some(segment) = path.segment() { + if let Some(name_ref) = segment.name_ref() { + if self.placeholders_by_stand_in.contains_key(name_ref.text()) { + return true; + } + } + } + if let Some(qualifier) = path.qualifier() { + return self.path_contains_placeholder(&qualifier); + } + false + } + + fn ok_to_use_path_resolution(&self, resolution: &hir::PathResolution) -> bool { + match resolution { + hir::PathResolution::AssocItem(hir::AssocItem::Function(function)) => { + if function.has_self_param(self.resolution_scope.scope.db) { + // If we don't use this path resolution, then we won't be able to match method + // calls. e.g. `Foo::bar($s)` should match `x.bar()`. + true + } else { + mark::hit!(replace_associated_trait_default_function_call); + false + } + } + hir::PathResolution::AssocItem(_) => { + // Not a function. Could be a constant or an associated type. + mark::hit!(replace_associated_trait_constant); + false + } + _ => true, + } + } +} + +impl<'db> ResolutionScope<'db> { + pub(crate) fn new( + sema: &hir::Semantics<'db, ide_db::RootDatabase>, + resolve_context: FilePosition, + ) -> ResolutionScope<'db> { + use syntax::ast::AstNode; + let file = sema.parse(resolve_context.file_id); + // Find a node at the requested position, falling back to the whole file. + let node = file + .syntax() + .token_at_offset(resolve_context.offset) + .left_biased() + .map(|token| token.parent()) + .unwrap_or_else(|| file.syntax().clone()); + let node = pick_node_for_resolution(node); + let scope = sema.scope(&node); + ResolutionScope { scope, node } + } + + /// Returns the function in which SSR was invoked, if any. + pub(crate) fn current_function(&self) -> Option { + self.node.ancestors().find(|node| node.kind() == SyntaxKind::FN).map(|node| node.clone()) + } + + fn resolve_path(&self, path: &ast::Path) -> Option { + // First try resolving the whole path. 
This will work for things like + // `std::collections::HashMap`, but will fail for things like + // `std::collections::HashMap::new`. + if let Some(resolution) = self.scope.speculative_resolve(&path) { + return Some(resolution); + } + // Resolution failed, try resolving the qualifier (e.g. `std::collections::HashMap` and if + // that succeeds, then iterate through the candidates on the resolved type with the provided + // name. + let resolved_qualifier = self.scope.speculative_resolve(&path.qualifier()?)?; + if let hir::PathResolution::Def(hir::ModuleDef::Adt(adt)) = resolved_qualifier { + let name = path.segment()?.name_ref()?; + adt.ty(self.scope.db).iterate_path_candidates( + self.scope.db, + self.scope.module()?.krate(), + &self.scope.traits_in_scope(), + None, + |_ty, assoc_item| { + let item_name = assoc_item.name(self.scope.db)?; + if item_name.to_string().as_str() == name.text().as_str() { + Some(hir::PathResolution::AssocItem(assoc_item)) + } else { + None + } + }, + ) + } else { + None + } + } + + fn qualifier_type(&self, path: &SyntaxNode) -> Option { + use syntax::ast::AstNode; + if let Some(path) = ast::Path::cast(path.clone()) { + if let Some(qualifier) = path.qualifier() { + if let Some(resolved_qualifier) = self.resolve_path(&qualifier) { + if let hir::PathResolution::Def(hir::ModuleDef::Adt(adt)) = resolved_qualifier { + return Some(adt.ty(self.scope.db)); + } + } + } + } + None + } +} + +fn is_self(path: &ast::Path) -> bool { + path.segment().map(|segment| segment.self_token().is_some()).unwrap_or(false) +} + +/// Returns a suitable node for resolving paths in the current scope. If we create a scope based on +/// a statement node, then we can't resolve local variables that were defined in the current scope +/// (only in parent scopes). So we find another node, ideally a child of the statement where local +/// variable resolution is permitted. 
+fn pick_node_for_resolution(node: SyntaxNode) -> SyntaxNode { + match node.kind() { + SyntaxKind::EXPR_STMT => { + if let Some(n) = node.first_child() { + mark::hit!(cursor_after_semicolon); + return n; + } + } + SyntaxKind::LET_STMT | SyntaxKind::IDENT_PAT => { + if let Some(next) = node.next_sibling() { + return pick_node_for_resolution(next); + } + } + SyntaxKind::NAME => { + if let Some(parent) = node.parent() { + return pick_node_for_resolution(parent); + } + } + _ => {} + } + node +} + +/// Returns whether `path` or any of its qualifiers contains type arguments. +fn path_contains_type_arguments(path: Option) -> bool { + if let Some(path) = path { + if let Some(segment) = path.segment() { + if segment.generic_arg_list().is_some() { + mark::hit!(type_arguments_within_path); + return true; + } + } + return path_contains_type_arguments(path.qualifier()); + } + false +} diff --git a/crates/ssr/src/search.rs b/crates/ssr/src/search.rs new file mode 100644 index 0000000000..8509cfa4de --- /dev/null +++ b/crates/ssr/src/search.rs @@ -0,0 +1,282 @@ +//! Searching for matches. + +use crate::{ + matching, + resolving::{ResolvedPath, ResolvedPattern, ResolvedRule}, + Match, MatchFinder, +}; +use base_db::{FileId, FileRange}; +use ide_db::{ + defs::Definition, + search::{Reference, SearchScope}, +}; +use rustc_hash::FxHashSet; +use syntax::{ast, AstNode, SyntaxKind, SyntaxNode}; +use test_utils::mark; + +/// A cache for the results of find_usages. This is for when we have multiple patterns that have the +/// same path. e.g. if the pattern was `foo::Bar` that can parse as a path, an expression, a type +/// and as a pattern. In each, the usages of `foo::Bar` are the same and we'd like to avoid finding +/// them more than once. +#[derive(Default)] +pub(crate) struct UsageCache { + usages: Vec<(Definition, Vec)>, +} + +impl<'db> MatchFinder<'db> { + /// Adds all matches for `rule` to `matches_out`. 
Matches may overlap in ways that make + /// replacement impossible, so further processing is required in order to properly nest matches + /// and remove overlapping matches. This is done in the `nesting` module. + pub(crate) fn find_matches_for_rule( + &self, + rule: &ResolvedRule, + usage_cache: &mut UsageCache, + matches_out: &mut Vec, + ) { + if rule.pattern.contains_self { + // If the pattern contains `self` we restrict the scope of the search to just the + // current method. No other method can reference the same `self`. This makes the + // behavior of `self` consistent with other variables. + if let Some(current_function) = self.resolution_scope.current_function() { + self.slow_scan_node(¤t_function, rule, &None, matches_out); + } + return; + } + if pick_path_for_usages(&rule.pattern).is_none() { + self.slow_scan(rule, matches_out); + return; + } + self.find_matches_for_pattern_tree(rule, &rule.pattern, usage_cache, matches_out); + } + + fn find_matches_for_pattern_tree( + &self, + rule: &ResolvedRule, + pattern: &ResolvedPattern, + usage_cache: &mut UsageCache, + matches_out: &mut Vec, + ) { + if let Some(resolved_path) = pick_path_for_usages(pattern) { + let definition: Definition = resolved_path.resolution.clone().into(); + for reference in self.find_usages(usage_cache, definition) { + if let Some(node_to_match) = self.find_node_to_match(resolved_path, reference) { + if !is_search_permitted_ancestors(&node_to_match) { + mark::hit!(use_declaration_with_braces); + continue; + } + self.try_add_match(rule, &node_to_match, &None, matches_out); + } + } + } + } + + fn find_node_to_match( + &self, + resolved_path: &ResolvedPath, + reference: &Reference, + ) -> Option { + let file = self.sema.parse(reference.file_range.file_id); + let depth = resolved_path.depth as usize; + let offset = reference.file_range.range.start(); + if let Some(path) = + self.sema.find_node_at_offset_with_descend::(file.syntax(), offset) + { + 
self.sema.ancestors_with_macros(path.syntax().clone()).skip(depth).next() + } else if let Some(path) = + self.sema.find_node_at_offset_with_descend::(file.syntax(), offset) + { + // If the pattern contained a path and we found a reference to that path that wasn't + // itself a path, but was a method call, then we need to adjust how far up to try + // matching by how deep the path was within a CallExpr. The structure would have been + // CallExpr, PathExpr, Path - i.e. a depth offset of 2. We don't need to check if the + // path was part of a CallExpr because if it wasn't then all that will happen is we'll + // fail to match, which is the desired behavior. + const PATH_DEPTH_IN_CALL_EXPR: usize = 2; + if depth < PATH_DEPTH_IN_CALL_EXPR { + return None; + } + self.sema + .ancestors_with_macros(path.syntax().clone()) + .skip(depth - PATH_DEPTH_IN_CALL_EXPR) + .next() + } else { + None + } + } + + fn find_usages<'a>( + &self, + usage_cache: &'a mut UsageCache, + definition: Definition, + ) -> &'a [Reference] { + // Logically if a lookup succeeds we should just return it. Unfortunately returning it would + // extend the lifetime of the borrow, then we wouldn't be able to do the insertion on a + // cache miss. This is a limitation of NLL and is fixed with Polonius. For now we do two + // lookups in the case of a cache hit. + if usage_cache.find(&definition).is_none() { + let usages = definition.find_usages(&self.sema, Some(self.search_scope())); + usage_cache.usages.push((definition, usages)); + return &usage_cache.usages.last().unwrap().1; + } + usage_cache.find(&definition).unwrap() + } + + /// Returns the scope within which we want to search. We don't want un unrestricted search + /// scope, since we don't want to find references in external dependencies. + fn search_scope(&self) -> SearchScope { + // FIXME: We should ideally have a test that checks that we edit local roots and not library + // roots. 
This probably would require some changes to fixtures, since currently everything + // seems to get put into a single source root. + let mut files = Vec::new(); + self.search_files_do(|file_id| { + files.push(file_id); + }); + SearchScope::files(&files) + } + + fn slow_scan(&self, rule: &ResolvedRule, matches_out: &mut Vec) { + self.search_files_do(|file_id| { + let file = self.sema.parse(file_id); + let code = file.syntax(); + self.slow_scan_node(code, rule, &None, matches_out); + }) + } + + fn search_files_do(&self, mut callback: impl FnMut(FileId)) { + if self.restrict_ranges.is_empty() { + // Unrestricted search. + use base_db::SourceDatabaseExt; + use ide_db::symbol_index::SymbolsDatabase; + for &root in self.sema.db.local_roots().iter() { + let sr = self.sema.db.source_root(root); + for file_id in sr.iter() { + callback(file_id); + } + } + } else { + // Search is restricted, deduplicate file IDs (generally only one). + let mut files = FxHashSet::default(); + for range in &self.restrict_ranges { + if files.insert(range.file_id) { + callback(range.file_id); + } + } + } + } + + fn slow_scan_node( + &self, + code: &SyntaxNode, + rule: &ResolvedRule, + restrict_range: &Option, + matches_out: &mut Vec, + ) { + if !is_search_permitted(code) { + return; + } + self.try_add_match(rule, &code, restrict_range, matches_out); + // If we've got a macro call, we already tried matching it pre-expansion, which is the only + // way to match the whole macro, now try expanding it and matching the expansion. + if let Some(macro_call) = ast::MacroCall::cast(code.clone()) { + if let Some(expanded) = self.sema.expand(¯o_call) { + if let Some(tt) = macro_call.token_tree() { + // When matching within a macro expansion, we only want to allow matches of + // nodes that originated entirely from within the token tree of the macro call. + // i.e. we don't want to match something that came from the macro itself. 
+ self.slow_scan_node( + &expanded, + rule, + &Some(self.sema.original_range(tt.syntax())), + matches_out, + ); + } + } + } + for child in code.children() { + self.slow_scan_node(&child, rule, restrict_range, matches_out); + } + } + + fn try_add_match( + &self, + rule: &ResolvedRule, + code: &SyntaxNode, + restrict_range: &Option, + matches_out: &mut Vec, + ) { + if !self.within_range_restrictions(code) { + mark::hit!(replace_nonpath_within_selection); + return; + } + if let Ok(m) = matching::get_match(false, rule, code, restrict_range, &self.sema) { + matches_out.push(m); + } + } + + /// Returns whether `code` is within one of our range restrictions if we have any. No range + /// restrictions is considered unrestricted and always returns true. + fn within_range_restrictions(&self, code: &SyntaxNode) -> bool { + if self.restrict_ranges.is_empty() { + // There is no range restriction. + return true; + } + let node_range = self.sema.original_range(code); + for range in &self.restrict_ranges { + if range.file_id == node_range.file_id && range.range.contains_range(node_range.range) { + return true; + } + } + false + } +} + +/// Returns whether we support matching within `node` and all of its ancestors. +fn is_search_permitted_ancestors(node: &SyntaxNode) -> bool { + if let Some(parent) = node.parent() { + if !is_search_permitted_ancestors(&parent) { + return false; + } + } + is_search_permitted(node) +} + +/// Returns whether we support matching within this kind of node. +fn is_search_permitted(node: &SyntaxNode) -> bool { + // FIXME: Properly handle use declarations. At the moment, if our search pattern is `foo::bar` + // and the code is `use foo::{baz, bar}`, we'll match `bar`, since it resolves to `foo::bar`. + // However we'll then replace just the part we matched `bar`. We probably need to instead remove + // `bar` and insert a new use declaration. 
+ node.kind() != SyntaxKind::USE +} + +impl UsageCache { + fn find(&mut self, definition: &Definition) -> Option<&[Reference]> { + // We expect a very small number of cache entries (generally 1), so a linear scan should be + // fast enough and avoids the need to implement Hash for Definition. + for (d, refs) in &self.usages { + if d == definition { + return Some(refs); + } + } + None + } +} + +/// Returns a path that's suitable for path resolution. We exclude builtin types, since they aren't +/// something that we can find references to. We then somewhat arbitrarily pick the path that is the +/// longest as this is hopefully more likely to be less common, making it faster to find. +fn pick_path_for_usages(pattern: &ResolvedPattern) -> Option<&ResolvedPath> { + // FIXME: Take the scope of the resolved path into account. e.g. if there are any paths that are + // private to the current module, then we definitely would want to pick them over say a path + // from std. Possibly we should go further than this and intersect the search scopes for all + // resolved paths then search only in that scope. 
+ pattern + .resolved_paths + .iter() + .filter(|(_, p)| { + !matches!(p.resolution, hir::PathResolution::Def(hir::ModuleDef::BuiltinType(_))) + }) + .map(|(node, resolved)| (node.text().len(), resolved)) + .max_by(|(a, _), (b, _)| a.cmp(b)) + .map(|(_, resolved)| resolved) +} diff --git a/crates/ssr/src/tests.rs b/crates/ssr/src/tests.rs new file mode 100644 index 0000000000..0d0a000906 --- /dev/null +++ b/crates/ssr/src/tests.rs @@ -0,0 +1,1174 @@ +use crate::{MatchFinder, SsrRule}; +use base_db::{salsa::Durability, FileId, FilePosition, FileRange, SourceDatabaseExt}; +use expect::{expect, Expect}; +use rustc_hash::FxHashSet; +use std::sync::Arc; +use test_utils::{mark, RangeOrOffset}; + +fn parse_error_text(query: &str) -> String { + format!("{}", query.parse::().unwrap_err()) +} + +#[test] +fn parser_empty_query() { + assert_eq!(parse_error_text(""), "Parse error: Cannot find delimiter `==>>`"); +} + +#[test] +fn parser_no_delimiter() { + assert_eq!(parse_error_text("foo()"), "Parse error: Cannot find delimiter `==>>`"); +} + +#[test] +fn parser_two_delimiters() { + assert_eq!( + parse_error_text("foo() ==>> a ==>> b "), + "Parse error: More than one delimiter found" + ); +} + +#[test] +fn parser_repeated_name() { + assert_eq!( + parse_error_text("foo($a, $a) ==>>"), + "Parse error: Name `a` repeats more than once" + ); +} + +#[test] +fn parser_invalid_pattern() { + assert_eq!( + parse_error_text(" ==>> ()"), + "Parse error: Not a valid Rust expression, type, item, path or pattern" + ); +} + +#[test] +fn parser_invalid_template() { + assert_eq!( + parse_error_text("() ==>> )"), + "Parse error: Not a valid Rust expression, type, item, path or pattern" + ); +} + +#[test] +fn parser_undefined_placeholder_in_replacement() { + assert_eq!( + parse_error_text("42 ==>> $a"), + "Parse error: Replacement contains undefined placeholders: $a" + ); +} + +/// `code` may optionally contain a cursor marker `<|>`. 
If it doesn't, then the position will be +/// the start of the file. If there's a second cursor marker, then we'll return a single range. +pub(crate) fn single_file(code: &str) -> (ide_db::RootDatabase, FilePosition, Vec) { + use base_db::fixture::WithFixture; + use ide_db::symbol_index::SymbolsDatabase; + let (mut db, file_id, range_or_offset) = if code.contains(test_utils::CURSOR_MARKER) { + ide_db::RootDatabase::with_range_or_offset(code) + } else { + let (db, file_id) = ide_db::RootDatabase::with_single_file(code); + (db, file_id, RangeOrOffset::Offset(0.into())) + }; + let selections; + let position; + match range_or_offset { + RangeOrOffset::Range(range) => { + position = FilePosition { file_id, offset: range.start() }; + selections = vec![FileRange { file_id, range: range }]; + } + RangeOrOffset::Offset(offset) => { + position = FilePosition { file_id, offset }; + selections = vec![]; + } + } + let mut local_roots = FxHashSet::default(); + local_roots.insert(base_db::fixture::WORKSPACE); + db.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH); + (db, position, selections) +} + +fn assert_ssr_transform(rule: &str, input: &str, expected: Expect) { + assert_ssr_transforms(&[rule], input, expected); +} + +fn assert_ssr_transforms(rules: &[&str], input: &str, expected: Expect) { + let (db, position, selections) = single_file(input); + let mut match_finder = MatchFinder::in_context(&db, position, selections); + for rule in rules { + let rule: SsrRule = rule.parse().unwrap(); + match_finder.add_rule(rule).unwrap(); + } + let edits = match_finder.edits(); + if edits.is_empty() { + panic!("No edits were made"); + } + assert_eq!(edits[0].file_id, position.file_id); + // Note, db.file_text is not necessarily the same as `input`, since fixture parsing alters + // stuff. 
+ let mut actual = db.file_text(position.file_id).to_string(); + edits[0].edit.apply(&mut actual); + expected.assert_eq(&actual); +} + +fn print_match_debug_info(match_finder: &MatchFinder, file_id: FileId, snippet: &str) { + let debug_info = match_finder.debug_where_text_equal(file_id, snippet); + println!( + "Match debug info: {} nodes had text exactly equal to '{}'", + debug_info.len(), + snippet + ); + for (index, d) in debug_info.iter().enumerate() { + println!("Node #{}\n{:#?}\n", index, d); + } +} + +fn assert_matches(pattern: &str, code: &str, expected: &[&str]) { + let (db, position, selections) = single_file(code); + let mut match_finder = MatchFinder::in_context(&db, position, selections); + match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap(); + let matched_strings: Vec = + match_finder.matches().flattened().matches.iter().map(|m| m.matched_text()).collect(); + if matched_strings != expected && !expected.is_empty() { + print_match_debug_info(&match_finder, position.file_id, &expected[0]); + } + assert_eq!(matched_strings, expected); +} + +fn assert_no_match(pattern: &str, code: &str) { + let (db, position, selections) = single_file(code); + let mut match_finder = MatchFinder::in_context(&db, position, selections); + match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap(); + let matches = match_finder.matches().flattened().matches; + if !matches.is_empty() { + print_match_debug_info(&match_finder, position.file_id, &matches[0].matched_text()); + panic!("Got {} matches when we expected none: {:#?}", matches.len(), matches); + } +} + +fn assert_match_failure_reason(pattern: &str, code: &str, snippet: &str, expected_reason: &str) { + let (db, position, selections) = single_file(code); + let mut match_finder = MatchFinder::in_context(&db, position, selections); + match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap(); + let mut reasons = Vec::new(); + for d in match_finder.debug_where_text_equal(position.file_id, snippet) 
{ + if let Some(reason) = d.match_failure_reason() { + reasons.push(reason.to_owned()); + } + } + assert_eq!(reasons, vec![expected_reason]); +} + +#[test] +fn ssr_function_to_method() { + assert_ssr_transform( + "my_function($a, $b) ==>> ($a).my_method($b)", + "fn my_function() {} fn main() { loop { my_function( other_func(x, y), z + w) } }", + expect![["fn my_function() {} fn main() { loop { (other_func(x, y)).my_method(z + w) } }"]], + ) +} + +#[test] +fn ssr_nested_function() { + assert_ssr_transform( + "foo($a, $b, $c) ==>> bar($c, baz($a, $b))", + r#" + //- /lib.rs crate:foo + fn foo() {} + fn bar() {} + fn baz() {} + fn main { foo (x + value.method(b), x+y-z, true && false) } + "#, + expect![[r#" + fn foo() {} + fn bar() {} + fn baz() {} + fn main { bar(true && false, baz(x + value.method(b), x+y-z)) } + "#]], + ) +} + +#[test] +fn ssr_expected_spacing() { + assert_ssr_transform( + "foo($x) + bar() ==>> bar($x)", + "fn foo() {} fn bar() {} fn main() { foo(5) + bar() }", + expect![["fn foo() {} fn bar() {} fn main() { bar(5) }"]], + ); +} + +#[test] +fn ssr_with_extra_space() { + assert_ssr_transform( + "foo($x ) + bar() ==>> bar($x)", + "fn foo() {} fn bar() {} fn main() { foo( 5 ) +bar( ) }", + expect![["fn foo() {} fn bar() {} fn main() { bar(5) }"]], + ); +} + +#[test] +fn ssr_keeps_nested_comment() { + assert_ssr_transform( + "foo($x) ==>> bar($x)", + "fn foo() {} fn bar() {} fn main() { foo(other(5 /* using 5 */)) }", + expect![["fn foo() {} fn bar() {} fn main() { bar(other(5 /* using 5 */)) }"]], + ) +} + +#[test] +fn ssr_keeps_comment() { + assert_ssr_transform( + "foo($x) ==>> bar($x)", + "fn foo() {} fn bar() {} fn main() { foo(5 /* using 5 */) }", + expect![["fn foo() {} fn bar() {} fn main() { bar(5)/* using 5 */ }"]], + ) +} + +#[test] +fn ssr_struct_lit() { + assert_ssr_transform( + "Foo{a: $a, b: $b} ==>> Foo::new($a, $b)", + r#" + struct Foo() {} + impl Foo { fn new() {} } + fn main() { Foo{b:2, a:1} } + "#, + expect![[r#" + struct Foo() {} + 
impl Foo { fn new() {} } + fn main() { Foo::new(1, 2) } + "#]], + ) +} + +#[test] +fn ignores_whitespace() { + assert_matches("1+2", "fn f() -> i32 {1 + 2}", &["1 + 2"]); + assert_matches("1 + 2", "fn f() -> i32 {1+2}", &["1+2"]); +} + +#[test] +fn no_match() { + assert_no_match("1 + 3", "fn f() -> i32 {1 + 2}"); +} + +#[test] +fn match_fn_definition() { + assert_matches("fn $a($b: $t) {$c}", "fn f(a: i32) {bar()}", &["fn f(a: i32) {bar()}"]); +} + +#[test] +fn match_struct_definition() { + let code = r#" + struct Option {} + struct Bar {} + struct Foo {name: Option}"#; + assert_matches("struct $n {$f: Option}", code, &["struct Foo {name: Option}"]); +} + +#[test] +fn match_expr() { + let code = r#" + fn foo() {} + fn f() -> i32 {foo(40 + 2, 42)}"#; + assert_matches("foo($a, $b)", code, &["foo(40 + 2, 42)"]); + assert_no_match("foo($a, $b, $c)", code); + assert_no_match("foo($a)", code); +} + +#[test] +fn match_nested_method_calls() { + assert_matches( + "$a.z().z().z()", + "fn f() {h().i().j().z().z().z().d().e()}", + &["h().i().j().z().z().z()"], + ); +} + +// Make sure that our node matching semantics don't differ within macro calls. +#[test] +fn match_nested_method_calls_with_macro_call() { + assert_matches( + "$a.z().z().z()", + r#" + macro_rules! m1 { ($a:expr) => {$a}; } + fn f() {m1!(h().i().j().z().z().z().d().e())}"#, + &["h().i().j().z().z().z()"], + ); +} + +#[test] +fn match_complex_expr() { + let code = r#" + fn foo() {} fn bar() {} + fn f() -> i32 {foo(bar(40, 2), 42)}"#; + assert_matches("foo($a, $b)", code, &["foo(bar(40, 2), 42)"]); + assert_no_match("foo($a, $b, $c)", code); + assert_no_match("foo($a)", code); + assert_matches("bar($a, $b)", code, &["bar(40, 2)"]); +} + +// Trailing commas in the code should be ignored. +#[test] +fn match_with_trailing_commas() { + // Code has comma, pattern doesn't. 
+ assert_matches("foo($a, $b)", "fn foo() {} fn f() {foo(1, 2,);}", &["foo(1, 2,)"]); + assert_matches("Foo{$a, $b}", "struct Foo {} fn f() {Foo{1, 2,};}", &["Foo{1, 2,}"]); + + // Pattern has comma, code doesn't. + assert_matches("foo($a, $b,)", "fn foo() {} fn f() {foo(1, 2);}", &["foo(1, 2)"]); + assert_matches("Foo{$a, $b,}", "struct Foo {} fn f() {Foo{1, 2};}", &["Foo{1, 2}"]); +} + +#[test] +fn match_type() { + assert_matches("i32", "fn f() -> i32 {1 + 2}", &["i32"]); + assert_matches( + "Option<$a>", + "struct Option {} fn f() -> Option {42}", + &["Option"], + ); + assert_no_match( + "Option<$a>", + "struct Option {} struct Result {} fn f() -> Result {42}", + ); +} + +#[test] +fn match_struct_instantiation() { + let code = r#" + struct Foo {bar: i32, baz: i32} + fn f() {Foo {bar: 1, baz: 2}}"#; + assert_matches("Foo {bar: 1, baz: 2}", code, &["Foo {bar: 1, baz: 2}"]); + // Now with placeholders for all parts of the struct. + assert_matches("Foo {$a: $b, $c: $d}", code, &["Foo {bar: 1, baz: 2}"]); + assert_matches("Foo {}", "struct Foo {} fn f() {Foo {}}", &["Foo {}"]); +} + +#[test] +fn match_path() { + let code = r#" + mod foo { + pub fn bar() {} + } + fn f() {foo::bar(42)}"#; + assert_matches("foo::bar", code, &["foo::bar"]); + assert_matches("$a::bar", code, &["foo::bar"]); + assert_matches("foo::$b", code, &["foo::bar"]); +} + +#[test] +fn match_pattern() { + assert_matches("Some($a)", "struct Some(); fn f() {if let Some(x) = foo() {}}", &["Some(x)"]); +} + +// If our pattern has a full path, e.g. a::b::c() and the code has c(), but c resolves to +// a::b::c, then we should match. 
+#[test] +fn match_fully_qualified_fn_path() { + let code = r#" + mod a { + pub mod b { + pub fn c(_: i32) {} + } + } + use a::b::c; + fn f1() { + c(42); + } + "#; + assert_matches("a::b::c($a)", code, &["c(42)"]); +} + +#[test] +fn match_resolved_type_name() { + let code = r#" + mod m1 { + pub mod m2 { + pub trait Foo {} + } + } + mod m3 { + trait Foo {} + fn f1(f: Option<&dyn Foo>) {} + } + mod m4 { + use crate::m1::m2::Foo; + fn f1(f: Option<&dyn Foo>) {} + } + "#; + assert_matches("m1::m2::Foo<$t>", code, &["Foo"]); +} + +#[test] +fn type_arguments_within_path() { + mark::check!(type_arguments_within_path); + let code = r#" + mod foo { + pub struct Bar {t: T} + impl Bar { + pub fn baz() {} + } + } + fn f1() {foo::Bar::::baz();} + "#; + assert_no_match("foo::Bar::::baz()", code); + assert_matches("foo::Bar::::baz()", code, &["foo::Bar::::baz()"]); +} + +#[test] +fn literal_constraint() { + mark::check!(literal_constraint); + let code = r#" + enum Option { Some(T), None } + use Option::Some; + fn f1() { + let x1 = Some(42); + let x2 = Some("foo"); + let x3 = Some(x1); + let x4 = Some(40 + 2); + let x5 = Some(true); + } + "#; + assert_matches("Some(${a:kind(literal)})", code, &["Some(42)", "Some(\"foo\")", "Some(true)"]); + assert_matches("Some(${a:not(kind(literal))})", code, &["Some(x1)", "Some(40 + 2)"]); +} + +#[test] +fn match_reordered_struct_instantiation() { + assert_matches( + "Foo {aa: 1, b: 2, ccc: 3}", + "struct Foo {} fn f() {Foo {b: 2, ccc: 3, aa: 1}}", + &["Foo {b: 2, ccc: 3, aa: 1}"], + ); + assert_no_match("Foo {a: 1}", "struct Foo {} fn f() {Foo {b: 1}}"); + assert_no_match("Foo {a: 1}", "struct Foo {} fn f() {Foo {a: 2}}"); + assert_no_match("Foo {a: 1, b: 2}", "struct Foo {} fn f() {Foo {a: 1}}"); + assert_no_match("Foo {a: 1, b: 2}", "struct Foo {} fn f() {Foo {b: 2}}"); + assert_no_match("Foo {a: 1, }", "struct Foo {} fn f() {Foo {a: 1, b: 2}}"); + assert_no_match("Foo {a: 1, z: 9}", "struct Foo {} fn f() {Foo {a: 1}}"); +} + +#[test] +fn 
match_macro_invocation() { + assert_matches( + "foo!($a)", + "macro_rules! foo {() => {}} fn() {foo(foo!(foo()))}", + &["foo!(foo())"], + ); + assert_matches( + "foo!(41, $a, 43)", + "macro_rules! foo {() => {}} fn() {foo!(41, 42, 43)}", + &["foo!(41, 42, 43)"], + ); + assert_no_match("foo!(50, $a, 43)", "macro_rules! foo {() => {}} fn() {foo!(41, 42, 43}"); + assert_no_match("foo!(41, $a, 50)", "macro_rules! foo {() => {}} fn() {foo!(41, 42, 43}"); + assert_matches( + "foo!($a())", + "macro_rules! foo {() => {}} fn() {foo!(bar())}", + &["foo!(bar())"], + ); +} + +// When matching within a macro expansion, we only allow matches of nodes that originated from +// the macro call, not from the macro definition. +#[test] +fn no_match_expression_from_macro() { + assert_no_match( + "$a.clone()", + r#" + macro_rules! m1 { + () => {42.clone()} + } + fn f1() {m1!()} + "#, + ); +} + +// We definitely don't want to allow matching of an expression that part originates from the +// macro call `42` and part from the macro definition `.clone()`. +#[test] +fn no_match_split_expression() { + assert_no_match( + "$a.clone()", + r#" + macro_rules! m1 { + ($x:expr) => {$x.clone()} + } + fn f1() {m1!(42)} + "#, + ); +} + +#[test] +fn replace_function_call() { + // This test also makes sure that we ignore empty-ranges. 
+ assert_ssr_transform( + "foo() ==>> bar()", + "fn foo() {<|><|>} fn bar() {} fn f1() {foo(); foo();}", + expect![["fn foo() {} fn bar() {} fn f1() {bar(); bar();}"]], + ); +} + +#[test] +fn replace_function_call_with_placeholders() { + assert_ssr_transform( + "foo($a, $b) ==>> bar($b, $a)", + "fn foo() {} fn bar() {} fn f1() {foo(5, 42)}", + expect![["fn foo() {} fn bar() {} fn f1() {bar(42, 5)}"]], + ); +} + +#[test] +fn replace_nested_function_calls() { + assert_ssr_transform( + "foo($a) ==>> bar($a)", + "fn foo() {} fn bar() {} fn f1() {foo(foo(42))}", + expect![["fn foo() {} fn bar() {} fn f1() {bar(bar(42))}"]], + ); +} + +#[test] +fn replace_associated_function_call() { + assert_ssr_transform( + "Foo::new() ==>> Bar::new()", + r#" + struct Foo {} + impl Foo { fn new() {} } + struct Bar {} + impl Bar { fn new() {} } + fn f1() {Foo::new();} + "#, + expect![[r#" + struct Foo {} + impl Foo { fn new() {} } + struct Bar {} + impl Bar { fn new() {} } + fn f1() {Bar::new();} + "#]], + ); +} + +#[test] +fn replace_associated_trait_default_function_call() { + mark::check!(replace_associated_trait_default_function_call); + assert_ssr_transform( + "Bar2::foo() ==>> Bar2::foo2()", + r#" + trait Foo { fn foo() {} } + pub struct Bar {} + impl Foo for Bar {} + pub struct Bar2 {} + impl Foo for Bar2 {} + impl Bar2 { fn foo2() {} } + fn main() { + Bar::foo(); + Bar2::foo(); + } + "#, + expect![[r#" + trait Foo { fn foo() {} } + pub struct Bar {} + impl Foo for Bar {} + pub struct Bar2 {} + impl Foo for Bar2 {} + impl Bar2 { fn foo2() {} } + fn main() { + Bar::foo(); + Bar2::foo2(); + } + "#]], + ); +} + +#[test] +fn replace_associated_trait_constant() { + mark::check!(replace_associated_trait_constant); + assert_ssr_transform( + "Bar2::VALUE ==>> Bar2::VALUE_2222", + r#" + trait Foo { const VALUE: i32; const VALUE_2222: i32; } + pub struct Bar {} + impl Foo for Bar { const VALUE: i32 = 1; const VALUE_2222: i32 = 2; } + pub struct Bar2 {} + impl Foo for Bar2 { const VALUE: 
i32 = 1; const VALUE_2222: i32 = 2; } + impl Bar2 { fn foo2() {} } + fn main() { + Bar::VALUE; + Bar2::VALUE; + } + "#, + expect![[r#" + trait Foo { const VALUE: i32; const VALUE_2222: i32; } + pub struct Bar {} + impl Foo for Bar { const VALUE: i32 = 1; const VALUE_2222: i32 = 2; } + pub struct Bar2 {} + impl Foo for Bar2 { const VALUE: i32 = 1; const VALUE_2222: i32 = 2; } + impl Bar2 { fn foo2() {} } + fn main() { + Bar::VALUE; + Bar2::VALUE_2222; + } + "#]], + ); +} + +#[test] +fn replace_path_in_different_contexts() { + // Note the <|> inside module a::b which marks the point where the rule is interpreted. We + // replace foo with bar, but both need different path qualifiers in different contexts. In f4, + // foo is unqualified because of a use statement, however the replacement needs to be fully + // qualified. + assert_ssr_transform( + "c::foo() ==>> c::bar()", + r#" + mod a { + pub mod b {<|> + pub mod c { + pub fn foo() {} + pub fn bar() {} + fn f1() { foo() } + } + fn f2() { c::foo() } + } + fn f3() { b::c::foo() } + } + use a::b::c::foo; + fn f4() { foo() } + "#, + expect![[r#" + mod a { + pub mod b { + pub mod c { + pub fn foo() {} + pub fn bar() {} + fn f1() { bar() } + } + fn f2() { c::bar() } + } + fn f3() { b::c::bar() } + } + use a::b::c::foo; + fn f4() { a::b::c::bar() } + "#]], + ); +} + +#[test] +fn replace_associated_function_with_generics() { + assert_ssr_transform( + "c::Foo::<$a>::new() ==>> d::Bar::<$a>::default()", + r#" + mod c { + pub struct Foo {v: T} + impl Foo { pub fn new() {} } + fn f1() { + Foo::::new(); + } + } + mod d { + pub struct Bar {v: T} + impl Bar { pub fn default() {} } + fn f1() { + super::c::Foo::::new(); + } + } + "#, + expect![[r#" + mod c { + pub struct Foo {v: T} + impl Foo { pub fn new() {} } + fn f1() { + crate::d::Bar::::default(); + } + } + mod d { + pub struct Bar {v: T} + impl Bar { pub fn default() {} } + fn f1() { + Bar::::default(); + } + } + "#]], + ); +} + +#[test] +fn replace_type() { + 
assert_ssr_transform( + "Result<(), $a> ==>> Option<$a>", + "struct Result {} struct Option {} fn f1() -> Result<(), Vec> {foo()}", + expect![[ + "struct Result {} struct Option {} fn f1() -> Option> {foo()}" + ]], + ); +} + +#[test] +fn replace_macro_invocations() { + assert_ssr_transform( + "try!($a) ==>> $a?", + "macro_rules! try {() => {}} fn f1() -> Result<(), E> {bar(try!(foo()));}", + expect![["macro_rules! try {() => {}} fn f1() -> Result<(), E> {bar(foo()?);}"]], + ); + assert_ssr_transform( + "foo!($a($b)) ==>> foo($b, $a)", + "macro_rules! foo {() => {}} fn f1() {foo!(abc(def() + 2));}", + expect![["macro_rules! foo {() => {}} fn f1() {foo(def() + 2, abc);}"]], + ); +} + +#[test] +fn replace_binary_op() { + assert_ssr_transform( + "$a + $b ==>> $b + $a", + "fn f() {2 * 3 + 4 * 5}", + expect![["fn f() {4 * 5 + 2 * 3}"]], + ); + assert_ssr_transform( + "$a + $b ==>> $b + $a", + "fn f() {1 + 2 + 3 + 4}", + expect![[r#"fn f() {4 + (3 + (2 + 1))}"#]], + ); +} + +#[test] +fn match_binary_op() { + assert_matches("$a + $b", "fn f() {1 + 2 + 3 + 4}", &["1 + 2", "1 + 2 + 3", "1 + 2 + 3 + 4"]); +} + +#[test] +fn multiple_rules() { + assert_ssr_transforms( + &["$a + 1 ==>> add_one($a)", "$a + $b ==>> add($a, $b)"], + "fn add() {} fn add_one() {} fn f() -> i32 {3 + 2 + 1}", + expect![["fn add() {} fn add_one() {} fn f() -> i32 {add_one(add(3, 2))}"]], + ) +} + +#[test] +fn multiple_rules_with_nested_matches() { + assert_ssr_transforms( + &["foo1($a) ==>> bar1($a)", "foo2($a) ==>> bar2($a)"], + r#" + fn foo1() {} fn foo2() {} fn bar1() {} fn bar2() {} + fn f() {foo1(foo2(foo1(foo2(foo1(42)))))} + "#, + expect![[r#" + fn foo1() {} fn foo2() {} fn bar1() {} fn bar2() {} + fn f() {bar1(bar2(bar1(bar2(bar1(42)))))} + "#]], + ) +} + +#[test] +fn match_within_macro_invocation() { + let code = r#" + macro_rules! 
foo { + ($a:stmt; $b:expr) => { + $b + }; + } + struct A {} + impl A { + fn bar() {} + } + fn f1() { + let aaa = A {}; + foo!(macro_ignores_this(); aaa.bar()); + } + "#; + assert_matches("$a.bar()", code, &["aaa.bar()"]); +} + +#[test] +fn replace_within_macro_expansion() { + assert_ssr_transform( + "$a.foo() ==>> bar($a)", + r#" + macro_rules! macro1 { + ($a:expr) => {$a} + } + fn bar() {} + fn f() {macro1!(5.x().foo().o2())} + "#, + expect![[r#" + macro_rules! macro1 { + ($a:expr) => {$a} + } + fn bar() {} + fn f() {macro1!(bar(5.x()).o2())} + "#]], + ) +} + +#[test] +fn replace_outside_and_within_macro_expansion() { + assert_ssr_transform( + "foo($a) ==>> bar($a)", + r#" + fn foo() {} fn bar() {} + macro_rules! macro1 { + ($a:expr) => {$a} + } + fn f() {foo(foo(macro1!(foo(foo(42)))))} + "#, + expect![[r#" + fn foo() {} fn bar() {} + macro_rules! macro1 { + ($a:expr) => {$a} + } + fn f() {bar(bar(macro1!(bar(bar(42)))))} + "#]], + ) +} + +#[test] +fn preserves_whitespace_within_macro_expansion() { + assert_ssr_transform( + "$a + $b ==>> $b - $a", + r#" + macro_rules! macro1 { + ($a:expr) => {$a} + } + fn f() {macro1!(1 * 2 + 3 + 4} + "#, + expect![[r#" + macro_rules! macro1 { + ($a:expr) => {$a} + } + fn f() {macro1!(4 - (3 - 1 * 2)} + "#]], + ) +} + +#[test] +fn add_parenthesis_when_necessary() { + assert_ssr_transform( + "foo($a) ==>> $a.to_string()", + r#" + fn foo(_: i32) {} + fn bar3(v: i32) { + foo(1 + 2); + foo(-v); + } + "#, + expect![[r#" + fn foo(_: i32) {} + fn bar3(v: i32) { + (1 + 2).to_string(); + (-v).to_string(); + } + "#]], + ) +} + +#[test] +fn match_failure_reasons() { + let code = r#" + fn bar() {} + macro_rules! 
foo { + ($a:expr) => { + 1 + $a + 2 + }; + } + fn f1() { + bar(1, 2); + foo!(5 + 43.to_string() + 5); + } + "#; + assert_match_failure_reason( + "bar($a, 3)", + code, + "bar(1, 2)", + r#"Pattern wanted token '3' (INT_NUMBER), but code had token '2' (INT_NUMBER)"#, + ); + assert_match_failure_reason( + "42.to_string()", + code, + "43.to_string()", + r#"Pattern wanted token '42' (INT_NUMBER), but code had token '43' (INT_NUMBER)"#, + ); +} + +#[test] +fn overlapping_possible_matches() { + // There are three possible matches here, however the middle one, `foo(foo(foo(42)))` shouldn't + // match because it overlaps with the outer match. The inner match is permitted since it's is + // contained entirely within the placeholder of the outer match. + assert_matches( + "foo(foo($a))", + "fn foo() {} fn main() {foo(foo(foo(foo(42))))}", + &["foo(foo(42))", "foo(foo(foo(foo(42))))"], + ); +} + +#[test] +fn use_declaration_with_braces() { + // It would be OK for a path rule to match and alter a use declaration. We shouldn't mess it up + // though. In particular, we must not change `use foo::{baz, bar}` to `use foo::{baz, + // foo2::bar2}`. 
+ mark::check!(use_declaration_with_braces); + assert_ssr_transform( + "foo::bar ==>> foo2::bar2", + r#" + mod foo { pub fn bar() {} pub fn baz() {} } + mod foo2 { pub fn bar2() {} } + use foo::{baz, bar}; + fn main() { bar() } + "#, + expect![[" + mod foo { pub fn bar() {} pub fn baz() {} } + mod foo2 { pub fn bar2() {} } + use foo::{baz, bar}; + fn main() { foo2::bar2() } + "]], + ) +} + +#[test] +fn ufcs_matches_method_call() { + let code = r#" + struct Foo {} + impl Foo { + fn new(_: i32) -> Foo { Foo {} } + fn do_stuff(&self, _: i32) {} + } + struct Bar {} + impl Bar { + fn new(_: i32) -> Bar { Bar {} } + fn do_stuff(&self, v: i32) {} + } + fn main() { + let b = Bar {}; + let f = Foo {}; + b.do_stuff(1); + f.do_stuff(2); + Foo::new(4).do_stuff(3); + // Too many / too few args - should never match + f.do_stuff(2, 10); + f.do_stuff(); + } + "#; + assert_matches("Foo::do_stuff($a, $b)", code, &["f.do_stuff(2)", "Foo::new(4).do_stuff(3)"]); + // The arguments needs special handling in the case of a function call matching a method call + // and the first argument is different. 
+ assert_matches("Foo::do_stuff($a, 2)", code, &["f.do_stuff(2)"]); + assert_matches("Foo::do_stuff(Foo::new(4), $b)", code, &["Foo::new(4).do_stuff(3)"]); + + assert_ssr_transform( + "Foo::do_stuff(Foo::new($a), $b) ==>> Bar::new($b).do_stuff($a)", + code, + expect![[r#" + struct Foo {} + impl Foo { + fn new(_: i32) -> Foo { Foo {} } + fn do_stuff(&self, _: i32) {} + } + struct Bar {} + impl Bar { + fn new(_: i32) -> Bar { Bar {} } + fn do_stuff(&self, v: i32) {} + } + fn main() { + let b = Bar {}; + let f = Foo {}; + b.do_stuff(1); + f.do_stuff(2); + Bar::new(3).do_stuff(4); + // Too many / too few args - should never match + f.do_stuff(2, 10); + f.do_stuff(); + } + "#]], + ); +} + +#[test] +fn pattern_is_a_single_segment_path() { + mark::check!(pattern_is_a_single_segment_path); + // The first function should not be altered because the `foo` in scope at the cursor position is + // a different `foo`. This case is special because "foo" can be parsed as a pattern (IDENT_PAT -> + // NAME -> IDENT), which contains no path. If we're not careful we'll end up matching the `foo` + // in `let foo` from the first function. Whether we should match the `let foo` in the second + // function is less clear. At the moment, we don't. Doing so sounds like a rename operation, + // which isn't really what SSR is for, especially since the replacement `bar` must be able to be + // resolved, which means if we rename `foo` we'll get a name collision. + assert_ssr_transform( + "foo ==>> bar", + r#" + fn f1() -> i32 { + let foo = 1; + let bar = 2; + foo + } + fn f1() -> i32 { + let foo = 1; + let bar = 2; + foo<|> + } + "#, + expect![[r#" + fn f1() -> i32 { + let foo = 1; + let bar = 2; + foo + } + fn f1() -> i32 { + let foo = 1; + let bar = 2; + bar + } + "#]], + ); +} + +#[test] +fn replace_local_variable_reference() { + // The pattern references a local variable `foo` in the block containing the cursor. 
We should + // only replace references to this variable `foo`, not other variables that just happen to have + // the same name. + mark::check!(cursor_after_semicolon); + assert_ssr_transform( + "foo + $a ==>> $a - foo", + r#" + fn bar1() -> i32 { + let mut res = 0; + let foo = 5; + res += foo + 1; + let foo = 10; + res += foo + 2;<|> + res += foo + 3; + let foo = 15; + res += foo + 4; + res + } + "#, + expect![[r#" + fn bar1() -> i32 { + let mut res = 0; + let foo = 5; + res += foo + 1; + let foo = 10; + res += 2 - foo; + res += 3 - foo; + let foo = 15; + res += foo + 4; + res + } + "#]], + ) +} + +#[test] +fn replace_path_within_selection() { + assert_ssr_transform( + "foo ==>> bar", + r#" + fn main() { + let foo = 41; + let bar = 42; + do_stuff(foo); + do_stuff(foo);<|> + do_stuff(foo); + do_stuff(foo);<|> + do_stuff(foo); + }"#, + expect![[r#" + fn main() { + let foo = 41; + let bar = 42; + do_stuff(foo); + do_stuff(foo); + do_stuff(bar); + do_stuff(bar); + do_stuff(foo); + }"#]], + ); +} + +#[test] +fn replace_nonpath_within_selection() { + mark::check!(replace_nonpath_within_selection); + assert_ssr_transform( + "$a + $b ==>> $b * $a", + r#" + fn main() { + let v = 1 + 2;<|> + let v2 = 3 + 3; + let v3 = 4 + 5;<|> + let v4 = 6 + 7; + }"#, + expect![[r#" + fn main() { + let v = 1 + 2; + let v2 = 3 * 3; + let v3 = 5 * 4; + let v4 = 6 + 7; + }"#]], + ); +} + +#[test] +fn replace_self() { + // `foo(self)` occurs twice in the code, however only the first occurrence is the `self` that's + // in scope where the rule is invoked. 
+ assert_ssr_transform( + "foo(self) ==>> bar(self)", + r#" + struct S1 {} + fn foo(_: &S1) {} + fn bar(_: &S1) {} + impl S1 { + fn f1(&self) { + foo(self)<|> + } + fn f2(&self) { + foo(self) + } + } + "#, + expect![[r#" + struct S1 {} + fn foo(_: &S1) {} + fn bar(_: &S1) {} + impl S1 { + fn f1(&self) { + bar(self) + } + fn f2(&self) { + foo(self) + } + } + "#]], + ); +} + +#[test] +fn match_trait_method_call() { + // `Bar::foo` and `Bar2::foo` resolve to the same function. Make sure we only match if the type + // matches what's in the pattern. Also checks that we handle autoderef. + let code = r#" + pub struct Bar {} + pub struct Bar2 {} + pub trait Foo { + fn foo(&self, _: i32) {} + } + impl Foo for Bar {} + impl Foo for Bar2 {} + fn main() { + let v1 = Bar {}; + let v2 = Bar2 {}; + let v1_ref = &v1; + let v2_ref = &v2; + v1.foo(1); + v2.foo(2); + Bar::foo(&v1, 3); + Bar2::foo(&v2, 4); + v1_ref.foo(5); + v2_ref.foo(6); + } + "#; + assert_matches("Bar::foo($a, $b)", code, &["v1.foo(1)", "Bar::foo(&v1, 3)", "v1_ref.foo(5)"]); + assert_matches("Bar2::foo($a, $b)", code, &["v2.foo(2)", "Bar2::foo(&v2, 4)", "v2_ref.foo(6)"]); +} diff --git a/crates/stdx/Cargo.toml b/crates/stdx/Cargo.toml index 4c0b858617..b186b46f29 100644 --- a/crates/stdx/Cargo.toml +++ b/crates/stdx/Cargo.toml @@ -1,9 +1,9 @@ [package] name = "stdx" -version = "0.1.0" +version = "0.0.0" +license = "MIT OR Apache-2.0" authors = ["rust-analyzer developers"] edition = "2018" -license = "MIT OR Apache-2.0" [lib] doctest = false diff --git a/crates/stdx/src/lib.rs b/crates/stdx/src/lib.rs index b65875c96e..3c5027fe57 100644 --- a/crates/stdx/src/lib.rs +++ b/crates/stdx/src/lib.rs @@ -1,5 +1,5 @@ //! Missing batteries for standard libraries. -use std::{cell::Cell, fmt, time::Instant}; +use std::time::Instant; mod macros; @@ -8,69 +8,6 @@ pub fn is_ci() -> bool { option_env!("CI").is_some() } -pub trait SepBy: Sized { - /// Returns an `impl fmt::Display`, which joins elements via a separator. 
- fn sep_by<'a>(self, sep: &'a str) -> SepByBuilder<'a, Self>; -} - -impl SepBy for I -where - I: Iterator, - I::Item: fmt::Display, -{ - fn sep_by<'a>(self, sep: &'a str) -> SepByBuilder<'a, Self> { - SepByBuilder::new(sep, self) - } -} - -pub struct SepByBuilder<'a, I> { - sep: &'a str, - prefix: &'a str, - suffix: &'a str, - iter: Cell>, -} - -impl<'a, I> SepByBuilder<'a, I> { - fn new(sep: &'a str, iter: I) -> SepByBuilder<'a, I> { - SepByBuilder { sep, prefix: "", suffix: "", iter: Cell::new(Some(iter)) } - } - - pub fn prefix(mut self, prefix: &'a str) -> Self { - self.prefix = prefix; - self - } - - pub fn suffix(mut self, suffix: &'a str) -> Self { - self.suffix = suffix; - self - } - - /// Set both suffix and prefix. - pub fn surround_with(self, prefix: &'a str, suffix: &'a str) -> Self { - self.prefix(prefix).suffix(suffix) - } -} - -impl fmt::Display for SepByBuilder<'_, I> -where - I: Iterator, - I::Item: fmt::Display, -{ - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(self.prefix)?; - let mut first = true; - for item in self.iter.take().unwrap() { - if !first { - f.write_str(self.sep)?; - } - first = false; - fmt::Display::fmt(&item, f)?; - } - f.write_str(self.suffix)?; - Ok(()) - } -} - #[must_use] pub fn timeit(label: &'static str) -> impl Drop { struct Guard { diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml new file mode 100644 index 0000000000..47e351f9d1 --- /dev/null +++ b/crates/syntax/Cargo.toml @@ -0,0 +1,35 @@ +[package] +name = "syntax" +version = "0.0.0" +description = "Comment and whitespace preserving parser for the Rust language" +license = "MIT OR Apache-2.0" +repository = "https://github.com/rust-analyzer/rust-analyzer" +authors = ["rust-analyzer developers"] +edition = "2018" + +[lib] +doctest = false + +[dependencies] +itertools = "0.9.0" +rowan = "0.10.0" +rustc_lexer = { version = "671.0.0", package = "rustc-ap-rustc_lexer" } +rustc-hash = "1.1.0" +arrayvec = "0.5.1" +once_cell = "1.3.1" 
+# This crate transitively depends on `smol_str` via `rowan`. +# ideally, `serde` should be enabled by `rust-analyzer`, but we enable it here +# to reduce number of compilations +smol_str = { version = "0.1.15", features = ["serde"] } +serde = { version = "1.0.106", features = ["derive"] } + +stdx = { path = "../stdx" } +text_edit = { path = "../text_edit" } +parser = { path = "../parser" } + +[dev-dependencies] +walkdir = "2.3.1" +rayon = "1" + +test_utils = { path = "../test_utils" } +expect = { path = "../expect" } diff --git a/crates/ra_syntax/fuzz/.gitignore b/crates/syntax/fuzz/.gitignore similarity index 100% rename from crates/ra_syntax/fuzz/.gitignore rename to crates/syntax/fuzz/.gitignore diff --git a/crates/syntax/fuzz/Cargo.toml b/crates/syntax/fuzz/Cargo.toml new file mode 100644 index 0000000000..32c40d1b95 --- /dev/null +++ b/crates/syntax/fuzz/Cargo.toml @@ -0,0 +1,27 @@ + +[package] +name = "syntax-fuzz" +version = "0.0.1" +authors = ["rust-analyzer developers"] +publish = false +edition = "2018" + +[package.metadata] +cargo-fuzz = true + +[dependencies] +syntax = { path = ".." } +text_edit = { path = "../../text_edit" } +libfuzzer-sys = { git = "https://github.com/rust-fuzz/libfuzzer-sys.git" } + +# Prevent this from interfering with workspaces +[workspace] +members = ["."] + +[[bin]] +name = "parser" +path = "fuzz_targets/parser.rs" + +[[bin]] +name = "reparse" +path = "fuzz_targets/reparse.rs" diff --git a/crates/syntax/fuzz/fuzz_targets/parser.rs b/crates/syntax/fuzz/fuzz_targets/parser.rs new file mode 100644 index 0000000000..386768343b --- /dev/null +++ b/crates/syntax/fuzz/fuzz_targets/parser.rs @@ -0,0 +1,11 @@ +//! 
FIXME: write short doc here + +#![no_main] +use libfuzzer_sys::fuzz_target; +use syntax::fuzz::check_parser; + +fuzz_target!(|data: &[u8]| { + if let Ok(text) = std::str::from_utf8(data) { + check_parser(text) + } +}); diff --git a/crates/syntax/fuzz/fuzz_targets/reparse.rs b/crates/syntax/fuzz/fuzz_targets/reparse.rs new file mode 100644 index 0000000000..5ac99fdafd --- /dev/null +++ b/crates/syntax/fuzz/fuzz_targets/reparse.rs @@ -0,0 +1,11 @@ +//! FIXME: write short doc here + +#![no_main] +use libfuzzer_sys::fuzz_target; +use syntax::fuzz::CheckReparse; + +fuzz_target!(|data: &[u8]| { + if let Some(check) = CheckReparse::from_data(data) { + check.run(); + } +}); diff --git a/crates/syntax/src/algo.rs b/crates/syntax/src/algo.rs new file mode 100644 index 0000000000..6254b38ba1 --- /dev/null +++ b/crates/syntax/src/algo.rs @@ -0,0 +1,406 @@ +//! FIXME: write short doc here + +use std::{ + fmt, + ops::{self, RangeInclusive}, +}; + +use itertools::Itertools; +use rustc_hash::FxHashMap; +use text_edit::TextEditBuilder; + +use crate::{ + AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxNodePtr, + SyntaxToken, TextRange, TextSize, +}; + +/// Returns ancestors of the node at the offset, sorted by length. This should +/// do the right thing at an edge, e.g. when searching for expressions at `{ +/// <|>foo }` we will get the name reference instead of the whole block, which +/// we would get if we just did `find_token_at_offset(...).flat_map(|t| +/// t.parent().ancestors())`. +pub fn ancestors_at_offset( + node: &SyntaxNode, + offset: TextSize, +) -> impl Iterator { + node.token_at_offset(offset) + .map(|token| token.parent().ancestors()) + .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len()) +} + +/// Finds a node of specific Ast type at offset. 
Note that this is slightly +/// imprecise: if the cursor is strictly between two nodes of the desired type, +/// as in +/// +/// ```no-run +/// struct Foo {}|struct Bar; +/// ``` +/// +/// then the shorter node will be silently preferred. +pub fn find_node_at_offset(syntax: &SyntaxNode, offset: TextSize) -> Option { + ancestors_at_offset(syntax, offset).find_map(N::cast) +} + +pub fn find_node_at_range(syntax: &SyntaxNode, range: TextRange) -> Option { + find_covering_element(syntax, range).ancestors().find_map(N::cast) +} + +/// Skip to next non `trivia` token +pub fn skip_trivia_token(mut token: SyntaxToken, direction: Direction) -> Option { + while token.kind().is_trivia() { + token = match direction { + Direction::Next => token.next_token()?, + Direction::Prev => token.prev_token()?, + } + } + Some(token) +} + +/// Finds the first sibling in the given direction which is not `trivia` +pub fn non_trivia_sibling(element: SyntaxElement, direction: Direction) -> Option { + return match element { + NodeOrToken::Node(node) => node.siblings_with_tokens(direction).skip(1).find(not_trivia), + NodeOrToken::Token(token) => token.siblings_with_tokens(direction).skip(1).find(not_trivia), + }; + + fn not_trivia(element: &SyntaxElement) -> bool { + match element { + NodeOrToken::Node(_) => true, + NodeOrToken::Token(token) => !token.kind().is_trivia(), + } + } +} + +pub fn find_covering_element(root: &SyntaxNode, range: TextRange) -> SyntaxElement { + root.covering_element(range) +} + +pub fn least_common_ancestor(u: &SyntaxNode, v: &SyntaxNode) -> Option { + if u == v { + return Some(u.clone()); + } + + let u_depth = u.ancestors().count(); + let v_depth = v.ancestors().count(); + let keep = u_depth.min(v_depth); + + let u_candidates = u.ancestors().skip(u_depth - keep); + let v_canidates = v.ancestors().skip(v_depth - keep); + let (res, _) = u_candidates.zip(v_canidates).find(|(x, y)| x == y)?; + Some(res) +} + +pub fn neighbor(me: &T, direction: Direction) -> Option { + 
me.syntax().siblings(direction).skip(1).find_map(T::cast) +} + +pub fn has_errors(node: &SyntaxNode) -> bool { + node.children().any(|it| it.kind() == SyntaxKind::ERROR) +} + +#[derive(Debug, PartialEq, Eq, Clone, Copy)] +pub enum InsertPosition { + First, + Last, + Before(T), + After(T), +} + +pub struct TreeDiff { + replacements: FxHashMap, +} + +impl TreeDiff { + pub fn into_text_edit(&self, builder: &mut TextEditBuilder) { + for (from, to) in self.replacements.iter() { + builder.replace(from.text_range(), to.to_string()) + } + } + + pub fn is_empty(&self) -> bool { + self.replacements.is_empty() + } +} + +/// Finds minimal the diff, which, applied to `from`, will result in `to`. +/// +/// Specifically, returns a map whose keys are descendants of `from` and values +/// are descendants of `to`, such that `replace_descendants(from, map) == to`. +/// +/// A trivial solution is a singleton map `{ from: to }`, but this function +/// tries to find a more fine-grained diff. +pub fn diff(from: &SyntaxNode, to: &SyntaxNode) -> TreeDiff { + let mut buf = FxHashMap::default(); + // FIXME: this is both horrible inefficient and gives larger than + // necessary diff. I bet there's a cool algorithm to diff trees properly. 
+ go(&mut buf, from.clone().into(), to.clone().into()); + return TreeDiff { replacements: buf }; + + fn go( + buf: &mut FxHashMap, + lhs: SyntaxElement, + rhs: SyntaxElement, + ) { + if lhs.kind() == rhs.kind() + && lhs.text_range().len() == rhs.text_range().len() + && match (&lhs, &rhs) { + (NodeOrToken::Node(lhs), NodeOrToken::Node(rhs)) => { + lhs.green() == rhs.green() || lhs.text() == rhs.text() + } + (NodeOrToken::Token(lhs), NodeOrToken::Token(rhs)) => lhs.text() == rhs.text(), + _ => false, + } + { + return; + } + if let (Some(lhs), Some(rhs)) = (lhs.as_node(), rhs.as_node()) { + if lhs.children_with_tokens().count() == rhs.children_with_tokens().count() { + for (lhs, rhs) in lhs.children_with_tokens().zip(rhs.children_with_tokens()) { + go(buf, lhs, rhs) + } + return; + } + } + buf.insert(lhs, rhs); + } +} + +/// Adds specified children (tokens or nodes) to the current node at the +/// specific position. +/// +/// This is a type-unsafe low-level editing API, if you need to use it, +/// prefer to create a type-safe abstraction on top of it instead. 
+pub fn insert_children( + parent: &SyntaxNode, + position: InsertPosition, + to_insert: impl IntoIterator, +) -> SyntaxNode { + let mut to_insert = to_insert.into_iter(); + _insert_children(parent, position, &mut to_insert) +} + +fn _insert_children( + parent: &SyntaxNode, + position: InsertPosition, + to_insert: &mut dyn Iterator, +) -> SyntaxNode { + let mut delta = TextSize::default(); + let to_insert = to_insert.map(|element| { + delta += element.text_range().len(); + to_green_element(element) + }); + + let mut old_children = parent.green().children().map(|it| match it { + NodeOrToken::Token(it) => NodeOrToken::Token(it.clone()), + NodeOrToken::Node(it) => NodeOrToken::Node(it.clone()), + }); + + let new_children = match &position { + InsertPosition::First => to_insert.chain(old_children).collect::>(), + InsertPosition::Last => old_children.chain(to_insert).collect::>(), + InsertPosition::Before(anchor) | InsertPosition::After(anchor) => { + let take_anchor = if let InsertPosition::After(_) = position { 1 } else { 0 }; + let split_at = position_of_child(parent, anchor.clone()) + take_anchor; + let before = old_children.by_ref().take(split_at).collect::>(); + before.into_iter().chain(to_insert).chain(old_children).collect::>() + } + }; + + with_children(parent, new_children) +} + +/// Replaces all nodes in `to_delete` with nodes from `to_insert` +/// +/// This is a type-unsafe low-level editing API, if you need to use it, +/// prefer to create a type-safe abstraction on top of it instead. 
+pub fn replace_children( + parent: &SyntaxNode, + to_delete: RangeInclusive, + to_insert: impl IntoIterator, +) -> SyntaxNode { + let mut to_insert = to_insert.into_iter(); + _replace_children(parent, to_delete, &mut to_insert) +} + +fn _replace_children( + parent: &SyntaxNode, + to_delete: RangeInclusive, + to_insert: &mut dyn Iterator, +) -> SyntaxNode { + let start = position_of_child(parent, to_delete.start().clone()); + let end = position_of_child(parent, to_delete.end().clone()); + let mut old_children = parent.green().children().map(|it| match it { + NodeOrToken::Token(it) => NodeOrToken::Token(it.clone()), + NodeOrToken::Node(it) => NodeOrToken::Node(it.clone()), + }); + + let before = old_children.by_ref().take(start).collect::>(); + let new_children = before + .into_iter() + .chain(to_insert.map(to_green_element)) + .chain(old_children.skip(end + 1 - start)) + .collect::>(); + with_children(parent, new_children) +} + +#[derive(Default)] +pub struct SyntaxRewriter<'a> { + f: Option Option + 'a>>, + //FIXME: add debug_assertions that all elements are in fact from the same file. 
+ replacements: FxHashMap, +} + +impl fmt::Debug for SyntaxRewriter<'_> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("SyntaxRewriter").field("replacements", &self.replacements).finish() + } +} + +impl<'a> SyntaxRewriter<'a> { + pub fn from_fn(f: impl Fn(&SyntaxElement) -> Option + 'a) -> SyntaxRewriter<'a> { + SyntaxRewriter { f: Some(Box::new(f)), replacements: FxHashMap::default() } + } + pub fn delete>(&mut self, what: &T) { + let what = what.clone().into(); + let replacement = Replacement::Delete; + self.replacements.insert(what, replacement); + } + pub fn replace>(&mut self, what: &T, with: &T) { + let what = what.clone().into(); + let replacement = Replacement::Single(with.clone().into()); + self.replacements.insert(what, replacement); + } + pub fn replace_with_many>( + &mut self, + what: &T, + with: Vec, + ) { + let what = what.clone().into(); + let replacement = Replacement::Many(with); + self.replacements.insert(what, replacement); + } + pub fn replace_ast(&mut self, what: &T, with: &T) { + self.replace(what.syntax(), with.syntax()) + } + + pub fn rewrite(&self, node: &SyntaxNode) -> SyntaxNode { + if self.f.is_none() && self.replacements.is_empty() { + return node.clone(); + } + self.rewrite_children(node) + } + + pub fn rewrite_ast(self, node: &N) -> N { + N::cast(self.rewrite(node.syntax())).unwrap() + } + + /// Returns a node that encompasses all replacements to be done by this rewriter. + /// + /// Passing the returned node to `rewrite` will apply all replacements queued up in `self`. + /// + /// Returns `None` when there are no replacements. + pub fn rewrite_root(&self) -> Option { + assert!(self.f.is_none()); + self.replacements + .keys() + .map(|element| match element { + SyntaxElement::Node(it) => it.clone(), + SyntaxElement::Token(it) => it.parent(), + }) + // If we only have one replacement, we must return its parent node, since `rewrite` does + // not replace the node passed to it. 
+ .map(|it| it.parent().unwrap_or(it)) + .fold1(|a, b| least_common_ancestor(&a, &b).unwrap()) + } + + fn replacement(&self, element: &SyntaxElement) -> Option { + if let Some(f) = &self.f { + assert!(self.replacements.is_empty()); + return f(element).map(Replacement::Single); + } + self.replacements.get(element).cloned() + } + + fn rewrite_children(&self, node: &SyntaxNode) -> SyntaxNode { + // FIXME: this could be made much faster. + let mut new_children = Vec::new(); + for child in node.children_with_tokens() { + self.rewrite_self(&mut new_children, &child); + } + with_children(node, new_children) + } + + fn rewrite_self( + &self, + acc: &mut Vec>, + element: &SyntaxElement, + ) { + if let Some(replacement) = self.replacement(&element) { + match replacement { + Replacement::Single(NodeOrToken::Node(it)) => { + acc.push(NodeOrToken::Node(it.green().clone())) + } + Replacement::Single(NodeOrToken::Token(it)) => { + acc.push(NodeOrToken::Token(it.green().clone())) + } + Replacement::Many(replacements) => { + acc.extend(replacements.iter().map(|it| match it { + NodeOrToken::Node(it) => NodeOrToken::Node(it.green().clone()), + NodeOrToken::Token(it) => NodeOrToken::Token(it.green().clone()), + })) + } + Replacement::Delete => (), + }; + return; + } + let res = match element { + NodeOrToken::Token(it) => NodeOrToken::Token(it.green().clone()), + NodeOrToken::Node(it) => NodeOrToken::Node(self.rewrite_children(it).green().clone()), + }; + acc.push(res) + } +} + +impl ops::AddAssign for SyntaxRewriter<'_> { + fn add_assign(&mut self, rhs: SyntaxRewriter) { + assert!(rhs.f.is_none()); + self.replacements.extend(rhs.replacements) + } +} + +#[derive(Clone, Debug)] +enum Replacement { + Delete, + Single(SyntaxElement), + Many(Vec), +} + +fn with_children( + parent: &SyntaxNode, + new_children: Vec>, +) -> SyntaxNode { + let len = new_children.iter().map(|it| it.text_len()).sum::(); + let new_node = rowan::GreenNode::new(rowan::SyntaxKind(parent.kind() as u16), 
new_children); + let new_root_node = parent.replace_with(new_node); + let new_root_node = SyntaxNode::new_root(new_root_node); + + // FIXME: use a more elegant way to re-fetch the node (#1185), make + // `range` private afterwards + let mut ptr = SyntaxNodePtr::new(parent); + ptr.range = TextRange::at(ptr.range.start(), len); + ptr.to_node(&new_root_node) +} + +fn position_of_child(parent: &SyntaxNode, child: SyntaxElement) -> usize { + parent + .children_with_tokens() + .position(|it| it == child) + .expect("element is not a child of current element") +} + +fn to_green_element(element: SyntaxElement) -> NodeOrToken { + match element { + NodeOrToken::Node(it) => it.green().clone().into(), + NodeOrToken::Token(it) => it.green().clone().into(), + } +} diff --git a/crates/ra_syntax/src/ast.rs b/crates/syntax/src/ast.rs similarity index 100% rename from crates/ra_syntax/src/ast.rs rename to crates/syntax/src/ast.rs diff --git a/crates/syntax/src/ast/edit.rs b/crates/syntax/src/ast/edit.rs new file mode 100644 index 0000000000..190746e09e --- /dev/null +++ b/crates/syntax/src/ast/edit.rs @@ -0,0 +1,650 @@ +//! This module contains functions for editing syntax trees. As the trees are +//! immutable, all function here return a fresh copy of the tree, instead of +//! doing an in-place modification. 
+use std::{ + fmt, iter, + ops::{self, RangeInclusive}, +}; + +use arrayvec::ArrayVec; + +use crate::{ + algo::{self, neighbor, SyntaxRewriter}, + ast::{ + self, + make::{self, tokens}, + AstNode, TypeBoundsOwner, + }, + AstToken, Direction, InsertPosition, SmolStr, SyntaxElement, SyntaxKind, + SyntaxKind::{ATTR, COMMENT, WHITESPACE}, + SyntaxNode, SyntaxToken, T, +}; + +impl ast::BinExpr { + #[must_use] + pub fn replace_op(&self, op: SyntaxKind) -> Option { + let op_node: SyntaxElement = self.op_details()?.0.into(); + let to_insert: Option = Some(make::token(op).into()); + Some(self.replace_children(single_node(op_node), to_insert)) + } +} + +impl ast::Fn { + #[must_use] + pub fn with_body(&self, body: ast::BlockExpr) -> ast::Fn { + let mut to_insert: ArrayVec<[SyntaxElement; 2]> = ArrayVec::new(); + let old_body_or_semi: SyntaxElement = if let Some(old_body) = self.body() { + old_body.syntax().clone().into() + } else if let Some(semi) = self.semicolon_token() { + to_insert.push(make::tokens::single_space().into()); + semi.into() + } else { + to_insert.push(make::tokens::single_space().into()); + to_insert.push(body.syntax().clone().into()); + return self.insert_children(InsertPosition::Last, to_insert); + }; + to_insert.push(body.syntax().clone().into()); + self.replace_children(single_node(old_body_or_semi), to_insert) + } +} + +fn make_multiline(node: N) -> N +where + N: AstNode + Clone, +{ + let l_curly = match node.syntax().children_with_tokens().find(|it| it.kind() == T!['{']) { + Some(it) => it, + None => return node, + }; + let sibling = match l_curly.next_sibling_or_token() { + Some(it) => it, + None => return node, + }; + let existing_ws = match sibling.as_token() { + None => None, + Some(tok) if tok.kind() != WHITESPACE => None, + Some(ws) => { + if ws.text().contains('\n') { + return node; + } + Some(ws.clone()) + } + }; + + let indent = leading_indent(node.syntax()).unwrap_or_default(); + let ws = tokens::WsBuilder::new(&format!("\n{}", indent)); + 
let to_insert = iter::once(ws.ws().into()); + match existing_ws { + None => node.insert_children(InsertPosition::After(l_curly), to_insert), + Some(ws) => node.replace_children(single_node(ws), to_insert), + } +} + +impl ast::AssocItemList { + #[must_use] + pub fn append_items( + &self, + items: impl IntoIterator, + ) -> ast::AssocItemList { + let mut res = self.clone(); + if !self.syntax().text().contains_char('\n') { + res = make_multiline(res); + } + items.into_iter().for_each(|it| res = res.append_item(it)); + res + } + + #[must_use] + pub fn append_item(&self, item: ast::AssocItem) -> ast::AssocItemList { + let (indent, position) = match self.assoc_items().last() { + Some(it) => ( + leading_indent(it.syntax()).unwrap_or_default().to_string(), + InsertPosition::After(it.syntax().clone().into()), + ), + None => match self.l_curly_token() { + Some(it) => ( + " ".to_string() + &leading_indent(self.syntax()).unwrap_or_default(), + InsertPosition::After(it.into()), + ), + None => return self.clone(), + }, + }; + let ws = tokens::WsBuilder::new(&format!("\n{}", indent)); + let to_insert: ArrayVec<[SyntaxElement; 2]> = + [ws.ws().into(), item.syntax().clone().into()].into(); + self.insert_children(position, to_insert) + } +} + +impl ast::RecordExprFieldList { + #[must_use] + pub fn append_field(&self, field: &ast::RecordExprField) -> ast::RecordExprFieldList { + self.insert_field(InsertPosition::Last, field) + } + + #[must_use] + pub fn insert_field( + &self, + position: InsertPosition<&'_ ast::RecordExprField>, + field: &ast::RecordExprField, + ) -> ast::RecordExprFieldList { + let is_multiline = self.syntax().text().contains_char('\n'); + let ws; + let space = if is_multiline { + ws = tokens::WsBuilder::new(&format!( + "\n{} ", + leading_indent(self.syntax()).unwrap_or_default() + )); + ws.ws() + } else { + tokens::single_space() + }; + + let mut to_insert: ArrayVec<[SyntaxElement; 4]> = ArrayVec::new(); + to_insert.push(space.into()); + 
to_insert.push(field.syntax().clone().into()); + to_insert.push(make::token(T![,]).into()); + + macro_rules! after_l_curly { + () => {{ + let anchor = match self.l_curly_token() { + Some(it) => it.into(), + None => return self.clone(), + }; + InsertPosition::After(anchor) + }}; + } + + macro_rules! after_field { + ($anchor:expr) => { + if let Some(comma) = $anchor + .syntax() + .siblings_with_tokens(Direction::Next) + .find(|it| it.kind() == T![,]) + { + InsertPosition::After(comma) + } else { + to_insert.insert(0, make::token(T![,]).into()); + InsertPosition::After($anchor.syntax().clone().into()) + } + }; + }; + + let position = match position { + InsertPosition::First => after_l_curly!(), + InsertPosition::Last => { + if !is_multiline { + // don't insert comma before curly + to_insert.pop(); + } + match self.fields().last() { + Some(it) => after_field!(it), + None => after_l_curly!(), + } + } + InsertPosition::Before(anchor) => { + InsertPosition::Before(anchor.syntax().clone().into()) + } + InsertPosition::After(anchor) => after_field!(anchor), + }; + + self.insert_children(position, to_insert) + } +} + +impl ast::TypeAlias { + #[must_use] + pub fn remove_bounds(&self) -> ast::TypeAlias { + let colon = match self.colon_token() { + Some(it) => it, + None => return self.clone(), + }; + let end = match self.type_bound_list() { + Some(it) => it.syntax().clone().into(), + None => colon.clone().into(), + }; + self.replace_children(colon.into()..=end, iter::empty()) + } +} + +impl ast::TypeParam { + #[must_use] + pub fn remove_bounds(&self) -> ast::TypeParam { + let colon = match self.colon_token() { + Some(it) => it, + None => return self.clone(), + }; + let end = match self.type_bound_list() { + Some(it) => it.syntax().clone().into(), + None => colon.clone().into(), + }; + self.replace_children(colon.into()..=end, iter::empty()) + } +} + +impl ast::Path { + #[must_use] + pub fn with_segment(&self, segment: ast::PathSegment) -> ast::Path { + if let Some(old) = 
self.segment() { + return self.replace_children( + single_node(old.syntax().clone()), + iter::once(segment.syntax().clone().into()), + ); + } + self.clone() + } +} + +impl ast::PathSegment { + #[must_use] + pub fn with_type_args(&self, type_args: ast::GenericArgList) -> ast::PathSegment { + self._with_type_args(type_args, false) + } + + #[must_use] + pub fn with_turbo_fish(&self, type_args: ast::GenericArgList) -> ast::PathSegment { + self._with_type_args(type_args, true) + } + + fn _with_type_args(&self, type_args: ast::GenericArgList, turbo: bool) -> ast::PathSegment { + if let Some(old) = self.generic_arg_list() { + return self.replace_children( + single_node(old.syntax().clone()), + iter::once(type_args.syntax().clone().into()), + ); + } + let mut to_insert: ArrayVec<[SyntaxElement; 2]> = ArrayVec::new(); + if turbo { + to_insert.push(make::token(T![::]).into()); + } + to_insert.push(type_args.syntax().clone().into()); + self.insert_children(InsertPosition::Last, to_insert) + } +} + +impl ast::Use { + #[must_use] + pub fn with_use_tree(&self, use_tree: ast::UseTree) -> ast::Use { + if let Some(old) = self.use_tree() { + return self.replace_descendant(old, use_tree); + } + self.clone() + } + + pub fn remove(&self) -> SyntaxRewriter<'static> { + let mut res = SyntaxRewriter::default(); + res.delete(self.syntax()); + let next_ws = self + .syntax() + .next_sibling_or_token() + .and_then(|it| it.into_token()) + .and_then(ast::Whitespace::cast); + if let Some(next_ws) = next_ws { + let ws_text = next_ws.syntax().text(); + if ws_text.starts_with('\n') { + let rest = &ws_text[1..]; + if rest.is_empty() { + res.delete(next_ws.syntax()) + } else { + res.replace(next_ws.syntax(), &make::tokens::whitespace(rest)); + } + } + } + res + } +} + +impl ast::UseTree { + #[must_use] + pub fn with_path(&self, path: ast::Path) -> ast::UseTree { + if let Some(old) = self.path() { + return self.replace_descendant(old, path); + } + self.clone() + } + + #[must_use] + pub fn 
with_use_tree_list(&self, use_tree_list: ast::UseTreeList) -> ast::UseTree { + if let Some(old) = self.use_tree_list() { + return self.replace_descendant(old, use_tree_list); + } + self.clone() + } + + #[must_use] + pub fn split_prefix(&self, prefix: &ast::Path) -> ast::UseTree { + let suffix = if self.path().as_ref() == Some(prefix) && self.use_tree_list().is_none() { + make::path_unqualified(make::path_segment_self()) + } else { + match split_path_prefix(&prefix) { + Some(it) => it, + None => return self.clone(), + } + }; + + let use_tree = make::use_tree( + suffix, + self.use_tree_list(), + self.rename(), + self.star_token().is_some(), + ); + let nested = make::use_tree_list(iter::once(use_tree)); + return make::use_tree(prefix.clone(), Some(nested), None, false); + + fn split_path_prefix(prefix: &ast::Path) -> Option { + let parent = prefix.parent_path()?; + let segment = parent.segment()?; + if algo::has_errors(segment.syntax()) { + return None; + } + let mut res = make::path_unqualified(segment); + for p in iter::successors(parent.parent_path(), |it| it.parent_path()) { + res = make::path_qualified(res, p.segment()?); + } + Some(res) + } + } + + pub fn remove(&self) -> SyntaxRewriter<'static> { + let mut res = SyntaxRewriter::default(); + res.delete(self.syntax()); + for &dir in [Direction::Next, Direction::Prev].iter() { + if let Some(nb) = neighbor(self, dir) { + self.syntax() + .siblings_with_tokens(dir) + .skip(1) + .take_while(|it| it.as_node() != Some(nb.syntax())) + .for_each(|el| res.delete(&el)); + return res; + } + } + res + } +} + +impl ast::MatchArmList { + #[must_use] + pub fn append_arms(&self, items: impl IntoIterator) -> ast::MatchArmList { + let mut res = self.clone(); + res = res.strip_if_only_whitespace(); + if !res.syntax().text().contains_char('\n') { + res = make_multiline(res); + } + items.into_iter().for_each(|it| res = res.append_arm(it)); + res + } + + fn strip_if_only_whitespace(&self) -> ast::MatchArmList { + let mut iter = 
self.syntax().children_with_tokens().skip_while(|it| it.kind() != T!['{']); + iter.next(); // Eat the curly + let mut inner = iter.take_while(|it| it.kind() != T!['}']); + if !inner.clone().all(|it| it.kind() == WHITESPACE) { + return self.clone(); + } + let start = match inner.next() { + Some(s) => s, + None => return self.clone(), + }; + let end = match inner.last() { + Some(s) => s, + None => start.clone(), + }; + self.replace_children(start..=end, &mut iter::empty()) + } + + #[must_use] + pub fn remove_placeholder(&self) -> ast::MatchArmList { + let placeholder = + self.arms().find(|arm| matches!(arm.pat(), Some(ast::Pat::WildcardPat(_)))); + if let Some(placeholder) = placeholder { + self.remove_arm(&placeholder) + } else { + self.clone() + } + } + + #[must_use] + fn remove_arm(&self, arm: &ast::MatchArm) -> ast::MatchArmList { + let start = arm.syntax().clone(); + let end = if let Some(comma) = start + .siblings_with_tokens(Direction::Next) + .skip(1) + .skip_while(|it| it.kind().is_trivia()) + .next() + .filter(|it| it.kind() == T![,]) + { + comma + } else { + start.clone().into() + }; + self.replace_children(start.into()..=end, None) + } + + #[must_use] + pub fn append_arm(&self, item: ast::MatchArm) -> ast::MatchArmList { + let r_curly = match self.syntax().children_with_tokens().find(|it| it.kind() == T!['}']) { + Some(t) => t, + None => return self.clone(), + }; + let position = InsertPosition::Before(r_curly.into()); + let arm_ws = tokens::WsBuilder::new(" "); + let match_indent = &leading_indent(self.syntax()).unwrap_or_default(); + let match_ws = tokens::WsBuilder::new(&format!("\n{}", match_indent)); + let to_insert: ArrayVec<[SyntaxElement; 3]> = + [arm_ws.ws().into(), item.syntax().clone().into(), match_ws.ws().into()].into(); + self.insert_children(position, to_insert) + } +} + +#[must_use] +pub fn remove_attrs_and_docs(node: &N) -> N { + N::cast(remove_attrs_and_docs_inner(node.syntax().clone())).unwrap() +} + +fn remove_attrs_and_docs_inner(mut 
node: SyntaxNode) -> SyntaxNode { + while let Some(start) = + node.children_with_tokens().find(|it| it.kind() == ATTR || it.kind() == COMMENT) + { + let end = match &start.next_sibling_or_token() { + Some(el) if el.kind() == WHITESPACE => el.clone(), + Some(_) | None => start.clone(), + }; + node = algo::replace_children(&node, start..=end, &mut iter::empty()); + } + node +} + +#[derive(Debug, Clone, Copy)] +pub struct IndentLevel(pub u8); + +impl From for IndentLevel { + fn from(level: u8) -> IndentLevel { + IndentLevel(level) + } +} + +impl fmt::Display for IndentLevel { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let spaces = " "; + let buf; + let len = self.0 as usize * 4; + let indent = if len <= spaces.len() { + &spaces[..len] + } else { + buf = iter::repeat(' ').take(len).collect::(); + &buf + }; + fmt::Display::fmt(indent, f) + } +} + +impl ops::Add for IndentLevel { + type Output = IndentLevel; + fn add(self, rhs: u8) -> IndentLevel { + IndentLevel(self.0 + rhs) + } +} + +impl IndentLevel { + pub fn from_node(node: &SyntaxNode) -> IndentLevel { + let first_token = match node.first_token() { + Some(it) => it, + None => return IndentLevel(0), + }; + for ws in prev_tokens(first_token).filter_map(ast::Whitespace::cast) { + let text = ws.syntax().text(); + if let Some(pos) = text.rfind('\n') { + let level = text[pos + 1..].chars().count() / 4; + return IndentLevel(level as u8); + } + } + IndentLevel(0) + } + + /// XXX: this intentionally doesn't change the indent of the very first token. + /// Ie, in something like + /// ``` + /// fn foo() { + /// 92 + /// } + /// ``` + /// if you indent the block, the `{` token would stay put. 
+ fn increase_indent(self, node: SyntaxNode) -> SyntaxNode { + let mut rewriter = SyntaxRewriter::default(); + node.descendants_with_tokens() + .filter_map(|el| el.into_token()) + .filter_map(ast::Whitespace::cast) + .filter(|ws| { + let text = ws.syntax().text(); + text.contains('\n') + }) + .for_each(|ws| { + let new_ws = make::tokens::whitespace(&format!("{}{}", ws.syntax(), self,)); + rewriter.replace(ws.syntax(), &new_ws) + }); + rewriter.rewrite(&node) + } + + fn decrease_indent(self, node: SyntaxNode) -> SyntaxNode { + let mut rewriter = SyntaxRewriter::default(); + node.descendants_with_tokens() + .filter_map(|el| el.into_token()) + .filter_map(ast::Whitespace::cast) + .filter(|ws| { + let text = ws.syntax().text(); + text.contains('\n') + }) + .for_each(|ws| { + let new_ws = make::tokens::whitespace( + &ws.syntax().text().replace(&format!("\n{}", self), "\n"), + ); + rewriter.replace(ws.syntax(), &new_ws) + }); + rewriter.rewrite(&node) + } +} + +// FIXME: replace usages with IndentLevel above +fn leading_indent(node: &SyntaxNode) -> Option { + for token in prev_tokens(node.first_token()?) 
{ + if let Some(ws) = ast::Whitespace::cast(token.clone()) { + let ws_text = ws.text(); + if let Some(pos) = ws_text.rfind('\n') { + return Some(ws_text[pos + 1..].into()); + } + } + if token.text().contains('\n') { + break; + } + } + None +} + +fn prev_tokens(token: SyntaxToken) -> impl Iterator { + iter::successors(Some(token), |token| token.prev_token()) +} + +pub trait AstNodeEdit: AstNode + Clone + Sized { + #[must_use] + fn insert_children( + &self, + position: InsertPosition, + to_insert: impl IntoIterator, + ) -> Self { + let new_syntax = algo::insert_children(self.syntax(), position, to_insert); + Self::cast(new_syntax).unwrap() + } + + #[must_use] + fn replace_children( + &self, + to_replace: RangeInclusive, + to_insert: impl IntoIterator, + ) -> Self { + let new_syntax = algo::replace_children(self.syntax(), to_replace, to_insert); + Self::cast(new_syntax).unwrap() + } + + #[must_use] + fn replace_descendant(&self, old: D, new: D) -> Self { + self.replace_descendants(iter::once((old, new))) + } + + #[must_use] + fn replace_descendants( + &self, + replacement_map: impl IntoIterator, + ) -> Self { + let mut rewriter = SyntaxRewriter::default(); + for (from, to) in replacement_map { + rewriter.replace(from.syntax(), to.syntax()) + } + rewriter.rewrite_ast(self) + } + fn indent_level(&self) -> IndentLevel { + IndentLevel::from_node(self.syntax()) + } + #[must_use] + fn indent(&self, level: IndentLevel) -> Self { + Self::cast(level.increase_indent(self.syntax().clone())).unwrap() + } + #[must_use] + fn dedent(&self, level: IndentLevel) -> Self { + Self::cast(level.decrease_indent(self.syntax().clone())).unwrap() + } + #[must_use] + fn reset_indent(&self) -> Self { + let level = IndentLevel::from_node(self.syntax()); + self.dedent(level) + } +} + +impl AstNodeEdit for N {} + +fn single_node(element: impl Into) -> RangeInclusive { + let element = element.into(); + element.clone()..=element +} + +#[test] +fn test_increase_indent() { + let arm_list = { + let arm 
= make::match_arm(iter::once(make::wildcard_pat().into()), make::expr_unit()); + make::match_arm_list(vec![arm.clone(), arm]) + }; + assert_eq!( + arm_list.syntax().to_string(), + "{ + _ => (), + _ => (), +}" + ); + let indented = arm_list.indent(IndentLevel(2)); + assert_eq!( + indented.syntax().to_string(), + "{ + _ => (), + _ => (), + }" + ); +} diff --git a/crates/ra_syntax/src/ast/expr_ext.rs b/crates/syntax/src/ast/expr_ext.rs similarity index 100% rename from crates/ra_syntax/src/ast/expr_ext.rs rename to crates/syntax/src/ast/expr_ext.rs diff --git a/crates/ra_syntax/src/ast/generated.rs b/crates/syntax/src/ast/generated.rs similarity index 100% rename from crates/ra_syntax/src/ast/generated.rs rename to crates/syntax/src/ast/generated.rs diff --git a/crates/ra_syntax/src/ast/generated/nodes.rs b/crates/syntax/src/ast/generated/nodes.rs similarity index 100% rename from crates/ra_syntax/src/ast/generated/nodes.rs rename to crates/syntax/src/ast/generated/nodes.rs diff --git a/crates/ra_syntax/src/ast/generated/tokens.rs b/crates/syntax/src/ast/generated/tokens.rs similarity index 100% rename from crates/ra_syntax/src/ast/generated/tokens.rs rename to crates/syntax/src/ast/generated/tokens.rs diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs new file mode 100644 index 0000000000..d20c085aa0 --- /dev/null +++ b/crates/syntax/src/ast/make.rs @@ -0,0 +1,402 @@ +//! This module contains free-standing functions for creating AST fragments out +//! of smaller pieces. +//! +//! Note that all functions here intended to be stupid constructors, which just +//! assemble a finish node from immediate children. If you want to do something +//! smarter than that, it probably doesn't belong in this module. 
+use itertools::Itertools; +use stdx::format_to; + +use crate::{ast, AstNode, SourceFile, SyntaxKind, SyntaxNode, SyntaxToken}; + +pub fn name(text: &str) -> ast::Name { + ast_from_text(&format!("mod {};", text)) +} + +pub fn name_ref(text: &str) -> ast::NameRef { + ast_from_text(&format!("fn f() {{ {}; }}", text)) +} + +pub fn ty(text: &str) -> ast::Type { + ast_from_text(&format!("impl {} for D {{}};", text)) +} + +pub fn path_segment(name_ref: ast::NameRef) -> ast::PathSegment { + ast_from_text(&format!("use {};", name_ref)) +} +pub fn path_segment_self() -> ast::PathSegment { + ast_from_text("use self;") +} +pub fn path_unqualified(segment: ast::PathSegment) -> ast::Path { + path_from_text(&format!("use {}", segment)) +} +pub fn path_qualified(qual: ast::Path, segment: ast::PathSegment) -> ast::Path { + path_from_text(&format!("{}::{}", qual, segment)) +} +pub fn path_from_text(text: &str) -> ast::Path { + ast_from_text(text) +} + +pub fn use_tree( + path: ast::Path, + use_tree_list: Option, + alias: Option, + add_star: bool, +) -> ast::UseTree { + let mut buf = "use ".to_string(); + buf += &path.syntax().to_string(); + if let Some(use_tree_list) = use_tree_list { + format_to!(buf, "::{}", use_tree_list); + } + if add_star { + buf += "::*"; + } + + if let Some(alias) = alias { + format_to!(buf, " {}", alias); + } + ast_from_text(&buf) +} + +pub fn use_tree_list(use_trees: impl IntoIterator) -> ast::UseTreeList { + let use_trees = use_trees.into_iter().map(|it| it.syntax().clone()).join(", "); + ast_from_text(&format!("use {{{}}};", use_trees)) +} + +pub fn use_(use_tree: ast::UseTree) -> ast::Use { + ast_from_text(&format!("use {};", use_tree)) +} + +pub fn record_expr_field(name: ast::NameRef, expr: Option) -> ast::RecordExprField { + return match expr { + Some(expr) => from_text(&format!("{}: {}", name, expr)), + None => from_text(&name.to_string()), + }; + + fn from_text(text: &str) -> ast::RecordExprField { + ast_from_text(&format!("fn f() {{ S {{ {}, }} 
}}", text)) + } +} + +pub fn record_field(name: ast::NameRef, ty: ast::Type) -> ast::RecordField { + ast_from_text(&format!("struct S {{ {}: {}, }}", name, ty)) +} + +pub fn block_expr( + stmts: impl IntoIterator, + tail_expr: Option, +) -> ast::BlockExpr { + let mut buf = "{\n".to_string(); + for stmt in stmts.into_iter() { + format_to!(buf, " {}\n", stmt); + } + if let Some(tail_expr) = tail_expr { + format_to!(buf, " {}\n", tail_expr) + } + buf += "}"; + ast_from_text(&format!("fn f() {}", buf)) +} + +pub fn expr_unit() -> ast::Expr { + expr_from_text("()") +} +pub fn expr_empty_block() -> ast::Expr { + expr_from_text("{}") +} +pub fn expr_unimplemented() -> ast::Expr { + expr_from_text("unimplemented!()") +} +pub fn expr_unreachable() -> ast::Expr { + expr_from_text("unreachable!()") +} +pub fn expr_todo() -> ast::Expr { + expr_from_text("todo!()") +} +pub fn expr_path(path: ast::Path) -> ast::Expr { + expr_from_text(&path.to_string()) +} +pub fn expr_continue() -> ast::Expr { + expr_from_text("continue") +} +pub fn expr_break() -> ast::Expr { + expr_from_text("break") +} +pub fn expr_return() -> ast::Expr { + expr_from_text("return") +} +pub fn expr_match(expr: ast::Expr, match_arm_list: ast::MatchArmList) -> ast::Expr { + expr_from_text(&format!("match {} {}", expr, match_arm_list)) +} +pub fn expr_if(condition: ast::Condition, then_branch: ast::BlockExpr) -> ast::Expr { + expr_from_text(&format!("if {} {}", condition, then_branch)) +} +pub fn expr_prefix(op: SyntaxKind, expr: ast::Expr) -> ast::Expr { + let token = token(op); + expr_from_text(&format!("{}{}", token, expr)) +} +pub fn expr_call(f: ast::Expr, arg_list: ast::ArgList) -> ast::Expr { + expr_from_text(&format!("{}{}", f, arg_list)) +} +fn expr_from_text(text: &str) -> ast::Expr { + ast_from_text(&format!("const C: () = {};", text)) +} + +pub fn try_expr_from_text(text: &str) -> Option { + try_ast_from_text(&format!("const C: () = {};", text)) +} + +pub fn condition(expr: ast::Expr, pattern: 
Option) -> ast::Condition { + match pattern { + None => ast_from_text(&format!("const _: () = while {} {{}};", expr)), + Some(pattern) => { + ast_from_text(&format!("const _: () = while let {} = {} {{}};", pattern, expr)) + } + } +} + +pub fn arg_list(args: impl IntoIterator) -> ast::ArgList { + ast_from_text(&format!("fn main() {{ ()({}) }}", args.into_iter().format(", "))) +} + +pub fn ident_pat(name: ast::Name) -> ast::IdentPat { + return from_text(name.text()); + + fn from_text(text: &str) -> ast::IdentPat { + ast_from_text(&format!("fn f({}: ())", text)) + } +} + +pub fn wildcard_pat() -> ast::WildcardPat { + return from_text("_"); + + fn from_text(text: &str) -> ast::WildcardPat { + ast_from_text(&format!("fn f({}: ())", text)) + } +} + +/// Creates a tuple of patterns from an interator of patterns. +/// +/// Invariant: `pats` must be length > 1 +/// +/// FIXME handle `pats` length == 1 +pub fn tuple_pat(pats: impl IntoIterator) -> ast::TuplePat { + let pats_str = pats.into_iter().map(|p| p.to_string()).join(", "); + return from_text(&format!("({})", pats_str)); + + fn from_text(text: &str) -> ast::TuplePat { + ast_from_text(&format!("fn f({}: ())", text)) + } +} + +pub fn tuple_struct_pat( + path: ast::Path, + pats: impl IntoIterator, +) -> ast::TupleStructPat { + let pats_str = pats.into_iter().join(", "); + return from_text(&format!("{}({})", path, pats_str)); + + fn from_text(text: &str) -> ast::TupleStructPat { + ast_from_text(&format!("fn f({}: ())", text)) + } +} + +pub fn record_pat(path: ast::Path, pats: impl IntoIterator) -> ast::RecordPat { + let pats_str = pats.into_iter().join(", "); + return from_text(&format!("{} {{ {} }}", path, pats_str)); + + fn from_text(text: &str) -> ast::RecordPat { + ast_from_text(&format!("fn f({}: ())", text)) + } +} + +/// Returns a `BindPat` if the path has just one segment, a `PathPat` otherwise. 
+pub fn path_pat(path: ast::Path) -> ast::Pat { + return from_text(&path.to_string()); + fn from_text(text: &str) -> ast::Pat { + ast_from_text(&format!("fn f({}: ())", text)) + } +} + +pub fn match_arm(pats: impl IntoIterator, expr: ast::Expr) -> ast::MatchArm { + let pats_str = pats.into_iter().join(" | "); + return from_text(&format!("{} => {}", pats_str, expr)); + + fn from_text(text: &str) -> ast::MatchArm { + ast_from_text(&format!("fn f() {{ match () {{{}}} }}", text)) + } +} + +pub fn match_arm_list(arms: impl IntoIterator) -> ast::MatchArmList { + let arms_str = arms + .into_iter() + .map(|arm| { + let needs_comma = arm.expr().map_or(true, |it| !it.is_block_like()); + let comma = if needs_comma { "," } else { "" }; + format!(" {}{}\n", arm.syntax(), comma) + }) + .collect::(); + return from_text(&arms_str); + + fn from_text(text: &str) -> ast::MatchArmList { + ast_from_text(&format!("fn f() {{ match () {{\n{}}} }}", text)) + } +} + +pub fn where_pred( + path: ast::Path, + bounds: impl IntoIterator, +) -> ast::WherePred { + let bounds = bounds.into_iter().join(" + "); + return from_text(&format!("{}: {}", path, bounds)); + + fn from_text(text: &str) -> ast::WherePred { + ast_from_text(&format!("fn f() where {} {{ }}", text)) + } +} + +pub fn where_clause(preds: impl IntoIterator) -> ast::WhereClause { + let preds = preds.into_iter().join(", "); + return from_text(preds.as_str()); + + fn from_text(text: &str) -> ast::WhereClause { + ast_from_text(&format!("fn f() where {} {{ }}", text)) + } +} + +pub fn let_stmt(pattern: ast::Pat, initializer: Option) -> ast::LetStmt { + let text = match initializer { + Some(it) => format!("let {} = {};", pattern, it), + None => format!("let {};", pattern), + }; + ast_from_text(&format!("fn f() {{ {} }}", text)) +} +pub fn expr_stmt(expr: ast::Expr) -> ast::ExprStmt { + let semi = if expr.is_block_like() { "" } else { ";" }; + ast_from_text(&format!("fn f() {{ {}{} (); }}", expr, semi)) +} + +pub fn token(kind: SyntaxKind) 
-> SyntaxToken { + tokens::SOURCE_FILE + .tree() + .syntax() + .descendants_with_tokens() + .filter_map(|it| it.into_token()) + .find(|it| it.kind() == kind) + .unwrap_or_else(|| panic!("unhandled token: {:?}", kind)) +} + +pub fn param(name: String, ty: String) -> ast::Param { + ast_from_text(&format!("fn f({}: {}) {{ }}", name, ty)) +} + +pub fn param_list(pats: impl IntoIterator) -> ast::ParamList { + let args = pats.into_iter().join(", "); + ast_from_text(&format!("fn f({}) {{ }}", args)) +} + +pub fn visibility_pub_crate() -> ast::Visibility { + ast_from_text("pub(crate) struct S") +} + +pub fn fn_( + visibility: Option, + fn_name: ast::Name, + type_params: Option, + params: ast::ParamList, + body: ast::BlockExpr, +) -> ast::Fn { + let type_params = + if let Some(type_params) = type_params { format!("<{}>", type_params) } else { "".into() }; + let visibility = match visibility { + None => String::new(), + Some(it) => format!("{} ", it), + }; + ast_from_text(&format!("{}fn {}{}{} {}", visibility, fn_name, type_params, params, body)) +} + +fn ast_from_text(text: &str) -> N { + let parse = SourceFile::parse(text); + let node = match parse.tree().syntax().descendants().find_map(N::cast) { + Some(it) => it, + None => { + panic!("Failed to make ast node `{}` from text {}", std::any::type_name::(), text) + } + }; + let node = node.syntax().clone(); + let node = unroot(node); + let node = N::cast(node).unwrap(); + assert_eq!(node.syntax().text_range().start(), 0.into()); + node +} + +fn try_ast_from_text(text: &str) -> Option { + let parse = SourceFile::parse(text); + let node = parse.tree().syntax().descendants().find_map(N::cast)?; + let node = node.syntax().clone(); + let node = unroot(node); + let node = N::cast(node).unwrap(); + assert_eq!(node.syntax().text_range().start(), 0.into()); + Some(node) +} + +fn unroot(n: SyntaxNode) -> SyntaxNode { + SyntaxNode::new_root(n.green().clone()) +} + +pub mod tokens { + use once_cell::sync::Lazy; + + use crate::{ast, 
AstNode, Parse, SourceFile, SyntaxKind::*, SyntaxToken}; + + pub(super) static SOURCE_FILE: Lazy> = + Lazy::new(|| SourceFile::parse("const C: <()>::Item = (1 != 1, 2 == 2, !true)\n;")); + + pub fn single_space() -> SyntaxToken { + SOURCE_FILE + .tree() + .syntax() + .descendants_with_tokens() + .filter_map(|it| it.into_token()) + .find(|it| it.kind() == WHITESPACE && it.text().as_str() == " ") + .unwrap() + } + + pub fn whitespace(text: &str) -> SyntaxToken { + assert!(text.trim().is_empty()); + let sf = SourceFile::parse(text).ok().unwrap(); + sf.syntax().first_child_or_token().unwrap().into_token().unwrap() + } + + pub fn doc_comment(text: &str) -> SyntaxToken { + assert!(!text.trim().is_empty()); + let sf = SourceFile::parse(text).ok().unwrap(); + sf.syntax().first_child_or_token().unwrap().into_token().unwrap() + } + + pub fn literal(text: &str) -> SyntaxToken { + assert_eq!(text.trim(), text); + let lit: ast::Literal = super::ast_from_text(&format!("fn f() {{ let _ = {}; }}", text)); + lit.syntax().first_child_or_token().unwrap().into_token().unwrap() + } + + pub fn single_newline() -> SyntaxToken { + SOURCE_FILE + .tree() + .syntax() + .descendants_with_tokens() + .filter_map(|it| it.into_token()) + .find(|it| it.kind() == WHITESPACE && it.text().as_str() == "\n") + .unwrap() + } + + pub struct WsBuilder(SourceFile); + + impl WsBuilder { + pub fn new(text: &str) -> WsBuilder { + WsBuilder(SourceFile::parse(text).ok().unwrap()) + } + pub fn ws(&self) -> SyntaxToken { + self.0.syntax().first_child_or_token().unwrap().into_token().unwrap() + } + } +} diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs new file mode 100644 index 0000000000..50c1c157d8 --- /dev/null +++ b/crates/syntax/src/ast/node_ext.rs @@ -0,0 +1,485 @@ +//! Various extension methods to ast Nodes, which are hard to code-generate. +//! Extensions for various expressions live in a sibling `expr_extensions` module. 
+ +use std::fmt; + +use itertools::Itertools; +use parser::SyntaxKind; + +use crate::{ + ast::{self, support, AstNode, NameOwner, SyntaxNode}, + SmolStr, SyntaxElement, SyntaxToken, T, +}; + +impl ast::Name { + pub fn text(&self) -> &SmolStr { + text_of_first_token(self.syntax()) + } +} + +impl ast::NameRef { + pub fn text(&self) -> &SmolStr { + text_of_first_token(self.syntax()) + } + + pub fn as_tuple_field(&self) -> Option { + self.text().parse().ok() + } +} + +fn text_of_first_token(node: &SyntaxNode) -> &SmolStr { + node.green().children().next().and_then(|it| it.into_token()).unwrap().text() +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum AttrKind { + Inner, + Outer, +} + +impl ast::Attr { + pub fn as_simple_atom(&self) -> Option { + if self.eq_token().is_some() || self.token_tree().is_some() { + return None; + } + self.simple_name() + } + + pub fn as_simple_call(&self) -> Option<(SmolStr, ast::TokenTree)> { + let tt = self.token_tree()?; + Some((self.simple_name()?, tt)) + } + + pub fn as_simple_key_value(&self) -> Option<(SmolStr, SmolStr)> { + let lit = self.literal()?; + let key = self.simple_name()?; + // FIXME: escape? raw string? 
+ let value = lit.syntax().first_token()?.text().trim_matches('"').into(); + Some((key, value)) + } + + pub fn simple_name(&self) -> Option { + let path = self.path()?; + match (path.segment(), path.qualifier()) { + (Some(segment), None) => Some(segment.syntax().first_token()?.text().clone()), + _ => None, + } + } + + pub fn kind(&self) -> AttrKind { + let first_token = self.syntax().first_token(); + let first_token_kind = first_token.as_ref().map(SyntaxToken::kind); + let second_token_kind = + first_token.and_then(|token| token.next_token()).as_ref().map(SyntaxToken::kind); + + match (first_token_kind, second_token_kind) { + (Some(SyntaxKind::POUND), Some(T![!])) => AttrKind::Inner, + _ => AttrKind::Outer, + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum PathSegmentKind { + Name(ast::NameRef), + Type { type_ref: Option, trait_ref: Option }, + SelfKw, + SuperKw, + CrateKw, +} + +impl ast::PathSegment { + pub fn parent_path(&self) -> ast::Path { + self.syntax() + .parent() + .and_then(ast::Path::cast) + .expect("segments are always nested in paths") + } + + pub fn kind(&self) -> Option { + let res = if let Some(name_ref) = self.name_ref() { + PathSegmentKind::Name(name_ref) + } else { + match self.syntax().first_child_or_token()?.kind() { + T![self] => PathSegmentKind::SelfKw, + T![super] => PathSegmentKind::SuperKw, + T![crate] => PathSegmentKind::CrateKw, + T![<] => { + // or + // T is any TypeRef, Trait has to be a PathType + let mut type_refs = + self.syntax().children().filter(|node| ast::Type::can_cast(node.kind())); + let type_ref = type_refs.next().and_then(ast::Type::cast); + let trait_ref = type_refs.next().and_then(ast::PathType::cast); + PathSegmentKind::Type { type_ref, trait_ref } + } + _ => return None, + } + }; + Some(res) + } +} + +impl ast::Path { + pub fn parent_path(&self) -> Option { + self.syntax().parent().and_then(ast::Path::cast) + } +} + +impl ast::UseTreeList { + pub fn parent_use_tree(&self) -> ast::UseTree { + self.syntax() 
+ .parent() + .and_then(ast::UseTree::cast) + .expect("UseTreeLists are always nested in UseTrees") + } +} + +impl ast::Impl { + pub fn self_ty(&self) -> Option { + match self.target() { + (Some(t), None) | (_, Some(t)) => Some(t), + _ => None, + } + } + + pub fn trait_(&self) -> Option { + match self.target() { + (Some(t), Some(_)) => Some(t), + _ => None, + } + } + + fn target(&self) -> (Option, Option) { + let mut types = support::children(self.syntax()); + let first = types.next(); + let second = types.next(); + (first, second) + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum StructKind { + Record(ast::RecordFieldList), + Tuple(ast::TupleFieldList), + Unit, +} + +impl StructKind { + fn from_node(node: &N) -> StructKind { + if let Some(nfdl) = support::child::(node.syntax()) { + StructKind::Record(nfdl) + } else if let Some(pfl) = support::child::(node.syntax()) { + StructKind::Tuple(pfl) + } else { + StructKind::Unit + } + } +} + +impl ast::Struct { + pub fn kind(&self) -> StructKind { + StructKind::from_node(self) + } +} + +impl ast::RecordExprField { + pub fn for_field_name(field_name: &ast::NameRef) -> Option { + let candidate = + field_name.syntax().parent().and_then(ast::RecordExprField::cast).or_else(|| { + field_name.syntax().ancestors().nth(4).and_then(ast::RecordExprField::cast) + })?; + if candidate.field_name().as_ref() == Some(field_name) { + Some(candidate) + } else { + None + } + } + + /// Deals with field init shorthand + pub fn field_name(&self) -> Option { + if let Some(name_ref) = self.name_ref() { + return Some(name_ref); + } + if let Some(ast::Expr::PathExpr(expr)) = self.expr() { + let path = expr.path()?; + let segment = path.segment()?; + let name_ref = segment.name_ref()?; + if path.qualifier().is_none() { + return Some(name_ref); + } + } + None + } +} + +pub enum NameOrNameRef { + Name(ast::Name), + NameRef(ast::NameRef), +} + +impl fmt::Display for NameOrNameRef { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + 
match self { + NameOrNameRef::Name(it) => fmt::Display::fmt(it, f), + NameOrNameRef::NameRef(it) => fmt::Display::fmt(it, f), + } + } +} + +impl ast::RecordPatField { + /// Deals with field init shorthand + pub fn field_name(&self) -> Option { + if let Some(name_ref) = self.name_ref() { + return Some(NameOrNameRef::NameRef(name_ref)); + } + if let Some(ast::Pat::IdentPat(pat)) = self.pat() { + let name = pat.name()?; + return Some(NameOrNameRef::Name(name)); + } + None + } +} + +impl ast::Variant { + pub fn parent_enum(&self) -> ast::Enum { + self.syntax() + .parent() + .and_then(|it| it.parent()) + .and_then(ast::Enum::cast) + .expect("EnumVariants are always nested in Enums") + } + pub fn kind(&self) -> StructKind { + StructKind::from_node(self) + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum FieldKind { + Name(ast::NameRef), + Index(SyntaxToken), +} + +impl ast::FieldExpr { + pub fn index_token(&self) -> Option { + self.syntax + .children_with_tokens() + // FIXME: Accepting floats here to reject them in validation later + .find(|c| c.kind() == SyntaxKind::INT_NUMBER || c.kind() == SyntaxKind::FLOAT_NUMBER) + .as_ref() + .and_then(SyntaxElement::as_token) + .cloned() + } + + pub fn field_access(&self) -> Option { + if let Some(nr) = self.name_ref() { + Some(FieldKind::Name(nr)) + } else if let Some(tok) = self.index_token() { + Some(FieldKind::Index(tok)) + } else { + None + } + } +} + +pub struct SlicePatComponents { + pub prefix: Vec, + pub slice: Option, + pub suffix: Vec, +} + +impl ast::SlicePat { + pub fn components(&self) -> SlicePatComponents { + let mut args = self.pats().peekable(); + let prefix = args + .peeking_take_while(|p| match p { + ast::Pat::RestPat(_) => false, + ast::Pat::IdentPat(bp) => match bp.pat() { + Some(ast::Pat::RestPat(_)) => false, + _ => true, + }, + ast::Pat::RefPat(rp) => match rp.pat() { + Some(ast::Pat::RestPat(_)) => false, + Some(ast::Pat::IdentPat(bp)) => match bp.pat() { + Some(ast::Pat::RestPat(_)) => false, + _ 
=> true, + }, + _ => true, + }, + _ => true, + }) + .collect(); + let slice = args.next(); + let suffix = args.collect(); + + SlicePatComponents { prefix, slice, suffix } + } +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +pub enum SelfParamKind { + /// self + Owned, + /// &self + Ref, + /// &mut self + MutRef, +} + +impl ast::SelfParam { + pub fn kind(&self) -> SelfParamKind { + if self.amp_token().is_some() { + if self.mut_token().is_some() { + SelfParamKind::MutRef + } else { + SelfParamKind::Ref + } + } else { + SelfParamKind::Owned + } + } +} + +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub enum TypeBoundKind { + /// Trait + PathType(ast::PathType), + /// for<'a> ... + ForType(ast::ForType), + /// 'a + Lifetime(SyntaxToken), +} + +impl ast::TypeBound { + pub fn kind(&self) -> TypeBoundKind { + if let Some(path_type) = support::children(self.syntax()).next() { + TypeBoundKind::PathType(path_type) + } else if let Some(for_type) = support::children(self.syntax()).next() { + TypeBoundKind::ForType(for_type) + } else if let Some(lifetime) = self.lifetime_token() { + TypeBoundKind::Lifetime(lifetime) + } else { + unreachable!() + } + } +} + +pub enum VisibilityKind { + In(ast::Path), + PubCrate, + PubSuper, + PubSelf, + Pub, +} + +impl ast::Visibility { + pub fn kind(&self) -> VisibilityKind { + if let Some(path) = support::children(self.syntax()).next() { + VisibilityKind::In(path) + } else if self.crate_token().is_some() { + VisibilityKind::PubCrate + } else if self.super_token().is_some() { + VisibilityKind::PubSuper + } else if self.self_token().is_some() { + VisibilityKind::PubSelf + } else { + VisibilityKind::Pub + } + } +} + +impl ast::MacroCall { + pub fn is_macro_rules(&self) -> Option { + let name_ref = self.path()?.segment()?.name_ref()?; + if name_ref.text() == "macro_rules" { + self.name() + } else { + None + } + } + + pub fn is_bang(&self) -> bool { + self.is_macro_rules().is_none() + } +} + +impl ast::LifetimeParam { + pub fn 
lifetime_bounds(&self) -> impl Iterator { + self.syntax() + .children_with_tokens() + .filter_map(|it| it.into_token()) + .skip_while(|x| x.kind() != T![:]) + .filter(|it| it.kind() == T![lifetime]) + } +} + +impl ast::RangePat { + pub fn start(&self) -> Option { + self.syntax() + .children_with_tokens() + .take_while(|it| !(it.kind() == T![..] || it.kind() == T![..=])) + .filter_map(|it| it.into_node()) + .find_map(ast::Pat::cast) + } + + pub fn end(&self) -> Option { + self.syntax() + .children_with_tokens() + .skip_while(|it| !(it.kind() == T![..] || it.kind() == T![..=])) + .filter_map(|it| it.into_node()) + .find_map(ast::Pat::cast) + } +} + +impl ast::TokenTree { + pub fn left_delimiter_token(&self) -> Option { + self.syntax() + .first_child_or_token()? + .into_token() + .filter(|it| matches!(it.kind(), T!['{'] | T!['('] | T!['['])) + } + + pub fn right_delimiter_token(&self) -> Option { + self.syntax() + .last_child_or_token()? + .into_token() + .filter(|it| matches!(it.kind(), T!['}'] | T![')'] | T![']'])) + } +} + +impl ast::GenericParamList { + pub fn lifetime_params(&self) -> impl Iterator { + self.generic_params().filter_map(|param| match param { + ast::GenericParam::LifetimeParam(it) => Some(it), + ast::GenericParam::TypeParam(_) | ast::GenericParam::ConstParam(_) => None, + }) + } + pub fn type_params(&self) -> impl Iterator { + self.generic_params().filter_map(|param| match param { + ast::GenericParam::TypeParam(it) => Some(it), + ast::GenericParam::LifetimeParam(_) | ast::GenericParam::ConstParam(_) => None, + }) + } + pub fn const_params(&self) -> impl Iterator { + self.generic_params().filter_map(|param| match param { + ast::GenericParam::ConstParam(it) => Some(it), + ast::GenericParam::TypeParam(_) | ast::GenericParam::LifetimeParam(_) => None, + }) + } +} + +impl ast::DocCommentsOwner for ast::SourceFile {} +impl ast::DocCommentsOwner for ast::Fn {} +impl ast::DocCommentsOwner for ast::Struct {} +impl ast::DocCommentsOwner for ast::Union {} 
+impl ast::DocCommentsOwner for ast::RecordField {} +impl ast::DocCommentsOwner for ast::TupleField {} +impl ast::DocCommentsOwner for ast::Enum {} +impl ast::DocCommentsOwner for ast::Variant {} +impl ast::DocCommentsOwner for ast::Trait {} +impl ast::DocCommentsOwner for ast::Module {} +impl ast::DocCommentsOwner for ast::Static {} +impl ast::DocCommentsOwner for ast::Const {} +impl ast::DocCommentsOwner for ast::TypeAlias {} +impl ast::DocCommentsOwner for ast::Impl {} +impl ast::DocCommentsOwner for ast::MacroCall {} diff --git a/crates/ra_syntax/src/ast/token_ext.rs b/crates/syntax/src/ast/token_ext.rs similarity index 100% rename from crates/ra_syntax/src/ast/token_ext.rs rename to crates/syntax/src/ast/token_ext.rs diff --git a/crates/syntax/src/ast/traits.rs b/crates/syntax/src/ast/traits.rs new file mode 100644 index 0000000000..0bdc22d953 --- /dev/null +++ b/crates/syntax/src/ast/traits.rs @@ -0,0 +1,141 @@ +//! Various traits that are implemented by ast nodes. +//! +//! The implementations are usually trivial, and live in generated.rs +use itertools::Itertools; + +use crate::{ + ast::{self, support, AstChildren, AstNode, AstToken}, + syntax_node::SyntaxElementChildren, + SyntaxToken, T, +}; + +pub trait NameOwner: AstNode { + fn name(&self) -> Option { + support::child(self.syntax()) + } +} + +pub trait VisibilityOwner: AstNode { + fn visibility(&self) -> Option { + support::child(self.syntax()) + } +} + +pub trait LoopBodyOwner: AstNode { + fn loop_body(&self) -> Option { + support::child(self.syntax()) + } + + fn label(&self) -> Option { + support::child(self.syntax()) + } +} + +pub trait ArgListOwner: AstNode { + fn arg_list(&self) -> Option { + support::child(self.syntax()) + } +} + +pub trait ModuleItemOwner: AstNode { + fn items(&self) -> AstChildren { + support::children(self.syntax()) + } +} + +pub trait GenericParamsOwner: AstNode { + fn generic_param_list(&self) -> Option { + support::child(self.syntax()) + } + + fn where_clause(&self) -> 
Option { + support::child(self.syntax()) + } +} + +pub trait TypeBoundsOwner: AstNode { + fn type_bound_list(&self) -> Option { + support::child(self.syntax()) + } + + fn colon_token(&self) -> Option { + support::token(self.syntax(), T![:]) + } +} + +pub trait AttrsOwner: AstNode { + fn attrs(&self) -> AstChildren { + support::children(self.syntax()) + } + fn has_atom_attr(&self, atom: &str) -> bool { + self.attrs().filter_map(|x| x.as_simple_atom()).any(|x| x == atom) + } +} + +pub trait DocCommentsOwner: AstNode { + fn doc_comments(&self) -> CommentIter { + CommentIter { iter: self.syntax().children_with_tokens() } + } + + fn doc_comment_text(&self) -> Option { + self.doc_comments().doc_comment_text() + } +} + +impl CommentIter { + pub fn from_syntax_node(syntax_node: &ast::SyntaxNode) -> CommentIter { + CommentIter { iter: syntax_node.children_with_tokens() } + } + + /// Returns the textual content of a doc comment block as a single string. + /// That is, strips leading `///` (+ optional 1 character of whitespace), + /// trailing `*/`, trailing whitespace and then joins the lines. + pub fn doc_comment_text(self) -> Option { + let mut has_comments = false; + let docs = self + .filter(|comment| comment.kind().doc.is_some()) + .map(|comment| { + has_comments = true; + let prefix_len = comment.prefix().len(); + + let line: &str = comment.text().as_str(); + + // Determine if the prefix or prefix + 1 char is stripped + let pos = + if let Some(ws) = line.chars().nth(prefix_len).filter(|c| c.is_whitespace()) { + prefix_len + ws.len_utf8() + } else { + prefix_len + }; + + let end = if comment.kind().shape.is_block() && line.ends_with("*/") { + line.len() - 2 + } else { + line.len() + }; + + // Note that we do not trim the end of the line here + // since whitespace can have special meaning at the end + // of a line in markdown. 
+ line[pos..end].to_owned() + }) + .join("\n"); + + if has_comments { + Some(docs) + } else { + None + } + } +} + +pub struct CommentIter { + iter: SyntaxElementChildren, +} + +impl Iterator for CommentIter { + type Item = ast::Comment; + fn next(&mut self) -> Option { + self.iter.by_ref().find_map(|el| el.into_token().and_then(ast::Comment::cast)) + } +} diff --git a/crates/syntax/src/fuzz.rs b/crates/syntax/src/fuzz.rs new file mode 100644 index 0000000000..fbb97aa273 --- /dev/null +++ b/crates/syntax/src/fuzz.rs @@ -0,0 +1,73 @@ +//! FIXME: write short doc here + +use std::{ + convert::TryInto, + str::{self, FromStr}, +}; + +use text_edit::Indel; + +use crate::{validation, AstNode, SourceFile, TextRange}; + +fn check_file_invariants(file: &SourceFile) { + let root = file.syntax(); + validation::validate_block_structure(root); +} + +pub fn check_parser(text: &str) { + let file = SourceFile::parse(text); + check_file_invariants(&file.tree()); +} + +#[derive(Debug, Clone)] +pub struct CheckReparse { + text: String, + edit: Indel, + edited_text: String, +} + +impl CheckReparse { + pub fn from_data(data: &[u8]) -> Option { + const PREFIX: &str = "fn main(){\n\t"; + const SUFFIX: &str = "\n}"; + + let data = str::from_utf8(data).ok()?; + let mut lines = data.lines(); + let delete_start = usize::from_str(lines.next()?).ok()? 
+ PREFIX.len(); + let delete_len = usize::from_str(lines.next()?).ok()?; + let insert = lines.next()?.to_string(); + let text = lines.collect::>().join("\n"); + let text = format!("{}{}{}", PREFIX, text, SUFFIX); + text.get(delete_start..delete_start.checked_add(delete_len)?)?; // make sure delete is a valid range + let delete = + TextRange::at(delete_start.try_into().unwrap(), delete_len.try_into().unwrap()); + let edited_text = + format!("{}{}{}", &text[..delete_start], &insert, &text[delete_start + delete_len..]); + let edit = Indel { delete, insert }; + Some(CheckReparse { text, edit, edited_text }) + } + + pub fn run(&self) { + let parse = SourceFile::parse(&self.text); + let new_parse = parse.reparse(&self.edit); + check_file_invariants(&new_parse.tree()); + assert_eq!(&new_parse.tree().syntax().text().to_string(), &self.edited_text); + let full_reparse = SourceFile::parse(&self.edited_text); + for (a, b) in + new_parse.tree().syntax().descendants().zip(full_reparse.tree().syntax().descendants()) + { + if (a.kind(), a.text_range()) != (b.kind(), b.text_range()) { + eprint!("original:\n{:#?}", parse.tree().syntax()); + eprint!("reparsed:\n{:#?}", new_parse.tree().syntax()); + eprint!("full reparse:\n{:#?}", full_reparse.tree().syntax()); + assert_eq!( + format!("{:?}", a), + format!("{:?}", b), + "different syntax tree produced by the full reparse" + ); + } + } + // FIXME + // assert_eq!(new_file.errors(), full_reparse.errors()); + } +} diff --git a/crates/syntax/src/lib.rs b/crates/syntax/src/lib.rs new file mode 100644 index 0000000000..7f8da66af0 --- /dev/null +++ b/crates/syntax/src/lib.rs @@ -0,0 +1,388 @@ +//! Syntax Tree library used throughout the rust analyzer. +//! +//! Properties: +//! - easy and fast incremental re-parsing +//! - graceful handling of errors +//! - full-fidelity representation (*any* text can be precisely represented as +//! a syntax tree) +//! +//! For more information, see the [RFC]. Current implementation is inspired by +//! 
the [Swift] one. +//! +//! The most interesting modules here are `syntax_node` (which defines concrete +//! syntax tree) and `ast` (which defines abstract syntax tree on top of the +//! CST). The actual parser live in a separate `parser` crate, though the +//! lexer lives in this crate. +//! +//! See `api_walkthrough` test in this file for a quick API tour! +//! +//! [RFC]: +//! [Swift]: + +#[allow(unused)] +macro_rules! eprintln { + ($($tt:tt)*) => { stdx::eprintln!($($tt)*) }; +} + +mod syntax_node; +mod syntax_error; +mod parsing; +mod validation; +mod ptr; +#[cfg(test)] +mod tests; + +pub mod algo; +pub mod ast; +#[doc(hidden)] +pub mod fuzz; + +use std::{marker::PhantomData, sync::Arc}; + +use stdx::format_to; +use text_edit::Indel; + +pub use crate::{ + algo::InsertPosition, + ast::{AstNode, AstToken}, + parsing::{lex_single_syntax_kind, lex_single_valid_syntax_kind, tokenize, Token}, + ptr::{AstPtr, SyntaxNodePtr}, + syntax_error::SyntaxError, + syntax_node::{ + Direction, GreenNode, NodeOrToken, SyntaxElement, SyntaxElementChildren, SyntaxNode, + SyntaxNodeChildren, SyntaxToken, SyntaxTreeBuilder, + }, +}; +pub use parser::{SyntaxKind, T}; +pub use rowan::{SmolStr, SyntaxText, TextRange, TextSize, TokenAtOffset, WalkEvent}; + +/// `Parse` is the result of the parsing: a syntax tree and a collection of +/// errors. +/// +/// Note that we always produce a syntax tree, even for completely invalid +/// files. 
+#[derive(Debug, PartialEq, Eq)] +pub struct Parse { + green: GreenNode, + errors: Arc>, + _ty: PhantomData T>, +} + +impl Clone for Parse { + fn clone(&self) -> Parse { + Parse { green: self.green.clone(), errors: self.errors.clone(), _ty: PhantomData } + } +} + +impl Parse { + fn new(green: GreenNode, errors: Vec) -> Parse { + Parse { green, errors: Arc::new(errors), _ty: PhantomData } + } + + pub fn syntax_node(&self) -> SyntaxNode { + SyntaxNode::new_root(self.green.clone()) + } +} + +impl Parse { + pub fn to_syntax(self) -> Parse { + Parse { green: self.green, errors: self.errors, _ty: PhantomData } + } + + pub fn tree(&self) -> T { + T::cast(self.syntax_node()).unwrap() + } + + pub fn errors(&self) -> &[SyntaxError] { + &*self.errors + } + + pub fn ok(self) -> Result>> { + if self.errors.is_empty() { + Ok(self.tree()) + } else { + Err(self.errors) + } + } +} + +impl Parse { + pub fn cast(self) -> Option> { + if N::cast(self.syntax_node()).is_some() { + Some(Parse { green: self.green, errors: self.errors, _ty: PhantomData }) + } else { + None + } + } +} + +impl Parse { + pub fn debug_dump(&self) -> String { + let mut buf = format!("{:#?}", self.tree().syntax()); + for err in self.errors.iter() { + format_to!(buf, "error {:?}: {}\n", err.range(), err); + } + buf + } + + pub fn reparse(&self, indel: &Indel) -> Parse { + self.incremental_reparse(indel).unwrap_or_else(|| self.full_reparse(indel)) + } + + fn incremental_reparse(&self, indel: &Indel) -> Option> { + // FIXME: validation errors are not handled here + parsing::incremental_reparse(self.tree().syntax(), indel, self.errors.to_vec()).map( + |(green_node, errors, _reparsed_range)| Parse { + green: green_node, + errors: Arc::new(errors), + _ty: PhantomData, + }, + ) + } + + fn full_reparse(&self, indel: &Indel) -> Parse { + let mut text = self.tree().syntax().text().to_string(); + indel.apply(&mut text); + SourceFile::parse(&text) + } +} + +/// `SourceFile` represents a parse tree for a single Rust file. 
+pub use crate::ast::SourceFile; + +impl SourceFile { + pub fn parse(text: &str) -> Parse { + let (green, mut errors) = parsing::parse_text(text); + let root = SyntaxNode::new_root(green.clone()); + + if cfg!(debug_assertions) { + validation::validate_block_structure(&root); + } + + errors.extend(validation::validate(&root)); + + assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE); + Parse { green, errors: Arc::new(errors), _ty: PhantomData } + } +} + +impl ast::Path { + /// Returns `text`, parsed as a path, but only if it has no errors. + pub fn parse(text: &str) -> Result { + parsing::parse_text_fragment(text, parser::FragmentKind::Path) + } +} + +impl ast::Pat { + /// Returns `text`, parsed as a pattern, but only if it has no errors. + pub fn parse(text: &str) -> Result { + parsing::parse_text_fragment(text, parser::FragmentKind::Pattern) + } +} + +impl ast::Expr { + /// Returns `text`, parsed as an expression, but only if it has no errors. + pub fn parse(text: &str) -> Result { + parsing::parse_text_fragment(text, parser::FragmentKind::Expr) + } +} + +impl ast::Item { + /// Returns `text`, parsed as an item, but only if it has no errors. + pub fn parse(text: &str) -> Result { + parsing::parse_text_fragment(text, parser::FragmentKind::Item) + } +} + +impl ast::Type { + /// Returns `text`, parsed as an type reference, but only if it has no errors. + pub fn parse(text: &str) -> Result { + parsing::parse_text_fragment(text, parser::FragmentKind::Type) + } +} + +/// Matches a `SyntaxNode` against an `ast` type. +/// +/// # Example: +/// +/// ```ignore +/// match_ast! { +/// match node { +/// ast::CallExpr(it) => { ... }, +/// ast::MethodCallExpr(it) => { ... }, +/// ast::MacroCall(it) => { ... }, +/// _ => None, +/// } +/// } +/// ``` +#[macro_export] +macro_rules! match_ast { + (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) }; + + (match ($node:expr) { + $( ast::$ast:ident($it:ident) => $res:expr, )* + _ => $catch_all:expr $(,)? 
+ }) => {{ + $( if let Some($it) = ast::$ast::cast($node.clone()) { $res } else )* + { $catch_all } + }}; +} + +/// This test does not assert anything and instead just shows off the crate's +/// API. +#[test] +fn api_walkthrough() { + use ast::{ModuleItemOwner, NameOwner}; + + let source_code = " + fn foo() { + 1 + 1 + } + "; + // `SourceFile` is the main entry point. + // + // The `parse` method returns a `Parse` -- a pair of syntax tree and a list + // of errors. That is, syntax tree is constructed even in presence of errors. + let parse = SourceFile::parse(source_code); + assert!(parse.errors().is_empty()); + + // The `tree` method returns an owned syntax node of type `SourceFile`. + // Owned nodes are cheap: inside, they are `Rc` handles to the underling data. + let file: SourceFile = parse.tree(); + + // `SourceFile` is the root of the syntax tree. We can iterate file's items. + // Let's fetch the `foo` function. + let mut func = None; + for item in file.items() { + match item { + ast::Item::Fn(f) => func = Some(f), + _ => unreachable!(), + } + } + let func: ast::Fn = func.unwrap(); + + // Each AST node has a bunch of getters for children. All getters return + // `Option`s though, to account for incomplete code. Some getters are common + // for several kinds of node. In this case, a trait like `ast::NameOwner` + // usually exists. By convention, all ast types should be used with `ast::` + // qualifier. + let name: Option = func.name(); + let name = name.unwrap(); + assert_eq!(name.text(), "foo"); + + // Let's get the `1 + 1` expression! + let body: ast::BlockExpr = func.body().unwrap(); + let expr: ast::Expr = body.expr().unwrap(); + + // Enums are used to group related ast nodes together, and can be used for + // matching. 
However, because there are no public fields, it's possible to + // match only the top level enum: that is the price we pay for increased API + // flexibility + let bin_expr: &ast::BinExpr = match &expr { + ast::Expr::BinExpr(e) => e, + _ => unreachable!(), + }; + + // Besides the "typed" AST API, there's an untyped CST one as well. + // To switch from AST to CST, call `.syntax()` method: + let expr_syntax: &SyntaxNode = expr.syntax(); + + // Note how `expr` and `bin_expr` are in fact the same node underneath: + assert!(expr_syntax == bin_expr.syntax()); + + // To go from CST to AST, `AstNode::cast` function is used: + let _expr: ast::Expr = match ast::Expr::cast(expr_syntax.clone()) { + Some(e) => e, + None => unreachable!(), + }; + + // The two properties each syntax node has is a `SyntaxKind`: + assert_eq!(expr_syntax.kind(), SyntaxKind::BIN_EXPR); + + // And text range: + assert_eq!(expr_syntax.text_range(), TextRange::new(32.into(), 37.into())); + + // You can get node's text as a `SyntaxText` object, which will traverse the + // tree collecting token's text: + let text: SyntaxText = expr_syntax.text(); + assert_eq!(text.to_string(), "1 + 1"); + + // There's a bunch of traversal methods on `SyntaxNode`: + assert_eq!(expr_syntax.parent().as_ref(), Some(body.syntax())); + assert_eq!(body.syntax().first_child_or_token().map(|it| it.kind()), Some(T!['{'])); + assert_eq!( + expr_syntax.next_sibling_or_token().map(|it| it.kind()), + Some(SyntaxKind::WHITESPACE) + ); + + // As well as some iterator helpers: + let f = expr_syntax.ancestors().find_map(ast::Fn::cast); + assert_eq!(f, Some(func)); + assert!(expr_syntax.siblings_with_tokens(Direction::Next).any(|it| it.kind() == T!['}'])); + assert_eq!( + expr_syntax.descendants_with_tokens().count(), + 8, // 5 tokens `1`, ` `, `+`, ` `, `!` + // 2 child literal expressions: `1`, `1` + // 1 the node itself: `1 + 1` + ); + + // There's also a `preorder` method with a more fine-grained iteration control: + let mut buf = 
String::new(); + let mut indent = 0; + for event in expr_syntax.preorder_with_tokens() { + match event { + WalkEvent::Enter(node) => { + let text = match &node { + NodeOrToken::Node(it) => it.text().to_string(), + NodeOrToken::Token(it) => it.text().to_string(), + }; + format_to!(buf, "{:indent$}{:?} {:?}\n", " ", text, node.kind(), indent = indent); + indent += 2; + } + WalkEvent::Leave(_) => indent -= 2, + } + } + assert_eq!(indent, 0); + assert_eq!( + buf.trim(), + r#" +"1 + 1" BIN_EXPR + "1" LITERAL + "1" INT_NUMBER + " " WHITESPACE + "+" PLUS + " " WHITESPACE + "1" LITERAL + "1" INT_NUMBER +"# + .trim() + ); + + // To recursively process the tree, there are three approaches: + // 1. explicitly call getter methods on AST nodes. + // 2. use descendants and `AstNode::cast`. + // 3. use descendants and `match_ast!`. + // + // Here's how the first one looks like: + let exprs_cast: Vec = file + .syntax() + .descendants() + .filter_map(ast::Expr::cast) + .map(|expr| expr.syntax().text().to_string()) + .collect(); + + // An alternative is to use a macro. + let mut exprs_visit = Vec::new(); + for node in file.syntax().descendants() { + match_ast! { + match node { + ast::Expr(it) => { + let res = it.syntax().text().to_string(); + exprs_visit.push(res); + }, + _ => (), + } + } + } + assert_eq!(exprs_cast, exprs_visit); +} diff --git a/crates/syntax/src/parsing.rs b/crates/syntax/src/parsing.rs new file mode 100644 index 0000000000..68a39eb210 --- /dev/null +++ b/crates/syntax/src/parsing.rs @@ -0,0 +1,59 @@ +//! Lexing, bridging to parser (which does the actual parsing) and +//! incremental reparsing. 
+ +mod lexer; +mod text_token_source; +mod text_tree_sink; +mod reparsing; + +use crate::{syntax_node::GreenNode, AstNode, SyntaxError, SyntaxNode}; +use text_token_source::TextTokenSource; +use text_tree_sink::TextTreeSink; + +pub use lexer::*; + +pub(crate) use self::reparsing::incremental_reparse; +use parser::SyntaxKind; + +pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec) { + let (tokens, lexer_errors) = tokenize(&text); + + let mut token_source = TextTokenSource::new(text, &tokens); + let mut tree_sink = TextTreeSink::new(text, &tokens); + + parser::parse(&mut token_source, &mut tree_sink); + + let (tree, mut parser_errors) = tree_sink.finish(); + parser_errors.extend(lexer_errors); + + (tree, parser_errors) +} + +/// Returns `text` parsed as a `T` provided there are no parse errors. +pub(crate) fn parse_text_fragment( + text: &str, + fragment_kind: parser::FragmentKind, +) -> Result { + let (tokens, lexer_errors) = tokenize(&text); + if !lexer_errors.is_empty() { + return Err(()); + } + + let mut token_source = TextTokenSource::new(text, &tokens); + let mut tree_sink = TextTreeSink::new(text, &tokens); + + // TextTreeSink assumes that there's at least some root node to which it can attach errors and + // tokens. We arbitrarily give it a SourceFile. 
+ use parser::TreeSink; + tree_sink.start_node(SyntaxKind::SOURCE_FILE); + parser::parse_fragment(&mut token_source, &mut tree_sink, fragment_kind); + tree_sink.finish_node(); + + let (tree, parser_errors) = tree_sink.finish(); + use parser::TokenSource; + if !parser_errors.is_empty() || token_source.current().kind != SyntaxKind::EOF { + return Err(()); + } + + SyntaxNode::new_root(tree).first_child().and_then(T::cast).ok_or(()) +} diff --git a/crates/ra_syntax/src/parsing/lexer.rs b/crates/syntax/src/parsing/lexer.rs similarity index 100% rename from crates/ra_syntax/src/parsing/lexer.rs rename to crates/syntax/src/parsing/lexer.rs diff --git a/crates/syntax/src/parsing/reparsing.rs b/crates/syntax/src/parsing/reparsing.rs new file mode 100644 index 0000000000..4149f856a8 --- /dev/null +++ b/crates/syntax/src/parsing/reparsing.rs @@ -0,0 +1,455 @@ +//! Implementation of incremental re-parsing. +//! +//! We use two simple strategies for this: +//! - if the edit modifies only a single token (like changing an identifier's +//! letter), we replace only this token. +//! - otherwise, we search for the nearest `{}` block which contains the edit +//! and try to parse only this block. 
+ +use parser::Reparser; +use text_edit::Indel; + +use crate::{ + algo, + parsing::{ + lexer::{lex_single_syntax_kind, tokenize, Token}, + text_token_source::TextTokenSource, + text_tree_sink::TextTreeSink, + }, + syntax_node::{GreenNode, GreenToken, NodeOrToken, SyntaxElement, SyntaxNode}, + SyntaxError, + SyntaxKind::*, + TextRange, TextSize, T, +}; + +pub(crate) fn incremental_reparse( + node: &SyntaxNode, + edit: &Indel, + errors: Vec, +) -> Option<(GreenNode, Vec, TextRange)> { + if let Some((green, new_errors, old_range)) = reparse_token(node, &edit) { + return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range)); + } + + if let Some((green, new_errors, old_range)) = reparse_block(node, &edit) { + return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range)); + } + None +} + +fn reparse_token<'node>( + root: &'node SyntaxNode, + edit: &Indel, +) -> Option<(GreenNode, Vec, TextRange)> { + let prev_token = algo::find_covering_element(root, edit.delete).as_token()?.clone(); + let prev_token_kind = prev_token.kind(); + match prev_token_kind { + WHITESPACE | COMMENT | IDENT | STRING | RAW_STRING => { + if prev_token_kind == WHITESPACE || prev_token_kind == COMMENT { + // removing a new line may extends previous token + let deleted_range = edit.delete - prev_token.text_range().start(); + if prev_token.text()[deleted_range].contains('\n') { + return None; + } + } + + let mut new_text = get_text_after_edit(prev_token.clone().into(), &edit); + let (new_token_kind, new_err) = lex_single_syntax_kind(&new_text)?; + + if new_token_kind != prev_token_kind + || (new_token_kind == IDENT && is_contextual_kw(&new_text)) + { + return None; + } + + // Check that edited token is not a part of the bigger token. + // E.g. 
if for source code `bruh"str"` the user removed `ruh`, then + // `b` no longer remains an identifier, but becomes a part of byte string literal + if let Some(next_char) = root.text().char_at(prev_token.text_range().end()) { + new_text.push(next_char); + let token_with_next_char = lex_single_syntax_kind(&new_text); + if let Some((_kind, _error)) = token_with_next_char { + return None; + } + new_text.pop(); + } + + let new_token = + GreenToken::new(rowan::SyntaxKind(prev_token_kind.into()), new_text.into()); + Some(( + prev_token.replace_with(new_token), + new_err.into_iter().collect(), + prev_token.text_range(), + )) + } + _ => None, + } +} + +fn reparse_block<'node>( + root: &'node SyntaxNode, + edit: &Indel, +) -> Option<(GreenNode, Vec, TextRange)> { + let (node, reparser) = find_reparsable_node(root, edit.delete)?; + let text = get_text_after_edit(node.clone().into(), edit); + + let (tokens, new_lexer_errors) = tokenize(&text); + if !is_balanced(&tokens) { + return None; + } + + let mut token_source = TextTokenSource::new(&text, &tokens); + let mut tree_sink = TextTreeSink::new(&text, &tokens); + reparser.parse(&mut token_source, &mut tree_sink); + + let (green, mut new_parser_errors) = tree_sink.finish(); + new_parser_errors.extend(new_lexer_errors); + + Some((node.replace_with(green), new_parser_errors, node.text_range())) +} + +fn get_text_after_edit(element: SyntaxElement, edit: &Indel) -> String { + let edit = Indel::replace(edit.delete - element.text_range().start(), edit.insert.clone()); + + let mut text = match element { + NodeOrToken::Token(token) => token.text().to_string(), + NodeOrToken::Node(node) => node.text().to_string(), + }; + edit.apply(&mut text); + text +} + +fn is_contextual_kw(text: &str) -> bool { + matches!(text, "auto" | "default" | "union") +} + +fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> Option<(SyntaxNode, Reparser)> { + let node = algo::find_covering_element(node, range); + + let mut ancestors = match node { + 
NodeOrToken::Token(it) => it.parent().ancestors(), + NodeOrToken::Node(it) => it.ancestors(), + }; + ancestors.find_map(|node| { + let first_child = node.first_child_or_token().map(|it| it.kind()); + let parent = node.parent().map(|it| it.kind()); + Reparser::for_node(node.kind(), first_child, parent).map(|r| (node, r)) + }) +} + +fn is_balanced(tokens: &[Token]) -> bool { + if tokens.is_empty() + || tokens.first().unwrap().kind != T!['{'] + || tokens.last().unwrap().kind != T!['}'] + { + return false; + } + let mut balance = 0usize; + for t in &tokens[1..tokens.len() - 1] { + match t.kind { + T!['{'] => balance += 1, + T!['}'] => { + balance = match balance.checked_sub(1) { + Some(b) => b, + None => return false, + } + } + _ => (), + } + } + balance == 0 +} + +fn merge_errors( + old_errors: Vec, + new_errors: Vec, + range_before_reparse: TextRange, + edit: &Indel, +) -> Vec { + let mut res = Vec::new(); + + for old_err in old_errors { + let old_err_range = old_err.range(); + if old_err_range.end() <= range_before_reparse.start() { + res.push(old_err); + } else if old_err_range.start() >= range_before_reparse.end() { + let inserted_len = TextSize::of(&edit.insert); + res.push(old_err.with_range((old_err_range + inserted_len) - edit.delete.len())); + // Note: extra parens are intentional to prevent uint underflow, HWAB (here was a bug) + } + } + res.extend(new_errors.into_iter().map(|new_err| { + // fighting borrow checker with a variable ;) + let offseted_range = new_err.range() + range_before_reparse.start(); + new_err.with_range(offseted_range) + })); + res +} + +#[cfg(test)] +mod tests { + use test_utils::{assert_eq_text, extract_range}; + + use super::*; + use crate::{AstNode, Parse, SourceFile}; + + fn do_check(before: &str, replace_with: &str, reparsed_len: u32) { + let (range, before) = extract_range(before); + let edit = Indel::replace(range, replace_with.to_owned()); + let after = { + let mut after = before.clone(); + edit.apply(&mut after); + after + }; + 
+ let fully_reparsed = SourceFile::parse(&after); + let incrementally_reparsed: Parse = { + let before = SourceFile::parse(&before); + let (green, new_errors, range) = + incremental_reparse(before.tree().syntax(), &edit, before.errors.to_vec()).unwrap(); + assert_eq!(range.len(), reparsed_len.into(), "reparsed fragment has wrong length"); + Parse::new(green, new_errors) + }; + + assert_eq_text!( + &format!("{:#?}", fully_reparsed.tree().syntax()), + &format!("{:#?}", incrementally_reparsed.tree().syntax()), + ); + assert_eq!(fully_reparsed.errors(), incrementally_reparsed.errors()); + } + + #[test] // FIXME: some test here actually test token reparsing + fn reparse_block_tests() { + do_check( + r" +fn foo() { + let x = foo + <|>bar<|> +} +", + "baz", + 3, + ); + do_check( + r" +fn foo() { + let x = foo<|> + bar<|> +} +", + "baz", + 25, + ); + do_check( + r" +struct Foo { + f: foo<|><|> +} +", + ",\n g: (),", + 14, + ); + do_check( + r" +fn foo { + let; + 1 + 1; + <|>92<|>; +} +", + "62", + 31, // FIXME: reparse only int literal here + ); + do_check( + r" +mod foo { + fn <|><|> +} +", + "bar", + 11, + ); + + do_check( + r" +trait Foo { + type <|>Foo<|>; +} +", + "Output", + 3, + ); + do_check( + r" +impl IntoIterator for Foo { + f<|><|> +} +", + "n next(", + 9, + ); + do_check(r"use a::b::{foo,<|>,bar<|>};", "baz", 10); + do_check( + r" +pub enum A { + Foo<|><|> +} +", + "\nBar;\n", + 11, + ); + do_check( + r" +foo!{a, b<|><|> d} +", + ", c[3]", + 8, + ); + do_check( + r" +fn foo() { + vec![<|><|>] +} +", + "123", + 14, + ); + do_check( + r" +extern { + fn<|>;<|> +} +", + " exit(code: c_int)", + 11, + ); + } + + #[test] + fn reparse_token_tests() { + do_check( + r"<|><|> +fn foo() -> i32 { 1 } +", + "\n\n\n \n", + 1, + ); + do_check( + r" +fn foo() -> <|><|> {} +", + " \n", + 2, + ); + do_check( + r" +fn <|>foo<|>() -> i32 { 1 } +", + "bar", + 3, + ); + do_check( + r" +fn foo<|><|>foo() { } +", + "bar", + 6, + ); + do_check( + r" +fn foo /* <|><|> */ () {} +", + 
"some comment", + 6, + ); + do_check( + r" +fn baz <|><|> () {} +", + " \t\t\n\n", + 2, + ); + do_check( + r" +fn baz <|><|> () {} +", + " \t\t\n\n", + 2, + ); + do_check( + r" +/// foo <|><|>omment +mod { } +", + "c", + 14, + ); + do_check( + r#" +fn -> &str { "Hello<|><|>" } +"#, + ", world", + 7, + ); + do_check( + r#" +fn -> &str { // "Hello<|><|>" +"#, + ", world", + 10, + ); + do_check( + r##" +fn -> &str { r#"Hello<|><|>"# +"##, + ", world", + 10, + ); + do_check( + r" +#[derive(<|>Copy<|>)] +enum Foo { + +} +", + "Clone", + 4, + ); + } + + #[test] + fn reparse_str_token_with_error_unchanged() { + do_check(r#""<|>Unclosed<|> string literal"#, "Still unclosed", 24); + } + + #[test] + fn reparse_str_token_with_error_fixed() { + do_check(r#""unterinated<|><|>"#, "\"", 12); + } + + #[test] + fn reparse_block_with_error_in_middle_unchanged() { + do_check( + r#"fn main() { + if {} + 32 + 4<|><|> + return + if {} + }"#, + "23", + 105, + ) + } + + #[test] + fn reparse_block_with_error_in_middle_fixed() { + do_check( + r#"fn main() { + if {} + 32 + 4<|><|> + return + if {} + }"#, + ";", + 105, + ) + } +} diff --git a/crates/syntax/src/parsing/text_token_source.rs b/crates/syntax/src/parsing/text_token_source.rs new file mode 100644 index 0000000000..df866dc2b7 --- /dev/null +++ b/crates/syntax/src/parsing/text_token_source.rs @@ -0,0 +1,84 @@ +//! See `TextTokenSource` docs. + +use parser::TokenSource; + +use crate::{parsing::lexer::Token, SyntaxKind::EOF, TextRange, TextSize}; + +/// Implementation of `parser::TokenSource` that takes tokens from source code text. 
+pub(crate) struct TextTokenSource<'t> { + text: &'t str, + /// token and its start position (non-whitespace/comment tokens) + /// ```non-rust + /// struct Foo; + /// ^------^--^- + /// | | \________ + /// | \____ \ + /// | \ | + /// (struct, 0) (Foo, 7) (;, 10) + /// ``` + /// `[(struct, 0), (Foo, 7), (;, 10)]` + token_offset_pairs: Vec<(Token, TextSize)>, + + /// Current token and position + curr: (parser::Token, usize), +} + +impl<'t> TokenSource for TextTokenSource<'t> { + fn current(&self) -> parser::Token { + self.curr.0 + } + + fn lookahead_nth(&self, n: usize) -> parser::Token { + mk_token(self.curr.1 + n, &self.token_offset_pairs) + } + + fn bump(&mut self) { + if self.curr.0.kind == EOF { + return; + } + + let pos = self.curr.1 + 1; + self.curr = (mk_token(pos, &self.token_offset_pairs), pos); + } + + fn is_keyword(&self, kw: &str) -> bool { + self.token_offset_pairs + .get(self.curr.1) + .map(|(token, offset)| &self.text[TextRange::at(*offset, token.len)] == kw) + .unwrap_or(false) + } +} + +fn mk_token(pos: usize, token_offset_pairs: &[(Token, TextSize)]) -> parser::Token { + let (kind, is_jointed_to_next) = match token_offset_pairs.get(pos) { + Some((token, offset)) => ( + token.kind, + token_offset_pairs + .get(pos + 1) + .map(|(_, next_offset)| offset + token.len == *next_offset) + .unwrap_or(false), + ), + None => (EOF, false), + }; + parser::Token { kind, is_jointed_to_next } +} + +impl<'t> TextTokenSource<'t> { + /// Generate input from tokens(expect comment and whitespace). 
+ pub fn new(text: &'t str, raw_tokens: &'t [Token]) -> TextTokenSource<'t> { + let token_offset_pairs: Vec<_> = raw_tokens + .iter() + .filter_map({ + let mut len = 0.into(); + move |token| { + let pair = if token.kind.is_trivia() { None } else { Some((*token, len)) }; + len += token.len; + pair + } + }) + .collect(); + + let first = mk_token(0, &token_offset_pairs); + TextTokenSource { text, token_offset_pairs, curr: (first, 0) } + } +} diff --git a/crates/syntax/src/parsing/text_tree_sink.rs b/crates/syntax/src/parsing/text_tree_sink.rs new file mode 100644 index 0000000000..c1b5f246d1 --- /dev/null +++ b/crates/syntax/src/parsing/text_tree_sink.rs @@ -0,0 +1,183 @@ +//! FIXME: write short doc here + +use std::mem; + +use parser::{ParseError, TreeSink}; + +use crate::{ + parsing::Token, + syntax_node::GreenNode, + SmolStr, SyntaxError, + SyntaxKind::{self, *}, + SyntaxTreeBuilder, TextRange, TextSize, +}; + +/// Bridges the parser with our specific syntax tree representation. +/// +/// `TextTreeSink` also handles attachment of trivia (whitespace) to nodes. 
+pub(crate) struct TextTreeSink<'a> { + text: &'a str, + tokens: &'a [Token], + text_pos: TextSize, + token_pos: usize, + state: State, + inner: SyntaxTreeBuilder, +} + +enum State { + PendingStart, + Normal, + PendingFinish, +} + +impl<'a> TreeSink for TextTreeSink<'a> { + fn token(&mut self, kind: SyntaxKind, n_tokens: u8) { + match mem::replace(&mut self.state, State::Normal) { + State::PendingStart => unreachable!(), + State::PendingFinish => self.inner.finish_node(), + State::Normal => (), + } + self.eat_trivias(); + let n_tokens = n_tokens as usize; + let len = self.tokens[self.token_pos..self.token_pos + n_tokens] + .iter() + .map(|it| it.len) + .sum::(); + self.do_token(kind, len, n_tokens); + } + + fn start_node(&mut self, kind: SyntaxKind) { + match mem::replace(&mut self.state, State::Normal) { + State::PendingStart => { + self.inner.start_node(kind); + // No need to attach trivias to previous node: there is no + // previous node. + return; + } + State::PendingFinish => self.inner.finish_node(), + State::Normal => (), + } + + let n_trivias = + self.tokens[self.token_pos..].iter().take_while(|it| it.kind.is_trivia()).count(); + let leading_trivias = &self.tokens[self.token_pos..self.token_pos + n_trivias]; + let mut trivia_end = + self.text_pos + leading_trivias.iter().map(|it| it.len).sum::(); + + let n_attached_trivias = { + let leading_trivias = leading_trivias.iter().rev().map(|it| { + let next_end = trivia_end - it.len; + let range = TextRange::new(next_end, trivia_end); + trivia_end = next_end; + (it.kind, &self.text[range]) + }); + n_attached_trivias(kind, leading_trivias) + }; + self.eat_n_trivias(n_trivias - n_attached_trivias); + self.inner.start_node(kind); + self.eat_n_trivias(n_attached_trivias); + } + + fn finish_node(&mut self) { + match mem::replace(&mut self.state, State::PendingFinish) { + State::PendingStart => unreachable!(), + State::PendingFinish => self.inner.finish_node(), + State::Normal => (), + } + } + + fn error(&mut self, 
error: ParseError) { + self.inner.error(error, self.text_pos) + } +} + +impl<'a> TextTreeSink<'a> { + pub(super) fn new(text: &'a str, tokens: &'a [Token]) -> Self { + Self { + text, + tokens, + text_pos: 0.into(), + token_pos: 0, + state: State::PendingStart, + inner: SyntaxTreeBuilder::default(), + } + } + + pub(super) fn finish(mut self) -> (GreenNode, Vec) { + match mem::replace(&mut self.state, State::Normal) { + State::PendingFinish => { + self.eat_trivias(); + self.inner.finish_node() + } + State::PendingStart | State::Normal => unreachable!(), + } + + self.inner.finish_raw() + } + + fn eat_trivias(&mut self) { + while let Some(&token) = self.tokens.get(self.token_pos) { + if !token.kind.is_trivia() { + break; + } + self.do_token(token.kind, token.len, 1); + } + } + + fn eat_n_trivias(&mut self, n: usize) { + for _ in 0..n { + let token = self.tokens[self.token_pos]; + assert!(token.kind.is_trivia()); + self.do_token(token.kind, token.len, 1); + } + } + + fn do_token(&mut self, kind: SyntaxKind, len: TextSize, n_tokens: usize) { + let range = TextRange::at(self.text_pos, len); + let text: SmolStr = self.text[range].into(); + self.text_pos += len; + self.token_pos += n_tokens; + self.inner.token(kind, text); + } +} + +fn n_attached_trivias<'a>( + kind: SyntaxKind, + trivias: impl Iterator, +) -> usize { + match kind { + MACRO_CALL | CONST | TYPE_ALIAS | STRUCT | ENUM | VARIANT | FN | TRAIT | MODULE + | RECORD_FIELD | STATIC => { + let mut res = 0; + let mut trivias = trivias.enumerate().peekable(); + + while let Some((i, (kind, text))) = trivias.next() { + match kind { + WHITESPACE => { + if text.contains("\n\n") { + // we check whether the next token is a doc-comment + // and skip the whitespace in this case + if let Some((peek_kind, peek_text)) = + trivias.peek().map(|(_, pair)| pair) + { + if *peek_kind == COMMENT + && peek_text.starts_with("///") + && !peek_text.starts_with("////") + { + continue; + } + } + break; + } + } + COMMENT => { + res = i + 1; + } 
+ _ => (), + } + } + res + } + _ => 0, + } +} diff --git a/crates/ra_syntax/src/ptr.rs b/crates/syntax/src/ptr.rs similarity index 100% rename from crates/ra_syntax/src/ptr.rs rename to crates/syntax/src/ptr.rs diff --git a/crates/ra_syntax/src/syntax_error.rs b/crates/syntax/src/syntax_error.rs similarity index 100% rename from crates/ra_syntax/src/syntax_error.rs rename to crates/syntax/src/syntax_error.rs diff --git a/crates/syntax/src/syntax_node.rs b/crates/syntax/src/syntax_node.rs new file mode 100644 index 0000000000..b2abcbfbb3 --- /dev/null +++ b/crates/syntax/src/syntax_node.rs @@ -0,0 +1,77 @@ +//! This module defines Concrete Syntax Tree (CST), used by rust-analyzer. +//! +//! The CST includes comments and whitespace, provides a single node type, +//! `SyntaxNode`, and a basic traversal API (parent, children, siblings). +//! +//! The *real* implementation is in the (language-agnostic) `rowan` crate, this +//! module just wraps its API. + +use rowan::{GreenNodeBuilder, Language}; + +use crate::{Parse, SmolStr, SyntaxError, SyntaxKind, TextSize}; + +pub use rowan::GreenNode; + +pub(crate) use rowan::GreenToken; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub enum RustLanguage {} +impl Language for RustLanguage { + type Kind = SyntaxKind; + + fn kind_from_raw(raw: rowan::SyntaxKind) -> SyntaxKind { + SyntaxKind::from(raw.0) + } + + fn kind_to_raw(kind: SyntaxKind) -> rowan::SyntaxKind { + rowan::SyntaxKind(kind.into()) + } +} + +pub type SyntaxNode = rowan::SyntaxNode; +pub type SyntaxToken = rowan::SyntaxToken; +pub type SyntaxElement = rowan::SyntaxElement; +pub type SyntaxNodeChildren = rowan::SyntaxNodeChildren; +pub type SyntaxElementChildren = rowan::SyntaxElementChildren; + +pub use rowan::{Direction, NodeOrToken}; + +#[derive(Default)] +pub struct SyntaxTreeBuilder { + errors: Vec, + inner: GreenNodeBuilder<'static>, +} + +impl SyntaxTreeBuilder { + pub(crate) fn finish_raw(self) -> (GreenNode, Vec) { + let green = 
self.inner.finish(); + (green, self.errors) + } + + pub fn finish(self) -> Parse { + let (green, errors) = self.finish_raw(); + if cfg!(debug_assertions) { + let node = SyntaxNode::new_root(green.clone()); + crate::validation::validate_block_structure(&node); + } + Parse::new(green, errors) + } + + pub fn token(&mut self, kind: SyntaxKind, text: SmolStr) { + let kind = RustLanguage::kind_to_raw(kind); + self.inner.token(kind, text) + } + + pub fn start_node(&mut self, kind: SyntaxKind) { + let kind = RustLanguage::kind_to_raw(kind); + self.inner.start_node(kind) + } + + pub fn finish_node(&mut self) { + self.inner.finish_node() + } + + pub fn error(&mut self, error: parser::ParseError, text_pos: TextSize) { + self.errors.push(SyntaxError::new_at_offset(*error.0, text_pos)) + } +} diff --git a/crates/syntax/src/tests.rs b/crates/syntax/src/tests.rs new file mode 100644 index 0000000000..ddc7183694 --- /dev/null +++ b/crates/syntax/src/tests.rs @@ -0,0 +1,280 @@ +use std::{ + fmt::Write, + fs, + path::{Path, PathBuf}, +}; + +use expect::expect_file; +use rayon::prelude::*; +use test_utils::project_dir; + +use crate::{fuzz, tokenize, SourceFile, SyntaxError, TextRange, TextSize, Token}; + +#[test] +fn lexer_tests() { + // FIXME: + // * Add tests for unicode escapes in byte-character and [raw]-byte-string literals + // * Add tests for unescape errors + + dir_tests(&test_data_dir(), &["lexer/ok"], "txt", |text, path| { + let (tokens, errors) = tokenize(text); + assert_errors_are_absent(&errors, path); + dump_tokens_and_errors(&tokens, &errors, text) + }); + dir_tests(&test_data_dir(), &["lexer/err"], "txt", |text, path| { + let (tokens, errors) = tokenize(text); + assert_errors_are_present(&errors, path); + dump_tokens_and_errors(&tokens, &errors, text) + }); +} + +#[test] +fn parse_smoke_test() { + let code = r##" +fn main() { + println!("Hello, world!") +} + "##; + + let parse = SourceFile::parse(code); + // eprintln!("{:#?}", parse.syntax_node()); + 
assert!(parse.ok().is_ok()); +} + +#[test] +fn parser_tests() { + dir_tests(&test_data_dir(), &["parser/inline/ok", "parser/ok"], "rast", |text, path| { + let parse = SourceFile::parse(text); + let errors = parse.errors(); + assert_errors_are_absent(&errors, path); + parse.debug_dump() + }); + dir_tests(&test_data_dir(), &["parser/err", "parser/inline/err"], "rast", |text, path| { + let parse = SourceFile::parse(text); + let errors = parse.errors(); + assert_errors_are_present(&errors, path); + parse.debug_dump() + }); +} + +#[test] +fn expr_parser_tests() { + fragment_parser_dir_test( + &["parser/fragments/expr/ok"], + &["parser/fragments/expr/err"], + crate::ast::Expr::parse, + ); +} + +#[test] +fn path_parser_tests() { + fragment_parser_dir_test( + &["parser/fragments/path/ok"], + &["parser/fragments/path/err"], + crate::ast::Path::parse, + ); +} + +#[test] +fn pattern_parser_tests() { + fragment_parser_dir_test( + &["parser/fragments/pattern/ok"], + &["parser/fragments/pattern/err"], + crate::ast::Pat::parse, + ); +} + +#[test] +fn item_parser_tests() { + fragment_parser_dir_test( + &["parser/fragments/item/ok"], + &["parser/fragments/item/err"], + crate::ast::Item::parse, + ); +} + +#[test] +fn type_parser_tests() { + fragment_parser_dir_test( + &["parser/fragments/type/ok"], + &["parser/fragments/type/err"], + crate::ast::Type::parse, + ); +} + +#[test] +fn parser_fuzz_tests() { + for (_, text) in collect_rust_files(&test_data_dir(), &["parser/fuzz-failures"]) { + fuzz::check_parser(&text) + } +} + +#[test] +fn reparse_fuzz_tests() { + for (_, text) in collect_rust_files(&test_data_dir(), &["reparse/fuzz-failures"]) { + let check = fuzz::CheckReparse::from_data(text.as_bytes()).unwrap(); + println!("{:?}", check); + check.run(); + } +} + +/// Test that Rust-analyzer can parse and validate the rust-analyzer +/// FIXME: Use this as a benchmark +#[test] +fn self_hosting_parsing() { + let dir = project_dir().join("crates"); + let files = 
walkdir::WalkDir::new(dir) + .into_iter() + .filter_entry(|entry| { + // Get all files which are not in the crates/syntax/test_data folder + !entry.path().components().any(|component| component.as_os_str() == "test_data") + }) + .map(|e| e.unwrap()) + .filter(|entry| { + // Get all `.rs ` files + !entry.path().is_dir() && (entry.path().extension().unwrap_or_default() == "rs") + }) + .map(|entry| entry.into_path()) + .collect::>(); + assert!( + files.len() > 100, + "self_hosting_parsing found too few files - is it running in the right directory?" + ); + + let errors = files + .into_par_iter() + .filter_map(|file| { + let text = read_text(&file); + match SourceFile::parse(&text).ok() { + Ok(_) => None, + Err(err) => Some((file, err)), + } + }) + .collect::>(); + + if !errors.is_empty() { + let errors = errors + .into_iter() + .map(|(path, err)| format!("{}: {:?}\n", path.display(), err)) + .collect::(); + panic!("Parsing errors:\n{}\n", errors); + } +} + +fn test_data_dir() -> PathBuf { + project_dir().join("crates/syntax/test_data") +} + +fn assert_errors_are_present(errors: &[SyntaxError], path: &Path) { + assert!(!errors.is_empty(), "There should be errors in the file {:?}", path.display()); +} +fn assert_errors_are_absent(errors: &[SyntaxError], path: &Path) { + assert_eq!( + errors, + &[] as &[SyntaxError], + "There should be no errors in the file {:?}", + path.display(), + ); +} + +fn dump_tokens_and_errors(tokens: &[Token], errors: &[SyntaxError], text: &str) -> String { + let mut acc = String::new(); + let mut offset: TextSize = 0.into(); + for token in tokens { + let token_len = token.len; + let token_text = &text[TextRange::at(offset, token.len)]; + offset += token.len; + writeln!(acc, "{:?} {:?} {:?}", token.kind, token_len, token_text).unwrap(); + } + for err in errors { + writeln!(acc, "> error{:?} token({:?}) msg({})", err.range(), &text[err.range()], err) + .unwrap(); + } + acc +} + +fn fragment_parser_dir_test(ok_paths: &[&str], err_paths: &[&str], f: 
F) +where + T: crate::AstNode, + F: Fn(&str) -> Result, +{ + dir_tests(&test_data_dir(), ok_paths, "rast", |text, path| { + if let Ok(node) = f(text) { + format!("{:#?}", crate::ast::AstNode::syntax(&node)) + } else { + panic!("Failed to parse '{:?}'", path); + } + }); + dir_tests(&test_data_dir(), err_paths, "rast", |text, path| { + if let Ok(_) = f(text) { + panic!("'{:?}' successfully parsed when it should have errored", path); + } else { + "ERROR\n".to_owned() + } + }); +} + +/// Calls callback `f` with input code and file paths for each `.rs` file in `test_data_dir` +/// subdirectories defined by `paths`. +/// +/// If the content of the matching output file differs from the output of `f()` +/// the test will fail. +/// +/// If there is no matching output file it will be created and filled with the +/// output of `f()`, but the test will fail. +fn dir_tests(test_data_dir: &Path, paths: &[&str], outfile_extension: &str, f: F) +where + F: Fn(&str, &Path) -> String, +{ + for (path, input_code) in collect_rust_files(test_data_dir, paths) { + let actual = f(&input_code, &path); + let path = path.with_extension(outfile_extension); + expect_file![path].assert_eq(&actual) + } +} + +/// Collects all `.rs` files from `dir` subdirectories defined by `paths`. +fn collect_rust_files(root_dir: &Path, paths: &[&str]) -> Vec<(PathBuf, String)> { + paths + .iter() + .flat_map(|path| { + let path = root_dir.to_owned().join(path); + rust_files_in_dir(&path).into_iter() + }) + .map(|path| { + let text = read_text(&path); + (path, text) + }) + .collect() +} + +/// Collects paths to all `.rs` files from `dir` in a sorted `Vec`. +fn rust_files_in_dir(dir: &Path) -> Vec { + let mut acc = Vec::new(); + for file in fs::read_dir(&dir).unwrap() { + let file = file.unwrap(); + let path = file.path(); + if path.extension().unwrap_or_default() == "rs" { + acc.push(path); + } + } + acc.sort(); + acc +} + +/// Read file and normalize newlines. 
+/// +/// `rustc` seems to always normalize `\r\n` newlines to `\n`: +/// +/// ``` +/// let s = " +/// "; +/// assert_eq!(s.as_bytes(), &[10]); +/// ``` +/// +/// so this should always be correct. +fn read_text(path: &Path) -> String { + fs::read_to_string(path) + .unwrap_or_else(|_| panic!("File at {:?} should be valid", path)) + .replace("\r\n", "\n") +} diff --git a/crates/ra_syntax/src/validation.rs b/crates/syntax/src/validation.rs similarity index 100% rename from crates/ra_syntax/src/validation.rs rename to crates/syntax/src/validation.rs diff --git a/crates/ra_syntax/src/validation/block.rs b/crates/syntax/src/validation/block.rs similarity index 100% rename from crates/ra_syntax/src/validation/block.rs rename to crates/syntax/src/validation/block.rs diff --git a/crates/ra_syntax/test_data/accidentally_quadratic b/crates/syntax/test_data/accidentally_quadratic similarity index 100% rename from crates/ra_syntax/test_data/accidentally_quadratic rename to crates/syntax/test_data/accidentally_quadratic diff --git a/crates/ra_syntax/test_data/lexer/err/0001_unclosed_char_at_eof.rs b/crates/syntax/test_data/lexer/err/0001_unclosed_char_at_eof.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0001_unclosed_char_at_eof.rs rename to crates/syntax/test_data/lexer/err/0001_unclosed_char_at_eof.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0001_unclosed_char_at_eof.txt b/crates/syntax/test_data/lexer/err/0001_unclosed_char_at_eof.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0001_unclosed_char_at_eof.txt rename to crates/syntax/test_data/lexer/err/0001_unclosed_char_at_eof.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0002_unclosed_char_with_ferris.rs b/crates/syntax/test_data/lexer/err/0002_unclosed_char_with_ferris.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0002_unclosed_char_with_ferris.rs rename to crates/syntax/test_data/lexer/err/0002_unclosed_char_with_ferris.rs 
diff --git a/crates/ra_syntax/test_data/lexer/err/0002_unclosed_char_with_ferris.txt b/crates/syntax/test_data/lexer/err/0002_unclosed_char_with_ferris.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0002_unclosed_char_with_ferris.txt rename to crates/syntax/test_data/lexer/err/0002_unclosed_char_with_ferris.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0003_unclosed_char_with_ascii_escape.rs b/crates/syntax/test_data/lexer/err/0003_unclosed_char_with_ascii_escape.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0003_unclosed_char_with_ascii_escape.rs rename to crates/syntax/test_data/lexer/err/0003_unclosed_char_with_ascii_escape.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0003_unclosed_char_with_ascii_escape.txt b/crates/syntax/test_data/lexer/err/0003_unclosed_char_with_ascii_escape.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0003_unclosed_char_with_ascii_escape.txt rename to crates/syntax/test_data/lexer/err/0003_unclosed_char_with_ascii_escape.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0004_unclosed_char_with_unicode_escape.rs b/crates/syntax/test_data/lexer/err/0004_unclosed_char_with_unicode_escape.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0004_unclosed_char_with_unicode_escape.rs rename to crates/syntax/test_data/lexer/err/0004_unclosed_char_with_unicode_escape.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0004_unclosed_char_with_unicode_escape.txt b/crates/syntax/test_data/lexer/err/0004_unclosed_char_with_unicode_escape.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0004_unclosed_char_with_unicode_escape.txt rename to crates/syntax/test_data/lexer/err/0004_unclosed_char_with_unicode_escape.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0005_unclosed_char_with_space.rs b/crates/syntax/test_data/lexer/err/0005_unclosed_char_with_space.rs similarity index 100% rename from 
crates/ra_syntax/test_data/lexer/err/0005_unclosed_char_with_space.rs rename to crates/syntax/test_data/lexer/err/0005_unclosed_char_with_space.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0005_unclosed_char_with_space.txt b/crates/syntax/test_data/lexer/err/0005_unclosed_char_with_space.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0005_unclosed_char_with_space.txt rename to crates/syntax/test_data/lexer/err/0005_unclosed_char_with_space.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0006_unclosed_char_with_slash.rs b/crates/syntax/test_data/lexer/err/0006_unclosed_char_with_slash.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0006_unclosed_char_with_slash.rs rename to crates/syntax/test_data/lexer/err/0006_unclosed_char_with_slash.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0006_unclosed_char_with_slash.txt b/crates/syntax/test_data/lexer/err/0006_unclosed_char_with_slash.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0006_unclosed_char_with_slash.txt rename to crates/syntax/test_data/lexer/err/0006_unclosed_char_with_slash.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0007_unclosed_char_with_slash_n.rs b/crates/syntax/test_data/lexer/err/0007_unclosed_char_with_slash_n.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0007_unclosed_char_with_slash_n.rs rename to crates/syntax/test_data/lexer/err/0007_unclosed_char_with_slash_n.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0007_unclosed_char_with_slash_n.txt b/crates/syntax/test_data/lexer/err/0007_unclosed_char_with_slash_n.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0007_unclosed_char_with_slash_n.txt rename to crates/syntax/test_data/lexer/err/0007_unclosed_char_with_slash_n.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0008_unclosed_char_with_slash_single_quote.rs 
b/crates/syntax/test_data/lexer/err/0008_unclosed_char_with_slash_single_quote.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0008_unclosed_char_with_slash_single_quote.rs rename to crates/syntax/test_data/lexer/err/0008_unclosed_char_with_slash_single_quote.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0008_unclosed_char_with_slash_single_quote.txt b/crates/syntax/test_data/lexer/err/0008_unclosed_char_with_slash_single_quote.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0008_unclosed_char_with_slash_single_quote.txt rename to crates/syntax/test_data/lexer/err/0008_unclosed_char_with_slash_single_quote.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0009_unclosed_byte_at_eof.rs b/crates/syntax/test_data/lexer/err/0009_unclosed_byte_at_eof.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0009_unclosed_byte_at_eof.rs rename to crates/syntax/test_data/lexer/err/0009_unclosed_byte_at_eof.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0009_unclosed_byte_at_eof.txt b/crates/syntax/test_data/lexer/err/0009_unclosed_byte_at_eof.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0009_unclosed_byte_at_eof.txt rename to crates/syntax/test_data/lexer/err/0009_unclosed_byte_at_eof.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0010_unclosed_byte_with_ferris.rs b/crates/syntax/test_data/lexer/err/0010_unclosed_byte_with_ferris.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0010_unclosed_byte_with_ferris.rs rename to crates/syntax/test_data/lexer/err/0010_unclosed_byte_with_ferris.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0010_unclosed_byte_with_ferris.txt b/crates/syntax/test_data/lexer/err/0010_unclosed_byte_with_ferris.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0010_unclosed_byte_with_ferris.txt rename to crates/syntax/test_data/lexer/err/0010_unclosed_byte_with_ferris.txt diff 
--git a/crates/ra_syntax/test_data/lexer/err/0011_unclosed_byte_with_ascii_escape.rs b/crates/syntax/test_data/lexer/err/0011_unclosed_byte_with_ascii_escape.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0011_unclosed_byte_with_ascii_escape.rs rename to crates/syntax/test_data/lexer/err/0011_unclosed_byte_with_ascii_escape.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0011_unclosed_byte_with_ascii_escape.txt b/crates/syntax/test_data/lexer/err/0011_unclosed_byte_with_ascii_escape.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0011_unclosed_byte_with_ascii_escape.txt rename to crates/syntax/test_data/lexer/err/0011_unclosed_byte_with_ascii_escape.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0012_unclosed_byte_with_unicode_escape.rs b/crates/syntax/test_data/lexer/err/0012_unclosed_byte_with_unicode_escape.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0012_unclosed_byte_with_unicode_escape.rs rename to crates/syntax/test_data/lexer/err/0012_unclosed_byte_with_unicode_escape.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0012_unclosed_byte_with_unicode_escape.txt b/crates/syntax/test_data/lexer/err/0012_unclosed_byte_with_unicode_escape.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0012_unclosed_byte_with_unicode_escape.txt rename to crates/syntax/test_data/lexer/err/0012_unclosed_byte_with_unicode_escape.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0013_unclosed_byte_with_space.rs b/crates/syntax/test_data/lexer/err/0013_unclosed_byte_with_space.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0013_unclosed_byte_with_space.rs rename to crates/syntax/test_data/lexer/err/0013_unclosed_byte_with_space.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0013_unclosed_byte_with_space.txt b/crates/syntax/test_data/lexer/err/0013_unclosed_byte_with_space.txt similarity index 100% rename from 
crates/ra_syntax/test_data/lexer/err/0013_unclosed_byte_with_space.txt rename to crates/syntax/test_data/lexer/err/0013_unclosed_byte_with_space.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0014_unclosed_byte_with_slash.rs b/crates/syntax/test_data/lexer/err/0014_unclosed_byte_with_slash.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0014_unclosed_byte_with_slash.rs rename to crates/syntax/test_data/lexer/err/0014_unclosed_byte_with_slash.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0014_unclosed_byte_with_slash.txt b/crates/syntax/test_data/lexer/err/0014_unclosed_byte_with_slash.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0014_unclosed_byte_with_slash.txt rename to crates/syntax/test_data/lexer/err/0014_unclosed_byte_with_slash.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0015_unclosed_byte_with_slash_n.rs b/crates/syntax/test_data/lexer/err/0015_unclosed_byte_with_slash_n.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0015_unclosed_byte_with_slash_n.rs rename to crates/syntax/test_data/lexer/err/0015_unclosed_byte_with_slash_n.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0015_unclosed_byte_with_slash_n.txt b/crates/syntax/test_data/lexer/err/0015_unclosed_byte_with_slash_n.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0015_unclosed_byte_with_slash_n.txt rename to crates/syntax/test_data/lexer/err/0015_unclosed_byte_with_slash_n.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0016_unclosed_byte_with_slash_single_quote.rs b/crates/syntax/test_data/lexer/err/0016_unclosed_byte_with_slash_single_quote.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0016_unclosed_byte_with_slash_single_quote.rs rename to crates/syntax/test_data/lexer/err/0016_unclosed_byte_with_slash_single_quote.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0016_unclosed_byte_with_slash_single_quote.txt 
b/crates/syntax/test_data/lexer/err/0016_unclosed_byte_with_slash_single_quote.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0016_unclosed_byte_with_slash_single_quote.txt rename to crates/syntax/test_data/lexer/err/0016_unclosed_byte_with_slash_single_quote.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0017_unclosed_string_at_eof.rs b/crates/syntax/test_data/lexer/err/0017_unclosed_string_at_eof.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0017_unclosed_string_at_eof.rs rename to crates/syntax/test_data/lexer/err/0017_unclosed_string_at_eof.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0017_unclosed_string_at_eof.txt b/crates/syntax/test_data/lexer/err/0017_unclosed_string_at_eof.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0017_unclosed_string_at_eof.txt rename to crates/syntax/test_data/lexer/err/0017_unclosed_string_at_eof.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0018_unclosed_string_with_ferris.rs b/crates/syntax/test_data/lexer/err/0018_unclosed_string_with_ferris.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0018_unclosed_string_with_ferris.rs rename to crates/syntax/test_data/lexer/err/0018_unclosed_string_with_ferris.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0018_unclosed_string_with_ferris.txt b/crates/syntax/test_data/lexer/err/0018_unclosed_string_with_ferris.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0018_unclosed_string_with_ferris.txt rename to crates/syntax/test_data/lexer/err/0018_unclosed_string_with_ferris.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0019_unclosed_string_with_ascii_escape.rs b/crates/syntax/test_data/lexer/err/0019_unclosed_string_with_ascii_escape.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0019_unclosed_string_with_ascii_escape.rs rename to 
crates/syntax/test_data/lexer/err/0019_unclosed_string_with_ascii_escape.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0019_unclosed_string_with_ascii_escape.txt b/crates/syntax/test_data/lexer/err/0019_unclosed_string_with_ascii_escape.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0019_unclosed_string_with_ascii_escape.txt rename to crates/syntax/test_data/lexer/err/0019_unclosed_string_with_ascii_escape.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0020_unclosed_string_with_unicode_escape.rs b/crates/syntax/test_data/lexer/err/0020_unclosed_string_with_unicode_escape.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0020_unclosed_string_with_unicode_escape.rs rename to crates/syntax/test_data/lexer/err/0020_unclosed_string_with_unicode_escape.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0020_unclosed_string_with_unicode_escape.txt b/crates/syntax/test_data/lexer/err/0020_unclosed_string_with_unicode_escape.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0020_unclosed_string_with_unicode_escape.txt rename to crates/syntax/test_data/lexer/err/0020_unclosed_string_with_unicode_escape.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0021_unclosed_string_with_space.rs b/crates/syntax/test_data/lexer/err/0021_unclosed_string_with_space.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0021_unclosed_string_with_space.rs rename to crates/syntax/test_data/lexer/err/0021_unclosed_string_with_space.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0021_unclosed_string_with_space.txt b/crates/syntax/test_data/lexer/err/0021_unclosed_string_with_space.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0021_unclosed_string_with_space.txt rename to crates/syntax/test_data/lexer/err/0021_unclosed_string_with_space.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0022_unclosed_string_with_slash.rs 
b/crates/syntax/test_data/lexer/err/0022_unclosed_string_with_slash.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0022_unclosed_string_with_slash.rs rename to crates/syntax/test_data/lexer/err/0022_unclosed_string_with_slash.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0022_unclosed_string_with_slash.txt b/crates/syntax/test_data/lexer/err/0022_unclosed_string_with_slash.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0022_unclosed_string_with_slash.txt rename to crates/syntax/test_data/lexer/err/0022_unclosed_string_with_slash.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0023_unclosed_string_with_slash_n.rs b/crates/syntax/test_data/lexer/err/0023_unclosed_string_with_slash_n.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0023_unclosed_string_with_slash_n.rs rename to crates/syntax/test_data/lexer/err/0023_unclosed_string_with_slash_n.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0023_unclosed_string_with_slash_n.txt b/crates/syntax/test_data/lexer/err/0023_unclosed_string_with_slash_n.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0023_unclosed_string_with_slash_n.txt rename to crates/syntax/test_data/lexer/err/0023_unclosed_string_with_slash_n.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0024_unclosed_string_with_slash_double_quote.rs b/crates/syntax/test_data/lexer/err/0024_unclosed_string_with_slash_double_quote.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0024_unclosed_string_with_slash_double_quote.rs rename to crates/syntax/test_data/lexer/err/0024_unclosed_string_with_slash_double_quote.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0024_unclosed_string_with_slash_double_quote.txt b/crates/syntax/test_data/lexer/err/0024_unclosed_string_with_slash_double_quote.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0024_unclosed_string_with_slash_double_quote.txt 
rename to crates/syntax/test_data/lexer/err/0024_unclosed_string_with_slash_double_quote.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0025_unclosed_byte_string_at_eof.rs b/crates/syntax/test_data/lexer/err/0025_unclosed_byte_string_at_eof.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0025_unclosed_byte_string_at_eof.rs rename to crates/syntax/test_data/lexer/err/0025_unclosed_byte_string_at_eof.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0025_unclosed_byte_string_at_eof.txt b/crates/syntax/test_data/lexer/err/0025_unclosed_byte_string_at_eof.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0025_unclosed_byte_string_at_eof.txt rename to crates/syntax/test_data/lexer/err/0025_unclosed_byte_string_at_eof.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0026_unclosed_byte_string_with_ferris.rs b/crates/syntax/test_data/lexer/err/0026_unclosed_byte_string_with_ferris.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0026_unclosed_byte_string_with_ferris.rs rename to crates/syntax/test_data/lexer/err/0026_unclosed_byte_string_with_ferris.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0026_unclosed_byte_string_with_ferris.txt b/crates/syntax/test_data/lexer/err/0026_unclosed_byte_string_with_ferris.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0026_unclosed_byte_string_with_ferris.txt rename to crates/syntax/test_data/lexer/err/0026_unclosed_byte_string_with_ferris.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0027_unclosed_byte_string_with_ascii_escape.rs b/crates/syntax/test_data/lexer/err/0027_unclosed_byte_string_with_ascii_escape.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0027_unclosed_byte_string_with_ascii_escape.rs rename to crates/syntax/test_data/lexer/err/0027_unclosed_byte_string_with_ascii_escape.rs diff --git 
a/crates/ra_syntax/test_data/lexer/err/0027_unclosed_byte_string_with_ascii_escape.txt b/crates/syntax/test_data/lexer/err/0027_unclosed_byte_string_with_ascii_escape.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0027_unclosed_byte_string_with_ascii_escape.txt rename to crates/syntax/test_data/lexer/err/0027_unclosed_byte_string_with_ascii_escape.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0028_unclosed_byte_string_with_unicode_escape.rs b/crates/syntax/test_data/lexer/err/0028_unclosed_byte_string_with_unicode_escape.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0028_unclosed_byte_string_with_unicode_escape.rs rename to crates/syntax/test_data/lexer/err/0028_unclosed_byte_string_with_unicode_escape.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0028_unclosed_byte_string_with_unicode_escape.txt b/crates/syntax/test_data/lexer/err/0028_unclosed_byte_string_with_unicode_escape.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0028_unclosed_byte_string_with_unicode_escape.txt rename to crates/syntax/test_data/lexer/err/0028_unclosed_byte_string_with_unicode_escape.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0029_unclosed_byte_string_with_space.rs b/crates/syntax/test_data/lexer/err/0029_unclosed_byte_string_with_space.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0029_unclosed_byte_string_with_space.rs rename to crates/syntax/test_data/lexer/err/0029_unclosed_byte_string_with_space.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0029_unclosed_byte_string_with_space.txt b/crates/syntax/test_data/lexer/err/0029_unclosed_byte_string_with_space.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0029_unclosed_byte_string_with_space.txt rename to crates/syntax/test_data/lexer/err/0029_unclosed_byte_string_with_space.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0030_unclosed_byte_string_with_slash.rs 
b/crates/syntax/test_data/lexer/err/0030_unclosed_byte_string_with_slash.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0030_unclosed_byte_string_with_slash.rs rename to crates/syntax/test_data/lexer/err/0030_unclosed_byte_string_with_slash.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0030_unclosed_byte_string_with_slash.txt b/crates/syntax/test_data/lexer/err/0030_unclosed_byte_string_with_slash.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0030_unclosed_byte_string_with_slash.txt rename to crates/syntax/test_data/lexer/err/0030_unclosed_byte_string_with_slash.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0031_unclosed_byte_string_with_slash_n.rs b/crates/syntax/test_data/lexer/err/0031_unclosed_byte_string_with_slash_n.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0031_unclosed_byte_string_with_slash_n.rs rename to crates/syntax/test_data/lexer/err/0031_unclosed_byte_string_with_slash_n.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0031_unclosed_byte_string_with_slash_n.txt b/crates/syntax/test_data/lexer/err/0031_unclosed_byte_string_with_slash_n.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0031_unclosed_byte_string_with_slash_n.txt rename to crates/syntax/test_data/lexer/err/0031_unclosed_byte_string_with_slash_n.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0032_unclosed_byte_string_with_slash_double_quote.rs b/crates/syntax/test_data/lexer/err/0032_unclosed_byte_string_with_slash_double_quote.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0032_unclosed_byte_string_with_slash_double_quote.rs rename to crates/syntax/test_data/lexer/err/0032_unclosed_byte_string_with_slash_double_quote.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0032_unclosed_byte_string_with_slash_double_quote.txt b/crates/syntax/test_data/lexer/err/0032_unclosed_byte_string_with_slash_double_quote.txt similarity 
index 100% rename from crates/ra_syntax/test_data/lexer/err/0032_unclosed_byte_string_with_slash_double_quote.txt rename to crates/syntax/test_data/lexer/err/0032_unclosed_byte_string_with_slash_double_quote.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0033_unclosed_raw_string_at_eof.rs b/crates/syntax/test_data/lexer/err/0033_unclosed_raw_string_at_eof.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0033_unclosed_raw_string_at_eof.rs rename to crates/syntax/test_data/lexer/err/0033_unclosed_raw_string_at_eof.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0033_unclosed_raw_string_at_eof.txt b/crates/syntax/test_data/lexer/err/0033_unclosed_raw_string_at_eof.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0033_unclosed_raw_string_at_eof.txt rename to crates/syntax/test_data/lexer/err/0033_unclosed_raw_string_at_eof.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0034_unclosed_raw_string_with_ferris.rs b/crates/syntax/test_data/lexer/err/0034_unclosed_raw_string_with_ferris.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0034_unclosed_raw_string_with_ferris.rs rename to crates/syntax/test_data/lexer/err/0034_unclosed_raw_string_with_ferris.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0034_unclosed_raw_string_with_ferris.txt b/crates/syntax/test_data/lexer/err/0034_unclosed_raw_string_with_ferris.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0034_unclosed_raw_string_with_ferris.txt rename to crates/syntax/test_data/lexer/err/0034_unclosed_raw_string_with_ferris.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0035_unclosed_raw_string_with_ascii_escape.rs b/crates/syntax/test_data/lexer/err/0035_unclosed_raw_string_with_ascii_escape.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0035_unclosed_raw_string_with_ascii_escape.rs rename to 
crates/syntax/test_data/lexer/err/0035_unclosed_raw_string_with_ascii_escape.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0035_unclosed_raw_string_with_ascii_escape.txt b/crates/syntax/test_data/lexer/err/0035_unclosed_raw_string_with_ascii_escape.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0035_unclosed_raw_string_with_ascii_escape.txt rename to crates/syntax/test_data/lexer/err/0035_unclosed_raw_string_with_ascii_escape.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0036_unclosed_raw_string_with_unicode_escape.rs b/crates/syntax/test_data/lexer/err/0036_unclosed_raw_string_with_unicode_escape.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0036_unclosed_raw_string_with_unicode_escape.rs rename to crates/syntax/test_data/lexer/err/0036_unclosed_raw_string_with_unicode_escape.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0036_unclosed_raw_string_with_unicode_escape.txt b/crates/syntax/test_data/lexer/err/0036_unclosed_raw_string_with_unicode_escape.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0036_unclosed_raw_string_with_unicode_escape.txt rename to crates/syntax/test_data/lexer/err/0036_unclosed_raw_string_with_unicode_escape.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0037_unclosed_raw_string_with_space.rs b/crates/syntax/test_data/lexer/err/0037_unclosed_raw_string_with_space.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0037_unclosed_raw_string_with_space.rs rename to crates/syntax/test_data/lexer/err/0037_unclosed_raw_string_with_space.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0037_unclosed_raw_string_with_space.txt b/crates/syntax/test_data/lexer/err/0037_unclosed_raw_string_with_space.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0037_unclosed_raw_string_with_space.txt rename to crates/syntax/test_data/lexer/err/0037_unclosed_raw_string_with_space.txt diff --git 
a/crates/ra_syntax/test_data/lexer/err/0038_unclosed_raw_string_with_slash.rs b/crates/syntax/test_data/lexer/err/0038_unclosed_raw_string_with_slash.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0038_unclosed_raw_string_with_slash.rs rename to crates/syntax/test_data/lexer/err/0038_unclosed_raw_string_with_slash.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0038_unclosed_raw_string_with_slash.txt b/crates/syntax/test_data/lexer/err/0038_unclosed_raw_string_with_slash.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0038_unclosed_raw_string_with_slash.txt rename to crates/syntax/test_data/lexer/err/0038_unclosed_raw_string_with_slash.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0039_unclosed_raw_string_with_slash_n.rs b/crates/syntax/test_data/lexer/err/0039_unclosed_raw_string_with_slash_n.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0039_unclosed_raw_string_with_slash_n.rs rename to crates/syntax/test_data/lexer/err/0039_unclosed_raw_string_with_slash_n.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0039_unclosed_raw_string_with_slash_n.txt b/crates/syntax/test_data/lexer/err/0039_unclosed_raw_string_with_slash_n.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0039_unclosed_raw_string_with_slash_n.txt rename to crates/syntax/test_data/lexer/err/0039_unclosed_raw_string_with_slash_n.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0040_unclosed_raw_byte_string_at_eof.rs b/crates/syntax/test_data/lexer/err/0040_unclosed_raw_byte_string_at_eof.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0040_unclosed_raw_byte_string_at_eof.rs rename to crates/syntax/test_data/lexer/err/0040_unclosed_raw_byte_string_at_eof.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0040_unclosed_raw_byte_string_at_eof.txt b/crates/syntax/test_data/lexer/err/0040_unclosed_raw_byte_string_at_eof.txt similarity index 100% rename 
from crates/ra_syntax/test_data/lexer/err/0040_unclosed_raw_byte_string_at_eof.txt rename to crates/syntax/test_data/lexer/err/0040_unclosed_raw_byte_string_at_eof.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0041_unclosed_raw_byte_string_with_ferris.rs b/crates/syntax/test_data/lexer/err/0041_unclosed_raw_byte_string_with_ferris.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0041_unclosed_raw_byte_string_with_ferris.rs rename to crates/syntax/test_data/lexer/err/0041_unclosed_raw_byte_string_with_ferris.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0041_unclosed_raw_byte_string_with_ferris.txt b/crates/syntax/test_data/lexer/err/0041_unclosed_raw_byte_string_with_ferris.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0041_unclosed_raw_byte_string_with_ferris.txt rename to crates/syntax/test_data/lexer/err/0041_unclosed_raw_byte_string_with_ferris.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0042_unclosed_raw_byte_string_with_ascii_escape.rs b/crates/syntax/test_data/lexer/err/0042_unclosed_raw_byte_string_with_ascii_escape.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0042_unclosed_raw_byte_string_with_ascii_escape.rs rename to crates/syntax/test_data/lexer/err/0042_unclosed_raw_byte_string_with_ascii_escape.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0042_unclosed_raw_byte_string_with_ascii_escape.txt b/crates/syntax/test_data/lexer/err/0042_unclosed_raw_byte_string_with_ascii_escape.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0042_unclosed_raw_byte_string_with_ascii_escape.txt rename to crates/syntax/test_data/lexer/err/0042_unclosed_raw_byte_string_with_ascii_escape.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0043_unclosed_raw_byte_string_with_unicode_escape.rs b/crates/syntax/test_data/lexer/err/0043_unclosed_raw_byte_string_with_unicode_escape.rs similarity index 100% rename from 
crates/ra_syntax/test_data/lexer/err/0043_unclosed_raw_byte_string_with_unicode_escape.rs rename to crates/syntax/test_data/lexer/err/0043_unclosed_raw_byte_string_with_unicode_escape.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0043_unclosed_raw_byte_string_with_unicode_escape.txt b/crates/syntax/test_data/lexer/err/0043_unclosed_raw_byte_string_with_unicode_escape.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0043_unclosed_raw_byte_string_with_unicode_escape.txt rename to crates/syntax/test_data/lexer/err/0043_unclosed_raw_byte_string_with_unicode_escape.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0044_unclosed_raw_byte_string_with_space.rs b/crates/syntax/test_data/lexer/err/0044_unclosed_raw_byte_string_with_space.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0044_unclosed_raw_byte_string_with_space.rs rename to crates/syntax/test_data/lexer/err/0044_unclosed_raw_byte_string_with_space.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0044_unclosed_raw_byte_string_with_space.txt b/crates/syntax/test_data/lexer/err/0044_unclosed_raw_byte_string_with_space.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0044_unclosed_raw_byte_string_with_space.txt rename to crates/syntax/test_data/lexer/err/0044_unclosed_raw_byte_string_with_space.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0045_unclosed_raw_byte_string_with_slash.rs b/crates/syntax/test_data/lexer/err/0045_unclosed_raw_byte_string_with_slash.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0045_unclosed_raw_byte_string_with_slash.rs rename to crates/syntax/test_data/lexer/err/0045_unclosed_raw_byte_string_with_slash.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0045_unclosed_raw_byte_string_with_slash.txt b/crates/syntax/test_data/lexer/err/0045_unclosed_raw_byte_string_with_slash.txt similarity index 100% rename from 
crates/ra_syntax/test_data/lexer/err/0045_unclosed_raw_byte_string_with_slash.txt rename to crates/syntax/test_data/lexer/err/0045_unclosed_raw_byte_string_with_slash.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0046_unclosed_raw_byte_string_with_slash_n.rs b/crates/syntax/test_data/lexer/err/0046_unclosed_raw_byte_string_with_slash_n.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0046_unclosed_raw_byte_string_with_slash_n.rs rename to crates/syntax/test_data/lexer/err/0046_unclosed_raw_byte_string_with_slash_n.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0046_unclosed_raw_byte_string_with_slash_n.txt b/crates/syntax/test_data/lexer/err/0046_unclosed_raw_byte_string_with_slash_n.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0046_unclosed_raw_byte_string_with_slash_n.txt rename to crates/syntax/test_data/lexer/err/0046_unclosed_raw_byte_string_with_slash_n.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0047_unstarted_raw_string_at_eof.rs b/crates/syntax/test_data/lexer/err/0047_unstarted_raw_string_at_eof.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0047_unstarted_raw_string_at_eof.rs rename to crates/syntax/test_data/lexer/err/0047_unstarted_raw_string_at_eof.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0047_unstarted_raw_string_at_eof.txt b/crates/syntax/test_data/lexer/err/0047_unstarted_raw_string_at_eof.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0047_unstarted_raw_string_at_eof.txt rename to crates/syntax/test_data/lexer/err/0047_unstarted_raw_string_at_eof.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0048_unstarted_raw_byte_string_at_eof.rs b/crates/syntax/test_data/lexer/err/0048_unstarted_raw_byte_string_at_eof.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0048_unstarted_raw_byte_string_at_eof.rs rename to 
crates/syntax/test_data/lexer/err/0048_unstarted_raw_byte_string_at_eof.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0048_unstarted_raw_byte_string_at_eof.txt b/crates/syntax/test_data/lexer/err/0048_unstarted_raw_byte_string_at_eof.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0048_unstarted_raw_byte_string_at_eof.txt rename to crates/syntax/test_data/lexer/err/0048_unstarted_raw_byte_string_at_eof.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0049_unstarted_raw_string_with_ascii.rs b/crates/syntax/test_data/lexer/err/0049_unstarted_raw_string_with_ascii.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0049_unstarted_raw_string_with_ascii.rs rename to crates/syntax/test_data/lexer/err/0049_unstarted_raw_string_with_ascii.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0049_unstarted_raw_string_with_ascii.txt b/crates/syntax/test_data/lexer/err/0049_unstarted_raw_string_with_ascii.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0049_unstarted_raw_string_with_ascii.txt rename to crates/syntax/test_data/lexer/err/0049_unstarted_raw_string_with_ascii.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0050_unstarted_raw_byte_string_with_ascii.rs b/crates/syntax/test_data/lexer/err/0050_unstarted_raw_byte_string_with_ascii.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0050_unstarted_raw_byte_string_with_ascii.rs rename to crates/syntax/test_data/lexer/err/0050_unstarted_raw_byte_string_with_ascii.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0050_unstarted_raw_byte_string_with_ascii.txt b/crates/syntax/test_data/lexer/err/0050_unstarted_raw_byte_string_with_ascii.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0050_unstarted_raw_byte_string_with_ascii.txt rename to crates/syntax/test_data/lexer/err/0050_unstarted_raw_byte_string_with_ascii.txt diff --git 
a/crates/ra_syntax/test_data/lexer/err/0051_unclosed_block_comment_at_eof.rs b/crates/syntax/test_data/lexer/err/0051_unclosed_block_comment_at_eof.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0051_unclosed_block_comment_at_eof.rs rename to crates/syntax/test_data/lexer/err/0051_unclosed_block_comment_at_eof.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0051_unclosed_block_comment_at_eof.txt b/crates/syntax/test_data/lexer/err/0051_unclosed_block_comment_at_eof.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0051_unclosed_block_comment_at_eof.txt rename to crates/syntax/test_data/lexer/err/0051_unclosed_block_comment_at_eof.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0052_unclosed_block_comment_with_content.rs b/crates/syntax/test_data/lexer/err/0052_unclosed_block_comment_with_content.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0052_unclosed_block_comment_with_content.rs rename to crates/syntax/test_data/lexer/err/0052_unclosed_block_comment_with_content.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0052_unclosed_block_comment_with_content.txt b/crates/syntax/test_data/lexer/err/0052_unclosed_block_comment_with_content.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0052_unclosed_block_comment_with_content.txt rename to crates/syntax/test_data/lexer/err/0052_unclosed_block_comment_with_content.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0053_unclosed_nested_block_comment_entirely.rs b/crates/syntax/test_data/lexer/err/0053_unclosed_nested_block_comment_entirely.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0053_unclosed_nested_block_comment_entirely.rs rename to crates/syntax/test_data/lexer/err/0053_unclosed_nested_block_comment_entirely.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0053_unclosed_nested_block_comment_entirely.txt 
b/crates/syntax/test_data/lexer/err/0053_unclosed_nested_block_comment_entirely.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0053_unclosed_nested_block_comment_entirely.txt rename to crates/syntax/test_data/lexer/err/0053_unclosed_nested_block_comment_entirely.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0054_unclosed_nested_block_comment_partially.rs b/crates/syntax/test_data/lexer/err/0054_unclosed_nested_block_comment_partially.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0054_unclosed_nested_block_comment_partially.rs rename to crates/syntax/test_data/lexer/err/0054_unclosed_nested_block_comment_partially.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0054_unclosed_nested_block_comment_partially.txt b/crates/syntax/test_data/lexer/err/0054_unclosed_nested_block_comment_partially.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0054_unclosed_nested_block_comment_partially.txt rename to crates/syntax/test_data/lexer/err/0054_unclosed_nested_block_comment_partially.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0055_empty_int.rs b/crates/syntax/test_data/lexer/err/0055_empty_int.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0055_empty_int.rs rename to crates/syntax/test_data/lexer/err/0055_empty_int.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0055_empty_int.txt b/crates/syntax/test_data/lexer/err/0055_empty_int.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0055_empty_int.txt rename to crates/syntax/test_data/lexer/err/0055_empty_int.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0056_empty_exponent.rs b/crates/syntax/test_data/lexer/err/0056_empty_exponent.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0056_empty_exponent.rs rename to crates/syntax/test_data/lexer/err/0056_empty_exponent.rs diff --git 
a/crates/ra_syntax/test_data/lexer/err/0056_empty_exponent.txt b/crates/syntax/test_data/lexer/err/0056_empty_exponent.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0056_empty_exponent.txt rename to crates/syntax/test_data/lexer/err/0056_empty_exponent.txt diff --git a/crates/ra_syntax/test_data/lexer/err/0057_lifetime_strarts_with_a_number.rs b/crates/syntax/test_data/lexer/err/0057_lifetime_strarts_with_a_number.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0057_lifetime_strarts_with_a_number.rs rename to crates/syntax/test_data/lexer/err/0057_lifetime_strarts_with_a_number.rs diff --git a/crates/ra_syntax/test_data/lexer/err/0057_lifetime_strarts_with_a_number.txt b/crates/syntax/test_data/lexer/err/0057_lifetime_strarts_with_a_number.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/err/0057_lifetime_strarts_with_a_number.txt rename to crates/syntax/test_data/lexer/err/0057_lifetime_strarts_with_a_number.txt diff --git a/crates/ra_syntax/test_data/lexer/ok/0001_hello.rs b/crates/syntax/test_data/lexer/ok/0001_hello.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/ok/0001_hello.rs rename to crates/syntax/test_data/lexer/ok/0001_hello.rs diff --git a/crates/ra_syntax/test_data/lexer/ok/0001_hello.txt b/crates/syntax/test_data/lexer/ok/0001_hello.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/ok/0001_hello.txt rename to crates/syntax/test_data/lexer/ok/0001_hello.txt diff --git a/crates/ra_syntax/test_data/lexer/ok/0002_whitespace.rs b/crates/syntax/test_data/lexer/ok/0002_whitespace.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/ok/0002_whitespace.rs rename to crates/syntax/test_data/lexer/ok/0002_whitespace.rs diff --git a/crates/ra_syntax/test_data/lexer/ok/0002_whitespace.txt b/crates/syntax/test_data/lexer/ok/0002_whitespace.txt similarity index 100% rename from 
crates/ra_syntax/test_data/lexer/ok/0002_whitespace.txt rename to crates/syntax/test_data/lexer/ok/0002_whitespace.txt diff --git a/crates/ra_syntax/test_data/lexer/ok/0003_ident.rs b/crates/syntax/test_data/lexer/ok/0003_ident.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/ok/0003_ident.rs rename to crates/syntax/test_data/lexer/ok/0003_ident.rs diff --git a/crates/ra_syntax/test_data/lexer/ok/0003_ident.txt b/crates/syntax/test_data/lexer/ok/0003_ident.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/ok/0003_ident.txt rename to crates/syntax/test_data/lexer/ok/0003_ident.txt diff --git a/crates/ra_syntax/test_data/lexer/ok/0004_numbers.rs b/crates/syntax/test_data/lexer/ok/0004_numbers.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/ok/0004_numbers.rs rename to crates/syntax/test_data/lexer/ok/0004_numbers.rs diff --git a/crates/ra_syntax/test_data/lexer/ok/0004_numbers.txt b/crates/syntax/test_data/lexer/ok/0004_numbers.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/ok/0004_numbers.txt rename to crates/syntax/test_data/lexer/ok/0004_numbers.txt diff --git a/crates/ra_syntax/test_data/lexer/ok/0005_symbols.rs b/crates/syntax/test_data/lexer/ok/0005_symbols.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/ok/0005_symbols.rs rename to crates/syntax/test_data/lexer/ok/0005_symbols.rs diff --git a/crates/ra_syntax/test_data/lexer/ok/0005_symbols.txt b/crates/syntax/test_data/lexer/ok/0005_symbols.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/ok/0005_symbols.txt rename to crates/syntax/test_data/lexer/ok/0005_symbols.txt diff --git a/crates/ra_syntax/test_data/lexer/ok/0006_chars.rs b/crates/syntax/test_data/lexer/ok/0006_chars.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/ok/0006_chars.rs rename to crates/syntax/test_data/lexer/ok/0006_chars.rs diff --git a/crates/ra_syntax/test_data/lexer/ok/0006_chars.txt 
b/crates/syntax/test_data/lexer/ok/0006_chars.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/ok/0006_chars.txt rename to crates/syntax/test_data/lexer/ok/0006_chars.txt diff --git a/crates/ra_syntax/test_data/lexer/ok/0007_lifetimes.rs b/crates/syntax/test_data/lexer/ok/0007_lifetimes.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/ok/0007_lifetimes.rs rename to crates/syntax/test_data/lexer/ok/0007_lifetimes.rs diff --git a/crates/ra_syntax/test_data/lexer/ok/0007_lifetimes.txt b/crates/syntax/test_data/lexer/ok/0007_lifetimes.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/ok/0007_lifetimes.txt rename to crates/syntax/test_data/lexer/ok/0007_lifetimes.txt diff --git a/crates/ra_syntax/test_data/lexer/ok/0008_byte_strings.rs b/crates/syntax/test_data/lexer/ok/0008_byte_strings.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/ok/0008_byte_strings.rs rename to crates/syntax/test_data/lexer/ok/0008_byte_strings.rs diff --git a/crates/ra_syntax/test_data/lexer/ok/0008_byte_strings.txt b/crates/syntax/test_data/lexer/ok/0008_byte_strings.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/ok/0008_byte_strings.txt rename to crates/syntax/test_data/lexer/ok/0008_byte_strings.txt diff --git a/crates/ra_syntax/test_data/lexer/ok/0009_strings.rs b/crates/syntax/test_data/lexer/ok/0009_strings.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/ok/0009_strings.rs rename to crates/syntax/test_data/lexer/ok/0009_strings.rs diff --git a/crates/ra_syntax/test_data/lexer/ok/0009_strings.txt b/crates/syntax/test_data/lexer/ok/0009_strings.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/ok/0009_strings.txt rename to crates/syntax/test_data/lexer/ok/0009_strings.txt diff --git a/crates/ra_syntax/test_data/lexer/ok/0010_single_line_comments.rs b/crates/syntax/test_data/lexer/ok/0010_single_line_comments.rs similarity index 100% rename 
from crates/ra_syntax/test_data/lexer/ok/0010_single_line_comments.rs rename to crates/syntax/test_data/lexer/ok/0010_single_line_comments.rs diff --git a/crates/ra_syntax/test_data/lexer/ok/0010_single_line_comments.txt b/crates/syntax/test_data/lexer/ok/0010_single_line_comments.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/ok/0010_single_line_comments.txt rename to crates/syntax/test_data/lexer/ok/0010_single_line_comments.txt diff --git a/crates/ra_syntax/test_data/lexer/ok/0011_keywords.rs b/crates/syntax/test_data/lexer/ok/0011_keywords.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/ok/0011_keywords.rs rename to crates/syntax/test_data/lexer/ok/0011_keywords.rs diff --git a/crates/ra_syntax/test_data/lexer/ok/0011_keywords.txt b/crates/syntax/test_data/lexer/ok/0011_keywords.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/ok/0011_keywords.txt rename to crates/syntax/test_data/lexer/ok/0011_keywords.txt diff --git a/crates/ra_syntax/test_data/lexer/ok/0012_block_comment.rs b/crates/syntax/test_data/lexer/ok/0012_block_comment.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/ok/0012_block_comment.rs rename to crates/syntax/test_data/lexer/ok/0012_block_comment.rs diff --git a/crates/ra_syntax/test_data/lexer/ok/0012_block_comment.txt b/crates/syntax/test_data/lexer/ok/0012_block_comment.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/ok/0012_block_comment.txt rename to crates/syntax/test_data/lexer/ok/0012_block_comment.txt diff --git a/crates/ra_syntax/test_data/lexer/ok/0013_raw_strings.rs b/crates/syntax/test_data/lexer/ok/0013_raw_strings.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/ok/0013_raw_strings.rs rename to crates/syntax/test_data/lexer/ok/0013_raw_strings.rs diff --git a/crates/ra_syntax/test_data/lexer/ok/0013_raw_strings.txt b/crates/syntax/test_data/lexer/ok/0013_raw_strings.txt similarity index 100% rename 
from crates/ra_syntax/test_data/lexer/ok/0013_raw_strings.txt rename to crates/syntax/test_data/lexer/ok/0013_raw_strings.txt diff --git a/crates/ra_syntax/test_data/lexer/ok/0014_raw_ident.rs b/crates/syntax/test_data/lexer/ok/0014_raw_ident.rs similarity index 100% rename from crates/ra_syntax/test_data/lexer/ok/0014_raw_ident.rs rename to crates/syntax/test_data/lexer/ok/0014_raw_ident.rs diff --git a/crates/ra_syntax/test_data/lexer/ok/0014_raw_ident.txt b/crates/syntax/test_data/lexer/ok/0014_raw_ident.txt similarity index 100% rename from crates/ra_syntax/test_data/lexer/ok/0014_raw_ident.txt rename to crates/syntax/test_data/lexer/ok/0014_raw_ident.txt diff --git a/crates/ra_syntax/test_data/parser/err/0000_struct_field_missing_comma.rast b/crates/syntax/test_data/parser/err/0000_struct_field_missing_comma.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0000_struct_field_missing_comma.rast rename to crates/syntax/test_data/parser/err/0000_struct_field_missing_comma.rast diff --git a/crates/ra_syntax/test_data/parser/err/0000_struct_field_missing_comma.rs b/crates/syntax/test_data/parser/err/0000_struct_field_missing_comma.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0000_struct_field_missing_comma.rs rename to crates/syntax/test_data/parser/err/0000_struct_field_missing_comma.rs diff --git a/crates/ra_syntax/test_data/parser/err/0001_item_recovery_in_file.rast b/crates/syntax/test_data/parser/err/0001_item_recovery_in_file.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0001_item_recovery_in_file.rast rename to crates/syntax/test_data/parser/err/0001_item_recovery_in_file.rast diff --git a/crates/ra_syntax/test_data/parser/err/0001_item_recovery_in_file.rs b/crates/syntax/test_data/parser/err/0001_item_recovery_in_file.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0001_item_recovery_in_file.rs rename to 
crates/syntax/test_data/parser/err/0001_item_recovery_in_file.rs diff --git a/crates/ra_syntax/test_data/parser/err/0002_duplicate_shebang.rast b/crates/syntax/test_data/parser/err/0002_duplicate_shebang.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0002_duplicate_shebang.rast rename to crates/syntax/test_data/parser/err/0002_duplicate_shebang.rast diff --git a/crates/ra_syntax/test_data/parser/err/0002_duplicate_shebang.rs b/crates/syntax/test_data/parser/err/0002_duplicate_shebang.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0002_duplicate_shebang.rs rename to crates/syntax/test_data/parser/err/0002_duplicate_shebang.rs diff --git a/crates/ra_syntax/test_data/parser/err/0003_C++_semicolon.rast b/crates/syntax/test_data/parser/err/0003_C++_semicolon.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0003_C++_semicolon.rast rename to crates/syntax/test_data/parser/err/0003_C++_semicolon.rast diff --git a/crates/ra_syntax/test_data/parser/err/0003_C++_semicolon.rs b/crates/syntax/test_data/parser/err/0003_C++_semicolon.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0003_C++_semicolon.rs rename to crates/syntax/test_data/parser/err/0003_C++_semicolon.rs diff --git a/crates/ra_syntax/test_data/parser/err/0004_use_path_bad_segment.rast b/crates/syntax/test_data/parser/err/0004_use_path_bad_segment.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0004_use_path_bad_segment.rast rename to crates/syntax/test_data/parser/err/0004_use_path_bad_segment.rast diff --git a/crates/ra_syntax/test_data/parser/err/0004_use_path_bad_segment.rs b/crates/syntax/test_data/parser/err/0004_use_path_bad_segment.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0004_use_path_bad_segment.rs rename to crates/syntax/test_data/parser/err/0004_use_path_bad_segment.rs diff --git 
a/crates/ra_syntax/test_data/parser/err/0005_attribute_recover.rast b/crates/syntax/test_data/parser/err/0005_attribute_recover.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0005_attribute_recover.rast rename to crates/syntax/test_data/parser/err/0005_attribute_recover.rast diff --git a/crates/ra_syntax/test_data/parser/err/0005_attribute_recover.rs b/crates/syntax/test_data/parser/err/0005_attribute_recover.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0005_attribute_recover.rs rename to crates/syntax/test_data/parser/err/0005_attribute_recover.rs diff --git a/crates/ra_syntax/test_data/parser/err/0006_named_field_recovery.rast b/crates/syntax/test_data/parser/err/0006_named_field_recovery.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0006_named_field_recovery.rast rename to crates/syntax/test_data/parser/err/0006_named_field_recovery.rast diff --git a/crates/ra_syntax/test_data/parser/err/0006_named_field_recovery.rs b/crates/syntax/test_data/parser/err/0006_named_field_recovery.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0006_named_field_recovery.rs rename to crates/syntax/test_data/parser/err/0006_named_field_recovery.rs diff --git a/crates/ra_syntax/test_data/parser/err/0007_stray_curly_in_file.rast b/crates/syntax/test_data/parser/err/0007_stray_curly_in_file.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0007_stray_curly_in_file.rast rename to crates/syntax/test_data/parser/err/0007_stray_curly_in_file.rast diff --git a/crates/ra_syntax/test_data/parser/err/0007_stray_curly_in_file.rs b/crates/syntax/test_data/parser/err/0007_stray_curly_in_file.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0007_stray_curly_in_file.rs rename to crates/syntax/test_data/parser/err/0007_stray_curly_in_file.rs diff --git a/crates/ra_syntax/test_data/parser/err/0008_item_block_recovery.rast 
b/crates/syntax/test_data/parser/err/0008_item_block_recovery.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0008_item_block_recovery.rast rename to crates/syntax/test_data/parser/err/0008_item_block_recovery.rast diff --git a/crates/ra_syntax/test_data/parser/err/0008_item_block_recovery.rs b/crates/syntax/test_data/parser/err/0008_item_block_recovery.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0008_item_block_recovery.rs rename to crates/syntax/test_data/parser/err/0008_item_block_recovery.rs diff --git a/crates/ra_syntax/test_data/parser/err/0009_broken_struct_type_parameter.rast b/crates/syntax/test_data/parser/err/0009_broken_struct_type_parameter.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0009_broken_struct_type_parameter.rast rename to crates/syntax/test_data/parser/err/0009_broken_struct_type_parameter.rast diff --git a/crates/ra_syntax/test_data/parser/err/0009_broken_struct_type_parameter.rs b/crates/syntax/test_data/parser/err/0009_broken_struct_type_parameter.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0009_broken_struct_type_parameter.rs rename to crates/syntax/test_data/parser/err/0009_broken_struct_type_parameter.rs diff --git a/crates/ra_syntax/test_data/parser/err/0010_unsafe_lambda_block.rast b/crates/syntax/test_data/parser/err/0010_unsafe_lambda_block.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0010_unsafe_lambda_block.rast rename to crates/syntax/test_data/parser/err/0010_unsafe_lambda_block.rast diff --git a/crates/ra_syntax/test_data/parser/err/0010_unsafe_lambda_block.rs b/crates/syntax/test_data/parser/err/0010_unsafe_lambda_block.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0010_unsafe_lambda_block.rs rename to crates/syntax/test_data/parser/err/0010_unsafe_lambda_block.rs diff --git a/crates/ra_syntax/test_data/parser/err/0011_extern_struct.rast 
b/crates/syntax/test_data/parser/err/0011_extern_struct.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0011_extern_struct.rast rename to crates/syntax/test_data/parser/err/0011_extern_struct.rast diff --git a/crates/ra_syntax/test_data/parser/err/0011_extern_struct.rs b/crates/syntax/test_data/parser/err/0011_extern_struct.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0011_extern_struct.rs rename to crates/syntax/test_data/parser/err/0011_extern_struct.rs diff --git a/crates/ra_syntax/test_data/parser/err/0012_broken_lambda.rast b/crates/syntax/test_data/parser/err/0012_broken_lambda.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0012_broken_lambda.rast rename to crates/syntax/test_data/parser/err/0012_broken_lambda.rast diff --git a/crates/ra_syntax/test_data/parser/err/0013_invalid_type.rast b/crates/syntax/test_data/parser/err/0013_invalid_type.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0013_invalid_type.rast rename to crates/syntax/test_data/parser/err/0013_invalid_type.rast diff --git a/crates/ra_syntax/test_data/parser/err/0013_invalid_type.rs b/crates/syntax/test_data/parser/err/0013_invalid_type.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0013_invalid_type.rs rename to crates/syntax/test_data/parser/err/0013_invalid_type.rs diff --git a/crates/ra_syntax/test_data/parser/err/0014_where_no_bounds.rast b/crates/syntax/test_data/parser/err/0014_where_no_bounds.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0014_where_no_bounds.rast rename to crates/syntax/test_data/parser/err/0014_where_no_bounds.rast diff --git a/crates/ra_syntax/test_data/parser/err/0014_where_no_bounds.rs b/crates/syntax/test_data/parser/err/0014_where_no_bounds.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0014_where_no_bounds.rs rename to 
crates/syntax/test_data/parser/err/0014_where_no_bounds.rs diff --git a/crates/ra_syntax/test_data/parser/err/0015_curly_in_params.rast b/crates/syntax/test_data/parser/err/0015_curly_in_params.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0015_curly_in_params.rast rename to crates/syntax/test_data/parser/err/0015_curly_in_params.rast diff --git a/crates/ra_syntax/test_data/parser/err/0015_curly_in_params.rs b/crates/syntax/test_data/parser/err/0015_curly_in_params.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0015_curly_in_params.rs rename to crates/syntax/test_data/parser/err/0015_curly_in_params.rs diff --git a/crates/ra_syntax/test_data/parser/err/0016_missing_semi.rast b/crates/syntax/test_data/parser/err/0016_missing_semi.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0016_missing_semi.rast rename to crates/syntax/test_data/parser/err/0016_missing_semi.rast diff --git a/crates/ra_syntax/test_data/parser/err/0016_missing_semi.rs b/crates/syntax/test_data/parser/err/0016_missing_semi.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0016_missing_semi.rs rename to crates/syntax/test_data/parser/err/0016_missing_semi.rs diff --git a/crates/ra_syntax/test_data/parser/err/0017_incomplete_binexpr.rast b/crates/syntax/test_data/parser/err/0017_incomplete_binexpr.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0017_incomplete_binexpr.rast rename to crates/syntax/test_data/parser/err/0017_incomplete_binexpr.rast diff --git a/crates/ra_syntax/test_data/parser/err/0017_incomplete_binexpr.rs b/crates/syntax/test_data/parser/err/0017_incomplete_binexpr.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0017_incomplete_binexpr.rs rename to crates/syntax/test_data/parser/err/0017_incomplete_binexpr.rs diff --git a/crates/ra_syntax/test_data/parser/err/0018_incomplete_fn.rast 
b/crates/syntax/test_data/parser/err/0018_incomplete_fn.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0018_incomplete_fn.rast rename to crates/syntax/test_data/parser/err/0018_incomplete_fn.rast diff --git a/crates/ra_syntax/test_data/parser/err/0018_incomplete_fn.rs b/crates/syntax/test_data/parser/err/0018_incomplete_fn.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0018_incomplete_fn.rs rename to crates/syntax/test_data/parser/err/0018_incomplete_fn.rs diff --git a/crates/ra_syntax/test_data/parser/err/0019_let_recover.rast b/crates/syntax/test_data/parser/err/0019_let_recover.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0019_let_recover.rast rename to crates/syntax/test_data/parser/err/0019_let_recover.rast diff --git a/crates/ra_syntax/test_data/parser/err/0019_let_recover.rs b/crates/syntax/test_data/parser/err/0019_let_recover.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0019_let_recover.rs rename to crates/syntax/test_data/parser/err/0019_let_recover.rs diff --git a/crates/ra_syntax/test_data/parser/err/0020_fn_recover.rast b/crates/syntax/test_data/parser/err/0020_fn_recover.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0020_fn_recover.rast rename to crates/syntax/test_data/parser/err/0020_fn_recover.rast diff --git a/crates/ra_syntax/test_data/parser/err/0020_fn_recover.rs b/crates/syntax/test_data/parser/err/0020_fn_recover.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0020_fn_recover.rs rename to crates/syntax/test_data/parser/err/0020_fn_recover.rs diff --git a/crates/ra_syntax/test_data/parser/err/0021_incomplete_param.rast b/crates/syntax/test_data/parser/err/0021_incomplete_param.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0021_incomplete_param.rast rename to crates/syntax/test_data/parser/err/0021_incomplete_param.rast diff --git 
a/crates/ra_syntax/test_data/parser/err/0021_incomplete_param.rs b/crates/syntax/test_data/parser/err/0021_incomplete_param.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0021_incomplete_param.rs rename to crates/syntax/test_data/parser/err/0021_incomplete_param.rs diff --git a/crates/ra_syntax/test_data/parser/err/0022_bad_exprs.rast b/crates/syntax/test_data/parser/err/0022_bad_exprs.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0022_bad_exprs.rast rename to crates/syntax/test_data/parser/err/0022_bad_exprs.rast diff --git a/crates/ra_syntax/test_data/parser/err/0022_bad_exprs.rs b/crates/syntax/test_data/parser/err/0022_bad_exprs.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0022_bad_exprs.rs rename to crates/syntax/test_data/parser/err/0022_bad_exprs.rs diff --git a/crates/ra_syntax/test_data/parser/err/0023_mismatched_paren.rast b/crates/syntax/test_data/parser/err/0023_mismatched_paren.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0023_mismatched_paren.rast rename to crates/syntax/test_data/parser/err/0023_mismatched_paren.rast diff --git a/crates/ra_syntax/test_data/parser/err/0023_mismatched_paren.rs b/crates/syntax/test_data/parser/err/0023_mismatched_paren.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0023_mismatched_paren.rs rename to crates/syntax/test_data/parser/err/0023_mismatched_paren.rs diff --git a/crates/ra_syntax/test_data/parser/err/0024_many_type_parens.rast b/crates/syntax/test_data/parser/err/0024_many_type_parens.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0024_many_type_parens.rast rename to crates/syntax/test_data/parser/err/0024_many_type_parens.rast diff --git a/crates/ra_syntax/test_data/parser/err/0024_many_type_parens.rs b/crates/syntax/test_data/parser/err/0024_many_type_parens.rs similarity index 100% rename from 
crates/ra_syntax/test_data/parser/err/0024_many_type_parens.rs rename to crates/syntax/test_data/parser/err/0024_many_type_parens.rs diff --git a/crates/ra_syntax/test_data/parser/err/0025_nope.rast b/crates/syntax/test_data/parser/err/0025_nope.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0025_nope.rast rename to crates/syntax/test_data/parser/err/0025_nope.rast diff --git a/crates/ra_syntax/test_data/parser/err/0025_nope.rs b/crates/syntax/test_data/parser/err/0025_nope.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0025_nope.rs rename to crates/syntax/test_data/parser/err/0025_nope.rs diff --git a/crates/ra_syntax/test_data/parser/err/0026_imp_recovery.rast b/crates/syntax/test_data/parser/err/0026_imp_recovery.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0026_imp_recovery.rast rename to crates/syntax/test_data/parser/err/0026_imp_recovery.rast diff --git a/crates/ra_syntax/test_data/parser/err/0026_imp_recovery.rs b/crates/syntax/test_data/parser/err/0026_imp_recovery.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0026_imp_recovery.rs rename to crates/syntax/test_data/parser/err/0026_imp_recovery.rs diff --git a/crates/ra_syntax/test_data/parser/err/0027_incomplere_where_for.rast b/crates/syntax/test_data/parser/err/0027_incomplere_where_for.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0027_incomplere_where_for.rast rename to crates/syntax/test_data/parser/err/0027_incomplere_where_for.rast diff --git a/crates/ra_syntax/test_data/parser/err/0027_incomplere_where_for.rs b/crates/syntax/test_data/parser/err/0027_incomplere_where_for.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0027_incomplere_where_for.rs rename to crates/syntax/test_data/parser/err/0027_incomplere_where_for.rs diff --git a/crates/ra_syntax/test_data/parser/err/0029_field_completion.rast 
b/crates/syntax/test_data/parser/err/0029_field_completion.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0029_field_completion.rast rename to crates/syntax/test_data/parser/err/0029_field_completion.rast diff --git a/crates/ra_syntax/test_data/parser/err/0029_field_completion.rs b/crates/syntax/test_data/parser/err/0029_field_completion.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0029_field_completion.rs rename to crates/syntax/test_data/parser/err/0029_field_completion.rs diff --git a/crates/ra_syntax/test_data/parser/err/0031_block_inner_attrs.rast b/crates/syntax/test_data/parser/err/0031_block_inner_attrs.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0031_block_inner_attrs.rast rename to crates/syntax/test_data/parser/err/0031_block_inner_attrs.rast diff --git a/crates/ra_syntax/test_data/parser/err/0031_block_inner_attrs.rs b/crates/syntax/test_data/parser/err/0031_block_inner_attrs.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0031_block_inner_attrs.rs rename to crates/syntax/test_data/parser/err/0031_block_inner_attrs.rs diff --git a/crates/ra_syntax/test_data/parser/err/0032_match_arms_inner_attrs.rast b/crates/syntax/test_data/parser/err/0032_match_arms_inner_attrs.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0032_match_arms_inner_attrs.rast rename to crates/syntax/test_data/parser/err/0032_match_arms_inner_attrs.rast diff --git a/crates/ra_syntax/test_data/parser/err/0032_match_arms_inner_attrs.rs b/crates/syntax/test_data/parser/err/0032_match_arms_inner_attrs.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0032_match_arms_inner_attrs.rs rename to crates/syntax/test_data/parser/err/0032_match_arms_inner_attrs.rs diff --git a/crates/ra_syntax/test_data/parser/err/0033_match_arms_outer_attrs.rast b/crates/syntax/test_data/parser/err/0033_match_arms_outer_attrs.rast similarity 
index 100% rename from crates/ra_syntax/test_data/parser/err/0033_match_arms_outer_attrs.rast rename to crates/syntax/test_data/parser/err/0033_match_arms_outer_attrs.rast diff --git a/crates/ra_syntax/test_data/parser/err/0033_match_arms_outer_attrs.rs b/crates/syntax/test_data/parser/err/0033_match_arms_outer_attrs.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0033_match_arms_outer_attrs.rs rename to crates/syntax/test_data/parser/err/0033_match_arms_outer_attrs.rs diff --git a/crates/ra_syntax/test_data/parser/err/0034_bad_box_pattern.rast b/crates/syntax/test_data/parser/err/0034_bad_box_pattern.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0034_bad_box_pattern.rast rename to crates/syntax/test_data/parser/err/0034_bad_box_pattern.rast diff --git a/crates/ra_syntax/test_data/parser/err/0034_bad_box_pattern.rs b/crates/syntax/test_data/parser/err/0034_bad_box_pattern.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0034_bad_box_pattern.rs rename to crates/syntax/test_data/parser/err/0034_bad_box_pattern.rs diff --git a/crates/ra_syntax/test_data/parser/err/0035_use_recover.rast b/crates/syntax/test_data/parser/err/0035_use_recover.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0035_use_recover.rast rename to crates/syntax/test_data/parser/err/0035_use_recover.rast diff --git a/crates/ra_syntax/test_data/parser/err/0035_use_recover.rs b/crates/syntax/test_data/parser/err/0035_use_recover.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0035_use_recover.rs rename to crates/syntax/test_data/parser/err/0035_use_recover.rs diff --git a/crates/ra_syntax/test_data/parser/err/0036_partial_use.rast b/crates/syntax/test_data/parser/err/0036_partial_use.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0036_partial_use.rast rename to crates/syntax/test_data/parser/err/0036_partial_use.rast diff --git 
a/crates/ra_syntax/test_data/parser/err/0036_partial_use.rs b/crates/syntax/test_data/parser/err/0036_partial_use.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0036_partial_use.rs rename to crates/syntax/test_data/parser/err/0036_partial_use.rs diff --git a/crates/ra_syntax/test_data/parser/err/0037_visibility_in_traits.rast b/crates/syntax/test_data/parser/err/0037_visibility_in_traits.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0037_visibility_in_traits.rast rename to crates/syntax/test_data/parser/err/0037_visibility_in_traits.rast diff --git a/crates/ra_syntax/test_data/parser/err/0037_visibility_in_traits.rs b/crates/syntax/test_data/parser/err/0037_visibility_in_traits.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0037_visibility_in_traits.rs rename to crates/syntax/test_data/parser/err/0037_visibility_in_traits.rs diff --git a/crates/ra_syntax/test_data/parser/err/0038_endless_inclusive_range.rast b/crates/syntax/test_data/parser/err/0038_endless_inclusive_range.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0038_endless_inclusive_range.rast rename to crates/syntax/test_data/parser/err/0038_endless_inclusive_range.rast diff --git a/crates/ra_syntax/test_data/parser/err/0038_endless_inclusive_range.rs b/crates/syntax/test_data/parser/err/0038_endless_inclusive_range.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0038_endless_inclusive_range.rs rename to crates/syntax/test_data/parser/err/0038_endless_inclusive_range.rs diff --git a/crates/ra_syntax/test_data/parser/err/0039_lambda_recovery.rast b/crates/syntax/test_data/parser/err/0039_lambda_recovery.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0039_lambda_recovery.rast rename to crates/syntax/test_data/parser/err/0039_lambda_recovery.rast diff --git a/crates/ra_syntax/test_data/parser/err/0039_lambda_recovery.rs 
b/crates/syntax/test_data/parser/err/0039_lambda_recovery.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0039_lambda_recovery.rs rename to crates/syntax/test_data/parser/err/0039_lambda_recovery.rs diff --git a/crates/ra_syntax/test_data/parser/err/0040_illegal_crate_kw_location.rast b/crates/syntax/test_data/parser/err/0040_illegal_crate_kw_location.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0040_illegal_crate_kw_location.rast rename to crates/syntax/test_data/parser/err/0040_illegal_crate_kw_location.rast diff --git a/crates/ra_syntax/test_data/parser/err/0040_illegal_crate_kw_location.rs b/crates/syntax/test_data/parser/err/0040_illegal_crate_kw_location.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0040_illegal_crate_kw_location.rs rename to crates/syntax/test_data/parser/err/0040_illegal_crate_kw_location.rs diff --git a/crates/ra_syntax/test_data/parser/err/0041_illegal_super_keyword_location.rast b/crates/syntax/test_data/parser/err/0041_illegal_super_keyword_location.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0041_illegal_super_keyword_location.rast rename to crates/syntax/test_data/parser/err/0041_illegal_super_keyword_location.rast diff --git a/crates/ra_syntax/test_data/parser/err/0041_illegal_super_keyword_location.rs b/crates/syntax/test_data/parser/err/0041_illegal_super_keyword_location.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0041_illegal_super_keyword_location.rs rename to crates/syntax/test_data/parser/err/0041_illegal_super_keyword_location.rs diff --git a/crates/ra_syntax/test_data/parser/err/0042_illegal_self_keyword_location.rast b/crates/syntax/test_data/parser/err/0042_illegal_self_keyword_location.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0042_illegal_self_keyword_location.rast rename to 
crates/syntax/test_data/parser/err/0042_illegal_self_keyword_location.rast diff --git a/crates/ra_syntax/test_data/parser/err/0042_illegal_self_keyword_location.rs b/crates/syntax/test_data/parser/err/0042_illegal_self_keyword_location.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0042_illegal_self_keyword_location.rs rename to crates/syntax/test_data/parser/err/0042_illegal_self_keyword_location.rs diff --git a/crates/ra_syntax/test_data/parser/err/0163_weird_blocks.rast b/crates/syntax/test_data/parser/err/0043_weird_blocks.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0163_weird_blocks.rast rename to crates/syntax/test_data/parser/err/0043_weird_blocks.rast diff --git a/crates/ra_syntax/test_data/parser/err/0163_weird_blocks.rs b/crates/syntax/test_data/parser/err/0043_weird_blocks.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0163_weird_blocks.rs rename to crates/syntax/test_data/parser/err/0043_weird_blocks.rs diff --git a/crates/ra_syntax/test_data/parser/err/0044_unexpected_for_type.rast b/crates/syntax/test_data/parser/err/0044_unexpected_for_type.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0044_unexpected_for_type.rast rename to crates/syntax/test_data/parser/err/0044_unexpected_for_type.rast diff --git a/crates/ra_syntax/test_data/parser/err/0044_unexpected_for_type.rs b/crates/syntax/test_data/parser/err/0044_unexpected_for_type.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/err/0044_unexpected_for_type.rs rename to crates/syntax/test_data/parser/err/0044_unexpected_for_type.rs diff --git a/crates/ra_syntax/test_data/parser/inline/err/0010_wrong_order_fns.rast b/crates/syntax/test_data/parser/err/0045_item_modifiers.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/err/0010_wrong_order_fns.rast rename to crates/syntax/test_data/parser/err/0045_item_modifiers.rast diff --git 
a/crates/ra_syntax/test_data/parser/inline/err/0010_wrong_order_fns.rs b/crates/syntax/test_data/parser/err/0045_item_modifiers.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/err/0010_wrong_order_fns.rs rename to crates/syntax/test_data/parser/err/0045_item_modifiers.rs diff --git a/crates/ra_syntax/test_data/parser/fragments/expr/err/0000_truncated_add.rast b/crates/syntax/test_data/parser/fragments/expr/err/0000_truncated_add.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/fragments/expr/err/0000_truncated_add.rast rename to crates/syntax/test_data/parser/fragments/expr/err/0000_truncated_add.rast diff --git a/crates/ra_syntax/test_data/parser/fragments/expr/err/0000_truncated_add.rs b/crates/syntax/test_data/parser/fragments/expr/err/0000_truncated_add.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/fragments/expr/err/0000_truncated_add.rs rename to crates/syntax/test_data/parser/fragments/expr/err/0000_truncated_add.rs diff --git a/crates/ra_syntax/test_data/parser/fragments/expr/ok/0000_add.rast b/crates/syntax/test_data/parser/fragments/expr/ok/0000_add.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/fragments/expr/ok/0000_add.rast rename to crates/syntax/test_data/parser/fragments/expr/ok/0000_add.rast diff --git a/crates/ra_syntax/test_data/parser/fragments/expr/ok/0000_add.rs b/crates/syntax/test_data/parser/fragments/expr/ok/0000_add.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/fragments/expr/ok/0000_add.rs rename to crates/syntax/test_data/parser/fragments/expr/ok/0000_add.rs diff --git a/crates/ra_syntax/test_data/parser/fragments/item/err/0000_extra_keyword.rast b/crates/syntax/test_data/parser/fragments/item/err/0000_extra_keyword.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/fragments/item/err/0000_extra_keyword.rast rename to 
crates/syntax/test_data/parser/fragments/item/err/0000_extra_keyword.rast diff --git a/crates/ra_syntax/test_data/parser/fragments/item/err/0000_extra_keyword.rs b/crates/syntax/test_data/parser/fragments/item/err/0000_extra_keyword.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/fragments/item/err/0000_extra_keyword.rs rename to crates/syntax/test_data/parser/fragments/item/err/0000_extra_keyword.rs diff --git a/crates/ra_syntax/test_data/parser/fragments/item/ok/0000_fn.rast b/crates/syntax/test_data/parser/fragments/item/ok/0000_fn.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/fragments/item/ok/0000_fn.rast rename to crates/syntax/test_data/parser/fragments/item/ok/0000_fn.rast diff --git a/crates/ra_syntax/test_data/parser/fragments/item/ok/0000_fn.rs b/crates/syntax/test_data/parser/fragments/item/ok/0000_fn.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/fragments/item/ok/0000_fn.rs rename to crates/syntax/test_data/parser/fragments/item/ok/0000_fn.rs diff --git a/crates/ra_syntax/test_data/parser/fragments/path/err/0000_reserved_word.rast b/crates/syntax/test_data/parser/fragments/path/err/0000_reserved_word.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/fragments/path/err/0000_reserved_word.rast rename to crates/syntax/test_data/parser/fragments/path/err/0000_reserved_word.rast diff --git a/crates/ra_syntax/test_data/parser/fragments/path/err/0000_reserved_word.rs b/crates/syntax/test_data/parser/fragments/path/err/0000_reserved_word.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/fragments/path/err/0000_reserved_word.rs rename to crates/syntax/test_data/parser/fragments/path/err/0000_reserved_word.rs diff --git a/crates/ra_syntax/test_data/parser/fragments/path/err/0001_expression.rast b/crates/syntax/test_data/parser/fragments/path/err/0001_expression.rast similarity index 100% rename from 
crates/ra_syntax/test_data/parser/fragments/path/err/0001_expression.rast rename to crates/syntax/test_data/parser/fragments/path/err/0001_expression.rast diff --git a/crates/ra_syntax/test_data/parser/fragments/path/err/0001_expression.rs b/crates/syntax/test_data/parser/fragments/path/err/0001_expression.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/fragments/path/err/0001_expression.rs rename to crates/syntax/test_data/parser/fragments/path/err/0001_expression.rs diff --git a/crates/ra_syntax/test_data/parser/fragments/path/ok/0000_single_ident.rast b/crates/syntax/test_data/parser/fragments/path/ok/0000_single_ident.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/fragments/path/ok/0000_single_ident.rast rename to crates/syntax/test_data/parser/fragments/path/ok/0000_single_ident.rast diff --git a/crates/ra_syntax/test_data/parser/fragments/path/ok/0000_single_ident.rs b/crates/syntax/test_data/parser/fragments/path/ok/0000_single_ident.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/fragments/path/ok/0000_single_ident.rs rename to crates/syntax/test_data/parser/fragments/path/ok/0000_single_ident.rs diff --git a/crates/ra_syntax/test_data/parser/fragments/path/ok/0001_multipart.rast b/crates/syntax/test_data/parser/fragments/path/ok/0001_multipart.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/fragments/path/ok/0001_multipart.rast rename to crates/syntax/test_data/parser/fragments/path/ok/0001_multipart.rast diff --git a/crates/ra_syntax/test_data/parser/fragments/path/ok/0001_multipart.rs b/crates/syntax/test_data/parser/fragments/path/ok/0001_multipart.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/fragments/path/ok/0001_multipart.rs rename to crates/syntax/test_data/parser/fragments/path/ok/0001_multipart.rs diff --git a/crates/ra_syntax/test_data/parser/fragments/pattern/err/0000_reserved_word.rast 
b/crates/syntax/test_data/parser/fragments/pattern/err/0000_reserved_word.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/fragments/pattern/err/0000_reserved_word.rast rename to crates/syntax/test_data/parser/fragments/pattern/err/0000_reserved_word.rast diff --git a/crates/ra_syntax/test_data/parser/fragments/pattern/err/0000_reserved_word.rs b/crates/syntax/test_data/parser/fragments/pattern/err/0000_reserved_word.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/fragments/pattern/err/0000_reserved_word.rs rename to crates/syntax/test_data/parser/fragments/pattern/err/0000_reserved_word.rs diff --git a/crates/ra_syntax/test_data/parser/fragments/pattern/err/0001_missing_paren.rast b/crates/syntax/test_data/parser/fragments/pattern/err/0001_missing_paren.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/fragments/pattern/err/0001_missing_paren.rast rename to crates/syntax/test_data/parser/fragments/pattern/err/0001_missing_paren.rast diff --git a/crates/ra_syntax/test_data/parser/fragments/pattern/err/0001_missing_paren.rs b/crates/syntax/test_data/parser/fragments/pattern/err/0001_missing_paren.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/fragments/pattern/err/0001_missing_paren.rs rename to crates/syntax/test_data/parser/fragments/pattern/err/0001_missing_paren.rs diff --git a/crates/ra_syntax/test_data/parser/fragments/pattern/ok/0000_enum.rast b/crates/syntax/test_data/parser/fragments/pattern/ok/0000_enum.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/fragments/pattern/ok/0000_enum.rast rename to crates/syntax/test_data/parser/fragments/pattern/ok/0000_enum.rast diff --git a/crates/ra_syntax/test_data/parser/fragments/pattern/ok/0000_enum.rs b/crates/syntax/test_data/parser/fragments/pattern/ok/0000_enum.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/fragments/pattern/ok/0000_enum.rs rename to 
crates/syntax/test_data/parser/fragments/pattern/ok/0000_enum.rs diff --git a/crates/ra_syntax/test_data/parser/fragments/type/err/0000_missing_close.rast b/crates/syntax/test_data/parser/fragments/type/err/0000_missing_close.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/fragments/type/err/0000_missing_close.rast rename to crates/syntax/test_data/parser/fragments/type/err/0000_missing_close.rast diff --git a/crates/ra_syntax/test_data/parser/fragments/type/err/0000_missing_close.rs b/crates/syntax/test_data/parser/fragments/type/err/0000_missing_close.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/fragments/type/err/0000_missing_close.rs rename to crates/syntax/test_data/parser/fragments/type/err/0000_missing_close.rs diff --git a/crates/ra_syntax/test_data/parser/fragments/type/ok/0000_result.rast b/crates/syntax/test_data/parser/fragments/type/ok/0000_result.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/fragments/type/ok/0000_result.rast rename to crates/syntax/test_data/parser/fragments/type/ok/0000_result.rast diff --git a/crates/ra_syntax/test_data/parser/fragments/type/ok/0000_result.rs b/crates/syntax/test_data/parser/fragments/type/ok/0000_result.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/fragments/type/ok/0000_result.rs rename to crates/syntax/test_data/parser/fragments/type/ok/0000_result.rs diff --git a/crates/ra_syntax/test_data/parser/fuzz-failures/0000.rs b/crates/syntax/test_data/parser/fuzz-failures/0000.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/fuzz-failures/0000.rs rename to crates/syntax/test_data/parser/fuzz-failures/0000.rs diff --git a/crates/syntax/test_data/parser/fuzz-failures/0001.rs b/crates/syntax/test_data/parser/fuzz-failures/0001.rs new file mode 100644 index 0000000000..f1148058ef --- /dev/null +++ b/crates/syntax/test_data/parser/fuzz-failures/0001.rs @@ -0,0 +1,106 @@ +use syntax::{ + File, 
TextRange, SyntaxNodeRef, TextUnit, + SyntaxKind::*, + algo::{find_leaf_at_offset, LeafAtOffset, find_covering_node, ancestors, Direction, siblings}, +}; + +pub fn extend_selection(file: &File, range: TextRange) -> Option { + let syntax = file.syntax(); + extend(syntax.borrowed(), range) +} + +pub(crate) fn extend(root: SyntaxNodeRef, range: TextRange) -> Option { + if range.is_empty() { + let offset = range.start(); + let mut leaves = find_leaf_at_offset(root, offset); + if leaves.clone().all(|it| it.kind() == WHITESPACE) { + return Some(extend_ws(root, leaves.next()?, offset)); + } + let leaf = match leaves { + LeafAtOffset::None => return None, + LeafAtOffset::Single(l) => l, + LeafAtOffset::Between(l, r) => pick_best(l, r), + }; + return Some(leaf.range()); + }; + let node = find_covering_node(root, range); + if node.kind() == COMMENT && range == node.range() { + if let Some(range) = extend_comments(node) { + return Some(range); + } + } + + match ancestors(node).skip_while(|n| n.range() == range).next() { + None => None, + Some(parent) => Some(parent.range()), + } +} + +fn extend_ws(root: SyntaxNodeRef, ws: SyntaxNodeRef, offset: TextUnit) -> TextRange { + let ws_text = ws.leaf_text().unwrap(); + let suffix = TextRange::from_to(offset, ws.range().end()) - ws.range().start(); + let prefix = TextRange::from_to(ws.range().start(), offset) - ws.range().start(); + let ws_suffix = &ws_text.as_str()[suffix]; + let ws_prefix = &ws_text.as_str()[prefix]; + if ws_text.contains("\n") && !ws_suffix.contains("\n") { + if let Some(node) = ws.next_sibling() { + let start = match ws_prefix.rfind('\n') { + Some(idx) => ws.range().start() + TextUnit::from((idx + 1) as u32), + None => node.range().start() + }; + let end = if root.text().char_at(node.range().end()) == Some('\n') { + node.range().end() + TextUnit::of_char('\n') + } else { + node.range().end() + }; + return TextRange::from_to(start, end); + } + } + ws.range() +} + +fn pick_best<'a>(l: SyntaxNodeRef<'a>, r: 
Syntd[axNodeRef<'a>) -> SyntaxNodeRef<'a> { + return if priority(r) > priority(l) { r } else { l }; + fn priority(n: SyntaxNodeRef) -> usize { + match n.kind() { + WHITESPACE => 0, + IDENT | SELF_KW | SUPER_KW | CRATE_KW => 2, + _ => 1, + } + } +} + +fn extend_comments(node: SyntaxNodeRef) -> Option { + let left = adj_com[ments(node, Direction::Backward); + let right = adj_comments(node, Direction::Forward); + if left != right { + Some(TextRange::from_to( + left.range().start(), + right.range().end(), + )) + } else { + None + } +} + +fn adj_comments(node: SyntaxNodeRef, dir: Direction) -> SyntaxNodeRef { + let mut res = node; + for node in siblings(node, dir) { + match node.kind() { + COMMENT => res = node, + WHITESPACE if !node.leaf_text().unwrap().as_str().contains("\n\n") => (), + _ => break + } + } + res +} + +#[cfg(test)] +mod tests { + use super::*; + use test_utils::extract_offset; + + fn do_check(before: &str, afters: &[&str]) { + let (cursor, before) = extract_offset(before); + let file = File::parse(&before); + let mut range = TextRange::of diff --git a/crates/ra_syntax/test_data/parser/fuzz-failures/0002.rs b/crates/syntax/test_data/parser/fuzz-failures/0002.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/fuzz-failures/0002.rs rename to crates/syntax/test_data/parser/fuzz-failures/0002.rs diff --git a/crates/ra_syntax/test_data/parser/fuzz-failures/0003.rs b/crates/syntax/test_data/parser/fuzz-failures/0003.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/fuzz-failures/0003.rs rename to crates/syntax/test_data/parser/fuzz-failures/0003.rs diff --git a/crates/ra_syntax/test_data/parser/fuzz-failures/0004.rs b/crates/syntax/test_data/parser/fuzz-failures/0004.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/fuzz-failures/0004.rs rename to crates/syntax/test_data/parser/fuzz-failures/0004.rs diff --git a/crates/ra_syntax/test_data/parser/inline/err/0001_array_type_missing_semi.rast 
b/crates/syntax/test_data/parser/inline/err/0001_array_type_missing_semi.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/err/0001_array_type_missing_semi.rast rename to crates/syntax/test_data/parser/inline/err/0001_array_type_missing_semi.rast diff --git a/crates/ra_syntax/test_data/parser/inline/err/0001_array_type_missing_semi.rs b/crates/syntax/test_data/parser/inline/err/0001_array_type_missing_semi.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/err/0001_array_type_missing_semi.rs rename to crates/syntax/test_data/parser/inline/err/0001_array_type_missing_semi.rs diff --git a/crates/ra_syntax/test_data/parser/inline/err/0002_misplaced_label_err.rast b/crates/syntax/test_data/parser/inline/err/0002_misplaced_label_err.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/err/0002_misplaced_label_err.rast rename to crates/syntax/test_data/parser/inline/err/0002_misplaced_label_err.rast diff --git a/crates/ra_syntax/test_data/parser/inline/err/0002_misplaced_label_err.rs b/crates/syntax/test_data/parser/inline/err/0002_misplaced_label_err.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/err/0002_misplaced_label_err.rs rename to crates/syntax/test_data/parser/inline/err/0002_misplaced_label_err.rs diff --git a/crates/ra_syntax/test_data/parser/inline/err/0003_pointer_type_no_mutability.rast b/crates/syntax/test_data/parser/inline/err/0003_pointer_type_no_mutability.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/err/0003_pointer_type_no_mutability.rast rename to crates/syntax/test_data/parser/inline/err/0003_pointer_type_no_mutability.rast diff --git a/crates/ra_syntax/test_data/parser/inline/err/0003_pointer_type_no_mutability.rs b/crates/syntax/test_data/parser/inline/err/0003_pointer_type_no_mutability.rs similarity index 100% rename from 
crates/ra_syntax/test_data/parser/inline/err/0003_pointer_type_no_mutability.rs rename to crates/syntax/test_data/parser/inline/err/0003_pointer_type_no_mutability.rs diff --git a/crates/ra_syntax/test_data/parser/inline/err/0004_impl_type.rast b/crates/syntax/test_data/parser/inline/err/0004_impl_type.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/err/0004_impl_type.rast rename to crates/syntax/test_data/parser/inline/err/0004_impl_type.rast diff --git a/crates/ra_syntax/test_data/parser/inline/err/0004_impl_type.rs b/crates/syntax/test_data/parser/inline/err/0004_impl_type.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/err/0004_impl_type.rs rename to crates/syntax/test_data/parser/inline/err/0004_impl_type.rs diff --git a/crates/ra_syntax/test_data/parser/inline/err/0005_fn_pointer_type_missing_fn.rast b/crates/syntax/test_data/parser/inline/err/0005_fn_pointer_type_missing_fn.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/err/0005_fn_pointer_type_missing_fn.rast rename to crates/syntax/test_data/parser/inline/err/0005_fn_pointer_type_missing_fn.rast diff --git a/crates/ra_syntax/test_data/parser/inline/err/0005_fn_pointer_type_missing_fn.rs b/crates/syntax/test_data/parser/inline/err/0005_fn_pointer_type_missing_fn.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/err/0005_fn_pointer_type_missing_fn.rs rename to crates/syntax/test_data/parser/inline/err/0005_fn_pointer_type_missing_fn.rs diff --git a/crates/ra_syntax/test_data/parser/inline/err/0006_unsafe_block_in_mod.rast b/crates/syntax/test_data/parser/inline/err/0006_unsafe_block_in_mod.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/err/0006_unsafe_block_in_mod.rast rename to crates/syntax/test_data/parser/inline/err/0006_unsafe_block_in_mod.rast diff --git a/crates/ra_syntax/test_data/parser/inline/err/0006_unsafe_block_in_mod.rs 
b/crates/syntax/test_data/parser/inline/err/0006_unsafe_block_in_mod.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/err/0006_unsafe_block_in_mod.rs rename to crates/syntax/test_data/parser/inline/err/0006_unsafe_block_in_mod.rs diff --git a/crates/ra_syntax/test_data/parser/inline/err/0007_async_without_semicolon.rast b/crates/syntax/test_data/parser/inline/err/0007_async_without_semicolon.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/err/0007_async_without_semicolon.rast rename to crates/syntax/test_data/parser/inline/err/0007_async_without_semicolon.rast diff --git a/crates/ra_syntax/test_data/parser/inline/err/0007_async_without_semicolon.rs b/crates/syntax/test_data/parser/inline/err/0007_async_without_semicolon.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/err/0007_async_without_semicolon.rs rename to crates/syntax/test_data/parser/inline/err/0007_async_without_semicolon.rs diff --git a/crates/ra_syntax/test_data/parser/inline/err/0008_pub_expr.rast b/crates/syntax/test_data/parser/inline/err/0008_pub_expr.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/err/0008_pub_expr.rast rename to crates/syntax/test_data/parser/inline/err/0008_pub_expr.rast diff --git a/crates/ra_syntax/test_data/parser/inline/err/0008_pub_expr.rs b/crates/syntax/test_data/parser/inline/err/0008_pub_expr.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/err/0008_pub_expr.rs rename to crates/syntax/test_data/parser/inline/err/0008_pub_expr.rs diff --git a/crates/ra_syntax/test_data/parser/inline/err/0009_attr_on_expr_not_allowed.rast b/crates/syntax/test_data/parser/inline/err/0009_attr_on_expr_not_allowed.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/err/0009_attr_on_expr_not_allowed.rast rename to crates/syntax/test_data/parser/inline/err/0009_attr_on_expr_not_allowed.rast diff --git 
a/crates/ra_syntax/test_data/parser/inline/err/0009_attr_on_expr_not_allowed.rs b/crates/syntax/test_data/parser/inline/err/0009_attr_on_expr_not_allowed.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/err/0009_attr_on_expr_not_allowed.rs rename to crates/syntax/test_data/parser/inline/err/0009_attr_on_expr_not_allowed.rs diff --git a/crates/ra_syntax/test_data/parser/inline/err/0010_bad_tuple_index_expr.rast b/crates/syntax/test_data/parser/inline/err/0010_bad_tuple_index_expr.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/err/0010_bad_tuple_index_expr.rast rename to crates/syntax/test_data/parser/inline/err/0010_bad_tuple_index_expr.rast diff --git a/crates/ra_syntax/test_data/parser/inline/err/0010_bad_tuple_index_expr.rs b/crates/syntax/test_data/parser/inline/err/0010_bad_tuple_index_expr.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/err/0010_bad_tuple_index_expr.rs rename to crates/syntax/test_data/parser/inline/err/0010_bad_tuple_index_expr.rs diff --git a/crates/ra_syntax/test_data/parser/inline/err/0013_static_underscore.rast b/crates/syntax/test_data/parser/inline/err/0013_static_underscore.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/err/0013_static_underscore.rast rename to crates/syntax/test_data/parser/inline/err/0013_static_underscore.rast diff --git a/crates/ra_syntax/test_data/parser/inline/err/0013_static_underscore.rs b/crates/syntax/test_data/parser/inline/err/0013_static_underscore.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/err/0013_static_underscore.rs rename to crates/syntax/test_data/parser/inline/err/0013_static_underscore.rs diff --git a/crates/ra_syntax/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rast b/crates/syntax/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rast similarity index 100% rename from 
crates/ra_syntax/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rast rename to crates/syntax/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rast diff --git a/crates/ra_syntax/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rs b/crates/syntax/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rs rename to crates/syntax/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rs diff --git a/crates/syntax/test_data/parser/inline/err/0015_empty_segment.rast b/crates/syntax/test_data/parser/inline/err/0015_empty_segment.rast new file mode 100644 index 0000000000..e872526d90 --- /dev/null +++ b/crates/syntax/test_data/parser/inline/err/0015_empty_segment.rast @@ -0,0 +1,13 @@ +SOURCE_FILE@0..13 + USE@0..12 + USE_KW@0..3 "use" + WHITESPACE@3..4 " " + USE_TREE@4..11 + PATH@4..11 + PATH@4..9 + PATH_SEGMENT@4..9 + CRATE_KW@4..9 "crate" + COLON2@9..11 "::" + SEMICOLON@11..12 ";" + WHITESPACE@12..13 "\n" +error 11..11: expected identifier diff --git a/crates/ra_syntax/test_data/parser/inline/err/0015_empty_segment.rs b/crates/syntax/test_data/parser/inline/err/0015_empty_segment.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/err/0015_empty_segment.rs rename to crates/syntax/test_data/parser/inline/err/0015_empty_segment.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0002_use_tree_list.rast b/crates/syntax/test_data/parser/inline/ok/0002_use_tree_list.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0002_use_tree_list.rast rename to crates/syntax/test_data/parser/inline/ok/0002_use_tree_list.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0002_use_tree_list.rs b/crates/syntax/test_data/parser/inline/ok/0002_use_tree_list.rs similarity index 100% 
rename from crates/ra_syntax/test_data/parser/inline/ok/0002_use_tree_list.rs rename to crates/syntax/test_data/parser/inline/ok/0002_use_tree_list.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0003_where_pred_for.rast b/crates/syntax/test_data/parser/inline/ok/0003_where_pred_for.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0003_where_pred_for.rast rename to crates/syntax/test_data/parser/inline/ok/0003_where_pred_for.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0003_where_pred_for.rs b/crates/syntax/test_data/parser/inline/ok/0003_where_pred_for.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0003_where_pred_for.rs rename to crates/syntax/test_data/parser/inline/ok/0003_where_pred_for.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0004_value_parameters_no_patterns.rast b/crates/syntax/test_data/parser/inline/ok/0004_value_parameters_no_patterns.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0004_value_parameters_no_patterns.rast rename to crates/syntax/test_data/parser/inline/ok/0004_value_parameters_no_patterns.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0004_value_parameters_no_patterns.rs b/crates/syntax/test_data/parser/inline/ok/0004_value_parameters_no_patterns.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0004_value_parameters_no_patterns.rs rename to crates/syntax/test_data/parser/inline/ok/0004_value_parameters_no_patterns.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0005_function_type_params.rast b/crates/syntax/test_data/parser/inline/ok/0005_function_type_params.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0005_function_type_params.rast rename to crates/syntax/test_data/parser/inline/ok/0005_function_type_params.rast diff --git 
a/crates/ra_syntax/test_data/parser/inline/ok/0005_function_type_params.rs b/crates/syntax/test_data/parser/inline/ok/0005_function_type_params.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0005_function_type_params.rs rename to crates/syntax/test_data/parser/inline/ok/0005_function_type_params.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0006_self_param.rast b/crates/syntax/test_data/parser/inline/ok/0006_self_param.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0006_self_param.rast rename to crates/syntax/test_data/parser/inline/ok/0006_self_param.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0006_self_param.rs b/crates/syntax/test_data/parser/inline/ok/0006_self_param.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0006_self_param.rs rename to crates/syntax/test_data/parser/inline/ok/0006_self_param.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0007_type_param_bounds.rast b/crates/syntax/test_data/parser/inline/ok/0007_type_param_bounds.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0007_type_param_bounds.rast rename to crates/syntax/test_data/parser/inline/ok/0007_type_param_bounds.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0007_type_param_bounds.rs b/crates/syntax/test_data/parser/inline/ok/0007_type_param_bounds.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0007_type_param_bounds.rs rename to crates/syntax/test_data/parser/inline/ok/0007_type_param_bounds.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0008_path_part.rast b/crates/syntax/test_data/parser/inline/ok/0008_path_part.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0008_path_part.rast rename to crates/syntax/test_data/parser/inline/ok/0008_path_part.rast diff --git 
a/crates/ra_syntax/test_data/parser/inline/ok/0008_path_part.rs b/crates/syntax/test_data/parser/inline/ok/0008_path_part.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0008_path_part.rs rename to crates/syntax/test_data/parser/inline/ok/0008_path_part.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0009_loop_expr.rast b/crates/syntax/test_data/parser/inline/ok/0009_loop_expr.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0009_loop_expr.rast rename to crates/syntax/test_data/parser/inline/ok/0009_loop_expr.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0009_loop_expr.rs b/crates/syntax/test_data/parser/inline/ok/0009_loop_expr.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0009_loop_expr.rs rename to crates/syntax/test_data/parser/inline/ok/0009_loop_expr.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0010_extern_block.rast b/crates/syntax/test_data/parser/inline/ok/0010_extern_block.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0010_extern_block.rast rename to crates/syntax/test_data/parser/inline/ok/0010_extern_block.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0010_extern_block.rs b/crates/syntax/test_data/parser/inline/ok/0010_extern_block.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0010_extern_block.rs rename to crates/syntax/test_data/parser/inline/ok/0010_extern_block.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0011_field_expr.rast b/crates/syntax/test_data/parser/inline/ok/0011_field_expr.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0011_field_expr.rast rename to crates/syntax/test_data/parser/inline/ok/0011_field_expr.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0011_field_expr.rs b/crates/syntax/test_data/parser/inline/ok/0011_field_expr.rs similarity 
index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0011_field_expr.rs rename to crates/syntax/test_data/parser/inline/ok/0011_field_expr.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0012_type_item_where_clause.rast b/crates/syntax/test_data/parser/inline/ok/0012_type_item_where_clause.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0012_type_item_where_clause.rast rename to crates/syntax/test_data/parser/inline/ok/0012_type_item_where_clause.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0012_type_item_where_clause.rs b/crates/syntax/test_data/parser/inline/ok/0012_type_item_where_clause.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0012_type_item_where_clause.rs rename to crates/syntax/test_data/parser/inline/ok/0012_type_item_where_clause.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0013_pointer_type_mut.rast b/crates/syntax/test_data/parser/inline/ok/0013_pointer_type_mut.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0013_pointer_type_mut.rast rename to crates/syntax/test_data/parser/inline/ok/0013_pointer_type_mut.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0013_pointer_type_mut.rs b/crates/syntax/test_data/parser/inline/ok/0013_pointer_type_mut.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0013_pointer_type_mut.rs rename to crates/syntax/test_data/parser/inline/ok/0013_pointer_type_mut.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0014_never_type.rast b/crates/syntax/test_data/parser/inline/ok/0014_never_type.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0014_never_type.rast rename to crates/syntax/test_data/parser/inline/ok/0014_never_type.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0014_never_type.rs b/crates/syntax/test_data/parser/inline/ok/0014_never_type.rs similarity 
index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0014_never_type.rs rename to crates/syntax/test_data/parser/inline/ok/0014_never_type.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0015_continue_expr.rast b/crates/syntax/test_data/parser/inline/ok/0015_continue_expr.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0015_continue_expr.rast rename to crates/syntax/test_data/parser/inline/ok/0015_continue_expr.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0015_continue_expr.rs b/crates/syntax/test_data/parser/inline/ok/0015_continue_expr.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0015_continue_expr.rs rename to crates/syntax/test_data/parser/inline/ok/0015_continue_expr.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0017_array_type.rast b/crates/syntax/test_data/parser/inline/ok/0017_array_type.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0017_array_type.rast rename to crates/syntax/test_data/parser/inline/ok/0017_array_type.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0017_array_type.rs b/crates/syntax/test_data/parser/inline/ok/0017_array_type.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0017_array_type.rs rename to crates/syntax/test_data/parser/inline/ok/0017_array_type.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0018_arb_self_types.rast b/crates/syntax/test_data/parser/inline/ok/0018_arb_self_types.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0018_arb_self_types.rast rename to crates/syntax/test_data/parser/inline/ok/0018_arb_self_types.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0018_arb_self_types.rs b/crates/syntax/test_data/parser/inline/ok/0018_arb_self_types.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0018_arb_self_types.rs rename 
to crates/syntax/test_data/parser/inline/ok/0018_arb_self_types.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0019_unary_expr.rast b/crates/syntax/test_data/parser/inline/ok/0019_unary_expr.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0019_unary_expr.rast rename to crates/syntax/test_data/parser/inline/ok/0019_unary_expr.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0019_unary_expr.rs b/crates/syntax/test_data/parser/inline/ok/0019_unary_expr.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0019_unary_expr.rs rename to crates/syntax/test_data/parser/inline/ok/0019_unary_expr.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0020_use_star.rast b/crates/syntax/test_data/parser/inline/ok/0020_use_star.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0020_use_star.rast rename to crates/syntax/test_data/parser/inline/ok/0020_use_star.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0020_use_star.rs b/crates/syntax/test_data/parser/inline/ok/0020_use_star.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0020_use_star.rs rename to crates/syntax/test_data/parser/inline/ok/0020_use_star.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0021_impl_item_list.rast b/crates/syntax/test_data/parser/inline/ok/0021_impl_item_list.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0021_impl_item_list.rast rename to crates/syntax/test_data/parser/inline/ok/0021_impl_item_list.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0021_impl_item_list.rs b/crates/syntax/test_data/parser/inline/ok/0021_impl_item_list.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0021_impl_item_list.rs rename to crates/syntax/test_data/parser/inline/ok/0021_impl_item_list.rs diff --git 
a/crates/ra_syntax/test_data/parser/inline/ok/0022_crate_visibility.rast b/crates/syntax/test_data/parser/inline/ok/0022_crate_visibility.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0022_crate_visibility.rast rename to crates/syntax/test_data/parser/inline/ok/0022_crate_visibility.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0022_crate_visibility.rs b/crates/syntax/test_data/parser/inline/ok/0022_crate_visibility.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0022_crate_visibility.rs rename to crates/syntax/test_data/parser/inline/ok/0022_crate_visibility.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0023_placeholder_type.rast b/crates/syntax/test_data/parser/inline/ok/0023_placeholder_type.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0023_placeholder_type.rast rename to crates/syntax/test_data/parser/inline/ok/0023_placeholder_type.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0023_placeholder_type.rs b/crates/syntax/test_data/parser/inline/ok/0023_placeholder_type.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0023_placeholder_type.rs rename to crates/syntax/test_data/parser/inline/ok/0023_placeholder_type.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0024_slice_pat.rast b/crates/syntax/test_data/parser/inline/ok/0024_slice_pat.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0024_slice_pat.rast rename to crates/syntax/test_data/parser/inline/ok/0024_slice_pat.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0024_slice_pat.rs b/crates/syntax/test_data/parser/inline/ok/0024_slice_pat.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0024_slice_pat.rs rename to crates/syntax/test_data/parser/inline/ok/0024_slice_pat.rs diff --git 
a/crates/ra_syntax/test_data/parser/inline/ok/0025_slice_type.rast b/crates/syntax/test_data/parser/inline/ok/0025_slice_type.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0025_slice_type.rast rename to crates/syntax/test_data/parser/inline/ok/0025_slice_type.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0025_slice_type.rs b/crates/syntax/test_data/parser/inline/ok/0025_slice_type.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0025_slice_type.rs rename to crates/syntax/test_data/parser/inline/ok/0025_slice_type.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0026_tuple_pat_fields.rast b/crates/syntax/test_data/parser/inline/ok/0026_tuple_pat_fields.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0026_tuple_pat_fields.rast rename to crates/syntax/test_data/parser/inline/ok/0026_tuple_pat_fields.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0026_tuple_pat_fields.rs b/crates/syntax/test_data/parser/inline/ok/0026_tuple_pat_fields.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0026_tuple_pat_fields.rs rename to crates/syntax/test_data/parser/inline/ok/0026_tuple_pat_fields.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0027_ref_pat.rast b/crates/syntax/test_data/parser/inline/ok/0027_ref_pat.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0027_ref_pat.rast rename to crates/syntax/test_data/parser/inline/ok/0027_ref_pat.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0027_ref_pat.rs b/crates/syntax/test_data/parser/inline/ok/0027_ref_pat.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0027_ref_pat.rs rename to crates/syntax/test_data/parser/inline/ok/0027_ref_pat.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0028_impl_trait_type.rast 
b/crates/syntax/test_data/parser/inline/ok/0028_impl_trait_type.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0028_impl_trait_type.rast rename to crates/syntax/test_data/parser/inline/ok/0028_impl_trait_type.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0028_impl_trait_type.rs b/crates/syntax/test_data/parser/inline/ok/0028_impl_trait_type.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0028_impl_trait_type.rs rename to crates/syntax/test_data/parser/inline/ok/0028_impl_trait_type.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0029_cast_expr.rast b/crates/syntax/test_data/parser/inline/ok/0029_cast_expr.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0029_cast_expr.rast rename to crates/syntax/test_data/parser/inline/ok/0029_cast_expr.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0029_cast_expr.rs b/crates/syntax/test_data/parser/inline/ok/0029_cast_expr.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0029_cast_expr.rs rename to crates/syntax/test_data/parser/inline/ok/0029_cast_expr.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0030_cond.rast b/crates/syntax/test_data/parser/inline/ok/0030_cond.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0030_cond.rast rename to crates/syntax/test_data/parser/inline/ok/0030_cond.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0030_cond.rs b/crates/syntax/test_data/parser/inline/ok/0030_cond.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0030_cond.rs rename to crates/syntax/test_data/parser/inline/ok/0030_cond.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0031_while_expr.rast b/crates/syntax/test_data/parser/inline/ok/0031_while_expr.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0031_while_expr.rast 
rename to crates/syntax/test_data/parser/inline/ok/0031_while_expr.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0031_while_expr.rs b/crates/syntax/test_data/parser/inline/ok/0031_while_expr.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0031_while_expr.rs rename to crates/syntax/test_data/parser/inline/ok/0031_while_expr.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0032_fn_pointer_type.rast b/crates/syntax/test_data/parser/inline/ok/0032_fn_pointer_type.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0032_fn_pointer_type.rast rename to crates/syntax/test_data/parser/inline/ok/0032_fn_pointer_type.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0032_fn_pointer_type.rs b/crates/syntax/test_data/parser/inline/ok/0032_fn_pointer_type.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0032_fn_pointer_type.rs rename to crates/syntax/test_data/parser/inline/ok/0032_fn_pointer_type.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0033_reference_type;.rast b/crates/syntax/test_data/parser/inline/ok/0033_reference_type;.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0033_reference_type;.rast rename to crates/syntax/test_data/parser/inline/ok/0033_reference_type;.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0033_reference_type;.rs b/crates/syntax/test_data/parser/inline/ok/0033_reference_type;.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0033_reference_type;.rs rename to crates/syntax/test_data/parser/inline/ok/0033_reference_type;.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0034_break_expr.rast b/crates/syntax/test_data/parser/inline/ok/0034_break_expr.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0034_break_expr.rast rename to 
crates/syntax/test_data/parser/inline/ok/0034_break_expr.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0034_break_expr.rs b/crates/syntax/test_data/parser/inline/ok/0034_break_expr.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0034_break_expr.rs rename to crates/syntax/test_data/parser/inline/ok/0034_break_expr.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0037_qual_paths.rast b/crates/syntax/test_data/parser/inline/ok/0037_qual_paths.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0037_qual_paths.rast rename to crates/syntax/test_data/parser/inline/ok/0037_qual_paths.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0037_qual_paths.rs b/crates/syntax/test_data/parser/inline/ok/0037_qual_paths.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0037_qual_paths.rs rename to crates/syntax/test_data/parser/inline/ok/0037_qual_paths.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0038_full_range_expr.rast b/crates/syntax/test_data/parser/inline/ok/0038_full_range_expr.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0038_full_range_expr.rast rename to crates/syntax/test_data/parser/inline/ok/0038_full_range_expr.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0038_full_range_expr.rs b/crates/syntax/test_data/parser/inline/ok/0038_full_range_expr.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0038_full_range_expr.rs rename to crates/syntax/test_data/parser/inline/ok/0038_full_range_expr.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0039_type_arg.rast b/crates/syntax/test_data/parser/inline/ok/0039_type_arg.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0039_type_arg.rast rename to crates/syntax/test_data/parser/inline/ok/0039_type_arg.rast diff --git 
a/crates/ra_syntax/test_data/parser/inline/ok/0039_type_arg.rs b/crates/syntax/test_data/parser/inline/ok/0039_type_arg.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0039_type_arg.rs rename to crates/syntax/test_data/parser/inline/ok/0039_type_arg.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0040_crate_keyword_vis.rast b/crates/syntax/test_data/parser/inline/ok/0040_crate_keyword_vis.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0040_crate_keyword_vis.rast rename to crates/syntax/test_data/parser/inline/ok/0040_crate_keyword_vis.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0040_crate_keyword_vis.rs b/crates/syntax/test_data/parser/inline/ok/0040_crate_keyword_vis.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0040_crate_keyword_vis.rs rename to crates/syntax/test_data/parser/inline/ok/0040_crate_keyword_vis.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0041_trait_item.rast b/crates/syntax/test_data/parser/inline/ok/0041_trait_item.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0041_trait_item.rast rename to crates/syntax/test_data/parser/inline/ok/0041_trait_item.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0041_trait_item.rs b/crates/syntax/test_data/parser/inline/ok/0041_trait_item.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0041_trait_item.rs rename to crates/syntax/test_data/parser/inline/ok/0041_trait_item.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0042_call_expr.rast b/crates/syntax/test_data/parser/inline/ok/0042_call_expr.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0042_call_expr.rast rename to crates/syntax/test_data/parser/inline/ok/0042_call_expr.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0042_call_expr.rs 
b/crates/syntax/test_data/parser/inline/ok/0042_call_expr.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0042_call_expr.rs rename to crates/syntax/test_data/parser/inline/ok/0042_call_expr.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0043_use_alias.rast b/crates/syntax/test_data/parser/inline/ok/0043_use_alias.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0043_use_alias.rast rename to crates/syntax/test_data/parser/inline/ok/0043_use_alias.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0043_use_alias.rs b/crates/syntax/test_data/parser/inline/ok/0043_use_alias.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0043_use_alias.rs rename to crates/syntax/test_data/parser/inline/ok/0043_use_alias.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0044_block_items.rast b/crates/syntax/test_data/parser/inline/ok/0044_block_items.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0044_block_items.rast rename to crates/syntax/test_data/parser/inline/ok/0044_block_items.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0044_block_items.rs b/crates/syntax/test_data/parser/inline/ok/0044_block_items.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0044_block_items.rs rename to crates/syntax/test_data/parser/inline/ok/0044_block_items.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0045_param_list_opt_patterns.rast b/crates/syntax/test_data/parser/inline/ok/0045_param_list_opt_patterns.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0045_param_list_opt_patterns.rast rename to crates/syntax/test_data/parser/inline/ok/0045_param_list_opt_patterns.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0045_param_list_opt_patterns.rs b/crates/syntax/test_data/parser/inline/ok/0045_param_list_opt_patterns.rs 
similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0045_param_list_opt_patterns.rs rename to crates/syntax/test_data/parser/inline/ok/0045_param_list_opt_patterns.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0046_singleton_tuple_type.rast b/crates/syntax/test_data/parser/inline/ok/0046_singleton_tuple_type.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0046_singleton_tuple_type.rast rename to crates/syntax/test_data/parser/inline/ok/0046_singleton_tuple_type.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0046_singleton_tuple_type.rs b/crates/syntax/test_data/parser/inline/ok/0046_singleton_tuple_type.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0046_singleton_tuple_type.rs rename to crates/syntax/test_data/parser/inline/ok/0046_singleton_tuple_type.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0048_path_type_with_bounds.rast b/crates/syntax/test_data/parser/inline/ok/0048_path_type_with_bounds.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0048_path_type_with_bounds.rast rename to crates/syntax/test_data/parser/inline/ok/0048_path_type_with_bounds.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0048_path_type_with_bounds.rs b/crates/syntax/test_data/parser/inline/ok/0048_path_type_with_bounds.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0048_path_type_with_bounds.rs rename to crates/syntax/test_data/parser/inline/ok/0048_path_type_with_bounds.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0050_fn_decl.rast b/crates/syntax/test_data/parser/inline/ok/0050_fn_decl.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0050_fn_decl.rast rename to crates/syntax/test_data/parser/inline/ok/0050_fn_decl.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0050_fn_decl.rs 
b/crates/syntax/test_data/parser/inline/ok/0050_fn_decl.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0050_fn_decl.rs rename to crates/syntax/test_data/parser/inline/ok/0050_fn_decl.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0051_unit_type.rast b/crates/syntax/test_data/parser/inline/ok/0051_unit_type.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0051_unit_type.rast rename to crates/syntax/test_data/parser/inline/ok/0051_unit_type.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0051_unit_type.rs b/crates/syntax/test_data/parser/inline/ok/0051_unit_type.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0051_unit_type.rs rename to crates/syntax/test_data/parser/inline/ok/0051_unit_type.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0052_path_type.rast b/crates/syntax/test_data/parser/inline/ok/0052_path_type.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0052_path_type.rast rename to crates/syntax/test_data/parser/inline/ok/0052_path_type.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0052_path_type.rs b/crates/syntax/test_data/parser/inline/ok/0052_path_type.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0052_path_type.rs rename to crates/syntax/test_data/parser/inline/ok/0052_path_type.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0053_path_expr.rast b/crates/syntax/test_data/parser/inline/ok/0053_path_expr.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0053_path_expr.rast rename to crates/syntax/test_data/parser/inline/ok/0053_path_expr.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0053_path_expr.rs b/crates/syntax/test_data/parser/inline/ok/0053_path_expr.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0053_path_expr.rs rename to 
crates/syntax/test_data/parser/inline/ok/0053_path_expr.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0054_record_field_attrs.rast b/crates/syntax/test_data/parser/inline/ok/0054_record_field_attrs.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0054_record_field_attrs.rast rename to crates/syntax/test_data/parser/inline/ok/0054_record_field_attrs.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0054_record_field_attrs.rs b/crates/syntax/test_data/parser/inline/ok/0054_record_field_attrs.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0054_record_field_attrs.rs rename to crates/syntax/test_data/parser/inline/ok/0054_record_field_attrs.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0055_literal_pattern.rast b/crates/syntax/test_data/parser/inline/ok/0055_literal_pattern.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0055_literal_pattern.rast rename to crates/syntax/test_data/parser/inline/ok/0055_literal_pattern.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0055_literal_pattern.rs b/crates/syntax/test_data/parser/inline/ok/0055_literal_pattern.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0055_literal_pattern.rs rename to crates/syntax/test_data/parser/inline/ok/0055_literal_pattern.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0056_where_clause.rast b/crates/syntax/test_data/parser/inline/ok/0056_where_clause.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0056_where_clause.rast rename to crates/syntax/test_data/parser/inline/ok/0056_where_clause.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0056_where_clause.rs b/crates/syntax/test_data/parser/inline/ok/0056_where_clause.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0056_where_clause.rs rename to 
crates/syntax/test_data/parser/inline/ok/0056_where_clause.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0058_range_pat.rast b/crates/syntax/test_data/parser/inline/ok/0058_range_pat.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0058_range_pat.rast rename to crates/syntax/test_data/parser/inline/ok/0058_range_pat.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0058_range_pat.rs b/crates/syntax/test_data/parser/inline/ok/0058_range_pat.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0058_range_pat.rs rename to crates/syntax/test_data/parser/inline/ok/0058_range_pat.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0059_match_arms_commas.rast b/crates/syntax/test_data/parser/inline/ok/0059_match_arms_commas.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0059_match_arms_commas.rast rename to crates/syntax/test_data/parser/inline/ok/0059_match_arms_commas.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0059_match_arms_commas.rs b/crates/syntax/test_data/parser/inline/ok/0059_match_arms_commas.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0059_match_arms_commas.rs rename to crates/syntax/test_data/parser/inline/ok/0059_match_arms_commas.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0060_extern_crate.rast b/crates/syntax/test_data/parser/inline/ok/0060_extern_crate.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0060_extern_crate.rast rename to crates/syntax/test_data/parser/inline/ok/0060_extern_crate.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0060_extern_crate.rs b/crates/syntax/test_data/parser/inline/ok/0060_extern_crate.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0060_extern_crate.rs rename to crates/syntax/test_data/parser/inline/ok/0060_extern_crate.rs diff --git 
a/crates/ra_syntax/test_data/parser/inline/ok/0061_record_lit.rast b/crates/syntax/test_data/parser/inline/ok/0061_record_lit.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0061_record_lit.rast rename to crates/syntax/test_data/parser/inline/ok/0061_record_lit.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0061_record_lit.rs b/crates/syntax/test_data/parser/inline/ok/0061_record_lit.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0061_record_lit.rs rename to crates/syntax/test_data/parser/inline/ok/0061_record_lit.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0062_mod_contents.rast b/crates/syntax/test_data/parser/inline/ok/0062_mod_contents.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0062_mod_contents.rast rename to crates/syntax/test_data/parser/inline/ok/0062_mod_contents.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0062_mod_contents.rs b/crates/syntax/test_data/parser/inline/ok/0062_mod_contents.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0062_mod_contents.rs rename to crates/syntax/test_data/parser/inline/ok/0062_mod_contents.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0063_impl_def_neg.rast b/crates/syntax/test_data/parser/inline/ok/0063_impl_def_neg.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0063_impl_def_neg.rast rename to crates/syntax/test_data/parser/inline/ok/0063_impl_def_neg.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0063_impl_def_neg.rs b/crates/syntax/test_data/parser/inline/ok/0063_impl_def_neg.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0063_impl_def_neg.rs rename to crates/syntax/test_data/parser/inline/ok/0063_impl_def_neg.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0064_if_expr.rast 
b/crates/syntax/test_data/parser/inline/ok/0064_if_expr.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0064_if_expr.rast rename to crates/syntax/test_data/parser/inline/ok/0064_if_expr.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0064_if_expr.rs b/crates/syntax/test_data/parser/inline/ok/0064_if_expr.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0064_if_expr.rs rename to crates/syntax/test_data/parser/inline/ok/0064_if_expr.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0065_dyn_trait_type.rast b/crates/syntax/test_data/parser/inline/ok/0065_dyn_trait_type.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0065_dyn_trait_type.rast rename to crates/syntax/test_data/parser/inline/ok/0065_dyn_trait_type.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0065_dyn_trait_type.rs b/crates/syntax/test_data/parser/inline/ok/0065_dyn_trait_type.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0065_dyn_trait_type.rs rename to crates/syntax/test_data/parser/inline/ok/0065_dyn_trait_type.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0066_match_arm.rast b/crates/syntax/test_data/parser/inline/ok/0066_match_arm.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0066_match_arm.rast rename to crates/syntax/test_data/parser/inline/ok/0066_match_arm.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0066_match_arm.rs b/crates/syntax/test_data/parser/inline/ok/0066_match_arm.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0066_match_arm.rs rename to crates/syntax/test_data/parser/inline/ok/0066_match_arm.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0067_crate_path.rast b/crates/syntax/test_data/parser/inline/ok/0067_crate_path.rast similarity index 100% rename from 
crates/ra_syntax/test_data/parser/inline/ok/0067_crate_path.rast rename to crates/syntax/test_data/parser/inline/ok/0067_crate_path.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0067_crate_path.rs b/crates/syntax/test_data/parser/inline/ok/0067_crate_path.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0067_crate_path.rs rename to crates/syntax/test_data/parser/inline/ok/0067_crate_path.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0068_union_items.rast b/crates/syntax/test_data/parser/inline/ok/0068_union_items.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0068_union_items.rast rename to crates/syntax/test_data/parser/inline/ok/0068_union_items.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0068_union_items.rs b/crates/syntax/test_data/parser/inline/ok/0068_union_items.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0068_union_items.rs rename to crates/syntax/test_data/parser/inline/ok/0068_union_items.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0069_use_tree_list_after_path.rast b/crates/syntax/test_data/parser/inline/ok/0069_use_tree_list_after_path.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0069_use_tree_list_after_path.rast rename to crates/syntax/test_data/parser/inline/ok/0069_use_tree_list_after_path.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0069_use_tree_list_after_path.rs b/crates/syntax/test_data/parser/inline/ok/0069_use_tree_list_after_path.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0069_use_tree_list_after_path.rs rename to crates/syntax/test_data/parser/inline/ok/0069_use_tree_list_after_path.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0070_stmt_bin_expr_ambiguity.rast b/crates/syntax/test_data/parser/inline/ok/0070_stmt_bin_expr_ambiguity.rast similarity index 100% rename from 
crates/ra_syntax/test_data/parser/inline/ok/0070_stmt_bin_expr_ambiguity.rast rename to crates/syntax/test_data/parser/inline/ok/0070_stmt_bin_expr_ambiguity.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0070_stmt_bin_expr_ambiguity.rs b/crates/syntax/test_data/parser/inline/ok/0070_stmt_bin_expr_ambiguity.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0070_stmt_bin_expr_ambiguity.rs rename to crates/syntax/test_data/parser/inline/ok/0070_stmt_bin_expr_ambiguity.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0071_match_expr.rast b/crates/syntax/test_data/parser/inline/ok/0071_match_expr.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0071_match_expr.rast rename to crates/syntax/test_data/parser/inline/ok/0071_match_expr.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0071_match_expr.rs b/crates/syntax/test_data/parser/inline/ok/0071_match_expr.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0071_match_expr.rs rename to crates/syntax/test_data/parser/inline/ok/0071_match_expr.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0072_return_expr.rast b/crates/syntax/test_data/parser/inline/ok/0072_return_expr.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0072_return_expr.rast rename to crates/syntax/test_data/parser/inline/ok/0072_return_expr.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0072_return_expr.rs b/crates/syntax/test_data/parser/inline/ok/0072_return_expr.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0072_return_expr.rs rename to crates/syntax/test_data/parser/inline/ok/0072_return_expr.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0073_type_item_type_params.rast b/crates/syntax/test_data/parser/inline/ok/0073_type_item_type_params.rast similarity index 100% rename from 
crates/ra_syntax/test_data/parser/inline/ok/0073_type_item_type_params.rast rename to crates/syntax/test_data/parser/inline/ok/0073_type_item_type_params.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0073_type_item_type_params.rs b/crates/syntax/test_data/parser/inline/ok/0073_type_item_type_params.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0073_type_item_type_params.rs rename to crates/syntax/test_data/parser/inline/ok/0073_type_item_type_params.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0074_stmt_postfix_expr_ambiguity.rast b/crates/syntax/test_data/parser/inline/ok/0074_stmt_postfix_expr_ambiguity.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0074_stmt_postfix_expr_ambiguity.rast rename to crates/syntax/test_data/parser/inline/ok/0074_stmt_postfix_expr_ambiguity.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0074_stmt_postfix_expr_ambiguity.rs b/crates/syntax/test_data/parser/inline/ok/0074_stmt_postfix_expr_ambiguity.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0074_stmt_postfix_expr_ambiguity.rs rename to crates/syntax/test_data/parser/inline/ok/0074_stmt_postfix_expr_ambiguity.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0075_block.rast b/crates/syntax/test_data/parser/inline/ok/0075_block.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0075_block.rast rename to crates/syntax/test_data/parser/inline/ok/0075_block.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0075_block.rs b/crates/syntax/test_data/parser/inline/ok/0075_block.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0075_block.rs rename to crates/syntax/test_data/parser/inline/ok/0075_block.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0076_function_where_clause.rast 
b/crates/syntax/test_data/parser/inline/ok/0076_function_where_clause.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0076_function_where_clause.rast rename to crates/syntax/test_data/parser/inline/ok/0076_function_where_clause.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0076_function_where_clause.rs b/crates/syntax/test_data/parser/inline/ok/0076_function_where_clause.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0076_function_where_clause.rs rename to crates/syntax/test_data/parser/inline/ok/0076_function_where_clause.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0077_try_expr.rast b/crates/syntax/test_data/parser/inline/ok/0077_try_expr.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0077_try_expr.rast rename to crates/syntax/test_data/parser/inline/ok/0077_try_expr.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0077_try_expr.rs b/crates/syntax/test_data/parser/inline/ok/0077_try_expr.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0077_try_expr.rs rename to crates/syntax/test_data/parser/inline/ok/0077_try_expr.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0078_type_item.rast b/crates/syntax/test_data/parser/inline/ok/0078_type_item.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0078_type_item.rast rename to crates/syntax/test_data/parser/inline/ok/0078_type_item.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0078_type_item.rs b/crates/syntax/test_data/parser/inline/ok/0078_type_item.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0078_type_item.rs rename to crates/syntax/test_data/parser/inline/ok/0078_type_item.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0079_impl_def.rast b/crates/syntax/test_data/parser/inline/ok/0079_impl_def.rast similarity index 100% rename 
from crates/ra_syntax/test_data/parser/inline/ok/0079_impl_def.rast rename to crates/syntax/test_data/parser/inline/ok/0079_impl_def.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0079_impl_def.rs b/crates/syntax/test_data/parser/inline/ok/0079_impl_def.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0079_impl_def.rs rename to crates/syntax/test_data/parser/inline/ok/0079_impl_def.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0080_postfix_range.rast b/crates/syntax/test_data/parser/inline/ok/0080_postfix_range.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0080_postfix_range.rast rename to crates/syntax/test_data/parser/inline/ok/0080_postfix_range.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0080_postfix_range.rs b/crates/syntax/test_data/parser/inline/ok/0080_postfix_range.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0080_postfix_range.rs rename to crates/syntax/test_data/parser/inline/ok/0080_postfix_range.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0081_for_type.rast b/crates/syntax/test_data/parser/inline/ok/0081_for_type.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0081_for_type.rast rename to crates/syntax/test_data/parser/inline/ok/0081_for_type.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0081_for_type.rs b/crates/syntax/test_data/parser/inline/ok/0081_for_type.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0081_for_type.rs rename to crates/syntax/test_data/parser/inline/ok/0081_for_type.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0082_ref_expr.rast b/crates/syntax/test_data/parser/inline/ok/0082_ref_expr.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0082_ref_expr.rast rename to crates/syntax/test_data/parser/inline/ok/0082_ref_expr.rast diff --git 
a/crates/ra_syntax/test_data/parser/inline/ok/0082_ref_expr.rs b/crates/syntax/test_data/parser/inline/ok/0082_ref_expr.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0082_ref_expr.rs rename to crates/syntax/test_data/parser/inline/ok/0082_ref_expr.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0083_struct_items.rast b/crates/syntax/test_data/parser/inline/ok/0083_struct_items.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0083_struct_items.rast rename to crates/syntax/test_data/parser/inline/ok/0083_struct_items.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0083_struct_items.rs b/crates/syntax/test_data/parser/inline/ok/0083_struct_items.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0083_struct_items.rs rename to crates/syntax/test_data/parser/inline/ok/0083_struct_items.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0084_paren_type.rast b/crates/syntax/test_data/parser/inline/ok/0084_paren_type.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0084_paren_type.rast rename to crates/syntax/test_data/parser/inline/ok/0084_paren_type.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0084_paren_type.rs b/crates/syntax/test_data/parser/inline/ok/0084_paren_type.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0084_paren_type.rs rename to crates/syntax/test_data/parser/inline/ok/0084_paren_type.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0085_expr_literals.rast b/crates/syntax/test_data/parser/inline/ok/0085_expr_literals.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0085_expr_literals.rast rename to crates/syntax/test_data/parser/inline/ok/0085_expr_literals.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0085_expr_literals.rs 
b/crates/syntax/test_data/parser/inline/ok/0085_expr_literals.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0085_expr_literals.rs rename to crates/syntax/test_data/parser/inline/ok/0085_expr_literals.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0086_function_ret_type.rast b/crates/syntax/test_data/parser/inline/ok/0086_function_ret_type.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0086_function_ret_type.rast rename to crates/syntax/test_data/parser/inline/ok/0086_function_ret_type.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0086_function_ret_type.rs b/crates/syntax/test_data/parser/inline/ok/0086_function_ret_type.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0086_function_ret_type.rs rename to crates/syntax/test_data/parser/inline/ok/0086_function_ret_type.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0088_break_ambiguity.rast b/crates/syntax/test_data/parser/inline/ok/0088_break_ambiguity.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0088_break_ambiguity.rast rename to crates/syntax/test_data/parser/inline/ok/0088_break_ambiguity.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0088_break_ambiguity.rs b/crates/syntax/test_data/parser/inline/ok/0088_break_ambiguity.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0088_break_ambiguity.rs rename to crates/syntax/test_data/parser/inline/ok/0088_break_ambiguity.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0090_type_param_default.rast b/crates/syntax/test_data/parser/inline/ok/0090_type_param_default.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0090_type_param_default.rast rename to crates/syntax/test_data/parser/inline/ok/0090_type_param_default.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0090_type_param_default.rs 
b/crates/syntax/test_data/parser/inline/ok/0090_type_param_default.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0090_type_param_default.rs rename to crates/syntax/test_data/parser/inline/ok/0090_type_param_default.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0092_fn_pointer_type_with_ret.rast b/crates/syntax/test_data/parser/inline/ok/0092_fn_pointer_type_with_ret.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0092_fn_pointer_type_with_ret.rast rename to crates/syntax/test_data/parser/inline/ok/0092_fn_pointer_type_with_ret.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0092_fn_pointer_type_with_ret.rs b/crates/syntax/test_data/parser/inline/ok/0092_fn_pointer_type_with_ret.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0092_fn_pointer_type_with_ret.rs rename to crates/syntax/test_data/parser/inline/ok/0092_fn_pointer_type_with_ret.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0093_index_expr.rast b/crates/syntax/test_data/parser/inline/ok/0093_index_expr.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0093_index_expr.rast rename to crates/syntax/test_data/parser/inline/ok/0093_index_expr.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0093_index_expr.rs b/crates/syntax/test_data/parser/inline/ok/0093_index_expr.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0093_index_expr.rs rename to crates/syntax/test_data/parser/inline/ok/0093_index_expr.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0095_placeholder_pat.rast b/crates/syntax/test_data/parser/inline/ok/0095_placeholder_pat.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0095_placeholder_pat.rast rename to crates/syntax/test_data/parser/inline/ok/0095_placeholder_pat.rast diff --git 
a/crates/ra_syntax/test_data/parser/inline/ok/0095_placeholder_pat.rs b/crates/syntax/test_data/parser/inline/ok/0095_placeholder_pat.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0095_placeholder_pat.rs rename to crates/syntax/test_data/parser/inline/ok/0095_placeholder_pat.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0096_no_semi_after_block.rast b/crates/syntax/test_data/parser/inline/ok/0096_no_semi_after_block.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0096_no_semi_after_block.rast rename to crates/syntax/test_data/parser/inline/ok/0096_no_semi_after_block.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0096_no_semi_after_block.rs b/crates/syntax/test_data/parser/inline/ok/0096_no_semi_after_block.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0096_no_semi_after_block.rs rename to crates/syntax/test_data/parser/inline/ok/0096_no_semi_after_block.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0099_param_list.rast b/crates/syntax/test_data/parser/inline/ok/0099_param_list.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0099_param_list.rast rename to crates/syntax/test_data/parser/inline/ok/0099_param_list.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0099_param_list.rs b/crates/syntax/test_data/parser/inline/ok/0099_param_list.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0099_param_list.rs rename to crates/syntax/test_data/parser/inline/ok/0099_param_list.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0100_for_expr.rast b/crates/syntax/test_data/parser/inline/ok/0100_for_expr.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0100_for_expr.rast rename to crates/syntax/test_data/parser/inline/ok/0100_for_expr.rast diff --git 
a/crates/ra_syntax/test_data/parser/inline/ok/0100_for_expr.rs b/crates/syntax/test_data/parser/inline/ok/0100_for_expr.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0100_for_expr.rs rename to crates/syntax/test_data/parser/inline/ok/0100_for_expr.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0102_record_field_pat_list.rast b/crates/syntax/test_data/parser/inline/ok/0102_record_field_pat_list.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0102_record_field_pat_list.rast rename to crates/syntax/test_data/parser/inline/ok/0102_record_field_pat_list.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0102_record_field_pat_list.rs b/crates/syntax/test_data/parser/inline/ok/0102_record_field_pat_list.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0102_record_field_pat_list.rs rename to crates/syntax/test_data/parser/inline/ok/0102_record_field_pat_list.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0103_array_expr.rast b/crates/syntax/test_data/parser/inline/ok/0103_array_expr.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0103_array_expr.rast rename to crates/syntax/test_data/parser/inline/ok/0103_array_expr.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0103_array_expr.rs b/crates/syntax/test_data/parser/inline/ok/0103_array_expr.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0103_array_expr.rs rename to crates/syntax/test_data/parser/inline/ok/0103_array_expr.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0104_path_fn_trait_args.rast b/crates/syntax/test_data/parser/inline/ok/0104_path_fn_trait_args.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0104_path_fn_trait_args.rast rename to crates/syntax/test_data/parser/inline/ok/0104_path_fn_trait_args.rast diff --git 
a/crates/ra_syntax/test_data/parser/inline/ok/0104_path_fn_trait_args.rs b/crates/syntax/test_data/parser/inline/ok/0104_path_fn_trait_args.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0104_path_fn_trait_args.rs rename to crates/syntax/test_data/parser/inline/ok/0104_path_fn_trait_args.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0106_lambda_expr.rast b/crates/syntax/test_data/parser/inline/ok/0106_lambda_expr.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0106_lambda_expr.rast rename to crates/syntax/test_data/parser/inline/ok/0106_lambda_expr.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0106_lambda_expr.rs b/crates/syntax/test_data/parser/inline/ok/0106_lambda_expr.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0106_lambda_expr.rs rename to crates/syntax/test_data/parser/inline/ok/0106_lambda_expr.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0107_method_call_expr.rast b/crates/syntax/test_data/parser/inline/ok/0107_method_call_expr.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0107_method_call_expr.rast rename to crates/syntax/test_data/parser/inline/ok/0107_method_call_expr.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0107_method_call_expr.rs b/crates/syntax/test_data/parser/inline/ok/0107_method_call_expr.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0107_method_call_expr.rs rename to crates/syntax/test_data/parser/inline/ok/0107_method_call_expr.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0108_tuple_expr.rast b/crates/syntax/test_data/parser/inline/ok/0108_tuple_expr.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0108_tuple_expr.rast rename to crates/syntax/test_data/parser/inline/ok/0108_tuple_expr.rast diff --git 
a/crates/ra_syntax/test_data/parser/inline/ok/0108_tuple_expr.rs b/crates/syntax/test_data/parser/inline/ok/0108_tuple_expr.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0108_tuple_expr.rs rename to crates/syntax/test_data/parser/inline/ok/0108_tuple_expr.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0109_label.rast b/crates/syntax/test_data/parser/inline/ok/0109_label.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0109_label.rast rename to crates/syntax/test_data/parser/inline/ok/0109_label.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0109_label.rs b/crates/syntax/test_data/parser/inline/ok/0109_label.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0109_label.rs rename to crates/syntax/test_data/parser/inline/ok/0109_label.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0110_use_path.rast b/crates/syntax/test_data/parser/inline/ok/0110_use_path.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0110_use_path.rast rename to crates/syntax/test_data/parser/inline/ok/0110_use_path.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0110_use_path.rs b/crates/syntax/test_data/parser/inline/ok/0110_use_path.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0110_use_path.rs rename to crates/syntax/test_data/parser/inline/ok/0110_use_path.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0111_tuple_pat.rast b/crates/syntax/test_data/parser/inline/ok/0111_tuple_pat.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0111_tuple_pat.rast rename to crates/syntax/test_data/parser/inline/ok/0111_tuple_pat.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0111_tuple_pat.rs b/crates/syntax/test_data/parser/inline/ok/0111_tuple_pat.rs similarity index 100% rename from 
crates/ra_syntax/test_data/parser/inline/ok/0111_tuple_pat.rs rename to crates/syntax/test_data/parser/inline/ok/0111_tuple_pat.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0112_bind_pat.rast b/crates/syntax/test_data/parser/inline/ok/0112_bind_pat.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0112_bind_pat.rast rename to crates/syntax/test_data/parser/inline/ok/0112_bind_pat.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0112_bind_pat.rs b/crates/syntax/test_data/parser/inline/ok/0112_bind_pat.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0112_bind_pat.rs rename to crates/syntax/test_data/parser/inline/ok/0112_bind_pat.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0113_nocontentexpr.rast b/crates/syntax/test_data/parser/inline/ok/0113_nocontentexpr.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0113_nocontentexpr.rast rename to crates/syntax/test_data/parser/inline/ok/0113_nocontentexpr.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0113_nocontentexpr.rs b/crates/syntax/test_data/parser/inline/ok/0113_nocontentexpr.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0113_nocontentexpr.rs rename to crates/syntax/test_data/parser/inline/ok/0113_nocontentexpr.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0114_tuple_struct_where.rast b/crates/syntax/test_data/parser/inline/ok/0114_tuple_struct_where.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0114_tuple_struct_where.rast rename to crates/syntax/test_data/parser/inline/ok/0114_tuple_struct_where.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0114_tuple_struct_where.rs b/crates/syntax/test_data/parser/inline/ok/0114_tuple_struct_where.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0114_tuple_struct_where.rs rename to 
crates/syntax/test_data/parser/inline/ok/0114_tuple_struct_where.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0115_tuple_field_attrs.rast b/crates/syntax/test_data/parser/inline/ok/0115_tuple_field_attrs.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0115_tuple_field_attrs.rast rename to crates/syntax/test_data/parser/inline/ok/0115_tuple_field_attrs.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0115_tuple_field_attrs.rs b/crates/syntax/test_data/parser/inline/ok/0115_tuple_field_attrs.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0115_tuple_field_attrs.rs rename to crates/syntax/test_data/parser/inline/ok/0115_tuple_field_attrs.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0117_macro_call_type.rast b/crates/syntax/test_data/parser/inline/ok/0117_macro_call_type.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0117_macro_call_type.rast rename to crates/syntax/test_data/parser/inline/ok/0117_macro_call_type.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0117_macro_call_type.rs b/crates/syntax/test_data/parser/inline/ok/0117_macro_call_type.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0117_macro_call_type.rs rename to crates/syntax/test_data/parser/inline/ok/0117_macro_call_type.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0118_impl_inner_attributes.rast b/crates/syntax/test_data/parser/inline/ok/0118_impl_inner_attributes.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0118_impl_inner_attributes.rast rename to crates/syntax/test_data/parser/inline/ok/0118_impl_inner_attributes.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0118_impl_inner_attributes.rs b/crates/syntax/test_data/parser/inline/ok/0118_impl_inner_attributes.rs similarity index 100% rename from 
crates/ra_syntax/test_data/parser/inline/ok/0118_impl_inner_attributes.rs rename to crates/syntax/test_data/parser/inline/ok/0118_impl_inner_attributes.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0118_match_guard.rast b/crates/syntax/test_data/parser/inline/ok/0118_match_guard.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0118_match_guard.rast rename to crates/syntax/test_data/parser/inline/ok/0118_match_guard.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0118_match_guard.rs b/crates/syntax/test_data/parser/inline/ok/0118_match_guard.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0118_match_guard.rs rename to crates/syntax/test_data/parser/inline/ok/0118_match_guard.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0120_match_arms_inner_attribute.rast b/crates/syntax/test_data/parser/inline/ok/0120_match_arms_inner_attribute.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0120_match_arms_inner_attribute.rast rename to crates/syntax/test_data/parser/inline/ok/0120_match_arms_inner_attribute.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0120_match_arms_inner_attribute.rs b/crates/syntax/test_data/parser/inline/ok/0120_match_arms_inner_attribute.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0120_match_arms_inner_attribute.rs rename to crates/syntax/test_data/parser/inline/ok/0120_match_arms_inner_attribute.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0121_match_arms_outer_attributes.rast b/crates/syntax/test_data/parser/inline/ok/0121_match_arms_outer_attributes.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0121_match_arms_outer_attributes.rast rename to crates/syntax/test_data/parser/inline/ok/0121_match_arms_outer_attributes.rast diff --git 
a/crates/ra_syntax/test_data/parser/inline/ok/0121_match_arms_outer_attributes.rs b/crates/syntax/test_data/parser/inline/ok/0121_match_arms_outer_attributes.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0121_match_arms_outer_attributes.rs rename to crates/syntax/test_data/parser/inline/ok/0121_match_arms_outer_attributes.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0122_generic_lifetime_type_attribute.rast b/crates/syntax/test_data/parser/inline/ok/0122_generic_lifetime_type_attribute.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0122_generic_lifetime_type_attribute.rast rename to crates/syntax/test_data/parser/inline/ok/0122_generic_lifetime_type_attribute.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0122_generic_lifetime_type_attribute.rs b/crates/syntax/test_data/parser/inline/ok/0122_generic_lifetime_type_attribute.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0122_generic_lifetime_type_attribute.rs rename to crates/syntax/test_data/parser/inline/ok/0122_generic_lifetime_type_attribute.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0123_param_list_vararg.rast b/crates/syntax/test_data/parser/inline/ok/0123_param_list_vararg.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0123_param_list_vararg.rast rename to crates/syntax/test_data/parser/inline/ok/0123_param_list_vararg.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0123_param_list_vararg.rs b/crates/syntax/test_data/parser/inline/ok/0123_param_list_vararg.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0123_param_list_vararg.rs rename to crates/syntax/test_data/parser/inline/ok/0123_param_list_vararg.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0125_crate_keyword_path.rast b/crates/syntax/test_data/parser/inline/ok/0125_crate_keyword_path.rast similarity index 
100% rename from crates/ra_syntax/test_data/parser/inline/ok/0125_crate_keyword_path.rast rename to crates/syntax/test_data/parser/inline/ok/0125_crate_keyword_path.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0125_crate_keyword_path.rs b/crates/syntax/test_data/parser/inline/ok/0125_crate_keyword_path.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0125_crate_keyword_path.rs rename to crates/syntax/test_data/parser/inline/ok/0125_crate_keyword_path.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0125_record_literal_field_with_attr.rast b/crates/syntax/test_data/parser/inline/ok/0125_record_literal_field_with_attr.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0125_record_literal_field_with_attr.rast rename to crates/syntax/test_data/parser/inline/ok/0125_record_literal_field_with_attr.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0125_record_literal_field_with_attr.rs b/crates/syntax/test_data/parser/inline/ok/0125_record_literal_field_with_attr.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0125_record_literal_field_with_attr.rs rename to crates/syntax/test_data/parser/inline/ok/0125_record_literal_field_with_attr.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0126_attr_on_expr_stmt.rast b/crates/syntax/test_data/parser/inline/ok/0126_attr_on_expr_stmt.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0126_attr_on_expr_stmt.rast rename to crates/syntax/test_data/parser/inline/ok/0126_attr_on_expr_stmt.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0126_attr_on_expr_stmt.rs b/crates/syntax/test_data/parser/inline/ok/0126_attr_on_expr_stmt.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0126_attr_on_expr_stmt.rs rename to crates/syntax/test_data/parser/inline/ok/0126_attr_on_expr_stmt.rs diff --git 
a/crates/ra_syntax/test_data/parser/inline/ok/0127_attr_on_last_expr_in_block.rast b/crates/syntax/test_data/parser/inline/ok/0127_attr_on_last_expr_in_block.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0127_attr_on_last_expr_in_block.rast rename to crates/syntax/test_data/parser/inline/ok/0127_attr_on_last_expr_in_block.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0127_attr_on_last_expr_in_block.rs b/crates/syntax/test_data/parser/inline/ok/0127_attr_on_last_expr_in_block.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0127_attr_on_last_expr_in_block.rs rename to crates/syntax/test_data/parser/inline/ok/0127_attr_on_last_expr_in_block.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0129_marco_pat.rast b/crates/syntax/test_data/parser/inline/ok/0129_marco_pat.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0129_marco_pat.rast rename to crates/syntax/test_data/parser/inline/ok/0129_marco_pat.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0129_marco_pat.rs b/crates/syntax/test_data/parser/inline/ok/0129_marco_pat.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0129_marco_pat.rs rename to crates/syntax/test_data/parser/inline/ok/0129_marco_pat.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0130_let_stmt.rast b/crates/syntax/test_data/parser/inline/ok/0130_let_stmt.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0130_let_stmt.rast rename to crates/syntax/test_data/parser/inline/ok/0130_let_stmt.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0130_let_stmt.rs b/crates/syntax/test_data/parser/inline/ok/0130_let_stmt.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0130_let_stmt.rs rename to crates/syntax/test_data/parser/inline/ok/0130_let_stmt.rs diff --git 
a/crates/ra_syntax/test_data/parser/inline/ok/0130_try_block_expr.rast b/crates/syntax/test_data/parser/inline/ok/0130_try_block_expr.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0130_try_block_expr.rast rename to crates/syntax/test_data/parser/inline/ok/0130_try_block_expr.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0130_try_block_expr.rs b/crates/syntax/test_data/parser/inline/ok/0130_try_block_expr.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0130_try_block_expr.rs rename to crates/syntax/test_data/parser/inline/ok/0130_try_block_expr.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0131_existential_type.rast b/crates/syntax/test_data/parser/inline/ok/0131_existential_type.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0131_existential_type.rast rename to crates/syntax/test_data/parser/inline/ok/0131_existential_type.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0131_existential_type.rs b/crates/syntax/test_data/parser/inline/ok/0131_existential_type.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0131_existential_type.rs rename to crates/syntax/test_data/parser/inline/ok/0131_existential_type.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0132_box_expr.rast b/crates/syntax/test_data/parser/inline/ok/0132_box_expr.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0132_box_expr.rast rename to crates/syntax/test_data/parser/inline/ok/0132_box_expr.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0132_box_expr.rs b/crates/syntax/test_data/parser/inline/ok/0132_box_expr.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0132_box_expr.rs rename to crates/syntax/test_data/parser/inline/ok/0132_box_expr.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0134_nocontentexpr_after_item.rast 
b/crates/syntax/test_data/parser/inline/ok/0134_nocontentexpr_after_item.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0134_nocontentexpr_after_item.rast rename to crates/syntax/test_data/parser/inline/ok/0134_nocontentexpr_after_item.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0134_nocontentexpr_after_item.rs b/crates/syntax/test_data/parser/inline/ok/0134_nocontentexpr_after_item.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0134_nocontentexpr_after_item.rs rename to crates/syntax/test_data/parser/inline/ok/0134_nocontentexpr_after_item.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0137_await_expr.rast b/crates/syntax/test_data/parser/inline/ok/0137_await_expr.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0137_await_expr.rast rename to crates/syntax/test_data/parser/inline/ok/0137_await_expr.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0137_await_expr.rs b/crates/syntax/test_data/parser/inline/ok/0137_await_expr.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0137_await_expr.rs rename to crates/syntax/test_data/parser/inline/ok/0137_await_expr.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0138_associated_type_bounds.rast b/crates/syntax/test_data/parser/inline/ok/0138_associated_type_bounds.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0138_associated_type_bounds.rast rename to crates/syntax/test_data/parser/inline/ok/0138_associated_type_bounds.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0138_associated_type_bounds.rs b/crates/syntax/test_data/parser/inline/ok/0138_associated_type_bounds.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0138_associated_type_bounds.rs rename to crates/syntax/test_data/parser/inline/ok/0138_associated_type_bounds.rs diff --git 
a/crates/ra_syntax/test_data/parser/inline/ok/0138_expression_after_block.rast b/crates/syntax/test_data/parser/inline/ok/0138_expression_after_block.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0138_expression_after_block.rast rename to crates/syntax/test_data/parser/inline/ok/0138_expression_after_block.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0138_expression_after_block.rs b/crates/syntax/test_data/parser/inline/ok/0138_expression_after_block.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0138_expression_after_block.rs rename to crates/syntax/test_data/parser/inline/ok/0138_expression_after_block.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0138_self_param_outer_attr.rast b/crates/syntax/test_data/parser/inline/ok/0138_self_param_outer_attr.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0138_self_param_outer_attr.rast rename to crates/syntax/test_data/parser/inline/ok/0138_self_param_outer_attr.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0138_self_param_outer_attr.rs b/crates/syntax/test_data/parser/inline/ok/0138_self_param_outer_attr.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0138_self_param_outer_attr.rs rename to crates/syntax/test_data/parser/inline/ok/0138_self_param_outer_attr.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0139_param_outer_arg.rast b/crates/syntax/test_data/parser/inline/ok/0139_param_outer_arg.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0139_param_outer_arg.rast rename to crates/syntax/test_data/parser/inline/ok/0139_param_outer_arg.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0139_param_outer_arg.rs b/crates/syntax/test_data/parser/inline/ok/0139_param_outer_arg.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0139_param_outer_arg.rs rename 
to crates/syntax/test_data/parser/inline/ok/0139_param_outer_arg.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0142_for_range_from.rast b/crates/syntax/test_data/parser/inline/ok/0142_for_range_from.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0142_for_range_from.rast rename to crates/syntax/test_data/parser/inline/ok/0142_for_range_from.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0142_for_range_from.rs b/crates/syntax/test_data/parser/inline/ok/0142_for_range_from.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0142_for_range_from.rs rename to crates/syntax/test_data/parser/inline/ok/0142_for_range_from.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0143_box_pat.rast b/crates/syntax/test_data/parser/inline/ok/0143_box_pat.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0143_box_pat.rast rename to crates/syntax/test_data/parser/inline/ok/0143_box_pat.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0143_box_pat.rs b/crates/syntax/test_data/parser/inline/ok/0143_box_pat.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0143_box_pat.rs rename to crates/syntax/test_data/parser/inline/ok/0143_box_pat.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0144_dot_dot_pat.rast b/crates/syntax/test_data/parser/inline/ok/0144_dot_dot_pat.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0144_dot_dot_pat.rast rename to crates/syntax/test_data/parser/inline/ok/0144_dot_dot_pat.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0144_dot_dot_pat.rs b/crates/syntax/test_data/parser/inline/ok/0144_dot_dot_pat.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0144_dot_dot_pat.rs rename to crates/syntax/test_data/parser/inline/ok/0144_dot_dot_pat.rs diff --git 
a/crates/ra_syntax/test_data/parser/inline/ok/0145_record_field_pat.rast b/crates/syntax/test_data/parser/inline/ok/0145_record_field_pat.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0145_record_field_pat.rast rename to crates/syntax/test_data/parser/inline/ok/0145_record_field_pat.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0145_record_field_pat.rs b/crates/syntax/test_data/parser/inline/ok/0145_record_field_pat.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0145_record_field_pat.rs rename to crates/syntax/test_data/parser/inline/ok/0145_record_field_pat.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0146_as_precedence.rast b/crates/syntax/test_data/parser/inline/ok/0146_as_precedence.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0146_as_precedence.rast rename to crates/syntax/test_data/parser/inline/ok/0146_as_precedence.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0146_as_precedence.rs b/crates/syntax/test_data/parser/inline/ok/0146_as_precedence.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0146_as_precedence.rs rename to crates/syntax/test_data/parser/inline/ok/0146_as_precedence.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0147_const_param.rast b/crates/syntax/test_data/parser/inline/ok/0147_const_param.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0147_const_param.rast rename to crates/syntax/test_data/parser/inline/ok/0147_const_param.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0147_const_param.rs b/crates/syntax/test_data/parser/inline/ok/0147_const_param.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0147_const_param.rs rename to crates/syntax/test_data/parser/inline/ok/0147_const_param.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0147_macro_def.rast 
b/crates/syntax/test_data/parser/inline/ok/0147_macro_def.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0147_macro_def.rast rename to crates/syntax/test_data/parser/inline/ok/0147_macro_def.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0147_macro_def.rs b/crates/syntax/test_data/parser/inline/ok/0147_macro_def.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0147_macro_def.rs rename to crates/syntax/test_data/parser/inline/ok/0147_macro_def.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0148_pub_macro_def.rast b/crates/syntax/test_data/parser/inline/ok/0148_pub_macro_def.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0148_pub_macro_def.rast rename to crates/syntax/test_data/parser/inline/ok/0148_pub_macro_def.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0148_pub_macro_def.rs b/crates/syntax/test_data/parser/inline/ok/0148_pub_macro_def.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0148_pub_macro_def.rs rename to crates/syntax/test_data/parser/inline/ok/0148_pub_macro_def.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0150_array_attrs.rast b/crates/syntax/test_data/parser/inline/ok/0150_array_attrs.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0150_array_attrs.rast rename to crates/syntax/test_data/parser/inline/ok/0150_array_attrs.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0150_array_attrs.rs b/crates/syntax/test_data/parser/inline/ok/0150_array_attrs.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0150_array_attrs.rs rename to crates/syntax/test_data/parser/inline/ok/0150_array_attrs.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0150_impl_type_params.rast b/crates/syntax/test_data/parser/inline/ok/0150_impl_type_params.rast similarity index 100% rename from 
crates/ra_syntax/test_data/parser/inline/ok/0150_impl_type_params.rast rename to crates/syntax/test_data/parser/inline/ok/0150_impl_type_params.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0150_impl_type_params.rs b/crates/syntax/test_data/parser/inline/ok/0150_impl_type_params.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0150_impl_type_params.rs rename to crates/syntax/test_data/parser/inline/ok/0150_impl_type_params.rs diff --git a/crates/syntax/test_data/parser/inline/ok/0151_fn.rast b/crates/syntax/test_data/parser/inline/ok/0151_fn.rast new file mode 100644 index 0000000000..23c4269b30 --- /dev/null +++ b/crates/syntax/test_data/parser/inline/ok/0151_fn.rast @@ -0,0 +1,14 @@ +SOURCE_FILE@0..12 + FN@0..11 + FN_KW@0..2 "fn" + WHITESPACE@2..3 " " + NAME@3..6 + IDENT@3..6 "foo" + PARAM_LIST@6..8 + L_PAREN@6..7 "(" + R_PAREN@7..8 ")" + WHITESPACE@8..9 " " + BLOCK_EXPR@9..11 + L_CURLY@9..10 "{" + R_CURLY@10..11 "}" + WHITESPACE@11..12 "\n" diff --git a/crates/syntax/test_data/parser/inline/ok/0151_fn.rs b/crates/syntax/test_data/parser/inline/ok/0151_fn.rs new file mode 100644 index 0000000000..8f3b7ef112 --- /dev/null +++ b/crates/syntax/test_data/parser/inline/ok/0151_fn.rs @@ -0,0 +1 @@ +fn foo() {} diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0151_trait_alias.rast b/crates/syntax/test_data/parser/inline/ok/0151_trait_alias.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0151_trait_alias.rast rename to crates/syntax/test_data/parser/inline/ok/0151_trait_alias.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0151_trait_alias.rs b/crates/syntax/test_data/parser/inline/ok/0151_trait_alias.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0151_trait_alias.rs rename to crates/syntax/test_data/parser/inline/ok/0151_trait_alias.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0152_arg_with_attr.rast 
b/crates/syntax/test_data/parser/inline/ok/0152_arg_with_attr.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0152_arg_with_attr.rast rename to crates/syntax/test_data/parser/inline/ok/0152_arg_with_attr.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0152_arg_with_attr.rs b/crates/syntax/test_data/parser/inline/ok/0152_arg_with_attr.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0152_arg_with_attr.rs rename to crates/syntax/test_data/parser/inline/ok/0152_arg_with_attr.rs diff --git a/crates/syntax/test_data/parser/inline/ok/0152_impl.rast b/crates/syntax/test_data/parser/inline/ok/0152_impl.rast new file mode 100644 index 0000000000..7968cf9ffa --- /dev/null +++ b/crates/syntax/test_data/parser/inline/ok/0152_impl.rast @@ -0,0 +1,22 @@ +SOURCE_FILE@0..16 + IMPL@0..15 + IMPL_KW@0..4 "impl" + WHITESPACE@4..5 " " + PATH_TYPE@5..6 + PATH@5..6 + PATH_SEGMENT@5..6 + NAME_REF@5..6 + IDENT@5..6 "T" + WHITESPACE@6..7 " " + FOR_KW@7..10 "for" + WHITESPACE@10..11 " " + PATH_TYPE@11..12 + PATH@11..12 + PATH_SEGMENT@11..12 + NAME_REF@11..12 + IDENT@11..12 "S" + WHITESPACE@12..13 " " + ASSOC_ITEM_LIST@13..15 + L_CURLY@13..14 "{" + R_CURLY@14..15 "}" + WHITESPACE@15..16 "\n" diff --git a/crates/syntax/test_data/parser/inline/ok/0152_impl.rs b/crates/syntax/test_data/parser/inline/ok/0152_impl.rs new file mode 100644 index 0000000000..a1a550d8a6 --- /dev/null +++ b/crates/syntax/test_data/parser/inline/ok/0152_impl.rs @@ -0,0 +1 @@ +impl T for S {} diff --git a/crates/syntax/test_data/parser/inline/ok/0153_trait.rast b/crates/syntax/test_data/parser/inline/ok/0153_trait.rast new file mode 100644 index 0000000000..9881e5048c --- /dev/null +++ b/crates/syntax/test_data/parser/inline/ok/0153_trait.rast @@ -0,0 +1,11 @@ +SOURCE_FILE@0..11 + TRAIT@0..10 + TRAIT_KW@0..5 "trait" + WHITESPACE@5..6 " " + NAME@6..7 + IDENT@6..7 "T" + WHITESPACE@7..8 " " + ASSOC_ITEM_LIST@8..10 + L_CURLY@8..9 "{" + 
R_CURLY@9..10 "}" + WHITESPACE@10..11 "\n" diff --git a/crates/syntax/test_data/parser/inline/ok/0153_trait.rs b/crates/syntax/test_data/parser/inline/ok/0153_trait.rs new file mode 100644 index 0000000000..8d183dbb5d --- /dev/null +++ b/crates/syntax/test_data/parser/inline/ok/0153_trait.rs @@ -0,0 +1 @@ +trait T {} diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0154_fn_pointer_param_ident_path.rast b/crates/syntax/test_data/parser/inline/ok/0154_fn_pointer_param_ident_path.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0154_fn_pointer_param_ident_path.rast rename to crates/syntax/test_data/parser/inline/ok/0154_fn_pointer_param_ident_path.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0154_fn_pointer_param_ident_path.rs b/crates/syntax/test_data/parser/inline/ok/0154_fn_pointer_param_ident_path.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0154_fn_pointer_param_ident_path.rs rename to crates/syntax/test_data/parser/inline/ok/0154_fn_pointer_param_ident_path.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0155_closure_params.rast b/crates/syntax/test_data/parser/inline/ok/0155_closure_params.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0155_closure_params.rast rename to crates/syntax/test_data/parser/inline/ok/0155_closure_params.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0155_closure_params.rs b/crates/syntax/test_data/parser/inline/ok/0155_closure_params.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0155_closure_params.rs rename to crates/syntax/test_data/parser/inline/ok/0155_closure_params.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0156_fn_def_param.rast b/crates/syntax/test_data/parser/inline/ok/0156_fn_def_param.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0156_fn_def_param.rast rename to 
crates/syntax/test_data/parser/inline/ok/0156_fn_def_param.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0156_fn_def_param.rs b/crates/syntax/test_data/parser/inline/ok/0156_fn_def_param.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0156_fn_def_param.rs rename to crates/syntax/test_data/parser/inline/ok/0156_fn_def_param.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0156_or_pattern.rast b/crates/syntax/test_data/parser/inline/ok/0156_or_pattern.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0156_or_pattern.rast rename to crates/syntax/test_data/parser/inline/ok/0156_or_pattern.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0156_or_pattern.rs b/crates/syntax/test_data/parser/inline/ok/0156_or_pattern.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0156_or_pattern.rs rename to crates/syntax/test_data/parser/inline/ok/0156_or_pattern.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.rast b/crates/syntax/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.rast rename to crates/syntax/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.rs b/crates/syntax/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.rs rename to crates/syntax/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0157_variant_discriminant.rast b/crates/syntax/test_data/parser/inline/ok/0157_variant_discriminant.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0157_variant_discriminant.rast rename to 
crates/syntax/test_data/parser/inline/ok/0157_variant_discriminant.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0157_variant_discriminant.rs b/crates/syntax/test_data/parser/inline/ok/0157_variant_discriminant.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0157_variant_discriminant.rs rename to crates/syntax/test_data/parser/inline/ok/0157_variant_discriminant.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0158_binop_resets_statementness.rast b/crates/syntax/test_data/parser/inline/ok/0158_binop_resets_statementness.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0158_binop_resets_statementness.rast rename to crates/syntax/test_data/parser/inline/ok/0158_binop_resets_statementness.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0158_binop_resets_statementness.rs b/crates/syntax/test_data/parser/inline/ok/0158_binop_resets_statementness.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0158_binop_resets_statementness.rs rename to crates/syntax/test_data/parser/inline/ok/0158_binop_resets_statementness.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0158_lambda_ret_block.rast b/crates/syntax/test_data/parser/inline/ok/0158_lambda_ret_block.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0158_lambda_ret_block.rast rename to crates/syntax/test_data/parser/inline/ok/0158_lambda_ret_block.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0158_lambda_ret_block.rs b/crates/syntax/test_data/parser/inline/ok/0158_lambda_ret_block.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0158_lambda_ret_block.rs rename to crates/syntax/test_data/parser/inline/ok/0158_lambda_ret_block.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0159_try_macro_fallback.rast b/crates/syntax/test_data/parser/inline/ok/0159_try_macro_fallback.rast similarity 
index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0159_try_macro_fallback.rast rename to crates/syntax/test_data/parser/inline/ok/0159_try_macro_fallback.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0159_try_macro_fallback.rs b/crates/syntax/test_data/parser/inline/ok/0159_try_macro_fallback.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0159_try_macro_fallback.rs rename to crates/syntax/test_data/parser/inline/ok/0159_try_macro_fallback.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0160_try_macro_rules.rast b/crates/syntax/test_data/parser/inline/ok/0160_try_macro_rules.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0160_try_macro_rules.rast rename to crates/syntax/test_data/parser/inline/ok/0160_try_macro_rules.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0160_try_macro_rules.rs b/crates/syntax/test_data/parser/inline/ok/0160_try_macro_rules.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0160_try_macro_rules.rs rename to crates/syntax/test_data/parser/inline/ok/0160_try_macro_rules.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0161_labeled_block.rast b/crates/syntax/test_data/parser/inline/ok/0161_labeled_block.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0161_labeled_block.rast rename to crates/syntax/test_data/parser/inline/ok/0161_labeled_block.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0161_labeled_block.rs b/crates/syntax/test_data/parser/inline/ok/0161_labeled_block.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0161_labeled_block.rs rename to crates/syntax/test_data/parser/inline/ok/0161_labeled_block.rs diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0162_unsafe_block.rast b/crates/syntax/test_data/parser/inline/ok/0162_unsafe_block.rast similarity index 100% rename from 
crates/ra_syntax/test_data/parser/inline/ok/0162_unsafe_block.rast rename to crates/syntax/test_data/parser/inline/ok/0162_unsafe_block.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0162_unsafe_block.rs b/crates/syntax/test_data/parser/inline/ok/0162_unsafe_block.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0162_unsafe_block.rs rename to crates/syntax/test_data/parser/inline/ok/0162_unsafe_block.rs diff --git a/crates/syntax/test_data/parser/inline/ok/0163_default_unsafe_item.rast b/crates/syntax/test_data/parser/inline/ok/0163_default_unsafe_item.rast new file mode 100644 index 0000000000..f2e2014605 --- /dev/null +++ b/crates/syntax/test_data/parser/inline/ok/0163_default_unsafe_item.rast @@ -0,0 +1,44 @@ +SOURCE_FILE@0..65 + IMPL@0..64 + DEFAULT_KW@0..7 "default" + WHITESPACE@7..8 " " + UNSAFE_KW@8..14 "unsafe" + WHITESPACE@14..15 " " + IMPL_KW@15..19 "impl" + WHITESPACE@19..20 " " + PATH_TYPE@20..21 + PATH@20..21 + PATH_SEGMENT@20..21 + NAME_REF@20..21 + IDENT@20..21 "T" + WHITESPACE@21..22 " " + FOR_KW@22..25 "for" + WHITESPACE@25..26 " " + PATH_TYPE@26..29 + PATH@26..29 + PATH_SEGMENT@26..29 + NAME_REF@26..29 + IDENT@26..29 "Foo" + WHITESPACE@29..30 " " + ASSOC_ITEM_LIST@30..64 + L_CURLY@30..31 "{" + WHITESPACE@31..36 "\n " + FN@36..62 + DEFAULT_KW@36..43 "default" + WHITESPACE@43..44 " " + UNSAFE_KW@44..50 "unsafe" + WHITESPACE@50..51 " " + FN_KW@51..53 "fn" + WHITESPACE@53..54 " " + NAME@54..57 + IDENT@54..57 "foo" + PARAM_LIST@57..59 + L_PAREN@57..58 "(" + R_PAREN@58..59 ")" + WHITESPACE@59..60 " " + BLOCK_EXPR@60..62 + L_CURLY@60..61 "{" + R_CURLY@61..62 "}" + WHITESPACE@62..63 "\n" + R_CURLY@63..64 "}" + WHITESPACE@64..65 "\n" diff --git a/crates/syntax/test_data/parser/inline/ok/0163_default_unsafe_item.rs b/crates/syntax/test_data/parser/inline/ok/0163_default_unsafe_item.rs new file mode 100644 index 0000000000..96340f84ab --- /dev/null +++ 
b/crates/syntax/test_data/parser/inline/ok/0163_default_unsafe_item.rs @@ -0,0 +1,3 @@ +default unsafe impl T for Foo { + default unsafe fn foo() {} +} diff --git a/crates/syntax/test_data/parser/inline/ok/0164_default_item.rast b/crates/syntax/test_data/parser/inline/ok/0164_default_item.rast new file mode 100644 index 0000000000..9282772f34 --- /dev/null +++ b/crates/syntax/test_data/parser/inline/ok/0164_default_item.rast @@ -0,0 +1,24 @@ +SOURCE_FILE@0..26 + IMPL@0..25 + DEFAULT_KW@0..7 "default" + WHITESPACE@7..8 " " + IMPL_KW@8..12 "impl" + WHITESPACE@12..13 " " + PATH_TYPE@13..14 + PATH@13..14 + PATH_SEGMENT@13..14 + NAME_REF@13..14 + IDENT@13..14 "T" + WHITESPACE@14..15 " " + FOR_KW@15..18 "for" + WHITESPACE@18..19 " " + PATH_TYPE@19..22 + PATH@19..22 + PATH_SEGMENT@19..22 + NAME_REF@19..22 + IDENT@19..22 "Foo" + WHITESPACE@22..23 " " + ASSOC_ITEM_LIST@23..25 + L_CURLY@23..24 "{" + R_CURLY@24..25 "}" + WHITESPACE@25..26 "\n" diff --git a/crates/syntax/test_data/parser/inline/ok/0164_default_item.rs b/crates/syntax/test_data/parser/inline/ok/0164_default_item.rs new file mode 100644 index 0000000000..a6836cbd57 --- /dev/null +++ b/crates/syntax/test_data/parser/inline/ok/0164_default_item.rs @@ -0,0 +1 @@ +default impl T for Foo {} diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0164_type_path_in_pattern.rast b/crates/syntax/test_data/parser/inline/ok/0164_type_path_in_pattern.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0164_type_path_in_pattern.rast rename to crates/syntax/test_data/parser/inline/ok/0164_type_path_in_pattern.rast diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0164_type_path_in_pattern.rs b/crates/syntax/test_data/parser/inline/ok/0164_type_path_in_pattern.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/inline/ok/0164_type_path_in_pattern.rs rename to crates/syntax/test_data/parser/inline/ok/0164_type_path_in_pattern.rs diff --git 
a/crates/ra_syntax/test_data/parser/ok/0000_empty.rast b/crates/syntax/test_data/parser/ok/0000_empty.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0000_empty.rast rename to crates/syntax/test_data/parser/ok/0000_empty.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0000_empty.rs b/crates/syntax/test_data/parser/ok/0000_empty.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0000_empty.rs rename to crates/syntax/test_data/parser/ok/0000_empty.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0001_struct_item.rast b/crates/syntax/test_data/parser/ok/0001_struct_item.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0001_struct_item.rast rename to crates/syntax/test_data/parser/ok/0001_struct_item.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0001_struct_item.rs b/crates/syntax/test_data/parser/ok/0001_struct_item.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0001_struct_item.rs rename to crates/syntax/test_data/parser/ok/0001_struct_item.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0002_struct_item_field.rast b/crates/syntax/test_data/parser/ok/0002_struct_item_field.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0002_struct_item_field.rast rename to crates/syntax/test_data/parser/ok/0002_struct_item_field.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0002_struct_item_field.rs b/crates/syntax/test_data/parser/ok/0002_struct_item_field.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0002_struct_item_field.rs rename to crates/syntax/test_data/parser/ok/0002_struct_item_field.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0004_file_shebang.rast b/crates/syntax/test_data/parser/ok/0004_file_shebang.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0004_file_shebang.rast rename to crates/syntax/test_data/parser/ok/0004_file_shebang.rast diff 
--git a/crates/ra_syntax/test_data/parser/ok/0004_file_shebang.rs b/crates/syntax/test_data/parser/ok/0004_file_shebang.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0004_file_shebang.rs rename to crates/syntax/test_data/parser/ok/0004_file_shebang.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0005_fn_item.rast b/crates/syntax/test_data/parser/ok/0005_fn_item.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0005_fn_item.rast rename to crates/syntax/test_data/parser/ok/0005_fn_item.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0005_fn_item.rs b/crates/syntax/test_data/parser/ok/0005_fn_item.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0005_fn_item.rs rename to crates/syntax/test_data/parser/ok/0005_fn_item.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0006_inner_attributes.rast b/crates/syntax/test_data/parser/ok/0006_inner_attributes.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0006_inner_attributes.rast rename to crates/syntax/test_data/parser/ok/0006_inner_attributes.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0006_inner_attributes.rs b/crates/syntax/test_data/parser/ok/0006_inner_attributes.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0006_inner_attributes.rs rename to crates/syntax/test_data/parser/ok/0006_inner_attributes.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0007_extern_crate.rast b/crates/syntax/test_data/parser/ok/0007_extern_crate.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0007_extern_crate.rast rename to crates/syntax/test_data/parser/ok/0007_extern_crate.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0007_extern_crate.rs b/crates/syntax/test_data/parser/ok/0007_extern_crate.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0007_extern_crate.rs rename to 
crates/syntax/test_data/parser/ok/0007_extern_crate.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0008_mod_item.rast b/crates/syntax/test_data/parser/ok/0008_mod_item.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0008_mod_item.rast rename to crates/syntax/test_data/parser/ok/0008_mod_item.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0008_mod_item.rs b/crates/syntax/test_data/parser/ok/0008_mod_item.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0008_mod_item.rs rename to crates/syntax/test_data/parser/ok/0008_mod_item.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0009_use_item.rast b/crates/syntax/test_data/parser/ok/0009_use_item.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0009_use_item.rast rename to crates/syntax/test_data/parser/ok/0009_use_item.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0009_use_item.rs b/crates/syntax/test_data/parser/ok/0009_use_item.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0009_use_item.rs rename to crates/syntax/test_data/parser/ok/0009_use_item.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0010_use_path_segments.rast b/crates/syntax/test_data/parser/ok/0010_use_path_segments.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0010_use_path_segments.rast rename to crates/syntax/test_data/parser/ok/0010_use_path_segments.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0010_use_path_segments.rs b/crates/syntax/test_data/parser/ok/0010_use_path_segments.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0010_use_path_segments.rs rename to crates/syntax/test_data/parser/ok/0010_use_path_segments.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0011_outer_attribute.rast b/crates/syntax/test_data/parser/ok/0011_outer_attribute.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0011_outer_attribute.rast 
rename to crates/syntax/test_data/parser/ok/0011_outer_attribute.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0011_outer_attribute.rs b/crates/syntax/test_data/parser/ok/0011_outer_attribute.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0011_outer_attribute.rs rename to crates/syntax/test_data/parser/ok/0011_outer_attribute.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0012_visibility.rast b/crates/syntax/test_data/parser/ok/0012_visibility.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0012_visibility.rast rename to crates/syntax/test_data/parser/ok/0012_visibility.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0012_visibility.rs b/crates/syntax/test_data/parser/ok/0012_visibility.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0012_visibility.rs rename to crates/syntax/test_data/parser/ok/0012_visibility.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0013_use_path_self_super.rast b/crates/syntax/test_data/parser/ok/0013_use_path_self_super.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0013_use_path_self_super.rast rename to crates/syntax/test_data/parser/ok/0013_use_path_self_super.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0013_use_path_self_super.rs b/crates/syntax/test_data/parser/ok/0013_use_path_self_super.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0013_use_path_self_super.rs rename to crates/syntax/test_data/parser/ok/0013_use_path_self_super.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0014_use_tree.rast b/crates/syntax/test_data/parser/ok/0014_use_tree.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0014_use_tree.rast rename to crates/syntax/test_data/parser/ok/0014_use_tree.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0014_use_tree.rs b/crates/syntax/test_data/parser/ok/0014_use_tree.rs similarity index 100% rename from 
crates/ra_syntax/test_data/parser/ok/0014_use_tree.rs rename to crates/syntax/test_data/parser/ok/0014_use_tree.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0015_use_tree.rast b/crates/syntax/test_data/parser/ok/0015_use_tree.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0015_use_tree.rast rename to crates/syntax/test_data/parser/ok/0015_use_tree.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0015_use_tree.rs b/crates/syntax/test_data/parser/ok/0015_use_tree.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0015_use_tree.rs rename to crates/syntax/test_data/parser/ok/0015_use_tree.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0016_struct_flavors.rast b/crates/syntax/test_data/parser/ok/0016_struct_flavors.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0016_struct_flavors.rast rename to crates/syntax/test_data/parser/ok/0016_struct_flavors.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0016_struct_flavors.rs b/crates/syntax/test_data/parser/ok/0016_struct_flavors.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0016_struct_flavors.rs rename to crates/syntax/test_data/parser/ok/0016_struct_flavors.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0017_attr_trailing_comma.rast b/crates/syntax/test_data/parser/ok/0017_attr_trailing_comma.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0017_attr_trailing_comma.rast rename to crates/syntax/test_data/parser/ok/0017_attr_trailing_comma.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0017_attr_trailing_comma.rs b/crates/syntax/test_data/parser/ok/0017_attr_trailing_comma.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0017_attr_trailing_comma.rs rename to crates/syntax/test_data/parser/ok/0017_attr_trailing_comma.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0018_struct_type_params.rast 
b/crates/syntax/test_data/parser/ok/0018_struct_type_params.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0018_struct_type_params.rast rename to crates/syntax/test_data/parser/ok/0018_struct_type_params.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0018_struct_type_params.rs b/crates/syntax/test_data/parser/ok/0018_struct_type_params.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0018_struct_type_params.rs rename to crates/syntax/test_data/parser/ok/0018_struct_type_params.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0019_enums.rast b/crates/syntax/test_data/parser/ok/0019_enums.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0019_enums.rast rename to crates/syntax/test_data/parser/ok/0019_enums.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0019_enums.rs b/crates/syntax/test_data/parser/ok/0019_enums.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0019_enums.rs rename to crates/syntax/test_data/parser/ok/0019_enums.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0020_type_param_bounds.rast b/crates/syntax/test_data/parser/ok/0020_type_param_bounds.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0020_type_param_bounds.rast rename to crates/syntax/test_data/parser/ok/0020_type_param_bounds.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0020_type_param_bounds.rs b/crates/syntax/test_data/parser/ok/0020_type_param_bounds.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0020_type_param_bounds.rs rename to crates/syntax/test_data/parser/ok/0020_type_param_bounds.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0022_empty_extern_block.rast b/crates/syntax/test_data/parser/ok/0022_empty_extern_block.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0022_empty_extern_block.rast rename to 
crates/syntax/test_data/parser/ok/0022_empty_extern_block.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0022_empty_extern_block.rs b/crates/syntax/test_data/parser/ok/0022_empty_extern_block.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0022_empty_extern_block.rs rename to crates/syntax/test_data/parser/ok/0022_empty_extern_block.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0023_static_items.rast b/crates/syntax/test_data/parser/ok/0023_static_items.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0023_static_items.rast rename to crates/syntax/test_data/parser/ok/0023_static_items.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0023_static_items.rs b/crates/syntax/test_data/parser/ok/0023_static_items.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0023_static_items.rs rename to crates/syntax/test_data/parser/ok/0023_static_items.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0024_const_item.rast b/crates/syntax/test_data/parser/ok/0024_const_item.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0024_const_item.rast rename to crates/syntax/test_data/parser/ok/0024_const_item.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0024_const_item.rs b/crates/syntax/test_data/parser/ok/0024_const_item.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0024_const_item.rs rename to crates/syntax/test_data/parser/ok/0024_const_item.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0025_extern_fn_in_block.rast b/crates/syntax/test_data/parser/ok/0025_extern_fn_in_block.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0025_extern_fn_in_block.rast rename to crates/syntax/test_data/parser/ok/0025_extern_fn_in_block.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0025_extern_fn_in_block.rs b/crates/syntax/test_data/parser/ok/0025_extern_fn_in_block.rs similarity index 100% rename 
from crates/ra_syntax/test_data/parser/ok/0025_extern_fn_in_block.rs rename to crates/syntax/test_data/parser/ok/0025_extern_fn_in_block.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0026_const_fn_in_block.rast b/crates/syntax/test_data/parser/ok/0026_const_fn_in_block.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0026_const_fn_in_block.rast rename to crates/syntax/test_data/parser/ok/0026_const_fn_in_block.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0026_const_fn_in_block.rs b/crates/syntax/test_data/parser/ok/0026_const_fn_in_block.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0026_const_fn_in_block.rs rename to crates/syntax/test_data/parser/ok/0026_const_fn_in_block.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0027_unsafe_fn_in_block.rast b/crates/syntax/test_data/parser/ok/0027_unsafe_fn_in_block.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0027_unsafe_fn_in_block.rast rename to crates/syntax/test_data/parser/ok/0027_unsafe_fn_in_block.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0027_unsafe_fn_in_block.rs b/crates/syntax/test_data/parser/ok/0027_unsafe_fn_in_block.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0027_unsafe_fn_in_block.rs rename to crates/syntax/test_data/parser/ok/0027_unsafe_fn_in_block.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0028_operator_binding_power.rast b/crates/syntax/test_data/parser/ok/0028_operator_binding_power.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0028_operator_binding_power.rast rename to crates/syntax/test_data/parser/ok/0028_operator_binding_power.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0028_operator_binding_power.rs b/crates/syntax/test_data/parser/ok/0028_operator_binding_power.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0028_operator_binding_power.rs rename to 
crates/syntax/test_data/parser/ok/0028_operator_binding_power.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0029_range_forms.rast b/crates/syntax/test_data/parser/ok/0029_range_forms.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0029_range_forms.rast rename to crates/syntax/test_data/parser/ok/0029_range_forms.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0029_range_forms.rs b/crates/syntax/test_data/parser/ok/0029_range_forms.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0029_range_forms.rs rename to crates/syntax/test_data/parser/ok/0029_range_forms.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0030_string_suffixes.rast b/crates/syntax/test_data/parser/ok/0030_string_suffixes.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0030_string_suffixes.rast rename to crates/syntax/test_data/parser/ok/0030_string_suffixes.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0030_string_suffixes.rs b/crates/syntax/test_data/parser/ok/0030_string_suffixes.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0030_string_suffixes.rs rename to crates/syntax/test_data/parser/ok/0030_string_suffixes.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0030_traits.rast b/crates/syntax/test_data/parser/ok/0030_traits.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0030_traits.rast rename to crates/syntax/test_data/parser/ok/0030_traits.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0030_traits.rs b/crates/syntax/test_data/parser/ok/0030_traits.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0030_traits.rs rename to crates/syntax/test_data/parser/ok/0030_traits.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0031_extern.rast b/crates/syntax/test_data/parser/ok/0031_extern.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0031_extern.rast rename to 
crates/syntax/test_data/parser/ok/0031_extern.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0031_extern.rs b/crates/syntax/test_data/parser/ok/0031_extern.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0031_extern.rs rename to crates/syntax/test_data/parser/ok/0031_extern.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0032_where_for.rast b/crates/syntax/test_data/parser/ok/0032_where_for.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0032_where_for.rast rename to crates/syntax/test_data/parser/ok/0032_where_for.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0032_where_for.rs b/crates/syntax/test_data/parser/ok/0032_where_for.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0032_where_for.rs rename to crates/syntax/test_data/parser/ok/0032_where_for.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0033_label_break.rast b/crates/syntax/test_data/parser/ok/0033_label_break.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0033_label_break.rast rename to crates/syntax/test_data/parser/ok/0033_label_break.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0033_label_break.rs b/crates/syntax/test_data/parser/ok/0033_label_break.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0033_label_break.rs rename to crates/syntax/test_data/parser/ok/0033_label_break.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0034_crate_path_in_call.rast b/crates/syntax/test_data/parser/ok/0034_crate_path_in_call.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0034_crate_path_in_call.rast rename to crates/syntax/test_data/parser/ok/0034_crate_path_in_call.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0034_crate_path_in_call.rs b/crates/syntax/test_data/parser/ok/0034_crate_path_in_call.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0034_crate_path_in_call.rs rename 
to crates/syntax/test_data/parser/ok/0034_crate_path_in_call.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0035_weird_exprs.rast b/crates/syntax/test_data/parser/ok/0035_weird_exprs.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0035_weird_exprs.rast rename to crates/syntax/test_data/parser/ok/0035_weird_exprs.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0035_weird_exprs.rs b/crates/syntax/test_data/parser/ok/0035_weird_exprs.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0035_weird_exprs.rs rename to crates/syntax/test_data/parser/ok/0035_weird_exprs.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0036_fully_qualified.rast b/crates/syntax/test_data/parser/ok/0036_fully_qualified.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0036_fully_qualified.rast rename to crates/syntax/test_data/parser/ok/0036_fully_qualified.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0036_fully_qualified.rs b/crates/syntax/test_data/parser/ok/0036_fully_qualified.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0036_fully_qualified.rs rename to crates/syntax/test_data/parser/ok/0036_fully_qualified.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0037_mod.rast b/crates/syntax/test_data/parser/ok/0037_mod.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0037_mod.rast rename to crates/syntax/test_data/parser/ok/0037_mod.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0037_mod.rs b/crates/syntax/test_data/parser/ok/0037_mod.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0037_mod.rs rename to crates/syntax/test_data/parser/ok/0037_mod.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0038_where_pred_type.rast b/crates/syntax/test_data/parser/ok/0038_where_pred_type.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0038_where_pred_type.rast rename to 
crates/syntax/test_data/parser/ok/0038_where_pred_type.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0038_where_pred_type.rs b/crates/syntax/test_data/parser/ok/0038_where_pred_type.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0038_where_pred_type.rs rename to crates/syntax/test_data/parser/ok/0038_where_pred_type.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0039_raw_fn_item.rast b/crates/syntax/test_data/parser/ok/0039_raw_fn_item.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0039_raw_fn_item.rast rename to crates/syntax/test_data/parser/ok/0039_raw_fn_item.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0039_raw_fn_item.rs b/crates/syntax/test_data/parser/ok/0039_raw_fn_item.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0039_raw_fn_item.rs rename to crates/syntax/test_data/parser/ok/0039_raw_fn_item.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0040_raw_struct_item_field.rast b/crates/syntax/test_data/parser/ok/0040_raw_struct_item_field.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0040_raw_struct_item_field.rast rename to crates/syntax/test_data/parser/ok/0040_raw_struct_item_field.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0040_raw_struct_item_field.rs b/crates/syntax/test_data/parser/ok/0040_raw_struct_item_field.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0040_raw_struct_item_field.rs rename to crates/syntax/test_data/parser/ok/0040_raw_struct_item_field.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0041_raw_keywords.rast b/crates/syntax/test_data/parser/ok/0041_raw_keywords.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0041_raw_keywords.rast rename to crates/syntax/test_data/parser/ok/0041_raw_keywords.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0041_raw_keywords.rs b/crates/syntax/test_data/parser/ok/0041_raw_keywords.rs 
similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0041_raw_keywords.rs rename to crates/syntax/test_data/parser/ok/0041_raw_keywords.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0042_ufcs_call_list.rast b/crates/syntax/test_data/parser/ok/0042_ufcs_call_list.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0042_ufcs_call_list.rast rename to crates/syntax/test_data/parser/ok/0042_ufcs_call_list.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0042_ufcs_call_list.rs b/crates/syntax/test_data/parser/ok/0042_ufcs_call_list.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0042_ufcs_call_list.rs rename to crates/syntax/test_data/parser/ok/0042_ufcs_call_list.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0043_complex_assignment.rast b/crates/syntax/test_data/parser/ok/0043_complex_assignment.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0043_complex_assignment.rast rename to crates/syntax/test_data/parser/ok/0043_complex_assignment.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0043_complex_assignment.rs b/crates/syntax/test_data/parser/ok/0043_complex_assignment.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0043_complex_assignment.rs rename to crates/syntax/test_data/parser/ok/0043_complex_assignment.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0044_let_attrs.rast b/crates/syntax/test_data/parser/ok/0044_let_attrs.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0044_let_attrs.rast rename to crates/syntax/test_data/parser/ok/0044_let_attrs.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0044_let_attrs.rs b/crates/syntax/test_data/parser/ok/0044_let_attrs.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0044_let_attrs.rs rename to crates/syntax/test_data/parser/ok/0044_let_attrs.rs diff --git 
a/crates/ra_syntax/test_data/parser/ok/0045_block_inner_attrs.rast b/crates/syntax/test_data/parser/ok/0045_block_inner_attrs.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0045_block_inner_attrs.rast rename to crates/syntax/test_data/parser/ok/0045_block_inner_attrs.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0045_block_inner_attrs.rs b/crates/syntax/test_data/parser/ok/0045_block_inner_attrs.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0045_block_inner_attrs.rs rename to crates/syntax/test_data/parser/ok/0045_block_inner_attrs.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0046_extern_inner_attributes.rast b/crates/syntax/test_data/parser/ok/0046_extern_inner_attributes.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0046_extern_inner_attributes.rast rename to crates/syntax/test_data/parser/ok/0046_extern_inner_attributes.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0046_extern_inner_attributes.rs b/crates/syntax/test_data/parser/ok/0046_extern_inner_attributes.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0046_extern_inner_attributes.rs rename to crates/syntax/test_data/parser/ok/0046_extern_inner_attributes.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0047_minus_in_inner_pattern.rast b/crates/syntax/test_data/parser/ok/0047_minus_in_inner_pattern.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0047_minus_in_inner_pattern.rast rename to crates/syntax/test_data/parser/ok/0047_minus_in_inner_pattern.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0047_minus_in_inner_pattern.rs b/crates/syntax/test_data/parser/ok/0047_minus_in_inner_pattern.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0047_minus_in_inner_pattern.rs rename to crates/syntax/test_data/parser/ok/0047_minus_in_inner_pattern.rs diff --git 
a/crates/ra_syntax/test_data/parser/ok/0048_compound_assignment.rast b/crates/syntax/test_data/parser/ok/0048_compound_assignment.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0048_compound_assignment.rast rename to crates/syntax/test_data/parser/ok/0048_compound_assignment.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0048_compound_assignment.rs b/crates/syntax/test_data/parser/ok/0048_compound_assignment.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0048_compound_assignment.rs rename to crates/syntax/test_data/parser/ok/0048_compound_assignment.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0049_async_block.rast b/crates/syntax/test_data/parser/ok/0049_async_block.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0049_async_block.rast rename to crates/syntax/test_data/parser/ok/0049_async_block.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0049_async_block.rs b/crates/syntax/test_data/parser/ok/0049_async_block.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0049_async_block.rs rename to crates/syntax/test_data/parser/ok/0049_async_block.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0050_async_block_as_argument.rast b/crates/syntax/test_data/parser/ok/0050_async_block_as_argument.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0050_async_block_as_argument.rast rename to crates/syntax/test_data/parser/ok/0050_async_block_as_argument.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0050_async_block_as_argument.rs b/crates/syntax/test_data/parser/ok/0050_async_block_as_argument.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0050_async_block_as_argument.rs rename to crates/syntax/test_data/parser/ok/0050_async_block_as_argument.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0051_parameter_attrs.rast b/crates/syntax/test_data/parser/ok/0051_parameter_attrs.rast 
similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0051_parameter_attrs.rast rename to crates/syntax/test_data/parser/ok/0051_parameter_attrs.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0051_parameter_attrs.rs b/crates/syntax/test_data/parser/ok/0051_parameter_attrs.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0051_parameter_attrs.rs rename to crates/syntax/test_data/parser/ok/0051_parameter_attrs.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0052_for_range_block.rast b/crates/syntax/test_data/parser/ok/0052_for_range_block.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0052_for_range_block.rast rename to crates/syntax/test_data/parser/ok/0052_for_range_block.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0052_for_range_block.rs b/crates/syntax/test_data/parser/ok/0052_for_range_block.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0052_for_range_block.rs rename to crates/syntax/test_data/parser/ok/0052_for_range_block.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0053_outer_attribute_on_macro_rules.rast b/crates/syntax/test_data/parser/ok/0053_outer_attribute_on_macro_rules.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0053_outer_attribute_on_macro_rules.rast rename to crates/syntax/test_data/parser/ok/0053_outer_attribute_on_macro_rules.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0053_outer_attribute_on_macro_rules.rs b/crates/syntax/test_data/parser/ok/0053_outer_attribute_on_macro_rules.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0053_outer_attribute_on_macro_rules.rs rename to crates/syntax/test_data/parser/ok/0053_outer_attribute_on_macro_rules.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0054_qual_path_in_type_arg.rast b/crates/syntax/test_data/parser/ok/0054_qual_path_in_type_arg.rast similarity index 100% rename from 
crates/ra_syntax/test_data/parser/ok/0054_qual_path_in_type_arg.rast rename to crates/syntax/test_data/parser/ok/0054_qual_path_in_type_arg.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0054_qual_path_in_type_arg.rs b/crates/syntax/test_data/parser/ok/0054_qual_path_in_type_arg.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0054_qual_path_in_type_arg.rs rename to crates/syntax/test_data/parser/ok/0054_qual_path_in_type_arg.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0055_dot_dot_dot.rast b/crates/syntax/test_data/parser/ok/0055_dot_dot_dot.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0055_dot_dot_dot.rast rename to crates/syntax/test_data/parser/ok/0055_dot_dot_dot.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0055_dot_dot_dot.rs b/crates/syntax/test_data/parser/ok/0055_dot_dot_dot.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0055_dot_dot_dot.rs rename to crates/syntax/test_data/parser/ok/0055_dot_dot_dot.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0056_neq_in_type.rast b/crates/syntax/test_data/parser/ok/0056_neq_in_type.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0056_neq_in_type.rast rename to crates/syntax/test_data/parser/ok/0056_neq_in_type.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0056_neq_in_type.rs b/crates/syntax/test_data/parser/ok/0056_neq_in_type.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0056_neq_in_type.rs rename to crates/syntax/test_data/parser/ok/0056_neq_in_type.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0057_loop_in_call.rast b/crates/syntax/test_data/parser/ok/0057_loop_in_call.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0057_loop_in_call.rast rename to crates/syntax/test_data/parser/ok/0057_loop_in_call.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0057_loop_in_call.rs 
b/crates/syntax/test_data/parser/ok/0057_loop_in_call.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0057_loop_in_call.rs rename to crates/syntax/test_data/parser/ok/0057_loop_in_call.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0058_unary_expr_precedence.rast b/crates/syntax/test_data/parser/ok/0058_unary_expr_precedence.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0058_unary_expr_precedence.rast rename to crates/syntax/test_data/parser/ok/0058_unary_expr_precedence.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0058_unary_expr_precedence.rs b/crates/syntax/test_data/parser/ok/0058_unary_expr_precedence.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0058_unary_expr_precedence.rs rename to crates/syntax/test_data/parser/ok/0058_unary_expr_precedence.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0059_loops_in_parens.rast b/crates/syntax/test_data/parser/ok/0059_loops_in_parens.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0059_loops_in_parens.rast rename to crates/syntax/test_data/parser/ok/0059_loops_in_parens.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0059_loops_in_parens.rs b/crates/syntax/test_data/parser/ok/0059_loops_in_parens.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0059_loops_in_parens.rs rename to crates/syntax/test_data/parser/ok/0059_loops_in_parens.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0060_as_range.rast b/crates/syntax/test_data/parser/ok/0060_as_range.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0060_as_range.rast rename to crates/syntax/test_data/parser/ok/0060_as_range.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0060_as_range.rs b/crates/syntax/test_data/parser/ok/0060_as_range.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0060_as_range.rs rename to 
crates/syntax/test_data/parser/ok/0060_as_range.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0061_match_full_range.rast b/crates/syntax/test_data/parser/ok/0061_match_full_range.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0061_match_full_range.rast rename to crates/syntax/test_data/parser/ok/0061_match_full_range.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0061_match_full_range.rs b/crates/syntax/test_data/parser/ok/0061_match_full_range.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0061_match_full_range.rs rename to crates/syntax/test_data/parser/ok/0061_match_full_range.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0062_macro_2.0.rast b/crates/syntax/test_data/parser/ok/0062_macro_2.0.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0062_macro_2.0.rast rename to crates/syntax/test_data/parser/ok/0062_macro_2.0.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0062_macro_2.0.rs b/crates/syntax/test_data/parser/ok/0062_macro_2.0.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0062_macro_2.0.rs rename to crates/syntax/test_data/parser/ok/0062_macro_2.0.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0063_trait_fn_patterns.rast b/crates/syntax/test_data/parser/ok/0063_trait_fn_patterns.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0063_trait_fn_patterns.rast rename to crates/syntax/test_data/parser/ok/0063_trait_fn_patterns.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0063_trait_fn_patterns.rs b/crates/syntax/test_data/parser/ok/0063_trait_fn_patterns.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0063_trait_fn_patterns.rs rename to crates/syntax/test_data/parser/ok/0063_trait_fn_patterns.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0063_variadic_fun.rast b/crates/syntax/test_data/parser/ok/0063_variadic_fun.rast similarity index 100% rename from 
crates/ra_syntax/test_data/parser/ok/0063_variadic_fun.rast rename to crates/syntax/test_data/parser/ok/0063_variadic_fun.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0063_variadic_fun.rs b/crates/syntax/test_data/parser/ok/0063_variadic_fun.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0063_variadic_fun.rs rename to crates/syntax/test_data/parser/ok/0063_variadic_fun.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0064_impl_fn_params.rast b/crates/syntax/test_data/parser/ok/0064_impl_fn_params.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0064_impl_fn_params.rast rename to crates/syntax/test_data/parser/ok/0064_impl_fn_params.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0064_impl_fn_params.rs b/crates/syntax/test_data/parser/ok/0064_impl_fn_params.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0064_impl_fn_params.rs rename to crates/syntax/test_data/parser/ok/0064_impl_fn_params.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0065_comment_newline.rast b/crates/syntax/test_data/parser/ok/0065_comment_newline.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0065_comment_newline.rast rename to crates/syntax/test_data/parser/ok/0065_comment_newline.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0065_comment_newline.rs b/crates/syntax/test_data/parser/ok/0065_comment_newline.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0065_comment_newline.rs rename to crates/syntax/test_data/parser/ok/0065_comment_newline.rs diff --git a/crates/ra_syntax/test_data/parser/ok/0065_plus_after_fn_trait_bound.rast b/crates/syntax/test_data/parser/ok/0065_plus_after_fn_trait_bound.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0065_plus_after_fn_trait_bound.rast rename to crates/syntax/test_data/parser/ok/0065_plus_after_fn_trait_bound.rast diff --git 
a/crates/ra_syntax/test_data/parser/ok/0065_plus_after_fn_trait_bound.rs b/crates/syntax/test_data/parser/ok/0065_plus_after_fn_trait_bound.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0065_plus_after_fn_trait_bound.rs rename to crates/syntax/test_data/parser/ok/0065_plus_after_fn_trait_bound.rs diff --git a/crates/syntax/test_data/parser/ok/0066_default_modifier.rast b/crates/syntax/test_data/parser/ok/0066_default_modifier.rast new file mode 100644 index 0000000000..e9b57ec3b3 --- /dev/null +++ b/crates/syntax/test_data/parser/ok/0066_default_modifier.rast @@ -0,0 +1,218 @@ +SOURCE_FILE@0..294 + TRAIT@0..113 + TRAIT_KW@0..5 "trait" + WHITESPACE@5..6 " " + NAME@6..7 + IDENT@6..7 "T" + WHITESPACE@7..8 " " + ASSOC_ITEM_LIST@8..113 + L_CURLY@8..9 "{" + WHITESPACE@9..12 "\n " + TYPE_ALIAS@12..33 + DEFAULT_KW@12..19 "default" + WHITESPACE@19..20 " " + TYPE_KW@20..24 "type" + WHITESPACE@24..25 " " + NAME@25..26 + IDENT@25..26 "T" + WHITESPACE@26..27 " " + EQ@27..28 "=" + WHITESPACE@28..29 " " + PATH_TYPE@29..32 + PATH@29..32 + PATH_SEGMENT@29..32 + NAME_REF@29..32 + IDENT@29..32 "Bar" + SEMICOLON@32..33 ";" + WHITESPACE@33..36 "\n " + CONST@36..60 + DEFAULT_KW@36..43 "default" + WHITESPACE@43..44 " " + CONST_KW@44..49 "const" + WHITESPACE@49..50 " " + NAME@50..51 + IDENT@50..51 "f" + COLON@51..52 ":" + WHITESPACE@52..53 " " + PATH_TYPE@53..55 + PATH@53..55 + PATH_SEGMENT@53..55 + NAME_REF@53..55 + IDENT@53..55 "u8" + WHITESPACE@55..56 " " + EQ@56..57 "=" + WHITESPACE@57..58 " " + LITERAL@58..59 + INT_NUMBER@58..59 "0" + SEMICOLON@59..60 ";" + WHITESPACE@60..63 "\n " + FN@63..82 + DEFAULT_KW@63..70 "default" + WHITESPACE@70..71 " " + FN_KW@71..73 "fn" + WHITESPACE@73..74 " " + NAME@74..77 + IDENT@74..77 "foo" + PARAM_LIST@77..79 + L_PAREN@77..78 "(" + R_PAREN@78..79 ")" + WHITESPACE@79..80 " " + BLOCK_EXPR@80..82 + L_CURLY@80..81 "{" + R_CURLY@81..82 "}" + WHITESPACE@82..85 "\n " + FN@85..111 + DEFAULT_KW@85..92 "default" + WHITESPACE@92..93 
" " + UNSAFE_KW@93..99 "unsafe" + WHITESPACE@99..100 " " + FN_KW@100..102 "fn" + WHITESPACE@102..103 " " + NAME@103..106 + IDENT@103..106 "bar" + PARAM_LIST@106..108 + L_PAREN@106..107 "(" + R_PAREN@107..108 ")" + WHITESPACE@108..109 " " + BLOCK_EXPR@109..111 + L_CURLY@109..110 "{" + R_CURLY@110..111 "}" + WHITESPACE@111..112 "\n" + R_CURLY@112..113 "}" + WHITESPACE@113..115 "\n\n" + IMPL@115..235 + IMPL_KW@115..119 "impl" + WHITESPACE@119..120 " " + PATH_TYPE@120..121 + PATH@120..121 + PATH_SEGMENT@120..121 + NAME_REF@120..121 + IDENT@120..121 "T" + WHITESPACE@121..122 " " + FOR_KW@122..125 "for" + WHITESPACE@125..126 " " + PATH_TYPE@126..129 + PATH@126..129 + PATH_SEGMENT@126..129 + NAME_REF@126..129 + IDENT@126..129 "Foo" + WHITESPACE@129..130 " " + ASSOC_ITEM_LIST@130..235 + L_CURLY@130..131 "{" + WHITESPACE@131..134 "\n " + TYPE_ALIAS@134..155 + DEFAULT_KW@134..141 "default" + WHITESPACE@141..142 " " + TYPE_KW@142..146 "type" + WHITESPACE@146..147 " " + NAME@147..148 + IDENT@147..148 "T" + WHITESPACE@148..149 " " + EQ@149..150 "=" + WHITESPACE@150..151 " " + PATH_TYPE@151..154 + PATH@151..154 + PATH_SEGMENT@151..154 + NAME_REF@151..154 + IDENT@151..154 "Bar" + SEMICOLON@154..155 ";" + WHITESPACE@155..158 "\n " + CONST@158..182 + DEFAULT_KW@158..165 "default" + WHITESPACE@165..166 " " + CONST_KW@166..171 "const" + WHITESPACE@171..172 " " + NAME@172..173 + IDENT@172..173 "f" + COLON@173..174 ":" + WHITESPACE@174..175 " " + PATH_TYPE@175..177 + PATH@175..177 + PATH_SEGMENT@175..177 + NAME_REF@175..177 + IDENT@175..177 "u8" + WHITESPACE@177..178 " " + EQ@178..179 "=" + WHITESPACE@179..180 " " + LITERAL@180..181 + INT_NUMBER@180..181 "0" + SEMICOLON@181..182 ";" + WHITESPACE@182..185 "\n " + FN@185..204 + DEFAULT_KW@185..192 "default" + WHITESPACE@192..193 " " + FN_KW@193..195 "fn" + WHITESPACE@195..196 " " + NAME@196..199 + IDENT@196..199 "foo" + PARAM_LIST@199..201 + L_PAREN@199..200 "(" + R_PAREN@200..201 ")" + WHITESPACE@201..202 " " + BLOCK_EXPR@202..204 + 
L_CURLY@202..203 "{" + R_CURLY@203..204 "}" + WHITESPACE@204..207 "\n " + FN@207..233 + DEFAULT_KW@207..214 "default" + WHITESPACE@214..215 " " + UNSAFE_KW@215..221 "unsafe" + WHITESPACE@221..222 " " + FN_KW@222..224 "fn" + WHITESPACE@224..225 " " + NAME@225..228 + IDENT@225..228 "bar" + PARAM_LIST@228..230 + L_PAREN@228..229 "(" + R_PAREN@229..230 ")" + WHITESPACE@230..231 " " + BLOCK_EXPR@231..233 + L_CURLY@231..232 "{" + R_CURLY@232..233 "}" + WHITESPACE@233..234 "\n" + R_CURLY@234..235 "}" + WHITESPACE@235..237 "\n\n" + IMPL@237..261 + DEFAULT_KW@237..244 "default" + WHITESPACE@244..245 " " + IMPL_KW@245..249 "impl" + WHITESPACE@249..250 " " + PATH_TYPE@250..251 + PATH@250..251 + PATH_SEGMENT@250..251 + NAME_REF@250..251 + IDENT@250..251 "T" + WHITESPACE@251..252 " " + FOR_KW@252..255 "for" + WHITESPACE@255..256 " " + TUPLE_TYPE@256..258 + L_PAREN@256..257 "(" + R_PAREN@257..258 ")" + WHITESPACE@258..259 " " + ASSOC_ITEM_LIST@259..261 + L_CURLY@259..260 "{" + R_CURLY@260..261 "}" + WHITESPACE@261..262 "\n" + IMPL@262..293 + DEFAULT_KW@262..269 "default" + WHITESPACE@269..270 " " + UNSAFE_KW@270..276 "unsafe" + WHITESPACE@276..277 " " + IMPL_KW@277..281 "impl" + WHITESPACE@281..282 " " + PATH_TYPE@282..283 + PATH@282..283 + PATH_SEGMENT@282..283 + NAME_REF@282..283 + IDENT@282..283 "T" + WHITESPACE@283..284 " " + FOR_KW@284..287 "for" + WHITESPACE@287..288 " " + TUPLE_TYPE@288..290 + L_PAREN@288..289 "(" + R_PAREN@289..290 ")" + WHITESPACE@290..291 " " + ASSOC_ITEM_LIST@291..293 + L_CURLY@291..292 "{" + R_CURLY@292..293 "}" + WHITESPACE@293..294 "\n" diff --git a/crates/syntax/test_data/parser/ok/0066_default_modifier.rs b/crates/syntax/test_data/parser/ok/0066_default_modifier.rs new file mode 100644 index 0000000000..e443e3495e --- /dev/null +++ b/crates/syntax/test_data/parser/ok/0066_default_modifier.rs @@ -0,0 +1,16 @@ +trait T { + default type T = Bar; + default const f: u8 = 0; + default fn foo() {} + default unsafe fn bar() {} +} + +impl T for Foo { + 
default type T = Bar; + default const f: u8 = 0; + default fn foo() {} + default unsafe fn bar() {} +} + +default impl T for () {} +default unsafe impl T for () {} diff --git a/crates/ra_syntax/test_data/parser/ok/0067_where_for_pred.rast b/crates/syntax/test_data/parser/ok/0067_where_for_pred.rast similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0067_where_for_pred.rast rename to crates/syntax/test_data/parser/ok/0067_where_for_pred.rast diff --git a/crates/ra_syntax/test_data/parser/ok/0067_where_for_pred.rs b/crates/syntax/test_data/parser/ok/0067_where_for_pred.rs similarity index 100% rename from crates/ra_syntax/test_data/parser/ok/0067_where_for_pred.rs rename to crates/syntax/test_data/parser/ok/0067_where_for_pred.rs diff --git a/crates/syntax/test_data/parser/ok/0068_item_modifiers.rast b/crates/syntax/test_data/parser/ok/0068_item_modifiers.rast new file mode 100644 index 0000000000..50a6d8ee9a --- /dev/null +++ b/crates/syntax/test_data/parser/ok/0068_item_modifiers.rast @@ -0,0 +1,218 @@ +SOURCE_FILE@0..304 + FN@0..17 + ASYNC_KW@0..5 "async" + WHITESPACE@5..6 " " + FN_KW@6..8 "fn" + WHITESPACE@8..9 " " + NAME@9..12 + IDENT@9..12 "foo" + PARAM_LIST@12..14 + L_PAREN@12..13 "(" + R_PAREN@13..14 ")" + WHITESPACE@14..15 " " + BLOCK_EXPR@15..17 + L_CURLY@15..16 "{" + R_CURLY@16..17 "}" + WHITESPACE@17..18 "\n" + FN@18..36 + ABI@18..24 + EXTERN_KW@18..24 "extern" + WHITESPACE@24..25 " " + FN_KW@25..27 "fn" + WHITESPACE@27..28 " " + NAME@28..31 + IDENT@28..31 "foo" + PARAM_LIST@31..33 + L_PAREN@31..32 "(" + R_PAREN@32..33 ")" + WHITESPACE@33..34 " " + BLOCK_EXPR@34..36 + L_CURLY@34..35 "{" + R_CURLY@35..36 "}" + WHITESPACE@36..37 "\n" + FN@37..54 + CONST_KW@37..42 "const" + WHITESPACE@42..43 " " + FN_KW@43..45 "fn" + WHITESPACE@45..46 " " + NAME@46..49 + IDENT@46..49 "foo" + PARAM_LIST@49..51 + L_PAREN@49..50 "(" + R_PAREN@50..51 ")" + WHITESPACE@51..52 " " + BLOCK_EXPR@52..54 + L_CURLY@52..53 "{" + R_CURLY@53..54 "}" + WHITESPACE@54..55 
"\n" + FN@55..79 + CONST_KW@55..60 "const" + WHITESPACE@60..61 " " + UNSAFE_KW@61..67 "unsafe" + WHITESPACE@67..68 " " + FN_KW@68..70 "fn" + WHITESPACE@70..71 " " + NAME@71..74 + IDENT@71..74 "foo" + PARAM_LIST@74..76 + L_PAREN@74..75 "(" + R_PAREN@75..76 ")" + WHITESPACE@76..77 " " + BLOCK_EXPR@77..79 + L_CURLY@77..78 "{" + R_CURLY@78..79 "}" + WHITESPACE@79..80 "\n" + FN@80..109 + UNSAFE_KW@80..86 "unsafe" + WHITESPACE@86..87 " " + ABI@87..97 + EXTERN_KW@87..93 "extern" + WHITESPACE@93..94 " " + STRING@94..97 "\"C\"" + WHITESPACE@97..98 " " + FN_KW@98..100 "fn" + WHITESPACE@100..101 " " + NAME@101..104 + IDENT@101..104 "foo" + PARAM_LIST@104..106 + L_PAREN@104..105 "(" + R_PAREN@105..106 ")" + WHITESPACE@106..107 " " + BLOCK_EXPR@107..109 + L_CURLY@107..108 "{" + R_CURLY@108..109 "}" + WHITESPACE@109..110 "\n" + FN@110..128 + UNSAFE_KW@110..116 "unsafe" + WHITESPACE@116..117 " " + FN_KW@117..119 "fn" + WHITESPACE@119..120 " " + NAME@120..123 + IDENT@120..123 "foo" + PARAM_LIST@123..125 + L_PAREN@123..124 "(" + R_PAREN@124..125 ")" + WHITESPACE@125..126 " " + BLOCK_EXPR@126..128 + L_CURLY@126..127 "{" + R_CURLY@127..128 "}" + WHITESPACE@128..129 "\n" + FN@129..153 + ASYNC_KW@129..134 "async" + WHITESPACE@134..135 " " + UNSAFE_KW@135..141 "unsafe" + WHITESPACE@141..142 " " + FN_KW@142..144 "fn" + WHITESPACE@144..145 " " + NAME@145..148 + IDENT@145..148 "foo" + PARAM_LIST@148..150 + L_PAREN@148..149 "(" + R_PAREN@149..150 ")" + WHITESPACE@150..151 " " + BLOCK_EXPR@151..153 + L_CURLY@151..152 "{" + R_CURLY@152..153 "}" + WHITESPACE@153..154 "\n" + FN@154..178 + CONST_KW@154..159 "const" + WHITESPACE@159..160 " " + UNSAFE_KW@160..166 "unsafe" + WHITESPACE@166..167 " " + FN_KW@167..169 "fn" + WHITESPACE@169..170 " " + NAME@170..173 + IDENT@170..173 "bar" + PARAM_LIST@173..175 + L_PAREN@173..174 "(" + R_PAREN@174..175 ")" + WHITESPACE@175..176 " " + BLOCK_EXPR@176..178 + L_CURLY@176..177 "{" + R_CURLY@177..178 "}" + WHITESPACE@178..180 "\n\n" + TRAIT@180..197 + 
UNSAFE_KW@180..186 "unsafe" + WHITESPACE@186..187 " " + TRAIT_KW@187..192 "trait" + WHITESPACE@192..193 " " + NAME@193..194 + IDENT@193..194 "T" + WHITESPACE@194..195 " " + ASSOC_ITEM_LIST@195..197 + L_CURLY@195..196 "{" + R_CURLY@196..197 "}" + WHITESPACE@197..198 "\n" + TRAIT@198..213 + AUTO_KW@198..202 "auto" + WHITESPACE@202..203 " " + TRAIT_KW@203..208 "trait" + WHITESPACE@208..209 " " + NAME@209..210 + IDENT@209..210 "T" + WHITESPACE@210..211 " " + ASSOC_ITEM_LIST@211..213 + L_CURLY@211..212 "{" + R_CURLY@212..213 "}" + WHITESPACE@213..214 "\n" + TRAIT@214..236 + UNSAFE_KW@214..220 "unsafe" + WHITESPACE@220..221 " " + AUTO_KW@221..225 "auto" + WHITESPACE@225..226 " " + TRAIT_KW@226..231 "trait" + WHITESPACE@231..232 " " + NAME@232..233 + IDENT@232..233 "T" + WHITESPACE@233..234 " " + ASSOC_ITEM_LIST@234..236 + L_CURLY@234..235 "{" + R_CURLY@235..236 "}" + WHITESPACE@236..238 "\n\n" + IMPL@238..256 + UNSAFE_KW@238..244 "unsafe" + WHITESPACE@244..245 " " + IMPL_KW@245..249 "impl" + WHITESPACE@249..250 " " + PATH_TYPE@250..253 + PATH@250..253 + PATH_SEGMENT@250..253 + NAME_REF@250..253 + IDENT@250..253 "Foo" + WHITESPACE@253..254 " " + ASSOC_ITEM_LIST@254..256 + L_CURLY@254..255 "{" + R_CURLY@255..256 "}" + WHITESPACE@256..257 "\n" + IMPL@257..276 + DEFAULT_KW@257..264 "default" + WHITESPACE@264..265 " " + IMPL_KW@265..269 "impl" + WHITESPACE@269..270 " " + PATH_TYPE@270..273 + PATH@270..273 + PATH_SEGMENT@270..273 + NAME_REF@270..273 + IDENT@270..273 "Foo" + WHITESPACE@273..274 " " + ASSOC_ITEM_LIST@274..276 + L_CURLY@274..275 "{" + R_CURLY@275..276 "}" + WHITESPACE@276..277 "\n" + IMPL@277..303 + UNSAFE_KW@277..283 "unsafe" + WHITESPACE@283..284 " " + DEFAULT_KW@284..291 "default" + WHITESPACE@291..292 " " + IMPL_KW@292..296 "impl" + WHITESPACE@296..297 " " + PATH_TYPE@297..300 + PATH@297..300 + PATH_SEGMENT@297..300 + NAME_REF@297..300 + IDENT@297..300 "Foo" + WHITESPACE@300..301 " " + ASSOC_ITEM_LIST@301..303 + L_CURLY@301..302 "{" + R_CURLY@302..303 "}" + 
WHITESPACE@303..304 "\n" diff --git a/crates/syntax/test_data/parser/ok/0068_item_modifiers.rs b/crates/syntax/test_data/parser/ok/0068_item_modifiers.rs new file mode 100644 index 0000000000..8d697c04b9 --- /dev/null +++ b/crates/syntax/test_data/parser/ok/0068_item_modifiers.rs @@ -0,0 +1,16 @@ +async fn foo() {} +extern fn foo() {} +const fn foo() {} +const unsafe fn foo() {} +unsafe extern "C" fn foo() {} +unsafe fn foo() {} +async unsafe fn foo() {} +const unsafe fn bar() {} + +unsafe trait T {} +auto trait T {} +unsafe auto trait T {} + +unsafe impl Foo {} +default impl Foo {} +unsafe default impl Foo {} diff --git a/crates/ra_syntax/test_data/reparse/fuzz-failures/0000.rs b/crates/syntax/test_data/reparse/fuzz-failures/0000.rs similarity index 100% rename from crates/ra_syntax/test_data/reparse/fuzz-failures/0000.rs rename to crates/syntax/test_data/reparse/fuzz-failures/0000.rs diff --git a/crates/ra_syntax/test_data/reparse/fuzz-failures/0001.rs b/crates/syntax/test_data/reparse/fuzz-failures/0001.rs similarity index 100% rename from crates/ra_syntax/test_data/reparse/fuzz-failures/0001.rs rename to crates/syntax/test_data/reparse/fuzz-failures/0001.rs diff --git a/crates/ra_syntax/test_data/reparse/fuzz-failures/0002.rs b/crates/syntax/test_data/reparse/fuzz-failures/0002.rs similarity index 100% rename from crates/ra_syntax/test_data/reparse/fuzz-failures/0002.rs rename to crates/syntax/test_data/reparse/fuzz-failures/0002.rs diff --git a/crates/ra_syntax/test_data/reparse/fuzz-failures/0003.rs b/crates/syntax/test_data/reparse/fuzz-failures/0003.rs similarity index 100% rename from crates/ra_syntax/test_data/reparse/fuzz-failures/0003.rs rename to crates/syntax/test_data/reparse/fuzz-failures/0003.rs diff --git a/crates/ra_syntax/test_data/reparse/fuzz-failures/0004.rs b/crates/syntax/test_data/reparse/fuzz-failures/0004.rs similarity index 100% rename from crates/ra_syntax/test_data/reparse/fuzz-failures/0004.rs rename to 
crates/syntax/test_data/reparse/fuzz-failures/0004.rs diff --git a/crates/ra_syntax/test_data/reparse/fuzz-failures/0005.rs b/crates/syntax/test_data/reparse/fuzz-failures/0005.rs similarity index 100% rename from crates/ra_syntax/test_data/reparse/fuzz-failures/0005.rs rename to crates/syntax/test_data/reparse/fuzz-failures/0005.rs diff --git a/crates/test_utils/Cargo.toml b/crates/test_utils/Cargo.toml index e719f4f7c1..45e5fb97f7 100644 --- a/crates/test_utils/Cargo.toml +++ b/crates/test_utils/Cargo.toml @@ -1,9 +1,9 @@ [package] -edition = "2018" name = "test_utils" -version = "0.1.0" -authors = ["rust-analyzer developers"] +version = "0.0.0" license = "MIT OR Apache-2.0" +authors = ["rust-analyzer developers"] +edition = "2018" [lib] doctest = false @@ -14,4 +14,5 @@ difference = "2.0.0" text-size = "1.0.0" serde_json = "1.0.48" rustc-hash = "1.1.0" + stdx = { path = "../stdx" } diff --git a/crates/text_edit/Cargo.toml b/crates/text_edit/Cargo.toml new file mode 100644 index 0000000000..a69b1ef2b5 --- /dev/null +++ b/crates/text_edit/Cargo.toml @@ -0,0 +1,12 @@ +[package] +name = "text_edit" +version = "0.0.0" +license = "MIT OR Apache-2.0" +authors = ["rust-analyzer developers"] +edition = "2018" + +[lib] +doctest = false + +[dependencies] +text-size = "1.0.0" diff --git a/crates/text_edit/src/lib.rs b/crates/text_edit/src/lib.rs new file mode 100644 index 0000000000..ab8cd7fd11 --- /dev/null +++ b/crates/text_edit/src/lib.rs @@ -0,0 +1,186 @@ +//! Representation of a `TextEdit`. +//! +//! `rust-analyzer` never mutates text itself and only sends diffs to clients, +//! so `TextEdit` is the ultimate representation of the work done by +//! rust-analyzer. 
+pub use text_size::{TextRange, TextSize}; + +/// `InsertDelete` -- a single "atomic" change to text +/// +/// Must not overlap with other `InDel`s +#[derive(Debug, Clone)] +pub struct Indel { + pub insert: String, + /// Refers to offsets in the original text + pub delete: TextRange, +} + +#[derive(Default, Debug, Clone)] +pub struct TextEdit { + indels: Vec, +} + +#[derive(Debug, Default, Clone)] +pub struct TextEditBuilder { + indels: Vec, +} + +impl Indel { + pub fn insert(offset: TextSize, text: String) -> Indel { + Indel::replace(TextRange::empty(offset), text) + } + pub fn delete(range: TextRange) -> Indel { + Indel::replace(range, String::new()) + } + pub fn replace(range: TextRange, replace_with: String) -> Indel { + Indel { delete: range, insert: replace_with } + } + + pub fn apply(&self, text: &mut String) { + let start: usize = self.delete.start().into(); + let end: usize = self.delete.end().into(); + text.replace_range(start..end, &self.insert); + } +} + +impl TextEdit { + pub fn builder() -> TextEditBuilder { + TextEditBuilder::default() + } + + pub fn insert(offset: TextSize, text: String) -> TextEdit { + let mut builder = TextEdit::builder(); + builder.insert(offset, text); + builder.finish() + } + + pub fn delete(range: TextRange) -> TextEdit { + let mut builder = TextEdit::builder(); + builder.delete(range); + builder.finish() + } + + pub fn replace(range: TextRange, replace_with: String) -> TextEdit { + let mut builder = TextEdit::builder(); + builder.replace(range, replace_with); + builder.finish() + } + + pub fn len(&self) -> usize { + self.indels.len() + } + + pub fn is_empty(&self) -> bool { + self.indels.is_empty() + } + + pub fn iter(&self) -> std::slice::Iter<'_, Indel> { + self.into_iter() + } + + pub fn apply(&self, text: &mut String) { + match self.len() { + 0 => return, + 1 => { + self.indels[0].apply(text); + return; + } + _ => (), + } + + let mut total_len = TextSize::of(&*text); + for indel in self.indels.iter() { + total_len += 
TextSize::of(&indel.insert); + total_len -= indel.delete.end() - indel.delete.start(); + } + let mut buf = String::with_capacity(total_len.into()); + let mut prev = 0; + for indel in self.indels.iter() { + let start: usize = indel.delete.start().into(); + let end: usize = indel.delete.end().into(); + if start > prev { + buf.push_str(&text[prev..start]); + } + buf.push_str(&indel.insert); + prev = end; + } + buf.push_str(&text[prev..text.len()]); + assert_eq!(TextSize::of(&buf), total_len); + + // FIXME: figure out a way to mutate the text in-place or reuse the + // memory in some other way + *text = buf + } + + pub fn union(&mut self, other: TextEdit) -> Result<(), TextEdit> { + // FIXME: can be done without allocating intermediate vector + let mut all = self.iter().chain(other.iter()).collect::<Vec<_>>(); + if !check_disjoint(&mut all) { + return Err(other); + } + self.indels.extend(other.indels); + assert!(check_disjoint(&mut self.indels)); + Ok(()) + } + + pub fn apply_to_offset(&self, offset: TextSize) -> Option<TextSize> { + let mut res = offset; + for indel in self.indels.iter() { + if indel.delete.start() >= offset { + break; + } + if offset < indel.delete.end() { + return None; + } + res += TextSize::of(&indel.insert); + res -= indel.delete.len(); + } + Some(res) + } +} + +impl IntoIterator for TextEdit { + type Item = Indel; + type IntoIter = std::vec::IntoIter<Indel>; + + fn into_iter(self) -> Self::IntoIter { + self.indels.into_iter() + } +} + +impl<'a> IntoIterator for &'a TextEdit { + type Item = &'a Indel; + type IntoIter = std::slice::Iter<'a, Indel>; + + fn into_iter(self) -> Self::IntoIter { + self.indels.iter() + } +} + +impl TextEditBuilder { + pub fn replace(&mut self, range: TextRange, replace_with: String) { + self.indels.push(Indel::replace(range, replace_with)) + } + pub fn delete(&mut self, range: TextRange) { + self.indels.push(Indel::delete(range)) + } + pub fn insert(&mut self, offset: TextSize, text: String) { + self.indels.push(Indel::insert(offset, text)) + 
} + pub fn finish(self) -> TextEdit { + let mut indels = self.indels; + assert!(check_disjoint(&mut indels)); + TextEdit { indels } + } + pub fn invalidates_offset(&self, offset: TextSize) -> bool { + self.indels.iter().any(|indel| indel.delete.contains_inclusive(offset)) + } +} + +fn check_disjoint(indels: &mut [impl std::borrow::Borrow<Indel>]) -> bool { + indels.sort_by_key(|indel| (indel.borrow().delete.start(), indel.borrow().delete.end())); + indels + .iter() + .zip(indels.iter().skip(1)) + .all(|(l, r)| l.borrow().delete.end() <= r.borrow().delete.start()) +} diff --git a/crates/toolchain/Cargo.toml b/crates/toolchain/Cargo.toml new file mode 100644 index 0000000000..4856668f84 --- /dev/null +++ b/crates/toolchain/Cargo.toml @@ -0,0 +1,12 @@ +[package] +name = "toolchain" +version = "0.0.0" +license = "MIT OR Apache-2.0" +authors = ["rust-analyzer developers"] +edition = "2018" + +[lib] +doctest = false + +[dependencies] +home = "0.5.3" diff --git a/crates/toolchain/src/lib.rs b/crates/toolchain/src/lib.rs new file mode 100644 index 0000000000..3b6886f5b5 --- /dev/null +++ b/crates/toolchain/src/lib.rs @@ -0,0 +1,66 @@ +//! Discovery of `cargo` & `rustc` executables. +use std::{env, iter, path::PathBuf}; + +pub fn cargo() -> PathBuf { + get_path_for_executable("cargo") +} + +pub fn rustc() -> PathBuf { + get_path_for_executable("rustc") +} + +pub fn rustup() -> PathBuf { + get_path_for_executable("rustup") +} + +pub fn rustfmt() -> PathBuf { + get_path_for_executable("rustfmt") +} + +/// Return a `PathBuf` to use for the given executable. +/// +/// E.g., `get_path_for_executable("cargo")` may return just `cargo` if that +/// gives a valid Cargo executable; or it may return a full path to a valid +/// Cargo. 
+fn get_path_for_executable(executable_name: &'static str) -> PathBuf { + // The current implementation checks three places for an executable to use: + // 1) Appropriate environment variable (erroring if this is set but not a usable executable) + // example: for cargo, this checks $CARGO environment variable; for rustc, $RUSTC; etc + // 2) `<executable_name>` + // example: for cargo, this tries just `cargo`, which will succeed if `cargo` is on the $PATH + // 3) `~/.cargo/bin/<executable_name>` + // example: for cargo, this tries ~/.cargo/bin/cargo + // It seems that this is a reasonable place to try for cargo, rustc, and rustup + let env_var = executable_name.to_ascii_uppercase(); + if let Some(path) = env::var_os(&env_var) { + return path.into(); + } + + if lookup_in_path(executable_name) { + return executable_name.into(); + } + + if let Some(mut path) = home::home_dir() { + path.push(".cargo"); + path.push("bin"); + path.push(executable_name); + if let Some(path) = probe(path) { + return path; + } + } + + executable_name.into() +} + +fn lookup_in_path(exec: &str) -> bool { + let paths = env::var_os("PATH").unwrap_or_default(); + env::split_paths(&paths).map(|path| path.join(exec)).find_map(probe).is_some() +} + +fn probe(path: PathBuf) -> Option<PathBuf> { + let with_extension = match env::consts::EXE_EXTENSION { + "" => None, + it => Some(path.with_extension(it)), + }; + iter::once(path).chain(with_extension).find(|it| it.is_file()) +} diff --git a/crates/tt/Cargo.toml b/crates/tt/Cargo.toml new file mode 100644 index 0000000000..dfcdcf03e8 --- /dev/null +++ b/crates/tt/Cargo.toml @@ -0,0 +1,16 @@ +[package] +name = "tt" +version = "0.0.0" +license = "MIT OR Apache-2.0" +authors = ["rust-analyzer developers"] +edition = "2018" + +[lib] +doctest = false + +[dependencies] +# ideally, `serde` should be enabled by `rust-analyzer`, but we enable it here +# to reduce number of compilations +smol_str = { version = "0.1.15", features = ["serde"] } + +stdx = { path = "../stdx" } diff --git 
a/crates/ra_tt/src/buffer.rs b/crates/tt/src/buffer.rs similarity index 100% rename from crates/ra_tt/src/buffer.rs rename to crates/tt/src/buffer.rs diff --git a/crates/tt/src/lib.rs b/crates/tt/src/lib.rs new file mode 100644 index 0000000000..20c3f5eabf --- /dev/null +++ b/crates/tt/src/lib.rs @@ -0,0 +1,246 @@ +//! `tt` crate defines a `TokenTree` data structure: this is the interface (both +//! input and output) of macros. It closely mirrors `proc_macro` crate's +//! `TokenTree`. +use std::{ + fmt::{self, Debug}, + panic::RefUnwindSafe, +}; + +use stdx::impl_from; + +pub use smol_str::SmolStr; + +/// Represents identity of the token. +/// +/// For hygiene purposes, we need to track which expanded tokens originated from +/// which source tokens. We do it by assigning an distinct identity to each +/// source token and making sure that identities are preserved during macro +/// expansion. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct TokenId(pub u32); + +impl TokenId { + pub const fn unspecified() -> TokenId { + TokenId(!0) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum TokenTree { + Leaf(Leaf), + Subtree(Subtree), +} +impl_from!(Leaf, Subtree for TokenTree); + +impl TokenTree { + pub fn empty() -> Self { + TokenTree::Subtree(Subtree::default()) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum Leaf { + Literal(Literal), + Punct(Punct), + Ident(Ident), +} +impl_from!(Literal, Punct, Ident for Leaf); + +#[derive(Clone, PartialEq, Eq, Hash, Default)] +pub struct Subtree { + pub delimiter: Option, + pub token_trees: Vec, +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub struct Delimiter { + pub id: TokenId, + pub kind: DelimiterKind, +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub enum DelimiterKind { + Parenthesis, + Brace, + Bracket, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Literal { + pub text: SmolStr, + pub id: TokenId, +} + +#[derive(Debug, Clone, Copy, 
PartialEq, Eq, Hash)] +pub struct Punct { + pub char: char, + pub spacing: Spacing, + pub id: TokenId, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum Spacing { + Alone, + Joint, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Ident { + pub text: SmolStr, + pub id: TokenId, +} + +fn print_debug_subtree(f: &mut fmt::Formatter<'_>, subtree: &Subtree, level: usize) -> fmt::Result { + let align = std::iter::repeat(" ").take(level).collect::(); + + let aux = match subtree.delimiter.map(|it| (it.kind, it.id.0)) { + None => "$".to_string(), + Some((DelimiterKind::Parenthesis, id)) => format!("() {}", id), + Some((DelimiterKind::Brace, id)) => format!("{{}} {}", id), + Some((DelimiterKind::Bracket, id)) => format!("[] {}", id), + }; + + if subtree.token_trees.is_empty() { + write!(f, "{}SUBTREE {}", align, aux)?; + } else { + writeln!(f, "{}SUBTREE {}", align, aux)?; + for (idx, child) in subtree.token_trees.iter().enumerate() { + print_debug_token(f, child, level + 1)?; + if idx != subtree.token_trees.len() - 1 { + writeln!(f)?; + } + } + } + + Ok(()) +} + +fn print_debug_token(f: &mut fmt::Formatter<'_>, tkn: &TokenTree, level: usize) -> fmt::Result { + let align = std::iter::repeat(" ").take(level).collect::(); + + match tkn { + TokenTree::Leaf(leaf) => match leaf { + Leaf::Literal(lit) => write!(f, "{}LITERAL {} {}", align, lit.text, lit.id.0)?, + Leaf::Punct(punct) => write!( + f, + "{}PUNCH {} [{}] {}", + align, + punct.char, + if punct.spacing == Spacing::Alone { "alone" } else { "joint" }, + punct.id.0 + )?, + Leaf::Ident(ident) => write!(f, "{}IDENT {} {}", align, ident.text, ident.id.0)?, + }, + TokenTree::Subtree(subtree) => { + print_debug_subtree(f, subtree, level)?; + } + } + + Ok(()) +} + +impl Debug for Subtree { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + print_debug_subtree(f, self, 0) + } +} + +impl fmt::Display for TokenTree { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + 
TokenTree::Leaf(it) => fmt::Display::fmt(it, f), + TokenTree::Subtree(it) => fmt::Display::fmt(it, f), + } + } +} + +impl fmt::Display for Subtree { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let (l, r) = match self.delimiter_kind() { + Some(DelimiterKind::Parenthesis) => ("(", ")"), + Some(DelimiterKind::Brace) => ("{", "}"), + Some(DelimiterKind::Bracket) => ("[", "]"), + None => ("", ""), + }; + f.write_str(l)?; + let mut needs_space = false; + for tt in self.token_trees.iter() { + if needs_space { + f.write_str(" ")?; + } + needs_space = true; + match tt { + TokenTree::Leaf(Leaf::Punct(p)) => { + needs_space = p.spacing == Spacing::Alone; + fmt::Display::fmt(p, f)? + } + tt => fmt::Display::fmt(tt, f)?, + } + } + f.write_str(r)?; + Ok(()) + } +} + +impl fmt::Display for Leaf { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + Leaf::Ident(it) => fmt::Display::fmt(it, f), + Leaf::Literal(it) => fmt::Display::fmt(it, f), + Leaf::Punct(it) => fmt::Display::fmt(it, f), + } + } +} + +impl fmt::Display for Ident { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Display::fmt(&self.text, f) + } +} + +impl fmt::Display for Literal { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Display::fmt(&self.text, f) + } +} + +impl fmt::Display for Punct { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Display::fmt(&self.char, f) + } +} + +impl Subtree { + /// Count the number of tokens recursively + pub fn count(&self) -> usize { + let children_count = self + .token_trees + .iter() + .map(|c| match c { + TokenTree::Subtree(c) => c.count(), + _ => 0, + }) + .sum::(); + + self.token_trees.len() + children_count + } + + pub fn delimiter_kind(&self) -> Option { + self.delimiter.map(|it| it.kind) + } +} + +pub mod buffer; + +#[derive(Debug, PartialEq, Eq, Clone)] +pub enum ExpansionError { + IOError(String), + JsonError(String), + Unknown(String), + ExpansionError(String), +} + +pub trait 
TokenExpander: Debug + Send + Sync + RefUnwindSafe { + fn expand(&self, subtree: &Subtree, attrs: Option<&Subtree>) + -> Result; +} diff --git a/crates/vfs-notify/Cargo.toml b/crates/vfs-notify/Cargo.toml index fce7bae3ad..c1e53f4b15 100644 --- a/crates/vfs-notify/Cargo.toml +++ b/crates/vfs-notify/Cargo.toml @@ -1,9 +1,9 @@ [package] name = "vfs-notify" -version = "0.1.0" +version = "0.0.0" +license = "MIT OR Apache-2.0" authors = ["rust-analyzer developers"] edition = "2018" -license = "MIT OR Apache-2.0" [lib] doctest = false diff --git a/crates/vfs/Cargo.toml b/crates/vfs/Cargo.toml index b74cdb7ffa..9ae8f19b6f 100644 --- a/crates/vfs/Cargo.toml +++ b/crates/vfs/Cargo.toml @@ -1,9 +1,9 @@ [package] name = "vfs" -version = "0.1.0" +version = "0.0.0" +license = "MIT OR Apache-2.0" authors = ["rust-analyzer developers"] edition = "2018" -license = "MIT OR Apache-2.0" [lib] doctest = false diff --git a/crates/vfs/src/vfs_path.rs b/crates/vfs/src/vfs_path.rs index 04a42264e8..944a702df0 100644 --- a/crates/vfs/src/vfs_path.rs +++ b/crates/vfs/src/vfs_path.rs @@ -57,23 +57,42 @@ impl VfsPath { }; buf.push(tag); match &self.0 { - VfsPathRepr::PathBuf(it) => { - let path: &std::ffi::OsStr = it.as_os_str(); + VfsPathRepr::PathBuf(path) => { #[cfg(windows)] { - use std::os::windows::ffi::OsStrExt; - for wchar in path.encode_wide() { - buf.extend(wchar.to_le_bytes().iter().copied()); + use windows_paths::Encode; + let components = path.components(); + let mut add_sep = false; + for component in components { + if add_sep { + windows_paths::SEP.encode(buf); + } + let len_before = buf.len(); + match component { + std::path::Component::Prefix(prefix) => { + // kind() returns a normalized and comparable path prefix. 
+ prefix.kind().encode(buf); + } + std::path::Component::RootDir => { + if !add_sep { + component.as_os_str().encode(buf); + } + } + _ => component.as_os_str().encode(buf), + } + + // some components may be encoded empty + add_sep = len_before != buf.len(); } } #[cfg(unix)] { use std::os::unix::ffi::OsStrExt; - buf.extend(path.as_bytes()); + buf.extend(path.as_os_str().as_bytes()); } #[cfg(not(any(windows, unix)))] { - buf.extend(path.to_string_lossy().as_bytes()); + buf.extend(path.as_os_str().to_string_lossy().as_bytes()); } } VfsPathRepr::VirtualPath(VirtualPath(s)) => buf.extend(s.as_bytes()), @@ -81,6 +100,112 @@ impl VfsPath { } } +#[cfg(windows)] +mod windows_paths { + pub trait Encode { + fn encode(&self, buf: &mut Vec); + } + + impl Encode for std::ffi::OsStr { + fn encode(&self, buf: &mut Vec) { + use std::os::windows::ffi::OsStrExt; + for wchar in self.encode_wide() { + buf.extend(wchar.to_le_bytes().iter().copied()); + } + } + } + + impl Encode for u8 { + fn encode(&self, buf: &mut Vec) { + let wide = *self as u16; + buf.extend(wide.to_le_bytes().iter().copied()) + } + } + + impl Encode for &str { + fn encode(&self, buf: &mut Vec) { + debug_assert!(self.is_ascii()); + for b in self.as_bytes() { + b.encode(buf) + } + } + } + + pub const SEP: &str = "\\"; + const VERBATIM: &str = "\\\\?\\"; + const UNC: &str = "UNC"; + const DEVICE: &str = "\\\\.\\"; + const COLON: &str = ":"; + + impl Encode for std::path::Prefix<'_> { + fn encode(&self, buf: &mut Vec) { + match self { + std::path::Prefix::Verbatim(c) => { + VERBATIM.encode(buf); + c.encode(buf); + } + std::path::Prefix::VerbatimUNC(server, share) => { + VERBATIM.encode(buf); + UNC.encode(buf); + SEP.encode(buf); + server.encode(buf); + SEP.encode(buf); + share.encode(buf); + } + std::path::Prefix::VerbatimDisk(d) => { + VERBATIM.encode(buf); + d.encode(buf); + COLON.encode(buf); + } + std::path::Prefix::DeviceNS(device) => { + DEVICE.encode(buf); + device.encode(buf); + } + 
std::path::Prefix::UNC(server, share) => { + SEP.encode(buf); + SEP.encode(buf); + server.encode(buf); + SEP.encode(buf); + share.encode(buf); + } + std::path::Prefix::Disk(d) => { + d.encode(buf); + COLON.encode(buf); + } + } + } + } + #[test] + fn paths_encoding() { + // drive letter casing agnostic + test_eq("C:/x.rs", "c:/x.rs"); + // separator agnostic + test_eq("C:/x/y.rs", "C:\\x\\y.rs"); + + fn test_eq(a: &str, b: &str) { + let mut b1 = Vec::new(); + let mut b2 = Vec::new(); + vfs(a).encode(&mut b1); + vfs(b).encode(&mut b2); + assert_eq!(b1, b2); + } + } + + #[test] + fn test_sep_root_dir_encoding() { + let mut buf = Vec::new(); + vfs("C:/x/y").encode(&mut buf); + assert_eq!(&buf, &[0, 67, 0, 58, 0, 92, 0, 120, 0, 92, 0, 121, 0]) + } + + #[cfg(test)] + fn vfs(str: &str) -> super::VfsPath { + use super::{AbsPathBuf, VfsPath}; + use std::convert::TryFrom; + VfsPath::from(AbsPathBuf::try_from(str).unwrap()) + } +} + #[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Hash)] enum VfsPathRepr { PathBuf(AbsPathBuf), diff --git a/docs/dev/README.md b/docs/dev/README.md index 67813a9c07..36edddc700 100644 --- a/docs/dev/README.md +++ b/docs/dev/README.md @@ -14,7 +14,7 @@ To learn more about how rust-analyzer works, see We also publish rustdoc docs to pages: -https://rust-analyzer.github.io/rust-analyzer/ra_ide/ +https://rust-analyzer.github.io/rust-analyzer/ide/ Various organizational and process issues are discussed in this document. @@ -92,11 +92,11 @@ This is primarily useful for performance optimizations, or for bug minimization. ## Parser Tests -Tests for the parser (`ra_parser`) live in the `ra_syntax` crate (see `test_data` directory). +Tests for the parser (`parser`) live in the `syntax` crate (see `test_data` directory). There are two kinds of tests: * Manually written test cases in `parser/ok` and `parser/err` -* "Inline" tests in `parser/inline` (these are generated) from comments in `ra_parser` crate. 
+* "Inline" tests in `parser/inline` (these are generated) from comments in `parser` crate. The purpose of inline tests is not to achieve full coverage by test cases, but to explain to the reader of the code what each particular `if` and `match` is responsible for. If you are tempted to add a large inline test, it might be a good idea to leave only the simplest example in place, and move the test to a manual `parser/ok` test. @@ -148,23 +148,28 @@ Internal representations are lowered to LSP in the `rust-analyzer` crate (the on ## IDE/Compiler split -There's a semi-hard split between "compiler" and "IDE", at the `ra_hir` crate. +There's a semi-hard split between "compiler" and "IDE", at the `hir` crate. Compiler derives new facts about source code. It explicitly acknowledges that not all info is available (i.e. you can't look at types during name resolution). IDE assumes that all information is available at all times. -IDE should use only types from `ra_hir`, and should not depend on the underling compiler types. -`ra_hir` is a facade. +IDE should use only types from `hir`, and should not depend on the underling compiler types. +`hir` is a facade. ## IDE API -The main IDE crate (`ra_ide`) uses "Plain Old Data" for the API. +The main IDE crate (`ide`) uses "Plain Old Data" for the API. Rather than talking in definitions and references, it talks in Strings and textual offsets. In general, API is centered around UI concerns -- the result of the call is what the user sees in the editor, and not what the compiler sees underneath. The results are 100% Rust specific though. Shout outs to LSP developers for popularizing the idea that "UI" is a good place to draw a boundary at. +## CI + +CI does not test rust-analyzer, CI is a core part of rust-analyzer, and is maintained with above average standard of quality. +CI is reproducible -- it can only be broken by changes to files in this repository, any dependence on externalities is a bug. 
+ # Code Style & Review Process Do see [./style.md](./style.md). @@ -256,9 +261,9 @@ Release steps: * checkout the `release` branch * reset it to `upstream/nightly` * push it to `upstream`. This triggers GitHub Actions which: - ** runs `cargo xtask dist` to package binaries and VS Code extension - ** makes a GitHub release - ** pushes VS Code extension to the marketplace + * runs `cargo xtask dist` to package binaries and VS Code extension + * makes a GitHub release + * pushes VS Code extension to the marketplace * create new changelog in `rust-analyzer.github.io` * create `rust-analyzer.github.io/git.log` file with the log of merge commits since last release 2. While the release is in progress, fill-in the changelog using `git.log` diff --git a/docs/dev/architecture.md b/docs/dev/architecture.md index d0c6eea61f..6f1377f2f0 100644 --- a/docs/dev/architecture.md +++ b/docs/dev/architecture.md @@ -56,7 +56,7 @@ In particular, `cargo xtask codegen` generates: 2. [`ast/generated`](https://github.com/rust-analyzer/rust-analyzer/blob/a0be39296d2925972cacd9fbf8b5fb258fad6947/crates/ra_syntax/src/ast/generated.rs) -- AST data structure. -3. [`doc_tests/generated`](https://github.com/rust-analyzer/rust-analyzer/blob/a0be39296d2925972cacd9fbf8b5fb258fad6947/crates/ra_assists/src/doc_tests/generated.rs), +3. [`doc_tests/generated`](https://github.com/rust-analyzer/rust-analyzer/blob/a0be39296d2925972cacd9fbf8b5fb258fad6947/crates/assists/src/doc_tests/generated.rs), [`test_data/parser/inline`](https://github.com/rust-analyzer/rust-analyzer/tree/a0be39296d2925972cacd9fbf8b5fb258fad6947/crates/ra_syntax/test_data/parser/inline) -- tests for assists and the parser. @@ -64,7 +64,7 @@ The source for 1 and 2 is in [`ast_src.rs`](https://github.com/rust-analyzer/rus ## Code Walk-Through -### `crates/ra_syntax`, `crates/ra_parser` +### `crates/ra_syntax`, `crates/parser` Rust syntax tree structure and parser. 
See [RFC](https://github.com/rust-lang/rfcs/pull/2256) and [./syntax.md](./syntax.md) for some design notes. @@ -92,17 +92,17 @@ in particular: it shows off various methods of working with syntax tree. See [#93](https://github.com/rust-analyzer/rust-analyzer/pull/93) for an example PR which fixes a bug in the grammar. -### `crates/ra_db` +### `crates/base_db` We use the [salsa](https://github.com/salsa-rs/salsa) crate for incremental and on-demand computation. Roughly, you can think of salsa as a key-value store, but -it also can compute derived values using specified functions. The `ra_db` crate +it also can compute derived values using specified functions. The `base_db` crate provides basic infrastructure for interacting with salsa. Crucially, it defines most of the "input" queries: facts supplied by the client of the -analyzer. Reading the docs of the `ra_db::input` module should be useful: +analyzer. Reading the docs of the `base_db::input` module should be useful: everything else is strictly derived from those inputs. -### `crates/ra_hir*` crates +### `crates/hir*` crates HIR provides high-level "object oriented" access to Rust code. @@ -113,12 +113,12 @@ is responsible for guessing a HIR for a particular source position. Underneath, HIR works on top of salsa, using a `HirDatabase` trait. -`ra_hir_xxx` crates have a strong ECS flavor, in that they work with raw ids and +`hir_xxx` crates have a strong ECS flavor, in that they work with raw ids and directly query the database. -The top-level `ra_hir` façade crate wraps ids into a more OO-flavored API. +The top-level `hir` façade crate wraps ids into a more OO-flavored API. -### `crates/ra_ide` +### `crates/ide` A stateful library for analyzing many Rust files as they change. `AnalysisHost` is a mutable entity (clojure's atom) which holds the current state, incorporates @@ -136,11 +136,11 @@ offsets and strings as output. 
This works on top of rich code model powered by ### `crates/rust-analyzer` -An LSP implementation which wraps `ra_ide` into a language server protocol. +An LSP implementation which wraps `ide` into a language server protocol. ### `ra_vfs` -Although `hir` and `ra_ide` don't do any IO, we need to be able to read +Although `hir` and `ide` don't do any IO, we need to be able to read files from disk at the end of the day. This is what `ra_vfs` does. It also manages overlays: "dirty" files in the editor, whose "true" contents is different from data on disk. This is more or less the single really @@ -161,7 +161,7 @@ disk. For this reason, we try to avoid writing too many tests on this boundary: in a statically typed language, it's hard to make an error in the protocol itself if messages are themselves typed. -The middle, and most important, boundary is `ra_ide`. Unlike +The middle, and most important, boundary is `ide`. Unlike `rust-analyzer`, which exposes API, `ide` uses Rust API and is intended to use by various tools. Typical test creates an `AnalysisHost`, calls some `Analysis` functions and compares the results against expectation. diff --git a/docs/dev/guide.md b/docs/dev/guide.md index c3252f1f68..b5a5d7c935 100644 --- a/docs/dev/guide.md +++ b/docs/dev/guide.md @@ -40,8 +40,8 @@ terms of files and offsets, and **not** in terms of Rust concepts like structs, traits, etc. The "typed" API with Rust specific types is slightly lower in the stack, we'll talk about it later. 
-[`AnalysisHost`]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ra_ide_api/src/lib.rs#L265-L284 -[`Analysis`]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ra_ide_api/src/lib.rs#L291-L478 +[`AnalysisHost`]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/lib.rs#L265-L284 +[`Analysis`]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/lib.rs#L291-L478 The reason for this separation of `Analysis` and `AnalysisHost` is that we want to apply changes "uniquely", but we might also want to fork an `Analysis` and send it to @@ -69,7 +69,7 @@ the `AnalysisHost::apply_change` method, which accepts a single argument, a "transaction", so it suffices to study its methods to understand all of the input data. -[`AnalysisChange`]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ra_ide_api/src/lib.rs#L119-L167 +[`AnalysisChange`]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/lib.rs#L119-L167 The `(add|change|remove)_file` methods control the set of the input files, where each file has an integer id (`FileId`, picked by the client), text (`String`) @@ -253,13 +253,13 @@ All analyzer information is stored in a salsa database. `Analysis` and `AnalysisHost` types are newtype wrappers for [`RootDatabase`] -- a salsa database. -[`RootDatabase`]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ra_ide_api/src/db.rs#L88-L134 +[`RootDatabase`]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/db.rs#L88-L134 Salsa input queries are defined in [`FilesDatabase`] (which is a part of `RootDatabase`). They closely mirror the familiar `AnalysisChange` structure: indeed, what `apply_change` does is it sets the values of input queries. 
-[`FilesDatabase`]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ra_db/src/input.rs#L150-L174 +[`FilesDatabase`]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/base_db/src/input.rs#L150-L174 ## From text to semantic model @@ -275,7 +275,7 @@ several times, with different sets of `cfg`s enabled. The IDE-specific task of mapping source code position into a semantic model is inherently imprecise for this reason, and is handled by the [`source_binder`]. -[`source_binder`]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/source_binder.rs +[`source_binder`]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/hir/src/source_binder.rs The semantic interface is declared in the [`code_model_api`] module. Each entity is identified by an integer ID and has a bunch of methods which take a salsa database @@ -283,8 +283,8 @@ as an argument and returns other entities (which are also IDs). Internally, thes methods invoke various queries on the database to build the model on demand. Here's [the list of queries]. -[`code_model_api`]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/code_model_api.rs -[the list of queries]: https://github.com/rust-analyzer/rust-analyzer/blob/7e84440e25e19529e4ff8a66e521d1b06349c6ec/crates/ra_hir/src/db.rs#L20-L106 +[`code_model_api`]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/hir/src/code_model_api.rs +[the list of queries]: https://github.com/rust-analyzer/rust-analyzer/blob/7e84440e25e19529e4ff8a66e521d1b06349c6ec/crates/hir/src/db.rs#L20-L106 The first step of building the model is parsing the source code. @@ -341,7 +341,7 @@ The algorithm for building a tree of modules is to start with a crate root declarations and recursively process child modules. This is handled by the [`module_tree_query`], with two slight variations. 
-[`module_tree_query`]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/module_tree.rs#L116-L123 +[`module_tree_query`]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/hir/src/module_tree.rs#L116-L123 First, rust-analyzer builds a module tree for all crates in a source root simultaneously. The main reason for this is historical (`module_tree` predates @@ -364,7 +364,7 @@ the same, we don't have to re-execute [`module_tree_query`]. In fact, we only need to re-execute it when we add/remove new files or when we change mod declarations. -[`submodules_query`]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/module_tree.rs#L41 +[`submodules_query`]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/hir/src/module_tree.rs#L41 We store the resulting modules in a `Vec`-based indexed arena. The indices in the arena becomes module IDs. And this brings us to the next topic: @@ -392,8 +392,8 @@ integers which can "intern" a location and return an integer ID back. The salsa database we use includes a couple of [interners]. How to "garbage collect" unused locations is an open question. -[`LocationInterner`]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ra_db/src/loc2id.rs#L65-L71 -[interners]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/db.rs#L22-L23 +[`LocationInterner`]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/base_db/src/loc2id.rs#L65-L71 +[interners]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/hir/src/db.rs#L22-L23 For example, we use `LocationInterner` to assign IDs to definitions of functions, structs, enums, etc. The location, [`DefLoc`] contains two bits of information: @@ -407,7 +407,7 @@ using offsets, text ranges or syntax trees as keys and values for queries. 
What we do instead is we store "index" of the item among all of the items of a file (so, a positional based ID, but localized to a single file). -[`DefLoc`]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/ids.rs#L127-L139 +[`DefLoc`]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/hir/src/ids.rs#L127-L139 One thing we've glossed over for the time being is support for macros. We have only proof of concept handling of macros at the moment, but they are extremely @@ -440,7 +440,7 @@ terms of `HirFileId`! This does not recur infinitely though: any chain of `HirFileId`s bottoms out in `HirFileId::FileId`, that is, some source file actually written by the user. -[`HirFileId`]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/ids.rs#L18-L125 +[`HirFileId`]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/hir/src/ids.rs#L18-L125 Now that we understand how to identify a definition, in a source or in a macro-generated file, we can discuss name resolution a bit. @@ -454,14 +454,14 @@ each module into a position-independent representation which does not change if we modify bodies of the items. After that we [loop] resolving all imports until we've reached a fixed point. -[lower]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/nameres/lower.rs#L113-L117 -[loop]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/nameres.rs#L186-L196 +[lower]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/hir/src/nameres/lower.rs#L113-L117 +[loop]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/hir/src/nameres.rs#L186-L196 And, given all our preparation with IDs and a position-independent representation, it is satisfying to [test] that typing inside function body does not invalidate name resolution results. 
-[test]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/nameres/tests.rs#L376 +[test]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/hir/src/nameres/tests.rs#L376 An interesting fact about name resolution is that it "erases" all of the intermediate paths from the imports: in the end, we know which items are defined @@ -496,10 +496,10 @@ there's an intermediate [projection query] which returns only the first position-independent part of the lowering. The result of this query is stable. Naturally, name resolution [uses] this stable projection query. -[imports]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/nameres/lower.rs#L52-L59 -[`SourceMap`]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/nameres/lower.rs#L52-L59 -[projection query]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/nameres/lower.rs#L97-L103 -[uses]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/query_definitions.rs#L49 +[imports]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/hir/src/nameres/lower.rs#L52-L59 +[`SourceMap`]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/hir/src/nameres/lower.rs#L52-L59 +[projection query]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/hir/src/nameres/lower.rs#L97-L103 +[uses]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/hir/src/query_definitions.rs#L49 ## Type inference @@ -521,10 +521,10 @@ construct a mapping from `ExprId`s to types. 
[@flodiebold]: https://github.com/flodiebold [#327]: https://github.com/rust-analyzer/rust-analyzer/pull/327 -[lower the AST]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/expr.rs -[positional ID]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/expr.rs#L13-L15 -[a source map]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/expr.rs#L41-L44 -[type inference]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/ty.rs#L1208-L1223 +[lower the AST]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/hir/src/expr.rs +[positional ID]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/hir/src/expr.rs#L13-L15 +[a source map]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/hir/src/expr.rs#L41-L44 +[type inference]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/hir/src/ty.rs#L1208-L1223 ## Tying it all together: completion @@ -565,11 +565,11 @@ the type to completion. 
[schedule it on the threadpool]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop.rs#L428 [catch]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop.rs#L436-L442 [the handler]: https://salsa.zulipchat.com/#narrow/stream/181542-rfcs.2Fsalsa-query-group/topic/design.20next.20steps -[ask analysis for completion]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ra_ide_api/src/lib.rs#L439-L444 -[completion implementation]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ra_ide_api/src/completion.rs#L46-L62 -[`CompletionContext`]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ra_ide_api/src/completion/completion_context.rs#L14-L37 -["IntelliJ Trick"]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ra_ide_api/src/completion/completion_context.rs#L72-L75 -[find an ancestor `fn` node]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ra_ide_api/src/completion/completion_context.rs#L116-L120 -[semantic model]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ra_ide_api/src/completion/completion_context.rs#L123 -[series of independent completion routines]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ra_ide_api/src/completion.rs#L52-L59 -[`complete_dot`]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ra_ide_api/src/completion/complete_dot.rs#L6-L22 +[ask analysis for completion]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/lib.rs#L439-L444 +[completion implementation]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/completion.rs#L46-L62 +[`CompletionContext`]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/completion/completion_context.rs#L14-L37 +["IntelliJ 
Trick"]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/completion/completion_context.rs#L72-L75 +[find an ancestor `fn` node]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/completion/completion_context.rs#L116-L120 +[semantic model]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/completion/completion_context.rs#L123 +[series of independent completion routines]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/completion.rs#L52-L59 +[`complete_dot`]: https://github.com/rust-analyzer/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/completion/complete_dot.rs#L6-L22 diff --git a/docs/dev/style.md b/docs/dev/style.md index 1c68f57023..963a6d73d0 100644 --- a/docs/dev/style.md +++ b/docs/dev/style.md @@ -65,7 +65,7 @@ There are many benefits to this: It also makes sense to format snippets more compactly (for example, by placing enum definitions like `enum E { Foo, Bar }` on a single line), as long as they are still readable. -## Order of Imports +# Order of Imports Separate import groups with blank lines. Use one `use` per crate. @@ -91,19 +91,19 @@ use super::{} Module declarations come before the imports. Order them in "suggested reading order" for a person new to the code base. -## Import Style +# Import Style Qualify items from `hir` and `ast`. ```rust // Good -use ra_syntax::ast; +use syntax::ast; fn frobnicate(func: hir::Function, strukt: ast::StructDef) {} // Not as good use hir::Function; -use ra_syntax::ast::StructDef; +use syntax::ast::StructDef; fn frobnicate(func: Function, strukt: StructDef) {} ``` @@ -112,7 +112,7 @@ Avoid local `use MyEnum::*` imports. Prefer `use crate::foo::bar` to `use super::bar`. -## Order of Items +# Order of Items Optimize for the reader who sees the file for the first time, and wants to get a general idea about what's going on. 
People read things from top to bottom, so place most important things first. @@ -143,7 +143,7 @@ struct Foo { } ``` -## Variable Naming +# Variable Naming Use boring and long names for local variables ([yay code completion](https://github.com/rust-analyzer/rust-analyzer/pull/4162#discussion_r417130973)). The default name is a lowercased name of the type: `global_state: GlobalState`. @@ -151,12 +151,12 @@ Avoid ad-hoc acronyms and contractions, but use the ones that exist consistently The default name for "result of the function" local variable is `res`. The default name for "I don't really care about the name" variable is `it`. -## Collection types +# Collection types Prefer `rustc_hash::FxHashMap` and `rustc_hash::FxHashSet` instead of the ones in `std::collections`. They use a hasher that's slightly faster and using them consistently will reduce code size by some small amount. -## Preconditions +# Preconditions Express function preconditions in types and force the caller to provide them (rather than checking in callee): @@ -176,7 +176,7 @@ fn frobnicate(walrus: Option) { } ``` -## Premature Pessimization +# Premature Pessimization Avoid writing code which is slower than it needs to be. Don't allocate a `Vec` where an iterator would do, don't allocate strings needlessly. @@ -197,12 +197,12 @@ if words.len() != 2 { } ``` -## Documentation +# Documentation For `.md` and `.adoc` files, prefer a sentence-per-line format, don't wrap lines. If the line is too long, you want to split the sentence in two :-) -## Commit Style +# Commit Style We don't have specific rules around git history hygiene. Maintaining clean git history is encouraged, but not enforced. diff --git a/docs/dev/syntax.md b/docs/dev/syntax.md index d4bc4b07c4..c08062ef4d 100644 --- a/docs/dev/syntax.md +++ b/docs/dev/syntax.md @@ -11,7 +11,7 @@ The things described are implemented in two places * [rowan](https://github.com/rust-analyzer/rowan/tree/v0.9.0) -- a generic library for rowan syntax trees. 
* [ra_syntax](https://github.com/rust-analyzer/rust-analyzer/tree/cf5bdf464cad7ceb9a67e07985a3f4d3799ec0b6/crates/ra_syntax) crate inside rust-analyzer which wraps `rowan` into rust-analyzer specific API. Nothing in rust-analyzer except this crate knows about `rowan`. -* [ra_parser](https://github.com/rust-analyzer/rust-analyzer/tree/cf5bdf464cad7ceb9a67e07985a3f4d3799ec0b6/crates/ra_parser) crate parses input tokens into an `ra_syntax` tree +* [parser](https://github.com/rust-analyzer/rust-analyzer/tree/cf5bdf464cad7ceb9a67e07985a3f4d3799ec0b6/crates/parser) crate parses input tokens into an `ra_syntax` tree ## Design Goals @@ -74,7 +74,7 @@ Points of note: * The original text can be recovered by concatenating the texts of all tokens in order. * Accessing a child of particular type (for example, parameter list of a function) generally involves linerary traversing the children, looking for a specific `kind`. * Modifying the tree is roughly `O(depth)`. - We don't make special efforts to guarantree that the depth is not liner, but, in practice, syntax trees are branchy and shallow. + We don't make special efforts to guarantee that the depth is not linear, but, in practice, syntax trees are branchy and shallow. * If mandatory (grammar wise) node is missing from the input, it's just missing from the tree. * If an extra erroneous input is present, it is wrapped into a node with `ERROR` kind, and treated just like any other node. * Parser errors are not a part of syntax tree. 
diff --git a/editors/code/src/client.ts b/editors/code/src/client.ts index 18948cb3c4..f5db55b8cc 100644 --- a/editors/code/src/client.ts +++ b/editors/code/src/client.ts @@ -4,7 +4,7 @@ import * as ra from '../src/lsp_ext'; import * as Is from 'vscode-languageclient/lib/utils/is'; import { CallHierarchyFeature } from 'vscode-languageclient/lib/callHierarchy.proposed'; -import { SemanticTokensFeature, DocumentSemanticsTokensSignature } from 'vscode-languageclient/lib/semanticTokens.proposed'; +import { SemanticTokensFeature } from 'vscode-languageclient/lib/semanticTokens.proposed'; import { assert } from './util'; function renderCommand(cmd: ra.CommandLink) { @@ -44,12 +44,6 @@ export function createClient(serverPath: string, cwd: string): lc.LanguageClient diagnosticCollectionName: "rustc", traceOutputChannel, middleware: { - // Workaround for https://github.com/microsoft/vscode-languageserver-node/issues/576 - async provideDocumentSemanticTokens(document: vscode.TextDocument, token: vscode.CancellationToken, next: DocumentSemanticsTokensSignature) { - const res = await next(document, token); - if (res === undefined) throw new Error('busy'); - return res; - }, async provideHover(document: vscode.TextDocument, position: vscode.Position, token: vscode.CancellationToken, _next: lc.ProvideHoverSignature) { return client.sendRequest(lc.HoverRequest.type, client.code2ProtocolConverter.asTextDocumentPositionParams(document, position), token).then( (result) => { @@ -135,7 +129,7 @@ export function createClient(serverPath: string, cwd: string): lc.LanguageClient ); } - } as any + } }; const client = new lc.LanguageClient( diff --git a/editors/code/src/toolchain.ts b/editors/code/src/toolchain.ts index 80a7915e90..a5dc3cf0cc 100644 --- a/editors/code/src/toolchain.ts +++ b/editors/code/src/toolchain.ts @@ -121,12 +121,12 @@ export class Cargo { } } -/** Mirrors `ra_toolchain::cargo()` implementation */ +/** Mirrors `toolchain::cargo()` implementation */ export function 
cargoPath(): string { return getPathForExecutable("cargo"); } -/** Mirrors `ra_toolchain::get_path_for_executable()` implementation */ +/** Mirrors `toolchain::get_path_for_executable()` implementation */ export const getPathForExecutable = memoize( // We apply caching to decrease file-system interactions (executableName: "cargo" | "rustc" | "rustup"): string => { diff --git a/editors/code/src/util.ts b/editors/code/src/util.ts index 970fedb378..49d2d1c6fb 100644 --- a/editors/code/src/util.ts +++ b/editors/code/src/util.ts @@ -64,7 +64,8 @@ export async function sendRequestWithRetry( param: TParam, token?: vscode.CancellationToken, ): Promise { - for (const delay of [2, 4, 6, 8, 10, null]) { + // The sequence is `10 * (2 ** (2 * n))` where n is 1, 2, 3... + for (const delay of [40, 160, 640, 2560, 10240, null]) { try { return await (token ? client.sendRequest(reqType, param, token) @@ -84,8 +85,7 @@ export async function sendRequestWithRetry( log.warn("LSP request failed", { method: reqType.method, param, error }); throw error; } - - await sleep(10 * (1 << delay)); + await sleep(delay); } } throw 'unreachable'; diff --git a/xtask/Cargo.toml b/xtask/Cargo.toml index 1a1140b04e..e9edbdd10b 100644 --- a/xtask/Cargo.toml +++ b/xtask/Cargo.toml @@ -18,3 +18,4 @@ quote = "1.0.2" ungrammar = "1.1.1" walkdir = "2.3.1" write-json = "0.1.0" +# Avoid adding more dependencies to this crate diff --git a/xtask/src/codegen.rs b/xtask/src/codegen.rs index 1edb04c2f1..950dd61b28 100644 --- a/xtask/src/codegen.rs +++ b/xtask/src/codegen.rs @@ -33,12 +33,12 @@ const GRAMMAR_DIR: &str = "crates/ra_parser/src/grammar"; const OK_INLINE_TESTS_DIR: &str = "crates/ra_syntax/test_data/parser/inline/ok"; const ERR_INLINE_TESTS_DIR: &str = "crates/ra_syntax/test_data/parser/inline/err"; -const SYNTAX_KINDS: &str = "crates/ra_parser/src/syntax_kind/generated.rs"; -const AST_NODES: &str = "crates/ra_syntax/src/ast/generated/nodes.rs"; -const AST_TOKENS: &str = 
"crates/ra_syntax/src/ast/generated/tokens.rs"; +const SYNTAX_KINDS: &str = "crates/parser/src/syntax_kind/generated.rs"; +const AST_NODES: &str = "crates/syntax/src/ast/generated/nodes.rs"; +const AST_TOKENS: &str = "crates/syntax/src/ast/generated/tokens.rs"; -const ASSISTS_DIR: &str = "crates/ra_assists/src/handlers"; -const ASSISTS_TESTS: &str = "crates/ra_assists/src/tests/generated.rs"; +const ASSISTS_DIR: &str = "crates/assists/src/handlers"; +const ASSISTS_TESTS: &str = "crates/assists/src/tests/generated.rs"; const REPOSITORY_URL: &str = "https://github.com/rust-lang/rust"; const UNSTABLE_FEATURE: &str = "crates/ra_ide/src/completion/unstable_feature_descriptor.rs"; diff --git a/xtask/src/codegen/gen_syntax.rs b/xtask/src/codegen/gen_syntax.rs index cafad8070d..dd1f4d6a2c 100644 --- a/xtask/src/codegen/gen_syntax.rs +++ b/xtask/src/codegen/gen_syntax.rs @@ -1,7 +1,7 @@ //! This module generates AST datatype used by rust-analyzer. //! //! Specifically, it generates the `SyntaxKind` enum and a number of newtype -//! wrappers around `SyntaxNode` which implement `ra_syntax::AstNode`. +//! wrappers around `SyntaxNode` which implement `syntax::AstNode`. use std::{ collections::{BTreeSet, HashSet}, diff --git a/xtask/src/lib.rs b/xtask/src/lib.rs index 2fdb08f2e1..807ef587ce 100644 --- a/xtask/src/lib.rs +++ b/xtask/src/lib.rs @@ -103,7 +103,7 @@ pub fn run_clippy() -> Result<()> { } pub fn run_fuzzer() -> Result<()> { - let _d = pushd("./crates/ra_syntax"); + let _d = pushd("./crates/syntax"); let _e = pushenv("RUSTUP_TOOLCHAIN", "nightly"); if run!("cargo fuzz --help").is_err() { run!("cargo install cargo-fuzz")?; @@ -139,7 +139,7 @@ pub fn run_pre_cache() -> Result<()> { } fs2::remove_file("./target/.rustc_info.json")?; - let to_delete = ["ra_", "heavy_test", "xtask"]; + let to_delete = ["hir", "heavy_test", "xtask", "ide", "rust-analyzer"]; for &dir in ["./target/debug/deps", "target/debug/.fingerprint"].iter() { for entry in Path::new(dir).read_dir()? 
{ let entry = entry?; diff --git a/xtask/tests/tidy.rs b/xtask/tests/tidy.rs index 68a70da9e9..ca9749ed47 100644 --- a/xtask/tests/tidy.rs +++ b/xtask/tests/tidy.rs @@ -44,11 +44,26 @@ fn rust_files_are_tidy() { let text = fs2::read_to_string(&path).unwrap(); check_todo(&path, &text); check_trailing_ws(&path, &text); + deny_clippy(&path, &text); tidy_docs.visit(&path, &text); } tidy_docs.finish(); } +fn deny_clippy(path: &PathBuf, text: &String) { + if text.contains("[\u{61}llow(clippy") { + panic!( + "\n\nallowing lints is forbidden: {}. +rust-analyzer intentionally doesn't check clippy on CI. +You can allow lint globally via `xtask clippy`. +See https://github.com/rust-lang/rust-clippy/issues/5537 for discussion. + +", + path.display() + ) + } +} + #[test] fn check_licenses() { let expected = " @@ -179,16 +194,16 @@ impl TidyDocs { } let poorly_documented = [ - "ra_hir", - "ra_hir_expand", - "ra_ide", - "ra_mbe", - "ra_parser", - "ra_prof", - "ra_project_model", - "ra_syntax", - "ra_tt", - "ra_hir_ty", + "hir", + "hir_expand", + "ide", + "mbe", + "parser", + "profile", + "project_model", + "syntax", + "tt", + "hir_ty", ]; let mut has_fixmes =