Merge remote-tracking branch 'upstream/master' into 503-hover-doc-links

Hasn't fixed tests yet.

Commit f05d7b41a7
655 changed files with 34763 additions and 36934 deletions
.gitattributes (vendored, 4 changes)

@@ -1,2 +1,6 @@
* text=auto eol=lf
crates/ra_syntax/test_data/** -text eof=LF
# Older git versions try to fix line endings on images, this prevents it.
*.png binary
*.jpg binary
*.ico binary
.github/workflows/ci.yaml (vendored, 79 changes)

@@ -16,19 +16,19 @@ env:
RUSTUP_MAX_RETRIES: 10

jobs:
rust-audit:
name: Audit Rust vulnerabilities
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v2
# rust-audit:
# name: Audit Rust vulnerabilities
# runs-on: ubuntu-latest
# steps:
# - name: Checkout repository
# uses: actions/checkout@v2

- uses: actions-rs/install@v0.1
with:
crate: cargo-audit
use-tool-cache: true
# - uses: actions-rs/install@v0.1
# with:
# crate: cargo-audit
# use-tool-cache: true

- run: cargo audit
# - run: cargo audit

rust:
name: Rust

@@ -61,29 +61,22 @@ jobs:
override: true
components: rustfmt, rust-src

- if: matrix.os == 'ubuntu-latest'
run: sudo chown -R $(whoami):$(id -ng) ~/.cargo/

- name: Cache cargo registry
uses: actions/cache@v1
- name: Cache cargo directories
uses: actions/cache@v2
with:
path: ~/.cargo/registry
key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }}

- name: Cache cargo index
uses: actions/cache@v1
with:
path: ~/.cargo/git
key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }}
path: |
~/.cargo/registry
~/.cargo/git
key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}

- name: Cache cargo target dir
uses: actions/cache@v1
uses: actions/cache@v2
with:
path: target
key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}

- name: Compile
run: cargo test --no-run
run: cargo test --no-run --locked

- name: Test
run: cargo test

@@ -95,6 +88,34 @@ jobs:
if: matrix.os == 'windows-latest'
run: Remove-Item ./target/debug/xtask.exe, ./target/debug/deps/xtask.exe

# Weird target to catch non-portable code
rust-power:
name: Rust Power
runs-on: ubuntu-latest

steps:
- name: Checkout repository
uses: actions/checkout@v2

- name: Install Rust toolchain
uses: actions-rs/toolchain@v1
with:
toolchain: stable
profile: minimal
override: true
target: 'powerpc-unknown-linux-gnu'

- name: Cache cargo directories
uses: actions/cache@v2
with:
path: |
~/.cargo/registry
~/.cargo/git
key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}

- name: Check
run: cargo check --target=powerpc-unknown-linux-gnu --all-targets

typescript:
name: TypeScript
strategy:

@@ -116,9 +137,9 @@ jobs:
- run: npm ci
working-directory: ./editors/code

- run: npm audit || { sleep 10 && npm audit; } || { sleep 30 && npm audit; }
if: runner.os == 'Linux'
working-directory: ./editors/code
# - run: npm audit || { sleep 10 && npm audit; } || { sleep 30 && npm audit; }
# if: runner.os == 'Linux'
# working-directory: ./editors/code

- run: npm run lint
working-directory: ./editors/code
.github/workflows/metrics.yaml (vendored, new file, 32 changes)

@@ -0,0 +1,32 @@
name: metrics
on:
push:
branches:
- master

env:
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
RUSTFLAGS: -D warnings
RUSTUP_MAX_RETRIES: 10

jobs:
metrics:
runs-on: ubuntu-latest

steps:
- name: Checkout repository
uses: actions/checkout@v2

- name: Install Rust toolchain
uses: actions-rs/toolchain@v1
with:
toolchain: stable
profile: minimal
override: true
components: rust-src

- name: Collect metrics
run: cargo xtask metrics
env:
METRICS_TOKEN: ${{ secrets.METRICS_TOKEN }}
Cargo.lock (generated, 552 changes)
File diff suppressed because it is too large.
@@ -24,9 +24,16 @@ opt-level = 0
opt-level = 0
[profile.release.package.salsa-macros]
opt-level = 0
[profile.release.package.tracing-attributes]
opt-level = 0
[profile.release.package.xtask]
opt-level = 0

# Gzipping the artifacts is up to 10 times faster with optimizations (`cargo xtask dist`).
# `miniz_oxide` is the direct dependency of `flate2` which does all the heavy lifting
[profile.dev.package.miniz_oxide]
opt-level = 3

[patch.'crates-io']
# rowan = { path = "../rowan" }
README.md (14 changes)

@@ -2,11 +2,8 @@
<img src="https://user-images.githubusercontent.com/1711539/72443316-5a79f280-37ae-11ea-858f-035209ece2dd.png" alt="rust-analyzer logo">
</p>

rust-analyzer is an **experimental** modular compiler frontend for the Rust
language. It is a part of a larger rls-2.0 effort to create excellent IDE
support for Rust. If you want to get involved, check the rls-2.0 working group:

https://rust-lang.zulipchat.com/#narrow/stream/185405-t-compiler.2Fwg-rls-2.2E0
rust-analyzer is an **experimental** modular compiler frontend for the Rust language.
It is a part of a larger rls-2.0 effort to create excellent IDE support for Rust.

Work on rust-analyzer is sponsored by

@@ -25,8 +22,8 @@ If you want to **contribute** to rust-analyzer or are just curious about how
things work under the hood, check the [./docs/dev](./docs/dev) folder.

If you want to **use** rust-analyzer's language server with your editor of
choice, check [the manual](https://rust-analyzer.github.io/manual.html) folder. It also contains some tips & tricks to help
you be more productive when using rust-analyzer.
choice, check [the manual](https://rust-analyzer.github.io/manual.html) folder.
It also contains some tips & tricks to help you be more productive when using rust-analyzer.

## Communication

@@ -40,8 +37,9 @@ https://rust-lang.zulipchat.com/#narrow/stream/185405-t-compiler.2Frls-2.2E0

## Quick Links

* API docs: https://rust-analyzer.github.io/rust-analyzer/ra_ide/
* Website: https://rust-analyzer.github.io/
* Metrics: https://rust-analyzer.github.io/metrics/
* API docs: https://rust-analyzer.github.io/rust-analyzer/ra_ide/

## License
crates/expect/Cargo.toml (new file, 14 changes)

@@ -0,0 +1,14 @@
[package]
name = "expect"
version = "0.1.0"
authors = ["rust-analyzer developers"]
edition = "2018"
license = "MIT OR Apache-2.0"

[lib]
doctest = false

[dependencies]
once_cell = "1"
difference = "2"
stdx = { path = "../stdx" }
crates/expect/src/lib.rs (new file, 356 changes)

@@ -0,0 +1,356 @@
//! Snapshot testing library, see
//! https://github.com/rust-analyzer/rust-analyzer/pull/5101
use std::{
collections::HashMap,
env, fmt, fs, mem,
ops::Range,
panic,
path::{Path, PathBuf},
sync::Mutex,
};

use difference::Changeset;
use once_cell::sync::Lazy;
use stdx::{lines_with_ends, trim_indent};

const HELP: &str = "
You can update all `expect![[]]` tests by running:

env UPDATE_EXPECT=1 cargo test

To update a single test, place the cursor on `expect` token and use `run` feature of rust-analyzer.
";

fn update_expect() -> bool {
env::var("UPDATE_EXPECT").is_ok()
}

/// expect![[r#"inline snapshot"#]]
#[macro_export]
macro_rules! expect {
[[$data:literal]] => {$crate::Expect {
position: $crate::Position {
file: file!(),
line: line!(),
column: column!(),
},
data: $data,
}};
[[]] => { $crate::expect![[""]] };
}

/// expect_file!["/crates/foo/test_data/bar.html"]
#[macro_export]
macro_rules! expect_file {
[$path:expr] => {$crate::ExpectFile {
path: std::path::PathBuf::from($path)
}};
}

#[derive(Debug)]
pub struct Expect {
pub position: Position,
pub data: &'static str,
}

#[derive(Debug)]
pub struct ExpectFile {
pub path: PathBuf,
}

#[derive(Debug)]
pub struct Position {
pub file: &'static str,
pub line: u32,
pub column: u32,
}

impl fmt::Display for Position {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}:{}:{}", self.file, self.line, self.column)
}
}

impl Expect {
pub fn assert_eq(&self, actual: &str) {
let trimmed = self.trimmed();
if &trimmed == actual {
return;
}
Runtime::fail_expect(self, &trimmed, actual);
}
pub fn assert_debug_eq(&self, actual: &impl fmt::Debug) {
let actual = format!("{:#?}\n", actual);
self.assert_eq(&actual)
}

fn trimmed(&self) -> String {
if !self.data.contains('\n') {
return self.data.to_string();
}
trim_indent(self.data)
}

fn locate(&self, file: &str) -> Location {
let mut target_line = None;
let mut line_start = 0;
for (i, line) in lines_with_ends(file).enumerate() {
if i == self.position.line as usize - 1 {
let pat = "expect![[";
let offset = line.find(pat).unwrap();
let literal_start = line_start + offset + pat.len();
let indent = line.chars().take_while(|&it| it == ' ').count();
target_line = Some((literal_start, indent));
break;
}
line_start += line.len();
}
let (literal_start, line_indent) = target_line.unwrap();
let literal_length =
file[literal_start..].find("]]").expect("Couldn't find matching `]]` for `expect![[`.");
let literal_range = literal_start..literal_start + literal_length;
Location { line_indent, literal_range }
}
}

impl ExpectFile {
pub fn assert_eq(&self, actual: &str) {
let expected = self.read();
if actual == expected {
return;
}
Runtime::fail_file(self, &expected, actual);
}
pub fn assert_debug_eq(&self, actual: &impl fmt::Debug) {
let actual = format!("{:#?}\n", actual);
self.assert_eq(&actual)
}
fn read(&self) -> String {
fs::read_to_string(self.abs_path()).unwrap_or_default().replace("\r\n", "\n")
}
fn write(&self, contents: &str) {
fs::write(self.abs_path(), contents).unwrap()
}
fn abs_path(&self) -> PathBuf {
WORKSPACE_ROOT.join(&self.path)
}
}

#[derive(Default)]
struct Runtime {
help_printed: bool,
per_file: HashMap<&'static str, FileRuntime>,
}
static RT: Lazy<Mutex<Runtime>> = Lazy::new(Default::default);

impl Runtime {
fn fail_expect(expect: &Expect, expected: &str, actual: &str) {
let mut rt = RT.lock().unwrap_or_else(|poisoned| poisoned.into_inner());
if update_expect() {
println!("\x1b[1m\x1b[92mupdating\x1b[0m: {}", expect.position);
rt.per_file
.entry(expect.position.file)
.or_insert_with(|| FileRuntime::new(expect))
.update(expect, actual);
return;
}
rt.panic(expect.position.to_string(), expected, actual);
}

fn fail_file(expect: &ExpectFile, expected: &str, actual: &str) {
let mut rt = RT.lock().unwrap_or_else(|poisoned| poisoned.into_inner());
if update_expect() {
println!("\x1b[1m\x1b[92mupdating\x1b[0m: {}", expect.path.display());
expect.write(actual);
return;
}
rt.panic(expect.path.display().to_string(), expected, actual);
}

fn panic(&mut self, position: String, expected: &str, actual: &str) {
let print_help = !mem::replace(&mut self.help_printed, true);
let help = if print_help { HELP } else { "" };

let diff = Changeset::new(actual, expected, "\n");

println!(
"\n
\x1b[1m\x1b[91merror\x1b[97m: expect test failed\x1b[0m
\x1b[1m\x1b[34m-->\x1b[0m {}
{}
\x1b[1mExpect\x1b[0m:
----
{}
----

\x1b[1mActual\x1b[0m:
----
{}
----

\x1b[1mDiff\x1b[0m:
----
{}
----
",
position, help, expected, actual, diff
);
// Use resume_unwind instead of panic!() to prevent a backtrace, which is unnecessary noise.
panic::resume_unwind(Box::new(()));
}
}

struct FileRuntime {
path: PathBuf,
original_text: String,
patchwork: Patchwork,
}

impl FileRuntime {
fn new(expect: &Expect) -> FileRuntime {
let path = WORKSPACE_ROOT.join(expect.position.file);
let original_text = fs::read_to_string(&path).unwrap();
let patchwork = Patchwork::new(original_text.clone());
FileRuntime { path, original_text, patchwork }
}
fn update(&mut self, expect: &Expect, actual: &str) {
let loc = expect.locate(&self.original_text);
let patch = format_patch(loc.line_indent.clone(), actual);
self.patchwork.patch(loc.literal_range, &patch);
fs::write(&self.path, &self.patchwork.text).unwrap()
}
}

#[derive(Debug)]
struct Location {
line_indent: usize,
literal_range: Range<usize>,
}

#[derive(Debug)]
struct Patchwork {
text: String,
indels: Vec<(Range<usize>, usize)>,
}

impl Patchwork {
fn new(text: String) -> Patchwork {
Patchwork { text, indels: Vec::new() }
}
fn patch(&mut self, mut range: Range<usize>, patch: &str) {
self.indels.push((range.clone(), patch.len()));
self.indels.sort_by_key(|(delete, _insert)| delete.start);

let (delete, insert) = self
.indels
.iter()
.take_while(|(delete, _)| delete.start < range.start)
.map(|(delete, insert)| (delete.end - delete.start, insert))
.fold((0usize, 0usize), |(x1, y1), (x2, y2)| (x1 + x2, y1 + y2));

for pos in &mut [&mut range.start, &mut range.end] {
**pos -= delete;
**pos += insert;
}

self.text.replace_range(range, &patch);
}
}

fn format_patch(line_indent: usize, patch: &str) -> String {
let mut max_hashes = 0;
let mut cur_hashes = 0;
for byte in patch.bytes() {
if byte != b'#' {
cur_hashes = 0;
continue;
}
cur_hashes += 1;
max_hashes = max_hashes.max(cur_hashes);
}
let hashes = &"#".repeat(max_hashes + 1);
let indent = &" ".repeat(line_indent);
let is_multiline = patch.contains('\n');

let mut buf = String::new();
buf.push('r');
buf.push_str(hashes);
buf.push('"');
if is_multiline {
buf.push('\n');
}
let mut final_newline = false;
for line in lines_with_ends(patch) {
if is_multiline && !line.trim().is_empty() {
buf.push_str(indent);
buf.push_str(" ");
}
buf.push_str(line);
final_newline = line.ends_with('\n');
}
if final_newline {
buf.push_str(indent);
}
buf.push('"');
buf.push_str(hashes);
buf
}

static WORKSPACE_ROOT: Lazy<PathBuf> = Lazy::new(|| {
let my_manifest =
env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| env!("CARGO_MANIFEST_DIR").to_owned());
// Heuristic, see https://github.com/rust-lang/cargo/issues/3946
Path::new(&my_manifest)
.ancestors()
.filter(|it| it.join("Cargo.toml").exists())
.last()
.unwrap()
.to_path_buf()
});

#[cfg(test)]
mod tests {
use super::*;

#[test]
fn test_format_patch() {
let patch = format_patch(0, "hello\nworld\n");
expect![[r##"
r#"
hello
world
"#"##]]
.assert_eq(&patch);

let patch = format_patch(4, "single line");
expect![[r##"r#"single line"#"##]].assert_eq(&patch);
}

#[test]
fn test_patchwork() {
let mut patchwork = Patchwork::new("one two three".to_string());
patchwork.patch(4..7, "zwei");
patchwork.patch(0..3, "один");
patchwork.patch(8..13, "3");
expect![[r#"
Patchwork {
text: "один zwei 3",
indels: [
(
0..3,
8,
),
(
4..7,
4,
),
(
8..13,
1,
),
],
}
"#]]
.assert_debug_eq(&patchwork);
}
}
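Usage note (not part of the diff): the snapshot workflow above is driven by the `expect!` macro together with the `UPDATE_EXPECT` environment variable. A minimal sketch of a test written against this crate, assuming `expect` is declared as a dev-dependency, could look like this:

    use expect::expect;

    #[test]
    fn formats_sum() {
        // The literal inside expect![[...]] is the stored snapshot.
        let actual = format!("{:?}", 2 + 2);
        expect![["4"]].assert_eq(&actual);
        // On mismatch this prints the Expect/Actual/Diff report shown above and panics;
        // running `env UPDATE_EXPECT=1 cargo test` rewrites the literal in place instead.
    }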
@@ -3,6 +3,7 @@ edition = "2018"
name = "flycheck"
version = "0.1.0"
authors = ["rust-analyzer developers"]
license = "MIT OR Apache-2.0"

[lib]
doctest = false

@@ -10,7 +11,7 @@ doctest = false
[dependencies]
crossbeam-channel = "0.4.0"
log = "0.4.8"
cargo_metadata = "0.10.0"
cargo_metadata = "0.11.1"
serde_json = "1.0.48"
jod-thread = "0.1.1"
ra_toolchain = { path = "../ra_toolchain" }
@@ -14,14 +14,17 @@ use std::{
use crossbeam_channel::{never, select, unbounded, Receiver, Sender};

pub use cargo_metadata::diagnostic::{
Applicability, Diagnostic, DiagnosticLevel, DiagnosticSpan, DiagnosticSpanMacroExpansion,
Applicability, Diagnostic, DiagnosticCode, DiagnosticLevel, DiagnosticSpan,
DiagnosticSpanMacroExpansion,
};

#[derive(Clone, Debug, PartialEq, Eq)]
pub enum FlycheckConfig {
CargoCommand {
command: String,
target_triple: Option<String>,
all_targets: bool,
no_default_features: bool,
all_features: bool,
features: Vec<String>,
extra_args: Vec<String>,

@@ -132,6 +135,7 @@ impl FlycheckActor {
self.cancel_check_process();

let mut command = self.check_command();
log::info!("restart flycheck {:?}", command);
command.stdout(Stdio::piped()).stderr(Stdio::null()).stdin(Stdio::null());
if let Ok(child) = command.spawn().map(JodChild) {
self.cargo_handle = Some(CargoHandle::spawn(child));

@@ -176,6 +180,8 @@ impl FlycheckActor {
let mut cmd = match &self.config {
FlycheckConfig::CargoCommand {
command,
target_triple,
no_default_features,
all_targets,
all_features,
extra_args,

@@ -185,15 +191,24 @@ impl FlycheckActor {
cmd.arg(command);
cmd.args(&["--workspace", "--message-format=json", "--manifest-path"])
.arg(self.workspace_root.join("Cargo.toml"));

if let Some(target) = target_triple {
cmd.args(&["--target", target.as_str()]);
}
if *all_targets {
cmd.arg("--all-targets");
}
if *all_features {
cmd.arg("--all-features");
} else if !features.is_empty() {
} else {
if *no_default_features {
cmd.arg("--no-default-features");
}
if !features.is_empty() {
cmd.arg("--features");
cmd.arg(features.join(" "));
}
}
cmd.args(extra_args);
cmd
}
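For illustration (not part of the diff): with the feature handling above, a `CargoCommand` config with `command: "check"`, `no_default_features: true` and `features: ["foo"]` now assembles roughly `cargo check --workspace --message-format=json --manifest-path <workspace>/Cargo.toml --no-default-features --features foo`, whereas `all_features: true` short-circuits the whole else-branch and emits `--all-features` instead.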
@@ -3,6 +3,7 @@ name = "paths"
version = "0.1.0"
authors = ["rust-analyzer developers"]
edition = "2018"
license = "MIT OR Apache-2.0"

[lib]
doctest = false

@@ -3,6 +3,7 @@ edition = "2018"
name = "ra_arena"
version = "0.1.0"
authors = ["rust-analyzer developers"]
license = "MIT OR Apache-2.0"

[lib]
doctest = false

@@ -3,6 +3,7 @@ edition = "2018"
name = "ra_assists"
version = "0.1.0"
authors = ["rust-analyzer developers"]
license = "MIT OR Apache-2.0"

[lib]
doctest = false
@@ -4,9 +4,12 @@
//! module, and we use to statically check that we only produce snippet
//! assists if we are allowed to.

use crate::AssistKind;

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct AssistConfig {
pub snippet_cap: Option<SnippetCap>,
pub allowed: Option<Vec<AssistKind>>,
}

impl AssistConfig {

@@ -22,6 +25,6 @@ pub struct SnippetCap {

impl Default for AssistConfig {
fn default() -> Self {
AssistConfig { snippet_cap: Some(SnippetCap { _private: () }) }
AssistConfig { snippet_cap: Some(SnippetCap { _private: () }), allowed: None }
}
}
@@ -19,7 +19,7 @@ use ra_text_edit::TextEditBuilder;

use crate::{
assist_config::{AssistConfig, SnippetCap},
Assist, AssistId, GroupLabel, ResolvedAssist,
Assist, AssistId, AssistKind, GroupLabel, ResolvedAssist,
};

/// `AssistContext` allows to apply an assist or check if it could be applied.

@@ -55,7 +55,6 @@ use crate::{
pub(crate) struct AssistContext<'a> {
pub(crate) config: &'a AssistConfig,
pub(crate) sema: Semantics<'a, RootDatabase>,
pub(crate) db: &'a RootDatabase,
pub(crate) frange: FileRange,
source_file: SourceFile,
}

@@ -67,8 +66,11 @@ impl<'a> AssistContext<'a> {
frange: FileRange,
) -> AssistContext<'a> {
let source_file = sema.parse(frange.file_id);
let db = sema.db;
AssistContext { config, sema, db, frange, source_file }
AssistContext { config, sema, frange, source_file }
}

pub(crate) fn db(&self) -> &RootDatabase {
self.sema.db
}

// NB, this ignores active selection.

@@ -101,14 +103,26 @@ pub(crate) struct Assists {
resolve: bool,
file: FileId,
buf: Vec<(Assist, Option<SourceChange>)>,
allowed: Option<Vec<AssistKind>>,
}

impl Assists {
pub(crate) fn new_resolved(ctx: &AssistContext) -> Assists {
Assists { resolve: true, file: ctx.frange.file_id, buf: Vec::new() }
Assists {
resolve: true,
file: ctx.frange.file_id,
buf: Vec::new(),
allowed: ctx.config.allowed.clone(),
}
}

pub(crate) fn new_unresolved(ctx: &AssistContext) -> Assists {
Assists { resolve: false, file: ctx.frange.file_id, buf: Vec::new() }
Assists {
resolve: false,
file: ctx.frange.file_id,
buf: Vec::new(),
allowed: ctx.config.allowed.clone(),
}
}

pub(crate) fn finish_unresolved(self) -> Vec<Assist> {

@@ -137,9 +151,13 @@ impl Assists {
target: TextRange,
f: impl FnOnce(&mut AssistBuilder),
) -> Option<()> {
if !self.is_allowed(&id) {
return None;
}
let label = Assist::new(id, label.into(), None, target);
self.add_impl(label, f)
}

pub(crate) fn add_group(
&mut self,
group: &GroupLabel,

@@ -148,9 +166,14 @@ impl Assists {
target: TextRange,
f: impl FnOnce(&mut AssistBuilder),
) -> Option<()> {
if !self.is_allowed(&id) {
return None;
}

let label = Assist::new(id, label.into(), Some(group.clone()), target);
self.add_impl(label, f)
}

fn add_impl(&mut self, label: Assist, f: impl FnOnce(&mut AssistBuilder)) -> Option<()> {
let source_change = if self.resolve {
let mut builder = AssistBuilder::new(self.file);

@@ -168,13 +191,20 @@ impl Assists {
self.buf.sort_by_key(|(label, _edit)| label.target.len());
self.buf
}

fn is_allowed(&self, id: &AssistId) -> bool {
match &self.allowed {
Some(allowed) => allowed.iter().any(|kind| kind.contains(id.1)),
None => true,
}
}
}

pub(crate) struct AssistBuilder {
edit: TextEditBuilder,
file_id: FileId,
is_snippet: bool,
edits: Vec<SourceFileEdit>,
change: SourceChange,
}

impl AssistBuilder {

@@ -183,7 +213,7 @@ impl AssistBuilder {
edit: TextEditBuilder::default(),
file_id,
is_snippet: false,
edits: Vec::new(),
change: SourceChange::default(),
}
}

@@ -195,8 +225,8 @@ impl AssistBuilder {
let edit = mem::take(&mut self.edit).finish();
if !edit.is_empty() {
let new_edit = SourceFileEdit { file_id: self.file_id, edit };
assert!(!self.edits.iter().any(|it| it.file_id == new_edit.file_id));
self.edits.push(new_edit);
assert!(!self.change.source_file_edits.iter().any(|it| it.file_id == new_edit.file_id));
self.change.source_file_edits.push(new_edit);
}
}

@@ -263,10 +293,10 @@ impl AssistBuilder {

fn finish(mut self) -> SourceChange {
self.commit();
let mut res: SourceChange = mem::take(&mut self.edits).into();
let mut change = mem::take(&mut self.change);
if self.is_snippet {
res.is_snippet = true;
change.is_snippet = true;
}
res
change
}
}
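A brief note (not part of the diff): the new `allowed` field threads `AssistConfig::allowed` into `Assists`, so a client that sets, say, `allowed: Some(vec![AssistKind::QuickFix])` makes `add` and `add_group` return `None` early for any `AssistId` whose kind is not covered, since `is_allowed` checks `kind.contains(id.1)`; with `allowed: None` every assist is still offered.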
@@ -2,7 +2,6 @@
use rustc_hash::FxHashMap;

use hir::{HirDisplay, PathResolution, SemanticsScope};
use ra_ide_db::RootDatabase;
use ra_syntax::{
algo::SyntaxRewriter,
ast::{self, AstNode},

@@ -32,17 +31,17 @@ impl<'a> AstTransform<'a> for NullTransformer {
}

pub struct SubstituteTypeParams<'a> {
source_scope: &'a SemanticsScope<'a, RootDatabase>,
source_scope: &'a SemanticsScope<'a>,
substs: FxHashMap<hir::TypeParam, ast::TypeRef>,
previous: Box<dyn AstTransform<'a> + 'a>,
}

impl<'a> SubstituteTypeParams<'a> {
pub fn for_trait_impl(
source_scope: &'a SemanticsScope<'a, RootDatabase>,
source_scope: &'a SemanticsScope<'a>,
// FIXME: there's implicit invariant that `trait_` and `source_scope` match...
trait_: hir::Trait,
impl_def: ast::ImplDef,
impl_def: ast::Impl,
) -> SubstituteTypeParams<'a> {
let substs = get_syntactic_substs(impl_def).unwrap_or_default();
let generic_def: hir::GenericDef = trait_.into();

@@ -81,7 +80,7 @@ impl<'a> SubstituteTypeParams<'a> {

// FIXME: It would probably be nicer if we could get this via HIR (i.e. get the
// trait ref, and then go from the types in the substs back to the syntax)
fn get_syntactic_substs(impl_def: ast::ImplDef) -> Option<Vec<ast::TypeRef>> {
fn get_syntactic_substs(impl_def: ast::Impl) -> Option<Vec<ast::TypeRef>> {
let target_trait = impl_def.target_trait()?;
let path_type = match target_trait {
ast::TypeRef::PathType(path) => path,

@@ -126,16 +125,13 @@ impl<'a> AstTransform<'a> for SubstituteTypeParams<'a> {
}

pub struct QualifyPaths<'a> {
target_scope: &'a SemanticsScope<'a, RootDatabase>,
source_scope: &'a SemanticsScope<'a, RootDatabase>,
target_scope: &'a SemanticsScope<'a>,
source_scope: &'a SemanticsScope<'a>,
previous: Box<dyn AstTransform<'a> + 'a>,
}

impl<'a> QualifyPaths<'a> {
pub fn new(
target_scope: &'a SemanticsScope<'a, RootDatabase>,
source_scope: &'a SemanticsScope<'a, RootDatabase>,
) -> Self {
pub fn new(target_scope: &'a SemanticsScope<'a>, source_scope: &'a SemanticsScope<'a>) -> Self {
Self { target_scope, source_scope, previous: Box::new(NullTransformer) }
}

@@ -156,7 +152,7 @@ impl<'a> QualifyPaths<'a> {
let resolution = self.source_scope.resolve_hir_path(&hir_path?)?;
match resolution {
PathResolution::Def(def) => {
let found_path = from.find_use_path(self.source_scope.db, def)?;
let found_path = from.find_use_path(self.source_scope.db.upcast(), def)?;
let mut path = path_to_ast(found_path);

let type_args = p
@@ -8,7 +8,7 @@ use stdx::SepBy;

use crate::{
assist_context::{AssistContext, Assists},
AssistId,
AssistId, AssistKind,
};

// Assist: add_custom_impl

@@ -29,8 +29,8 @@ use crate::{
// }
// ```
pub(crate) fn add_custom_impl(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
let input = ctx.find_node_at_offset::<ast::AttrInput>()?;
let attr = input.syntax().parent().and_then(ast::Attr::cast)?;
let attr = ctx.find_node_at_offset::<ast::Attr>()?;
let input = attr.token_tree()?;

let attr_name = attr
.syntax()

@@ -52,7 +52,7 @@ pub(crate) fn add_custom_impl(acc: &mut Assists, ctx: &AssistContext) -> Option<
format!("Add custom impl `{}` for `{}`", trait_token.text().as_str(), annotated_name);

let target = attr.syntax().text_range();
acc.add(AssistId("add_custom_impl"), label, target, |builder| {
acc.add(AssistId("add_custom_impl", AssistKind::Refactor), label, target, |builder| {
let new_attr_input = input
.syntax()
.descendants_with_tokens()
@@ -1,10 +1,10 @@
use hir::HirDisplay;
use ra_syntax::{
ast::{self, AstNode, LetStmt, NameOwner, TypeAscriptionOwner},
ast::{self, AstNode, LetStmt, NameOwner},
TextRange,
};

use crate::{AssistContext, AssistId, Assists};
use crate::{AssistContext, AssistId, AssistKind, Assists};

// Assist: add_explicit_type
//

@@ -22,11 +22,11 @@ use crate::{AssistContext, AssistId, Assists};
// }
// ```
pub(crate) fn add_explicit_type(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
let stmt = ctx.find_node_at_offset::<LetStmt>()?;
let module = ctx.sema.scope(stmt.syntax()).module()?;
let expr = stmt.initializer()?;
let let_stmt = ctx.find_node_at_offset::<LetStmt>()?;
let module = ctx.sema.scope(let_stmt.syntax()).module()?;
let expr = let_stmt.initializer()?;
// Must be a binding
let pat = match stmt.pat()? {
let pat = match let_stmt.pat()? {
ast::Pat::BindPat(bind_pat) => bind_pat,
_ => return None,
};

@@ -34,8 +34,8 @@ pub(crate) fn add_explicit_type(acc: &mut Assists, ctx: &AssistContext) -> Optio
// The binding must have a name
let name = pat.name()?;
let name_range = name.syntax().text_range();
let stmt_range = stmt.syntax().text_range();
let eq_range = stmt.eq_token()?.text_range();
let stmt_range = let_stmt.syntax().text_range();
let eq_range = let_stmt.eq_token()?.text_range();
// Assist should only be applicable if cursor is between 'let' and '='
let let_range = TextRange::new(stmt_range.start(), eq_range.start());
let cursor_in_range = let_range.contains_range(ctx.frange.range);

@@ -44,7 +44,7 @@ pub(crate) fn add_explicit_type(acc: &mut Assists, ctx: &AssistContext) -> Optio
}
// Assist not applicable if the type has already been specified
// and it has no placeholders
let ascribed_ty = stmt.ascribed_type();
let ascribed_ty = let_stmt.ty();
if let Some(ty) = &ascribed_ty {
if ty.syntax().descendants().find_map(ast::PlaceholderType::cast).is_none() {
return None;

@@ -57,9 +57,9 @@ pub(crate) fn add_explicit_type(acc: &mut Assists, ctx: &AssistContext) -> Optio
return None;
}

let inferred_type = ty.display_source_code(ctx.db, module.into()).ok()?;
let inferred_type = ty.display_source_code(ctx.db(), module.into()).ok()?;
acc.add(
AssistId("add_explicit_type"),
AssistId("add_explicit_type", AssistKind::RefactorRewrite),
format!("Insert explicit type `{}`", inferred_type),
pat_range,
|builder| match ascribed_ty {
@@ -1,98 +0,0 @@
use ra_syntax::ast::{self, AstNode, NameOwner, TypeParamsOwner};
use stdx::{format_to, SepBy};

use crate::{AssistContext, AssistId, Assists};

// Assist: add_impl
//
// Adds a new inherent impl for a type.
//
// ```
// struct Ctx<T: Clone> {
// data: T,<|>
// }
// ```
// ->
// ```
// struct Ctx<T: Clone> {
// data: T,
// }
//
// impl<T: Clone> Ctx<T> {
// $0
// }
// ```
pub(crate) fn add_impl(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
let nominal = ctx.find_node_at_offset::<ast::NominalDef>()?;
let name = nominal.name()?;
let target = nominal.syntax().text_range();
acc.add(AssistId("add_impl"), format!("Implement {}", name.text().as_str()), target, |edit| {
let type_params = nominal.type_param_list();
let start_offset = nominal.syntax().text_range().end();
let mut buf = String::new();
buf.push_str("\n\nimpl");
if let Some(type_params) = &type_params {
format_to!(buf, "{}", type_params.syntax());
}
buf.push_str(" ");
buf.push_str(name.text().as_str());
if let Some(type_params) = type_params {
let lifetime_params = type_params
.lifetime_params()
.filter_map(|it| it.lifetime_token())
.map(|it| it.text().clone());
let type_params =
type_params.type_params().filter_map(|it| it.name()).map(|it| it.text().clone());

let generic_params = lifetime_params.chain(type_params).sep_by(", ");
format_to!(buf, "<{}>", generic_params)
}
match ctx.config.snippet_cap {
Some(cap) => {
buf.push_str(" {\n $0\n}");
edit.insert_snippet(cap, start_offset, buf);
}
None => {
buf.push_str(" {\n}");
edit.insert(start_offset, buf);
}
}
})
}

#[cfg(test)]
mod tests {
use crate::tests::{check_assist, check_assist_target};

use super::*;

#[test]
fn test_add_impl() {
check_assist(add_impl, "struct Foo {<|>}\n", "struct Foo {}\n\nimpl Foo {\n $0\n}\n");
check_assist(
add_impl,
"struct Foo<T: Clone> {<|>}",
"struct Foo<T: Clone> {}\n\nimpl<T: Clone> Foo<T> {\n $0\n}",
);
check_assist(
add_impl,
"struct Foo<'a, T: Foo<'a>> {<|>}",
"struct Foo<'a, T: Foo<'a>> {}\n\nimpl<'a, T: Foo<'a>> Foo<'a, T> {\n $0\n}",
);
}

#[test]
fn add_impl_target() {
check_assist_target(
add_impl,
"
struct SomeThingIrrelevant;
/// Has a lifetime parameter
struct Foo<'a, T: Foo<'a>> {<|>}
struct EvenMoreIrrelevant;
",
"/// Has a lifetime parameter
struct Foo<'a, T: Foo<'a>> {}",
);
}
}
@@ -12,7 +12,7 @@ use crate::{
assist_context::{AssistContext, Assists},
ast_transform::{self, AstTransform, QualifyPaths, SubstituteTypeParams},
utils::{get_missing_assoc_items, render_snippet, resolve_target_trait, Cursor},
AssistId,
AssistId, AssistKind,
};

#[derive(PartialEq)]

@@ -111,16 +111,17 @@ fn add_missing_impl_members_inner(
label: &'static str,
) -> Option<()> {
let _p = ra_prof::profile("add_missing_impl_members_inner");
let impl_def = ctx.find_node_at_offset::<ast::ImplDef>()?;
let impl_item_list = impl_def.item_list()?;
let impl_def = ctx.find_node_at_offset::<ast::Impl>()?;
let impl_item_list = impl_def.assoc_item_list()?;

let trait_ = resolve_target_trait(&ctx.sema, &impl_def)?;

let def_name = |item: &ast::AssocItem| -> Option<SmolStr> {
match item {
ast::AssocItem::FnDef(def) => def.name(),
ast::AssocItem::TypeAliasDef(def) => def.name(),
ast::AssocItem::ConstDef(def) => def.name(),
ast::AssocItem::Fn(def) => def.name(),
ast::AssocItem::TypeAlias(def) => def.name(),
ast::AssocItem::Const(def) => def.name(),
ast::AssocItem::MacroCall(_) => None,
}
.map(|it| it.text().clone())
};

@@ -128,13 +129,13 @@ fn add_missing_impl_members_inner(
let missing_items = get_missing_assoc_items(&ctx.sema, &impl_def)
.iter()
.map(|i| match i {
hir::AssocItem::Function(i) => ast::AssocItem::FnDef(i.source(ctx.db).value),
hir::AssocItem::TypeAlias(i) => ast::AssocItem::TypeAliasDef(i.source(ctx.db).value),
hir::AssocItem::Const(i) => ast::AssocItem::ConstDef(i.source(ctx.db).value),
hir::AssocItem::Function(i) => ast::AssocItem::Fn(i.source(ctx.db()).value),
hir::AssocItem::TypeAlias(i) => ast::AssocItem::TypeAlias(i.source(ctx.db()).value),
hir::AssocItem::Const(i) => ast::AssocItem::Const(i.source(ctx.db()).value),
})
.filter(|t| def_name(&t).is_some())
.filter(|t| match t {
ast::AssocItem::FnDef(def) => match mode {
ast::AssocItem::Fn(def) => match mode {
AddMissingImplMembersMode::DefaultMethodsOnly => def.body().is_some(),
AddMissingImplMembersMode::NoDefaultMethods => def.body().is_none(),
},

@@ -147,7 +148,7 @@ fn add_missing_impl_members_inner(
}

let target = impl_def.syntax().text_range();
acc.add(AssistId(assist_id), label, target, |builder| {
acc.add(AssistId(assist_id, AssistKind::QuickFix), label, target, |builder| {
let n_existing_items = impl_item_list.assoc_items().count();
let source_scope = ctx.sema.scope_for_def(trait_);
let target_scope = ctx.sema.scope(impl_item_list.syntax());

@@ -157,7 +158,8 @@ fn add_missing_impl_members_inner(
.into_iter()
.map(|it| ast_transform::apply(&*ast_transform, it))
.map(|it| match it {
ast::AssocItem::FnDef(def) => ast::AssocItem::FnDef(add_body(def)),
ast::AssocItem::Fn(def) => ast::AssocItem::Fn(add_body(def)),
ast::AssocItem::TypeAlias(def) => ast::AssocItem::TypeAlias(def.remove_bounds()),
_ => it,
})
.map(|it| edit::remove_attrs_and_docs(&it));

@@ -170,7 +172,7 @@ fn add_missing_impl_members_inner(
Some(cap) => {
let mut cursor = Cursor::Before(first_new_item.syntax());
let placeholder;
if let ast::AssocItem::FnDef(func) = &first_new_item {
if let ast::AssocItem::Fn(func) = &first_new_item {
if let Some(m) = func.syntax().descendants().find_map(ast::MacroCall::cast) {
if m.syntax().text() == "todo!()" {
placeholder = m;

@@ -188,7 +190,7 @@ fn add_missing_impl_members_inner(
})
}

fn add_body(fn_def: ast::FnDef) -> ast::FnDef {
fn add_body(fn_def: ast::Fn) -> ast::Fn {
if fn_def.body().is_some() {
return fn_def;
}

@@ -681,6 +683,28 @@ impl Foo<T> for S<T> {
fn bar(&self, this: &T, that: &Self) {
${0:todo!()}
}
}"#,
)
}

#[test]
fn test_assoc_type_bounds_are_removed() {
check_assist(
add_missing_impl_members,
r#"
trait Tr {
type Ty: Copy + 'static;
}

impl Tr for ()<|> {
}"#,
r#"
trait Tr {
type Ty: Copy + 'static;
}

impl Tr for () {
$0type Ty;
}"#,
)
}
@@ -4,7 +4,7 @@ use test_utils::mark;

use crate::{
assist_context::{AssistContext, Assists},
AssistId,
AssistId, AssistKind,
};

// Assist: add_turbo_fish

@@ -25,7 +25,14 @@ use crate::{
// }
// ```
pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
let ident = ctx.find_token_at_offset(SyntaxKind::IDENT)?;
let ident = ctx.find_token_at_offset(SyntaxKind::IDENT).or_else(|| {
let arg_list = ctx.find_node_at_offset::<ast::ArgList>()?;
if arg_list.args().count() > 0 {
return None;
}
mark::hit!(add_turbo_fish_after_call);
arg_list.l_paren_token()?.prev_token().filter(|it| it.kind() == SyntaxKind::IDENT)
})?;
let next_token = ident.next_token()?;
if next_token.kind() == T![::] {
mark::hit!(add_turbo_fish_one_fish_is_enough);

@@ -45,12 +52,15 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext) -> Option<(
mark::hit!(add_turbo_fish_non_generic);
return None;
}
acc.add(AssistId("add_turbo_fish"), "Add `::<>`", ident.text_range(), |builder| {
match ctx.config.snippet_cap {
acc.add(
AssistId("add_turbo_fish", AssistKind::RefactorRewrite),
"Add `::<>`",
ident.text_range(),
|builder| match ctx.config.snippet_cap {
Some(cap) => builder.insert_snippet(cap, ident.text_range().end(), "::<${0:_}>"),
None => builder.insert(ident.text_range().end(), "::<_>"),
}
})
},
)
}

#[cfg(test)]

@@ -79,6 +89,26 @@ fn main() {
);
}

#[test]
fn add_turbo_fish_after_call() {
mark::check!(add_turbo_fish_after_call);
check_assist(
add_turbo_fish,
r#"
fn make<T>() -> T {}
fn main() {
make()<|>;
}
"#,
r#"
fn make<T>() -> T {}
fn main() {
make::<${0:_}>();
}
"#,
);
}

#[test]
fn add_turbo_fish_method() {
check_assist(
@@ -1,6 +1,6 @@
use ra_syntax::ast::{self, AstNode};

use crate::{utils::invert_boolean_expression, AssistContext, AssistId, Assists};
use crate::{utils::invert_boolean_expression, AssistContext, AssistId, AssistKind, Assists};

// Assist: apply_demorgan
//

@@ -39,11 +39,16 @@ pub(crate) fn apply_demorgan(acc: &mut Assists, ctx: &AssistContext) -> Option<(
let rhs_range = rhs.syntax().text_range();
let not_rhs = invert_boolean_expression(rhs);

acc.add(AssistId("apply_demorgan"), "Apply De Morgan's law", op_range, |edit| {
acc.add(
AssistId("apply_demorgan", AssistKind::RefactorRewrite),
"Apply De Morgan's law",
op_range,
|edit| {
edit.replace(op_range, opposite_op);
edit.replace(lhs_range, format!("!({}", not_lhs.syntax().text()));
edit.replace(rhs_range, format!("{})", not_rhs.syntax().text()));
})
},
)
}

// Return the opposite text for a given logical operator, if it makes sense
@@ -5,7 +5,7 @@ use hir::{
AsAssocItem, AssocItemContainer, ModPath, Module, ModuleDef, PathResolution, Semantics, Trait,
Type,
};
use ra_ide_db::{imports_locator::ImportsLocator, RootDatabase};
use ra_ide_db::{imports_locator, RootDatabase};
use ra_prof::profile;
use ra_syntax::{
ast::{self, AstNode},

@@ -13,7 +13,9 @@ use ra_syntax::{
};
use rustc_hash::FxHashSet;

use crate::{utils::insert_use_statement, AssistContext, AssistId, Assists, GroupLabel};
use crate::{
utils::insert_use_statement, AssistContext, AssistId, AssistKind, Assists, GroupLabel,
};

// Assist: auto_import
//

@@ -35,8 +37,8 @@ use crate::{utils::insert_use_statement, AssistContext, AssistId, Assists, Group
// # pub mod std { pub mod collections { pub struct HashMap { } } }
// ```
pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
let auto_import_assets = AutoImportAssets::new(&ctx)?;
let proposed_imports = auto_import_assets.search_for_imports(ctx.db);
let auto_import_assets = AutoImportAssets::new(ctx)?;
let proposed_imports = auto_import_assets.search_for_imports(ctx);
if proposed_imports.is_empty() {
return None;
}

@@ -46,7 +48,7 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext) -> Option<()>
for import in proposed_imports {
acc.add_group(
&group,
AssistId("auto_import"),
AssistId("auto_import", AssistKind::QuickFix),
format!("Import `{}`", &import),
range,
|builder| {

@@ -90,7 +92,7 @@ impl AutoImportAssets {

fn for_regular_path(path_under_caret: ast::Path, ctx: &AssistContext) -> Option<Self> {
let syntax_under_caret = path_under_caret.syntax().to_owned();
if syntax_under_caret.ancestors().find_map(ast::UseItem::cast).is_some() {
if syntax_under_caret.ancestors().find_map(ast::Use::cast).is_some() {
return None;
}

@@ -127,11 +129,11 @@ impl AutoImportAssets {
GroupLabel(name)
}

fn search_for_imports(&self, db: &RootDatabase) -> BTreeSet<ModPath> {
fn search_for_imports(&self, ctx: &AssistContext) -> BTreeSet<ModPath> {
let _p = profile("auto_import::search_for_imports");
let db = ctx.db();
let current_crate = self.module_with_name_to_import.krate();
ImportsLocator::new(db, current_crate)
.find_imports(&self.get_search_query())
imports_locator::find_imports(&ctx.sema, current_crate, &self.get_search_query())
.into_iter()
.filter_map(|candidate| match &self.import_candidate {
ImportCandidate::TraitAssocItem(assoc_item_type, _) => {

@@ -810,6 +812,146 @@ fn main() {
);
}

#[test]
fn trait_method_cross_crate() {
check_assist(
auto_import,
r"
//- /main.rs crate:main deps:dep
fn main() {
let test_struct = dep::test_mod::TestStruct {};
test_struct.test_meth<|>od()
}
//- /dep.rs crate:dep
pub mod test_mod {
pub trait TestTrait {
fn test_method(&self);
}
pub struct TestStruct {}
impl TestTrait for TestStruct {
fn test_method(&self) {}
}
}
",
r"
use dep::test_mod::TestTrait;

fn main() {
let test_struct = dep::test_mod::TestStruct {};
test_struct.test_method()
}
",
);
}

#[test]
fn assoc_fn_cross_crate() {
check_assist(
auto_import,
r"
//- /main.rs crate:main deps:dep
fn main() {
dep::test_mod::TestStruct::test_func<|>tion
}
//- /dep.rs crate:dep
pub mod test_mod {
pub trait TestTrait {
fn test_function();
}
pub struct TestStruct {}
impl TestTrait for TestStruct {
fn test_function() {}
}
}
",
r"
use dep::test_mod::TestTrait;

fn main() {
dep::test_mod::TestStruct::test_function
}
",
);
}

#[test]
fn assoc_const_cross_crate() {
check_assist(
auto_import,
r"
//- /main.rs crate:main deps:dep
fn main() {
dep::test_mod::TestStruct::CONST<|>
}
//- /dep.rs crate:dep
pub mod test_mod {
pub trait TestTrait {
const CONST: bool;
}
pub struct TestStruct {}
impl TestTrait for TestStruct {
const CONST: bool = true;
}
}
",
r"
use dep::test_mod::TestTrait;

fn main() {
dep::test_mod::TestStruct::CONST
}
",
);
}

#[test]
fn assoc_fn_as_method_cross_crate() {
check_assist_not_applicable(
auto_import,
r"
//- /main.rs crate:main deps:dep
fn main() {
let test_struct = dep::test_mod::TestStruct {};
test_struct.test_func<|>tion()
}
//- /dep.rs crate:dep
pub mod test_mod {
pub trait TestTrait {
fn test_function();
}
pub struct TestStruct {}
impl TestTrait for TestStruct {
fn test_function() {}
}
}
",
);
}

#[test]
fn private_trait_cross_crate() {
check_assist_not_applicable(
auto_import,
r"
//- /main.rs crate:main deps:dep
fn main() {
let test_struct = dep::test_mod::TestStruct {};
test_struct.test_meth<|>od()
}
//- /dep.rs crate:dep
pub mod test_mod {
trait TestTrait {
fn test_method(&self);
}
pub struct TestStruct {}
impl TestTrait for TestStruct {
fn test_method(&self) {}
}
}
",
);
}

#[test]
fn not_applicable_for_imported_trait_for_method() {
check_assist_not_applicable(
@@ -3,7 +3,7 @@ use ra_syntax::{
AstNode, SyntaxNode,
};

use crate::{AssistContext, AssistId, Assists};
use crate::{AssistContext, AssistId, AssistKind, Assists};
use test_utils::mark;

// Assist: change_return_type_to_result

@@ -20,9 +20,9 @@ use test_utils::mark;
pub(crate) fn change_return_type_to_result(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
let ret_type = ctx.find_node_at_offset::<ast::RetType>()?;
// FIXME: extend to lambdas as well
let fn_def = ret_type.syntax().parent().and_then(ast::FnDef::cast)?;
let fn_def = ret_type.syntax().parent().and_then(ast::Fn::cast)?;

let type_ref = &ret_type.type_ref()?;
let type_ref = &ret_type.ty()?;
let ret_type_str = type_ref.syntax().text().to_string();
let first_part_ret_type = ret_type_str.splitn(2, '<').next();
if let Some(ret_type_first_part) = first_part_ret_type {

@@ -35,8 +35,8 @@ pub(crate) fn change_return_type_to_result(acc: &mut Assists, ctx: &AssistContex
let block_expr = &fn_def.body()?;

acc.add(
AssistId("change_return_type_to_result"),
"Change return type to Result",
AssistId("change_return_type_to_result", AssistKind::RefactorRewrite),
"Wrap return type in Result",
type_ref.syntax().text_range(),
|builder| {
let mut tail_return_expr_collector = TailReturnCollector::new();

@@ -240,7 +240,7 @@ fn get_tail_expr_from_block(expr: &Expr) -> Option<Vec<NodeType>> {
Expr::ParenExpr(expr) => Some(vec![NodeType::Leaf(expr.syntax().clone())]),
Expr::PathExpr(expr) => Some(vec![NodeType::Leaf(expr.syntax().clone())]),
Expr::Label(expr) => Some(vec![NodeType::Leaf(expr.syntax().clone())]),
Expr::RecordLit(expr) => Some(vec![NodeType::Leaf(expr.syntax().clone())]),
Expr::RecordExpr(expr) => Some(vec![NodeType::Leaf(expr.syntax().clone())]),
Expr::IndexExpr(expr) => Some(vec![NodeType::Leaf(expr.syntax().clone())]),
Expr::MethodCallExpr(expr) => Some(vec![NodeType::Leaf(expr.syntax().clone())]),
Expr::AwaitExpr(expr) => Some(vec![NodeType::Leaf(expr.syntax().clone())]),
@ -1,12 +1,12 @@
|
|||
use ra_syntax::{
|
||||
ast::{self, NameOwner, VisibilityOwner},
|
||||
AstNode,
|
||||
SyntaxKind::{CONST_DEF, ENUM_DEF, FN_DEF, MODULE, STRUCT_DEF, TRAIT_DEF, VISIBILITY},
|
||||
SyntaxKind::{CONST, ENUM, FN, MODULE, STATIC, STRUCT, TRAIT, VISIBILITY},
|
||||
T,
|
||||
};
|
||||
use test_utils::mark;
|
||||
|
||||
use crate::{utils::vis_offset, AssistContext, AssistId, Assists};
|
||||
use crate::{utils::vis_offset, AssistContext, AssistId, AssistKind, Assists};
|
||||
|
||||
// Assist: change_visibility
|
||||
//
|
||||
|
@ -28,12 +28,15 @@ pub(crate) fn change_visibility(acc: &mut Assists, ctx: &AssistContext) -> Optio
|
|||
|
||||
fn add_vis(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
|
||||
let item_keyword = ctx.token_at_offset().find(|leaf| {
|
||||
matches!(leaf.kind(), T![const] | T![fn] | T![mod] | T![struct] | T![enum] | T![trait])
|
||||
matches!(
|
||||
leaf.kind(),
|
||||
T![const] | T![static] | T![fn] | T![mod] | T![struct] | T![enum] | T![trait]
|
||||
)
|
||||
});
|
||||
|
||||
let (offset, target) = if let Some(keyword) = item_keyword {
|
||||
let parent = keyword.parent();
|
||||
let def_kws = vec![CONST_DEF, FN_DEF, MODULE, STRUCT_DEF, ENUM_DEF, TRAIT_DEF];
|
||||
let def_kws = vec![CONST, STATIC, FN, MODULE, STRUCT, ENUM, TRAIT];
|
||||
// Parent is not a definition, can't add visibility
|
||||
if !def_kws.iter().any(|&def_kw| def_kw == parent.kind()) {
|
||||
return None;
|
||||
|
@ -44,7 +47,7 @@ fn add_vis(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
|
|||
}
|
||||
(vis_offset(&parent), keyword.text_range())
|
||||
} else if let Some(field_name) = ctx.find_node_at_offset::<ast::Name>() {
|
||||
let field = field_name.syntax().ancestors().find_map(ast::RecordFieldDef::cast)?;
|
||||
let field = field_name.syntax().ancestors().find_map(ast::RecordField::cast)?;
|
||||
if field.name()? != field_name {
|
||||
mark::hit!(change_visibility_field_false_positive);
|
||||
return None;
|
||||
|
@ -53,7 +56,7 @@ fn add_vis(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
|
|||
return None;
|
||||
}
|
||||
(vis_offset(field.syntax()), field_name.syntax().text_range())
|
||||
} else if let Some(field) = ctx.find_node_at_offset::<ast::TupleFieldDef>() {
|
||||
} else if let Some(field) = ctx.find_node_at_offset::<ast::TupleField>() {
|
||||
if field.visibility().is_some() {
|
||||
return None;
|
||||
}
|
||||
|
@ -62,16 +65,21 @@ fn add_vis(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
|
|||
return None;
|
||||
};
|
||||
|
||||
acc.add(AssistId("change_visibility"), "Change visibility to pub(crate)", target, |edit| {
|
||||
acc.add(
|
||||
AssistId("change_visibility", AssistKind::RefactorRewrite),
|
||||
"Change visibility to pub(crate)",
|
||||
target,
|
||||
|edit| {
|
||||
edit.insert(offset, "pub(crate) ");
|
||||
})
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
fn change_vis(acc: &mut Assists, vis: ast::Visibility) -> Option<()> {
|
||||
if vis.syntax().text() == "pub" {
|
||||
let target = vis.syntax().text_range();
|
||||
return acc.add(
|
||||
AssistId("change_visibility"),
|
||||
AssistId("change_visibility", AssistKind::RefactorRewrite),
|
||||
"Change Visibility to pub(crate)",
|
||||
target,
|
||||
|edit| {
|
||||
|
@ -82,7 +90,7 @@ fn change_vis(acc: &mut Assists, vis: ast::Visibility) -> Option<()> {
|
|||
if vis.syntax().text() == "pub(crate)" {
|
||||
let target = vis.syntax().text_range();
|
||||
return acc.add(
|
||||
AssistId("change_visibility"),
|
||||
AssistId("change_visibility", AssistKind::RefactorRewrite),
|
||||
"Change visibility to pub",
|
||||
target,
|
||||
|edit| {
|
||||
|
@ -146,6 +154,11 @@ mod tests {
|
|||
check_assist(change_visibility, "<|>const FOO = 3u8;", "pub(crate) const FOO = 3u8;");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn change_visibility_static() {
|
||||
check_assist(change_visibility, "<|>static FOO = 3u8;", "pub(crate) static FOO = 3u8;");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn change_visibility_handles_comment_attrs() {
|
||||
check_assist(
|
||||
|
|
|
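The change that repeats through every handler in this diff is the second argument to `AssistId`: each assist now declares an `AssistKind` (`QuickFix`, `Generate`, `RefactorExtract`, `RefactorInline`, and `RefactorRewrite` all appear below), and the `acc.add(...)` calls are reflowed into the multi-line form. The sketch below is a minimal, self-contained stand-in for the `ra_assists` types: `Assists`, `AssistId`, and the closure-based `add` here are toy versions written only to show the call shape, not the real crate API.

```rust
// Toy stand-ins for the ra_assists types touched in this diff; the real
// definitions live in crates/ra_assists and differ in detail.
#[allow(dead_code)] // the other kinds appear in other handlers of this diff
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum AssistKind {
    QuickFix,
    Generate,
    RefactorExtract,
    RefactorInline,
    RefactorRewrite,
}

// The id now carries its kind alongside the string identifier.
#[derive(Debug, Clone, Copy)]
struct AssistId(&'static str, AssistKind);

// A stub accumulator that just records what was registered.
#[derive(Default)]
struct Assists {
    registered: Vec<(String, String)>,
}

impl Assists {
    // Mirrors the call shape used throughout the diff:
    // acc.add(AssistId(..., AssistKind::...), label, target, |edit| { ... })
    fn add(
        &mut self,
        id: AssistId,
        label: impl Into<String>,
        _target: std::ops::Range<usize>,
        build: impl FnOnce(&mut String),
    ) -> Option<()> {
        let mut edit = String::new();
        build(&mut edit);
        self.registered.push((format!("{} ({:?})", id.0, id.1), label.into()));
        Some(())
    }
}

fn main() {
    let mut acc = Assists::default();
    let _ = acc.add(
        AssistId("change_visibility", AssistKind::RefactorRewrite),
        "Change visibility to pub(crate)",
        0..0,
        |edit| edit.push_str("pub(crate) "),
    );
    println!("{:?}", acc.registered);
}
```

The kind names mirror the LSP code-action categories (quick fix, generate, refactor.*), which is presumably why each assist now has to state one up front.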
@ -8,14 +8,14 @@ use ra_syntax::{
        make,
    },
    AstNode,
    SyntaxKind::{FN_DEF, LOOP_EXPR, L_CURLY, R_CURLY, WHILE_EXPR, WHITESPACE},
    SyntaxKind::{FN, LOOP_EXPR, L_CURLY, R_CURLY, WHILE_EXPR, WHITESPACE},
    SyntaxNode,
};

use crate::{
    assist_context::{AssistContext, Assists},
    utils::invert_boolean_expression,
    AssistId,
    AssistId, AssistKind,
};

// Assist: convert_to_guarded_return
@ -88,7 +88,7 @@ pub(crate) fn convert_to_guarded_return(acc: &mut Assists, ctx: &AssistContext)
|
|||
|
||||
let early_expression: ast::Expr = match parent_container.kind() {
|
||||
WHILE_EXPR | LOOP_EXPR => make::expr_continue(),
|
||||
FN_DEF => make::expr_return(),
|
||||
FN => make::expr_return(),
|
||||
_ => return None,
|
||||
};
|
||||
|
||||
|
@ -99,7 +99,11 @@ pub(crate) fn convert_to_guarded_return(acc: &mut Assists, ctx: &AssistContext)
|
|||
then_block.syntax().last_child_or_token().filter(|t| t.kind() == R_CURLY)?;
|
||||
|
||||
let target = if_expr.syntax().text_range();
|
||||
acc.add(AssistId("convert_to_guarded_return"), "Convert to guarded return", target, |edit| {
|
||||
acc.add(
|
||||
AssistId("convert_to_guarded_return", AssistKind::RefactorRewrite),
|
||||
"Convert to guarded return",
|
||||
target,
|
||||
|edit| {
|
||||
let if_indent_level = IndentLevel::from_node(&if_expr.syntax());
|
||||
let new_block = match if_let_pat {
|
||||
None => {
|
||||
|
@ -108,7 +112,8 @@ pub(crate) fn convert_to_guarded_return(acc: &mut Assists, ctx: &AssistContext)
|
|||
let then_branch =
|
||||
make::block_expr(once(make::expr_stmt(early_expression).into()), None);
|
||||
let cond = invert_boolean_expression(cond_expr);
|
||||
make::expr_if(make::condition(cond, None), then_branch).indent(if_indent_level)
|
||||
make::expr_if(make::condition(cond, None), then_branch)
|
||||
.indent(if_indent_level)
|
||||
};
|
||||
replace(new_expr.syntax(), &then_block, &parent_block, &if_expr)
|
||||
}
|
||||
|
@ -178,7 +183,8 @@ pub(crate) fn convert_to_guarded_return(acc: &mut Assists, ctx: &AssistContext)
|
|||
&mut then_statements,
|
||||
)
|
||||
}
|
||||
})
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
|
|
@ -10,7 +10,8 @@ use ra_syntax::{
|
|||
use rustc_hash::FxHashSet;
|
||||
|
||||
use crate::{
|
||||
assist_context::AssistBuilder, utils::insert_use_statement, AssistContext, AssistId, Assists,
|
||||
assist_context::AssistBuilder, utils::insert_use_statement, AssistContext, AssistId,
|
||||
AssistKind, Assists,
|
||||
};
|
||||
|
||||
// Assist: extract_struct_from_enum_variant
|
||||
|
@ -30,30 +31,30 @@ pub(crate) fn extract_struct_from_enum_variant(
|
|||
acc: &mut Assists,
|
||||
ctx: &AssistContext,
|
||||
) -> Option<()> {
|
||||
let variant = ctx.find_node_at_offset::<ast::EnumVariant>()?;
|
||||
let variant = ctx.find_node_at_offset::<ast::Variant>()?;
|
||||
let field_list = match variant.kind() {
|
||||
ast::StructKind::Tuple(field_list) => field_list,
|
||||
_ => return None,
|
||||
};
|
||||
let variant_name = variant.name()?.to_string();
|
||||
let variant_hir = ctx.sema.to_def(&variant)?;
|
||||
if existing_struct_def(ctx.db, &variant_name, &variant_hir) {
|
||||
if existing_struct_def(ctx.db(), &variant_name, &variant_hir) {
|
||||
return None;
|
||||
}
|
||||
let enum_ast = variant.parent_enum();
|
||||
let visibility = enum_ast.visibility();
|
||||
let enum_hir = ctx.sema.to_def(&enum_ast)?;
|
||||
let variant_hir_name = variant_hir.name(ctx.db);
|
||||
let variant_hir_name = variant_hir.name(ctx.db());
|
||||
let enum_module_def = ModuleDef::from(enum_hir);
|
||||
let current_module = enum_hir.module(ctx.db);
|
||||
let current_module = enum_hir.module(ctx.db());
|
||||
let target = variant.syntax().text_range();
|
||||
acc.add(
|
||||
AssistId("extract_struct_from_enum_variant"),
|
||||
AssistId("extract_struct_from_enum_variant", AssistKind::RefactorRewrite),
|
||||
"Extract struct from enum variant",
|
||||
target,
|
||||
|builder| {
|
||||
let definition = Definition::ModuleDef(ModuleDef::EnumVariant(variant_hir));
|
||||
let res = definition.find_usages(&ctx.db, None);
|
||||
let res = definition.find_usages(&ctx.sema, None);
|
||||
let start_offset = variant.parent_enum().syntax().text_range().start();
|
||||
let mut visited_modules_set = FxHashSet::default();
|
||||
visited_modules_set.insert(current_module);
|
||||
|
@ -101,7 +102,7 @@ fn insert_import(
|
|||
enum_module_def: &ModuleDef,
|
||||
variant_hir_name: &Name,
|
||||
) -> Option<()> {
|
||||
let db = ctx.db;
|
||||
let db = ctx.db();
|
||||
let mod_path = module.find_use_path(db, enum_module_def.clone());
|
||||
if let Some(mut mod_path) = mod_path {
|
||||
mod_path.segments.pop();
|
||||
|
|
|
@ -2,14 +2,13 @@ use ra_syntax::{
|
|||
ast::{self, AstNode},
|
||||
SyntaxKind::{
|
||||
BLOCK_EXPR, BREAK_EXPR, COMMENT, LAMBDA_EXPR, LOOP_EXPR, MATCH_ARM, PATH_EXPR, RETURN_EXPR,
|
||||
WHITESPACE,
|
||||
},
|
||||
SyntaxNode,
|
||||
};
|
||||
use stdx::format_to;
|
||||
use test_utils::mark;
|
||||
|
||||
use crate::{AssistContext, AssistId, Assists};
|
||||
use crate::{AssistContext, AssistId, AssistKind, Assists};
|
||||
|
||||
// Assist: extract_variable
|
||||
//
|
||||
|
@ -36,15 +35,17 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext) -> Option
|
|||
mark::hit!(extract_var_in_comment_is_not_applicable);
|
||||
return None;
|
||||
}
|
||||
let expr = node.ancestors().find_map(valid_target_expr)?;
|
||||
let (anchor_stmt, wrap_in_block) = anchor_stmt(expr.clone())?;
|
||||
let indent = anchor_stmt.prev_sibling_or_token()?.as_token()?.clone();
|
||||
if indent.kind() != WHITESPACE {
|
||||
return None;
|
||||
}
|
||||
let target = expr.syntax().text_range();
|
||||
acc.add(AssistId("extract_variable"), "Extract into variable", target, move |edit| {
|
||||
let field_shorthand = match expr.syntax().parent().and_then(ast::RecordField::cast) {
|
||||
let to_extract = node.ancestors().find_map(valid_target_expr)?;
|
||||
let anchor = Anchor::from(&to_extract)?;
|
||||
let indent = anchor.syntax().prev_sibling_or_token()?.as_token()?.clone();
|
||||
let target = to_extract.syntax().text_range();
|
||||
acc.add(
|
||||
AssistId("extract_variable", AssistKind::RefactorExtract),
|
||||
"Extract into variable",
|
||||
target,
|
||||
move |edit| {
|
||||
let field_shorthand =
|
||||
match to_extract.syntax().parent().and_then(ast::RecordExprField::cast) {
|
||||
Some(field) => field.name_ref(),
|
||||
None => None,
|
||||
};
|
||||
|
@ -56,32 +57,26 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext) -> Option
|
|||
None => "var_name".to_string(),
|
||||
};
|
||||
let expr_range = match &field_shorthand {
|
||||
Some(it) => it.syntax().text_range().cover(expr.syntax().text_range()),
|
||||
None => expr.syntax().text_range(),
|
||||
Some(it) => it.syntax().text_range().cover(to_extract.syntax().text_range()),
|
||||
None => to_extract.syntax().text_range(),
|
||||
};
|
||||
|
||||
if wrap_in_block {
|
||||
if let Anchor::WrapInBlock(_) = anchor {
|
||||
format_to!(buf, "{{ let {} = ", var_name);
|
||||
} else {
|
||||
format_to!(buf, "let {} = ", var_name);
|
||||
};
|
||||
format_to!(buf, "{}", expr.syntax());
|
||||
format_to!(buf, "{}", to_extract.syntax());
|
||||
|
||||
let full_stmt = ast::ExprStmt::cast(anchor_stmt.clone());
|
||||
let is_full_stmt = if let Some(expr_stmt) = &full_stmt {
|
||||
Some(expr.syntax().clone()) == expr_stmt.expr().map(|e| e.syntax().clone())
|
||||
} else {
|
||||
false
|
||||
};
|
||||
if is_full_stmt {
|
||||
if let Anchor::Replace(stmt) = anchor {
|
||||
mark::hit!(test_extract_var_expr_stmt);
|
||||
if full_stmt.unwrap().semicolon_token().is_none() {
|
||||
if stmt.semicolon_token().is_none() {
|
||||
buf.push_str(";");
|
||||
}
|
||||
match ctx.config.snippet_cap {
|
||||
Some(cap) => {
|
||||
let snip =
|
||||
buf.replace(&format!("let {}", var_name), &format!("let $0{}", var_name));
|
||||
let snip = buf
|
||||
.replace(&format!("let {}", var_name), &format!("let $0{}", var_name));
|
||||
edit.replace_snippet(cap, expr_range, snip)
|
||||
}
|
||||
None => edit.replace(expr_range, buf),
|
||||
|
@ -103,7 +98,7 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext) -> Option
|
|||
}
|
||||
|
||||
edit.replace(expr_range, var_name.clone());
|
||||
let offset = anchor_stmt.text_range().start();
|
||||
let offset = anchor.syntax().text_range().start();
|
||||
match ctx.config.snippet_cap {
|
||||
Some(cap) => {
|
||||
let snip =
|
||||
|
@ -113,10 +108,11 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext) -> Option
|
|||
None => edit.insert(offset, buf),
|
||||
}
|
||||
|
||||
if wrap_in_block {
|
||||
edit.insert(anchor_stmt.text_range().end(), " }");
|
||||
if let Anchor::WrapInBlock(_) = anchor {
|
||||
edit.insert(anchor.syntax().text_range().end(), " }");
|
||||
}
|
||||
})
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
/// Check whether the node is a valid expression which can be extracted to a variable.
|
||||
|
@ -133,34 +129,50 @@ fn valid_target_expr(node: SyntaxNode) -> Option<ast::Expr> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Returns the syntax node which will follow the freshly extractd var
|
||||
/// and a boolean indicating whether we have to wrap it within a { } block
|
||||
/// to produce correct code.
|
||||
/// It can be a statement, the last in a block expression or a wanna be block
|
||||
/// expression like a lambda or match arm.
|
||||
fn anchor_stmt(expr: ast::Expr) -> Option<(SyntaxNode, bool)> {
|
||||
expr.syntax().ancestors().find_map(|node| {
|
||||
if let Some(expr) = node.parent().and_then(ast::BlockExpr::cast).and_then(|it| it.expr()) {
|
||||
enum Anchor {
|
||||
Before(SyntaxNode),
|
||||
Replace(ast::ExprStmt),
|
||||
WrapInBlock(SyntaxNode),
|
||||
}
|
||||
|
||||
impl Anchor {
|
||||
fn from(to_extract: &ast::Expr) -> Option<Anchor> {
|
||||
to_extract.syntax().ancestors().find_map(|node| {
|
||||
if let Some(expr) =
|
||||
node.parent().and_then(ast::BlockExpr::cast).and_then(|it| it.expr())
|
||||
{
|
||||
if expr.syntax() == &node {
|
||||
mark::hit!(test_extract_var_last_expr);
|
||||
return Some((node, false));
|
||||
return Some(Anchor::Before(node));
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(parent) = node.parent() {
|
||||
if parent.kind() == MATCH_ARM || parent.kind() == LAMBDA_EXPR {
|
||||
return Some((node, true));
|
||||
return Some(Anchor::WrapInBlock(node));
|
||||
}
|
||||
}
|
||||
|
||||
if ast::Stmt::cast(node.clone()).is_some() {
|
||||
return Some((node, false));
|
||||
if let Some(stmt) = ast::Stmt::cast(node.clone()) {
|
||||
if let ast::Stmt::ExprStmt(stmt) = stmt {
|
||||
if stmt.expr().as_ref() == Some(to_extract) {
|
||||
return Some(Anchor::Replace(stmt));
|
||||
}
|
||||
}
|
||||
return Some(Anchor::Before(node));
|
||||
}
|
||||
|
||||
None
|
||||
})
|
||||
}
|
||||
|
||||
fn syntax(&self) -> &SyntaxNode {
|
||||
match self {
|
||||
Anchor::Before(it) | Anchor::WrapInBlock(it) => it,
|
||||
Anchor::Replace(stmt) => stmt.syntax(),
|
||||
}
|
||||
}
|
||||
}
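The `Anchor` enum above replaces the old `(SyntaxNode, bool)` pair with three explicit placement cases. As a rough illustration of what each case means for the generated text, here is a string-level sketch; the names (`AnchorKind`, `extract_to_var`) are invented for the example, and the real assist works on syntax trees and text ranges rather than strings.

```rust
// A string-level illustration of the three Anchor cases used by
// extract_variable in this diff. Real rust-analyzer edits operate on
// syntax nodes and text ranges, not on plain strings.
enum AnchorKind {
    // Insert the new `let` before an existing statement, keep the expression in place.
    Before,
    // The expression *is* the whole statement: the `let` replaces it.
    Replace { has_semicolon: bool },
    // Inside a match arm or closure body: wrap the new `let` in a block.
    WrapInBlock,
}

fn extract_to_var(stmt: &str, expr: &str, var: &str, anchor: AnchorKind) -> String {
    match anchor {
        // e.g. `foo(1 + 2);` becomes `let sum = 1 + 2;` followed by `foo(sum);`
        AnchorKind::Before => {
            format!("let {} = {};\n{}", var, expr, stmt.replacen(expr, var, 1))
        }
        // The expression statement itself becomes the `let`, gaining a
        // semicolon if the original had none.
        AnchorKind::Replace { has_semicolon } => {
            let semi = if has_semicolon { "" } else { ";" };
            format!("let {} = {}{}", var, expr, semi)
        }
        // Match arms and closures need a block so the `let` has somewhere to live.
        AnchorKind::WrapInBlock => {
            format!("{{ let {} = {}; {} }}", var, expr, var)
        }
    }
}

fn main() {
    println!("{}", extract_to_var("foo(1 + 2);", "1 + 2", "sum", AnchorKind::Before));
    println!("{}", extract_to_var("1 + 2", "1 + 2", "sum", AnchorKind::Replace { has_semicolon: false }));
    println!("{}", extract_to_var("1 + 2", "1 + 2", "sum", AnchorKind::WrapInBlock));
}
```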
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use test_utils::mark;
|
||||
|
|
|
@ -8,7 +8,7 @@ use test_utils::mark;
|
|||
|
||||
use crate::{
|
||||
utils::{render_snippet, Cursor, FamousDefs},
|
||||
AssistContext, AssistId, Assists,
|
||||
AssistContext, AssistId, AssistKind, Assists,
|
||||
};
|
||||
|
||||
// Assist: fill_match_arms
|
||||
|
@ -51,11 +51,11 @@ pub(crate) fn fill_match_arms(acc: &mut Assists, ctx: &AssistContext) -> Option<
|
|||
let module = ctx.sema.scope(expr.syntax()).module()?;
|
||||
|
||||
let missing_arms: Vec<MatchArm> = if let Some(enum_def) = resolve_enum_def(&ctx.sema, &expr) {
|
||||
let variants = enum_def.variants(ctx.db);
|
||||
let variants = enum_def.variants(ctx.db());
|
||||
|
||||
let mut variants = variants
|
||||
.into_iter()
|
||||
.filter_map(|variant| build_pat(ctx.db, module, variant))
|
||||
.filter_map(|variant| build_pat(ctx.db(), module, variant))
|
||||
.filter(|variant_pat| is_variant_missing(&mut arms, variant_pat))
|
||||
.map(|pat| make::match_arm(iter::once(pat), make::expr_empty_block()))
|
||||
.collect::<Vec<_>>();
|
||||
|
@ -84,11 +84,11 @@ pub(crate) fn fill_match_arms(acc: &mut Assists, ctx: &AssistContext) -> Option<
|
|||
// where each tuple represents a proposed match arm.
|
||||
enum_defs
|
||||
.into_iter()
|
||||
.map(|enum_def| enum_def.variants(ctx.db))
|
||||
.map(|enum_def| enum_def.variants(ctx.db()))
|
||||
.multi_cartesian_product()
|
||||
.map(|variants| {
|
||||
let patterns =
|
||||
variants.into_iter().filter_map(|variant| build_pat(ctx.db, module, variant));
|
||||
variants.into_iter().filter_map(|variant| build_pat(ctx.db(), module, variant));
|
||||
ast::Pat::from(make::tuple_pat(patterns))
|
||||
})
|
||||
.filter(|variant_pat| is_variant_missing(&mut arms, variant_pat))
|
||||
|
@ -103,7 +103,11 @@ pub(crate) fn fill_match_arms(acc: &mut Assists, ctx: &AssistContext) -> Option<
|
|||
}
|
||||
|
||||
let target = match_expr.syntax().text_range();
|
||||
acc.add(AssistId("fill_match_arms"), "Fill match arms", target, |builder| {
|
||||
acc.add(
|
||||
AssistId("fill_match_arms", AssistKind::QuickFix),
|
||||
"Fill match arms",
|
||||
target,
|
||||
|builder| {
|
||||
let new_arm_list = match_arm_list.remove_placeholder();
|
||||
let n_old_arms = new_arm_list.arms().count();
|
||||
let new_arm_list = new_arm_list.append_arms(missing_arms);
|
||||
|
@ -111,16 +115,25 @@ pub(crate) fn fill_match_arms(acc: &mut Assists, ctx: &AssistContext) -> Option<
|
|||
let old_range = match_arm_list.syntax().text_range();
|
||||
match (first_new_arm, ctx.config.snippet_cap) {
|
||||
(Some(first_new_arm), Some(cap)) => {
|
||||
let snippet = render_snippet(
|
||||
cap,
|
||||
new_arm_list.syntax(),
|
||||
Cursor::Before(first_new_arm.syntax()),
|
||||
);
|
||||
let extend_lifetime;
|
||||
let cursor = match first_new_arm
|
||||
.syntax()
|
||||
.descendants()
|
||||
.find_map(ast::PlaceholderPat::cast)
|
||||
{
|
||||
Some(it) => {
|
||||
extend_lifetime = it.syntax().clone();
|
||||
Cursor::Replace(&extend_lifetime)
|
||||
}
|
||||
None => Cursor::Before(first_new_arm.syntax()),
|
||||
};
|
||||
let snippet = render_snippet(cap, new_arm_list.syntax(), cursor);
|
||||
builder.replace_snippet(cap, old_range, snippet);
|
||||
}
|
||||
_ => builder.replace(old_range, new_arm_list.to_string()),
|
||||
}
|
||||
})
|
||||
},
|
||||
)
|
||||
}
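The interesting Rust detail in the hunk above is the `let extend_lifetime;` binding: it is declared uninitialized in the outer scope so that the node cloned inside the `Some` arm outlives the `match`, letting `Cursor::Replace` borrow it. A self-contained illustration of the same pattern follows, with a toy `Cursor` enum standing in for the one from `crate::utils`.

```rust
#[derive(Debug)]
enum Cursor<'a> {
    Replace(&'a String),
    Before(&'a String),
}

fn main() {
    let arm_text = String::from("$0Some(_) => {}");
    let placeholder: Option<String> = Some(String::from("_"));

    // Declared (but not initialized) in the outer scope, so a reference taken
    // in the `Some` arm below is still valid after the `match` ends.
    let extend_lifetime;
    let cursor = match placeholder {
        Some(it) => {
            extend_lifetime = it;
            Cursor::Replace(&extend_lifetime)
        }
        None => Cursor::Before(&arm_text),
    };

    println!("{:?}", cursor);
}
```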
|
||||
|
||||
fn is_variant_missing(existing_arms: &mut Vec<MatchArm>, var: &Pat) -> bool {
|
||||
|
@ -286,11 +299,7 @@ mod tests {
|
|||
check_assist(
|
||||
fill_match_arms,
|
||||
r#"
|
||||
enum A {
|
||||
As,
|
||||
Bs,
|
||||
Cs(Option<i32>),
|
||||
}
|
||||
enum A { As, Bs, Cs(Option<i32>) }
|
||||
fn main() {
|
||||
match A::As<|> {
|
||||
A::Cs(_) | A::Bs => {}
|
||||
|
@ -298,11 +307,7 @@ mod tests {
|
|||
}
|
||||
"#,
|
||||
r#"
|
||||
enum A {
|
||||
As,
|
||||
Bs,
|
||||
Cs(Option<i32>),
|
||||
}
|
||||
enum A { As, Bs, Cs(Option<i32>) }
|
||||
fn main() {
|
||||
match A::As {
|
||||
A::Cs(_) | A::Bs => {}
|
||||
|
@ -318,17 +323,8 @@ mod tests {
|
|||
check_assist(
|
||||
fill_match_arms,
|
||||
r#"
|
||||
enum A {
|
||||
As,
|
||||
Bs,
|
||||
Cs,
|
||||
Ds(String),
|
||||
Es(B),
|
||||
}
|
||||
enum B {
|
||||
Xs,
|
||||
Ys,
|
||||
}
|
||||
enum A { As, Bs, Cs, Ds(String), Es(B) }
|
||||
enum B { Xs, Ys }
|
||||
fn main() {
|
||||
match A::As<|> {
|
||||
A::Bs if 0 < 1 => {}
|
||||
|
@ -338,17 +334,8 @@ mod tests {
|
|||
}
|
||||
"#,
|
||||
r#"
|
||||
enum A {
|
||||
As,
|
||||
Bs,
|
||||
Cs,
|
||||
Ds(String),
|
||||
Es(B),
|
||||
}
|
||||
enum B {
|
||||
Xs,
|
||||
Ys,
|
||||
}
|
||||
enum A { As, Bs, Cs, Ds(String), Es(B) }
|
||||
enum B { Xs, Ys }
|
||||
fn main() {
|
||||
match A::As {
|
||||
A::Bs if 0 < 1 => {}
|
||||
|
@ -367,11 +354,7 @@ mod tests {
|
|||
check_assist(
|
||||
fill_match_arms,
|
||||
r#"
|
||||
enum A {
|
||||
As,
|
||||
Bs,
|
||||
Cs(Option<i32>),
|
||||
}
|
||||
enum A { As, Bs, Cs(Option<i32>) }
|
||||
fn main() {
|
||||
match A::As<|> {
|
||||
A::As(_) => {}
|
||||
|
@ -380,16 +363,12 @@ mod tests {
|
|||
}
|
||||
"#,
|
||||
r#"
|
||||
enum A {
|
||||
As,
|
||||
Bs,
|
||||
Cs(Option<i32>),
|
||||
}
|
||||
enum A { As, Bs, Cs(Option<i32>) }
|
||||
fn main() {
|
||||
match A::As {
|
||||
A::As(_) => {}
|
||||
a @ A::Bs(_) => {}
|
||||
$0A::Cs(_) => {}
|
||||
A::Cs(${0:_}) => {}
|
||||
}
|
||||
}
|
||||
"#,
|
||||
|
@ -401,13 +380,7 @@ mod tests {
|
|||
check_assist(
|
||||
fill_match_arms,
|
||||
r#"
|
||||
enum A {
|
||||
As,
|
||||
Bs,
|
||||
Cs(String),
|
||||
Ds(String, String),
|
||||
Es { x: usize, y: usize }
|
||||
}
|
||||
enum A { As, Bs, Cs(String), Ds(String, String), Es { x: usize, y: usize } }
|
||||
|
||||
fn main() {
|
||||
let a = A::As;
|
||||
|
@ -415,13 +388,7 @@ mod tests {
|
|||
}
|
||||
"#,
|
||||
r#"
|
||||
enum A {
|
||||
As,
|
||||
Bs,
|
||||
Cs(String),
|
||||
Ds(String, String),
|
||||
Es { x: usize, y: usize }
|
||||
}
|
||||
enum A { As, Bs, Cs(String), Ds(String, String), Es { x: usize, y: usize } }
|
||||
|
||||
fn main() {
|
||||
let a = A::As;
|
||||
|
@ -773,7 +740,7 @@ fn foo(opt: Option<i32>) {
|
|||
r#"
|
||||
fn foo(opt: Option<i32>) {
|
||||
match opt {
|
||||
$0Some(_) => {}
|
||||
Some(${0:_}) => {}
|
||||
None => {}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -2,7 +2,8 @@ use hir::{db::HirDatabase, HasSource, HasVisibility, PathResolution};
|
|||
use ra_db::FileId;
|
||||
use ra_syntax::{ast, AstNode, TextRange, TextSize};
|
||||
|
||||
use crate::{utils::vis_offset, AssistContext, AssistId, Assists};
|
||||
use crate::{utils::vis_offset, AssistContext, AssistId, AssistKind, Assists};
|
||||
use ast::VisibilityOwner;
|
||||
|
||||
// FIXME: this really should be a fix for diagnostic, rather than an assist.
|
||||
|
||||
|
@ -41,14 +42,15 @@ fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext) -> O
|
|||
};
|
||||
|
||||
let current_module = ctx.sema.scope(&path.syntax()).module()?;
|
||||
let target_module = def.module(ctx.db)?;
|
||||
let target_module = def.module(ctx.db())?;
|
||||
|
||||
let vis = target_module.visibility_of(ctx.db, &def)?;
|
||||
if vis.is_visible_from(ctx.db, current_module.into()) {
|
||||
let vis = target_module.visibility_of(ctx.db(), &def)?;
|
||||
if vis.is_visible_from(ctx.db(), current_module.into()) {
|
||||
return None;
|
||||
};
|
||||
|
||||
let (offset, target, target_file, target_name) = target_data_for_def(ctx.db, def)?;
|
||||
let (offset, current_visibility, target, target_file, target_name) =
|
||||
target_data_for_def(ctx.db(), def)?;
|
||||
|
||||
let missing_visibility =
|
||||
if current_module.krate() == target_module.krate() { "pub(crate)" } else { "pub" };
|
||||
|
@ -58,54 +60,78 @@ fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext) -> O
|
|||
Some(name) => format!("Change visibility of {} to {}", name, missing_visibility),
|
||||
};
|
||||
|
||||
acc.add(AssistId("fix_visibility"), assist_label, target, |builder| {
|
||||
acc.add(AssistId("fix_visibility", AssistKind::QuickFix), assist_label, target, |builder| {
|
||||
builder.edit_file(target_file);
|
||||
match ctx.config.snippet_cap {
|
||||
Some(cap) => builder.insert_snippet(cap, offset, format!("$0{} ", missing_visibility)),
|
||||
Some(cap) => match current_visibility {
|
||||
Some(current_visibility) => builder.replace_snippet(
|
||||
cap,
|
||||
current_visibility.syntax().text_range(),
|
||||
format!("$0{}", missing_visibility),
|
||||
),
|
||||
None => builder.insert_snippet(cap, offset, format!("$0{} ", missing_visibility)),
|
||||
},
|
||||
None => match current_visibility {
|
||||
Some(current_visibility) => {
|
||||
builder.replace(current_visibility.syntax().text_range(), missing_visibility)
|
||||
}
|
||||
None => builder.insert(offset, format!("{} ", missing_visibility)),
|
||||
},
|
||||
}
|
||||
})
|
||||
}
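Both `fix_visibility` branches now distinguish between a definition that already has a visibility modifier (replace it) and one that has none (insert at the computed offset), and independently between snippet-capable and plain clients. A toy, string-level sketch of that decision table is below; `apply_visibility` and its arguments are invented for the example, while the real code edits the target file through `builder` using text ranges and `ctx.config.snippet_cap`.

```rust
// String-level sketch of the decision the fix_visibility assist now makes:
// replace an existing visibility modifier or insert a new one, optionally
// prefixing a `$0` snippet cursor when the client supports snippets.
fn apply_visibility(item: &str, current_vis: Option<&str>, missing_vis: &str, snippets: bool) -> String {
    let new_vis = if snippets { format!("$0{}", missing_vis) } else { missing_vis.to_string() };
    match current_vis {
        // e.g. `pub(crate) struct Bar;` -> `$0pub struct Bar;`
        Some(old) => item.replacen(old, &new_vis, 1),
        // e.g. `struct Bar;` -> `pub(crate) struct Bar;`
        None => format!("{} {}", new_vis, item),
    }
}

fn main() {
    assert_eq!(
        apply_visibility("pub(crate) struct Bar;", Some("pub(crate)"), "pub", true),
        "$0pub struct Bar;"
    );
    assert_eq!(
        apply_visibility("struct Bar;", None, "pub(crate)", false),
        "pub(crate) struct Bar;"
    );
}
```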
|
||||
|
||||
fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
|
||||
let record_field: ast::RecordField = ctx.find_node_at_offset()?;
|
||||
let record_field: ast::RecordExprField = ctx.find_node_at_offset()?;
|
||||
let (record_field_def, _) = ctx.sema.resolve_record_field(&record_field)?;
|
||||
|
||||
let current_module = ctx.sema.scope(record_field.syntax()).module()?;
|
||||
let visibility = record_field_def.visibility(ctx.db);
|
||||
if visibility.is_visible_from(ctx.db, current_module.into()) {
|
||||
let visibility = record_field_def.visibility(ctx.db());
|
||||
if visibility.is_visible_from(ctx.db(), current_module.into()) {
|
||||
return None;
|
||||
}
|
||||
|
||||
let parent = record_field_def.parent_def(ctx.db);
|
||||
let parent_name = parent.name(ctx.db);
|
||||
let target_module = parent.module(ctx.db);
|
||||
let parent = record_field_def.parent_def(ctx.db());
|
||||
let parent_name = parent.name(ctx.db());
|
||||
let target_module = parent.module(ctx.db());
|
||||
|
||||
let in_file_source = record_field_def.source(ctx.db);
|
||||
let (offset, target) = match in_file_source.value {
|
||||
let in_file_source = record_field_def.source(ctx.db());
|
||||
let (offset, current_visibility, target) = match in_file_source.value {
|
||||
hir::FieldSource::Named(it) => {
|
||||
let s = it.syntax();
|
||||
(vis_offset(s), s.text_range())
|
||||
(vis_offset(s), it.visibility(), s.text_range())
|
||||
}
|
||||
hir::FieldSource::Pos(it) => {
|
||||
let s = it.syntax();
|
||||
(vis_offset(s), s.text_range())
|
||||
(vis_offset(s), it.visibility(), s.text_range())
|
||||
}
|
||||
};
|
||||
|
||||
let missing_visibility =
|
||||
if current_module.krate() == target_module.krate() { "pub(crate)" } else { "pub" };
|
||||
let target_file = in_file_source.file_id.original_file(ctx.db);
|
||||
let target_file = in_file_source.file_id.original_file(ctx.db());
|
||||
|
||||
let target_name = record_field_def.name(ctx.db);
|
||||
let target_name = record_field_def.name(ctx.db());
|
||||
let assist_label =
|
||||
format!("Change visibility of {}.{} to {}", parent_name, target_name, missing_visibility);
|
||||
|
||||
acc.add(AssistId("fix_visibility"), assist_label, target, |builder| {
|
||||
acc.add(AssistId("fix_visibility", AssistKind::QuickFix), assist_label, target, |builder| {
|
||||
builder.edit_file(target_file);
|
||||
match ctx.config.snippet_cap {
|
||||
Some(cap) => builder.insert_snippet(cap, offset, format!("$0{} ", missing_visibility)),
|
||||
Some(cap) => match current_visibility {
|
||||
Some(current_visibility) => builder.replace_snippet(
|
||||
cap,
|
||||
dbg!(current_visibility.syntax()).text_range(),
|
||||
format!("$0{}", missing_visibility),
|
||||
),
|
||||
None => builder.insert_snippet(cap, offset, format!("$0{} ", missing_visibility)),
|
||||
},
|
||||
None => match current_visibility {
|
||||
Some(current_visibility) => {
|
||||
builder.replace(current_visibility.syntax().text_range(), missing_visibility)
|
||||
}
|
||||
None => builder.insert(offset, format!("{} ", missing_visibility)),
|
||||
},
|
||||
}
|
||||
})
|
||||
}
|
||||
|
@ -113,24 +139,30 @@ fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext) ->
|
|||
fn target_data_for_def(
|
||||
db: &dyn HirDatabase,
|
||||
def: hir::ModuleDef,
|
||||
) -> Option<(TextSize, TextRange, FileId, Option<hir::Name>)> {
|
||||
) -> Option<(TextSize, Option<ast::Visibility>, TextRange, FileId, Option<hir::Name>)> {
|
||||
fn offset_target_and_file_id<S, Ast>(
|
||||
db: &dyn HirDatabase,
|
||||
x: S,
|
||||
) -> (TextSize, TextRange, FileId)
|
||||
) -> (TextSize, Option<ast::Visibility>, TextRange, FileId)
|
||||
where
|
||||
S: HasSource<Ast = Ast>,
|
||||
Ast: AstNode,
|
||||
Ast: AstNode + ast::VisibilityOwner,
|
||||
{
|
||||
let source = x.source(db);
|
||||
let in_file_syntax = source.syntax();
|
||||
let file_id = in_file_syntax.file_id;
|
||||
let syntax = in_file_syntax.value;
|
||||
(vis_offset(syntax), syntax.text_range(), file_id.original_file(db.upcast()))
|
||||
let current_visibility = source.value.visibility();
|
||||
(
|
||||
vis_offset(syntax),
|
||||
current_visibility,
|
||||
syntax.text_range(),
|
||||
file_id.original_file(db.upcast()),
|
||||
)
|
||||
}
|
||||
|
||||
let target_name;
|
||||
let (offset, target, target_file) = match def {
|
||||
let (offset, current_visibility, target, target_file) = match def {
|
||||
hir::ModuleDef::Function(f) => {
|
||||
target_name = Some(f.name(db));
|
||||
offset_target_and_file_id(db, f)
|
||||
|
@ -164,13 +196,13 @@ fn target_data_for_def(
|
|||
let in_file_source = m.declaration_source(db)?;
|
||||
let file_id = in_file_source.file_id.original_file(db.upcast());
|
||||
let syntax = in_file_source.value.syntax();
|
||||
(vis_offset(syntax), syntax.text_range(), file_id)
|
||||
(vis_offset(syntax), in_file_source.value.visibility(), syntax.text_range(), file_id)
|
||||
}
|
||||
// Enum variants can't be private, we can't modify builtin types
|
||||
hir::ModuleDef::EnumVariant(_) | hir::ModuleDef::BuiltinType(_) => return None,
|
||||
};
|
||||
|
||||
Some((offset, target, target_file, target_name))
|
||||
Some((offset, current_visibility, target, target_file, target_name))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
@ -522,6 +554,34 @@ struct Bar;
|
|||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn replaces_pub_crate_with_pub() {
|
||||
check_assist(
|
||||
fix_visibility,
|
||||
r"
|
||||
//- /main.rs crate:a deps:foo
|
||||
foo::Bar<|>
|
||||
//- /lib.rs crate:foo
|
||||
pub(crate) struct Bar;
|
||||
",
|
||||
r"$0pub struct Bar;
|
||||
",
|
||||
);
|
||||
check_assist(
|
||||
fix_visibility,
|
||||
r"
|
||||
//- /main.rs crate:a deps:foo
|
||||
fn main() {
|
||||
foo::Foo { <|>bar: () };
|
||||
}
|
||||
//- /lib.rs crate:foo
|
||||
pub struct Foo { pub(crate) bar: () }
|
||||
",
|
||||
r"pub struct Foo { $0pub bar: () }
|
||||
",
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[ignore]
|
||||
// FIXME handle reexports properly
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
use ra_syntax::ast::{AstNode, BinExpr, BinOp};
|
||||
|
||||
use crate::{AssistContext, AssistId, Assists};
|
||||
use crate::{AssistContext, AssistId, AssistKind, Assists};
|
||||
|
||||
// Assist: flip_binexpr
|
||||
//
|
||||
|
@ -33,13 +33,18 @@ pub(crate) fn flip_binexpr(acc: &mut Assists, ctx: &AssistContext) -> Option<()>
|
|||
return None;
|
||||
}
|
||||
|
||||
acc.add(AssistId("flip_binexpr"), "Flip binary expression", op_range, |edit| {
|
||||
acc.add(
|
||||
AssistId("flip_binexpr", AssistKind::RefactorRewrite),
|
||||
"Flip binary expression",
|
||||
op_range,
|
||||
|edit| {
|
||||
if let FlipAction::FlipAndReplaceOp(new_op) = action {
|
||||
edit.replace(op_range, new_op);
|
||||
}
|
||||
edit.replace(lhs.text_range(), rhs.text());
|
||||
edit.replace(rhs.text_range(), lhs.text());
|
||||
})
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
enum FlipAction {
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
use ra_syntax::{algo::non_trivia_sibling, Direction, T};
|
||||
|
||||
use crate::{AssistContext, AssistId, Assists};
|
||||
use crate::{AssistContext, AssistId, AssistKind, Assists};
|
||||
|
||||
// Assist: flip_comma
|
||||
//
|
||||
|
@ -28,10 +28,15 @@ pub(crate) fn flip_comma(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
|
|||
return None;
|
||||
}
|
||||
|
||||
acc.add(AssistId("flip_comma"), "Flip comma", comma.text_range(), |edit| {
|
||||
acc.add(
|
||||
AssistId("flip_comma", AssistKind::RefactorRewrite),
|
||||
"Flip comma",
|
||||
comma.text_range(),
|
||||
|edit| {
|
||||
edit.replace(prev.text_range(), next.to_string());
|
||||
edit.replace(next.text_range(), prev.to_string());
|
||||
})
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
|
|
@ -4,7 +4,7 @@ use ra_syntax::{
|
|||
Direction, T,
|
||||
};
|
||||
|
||||
use crate::{AssistContext, AssistId, Assists};
|
||||
use crate::{AssistContext, AssistId, AssistKind, Assists};
|
||||
|
||||
// Assist: flip_trait_bound
|
||||
//
|
||||
|
@ -33,10 +33,15 @@ pub(crate) fn flip_trait_bound(acc: &mut Assists, ctx: &AssistContext) -> Option
|
|||
);
|
||||
|
||||
let target = plus.text_range();
|
||||
acc.add(AssistId("flip_trait_bound"), "Flip trait bounds", target, |edit| {
|
||||
acc.add(
|
||||
AssistId("flip_trait_bound", AssistKind::RefactorRewrite),
|
||||
"Flip trait bounds",
|
||||
target,
|
||||
|edit| {
|
||||
edit.replace(before.text_range(), after.to_string());
|
||||
edit.replace(after.text_range(), before.to_string());
|
||||
})
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
|
|
@ -4,9 +4,9 @@ use ra_syntax::{
|
|||
TextSize,
|
||||
};
|
||||
|
||||
use crate::{AssistContext, AssistId, Assists};
|
||||
use crate::{AssistContext, AssistId, AssistKind, Assists};
|
||||
|
||||
// Assist: add_derive
|
||||
// Assist: generate_derive
|
||||
//
|
||||
// Adds a new `#[derive()]` clause to a struct or enum.
|
||||
//
|
||||
|
@ -24,12 +24,16 @@ use crate::{AssistContext, AssistId, Assists};
|
|||
// y: u32,
|
||||
// }
|
||||
// ```
|
||||
pub(crate) fn add_derive(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
|
||||
pub(crate) fn generate_derive(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
|
||||
let cap = ctx.config.snippet_cap?;
|
||||
let nominal = ctx.find_node_at_offset::<ast::NominalDef>()?;
|
||||
let nominal = ctx.find_node_at_offset::<ast::AdtDef>()?;
|
||||
let node_start = derive_insertion_offset(&nominal)?;
|
||||
let target = nominal.syntax().text_range();
|
||||
acc.add(AssistId("add_derive"), "Add `#[derive]`", target, |builder| {
|
||||
acc.add(
|
||||
AssistId("generate_derive", AssistKind::Generate),
|
||||
"Add `#[derive]`",
|
||||
target,
|
||||
|builder| {
|
||||
let derive_attr = nominal
|
||||
.attrs()
|
||||
.filter_map(|x| x.as_simple_call())
|
||||
|
@ -49,11 +53,12 @@ pub(crate) fn add_derive(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
|
|||
)
|
||||
}
|
||||
};
|
||||
})
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
// Insert `derive` after doc comments.
|
||||
fn derive_insertion_offset(nominal: &ast::NominalDef) -> Option<TextSize> {
|
||||
fn derive_insertion_offset(nominal: &ast::AdtDef) -> Option<TextSize> {
|
||||
let non_ws_child = nominal
|
||||
.syntax()
|
||||
.children_with_tokens()
|
||||
|
@ -70,12 +75,12 @@ mod tests {
|
|||
#[test]
|
||||
fn add_derive_new() {
|
||||
check_assist(
|
||||
add_derive,
|
||||
generate_derive,
|
||||
"struct Foo { a: i32, <|>}",
|
||||
"#[derive($0)]\nstruct Foo { a: i32, }",
|
||||
);
|
||||
check_assist(
|
||||
add_derive,
|
||||
generate_derive,
|
||||
"struct Foo { <|> a: i32, }",
|
||||
"#[derive($0)]\nstruct Foo { a: i32, }",
|
||||
);
|
||||
|
@ -84,7 +89,7 @@ mod tests {
|
|||
#[test]
|
||||
fn add_derive_existing() {
|
||||
check_assist(
|
||||
add_derive,
|
||||
generate_derive,
|
||||
"#[derive(Clone)]\nstruct Foo { a: i32<|>, }",
|
||||
"#[derive(Clone$0)]\nstruct Foo { a: i32, }",
|
||||
);
|
||||
|
@ -93,7 +98,7 @@ mod tests {
|
|||
#[test]
|
||||
fn add_derive_new_with_doc_comment() {
|
||||
check_assist(
|
||||
add_derive,
|
||||
generate_derive,
|
||||
"
|
||||
/// `Foo` is a pretty important struct.
|
||||
/// It does stuff.
|
||||
|
@ -111,7 +116,7 @@ struct Foo { a: i32, }
|
|||
#[test]
|
||||
fn add_derive_target() {
|
||||
check_assist_target(
|
||||
add_derive,
|
||||
generate_derive,
|
||||
"
|
||||
struct SomeThingIrrelevant;
|
||||
/// `Foo` is a pretty important struct.
|
|
@ -2,9 +2,9 @@ use ra_ide_db::RootDatabase;
|
|||
use ra_syntax::ast::{self, AstNode, NameOwner};
|
||||
use test_utils::mark;
|
||||
|
||||
use crate::{utils::FamousDefs, AssistContext, AssistId, Assists};
|
||||
use crate::{utils::FamousDefs, AssistContext, AssistId, AssistKind, Assists};
|
||||
|
||||
// Assist: add_from_impl_for_enum
|
||||
// Assist: generate_from_impl_for_enum
|
||||
//
|
||||
// Adds a From impl for an enum variant with one tuple field.
|
||||
//
|
||||
|
@ -21,8 +21,8 @@ use crate::{utils::FamousDefs, AssistContext, AssistId, Assists};
|
|||
// }
|
||||
// }
|
||||
// ```
|
||||
pub(crate) fn add_from_impl_for_enum(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
|
||||
let variant = ctx.find_node_at_offset::<ast::EnumVariant>()?;
|
||||
pub(crate) fn generate_from_impl_for_enum(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
|
||||
let variant = ctx.find_node_at_offset::<ast::Variant>()?;
|
||||
let variant_name = variant.name()?;
|
||||
let enum_name = variant.parent_enum().name()?;
|
||||
let field_list = match variant.kind() {
|
||||
|
@ -32,7 +32,7 @@ pub(crate) fn add_from_impl_for_enum(acc: &mut Assists, ctx: &AssistContext) ->
|
|||
if field_list.fields().count() != 1 {
|
||||
return None;
|
||||
}
|
||||
let field_type = field_list.fields().next()?.type_ref()?;
|
||||
let field_type = field_list.fields().next()?.ty()?;
|
||||
let path = match field_type {
|
||||
ast::TypeRef::PathType(it) => it,
|
||||
_ => return None,
|
||||
|
@ -45,8 +45,8 @@ pub(crate) fn add_from_impl_for_enum(acc: &mut Assists, ctx: &AssistContext) ->
|
|||
|
||||
let target = variant.syntax().text_range();
|
||||
acc.add(
|
||||
AssistId("add_from_impl_for_enum"),
|
||||
"Add From impl for this enum variant",
|
||||
AssistId("generate_from_impl_for_enum", AssistKind::Generate),
|
||||
"Generate `From` impl for this enum variant",
|
||||
target,
|
||||
|edit| {
|
||||
let start_offset = variant.parent_enum().syntax().text_range().end();
|
||||
|
@ -69,7 +69,7 @@ impl From<{0}> for {1} {{
|
|||
|
||||
fn existing_from_impl(
|
||||
sema: &'_ hir::Semantics<'_, RootDatabase>,
|
||||
variant: &ast::EnumVariant,
|
||||
variant: &ast::Variant,
|
||||
) -> Option<()> {
|
||||
let variant = sema.to_def(variant)?;
|
||||
let enum_ = variant.parent_enum(sema.db);
|
||||
|
@ -97,9 +97,9 @@ mod tests {
|
|||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_add_from_impl_for_enum() {
|
||||
fn test_generate_from_impl_for_enum() {
|
||||
check_assist(
|
||||
add_from_impl_for_enum,
|
||||
generate_from_impl_for_enum,
|
||||
"enum A { <|>One(u32) }",
|
||||
r#"enum A { One(u32) }
|
||||
|
||||
|
@ -112,9 +112,9 @@ impl From<u32> for A {
|
|||
}
|
||||
|
||||
#[test]
|
||||
fn test_add_from_impl_for_enum_complicated_path() {
|
||||
fn test_generate_from_impl_for_enum_complicated_path() {
|
||||
check_assist(
|
||||
add_from_impl_for_enum,
|
||||
generate_from_impl_for_enum,
|
||||
r#"enum A { <|>One(foo::bar::baz::Boo) }"#,
|
||||
r#"enum A { One(foo::bar::baz::Boo) }
|
||||
|
||||
|
@ -129,7 +129,7 @@ impl From<foo::bar::baz::Boo> for A {
|
|||
fn check_not_applicable(ra_fixture: &str) {
|
||||
let fixture =
|
||||
format!("//- /main.rs crate:main deps:core\n{}\n{}", ra_fixture, FamousDefs::FIXTURE);
|
||||
check_assist_not_applicable(add_from_impl_for_enum, &fixture)
|
||||
check_assist_not_applicable(generate_from_impl_for_enum, &fixture)
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -166,7 +166,7 @@ impl From<u32> for A {
|
|||
#[test]
|
||||
fn test_add_from_impl_different_variant_impl_exists() {
|
||||
check_assist(
|
||||
add_from_impl_for_enum,
|
||||
generate_from_impl_for_enum,
|
||||
r#"enum A { <|>One(u32), Two(String), }
|
||||
|
||||
impl From<String> for A {
|
|
@ -13,10 +13,10 @@ use rustc_hash::{FxHashMap, FxHashSet};
|
|||
use crate::{
|
||||
assist_config::SnippetCap,
|
||||
utils::{render_snippet, Cursor},
|
||||
AssistContext, AssistId, Assists,
|
||||
AssistContext, AssistId, AssistKind, Assists,
|
||||
};
|
||||
|
||||
// Assist: add_function
|
||||
// Assist: generate_function
|
||||
//
|
||||
// Adds a stub function with a signature matching the function under the cursor.
|
||||
//
|
||||
|
@ -41,7 +41,7 @@ use crate::{
|
|||
// }
|
||||
//
|
||||
// ```
|
||||
pub(crate) fn add_function(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
|
||||
pub(crate) fn generate_function(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
|
||||
let path_expr: ast::PathExpr = ctx.find_node_at_offset()?;
|
||||
let call = path_expr.syntax().parent().and_then(ast::CallExpr::cast)?;
|
||||
let path = path_expr.path()?;
|
||||
|
@ -62,7 +62,11 @@ pub(crate) fn add_function(acc: &mut Assists, ctx: &AssistContext) -> Option<()>
|
|||
let function_builder = FunctionBuilder::from_call(&ctx, &call, &path, target_module)?;
|
||||
|
||||
let target = call.syntax().text_range();
|
||||
acc.add(AssistId("add_function"), "Add function", target, |builder| {
|
||||
acc.add(
|
||||
AssistId("generate_function", AssistKind::Generate),
|
||||
format!("Generate `{}` function", function_builder.fn_name),
|
||||
target,
|
||||
|builder| {
|
||||
let function_template = function_builder.render();
|
||||
builder.edit_file(function_template.file);
|
||||
let new_fn = function_template.to_string(ctx.config.snippet_cap);
|
||||
|
@ -70,14 +74,15 @@ pub(crate) fn add_function(acc: &mut Assists, ctx: &AssistContext) -> Option<()>
|
|||
Some(cap) => builder.insert_snippet(cap, function_template.insert_offset, new_fn),
|
||||
None => builder.insert(function_template.insert_offset, new_fn),
|
||||
}
|
||||
})
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
struct FunctionTemplate {
|
||||
insert_offset: TextSize,
|
||||
placeholder_expr: ast::MacroCall,
|
||||
leading_ws: String,
|
||||
fn_def: ast::FnDef,
|
||||
fn_def: ast::Fn,
|
||||
trailing_ws: String,
|
||||
file: FileId,
|
||||
}
|
||||
|
@ -99,7 +104,7 @@ impl FunctionTemplate {
|
|||
struct FunctionBuilder {
|
||||
target: GeneratedFunctionTarget,
|
||||
fn_name: ast::Name,
|
||||
type_params: Option<ast::TypeParamList>,
|
||||
type_params: Option<ast::GenericParamList>,
|
||||
params: ast::ParamList,
|
||||
file: FileId,
|
||||
needs_pub: bool,
|
||||
|
@ -117,7 +122,7 @@ impl FunctionBuilder {
|
|||
let mut file = ctx.frange.file_id;
|
||||
let target = match &target_module {
|
||||
Some(target_module) => {
|
||||
let module_source = target_module.definition_source(ctx.db);
|
||||
let module_source = target_module.definition_source(ctx.db());
|
||||
let (in_file, target) = next_space_for_fn_in_module(ctx.sema.db, &module_source)?;
|
||||
file = in_file;
|
||||
target
|
||||
|
@ -195,7 +200,7 @@ fn fn_args(
|
|||
ctx: &AssistContext,
|
||||
target_module: hir::Module,
|
||||
call: &ast::CallExpr,
|
||||
) -> Option<(Option<ast::TypeParamList>, ast::ParamList)> {
|
||||
) -> Option<(Option<ast::GenericParamList>, ast::ParamList)> {
|
||||
let mut arg_names = Vec::new();
|
||||
let mut arg_types = Vec::new();
|
||||
for arg in call.arg_list()?.args() {
|
||||
|
@ -269,7 +274,7 @@ fn fn_arg_type(
|
|||
return None;
|
||||
}
|
||||
|
||||
if let Ok(rendered) = ty.display_source_code(ctx.db, target_module.into()) {
|
||||
if let Ok(rendered) = ty.display_source_code(ctx.db(), target_module.into()) {
|
||||
Some(rendered)
|
||||
} else {
|
||||
None
|
||||
|
@ -333,7 +338,7 @@ mod tests {
|
|||
#[test]
|
||||
fn add_function_with_no_args() {
|
||||
check_assist(
|
||||
add_function,
|
||||
generate_function,
|
||||
r"
|
||||
fn foo() {
|
||||
bar<|>();
|
||||
|
@ -356,7 +361,7 @@ fn bar() {
|
|||
// This ensures that the function is correctly generated
|
||||
// in the next outer mod or file
|
||||
check_assist(
|
||||
add_function,
|
||||
generate_function,
|
||||
r"
|
||||
impl Foo {
|
||||
fn foo() {
|
||||
|
@ -382,7 +387,7 @@ fn bar() {
|
|||
fn add_function_directly_after_current_block() {
|
||||
// The new fn should not be created at the end of the file or module
|
||||
check_assist(
|
||||
add_function,
|
||||
generate_function,
|
||||
r"
|
||||
fn foo1() {
|
||||
bar<|>();
|
||||
|
@ -407,7 +412,7 @@ fn foo2() {}
|
|||
#[test]
|
||||
fn add_function_with_no_args_in_same_module() {
|
||||
check_assist(
|
||||
add_function,
|
||||
generate_function,
|
||||
r"
|
||||
mod baz {
|
||||
fn foo() {
|
||||
|
@ -432,7 +437,7 @@ mod baz {
|
|||
#[test]
|
||||
fn add_function_with_function_call_arg() {
|
||||
check_assist(
|
||||
add_function,
|
||||
generate_function,
|
||||
r"
|
||||
struct Baz;
|
||||
fn baz() -> Baz { todo!() }
|
||||
|
@ -457,7 +462,7 @@ fn bar(baz: Baz) {
|
|||
#[test]
|
||||
fn add_function_with_method_call_arg() {
|
||||
check_assist(
|
||||
add_function,
|
||||
generate_function,
|
||||
r"
|
||||
struct Baz;
|
||||
impl Baz {
|
||||
|
@ -490,7 +495,7 @@ fn bar(baz: Baz) {
|
|||
#[test]
|
||||
fn add_function_with_string_literal_arg() {
|
||||
check_assist(
|
||||
add_function,
|
||||
generate_function,
|
||||
r#"
|
||||
fn foo() {
|
||||
<|>bar("bar")
|
||||
|
@ -511,7 +516,7 @@ fn bar(arg: &str) {
|
|||
#[test]
|
||||
fn add_function_with_char_literal_arg() {
|
||||
check_assist(
|
||||
add_function,
|
||||
generate_function,
|
||||
r#"
|
||||
fn foo() {
|
||||
<|>bar('x')
|
||||
|
@ -532,7 +537,7 @@ fn bar(arg: char) {
|
|||
#[test]
|
||||
fn add_function_with_int_literal_arg() {
|
||||
check_assist(
|
||||
add_function,
|
||||
generate_function,
|
||||
r"
|
||||
fn foo() {
|
||||
<|>bar(42)
|
||||
|
@ -553,7 +558,7 @@ fn bar(arg: i32) {
|
|||
#[test]
|
||||
fn add_function_with_cast_int_literal_arg() {
|
||||
check_assist(
|
||||
add_function,
|
||||
generate_function,
|
||||
r"
|
||||
fn foo() {
|
||||
<|>bar(42 as u8)
|
||||
|
@ -576,7 +581,7 @@ fn bar(arg: u8) {
|
|||
// Ensures that the name of the cast type isn't used
|
||||
// in the generated function signature.
|
||||
check_assist(
|
||||
add_function,
|
||||
generate_function,
|
||||
r"
|
||||
fn foo() {
|
||||
let x = 42;
|
||||
|
@ -599,7 +604,7 @@ fn bar(x: u8) {
|
|||
#[test]
|
||||
fn add_function_with_variable_arg() {
|
||||
check_assist(
|
||||
add_function,
|
||||
generate_function,
|
||||
r"
|
||||
fn foo() {
|
||||
let worble = ();
|
||||
|
@ -622,7 +627,7 @@ fn bar(worble: ()) {
|
|||
#[test]
|
||||
fn add_function_with_impl_trait_arg() {
|
||||
check_assist(
|
||||
add_function,
|
||||
generate_function,
|
||||
r"
|
||||
trait Foo {}
|
||||
fn foo() -> impl Foo {
|
||||
|
@ -651,7 +656,7 @@ fn bar(foo: impl Foo) {
|
|||
#[test]
|
||||
fn borrowed_arg() {
|
||||
check_assist(
|
||||
add_function,
|
||||
generate_function,
|
||||
r"
|
||||
struct Baz;
|
||||
fn baz() -> Baz { todo!() }
|
||||
|
@ -678,7 +683,7 @@ fn bar(baz: &Baz) {
|
|||
#[test]
|
||||
fn add_function_with_qualified_path_arg() {
|
||||
check_assist(
|
||||
add_function,
|
||||
generate_function,
|
||||
r"
|
||||
mod Baz {
|
||||
pub struct Bof;
|
||||
|
@ -709,7 +714,7 @@ fn bar(baz: Baz::Bof) {
|
|||
// FIXME fix printing the generics of a `Ty` to make this test pass
|
||||
fn add_function_with_generic_arg() {
|
||||
check_assist(
|
||||
add_function,
|
||||
generate_function,
|
||||
r"
|
||||
fn foo<T>(t: T) {
|
||||
<|>bar(t)
|
||||
|
@ -732,7 +737,7 @@ fn bar<T>(t: T) {
|
|||
// FIXME Fix function type printing to make this test pass
|
||||
fn add_function_with_fn_arg() {
|
||||
check_assist(
|
||||
add_function,
|
||||
generate_function,
|
||||
r"
|
||||
struct Baz;
|
||||
impl Baz {
|
||||
|
@ -763,7 +768,7 @@ fn bar(arg: fn() -> Baz) {
|
|||
// FIXME Fix closure type printing to make this test pass
|
||||
fn add_function_with_closure_arg() {
|
||||
check_assist(
|
||||
add_function,
|
||||
generate_function,
|
||||
r"
|
||||
fn foo() {
|
||||
let closure = |x: i64| x - 1;
|
||||
|
@ -786,7 +791,7 @@ fn bar(closure: impl Fn(i64) -> i64) {
|
|||
#[test]
|
||||
fn unresolveable_types_default_to_unit() {
|
||||
check_assist(
|
||||
add_function,
|
||||
generate_function,
|
||||
r"
|
||||
fn foo() {
|
||||
<|>bar(baz)
|
||||
|
@ -807,7 +812,7 @@ fn bar(baz: ()) {
|
|||
#[test]
|
||||
fn arg_names_dont_overlap() {
|
||||
check_assist(
|
||||
add_function,
|
||||
generate_function,
|
||||
r"
|
||||
struct Baz;
|
||||
fn baz() -> Baz { Baz }
|
||||
|
@ -832,7 +837,7 @@ fn bar(baz_1: Baz, baz_2: Baz) {
|
|||
#[test]
|
||||
fn arg_name_counters_start_at_1_per_name() {
|
||||
check_assist(
|
||||
add_function,
|
||||
generate_function,
|
||||
r#"
|
||||
struct Baz;
|
||||
fn baz() -> Baz { Baz }
|
||||
|
@ -857,7 +862,7 @@ fn bar(baz_1: Baz, baz_2: Baz, arg_1: &str, arg_2: &str) {
|
|||
#[test]
|
||||
fn add_function_in_module() {
|
||||
check_assist(
|
||||
add_function,
|
||||
generate_function,
|
||||
r"
|
||||
mod bar {}
|
||||
|
||||
|
@ -885,7 +890,7 @@ fn foo() {
|
|||
// See https://github.com/rust-analyzer/rust-analyzer/issues/1165
|
||||
fn qualified_path_uses_correct_scope() {
|
||||
check_assist(
|
||||
add_function,
|
||||
generate_function,
|
||||
"
|
||||
mod foo {
|
||||
pub struct Foo;
|
||||
|
@ -916,7 +921,7 @@ fn baz(foo: foo::Foo) {
|
|||
#[test]
|
||||
fn add_function_in_module_containing_other_items() {
|
||||
check_assist(
|
||||
add_function,
|
||||
generate_function,
|
||||
r"
|
||||
mod bar {
|
||||
fn something_else() {}
|
||||
|
@ -945,7 +950,7 @@ fn foo() {
|
|||
#[test]
|
||||
fn add_function_in_nested_module() {
|
||||
check_assist(
|
||||
add_function,
|
||||
generate_function,
|
||||
r"
|
||||
mod bar {
|
||||
mod baz {}
|
||||
|
@ -974,7 +979,7 @@ fn foo() {
|
|||
#[test]
|
||||
fn add_function_in_another_file() {
|
||||
check_assist(
|
||||
add_function,
|
||||
generate_function,
|
||||
r"
|
||||
//- /main.rs
|
||||
mod foo;
|
||||
|
@ -996,7 +1001,7 @@ pub(crate) fn bar() {
|
|||
#[test]
|
||||
fn add_function_not_applicable_if_function_already_exists() {
|
||||
check_assist_not_applicable(
|
||||
add_function,
|
||||
generate_function,
|
||||
r"
|
||||
fn foo() {
|
||||
bar<|>();
|
||||
|
@ -1013,7 +1018,7 @@ fn bar() {}
|
|||
// bar is resolved, but baz isn't.
|
||||
// The assist is only active if the cursor is on an unresolved path,
|
||||
// but the assist should only be offered if the path is a function call.
|
||||
add_function,
|
||||
generate_function,
|
||||
r"
|
||||
fn foo() {
|
||||
bar(b<|>az);
|
||||
|
@ -1028,7 +1033,7 @@ fn bar(baz: ()) {}
|
|||
#[ignore]
|
||||
fn create_method_with_no_args() {
|
||||
check_assist(
|
||||
add_function,
|
||||
generate_function,
|
||||
r"
|
||||
struct Foo;
|
||||
impl Foo {
|
109
crates/ra_assists/src/handlers/generate_impl.rs
Normal file
|
@ -0,0 +1,109 @@
use ra_syntax::ast::{self, AstNode, GenericParamsOwner, NameOwner};
use stdx::{format_to, SepBy};

use crate::{AssistContext, AssistId, AssistKind, Assists};

// Assist: generate_impl
//
// Adds a new inherent impl for a type.
//
// ```
// struct Ctx<T: Clone> {
//     data: T,<|>
// }
// ```
// ->
// ```
// struct Ctx<T: Clone> {
//     data: T,
// }
//
// impl<T: Clone> Ctx<T> {
//     $0
// }
// ```
pub(crate) fn generate_impl(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
    let nominal = ctx.find_node_at_offset::<ast::AdtDef>()?;
    let name = nominal.name()?;
    let target = nominal.syntax().text_range();
    acc.add(
        AssistId("generate_impl", AssistKind::Generate),
        format!("Generate impl for `{}`", name),
        target,
        |edit| {
            let type_params = nominal.generic_param_list();
            let start_offset = nominal.syntax().text_range().end();
            let mut buf = String::new();
            buf.push_str("\n\nimpl");
            if let Some(type_params) = &type_params {
                format_to!(buf, "{}", type_params.syntax());
            }
            buf.push_str(" ");
            buf.push_str(name.text().as_str());
            if let Some(type_params) = type_params {
                let lifetime_params = type_params
                    .lifetime_params()
                    .filter_map(|it| it.lifetime_token())
                    .map(|it| it.text().clone());
                let type_params = type_params
                    .type_params()
                    .filter_map(|it| it.name())
                    .map(|it| it.text().clone());

                let generic_params = lifetime_params.chain(type_params).sep_by(", ");
                format_to!(buf, "<{}>", generic_params)
            }
            match ctx.config.snippet_cap {
                Some(cap) => {
                    buf.push_str(" {\n    $0\n}");
                    edit.insert_snippet(cap, start_offset, buf);
                }
                None => {
                    buf.push_str(" {\n}");
                    edit.insert(start_offset, buf);
                }
            }
        },
    )
}

#[cfg(test)]
mod tests {
    use crate::tests::{check_assist, check_assist_target};

    use super::*;

    #[test]
    fn test_add_impl() {
        check_assist(
            generate_impl,
            "struct Foo {<|>}\n",
            "struct Foo {}\n\nimpl Foo {\n    $0\n}\n",
        );
        check_assist(
            generate_impl,
            "struct Foo<T: Clone> {<|>}",
            "struct Foo<T: Clone> {}\n\nimpl<T: Clone> Foo<T> {\n    $0\n}",
        );
        check_assist(
            generate_impl,
            "struct Foo<'a, T: Foo<'a>> {<|>}",
            "struct Foo<'a, T: Foo<'a>> {}\n\nimpl<'a, T: Foo<'a>> Foo<'a, T> {\n    $0\n}",
        );
    }

    #[test]
    fn add_impl_target() {
        check_assist_target(
            generate_impl,
            "
struct SomeThingIrrelevant;
/// Has a lifetime parameter
struct Foo<'a, T: Foo<'a>> {<|>}
struct EvenMoreIrrelevant;
",
            "/// Has a lifetime parameter
struct Foo<'a, T: Foo<'a>> {}",
        );
    }
}
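The header-building logic in the new file copies the full parameter list (with bounds) onto the `impl` and only the parameter names onto the self type, lifetimes first. A plain-string sketch of that assembly, under the assumption that the parameter names have already been extracted; `impl_header` is an invented name, and the real code drives `format_to!` and `SepBy` over syntax nodes instead.

```rust
// Sketch of how generate_impl assembles the `impl<...> Name<...>` header:
// the declared parameter list (with bounds) goes after `impl`, while only
// the names (lifetimes first, then types) go on the self type.
fn impl_header(name: &str, param_list: Option<&str>, lifetimes: &[&str], types: &[&str]) -> String {
    let mut buf = String::from("\n\nimpl");
    if let Some(params) = param_list {
        buf.push_str(params);
    }
    buf.push(' ');
    buf.push_str(name);
    if !lifetimes.is_empty() || !types.is_empty() {
        let generic_params: Vec<&str> = lifetimes.iter().chain(types.iter()).copied().collect();
        buf.push_str(&format!("<{}>", generic_params.join(", ")));
    }
    buf
}

fn main() {
    assert_eq!(
        impl_header("Foo", Some("<'a, T: Foo<'a>>"), &["'a"], &["T"]),
        "\n\nimpl<'a, T: Foo<'a>> Foo<'a, T>"
    );
}
```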
|
|
@ -1,15 +1,13 @@
|
|||
use hir::Adt;
|
||||
use ra_syntax::{
|
||||
ast::{
|
||||
self, AstNode, NameOwner, StructKind, TypeAscriptionOwner, TypeParamsOwner, VisibilityOwner,
|
||||
},
|
||||
ast::{self, AstNode, GenericParamsOwner, NameOwner, StructKind, VisibilityOwner},
|
||||
T,
|
||||
};
|
||||
use stdx::{format_to, SepBy};
|
||||
|
||||
use crate::{AssistContext, AssistId, Assists};
|
||||
use crate::{AssistContext, AssistId, AssistKind, Assists};
|
||||
|
||||
// Assist: add_new
|
||||
// Assist: generate_new
|
||||
//
|
||||
// Adds a new inherent impl for a type.
|
||||
//
|
||||
|
@ -29,8 +27,8 @@ use crate::{AssistContext, AssistId, Assists};
|
|||
// }
|
||||
//
|
||||
// ```
|
||||
pub(crate) fn add_new(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
|
||||
let strukt = ctx.find_node_at_offset::<ast::StructDef>()?;
|
||||
pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
|
||||
let strukt = ctx.find_node_at_offset::<ast::Struct>()?;
|
||||
|
||||
// We want to only apply this to non-union structs with named fields
|
||||
let field_list = match strukt.kind() {
|
||||
|
@ -42,7 +40,7 @@ pub(crate) fn add_new(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
|
|||
let impl_def = find_struct_impl(&ctx, &strukt)?;
|
||||
|
||||
let target = strukt.syntax().text_range();
|
||||
acc.add(AssistId("add_new"), "Add default constructor", target, |builder| {
|
||||
acc.add(AssistId("generate_new", AssistKind::Generate), "Generate `new`", target, |builder| {
|
||||
let mut buf = String::with_capacity(512);
|
||||
|
||||
if impl_def.is_some() {
|
||||
|
@ -53,9 +51,7 @@ pub(crate) fn add_new(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
|
|||
|
||||
let params = field_list
|
||||
.fields()
|
||||
.filter_map(|f| {
|
||||
Some(format!("{}: {}", f.name()?.syntax(), f.ascribed_type()?.syntax()))
|
||||
})
|
||||
.filter_map(|f| Some(format!("{}: {}", f.name()?.syntax(), f.ty()?.syntax())))
|
||||
.sep_by(", ");
|
||||
let fields = field_list.fields().filter_map(|f| f.name()).sep_by(", ");
|
||||
|
||||
|
@ -90,8 +86,8 @@ pub(crate) fn add_new(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
|
|||
|
||||
// Generates the surrounding `impl Type { <code> }` including type and lifetime
|
||||
// parameters
|
||||
fn generate_impl_text(strukt: &ast::StructDef, code: &str) -> String {
|
||||
let type_params = strukt.type_param_list();
|
||||
fn generate_impl_text(strukt: &ast::Struct, code: &str) -> String {
|
||||
let type_params = strukt.generic_param_list();
|
||||
let mut buf = String::with_capacity(code.len());
|
||||
buf.push_str("\n\nimpl");
|
||||
if let Some(type_params) = &type_params {
|
||||
|
@ -121,15 +117,15 @@ fn generate_impl_text(strukt: &ast::StructDef, code: &str) -> String {
|
|||
//
|
||||
// FIXME: change the new fn checking to a more semantic approach when that's more
|
||||
// viable (e.g. we process proc macros, etc)
|
||||
fn find_struct_impl(ctx: &AssistContext, strukt: &ast::StructDef) -> Option<Option<ast::ImplDef>> {
|
||||
let db = ctx.db;
|
||||
fn find_struct_impl(ctx: &AssistContext, strukt: &ast::Struct) -> Option<Option<ast::Impl>> {
|
||||
let db = ctx.db();
|
||||
let module = strukt.syntax().ancestors().find(|node| {
|
||||
ast::Module::can_cast(node.kind()) || ast::SourceFile::can_cast(node.kind())
|
||||
})?;
|
||||
|
||||
let struct_def = ctx.sema.to_def(strukt)?;
|
||||
|
||||
let block = module.descendants().filter_map(ast::ImplDef::cast).find_map(|impl_blk| {
|
||||
let block = module.descendants().filter_map(ast::Impl::cast).find_map(|impl_blk| {
|
||||
let blk = ctx.sema.to_def(&impl_blk)?;
|
||||
|
||||
// FIXME: handle e.g. `struct S<T>; impl<U> S<U> {}`
|
||||
|
@ -157,10 +153,10 @@ fn find_struct_impl(ctx: &AssistContext, strukt: &ast::StructDef) -> Option<Opti
|
|||
Some(block)
|
||||
}
|
||||
|
||||
fn has_new_fn(imp: &ast::ImplDef) -> bool {
|
||||
if let Some(il) = imp.item_list() {
|
||||
fn has_new_fn(imp: &ast::Impl) -> bool {
|
||||
if let Some(il) = imp.assoc_item_list() {
|
||||
for item in il.assoc_items() {
|
||||
if let ast::AssocItem::FnDef(f) = item {
|
||||
if let ast::AssocItem::Fn(f) = item {
|
||||
if let Some(name) = f.name() {
|
||||
if name.text().eq_ignore_ascii_case("new") {
|
||||
return true;
|
||||
|
@ -181,10 +177,10 @@ mod tests {
|
|||
|
||||
#[test]
|
||||
#[rustfmt::skip]
|
||||
fn test_add_new() {
|
||||
fn test_generate_new() {
|
||||
// Check output of generation
|
||||
check_assist(
|
||||
add_new,
|
||||
generate_new,
|
||||
"struct Foo {<|>}",
|
||||
"struct Foo {}
|
||||
|
||||
|
@ -194,7 +190,7 @@ impl Foo {
|
|||
",
|
||||
);
|
||||
check_assist(
|
||||
add_new,
|
||||
generate_new,
|
||||
"struct Foo<T: Clone> {<|>}",
|
||||
"struct Foo<T: Clone> {}
|
||||
|
||||
|
@ -204,7 +200,7 @@ impl<T: Clone> Foo<T> {
|
|||
",
|
||||
);
|
||||
check_assist(
|
||||
add_new,
|
||||
generate_new,
|
||||
"struct Foo<'a, T: Foo<'a>> {<|>}",
|
||||
"struct Foo<'a, T: Foo<'a>> {}
|
||||
|
||||
|
@ -214,7 +210,7 @@ impl<'a, T: Foo<'a>> Foo<'a, T> {
|
|||
",
|
||||
);
|
||||
check_assist(
|
||||
add_new,
|
||||
generate_new,
|
||||
"struct Foo { baz: String <|>}",
|
||||
"struct Foo { baz: String }
|
||||
|
||||
|
@ -224,7 +220,7 @@ impl Foo {
|
|||
",
|
||||
);
|
||||
check_assist(
|
||||
add_new,
|
||||
generate_new,
|
||||
"struct Foo { baz: String, qux: Vec<i32> <|>}",
|
||||
"struct Foo { baz: String, qux: Vec<i32> }
|
||||
|
||||
|
@ -236,7 +232,7 @@ impl Foo {
|
|||
|
||||
// Check that visibility modifiers don't get brought in for fields
|
||||
check_assist(
|
||||
add_new,
|
||||
generate_new,
|
||||
"struct Foo { pub baz: String, pub qux: Vec<i32> <|>}",
|
||||
"struct Foo { pub baz: String, pub qux: Vec<i32> }
|
||||
|
||||
|
@ -248,7 +244,7 @@ impl Foo {
|
|||
|
||||
// Check that it reuses existing impls
|
||||
check_assist(
|
||||
add_new,
|
||||
generate_new,
|
||||
"struct Foo {<|>}
|
||||
|
||||
impl Foo {}
|
||||
|
@ -261,7 +257,7 @@ impl Foo {
|
|||
",
|
||||
);
|
||||
check_assist(
|
||||
add_new,
|
||||
generate_new,
|
||||
"struct Foo {<|>}
|
||||
|
||||
impl Foo {
|
||||
|
@ -279,7 +275,7 @@ impl Foo {
|
|||
);
|
||||
|
||||
check_assist(
|
||||
add_new,
|
||||
generate_new,
|
||||
"struct Foo {<|>}
|
||||
|
||||
impl Foo {
|
||||
|
@ -304,7 +300,7 @@ impl Foo {
|
|||
|
||||
// Check visibility of new fn based on struct
|
||||
check_assist(
|
||||
add_new,
|
||||
generate_new,
|
||||
"pub struct Foo {<|>}",
|
||||
"pub struct Foo {}
|
||||
|
||||
|
@ -314,7 +310,7 @@ impl Foo {
|
|||
",
|
||||
);
|
||||
check_assist(
|
||||
add_new,
|
||||
generate_new,
|
||||
"pub(crate) struct Foo {<|>}",
|
||||
"pub(crate) struct Foo {}
|
||||
|
||||
|
@ -326,9 +322,9 @@ impl Foo {
|
|||
}
|
||||
|
||||
#[test]
|
||||
fn add_new_not_applicable_if_fn_exists() {
|
||||
fn generate_new_not_applicable_if_fn_exists() {
|
||||
check_assist_not_applicable(
|
||||
add_new,
|
||||
generate_new,
|
||||
"
|
||||
struct Foo {<|>}
|
||||
|
||||
|
@ -340,7 +336,7 @@ impl Foo {
|
|||
);
|
||||
|
||||
check_assist_not_applicable(
|
||||
add_new,
|
||||
generate_new,
|
||||
"
|
||||
struct Foo {<|>}
|
||||
|
||||
|
@ -353,9 +349,9 @@ impl Foo {
|
|||
}
|
||||
|
||||
#[test]
|
||||
fn add_new_target() {
|
||||
fn generate_new_target() {
|
||||
check_assist_target(
|
||||
add_new,
|
||||
generate_new,
|
||||
"
|
||||
struct SomeThingIrrelevant;
|
||||
/// Has a lifetime parameter
|
||||
|
@ -370,7 +366,7 @@ struct Foo<'a, T: Foo<'a>> {}",
|
|||
#[test]
|
||||
fn test_unrelated_new() {
|
||||
check_assist(
|
||||
add_new,
|
||||
generate_new,
|
||||
r##"
|
||||
pub struct AstId<N: AstNode> {
|
||||
file_id: HirFileId,
|
|
@ -7,7 +7,7 @@ use test_utils::mark;
|
|||
|
||||
use crate::{
|
||||
assist_context::{AssistContext, Assists},
|
||||
AssistId,
|
||||
AssistId, AssistKind,
|
||||
};
|
||||
|
||||
// Assist: inline_local_variable
|
||||
|
@@ -44,7 +44,7 @@ pub(crate) fn inline_local_variable(acc: &mut Assists, ctx: &AssistContext) -> O
|
|||
|
||||
let def = ctx.sema.to_def(&bind_pat)?;
|
||||
let def = Definition::Local(def);
|
||||
let refs = def.find_usages(ctx.db, None);
|
||||
let refs = def.find_usages(&ctx.sema, None);
|
||||
if refs.is_empty() {
|
||||
mark::hit!(test_not_applicable_if_variable_unused);
|
||||
return None;
|
||||
|
@@ -110,13 +110,19 @@ pub(crate) fn inline_local_variable(acc: &mut Assists, ctx: &AssistContext) -> O
|
|||
let init_in_paren = format!("({})", &init_str);
|
||||
|
||||
let target = bind_pat.syntax().text_range();
|
||||
acc.add(AssistId("inline_local_variable"), "Inline variable", target, move |builder| {
|
||||
acc.add(
|
||||
AssistId("inline_local_variable", AssistKind::RefactorInline),
|
||||
"Inline variable",
|
||||
target,
|
||||
move |builder| {
|
||||
builder.delete(delete_range);
|
||||
for (desc, should_wrap) in refs.iter().zip(wrap_in_parens) {
|
||||
let replacement = if should_wrap { init_in_paren.clone() } else { init_str.clone() };
|
||||
let replacement =
|
||||
if should_wrap { init_in_paren.clone() } else { init_str.clone() };
|
||||
builder.replace(desc.file_range.range, replacement)
|
||||
}
|
||||
})
|
||||
},
|
||||
)
|
||||
}
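To make the `should_wrap` / `init_in_paren` choice above concrete, a minimal standalone sketch (plain Rust; `inline_usage` is a hypothetical helper, not the assist's real API):

fn inline_usage(init: &str, usage: &str, should_wrap: bool) -> String {
    // Mirror of the should_wrap / init_in_paren decision: wrap the initializer
    // when pasting it into the usage site would otherwise change precedence.
    let replacement = if should_wrap { format!("({})", init) } else { init.to_string() };
    usage.replacen("x", &replacement, 1)
}

fn main() {
    // `let x = a + b;` followed by `x * 2` must become `(a + b) * 2`, not `a + b * 2`.
    assert_eq!(inline_usage("a + b", "x * 2", true), "(a + b) * 2");
    // A plain use such as `f(x)` needs no extra parentheses.
    assert_eq!(inline_usage("a + b", "f(x)", false), "f(a + b)");
}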
|
||||
|
||||
#[cfg(test)]
|
||||
|
|
|
@@ -1,10 +1,10 @@
|
|||
use ra_syntax::{
|
||||
ast::{self, NameOwner, TypeAscriptionOwner, TypeParamsOwner},
|
||||
ast::{self, GenericParamsOwner, NameOwner},
|
||||
AstNode, SyntaxKind, TextRange, TextSize,
|
||||
};
|
||||
use rustc_hash::FxHashSet;
|
||||
|
||||
use crate::{assist_context::AssistBuilder, AssistContext, AssistId, Assists};
|
||||
use crate::{assist_context::AssistBuilder, AssistContext, AssistId, AssistKind, Assists};
|
||||
|
||||
static ASSIST_NAME: &str = "introduce_named_lifetime";
|
||||
static ASSIST_LABEL: &str = "Introduce named lifetime";
|
||||
|
@@ -38,9 +38,9 @@ pub(crate) fn introduce_named_lifetime(acc: &mut Assists, ctx: &AssistContext) -
|
|||
let lifetime_token = ctx
|
||||
.find_token_at_offset(SyntaxKind::LIFETIME)
|
||||
.filter(|lifetime| lifetime.text() == "'_")?;
|
||||
if let Some(fn_def) = lifetime_token.ancestors().find_map(ast::FnDef::cast) {
|
||||
if let Some(fn_def) = lifetime_token.ancestors().find_map(ast::Fn::cast) {
|
||||
generate_fn_def_assist(acc, &fn_def, lifetime_token.text_range())
|
||||
} else if let Some(impl_def) = lifetime_token.ancestors().find_map(ast::ImplDef::cast) {
|
||||
} else if let Some(impl_def) = lifetime_token.ancestors().find_map(ast::Impl::cast) {
|
||||
generate_impl_def_assist(acc, &impl_def, lifetime_token.text_range())
|
||||
} else {
|
||||
None
|
||||
|
@ -50,11 +50,11 @@ pub(crate) fn introduce_named_lifetime(acc: &mut Assists, ctx: &AssistContext) -
|
|||
/// Generate the assist for the fn def case
|
||||
fn generate_fn_def_assist(
|
||||
acc: &mut Assists,
|
||||
fn_def: &ast::FnDef,
|
||||
fn_def: &ast::Fn,
|
||||
lifetime_loc: TextRange,
|
||||
) -> Option<()> {
|
||||
let param_list: ast::ParamList = fn_def.param_list()?;
|
||||
let new_lifetime_param = generate_unique_lifetime_param_name(&fn_def.type_param_list())?;
|
||||
let new_lifetime_param = generate_unique_lifetime_param_name(&fn_def.generic_param_list())?;
|
||||
let end_of_fn_ident = fn_def.name()?.ident_token()?.text_range().end();
|
||||
let self_param =
|
||||
// use the self param if it's a reference and has no explicit lifetime
|
||||
|
@ -67,7 +67,7 @@ fn generate_fn_def_assist(
|
|||
// otherwise, if there's a single reference parameter without a named lifetime, use that
|
||||
let fn_params_without_lifetime: Vec<_> = param_list
|
||||
.params()
|
||||
.filter_map(|param| match param.ascribed_type() {
|
||||
.filter_map(|param| match param.ty() {
|
||||
Some(ast::TypeRef::ReferenceType(ascribed_type))
|
||||
if ascribed_type.lifetime_token() == None =>
|
||||
{
|
||||
|
@ -83,7 +83,7 @@ fn generate_fn_def_assist(
|
|||
_ => return None,
|
||||
}
|
||||
};
|
||||
acc.add(AssistId(ASSIST_NAME), ASSIST_LABEL, lifetime_loc, |builder| {
|
||||
acc.add(AssistId(ASSIST_NAME, AssistKind::Refactor), ASSIST_LABEL, lifetime_loc, |builder| {
|
||||
add_lifetime_param(fn_def, builder, end_of_fn_ident, new_lifetime_param);
|
||||
builder.replace(lifetime_loc, format!("'{}", new_lifetime_param));
|
||||
loc_needing_lifetime.map(|loc| builder.insert(loc, format!("'{} ", new_lifetime_param)));
|
||||
|
@ -93,12 +93,12 @@ fn generate_fn_def_assist(
|
|||
/// Generate the assist for the impl def case
|
||||
fn generate_impl_def_assist(
|
||||
acc: &mut Assists,
|
||||
impl_def: &ast::ImplDef,
|
||||
impl_def: &ast::Impl,
|
||||
lifetime_loc: TextRange,
|
||||
) -> Option<()> {
|
||||
let new_lifetime_param = generate_unique_lifetime_param_name(&impl_def.type_param_list())?;
|
||||
let new_lifetime_param = generate_unique_lifetime_param_name(&impl_def.generic_param_list())?;
|
||||
let end_of_impl_kw = impl_def.impl_token()?.text_range().end();
|
||||
acc.add(AssistId(ASSIST_NAME), ASSIST_LABEL, lifetime_loc, |builder| {
|
||||
acc.add(AssistId(ASSIST_NAME, AssistKind::Refactor), ASSIST_LABEL, lifetime_loc, |builder| {
|
||||
add_lifetime_param(impl_def, builder, end_of_impl_kw, new_lifetime_param);
|
||||
builder.replace(lifetime_loc, format!("'{}", new_lifetime_param));
|
||||
})
|
||||
|
@ -107,7 +107,7 @@ fn generate_impl_def_assist(
|
|||
/// Given a type parameter list, generate a unique lifetime parameter name
|
||||
/// which is not in the list
|
||||
fn generate_unique_lifetime_param_name(
|
||||
existing_type_param_list: &Option<ast::TypeParamList>,
|
||||
existing_type_param_list: &Option<ast::GenericParamList>,
|
||||
) -> Option<char> {
|
||||
match existing_type_param_list {
|
||||
Some(type_params) => {
|
||||
|
@ -123,13 +123,13 @@ fn generate_unique_lifetime_param_name(
|
|||
|
||||
/// Add the lifetime param to `builder`. If there are type parameters in `type_params_owner`, add it to the end. Otherwise
|
||||
/// add new type params brackets with the lifetime parameter at `new_type_params_loc`.
|
||||
fn add_lifetime_param<TypeParamsOwner: ast::TypeParamsOwner>(
|
||||
fn add_lifetime_param<TypeParamsOwner: ast::GenericParamsOwner>(
|
||||
type_params_owner: &TypeParamsOwner,
|
||||
builder: &mut AssistBuilder,
|
||||
new_type_params_loc: TextSize,
|
||||
new_lifetime_param: char,
|
||||
) {
|
||||
match type_params_owner.type_param_list() {
|
||||
match type_params_owner.generic_param_list() {
|
||||
// add the new lifetime parameter to an existing type param list
|
||||
Some(type_params) => {
|
||||
builder.insert(
|
||||
|
|
|
@ -6,7 +6,7 @@ use ra_syntax::{
|
|||
use crate::{
|
||||
assist_context::{AssistContext, Assists},
|
||||
utils::invert_boolean_expression,
|
||||
AssistId,
|
||||
AssistId, AssistKind,
|
||||
};
|
||||
|
||||
// Assist: invert_if
|
||||
|
@ -54,7 +54,7 @@ pub(crate) fn invert_if(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
|
|||
let else_node = else_block.syntax();
|
||||
let else_range = else_node.text_range();
|
||||
let then_range = then_node.text_range();
|
||||
acc.add(AssistId("invert_if"), "Invert if", if_range, |edit| {
|
||||
acc.add(AssistId("invert_if", AssistKind::RefactorRewrite), "Invert if", if_range, |edit| {
|
||||
edit.replace(cond_range, flip_cond.syntax().text());
|
||||
edit.replace(else_range, then_node.text());
|
||||
edit.replace(then_range, else_node.text());
|
||||
|
|
|
@ -8,7 +8,7 @@ use ra_syntax::{
|
|||
|
||||
use crate::{
|
||||
assist_context::{AssistContext, Assists},
|
||||
AssistId,
|
||||
AssistId, AssistKind,
|
||||
};
|
||||
|
||||
// Assist: merge_imports
|
||||
|
@ -28,7 +28,7 @@ pub(crate) fn merge_imports(acc: &mut Assists, ctx: &AssistContext) -> Option<()
|
|||
let mut rewriter = SyntaxRewriter::default();
|
||||
let mut offset = ctx.offset();
|
||||
|
||||
if let Some(use_item) = tree.syntax().parent().and_then(ast::UseItem::cast) {
|
||||
if let Some(use_item) = tree.syntax().parent().and_then(ast::Use::cast) {
|
||||
let (merged, to_delete) = next_prev()
|
||||
.filter_map(|dir| neighbor(&use_item, dir))
|
||||
.filter_map(|it| Some((it.clone(), it.use_tree()?)))
|
||||
|
@ -56,9 +56,14 @@ pub(crate) fn merge_imports(acc: &mut Assists, ctx: &AssistContext) -> Option<()
|
|||
};
|
||||
|
||||
let target = tree.syntax().text_range();
|
||||
acc.add(AssistId("merge_imports"), "Merge imports", target, |builder| {
|
||||
acc.add(
|
||||
AssistId("merge_imports", AssistKind::RefactorRewrite),
|
||||
"Merge imports",
|
||||
target,
|
||||
|builder| {
|
||||
builder.rewrite(rewriter);
|
||||
})
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
fn next_prev() -> impl Iterator<Item = Direction> {
|
||||
|
|
|
@ -6,7 +6,7 @@ use ra_syntax::{
|
|||
Direction,
|
||||
};
|
||||
|
||||
use crate::{AssistContext, AssistId, Assists, TextRange};
|
||||
use crate::{AssistContext, AssistId, AssistKind, Assists, TextRange};
|
||||
|
||||
// Assist: merge_match_arms
|
||||
//
|
||||
|
@ -59,7 +59,11 @@ pub(crate) fn merge_match_arms(acc: &mut Assists, ctx: &AssistContext) -> Option
|
|||
return None;
|
||||
}
|
||||
|
||||
acc.add(AssistId("merge_match_arms"), "Merge match arms", current_text_range, |edit| {
|
||||
acc.add(
|
||||
AssistId("merge_match_arms", AssistKind::RefactorRewrite),
|
||||
"Merge match arms",
|
||||
current_text_range,
|
||||
|edit| {
|
||||
let pats = if arms_to_merge.iter().any(contains_placeholder) {
|
||||
"_".into()
|
||||
} else {
|
||||
|
@ -77,7 +81,8 @@ pub(crate) fn merge_match_arms(acc: &mut Assists, ctx: &AssistContext) -> Option
|
|||
let end = arms_to_merge.last().unwrap().syntax().text_range().end();
|
||||
|
||||
edit.replace(TextRange::new(start, end), arm);
|
||||
})
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
fn contains_placeholder(a: &ast::MatchArm) -> bool {
|
||||
|
|
|
@ -5,7 +5,7 @@ use ra_syntax::{
|
|||
T,
|
||||
};
|
||||
|
||||
use crate::{AssistContext, AssistId, Assists};
|
||||
use crate::{AssistContext, AssistId, AssistKind, Assists};
|
||||
|
||||
// Assist: move_bounds_to_where_clause
|
||||
//
|
||||
|
@ -23,7 +23,7 @@ use crate::{AssistContext, AssistId, Assists};
|
|||
// }
|
||||
// ```
|
||||
pub(crate) fn move_bounds_to_where_clause(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
|
||||
let type_param_list = ctx.find_node_at_offset::<ast::TypeParamList>()?;
|
||||
let type_param_list = ctx.find_node_at_offset::<ast::GenericParamList>()?;
|
||||
|
||||
let mut type_params = type_param_list.type_params();
|
||||
if type_params.all(|p| p.type_bound_list().is_none()) {
|
||||
|
@ -37,20 +37,24 @@ pub(crate) fn move_bounds_to_where_clause(acc: &mut Assists, ctx: &AssistContext
|
|||
|
||||
let anchor = match_ast! {
|
||||
match parent {
|
||||
ast::FnDef(it) => it.body()?.syntax().clone().into(),
|
||||
ast::TraitDef(it) => it.item_list()?.syntax().clone().into(),
|
||||
ast::ImplDef(it) => it.item_list()?.syntax().clone().into(),
|
||||
ast::EnumDef(it) => it.variant_list()?.syntax().clone().into(),
|
||||
ast::StructDef(it) => {
|
||||
ast::Fn(it) => it.body()?.syntax().clone().into(),
|
||||
ast::Trait(it) => it.assoc_item_list()?.syntax().clone().into(),
|
||||
ast::Impl(it) => it.assoc_item_list()?.syntax().clone().into(),
|
||||
ast::Enum(it) => it.variant_list()?.syntax().clone().into(),
|
||||
ast::Struct(it) => {
|
||||
it.syntax().children_with_tokens()
|
||||
.find(|it| it.kind() == RECORD_FIELD_DEF_LIST || it.kind() == T![;])?
|
||||
.find(|it| it.kind() == RECORD_FIELD_LIST || it.kind() == T![;])?
|
||||
},
|
||||
_ => return None
|
||||
}
|
||||
};
|
||||
|
||||
let target = type_param_list.syntax().text_range();
|
||||
acc.add(AssistId("move_bounds_to_where_clause"), "Move to where clause", target, |edit| {
|
||||
acc.add(
|
||||
AssistId("move_bounds_to_where_clause", AssistKind::RefactorRewrite),
|
||||
"Move to where clause",
|
||||
target,
|
||||
|edit| {
|
||||
let new_params = type_param_list
|
||||
.type_params()
|
||||
.filter(|it| it.type_bound_list().is_some())
|
||||
|
@ -68,11 +72,14 @@ pub(crate) fn move_bounds_to_where_clause(acc: &mut Assists, ctx: &AssistContext
|
|||
};
|
||||
|
||||
let to_insert = match anchor.prev_sibling_or_token() {
|
||||
Some(ref elem) if elem.kind() == WHITESPACE => format!("{} ", where_clause.syntax()),
|
||||
Some(ref elem) if elem.kind() == WHITESPACE => {
|
||||
format!("{} ", where_clause.syntax())
|
||||
}
|
||||
_ => format!(" {}", where_clause.syntax()),
|
||||
};
|
||||
edit.insert(anchor.text_range().start(), to_insert);
|
||||
})
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
fn build_predicate(param: ast::TypeParam) -> Option<ast::WherePred> {
|
||||
|
|
|
@ -3,7 +3,7 @@ use ra_syntax::{
|
|||
SyntaxKind::WHITESPACE,
|
||||
};
|
||||
|
||||
use crate::{AssistContext, AssistId, Assists};
|
||||
use crate::{AssistContext, AssistId, AssistKind, Assists};
|
||||
|
||||
// Assist: move_guard_to_arm_body
|
||||
//
|
||||
|
@ -40,7 +40,11 @@ pub(crate) fn move_guard_to_arm_body(acc: &mut Assists, ctx: &AssistContext) ->
|
|||
let buf = format!("if {} {{ {} }}", guard_conditions.syntax().text(), arm_expr.syntax().text());
|
||||
|
||||
let target = guard.syntax().text_range();
|
||||
acc.add(AssistId("move_guard_to_arm_body"), "Move guard to arm body", target, |edit| {
|
||||
acc.add(
|
||||
AssistId("move_guard_to_arm_body", AssistKind::RefactorRewrite),
|
||||
"Move guard to arm body",
|
||||
target,
|
||||
|edit| {
|
||||
match space_before_guard {
|
||||
Some(element) if element.kind() == WHITESPACE => {
|
||||
edit.delete(element.text_range());
|
||||
|
@ -50,7 +54,8 @@ pub(crate) fn move_guard_to_arm_body(acc: &mut Assists, ctx: &AssistContext) ->
|
|||
|
||||
edit.delete(guard.syntax().text_range());
|
||||
edit.replace_node_and_indent(arm_expr.syntax(), buf);
|
||||
})
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
// Assist: move_arm_cond_to_match_guard
|
||||
|
@ -100,7 +105,7 @@ pub(crate) fn move_arm_cond_to_match_guard(acc: &mut Assists, ctx: &AssistContex
|
|||
|
||||
let target = if_expr.syntax().text_range();
|
||||
acc.add(
|
||||
AssistId("move_arm_cond_to_match_guard"),
|
||||
AssistId("move_arm_cond_to_match_guard", AssistKind::RefactorRewrite),
|
||||
"Move condition to match guard",
|
||||
target,
|
||||
|edit| {
|
||||
|
|
|
@ -1,11 +1,14 @@
|
|||
use std::borrow::Cow;
|
||||
|
||||
use ra_syntax::{
|
||||
ast::{self, HasStringValue},
|
||||
ast::{self, HasQuotes, HasStringValue},
|
||||
AstToken,
|
||||
SyntaxKind::{RAW_STRING, STRING},
|
||||
TextSize,
|
||||
TextRange, TextSize,
|
||||
};
|
||||
use test_utils::mark;
|
||||
|
||||
use crate::{AssistContext, AssistId, Assists};
|
||||
use crate::{AssistContext, AssistId, AssistKind, Assists};
|
||||
|
||||
// Assist: make_raw_string
|
||||
//
|
||||
|
@@ -26,14 +29,24 @@ pub(crate) fn make_raw_string(acc: &mut Assists, ctx: &AssistContext) -> Option<
|
|||
let token = ctx.find_token_at_offset(STRING).and_then(ast::String::cast)?;
|
||||
let value = token.value()?;
|
||||
let target = token.syntax().text_range();
|
||||
acc.add(AssistId("make_raw_string"), "Rewrite as raw string", target, |edit| {
|
||||
let max_hash_streak = count_hashes(&value);
|
||||
let mut hashes = String::with_capacity(max_hash_streak + 1);
|
||||
for _ in 0..hashes.capacity() {
|
||||
hashes.push('#');
|
||||
acc.add(
|
||||
AssistId("make_raw_string", AssistKind::RefactorRewrite),
|
||||
"Rewrite as raw string",
|
||||
target,
|
||||
|edit| {
|
||||
let hashes = "#".repeat(required_hashes(&value).max(1));
|
||||
if matches!(value, Cow::Borrowed(_)) {
|
||||
// Avoid replacing the whole string to better position the cursor.
|
||||
edit.insert(token.syntax().text_range().start(), format!("r{}", hashes));
|
||||
edit.insert(token.syntax().text_range().end(), format!("{}", hashes));
|
||||
} else {
|
||||
edit.replace(
|
||||
token.syntax().text_range(),
|
||||
format!("r{}\"{}\"{}", hashes, value, hashes),
|
||||
);
|
||||
}
|
||||
edit.replace(token.syntax().text_range(), format!("r{}\"{}\"{}", hashes, value, hashes));
|
||||
})
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
// Assist: make_usual_string
|
||||
|
@ -55,11 +68,24 @@ pub(crate) fn make_usual_string(acc: &mut Assists, ctx: &AssistContext) -> Optio
|
|||
let token = ctx.find_token_at_offset(RAW_STRING).and_then(ast::RawString::cast)?;
|
||||
let value = token.value()?;
|
||||
let target = token.syntax().text_range();
|
||||
acc.add(AssistId("make_usual_string"), "Rewrite as regular string", target, |edit| {
|
||||
acc.add(
|
||||
AssistId("make_usual_string", AssistKind::RefactorRewrite),
|
||||
"Rewrite as regular string",
|
||||
target,
|
||||
|edit| {
|
||||
// parse inside string to escape `"`
|
||||
let escaped = value.escape_default().to_string();
|
||||
if let Some(offsets) = token.quote_offsets() {
|
||||
if token.text()[offsets.contents - token.syntax().text_range().start()] == escaped {
|
||||
edit.replace(offsets.quotes.0, "\"");
|
||||
edit.replace(offsets.quotes.1, "\"");
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
edit.replace(token.syntax().text_range(), format!("\"{}\"", escaped));
|
||||
})
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
// Assist: add_hash
|
||||
|
@ -80,7 +106,7 @@ pub(crate) fn make_usual_string(acc: &mut Assists, ctx: &AssistContext) -> Optio
|
|||
pub(crate) fn add_hash(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
|
||||
let token = ctx.find_token_at_offset(RAW_STRING)?;
|
||||
let target = token.text_range();
|
||||
acc.add(AssistId("add_hash"), "Add # to raw string", target, |edit| {
|
||||
acc.add(AssistId("add_hash", AssistKind::Refactor), "Add #", target, |edit| {
|
||||
edit.insert(token.text_range().start() + TextSize::of('r'), "#");
|
||||
edit.insert(token.text_range().end(), "#");
|
||||
})
|
||||
|
@@ -102,44 +128,58 @@ pub(crate) fn add_hash(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
|
|||
// }
|
||||
// ```
|
||||
pub(crate) fn remove_hash(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
|
||||
let token = ctx.find_token_at_offset(RAW_STRING)?;
|
||||
let token = ctx.find_token_at_offset(RAW_STRING).and_then(ast::RawString::cast)?;
|
||||
|
||||
let text = token.text().as_str();
|
||||
if text.starts_with("r\"") {
|
||||
// no hash to remove
|
||||
if !text.starts_with("r#") && text.ends_with('#') {
|
||||
return None;
|
||||
}
|
||||
let target = token.text_range();
|
||||
acc.add(AssistId("remove_hash"), "Remove hash from raw string", target, |edit| {
|
||||
let result = &text[2..text.len() - 1];
|
||||
let result = if result.starts_with('\"') {
|
||||
// FIXME: this logic is wrong, not only the last has has to handled specially
|
||||
// no more hash, escape
|
||||
let internal_str = &result[1..result.len() - 1];
|
||||
format!("\"{}\"", internal_str.escape_default().to_string())
|
||||
} else {
|
||||
result.to_owned()
|
||||
};
|
||||
edit.replace(token.text_range(), format!("r{}", result));
|
||||
|
||||
let existing_hashes = text.chars().skip(1).take_while(|&it| it == '#').count();
|
||||
|
||||
let text_range = token.syntax().text_range();
|
||||
let internal_text = &text[token.text_range_between_quotes()? - text_range.start()];
|
||||
|
||||
if existing_hashes == required_hashes(internal_text) {
|
||||
mark::hit!(cant_remove_required_hash);
|
||||
return None;
|
||||
}
|
||||
|
||||
acc.add(AssistId("remove_hash", AssistKind::RefactorRewrite), "Remove #", text_range, |edit| {
|
||||
edit.delete(TextRange::at(text_range.start() + TextSize::of('r'), TextSize::of('#')));
|
||||
edit.delete(TextRange::new(text_range.end() - TextSize::of('#'), text_range.end()));
|
||||
})
|
||||
}
|
||||
|
||||
fn count_hashes(s: &str) -> usize {
|
||||
let mut max_hash_streak = 0usize;
|
||||
for idx in s.match_indices("\"#").map(|(i, _)| i) {
|
||||
fn required_hashes(s: &str) -> usize {
|
||||
let mut res = 0usize;
|
||||
for idx in s.match_indices('"').map(|(i, _)| i) {
|
||||
let (_, sub) = s.split_at(idx + 1);
|
||||
let nb_hash = sub.chars().take_while(|c| *c == '#').count();
|
||||
if nb_hash > max_hash_streak {
|
||||
max_hash_streak = nb_hash;
|
||||
let n_hashes = sub.chars().take_while(|c| *c == '#').count();
|
||||
res = res.max(n_hashes + 1)
|
||||
}
|
||||
res
|
||||
}
|
||||
max_hash_streak
|
||||
|
||||
#[test]
|
||||
fn test_required_hashes() {
|
||||
assert_eq!(0, required_hashes("abc"));
|
||||
assert_eq!(0, required_hashes("###"));
|
||||
assert_eq!(1, required_hashes("\""));
|
||||
assert_eq!(2, required_hashes("\"#abc"));
|
||||
assert_eq!(0, required_hashes("#abc"));
|
||||
assert_eq!(3, required_hashes("#ab\"##c"));
|
||||
assert_eq!(5, required_hashes("#ab\"##\"####c"));
|
||||
}
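For context on the applicability check above, a self-contained sketch of when a hash can actually be dropped (plain Rust; `can_remove_hash` is a hypothetical helper, reusing the same counting rule as `required_hashes`):

fn required_hashes(s: &str) -> usize {
    // Same counting rule as above: one more `#` than the longest `#` run
    // that immediately follows a `"` in the contents.
    let mut res = 0usize;
    for idx in s.match_indices('"').map(|(i, _)| i) {
        let (_, sub) = s.split_at(idx + 1);
        let n_hashes = sub.chars().take_while(|c| *c == '#').count();
        res = res.max(n_hashes + 1)
    }
    res
}

// Hypothetical helper: a hash can only be removed if the literal carries more
// hashes than its contents require.
fn can_remove_hash(existing_hashes: usize, contents: &str) -> bool {
    existing_hashes > required_hashes(contents)
}

fn main() {
    // `r#"random string"#` -> `r"random string"` is fine.
    assert!(can_remove_hash(1, "random string"));
    // `r#"random"str"ing"#` already needs its single hash, so the assist bails out.
    assert!(!can_remove_hash(1, "random\"str\"ing"));
}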
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
use test_utils::mark;
|
||||
|
||||
use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn make_raw_string_target() {
|
||||
check_assist_target(
|
||||
|
@@ -341,33 +381,21 @@ string"###;
|
|||
fn remove_hash_works() {
|
||||
check_assist(
|
||||
remove_hash,
|
||||
r##"
|
||||
fn f() {
|
||||
let s = <|>r#"random string"#;
|
||||
}
|
||||
"##,
|
||||
r#"
|
||||
fn f() {
|
||||
let s = r"random string";
|
||||
}
|
||||
"#,
|
||||
r##"fn f() { let s = <|>r#"random string"#; }"##,
|
||||
r#"fn f() { let s = r"random string"; }"#,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn remove_hash_with_quote_works() {
|
||||
check_assist(
|
||||
fn cant_remove_required_hash() {
|
||||
mark::check!(cant_remove_required_hash);
|
||||
check_assist_not_applicable(
|
||||
remove_hash,
|
||||
r##"
|
||||
fn f() {
|
||||
let s = <|>r#"random"str"ing"#;
|
||||
}
|
||||
"##,
|
||||
r#"
|
||||
fn f() {
|
||||
let s = r"random\"str\"ing";
|
||||
}
|
||||
"#,
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -389,27 +417,13 @@ string"###;
|
|||
}
|
||||
|
||||
#[test]
|
||||
fn remove_hash_not_works() {
|
||||
check_assist_not_applicable(
|
||||
remove_hash,
|
||||
r#"
|
||||
fn f() {
|
||||
let s = <|>"random string";
|
||||
}
|
||||
"#,
|
||||
);
|
||||
fn remove_hash_doesnt_work() {
|
||||
check_assist_not_applicable(remove_hash, r#"fn f() { let s = <|>"random string"; }"#);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn remove_hash_no_hash_not_works() {
|
||||
check_assist_not_applicable(
|
||||
remove_hash,
|
||||
r#"
|
||||
fn f() {
|
||||
let s = <|>r"random string";
|
||||
}
|
||||
"#,
|
||||
);
|
||||
fn remove_hash_no_hash_doesnt_work() {
|
||||
check_assist_not_applicable(remove_hash, r#"fn f() { let s = <|>r"random string"; }"#);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -487,14 +501,4 @@ string"###;
|
|||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn count_hashes_test() {
|
||||
assert_eq!(0, count_hashes("abc"));
|
||||
assert_eq!(0, count_hashes("###"));
|
||||
assert_eq!(1, count_hashes("\"#abc"));
|
||||
assert_eq!(0, count_hashes("#abc"));
|
||||
assert_eq!(2, count_hashes("#ab\"##c"));
|
||||
assert_eq!(4, count_hashes("#ab\"##\"####c"));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
use ra_syntax::{
|
||||
ast::{self, AstNode},
|
||||
TextSize, T,
|
||||
TextRange, TextSize, T,
|
||||
};
|
||||
|
||||
use crate::{AssistContext, AssistId, Assists};
|
||||
use crate::{AssistContext, AssistId, AssistKind, Assists};
|
||||
|
||||
// Assist: remove_dbg
|
||||
//
|
||||
|
@@ -27,19 +27,33 @@ pub(crate) fn remove_dbg(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
|
|||
return None;
|
||||
}
|
||||
|
||||
let macro_range = macro_call.syntax().text_range();
|
||||
let is_leaf = macro_call.syntax().next_sibling().is_none();
|
||||
|
||||
let macro_content = {
|
||||
let macro_args = macro_call.token_tree()?.syntax().clone();
|
||||
let macro_end = if macro_call.semicolon_token().is_some() {
|
||||
macro_call.syntax().text_range().end() - TextSize::of(';')
|
||||
} else {
|
||||
macro_call.syntax().text_range().end()
|
||||
};
|
||||
|
||||
let text = macro_args.text();
|
||||
let without_parens = TextSize::of('(')..text.len() - TextSize::of(')');
|
||||
text.slice(without_parens).to_string()
|
||||
// macro_range determines what will be deleted and replaced with paste_instead_of_dbg
|
||||
let macro_range = TextRange::new(macro_call.syntax().text_range().start(), macro_end);
|
||||
let paste_instead_of_dbg = {
|
||||
let text = macro_call.token_tree()?.syntax().text();
|
||||
|
||||
// leafiness determines whether we should include the parentheses or not
|
||||
let slice_index: TextRange = if is_leaf {
|
||||
// leaf means we can extract the contents of the dbg! from `text`
|
||||
TextRange::new(TextSize::of('('), text.len() - TextSize::of(')'))
|
||||
} else {
|
||||
// not a leaf - we should keep the parens
|
||||
TextRange::up_to(text.len())
|
||||
};
|
||||
text.slice(slice_index).to_string()
|
||||
};
|
||||
|
||||
let target = macro_call.syntax().text_range();
|
||||
acc.add(AssistId("remove_dbg"), "Remove dbg!()", target, |builder| {
|
||||
builder.replace(macro_range, macro_content);
|
||||
acc.add(AssistId("remove_dbg", AssistKind::Refactor), "Remove dbg!()", target, |builder| {
|
||||
builder.replace(macro_range, paste_instead_of_dbg);
|
||||
})
|
||||
}
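A small standalone illustration of the leaf/non-leaf decision (plain Rust; `strip_dbg` is a hypothetical stand-in for the token-tree slicing above):

fn strip_dbg(token_tree_text: &str, is_leaf: bool) -> String {
    if is_leaf {
        // The dbg! call is the whole expression, so the parentheses can go:
        // `dbg!(1 + 1)` -> `1 + 1`.
        token_tree_text[1..token_tree_text.len() - 1].to_string()
    } else {
        // The call is part of a larger expression, so keep them:
        // `dbg!(a + b).foo()` -> `(a + b).foo()`.
        token_tree_text.to_string()
    }
}

fn main() {
    assert_eq!(strip_dbg("(1 + 1)", true), "1 + 1");
    assert_eq!(strip_dbg("(a + b)", false), "(a + b)");
}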
|
||||
|
||||
|
@ -99,6 +113,7 @@ fn foo(n: usize) {
|
|||
",
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_remove_dbg_with_brackets_and_braces() {
|
||||
check_assist(remove_dbg, "dbg![<|>1 + 1]", "1 + 1");
|
||||
|
@ -113,7 +128,7 @@ fn foo(n: usize) {
|
|||
}
|
||||
|
||||
#[test]
|
||||
fn remove_dbg_target() {
|
||||
fn test_remove_dbg_target() {
|
||||
check_assist_target(
|
||||
remove_dbg,
|
||||
"
|
||||
|
@ -126,4 +141,65 @@ fn foo(n: usize) {
|
|||
"dbg!(n.checked_sub(4))",
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_remove_dbg_keep_semicolon() {
|
||||
// https://github.com/rust-analyzer/rust-analyzer/issues/5129#issuecomment-651399779
|
||||
// not quite though
|
||||
// adding a comment at the end of the line makes
|
||||
// the ast::MacroCall include the semicolon at the end
|
||||
check_assist(
|
||||
remove_dbg,
|
||||
r#"let res = <|>dbg!(1 * 20); // needless comment"#,
|
||||
r#"let res = 1 * 20; // needless comment"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_remove_dbg_keep_expression() {
|
||||
check_assist(
|
||||
remove_dbg,
|
||||
r#"let res = <|>dbg!(a + b).foo();"#,
|
||||
r#"let res = (a + b).foo();"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_remove_dbg_from_inside_fn() {
|
||||
check_assist_target(
|
||||
remove_dbg,
|
||||
r#"
|
||||
fn square(x: u32) -> u32 {
|
||||
x * x
|
||||
}
|
||||
|
||||
fn main() {
|
||||
let x = square(dbg<|>!(5 + 10));
|
||||
println!("{}", x);
|
||||
}"#,
|
||||
"dbg!(5 + 10)",
|
||||
);
|
||||
|
||||
check_assist(
|
||||
remove_dbg,
|
||||
r#"
|
||||
fn square(x: u32) -> u32 {
|
||||
x * x
|
||||
}
|
||||
|
||||
fn main() {
|
||||
let x = square(dbg<|>!(5 + 10));
|
||||
println!("{}", x);
|
||||
}"#,
|
||||
r#"
|
||||
fn square(x: u32) -> u32 {
|
||||
x * x
|
||||
}
|
||||
|
||||
fn main() {
|
||||
let x = square(5 + 10);
|
||||
println!("{}", x);
|
||||
}"#,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
use ra_syntax::{SyntaxKind, TextRange, T};
|
||||
|
||||
use crate::{AssistContext, AssistId, Assists};
|
||||
use crate::{AssistContext, AssistId, AssistKind, Assists};
|
||||
|
||||
// Assist: remove_mut
|
||||
//
|
||||
|
@ -26,7 +26,12 @@ pub(crate) fn remove_mut(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
|
|||
};
|
||||
|
||||
let target = mut_token.text_range();
|
||||
acc.add(AssistId("remove_mut"), "Remove `mut` keyword", target, |builder| {
|
||||
acc.add(
|
||||
AssistId("remove_mut", AssistKind::Refactor),
|
||||
"Remove `mut` keyword",
|
||||
target,
|
||||
|builder| {
|
||||
builder.delete(TextRange::new(delete_from, delete_to));
|
||||
})
|
||||
},
|
||||
)
|
||||
}
|
||||
|
|
|
@ -5,7 +5,7 @@ use hir::{Adt, ModuleDef, PathResolution, Semantics, Struct};
|
|||
use ra_ide_db::RootDatabase;
|
||||
use ra_syntax::{algo, ast, match_ast, AstNode, SyntaxKind, SyntaxKind::*, SyntaxNode};
|
||||
|
||||
use crate::{AssistContext, AssistId, Assists};
|
||||
use crate::{AssistContext, AssistId, AssistKind, Assists};
|
||||
|
||||
// Assist: reorder_fields
|
||||
//
|
||||
|
@ -23,7 +23,7 @@ use crate::{AssistContext, AssistId, Assists};
|
|||
// ```
|
||||
//
|
||||
pub(crate) fn reorder_fields(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
|
||||
reorder::<ast::RecordLit>(acc, ctx).or_else(|| reorder::<ast::RecordPat>(acc, ctx))
|
||||
reorder::<ast::RecordExpr>(acc, ctx).or_else(|| reorder::<ast::RecordPat>(acc, ctx))
|
||||
}
|
||||
|
||||
fn reorder<R: AstNode>(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
|
||||
|
@ -42,16 +42,21 @@ fn reorder<R: AstNode>(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
|
|||
}
|
||||
|
||||
let target = record.syntax().text_range();
|
||||
acc.add(AssistId("reorder_fields"), "Reorder record fields", target, |edit| {
|
||||
acc.add(
|
||||
AssistId("reorder_fields", AssistKind::RefactorRewrite),
|
||||
"Reorder record fields",
|
||||
target,
|
||||
|edit| {
|
||||
for (old, new) in fields.iter().zip(&sorted_fields) {
|
||||
algo::diff(old, new).into_text_edit(edit.text_edit_builder());
|
||||
}
|
||||
})
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
fn get_fields_kind(node: &SyntaxNode) -> Vec<SyntaxKind> {
|
||||
match node.kind() {
|
||||
RECORD_LIT => vec![RECORD_FIELD],
|
||||
RECORD_EXPR => vec![RECORD_EXPR_FIELD],
|
||||
RECORD_PAT => vec![RECORD_FIELD_PAT, BIND_PAT],
|
||||
_ => vec![],
|
||||
}
|
||||
|
@ -60,7 +65,7 @@ fn get_fields_kind(node: &SyntaxNode) -> Vec<SyntaxKind> {
|
|||
fn get_field_name(node: &SyntaxNode) -> String {
|
||||
let res = match_ast! {
|
||||
match node {
|
||||
ast::RecordField(field) => field.field_name().map(|it| it.to_string()),
|
||||
ast::RecordExprField(field) => field.field_name().map(|it| it.to_string()),
|
||||
ast::RecordFieldPat(field) => field.field_name().map(|it| it.to_string()),
|
||||
_ => None,
|
||||
}
|
||||
|
@ -90,10 +95,10 @@ fn struct_definition(path: &ast::Path, sema: &Semantics<RootDatabase>) -> Option
|
|||
fn compute_fields_ranks(path: &ast::Path, ctx: &AssistContext) -> Option<FxHashMap<String, usize>> {
|
||||
Some(
|
||||
struct_definition(path, &ctx.sema)?
|
||||
.fields(ctx.db)
|
||||
.fields(ctx.db())
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(idx, field)| (field.name(ctx.db).to_string(), idx))
|
||||
.map(|(idx, field)| (field.name(ctx.db()).to_string(), idx))
|
||||
.collect(),
|
||||
)
|
||||
}
|
||||
|
|
|
@ -8,7 +8,7 @@ use ra_syntax::{
|
|||
AstNode,
|
||||
};
|
||||
|
||||
use crate::{utils::TryEnum, AssistContext, AssistId, Assists};
|
||||
use crate::{utils::TryEnum, AssistContext, AssistId, AssistKind, Assists};
|
||||
|
||||
// Assist: replace_if_let_with_match
|
||||
//
|
||||
|
@ -48,7 +48,11 @@ pub(crate) fn replace_if_let_with_match(acc: &mut Assists, ctx: &AssistContext)
|
|||
};
|
||||
|
||||
let target = if_expr.syntax().text_range();
|
||||
acc.add(AssistId("replace_if_let_with_match"), "Replace with match", target, move |edit| {
|
||||
acc.add(
|
||||
AssistId("replace_if_let_with_match", AssistKind::RefactorRewrite),
|
||||
"Replace with match",
|
||||
target,
|
||||
move |edit| {
|
||||
let match_expr = {
|
||||
let then_arm = {
|
||||
let then_block = then_block.reset_indent().indent(IndentLevel(1));
|
||||
|
@ -65,12 +69,14 @@ pub(crate) fn replace_if_let_with_match(acc: &mut Assists, ctx: &AssistContext)
|
|||
let else_expr = unwrap_trivial_block(else_block);
|
||||
make::match_arm(vec![pattern], else_expr)
|
||||
};
|
||||
let match_expr = make::expr_match(expr, make::match_arm_list(vec![then_arm, else_arm]));
|
||||
let match_expr =
|
||||
make::expr_match(expr, make::match_arm_list(vec![then_arm, else_arm]));
|
||||
match_expr.indent(IndentLevel::from_node(if_expr.syntax()))
|
||||
};
|
||||
|
||||
edit.replace_ast::<ast::Expr>(if_expr.into(), match_expr);
|
||||
})
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
|
|
@ -9,7 +9,7 @@ use ra_syntax::{
|
|||
AstNode, T,
|
||||
};
|
||||
|
||||
use crate::{utils::TryEnum, AssistContext, AssistId, Assists};
|
||||
use crate::{utils::TryEnum, AssistContext, AssistId, AssistKind, Assists};
|
||||
|
||||
// Assist: replace_let_with_if_let
|
||||
//
|
||||
|
@ -44,7 +44,11 @@ pub(crate) fn replace_let_with_if_let(acc: &mut Assists, ctx: &AssistContext) ->
|
|||
let happy_variant = TryEnum::from_ty(&ctx.sema, &ty).map(|it| it.happy_case());
|
||||
|
||||
let target = let_kw.text_range();
|
||||
acc.add(AssistId("replace_let_with_if_let"), "Replace with if-let", target, |edit| {
|
||||
acc.add(
|
||||
AssistId("replace_let_with_if_let", AssistKind::RefactorRewrite),
|
||||
"Replace with if-let",
|
||||
target,
|
||||
|edit| {
|
||||
let with_placeholder: ast::Pat = match happy_variant {
|
||||
None => make::placeholder_pat().into(),
|
||||
Some(var_name) => make::tuple_struct_pat(
|
||||
|
@ -53,15 +57,18 @@ pub(crate) fn replace_let_with_if_let(acc: &mut Assists, ctx: &AssistContext) ->
|
|||
)
|
||||
.into(),
|
||||
};
|
||||
let block = make::block_expr(None, None).indent(IndentLevel::from_node(let_stmt.syntax()));
|
||||
let block =
|
||||
make::block_expr(None, None).indent(IndentLevel::from_node(let_stmt.syntax()));
|
||||
let if_ = make::expr_if(make::condition(init, Some(with_placeholder)), block);
|
||||
let stmt = make::expr_stmt(if_);
|
||||
|
||||
let placeholder = stmt.syntax().descendants().find_map(ast::PlaceholderPat::cast).unwrap();
|
||||
let placeholder =
|
||||
stmt.syntax().descendants().find_map(ast::PlaceholderPat::cast).unwrap();
|
||||
let stmt = stmt.replace_descendant(placeholder.into(), original_pat);
|
||||
|
||||
edit.replace_ast(ast::Stmt::from(let_stmt), ast::Stmt::from(stmt));
|
||||
})
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
|
|
@ -3,7 +3,7 @@ use ra_syntax::{algo::SyntaxRewriter, ast, match_ast, AstNode, SmolStr, SyntaxNo
|
|||
|
||||
use crate::{
|
||||
utils::{find_insert_use_container, insert_use_statement},
|
||||
AssistContext, AssistId, Assists,
|
||||
AssistContext, AssistId, AssistKind, Assists,
|
||||
};
|
||||
|
||||
// Assist: replace_qualified_name_with_use
|
||||
|
@ -25,7 +25,7 @@ pub(crate) fn replace_qualified_name_with_use(
|
|||
) -> Option<()> {
|
||||
let path: ast::Path = ctx.find_node_at_offset()?;
|
||||
// We don't want to mess with use statements
|
||||
if path.syntax().ancestors().find_map(ast::UseItem::cast).is_some() {
|
||||
if path.syntax().ancestors().find_map(ast::Use::cast).is_some() {
|
||||
return None;
|
||||
}
|
||||
|
||||
|
@ -37,7 +37,7 @@ pub(crate) fn replace_qualified_name_with_use(
|
|||
|
||||
let target = path.syntax().text_range();
|
||||
acc.add(
|
||||
AssistId("replace_qualified_name_with_use"),
|
||||
AssistId("replace_qualified_name_with_use", AssistKind::RefactorRewrite),
|
||||
"Replace qualified path with use",
|
||||
target,
|
||||
|builder| {
|
||||
|
@ -85,7 +85,7 @@ fn shorten_paths(rewriter: &mut SyntaxRewriter<'static>, node: SyntaxNode, path:
|
|||
match child {
|
||||
// Don't modify `use` items, as this can break the `use` item when injecting a new
|
||||
// import into the use tree.
|
||||
ast::UseItem(_it) => continue,
|
||||
ast::Use(_it) => continue,
|
||||
// Don't descend into submodules, they don't have the same `use` items in scope.
|
||||
ast::Module(_it) => continue,
|
||||
|
||||
|
@ -106,7 +106,7 @@ fn maybe_replace_path(
|
|||
path: ast::Path,
|
||||
target: ast::Path,
|
||||
) -> Option<()> {
|
||||
if !path_eq(path.clone(), target.clone()) {
|
||||
if !path_eq(path.clone(), target) {
|
||||
return None;
|
||||
}
|
||||
|
||||
|
|
|
@ -11,7 +11,7 @@ use ra_syntax::{
|
|||
|
||||
use crate::{
|
||||
utils::{render_snippet, Cursor, TryEnum},
|
||||
AssistContext, AssistId, Assists,
|
||||
AssistContext, AssistId, AssistKind, Assists,
|
||||
};
|
||||
|
||||
// Assist: replace_unwrap_with_match
|
||||
|
@ -46,7 +46,11 @@ pub(crate) fn replace_unwrap_with_match(acc: &mut Assists, ctx: &AssistContext)
|
|||
let ty = ctx.sema.type_of_expr(&caller)?;
|
||||
let happy_variant = TryEnum::from_ty(&ctx.sema, &ty)?.happy_case();
|
||||
let target = method_call.syntax().text_range();
|
||||
acc.add(AssistId("replace_unwrap_with_match"), "Replace unwrap with match", target, |builder| {
|
||||
acc.add(
|
||||
AssistId("replace_unwrap_with_match", AssistKind::RefactorRewrite),
|
||||
"Replace unwrap with match",
|
||||
target,
|
||||
|builder| {
|
||||
let ok_path = make::path_unqualified(make::path_segment(make::name_ref(happy_variant)));
|
||||
let it = make::bind_pat(make::name("a")).into();
|
||||
let ok_tuple = make::tuple_struct_pat(ok_path, iter::once(it)).into();
|
||||
|
@ -55,7 +59,8 @@ pub(crate) fn replace_unwrap_with_match(acc: &mut Assists, ctx: &AssistContext)
|
|||
let ok_arm = make::match_arm(iter::once(ok_tuple), make::expr_path(bind_path));
|
||||
|
||||
let unreachable_call = make::expr_unreachable();
|
||||
let err_arm = make::match_arm(iter::once(make::placeholder_pat().into()), unreachable_call);
|
||||
let err_arm =
|
||||
make::match_arm(iter::once(make::placeholder_pat().into()), unreachable_call);
|
||||
|
||||
let match_arm_list = make::match_arm_list(vec![ok_arm, err_arm]);
|
||||
let match_expr = make::expr_match(caller.clone(), match_arm_list)
|
||||
|
@ -76,7 +81,8 @@ pub(crate) fn replace_unwrap_with_match(acc: &mut Assists, ctx: &AssistContext)
|
|||
}
|
||||
None => builder.replace(range, match_expr.to_string()),
|
||||
}
|
||||
})
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
|
|
@ -2,7 +2,7 @@ use std::iter::successors;
|
|||
|
||||
use ra_syntax::{ast, AstNode, T};
|
||||
|
||||
use crate::{AssistContext, AssistId, Assists};
|
||||
use crate::{AssistContext, AssistId, AssistKind, Assists};
|
||||
|
||||
// Assist: split_import
|
||||
//
|
||||
|
@ -28,7 +28,7 @@ pub(crate) fn split_import(acc: &mut Assists, ctx: &AssistContext) -> Option<()>
|
|||
}
|
||||
|
||||
let target = colon_colon.text_range();
|
||||
acc.add(AssistId("split_import"), "Split import", target, |edit| {
|
||||
acc.add(AssistId("split_import", AssistKind::RefactorRewrite), "Split import", target, |edit| {
|
||||
edit.replace_ast(use_tree, new_tree);
|
||||
})
|
||||
}
|
||||
|
|
|
@ -7,7 +7,7 @@ use ra_syntax::{
|
|||
AstNode, TextRange, T,
|
||||
};
|
||||
|
||||
use crate::{AssistContext, AssistId, Assists};
|
||||
use crate::{AssistContext, AssistId, AssistKind, Assists};
|
||||
|
||||
// Assist: unwrap_block
|
||||
//
|
||||
|
@ -27,7 +27,7 @@ use crate::{AssistContext, AssistId, Assists};
|
|||
// }
|
||||
// ```
|
||||
pub(crate) fn unwrap_block(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
|
||||
let assist_id = AssistId("unwrap_block");
|
||||
let assist_id = AssistId("unwrap_block", AssistKind::RefactorRewrite);
|
||||
let assist_label = "Unwrap block";
|
||||
|
||||
let l_curly_token = ctx.find_token_at_offset(T!['{'])?;
|
||||
|
|
|
@@ -26,10 +26,40 @@ pub(crate) use crate::assist_context::{AssistContext, Assists};
|
|||
|
||||
pub use assist_config::AssistConfig;
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub enum AssistKind {
|
||||
None,
|
||||
QuickFix,
|
||||
Generate,
|
||||
Refactor,
|
||||
RefactorExtract,
|
||||
RefactorInline,
|
||||
RefactorRewrite,
|
||||
}
|
||||
|
||||
impl AssistKind {
|
||||
pub fn contains(self, other: AssistKind) -> bool {
|
||||
if self == other {
|
||||
return true;
|
||||
}
|
||||
|
||||
match self {
|
||||
AssistKind::None | AssistKind::Generate => return true,
|
||||
AssistKind::Refactor => match other {
|
||||
AssistKind::RefactorExtract
|
||||
| AssistKind::RefactorInline
|
||||
| AssistKind::RefactorRewrite => return true,
|
||||
_ => return false,
|
||||
},
|
||||
_ => return false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Unique identifier of the assist, should not be shown to the user
|
||||
/// directly.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub struct AssistId(pub &'static str);
|
||||
pub struct AssistId(pub &'static str, pub AssistKind);
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct GroupLabel(pub String);
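A standalone sketch of how the new kind information is meant to drive filtering (it mirrors the `AssistKind` definition above; the `is_allowed` helper is hypothetical, the real check lives in the assist config handling):

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum AssistKind {
    None,
    QuickFix,
    Generate,
    Refactor,
    RefactorExtract,
    RefactorInline,
    RefactorRewrite,
}

impl AssistKind {
    fn contains(self, other: AssistKind) -> bool {
        if self == other {
            return true;
        }
        match self {
            // `None` and `Generate` act as wildcards, as in the impl above.
            AssistKind::None | AssistKind::Generate => true,
            AssistKind::Refactor => matches!(
                other,
                AssistKind::RefactorExtract
                    | AssistKind::RefactorInline
                    | AssistKind::RefactorRewrite
            ),
            _ => false,
        }
    }
}

// Hypothetical helper mirroring how an `allowed` list would be consulted:
// an unset filter allows everything.
fn is_allowed(allowed: Option<&[AssistKind]>, kind: AssistKind) -> bool {
    allowed.map_or(true, |kinds| kinds.iter().any(|k| k.contains(kind)))
}

fn main() {
    // `Refactor` covers the more specific refactor kinds...
    assert!(is_allowed(Some(&[AssistKind::Refactor]), AssistKind::RefactorRewrite));
    // ...while a quick-fix-only filter rejects them.
    assert!(!is_allowed(Some(&[AssistKind::QuickFix]), AssistKind::RefactorExtract));
}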
|
||||
|
@@ -102,13 +132,8 @@ mod handlers {
|
|||
pub(crate) type Handler = fn(&mut Assists, &AssistContext) -> Option<()>;
|
||||
|
||||
mod add_custom_impl;
|
||||
mod add_derive;
|
||||
mod add_explicit_type;
|
||||
mod add_from_impl_for_enum;
|
||||
mod add_function;
|
||||
mod add_impl;
|
||||
mod add_missing_impl_members;
|
||||
mod add_new;
|
||||
mod add_turbo_fish;
|
||||
mod apply_demorgan;
|
||||
mod auto_import;
|
||||
|
@ -122,6 +147,11 @@ mod handlers {
|
|||
mod flip_binexpr;
|
||||
mod flip_comma;
|
||||
mod flip_trait_bound;
|
||||
mod generate_derive;
|
||||
mod generate_from_impl_for_enum;
|
||||
mod generate_function;
|
||||
mod generate_impl;
|
||||
mod generate_new;
|
||||
mod inline_local_variable;
|
||||
mod introduce_named_lifetime;
|
||||
mod invert_if;
|
||||
|
@ -144,12 +174,7 @@ mod handlers {
|
|||
&[
|
||||
// These are alphabetic for the foolish consistency
|
||||
add_custom_impl::add_custom_impl,
|
||||
add_derive::add_derive,
|
||||
add_explicit_type::add_explicit_type,
|
||||
add_from_impl_for_enum::add_from_impl_for_enum,
|
||||
add_function::add_function,
|
||||
add_impl::add_impl,
|
||||
add_new::add_new,
|
||||
add_turbo_fish::add_turbo_fish,
|
||||
apply_demorgan::apply_demorgan,
|
||||
auto_import::auto_import,
|
||||
|
@ -163,6 +188,11 @@ mod handlers {
|
|||
flip_binexpr::flip_binexpr,
|
||||
flip_comma::flip_comma,
|
||||
flip_trait_bound::flip_trait_bound,
|
||||
generate_derive::generate_derive,
|
||||
generate_from_impl_for_enum::generate_from_impl_for_enum,
|
||||
generate_function::generate_function,
|
||||
generate_impl::generate_impl,
|
||||
generate_new::generate_new,
|
||||
inline_local_variable::inline_local_variable,
|
||||
introduce_named_lifetime::introduce_named_lifetime,
|
||||
invert_if::invert_if,
|
||||
|
|
|
@ -6,7 +6,7 @@ use ra_ide_db::RootDatabase;
|
|||
use ra_syntax::TextRange;
|
||||
use test_utils::{assert_eq_text, extract_offset, extract_range};
|
||||
|
||||
use crate::{handlers::Handler, Assist, AssistConfig, AssistContext, Assists};
|
||||
use crate::{handlers::Handler, Assist, AssistConfig, AssistContext, AssistKind, Assists};
|
||||
use stdx::trim_indent;
|
||||
|
||||
pub(crate) fn with_single_file(text: &str) -> (RootDatabase, FileId) {
|
||||
|
@@ -134,3 +134,46 @@ fn assist_order_if_expr() {
|
|||
assert_eq!(assists.next().expect("expected assist").assist.label, "Extract into variable");
|
||||
assert_eq!(assists.next().expect("expected assist").assist.label, "Replace with match");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn assist_filter_works() {
|
||||
let before = "
|
||||
pub fn test_some_range(a: int) -> bool {
|
||||
if let 2..6 = <|>5<|> {
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}";
|
||||
let (range, before) = extract_range(before);
|
||||
let (db, file_id) = with_single_file(&before);
|
||||
let frange = FileRange { file_id, range };
|
||||
|
||||
{
|
||||
let mut cfg = AssistConfig::default();
|
||||
cfg.allowed = Some(vec![AssistKind::Refactor]);
|
||||
|
||||
let assists = Assist::resolved(&db, &cfg, frange);
|
||||
let mut assists = assists.iter();
|
||||
|
||||
assert_eq!(assists.next().expect("expected assist").assist.label, "Extract into variable");
|
||||
assert_eq!(assists.next().expect("expected assist").assist.label, "Replace with match");
|
||||
}
|
||||
|
||||
{
|
||||
let mut cfg = AssistConfig::default();
|
||||
cfg.allowed = Some(vec![AssistKind::RefactorExtract]);
|
||||
let assists = Assist::resolved(&db, &cfg, frange);
|
||||
assert_eq!(assists.len(), 1);
|
||||
|
||||
let mut assists = assists.iter();
|
||||
assert_eq!(assists.next().expect("expected assist").assist.label, "Extract into variable");
|
||||
}
|
||||
|
||||
{
|
||||
let mut cfg = AssistConfig::default();
|
||||
cfg.allowed = Some(vec![AssistKind::QuickFix]);
|
||||
let assists = Assist::resolved(&db, &cfg, frange);
|
||||
assert!(assists.is_empty(), "All assists but quickfixes should be filtered out");
|
||||
}
|
||||
}
|
||||
|
|
|
@ -21,26 +21,6 @@ impl Debug for S {
|
|||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn doctest_add_derive() {
|
||||
check_doc_test(
|
||||
"add_derive",
|
||||
r#####"
|
||||
struct Point {
|
||||
x: u32,
|
||||
y: u32,<|>
|
||||
}
|
||||
"#####,
|
||||
r#####"
|
||||
#[derive($0)]
|
||||
struct Point {
|
||||
x: u32,
|
||||
y: u32,
|
||||
}
|
||||
"#####,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn doctest_add_explicit_type() {
|
||||
check_doc_test(
|
||||
|
@ -58,52 +38,6 @@ fn main() {
|
|||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn doctest_add_from_impl_for_enum() {
|
||||
check_doc_test(
|
||||
"add_from_impl_for_enum",
|
||||
r#####"
|
||||
enum A { <|>One(u32) }
|
||||
"#####,
|
||||
r#####"
|
||||
enum A { One(u32) }
|
||||
|
||||
impl From<u32> for A {
|
||||
fn from(v: u32) -> Self {
|
||||
A::One(v)
|
||||
}
|
||||
}
|
||||
"#####,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn doctest_add_function() {
|
||||
check_doc_test(
|
||||
"add_function",
|
||||
r#####"
|
||||
struct Baz;
|
||||
fn baz() -> Baz { Baz }
|
||||
fn foo() {
|
||||
bar<|>("", baz());
|
||||
}
|
||||
|
||||
"#####,
|
||||
r#####"
|
||||
struct Baz;
|
||||
fn baz() -> Baz { Baz }
|
||||
fn foo() {
|
||||
bar("", baz());
|
||||
}
|
||||
|
||||
fn bar(arg: &str, baz: Baz) {
|
||||
${0:todo!()}
|
||||
}
|
||||
|
||||
"#####,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn doctest_add_hash() {
|
||||
check_doc_test(
|
||||
|
@ -121,27 +55,6 @@ fn main() {
|
|||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn doctest_add_impl() {
|
||||
check_doc_test(
|
||||
"add_impl",
|
||||
r#####"
|
||||
struct Ctx<T: Clone> {
|
||||
data: T,<|>
|
||||
}
|
||||
"#####,
|
||||
r#####"
|
||||
struct Ctx<T: Clone> {
|
||||
data: T,
|
||||
}
|
||||
|
||||
impl<T: Clone> Ctx<T> {
|
||||
$0
|
||||
}
|
||||
"#####,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn doctest_add_impl_default_members() {
|
||||
check_doc_test(
|
||||
|
@ -208,28 +121,6 @@ impl Trait<u32> for () {
|
|||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn doctest_add_new() {
|
||||
check_doc_test(
|
||||
"add_new",
|
||||
r#####"
|
||||
struct Ctx<T: Clone> {
|
||||
data: T,<|>
|
||||
}
|
||||
"#####,
|
||||
r#####"
|
||||
struct Ctx<T: Clone> {
|
||||
data: T,
|
||||
}
|
||||
|
||||
impl<T: Clone> Ctx<T> {
|
||||
fn $0new(data: T) -> Self { Self { data } }
|
||||
}
|
||||
|
||||
"#####,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn doctest_add_turbo_fish() {
|
||||
check_doc_test(
|
||||
|
@ -466,6 +357,115 @@ fn foo<T: Copy + Clone>() { }
|
|||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn doctest_generate_derive() {
|
||||
check_doc_test(
|
||||
"generate_derive",
|
||||
r#####"
|
||||
struct Point {
|
||||
x: u32,
|
||||
y: u32,<|>
|
||||
}
|
||||
"#####,
|
||||
r#####"
|
||||
#[derive($0)]
|
||||
struct Point {
|
||||
x: u32,
|
||||
y: u32,
|
||||
}
|
||||
"#####,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn doctest_generate_from_impl_for_enum() {
|
||||
check_doc_test(
|
||||
"generate_from_impl_for_enum",
|
||||
r#####"
|
||||
enum A { <|>One(u32) }
|
||||
"#####,
|
||||
r#####"
|
||||
enum A { One(u32) }
|
||||
|
||||
impl From<u32> for A {
|
||||
fn from(v: u32) -> Self {
|
||||
A::One(v)
|
||||
}
|
||||
}
|
||||
"#####,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn doctest_generate_function() {
|
||||
check_doc_test(
|
||||
"generate_function",
|
||||
r#####"
|
||||
struct Baz;
|
||||
fn baz() -> Baz { Baz }
|
||||
fn foo() {
|
||||
bar<|>("", baz());
|
||||
}
|
||||
|
||||
"#####,
|
||||
r#####"
|
||||
struct Baz;
|
||||
fn baz() -> Baz { Baz }
|
||||
fn foo() {
|
||||
bar("", baz());
|
||||
}
|
||||
|
||||
fn bar(arg: &str, baz: Baz) {
|
||||
${0:todo!()}
|
||||
}
|
||||
|
||||
"#####,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn doctest_generate_impl() {
|
||||
check_doc_test(
|
||||
"generate_impl",
|
||||
r#####"
|
||||
struct Ctx<T: Clone> {
|
||||
data: T,<|>
|
||||
}
|
||||
"#####,
|
||||
r#####"
|
||||
struct Ctx<T: Clone> {
|
||||
data: T,
|
||||
}
|
||||
|
||||
impl<T: Clone> Ctx<T> {
|
||||
$0
|
||||
}
|
||||
"#####,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn doctest_generate_new() {
|
||||
check_doc_test(
|
||||
"generate_new",
|
||||
r#####"
|
||||
struct Ctx<T: Clone> {
|
||||
data: T,<|>
|
||||
}
|
||||
"#####,
|
||||
r#####"
|
||||
struct Ctx<T: Clone> {
|
||||
data: T,
|
||||
}
|
||||
|
||||
impl<T: Clone> Ctx<T> {
|
||||
fn $0new(data: T) -> Self { Self { data } }
|
||||
}
|
||||
|
||||
"#####,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn doctest_inline_local_variable() {
|
||||
check_doc_test(
|
||||
|
|
|
@ -56,33 +56,34 @@ pub(crate) fn render_snippet(_cap: SnippetCap, node: &SyntaxNode, cursor: Cursor
|
|||
|
||||
pub fn get_missing_assoc_items(
|
||||
sema: &Semantics<RootDatabase>,
|
||||
impl_def: &ast::ImplDef,
|
||||
impl_def: &ast::Impl,
|
||||
) -> Vec<hir::AssocItem> {
|
||||
// Names must be unique between constants and functions. However, type aliases
|
||||
// may share the same name as a function or constant.
|
||||
let mut impl_fns_consts = FxHashSet::default();
|
||||
let mut impl_type = FxHashSet::default();
|
||||
|
||||
if let Some(item_list) = impl_def.item_list() {
|
||||
if let Some(item_list) = impl_def.assoc_item_list() {
|
||||
for item in item_list.assoc_items() {
|
||||
match item {
|
||||
ast::AssocItem::FnDef(f) => {
|
||||
ast::AssocItem::Fn(f) => {
|
||||
if let Some(n) = f.name() {
|
||||
impl_fns_consts.insert(n.syntax().to_string());
|
||||
}
|
||||
}
|
||||
|
||||
ast::AssocItem::TypeAliasDef(t) => {
|
||||
ast::AssocItem::TypeAlias(t) => {
|
||||
if let Some(n) = t.name() {
|
||||
impl_type.insert(n.syntax().to_string());
|
||||
}
|
||||
}
|
||||
|
||||
ast::AssocItem::ConstDef(c) => {
|
||||
ast::AssocItem::Const(c) => {
|
||||
if let Some(n) = c.name() {
|
||||
impl_fns_consts.insert(n.syntax().to_string());
|
||||
}
|
||||
}
|
||||
ast::AssocItem::MacroCall(_) => (),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -108,7 +109,7 @@ pub fn get_missing_assoc_items(
|
|||
|
||||
pub(crate) fn resolve_target_trait(
|
||||
sema: &Semantics<RootDatabase>,
|
||||
impl_def: &ast::ImplDef,
|
||||
impl_def: &ast::Impl,
|
||||
) -> Option<hir::Trait> {
|
||||
let ast_path = impl_def
|
||||
.target_trait()
|
||||
|
|
|
@ -215,7 +215,7 @@ fn walk_use_tree_for_best_action(
|
|||
let prev_len = current_path_segments.len();
|
||||
|
||||
let tree_list = current_use_tree.use_tree_list();
|
||||
let alias = current_use_tree.alias();
|
||||
let alias = current_use_tree.rename();
|
||||
|
||||
let path = match current_use_tree.path() {
|
||||
Some(path) => path,
|
||||
|
@ -225,7 +225,7 @@ fn walk_use_tree_for_best_action(
|
|||
current_use_tree
|
||||
.syntax()
|
||||
.ancestors()
|
||||
.find_map(ast::UseItem::cast)
|
||||
.find_map(ast::Use::cast)
|
||||
.map(|it| it.syntax().clone()),
|
||||
true,
|
||||
);
|
||||
|
@ -254,7 +254,7 @@ fn walk_use_tree_for_best_action(
|
|||
current_use_tree
|
||||
.syntax()
|
||||
.ancestors()
|
||||
.find_map(ast::UseItem::cast)
|
||||
.find_map(ast::Use::cast)
|
||||
.map(|it| it.syntax().clone()),
|
||||
true,
|
||||
),
|
||||
|
@ -304,7 +304,7 @@ fn walk_use_tree_for_best_action(
|
|||
current_use_tree
|
||||
.syntax()
|
||||
.ancestors()
|
||||
.find_map(ast::UseItem::cast)
|
||||
.find_map(ast::Use::cast)
|
||||
.map(|it| it.syntax().clone()),
|
||||
true,
|
||||
);
|
||||
|
@ -377,7 +377,7 @@ fn best_action_for_target(
|
|||
let mut storage = Vec::with_capacity(16); // this should be the only allocation
|
||||
let best_action = container
|
||||
.children()
|
||||
.filter_map(ast::UseItem::cast)
|
||||
.filter_map(ast::Use::cast)
|
||||
.filter_map(|it| it.use_tree())
|
||||
.map(|u| walk_use_tree_for_best_action(&mut storage, None, u, target))
|
||||
.fold(None, |best, a| match best {
|
||||
|
|
|
@ -3,6 +3,7 @@ edition = "2018"
|
|||
name = "ra_cfg"
|
||||
version = "0.1.0"
|
||||
authors = ["rust-analyzer developers"]
|
||||
license = "MIT OR Apache-2.0"
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
|
|
@ -5,7 +5,6 @@
|
|||
use std::slice::Iter as SliceIter;
|
||||
|
||||
use ra_syntax::SmolStr;
|
||||
use tt::{Leaf, Subtree, TokenTree};
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub enum CfgExpr {
|
||||
|
@ -18,6 +17,9 @@ pub enum CfgExpr {
|
|||
}
|
||||
|
||||
impl CfgExpr {
|
||||
pub fn parse(tt: &tt::Subtree) -> CfgExpr {
|
||||
next_cfg_expr(&mut tt.token_trees.iter()).unwrap_or(CfgExpr::Invalid)
|
||||
}
|
||||
/// Fold the cfg by querying all basic `Atom` and `KeyValue` predicates.
|
||||
pub fn fold(&self, query: &dyn Fn(&SmolStr, Option<&SmolStr>) -> bool) -> Option<bool> {
|
||||
match self {
|
||||
|
@ -35,22 +37,18 @@ impl CfgExpr {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn parse_cfg(tt: &Subtree) -> CfgExpr {
|
||||
next_cfg_expr(&mut tt.token_trees.iter()).unwrap_or(CfgExpr::Invalid)
|
||||
}
|
||||
|
||||
fn next_cfg_expr(it: &mut SliceIter<tt::TokenTree>) -> Option<CfgExpr> {
|
||||
let name = match it.next() {
|
||||
None => return None,
|
||||
Some(TokenTree::Leaf(Leaf::Ident(ident))) => ident.text.clone(),
|
||||
Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) => ident.text.clone(),
|
||||
Some(_) => return Some(CfgExpr::Invalid),
|
||||
};
|
||||
|
||||
// Peek
|
||||
let ret = match it.as_slice().first() {
|
||||
Some(TokenTree::Leaf(Leaf::Punct(punct))) if punct.char == '=' => {
|
||||
Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) if punct.char == '=' => {
|
||||
match it.as_slice().get(1) {
|
||||
Some(TokenTree::Leaf(Leaf::Literal(literal))) => {
|
||||
Some(tt::TokenTree::Leaf(tt::Leaf::Literal(literal))) => {
|
||||
it.next();
|
||||
it.next();
|
||||
// FIXME: escape? raw string?
|
||||
|
@ -61,7 +59,7 @@ fn next_cfg_expr(it: &mut SliceIter<tt::TokenTree>) -> Option<CfgExpr> {
|
|||
_ => return Some(CfgExpr::Invalid),
|
||||
}
|
||||
}
|
||||
Some(TokenTree::Subtree(subtree)) => {
|
||||
Some(tt::TokenTree::Subtree(subtree)) => {
|
||||
it.next();
|
||||
let mut sub_it = subtree.token_trees.iter();
|
||||
let mut subs = std::iter::from_fn(|| next_cfg_expr(&mut sub_it)).collect();
|
||||
|
@ -76,7 +74,7 @@ fn next_cfg_expr(it: &mut SliceIter<tt::TokenTree>) -> Option<CfgExpr> {
|
|||
};
|
||||
|
||||
// Eat comma separator
|
||||
if let Some(TokenTree::Leaf(Leaf::Punct(punct))) = it.as_slice().first() {
|
||||
if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = it.as_slice().first() {
|
||||
if punct.char == ',' {
|
||||
it.next();
|
||||
}
|
||||
|
@ -99,7 +97,8 @@ mod tests {
|
|||
|
||||
fn assert_parse_result(input: &str, expected: CfgExpr) {
|
||||
let (tt, _) = get_token_tree_generated(input);
|
||||
assert_eq!(parse_cfg(&tt), expected);
|
||||
let cfg = CfgExpr::parse(&tt);
|
||||
assert_eq!(cfg, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
|
@@ -5,7 +5,7 @@ mod cfg_expr;
|
|||
use ra_syntax::SmolStr;
|
||||
use rustc_hash::FxHashSet;
|
||||
|
||||
pub use cfg_expr::{parse_cfg, CfgExpr};
|
||||
pub use cfg_expr::CfgExpr;
|
||||
|
||||
/// Configuration options used for conditional compilation on items with `cfg` attributes.
|
||||
/// We have two kinds of options in different namespaces: atomic options like `unix`, and
|
||||
|
@ -31,19 +31,21 @@ impl CfgOptions {
|
|||
})
|
||||
}
|
||||
|
||||
pub fn is_cfg_enabled(&self, attr: &tt::Subtree) -> Option<bool> {
|
||||
self.check(&parse_cfg(attr))
|
||||
}
|
||||
|
||||
pub fn insert_atom(&mut self, key: SmolStr) {
|
||||
self.atoms.insert(key);
|
||||
}
|
||||
|
||||
pub fn remove_atom(&mut self, name: &str) {
|
||||
self.atoms.remove(name);
|
||||
}
|
||||
|
||||
pub fn insert_key_value(&mut self, key: SmolStr, value: SmolStr) {
|
||||
self.key_values.insert((key, value));
|
||||
}
|
||||
|
||||
pub fn append(&mut self, other: &CfgOptions) {
|
||||
for atom in &other.atoms {
|
||||
self.atoms.insert(atom.clone());
|
||||
}
|
||||
|
||||
for (key, value) in &other.key_values {
|
||||
self.key_values.insert((key.clone(), value.clone()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -3,13 +3,13 @@ edition = "2018"
|
|||
name = "ra_db"
|
||||
version = "0.1.0"
|
||||
authors = ["rust-analyzer developers"]
|
||||
license = "MIT OR Apache-2.0"
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
salsa = "0.14.1"
|
||||
relative-path = "1.0.0"
|
||||
salsa = "0.15.0"
|
||||
rustc-hash = "1.1.0"
|
||||
|
||||
ra_syntax = { path = "../ra_syntax" }
|
||||
|
|
|
@ -149,15 +149,17 @@ fn with_files(
|
|||
let crate_id = crate_graph.add_crate_root(
|
||||
file_id,
|
||||
meta.edition,
|
||||
Some(CrateName::new(&krate).unwrap()),
|
||||
Some(krate.clone()),
|
||||
meta.cfg,
|
||||
meta.env,
|
||||
Default::default(),
|
||||
);
|
||||
let prev = crates.insert(krate.clone(), crate_id);
|
||||
let crate_name = CrateName::new(&krate).unwrap();
|
||||
let prev = crates.insert(crate_name.clone(), crate_id);
|
||||
assert!(prev.is_none());
|
||||
for dep in meta.deps {
|
||||
crate_deps.push((krate.clone(), dep))
|
||||
let dep = CrateName::new(&dep).unwrap();
|
||||
crate_deps.push((crate_name.clone(), dep))
|
||||
}
|
||||
} else if meta.path == "/main.rs" || meta.path == "/lib.rs" {
|
||||
assert!(default_crate_root.is_none());
|
||||
|
@@ -220,7 +222,7 @@ impl From<Fixture> for FileMeta {
|
|||
.edition
|
||||
.as_ref()
|
||||
.map_or(Edition::Edition2018, |v| Edition::from_str(&v).unwrap()),
|
||||
env: Env::from(f.env.iter()),
|
||||
env: f.env.into_iter().collect(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -6,7 +6,7 @@
|
|||
//! actual IO. See `vfs` and `project_model` in the `rust-analyzer` crate for how
|
||||
//! actual IO is done and lowered to input.
|
||||
|
||||
use std::{fmt, ops, str::FromStr, sync::Arc};
|
||||
use std::{fmt, iter::FromIterator, ops, str::FromStr, sync::Arc};
|
||||
|
||||
use ra_cfg::CfgOptions;
|
||||
use ra_syntax::SmolStr;
|
||||
|
@@ -67,7 +67,7 @@ pub struct CrateGraph {
|
|||
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
pub struct CrateId(pub u32);
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct CrateName(SmolStr);
|
||||
|
||||
impl CrateName {
|
||||
|
@@ -94,6 +94,13 @@ impl fmt::Display for CrateName {
|
|||
}
|
||||
}
|
||||
|
||||
impl ops::Deref for CrateName {
|
||||
type Target = str;
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&*self.0
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct ProcMacroId(pub u32);
|
||||
|
||||
|
@@ -117,7 +124,7 @@ pub struct CrateData {
|
|||
/// The name to display to the end user.
|
||||
/// This actual crate name can be different in a particular dependent crate
|
||||
/// or may even be missing for some cases, such as a dummy crate for the code snippet.
|
||||
pub display_name: Option<CrateName>,
|
||||
pub display_name: Option<String>,
|
||||
pub cfg_options: CfgOptions,
|
||||
pub env: Env,
|
||||
pub dependencies: Vec<Dependency>,
|
||||
|
@@ -138,7 +145,7 @@ pub struct Env {
|
|||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub struct Dependency {
|
||||
pub crate_id: CrateId,
|
||||
pub name: SmolStr,
|
||||
pub name: CrateName,
|
||||
}
|
||||
|
||||
impl CrateGraph {
|
||||
|
@@ -146,7 +153,7 @@ impl CrateGraph {
|
|||
&mut self,
|
||||
file_id: FileId,
|
||||
edition: Edition,
|
||||
display_name: Option<CrateName>,
|
||||
display_name: Option<String>,
|
||||
cfg_options: CfgOptions,
|
||||
env: Env,
|
||||
proc_macro: Vec<(SmolStr, Arc<dyn ra_tt::TokenExpander>)>,
|
||||
|
@@ -178,7 +185,7 @@ impl CrateGraph {
|
|||
if self.dfs_find(from, to, &mut FxHashSet::default()) {
|
||||
return Err(CyclicDependenciesError);
|
||||
}
|
||||
self.arena.get_mut(&from).unwrap().add_dep(name.0, to);
|
||||
self.arena.get_mut(&from).unwrap().add_dep(name, to);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
@@ -190,6 +197,23 @@ impl CrateGraph {
|
|||
self.arena.keys().copied()
|
||||
}
|
||||
|
||||
/// Returns an iterator over all transitive dependencies of the given crate.
|
||||
pub fn transitive_deps(&self, of: CrateId) -> impl Iterator<Item = CrateId> + '_ {
|
||||
let mut worklist = vec![of];
|
||||
let mut deps = FxHashSet::default();
|
||||
|
||||
while let Some(krate) = worklist.pop() {
|
||||
if !deps.insert(krate) {
|
||||
continue;
|
||||
}
|
||||
|
||||
worklist.extend(self[krate].dependencies.iter().map(|dep| dep.crate_id));
|
||||
}
|
||||
|
||||
deps.remove(&of);
|
||||
deps.into_iter()
|
||||
}
|
||||
|
||||
// FIXME: this only finds one crate with the given root; we could have multiple
|
||||
pub fn crate_id_for_crate_root(&self, file_id: FileId) -> Option<CrateId> {
|
||||
let (&crate_id, _) =
|
||||
|
@@ -247,7 +271,7 @@ impl CrateId {
|
|||
}
|
||||
|
||||
impl CrateData {
|
||||
fn add_dep(&mut self, name: SmolStr, crate_id: CrateId) {
|
||||
fn add_dep(&mut self, name: CrateName, crate_id: CrateId) {
|
||||
self.dependencies.push(Dependency { name, crate_id })
|
||||
}
|
||||
}
|
||||
|
@@ -274,18 +298,9 @@ impl fmt::Display for Edition {
|
|||
}
|
||||
}
|
||||
|
||||
impl<'a, T> From<T> for Env
|
||||
where
|
||||
T: Iterator<Item = (&'a String, &'a String)>,
|
||||
{
|
||||
fn from(iter: T) -> Self {
|
||||
let mut result = Self::default();
|
||||
|
||||
for (k, v) in iter {
|
||||
result.entries.insert(k.to_owned(), v.to_owned());
|
||||
}
|
||||
|
||||
result
|
||||
impl FromIterator<(String, String)> for Env {
|
||||
fn from_iter<T: IntoIterator<Item = (String, String)>>(iter: T) -> Self {
|
||||
Env { entries: FromIterator::from_iter(iter) }
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -429,7 +444,10 @@ mod tests {
|
|||
.is_ok());
|
||||
assert_eq!(
|
||||
graph[crate1].dependencies,
|
||||
vec![Dependency { crate_id: crate2, name: "crate_name_with_dashes".into() }]
|
||||
vec![Dependency {
|
||||
crate_id: crate2,
|
||||
name: CrateName::new("crate_name_with_dashes").unwrap()
|
||||
}]
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -16,9 +16,8 @@ pub use crate::{
|
|||
SourceRoot, SourceRootId,
|
||||
},
|
||||
};
|
||||
pub use relative_path::{RelativePath, RelativePathBuf};
|
||||
pub use salsa;
|
||||
pub use vfs::{file_set::FileSet, AbsPathBuf, VfsPath};
|
||||
pub use vfs::{file_set::FileSet, VfsPath};
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! impl_intern_key {
|
||||
|
@@ -80,7 +79,7 @@ pub struct FilePosition {
|
|||
pub offset: TextSize,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
|
||||
pub struct FileRange {
|
||||
pub file_id: FileId,
|
||||
pub range: TextRange,
|
||||
|
@@ -93,9 +92,9 @@ pub trait FileLoader {
|
|||
fn file_text(&self, file_id: FileId) -> Arc<String>;
|
||||
/// Note that we intentionally accept a `&str` and not a `&Path` here. This
|
||||
/// method exists to handle `#[path = "/some/path.rs"] mod foo;` and such,
|
||||
/// so the input is guaranteed to be a utf-8 string. We might introduce
|
||||
/// `struct StrPath(str)` for clarity some day, but it's a bit messy, so we
|
||||
/// get by with a `&str` for the time being.
|
||||
/// so the input is guaranteed to be a utf-8 string. One might be tempted to
|
||||
/// introduce some kind of "utf-8 path with / separators", but that's a bad idea. Behold
|
||||
/// `#[path = "C://no/way"]`
|
||||
fn resolve_path(&self, anchor: FileId, path: &str) -> Option<FileId>;
|
||||
fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>>;
|
||||
}
|
||||
|
@@ -113,7 +112,7 @@ pub trait SourceDatabase: CheckCanceled + FileLoader + std::fmt::Debug {
|
|||
fn crate_graph(&self) -> Arc<CrateGraph>;
|
||||
}
|
||||
|
||||
fn parse_query(db: &impl SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
|
||||
fn parse_query(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
|
||||
let _p = profile("parse_query").detail(|| format!("{:?}", file_id));
|
||||
let text = db.file_text(file_id);
|
||||
SourceFile::parse(&*text)
|
||||
|
@@ -136,10 +135,7 @@ pub trait SourceDatabaseExt: SourceDatabase {
|
|||
fn source_root_crates(&self, id: SourceRootId) -> Arc<FxHashSet<CrateId>>;
|
||||
}
|
||||
|
||||
fn source_root_crates(
|
||||
db: &(impl SourceDatabaseExt + SourceDatabase),
|
||||
id: SourceRootId,
|
||||
) -> Arc<FxHashSet<CrateId>> {
|
||||
fn source_root_crates(db: &dyn SourceDatabaseExt, id: SourceRootId) -> Arc<FxHashSet<CrateId>> {
|
||||
let graph = db.crate_graph();
|
||||
let res = graph
|
||||
.iter()
|
||||
|
|
|
@@ -4,6 +4,7 @@ name = "ra_fmt"
|
|||
version = "0.1.0"
|
||||
authors = ["rust-analyzer developers"]
|
||||
publish = false
|
||||
license = "MIT OR Apache-2.0"
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
|
|
@@ -3,6 +3,7 @@ edition = "2018"
|
|||
name = "ra_hir"
|
||||
version = "0.1.0"
|
||||
authors = ["rust-analyzer developers"]
|
||||
license = "MIT OR Apache-2.0"
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
@@ -15,6 +16,7 @@ arrayvec = "0.5.1"
|
|||
|
||||
itertools = "0.9.0"
|
||||
|
||||
stdx = { path = "../stdx" }
|
||||
ra_syntax = { path = "../ra_syntax" }
|
||||
ra_db = { path = "../ra_db" }
|
||||
ra_prof = { path = "../ra_prof" }
|
||||
|
|
|
@@ -1,5 +1,5 @@
|
|||
//! FIXME: write short doc here
|
||||
use std::sync::Arc;
|
||||
use std::{iter, sync::Arc};
|
||||
|
||||
use arrayvec::ArrayVec;
|
||||
use either::Either;
|
||||
|
@@ -12,6 +12,7 @@ use hir_def::{
|
|||
import_map,
|
||||
per_ns::PerNs,
|
||||
resolver::{HasResolver, Resolver},
|
||||
src::HasSource as _,
|
||||
type_ref::{Mutability, TypeRef},
|
||||
AdtId, AssocContainerId, ConstId, DefWithBodyId, EnumId, FunctionId, GenericDefId, HasModule,
|
||||
ImplId, LocalEnumVariantId, LocalFieldId, LocalModuleId, Lookup, ModuleId, StaticId, StructId,
|
||||
|
@@ -25,21 +26,22 @@ use hir_expand::{
|
|||
use hir_ty::{
|
||||
autoderef,
|
||||
display::{HirDisplayError, HirFormatter},
|
||||
expr::ExprValidator,
|
||||
method_resolution,
|
||||
unsafe_validation::UnsafeValidator,
|
||||
ApplicationTy, Canonical, GenericPredicate, InEnvironment, Substs, TraitEnvironment, Ty,
|
||||
TyDefId, TypeCtor,
|
||||
method_resolution, ApplicationTy, CallableDefId, Canonical, FnSig, GenericPredicate,
|
||||
InEnvironment, Substs, TraitEnvironment, Ty, TyDefId, TypeCtor,
|
||||
};
|
||||
use ra_db::{CrateId, CrateName, Edition, FileId};
|
||||
use ra_db::{CrateId, Edition, FileId};
|
||||
use ra_prof::profile;
|
||||
use ra_syntax::ast::{self, AttrsOwner, NameOwner};
|
||||
use ra_syntax::{
|
||||
ast::{self, AttrsOwner, NameOwner},
|
||||
AstNode,
|
||||
};
|
||||
use rustc_hash::FxHashSet;
|
||||
use stdx::impl_from;
|
||||
|
||||
use crate::{
|
||||
db::{DefDatabase, HirDatabase},
|
||||
has_source::HasSource,
|
||||
CallableDef, HirDisplay, InFile, Name,
|
||||
HirDisplay, InFile, Name,
|
||||
};
|
||||
|
||||
/// hir::Crate describes a single crate. It's the main interface with which
|
||||
|
@@ -94,8 +96,8 @@ impl Crate {
|
|||
db.crate_graph()[self.id].edition
|
||||
}
|
||||
|
||||
pub fn display_name(self, db: &dyn HirDatabase) -> Option<CrateName> {
|
||||
db.crate_graph()[self.id].display_name.as_ref().cloned()
|
||||
pub fn display_name(self, db: &dyn HirDatabase) -> Option<String> {
|
||||
db.crate_graph()[self.id].display_name.clone()
|
||||
}
|
||||
|
||||
pub fn query_external_importables(
|
||||
|
@@ -139,8 +141,8 @@ pub enum ModuleDef {
|
|||
TypeAlias(TypeAlias),
|
||||
BuiltinType(BuiltinType),
|
||||
}
|
||||
impl_froms!(
|
||||
ModuleDef: Module,
|
||||
impl_from!(
|
||||
Module,
|
||||
Function,
|
||||
Adt(Struct, Enum, Union),
|
||||
EnumVariant,
|
||||
|
@@ -149,6 +151,7 @@ impl_froms!(
|
|||
Trait,
|
||||
TypeAlias,
|
||||
BuiltinType
|
||||
for ModuleDef
|
||||
);
|
||||
|
||||
impl ModuleDef {
|
||||
|
@@ -376,8 +379,8 @@ pub struct Field {
|
|||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub enum FieldSource {
|
||||
Named(ast::RecordFieldDef),
|
||||
Pos(ast::TupleFieldDef),
|
||||
Named(ast::RecordField),
|
||||
Pos(ast::TupleField),
|
||||
}
|
||||
|
||||
impl Field {
|
||||
|
@@ -556,7 +559,7 @@ pub enum Adt {
|
|||
Union(Union),
|
||||
Enum(Enum),
|
||||
}
|
||||
impl_froms!(Adt: Struct, Union, Enum);
|
||||
impl_from!(Struct, Union, Enum for Adt);
|
||||
|
||||
impl Adt {
|
||||
pub fn has_non_default_type_params(self, db: &dyn HirDatabase) -> bool {
|
||||
|
@@ -599,7 +602,7 @@ pub enum VariantDef {
|
|||
Union(Union),
|
||||
EnumVariant(EnumVariant),
|
||||
}
|
||||
impl_froms!(VariantDef: Struct, Union, EnumVariant);
|
||||
impl_from!(Struct, Union, EnumVariant for VariantDef);
|
||||
|
||||
impl VariantDef {
|
||||
pub fn fields(self, db: &dyn HirDatabase) -> Vec<Field> {
|
||||
|
@@ -642,8 +645,7 @@ pub enum DefWithBody {
|
|||
Static(Static),
|
||||
Const(Const),
|
||||
}
|
||||
|
||||
impl_froms!(DefWithBody: Function, Const, Static);
|
||||
impl_from!(Function, Const, Static for DefWithBody);
|
||||
|
||||
impl DefWithBody {
|
||||
pub fn module(self, db: &dyn HirDatabase) -> Module {
|
||||
|
@@ -694,13 +696,7 @@ impl Function {
|
|||
}
|
||||
|
||||
pub fn diagnostics(self, db: &dyn HirDatabase, sink: &mut DiagnosticSink) {
|
||||
let _p = profile("Function::diagnostics");
|
||||
let infer = db.infer(self.id.into());
|
||||
infer.add_diagnostics(db, self.id, sink);
|
||||
let mut validator = ExprValidator::new(self.id, infer.clone(), sink);
|
||||
validator.validate_body(db);
|
||||
let mut validator = UnsafeValidator::new(self.id, infer, sink);
|
||||
validator.validate_body(db);
|
||||
hir_ty::diagnostics::validate_body(db, self.id.into(), sink)
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -945,14 +941,15 @@ pub enum GenericDef {
|
|||
// consts can have type parameters from their parents (i.e. associated consts of traits)
|
||||
Const(Const),
|
||||
}
|
||||
impl_froms!(
|
||||
GenericDef: Function,
|
||||
impl_from!(
|
||||
Function,
|
||||
Adt(Struct, Enum, Union),
|
||||
Trait,
|
||||
TypeAlias,
|
||||
ImplDef,
|
||||
EnumVariant,
|
||||
Const
|
||||
for GenericDef
|
||||
);
|
||||
|
||||
impl GenericDef {
|
||||
|
@@ -973,6 +970,16 @@ pub struct Local {
|
|||
}
|
||||
|
||||
impl Local {
|
||||
pub fn is_param(self, db: &dyn HirDatabase) -> bool {
|
||||
let src = self.source(db);
|
||||
match src.value {
|
||||
Either::Left(bind_pat) => {
|
||||
bind_pat.syntax().ancestors().any(|it| ast::Param::can_cast(it.kind()))
|
||||
}
|
||||
Either::Right(_self_param) => true,
|
||||
}
|
||||
}
|
||||
|
||||
// FIXME: why is this an option? It shouldn't be?
|
||||
pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
|
||||
let body = db.body(self.parent.into());
|
||||
|
@@ -1071,12 +1078,14 @@ pub struct ImplDef {
|
|||
|
||||
impl ImplDef {
|
||||
pub fn all_in_crate(db: &dyn HirDatabase, krate: Crate) -> Vec<ImplDef> {
|
||||
let impls = db.impls_in_crate(krate.id);
|
||||
impls.all_impls().map(Self::from).collect()
|
||||
let inherent = db.inherent_impls_in_crate(krate.id);
|
||||
let trait_ = db.trait_impls_in_crate(krate.id);
|
||||
|
||||
inherent.all_impls().chain(trait_.all_impls()).map(Self::from).collect()
|
||||
}
|
||||
pub fn for_trait(db: &dyn HirDatabase, krate: Crate, trait_: Trait) -> Vec<ImplDef> {
|
||||
let impls = db.impls_in_crate(krate.id);
|
||||
impls.lookup_impl_defs_for_trait(trait_.id).map(Self::from).collect()
|
||||
let impls = db.trait_impls_in_crate(krate.id);
|
||||
impls.for_trait(trait_.id).map(Self::from).collect()
|
||||
}
|
||||
|
||||
pub fn target_trait(self, db: &dyn HirDatabase) -> Option<TypeRef> {
|
||||
|
@@ -1178,6 +1187,12 @@ impl Type {
|
|||
Type::new(db, krate, def, ty)
|
||||
}
|
||||
|
||||
pub fn is_unit(&self) -> bool {
|
||||
matches!(
|
||||
self.ty.value,
|
||||
Ty::Apply(ApplicationTy { ctor: TypeCtor::Tuple { cardinality: 0 }, .. })
|
||||
)
|
||||
}
|
||||
pub fn is_bool(&self) -> bool {
|
||||
matches!(self.ty.value, Ty::Apply(ApplicationTy { ctor: TypeCtor::Bool, .. }))
|
||||
}
|
||||
|
@@ -1205,7 +1220,7 @@ impl Type {
|
|||
None => return false,
|
||||
};
|
||||
|
||||
let canonical_ty = Canonical { value: self.ty.value.clone(), num_vars: 0 };
|
||||
let canonical_ty = Canonical { value: self.ty.value.clone(), kinds: Arc::new([]) };
|
||||
method_resolution::implements_trait(
|
||||
&canonical_ty,
|
||||
db,
|
||||
|
@@ -1229,15 +1244,20 @@ impl Type {
|
|||
self.ty.environment.clone(),
|
||||
hir_ty::Obligation::Trait(trait_ref),
|
||||
),
|
||||
num_vars: 0,
|
||||
kinds: Arc::new([]),
|
||||
};
|
||||
|
||||
db.trait_solve(self.krate, goal).is_some()
|
||||
}
|
||||
|
||||
// FIXME: this method is broken, as it doesn't take closures into account.
|
||||
pub fn as_callable(&self) -> Option<CallableDef> {
|
||||
Some(self.ty.value.as_callable()?.0)
|
||||
pub fn as_callable(&self, db: &dyn HirDatabase) -> Option<Callable> {
|
||||
let def = match self.ty.value {
|
||||
Ty::Apply(ApplicationTy { ctor: TypeCtor::FnDef(def), parameters: _ }) => Some(def),
|
||||
_ => None,
|
||||
};
|
||||
|
||||
let sig = self.ty.value.callable_sig(db)?;
|
||||
Some(Callable { ty: self.clone(), sig, def, is_bound_method: false })
|
||||
}
|
||||
|
||||
pub fn is_closure(&self) -> bool {
|
||||
|
@@ -1304,7 +1324,7 @@ impl Type {
|
|||
pub fn autoderef<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Type> + 'a {
|
||||
// There should be no inference vars in types passed here
|
||||
// FIXME check that?
|
||||
let canonical = Canonical { value: self.ty.value.clone(), num_vars: 0 };
|
||||
let canonical = Canonical { value: self.ty.value.clone(), kinds: Arc::new([]) };
|
||||
let environment = self.ty.environment.clone();
|
||||
let ty = InEnvironment { value: canonical, environment };
|
||||
autoderef(db, Some(self.krate), ty)
|
||||
|
@@ -1321,10 +1341,10 @@ impl Type {
|
|||
mut callback: impl FnMut(AssocItem) -> Option<T>,
|
||||
) -> Option<T> {
|
||||
for krate in self.ty.value.def_crates(db, krate.id)? {
|
||||
let impls = db.impls_in_crate(krate);
|
||||
let impls = db.inherent_impls_in_crate(krate);
|
||||
|
||||
for impl_def in impls.lookup_impl_defs(&self.ty.value) {
|
||||
for &item in db.impl_data(impl_def).items.iter() {
|
||||
for impl_def in impls.for_self_ty(&self.ty.value) {
|
||||
for &item in db.impl_data(*impl_def).items.iter() {
|
||||
if let Some(result) = callback(item.into()) {
|
||||
return Some(result);
|
||||
}
|
||||
|
@@ -1345,7 +1365,7 @@ impl Type {
|
|||
// There should be no inference vars in types passed here
|
||||
// FIXME check that?
|
||||
// FIXME replace Unknown by bound vars here
|
||||
let canonical = Canonical { value: self.ty.value.clone(), num_vars: 0 };
|
||||
let canonical = Canonical { value: self.ty.value.clone(), kinds: Arc::new([]) };
|
||||
|
||||
let env = self.ty.environment.clone();
|
||||
let krate = krate.id;
|
||||
|
@@ -1376,7 +1396,7 @@ impl Type {
|
|||
// There should be no inference vars in types passed here
|
||||
// FIXME check that?
|
||||
// FIXME replace Unknown by bound vars here
|
||||
let canonical = Canonical { value: self.ty.value.clone(), num_vars: 0 };
|
||||
let canonical = Canonical { value: self.ty.value.clone(), kinds: Arc::new([]) };
|
||||
|
||||
let env = self.ty.environment.clone();
|
||||
let krate = krate.id;
|
||||
|
@@ -1522,6 +1542,74 @@ impl HirDisplay for Type {
|
|||
}
|
||||
}
|
||||
|
||||
// FIXME: closures
|
||||
#[derive(Debug)]
|
||||
pub struct Callable {
|
||||
ty: Type,
|
||||
sig: FnSig,
|
||||
def: Option<CallableDefId>,
|
||||
pub(crate) is_bound_method: bool,
|
||||
}
|
||||
|
||||
pub enum CallableKind {
|
||||
Function(Function),
|
||||
TupleStruct(Struct),
|
||||
TupleEnumVariant(EnumVariant),
|
||||
Closure,
|
||||
}
|
||||
|
||||
impl Callable {
|
||||
pub fn kind(&self) -> CallableKind {
|
||||
match self.def {
|
||||
Some(CallableDefId::FunctionId(it)) => CallableKind::Function(it.into()),
|
||||
Some(CallableDefId::StructId(it)) => CallableKind::TupleStruct(it.into()),
|
||||
Some(CallableDefId::EnumVariantId(it)) => CallableKind::TupleEnumVariant(it.into()),
|
||||
None => CallableKind::Closure,
|
||||
}
|
||||
}
|
||||
pub fn receiver_param(&self, db: &dyn HirDatabase) -> Option<ast::SelfParam> {
|
||||
let func = match self.def {
|
||||
Some(CallableDefId::FunctionId(it)) if self.is_bound_method => it,
|
||||
_ => return None,
|
||||
};
|
||||
let src = func.lookup(db.upcast()).source(db.upcast());
|
||||
let param_list = src.value.param_list()?;
|
||||
param_list.self_param()
|
||||
}
|
||||
pub fn n_params(&self) -> usize {
|
||||
self.sig.params().len() - if self.is_bound_method { 1 } else { 0 }
|
||||
}
|
||||
pub fn params(
|
||||
&self,
|
||||
db: &dyn HirDatabase,
|
||||
) -> Vec<(Option<Either<ast::SelfParam, ast::Pat>>, Type)> {
|
||||
let types = self
|
||||
.sig
|
||||
.params()
|
||||
.iter()
|
||||
.skip(if self.is_bound_method { 1 } else { 0 })
|
||||
.map(|ty| self.ty.derived(ty.clone()));
|
||||
let patterns = match self.def {
|
||||
Some(CallableDefId::FunctionId(func)) => {
|
||||
let src = func.lookup(db.upcast()).source(db.upcast());
|
||||
src.value.param_list().map(|param_list| {
|
||||
param_list
|
||||
.self_param()
|
||||
.map(|it| Some(Either::Left(it)))
|
||||
.filter(|_| !self.is_bound_method)
|
||||
.into_iter()
|
||||
.chain(param_list.params().map(|it| it.pat().map(Either::Right)))
|
||||
})
|
||||
}
|
||||
_ => None,
|
||||
};
|
||||
patterns.into_iter().flatten().chain(iter::repeat(None)).zip(types).collect()
|
||||
}
|
||||
pub fn return_type(&self) -> Type {
|
||||
self.ty.derived(self.sig.ret().clone())
|
||||
}
|
||||
}
|
||||
|
||||
/// For IDE only
|
||||
#[derive(Debug)]
|
||||
pub enum ScopeDef {
|
||||
|
@@ -1581,8 +1669,8 @@ pub enum AttrDef {
|
|||
MacroDef(MacroDef),
|
||||
}
|
||||
|
||||
impl_froms!(
|
||||
AttrDef: Module,
|
||||
impl_from!(
|
||||
Module,
|
||||
Field,
|
||||
Adt(Struct, Enum, Union),
|
||||
EnumVariant,
|
||||
|
@@ -1592,6 +1680,7 @@ impl_froms!(
|
|||
Trait,
|
||||
TypeAlias,
|
||||
MacroDef
|
||||
for AttrDef
|
||||
);
|
||||
|
||||
pub trait HasAttrs {
|
||||
|
|
|
@@ -11,15 +11,15 @@ pub use hir_def::db::{
|
|||
};
|
||||
pub use hir_expand::db::{
|
||||
AstDatabase, AstDatabaseStorage, AstIdMapQuery, InternEagerExpansionQuery, InternMacroQuery,
|
||||
MacroArgQuery, MacroDefQuery, MacroExpandQuery, ParseMacroQuery,
|
||||
MacroArgTextQuery, MacroDefQuery, MacroExpandQuery, ParseMacroQuery,
|
||||
};
|
||||
pub use hir_ty::db::{
|
||||
AssociatedTyDataQuery, AssociatedTyValueQuery, CallableItemSignatureQuery, FieldTypesQuery,
|
||||
GenericDefaultsQuery, GenericPredicatesForParamQuery, GenericPredicatesQuery, HirDatabase,
|
||||
HirDatabaseStorage, ImplDatumQuery, ImplSelfTyQuery, ImplTraitQuery, ImplsFromDepsQuery,
|
||||
ImplsInCrateQuery, InferQueryQuery, InternAssocTyValueQuery, InternChalkImplQuery,
|
||||
InternTypeCtorQuery, InternTypeParamIdQuery, ReturnTypeImplTraitsQuery, StructDatumQuery,
|
||||
TraitDatumQuery, TraitSolveQuery, TyQuery, ValueTyQuery,
|
||||
HirDatabaseStorage, ImplDatumQuery, ImplSelfTyQuery, ImplTraitQuery, InferQueryQuery,
|
||||
InherentImplsInCrateQuery, InternTypeParamIdQuery, ReturnTypeImplTraitsQuery, StructDatumQuery,
|
||||
TraitDatumQuery, TraitImplsInCrateQuery, TraitImplsInDepsQuery, TraitSolveQuery, TyQuery,
|
||||
ValueTyQuery,
|
||||
};
|
||||
|
||||
#[test]
|
||||
|
|
|
@@ -1,4 +1,8 @@
|
|||
//! FIXME: write short doc here
|
||||
pub use hir_def::diagnostics::UnresolvedModule;
|
||||
pub use hir_expand::diagnostics::{AstDiagnostic, Diagnostic, DiagnosticSink};
|
||||
pub use hir_ty::diagnostics::{MissingFields, MissingMatchArms, MissingOkInTailExpr, NoSuchField};
|
||||
pub use hir_expand::diagnostics::{
|
||||
AstDiagnostic, Diagnostic, DiagnosticSink, DiagnosticSinkBuilder,
|
||||
};
|
||||
pub use hir_ty::diagnostics::{
|
||||
MismatchedArgCount, MissingFields, MissingMatchArms, MissingOkInTailExpr, NoSuchField,
|
||||
};
|
||||
|
|
|
@@ -57,56 +57,56 @@ impl HasSource for Field {
|
|||
}
|
||||
}
|
||||
impl HasSource for Struct {
|
||||
type Ast = ast::StructDef;
|
||||
fn source(self, db: &dyn HirDatabase) -> InFile<ast::StructDef> {
|
||||
type Ast = ast::Struct;
|
||||
fn source(self, db: &dyn HirDatabase) -> InFile<ast::Struct> {
|
||||
self.id.lookup(db.upcast()).source(db.upcast())
|
||||
}
|
||||
}
|
||||
impl HasSource for Union {
|
||||
type Ast = ast::UnionDef;
|
||||
fn source(self, db: &dyn HirDatabase) -> InFile<ast::UnionDef> {
|
||||
type Ast = ast::Union;
|
||||
fn source(self, db: &dyn HirDatabase) -> InFile<ast::Union> {
|
||||
self.id.lookup(db.upcast()).source(db.upcast())
|
||||
}
|
||||
}
|
||||
impl HasSource for Enum {
|
||||
type Ast = ast::EnumDef;
|
||||
fn source(self, db: &dyn HirDatabase) -> InFile<ast::EnumDef> {
|
||||
type Ast = ast::Enum;
|
||||
fn source(self, db: &dyn HirDatabase) -> InFile<ast::Enum> {
|
||||
self.id.lookup(db.upcast()).source(db.upcast())
|
||||
}
|
||||
}
|
||||
impl HasSource for EnumVariant {
|
||||
type Ast = ast::EnumVariant;
|
||||
fn source(self, db: &dyn HirDatabase) -> InFile<ast::EnumVariant> {
|
||||
type Ast = ast::Variant;
|
||||
fn source(self, db: &dyn HirDatabase) -> InFile<ast::Variant> {
|
||||
self.parent.id.child_source(db.upcast()).map(|map| map[self.id].clone())
|
||||
}
|
||||
}
|
||||
impl HasSource for Function {
|
||||
type Ast = ast::FnDef;
|
||||
fn source(self, db: &dyn HirDatabase) -> InFile<ast::FnDef> {
|
||||
type Ast = ast::Fn;
|
||||
fn source(self, db: &dyn HirDatabase) -> InFile<ast::Fn> {
|
||||
self.id.lookup(db.upcast()).source(db.upcast())
|
||||
}
|
||||
}
|
||||
impl HasSource for Const {
|
||||
type Ast = ast::ConstDef;
|
||||
fn source(self, db: &dyn HirDatabase) -> InFile<ast::ConstDef> {
|
||||
type Ast = ast::Const;
|
||||
fn source(self, db: &dyn HirDatabase) -> InFile<ast::Const> {
|
||||
self.id.lookup(db.upcast()).source(db.upcast())
|
||||
}
|
||||
}
|
||||
impl HasSource for Static {
|
||||
type Ast = ast::StaticDef;
|
||||
fn source(self, db: &dyn HirDatabase) -> InFile<ast::StaticDef> {
|
||||
type Ast = ast::Static;
|
||||
fn source(self, db: &dyn HirDatabase) -> InFile<ast::Static> {
|
||||
self.id.lookup(db.upcast()).source(db.upcast())
|
||||
}
|
||||
}
|
||||
impl HasSource for Trait {
|
||||
type Ast = ast::TraitDef;
|
||||
fn source(self, db: &dyn HirDatabase) -> InFile<ast::TraitDef> {
|
||||
type Ast = ast::Trait;
|
||||
fn source(self, db: &dyn HirDatabase) -> InFile<ast::Trait> {
|
||||
self.id.lookup(db.upcast()).source(db.upcast())
|
||||
}
|
||||
}
|
||||
impl HasSource for TypeAlias {
|
||||
type Ast = ast::TypeAliasDef;
|
||||
fn source(self, db: &dyn HirDatabase) -> InFile<ast::TypeAliasDef> {
|
||||
type Ast = ast::TypeAlias;
|
||||
fn source(self, db: &dyn HirDatabase) -> InFile<ast::TypeAlias> {
|
||||
self.id.lookup(db.upcast()).source(db.upcast())
|
||||
}
|
||||
}
|
||||
|
@@ -120,14 +120,14 @@ impl HasSource for MacroDef {
|
|||
}
|
||||
}
|
||||
impl HasSource for ImplDef {
|
||||
type Ast = ast::ImplDef;
|
||||
fn source(self, db: &dyn HirDatabase) -> InFile<ast::ImplDef> {
|
||||
type Ast = ast::Impl;
|
||||
fn source(self, db: &dyn HirDatabase) -> InFile<ast::Impl> {
|
||||
self.id.lookup(db.upcast()).source(db.upcast())
|
||||
}
|
||||
}
|
||||
|
||||
impl HasSource for TypeParam {
|
||||
type Ast = Either<ast::TraitDef, ast::TypeParam>;
|
||||
type Ast = Either<ast::Trait, ast::TypeParam>;
|
||||
fn source(self, db: &dyn HirDatabase) -> InFile<Self::Ast> {
|
||||
let child_source = self.id.parent.child_source(db.upcast());
|
||||
child_source.map(|it| it[self.id.local_id].clone())
|
||||
|
|
|
@@ -19,25 +19,6 @@
|
|||
|
||||
#![recursion_limit = "512"]
|
||||
|
||||
macro_rules! impl_froms {
|
||||
($e:ident: $($v:ident $(($($sv:ident),*))?),*$(,)?) => {
|
||||
$(
|
||||
impl From<$v> for $e {
|
||||
fn from(it: $v) -> $e {
|
||||
$e::$v(it)
|
||||
}
|
||||
}
|
||||
$($(
|
||||
impl From<$sv> for $e {
|
||||
fn from(it: $sv) -> $e {
|
||||
$e::$v($v::$sv(it))
|
||||
}
|
||||
}
|
||||
)*)?
|
||||
)*
|
||||
}
|
||||
}
|
||||
|
||||
mod semantics;
|
||||
pub mod db;
|
||||
mod source_analyzer;
|
||||
|
@@ -51,10 +32,10 @@ mod has_source;
|
|||
|
||||
pub use crate::{
|
||||
code_model::{
|
||||
Adt, AsAssocItem, AssocItem, AssocItemContainer, AttrDef, Const, Crate, CrateDependency,
|
||||
DefWithBody, Docs, Enum, EnumVariant, Field, FieldSource, Function, GenericDef, HasAttrs,
|
||||
HasVisibility, ImplDef, Local, MacroDef, Module, ModuleDef, ScopeDef, Static, Struct,
|
||||
Trait, Type, TypeAlias, TypeParam, Union, VariantDef, Visibility,
|
||||
Adt, AsAssocItem, AssocItem, AssocItemContainer, AttrDef, Callable, CallableKind, Const,
|
||||
Crate, CrateDependency, DefWithBody, Docs, Enum, EnumVariant, Field, FieldSource, Function,
|
||||
GenericDef, HasAttrs, HasVisibility, ImplDef, Local, MacroDef, Module, ModuleDef, ScopeDef,
|
||||
Static, Struct, Trait, Type, TypeAlias, TypeParam, Union, VariantDef, Visibility,
|
||||
},
|
||||
has_source::HasSource,
|
||||
semantics::{original_range, PathResolution, Semantics, SemanticsScope},
|
||||
|
@@ -74,6 +55,7 @@ pub use hir_def::{
|
|||
pub use hir_expand::{
|
||||
hygiene::Hygiene,
|
||||
name::{AsName, Name},
|
||||
HirFileId, InFile, MacroCallId, MacroCallLoc, MacroDefId, MacroFile, Origin,
|
||||
HirFileId, InFile, MacroCallId, MacroCallLoc, MacroDefId, /* FIXME */
|
||||
MacroFile, Origin,
|
||||
};
|
||||
pub use hir_ty::{display::HirDisplay, CallableDef};
|
||||
pub use hir_ty::display::HirDisplay;
|
||||
|
|
|
@@ -6,7 +6,7 @@ use std::{cell::RefCell, fmt, iter::successors};
|
|||
|
||||
use hir_def::{
|
||||
resolver::{self, HasResolver, Resolver},
|
||||
AsMacroCall, TraitId, VariantId,
|
||||
AsMacroCall, FunctionId, TraitId, VariantId,
|
||||
};
|
||||
use hir_expand::{diagnostics::AstDiagnostic, hygiene::Hygiene, ExpansionInfo};
|
||||
use hir_ty::associated_type_shorthand_candidates;
|
||||
|
@@ -24,8 +24,8 @@ use crate::{
|
|||
diagnostics::Diagnostic,
|
||||
semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
|
||||
source_analyzer::{resolve_hir_path, resolve_hir_path_qualifier, SourceAnalyzer},
|
||||
AssocItem, Field, Function, HirFileId, ImplDef, InFile, Local, MacroDef, Module, ModuleDef,
|
||||
Name, Origin, Path, ScopeDef, Trait, Type, TypeAlias, TypeParam,
|
||||
AssocItem, Callable, Field, Function, HirFileId, ImplDef, InFile, Local, MacroDef, Module,
|
||||
ModuleDef, Name, Origin, Path, ScopeDef, Trait, Type, TypeAlias, TypeParam, VariantDef,
|
||||
};
|
||||
use resolver::TypeNs;
|
||||
|
||||
|
@@ -83,7 +83,13 @@ impl PathResolution {
|
|||
/// Primary API to get semantic information, like types, from syntax trees.
|
||||
pub struct Semantics<'db, DB> {
|
||||
pub db: &'db DB,
|
||||
imp: SemanticsImpl<'db>,
|
||||
}
|
||||
|
||||
pub struct SemanticsImpl<'db> {
|
||||
pub db: &'db dyn HirDatabase,
|
||||
s2d_cache: RefCell<SourceToDefCache>,
|
||||
expansion_info_cache: RefCell<FxHashMap<HirFileId, Option<ExpansionInfo>>>,
|
||||
cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
|
||||
}
|
||||
|
||||
|
@@ -95,29 +101,23 @@ impl<DB> fmt::Debug for Semantics<'_, DB> {
|
|||
|
||||
impl<'db, DB: HirDatabase> Semantics<'db, DB> {
|
||||
pub fn new(db: &DB) -> Semantics<DB> {
|
||||
Semantics { db, s2d_cache: Default::default(), cache: Default::default() }
|
||||
let impl_ = SemanticsImpl::new(db);
|
||||
Semantics { db, imp: impl_ }
|
||||
}
|
||||
|
||||
pub fn parse(&self, file_id: FileId) -> ast::SourceFile {
|
||||
let tree = self.db.parse(file_id).tree();
|
||||
self.cache(tree.syntax().clone(), file_id.into());
|
||||
tree
|
||||
self.imp.parse(file_id)
|
||||
}
|
||||
|
||||
pub fn ast<T: AstDiagnostic + Diagnostic>(&self, d: &T) -> <T as AstDiagnostic>::AST {
|
||||
let file_id = d.source().file_id;
|
||||
let root = self.db.parse_or_expand(file_id).unwrap();
|
||||
self.cache(root, file_id);
|
||||
d.ast(self.db)
|
||||
self.imp.cache(root, file_id);
|
||||
d.ast(self.db.upcast())
|
||||
}
|
||||
|
||||
pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
|
||||
let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call);
|
||||
let sa = self.analyze2(macro_call.map(|it| it.syntax()), None);
|
||||
let file_id = sa.expand(self.db, macro_call)?;
|
||||
let node = self.db.parse_or_expand(file_id)?;
|
||||
self.cache(node.clone(), file_id);
|
||||
Some(node)
|
||||
self.imp.expand(macro_call)
|
||||
}
|
||||
|
||||
pub fn expand_hypothetical(
|
||||
|
@@ -126,37 +126,11 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
|
|||
hypothetical_args: &ast::TokenTree,
|
||||
token_to_map: SyntaxToken,
|
||||
) -> Option<(SyntaxNode, SyntaxToken)> {
|
||||
let macro_call =
|
||||
self.find_file(actual_macro_call.syntax().clone()).with_value(actual_macro_call);
|
||||
let sa = self.analyze2(macro_call.map(|it| it.syntax()), None);
|
||||
let krate = sa.resolver.krate()?;
|
||||
let macro_call_id = macro_call
|
||||
.as_call_id(self.db, krate, |path| sa.resolver.resolve_path_as_macro(self.db, &path))?;
|
||||
hir_expand::db::expand_hypothetical(self.db, macro_call_id, hypothetical_args, token_to_map)
|
||||
self.imp.expand_hypothetical(actual_macro_call, hypothetical_args, token_to_map)
|
||||
}
|
||||
|
||||
pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
|
||||
let parent = token.parent();
|
||||
let parent = self.find_file(parent);
|
||||
let sa = self.analyze2(parent.as_ref(), None);
|
||||
|
||||
let token = successors(Some(parent.with_value(token)), |token| {
|
||||
let macro_call = token.value.ancestors().find_map(ast::MacroCall::cast)?;
|
||||
let tt = macro_call.token_tree()?;
|
||||
if !tt.syntax().text_range().contains_range(token.value.text_range()) {
|
||||
return None;
|
||||
}
|
||||
let file_id = sa.expand(self.db, token.with_value(&macro_call))?;
|
||||
let token = file_id.expansion_info(self.db)?.map_token_down(token.as_ref())?;
|
||||
|
||||
self.cache(find_root(&token.value.parent()), token.file_id);
|
||||
|
||||
Some(token)
|
||||
})
|
||||
.last()
|
||||
.unwrap();
|
||||
|
||||
token.value
|
||||
self.imp.descend_into_macros(token)
|
||||
}
|
||||
|
||||
pub fn descend_node_at_offset<N: ast::AstNode>(
|
||||
|
@@ -164,27 +138,19 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
|
|||
node: &SyntaxNode,
|
||||
offset: TextSize,
|
||||
) -> Option<N> {
|
||||
// Handle macro token cases
|
||||
node.token_at_offset(offset)
|
||||
.map(|token| self.descend_into_macros(token))
|
||||
.find_map(|it| self.ancestors_with_macros(it.parent()).find_map(N::cast))
|
||||
self.imp.descend_node_at_offset(node, offset).find_map(N::cast)
|
||||
}
|
||||
|
||||
pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
|
||||
let node = self.find_file(node.clone());
|
||||
original_range(self.db, node.as_ref())
|
||||
self.imp.original_range(node)
|
||||
}
|
||||
|
||||
pub fn diagnostics_range(&self, diagnostics: &dyn Diagnostic) -> FileRange {
|
||||
let src = diagnostics.source();
|
||||
let root = self.db.parse_or_expand(src.file_id).unwrap();
|
||||
let node = src.value.to_node(&root);
|
||||
original_range(self.db, src.with_value(&node))
|
||||
self.imp.diagnostics_range(diagnostics)
|
||||
}
|
||||
|
||||
pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
|
||||
let node = self.find_file(node);
|
||||
node.ancestors_with_macros(self.db).map(|it| it.value)
|
||||
self.imp.ancestors_with_macros(node)
|
||||
}
|
||||
|
||||
pub fn ancestors_at_offset_with_macros(
|
||||
|
@@ -192,9 +158,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
|
|||
node: &SyntaxNode,
|
||||
offset: TextSize,
|
||||
) -> impl Iterator<Item = SyntaxNode> + '_ {
|
||||
node.token_at_offset(offset)
|
||||
.map(|token| self.ancestors_with_macros(token.parent()))
|
||||
.kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len())
|
||||
self.imp.ancestors_at_offset_with_macros(node, offset)
|
||||
}
|
||||
|
||||
/// Find an AstNode by offset inside SyntaxNode, if it is inside *MacroFile*,
|
||||
|
@@ -204,7 +168,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
|
|||
node: &SyntaxNode,
|
||||
offset: TextSize,
|
||||
) -> Option<N> {
|
||||
self.ancestors_at_offset_with_macros(node, offset).find_map(N::cast)
|
||||
self.imp.ancestors_at_offset_with_macros(node, offset).find_map(N::cast)
|
||||
}
|
||||
|
||||
/// Find an AstNode by offset inside SyntaxNode, if it is inside *MacroCall*,
|
||||
|
@@ -217,100 +181,317 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
|
|||
if let Some(it) = find_node_at_offset(&node, offset) {
|
||||
return Some(it);
|
||||
}
|
||||
self.descend_node_at_offset(&node, offset)
|
||||
|
||||
self.imp.descend_node_at_offset(node, offset).find_map(N::cast)
|
||||
}
|
||||
|
||||
pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<Type> {
|
||||
self.analyze(expr.syntax()).type_of(self.db, &expr)
|
||||
self.imp.type_of_expr(expr)
|
||||
}
|
||||
|
||||
pub fn type_of_pat(&self, pat: &ast::Pat) -> Option<Type> {
|
||||
self.analyze(pat.syntax()).type_of_pat(self.db, &pat)
|
||||
self.imp.type_of_pat(pat)
|
||||
}
|
||||
|
||||
pub fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type> {
|
||||
self.imp.type_of_self(param)
|
||||
}
|
||||
|
||||
pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
|
||||
self.analyze(call.syntax()).resolve_method_call(self.db, call)
|
||||
self.imp.resolve_method_call(call).map(Function::from)
|
||||
}
|
||||
|
||||
pub fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option<Callable> {
|
||||
self.imp.resolve_method_call_as_callable(call)
|
||||
}
|
||||
|
||||
pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Field> {
|
||||
self.analyze(field.syntax()).resolve_field(self.db, field)
|
||||
self.imp.resolve_field(field)
|
||||
}
|
||||
|
||||
pub fn resolve_record_field(&self, field: &ast::RecordField) -> Option<(Field, Option<Local>)> {
|
||||
self.analyze(field.syntax()).resolve_record_field(self.db, field)
|
||||
pub fn resolve_record_field(
|
||||
&self,
|
||||
field: &ast::RecordExprField,
|
||||
) -> Option<(Field, Option<Local>)> {
|
||||
self.imp.resolve_record_field(field)
|
||||
}
|
||||
|
||||
pub fn resolve_record_field_pat(&self, field: &ast::RecordFieldPat) -> Option<Field> {
|
||||
self.analyze(field.syntax()).resolve_record_field_pat(self.db, field)
|
||||
self.imp.resolve_record_field_pat(field)
|
||||
}
|
||||
|
||||
pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<MacroDef> {
|
||||
let sa = self.analyze(macro_call.syntax());
|
||||
let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call);
|
||||
sa.resolve_macro_call(self.db, macro_call)
|
||||
self.imp.resolve_macro_call(macro_call)
|
||||
}
|
||||
|
||||
pub fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
|
||||
self.analyze(path.syntax()).resolve_path(self.db, path)
|
||||
self.imp.resolve_path(path)
|
||||
}
|
||||
|
||||
pub fn resolve_variant(&self, record_lit: ast::RecordLit) -> Option<VariantId> {
|
||||
self.analyze(record_lit.syntax()).resolve_variant(self.db, record_lit)
|
||||
pub fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantDef> {
|
||||
self.imp.resolve_variant(record_lit).map(VariantDef::from)
|
||||
}
|
||||
|
||||
pub fn lower_path(&self, path: &ast::Path) -> Option<Path> {
|
||||
let src = self.find_file(path.syntax().clone());
|
||||
Path::from_src(path.clone(), &Hygiene::new(self.db.upcast(), src.file_id.into()))
|
||||
self.imp.lower_path(path)
|
||||
}
|
||||
|
||||
pub fn resolve_bind_pat_to_const(&self, pat: &ast::BindPat) -> Option<ModuleDef> {
|
||||
self.analyze(pat.syntax()).resolve_bind_pat_to_const(self.db, pat)
|
||||
self.imp.resolve_bind_pat_to_const(pat)
|
||||
}
|
||||
|
||||
// FIXME: use this instead?
|
||||
// pub fn resolve_name_ref(&self, name_ref: &ast::NameRef) -> Option<???>;
|
||||
|
||||
pub fn record_literal_missing_fields(&self, literal: &ast::RecordLit) -> Vec<(Field, Type)> {
|
||||
pub fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> {
|
||||
self.imp.record_literal_missing_fields(literal)
|
||||
}
|
||||
|
||||
pub fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> {
|
||||
self.imp.record_pattern_missing_fields(pattern)
|
||||
}
|
||||
|
||||
pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> {
|
||||
let src = self.imp.find_file(src.syntax().clone()).with_value(src).cloned();
|
||||
T::to_def(&self.imp, src)
|
||||
}
|
||||
|
||||
pub fn to_module_def(&self, file: FileId) -> Option<Module> {
|
||||
self.imp.to_module_def(file)
|
||||
}
|
||||
|
||||
pub fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db> {
|
||||
self.imp.scope(node)
|
||||
}
|
||||
|
||||
pub fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> {
|
||||
self.imp.scope_at_offset(node, offset)
|
||||
}
|
||||
|
||||
pub fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
|
||||
self.imp.scope_for_def(def)
|
||||
}
|
||||
|
||||
pub fn assert_contains_node(&self, node: &SyntaxNode) {
|
||||
self.imp.assert_contains_node(node)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'db> SemanticsImpl<'db> {
|
||||
fn new(db: &'db dyn HirDatabase) -> Self {
|
||||
SemanticsImpl {
|
||||
db,
|
||||
s2d_cache: Default::default(),
|
||||
cache: Default::default(),
|
||||
expansion_info_cache: Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
fn parse(&self, file_id: FileId) -> ast::SourceFile {
|
||||
let tree = self.db.parse(file_id).tree();
|
||||
self.cache(tree.syntax().clone(), file_id.into());
|
||||
tree
|
||||
}
|
||||
|
||||
fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
|
||||
let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call);
|
||||
let sa = self.analyze2(macro_call.map(|it| it.syntax()), None);
|
||||
let file_id = sa.expand(self.db, macro_call)?;
|
||||
let node = self.db.parse_or_expand(file_id)?;
|
||||
self.cache(node.clone(), file_id);
|
||||
Some(node)
|
||||
}
|
||||
|
||||
fn expand_hypothetical(
|
||||
&self,
|
||||
actual_macro_call: &ast::MacroCall,
|
||||
hypothetical_args: &ast::TokenTree,
|
||||
token_to_map: SyntaxToken,
|
||||
) -> Option<(SyntaxNode, SyntaxToken)> {
|
||||
let macro_call =
|
||||
self.find_file(actual_macro_call.syntax().clone()).with_value(actual_macro_call);
|
||||
let sa = self.analyze2(macro_call.map(|it| it.syntax()), None);
|
||||
let krate = sa.resolver.krate()?;
|
||||
let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| {
|
||||
sa.resolver.resolve_path_as_macro(self.db.upcast(), &path)
|
||||
})?;
|
||||
hir_expand::db::expand_hypothetical(
|
||||
self.db.upcast(),
|
||||
macro_call_id,
|
||||
hypothetical_args,
|
||||
token_to_map,
|
||||
)
|
||||
}
|
||||
|
||||
fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
|
||||
let _p = profile("descend_into_macros");
|
||||
let parent = token.parent();
|
||||
let parent = self.find_file(parent);
|
||||
let sa = self.analyze2(parent.as_ref(), None);
|
||||
|
||||
let token = successors(Some(parent.with_value(token)), |token| {
|
||||
self.db.check_canceled();
|
||||
let macro_call = token.value.ancestors().find_map(ast::MacroCall::cast)?;
|
||||
let tt = macro_call.token_tree()?;
|
||||
if !tt.syntax().text_range().contains_range(token.value.text_range()) {
|
||||
return None;
|
||||
}
|
||||
let file_id = sa.expand(self.db, token.with_value(&macro_call))?;
|
||||
let token = self
|
||||
.expansion_info_cache
|
||||
.borrow_mut()
|
||||
.entry(file_id)
|
||||
.or_insert_with(|| file_id.expansion_info(self.db.upcast()))
|
||||
.as_ref()?
|
||||
.map_token_down(token.as_ref())?;
|
||||
|
||||
self.cache(find_root(&token.value.parent()), token.file_id);
|
||||
|
||||
Some(token)
|
||||
})
|
||||
.last()
|
||||
.unwrap();
|
||||
|
||||
token.value
|
||||
}
|
||||
|
||||
fn descend_node_at_offset(
|
||||
&self,
|
||||
node: &SyntaxNode,
|
||||
offset: TextSize,
|
||||
) -> impl Iterator<Item = SyntaxNode> + '_ {
|
||||
// Handle macro token cases
|
||||
node.token_at_offset(offset)
|
||||
.map(|token| self.descend_into_macros(token))
|
||||
.map(|it| self.ancestors_with_macros(it.parent()))
|
||||
.flatten()
|
||||
}
|
||||
|
||||
fn original_range(&self, node: &SyntaxNode) -> FileRange {
|
||||
let node = self.find_file(node.clone());
|
||||
original_range(self.db, node.as_ref())
|
||||
}
|
||||
|
||||
fn diagnostics_range(&self, diagnostics: &dyn Diagnostic) -> FileRange {
|
||||
let src = diagnostics.source();
|
||||
let root = self.db.parse_or_expand(src.file_id).unwrap();
|
||||
let node = src.value.to_node(&root);
|
||||
original_range(self.db, src.with_value(&node))
|
||||
}
|
||||
|
||||
fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
|
||||
let node = self.find_file(node);
|
||||
node.ancestors_with_macros(self.db.upcast()).map(|it| it.value)
|
||||
}
|
||||
|
||||
fn ancestors_at_offset_with_macros(
|
||||
&self,
|
||||
node: &SyntaxNode,
|
||||
offset: TextSize,
|
||||
) -> impl Iterator<Item = SyntaxNode> + '_ {
|
||||
node.token_at_offset(offset)
|
||||
.map(|token| self.ancestors_with_macros(token.parent()))
|
||||
.kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len())
|
||||
}
|
||||
|
||||
fn type_of_expr(&self, expr: &ast::Expr) -> Option<Type> {
|
||||
self.analyze(expr.syntax()).type_of_expr(self.db, &expr)
|
||||
}
|
||||
|
||||
fn type_of_pat(&self, pat: &ast::Pat) -> Option<Type> {
|
||||
self.analyze(pat.syntax()).type_of_pat(self.db, &pat)
|
||||
}
|
||||
|
||||
fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type> {
|
||||
self.analyze(param.syntax()).type_of_self(self.db, &param)
|
||||
}
|
||||
|
||||
fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<FunctionId> {
|
||||
self.analyze(call.syntax()).resolve_method_call(self.db, call)
|
||||
}
|
||||
|
||||
fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option<Callable> {
|
||||
// FIXME: this erases Substs
|
||||
let func = self.resolve_method_call(call)?;
|
||||
let ty = self.db.value_ty(func.into());
|
||||
let resolver = self.analyze(call.syntax()).resolver;
|
||||
let ty = Type::new_with_resolver(self.db, &resolver, ty.value)?;
|
||||
let mut res = ty.as_callable(self.db)?;
|
||||
res.is_bound_method = true;
|
||||
Some(res)
|
||||
}
|
||||
|
||||
fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Field> {
|
||||
self.analyze(field.syntax()).resolve_field(self.db, field)
|
||||
}
|
||||
|
||||
fn resolve_record_field(&self, field: &ast::RecordExprField) -> Option<(Field, Option<Local>)> {
|
||||
self.analyze(field.syntax()).resolve_record_field(self.db, field)
|
||||
}
|
||||
|
||||
fn resolve_record_field_pat(&self, field: &ast::RecordFieldPat) -> Option<Field> {
|
||||
self.analyze(field.syntax()).resolve_record_field_pat(self.db, field)
|
||||
}
|
||||
|
||||
fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<MacroDef> {
|
||||
let sa = self.analyze(macro_call.syntax());
|
||||
let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call);
|
||||
sa.resolve_macro_call(self.db, macro_call)
|
||||
}
|
||||
|
||||
fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
|
||||
self.analyze(path.syntax()).resolve_path(self.db, path)
|
||||
}
|
||||
|
||||
fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantId> {
|
||||
self.analyze(record_lit.syntax()).resolve_variant(self.db, record_lit)
|
||||
}
|
||||
|
||||
fn lower_path(&self, path: &ast::Path) -> Option<Path> {
|
||||
let src = self.find_file(path.syntax().clone());
|
||||
Path::from_src(path.clone(), &Hygiene::new(self.db.upcast(), src.file_id.into()))
|
||||
}
|
||||
|
||||
fn resolve_bind_pat_to_const(&self, pat: &ast::BindPat) -> Option<ModuleDef> {
|
||||
self.analyze(pat.syntax()).resolve_bind_pat_to_const(self.db, pat)
|
||||
}
|
||||
|
||||
fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> {
|
||||
self.analyze(literal.syntax())
|
||||
.record_literal_missing_fields(self.db, literal)
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
pub fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> {
|
||||
fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> {
|
||||
self.analyze(pattern.syntax())
|
||||
.record_pattern_missing_fields(self.db, pattern)
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> {
|
||||
let src = self.find_file(src.syntax().clone()).with_value(src).cloned();
|
||||
T::to_def(self, src)
|
||||
}
|
||||
|
||||
fn with_ctx<F: FnOnce(&mut SourceToDefCtx) -> T, T>(&self, f: F) -> T {
|
||||
let mut cache = self.s2d_cache.borrow_mut();
|
||||
let mut ctx = SourceToDefCtx { db: self.db, cache: &mut *cache };
|
||||
f(&mut ctx)
|
||||
}
|
||||
|
||||
pub fn to_module_def(&self, file: FileId) -> Option<Module> {
|
||||
fn to_module_def(&self, file: FileId) -> Option<Module> {
|
||||
self.with_ctx(|ctx| ctx.file_to_def(file)).map(Module::from)
|
||||
}
|
||||
|
||||
pub fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db, DB> {
|
||||
fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db> {
|
||||
let node = self.find_file(node.clone());
|
||||
let resolver = self.analyze2(node.as_ref(), None).resolver;
|
||||
SemanticsScope { db: self.db, resolver }
|
||||
}
|
||||
|
||||
pub fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db, DB> {
|
||||
fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> {
|
||||
let node = self.find_file(node.clone());
|
||||
let resolver = self.analyze2(node.as_ref(), Some(offset)).resolver;
|
||||
SemanticsScope { db: self.db, resolver }
|
||||
}
|
||||
|
||||
pub fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db, DB> {
|
||||
let resolver = def.id.resolver(self.db);
|
||||
fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
|
||||
let resolver = def.id.resolver(self.db.upcast());
|
||||
SemanticsScope { db: self.db, resolver }
|
||||
}
|
||||
|
||||
|
@@ -331,12 +512,13 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
|
|||
ChildContainer::DefWithBodyId(def) => {
|
||||
return SourceAnalyzer::new_for_body(self.db, def, src, offset)
|
||||
}
|
||||
ChildContainer::TraitId(it) => it.resolver(self.db),
|
||||
ChildContainer::ImplId(it) => it.resolver(self.db),
|
||||
ChildContainer::ModuleId(it) => it.resolver(self.db),
|
||||
ChildContainer::EnumId(it) => it.resolver(self.db),
|
||||
ChildContainer::VariantId(it) => it.resolver(self.db),
|
||||
ChildContainer::GenericDefId(it) => it.resolver(self.db),
|
||||
ChildContainer::TraitId(it) => it.resolver(self.db.upcast()),
|
||||
ChildContainer::ImplId(it) => it.resolver(self.db.upcast()),
|
||||
ChildContainer::ModuleId(it) => it.resolver(self.db.upcast()),
|
||||
ChildContainer::EnumId(it) => it.resolver(self.db.upcast()),
|
||||
ChildContainer::VariantId(it) => it.resolver(self.db.upcast()),
|
||||
ChildContainer::TypeAliasId(it) => it.resolver(self.db.upcast()),
|
||||
ChildContainer::GenericDefId(it) => it.resolver(self.db.upcast()),
|
||||
};
|
||||
SourceAnalyzer::new_for_resolver(resolver, src)
|
||||
}
|
||||
|
@@ -348,7 +530,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
|
|||
assert!(prev == None || prev == Some(file_id))
|
||||
}
|
||||
|
||||
pub fn assert_contains_node(&self, node: &SyntaxNode) {
|
||||
fn assert_contains_node(&self, node: &SyntaxNode) {
|
||||
self.find_file(node.clone());
|
||||
}
|
||||
|
||||
|
@@ -382,14 +564,14 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
|
|||
pub trait ToDef: AstNode + Clone {
|
||||
type Def;
|
||||
|
||||
fn to_def<DB: HirDatabase>(sema: &Semantics<DB>, src: InFile<Self>) -> Option<Self::Def>;
|
||||
fn to_def(sema: &SemanticsImpl, src: InFile<Self>) -> Option<Self::Def>;
|
||||
}
|
||||
|
||||
macro_rules! to_def_impls {
|
||||
($(($def:path, $ast:path, $meth:ident)),* ,) => {$(
|
||||
impl ToDef for $ast {
|
||||
type Def = $def;
|
||||
fn to_def<DB: HirDatabase>(sema: &Semantics<DB>, src: InFile<Self>) -> Option<Self::Def> {
|
||||
fn to_def(sema: &SemanticsImpl, src: InFile<Self>) -> Option<Self::Def> {
|
||||
sema.with_ctx(|ctx| ctx.$meth(src)).map(<$def>::from)
|
||||
}
|
||||
}
|
||||
|
@@ -398,18 +580,18 @@ macro_rules! to_def_impls {
|
|||
|
||||
to_def_impls![
|
||||
(crate::Module, ast::Module, module_to_def),
|
||||
(crate::Struct, ast::StructDef, struct_to_def),
|
||||
(crate::Enum, ast::EnumDef, enum_to_def),
|
||||
(crate::Union, ast::UnionDef, union_to_def),
|
||||
(crate::Trait, ast::TraitDef, trait_to_def),
|
||||
(crate::ImplDef, ast::ImplDef, impl_to_def),
|
||||
(crate::TypeAlias, ast::TypeAliasDef, type_alias_to_def),
|
||||
(crate::Const, ast::ConstDef, const_to_def),
|
||||
(crate::Static, ast::StaticDef, static_to_def),
|
||||
(crate::Function, ast::FnDef, fn_to_def),
|
||||
(crate::Field, ast::RecordFieldDef, record_field_to_def),
|
||||
(crate::Field, ast::TupleFieldDef, tuple_field_to_def),
|
||||
(crate::EnumVariant, ast::EnumVariant, enum_variant_to_def),
|
||||
(crate::Struct, ast::Struct, struct_to_def),
|
||||
(crate::Enum, ast::Enum, enum_to_def),
|
||||
(crate::Union, ast::Union, union_to_def),
|
||||
(crate::Trait, ast::Trait, trait_to_def),
|
||||
(crate::ImplDef, ast::Impl, impl_to_def),
|
||||
(crate::TypeAlias, ast::TypeAlias, type_alias_to_def),
|
||||
(crate::Const, ast::Const, const_to_def),
|
||||
(crate::Static, ast::Static, static_to_def),
|
||||
(crate::Function, ast::Fn, fn_to_def),
|
||||
(crate::Field, ast::RecordField, record_field_to_def),
|
||||
(crate::Field, ast::TupleField, tuple_field_to_def),
|
||||
(crate::EnumVariant, ast::Variant, enum_variant_to_def),
|
||||
(crate::TypeParam, ast::TypeParam, type_param_to_def),
|
||||
(crate::MacroDef, ast::MacroCall, macro_call_to_def), // this one is dubious, not all calls are macros
|
||||
(crate::Local, ast::BindPat, bind_pat_to_def),
|
||||
|
@@ -419,12 +601,13 @@ fn find_root(node: &SyntaxNode) -> SyntaxNode {
|
|||
node.ancestors().last().unwrap()
|
||||
}
|
||||
|
||||
pub struct SemanticsScope<'a, DB> {
|
||||
pub db: &'a DB,
|
||||
#[derive(Debug)]
|
||||
pub struct SemanticsScope<'a> {
|
||||
pub db: &'a dyn HirDatabase,
|
||||
resolver: Resolver,
|
||||
}
|
||||
|
||||
impl<'a, DB: HirDatabase> SemanticsScope<'a, DB> {
|
||||
impl<'a> SemanticsScope<'a> {
|
||||
pub fn module(&self) -> Option<Module> {
|
||||
Some(Module { id: self.resolver.module()? })
|
||||
}
|
||||
|
@@ -433,13 +616,13 @@ impl<'a, DB: HirDatabase> SemanticsScope<'a, DB> {
|
|||
// FIXME: rename to visible_traits to not repeat scope?
|
||||
pub fn traits_in_scope(&self) -> FxHashSet<TraitId> {
|
||||
let resolver = &self.resolver;
|
||||
resolver.traits_in_scope(self.db)
|
||||
resolver.traits_in_scope(self.db.upcast())
|
||||
}
|
||||
|
||||
pub fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef)) {
|
||||
let resolver = &self.resolver;
|
||||
|
||||
resolver.process_all_names(self.db, &mut |name, def| {
|
||||
resolver.process_all_names(self.db.upcast(), &mut |name, def| {
|
||||
let def = match def {
|
||||
resolver::ScopeDef::PerNs(it) => {
|
||||
let items = ScopeDef::all_items(it);
|
||||
|
|
|
@@ -16,6 +16,7 @@ use ra_syntax::{
|
|||
match_ast, AstNode, SyntaxNode,
|
||||
};
|
||||
use rustc_hash::FxHashMap;
|
||||
use stdx::impl_from;
|
||||
|
||||
use crate::{db::HirDatabase, InFile, MacroDefId};
|
||||
|
||||
|
@@ -64,53 +65,44 @@ impl SourceToDefCtx<'_, '_> {
|
|||
Some(ModuleId { krate: parent_module.krate, local_id: child_id })
|
||||
}
|
||||
|
||||
pub(super) fn trait_to_def(&mut self, src: InFile<ast::TraitDef>) -> Option<TraitId> {
|
||||
pub(super) fn trait_to_def(&mut self, src: InFile<ast::Trait>) -> Option<TraitId> {
|
||||
self.to_def(src, keys::TRAIT)
|
||||
}
|
||||
pub(super) fn impl_to_def(&mut self, src: InFile<ast::ImplDef>) -> Option<ImplId> {
|
||||
pub(super) fn impl_to_def(&mut self, src: InFile<ast::Impl>) -> Option<ImplId> {
|
||||
self.to_def(src, keys::IMPL)
|
||||
}
|
||||
pub(super) fn fn_to_def(&mut self, src: InFile<ast::FnDef>) -> Option<FunctionId> {
|
||||
pub(super) fn fn_to_def(&mut self, src: InFile<ast::Fn>) -> Option<FunctionId> {
|
||||
self.to_def(src, keys::FUNCTION)
|
||||
}
|
||||
pub(super) fn struct_to_def(&mut self, src: InFile<ast::StructDef>) -> Option<StructId> {
|
||||
pub(super) fn struct_to_def(&mut self, src: InFile<ast::Struct>) -> Option<StructId> {
|
||||
self.to_def(src, keys::STRUCT)
|
||||
}
|
||||
pub(super) fn enum_to_def(&mut self, src: InFile<ast::EnumDef>) -> Option<EnumId> {
|
||||
pub(super) fn enum_to_def(&mut self, src: InFile<ast::Enum>) -> Option<EnumId> {
|
||||
self.to_def(src, keys::ENUM)
|
||||
}
|
||||
pub(super) fn union_to_def(&mut self, src: InFile<ast::UnionDef>) -> Option<UnionId> {
|
||||
pub(super) fn union_to_def(&mut self, src: InFile<ast::Union>) -> Option<UnionId> {
|
||||
self.to_def(src, keys::UNION)
|
||||
}
|
||||
pub(super) fn static_to_def(&mut self, src: InFile<ast::StaticDef>) -> Option<StaticId> {
|
||||
pub(super) fn static_to_def(&mut self, src: InFile<ast::Static>) -> Option<StaticId> {
|
||||
self.to_def(src, keys::STATIC)
|
||||
}
|
||||
pub(super) fn const_to_def(&mut self, src: InFile<ast::ConstDef>) -> Option<ConstId> {
|
||||
pub(super) fn const_to_def(&mut self, src: InFile<ast::Const>) -> Option<ConstId> {
|
||||
self.to_def(src, keys::CONST)
|
||||
}
|
||||
pub(super) fn type_alias_to_def(
|
||||
&mut self,
|
||||
src: InFile<ast::TypeAliasDef>,
|
||||
) -> Option<TypeAliasId> {
|
||||
pub(super) fn type_alias_to_def(&mut self, src: InFile<ast::TypeAlias>) -> Option<TypeAliasId> {
|
||||
self.to_def(src, keys::TYPE_ALIAS)
|
||||
}
|
||||
pub(super) fn record_field_to_def(
|
||||
&mut self,
|
||||
src: InFile<ast::RecordFieldDef>,
|
||||
) -> Option<FieldId> {
|
||||
pub(super) fn record_field_to_def(&mut self, src: InFile<ast::RecordField>) -> Option<FieldId> {
|
||||
self.to_def(src, keys::RECORD_FIELD)
|
||||
}
|
||||
pub(super) fn tuple_field_to_def(
|
||||
&mut self,
|
||||
src: InFile<ast::TupleFieldDef>,
|
||||
) -> Option<FieldId> {
|
||||
pub(super) fn tuple_field_to_def(&mut self, src: InFile<ast::TupleField>) -> Option<FieldId> {
|
||||
self.to_def(src, keys::TUPLE_FIELD)
|
||||
}
|
||||
pub(super) fn enum_variant_to_def(
|
||||
&mut self,
|
||||
src: InFile<ast::EnumVariant>,
|
||||
src: InFile<ast::Variant>,
|
||||
) -> Option<EnumVariantId> {
|
||||
self.to_def(src, keys::ENUM_VARIANT)
|
||||
self.to_def(src, keys::VARIANT)
|
||||
}
|
||||
pub(super) fn bind_pat_to_def(
|
||||
&mut self,
|
||||
|
@ -162,38 +154,42 @@ impl SourceToDefCtx<'_, '_> {
|
|||
let def = self.module_to_def(container.with_value(it))?;
|
||||
def.into()
|
||||
},
|
||||
ast::TraitDef(it) => {
|
||||
ast::Trait(it) => {
|
||||
let def = self.trait_to_def(container.with_value(it))?;
|
||||
def.into()
|
||||
},
|
||||
ast::ImplDef(it) => {
|
||||
ast::Impl(it) => {
|
||||
let def = self.impl_to_def(container.with_value(it))?;
|
||||
def.into()
|
||||
},
|
||||
ast::FnDef(it) => {
|
||||
ast::Fn(it) => {
|
||||
let def = self.fn_to_def(container.with_value(it))?;
|
||||
DefWithBodyId::from(def).into()
|
||||
},
|
||||
ast::StructDef(it) => {
|
||||
ast::Struct(it) => {
|
||||
let def = self.struct_to_def(container.with_value(it))?;
|
||||
VariantId::from(def).into()
|
||||
},
|
||||
ast::EnumDef(it) => {
|
||||
ast::Enum(it) => {
|
||||
let def = self.enum_to_def(container.with_value(it))?;
|
||||
def.into()
|
||||
},
|
||||
ast::UnionDef(it) => {
|
||||
ast::Union(it) => {
|
||||
let def = self.union_to_def(container.with_value(it))?;
|
||||
VariantId::from(def).into()
|
||||
},
|
||||
ast::StaticDef(it) => {
|
||||
ast::Static(it) => {
|
||||
let def = self.static_to_def(container.with_value(it))?;
|
||||
DefWithBodyId::from(def).into()
|
||||
},
|
||||
ast::ConstDef(it) => {
|
||||
ast::Const(it) => {
|
||||
let def = self.const_to_def(container.with_value(it))?;
|
||||
DefWithBodyId::from(def).into()
|
||||
},
|
||||
ast::TypeAlias(it) => {
|
||||
let def = self.type_alias_to_def(container.with_value(it))?;
|
||||
def.into()
|
||||
},
|
||||
_ => continue,
|
||||
}
|
||||
};
|
||||
|
@ -208,12 +204,12 @@ impl SourceToDefCtx<'_, '_> {
|
|||
for container in src.cloned().ancestors_with_macros(self.db.upcast()).skip(1) {
|
||||
let res: GenericDefId = match_ast! {
|
||||
match (container.value) {
|
||||
ast::FnDef(it) => self.fn_to_def(container.with_value(it))?.into(),
|
||||
ast::StructDef(it) => self.struct_to_def(container.with_value(it))?.into(),
|
||||
ast::EnumDef(it) => self.enum_to_def(container.with_value(it))?.into(),
|
||||
ast::TraitDef(it) => self.trait_to_def(container.with_value(it))?.into(),
|
||||
ast::TypeAliasDef(it) => self.type_alias_to_def(container.with_value(it))?.into(),
|
||||
ast::ImplDef(it) => self.impl_to_def(container.with_value(it))?.into(),
|
||||
ast::Fn(it) => self.fn_to_def(container.with_value(it))?.into(),
|
||||
ast::Struct(it) => self.struct_to_def(container.with_value(it))?.into(),
|
||||
ast::Enum(it) => self.enum_to_def(container.with_value(it))?.into(),
|
||||
ast::Trait(it) => self.trait_to_def(container.with_value(it))?.into(),
|
||||
ast::TypeAlias(it) => self.type_alias_to_def(container.with_value(it))?.into(),
|
||||
ast::Impl(it) => self.impl_to_def(container.with_value(it))?.into(),
|
||||
_ => continue,
|
||||
}
|
||||
};
|
||||
|
@ -226,9 +222,9 @@ impl SourceToDefCtx<'_, '_> {
|
|||
for container in src.cloned().ancestors_with_macros(self.db.upcast()).skip(1) {
|
||||
let res: DefWithBodyId = match_ast! {
|
||||
match (container.value) {
|
||||
ast::ConstDef(it) => self.const_to_def(container.with_value(it))?.into(),
|
||||
ast::StaticDef(it) => self.static_to_def(container.with_value(it))?.into(),
|
||||
ast::FnDef(it) => self.fn_to_def(container.with_value(it))?.into(),
|
||||
ast::Const(it) => self.const_to_def(container.with_value(it))?.into(),
|
||||
ast::Static(it) => self.static_to_def(container.with_value(it))?.into(),
|
||||
ast::Fn(it) => self.fn_to_def(container.with_value(it))?.into(),
|
||||
_ => continue,
|
||||
}
|
||||
};
|
||||
|
@ -246,19 +242,21 @@ pub(crate) enum ChildContainer {
|
|||
ImplId(ImplId),
|
||||
EnumId(EnumId),
|
||||
VariantId(VariantId),
|
||||
TypeAliasId(TypeAliasId),
|
||||
/// XXX: this might be the same def as, for example an `EnumId`. However,
|
||||
/// here the children generic parameters, and not, eg enum variants.
|
||||
GenericDefId(GenericDefId),
|
||||
}
|
||||
impl_froms! {
|
||||
ChildContainer:
|
||||
impl_from! {
|
||||
DefWithBodyId,
|
||||
ModuleId,
|
||||
TraitId,
|
||||
ImplId,
|
||||
EnumId,
|
||||
VariantId,
|
||||
TypeAliasId,
|
||||
GenericDefId
|
||||
for ChildContainer
|
||||
}
|
||||
|
||||
impl ChildContainer {
|
||||
|
@ -271,6 +269,7 @@ impl ChildContainer {
|
|||
ChildContainer::ImplId(it) => it.child_by_source(db),
|
||||
ChildContainer::EnumId(it) => it.child_by_source(db),
|
||||
ChildContainer::VariantId(it) => it.child_by_source(db),
|
||||
ChildContainer::TypeAliasId(_) => DynMap::default(),
|
||||
ChildContainer::GenericDefId(it) => it.child_by_source(db),
|
||||
}
|
||||
}
|
||||
|
|
|
@ -14,11 +14,11 @@ use hir_def::{
|
|||
},
|
||||
expr::{ExprId, Pat, PatId},
|
||||
resolver::{resolver_for_scope, Resolver, TypeNs, ValueNs},
|
||||
AsMacroCall, DefWithBodyId, FieldId, LocalFieldId, VariantId,
|
||||
AsMacroCall, DefWithBodyId, FieldId, FunctionId, LocalFieldId, VariantId,
|
||||
};
|
||||
use hir_expand::{hygiene::Hygiene, name::AsName, HirFileId, InFile};
|
||||
use hir_ty::{
|
||||
expr::{record_literal_missing_fields, record_pattern_missing_fields},
|
||||
diagnostics::{record_literal_missing_fields, record_pattern_missing_fields},
|
||||
InferenceResult, Substs, Ty,
|
||||
};
|
||||
use ra_syntax::{
|
||||
|
@ -115,7 +115,7 @@ impl SourceAnalyzer {
|
|||
Some(res)
|
||||
}
|
||||
|
||||
pub(crate) fn type_of(&self, db: &dyn HirDatabase, expr: &ast::Expr) -> Option<Type> {
|
||||
pub(crate) fn type_of_expr(&self, db: &dyn HirDatabase, expr: &ast::Expr) -> Option<Type> {
|
||||
let expr_id = self.expr_id(db, expr)?;
|
||||
let ty = self.infer.as_ref()?[expr_id].clone();
|
||||
Type::new_with_resolver(db, &self.resolver, ty)
|
||||
|
@ -127,13 +127,24 @@ impl SourceAnalyzer {
|
|||
Type::new_with_resolver(db, &self.resolver, ty)
|
||||
}
|
||||
|
||||
pub(crate) fn type_of_self(
|
||||
&self,
|
||||
db: &dyn HirDatabase,
|
||||
param: &ast::SelfParam,
|
||||
) -> Option<Type> {
|
||||
let src = InFile { file_id: self.file_id, value: param };
|
||||
let pat_id = self.body_source_map.as_ref()?.node_self_param(src)?;
|
||||
let ty = self.infer.as_ref()?[pat_id].clone();
|
||||
Type::new_with_resolver(db, &self.resolver, ty)
|
||||
}
|
||||
|
||||
pub(crate) fn resolve_method_call(
|
||||
&self,
|
||||
db: &dyn HirDatabase,
|
||||
call: &ast::MethodCallExpr,
|
||||
) -> Option<Function> {
|
||||
) -> Option<FunctionId> {
|
||||
let expr_id = self.expr_id(db, &call.clone().into())?;
|
||||
self.infer.as_ref()?.method_resolution(expr_id).map(Function::from)
|
||||
self.infer.as_ref()?.method_resolution(expr_id)
|
||||
}
|
||||
|
||||
pub(crate) fn resolve_field(
|
||||
|
@ -148,7 +159,7 @@ impl SourceAnalyzer {
|
|||
pub(crate) fn resolve_record_field(
|
||||
&self,
|
||||
db: &dyn HirDatabase,
|
||||
field: &ast::RecordField,
|
||||
field: &ast::RecordExprField,
|
||||
) -> Option<(Field, Option<Local>)> {
|
||||
let expr = field.expr()?;
|
||||
let expr_id = self.expr_id(db, &expr)?;
|
||||
|
@ -235,7 +246,7 @@ impl SourceAnalyzer {
|
|||
}
|
||||
}
|
||||
|
||||
if let Some(rec_lit) = path.syntax().parent().and_then(ast::RecordLit::cast) {
|
||||
if let Some(rec_lit) = path.syntax().parent().and_then(ast::RecordExpr::cast) {
|
||||
let expr_id = self.expr_id(db, &rec_lit.into())?;
|
||||
if let Some(VariantId::EnumVariantId(variant)) =
|
||||
self.infer.as_ref()?.variant_resolution_for_expr(expr_id)
|
||||
|
@ -273,7 +284,7 @@ impl SourceAnalyzer {
|
|||
pub(crate) fn record_literal_missing_fields(
|
||||
&self,
|
||||
db: &dyn HirDatabase,
|
||||
literal: &ast::RecordLit,
|
||||
literal: &ast::RecordExpr,
|
||||
) -> Option<Vec<(Field, Type)>> {
|
||||
let krate = self.resolver.krate()?;
|
||||
let body = self.body.as_ref()?;
|
||||
|
@ -341,13 +352,13 @@ impl SourceAnalyzer {
|
|||
let macro_call_id = macro_call.as_call_id(db.upcast(), krate, |path| {
|
||||
self.resolver.resolve_path_as_macro(db.upcast(), &path)
|
||||
})?;
|
||||
Some(macro_call_id.as_file())
|
||||
Some(macro_call_id.as_file()).filter(|it| it.expansion_level(db.upcast()) < 64)
|
||||
}
|
||||
|
||||
pub(crate) fn resolve_variant(
|
||||
&self,
|
||||
db: &dyn HirDatabase,
|
||||
record_lit: ast::RecordLit,
|
||||
record_lit: ast::RecordExpr,
|
||||
) -> Option<VariantId> {
|
||||
let infer = self.infer.as_ref()?;
|
||||
let expr_id = self.expr_id(db, &record_lit.into())?;
|
||||
|
@ -394,8 +405,7 @@ fn scope_for_offset(
|
|||
)
|
||||
})
|
||||
.map(|(expr_range, scope)| {
|
||||
adjust(db, scopes, source_map, expr_range, offset.file_id, offset.value)
|
||||
.unwrap_or(*scope)
|
||||
adjust(db, scopes, source_map, expr_range, offset).unwrap_or(*scope)
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -406,8 +416,7 @@ fn adjust(
|
|||
scopes: &ExprScopes,
|
||||
source_map: &BodySourceMap,
|
||||
expr_range: TextRange,
|
||||
file_id: HirFileId,
|
||||
offset: TextSize,
|
||||
offset: InFile<TextSize>,
|
||||
) -> Option<ScopeId> {
|
||||
let child_scopes = scopes
|
||||
.scope_by_expr()
|
||||
|
@ -415,7 +424,7 @@ fn adjust(
|
|||
.filter_map(|(id, scope)| {
|
||||
let source = source_map.expr_syntax(*id).ok()?;
|
||||
// FIXME: correctly handle macro expansion
|
||||
if source.file_id != file_id {
|
||||
if source.file_id != offset.file_id {
|
||||
return None;
|
||||
}
|
||||
let root = source.file_syntax(db.upcast());
|
||||
|
@ -423,7 +432,7 @@ fn adjust(
|
|||
Some((node.syntax().text_range(), scope))
|
||||
})
|
||||
.filter(|&(range, _)| {
|
||||
range.start() <= offset && expr_range.contains_range(range) && range != expr_range
|
||||
range.start() <= offset.value && expr_range.contains_range(range) && range != expr_range
|
||||
});
|
||||
|
||||
child_scopes
|
||||
|
|
|
@@ -3,6 +3,7 @@ edition = "2018"
 name = "ra_hir_def"
 version = "0.1.0"
 authors = ["rust-analyzer developers"]
+license = "MIT OR Apache-2.0"
 
 [lib]
 doctest = false
@@ -32,4 +33,4 @@ ra_cfg = { path = "../ra_cfg" }
 tt = { path = "../ra_tt", package = "ra_tt" }
 
 [dev-dependencies]
-insta = "0.16.0"
+expect = { path = "../expect" }
@ -8,7 +8,7 @@ use hir_expand::{
|
|||
InFile,
|
||||
};
|
||||
use ra_arena::{map::ArenaMap, Arena};
|
||||
use ra_syntax::ast::{self, NameOwner, TypeAscriptionOwner, VisibilityOwner};
|
||||
use ra_syntax::ast::{self, NameOwner, VisibilityOwner};
|
||||
|
||||
use crate::{
|
||||
body::{CfgExpander, LowerCtx},
|
||||
|
@ -112,7 +112,7 @@ impl EnumData {
|
|||
|
||||
impl HasChildSource for EnumId {
|
||||
type ChildId = LocalEnumVariantId;
|
||||
type Value = ast::EnumVariant;
|
||||
type Value = ast::Variant;
|
||||
fn child_source(&self, db: &dyn DefDatabase) -> InFile<ArenaMap<Self::ChildId, Self::Value>> {
|
||||
let src = self.lookup(db).source(db);
|
||||
let mut trace = Trace::new_for_map();
|
||||
|
@ -123,8 +123,8 @@ impl HasChildSource for EnumId {
|
|||
|
||||
fn lower_enum(
|
||||
db: &dyn DefDatabase,
|
||||
trace: &mut Trace<EnumVariantData, ast::EnumVariant>,
|
||||
ast: &InFile<ast::EnumDef>,
|
||||
trace: &mut Trace<EnumVariantData, ast::Variant>,
|
||||
ast: &InFile<ast::Enum>,
|
||||
module_id: ModuleId,
|
||||
) {
|
||||
let expander = CfgExpander::new(db, ast.file_id, module_id.krate);
|
||||
|
@ -179,7 +179,7 @@ impl VariantData {
|
|||
|
||||
impl HasChildSource for VariantId {
|
||||
type ChildId = LocalFieldId;
|
||||
type Value = Either<ast::TupleFieldDef, ast::RecordFieldDef>;
|
||||
type Value = Either<ast::TupleField, ast::RecordField>;
|
||||
|
||||
fn child_source(&self, db: &dyn DefDatabase) -> InFile<ArenaMap<Self::ChildId, Self::Value>> {
|
||||
let (src, module_id) = match self {
|
||||
|
@ -194,7 +194,7 @@ impl HasChildSource for VariantId {
|
|||
}
|
||||
VariantId::UnionId(it) => (
|
||||
it.lookup(db).source(db).map(|it| {
|
||||
it.record_field_def_list()
|
||||
it.record_field_list()
|
||||
.map(ast::StructKind::Record)
|
||||
.unwrap_or(ast::StructKind::Unit)
|
||||
}),
|
||||
|
@ -218,7 +218,7 @@ pub enum StructKind {
|
|||
fn lower_struct(
|
||||
db: &dyn DefDatabase,
|
||||
expander: &mut CfgExpander,
|
||||
trace: &mut Trace<FieldData, Either<ast::TupleFieldDef, ast::RecordFieldDef>>,
|
||||
trace: &mut Trace<FieldData, Either<ast::TupleField, ast::RecordField>>,
|
||||
ast: &InFile<ast::StructKind>,
|
||||
) -> StructKind {
|
||||
let ctx = LowerCtx::new(db, ast.file_id);
|
||||
|
@ -234,7 +234,7 @@ fn lower_struct(
|
|||
|| Either::Left(fd.clone()),
|
||||
|| FieldData {
|
||||
name: Name::new_tuple_field(i),
|
||||
type_ref: TypeRef::from_ast_opt(&ctx, fd.type_ref()),
|
||||
type_ref: TypeRef::from_ast_opt(&ctx, fd.ty()),
|
||||
visibility: RawVisibility::from_ast(db, ast.with_value(fd.visibility())),
|
||||
},
|
||||
);
|
||||
|
@ -251,7 +251,7 @@ fn lower_struct(
|
|||
|| Either::Right(fd.clone()),
|
||||
|| FieldData {
|
||||
name: fd.name().map(|n| n.as_name()).unwrap_or_else(Name::missing),
|
||||
type_ref: TypeRef::from_ast_opt(&ctx, fd.ascribed_type()),
|
||||
type_ref: TypeRef::from_ast_opt(&ctx, fd.ty()),
|
||||
visibility: RawVisibility::from_ast(db, ast.with_value(fd.visibility())),
|
||||
},
|
||||
);
|
||||
|
|
|
@ -5,7 +5,7 @@ use std::{ops, sync::Arc};
|
|||
use either::Either;
|
||||
use hir_expand::{hygiene::Hygiene, AstId, InFile};
|
||||
use mbe::ast_to_token_tree;
|
||||
use ra_cfg::CfgOptions;
|
||||
use ra_cfg::{CfgExpr, CfgOptions};
|
||||
use ra_syntax::{
|
||||
ast::{self, AstNode, AttrsOwner},
|
||||
SmolStr,
|
||||
|
@@ -125,9 +125,12 @@ impl Attrs {
         AttrQuery { attrs: self, key }
     }
 
-    pub(crate) fn is_cfg_enabled(&self, cfg_options: &CfgOptions) -> bool {
+    pub fn cfg(&self) -> impl Iterator<Item = CfgExpr> + '_ {
         // FIXME: handle cfg_attr :-)
-        self.by_key("cfg").tt_values().all(|tt| cfg_options.is_cfg_enabled(tt) != Some(false))
+        self.by_key("cfg").tt_values().map(CfgExpr::parse)
     }
+    pub(crate) fn is_cfg_enabled(&self, cfg_options: &CfgOptions) -> bool {
+        self.cfg().all(|cfg| cfg_options.check(&cfg) != Some(false))
+    }
 }
 
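The `Attrs::cfg` / `is_cfg_enabled` split above parses each `#[cfg(...)]` token tree into a `CfgExpr` and asks the crate's `CfgOptions` to evaluate it; the result is three-valued, and an item is only disabled on a definite `Some(false)`. A hedged sketch of that evaluation shape, with simplified stand-ins rather than `ra_cfg`'s real types:

```rust
use std::collections::HashSet;

// Simplified stand-ins for ra_cfg's CfgExpr / CfgOptions (illustrative only).
#[derive(Debug)]
enum CfgExpr {
    Atom(String),      // cfg(test)
    Not(Box<CfgExpr>), // cfg(not(...))
    All(Vec<CfgExpr>), // cfg(all(...))
    Any(Vec<CfgExpr>), // cfg(any(...))
    Invalid,           // could not be parsed
}

struct CfgOptions {
    enabled: HashSet<String>,
}

impl CfgOptions {
    /// Three-valued evaluation: `None` means "don't know" (e.g. unparsable input),
    /// and callers treat anything except `Some(false)` as enabled.
    fn check(&self, expr: &CfgExpr) -> Option<bool> {
        match expr {
            CfgExpr::Invalid => None,
            CfgExpr::Atom(name) => Some(self.enabled.contains(name)),
            CfgExpr::Not(inner) => self.check(inner).map(|b| !b),
            CfgExpr::All(es) => es.iter().try_fold(true, |acc, e| Some(acc && self.check(e)?)),
            CfgExpr::Any(es) => es.iter().try_fold(false, |acc, e| Some(acc || self.check(e)?)),
        }
    }
}

fn main() {
    let opts = CfgOptions { enabled: ["unix".to_string()].into_iter().collect() };
    let expr = CfgExpr::All(vec![
        CfgExpr::Atom("unix".into()),
        CfgExpr::Not(Box::new(CfgExpr::Atom("test".into()))),
    ]);
    // Mirrors `attrs.cfg().all(|cfg| cfg_options.check(&cfg) != Some(false))`.
    assert!(opts.check(&expr) != Some(false));
}
```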
@ -148,18 +151,15 @@ pub enum AttrInput {
|
|||
impl Attr {
|
||||
fn from_src(ast: ast::Attr, hygiene: &Hygiene) -> Option<Attr> {
|
||||
let path = ModPath::from_src(ast.path()?, hygiene)?;
|
||||
let input = match ast.input() {
|
||||
None => None,
|
||||
Some(ast::AttrInput::Literal(lit)) => {
|
||||
let input = if let Some(lit) = ast.literal() {
|
||||
// FIXME: escape? raw string?
|
||||
let value = lit.syntax().first_token()?.text().trim_matches('"').into();
|
||||
Some(AttrInput::Literal(value))
|
||||
}
|
||||
Some(ast::AttrInput::TokenTree(tt)) => {
|
||||
} else if let Some(tt) = ast.token_tree() {
|
||||
Some(AttrInput::TokenTree(ast_to_token_tree(&tt)?.0))
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
Some(Attr { path, input })
|
||||
}
|
||||
}
|
||||
|
|
|
@ -14,6 +14,7 @@ use ra_db::CrateId;
|
|||
use ra_prof::profile;
|
||||
use ra_syntax::{ast, AstNode, AstPtr};
|
||||
use rustc_hash::FxHashMap;
|
||||
use test_utils::mark;
|
||||
|
||||
pub(crate) use lower::LowerCtx;
|
||||
|
||||
|
@@ -42,9 +43,15 @@ pub(crate) struct Expander {
     current_file_id: HirFileId,
     ast_id_map: Arc<AstIdMap>,
     module: ModuleId,
-    recursive_limit: usize,
+    recursion_limit: usize,
 }
 
+#[cfg(test)]
+const EXPANSION_RECURSION_LIMIT: usize = 32;
+
+#[cfg(not(test))]
+const EXPANSION_RECURSION_LIMIT: usize = 128;
+
 impl CfgExpander {
     pub(crate) fn new(
         db: &dyn DefDatabase,
|
@ -81,7 +88,7 @@ impl Expander {
|
|||
current_file_id,
|
||||
ast_id_map,
|
||||
module,
|
||||
recursive_limit: 0,
|
||||
recursion_limit: 0,
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -91,7 +98,9 @@ impl Expander {
         local_scope: Option<&ItemScope>,
         macro_call: ast::MacroCall,
     ) -> Option<(Mark, T)> {
-        if self.recursive_limit > 1024 {
+        self.recursion_limit += 1;
+        if self.recursion_limit > EXPANSION_RECURSION_LIMIT {
+            mark::hit!(your_stack_belongs_to_me);
             return None;
         }
 
|
@ -118,8 +127,6 @@ impl Expander {
|
|||
self.cfg_expander.hygiene = Hygiene::new(db.upcast(), file_id);
|
||||
self.current_file_id = file_id;
|
||||
self.ast_id_map = db.ast_id_map(file_id);
|
||||
self.recursive_limit += 1;
|
||||
|
||||
return Some((mark, expr));
|
||||
}
|
||||
}
|
||||
|
@ -134,7 +141,7 @@ impl Expander {
|
|||
self.cfg_expander.hygiene = Hygiene::new(db.upcast(), mark.file_id);
|
||||
self.current_file_id = mark.file_id;
|
||||
self.ast_id_map = mem::take(&mut mark.ast_id_map);
|
||||
self.recursive_limit -= 1;
|
||||
self.recursion_limit -= 1;
|
||||
mark.bomb.defuse();
|
||||
}
|
||||
|
||||
|
@ -209,7 +216,7 @@ pub struct BodySourceMap {
|
|||
expr_map_back: ArenaMap<ExprId, Result<ExprSource, SyntheticSyntax>>,
|
||||
pat_map: FxHashMap<PatSource, PatId>,
|
||||
pat_map_back: ArenaMap<PatId, Result<PatSource, SyntheticSyntax>>,
|
||||
field_map: FxHashMap<(ExprId, usize), InFile<AstPtr<ast::RecordField>>>,
|
||||
field_map: FxHashMap<(ExprId, usize), InFile<AstPtr<ast::RecordExprField>>>,
|
||||
expansions: FxHashMap<InFile<AstPtr<ast::MacroCall>>, HirFileId>,
|
||||
}
|
||||
|
||||
|
@ -302,7 +309,53 @@ impl BodySourceMap {
|
|||
self.pat_map.get(&src).cloned()
|
||||
}
|
||||
|
||||
pub fn field_syntax(&self, expr: ExprId, field: usize) -> InFile<AstPtr<ast::RecordField>> {
|
||||
pub fn node_self_param(&self, node: InFile<&ast::SelfParam>) -> Option<PatId> {
|
||||
let src = node.map(|it| Either::Right(AstPtr::new(it)));
|
||||
self.pat_map.get(&src).cloned()
|
||||
}
|
||||
|
||||
pub fn field_syntax(&self, expr: ExprId, field: usize) -> InFile<AstPtr<ast::RecordExprField>> {
|
||||
self.field_map[&(expr, field)].clone()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use ra_db::{fixture::WithFixture, SourceDatabase};
|
||||
use test_utils::mark;
|
||||
|
||||
use crate::ModuleDefId;
|
||||
|
||||
use super::*;
|
||||
|
||||
fn lower(ra_fixture: &str) -> Arc<Body> {
|
||||
let (db, file_id) = crate::test_db::TestDB::with_single_file(ra_fixture);
|
||||
|
||||
let krate = db.crate_graph().iter().next().unwrap();
|
||||
let def_map = db.crate_def_map(krate);
|
||||
let module = def_map.modules_for_file(file_id).next().unwrap();
|
||||
let module = &def_map[module];
|
||||
let fn_def = match module.scope.declarations().next().unwrap() {
|
||||
ModuleDefId::FunctionId(it) => it,
|
||||
_ => panic!(),
|
||||
};
|
||||
|
||||
db.body(fn_def.into())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn your_stack_belongs_to_me() {
|
||||
mark::check!(your_stack_belongs_to_me);
|
||||
lower(
|
||||
"
|
||||
macro_rules! n_nuple {
|
||||
($e:tt) => ();
|
||||
($($rest:tt)*) => {{
|
||||
(n_nuple!($($rest)*)None,)
|
||||
}};
|
||||
}
|
||||
fn main() { n_nuple!(1,2,3); }
|
||||
",
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
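The expander changes above replace the ad-hoc `> 1024` check with a named `EXPANSION_RECURSION_LIMIT` (32 under `#[cfg(test)]`, 128 otherwise), which is exactly what the new `your_stack_belongs_to_me` test trips. A minimal sketch of the same depth-guard pattern, using stand-in names rather than rust-analyzer's real `Expander`:

```rust
// Illustrative depth guard for a recursive expansion step (stand-in types, not the crate's API).
#[cfg(test)]
const EXPANSION_RECURSION_LIMIT: usize = 32;
#[cfg(not(test))]
const EXPANSION_RECURSION_LIMIT: usize = 128;

struct Expander {
    recursion_limit: usize, // current expansion depth
}

impl Expander {
    /// Called when entering a nested macro expansion; `None` aborts the expansion.
    fn enter_expand(&mut self) -> Option<()> {
        self.recursion_limit += 1;
        if self.recursion_limit > EXPANSION_RECURSION_LIMIT {
            // Give up instead of overflowing the stack on pathological macros.
            return None;
        }
        Some(())
    }

    /// Called when leaving the expansion again.
    fn exit(&mut self) {
        self.recursion_limit -= 1;
    }
}

fn main() {
    let mut e = Expander { recursion_limit: 0 };
    assert!(e.enter_expand().is_some());
    e.exit();
    assert_eq!(e.recursion_limit, 0);
}
```

Keeping the limit much lower under test is what lets the regression test above finish quickly without risking a real stack overflow in release builds.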
@ -11,7 +11,7 @@ use ra_arena::Arena;
|
|||
use ra_syntax::{
|
||||
ast::{
|
||||
self, ArgListOwner, ArrayExprKind, LiteralKind, LoopBodyOwner, ModuleItemOwner, NameOwner,
|
||||
SlicePatComponents, TypeAscriptionOwner,
|
||||
SlicePatComponents,
|
||||
},
|
||||
AstNode, AstPtr,
|
||||
};
|
||||
|
@ -379,10 +379,10 @@ impl ExprCollector<'_> {
|
|||
let expr = e.expr().map(|e| self.collect_expr(e));
|
||||
self.alloc_expr(Expr::Return { expr }, syntax_ptr)
|
||||
}
|
||||
ast::Expr::RecordLit(e) => {
|
||||
ast::Expr::RecordExpr(e) => {
|
||||
let path = e.path().and_then(|path| self.expander.parse_path(path));
|
||||
let mut field_ptrs = Vec::new();
|
||||
let record_lit = if let Some(nfl) = e.record_field_list() {
|
||||
let record_lit = if let Some(nfl) = e.record_expr_field_list() {
|
||||
let fields = nfl
|
||||
.fields()
|
||||
.inspect(|field| field_ptrs.push(AstPtr::new(field)))
|
||||
|
@ -432,7 +432,7 @@ impl ExprCollector<'_> {
|
|||
}
|
||||
ast::Expr::CastExpr(e) => {
|
||||
let expr = self.collect_expr_opt(e.expr());
|
||||
let type_ref = TypeRef::from_ast_opt(&self.ctx(), e.type_ref());
|
||||
let type_ref = TypeRef::from_ast_opt(&self.ctx(), e.ty());
|
||||
self.alloc_expr(Expr::Cast { expr, type_ref }, syntax_ptr)
|
||||
}
|
||||
ast::Expr::RefExpr(e) => {
|
||||
|
@ -466,16 +466,13 @@ impl ExprCollector<'_> {
|
|||
if let Some(pl) = e.param_list() {
|
||||
for param in pl.params() {
|
||||
let pat = self.collect_pat_opt(param.pat());
|
||||
let type_ref =
|
||||
param.ascribed_type().map(|it| TypeRef::from_ast(&self.ctx(), it));
|
||||
let type_ref = param.ty().map(|it| TypeRef::from_ast(&self.ctx(), it));
|
||||
args.push(pat);
|
||||
arg_types.push(type_ref);
|
||||
}
|
||||
}
|
||||
let ret_type = e
|
||||
.ret_type()
|
||||
.and_then(|r| r.type_ref())
|
||||
.map(|it| TypeRef::from_ast(&self.ctx(), it));
|
||||
let ret_type =
|
||||
e.ret_type().and_then(|r| r.ty()).map(|it| TypeRef::from_ast(&self.ctx(), it));
|
||||
let body = self.collect_expr_opt(e.body());
|
||||
self.alloc_expr(Expr::Lambda { args, arg_types, ret_type, body }, syntax_ptr)
|
||||
}
|
||||
|
@ -607,8 +604,7 @@ impl ExprCollector<'_> {
|
|||
.map(|s| match s {
|
||||
ast::Stmt::LetStmt(stmt) => {
|
||||
let pat = self.collect_pat_opt(stmt.pat());
|
||||
let type_ref =
|
||||
stmt.ascribed_type().map(|it| TypeRef::from_ast(&self.ctx(), it));
|
||||
let type_ref = stmt.ty().map(|it| TypeRef::from_ast(&self.ctx(), it));
|
||||
let initializer = stmt.initializer().map(|e| self.collect_expr(e));
|
||||
Statement::Let { pat, type_ref, initializer }
|
||||
}
|
||||
|
@ -627,53 +623,53 @@ impl ExprCollector<'_> {
|
|||
.items()
|
||||
.filter_map(|item| {
|
||||
let (def, name): (ModuleDefId, Option<ast::Name>) = match item {
|
||||
ast::ModuleItem::FnDef(def) => {
|
||||
ast::Item::Fn(def) => {
|
||||
let id = self.find_inner_item(&def)?;
|
||||
(
|
||||
FunctionLoc { container: container.into(), id }.intern(self.db).into(),
|
||||
def.name(),
|
||||
)
|
||||
}
|
||||
ast::ModuleItem::TypeAliasDef(def) => {
|
||||
ast::Item::TypeAlias(def) => {
|
||||
let id = self.find_inner_item(&def)?;
|
||||
(
|
||||
TypeAliasLoc { container: container.into(), id }.intern(self.db).into(),
|
||||
def.name(),
|
||||
)
|
||||
}
|
||||
ast::ModuleItem::ConstDef(def) => {
|
||||
ast::Item::Const(def) => {
|
||||
let id = self.find_inner_item(&def)?;
|
||||
(
|
||||
ConstLoc { container: container.into(), id }.intern(self.db).into(),
|
||||
def.name(),
|
||||
)
|
||||
}
|
||||
ast::ModuleItem::StaticDef(def) => {
|
||||
ast::Item::Static(def) => {
|
||||
let id = self.find_inner_item(&def)?;
|
||||
(StaticLoc { container, id }.intern(self.db).into(), def.name())
|
||||
}
|
||||
ast::ModuleItem::StructDef(def) => {
|
||||
ast::Item::Struct(def) => {
|
||||
let id = self.find_inner_item(&def)?;
|
||||
(StructLoc { container, id }.intern(self.db).into(), def.name())
|
||||
}
|
||||
ast::ModuleItem::EnumDef(def) => {
|
||||
ast::Item::Enum(def) => {
|
||||
let id = self.find_inner_item(&def)?;
|
||||
(EnumLoc { container, id }.intern(self.db).into(), def.name())
|
||||
}
|
||||
ast::ModuleItem::UnionDef(def) => {
|
||||
ast::Item::Union(def) => {
|
||||
let id = self.find_inner_item(&def)?;
|
||||
(UnionLoc { container, id }.intern(self.db).into(), def.name())
|
||||
}
|
||||
ast::ModuleItem::TraitDef(def) => {
|
||||
ast::Item::Trait(def) => {
|
||||
let id = self.find_inner_item(&def)?;
|
||||
(TraitLoc { container, id }.intern(self.db).into(), def.name())
|
||||
}
|
||||
ast::ModuleItem::ExternBlock(_) => return None, // FIXME: collect from extern blocks
|
||||
ast::ModuleItem::ImplDef(_)
|
||||
| ast::ModuleItem::UseItem(_)
|
||||
| ast::ModuleItem::ExternCrateItem(_)
|
||||
| ast::ModuleItem::Module(_)
|
||||
| ast::ModuleItem::MacroCall(_) => return None,
|
||||
ast::Item::ExternBlock(_) => return None, // FIXME: collect from extern blocks
|
||||
ast::Item::Impl(_)
|
||||
| ast::Item::Use(_)
|
||||
| ast::Item::ExternCrate(_)
|
||||
| ast::Item::Module(_)
|
||||
| ast::Item::MacroCall(_) => return None,
|
||||
};
|
||||
|
||||
Some((def, name))
|
||||
|
|
|
@ -162,7 +162,7 @@ impl ChildBySource for EnumId {
|
|||
let arena_map = arena_map.as_ref();
|
||||
for (local_id, source) in arena_map.value.iter() {
|
||||
let id = EnumVariantId { parent: *self, local_id };
|
||||
res[keys::ENUM_VARIANT].insert(arena_map.with_value(source.clone()), id)
|
||||
res[keys::VARIANT].insert(arena_map.with_value(source.clone()), id)
|
||||
}
|
||||
|
||||
res
|
||||
|
|
|
@ -27,11 +27,12 @@ pub struct FunctionData {
|
|||
/// can be called as a method.
|
||||
pub has_self_param: bool,
|
||||
pub is_unsafe: bool,
|
||||
pub is_varargs: bool,
|
||||
pub visibility: RawVisibility,
|
||||
}
|
||||
|
||||
impl FunctionData {
|
||||
pub(crate) fn fn_data_query(db: &impl DefDatabase, func: FunctionId) -> Arc<FunctionData> {
|
||||
pub(crate) fn fn_data_query(db: &dyn DefDatabase, func: FunctionId) -> Arc<FunctionData> {
|
||||
let loc = func.lookup(db);
|
||||
let item_tree = db.item_tree(loc.id.file_id);
|
||||
let func = &item_tree[loc.id.value];
|
||||
|
@ -43,6 +44,7 @@ impl FunctionData {
|
|||
attrs: item_tree.attrs(ModItem::from(loc.id.value).into()).clone(),
|
||||
has_self_param: func.has_self_param,
|
||||
is_unsafe: func.is_unsafe,
|
||||
is_varargs: func.is_varargs,
|
||||
visibility: item_tree[func.visibility].clone(),
|
||||
})
|
||||
}
|
||||
|
|
|
@ -12,7 +12,7 @@ use hir_expand::{
|
|||
use ra_arena::{map::ArenaMap, Arena};
|
||||
use ra_db::FileId;
|
||||
use ra_prof::profile;
|
||||
use ra_syntax::ast::{self, NameOwner, TypeBoundsOwner, TypeParamsOwner};
|
||||
use ra_syntax::ast::{self, GenericParamsOwner, NameOwner, TypeBoundsOwner};
|
||||
|
||||
use crate::{
|
||||
body::LowerCtx,
|
||||
|
@ -66,7 +66,7 @@ pub enum WherePredicateTarget {
|
|||
TypeParam(LocalTypeParamId),
|
||||
}
|
||||
|
||||
type SourceMap = ArenaMap<LocalTypeParamId, Either<ast::TraitDef, ast::TypeParam>>;
|
||||
type SourceMap = ArenaMap<LocalTypeParamId, Either<ast::Trait, ast::TypeParam>>;
|
||||
|
||||
impl GenericParams {
|
||||
pub(crate) fn generic_params_query(
|
||||
|
@ -205,9 +205,9 @@ impl GenericParams {
|
|||
&mut self,
|
||||
lower_ctx: &LowerCtx,
|
||||
sm: &mut SourceMap,
|
||||
node: &dyn TypeParamsOwner,
|
||||
node: &dyn GenericParamsOwner,
|
||||
) {
|
||||
if let Some(params) = node.type_param_list() {
|
||||
if let Some(params) = node.generic_param_list() {
|
||||
self.fill_params(lower_ctx, sm, params)
|
||||
}
|
||||
if let Some(where_clause) = node.where_clause() {
|
||||
|
@ -232,7 +232,7 @@ impl GenericParams {
|
|||
&mut self,
|
||||
lower_ctx: &LowerCtx,
|
||||
sm: &mut SourceMap,
|
||||
params: ast::TypeParamList,
|
||||
params: ast::GenericParamList,
|
||||
) {
|
||||
for type_param in params.type_params() {
|
||||
let name = type_param.name().map_or_else(Name::missing, |it| it.as_name());
|
||||
|
@ -317,7 +317,7 @@ impl GenericParams {
|
|||
|
||||
impl HasChildSource for GenericDefId {
|
||||
type ChildId = LocalTypeParamId;
|
||||
type Value = Either<ast::TraitDef, ast::TypeParam>;
|
||||
type Value = Either<ast::Trait, ast::TypeParam>;
|
||||
fn child_source(&self, db: &dyn DefDatabase) -> InFile<SourceMap> {
|
||||
let (_, sm) = GenericParams::new(db, *self);
|
||||
sm
|
||||
|
|
|
@ -5,14 +5,16 @@ use std::{cmp::Ordering, fmt, hash::BuildHasherDefault, sync::Arc};
|
|||
use fst::{self, Streamer};
|
||||
use indexmap::{map::Entry, IndexMap};
|
||||
use ra_db::CrateId;
|
||||
use rustc_hash::FxHasher;
|
||||
use ra_syntax::SmolStr;
|
||||
use rustc_hash::{FxHashMap, FxHasher};
|
||||
use smallvec::SmallVec;
|
||||
|
||||
use crate::{
|
||||
db::DefDatabase,
|
||||
item_scope::ItemInNs,
|
||||
path::{ModPath, PathKind},
|
||||
visibility::Visibility,
|
||||
ModuleDefId, ModuleId,
|
||||
AssocItemId, ModuleDefId, ModuleId, TraitId,
|
||||
};
|
||||
|
||||
type FxIndexMap<K, V> = IndexMap<K, V, BuildHasherDefault<FxHasher>>;
|
||||
|
@ -34,6 +36,7 @@ pub struct ImportInfo {
|
|||
///
|
||||
/// Note that all paths are relative to the containing crate's root, so the crate name still needs
|
||||
/// to be prepended to the `ModPath` before the path is valid.
|
||||
#[derive(Default)]
|
||||
pub struct ImportMap {
|
||||
map: FxIndexMap<ItemInNs, ImportInfo>,
|
||||
|
||||
|
@@ -45,13 +48,17 @@ pub struct ImportMap {
     /// the index of the first one.
     importables: Vec<ItemInNs>,
     fst: fst::Map<Vec<u8>>,
+
+    /// Maps names of associated items to the item's ID. Only includes items whose defining trait is
+    /// exported.
+    assoc_map: FxHashMap<SmolStr, SmallVec<[AssocItemId; 1]>>,
 }
 
 impl ImportMap {
     pub fn import_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<Self> {
         let _p = ra_prof::profile("import_map_query");
         let def_map = db.crate_def_map(krate);
-        let mut import_map = FxIndexMap::with_capacity_and_hasher(64, Default::default());
+        let mut import_map = Self::default();
 
         // We look only into modules that are public(ly reexported), starting with the crate root.
         let empty = ModPath { kind: PathKind::Plain, segments: vec![] };
|
@ -85,7 +92,7 @@ impl ImportMap {
|
|||
|
||||
for item in per_ns.iter_items() {
|
||||
let path = mk_path();
|
||||
match import_map.entry(item) {
|
||||
match import_map.map.entry(item) {
|
||||
Entry::Vacant(entry) => {
|
||||
entry.insert(ImportInfo { path, container: module });
|
||||
}
|
||||
|
@ -105,11 +112,16 @@ impl ImportMap {
|
|||
if let Some(ModuleDefId::ModuleId(mod_id)) = item.as_module_def_id() {
|
||||
worklist.push((mod_id, mk_path()));
|
||||
}
|
||||
|
||||
// If we've added a path to a trait, add the trait's methods to the method map.
|
||||
if let Some(ModuleDefId::TraitId(tr)) = item.as_module_def_id() {
|
||||
import_map.collect_trait_methods(db, tr);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut importables = import_map.iter().collect::<Vec<_>>();
|
||||
let mut importables = import_map.map.iter().collect::<Vec<_>>();
|
||||
|
||||
importables.sort_by(cmp);
|
||||
|
||||
|
@ -133,10 +145,10 @@ impl ImportMap {
|
|||
builder.insert(key, start as u64).unwrap();
|
||||
}
|
||||
|
||||
let fst = fst::Map::new(builder.into_inner().unwrap()).unwrap();
|
||||
let importables = importables.iter().map(|(item, _)| **item).collect();
|
||||
import_map.fst = fst::Map::new(builder.into_inner().unwrap()).unwrap();
|
||||
import_map.importables = importables.iter().map(|(item, _)| **item).collect();
|
||||
|
||||
Arc::new(Self { map: import_map, fst, importables })
|
||||
Arc::new(import_map)
|
||||
}
|
||||
|
||||
/// Returns the `ModPath` needed to import/mention `item`, relative to this crate's root.
|
||||
|
@ -147,6 +159,13 @@ impl ImportMap {
|
|||
pub fn import_info_for(&self, item: ItemInNs) -> Option<&ImportInfo> {
|
||||
self.map.get(&item)
|
||||
}
|
||||
|
||||
fn collect_trait_methods(&mut self, db: &dyn DefDatabase, tr: TraitId) {
|
||||
let data = db.trait_data(tr);
|
||||
for (name, item) in data.items.iter() {
|
||||
self.assoc_map.entry(name.to_string().into()).or_default().push(*item);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for ImportMap {
|
||||
|
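The `assoc_map` introduced above indexes associated items of publicly importable traits by bare name, so the exact-match branch added to `search_dependencies` in the next hunk can surface, say, a trait's `fmt` method for the query `fmt`. A small illustrative sketch of that name-to-IDs index using std containers instead of `FxHashMap`/`SmallVec` (the `AssocItemId` here is a hypothetical stand-in):

```rust
use std::collections::HashMap;

// Stand-in ID type; the real map stores hir_def's AssocItemId values.
#[derive(Clone, Copy, Debug, PartialEq)]
struct AssocItemId(u32);

#[derive(Default)]
struct AssocIndex {
    // name of the associated item -> every exported trait item with that name
    map: HashMap<String, Vec<AssocItemId>>,
}

impl AssocIndex {
    /// Record all items of one exported trait, keyed by their plain names.
    fn collect_trait_items(&mut self, items: &[(&str, AssocItemId)]) {
        for (name, id) in items {
            self.map.entry(name.to_string()).or_default().push(*id);
        }
    }

    /// Exact-name lookup used to extend the fuzzy path-search results.
    fn exact(&self, query: &str) -> &[AssocItemId] {
        self.map.get(query).map(Vec::as_slice).unwrap_or(&[])
    }
}

fn main() {
    let mut index = AssocIndex::default();
    index.collect_trait_items(&[("fmt", AssocItemId(0)), ("clone", AssocItemId(1))]);
    assert_eq!(index.exact("fmt"), &[AssocItemId(0)]);
}
```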
@ -290,37 +309,32 @@ pub fn search_dependencies<'a>(
|
|||
}
|
||||
}
|
||||
|
||||
// Add all exported associated items whose names match the query (exactly).
|
||||
for map in &import_maps {
|
||||
if let Some(v) = map.assoc_map.get(&*query.query) {
|
||||
res.extend(v.iter().map(|&assoc| {
|
||||
ItemInNs::Types(match assoc {
|
||||
AssocItemId::FunctionId(it) => it.into(),
|
||||
AssocItemId::ConstId(it) => it.into(),
|
||||
AssocItemId::TypeAliasId(it) => it.into(),
|
||||
})
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
||||
res
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use expect::{expect, Expect};
|
||||
use ra_db::{fixture::WithFixture, SourceDatabase, Upcast};
|
||||
|
||||
use crate::{test_db::TestDB, AssocContainerId, Lookup};
|
||||
|
||||
use super::*;
|
||||
use crate::test_db::TestDB;
|
||||
use insta::assert_snapshot;
|
||||
use itertools::Itertools;
|
||||
use ra_db::fixture::WithFixture;
|
||||
use ra_db::{SourceDatabase, Upcast};
|
||||
|
||||
fn import_map(ra_fixture: &str) -> String {
|
||||
let db = TestDB::with_files(ra_fixture);
|
||||
let crate_graph = db.crate_graph();
|
||||
|
||||
let s = crate_graph
|
||||
.iter()
|
||||
.filter_map(|krate| {
|
||||
let cdata = &crate_graph[krate];
|
||||
let name = cdata.display_name.as_ref()?;
|
||||
|
||||
let map = db.import_map(krate);
|
||||
|
||||
Some(format!("{}:\n{:?}", name, map))
|
||||
})
|
||||
.join("\n");
|
||||
s
|
||||
}
|
||||
|
||||
fn search_dependencies_of(ra_fixture: &str, krate_name: &str, query: Query) -> String {
|
||||
fn check_search(ra_fixture: &str, krate_name: &str, query: Query, expect: Expect) {
|
||||
let db = TestDB::with_files(ra_fixture);
|
||||
let crate_graph = db.crate_graph();
|
||||
let krate = crate_graph
|
||||
|
@ -331,7 +345,7 @@ mod tests {
|
|||
})
|
||||
.unwrap();
|
||||
|
||||
search_dependencies(db.upcast(), krate, query)
|
||||
let actual = search_dependencies(db.upcast(), krate, query)
|
||||
.into_iter()
|
||||
.filter_map(|item| {
|
||||
let mark = match item {
|
||||
|
@ -339,23 +353,67 @@ mod tests {
|
|||
ItemInNs::Values(_) => "v",
|
||||
ItemInNs::Macros(_) => "m",
|
||||
};
|
||||
let item = assoc_to_trait(&db, item);
|
||||
item.krate(db.upcast()).map(|krate| {
|
||||
let map = db.import_map(krate);
|
||||
let path = map.path_of(item).unwrap();
|
||||
format!(
|
||||
"{}::{} ({})",
|
||||
"{}::{} ({})\n",
|
||||
crate_graph[krate].display_name.as_ref().unwrap(),
|
||||
path,
|
||||
mark
|
||||
)
|
||||
})
|
||||
})
|
||||
.join("\n")
|
||||
.collect::<String>();
|
||||
expect.assert_eq(&actual)
|
||||
}
|
||||
|
||||
fn assoc_to_trait(db: &dyn DefDatabase, item: ItemInNs) -> ItemInNs {
|
||||
let assoc: AssocItemId = match item {
|
||||
ItemInNs::Types(it) | ItemInNs::Values(it) => match it {
|
||||
ModuleDefId::TypeAliasId(it) => it.into(),
|
||||
ModuleDefId::FunctionId(it) => it.into(),
|
||||
ModuleDefId::ConstId(it) => it.into(),
|
||||
_ => return item,
|
||||
},
|
||||
_ => return item,
|
||||
};
|
||||
|
||||
let container = match assoc {
|
||||
AssocItemId::FunctionId(it) => it.lookup(db).container,
|
||||
AssocItemId::ConstId(it) => it.lookup(db).container,
|
||||
AssocItemId::TypeAliasId(it) => it.lookup(db).container,
|
||||
};
|
||||
|
||||
match container {
|
||||
AssocContainerId::TraitId(it) => ItemInNs::Types(it.into()),
|
||||
_ => item,
|
||||
}
|
||||
}
|
||||
|
||||
fn check(ra_fixture: &str, expect: Expect) {
|
||||
let db = TestDB::with_files(ra_fixture);
|
||||
let crate_graph = db.crate_graph();
|
||||
|
||||
let actual = crate_graph
|
||||
.iter()
|
||||
.filter_map(|krate| {
|
||||
let cdata = &crate_graph[krate];
|
||||
let name = cdata.display_name.as_ref()?;
|
||||
|
||||
let map = db.import_map(krate);
|
||||
|
||||
Some(format!("{}:\n{:?}\n", name, map))
|
||||
})
|
||||
.collect::<String>();
|
||||
|
||||
expect.assert_eq(&actual)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn smoke() {
|
||||
let map = import_map(
|
||||
check(
|
||||
r"
|
||||
//- /main.rs crate:main deps:lib
|
||||
|
||||
|
@ -380,9 +438,7 @@ mod tests {
|
|||
pub struct Pub2; // t + v
|
||||
struct Priv;
|
||||
",
|
||||
);
|
||||
|
||||
assert_snapshot!(map, @r###"
|
||||
expect![[r#"
|
||||
main:
|
||||
- publ1 (t)
|
||||
- real_pu2 (t)
|
||||
|
@ -392,12 +448,13 @@ mod tests {
|
|||
- Pub (t)
|
||||
- Pub2 (t)
|
||||
- Pub2 (v)
|
||||
"###);
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prefers_shortest_path() {
|
||||
let map = import_map(
|
||||
check(
|
||||
r"
|
||||
//- /main.rs crate:main
|
||||
|
||||
|
@ -409,21 +466,20 @@ mod tests {
|
|||
pub use super::sub::subsub::Def;
|
||||
}
|
||||
",
|
||||
);
|
||||
|
||||
assert_snapshot!(map, @r###"
|
||||
expect![[r#"
|
||||
main:
|
||||
- sub (t)
|
||||
- sub::Def (t)
|
||||
- sub::subsub (t)
|
||||
"###);
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn type_reexport_cross_crate() {
|
||||
// Reexports need to be visible from a crate, even if the original crate exports the item
|
||||
// at a shorter path.
|
||||
let map = import_map(
|
||||
check(
|
||||
r"
|
||||
//- /main.rs crate:main deps:lib
|
||||
pub mod m {
|
||||
|
@ -432,9 +488,7 @@ mod tests {
|
|||
//- /lib.rs crate:lib
|
||||
pub struct S;
|
||||
",
|
||||
);
|
||||
|
||||
assert_snapshot!(map, @r###"
|
||||
expect![[r#"
|
||||
main:
|
||||
- m (t)
|
||||
- m::S (t)
|
||||
|
@ -442,12 +496,13 @@ mod tests {
|
|||
lib:
|
||||
- S (t)
|
||||
- S (v)
|
||||
"###);
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn macro_reexport() {
|
||||
let map = import_map(
|
||||
check(
|
||||
r"
|
||||
//- /main.rs crate:main deps:lib
|
||||
pub mod m {
|
||||
|
@ -459,21 +514,20 @@ mod tests {
|
|||
() => {};
|
||||
}
|
||||
",
|
||||
);
|
||||
|
||||
assert_snapshot!(map, @r###"
|
||||
expect![[r#"
|
||||
main:
|
||||
- m (t)
|
||||
- m::pub_macro (m)
|
||||
lib:
|
||||
- pub_macro (m)
|
||||
"###);
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn module_reexport() {
|
||||
// Reexporting modules from a dependency adds all contents to the import map.
|
||||
let map = import_map(
|
||||
check(
|
||||
r"
|
||||
//- /main.rs crate:main deps:lib
|
||||
pub use lib::module as reexported_module;
|
||||
|
@ -482,9 +536,7 @@ mod tests {
|
|||
pub struct S;
|
||||
}
|
||||
",
|
||||
);
|
||||
|
||||
assert_snapshot!(map, @r###"
|
||||
expect![[r#"
|
||||
main:
|
||||
- reexported_module (t)
|
||||
- reexported_module::S (t)
|
||||
|
@ -493,13 +545,14 @@ mod tests {
|
|||
- module (t)
|
||||
- module::S (t)
|
||||
- module::S (v)
|
||||
"###);
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cyclic_module_reexport() {
|
||||
// A cyclic reexport does not hang.
|
||||
let map = import_map(
|
||||
check(
|
||||
r"
|
||||
//- /lib.rs crate:lib
|
||||
pub mod module {
|
||||
|
@ -511,36 +564,35 @@ mod tests {
|
|||
pub use super::module;
|
||||
}
|
||||
",
|
||||
);
|
||||
|
||||
assert_snapshot!(map, @r###"
|
||||
expect![[r#"
|
||||
lib:
|
||||
- module (t)
|
||||
- module::S (t)
|
||||
- module::S (v)
|
||||
- sub (t)
|
||||
"###);
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn private_macro() {
|
||||
let map = import_map(
|
||||
check(
|
||||
r"
|
||||
//- /lib.rs crate:lib
|
||||
macro_rules! private_macro {
|
||||
() => {};
|
||||
}
|
||||
",
|
||||
);
|
||||
|
||||
assert_snapshot!(map, @r###"
|
||||
expect![[r#"
|
||||
lib:
|
||||
"###);
|
||||
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn namespacing() {
|
||||
let map = import_map(
|
||||
check(
|
||||
r"
|
||||
//- /lib.rs crate:lib
|
||||
pub struct Thing; // t + v
|
||||
|
@ -549,16 +601,15 @@ mod tests {
|
|||
() => {};
|
||||
}
|
||||
",
|
||||
);
|
||||
|
||||
assert_snapshot!(map, @r###"
|
||||
expect![[r#"
|
||||
lib:
|
||||
- Thing (m)
|
||||
- Thing (t)
|
||||
- Thing (v)
|
||||
"###);
|
||||
"#]],
|
||||
);
|
||||
|
||||
let map = import_map(
|
||||
check(
|
||||
r"
|
||||
//- /lib.rs crate:lib
|
||||
pub mod Thing {} // t
|
||||
|
@ -567,13 +618,12 @@ mod tests {
|
|||
() => {};
|
||||
}
|
||||
",
|
||||
);
|
||||
|
||||
assert_snapshot!(map, @r###"
|
||||
expect![[r#"
|
||||
lib:
|
||||
- Thing (m)
|
||||
- Thing (t)
|
||||
"###);
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -602,23 +652,33 @@ mod tests {
|
|||
}
|
||||
"#;
|
||||
|
||||
let res = search_dependencies_of(ra_fixture, "main", Query::new("fmt"));
|
||||
assert_snapshot!(res, @r###"
|
||||
check_search(
|
||||
ra_fixture,
|
||||
"main",
|
||||
Query::new("fmt"),
|
||||
expect![[r#"
|
||||
dep::fmt (t)
|
||||
dep::Fmt (t)
|
||||
dep::Fmt (v)
|
||||
dep::Fmt (m)
|
||||
dep::fmt::Display (t)
|
||||
dep::format (v)
|
||||
"###);
|
||||
dep::fmt::Display (t)
|
||||
"#]],
|
||||
);
|
||||
|
||||
let res = search_dependencies_of(ra_fixture, "main", Query::new("fmt").anchor_end());
|
||||
assert_snapshot!(res, @r###"
|
||||
check_search(
|
||||
ra_fixture,
|
||||
"main",
|
||||
Query::new("fmt").anchor_end(),
|
||||
expect![[r#"
|
||||
dep::fmt (t)
|
||||
dep::Fmt (t)
|
||||
dep::Fmt (v)
|
||||
dep::Fmt (m)
|
||||
"###);
|
||||
dep::fmt::Display (t)
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -631,26 +691,32 @@ mod tests {
|
|||
pub struct FMT;
|
||||
"#;
|
||||
|
||||
let res = search_dependencies_of(ra_fixture, "main", Query::new("FMT"));
|
||||
|
||||
assert_snapshot!(res, @r###"
|
||||
check_search(
|
||||
ra_fixture,
|
||||
"main",
|
||||
Query::new("FMT"),
|
||||
expect![[r#"
|
||||
dep::fmt (t)
|
||||
dep::fmt (v)
|
||||
dep::FMT (t)
|
||||
dep::FMT (v)
|
||||
"###);
|
||||
"#]],
|
||||
);
|
||||
|
||||
let res = search_dependencies_of(ra_fixture, "main", Query::new("FMT").case_sensitive());
|
||||
|
||||
assert_snapshot!(res, @r###"
|
||||
check_search(
|
||||
ra_fixture,
|
||||
"main",
|
||||
Query::new("FMT").case_sensitive(),
|
||||
expect![[r#"
|
||||
dep::FMT (t)
|
||||
dep::FMT (v)
|
||||
"###);
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn search_limit() {
|
||||
let res = search_dependencies_of(
|
||||
check_search(
|
||||
r#"
|
||||
//- /main.rs crate:main deps:dep
|
||||
//- /dep.rs crate:dep
|
||||
|
@ -670,10 +736,10 @@ mod tests {
|
|||
"#,
|
||||
"main",
|
||||
Query::new("").limit(2),
|
||||
);
|
||||
assert_snapshot!(res, @r###"
|
||||
expect![[r#"
|
||||
dep::fmt (t)
|
||||
dep::Fmt (t)
|
||||
"###);
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,8 @@
|
|||
//! Describes items defined or visible (ie, imported) in a certain scope.
|
||||
//! This is shared between modules and blocks.
|
||||
|
||||
use std::collections::hash_map::Entry;
|
||||
|
||||
use hir_expand::name::Name;
|
||||
use once_cell::sync::Lazy;
|
||||
use ra_db::CrateId;
|
||||
|
@@ -27,9 +29,15 @@ pub struct PerNsGlobImports {
 
 #[derive(Debug, Default, PartialEq, Eq)]
 pub struct ItemScope {
-    visible: FxHashMap<Name, PerNs>,
+    types: FxHashMap<Name, (ModuleDefId, Visibility)>,
+    values: FxHashMap<Name, (ModuleDefId, Visibility)>,
+    macros: FxHashMap<Name, (MacroDefId, Visibility)>,
+    unresolved: FxHashSet<Name>,
+
     defs: Vec<ModuleDefId>,
     impls: Vec<ImplId>,
+    /// Traits imported via `use Trait as _;`.
+    unnamed_trait_imports: FxHashMap<TraitId, Visibility>,
     /// Macros visible in current module in legacy textual scope
     ///
     /// For macros invoked by an unqualified identifier like `bar!()`, `legacy_macros` will be searched in first.
|
@ -66,13 +74,15 @@ pub(crate) enum BuiltinShadowMode {
|
|||
impl ItemScope {
|
||||
pub fn entries<'a>(&'a self) -> impl Iterator<Item = (&'a Name, PerNs)> + 'a {
|
||||
// FIXME: shadowing
|
||||
self.visible.iter().map(|(n, def)| (n, *def))
|
||||
}
|
||||
let keys: FxHashSet<_> = self
|
||||
.types
|
||||
.keys()
|
||||
.chain(self.values.keys())
|
||||
.chain(self.macros.keys())
|
||||
.chain(self.unresolved.iter())
|
||||
.collect();
|
||||
|
||||
pub fn entries_without_primitives<'a>(
|
||||
&'a self,
|
||||
) -> impl Iterator<Item = (&'a Name, PerNs)> + 'a {
|
||||
self.visible.iter().map(|(n, def)| (n, *def))
|
||||
keys.into_iter().map(move |name| (name, self.get(name)))
|
||||
}
|
||||
|
||||
pub fn declarations(&self) -> impl Iterator<Item = ModuleDefId> + '_ {
|
||||
|
@ -91,7 +101,7 @@ impl ItemScope {
|
|||
|
||||
/// Iterate over all module scoped macros
|
||||
pub(crate) fn macros<'a>(&'a self) -> impl Iterator<Item = (&'a Name, MacroDefId)> + 'a {
|
||||
self.visible.iter().filter_map(|(name, def)| def.take_macros().map(|macro_| (name, macro_)))
|
||||
self.entries().filter_map(|(name, def)| def.take_macros().map(|macro_| (name, macro_)))
|
||||
}
|
||||
|
||||
/// Iterate over all legacy textual scoped macros visible at the end of the module
|
||||
|
@ -101,12 +111,16 @@ impl ItemScope {
|
|||
|
||||
/// Get a name from current module scope, legacy macros are not included
|
||||
pub(crate) fn get(&self, name: &Name) -> PerNs {
|
||||
self.visible.get(name).copied().unwrap_or_else(PerNs::none)
|
||||
PerNs {
|
||||
types: self.types.get(name).copied(),
|
||||
values: self.values.get(name).copied(),
|
||||
macros: self.macros.get(name).copied(),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn name_of(&self, item: ItemInNs) -> Option<(&Name, Visibility)> {
|
||||
for (name, per_ns) in &self.visible {
|
||||
if let Some(vis) = item.match_with(*per_ns) {
|
||||
for (name, per_ns) in self.entries() {
|
||||
if let Some(vis) = item.match_with(per_ns) {
|
||||
return Some((name, vis));
|
||||
}
|
||||
}
|
||||
|
@ -114,10 +128,13 @@ impl ItemScope {
|
|||
}
|
||||
|
||||
pub(crate) fn traits<'a>(&'a self) -> impl Iterator<Item = TraitId> + 'a {
|
||||
self.visible.values().filter_map(|def| match def.take_types() {
|
||||
Some(ModuleDefId::TraitId(t)) => Some(t),
|
||||
self.types
|
||||
.values()
|
||||
.filter_map(|(def, _)| match def {
|
||||
ModuleDefId::TraitId(t) => Some(*t),
|
||||
_ => None,
|
||||
})
|
||||
.chain(self.unnamed_trait_imports.keys().copied())
|
||||
}
|
||||
|
||||
pub(crate) fn define_def(&mut self, def: ModuleDefId) {
|
||||
|
@ -136,23 +153,40 @@ impl ItemScope {
|
|||
self.legacy_macros.insert(name, mac);
|
||||
}
|
||||
|
||||
pub(crate) fn unnamed_trait_vis(&self, tr: TraitId) -> Option<Visibility> {
|
||||
self.unnamed_trait_imports.get(&tr).copied()
|
||||
}
|
||||
|
||||
pub(crate) fn push_unnamed_trait(&mut self, tr: TraitId, vis: Visibility) {
|
||||
self.unnamed_trait_imports.insert(tr, vis);
|
||||
}
|
||||
|
||||
pub(crate) fn push_res(&mut self, name: Name, def: PerNs) -> bool {
|
||||
let mut changed = false;
|
||||
let existing = self.visible.entry(name).or_default();
|
||||
|
||||
if existing.types.is_none() && def.types.is_some() {
|
||||
existing.types = def.types;
|
||||
if let Some(types) = def.types {
|
||||
self.types.entry(name.clone()).or_insert_with(|| {
|
||||
changed = true;
|
||||
types
|
||||
});
|
||||
}
|
||||
if let Some(values) = def.values {
|
||||
self.values.entry(name.clone()).or_insert_with(|| {
|
||||
changed = true;
|
||||
values
|
||||
});
|
||||
}
|
||||
if let Some(macros) = def.macros {
|
||||
self.macros.entry(name.clone()).or_insert_with(|| {
|
||||
changed = true;
|
||||
macros
|
||||
});
|
||||
}
|
||||
|
||||
if existing.values.is_none() && def.values.is_some() {
|
||||
existing.values = def.values;
|
||||
if def.is_none() {
|
||||
if self.unresolved.insert(name) {
|
||||
changed = true;
|
||||
}
|
||||
|
||||
if existing.macros.is_none() && def.macros.is_some() {
|
||||
existing.macros = def.macros;
|
||||
changed = true;
|
||||
}
|
||||
|
||||
changed
|
||||
|
@ -166,17 +200,17 @@ impl ItemScope {
|
|||
def_import_type: ImportType,
|
||||
) -> bool {
|
||||
let mut changed = false;
|
||||
let existing = self.visible.entry(lookup.1.clone()).or_default();
|
||||
|
||||
macro_rules! check_changed {
|
||||
(
|
||||
$changed:ident,
|
||||
( $existing:ident / $def:ident ) . $field:ident,
|
||||
( $this:ident / $def:ident ) . $field:ident,
|
||||
$glob_imports:ident [ $lookup:ident ],
|
||||
$def_import_type:ident
|
||||
) => {
|
||||
match ($existing.$field, $def.$field) {
|
||||
(None, Some(_)) => {
|
||||
) => {{
|
||||
let existing = $this.$field.entry($lookup.1.clone());
|
||||
match (existing, $def.$field) {
|
||||
(Entry::Vacant(entry), Some(_)) => {
|
||||
match $def_import_type {
|
||||
ImportType::Glob => {
|
||||
$glob_imports.$field.insert($lookup.clone());
|
||||
|
@ -186,32 +220,46 @@ impl ItemScope {
|
|||
}
|
||||
}
|
||||
|
||||
$existing.$field = $def.$field;
|
||||
if let Some(fld) = $def.$field {
|
||||
entry.insert(fld);
|
||||
}
|
||||
$changed = true;
|
||||
}
|
||||
(Some(_), Some(_))
|
||||
(Entry::Occupied(mut entry), Some(_))
|
||||
if $glob_imports.$field.contains(&$lookup)
|
||||
&& matches!($def_import_type, ImportType::Named) =>
|
||||
{
|
||||
mark::hit!(import_shadowed);
|
||||
$glob_imports.$field.remove(&$lookup);
|
||||
$existing.$field = $def.$field;
|
||||
if let Some(fld) = $def.$field {
|
||||
entry.insert(fld);
|
||||
}
|
||||
$changed = true;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
};
|
||||
}};
|
||||
}
|
||||
|
||||
check_changed!(changed, (existing / def).types, glob_imports[lookup], def_import_type);
|
||||
check_changed!(changed, (existing / def).values, glob_imports[lookup], def_import_type);
|
||||
check_changed!(changed, (existing / def).macros, glob_imports[lookup], def_import_type);
|
||||
check_changed!(changed, (self / def).types, glob_imports[lookup], def_import_type);
|
||||
check_changed!(changed, (self / def).values, glob_imports[lookup], def_import_type);
|
||||
check_changed!(changed, (self / def).macros, glob_imports[lookup], def_import_type);
|
||||
|
||||
if def.is_none() {
|
||||
if self.unresolved.insert(lookup.1) {
|
||||
changed = true;
|
||||
}
|
||||
}
|
||||
|
||||
changed
|
||||
}
|
||||
|
||||
pub(crate) fn resolutions<'a>(&'a self) -> impl Iterator<Item = (Name, PerNs)> + 'a {
|
||||
self.visible.iter().map(|(name, res)| (name.clone(), *res))
|
||||
pub(crate) fn resolutions<'a>(&'a self) -> impl Iterator<Item = (Option<Name>, PerNs)> + 'a {
|
||||
self.entries().map(|(name, res)| (Some(name.clone()), res)).chain(
|
||||
self.unnamed_trait_imports
|
||||
.iter()
|
||||
.map(|(tr, vis)| (None, PerNs::types(ModuleDefId::TraitId(*tr), *vis))),
|
||||
)
|
||||
}
|
||||
|
||||
pub(crate) fn collect_legacy_macros(&self) -> FxHashMap<Name, MacroDefId> {
|
||||
|
|
|
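The `ItemScope` rework above splits the single `visible: FxHashMap<Name, PerNs>` into per-namespace `types` / `values` / `macros` maps, reassembling a `PerNs` on lookup and leaning on the `Entry` API so `push_res` only reports a change when a vacant slot is filled. A reduced sketch of that lookup/insert shape (visibility and the real ID types omitted; plain `u32` values as stand-ins):

```rust
use std::collections::HashMap;

// Simplified per-namespace scope; the real maps also carry Visibility.
#[derive(Default)]
struct ItemScope {
    types: HashMap<String, u32>,
    values: HashMap<String, u32>,
    macros: HashMap<String, u32>,
}

#[derive(Debug, PartialEq, Default, Clone, Copy)]
struct PerNs {
    types: Option<u32>,
    values: Option<u32>,
    macros: Option<u32>,
}

impl ItemScope {
    /// Reassemble a `PerNs` from the three maps, mirroring the new `get`.
    fn get(&self, name: &str) -> PerNs {
        PerNs {
            types: self.types.get(name).copied(),
            values: self.values.get(name).copied(),
            macros: self.macros.get(name).copied(),
        }
    }

    /// First definition of a name in a namespace wins; returns whether anything changed.
    fn push_res(&mut self, name: &str, def: PerNs) -> bool {
        let mut changed = false;
        if let Some(t) = def.types {
            self.types.entry(name.to_string()).or_insert_with(|| { changed = true; t });
        }
        if let Some(v) = def.values {
            self.values.entry(name.to_string()).or_insert_with(|| { changed = true; v });
        }
        if let Some(m) = def.macros {
            self.macros.entry(name.to_string()).or_insert_with(|| { changed = true; m });
        }
        changed
    }
}

fn main() {
    let mut scope = ItemScope::default();
    assert!(scope.push_res("Foo", PerNs { types: Some(1), ..PerNs::default() }));
    assert!(!scope.push_res("Foo", PerNs { types: Some(2), ..PerNs::default() }));
    assert_eq!(scope.get("Foo").types, Some(1));
}
```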
@ -13,7 +13,7 @@ use std::{
|
|||
sync::Arc,
|
||||
};
|
||||
|
||||
use ast::{AstNode, AttrsOwner, NameOwner, StructKind, TypeAscriptionOwner};
|
||||
use ast::{AstNode, AttrsOwner, NameOwner, StructKind};
|
||||
use either::Either;
|
||||
use hir_expand::{
|
||||
ast_id_map::FileAstId,
|
||||
|
@ -70,7 +70,7 @@ impl GenericParamsId {
|
|||
pub struct ItemTree {
|
||||
top_level: SmallVec<[ModItem; 1]>,
|
||||
attrs: FxHashMap<AttrOwner, Attrs>,
|
||||
inner_items: FxHashMap<FileAstId<ast::ModuleItem>, SmallVec<[ModItem; 1]>>,
|
||||
inner_items: FxHashMap<FileAstId<ast::Item>, SmallVec<[ModItem; 1]>>,
|
||||
|
||||
data: Option<Box<ItemTreeData>>,
|
||||
}
|
||||
|
@ -187,7 +187,7 @@ impl ItemTree {
|
|||
///
|
||||
/// Most AST items are lowered to a single `ModItem`, but some (eg. `use` items) may be lowered
|
||||
/// to multiple items in the `ItemTree`.
|
||||
pub fn inner_items(&self, ast: FileAstId<ast::ModuleItem>) -> &[ModItem] {
|
||||
pub fn inner_items(&self, ast: FileAstId<ast::Item>) -> &[ModItem] {
|
||||
&self.inner_items[&ast]
|
||||
}
|
||||
|
||||
|
@ -310,7 +310,7 @@ from_attrs!(ModItem(ModItem), Variant(Idx<Variant>), Field(Idx<Field>));
|
|||
|
||||
/// Trait implemented by all item nodes in the item tree.
|
||||
pub trait ItemTreeNode: Clone {
|
||||
type Source: AstNode + Into<ast::ModuleItem>;
|
||||
type Source: AstNode + Into<ast::Item>;
|
||||
|
||||
fn ast_id(&self) -> FileAstId<Self::Source>;
|
||||
|
||||
|
@ -411,17 +411,17 @@ macro_rules! mod_items {
|
|||
}
|
||||
|
||||
mod_items! {
|
||||
Import in imports -> ast::UseItem,
|
||||
ExternCrate in extern_crates -> ast::ExternCrateItem,
|
||||
Function in functions -> ast::FnDef,
|
||||
Struct in structs -> ast::StructDef,
|
||||
Union in unions -> ast::UnionDef,
|
||||
Enum in enums -> ast::EnumDef,
|
||||
Const in consts -> ast::ConstDef,
|
||||
Static in statics -> ast::StaticDef,
|
||||
Trait in traits -> ast::TraitDef,
|
||||
Impl in impls -> ast::ImplDef,
|
||||
TypeAlias in type_aliases -> ast::TypeAliasDef,
|
||||
Import in imports -> ast::Use,
|
||||
ExternCrate in extern_crates -> ast::ExternCrate,
|
||||
Function in functions -> ast::Fn,
|
||||
Struct in structs -> ast::Struct,
|
||||
Union in unions -> ast::Union,
|
||||
Enum in enums -> ast::Enum,
|
||||
Const in consts -> ast::Const,
|
||||
Static in statics -> ast::Static,
|
||||
Trait in traits -> ast::Trait,
|
||||
Impl in impls -> ast::Impl,
|
||||
TypeAlias in type_aliases -> ast::TypeAlias,
|
||||
Mod in mods -> ast::Module,
|
||||
MacroCall in macro_calls -> ast::MacroCall,
|
||||
}
|
||||
|
@ -482,7 +482,7 @@ pub struct Import {
|
|||
pub is_prelude: bool,
|
||||
/// AST ID of the `use` or `extern crate` item this import was derived from. Note that many
|
||||
/// `Import`s can map to the same `use` item.
|
||||
pub ast_id: FileAstId<ast::UseItem>,
|
||||
pub ast_id: FileAstId<ast::Use>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
|
@ -492,7 +492,7 @@ pub struct ExternCrate {
|
|||
pub visibility: RawVisibilityId,
|
||||
/// Whether this is a `#[macro_use] extern crate ...`.
|
||||
pub is_macro_use: bool,
|
||||
pub ast_id: FileAstId<ast::ExternCrateItem>,
|
||||
pub ast_id: FileAstId<ast::ExternCrate>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
|
@ -503,8 +503,9 @@ pub struct Function {
|
|||
pub has_self_param: bool,
|
||||
pub is_unsafe: bool,
|
||||
pub params: Box<[TypeRef]>,
|
||||
pub is_varargs: bool,
|
||||
pub ret_type: TypeRef,
|
||||
pub ast_id: FileAstId<ast::FnDef>,
|
||||
pub ast_id: FileAstId<ast::Fn>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
|
@ -513,7 +514,7 @@ pub struct Struct {
|
|||
pub visibility: RawVisibilityId,
|
||||
pub generic_params: GenericParamsId,
|
||||
pub fields: Fields,
|
||||
pub ast_id: FileAstId<ast::StructDef>,
|
||||
pub ast_id: FileAstId<ast::Struct>,
|
||||
pub kind: StructDefKind,
|
||||
}
|
||||
|
||||
|
@ -533,7 +534,7 @@ pub struct Union {
|
|||
pub visibility: RawVisibilityId,
|
||||
pub generic_params: GenericParamsId,
|
||||
pub fields: Fields,
|
||||
pub ast_id: FileAstId<ast::UnionDef>,
|
||||
pub ast_id: FileAstId<ast::Union>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
|
@ -542,7 +543,7 @@ pub struct Enum {
|
|||
pub visibility: RawVisibilityId,
|
||||
pub generic_params: GenericParamsId,
|
||||
pub variants: IdRange<Variant>,
|
||||
pub ast_id: FileAstId<ast::EnumDef>,
|
||||
pub ast_id: FileAstId<ast::Enum>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
|
@ -551,7 +552,7 @@ pub struct Const {
|
|||
pub name: Option<Name>,
|
||||
pub visibility: RawVisibilityId,
|
||||
pub type_ref: TypeRef,
|
||||
pub ast_id: FileAstId<ast::ConstDef>,
|
||||
pub ast_id: FileAstId<ast::Const>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
|
@ -560,7 +561,7 @@ pub struct Static {
|
|||
pub visibility: RawVisibilityId,
|
||||
pub mutable: bool,
|
||||
pub type_ref: TypeRef,
|
||||
pub ast_id: FileAstId<ast::StaticDef>,
|
||||
pub ast_id: FileAstId<ast::Static>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
|
@ -570,7 +571,7 @@ pub struct Trait {
|
|||
pub generic_params: GenericParamsId,
|
||||
pub auto: bool,
|
||||
pub items: Box<[AssocItem]>,
|
||||
pub ast_id: FileAstId<ast::TraitDef>,
|
||||
pub ast_id: FileAstId<ast::Trait>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
|
@ -580,7 +581,7 @@ pub struct Impl {
|
|||
pub target_type: TypeRef,
|
||||
pub is_negative: bool,
|
||||
pub items: Box<[AssocItem]>,
|
||||
pub ast_id: FileAstId<ast::ImplDef>,
|
||||
pub ast_id: FileAstId<ast::Impl>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
|
@ -591,7 +592,7 @@ pub struct TypeAlias {
|
|||
pub bounds: Box<[TypeBound]>,
|
||||
pub generic_params: GenericParamsId,
|
||||
pub type_ref: Option<TypeRef>,
|
||||
pub ast_id: FileAstId<ast::TypeAliasDef>,
|
||||
pub ast_id: FileAstId<ast::TypeAlias>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
|
|
|
@ -1,10 +1,7 @@
|
|||
//! AST -> `ItemTree` lowering code.
|
||||
|
||||
use super::*;
|
||||
use crate::{
|
||||
attr::Attrs,
|
||||
generics::{GenericParams, TypeParamData, TypeParamProvenance},
|
||||
};
|
||||
use std::{collections::hash_map::Entry, mem, sync::Arc};
|
||||
|
||||
use hir_expand::{ast_id_map::AstIdMap, hygiene::Hygiene, HirFileId};
|
||||
use ra_arena::map::ArenaMap;
|
||||
use ra_syntax::{
|
||||
|
@ -12,7 +9,13 @@ use ra_syntax::{
|
|||
SyntaxNode,
|
||||
};
|
||||
use smallvec::SmallVec;
|
||||
use std::{collections::hash_map::Entry, mem, sync::Arc};
|
||||
|
||||
use crate::{
|
||||
attr::Attrs,
|
||||
generics::{GenericParams, TypeParamData, TypeParamProvenance},
|
||||
};
|
||||
|
||||
use super::*;
|
||||
|
||||
fn id<N: ItemTreeNode>(index: Idx<N>) -> FileItemTreeId<N> {
|
||||
FileItemTreeId { index, _p: PhantomData }
|
||||
|
@ -70,19 +73,19 @@ impl Ctx {
|
|||
self.tree.data_mut()
|
||||
}
|
||||
|
||||
fn lower_mod_item(&mut self, item: &ast::ModuleItem, inner: bool) -> Option<ModItems> {
|
||||
fn lower_mod_item(&mut self, item: &ast::Item, inner: bool) -> Option<ModItems> {
|
||||
assert!(inner || self.inner_items.is_empty());
|
||||
|
||||
// Collect inner items for 1-to-1-lowered items.
|
||||
match item {
|
||||
ast::ModuleItem::StructDef(_)
|
||||
| ast::ModuleItem::UnionDef(_)
|
||||
| ast::ModuleItem::EnumDef(_)
|
||||
| ast::ModuleItem::FnDef(_)
|
||||
| ast::ModuleItem::TypeAliasDef(_)
|
||||
| ast::ModuleItem::ConstDef(_)
|
||||
| ast::ModuleItem::StaticDef(_)
|
||||
| ast::ModuleItem::MacroCall(_) => {
|
||||
ast::Item::Struct(_)
|
||||
| ast::Item::Union(_)
|
||||
| ast::Item::Enum(_)
|
||||
| ast::Item::Fn(_)
|
||||
| ast::Item::TypeAlias(_)
|
||||
| ast::Item::Const(_)
|
||||
| ast::Item::Static(_)
|
||||
| ast::Item::MacroCall(_) => {
|
||||
// Skip this if we're already collecting inner items. We'll descend into all nodes
|
||||
// already.
|
||||
if !inner {
|
||||
|
@ -92,34 +95,30 @@ impl Ctx {
|
|||
|
||||
// These are handled in their respective `lower_X` method (since we can't just blindly
|
||||
// walk them).
|
||||
ast::ModuleItem::TraitDef(_)
|
||||
| ast::ModuleItem::ImplDef(_)
|
||||
| ast::ModuleItem::ExternBlock(_) => {}
|
||||
ast::Item::Trait(_) | ast::Item::Impl(_) | ast::Item::ExternBlock(_) => {}
|
||||
|
||||
// These don't have inner items.
|
||||
ast::ModuleItem::Module(_)
|
||||
| ast::ModuleItem::ExternCrateItem(_)
|
||||
| ast::ModuleItem::UseItem(_) => {}
|
||||
ast::Item::Module(_) | ast::Item::ExternCrate(_) | ast::Item::Use(_) => {}
|
||||
};
|
||||
|
||||
let attrs = Attrs::new(item, &self.hygiene);
|
||||
let items = match item {
|
||||
ast::ModuleItem::StructDef(ast) => self.lower_struct(ast).map(Into::into),
|
||||
ast::ModuleItem::UnionDef(ast) => self.lower_union(ast).map(Into::into),
|
||||
ast::ModuleItem::EnumDef(ast) => self.lower_enum(ast).map(Into::into),
|
||||
ast::ModuleItem::FnDef(ast) => self.lower_function(ast).map(Into::into),
|
||||
ast::ModuleItem::TypeAliasDef(ast) => self.lower_type_alias(ast).map(Into::into),
|
||||
ast::ModuleItem::StaticDef(ast) => self.lower_static(ast).map(Into::into),
|
||||
ast::ModuleItem::ConstDef(ast) => Some(self.lower_const(ast).into()),
|
||||
ast::ModuleItem::Module(ast) => self.lower_module(ast).map(Into::into),
|
||||
ast::ModuleItem::TraitDef(ast) => self.lower_trait(ast).map(Into::into),
|
||||
ast::ModuleItem::ImplDef(ast) => self.lower_impl(ast).map(Into::into),
|
||||
ast::ModuleItem::UseItem(ast) => Some(ModItems(
|
||||
ast::Item::Struct(ast) => self.lower_struct(ast).map(Into::into),
|
||||
ast::Item::Union(ast) => self.lower_union(ast).map(Into::into),
|
||||
ast::Item::Enum(ast) => self.lower_enum(ast).map(Into::into),
|
||||
ast::Item::Fn(ast) => self.lower_function(ast).map(Into::into),
|
||||
ast::Item::TypeAlias(ast) => self.lower_type_alias(ast).map(Into::into),
|
||||
ast::Item::Static(ast) => self.lower_static(ast).map(Into::into),
|
||||
ast::Item::Const(ast) => Some(self.lower_const(ast).into()),
|
||||
ast::Item::Module(ast) => self.lower_module(ast).map(Into::into),
|
||||
ast::Item::Trait(ast) => self.lower_trait(ast).map(Into::into),
|
||||
ast::Item::Impl(ast) => self.lower_impl(ast).map(Into::into),
|
||||
ast::Item::Use(ast) => Some(ModItems(
|
||||
self.lower_use(ast).into_iter().map(Into::into).collect::<SmallVec<_>>(),
|
||||
)),
|
||||
ast::ModuleItem::ExternCrateItem(ast) => self.lower_extern_crate(ast).map(Into::into),
|
||||
ast::ModuleItem::MacroCall(ast) => self.lower_macro_call(ast).map(Into::into),
|
||||
ast::ModuleItem::ExternBlock(ast) => {
|
||||
ast::Item::ExternCrate(ast) => self.lower_extern_crate(ast).map(Into::into),
|
||||
ast::Item::MacroCall(ast) => self.lower_macro_call(ast).map(Into::into),
|
||||
ast::Item::ExternBlock(ast) => {
|
||||
Some(ModItems(self.lower_extern_block(ast).into_iter().collect::<SmallVec<_>>()))
|
||||
}
|
||||
};
|
||||
|
@@ -147,27 +146,26 @@ impl Ctx {
    fn collect_inner_items(&mut self, container: &SyntaxNode) {
        let forced_vis = self.forced_visibility.take();
        let mut inner_items = mem::take(&mut self.tree.inner_items);
-       inner_items.extend(
-           container.descendants().skip(1).filter_map(ast::ModuleItem::cast).filter_map(|item| {
+       inner_items.extend(container.descendants().skip(1).filter_map(ast::Item::cast).filter_map(
+           |item| {
                let ast_id = self.source_ast_id_map.ast_id(&item);
                Some((ast_id, self.lower_mod_item(&item, true)?.0))
-           }),
-       );
+           },
+       ));
        self.tree.inner_items = inner_items;
        self.forced_visibility = forced_vis;
    }
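
For reference, a plain-Rust sketch of the kind of input `collect_inner_items` walks with `descendants()`: the items below are nested inside another item's body, so they are not top-level module items, yet they still need to be collected and lowered as inner items.

// `Inner` and `helper` are "inner items": they live inside `outer`'s body.
fn outer() -> i32 {
    struct Inner {
        value: i32,
    }

    fn helper(x: Inner) -> i32 {
        x.value
    }

    helper(Inner { value: 1 })
}

fn main() {
    assert_eq!(outer(), 1);
}
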
|
||||
|
||||
fn lower_assoc_item(&mut self, item: &ast::ModuleItem) -> Option<AssocItem> {
|
||||
fn lower_assoc_item(&mut self, item: &ast::AssocItem) -> Option<AssocItem> {
|
||||
match item {
|
||||
ast::ModuleItem::FnDef(ast) => self.lower_function(ast).map(Into::into),
|
||||
ast::ModuleItem::TypeAliasDef(ast) => self.lower_type_alias(ast).map(Into::into),
|
||||
ast::ModuleItem::ConstDef(ast) => Some(self.lower_const(ast).into()),
|
||||
ast::ModuleItem::MacroCall(ast) => self.lower_macro_call(ast).map(Into::into),
|
||||
_ => None,
|
||||
ast::AssocItem::Fn(ast) => self.lower_function(ast).map(Into::into),
|
||||
ast::AssocItem::TypeAlias(ast) => self.lower_type_alias(ast).map(Into::into),
|
||||
ast::AssocItem::Const(ast) => Some(self.lower_const(ast).into()),
|
||||
ast::AssocItem::MacroCall(ast) => self.lower_macro_call(ast).map(Into::into),
|
||||
}
|
||||
}
|
||||
|
||||
fn lower_struct(&mut self, strukt: &ast::StructDef) -> Option<FileItemTreeId<Struct>> {
|
||||
fn lower_struct(&mut self, strukt: &ast::Struct) -> Option<FileItemTreeId<Struct>> {
|
||||
let visibility = self.lower_visibility(strukt);
|
||||
let name = strukt.name()?.as_name();
|
||||
let generic_params = self.lower_generic_params(GenericsOwner::Struct, strukt);
|
||||
|
@ -196,7 +194,7 @@ impl Ctx {
|
|||
}
|
||||
}
|
||||
|
||||
fn lower_record_fields(&mut self, fields: &ast::RecordFieldDefList) -> IdRange<Field> {
|
||||
fn lower_record_fields(&mut self, fields: &ast::RecordFieldList) -> IdRange<Field> {
|
||||
let start = self.next_field_idx();
|
||||
for field in fields.fields() {
|
||||
if let Some(data) = self.lower_record_field(&field) {
|
||||
|
@ -208,42 +206,39 @@ impl Ctx {
|
|||
IdRange::new(start..end)
|
||||
}
|
||||
|
||||
fn lower_record_field(&mut self, field: &ast::RecordFieldDef) -> Option<Field> {
|
||||
fn lower_record_field(&mut self, field: &ast::RecordField) -> Option<Field> {
|
||||
let name = field.name()?.as_name();
|
||||
let visibility = self.lower_visibility(field);
|
||||
let type_ref = self.lower_type_ref(&field.ascribed_type()?);
|
||||
let type_ref = self.lower_type_ref_opt(field.ty());
|
||||
let res = Field { name, type_ref, visibility };
|
||||
Some(res)
|
||||
}
|
||||
|
||||
fn lower_tuple_fields(&mut self, fields: &ast::TupleFieldDefList) -> IdRange<Field> {
|
||||
fn lower_tuple_fields(&mut self, fields: &ast::TupleFieldList) -> IdRange<Field> {
|
||||
let start = self.next_field_idx();
|
||||
for (i, field) in fields.fields().enumerate() {
|
||||
if let Some(data) = self.lower_tuple_field(i, &field) {
|
||||
let data = self.lower_tuple_field(i, &field);
|
||||
let idx = self.data().fields.alloc(data);
|
||||
self.add_attrs(idx.into(), Attrs::new(&field, &self.hygiene));
|
||||
}
|
||||
}
|
||||
let end = self.next_field_idx();
|
||||
IdRange::new(start..end)
|
||||
}
|
||||
|
||||
fn lower_tuple_field(&mut self, idx: usize, field: &ast::TupleFieldDef) -> Option<Field> {
|
||||
fn lower_tuple_field(&mut self, idx: usize, field: &ast::TupleField) -> Field {
|
||||
let name = Name::new_tuple_field(idx);
|
||||
let visibility = self.lower_visibility(field);
|
||||
let type_ref = self.lower_type_ref(&field.type_ref()?);
|
||||
let type_ref = self.lower_type_ref_opt(field.ty());
|
||||
let res = Field { name, type_ref, visibility };
|
||||
Some(res)
|
||||
res
|
||||
}
|
||||
|
||||
fn lower_union(&mut self, union: &ast::UnionDef) -> Option<FileItemTreeId<Union>> {
|
||||
fn lower_union(&mut self, union: &ast::Union) -> Option<FileItemTreeId<Union>> {
|
||||
let visibility = self.lower_visibility(union);
|
||||
let name = union.name()?.as_name();
|
||||
let generic_params = self.lower_generic_params(GenericsOwner::Union, union);
|
||||
let fields = match union.record_field_def_list() {
|
||||
Some(record_field_def_list) => {
|
||||
self.lower_fields(&StructKind::Record(record_field_def_list))
|
||||
}
|
||||
let fields = match union.record_field_list() {
|
||||
Some(record_field_list) => self.lower_fields(&StructKind::Record(record_field_list)),
|
||||
None => Fields::Record(IdRange::new(self.next_field_idx()..self.next_field_idx())),
|
||||
};
|
||||
let ast_id = self.source_ast_id_map.ast_id(union);
|
||||
|
@ -251,7 +246,7 @@ impl Ctx {
|
|||
Some(id(self.data().unions.alloc(res)))
|
||||
}
|
||||
|
||||
fn lower_enum(&mut self, enum_: &ast::EnumDef) -> Option<FileItemTreeId<Enum>> {
|
||||
fn lower_enum(&mut self, enum_: &ast::Enum) -> Option<FileItemTreeId<Enum>> {
|
||||
let visibility = self.lower_visibility(enum_);
|
||||
let name = enum_.name()?.as_name();
|
||||
let generic_params = self.lower_generic_params(GenericsOwner::Enum, enum_);
|
||||
|
@ -264,7 +259,7 @@ impl Ctx {
|
|||
Some(id(self.data().enums.alloc(res)))
|
||||
}
|
||||
|
||||
fn lower_variants(&mut self, variants: &ast::EnumVariantList) -> IdRange<Variant> {
|
||||
fn lower_variants(&mut self, variants: &ast::VariantList) -> IdRange<Variant> {
|
||||
let start = self.next_variant_idx();
|
||||
for variant in variants.variants() {
|
||||
if let Some(data) = self.lower_variant(&variant) {
|
||||
|
@ -276,14 +271,14 @@ impl Ctx {
|
|||
IdRange::new(start..end)
|
||||
}
|
||||
|
||||
fn lower_variant(&mut self, variant: &ast::EnumVariant) -> Option<Variant> {
|
||||
fn lower_variant(&mut self, variant: &ast::Variant) -> Option<Variant> {
|
||||
let name = variant.name()?.as_name();
|
||||
let fields = self.lower_fields(&variant.kind());
|
||||
let res = Variant { name, fields };
|
||||
Some(res)
|
||||
}
|
||||
|
||||
fn lower_function(&mut self, func: &ast::FnDef) -> Option<FileItemTreeId<Function>> {
|
||||
fn lower_function(&mut self, func: &ast::Fn) -> Option<FileItemTreeId<Function>> {
|
||||
let visibility = self.lower_visibility(func);
|
||||
let name = func.name()?.as_name();
|
||||
|
||||
|
@ -291,7 +286,7 @@ impl Ctx {
|
|||
let mut has_self_param = false;
|
||||
if let Some(param_list) = func.param_list() {
|
||||
if let Some(self_param) = param_list.self_param() {
|
||||
let self_type = match self_param.ascribed_type() {
|
||||
let self_type = match self_param.ty() {
|
||||
Some(type_ref) => TypeRef::from_ast(&self.body_ctx, type_ref),
|
||||
None => {
|
||||
let self_type = TypeRef::Path(name![Self].into());
|
||||
|
@ -310,11 +305,19 @@ impl Ctx {
|
|||
has_self_param = true;
|
||||
}
|
||||
for param in param_list.params() {
|
||||
let type_ref = TypeRef::from_ast_opt(&self.body_ctx, param.ascribed_type());
|
||||
let type_ref = TypeRef::from_ast_opt(&self.body_ctx, param.ty());
|
||||
params.push(type_ref);
|
||||
}
|
||||
}
|
||||
let ret_type = match func.ret_type().and_then(|rt| rt.type_ref()) {
|
||||
|
||||
let mut is_varargs = false;
|
||||
if let Some(params) = func.param_list() {
|
||||
if let Some(last) = params.params().last() {
|
||||
is_varargs = last.dotdotdot_token().is_some();
|
||||
}
|
||||
}
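
An illustrative example of the source shape the `is_varargs` check above is looking for; only a trailing `...` parameter of a foreign function marks the lowered `Function` as variadic (the exact types used in the declaration here are just for the sketch):

extern "C" {
    // The `...` here is the `dotdotdot_token()` the lowering inspects.
    fn printf(format: *const u8, ...) -> i32;
}

fn main() {
    // Declaration only; actually calling a variadic foreign function would
    // require an `unsafe` block.
}
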
|
||||
|
||||
let ret_type = match func.ret_type().and_then(|rt| rt.ty()) {
|
||||
Some(type_ref) => TypeRef::from_ast(&self.body_ctx, type_ref),
|
||||
_ => TypeRef::unit(),
|
||||
};
|
||||
|
@ -335,6 +338,7 @@ impl Ctx {
|
|||
has_self_param,
|
||||
is_unsafe: func.unsafe_token().is_some(),
|
||||
params: params.into_boxed_slice(),
|
||||
is_varargs,
|
||||
ret_type,
|
||||
ast_id,
|
||||
};
|
||||
|
@ -345,10 +349,10 @@ impl Ctx {
|
|||
|
||||
fn lower_type_alias(
|
||||
&mut self,
|
||||
type_alias: &ast::TypeAliasDef,
|
||||
type_alias: &ast::TypeAlias,
|
||||
) -> Option<FileItemTreeId<TypeAlias>> {
|
||||
let name = type_alias.name()?.as_name();
|
||||
let type_ref = type_alias.type_ref().map(|it| self.lower_type_ref(&it));
|
||||
let type_ref = type_alias.ty().map(|it| self.lower_type_ref(&it));
|
||||
let visibility = self.lower_visibility(type_alias);
|
||||
let bounds = self.lower_type_bounds(type_alias);
|
||||
let generic_params = self.lower_generic_params(GenericsOwner::TypeAlias, type_alias);
|
||||
|
@ -364,9 +368,9 @@ impl Ctx {
|
|||
Some(id(self.data().type_aliases.alloc(res)))
|
||||
}
|
||||
|
||||
fn lower_static(&mut self, static_: &ast::StaticDef) -> Option<FileItemTreeId<Static>> {
|
||||
fn lower_static(&mut self, static_: &ast::Static) -> Option<FileItemTreeId<Static>> {
|
||||
let name = static_.name()?.as_name();
|
||||
let type_ref = self.lower_type_ref_opt(static_.ascribed_type());
|
||||
let type_ref = self.lower_type_ref_opt(static_.ty());
|
||||
let visibility = self.lower_visibility(static_);
|
||||
let mutable = static_.mut_token().is_some();
|
||||
let ast_id = self.source_ast_id_map.ast_id(static_);
|
||||
|
@ -374,9 +378,9 @@ impl Ctx {
|
|||
Some(id(self.data().statics.alloc(res)))
|
||||
}
|
||||
|
||||
fn lower_const(&mut self, konst: &ast::ConstDef) -> FileItemTreeId<Const> {
|
||||
fn lower_const(&mut self, konst: &ast::Const) -> FileItemTreeId<Const> {
|
||||
let name = konst.name().map(|it| it.as_name());
|
||||
let type_ref = self.lower_type_ref_opt(konst.ascribed_type());
|
||||
let type_ref = self.lower_type_ref_opt(konst.ty());
|
||||
let visibility = self.lower_visibility(konst);
|
||||
let ast_id = self.source_ast_id_map.ast_id(konst);
|
||||
let res = Const { name, visibility, type_ref, ast_id };
|
||||
|
@ -409,15 +413,15 @@ impl Ctx {
|
|||
Some(id(self.data().mods.alloc(res)))
|
||||
}
|
||||
|
||||
fn lower_trait(&mut self, trait_def: &ast::TraitDef) -> Option<FileItemTreeId<Trait>> {
|
||||
fn lower_trait(&mut self, trait_def: &ast::Trait) -> Option<FileItemTreeId<Trait>> {
|
||||
let name = trait_def.name()?.as_name();
|
||||
let visibility = self.lower_visibility(trait_def);
|
||||
let generic_params =
|
||||
self.lower_generic_params_and_inner_items(GenericsOwner::Trait(trait_def), trait_def);
|
||||
let auto = trait_def.auto_token().is_some();
|
||||
let items = trait_def.item_list().map(|list| {
|
||||
let items = trait_def.assoc_item_list().map(|list| {
|
||||
self.with_inherited_visibility(visibility, |this| {
|
||||
list.items()
|
||||
list.assoc_items()
|
||||
.filter_map(|item| {
|
||||
let attrs = Attrs::new(&item, &this.hygiene);
|
||||
this.collect_inner_items(item.syntax());
|
||||
|
@ -441,7 +445,7 @@ impl Ctx {
|
|||
Some(id(self.data().traits.alloc(res)))
|
||||
}
|
||||
|
||||
fn lower_impl(&mut self, impl_def: &ast::ImplDef) -> Option<FileItemTreeId<Impl>> {
|
||||
fn lower_impl(&mut self, impl_def: &ast::Impl) -> Option<FileItemTreeId<Impl>> {
|
||||
let generic_params =
|
||||
self.lower_generic_params_and_inner_items(GenericsOwner::Impl, impl_def);
|
||||
let target_trait = impl_def.target_trait().map(|tr| self.lower_type_ref(&tr));
|
||||
|
@ -450,8 +454,9 @@ impl Ctx {
|
|||
|
||||
// We cannot use `assoc_items()` here as that does not include macro calls.
|
||||
let items = impl_def
|
||||
.item_list()?
|
||||
.items()
|
||||
.assoc_item_list()
|
||||
.into_iter()
|
||||
.flat_map(|it| it.assoc_items())
|
||||
.filter_map(|item| {
|
||||
self.collect_inner_items(item.syntax());
|
||||
let assoc = self.lower_assoc_item(&item)?;
|
||||
|
@ -465,7 +470,7 @@ impl Ctx {
|
|||
Some(id(self.data().impls.alloc(res)))
|
||||
}
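
The comment above ("we cannot use `assoc_items()` here as that does not include macro calls") is easiest to see with an input like this self-contained sketch, where a macro call sits in associated-item position and expands to a method:

macro_rules! items {
    () => {
        fn generated(&self) -> i32 {
            42
        }
    };
}

struct S;

impl S {
    // The lowering must see this call itself, not only already-typed assoc items.
    items!();
}

fn main() {
    assert_eq!(S.generated(), 42);
}
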
|
||||
|
||||
fn lower_use(&mut self, use_item: &ast::UseItem) -> Vec<FileItemTreeId<Import>> {
|
||||
fn lower_use(&mut self, use_item: &ast::Use) -> Vec<FileItemTreeId<Import>> {
|
||||
// FIXME: cfg_attr
|
||||
let is_prelude = use_item.has_atom_attr("prelude_import");
|
||||
let visibility = self.lower_visibility(use_item);
|
||||
|
@ -494,10 +499,10 @@ impl Ctx {
|
|||
|
||||
fn lower_extern_crate(
|
||||
&mut self,
|
||||
extern_crate: &ast::ExternCrateItem,
|
||||
extern_crate: &ast::ExternCrate,
|
||||
) -> Option<FileItemTreeId<ExternCrate>> {
|
||||
let path = ModPath::from_name_ref(&extern_crate.name_ref()?);
|
||||
let alias = extern_crate.alias().map(|a| {
|
||||
let alias = extern_crate.rename().map(|a| {
|
||||
a.name().map(|it| it.as_name()).map_or(ImportAlias::Underscore, ImportAlias::Alias)
|
||||
});
|
||||
let visibility = self.lower_visibility(extern_crate);
|
||||
|
@@ -543,14 +548,16 @@ impl Ctx {
            self.collect_inner_items(item.syntax());
            let attrs = Attrs::new(&item, &self.hygiene);
            let id: ModItem = match item {
-               ast::ExternItem::FnDef(ast) => {
+               ast::ExternItem::Fn(ast) => {
                    let func = self.lower_function(&ast)?;
+                   self.data().functions[func.index].is_unsafe = true;
                    func.into()
                }
-               ast::ExternItem::StaticDef(ast) => {
+               ast::ExternItem::Static(ast) => {
                    let statik = self.lower_static(&ast)?;
                    statik.into()
                }
                ast::ExternItem::MacroCall(_) => return None,
            };
            self.add_attrs(id.into(), attrs);
            Some(id)
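
The `is_unsafe = true` assignment above reflects a language rule rather than anything about the item itself: every call to a function declared in an `extern` block must happen inside `unsafe`, as in this small standalone example:

extern "C" {
    fn abs(input: i32) -> i32;
}

fn main() {
    // Foreign functions can only be called in an `unsafe` block, which is why
    // functions lowered from an `extern` block are flagged as unsafe.
    let positive = unsafe { abs(-3) };
    assert_eq!(positive, 3);
}
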
|
||||
|
@ -563,10 +570,10 @@ impl Ctx {
|
|||
fn lower_generic_params_and_inner_items(
|
||||
&mut self,
|
||||
owner: GenericsOwner<'_>,
|
||||
node: &impl ast::TypeParamsOwner,
|
||||
node: &impl ast::GenericParamsOwner,
|
||||
) -> GenericParamsId {
|
||||
// Generics are part of item headers and may contain inner items we need to collect.
|
||||
if let Some(params) = node.type_param_list() {
|
||||
if let Some(params) = node.generic_param_list() {
|
||||
self.collect_inner_items(params.syntax());
|
||||
}
|
||||
if let Some(clause) = node.where_clause() {
|
||||
|
@ -579,7 +586,7 @@ impl Ctx {
|
|||
fn lower_generic_params(
|
||||
&mut self,
|
||||
owner: GenericsOwner<'_>,
|
||||
node: &impl ast::TypeParamsOwner,
|
||||
node: &impl ast::GenericParamsOwner,
|
||||
) -> GenericParamsId {
|
||||
let mut sm = &mut ArenaMap::default();
|
||||
let mut generics = GenericParams::default();
|
||||
|
@ -692,7 +699,7 @@ enum GenericsOwner<'a> {
|
|||
Enum,
|
||||
Union,
|
||||
/// The `TraitDef` is needed to fill the source map for the implicit `Self` parameter.
|
||||
Trait(&'a ast::TraitDef),
|
||||
Trait(&'a ast::Trait),
|
||||
TypeAlias,
|
||||
Impl,
|
||||
}
|
||||
|
|
|
@ -1,13 +1,15 @@
|
|||
use super::{ItemTree, ModItem, ModKind};
|
||||
use crate::{db::DefDatabase, test_db::TestDB};
|
||||
use expect::{expect, Expect};
|
||||
use hir_expand::{db::AstDatabase, HirFileId, InFile};
|
||||
use insta::assert_snapshot;
|
||||
use ra_db::fixture::WithFixture;
|
||||
use ra_syntax::{ast, AstNode};
|
||||
use rustc_hash::FxHashSet;
|
||||
use std::sync::Arc;
|
||||
use stdx::format_to;
|
||||
|
||||
use crate::{db::DefDatabase, test_db::TestDB};
|
||||
|
||||
use super::{ItemTree, ModItem, ModKind};
|
||||
|
||||
fn test_inner_items(ra_fixture: &str) {
|
||||
let (db, file_id) = TestDB::with_single_file(ra_fixture);
|
||||
let file_id = HirFileId::from(file_id);
|
||||
|
@ -19,7 +21,7 @@ fn test_inner_items(ra_fixture: &str) {
|
|||
let mut outer_items = FxHashSet::default();
|
||||
let mut worklist = tree.top_level_items().to_vec();
|
||||
while let Some(item) = worklist.pop() {
|
||||
let node: ast::ModuleItem = match item {
|
||||
let node: ast::Item = match item {
|
||||
ModItem::Import(it) => tree.source(&db, InFile::new(file_id, it)).into(),
|
||||
ModItem::ExternCrate(it) => tree.source(&db, InFile::new(file_id, it)).into(),
|
||||
ModItem::Function(it) => tree.source(&db, InFile::new(file_id, it)).into(),
|
||||
|
@ -51,7 +53,7 @@ fn test_inner_items(ra_fixture: &str) {
|
|||
|
||||
// Now descend the root node and check that all `ast::ModuleItem`s are either recorded above, or
|
||||
// registered as inner items.
|
||||
for item in root.descendants().skip(1).filter_map(ast::ModuleItem::cast) {
|
||||
for item in root.descendants().skip(1).filter_map(ast::Item::cast) {
|
||||
if outer_items.contains(&item) {
|
||||
continue;
|
||||
}
|
||||
|
@ -162,9 +164,15 @@ fn fmt_mod_item(out: &mut String, tree: &ItemTree, item: ModItem) {
|
|||
}
|
||||
}
|
||||
|
||||
+fn check(ra_fixture: &str, expect: Expect) {
+    let actual = print_item_tree(ra_fixture);
+    expect.assert_eq(&actual);
+}
|
||||
|
||||
#[test]
|
||||
fn smoke() {
|
||||
assert_snapshot!(print_item_tree(r"
|
||||
check(
|
||||
r"
|
||||
#![attr]
|
||||
|
||||
#[attr_on_use]
|
||||
|
@ -214,42 +222,44 @@ fn smoke() {
|
|||
#[union_fld]
|
||||
fld: u16,
|
||||
}
|
||||
"), @r###"
|
||||
",
|
||||
expect![[r##"
|
||||
inner attrs: Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr"))] }, input: None }]) }
|
||||
|
||||
top-level items:
|
||||
#[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr_on_use"))] }, input: None }]) }]
|
||||
Import { path: ModPath { kind: Plain, segments: [Name(Text("a"))] }, alias: None, visibility: RawVisibilityId("pub(self)"), is_glob: false, is_prelude: false, ast_id: FileAstId::<ra_syntax::ast::generated::nodes::UseItem>(0) }
|
||||
Import { path: ModPath { kind: Plain, segments: [Name(Text("a"))] }, alias: None, visibility: RawVisibilityId("pub(self)"), is_glob: false, is_prelude: false, ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Use>(0) }
|
||||
#[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr_on_use"))] }, input: None }]) }]
|
||||
Import { path: ModPath { kind: Plain, segments: [Name(Text("b"))] }, alias: None, visibility: RawVisibilityId("pub(self)"), is_glob: true, is_prelude: false, ast_id: FileAstId::<ra_syntax::ast::generated::nodes::UseItem>(0) }
|
||||
Import { path: ModPath { kind: Plain, segments: [Name(Text("b"))] }, alias: None, visibility: RawVisibilityId("pub(self)"), is_glob: true, is_prelude: false, ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Use>(0) }
|
||||
#[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("ext_crate"))] }, input: None }]) }]
|
||||
ExternCrate { path: ModPath { kind: Plain, segments: [Name(Text("krate"))] }, alias: None, visibility: RawVisibilityId("pub(self)"), is_macro_use: false, ast_id: FileAstId::<ra_syntax::ast::generated::nodes::ExternCrateItem>(1) }
|
||||
ExternCrate { path: ModPath { kind: Plain, segments: [Name(Text("krate"))] }, alias: None, visibility: RawVisibilityId("pub(self)"), is_macro_use: false, ast_id: FileAstId::<ra_syntax::ast::generated::nodes::ExternCrate>(1) }
|
||||
#[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("on_trait"))] }, input: None }]) }]
|
||||
Trait { name: Name(Text("Tr")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(0), auto: false, items: [TypeAlias(Idx::<TypeAlias>(0)), Const(Idx::<Const>(0)), Function(Idx::<Function>(0)), Function(Idx::<Function>(1))], ast_id: FileAstId::<ra_syntax::ast::generated::nodes::TraitDef>(2) }
|
||||
Trait { name: Name(Text("Tr")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(0), auto: false, items: [TypeAlias(Idx::<TypeAlias>(0)), Const(Idx::<Const>(0)), Function(Idx::<Function>(0)), Function(Idx::<Function>(1))], ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Trait>(2) }
|
||||
> #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("assoc_ty"))] }, input: None }]) }]
|
||||
> TypeAlias { name: Name(Text("AssocTy")), visibility: RawVisibilityId("pub(self)"), bounds: [Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("Tr"))] }, generic_args: [Some(GenericArgs { args: [Type(Tuple([]))], has_self_type: false, bindings: [] })] })], generic_params: GenericParamsId(4294967295), type_ref: None, ast_id: FileAstId::<ra_syntax::ast::generated::nodes::TypeAliasDef>(8) }
|
||||
> TypeAlias { name: Name(Text("AssocTy")), visibility: RawVisibilityId("pub(self)"), bounds: [Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("Tr"))] }, generic_args: [Some(GenericArgs { args: [Type(Tuple([]))], has_self_type: false, bindings: [] })] })], generic_params: GenericParamsId(4294967295), type_ref: None, ast_id: FileAstId::<ra_syntax::ast::generated::nodes::TypeAlias>(8) }
|
||||
> #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("assoc_const"))] }, input: None }]) }]
|
||||
> Const { name: Some(Name(Text("CONST"))), visibility: RawVisibilityId("pub(self)"), type_ref: Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("u8"))] }, generic_args: [None] }), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::ConstDef>(9) }
|
||||
> Const { name: Some(Name(Text("CONST"))), visibility: RawVisibilityId("pub(self)"), type_ref: Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("u8"))] }, generic_args: [None] }), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Const>(9) }
|
||||
> #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("assoc_method"))] }, input: None }]) }]
|
||||
> Function { name: Name(Text("method")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: true, is_unsafe: false, params: [Reference(Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("Self"))] }, generic_args: [None] }), Shared)], ret_type: Tuple([]), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::FnDef>(10) }
|
||||
> Function { name: Name(Text("method")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: true, is_unsafe: false, params: [Reference(Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("Self"))] }, generic_args: [None] }), Shared)], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Fn>(10) }
|
||||
> #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("assoc_dfl_method"))] }, input: None }]) }]
|
||||
> Function { name: Name(Text("dfl_method")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: true, is_unsafe: false, params: [Reference(Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("Self"))] }, generic_args: [None] }), Mut)], ret_type: Tuple([]), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::FnDef>(11) }
|
||||
> Function { name: Name(Text("dfl_method")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: true, is_unsafe: false, params: [Reference(Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("Self"))] }, generic_args: [None] }), Mut)], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Fn>(11) }
|
||||
#[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("struct0"))] }, input: None }]) }]
|
||||
Struct { name: Name(Text("Struct0")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(1), fields: Unit, ast_id: FileAstId::<ra_syntax::ast::generated::nodes::StructDef>(3), kind: Unit }
|
||||
Struct { name: Name(Text("Struct0")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(1), fields: Unit, ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Struct>(3), kind: Unit }
|
||||
#[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("struct1"))] }, input: None }]) }]
|
||||
Struct { name: Name(Text("Struct1")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(2), fields: Tuple(IdRange::<ra_hir_def::item_tree::Field>(0..1)), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::StructDef>(4), kind: Tuple }
|
||||
Struct { name: Name(Text("Struct1")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(2), fields: Tuple(IdRange::<ra_hir_def::item_tree::Field>(0..1)), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Struct>(4), kind: Tuple }
|
||||
#[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("struct2"))] }, input: None }]) }]
|
||||
Struct { name: Name(Text("Struct2")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(3), fields: Record(IdRange::<ra_hir_def::item_tree::Field>(1..2)), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::StructDef>(5), kind: Record }
|
||||
Struct { name: Name(Text("Struct2")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(3), fields: Record(IdRange::<ra_hir_def::item_tree::Field>(1..2)), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Struct>(5), kind: Record }
|
||||
#[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("en"))] }, input: None }]) }]
|
||||
Enum { name: Name(Text("En")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), variants: IdRange::<ra_hir_def::item_tree::Variant>(0..1), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::EnumDef>(6) }
|
||||
Enum { name: Name(Text("En")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), variants: IdRange::<ra_hir_def::item_tree::Variant>(0..1), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Enum>(6) }
|
||||
#[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("un"))] }, input: None }]) }]
|
||||
Union { name: Name(Text("Un")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), fields: Record(IdRange::<ra_hir_def::item_tree::Field>(3..4)), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::UnionDef>(7) }
|
||||
"###);
|
||||
Union { name: Name(Text("Un")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), fields: Record(IdRange::<ra_hir_def::item_tree::Field>(3..4)), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Union>(7) }
|
||||
"##]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn simple_inner_items() {
|
||||
let tree = print_item_tree(
|
||||
check(
|
||||
r"
|
||||
impl<T:A> D for Response<T> {
|
||||
fn foo() {
|
||||
|
@ -260,26 +270,25 @@ fn simple_inner_items() {
|
|||
}
|
||||
}
|
||||
",
|
||||
);
|
||||
|
||||
assert_snapshot!(tree, @r###"
|
||||
expect![[r#"
|
||||
inner attrs: Attrs { entries: None }
|
||||
|
||||
top-level items:
|
||||
Impl { generic_params: GenericParamsId(0), target_trait: Some(Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("D"))] }, generic_args: [None] })), target_type: Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("Response"))] }, generic_args: [Some(GenericArgs { args: [Type(Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("T"))] }, generic_args: [None] }))], has_self_type: false, bindings: [] })] }), is_negative: false, items: [Function(Idx::<Function>(1))], ast_id: FileAstId::<ra_syntax::ast::generated::nodes::ImplDef>(0) }
|
||||
> Function { name: Name(Text("foo")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], ret_type: Tuple([]), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::FnDef>(1) }
|
||||
Impl { generic_params: GenericParamsId(0), target_trait: Some(Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("D"))] }, generic_args: [None] })), target_type: Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("Response"))] }, generic_args: [Some(GenericArgs { args: [Type(Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("T"))] }, generic_args: [None] }))], has_self_type: false, bindings: [] })] }), is_negative: false, items: [Function(Idx::<Function>(1))], ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Impl>(0) }
|
||||
> Function { name: Name(Text("foo")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Fn>(1) }
|
||||
|
||||
inner items:
|
||||
|
||||
for AST FileAstId::<ra_syntax::ast::generated::nodes::ModuleItem>(2):
|
||||
Function { name: Name(Text("end")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(1), has_self_param: false, is_unsafe: false, params: [], ret_type: Tuple([]), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::FnDef>(2) }
|
||||
for AST FileAstId::<ra_syntax::ast::generated::nodes::Item>(2):
|
||||
Function { name: Name(Text("end")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(1), has_self_param: false, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Fn>(2) }
|
||||
|
||||
"###);
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn extern_attrs() {
|
||||
let tree = print_item_tree(
|
||||
check(
|
||||
r#"
|
||||
#[block_attr]
|
||||
extern "C" {
|
||||
|
@ -289,22 +298,21 @@ fn extern_attrs() {
|
|||
fn b() {}
|
||||
}
|
||||
"#,
|
||||
);
|
||||
|
||||
assert_snapshot!(tree, @r###"
|
||||
expect![[r##"
|
||||
inner attrs: Attrs { entries: None }
|
||||
|
||||
top-level items:
|
||||
#[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr_a"))] }, input: None }, Attr { path: ModPath { kind: Plain, segments: [Name(Text("block_attr"))] }, input: None }]) }]
|
||||
Function { name: Name(Text("a")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], ret_type: Tuple([]), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::FnDef>(1) }
|
||||
Function { name: Name(Text("a")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: true, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Fn>(1) }
|
||||
#[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr_b"))] }, input: None }, Attr { path: ModPath { kind: Plain, segments: [Name(Text("block_attr"))] }, input: None }]) }]
|
||||
Function { name: Name(Text("b")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], ret_type: Tuple([]), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::FnDef>(2) }
|
||||
"###);
|
||||
Function { name: Name(Text("b")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: true, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Fn>(2) }
|
||||
"##]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn trait_attrs() {
|
||||
let tree = print_item_tree(
|
||||
check(
|
||||
r#"
|
||||
#[trait_attr]
|
||||
trait Tr {
|
||||
|
@ -314,24 +322,23 @@ fn trait_attrs() {
|
|||
fn b() {}
|
||||
}
|
||||
"#,
|
||||
);
|
||||
|
||||
assert_snapshot!(tree, @r###"
|
||||
expect![[r##"
|
||||
inner attrs: Attrs { entries: None }
|
||||
|
||||
top-level items:
|
||||
#[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("trait_attr"))] }, input: None }]) }]
|
||||
Trait { name: Name(Text("Tr")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(0), auto: false, items: [Function(Idx::<Function>(0)), Function(Idx::<Function>(1))], ast_id: FileAstId::<ra_syntax::ast::generated::nodes::TraitDef>(0) }
|
||||
Trait { name: Name(Text("Tr")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(0), auto: false, items: [Function(Idx::<Function>(0)), Function(Idx::<Function>(1))], ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Trait>(0) }
|
||||
> #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr_a"))] }, input: None }]) }]
|
||||
> Function { name: Name(Text("a")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], ret_type: Tuple([]), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::FnDef>(1) }
|
||||
> Function { name: Name(Text("a")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Fn>(1) }
|
||||
> #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr_b"))] }, input: None }]) }]
|
||||
> Function { name: Name(Text("b")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], ret_type: Tuple([]), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::FnDef>(2) }
|
||||
"###);
|
||||
> Function { name: Name(Text("b")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Fn>(2) }
|
||||
"##]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn impl_attrs() {
|
||||
let tree = print_item_tree(
|
||||
check(
|
||||
r#"
|
||||
#[impl_attr]
|
||||
impl Ty {
|
||||
|
@ -341,19 +348,18 @@ fn impl_attrs() {
|
|||
fn b() {}
|
||||
}
|
||||
"#,
|
||||
);
|
||||
|
||||
assert_snapshot!(tree, @r###"
|
||||
expect![[r##"
|
||||
inner attrs: Attrs { entries: None }
|
||||
|
||||
top-level items:
|
||||
#[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("impl_attr"))] }, input: None }]) }]
|
||||
Impl { generic_params: GenericParamsId(4294967295), target_trait: None, target_type: Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("Ty"))] }, generic_args: [None] }), is_negative: false, items: [Function(Idx::<Function>(0)), Function(Idx::<Function>(1))], ast_id: FileAstId::<ra_syntax::ast::generated::nodes::ImplDef>(0) }
|
||||
Impl { generic_params: GenericParamsId(4294967295), target_trait: None, target_type: Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("Ty"))] }, generic_args: [None] }), is_negative: false, items: [Function(Idx::<Function>(0)), Function(Idx::<Function>(1))], ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Impl>(0) }
|
||||
> #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr_a"))] }, input: None }]) }]
|
||||
> Function { name: Name(Text("a")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], ret_type: Tuple([]), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::FnDef>(1) }
|
||||
> Function { name: Name(Text("a")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Fn>(1) }
|
||||
> #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr_b"))] }, input: None }]) }]
|
||||
> Function { name: Name(Text("b")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], ret_type: Tuple([]), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::FnDef>(2) }
|
||||
"###);
|
||||
> Function { name: Name(Text("b")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Fn>(2) }
|
||||
"##]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -391,45 +397,43 @@ fn cursed_inner_items() {
|
|||
|
||||
#[test]
|
||||
fn inner_item_attrs() {
|
||||
let tree = print_item_tree(
|
||||
check(
|
||||
r"
|
||||
fn foo() {
|
||||
#[on_inner]
|
||||
fn inner() {}
|
||||
}
|
||||
",
|
||||
);
|
||||
|
||||
assert_snapshot!(tree, @r###"
|
||||
expect![[r##"
|
||||
inner attrs: Attrs { entries: None }
|
||||
|
||||
top-level items:
|
||||
Function { name: Name(Text("foo")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], ret_type: Tuple([]), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::FnDef>(0) }
|
||||
Function { name: Name(Text("foo")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Fn>(0) }
|
||||
|
||||
inner items:
|
||||
|
||||
for AST FileAstId::<ra_syntax::ast::generated::nodes::ModuleItem>(1):
|
||||
for AST FileAstId::<ra_syntax::ast::generated::nodes::Item>(1):
|
||||
#[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("on_inner"))] }, input: None }]) }]
|
||||
Function { name: Name(Text("inner")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], ret_type: Tuple([]), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::FnDef>(1) }
|
||||
Function { name: Name(Text("inner")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Fn>(1) }
|
||||
|
||||
"###);
|
||||
"##]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn assoc_item_macros() {
|
||||
let tree = print_item_tree(
|
||||
check(
|
||||
r"
|
||||
impl S {
|
||||
items!();
|
||||
}
|
||||
",
|
||||
);
|
||||
|
||||
assert_snapshot!(tree, @r###"
|
||||
expect![[r#"
|
||||
inner attrs: Attrs { entries: None }
|
||||
|
||||
top-level items:
|
||||
Impl { generic_params: GenericParamsId(4294967295), target_trait: None, target_type: Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("S"))] }, generic_args: [None] }), is_negative: false, items: [MacroCall(Idx::<MacroCall>(0))], ast_id: FileAstId::<ra_syntax::ast::generated::nodes::ImplDef>(0) }
|
||||
Impl { generic_params: GenericParamsId(4294967295), target_trait: None, target_type: Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("S"))] }, generic_args: [None] }), is_negative: false, items: [MacroCall(Idx::<MacroCall>(0))], ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Impl>(0) }
|
||||
> MacroCall { name: None, path: ModPath { kind: Plain, segments: [Name(Text("items"))] }, is_export: false, is_local_inner: false, is_builtin: false, ast_id: FileAstId::<ra_syntax::ast::generated::nodes::MacroCall>(1) }
|
||||
"###);
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
|
|
@@ -14,19 +14,19 @@ use crate::{

pub type Key<K, V> = crate::dyn_map::Key<InFile<K>, V, AstPtrPolicy<K, V>>;

-pub const FUNCTION: Key<ast::FnDef, FunctionId> = Key::new();
-pub const CONST: Key<ast::ConstDef, ConstId> = Key::new();
-pub const STATIC: Key<ast::StaticDef, StaticId> = Key::new();
-pub const TYPE_ALIAS: Key<ast::TypeAliasDef, TypeAliasId> = Key::new();
-pub const IMPL: Key<ast::ImplDef, ImplId> = Key::new();
-pub const TRAIT: Key<ast::TraitDef, TraitId> = Key::new();
-pub const STRUCT: Key<ast::StructDef, StructId> = Key::new();
-pub const UNION: Key<ast::UnionDef, UnionId> = Key::new();
-pub const ENUM: Key<ast::EnumDef, EnumId> = Key::new();
+pub const FUNCTION: Key<ast::Fn, FunctionId> = Key::new();
+pub const CONST: Key<ast::Const, ConstId> = Key::new();
+pub const STATIC: Key<ast::Static, StaticId> = Key::new();
+pub const TYPE_ALIAS: Key<ast::TypeAlias, TypeAliasId> = Key::new();
+pub const IMPL: Key<ast::Impl, ImplId> = Key::new();
+pub const TRAIT: Key<ast::Trait, TraitId> = Key::new();
+pub const STRUCT: Key<ast::Struct, StructId> = Key::new();
+pub const UNION: Key<ast::Union, UnionId> = Key::new();
+pub const ENUM: Key<ast::Enum, EnumId> = Key::new();

-pub const ENUM_VARIANT: Key<ast::EnumVariant, EnumVariantId> = Key::new();
-pub const TUPLE_FIELD: Key<ast::TupleFieldDef, FieldId> = Key::new();
-pub const RECORD_FIELD: Key<ast::RecordFieldDef, FieldId> = Key::new();
+pub const VARIANT: Key<ast::Variant, EnumVariantId> = Key::new();
+pub const TUPLE_FIELD: Key<ast::TupleField, FieldId> = Key::new();
+pub const RECORD_FIELD: Key<ast::RecordField, FieldId> = Key::new();
pub const TYPE_PARAM: Key<ast::TypeParam, TypeParamId> = Key::new();

pub const MACRO: Key<ast::MacroCall, MacroDefId> = Key::new();
|
||||
|
|
|
@ -65,6 +65,7 @@ use item_tree::{
|
|||
Const, Enum, Function, Impl, ItemTreeId, ItemTreeNode, ModItem, Static, Struct, Trait,
|
||||
TypeAlias, Union,
|
||||
};
|
||||
use stdx::impl_from;
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub struct ModuleId {
|
||||
|
@ -158,17 +159,17 @@ pub struct FunctionId(salsa::InternId);
|
|||
type FunctionLoc = AssocItemLoc<Function>;
|
||||
impl_intern!(FunctionId, FunctionLoc, intern_function, lookup_intern_function);
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
|
||||
pub struct StructId(salsa::InternId);
|
||||
type StructLoc = ItemLoc<Struct>;
|
||||
impl_intern!(StructId, StructLoc, intern_struct, lookup_intern_struct);
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
|
||||
pub struct UnionId(salsa::InternId);
|
||||
pub type UnionLoc = ItemLoc<Union>;
|
||||
impl_intern!(UnionId, UnionLoc, intern_union, lookup_intern_union);
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
|
||||
pub struct EnumId(salsa::InternId);
|
||||
pub type EnumLoc = ItemLoc<Enum>;
|
||||
impl_intern!(EnumId, EnumLoc, intern_enum, lookup_intern_enum);
|
||||
|
@@ -223,25 +224,6 @@ pub struct TypeParamId {

pub type LocalTypeParamId = Idx<generics::TypeParamData>;

-macro_rules! impl_froms {
-    ($e:ident: $($v:ident $(($($sv:ident),*))?),*) => {
-        $(
-            impl From<$v> for $e {
-                fn from(it: $v) -> $e {
-                    $e::$v(it)
-                }
-            }
-            $($(
-                impl From<$sv> for $e {
-                    fn from(it: $sv) -> $e {
-                        $e::$v($v::$sv(it))
-                    }
-                }
-            )*)?
-        )*
-    }
-}
-
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum ContainerId {
    ModuleId(ModuleId),

@@ -254,16 +236,16 @@ pub enum AssocContainerId {
    ImplId(ImplId),
    TraitId(TraitId),
}
-impl_froms!(AssocContainerId: ContainerId);
+impl_from!(ContainerId for AssocContainerId);

/// A Data Type
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub enum AdtId {
    StructId(StructId),
    UnionId(UnionId),
    EnumId(EnumId),
}
-impl_froms!(AdtId: StructId, UnionId, EnumId);
+impl_from!(StructId, UnionId, EnumId for AdtId);
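
A minimal self-contained sketch (stand-in types, hypothetical expansion) of what an invocation like `impl_from!(StructId, UnionId, EnumId for AdtId)` is expected to generate; the real macro lives in the `stdx` crate imported above and replaces the local `impl_froms!` that was removed:

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct StructId(u32);
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct UnionId(u32);
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct EnumId(u32);

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum AdtId {
    StructId(StructId),
    UnionId(UnionId),
    EnumId(EnumId),
}

// One `From` impl per listed source type; only the first is spelled out here.
impl From<StructId> for AdtId {
    fn from(it: StructId) -> AdtId {
        AdtId::StructId(it)
    }
}

fn main() {
    let adt: AdtId = StructId(0).into();
    assert_eq!(adt, AdtId::StructId(StructId(0)));
}
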
|
||||
|
||||
/// The defs which can be visible in the module.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
|
@ -279,8 +261,8 @@ pub enum ModuleDefId {
|
|||
TypeAliasId(TypeAliasId),
|
||||
BuiltinType(BuiltinType),
|
||||
}
|
||||
impl_froms!(
|
||||
ModuleDefId: ModuleId,
|
||||
impl_from!(
|
||||
ModuleId,
|
||||
FunctionId,
|
||||
AdtId(StructId, EnumId, UnionId),
|
||||
EnumVariantId,
|
||||
|
@ -289,6 +271,7 @@ impl_froms!(
|
|||
TraitId,
|
||||
TypeAliasId,
|
||||
BuiltinType
|
||||
for ModuleDefId
|
||||
);
|
||||
|
||||
/// The defs which have a body.
|
||||
|
@ -299,7 +282,7 @@ pub enum DefWithBodyId {
|
|||
ConstId(ConstId),
|
||||
}
|
||||
|
||||
impl_froms!(DefWithBodyId: FunctionId, ConstId, StaticId);
|
||||
impl_from!(FunctionId, ConstId, StaticId for DefWithBodyId);
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
|
||||
pub enum AssocItemId {
|
||||
|
@@ -311,7 +294,7 @@ pub enum AssocItemId {
// sure that you can only turn actual assoc items into AssocItemIds. This would
// require not implementing From, and instead having some checked way of
// casting them, and somehow making the constructors private, which would be annoying.
-impl_froms!(AssocItemId: FunctionId, ConstId, TypeAliasId);
+impl_from!(FunctionId, ConstId, TypeAliasId for AssocItemId);
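
A rough sketch of the "checked way of casting" the comment above wishes for, written with hypothetical stand-in types: conversion only succeeds when the item's container is an impl or a trait, instead of a blanket `From` that accepts any id.

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct FunctionId(u32);

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum AssocContainerId {
    ContainerId(u32),
    ImplId(u32),
    TraitId(u32),
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum AssocItemId {
    FunctionId(FunctionId),
}

impl AssocItemId {
    // Only functions that live in an impl or trait become assoc items.
    fn checked_from_fn(func: FunctionId, container: AssocContainerId) -> Option<Self> {
        match container {
            AssocContainerId::ImplId(_) | AssocContainerId::TraitId(_) => {
                Some(AssocItemId::FunctionId(func))
            }
            AssocContainerId::ContainerId(_) => None,
        }
    }
}

fn main() {
    assert!(AssocItemId::checked_from_fn(FunctionId(0), AssocContainerId::ImplId(0)).is_some());
    assert!(AssocItemId::checked_from_fn(FunctionId(0), AssocContainerId::ContainerId(0)).is_none());
}
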
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
|
||||
pub enum GenericDefId {
|
||||
|
@ -326,14 +309,15 @@ pub enum GenericDefId {
|
|||
// consts can have type parameters from their parents (i.e. associated consts of traits)
|
||||
ConstId(ConstId),
|
||||
}
|
||||
impl_froms!(
|
||||
GenericDefId: FunctionId,
|
||||
impl_from!(
|
||||
FunctionId,
|
||||
AdtId(StructId, EnumId, UnionId),
|
||||
TraitId,
|
||||
TypeAliasId,
|
||||
ImplId,
|
||||
EnumVariantId,
|
||||
ConstId
|
||||
for GenericDefId
|
||||
);
|
||||
|
||||
impl From<AssocItemId> for GenericDefId {
|
||||
|
@ -361,8 +345,8 @@ pub enum AttrDefId {
|
|||
ImplId(ImplId),
|
||||
}
|
||||
|
||||
impl_froms!(
|
||||
AttrDefId: ModuleId,
|
||||
impl_from!(
|
||||
ModuleId,
|
||||
FieldId,
|
||||
AdtId(StructId, EnumId, UnionId),
|
||||
EnumVariantId,
|
||||
|
@ -373,6 +357,7 @@ impl_froms!(
|
|||
TypeAliasId,
|
||||
MacroDefId,
|
||||
ImplId
|
||||
for AttrDefId
|
||||
);
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
|
@ -381,7 +366,7 @@ pub enum VariantId {
|
|||
StructId(StructId),
|
||||
UnionId(UnionId),
|
||||
}
|
||||
impl_froms!(VariantId: EnumVariantId, StructId, UnionId);
|
||||
impl_from!(EnumVariantId, StructId, UnionId for VariantId);
|
||||
|
||||
trait Intern {
|
||||
type ID;
|
||||
|
@ -536,7 +521,7 @@ impl AsMacroCall for AstIdWithPath<ast::MacroCall> {
|
|||
}
|
||||
}
|
||||
|
||||
impl AsMacroCall for AstIdWithPath<ast::ModuleItem> {
|
||||
impl AsMacroCall for AstIdWithPath<ast::Item> {
|
||||
fn as_call_id(
|
||||
&self,
|
||||
db: &dyn db::DefDatabase,
|
||||
|
|
|
@ -229,37 +229,37 @@ impl CrateDefMap {
|
|||
// even), as this should be a great debugging aid.
|
||||
pub fn dump(&self) -> String {
|
||||
let mut buf = String::new();
|
||||
go(&mut buf, self, "\ncrate", self.root);
|
||||
return buf.trim().to_string();
|
||||
go(&mut buf, self, "crate", self.root);
|
||||
return buf;
|
||||
|
||||
fn go(buf: &mut String, map: &CrateDefMap, path: &str, module: LocalModuleId) {
|
||||
*buf += path;
|
||||
*buf += "\n";
|
||||
format_to!(buf, "{}\n", path);
|
||||
|
||||
let mut entries: Vec<_> = map.modules[module].scope.resolutions().collect();
|
||||
entries.sort_by_key(|(name, _)| name.clone());
|
||||
|
||||
for (name, def) in entries {
|
||||
format_to!(buf, "{}:", name);
|
||||
format_to!(buf, "{}:", name.map_or("_".to_string(), |name| name.to_string()));
|
||||
|
||||
if def.types.is_some() {
|
||||
*buf += " t";
|
||||
buf.push_str(" t");
|
||||
}
|
||||
if def.values.is_some() {
|
||||
*buf += " v";
|
||||
buf.push_str(" v");
|
||||
}
|
||||
if def.macros.is_some() {
|
||||
*buf += " m";
|
||||
buf.push_str(" m");
|
||||
}
|
||||
if def.is_none() {
|
||||
*buf += " _";
|
||||
buf.push_str(" _");
|
||||
}
|
||||
|
||||
*buf += "\n";
|
||||
buf.push_str("\n");
|
||||
}
|
||||
|
||||
for (name, child) in map.modules[module].children.iter() {
|
||||
let path = &format!("{}::{}", path, name);
|
||||
let path = format!("{}::{}", path, name);
|
||||
buf.push('\n');
|
||||
go(buf, map, &path, *child);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -36,6 +36,10 @@ use crate::{
|
|||
TraitLoc, TypeAliasLoc, UnionLoc,
|
||||
};
|
||||
|
||||
+const GLOB_RECURSION_LIMIT: usize = 100;
+const EXPANSION_DEPTH_LIMIT: usize = 128;
+const FIXED_POINT_LIMIT: usize = 8192;
|
||||
|
||||
pub(super) fn collect_defs(db: &dyn DefDatabase, mut def_map: CrateDefMap) -> CrateDefMap {
|
||||
let crate_graph = db.crate_graph();
|
||||
|
||||
|
@ -166,7 +170,7 @@ struct MacroDirective {
|
|||
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||
struct DeriveDirective {
|
||||
module_id: LocalModuleId,
|
||||
ast_id: AstIdWithPath<ast::ModuleItem>,
|
||||
ast_id: AstIdWithPath<ast::Item>,
|
||||
}
|
||||
|
||||
struct DefData<'a> {
|
||||
|
@ -217,7 +221,7 @@ impl DefCollector<'_> {
|
|||
ReachedFixedPoint::Yes => break,
|
||||
ReachedFixedPoint::No => i += 1,
|
||||
}
|
||||
-           if i == 10000 {
+           if i == FIXED_POINT_LIMIT {
|
||||
log::error!("name resolution is stuck");
|
||||
break;
|
||||
}
|
||||
|
@ -306,7 +310,7 @@ impl DefCollector<'_> {
|
|||
if export {
|
||||
self.update(
|
||||
self.def_map.root,
|
||||
&[(name, PerNs::macros(macro_, Visibility::Public))],
|
||||
&[(Some(name), PerNs::macros(macro_, Visibility::Public))],
|
||||
Visibility::Public,
|
||||
ImportType::Named,
|
||||
);
|
||||
|
@ -332,7 +336,7 @@ impl DefCollector<'_> {
|
|||
fn define_proc_macro(&mut self, name: Name, macro_: MacroDefId) {
|
||||
self.update(
|
||||
self.def_map.root,
|
||||
&[(name, PerNs::macros(macro_, Visibility::Public))],
|
||||
&[(Some(name), PerNs::macros(macro_, Visibility::Public))],
|
||||
Visibility::Public,
|
||||
ImportType::Named,
|
||||
);
|
||||
|
@ -530,7 +534,7 @@ impl DefCollector<'_> {
|
|||
let name = variant_data.name.clone();
|
||||
let variant = EnumVariantId { parent: e, local_id };
|
||||
let res = PerNs::both(variant.into(), variant.into(), vis);
|
||||
(name, res)
|
||||
(Some(name), res)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
self.update(module_id, &resolutions, vis, ImportType::Glob);
|
||||
|
@ -546,15 +550,15 @@ impl DefCollector<'_> {
|
|||
match import.path.segments.last() {
|
||||
Some(last_segment) => {
|
||||
let name = match &import.alias {
|
||||
Some(ImportAlias::Alias(name)) => name.clone(),
|
||||
Some(ImportAlias::Underscore) => last_segment.clone(), // FIXME rust-analyzer#2736
|
||||
None => last_segment.clone(),
|
||||
Some(ImportAlias::Alias(name)) => Some(name.clone()),
|
||||
Some(ImportAlias::Underscore) => None,
|
||||
None => Some(last_segment.clone()),
|
||||
};
|
||||
log::debug!("resolved import {:?} ({:?}) to {:?}", name, import, def);
|
||||
|
||||
// extern crates in the crate root are special-cased to insert entries into the extern prelude: rust-lang/rust#54658
|
||||
if import.is_extern_crate && module_id == self.def_map.root {
|
||||
if let Some(def) = def.take_types() {
|
||||
if let (Some(def), Some(name)) = (def.take_types(), name.as_ref()) {
|
||||
self.def_map.extern_prelude.insert(name.clone(), def);
|
||||
}
|
||||
}
|
||||
|
@ -569,30 +573,34 @@ impl DefCollector<'_> {
|
|||
fn update(
|
||||
&mut self,
|
||||
module_id: LocalModuleId,
|
||||
resolutions: &[(Name, PerNs)],
|
||||
resolutions: &[(Option<Name>, PerNs)],
|
||||
vis: Visibility,
|
||||
import_type: ImportType,
|
||||
) {
|
||||
self.db.check_canceled();
|
||||
self.update_recursive(module_id, resolutions, vis, import_type, 0)
|
||||
}
|
||||
|
||||
fn update_recursive(
|
||||
&mut self,
|
||||
module_id: LocalModuleId,
|
||||
resolutions: &[(Name, PerNs)],
|
||||
resolutions: &[(Option<Name>, PerNs)],
|
||||
        // All resolutions are imported with this visibility; the visibilities in
        // the `PerNs` values are ignored and overwritten
|
||||
vis: Visibility,
|
||||
import_type: ImportType,
|
||||
depth: usize,
|
||||
) {
|
||||
if depth > 100 {
|
||||
if depth > GLOB_RECURSION_LIMIT {
|
||||
// prevent stack overflows (but this shouldn't be possible)
|
||||
panic!("infinite recursion in glob imports!");
|
||||
}
|
||||
let scope = &mut self.def_map.modules[module_id].scope;
|
||||
let mut changed = false;
|
||||
|
||||
for (name, res) in resolutions {
|
||||
match name {
|
||||
Some(name) => {
|
||||
let scope = &mut self.def_map.modules[module_id].scope;
|
||||
changed |= scope.push_res_with_import(
|
||||
&mut self.from_glob_import,
|
||||
(module_id, name.clone()),
|
||||
|
@@ -600,6 +608,39 @@ impl DefCollector<'_> {
|
|||
import_type,
|
||||
);
|
||||
}
|
||||
None => {
|
||||
let tr = match res.take_types() {
|
||||
Some(ModuleDefId::TraitId(tr)) => tr,
|
||||
Some(other) => {
|
||||
log::debug!("non-trait `_` import of {:?}", other);
|
||||
continue;
|
||||
}
|
||||
None => continue,
|
||||
};
|
||||
let old_vis = self.def_map.modules[module_id].scope.unnamed_trait_vis(tr);
|
||||
let should_update = match old_vis {
|
||||
None => true,
|
||||
Some(old_vis) => {
|
||||
let max_vis = old_vis.max(vis, &self.def_map).unwrap_or_else(|| {
|
||||
panic!("`Tr as _` imports with unrelated visibilities {:?} and {:?} (trait {:?})", old_vis, vis, tr);
|
||||
});
|
||||
|
||||
if max_vis == old_vis {
|
||||
false
|
||||
} else {
|
||||
mark::hit!(upgrade_underscore_visibility);
|
||||
true
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if should_update {
|
||||
changed = true;
|
||||
self.def_map.modules[module_id].scope.push_unnamed_trait(tr, vis);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !changed {
|
||||
return;
|
||||
|
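The switch to `Option<Name>` in the resolution lists, and the `None` arm above, exist because `use Tr as _;` imports a trait without binding a name: the trait becomes available to method resolution, but nothing is added to the named scope, so there is no `Name` to key the entry on. A self-contained example of the language feature being modeled (module and trait names are invented):

```rust
mod tr {
    pub trait Greet {
        fn greet(&self) -> &'static str {
            "hello"
        }
    }
    impl Greet for () {}
}

// The trait's methods come into scope, but no `Greet` binding is created
// here, which is why the collector records this import under `None`.
use tr::Greet as _;

fn main() {
    assert_eq!(().greet(), "hello");
}
```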
@@ -609,14 +650,15 @@ impl DefCollector<'_> {
|
|||
.get(&module_id)
|
||||
.into_iter()
|
||||
.flat_map(|v| v.iter())
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
for (glob_importing_module, glob_import_vis) in glob_imports {
|
||||
.filter(|(glob_importing_module, _)| {
|
||||
// we know all resolutions have the same visibility (`vis`), so we
|
||||
// just need to check that once
|
||||
if !vis.is_visible_from_def_map(&self.def_map, glob_importing_module) {
|
||||
continue;
|
||||
}
|
||||
vis.is_visible_from_def_map(&self.def_map, *glob_importing_module)
|
||||
})
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
for (glob_importing_module, glob_import_vis) in glob_imports {
|
||||
self.update_recursive(
|
||||
glob_importing_module,
|
||||
resolutions,
|
||||
|
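The rewrite above replaces a `for` loop that used `continue` to skip glob-importing modules that cannot see `vis` with a `.filter(...)` applied before `collect()`, so the visibility check runs while the map is still borrowed and the collected list already holds only relevant targets. The two shapes are equivalent; a detached sketch of that equivalence (types and names invented, `can_see` standing in for `Visibility::is_visible_from_def_map`):

```rust
/// Old shape: loop plus `continue`.
fn visible_targets_loop(globs: &[(u32, u32)], can_see: impl Fn(u32) -> bool) -> Vec<(u32, u32)> {
    let mut out = Vec::new();
    for &(module, glob_vis) in globs {
        if !can_see(module) {
            continue;
        }
        out.push((module, glob_vis));
    }
    out
}

/// New shape: `.filter()` before collecting.
fn visible_targets_filter(globs: &[(u32, u32)], can_see: impl Fn(u32) -> bool) -> Vec<(u32, u32)> {
    globs.iter().filter(|(module, _)| can_see(*module)).cloned().collect()
}

fn main() {
    let globs = [(1, 0), (2, 0), (3, 0)];
    let can_see = |module: u32| module != 2;
    assert_eq!(
        visible_targets_loop(&globs, can_see),
        visible_targets_filter(&globs, can_see)
    );
}
```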
@@ -677,10 +719,6 @@ impl DefCollector<'_> {
|
|||
self.unexpanded_attribute_macros = attribute_macros;
|
||||
|
||||
for (module_id, macro_call_id, depth) in resolved {
|
||||
if depth > 1024 {
|
||||
log::debug!("Max macro expansion depth reached");
|
||||
continue;
|
||||
}
|
||||
self.collect_macro_expansion(module_id, macro_call_id, depth);
|
||||
}
|
||||
|
||||
|
@@ -717,6 +755,11 @@ impl DefCollector<'_> {
|
|||
macro_call_id: MacroCallId,
|
||||
depth: usize,
|
||||
) {
|
||||
if depth > EXPANSION_DEPTH_LIMIT {
|
||||
mark::hit!(macro_expansion_overflow);
|
||||
log::warn!("macro expansion is too deep");
|
||||
return;
|
||||
}
|
||||
let file_id: HirFileId = macro_call_id.as_file();
|
||||
let item_tree = self.db.item_tree(file_id);
|
||||
let mod_dir = self.mod_dirs[&module_id].clone();
|
||||
|
@@ -943,7 +986,7 @@ impl ModCollector<'_, '_> {
|
|||
.unwrap_or(Visibility::Public);
|
||||
self.def_collector.update(
|
||||
self.module_id,
|
||||
&[(name.clone(), PerNs::from_def(id, vis, has_constructor))],
|
||||
&[(Some(name.clone()), PerNs::from_def(id, vis, has_constructor))],
|
||||
vis,
|
||||
ImportType::Named,
|
||||
)
|
||||
|
@@ -1050,14 +1093,14 @@ impl ModCollector<'_, '_> {
|
|||
self.def_collector.def_map.modules[self.module_id].scope.define_def(def);
|
||||
self.def_collector.update(
|
||||
self.module_id,
|
||||
&[(name, PerNs::from_def(def, vis, false))],
|
||||
&[(Some(name), PerNs::from_def(def, vis, false))],
|
||||
vis,
|
||||
ImportType::Named,
|
||||
);
|
||||
res
|
||||
}
|
||||
|
||||
fn collect_derives(&mut self, attrs: &Attrs, ast_id: FileAstId<ast::ModuleItem>) {
|
||||
fn collect_derives(&mut self, attrs: &Attrs, ast_id: FileAstId<ast::Item>) {
|
||||
for derive_subtree in attrs.by_key("derive").tt_values() {
|
||||
// for #[derive(Copy, Clone)], `derive_subtree` is the `(Copy, Clone)` subtree
|
||||
for tt in &derive_subtree.token_trees {
|
||||
|
|
|
@@ -1,23 +1,24 @@
|
|||
//! This module resolves `mod foo;` declarations to files.
|
||||
use hir_expand::name::Name;
|
||||
use ra_db::{FileId, RelativePathBuf};
|
||||
use ra_db::FileId;
|
||||
use ra_syntax::SmolStr;
|
||||
|
||||
use crate::{db::DefDatabase, HirFileId};
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub(super) struct ModDir {
|
||||
/// `.` for `mod.rs`, `lib.rs`
|
||||
/// `./foo` for `foo.rs`
|
||||
/// `./foo/bar` for `mod bar { mod x; }` nested in `foo.rs`
|
||||
path: RelativePathBuf,
|
||||
/// `` for `mod.rs`, `lib.rs`
|
||||
/// `foo/` for `foo.rs`
|
||||
/// `foo/bar/` for `mod bar { mod x; }` nested in `foo.rs`
|
||||
/// Invariant: path.is_empty() || path.ends_with('/')
|
||||
dir_path: DirPath,
|
||||
/// inside `./foo.rs`, mods with `#[path]` should *not* be relative to `./foo/`
|
||||
root_non_dir_owner: bool,
|
||||
}
|
||||
|
||||
impl ModDir {
|
||||
pub(super) fn root() -> ModDir {
|
||||
ModDir { path: RelativePathBuf::default(), root_non_dir_owner: false }
|
||||
ModDir { dir_path: DirPath::empty(), root_non_dir_owner: false }
|
||||
}
|
||||
|
||||
pub(super) fn descend_into_definition(
|
||||
|
@@ -25,17 +26,21 @@ impl ModDir {
|
|||
name: &Name,
|
||||
attr_path: Option<&SmolStr>,
|
||||
) -> ModDir {
|
||||
let mut path = self.path.clone();
|
||||
match attr_to_path(attr_path) {
|
||||
None => path.push(&name.to_string()),
|
||||
let path = match attr_path.map(|it| it.as_str()) {
|
||||
None => {
|
||||
let mut path = self.dir_path.clone();
|
||||
path.push(&name.to_string());
|
||||
path
|
||||
}
|
||||
Some(attr_path) => {
|
||||
if self.root_non_dir_owner {
|
||||
assert!(path.pop());
|
||||
let mut path = self.dir_path.join_attr(attr_path, self.root_non_dir_owner);
|
||||
if !(path.is_empty() || path.ends_with('/')) {
|
||||
path.push('/')
|
||||
}
|
||||
path.push(attr_path);
|
||||
DirPath::new(path)
|
||||
}
|
||||
}
|
||||
ModDir { path, root_non_dir_owner: false }
|
||||
};
|
||||
ModDir { dir_path: path, root_non_dir_owner: false }
|
||||
}
|
||||
|
||||
pub(super) fn resolve_declaration(
|
||||
|
@@ -48,34 +53,87 @@ impl ModDir {
|
|||
let file_id = file_id.original_file(db.upcast());
|
||||
|
||||
let mut candidate_files = Vec::new();
|
||||
match attr_to_path(attr_path) {
|
||||
match attr_path {
|
||||
Some(attr_path) => {
|
||||
let base =
|
||||
if self.root_non_dir_owner { self.path.parent().unwrap() } else { &self.path };
|
||||
candidate_files.push(base.join(attr_path).to_string())
|
||||
candidate_files.push(self.dir_path.join_attr(attr_path, self.root_non_dir_owner))
|
||||
}
|
||||
None => {
|
||||
candidate_files.push(self.path.join(&format!("{}.rs", name)).to_string());
|
||||
candidate_files.push(self.path.join(&format!("{}/mod.rs", name)).to_string());
|
||||
candidate_files.push(format!("{}{}.rs", self.dir_path.0, name));
|
||||
candidate_files.push(format!("{}{}/mod.rs", self.dir_path.0, name));
|
||||
}
|
||||
};
|
||||
|
||||
for candidate in candidate_files.iter() {
|
||||
if let Some(file_id) = db.resolve_path(file_id, candidate.as_str()) {
|
||||
let mut root_non_dir_owner = false;
|
||||
let mut mod_path = RelativePathBuf::new();
|
||||
let is_mod_rs = candidate.ends_with("mod.rs");
|
||||
if !(is_mod_rs || attr_path.is_some()) {
|
||||
root_non_dir_owner = true;
|
||||
mod_path.push(&name.to_string());
|
||||
}
|
||||
return Ok((file_id, is_mod_rs, ModDir { path: mod_path, root_non_dir_owner }));
|
||||
|
||||
let (dir_path, root_non_dir_owner) = if is_mod_rs || attr_path.is_some() {
|
||||
(DirPath::empty(), false)
|
||||
} else {
|
||||
(DirPath::new(format!("{}/", name)), true)
|
||||
};
|
||||
return Ok((file_id, is_mod_rs, ModDir { dir_path, root_non_dir_owner }));
|
||||
}
|
||||
}
|
||||
Err(candidate_files.remove(0))
|
||||
}
|
||||
}
|
||||
|
||||
fn attr_to_path(attr: Option<&SmolStr>) -> Option<RelativePathBuf> {
|
||||
attr.and_then(|it| RelativePathBuf::from_path(&it.replace("\\", "/")).ok())
|
||||
#[derive(Clone, Debug)]
|
||||
struct DirPath(String);
|
||||
|
||||
impl DirPath {
|
||||
fn assert_invariant(&self) {
|
||||
assert!(self.0.is_empty() || self.0.ends_with('/'));
|
||||
}
|
||||
fn new(repr: String) -> DirPath {
|
||||
let res = DirPath(repr);
|
||||
res.assert_invariant();
|
||||
res
|
||||
}
|
||||
fn empty() -> DirPath {
|
||||
DirPath::new(String::new())
|
||||
}
|
||||
fn push(&mut self, name: &str) {
|
||||
self.0.push_str(name);
|
||||
self.0.push('/');
|
||||
self.assert_invariant();
|
||||
}
|
||||
fn parent(&self) -> Option<&str> {
|
||||
if self.0.is_empty() {
|
||||
return None;
|
||||
};
|
||||
let idx =
|
||||
self.0[..self.0.len() - '/'.len_utf8()].rfind('/').map_or(0, |it| it + '/'.len_utf8());
|
||||
Some(&self.0[..idx])
|
||||
}
|
||||
/// So this is the case which, I think, doesn't really work if we try to be
|
||||
/// 100% platform agnostic:
|
||||
///
|
||||
/// ```
|
||||
/// mod a {
|
||||
/// #[path="C://sad/face"]
|
||||
/// mod b { mod c; }
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// Here, we need to join logical dir path to a string path from an
|
||||
/// attribute. Ideally, we should somehow losslessly communicate the whole
|
||||
/// construction to `FileLoader`.
|
||||
fn join_attr(&self, mut attr: &str, relative_to_parent: bool) -> String {
|
||||
let base = if relative_to_parent { self.parent().unwrap() } else { &self.0 };
|
||||
|
||||
if attr.starts_with("./") {
|
||||
attr = &attr["./".len()..];
|
||||
}
|
||||
let tmp;
|
||||
let attr = if attr.contains('\\') {
|
||||
tmp = attr.replace('\\', "/");
|
||||
&tmp
|
||||
} else {
|
||||
attr
|
||||
};
|
||||
let res = format!("{}{}", base, attr);
|
||||
res
|
||||
}
|
||||
}
|
||||
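Stripped of the `DefDatabase` plumbing, the resolution above amounts to: `mod foo;` probes `<dir>foo.rs` and then `<dir>foo/mod.rs`, while a `#[path = "..."]` attribute overrides the lookup. A simplified sketch of that candidate computation (it deliberately ignores the `root_non_dir_owner` special case, and the helper name is invented):

```rust
/// Given the directory prefix of the declaring file ("" or ending in '/'),
/// the module name, and an optional `#[path]` attribute, list the files
/// that would be probed, in order.
fn candidate_files(dir_path: &str, name: &str, attr_path: Option<&str>) -> Vec<String> {
    assert!(dir_path.is_empty() || dir_path.ends_with('/'));
    match attr_path {
        // `#[path = "..."]` wins and is taken (almost) verbatim.
        Some(p) => vec![format!("{}{}", dir_path, p.trim_start_matches("./"))],
        // Otherwise try `foo.rs`, then `foo/mod.rs`.
        None => vec![
            format!("{}{}.rs", dir_path, name),
            format!("{}{}/mod.rs", dir_path, name),
        ],
    }
}

fn main() {
    assert_eq!(candidate_files("", "foo", None), ["foo.rs", "foo/mod.rs"]);
    assert_eq!(candidate_files("foo/", "bar", None), ["foo/bar.rs", "foo/bar/mod.rs"]);
    assert_eq!(candidate_files("foo/", "baz", Some("./x/y.rs")), ["foo/x/y.rs"]);
}
```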
|
|
|
@@ -226,8 +226,16 @@ impl CrateDefMap {
|
|||
match enum_data.variant(&segment) {
|
||||
Some(local_id) => {
|
||||
let variant = EnumVariantId { parent: e, local_id };
|
||||
match &*enum_data.variants[local_id].variant_data {
|
||||
crate::adt::VariantData::Record(_) => {
|
||||
PerNs::types(variant.into(), Visibility::Public)
|
||||
}
|
||||
crate::adt::VariantData::Tuple(_)
|
||||
| crate::adt::VariantData::Unit => {
|
||||
PerNs::both(variant.into(), variant.into(), Visibility::Public)
|
||||
}
|
||||
}
|
||||
}
|
||||
None => {
|
||||
return ResolvePathResult::with(
|
||||
PerNs::types(e.into(), vis),
|
||||
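The match above encodes a language rule rather than an implementation detail: a record (struct-like) enum variant lives only in the type namespace, while tuple and unit variants also contribute a value, their constructor. A small example of the rule itself, independent of the resolver (the enum is invented):

```rust
enum E {
    Record { field: u32 }, // type namespace only: no bare-value constructor
    Tuple(u32),            // type + value: `E::Tuple` is a function value
    Unit,                  // type + value: `E::Unit` is a constant-like value
}

fn main() {
    // Tuple and unit variants can be used as values directly.
    let _t: fn(u32) -> E = E::Tuple;
    let _u = E::Unit;
    // A record variant must be built with struct syntax; `E::Record` alone is
    // not a value, which is why the resolver returns `PerNs::types` for it.
    let _r = E::Record { field: 1 };
}
```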
|
|
|
@@ -6,26 +6,29 @@ mod primitives;
|
|||
|
||||
use std::sync::Arc;
|
||||
|
||||
use insta::assert_snapshot;
|
||||
use expect::{expect, Expect};
|
||||
use ra_db::{fixture::WithFixture, SourceDatabase};
|
||||
use test_utils::mark;
|
||||
|
||||
use crate::{db::DefDatabase, nameres::*, test_db::TestDB};
|
||||
|
||||
fn def_map(ra_fixture: &str) -> String {
|
||||
compute_crate_def_map(ra_fixture).dump()
|
||||
}
|
||||
|
||||
fn compute_crate_def_map(fixture: &str) -> Arc<CrateDefMap> {
|
||||
let db = TestDB::with_files(fixture);
|
||||
let krate = db.crate_graph().iter().next().unwrap();
|
||||
db.crate_def_map(krate)
|
||||
}
|
||||
|
||||
fn check(ra_fixture: &str, expect: Expect) {
|
||||
let db = TestDB::with_files(ra_fixture);
|
||||
let krate = db.crate_graph().iter().next().unwrap();
|
||||
let actual = db.crate_def_map(krate).dump();
|
||||
expect.assert_eq(&actual);
|
||||
}
|
||||
|
||||
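These tests move from `insta::assert_snapshot!` to inline snapshots: `expect![[ ... ]]` embeds the expected def-map dump next to the test and `Expect::assert_eq` compares the rendered output against it, with an environment variable to rewrite the literal when the output changes. A minimal standalone usage sketch, assuming the crate as later published on crates.io (`expect-test`); the in-tree `expect` crate imported above exposes the same `expect`/`Expect` pair:

```rust
// Cargo.toml (assumed): expect-test = "1"
use expect_test::expect;

#[test]
fn inline_snapshot() {
    let actual = 2 + 2;
    // `expect!` stores the expected text inline in the source; running the
    // test with UPDATE_EXPECT=1 set rewrites the literal to match `actual`.
    expect![["4"]].assert_eq(&actual.to_string());
}
```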
#[test]
|
||||
fn crate_def_map_smoke_test() {
|
||||
let map = def_map(
|
||||
r"
|
||||
check(
|
||||
r#"
|
||||
//- /lib.rs
|
||||
mod foo;
|
||||
struct S;
|
||||
|
@@ -39,102 +42,94 @@ fn crate_def_map_smoke_test() {
|
|||
//- /foo/bar.rs
|
||||
pub struct Baz;
|
||||
|
||||
union U {
|
||||
to_be: bool,
|
||||
not_to_be: u8,
|
||||
}
|
||||
|
||||
union U { to_be: bool, not_to_be: u8 }
|
||||
enum E { V }
|
||||
|
||||
extern {
|
||||
static EXT: u8;
|
||||
fn ext();
|
||||
}
|
||||
",
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
E: t
|
||||
S: t v
|
||||
V: t v
|
||||
foo: t
|
||||
|
||||
crate::foo
|
||||
bar: t
|
||||
f: v
|
||||
|
||||
crate::foo::bar
|
||||
Baz: t v
|
||||
E: t
|
||||
EXT: v
|
||||
U: t
|
||||
ext: v
|
||||
"#]],
|
||||
);
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮E: t
|
||||
⋮S: t v
|
||||
⋮V: t v
|
||||
⋮foo: t
|
||||
⋮
|
||||
⋮crate::foo
|
||||
⋮bar: t
|
||||
⋮f: v
|
||||
⋮
|
||||
⋮crate::foo::bar
|
||||
⋮Baz: t v
|
||||
⋮E: t
|
||||
⋮EXT: v
|
||||
⋮U: t
|
||||
⋮ext: v
|
||||
"###)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn crate_def_map_super_super() {
|
||||
let map = def_map(
|
||||
"
|
||||
//- /lib.rs
|
||||
check(
|
||||
r#"
|
||||
mod a {
|
||||
const A: usize = 0;
|
||||
|
||||
mod b {
|
||||
const B: usize = 0;
|
||||
|
||||
mod c {
|
||||
use super::super::*;
|
||||
}
|
||||
}
|
||||
}
|
||||
",
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
a: t
|
||||
|
||||
crate::a
|
||||
A: v
|
||||
b: t
|
||||
|
||||
crate::a::b
|
||||
B: v
|
||||
c: t
|
||||
|
||||
crate::a::b::c
|
||||
A: v
|
||||
b: t
|
||||
"#]],
|
||||
);
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮a: t
|
||||
⋮
|
||||
⋮crate::a
|
||||
⋮A: v
|
||||
⋮b: t
|
||||
⋮
|
||||
⋮crate::a::b
|
||||
⋮B: v
|
||||
⋮c: t
|
||||
⋮
|
||||
⋮crate::a::b::c
|
||||
⋮A: v
|
||||
⋮b: t
|
||||
"###)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn crate_def_map_fn_mod_same_name() {
|
||||
let map = def_map(
|
||||
"
|
||||
//- /lib.rs
|
||||
check(
|
||||
r#"
|
||||
mod m {
|
||||
pub mod z {}
|
||||
pub fn z() {}
|
||||
}
|
||||
",
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
m: t
|
||||
|
||||
crate::m
|
||||
z: t v
|
||||
|
||||
crate::m::z
|
||||
"#]],
|
||||
);
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮m: t
|
||||
⋮
|
||||
⋮crate::m
|
||||
⋮z: t v
|
||||
⋮
|
||||
⋮crate::m::z
|
||||
"###)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bogus_paths() {
|
||||
mark::check!(bogus_paths);
|
||||
let map = def_map(
|
||||
"
|
||||
check(
|
||||
r#"
|
||||
//- /lib.rs
|
||||
mod foo;
|
||||
struct S;
|
||||
|
@@ -143,51 +138,45 @@ fn bogus_paths() {
|
|||
//- /foo/mod.rs
|
||||
use super;
|
||||
use crate;
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
S: t v
|
||||
foo: t
|
||||
|
||||
",
|
||||
crate::foo
|
||||
"#]],
|
||||
);
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮S: t v
|
||||
⋮foo: t
|
||||
⋮
|
||||
⋮crate::foo
|
||||
"###
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn use_as() {
|
||||
let map = def_map(
|
||||
"
|
||||
check(
|
||||
r#"
|
||||
//- /lib.rs
|
||||
mod foo;
|
||||
|
||||
use crate::foo::Baz as Foo;
|
||||
|
||||
//- /foo/mod.rs
|
||||
pub struct Baz;
|
||||
",
|
||||
);
|
||||
assert_snapshot!(map,
|
||||
@r###"
|
||||
⋮crate
|
||||
⋮Foo: t v
|
||||
⋮foo: t
|
||||
⋮
|
||||
⋮crate::foo
|
||||
⋮Baz: t v
|
||||
"###
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
Foo: t v
|
||||
foo: t
|
||||
|
||||
crate::foo
|
||||
Baz: t v
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn use_trees() {
|
||||
let map = def_map(
|
||||
"
|
||||
check(
|
||||
r#"
|
||||
//- /lib.rs
|
||||
mod foo;
|
||||
|
||||
use crate::foo::bar::{Baz, Quux};
|
||||
|
||||
//- /foo/mod.rs
|
||||
|
@@ -196,60 +185,58 @@ fn use_trees() {
|
|||
//- /foo/bar.rs
|
||||
pub struct Baz;
|
||||
pub enum Quux {};
|
||||
",
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
Baz: t v
|
||||
Quux: t
|
||||
foo: t
|
||||
|
||||
crate::foo
|
||||
bar: t
|
||||
|
||||
crate::foo::bar
|
||||
Baz: t v
|
||||
Quux: t
|
||||
"#]],
|
||||
);
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮Baz: t v
|
||||
⋮Quux: t
|
||||
⋮foo: t
|
||||
⋮
|
||||
⋮crate::foo
|
||||
⋮bar: t
|
||||
⋮
|
||||
⋮crate::foo::bar
|
||||
⋮Baz: t v
|
||||
⋮Quux: t
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn re_exports() {
|
||||
let map = def_map(
|
||||
"
|
||||
check(
|
||||
r#"
|
||||
//- /lib.rs
|
||||
mod foo;
|
||||
|
||||
use self::foo::Baz;
|
||||
|
||||
//- /foo/mod.rs
|
||||
pub mod bar;
|
||||
|
||||
pub use self::bar::Baz;
|
||||
|
||||
//- /foo/bar.rs
|
||||
pub struct Baz;
|
||||
",
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
Baz: t v
|
||||
foo: t
|
||||
|
||||
crate::foo
|
||||
Baz: t v
|
||||
bar: t
|
||||
|
||||
crate::foo::bar
|
||||
Baz: t v
|
||||
"#]],
|
||||
);
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮Baz: t v
|
||||
⋮foo: t
|
||||
⋮
|
||||
⋮crate::foo
|
||||
⋮Baz: t v
|
||||
⋮bar: t
|
||||
⋮
|
||||
⋮crate::foo::bar
|
||||
⋮Baz: t v
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn std_prelude() {
|
||||
mark::check!(std_prelude);
|
||||
let map = def_map(
|
||||
"
|
||||
check(
|
||||
r#"
|
||||
//- /main.rs crate:main deps:test_crate
|
||||
use Foo::*;
|
||||
|
||||
|
@@ -260,37 +247,35 @@ fn std_prelude() {
|
|||
|
||||
//- /prelude.rs
|
||||
pub enum Foo { Bar, Baz };
|
||||
",
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
Bar: t v
|
||||
Baz: t v
|
||||
"#]],
|
||||
);
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮Bar: t v
|
||||
⋮Baz: t v
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_import_enum_variant() {
|
||||
mark::check!(can_import_enum_variant);
|
||||
let map = def_map(
|
||||
"
|
||||
//- /lib.rs
|
||||
check(
|
||||
r#"
|
||||
enum E { V }
|
||||
use self::E::V;
|
||||
",
|
||||
);
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮E: t
|
||||
⋮V: t v
|
||||
"###
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
E: t
|
||||
V: t v
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn edition_2015_imports() {
|
||||
let map = def_map(
|
||||
"
|
||||
check(
|
||||
r#"
|
||||
//- /main.rs crate:main deps:other_crate edition:2015
|
||||
mod foo;
|
||||
mod bar;
|
||||
|
@@ -304,74 +289,73 @@ fn edition_2015_imports() {
|
|||
|
||||
//- /lib.rs crate:other_crate edition:2018
|
||||
struct FromLib;
|
||||
",
|
||||
);
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
bar: t
|
||||
foo: t
|
||||
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮bar: t
|
||||
⋮foo: t
|
||||
⋮
|
||||
⋮crate::bar
|
||||
⋮Bar: t v
|
||||
⋮
|
||||
⋮crate::foo
|
||||
⋮Bar: t v
|
||||
⋮FromLib: t v
|
||||
"###);
|
||||
crate::bar
|
||||
Bar: t v
|
||||
|
||||
crate::foo
|
||||
Bar: t v
|
||||
FromLib: t v
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn item_map_using_self() {
|
||||
let map = def_map(
|
||||
"
|
||||
check(
|
||||
r#"
|
||||
//- /lib.rs
|
||||
mod foo;
|
||||
use crate::foo::bar::Baz::{self};
|
||||
|
||||
//- /foo/mod.rs
|
||||
pub mod bar;
|
||||
|
||||
//- /foo/bar.rs
|
||||
pub struct Baz;
|
||||
",
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
Baz: t v
|
||||
foo: t
|
||||
|
||||
crate::foo
|
||||
bar: t
|
||||
|
||||
crate::foo::bar
|
||||
Baz: t v
|
||||
"#]],
|
||||
);
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮Baz: t v
|
||||
⋮foo: t
|
||||
⋮
|
||||
⋮crate::foo
|
||||
⋮bar: t
|
||||
⋮
|
||||
⋮crate::foo::bar
|
||||
⋮Baz: t v
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn item_map_across_crates() {
|
||||
let map = def_map(
|
||||
"
|
||||
check(
|
||||
r#"
|
||||
//- /main.rs crate:main deps:test_crate
|
||||
use test_crate::Baz;
|
||||
|
||||
//- /lib.rs crate:test_crate
|
||||
pub struct Baz;
|
||||
",
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
Baz: t v
|
||||
"#]],
|
||||
);
|
||||
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮Baz: t v
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn extern_crate_rename() {
|
||||
let map = def_map(
|
||||
"
|
||||
check(
|
||||
r#"
|
||||
//- /main.rs crate:main deps:alloc
|
||||
extern crate alloc as alloc_crate;
|
||||
|
||||
mod alloc;
|
||||
mod sync;
|
||||
|
||||
|
@@ -380,26 +364,24 @@ fn extern_crate_rename() {
|
|||
|
||||
//- /lib.rs crate:alloc
|
||||
struct Arc;
|
||||
",
|
||||
);
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
alloc_crate: t
|
||||
sync: t
|
||||
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮alloc_crate: t
|
||||
⋮sync: t
|
||||
⋮
|
||||
⋮crate::sync
|
||||
⋮Arc: t v
|
||||
"###);
|
||||
crate::sync
|
||||
Arc: t v
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn extern_crate_rename_2015_edition() {
|
||||
let map = def_map(
|
||||
"
|
||||
check(
|
||||
r#"
|
||||
//- /main.rs crate:main deps:alloc edition:2015
|
||||
extern crate alloc as alloc_crate;
|
||||
|
||||
mod alloc;
|
||||
mod sync;
|
||||
|
||||
|
@@ -408,67 +390,61 @@ fn extern_crate_rename_2015_edition() {
|
|||
|
||||
//- /lib.rs crate:alloc
|
||||
struct Arc;
|
||||
",
|
||||
);
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
alloc_crate: t
|
||||
sync: t
|
||||
|
||||
assert_snapshot!(map,
|
||||
@r###"
|
||||
⋮crate
|
||||
⋮alloc_crate: t
|
||||
⋮sync: t
|
||||
⋮
|
||||
⋮crate::sync
|
||||
⋮Arc: t v
|
||||
"###
|
||||
crate::sync
|
||||
Arc: t v
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn reexport_across_crates() {
|
||||
let map = def_map(
|
||||
"
|
||||
check(
|
||||
r#"
|
||||
//- /main.rs crate:main deps:test_crate
|
||||
use test_crate::Baz;
|
||||
|
||||
//- /lib.rs crate:test_crate
|
||||
pub use foo::Baz;
|
||||
|
||||
mod foo;
|
||||
|
||||
//- /foo.rs
|
||||
pub struct Baz;
|
||||
",
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
Baz: t v
|
||||
"#]],
|
||||
);
|
||||
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮Baz: t v
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn values_dont_shadow_extern_crates() {
|
||||
let map = def_map(
|
||||
"
|
||||
check(
|
||||
r#"
|
||||
//- /main.rs crate:main deps:foo
|
||||
fn foo() {}
|
||||
use foo::Bar;
|
||||
|
||||
//- /foo/lib.rs crate:foo
|
||||
pub struct Bar;
|
||||
",
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
Bar: t v
|
||||
foo: v
|
||||
"#]],
|
||||
);
|
||||
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮Bar: t v
|
||||
⋮foo: v
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn std_prelude_takes_precedence_above_core_prelude() {
|
||||
let map = def_map(
|
||||
check(
|
||||
r#"
|
||||
//- /main.rs crate:main deps:core,std
|
||||
use {Foo, Bar};
|
||||
|
@@ -488,18 +464,17 @@ fn std_prelude_takes_precedence_above_core_prelude() {
|
|||
pub struct Bar;
|
||||
}
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
Bar: t v
|
||||
Foo: t v
|
||||
"#]],
|
||||
);
|
||||
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮Bar: t v
|
||||
⋮Foo: t v
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cfg_not_test() {
|
||||
let map = def_map(
|
||||
check(
|
||||
r#"
|
||||
//- /main.rs crate:main deps:std
|
||||
use {Foo, Bar, Baz};
|
||||
|
@@ -516,19 +491,18 @@ fn cfg_not_test() {
|
|||
pub struct Baz;
|
||||
}
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
Bar: t v
|
||||
Baz: _
|
||||
Foo: _
|
||||
"#]],
|
||||
);
|
||||
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮Bar: t v
|
||||
⋮Baz: _
|
||||
⋮Foo: _
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cfg_test() {
|
||||
let map = def_map(
|
||||
check(
|
||||
r#"
|
||||
//- /main.rs crate:main deps:std
|
||||
use {Foo, Bar, Baz};
|
||||
|
@@ -545,19 +519,18 @@ fn cfg_test() {
|
|||
pub struct Baz;
|
||||
}
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
Bar: _
|
||||
Baz: t v
|
||||
Foo: t v
|
||||
"#]],
|
||||
);
|
||||
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮Bar: _
|
||||
⋮Baz: t v
|
||||
⋮Foo: t v
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn infer_multiple_namespace() {
|
||||
let map = def_map(
|
||||
check(
|
||||
r#"
|
||||
//- /main.rs
|
||||
mod a {
|
||||
|
@@ -571,18 +544,147 @@ mod b {
|
|||
pub const T: () = ();
|
||||
}
|
||||
"#,
|
||||
);
|
||||
expect![[r#"
|
||||
crate
|
||||
T: t v
|
||||
a: t
|
||||
b: t
|
||||
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮T: t v
|
||||
⋮a: t
|
||||
⋮b: t
|
||||
⋮
|
||||
⋮crate::b
|
||||
⋮T: v
|
||||
⋮
|
||||
⋮crate::a
|
||||
⋮T: t v
|
||||
"###);
|
||||
crate::b
|
||||
T: v
|
||||
|
||||
crate::a
|
||||
T: t v
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn underscore_import() {
|
||||
check(
|
||||
r#"
|
||||
//- /main.rs
|
||||
use tr::Tr as _;
|
||||
use tr::Tr2 as _;
|
||||
|
||||
mod tr {
|
||||
pub trait Tr {}
|
||||
pub trait Tr2 {}
|
||||
}
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
_: t
|
||||
_: t
|
||||
tr: t
|
||||
|
||||
crate::tr
|
||||
Tr: t
|
||||
Tr2: t
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn underscore_reexport() {
|
||||
check(
|
||||
r#"
|
||||
//- /main.rs
|
||||
mod tr {
|
||||
pub trait PubTr {}
|
||||
pub trait PrivTr {}
|
||||
}
|
||||
mod reex {
|
||||
use crate::tr::PrivTr as _;
|
||||
pub use crate::tr::PubTr as _;
|
||||
}
|
||||
use crate::reex::*;
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
_: t
|
||||
reex: t
|
||||
tr: t
|
||||
|
||||
crate::tr
|
||||
PrivTr: t
|
||||
PubTr: t
|
||||
|
||||
crate::reex
|
||||
_: t
|
||||
_: t
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn underscore_pub_crate_reexport() {
|
||||
mark::check!(upgrade_underscore_visibility);
|
||||
check(
|
||||
r#"
|
||||
//- /main.rs crate:main deps:lib
|
||||
use lib::*;
|
||||
|
||||
//- /lib.rs crate:lib
|
||||
use tr::Tr as _;
|
||||
pub use tr::Tr as _;
|
||||
|
||||
mod tr {
|
||||
pub trait Tr {}
|
||||
}
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
_: t
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn underscore_nontrait() {
|
||||
check(
|
||||
r#"
|
||||
//- /main.rs
|
||||
mod m {
|
||||
pub struct Struct;
|
||||
pub enum Enum {}
|
||||
pub const CONST: () = ();
|
||||
}
|
||||
use crate::m::{Struct as _, Enum as _, CONST as _};
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
m: t
|
||||
|
||||
crate::m
|
||||
CONST: v
|
||||
Enum: t
|
||||
Struct: t v
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn underscore_name_conflict() {
|
||||
check(
|
||||
r#"
|
||||
//- /main.rs
|
||||
struct Tr;
|
||||
|
||||
use tr::Tr as _;
|
||||
|
||||
mod tr {
|
||||
pub trait Tr {}
|
||||
}
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
_: t
|
||||
Tr: t v
|
||||
tr: t
|
||||
|
||||
crate::tr
|
||||
Tr: t
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
|
|
@ -2,8 +2,8 @@ use super::*;
|
|||
|
||||
#[test]
|
||||
fn glob_1() {
|
||||
let map = def_map(
|
||||
r"
|
||||
check(
|
||||
r#"
|
||||
//- /lib.rs
|
||||
mod foo;
|
||||
use foo::*;
|
||||
|
@@ -15,30 +15,29 @@ fn glob_1() {
|
|||
|
||||
//- /foo/bar.rs
|
||||
pub struct Baz;
|
||||
",
|
||||
);
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮Baz: t v
|
||||
⋮Foo: t v
|
||||
⋮bar: t
|
||||
⋮foo: t
|
||||
⋮
|
||||
⋮crate::foo
|
||||
⋮Baz: t v
|
||||
⋮Foo: t v
|
||||
⋮bar: t
|
||||
⋮
|
||||
⋮crate::foo::bar
|
||||
⋮Baz: t v
|
||||
"###
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
Baz: t v
|
||||
Foo: t v
|
||||
bar: t
|
||||
foo: t
|
||||
|
||||
crate::foo
|
||||
Baz: t v
|
||||
Foo: t v
|
||||
bar: t
|
||||
|
||||
crate::foo::bar
|
||||
Baz: t v
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn glob_2() {
|
||||
let map = def_map(
|
||||
"
|
||||
check(
|
||||
r#"
|
||||
//- /lib.rs
|
||||
mod foo;
|
||||
use foo::*;
|
||||
|
@@ -51,31 +50,30 @@ fn glob_2() {
|
|||
//- /foo/bar.rs
|
||||
pub struct Baz;
|
||||
pub use super::*;
|
||||
",
|
||||
);
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮Baz: t v
|
||||
⋮Foo: t v
|
||||
⋮bar: t
|
||||
⋮foo: t
|
||||
⋮
|
||||
⋮crate::foo
|
||||
⋮Baz: t v
|
||||
⋮Foo: t v
|
||||
⋮bar: t
|
||||
⋮
|
||||
⋮crate::foo::bar
|
||||
⋮Baz: t v
|
||||
⋮Foo: t v
|
||||
⋮bar: t
|
||||
"###
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
Baz: t v
|
||||
Foo: t v
|
||||
bar: t
|
||||
foo: t
|
||||
|
||||
crate::foo
|
||||
Baz: t v
|
||||
Foo: t v
|
||||
bar: t
|
||||
|
||||
crate::foo::bar
|
||||
Baz: t v
|
||||
Foo: t v
|
||||
bar: t
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn glob_privacy_1() {
|
||||
let map = def_map(
|
||||
check(
|
||||
r"
|
||||
//- /lib.rs
|
||||
mod foo;
|
||||
|
@@ -91,30 +89,29 @@ fn glob_privacy_1() {
|
|||
struct PrivateStructBar;
|
||||
pub use super::*;
|
||||
",
|
||||
);
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮Baz: t v
|
||||
⋮bar: t
|
||||
⋮foo: t
|
||||
⋮
|
||||
⋮crate::foo
|
||||
⋮Baz: t v
|
||||
⋮PrivateStructFoo: t v
|
||||
⋮bar: t
|
||||
⋮
|
||||
⋮crate::foo::bar
|
||||
⋮Baz: t v
|
||||
⋮PrivateStructBar: t v
|
||||
⋮PrivateStructFoo: t v
|
||||
⋮bar: t
|
||||
"###
|
||||
expect![[r#"
|
||||
crate
|
||||
Baz: t v
|
||||
bar: t
|
||||
foo: t
|
||||
|
||||
crate::foo
|
||||
Baz: t v
|
||||
PrivateStructFoo: t v
|
||||
bar: t
|
||||
|
||||
crate::foo::bar
|
||||
Baz: t v
|
||||
PrivateStructBar: t v
|
||||
PrivateStructFoo: t v
|
||||
bar: t
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn glob_privacy_2() {
|
||||
let map = def_map(
|
||||
check(
|
||||
r"
|
||||
//- /lib.rs
|
||||
mod foo;
|
||||
|
@@ -131,203 +128,177 @@ fn glob_privacy_2() {
|
|||
struct PrivateBar;
|
||||
pub(crate) struct PubCrateStruct;
|
||||
",
|
||||
);
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮Foo: t
|
||||
⋮PubCrateStruct: t v
|
||||
⋮foo: t
|
||||
⋮
|
||||
⋮crate::foo
|
||||
⋮Foo: t v
|
||||
⋮bar: t
|
||||
⋮
|
||||
⋮crate::foo::bar
|
||||
⋮PrivateBar: t v
|
||||
⋮PrivateBaz: t v
|
||||
⋮PubCrateStruct: t v
|
||||
"###
|
||||
expect![[r#"
|
||||
crate
|
||||
Foo: t
|
||||
PubCrateStruct: t v
|
||||
foo: t
|
||||
|
||||
crate::foo
|
||||
Foo: t v
|
||||
bar: t
|
||||
|
||||
crate::foo::bar
|
||||
PrivateBar: t v
|
||||
PrivateBaz: t v
|
||||
PubCrateStruct: t v
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn glob_across_crates() {
|
||||
mark::check!(glob_across_crates);
|
||||
let map = def_map(
|
||||
r"
|
||||
check(
|
||||
r#"
|
||||
//- /main.rs crate:main deps:test_crate
|
||||
use test_crate::*;
|
||||
|
||||
//- /lib.rs crate:test_crate
|
||||
pub struct Baz;
|
||||
",
|
||||
);
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮Baz: t v
|
||||
"###
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
Baz: t v
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn glob_privacy_across_crates() {
|
||||
let map = def_map(
|
||||
r"
|
||||
check(
|
||||
r#"
|
||||
//- /main.rs crate:main deps:test_crate
|
||||
use test_crate::*;
|
||||
|
||||
//- /lib.rs crate:test_crate
|
||||
pub struct Baz;
|
||||
struct Foo;
|
||||
",
|
||||
);
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮Baz: t v
|
||||
"###
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
Baz: t v
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn glob_enum() {
|
||||
mark::check!(glob_enum);
|
||||
let map = def_map(
|
||||
"
|
||||
//- /lib.rs
|
||||
enum Foo {
|
||||
Bar, Baz
|
||||
}
|
||||
check(
|
||||
r#"
|
||||
enum Foo { Bar, Baz }
|
||||
use self::Foo::*;
|
||||
",
|
||||
);
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮Bar: t v
|
||||
⋮Baz: t v
|
||||
⋮Foo: t
|
||||
"###
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
Bar: t v
|
||||
Baz: t v
|
||||
Foo: t
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn glob_enum_group() {
|
||||
mark::check!(glob_enum_group);
|
||||
let map = def_map(
|
||||
r"
|
||||
//- /lib.rs
|
||||
enum Foo {
|
||||
Bar, Baz
|
||||
}
|
||||
check(
|
||||
r#"
|
||||
enum Foo { Bar, Baz }
|
||||
use self::Foo::{*};
|
||||
",
|
||||
);
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮Bar: t v
|
||||
⋮Baz: t v
|
||||
⋮Foo: t
|
||||
"###
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
Bar: t v
|
||||
Baz: t v
|
||||
Foo: t
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn glob_shadowed_def() {
|
||||
mark::check!(import_shadowed);
|
||||
let map = def_map(
|
||||
r###"
|
||||
check(
|
||||
r#"
|
||||
//- /lib.rs
|
||||
mod foo;
|
||||
mod bar;
|
||||
|
||||
use foo::*;
|
||||
use bar::baz;
|
||||
|
||||
use baz::Bar;
|
||||
|
||||
//- /foo.rs
|
||||
pub mod baz {
|
||||
pub struct Foo;
|
||||
}
|
||||
pub mod baz { pub struct Foo; }
|
||||
|
||||
//- /bar.rs
|
||||
pub mod baz {
|
||||
pub struct Bar;
|
||||
}
|
||||
"###,
|
||||
);
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮Bar: t v
|
||||
⋮bar: t
|
||||
⋮baz: t
|
||||
⋮foo: t
|
||||
⋮
|
||||
⋮crate::bar
|
||||
⋮baz: t
|
||||
⋮
|
||||
⋮crate::bar::baz
|
||||
⋮Bar: t v
|
||||
⋮
|
||||
⋮crate::foo
|
||||
⋮baz: t
|
||||
⋮
|
||||
⋮crate::foo::baz
|
||||
⋮Foo: t v
|
||||
"###
|
||||
pub mod baz { pub struct Bar; }
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
Bar: t v
|
||||
bar: t
|
||||
baz: t
|
||||
foo: t
|
||||
|
||||
crate::bar
|
||||
baz: t
|
||||
|
||||
crate::bar::baz
|
||||
Bar: t v
|
||||
|
||||
crate::foo
|
||||
baz: t
|
||||
|
||||
crate::foo::baz
|
||||
Foo: t v
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn glob_shadowed_def_reversed() {
|
||||
let map = def_map(
|
||||
r###"
|
||||
check(
|
||||
r#"
|
||||
//- /lib.rs
|
||||
mod foo;
|
||||
mod bar;
|
||||
|
||||
use bar::baz;
|
||||
use foo::*;
|
||||
|
||||
use baz::Bar;
|
||||
|
||||
//- /foo.rs
|
||||
pub mod baz {
|
||||
pub struct Foo;
|
||||
}
|
||||
pub mod baz { pub struct Foo; }
|
||||
|
||||
//- /bar.rs
|
||||
pub mod baz {
|
||||
pub struct Bar;
|
||||
}
|
||||
"###,
|
||||
);
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮Bar: t v
|
||||
⋮bar: t
|
||||
⋮baz: t
|
||||
⋮foo: t
|
||||
⋮
|
||||
⋮crate::bar
|
||||
⋮baz: t
|
||||
⋮
|
||||
⋮crate::bar::baz
|
||||
⋮Bar: t v
|
||||
⋮
|
||||
⋮crate::foo
|
||||
⋮baz: t
|
||||
⋮
|
||||
⋮crate::foo::baz
|
||||
⋮Foo: t v
|
||||
"###
|
||||
pub mod baz { pub struct Bar; }
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
Bar: t v
|
||||
bar: t
|
||||
baz: t
|
||||
foo: t
|
||||
|
||||
crate::bar
|
||||
baz: t
|
||||
|
||||
crate::bar::baz
|
||||
Bar: t v
|
||||
|
||||
crate::foo
|
||||
baz: t
|
||||
|
||||
crate::foo::baz
|
||||
Foo: t v
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn glob_shadowed_def_dependencies() {
|
||||
let map = def_map(
|
||||
r###"
|
||||
//- /lib.rs
|
||||
check(
|
||||
r#"
|
||||
mod a { pub mod foo { pub struct X; } }
|
||||
mod b { pub use super::a::foo; }
|
||||
mod c { pub mod foo { pub struct Y; } }
|
||||
|
@@ -336,33 +307,32 @@ fn glob_shadowed_def_dependencies() {
|
|||
use super::b::*;
|
||||
use foo::Y;
|
||||
}
|
||||
"###,
|
||||
);
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮a: t
|
||||
⋮b: t
|
||||
⋮c: t
|
||||
⋮d: t
|
||||
⋮
|
||||
⋮crate::d
|
||||
⋮Y: t v
|
||||
⋮foo: t
|
||||
⋮
|
||||
⋮crate::c
|
||||
⋮foo: t
|
||||
⋮
|
||||
⋮crate::c::foo
|
||||
⋮Y: t v
|
||||
⋮
|
||||
⋮crate::b
|
||||
⋮foo: t
|
||||
⋮
|
||||
⋮crate::a
|
||||
⋮foo: t
|
||||
⋮
|
||||
⋮crate::a::foo
|
||||
⋮X: t v
|
||||
"###
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
a: t
|
||||
b: t
|
||||
c: t
|
||||
d: t
|
||||
|
||||
crate::d
|
||||
Y: t v
|
||||
foo: t
|
||||
|
||||
crate::c
|
||||
foo: t
|
||||
|
||||
crate::c::foo
|
||||
Y: t v
|
||||
|
||||
crate::b
|
||||
foo: t
|
||||
|
||||
crate::a
|
||||
foo: t
|
||||
|
||||
crate::a::foo
|
||||
X: t v
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
|
|
@@ -2,8 +2,8 @@ use super::*;
|
|||
|
||||
#[test]
|
||||
fn macro_rules_are_globally_visible() {
|
||||
let map = def_map(
|
||||
r"
|
||||
check(
|
||||
r#"
|
||||
//- /lib.rs
|
||||
macro_rules! structs {
|
||||
($($i:ident),*) => {
|
||||
|
@@ -15,32 +15,29 @@ fn macro_rules_are_globally_visible() {
|
|||
|
||||
//- /nested.rs
|
||||
structs!(Bar, Baz);
|
||||
",
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
Foo: t
|
||||
nested: t
|
||||
|
||||
crate::nested
|
||||
Bar: t
|
||||
Baz: t
|
||||
"#]],
|
||||
);
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮Foo: t
|
||||
⋮nested: t
|
||||
⋮
|
||||
⋮crate::nested
|
||||
⋮Bar: t
|
||||
⋮Baz: t
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn macro_rules_can_define_modules() {
|
||||
let map = def_map(
|
||||
r"
|
||||
check(
|
||||
r#"
|
||||
//- /lib.rs
|
||||
macro_rules! m {
|
||||
($name:ident) => { mod $name; }
|
||||
}
|
||||
m!(n1);
|
||||
|
||||
mod m {
|
||||
m!(n3)
|
||||
}
|
||||
mod m { m!(n3) }
|
||||
|
||||
//- /n1.rs
|
||||
m!(n2)
|
||||
|
@@ -48,31 +45,31 @@ fn macro_rules_can_define_modules() {
|
|||
struct X;
|
||||
//- /m/n3.rs
|
||||
struct Y;
|
||||
",
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
m: t
|
||||
n1: t
|
||||
|
||||
crate::m
|
||||
n3: t
|
||||
|
||||
crate::m::n3
|
||||
Y: t v
|
||||
|
||||
crate::n1
|
||||
n2: t
|
||||
|
||||
crate::n1::n2
|
||||
X: t v
|
||||
"#]],
|
||||
);
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮m: t
|
||||
⋮n1: t
|
||||
⋮
|
||||
⋮crate::m
|
||||
⋮n3: t
|
||||
⋮
|
||||
⋮crate::m::n3
|
||||
⋮Y: t v
|
||||
⋮
|
||||
⋮crate::n1
|
||||
⋮n2: t
|
||||
⋮
|
||||
⋮crate::n1::n2
|
||||
⋮X: t v
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn macro_rules_from_other_crates_are_visible() {
|
||||
let map = def_map(
|
||||
"
|
||||
check(
|
||||
r#"
|
||||
//- /main.rs crate:main deps:foo
|
||||
foo::structs!(Foo, Bar)
|
||||
mod bar;
|
||||
|
@@ -87,25 +84,25 @@ fn macro_rules_from_other_crates_are_visible() {
|
|||
$(struct $i { field: u32 } )*
|
||||
}
|
||||
}
|
||||
",
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
Bar: t
|
||||
Foo: t
|
||||
bar: t
|
||||
|
||||
crate::bar
|
||||
Bar: t
|
||||
Foo: t
|
||||
bar: t
|
||||
"#]],
|
||||
);
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮Bar: t
|
||||
⋮Foo: t
|
||||
⋮bar: t
|
||||
⋮
|
||||
⋮crate::bar
|
||||
⋮Bar: t
|
||||
⋮Foo: t
|
||||
⋮bar: t
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn macro_rules_export_with_local_inner_macros_are_visible() {
|
||||
let map = def_map(
|
||||
"
|
||||
check(
|
||||
r#"
|
||||
//- /main.rs crate:main deps:foo
|
||||
foo::structs!(Foo, Bar)
|
||||
mod bar;
|
||||
|
@@ -120,30 +117,32 @@ fn macro_rules_export_with_local_inner_macros_are_visible() {
|
|||
$(struct $i { field: u32 } )*
|
||||
}
|
||||
}
|
||||
",
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
Bar: t
|
||||
Foo: t
|
||||
bar: t
|
||||
|
||||
crate::bar
|
||||
Bar: t
|
||||
Foo: t
|
||||
bar: t
|
||||
"#]],
|
||||
);
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮Bar: t
|
||||
⋮Foo: t
|
||||
⋮bar: t
|
||||
⋮
|
||||
⋮crate::bar
|
||||
⋮Bar: t
|
||||
⋮Foo: t
|
||||
⋮bar: t
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn local_inner_macros_makes_local_macros_usable() {
|
||||
let map = def_map(
|
||||
"
|
||||
check(
|
||||
r#"
|
||||
//- /main.rs crate:main deps:foo
|
||||
foo::structs!(Foo, Bar);
|
||||
mod bar;
|
||||
|
||||
//- /bar.rs
|
||||
use crate::*;
|
||||
|
||||
//- /lib.rs crate:foo
|
||||
#[macro_export(local_inner_macros)]
|
||||
macro_rules! structs {
|
||||
|
@@ -157,32 +156,31 @@ fn local_inner_macros_makes_local_macros_usable() {
|
|||
$(struct $i { field: u32 } )*
|
||||
}
|
||||
}
|
||||
",
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
Bar: t
|
||||
Foo: t
|
||||
bar: t
|
||||
|
||||
crate::bar
|
||||
Bar: t
|
||||
Foo: t
|
||||
bar: t
|
||||
"#]],
|
||||
);
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮Bar: t
|
||||
⋮Foo: t
|
||||
⋮bar: t
|
||||
⋮
|
||||
⋮crate::bar
|
||||
⋮Bar: t
|
||||
⋮Foo: t
|
||||
⋮bar: t
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn unexpanded_macro_should_expand_by_fixedpoint_loop() {
|
||||
let map = def_map(
|
||||
"
|
||||
check(
|
||||
r#"
|
||||
//- /main.rs crate:main deps:foo
|
||||
macro_rules! baz {
|
||||
() => {
|
||||
use foo::bar;
|
||||
}
|
||||
}
|
||||
|
||||
foo!();
|
||||
bar!();
|
||||
baz!();
|
||||
|
@@ -200,21 +198,21 @@ fn unexpanded_macro_should_expand_by_fixedpoint_loop() {
|
|||
use foo::foo;
|
||||
}
|
||||
}
|
||||
",
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
Foo: t
|
||||
bar: m
|
||||
foo: m
|
||||
"#]],
|
||||
);
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮Foo: t
|
||||
⋮bar: m
|
||||
⋮foo: m
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn macro_rules_from_other_crates_are_visible_with_macro_use() {
|
||||
mark::check!(macro_rules_from_other_crates_are_visible_with_macro_use);
|
||||
let map = def_map(
|
||||
"
|
||||
check(
|
||||
r#"
|
||||
//- /main.rs crate:main deps:foo
|
||||
structs!(Foo);
|
||||
structs_priv!(Bar);
|
||||
|
@@ -246,25 +244,25 @@ fn macro_rules_from_other_crates_are_visible_with_macro_use() {
|
|||
($i:ident) => { struct $i; }
|
||||
}
|
||||
}
|
||||
",
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
Bar: t v
|
||||
Foo: t v
|
||||
bar: t
|
||||
foo: t
|
||||
|
||||
crate::bar
|
||||
Baz: t v
|
||||
"#]],
|
||||
);
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮Bar: t v
|
||||
⋮Foo: t v
|
||||
⋮bar: t
|
||||
⋮foo: t
|
||||
⋮
|
||||
⋮crate::bar
|
||||
⋮Baz: t v
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prelude_is_macro_use() {
|
||||
mark::check!(prelude_is_macro_use);
|
||||
let map = def_map(
|
||||
"
|
||||
check(
|
||||
r#"
|
||||
//- /main.rs crate:main deps:foo
|
||||
structs!(Foo);
|
||||
structs_priv!(Bar);
|
||||
|
@@ -299,25 +297,24 @@ fn prelude_is_macro_use() {
|
|||
macro_rules! structs_outside {
|
||||
($i:ident) => { struct $i; }
|
||||
}
|
||||
",
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
Bar: t v
|
||||
Foo: t v
|
||||
Out: t v
|
||||
bar: t
|
||||
|
||||
crate::bar
|
||||
Baz: t v
|
||||
"#]],
|
||||
);
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮Bar: t v
|
||||
⋮Foo: t v
|
||||
⋮Out: t v
|
||||
⋮bar: t
|
||||
⋮
|
||||
⋮crate::bar
|
||||
⋮Baz: t v
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prelude_cycle() {
|
||||
let map = def_map(
|
||||
"
|
||||
//- /lib.rs
|
||||
check(
|
||||
r#"
|
||||
#[prelude_import]
|
||||
use self::prelude::*;
|
||||
|
||||
|
@@ -328,27 +325,25 @@ fn prelude_cycle() {
|
|||
() => (mod foo {})
|
||||
}
|
||||
}
|
||||
",
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
prelude: t
|
||||
|
||||
crate::prelude
|
||||
"#]],
|
||||
);
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮prelude: t
|
||||
⋮
|
||||
⋮crate::prelude
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn plain_macros_are_legacy_textual_scoped() {
|
||||
let map = def_map(
|
||||
check(
|
||||
r#"
|
||||
//- /main.rs
|
||||
mod m1;
|
||||
bar!(NotFoundNotMacroUse);
|
||||
|
||||
mod m2 {
|
||||
foo!(NotFoundBeforeInside2);
|
||||
}
|
||||
mod m2 { foo!(NotFoundBeforeInside2); }
|
||||
|
||||
macro_rules! foo {
|
||||
($x:ident) => { struct $x; }
|
||||
|
@@ -402,46 +397,45 @@ fn plain_macros_are_legacy_textual_scoped() {
|
|||
($x:ident) => { struct $x; }
|
||||
}
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
Ok: t v
|
||||
OkAfter: t v
|
||||
OkShadowStop: t v
|
||||
m1: t
|
||||
m2: t
|
||||
m3: t
|
||||
m5: t
|
||||
m7: t
|
||||
ok_double_macro_use_shadow: v
|
||||
|
||||
crate::m7
|
||||
|
||||
crate::m1
|
||||
|
||||
crate::m5
|
||||
m6: t
|
||||
|
||||
crate::m5::m6
|
||||
|
||||
crate::m2
|
||||
|
||||
crate::m3
|
||||
OkAfterInside: t v
|
||||
OkMacroUse: t v
|
||||
m4: t
|
||||
ok_shadow: v
|
||||
|
||||
crate::m3::m4
|
||||
ok_shadow_deep: v
|
||||
"#]],
|
||||
);
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮Ok: t v
|
||||
⋮OkAfter: t v
|
||||
⋮OkShadowStop: t v
|
||||
⋮m1: t
|
||||
⋮m2: t
|
||||
⋮m3: t
|
||||
⋮m5: t
|
||||
⋮m7: t
|
||||
⋮ok_double_macro_use_shadow: v
|
||||
⋮
|
||||
⋮crate::m7
|
||||
⋮
|
||||
⋮crate::m1
|
||||
⋮
|
||||
⋮crate::m5
|
||||
⋮m6: t
|
||||
⋮
|
||||
⋮crate::m5::m6
|
||||
⋮
|
||||
⋮crate::m2
|
||||
⋮
|
||||
⋮crate::m3
|
||||
⋮OkAfterInside: t v
|
||||
⋮OkMacroUse: t v
|
||||
⋮m4: t
|
||||
⋮ok_shadow: v
|
||||
⋮
|
||||
⋮crate::m3::m4
|
||||
⋮ok_shadow_deep: v
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn type_value_macro_live_in_different_scopes() {
|
||||
let map = def_map(
|
||||
"
|
||||
//- /main.rs
|
||||
check(
|
||||
r#"
|
||||
#[macro_export]
|
||||
macro_rules! foo {
|
||||
($x:ident) => { type $x = (); }
|
||||
|
@@ -452,20 +446,20 @@ fn type_value_macro_live_in_different_scopes() {
|
|||
|
||||
use self::foo as baz;
|
||||
fn baz() {}
|
||||
",
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
bar: t m
|
||||
baz: t v m
|
||||
foo: t m
|
||||
"#]],
|
||||
);
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮bar: t m
|
||||
⋮baz: t v m
|
||||
⋮foo: t m
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn macro_use_can_be_aliased() {
|
||||
let map = def_map(
|
||||
"
|
||||
check(
|
||||
r#"
|
||||
//- /main.rs crate:main deps:foo
|
||||
#[macro_use]
|
||||
extern crate foo;
|
||||
|
@@ -482,21 +476,20 @@ fn macro_use_can_be_aliased() {
|
|||
($x:ident) => { struct $x; }
|
||||
}
|
||||
}
|
||||
",
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
Alias: t v
|
||||
Direct: t v
|
||||
foo: t
|
||||
"#]],
|
||||
);
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮Alias: t v
|
||||
⋮Direct: t v
|
||||
⋮foo: t
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn path_qualified_macros() {
|
||||
let map = def_map(
|
||||
"
|
||||
//- /main.rs
|
||||
check(
|
||||
r#"
|
||||
macro_rules! foo {
|
||||
($x:ident) => { struct $x; }
|
||||
}
|
||||
|
@@ -516,37 +509,36 @@ fn path_qualified_macros() {
|
|||
macro_rules! bar {
|
||||
($x:ident) => { struct $x; }
|
||||
}
|
||||
|
||||
pub use bar as alias1;
|
||||
pub use super::bar as alias2;
|
||||
pub use crate::bar as alias3;
|
||||
pub use self::bar as not_found;
|
||||
}
|
||||
",
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
OkAliasCrate: t v
|
||||
OkAliasPlain: t v
|
||||
OkAliasSuper: t v
|
||||
OkCrate: t v
|
||||
OkPlain: t v
|
||||
bar: m
|
||||
m: t
|
||||
|
||||
crate::m
|
||||
alias1: m
|
||||
alias2: m
|
||||
alias3: m
|
||||
not_found: _
|
||||
"#]],
|
||||
);
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮OkAliasCrate: t v
|
||||
⋮OkAliasPlain: t v
|
||||
⋮OkAliasSuper: t v
|
||||
⋮OkCrate: t v
|
||||
⋮OkPlain: t v
|
||||
⋮bar: m
|
||||
⋮m: t
|
||||
⋮
|
||||
⋮crate::m
|
||||
⋮alias1: m
|
||||
⋮alias2: m
|
||||
⋮alias3: m
|
||||
⋮not_found: _
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn macro_dollar_crate_is_correct_in_item() {
|
||||
mark::check!(macro_dollar_crate_self);
|
||||
let map = def_map(
|
||||
"
|
||||
check(
|
||||
r#"
|
||||
//- /main.rs crate:main deps:foo
|
||||
#[macro_use]
|
||||
extern crate foo;
|
||||
|
@@ -585,26 +577,26 @@ fn macro_dollar_crate_is_correct_in_item() {
|
|||
|
||||
struct Bar;
|
||||
struct Baz;
|
||||
",
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
Bar: t v
|
||||
Baz: t v
|
||||
Foo: t v
|
||||
FooSelf: t v
|
||||
foo: t
|
||||
m: t
|
||||
|
||||
crate::m
|
||||
"#]],
|
||||
);
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮Bar: t v
|
||||
⋮Baz: t v
|
||||
⋮Foo: t v
|
||||
⋮FooSelf: t v
|
||||
⋮foo: t
|
||||
⋮m: t
|
||||
⋮
|
||||
⋮crate::m
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn macro_dollar_crate_is_correct_in_indirect_deps() {
|
||||
mark::check!(macro_dollar_crate_other);
|
||||
// From std
|
||||
let map = def_map(
|
||||
check(
|
||||
r#"
|
||||
//- /main.rs crate:main deps:std
|
||||
foo!();
|
||||
|
@@ -630,27 +622,15 @@ fn macro_dollar_crate_is_correct_in_indirect_deps() {
|
|||
|
||||
pub struct bar;
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
bar: t v
|
||||
"#]],
|
||||
);
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮bar: t v
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn expand_derive() {
|
||||
let map = compute_crate_def_map(
|
||||
"
|
||||
//- /main.rs
|
||||
#[derive(Clone)]
|
||||
struct Foo;
|
||||
",
|
||||
);
|
||||
assert_eq!(map.modules[map.root].scope.impls().len(), 1);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn expand_multiple_derive() {
|
||||
let map = compute_crate_def_map(
|
||||
"
|
||||
//- /main.rs
|
||||
|
@@ -660,3 +640,30 @@ fn expand_multiple_derive() {
|
|||
);
|
||||
assert_eq!(map.modules[map.root].scope.impls().len(), 2);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn macro_expansion_overflow() {
|
||||
mark::check!(macro_expansion_overflow);
|
||||
check(
|
||||
r#"
|
||||
macro_rules! a {
|
||||
($e:expr; $($t:tt)*) => {
|
||||
b!($($t)*);
|
||||
};
|
||||
() => {};
|
||||
}
|
||||
|
||||
macro_rules! b {
|
||||
(static = $e:expr; $($t:tt)*) => {
|
||||
a!($e; $($t)*);
|
||||
};
|
||||
() => {};
|
||||
}
|
||||
|
||||
b! { static = #[] (); }
|
||||
"#,
|
||||
expect![[r#"
|
||||
crate
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
|