mirror of
https://github.com/rust-lang/rust-analyzer
synced 2024-12-25 20:43:21 +00:00
Doctest autoimport
This commit is contained in:
parent
d1e8b8d134
commit
755077e372
5 changed files with 42 additions and 20 deletions
|
@ -18,14 +18,16 @@ use ra_ide_db::imports_locator::ImportsLocatorIde;
|
||||||
// fn main() {
|
// fn main() {
|
||||||
// let map = HashMap<|>::new();
|
// let map = HashMap<|>::new();
|
||||||
// }
|
// }
|
||||||
|
// # pub mod std { pub mod collections { pub struct HashMap { } } }
|
||||||
// ```
|
// ```
|
||||||
// ->
|
// ->
|
||||||
// ```
|
// ```
|
||||||
// use std::collections::HashMap;
|
// use std::collections::HashMap;
|
||||||
//
|
//
|
||||||
// fn main() {
|
// fn main() {
|
||||||
// let map = HashMap<|>::new();
|
// let map = HashMap::new();
|
||||||
// }
|
// }
|
||||||
|
// # pub mod std { pub mod collections { pub struct HashMap { } } }
|
||||||
// ```
|
// ```
|
||||||
pub(crate) fn auto_import(ctx: AssistCtx) -> Option<Assist> {
|
pub(crate) fn auto_import(ctx: AssistCtx) -> Option<Assist> {
|
||||||
let path_to_import: ast::Path = ctx.find_node_at_offset()?;
|
let path_to_import: ast::Path = ctx.find_node_at_offset()?;
|
||||||
|
|
|
@ -5,18 +5,12 @@
|
||||||
|
|
||||||
mod generated;
|
mod generated;
|
||||||
|
|
||||||
use ra_db::{fixture::WithFixture, FileRange};
|
use ra_db::FileRange;
|
||||||
use test_utils::{assert_eq_text, extract_range_or_offset};
|
use test_utils::{assert_eq_text, extract_range_or_offset};
|
||||||
|
|
||||||
use ra_ide_db::RootDatabase;
|
|
||||||
|
|
||||||
fn check(assist_id: &str, before: &str, after: &str) {
|
fn check(assist_id: &str, before: &str, after: &str) {
|
||||||
// FIXME we cannot get the imports search functionality here yet, but still need to generate a test and a doc for an assist
|
|
||||||
if assist_id == "auto_import" {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
let (selection, before) = extract_range_or_offset(before);
|
let (selection, before) = extract_range_or_offset(before);
|
||||||
let (db, file_id) = RootDatabase::with_single_file(&before);
|
let (db, file_id) = crate::helpers::with_single_file(&before);
|
||||||
let frange = FileRange { file_id, range: selection.into() };
|
let frange = FileRange { file_id, range: selection.into() };
|
||||||
|
|
||||||
let assist = crate::assists(&db, frange)
|
let assist = crate::assists(&db, frange)
|
||||||
|
|
|
@ -222,13 +222,15 @@ fn doctest_auto_import() {
|
||||||
fn main() {
|
fn main() {
|
||||||
let map = HashMap<|>::new();
|
let map = HashMap<|>::new();
|
||||||
}
|
}
|
||||||
|
pub mod std { pub mod collections { pub struct HashMap { } } }
|
||||||
"#####,
|
"#####,
|
||||||
r#####"
|
r#####"
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
|
|
||||||
fn main() {
|
fn main() {
|
||||||
let map = HashMap<|>::new();
|
let map = HashMap::new();
|
||||||
}
|
}
|
||||||
|
pub mod std { pub mod collections { pub struct HashMap { } } }
|
||||||
"#####,
|
"#####,
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
|
@ -223,7 +223,7 @@ fn main() {
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
|
|
||||||
fn main() {
|
fn main() {
|
||||||
let map = HashMap┃::new();
|
let map = HashMap::new();
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
|
@ -20,6 +20,28 @@ struct Assist {
|
||||||
after: String,
|
after: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Strips doc-test "hidden" lines from `text`.
///
/// A line is hidden when it is exactly `"#"` or begins with `"# "`.
/// Every surviving line is emitted with a trailing `'\n'`, so the
/// result always ends with a final newline (splitting on `'\n'` keeps
/// the empty tail segment, which maps to a lone newline).
fn hide_hash_comments(text: &str) -> String {
    let mut out = String::with_capacity(text.len());
    for line in text.split('\n') {
        // Skip the hash-hidden lines entirely.
        if line == "#" || line.starts_with("# ") {
            continue;
        }
        out.push_str(line);
        out.push('\n');
    }
    out
}
|
||||||
|
|
||||||
|
/// Un-hides doc-test hash comments in `text`.
///
/// `"# foo"` becomes `"foo"`, a bare `"#"` becomes an empty line, and
/// every other line is passed through unchanged. Each line is emitted
/// with a trailing `'\n'`, so the result keeps a final newline (the
/// empty tail segment from splitting on `'\n'` maps to a lone newline).
fn reveal_hash_comments(text: &str) -> String {
    let mut out = String::with_capacity(text.len());
    for line in text.split('\n') {
        let shown = match line {
            // A lone "#" marks an intentionally blank hidden line.
            "#" => "",
            // "# ..." hides real code; drop the two-byte marker.
            l if l.starts_with("# ") => &l[2..],
            l => l,
        };
        out.push_str(shown);
        out.push('\n');
    }
    out
}
|
||||||
|
|
||||||
fn collect_assists() -> Result<Vec<Assist>> {
|
fn collect_assists() -> Result<Vec<Assist>> {
|
||||||
let mut res = Vec::new();
|
let mut res = Vec::new();
|
||||||
for entry in fs::read_dir(project_root().join(codegen::ASSISTS_DIR))? {
|
for entry in fs::read_dir(project_root().join(codegen::ASSISTS_DIR))? {
|
||||||
|
@ -91,13 +113,14 @@ fn doctest_{}() {{
|
||||||
check(
|
check(
|
||||||
"{}",
|
"{}",
|
||||||
r#####"
|
r#####"
|
||||||
{}
|
{}"#####, r#####"
|
||||||
"#####, r#####"
|
{}"#####)
|
||||||
{}
|
|
||||||
"#####)
|
|
||||||
}}
|
}}
|
||||||
"######,
|
"######,
|
||||||
assist.id, assist.id, assist.before, assist.after
|
assist.id,
|
||||||
|
assist.id,
|
||||||
|
reveal_hash_comments(&assist.before),
|
||||||
|
reveal_hash_comments(&assist.after)
|
||||||
);
|
);
|
||||||
|
|
||||||
buf.push_str(&test)
|
buf.push_str(&test)
|
||||||
|
@ -123,12 +146,13 @@ fn generate_docs(assists: &[Assist], mode: Mode) -> Result<()> {
|
||||||
```rust
|
```rust
|
||||||
// BEFORE
|
// BEFORE
|
||||||
{}
|
{}
|
||||||
|
|
||||||
// AFTER
|
// AFTER
|
||||||
{}
|
{}```
|
||||||
```
|
|
||||||
",
|
",
|
||||||
assist.id, assist.doc, before, after
|
assist.id,
|
||||||
|
assist.doc,
|
||||||
|
hide_hash_comments(&before),
|
||||||
|
hide_hash_comments(&after)
|
||||||
);
|
);
|
||||||
buf.push_str(&docs);
|
buf.push_str(&docs);
|
||||||
}
|
}
|
||||||
|
|
Loading…
Reference in a new issue