Merge #9476
9476: internal: overhaul codegen r=matklad a=matklad
bors r+
🤖
Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
commit 336194c09b
32 changed files with 767 additions and 725 deletions
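This PR replaces the old `cargo xtask codegen` entry points with "sourcegen" tests: each generator becomes a `#[test]` named `sourcegen_*` that lives next to the code it generates, rewrites the generated file in place, and fails if the committed copy was stale. A minimal sketch of that pattern, assuming `sourcegen::ensure_file_contents` behaves roughly as its call sites below suggest (the body here is illustrative, not code from this commit):

```rust
use std::{fs, path::Path};

// Hypothetical stand-in for `sourcegen::ensure_file_contents`: refresh the
// checked-in file and fail the test so CI flags stale generated code.
fn ensure_file_contents(file: &Path, contents: &str) {
    let old = fs::read_to_string(file).unwrap_or_default();
    if old == contents {
        return; // up to date, the test passes
    }
    fs::write(file, contents).unwrap(); // regenerate in the working tree
    panic!("{} was stale and has been regenerated; commit the new version", file.display());
}
```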
Cargo.lock (generated, 20 changes)
@@ -600,6 +600,7 @@ dependencies = [
  "itertools",
  "profile",
  "rustc-hash",
+ "sourcegen",
  "stdx",
  "syntax",
  "test_utils",
@@ -621,10 +622,12 @@ dependencies = [
  "once_cell",
  "profile",
  "rustc-hash",
+ "sourcegen",
  "stdx",
  "syntax",
  "test_utils",
  "text_edit",
+ "xshell",
 ]

 [[package]]
@@ -662,6 +665,7 @@ dependencies = [
  "itertools",
  "profile",
  "rustc-hash",
+ "sourcegen",
  "stdx",
  "syntax",
  "test_utils",
@@ -1323,6 +1327,7 @@ dependencies = [
  "serde",
  "serde_json",
  "serde_path_to_error",
+ "sourcegen",
  "stdx",
  "syntax",
  "test_utils",
@@ -1518,6 +1523,13 @@ version = "1.0.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "45456094d1983e2ee2a18fdfebce3189fa451699d0502cb8e3b49dba5ba41451"

+[[package]]
+name = "sourcegen"
+version = "0.0.0"
+dependencies = [
+ "xshell",
+]
+
 [[package]]
 name = "stdx"
 version = "0.0.0"
@@ -1563,17 +1575,20 @@ dependencies = [
  "itertools",
  "once_cell",
  "parser",
+ "proc-macro2",
  "profile",
+ "quote",
  "rayon",
  "rowan",
  "rustc-ap-rustc_lexer",
  "rustc-hash",
  "serde",
  "smol_str",
+ "sourcegen",
  "stdx",
  "test_utils",
  "text_edit",
- "walkdir",
+ "ungrammar",
 ]

 [[package]]
@@ -1942,9 +1957,6 @@ version = "0.1.0"
 dependencies = [
  "anyhow",
  "flate2",
- "proc-macro2",
- "quote",
- "ungrammar",
  "walkdir",
  "write-json",
  "xflags",
@@ -24,4 +24,5 @@ hir = { path = "../hir", version = "0.0.0" }

 [dev-dependencies]
 test_utils = { path = "../test_utils" }
+sourcegen = { path = "../sourcegen" }
 expect-test = "1.1"
@@ -1,3 +1,4 @@
+mod sourcegen;
 mod generated;

 use expect_test::expect;
@@ -1,4 +1,4 @@
-//! Generated file, do not edit by hand, see `xtask/src/codegen`
+//! Generated by `sourcegen_assists_docs`, do not edit by hand.

 use super::check_doc_test;
@@ -1,49 +1,84 @@
 //! Generates `assists.md` documentation.

-use std::{fmt, path::Path};
+use std::{fmt, fs, path::Path};

-use xshell::write_file;
+use test_utils::project_root;

-use crate::{
-    codegen::{self, extract_comment_blocks_with_empty_lines, reformat, Location, PREAMBLE},
-    project_root, rust_files_in, Result,
-};
-
-pub(crate) fn generate_assists_tests() -> Result<()> {
-    let assists = Assist::collect()?;
-    generate_tests(&assists)
-}
-
-pub(crate) fn generate_assists_docs() -> Result<()> {
-    let assists = Assist::collect()?;
-    let contents = assists.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n");
-    let contents = format!("//{}\n{}\n", PREAMBLE, contents.trim());
-    let dst = project_root().join("docs/user/generated_assists.adoc");
-    write_file(dst, &contents)?;
-    Ok(())
+#[test]
+fn sourcegen_assists_docs() {
+    let assists = Assist::collect();
+
+    {
+        // Generate doctests.
+
+        let mut buf = "
+use super::check_doc_test;
+"
+        .to_string();
+        for assist in assists.iter() {
+            let test = format!(
+                r######"
+#[test]
+fn doctest_{}() {{
+    check_doc_test(
+        "{}",
+r#####"
+{}"#####, r#####"
+{}"#####)
+}}
+"######,
+                assist.id,
+                assist.id,
+                reveal_hash_comments(&assist.before),
+                reveal_hash_comments(&assist.after)
+            );
+
+            buf.push_str(&test)
+        }
+        let buf = sourcegen::add_preamble("sourcegen_assists_docs", sourcegen::reformat(buf));
+        sourcegen::ensure_file_contents(
+            &project_root().join("crates/ide_assists/src/tests/generated.rs"),
+            &buf,
+        );
+    }
+
+    {
+        // Generate assists manual. Note that we do _not_ commit manual to the
+        // git repo. Instead, `cargo xtask release` runs this test before making
+        // a release.
+
+        let contents = sourcegen::add_preamble(
+            "sourcegen_assists_docs",
+            assists.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n"),
+        );
+        let dst = project_root().join("docs/user/generated_assists.adoc");
+        fs::write(dst, contents).unwrap();
+    }
 }

 #[derive(Debug)]
 struct Assist {
     id: String,
-    location: Location,
+    location: sourcegen::Location,
     doc: String,
     before: String,
     after: String,
 }

 impl Assist {
-    fn collect() -> Result<Vec<Assist>> {
+    fn collect() -> Vec<Assist> {
+        let handlers_dir = project_root().join("crates/ide_assists/src/handlers");
+
         let mut res = Vec::new();
-        for path in rust_files_in(&project_root().join("crates/ide_assists/src/handlers")) {
-            collect_file(&mut res, path.as_path())?;
+        for path in sourcegen::list_rust_files(&handlers_dir) {
+            collect_file(&mut res, path.as_path());
         }
         res.sort_by(|lhs, rhs| lhs.id.cmp(&rhs.id));
-        return Ok(res);
+        return res;

-        fn collect_file(acc: &mut Vec<Assist>, path: &Path) -> Result<()> {
-            let text = xshell::read_file(path)?;
-            let comment_blocks = extract_comment_blocks_with_empty_lines("Assist", &text);
+        fn collect_file(acc: &mut Vec<Assist>, path: &Path) {
+            let text = fs::read_to_string(path).unwrap();
+            let comment_blocks = sourcegen::CommentBlock::extract("Assist", &text);

             for block in comment_blocks {
                 // FIXME: doesn't support blank lines yet, need to tweak
@@ -68,21 +103,20 @@ impl Assist {
             assert_eq!(lines.next().unwrap().as_str(), "->");
             assert_eq!(lines.next().unwrap().as_str(), "```");
             let after = take_until(lines.by_ref(), "```");
-            let location = Location::new(path.to_path_buf(), block.line);
+            let location = sourcegen::Location { file: path.to_path_buf(), line: block.line };
             acc.push(Assist { id, location, doc, before, after })
         }
+        }

         fn take_until<'a>(lines: impl Iterator<Item = &'a String>, marker: &str) -> String {
             let mut buf = Vec::new();
             for line in lines {
                 if line == marker {
                     break;
-                }
-                buf.push(line.clone());
                 }
-            buf.join("\n")
+                buf.push(line.clone());
             }
-            Ok(())
+            buf.join("\n")
         }
     }
 }
@@ -114,36 +148,6 @@ impl fmt::Display for Assist {
     }
 }

-fn generate_tests(assists: &[Assist]) -> Result<()> {
-    let mut buf = String::from("use super::check_doc_test;\n");
-
-    for assist in assists.iter() {
-        let test = format!(
-            r######"
-#[test]
-fn doctest_{}() {{
-    check_doc_test(
-        "{}",
-r#####"
-{}"#####, r#####"
-{}"#####)
-}}
-"######,
-            assist.id,
-            assist.id,
-            reveal_hash_comments(&assist.before),
-            reveal_hash_comments(&assist.after)
-        );
-
-        buf.push_str(&test)
-    }
-    let buf = reformat(&buf)?;
-    codegen::ensure_file_contents(
-        &project_root().join("crates/ide_assists/src/tests/generated.rs"),
-        &buf,
-    )
-}
-
 fn hide_hash_comments(text: &str) -> String {
     text.split('\n') // want final newline
         .filter(|&it| !(it.starts_with("# ") || it == "#"))
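The hunk above (and several below) drive the new `sourcegen` crate through a small surface: `list_rust_files`, `list_files`, `CommentBlock::extract`, `Location`, `reformat`, `add_preamble`, and `ensure_file_contents`. A rough interface sketch inferred from those call sites; the signatures are assumptions, and the real definitions live in the new `crates/sourcegen`:

```rust
use std::path::{Path, PathBuf};

// A specially tagged comment block, e.g. one starting with `// Assist: id`.
pub struct CommentBlock {
    pub id: String,            // text after the tag
    pub line: usize,           // line the block starts on
    pub contents: Vec<String>, // remaining comment lines
}

impl CommentBlock {
    /// Collects all blocks tagged `tag` (e.g. "Assist", "Diagnostic") from `text`.
    pub fn extract(tag: &str, text: &str) -> Vec<CommentBlock> {
        unimplemented!()
    }
}

pub struct Location {
    pub file: PathBuf,
    pub line: usize,
}

/// All files under `dir`, recursively.
pub fn list_files(dir: &Path) -> Vec<PathBuf> {
    unimplemented!()
}

/// Like `list_files`, restricted to `.rs` files.
pub fn list_rust_files(dir: &Path) -> Vec<PathBuf> {
    unimplemented!()
}

/// Runs the generated text through rustfmt.
pub fn reformat(text: String) -> String {
    unimplemented!()
}

/// Prepends a "Generated by `<generator>`, do not edit by hand." header.
pub fn add_preamble(generator: &'static str, text: String) -> String {
    unimplemented!()
}

/// Overwrites `file` if stale and fails the calling test (see the sketch near the top).
pub fn ensure_file_contents(file: &Path, contents: &str) {
    unimplemented!()
}
```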
@@ -29,5 +29,8 @@ profile = { path = "../profile", version = "0.0.0" }
 hir = { path = "../hir", version = "0.0.0" }

 [dev-dependencies]
-test_utils = { path = "../test_utils" }
 expect-test = "1.1"
+xshell = "0.1"
+
+test_utils = { path = "../test_utils" }
+sourcegen = { path = "../sourcegen" }
@@ -10,6 +10,7 @@ mod items;
 mod pattern;
 mod type_pos;
 mod predicate;
+mod sourcegen;

 use std::mem;
@@ -1,53 +1,55 @@
 //! Generates descriptors structure for unstable feature from Unstable Book
-use std::borrow::Cow;
-use std::fmt::Write;
-use std::path::{Path, PathBuf};
+use std::{
+    borrow::Cow,
+    fs,
+    path::{Path, PathBuf},
+};

-use walkdir::WalkDir;
-use xshell::{cmd, read_file};
+use stdx::format_to;
+use test_utils::project_root;
+use xshell::cmd;

-use crate::codegen::{ensure_file_contents, project_root, reformat, Result};
-
-pub(crate) fn generate_lint_completions() -> Result<()> {
-    if !project_root().join("./target/rust").exists() {
-        cmd!("git clone --depth=1 https://github.com/rust-lang/rust ./target/rust").run()?;
+/// This clones rustc repo, and so is not worth to keep up-to-date. We update
+/// manually by un-ignoring the test from time to time.
+#[test]
+#[ignore]
+fn sourcegen_lint_completions() {
+    let rust_repo = project_root().join("./target/rust");
+    if !rust_repo.exists() {
+        cmd!("git clone --depth=1 https://github.com/rust-lang/rust {rust_repo}").run().unwrap();
     }

-    let mut contents = String::from(
-        r#"pub struct Lint {
+    let mut contents = r"
+pub struct Lint {
     pub label: &'static str,
     pub description: &'static str,
 }
-        "#,
-    );
-    generate_lint_descriptor(&mut contents)?;
+
+"
+    .to_string();
+    generate_lint_descriptor(&mut contents);
     contents.push('\n');

-    generate_feature_descriptor(&mut contents, "./target/rust/src/doc/unstable-book/src".into())?;
+    generate_feature_descriptor(&mut contents, "./target/rust/src/doc/unstable-book/src".into());
     contents.push('\n');

-    cmd!("curl https://rust-lang.github.io/rust-clippy/master/lints.json --output ./target/clippy_lints.json").run()?;
-    generate_descriptor_clippy(&mut contents, Path::new("./target/clippy_lints.json"))?;
-    let contents = reformat(&contents)?;
+    cmd!("curl https://rust-lang.github.io/rust-clippy/master/lints.json --output ./target/clippy_lints.json").run().unwrap();
+    generate_descriptor_clippy(&mut contents, Path::new("./target/clippy_lints.json"));
+    let contents =
+        sourcegen::add_preamble("sourcegen_lint_completions", sourcegen::reformat(contents));

     let destination = project_root().join("crates/ide_db/src/helpers/generated_lints.rs");
-    ensure_file_contents(destination.as_path(), &contents)?;
-
-    Ok(())
+    sourcegen::ensure_file_contents(destination.as_path(), &contents);
 }

-fn generate_lint_descriptor(buf: &mut String) -> Result<()> {
-    let stdout = cmd!("rustc -W help").read()?;
-    let start_lints =
-        stdout.find("---- ------- -------").ok_or_else(|| anyhow::format_err!(""))?;
-    let start_lint_groups =
-        stdout.find("---- ---------").ok_or_else(|| anyhow::format_err!(""))?;
-    let end_lints =
-        stdout.find("Lint groups provided by rustc:").ok_or_else(|| anyhow::format_err!(""))?;
+fn generate_lint_descriptor(buf: &mut String) {
+    let stdout = cmd!("rustc -W help").read().unwrap();
+    let start_lints = stdout.find("---- ------- -------").unwrap();
+    let start_lint_groups = stdout.find("---- ---------").unwrap();
+    let end_lints = stdout.find("Lint groups provided by rustc:").unwrap();
     let end_lint_groups = stdout
         .find("Lint tools like Clippy can provide additional lints and lint groups.")
-        .ok_or_else(|| anyhow::format_err!(""))?;
+        .unwrap();
     buf.push_str(r#"pub const DEFAULT_LINTS: &[Lint] = &["#);
     buf.push('\n');
     let mut lints = stdout[start_lints..end_lints]
@@ -75,32 +77,30 @@ fn generate_lint_descriptor(buf: &mut String) {
         push_lint_completion(buf, &name.replace("-", "_"), &description)
     });
     buf.push_str("];\n");
-    Ok(())
 }

-fn generate_feature_descriptor(buf: &mut String, src_dir: PathBuf) -> Result<()> {
-    buf.push_str(r#"pub const FEATURES: &[Lint] = &["#);
-    buf.push('\n');
-    let mut vec = ["language-features", "library-features"]
+fn generate_feature_descriptor(buf: &mut String, src_dir: PathBuf) {
+    let mut features = ["language-features", "library-features"]
         .iter()
-        .flat_map(|it| WalkDir::new(src_dir.join(it)))
-        .filter_map(|e| e.ok())
-        .filter(|entry| {
+        .flat_map(|it| sourcegen::list_files(&src_dir.join(it)))
+        .filter(|path| {
             // Get all `.md ` files
-            entry.file_type().is_file() && entry.path().extension().unwrap_or_default() == "md"
+            path.extension().unwrap_or_default().to_str().unwrap_or_default() == "md"
         })
-        .map(|entry| {
-            let path = entry.path();
+        .map(|path| {
             let feature_ident = path.file_stem().unwrap().to_str().unwrap().replace("-", "_");
-            let doc = read_file(path).unwrap();
+            let doc = fs::read_to_string(path).unwrap();
             (feature_ident, doc)
         })
        .collect::<Vec<_>>();
-    vec.sort_by(|(feature_ident, _), (feature_ident2, _)| feature_ident.cmp(feature_ident2));
-    vec.into_iter()
-        .for_each(|(feature_ident, doc)| push_lint_completion(buf, &feature_ident, &doc));
+    features.sort_by(|(feature_ident, _), (feature_ident2, _)| feature_ident.cmp(feature_ident2));
+
+    buf.push_str(r#"pub const FEATURES: &[Lint] = &["#);
+    for (feature_ident, doc) in features.into_iter() {
+        push_lint_completion(buf, &feature_ident, &doc)
+    }
+    buf.push('\n');
     buf.push_str("];\n");
-    Ok(())
 }

 #[derive(Default)]
@@ -113,9 +113,9 @@ fn unescape(s: &str) -> String {
     s.replace(r#"\""#, "").replace(r#"\n"#, "\n").replace(r#"\r"#, "")
 }

-fn generate_descriptor_clippy(buf: &mut String, path: &Path) -> Result<()> {
-    let file_content = read_file(path)?;
-    let mut clippy_lints: Vec<ClippyLint> = vec![];
+fn generate_descriptor_clippy(buf: &mut String, path: &Path) {
+    let file_content = std::fs::read_to_string(path).unwrap();
+    let mut clippy_lints: Vec<ClippyLint> = Vec::new();

     for line in file_content.lines().map(|line| line.trim()) {
         if line.starts_with(r#""id":"#) {
@@ -144,27 +144,25 @@ fn generate_descriptor_clippy(buf: &mut String, path: &Path) {
         }
     }
     clippy_lints.sort_by(|lint, lint2| lint.id.cmp(&lint2.id));

     buf.push_str(r#"pub const CLIPPY_LINTS: &[Lint] = &["#);
     buf.push('\n');
-    clippy_lints.into_iter().for_each(|clippy_lint| {
+    for clippy_lint in clippy_lints.into_iter() {
         let lint_ident = format!("clippy::{}", clippy_lint.id);
         let doc = clippy_lint.help;
         push_lint_completion(buf, &lint_ident, &doc);
-    });
+    }

     buf.push_str("];\n");
-
-    Ok(())
 }

 fn push_lint_completion(buf: &mut String, label: &str, description: &str) {
-    writeln!(
+    format_to!(
         buf,
         r###"    Lint {{
             label: "{}",
             description: r##"{}"##
         }},"###,
-        label, description
-    )
-    .unwrap();
+        label,
+        description
+    );
 }
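A recurring mechanical change in the file above: `writeln!(buf, …).unwrap()` becomes `stdx::format_to!(buf, …)`. Appending to a `String` cannot actually fail, so `format_to!` hides the `fmt::Result` ceremony that `write!`/`writeln!` force on the caller. A small self-contained comparison (assuming `format_to!` expands to roughly `write!` plus `unwrap`, as the uses above suggest):

```rust
use std::fmt::Write as _;

fn push_lint(buf: &mut String, label: &str) {
    // With plain `writeln!`, the (infallible) `fmt::Result` still must be handled:
    writeln!(buf, "Lint {{ label: {:?} }},", label).unwrap();
    // With the helper, the call site stays flat:
    // stdx::format_to!(buf, "Lint {{ label: {:?} }},\n", label);
}

fn main() {
    let mut buf = String::new();
    push_lint(&mut buf, "clippy::approx_constant");
    print!("{}", buf);
}
```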
@@ -1,10 +1,9 @@
-//! Generated file, do not edit by hand, see `xtask/src/codegen`
-
+//! Generated by `sourcegen_lint_completions`, do not edit by hand.

 pub struct Lint {
     pub label: &'static str,
     pub description: &'static str,
 }

 pub const DEFAULT_LINTS: &[Lint] = &[
     Lint {
         label: "absolute_paths_not_starting_with_crate",
@@ -71,8 +70,8 @@ pub const DEFAULT_LINTS: &[Lint] = &[
         description: r##"detects when an null pointer is dereferenced"##,
     },
     Lint {
-        label: "disjoint_capture_migration",
-        description: r##"Drop reorder and auto traits error because of `capture_disjoint_fields`"##,
+        label: "disjoint_capture_drop_reorder",
+        description: r##"Drop reorder because of `capture_disjoint_fields`"##,
     },
     Lint { label: "drop_bounds", description: r##"bounds of the form `T: Drop` are useless"## },
     Lint {
@@ -98,7 +97,7 @@ pub const DEFAULT_LINTS: &[Lint] = &[
     },
     Lint {
         label: "future_incompatible",
-        description: r##"lint group for: keyword-idents, anonymous-parameters, ellipsis-inclusive-range-patterns, forbidden-lint-groups, illegal-floating-point-literal-pattern, private-in-public, pub-use-of-private-extern-crate, invalid-type-param-default, const-err, unaligned-references, patterns-in-fns-without-body, missing-fragment-specifier, late-bound-lifetime-arguments, order-dependent-trait-objects, coherence-leak-check, tyvar-behind-raw-pointer, bare-trait-objects, absolute-paths-not-starting-with-crate, unstable-name-collisions, where-clauses-object-safety, proc-macro-derive-resolution-fallback, macro-expanded-macro-exports-accessed-by-absolute-paths, ill-formed-attribute-input, conflicting-repr-hints, ambiguous-associated-items, mutable-borrow-reservation-conflict, indirect-structural-match, pointer-structural-match, nontrivial-structural-match, soft-unstable, cenum-impl-drop-cast, const-evaluatable-unchecked, uninhabited-static, unsupported-naked-functions, semicolon-in-expressions-from-macros, legacy-derive-helpers, proc-macro-back-compat, array-into-iter"##,
+        description: r##"lint group for: keyword-idents, anonymous-parameters, forbidden-lint-groups, illegal-floating-point-literal-pattern, private-in-public, pub-use-of-private-extern-crate, invalid-type-param-default, const-err, unaligned-references, patterns-in-fns-without-body, missing-fragment-specifier, late-bound-lifetime-arguments, order-dependent-trait-objects, coherence-leak-check, tyvar-behind-raw-pointer, absolute-paths-not-starting-with-crate, unstable-name-collisions, where-clauses-object-safety, proc-macro-derive-resolution-fallback, macro-expanded-macro-exports-accessed-by-absolute-paths, ill-formed-attribute-input, conflicting-repr-hints, ambiguous-associated-items, mutable-borrow-reservation-conflict, indirect-structural-match, pointer-structural-match, nontrivial-structural-match, soft-unstable, cenum-impl-drop-cast, const-evaluatable-unchecked, uninhabited-static, unsupported-naked-functions, semicolon-in-expressions-from-macros, legacy-derive-helpers, proc-macro-back-compat, array-into-iter"##,
     },
     Lint {
         label: "ill_formed_attribute_input",
@@ -139,7 +138,7 @@ pub const DEFAULT_LINTS: &[Lint] = &[
     },
     Lint {
         label: "invalid_value",
-        description: r##"an invalid value is being created (such as a null reference)"##,
+        description: r##"an invalid value is being created (such as a NULL reference)"##,
     },
     Lint {
         label: "irrefutable_let_patterns",
@@ -291,10 +290,6 @@ pub const DEFAULT_LINTS: &[Lint] = &[
         label: "rust_2018_idioms",
         description: r##"lint group for: bare-trait-objects, unused-extern-crates, ellipsis-inclusive-range-patterns, elided-lifetimes-in-paths, explicit-outlives-requirements"##,
     },
-    Lint {
-        label: "rust_2021_compatibility",
-        description: r##"lint group for: ellipsis-inclusive-range-patterns, bare-trait-objects"##,
-    },
     Lint {
         label: "semicolon_in_expressions_from_macros",
         description: r##"trailing semicolon in macro body used as expression"##,
@@ -804,6 +799,7 @@ Inline assembly is currently supported on the following architectures:
 - Hexagon
 - MIPS32r2 and MIPS64r2
 - wasm32
+- BPF

 ## Basic usage
@@ -1229,7 +1225,7 @@ reg_spec := <register class> / "<explicit register>"
 operand_expr := expr / "_" / expr "=>" expr / expr "=>" "_"
 reg_operand := dir_spec "(" reg_spec ")" operand_expr
 operand := reg_operand / "const" const_expr / "sym" path
-option := "pure" / "nomem" / "readonly" / "preserves_flags" / "noreturn" / "nostack" / "att_syntax"
+option := "pure" / "nomem" / "readonly" / "preserves_flags" / "noreturn" / "nostack" / "att_syntax" / "raw"
 options := "options(" option *["," option] [","] ")"
 asm := "asm!(" format_string *("," format_string) *("," [ident "="] operand) ["," options] [","] ")"
 ```
@@ -1344,6 +1340,8 @@ Here is the list of currently supported register classes:
 | PowerPC | `reg_nonzero` | | `r[1-31]` | `b` |
 | PowerPC | `freg` | `f[0-31]` | `f` |
 | wasm32 | `local` | None\* | `r` |
+| BPF | `reg` | `r[0-10]` | `r` |
+| BPF | `wreg` | `w[0-10]` | `w` |

 > **Note**: On x86 we treat `reg_byte` differently from `reg` because the compiler can allocate `al` and `ah` separately whereas `reg` reserves the whole register.
 >
@@ -1389,6 +1387,8 @@ Each register class has constraints on which value types they can be used with.
 | PowerPC | `reg_nonzero` | None | `i8`, `i16`, `i32` |
 | PowerPC | `freg` | None | `f32`, `f64` |
 | wasm32 | `local` | None | `i8` `i16` `i32` `i64` `f32` `f64` |
+| BPF | `reg` | None | `i8` `i16` `i32` `i64` |
+| BPF | `wreg` | `alu32` | `i8` `i16` `i32` |

 > **Note**: For the purposes of the above table pointers, function pointers and `isize`/`usize` are treated as the equivalent integer type (`i16`/`i32`/`i64` depending on the target).
@@ -1448,6 +1448,7 @@ Some registers have multiple names. These are all treated by the compiler as identical.
 | Hexagon | `r29` | `sp` |
 | Hexagon | `r30` | `fr` |
 | Hexagon | `r31` | `lr` |
+| BPF | `r[0-10]` | `w[0-10]` |

 Some registers cannot be used for input or output operands:
@@ -1549,6 +1550,7 @@ Currently the following options are defined:
 - `noreturn`: The `asm` block never returns, and its return type is defined as `!` (never). Behavior is undefined if execution falls through past the end of the asm code. A `noreturn` asm block behaves just like a function which doesn't return; notably, local variables in scope are not dropped before it is invoked.
 - `nostack`: The `asm` block does not push data to the stack, or write to the stack red-zone (if supported by the target). If this option is *not* used then the stack pointer is guaranteed to be suitably aligned (according to the target ABI) for a function call.
 - `att_syntax`: This option is only valid on x86, and causes the assembler to use the `.att_syntax prefix` mode of the GNU assembler. Register operands are substituted in with a leading `%`.
+- `raw`: This causes the template string to be parsed as a raw assembly string, with no special handling for `{` and `}`. This is primarily useful when including raw assembly code from an external file using `include_str!`.

 The compiler performs some additional checks on options:
 - The `nomem` and `readonly` options are mutually exclusive: it is a compile-time error to specify both.
@@ -3801,6 +3803,39 @@ fn cheap_clone<T: CheapToClone>(t: T) -> T {

 This is expected to replace the unstable `overlapping_marker_traits`
 feature, which applied to all empty traits (without needing an opt-in).
+"##,
+    },
+    Lint {
+        label: "more_qualified_paths",
+        description: r##"# `more_qualified_paths`
+
+The `more_qualified_paths` feature can be used in order to enable the
+use of qualified paths in patterns.
+
+## Example
+
+```rust
+#![feature(more_qualified_paths)]
+
+fn main() {
+    // destructure through a qualified path
+    let <Foo as A>::Assoc { br } = StructStruct { br: 2 };
+}
+
+struct StructStruct {
+    br: i8,
+}
+
+struct Foo;
+
+trait A {
+    type Assoc;
+}
+
+impl A for Foo {
+    type Assoc = StructStruct;
+}
+```
 "##,
     },
     Lint {
@@ -4681,60 +4716,6 @@ let result: Result<i32, ParseIntError> = try {
 };
 assert!(result.is_err());
 ```
-"##,
-    },
-    Lint {
-        label: "try_trait",
-        description: r##"# `try_trait`
-
-The tracking issue for this feature is: [#42327]
-
-[#42327]: https://github.com/rust-lang/rust/issues/42327
-
-------------------------
-
-This introduces a new trait `Try` for extending the `?` operator to types
-other than `Result` (a part of [RFC 1859]). The trait provides the canonical
-way to _view_ a type in terms of a success/failure dichotomy. This will
-allow `?` to supplant the `try_opt!` macro on `Option` and the `try_ready!`
-macro on `Poll`, among other things.
-
-[RFC 1859]: https://github.com/rust-lang/rfcs/pull/1859
-
-Here's an example implementation of the trait:
-
-```rust,ignore (cannot-reimpl-Try)
-/// A distinct type to represent the `None` value of an `Option`.
-///
-/// This enables using the `?` operator on `Option`; it's rarely useful alone.
-#[derive(Debug)]
-#[unstable(feature = "try_trait", issue = "42327")]
-pub struct None { _priv: () }
-
-#[unstable(feature = "try_trait", issue = "42327")]
-impl<T> ops::Try for Option<T> {
-    type Ok = T;
-    type Error = None;
-
-    fn into_result(self) -> Result<T, None> {
-        self.ok_or(None { _priv: () })
-    }
-
-    fn from_ok(v: T) -> Self {
-        Some(v)
-    }
-
-    fn from_error(_: None) -> Self {
-        None
-    }
-}
-```
-
-Note the `Error` associated type here is a new marker. The `?` operator
-allows interconversion between different `Try` implementers only when
-the error type can be converted `Into` the error type of the enclosing
-function (or catch block). Having a distinct error type (as opposed to
-just `()`, or similar) restricts this to where it's semantically meaningful.
 "##,
     },
     Lint {
@@ -5035,6 +5016,10 @@ checked."##,
         label: "clippy::almost_swapped",
         description: r##"Checks for `foo = bar; bar = foo` sequences."##,
     },
+    Lint {
+        label: "clippy::append_instead_of_extend",
+        description: r##"Checks for occurrences where one vector gets extended instead of append"##,
+    },
     Lint {
         label: "clippy::approx_constant",
         description: r##"Checks for floating point literals that approximate
@@ -5371,6 +5356,25 @@ explicitly or vice versa."##,
         label: "clippy::disallowed_method",
         description: r##"Denies the configured methods and functions in clippy.toml"##,
     },
+    Lint {
+        label: "clippy::disallowed_script_idents",
+        description: r##"Checks for usage of unicode scripts other than those explicitly allowed
+by the lint config.
+
+This lint doesn't take into account non-text scripts such as `Unknown` and `Linear_A`.
+It also ignores the `Common` script type.
+While configuring, be sure to use official script name [aliases] from
+[the list of supported scripts][supported_scripts].
+
+See also: [`non_ascii_idents`].
+
+[aliases]: http://www.unicode.org/reports/tr24/tr24-31.html#Script_Value_Aliases
+[supported_scripts]: https://www.unicode.org/iso15924/iso15924-codes.html"##,
+    },
+    Lint {
+        label: "clippy::disallowed_type",
+        description: r##"Denies the configured types in clippy.toml."##,
+    },
     Lint {
         label: "clippy::diverging_sub_expression",
         description: r##"Checks for diverging calls that are not match arms or
@@ -6147,6 +6151,11 @@ used to clamp values, but switched so that the result is constant."##,
         label: "clippy::missing_docs_in_private_items",
         description: r##"Warns if there is missing doc for any documentable item
 (public or private)."##,
+    },
+    Lint {
+        label: "clippy::missing_enforced_import_renames",
+        description: r##"Checks for imports that do not rename the item as specified
+in the `enforce-import-renames` config option."##,
     },
     Lint {
         label: "clippy::missing_errors_doc",
@@ -6372,6 +6381,10 @@ concisely."##,
         description: r##"Checks for duplicate open options as well as combinations
 that make no sense."##,
     },
+    Lint {
+        label: "clippy::nonstandard_macro_braces",
+        description: r##"Checks that common macros are used with consistent bracing."##,
+    },
     Lint {
         label: "clippy::not_unsafe_ptr_arg_deref",
         description: r##"Checks for public functions that dereference raw pointer
@@ -6560,6 +6573,7 @@ upper bound, e.g., `x..(y+1)`."##,
         label: "clippy::rc_buffer",
         description: r##"Checks for `Rc<T>` and `Arc<T>` when `T` is a mutable buffer type such as `String` or `Vec`."##,
     },
+    Lint { label: "clippy::rc_mutex", description: r##"Checks for `Rc<Mutex<T>>`."## },
     Lint {
         label: "clippy::redundant_allocation",
         description: r##"Checks for use of redundant allocations anywhere in the code."##,
@@ -27,3 +27,4 @@ ide_db = { path = "../ide_db", version = "0.0.0" }
 expect-test = "1.1"

 test_utils = { path = "../test_utils" }
+sourcegen = { path = "../sourcegen" }
@@ -49,6 +49,9 @@ mod handlers {
     pub(crate) mod unlinked_file;
 }

+#[cfg(test)]
+mod tests;
+
 use hir::{diagnostics::AnyDiagnostic, Semantics};
 use ide_db::{
     assists::{Assist, AssistId, AssistKind, AssistResolveStrategy},
@@ -223,152 +226,3 @@ fn unresolved_fix(id: &'static str, label: &str, target: TextRange) -> Assist {
         source_change: None,
     }
 }
-
-#[cfg(test)]
-mod tests {
-    // … the `check_fix`/`check_fixes`/`check_no_fix`/`check_expect`/
-    // `check_diagnostics` helpers and `test_disabled_diagnostics` removed here
-    // move verbatim into the new crates/ide_diagnostics/src/tests.rs, shown in
-    // full below.
-}
crates/ide_diagnostics/src/tests.rs (new file, 146 lines)
@@ -0,0 +1,146 @@
+mod sourcegen;
+
+use expect_test::Expect;
+use ide_db::{
+    assists::AssistResolveStrategy,
+    base_db::{fixture::WithFixture, SourceDatabaseExt},
+    RootDatabase,
+};
+use stdx::trim_indent;
+use test_utils::{assert_eq_text, extract_annotations};
+
+use crate::{DiagnosticsConfig, Severity};
+
+/// Takes a multi-file input fixture with annotated cursor positions,
+/// and checks that:
+/// * a diagnostic is produced
+/// * the first diagnostic fix trigger range touches the input cursor position
+/// * that the contents of the file containing the cursor match `after` after the diagnostic fix is applied
+#[track_caller]
+pub(crate) fn check_fix(ra_fixture_before: &str, ra_fixture_after: &str) {
+    check_nth_fix(0, ra_fixture_before, ra_fixture_after);
+}
+/// Takes a multi-file input fixture with annotated cursor positions,
+/// and checks that:
+/// * a diagnostic is produced
+/// * every diagnostic fixes trigger range touches the input cursor position
+/// * that the contents of the file containing the cursor match `after` after each diagnostic fix is applied
+pub(crate) fn check_fixes(ra_fixture_before: &str, ra_fixtures_after: Vec<&str>) {
+    for (i, ra_fixture_after) in ra_fixtures_after.iter().enumerate() {
+        check_nth_fix(i, ra_fixture_before, ra_fixture_after)
+    }
+}
+
+#[track_caller]
+fn check_nth_fix(nth: usize, ra_fixture_before: &str, ra_fixture_after: &str) {
+    let after = trim_indent(ra_fixture_after);
+
+    let (db, file_position) = RootDatabase::with_position(ra_fixture_before);
+    let diagnostic = super::diagnostics(
+        &db,
+        &DiagnosticsConfig::default(),
+        &AssistResolveStrategy::All,
+        file_position.file_id,
+    )
+    .pop()
+    .expect("no diagnostics");
+    let fix = &diagnostic.fixes.expect("diagnostic misses fixes")[nth];
+    let actual = {
+        let source_change = fix.source_change.as_ref().unwrap();
+        let file_id = *source_change.source_file_edits.keys().next().unwrap();
+        let mut actual = db.file_text(file_id).to_string();
+
+        for edit in source_change.source_file_edits.values() {
+            edit.apply(&mut actual);
+        }
+        actual
+    };
+
+    assert_eq_text!(&after, &actual);
+    assert!(
+        fix.target.contains_inclusive(file_position.offset),
+        "diagnostic fix range {:?} does not touch cursor position {:?}",
+        fix.target,
+        file_position.offset
+    );
+}
+
+/// Checks that there's a diagnostic *without* fix at `$0`.
+pub(crate) fn check_no_fix(ra_fixture: &str) {
+    let (db, file_position) = RootDatabase::with_position(ra_fixture);
+    let diagnostic = super::diagnostics(
+        &db,
+        &DiagnosticsConfig::default(),
+        &AssistResolveStrategy::All,
+        file_position.file_id,
+    )
+    .pop()
+    .unwrap();
+    assert!(diagnostic.fixes.is_none(), "got a fix when none was expected: {:?}", diagnostic);
+}
+
+pub(crate) fn check_expect(ra_fixture: &str, expect: Expect) {
+    let (db, file_id) = RootDatabase::with_single_file(ra_fixture);
+    let diagnostics = super::diagnostics(
+        &db,
+        &DiagnosticsConfig::default(),
+        &AssistResolveStrategy::All,
+        file_id,
+    );
+    expect.assert_debug_eq(&diagnostics)
+}
+
+#[track_caller]
+pub(crate) fn check_diagnostics(ra_fixture: &str) {
+    let mut config = DiagnosticsConfig::default();
+    config.disabled.insert("inactive-code".to_string());
+    check_diagnostics_with_config(config, ra_fixture)
+}
+
+#[track_caller]
+pub(crate) fn check_diagnostics_with_config(config: DiagnosticsConfig, ra_fixture: &str) {
+    let (db, files) = RootDatabase::with_many_files(ra_fixture);
+    for file_id in files {
+        let diagnostics = super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_id);
+
+        let expected = extract_annotations(&*db.file_text(file_id));
+        let mut actual = diagnostics
+            .into_iter()
+            .map(|d| {
+                let mut annotation = String::new();
+                if let Some(fixes) = &d.fixes {
+                    assert!(!fixes.is_empty());
+                    annotation.push_str("💡 ")
+                }
+                annotation.push_str(match d.severity {
+                    Severity::Error => "error",
+                    Severity::WeakWarning => "weak",
+                });
+                annotation.push_str(": ");
+                annotation.push_str(&d.message);
+                (d.range, annotation)
+            })
+            .collect::<Vec<_>>();
+        actual.sort_by_key(|(range, _)| range.start());
+        assert_eq!(expected, actual);
+    }
+}
+
+#[test]
+fn test_disabled_diagnostics() {
+    let mut config = DiagnosticsConfig::default();
+    config.disabled.insert("unresolved-module".into());
+
+    let (db, file_id) = RootDatabase::with_single_file(r#"mod foo;"#);
+
+    let diagnostics = super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_id);
+    assert!(diagnostics.is_empty());
+
+    let diagnostics = super::diagnostics(
+        &db,
+        &DiagnosticsConfig::default(),
+        &AssistResolveStrategy::All,
+        file_id,
+    );
+    assert!(!diagnostics.is_empty());
+}
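These helpers drive data-driven tests: `extract_annotations` pulls the expected diagnostics out of `//^^^`-style comment annotations embedded in the fixture itself, and `check_diagnostics` asserts that the diagnostics actually produced match them. A hypothetical call following rust-analyzer's fixture conventions (this particular test is not part of the commit):

```rust
#[test]
fn unresolved_module_is_reported() {
    check_diagnostics(
        r#"
mod does_not_exist;
//^^^^^^^^^^^^^^^^^ 💡 error: unresolved module
"#,
    );
}
```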
@@ -1,43 +1,40 @@
 //! Generates `assists.md` documentation.

-use std::{fmt, path::PathBuf};
+use std::{fmt, fs, io, path::PathBuf};

-use xshell::write_file;
+use sourcegen::project_root;

-use crate::{
-    codegen::{extract_comment_blocks_with_empty_lines, Location, PREAMBLE},
-    project_root, rust_files, Result,
-};
-
-pub(crate) fn generate_diagnostic_docs() -> Result<()> {
-    let diagnostics = Diagnostic::collect()?;
+#[test]
+fn sourcegen_diagnostic_docs() {
+    let diagnostics = Diagnostic::collect().unwrap();
     let contents =
         diagnostics.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n");
-    let contents = format!("//{}\n{}\n", PREAMBLE, contents.trim());
+    let contents = sourcegen::add_preamble("sourcegen_diagnostic_docs", contents);
     let dst = project_root().join("docs/user/generated_diagnostic.adoc");
-    write_file(&dst, &contents)?;
-    Ok(())
+    fs::write(&dst, &contents).unwrap();
 }

 #[derive(Debug)]
 struct Diagnostic {
     id: String,
-    location: Location,
+    location: sourcegen::Location,
     doc: String,
 }

 impl Diagnostic {
-    fn collect() -> Result<Vec<Diagnostic>> {
+    fn collect() -> io::Result<Vec<Diagnostic>> {
+        let handlers_dir = project_root().join("crates/ide_diagnostics/src/handlers");
+
         let mut res = Vec::new();
-        for path in rust_files() {
+        for path in sourcegen::list_rust_files(&handlers_dir) {
             collect_file(&mut res, path)?;
         }
         res.sort_by(|lhs, rhs| lhs.id.cmp(&rhs.id));
         return Ok(res);

-        fn collect_file(acc: &mut Vec<Diagnostic>, path: PathBuf) -> Result<()> {
-            let text = xshell::read_file(&path)?;
-            let comment_blocks = extract_comment_blocks_with_empty_lines("Diagnostic", &text);
+        fn collect_file(acc: &mut Vec<Diagnostic>, path: PathBuf) -> io::Result<()> {
+            let text = fs::read_to_string(&path)?;
+            let comment_blocks = sourcegen::CommentBlock::extract("Diagnostic", &text);

             for block in comment_blocks {
                 let id = block.id;
@@ -45,7 +42,7 @@ impl Diagnostic {
                     panic!("invalid diagnostic name: {:?}:\n  {}", id, msg)
                 }
                 let doc = block.contents.join("\n");
-                let location = Location::new(path.clone(), block.line);
+                let location = sourcegen::Location { file: path.clone(), line: block.line };
                 acc.push(Diagnostic { id, location, doc })
             }
@@ -1,4 +1,4 @@
-//! Generated file, do not edit by hand, see `xtask/src/codegen`
+//! Generated by `sourcegen_ast`, do not edit by hand.

 #![allow(bad_style, missing_docs, unreachable_pub)]
 #[doc = r" The kind of syntax node, e.g. `IDENT`, `USE_KW`, or `STRUCT`."]
@@ -66,7 +66,9 @@ jemallocator = { version = "0.4.1", package = "tikv-jemallocator", optional = true }

 [dev-dependencies]
 expect-test = "1.1"
+
 test_utils = { path = "../test_utils" }
+sourcegen = { path = "../sourcegen" }
 mbe = { path = "../mbe" }
 tt = { path = "../tt" }
@@ -8,6 +8,7 @@
 //! specific JSON shapes here -- there's little value in such tests, as we can't
 //! be sure without a real client anyway.
 
+mod sourcegen;
 mod testdir;
 mod support;
@@ -1,42 +1,43 @@
 //! Generates `assists.md` documentation.
 
-use std::{fmt, path::PathBuf};
+use std::{fmt, fs, io, path::PathBuf};
 
-use xshell::write_file;
-
-use crate::{
-    codegen::{extract_comment_blocks_with_empty_lines, Location, PREAMBLE},
-    project_root, rust_files, Result,
-};
-
-pub(crate) fn generate_feature_docs() -> Result<()> {
-    let features = Feature::collect()?;
+#[test]
+fn sourcegen_feature_docs() {
+    let features = Feature::collect().unwrap();
     let contents = features.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n");
-    let contents = format!("//{}\n{}\n", PREAMBLE, contents.trim());
-    let dst = project_root().join("docs/user/generated_features.adoc");
-    write_file(&dst, &contents)?;
-    Ok(())
+    let contents = format!(
+        "
+// Generated file, do not edit by hand, see `sourcegen_feature_docs`.
+{}
+",
+        contents.trim()
+    );
+    let dst = sourcegen::project_root().join("docs/user/generated_features.adoc");
+    fs::write(&dst, &contents).unwrap();
 }
 
 #[derive(Debug)]
 struct Feature {
     id: String,
-    location: Location,
+    location: sourcegen::Location,
     doc: String,
 }
 
 impl Feature {
-    fn collect() -> Result<Vec<Feature>> {
+    fn collect() -> io::Result<Vec<Feature>> {
+        let crates_dir = sourcegen::project_root().join("crates");
+
         let mut res = Vec::new();
-        for path in rust_files() {
+        for path in sourcegen::list_rust_files(&crates_dir) {
             collect_file(&mut res, path)?;
         }
         res.sort_by(|lhs, rhs| lhs.id.cmp(&rhs.id));
         return Ok(res);
 
-        fn collect_file(acc: &mut Vec<Feature>, path: PathBuf) -> Result<()> {
-            let text = xshell::read_file(&path)?;
-            let comment_blocks = extract_comment_blocks_with_empty_lines("Feature", &text);
+        fn collect_file(acc: &mut Vec<Feature>, path: PathBuf) -> io::Result<()> {
+            let text = std::fs::read_to_string(&path)?;
+            let comment_blocks = sourcegen::CommentBlock::extract("Feature", &text);
 
             for block in comment_blocks {
                 let id = block.id;
@@ -44,7 +45,7 @@ impl Feature {
                     panic!("invalid feature name: {:?}:\n {}", id, msg)
                 }
                 let doc = block.contents.join("\n");
-                let location = Location::new(path.clone(), block.line);
+                let location = sourcegen::Location { file: path.clone(), line: block.line };
                 acc.push(Feature { id, location, doc })
             }
13
crates/sourcegen/Cargo.toml
Normal file

@@ -0,0 +1,13 @@
+[package]
+name = "sourcegen"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+authors = ["rust-analyzer developers"]
+edition = "2018"
+
+[lib]
+doctest = false
+
+[dependencies]
+xshell = "0.1"
195
crates/sourcegen/src/lib.rs
Normal file

@@ -0,0 +1,195 @@
+//! rust-analyzer relies heavily on source code generation.
+//!
+//! Things like feature documentation or assist tests are implemented by
+//! processing rust-analyzer's own source code and generating the appropriate
+//! output. See `sourcegen_` tests in various crates.
+//!
+//! This crate contains utilities to make this kind of source-gen easy.
+
+use std::{
+    fmt, fs, mem,
+    path::{Path, PathBuf},
+};
+
+use xshell::{cmd, pushenv};
+
+pub fn list_rust_files(dir: &Path) -> Vec<PathBuf> {
+    let mut res = list_files(dir);
+    res.retain(|it| {
+        it.file_name().unwrap_or_default().to_str().unwrap_or_default().ends_with(".rs")
+    });
+    res
+}
+
+pub fn list_files(dir: &Path) -> Vec<PathBuf> {
+    let mut res = Vec::new();
+    let mut work = vec![dir.to_path_buf()];
+    while let Some(dir) = work.pop() {
+        for entry in dir.read_dir().unwrap() {
+            let entry = entry.unwrap();
+            let file_type = entry.file_type().unwrap();
+            let path = entry.path();
+            let is_hidden =
+                path.file_name().unwrap_or_default().to_str().unwrap_or_default().starts_with('.');
+            if !is_hidden {
+                if file_type.is_dir() {
+                    work.push(path)
+                } else if file_type.is_file() {
+                    res.push(path)
+                }
+            }
+        }
+    }
+    res
+}
+
+pub struct CommentBlock {
+    pub id: String,
+    pub line: usize,
+    pub contents: Vec<String>,
+}
+
+impl CommentBlock {
+    pub fn extract(tag: &str, text: &str) -> Vec<CommentBlock> {
+        assert!(tag.starts_with(char::is_uppercase));
+
+        let tag = format!("{}:", tag);
+        let mut res = Vec::new();
+        for (line, mut block) in do_extract_comment_blocks(text, true) {
+            let first = block.remove(0);
+            if let Some(id) = first.strip_prefix(&tag) {
+                let id = id.trim().to_string();
+                let block = CommentBlock { id, line, contents: block };
+                res.push(block);
+            }
+        }
+        res
+    }
+
+    pub fn extract_untagged(text: &str) -> Vec<CommentBlock> {
+        let mut res = Vec::new();
+        for (line, block) in do_extract_comment_blocks(text, false) {
+            let id = String::new();
+            let block = CommentBlock { id, line, contents: block };
+            res.push(block);
+        }
+        res
+    }
+}
+
+fn do_extract_comment_blocks(
+    text: &str,
+    allow_blocks_with_empty_lines: bool,
+) -> Vec<(usize, Vec<String>)> {
+    let mut res = Vec::new();
+
+    let prefix = "// ";
+    let lines = text.lines().map(str::trim_start);
+
+    let mut block = (0, vec![]);
+    for (line_num, line) in lines.enumerate() {
+        if line == "//" && allow_blocks_with_empty_lines {
+            block.1.push(String::new());
+            continue;
+        }
+
+        let is_comment = line.starts_with(prefix);
+        if is_comment {
+            block.1.push(line[prefix.len()..].to_string());
+        } else {
+            if !block.1.is_empty() {
+                res.push(mem::take(&mut block));
+            }
+            block.0 = line_num + 2;
+        }
+    }
+    if !block.1.is_empty() {
+        res.push(block)
+    }
+    res
+}
+
+#[derive(Debug)]
+pub struct Location {
+    pub file: PathBuf,
+    pub line: usize,
+}
+
+impl fmt::Display for Location {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        let path = self.file.strip_prefix(&project_root()).unwrap().display().to_string();
+        let path = path.replace('\\', "/");
+        let name = self.file.file_name().unwrap();
+        write!(
+            f,
+            "https://github.com/rust-analyzer/rust-analyzer/blob/master/{}#L{}[{}]",
+            path,
+            self.line,
+            name.to_str().unwrap()
+        )
+    }
+}
+
+fn ensure_rustfmt() {
+    let version = cmd!("rustfmt --version").read().unwrap_or_default();
+    if !version.contains("stable") {
+        panic!(
+            "Failed to run rustfmt from toolchain 'stable'. \
+             Please run `rustup component add rustfmt --toolchain stable` to install it.",
+        )
+    }
+}
+
+pub fn reformat(text: String) -> String {
+    let _e = pushenv("RUSTUP_TOOLCHAIN", "stable");
+    ensure_rustfmt();
+    let rustfmt_toml = project_root().join("rustfmt.toml");
+    let mut stdout = cmd!("rustfmt --config-path {rustfmt_toml} --config fn_single_line=true")
+        .stdin(text)
+        .read()
+        .unwrap();
+    if !stdout.ends_with('\n') {
+        stdout.push('\n');
+    }
+    stdout
+}
+
+pub fn add_preamble(generator: &'static str, mut text: String) -> String {
+    let preamble = format!("//! Generated by `{}`, do not edit by hand.\n\n", generator);
+    text.insert_str(0, &preamble);
+    text
+}
+
+/// Checks that the `file` has the specified `contents`. If that is not the
+/// case, updates the file and then fails the test.
+pub fn ensure_file_contents(file: &Path, contents: &str) {
+    if let Ok(old_contents) = fs::read_to_string(file) {
+        if normalize_newlines(&old_contents) == normalize_newlines(contents) {
+            // File is already up to date.
+            return;
+        }
+    }
+
+    let display_path = file.strip_prefix(&project_root()).unwrap_or(file);
+    eprintln!(
+        "\n\x1b[31;1merror\x1b[0m: {} was not up-to-date, updating\n",
+        display_path.display()
+    );
+    if std::env::var("CI").is_ok() {
+        eprintln!("    NOTE: run `cargo test` locally and commit the updated files\n");
+    }
+    if let Some(parent) = file.parent() {
+        let _ = fs::create_dir_all(parent);
+    }
+    fs::write(file, contents).unwrap();
+    panic!("some file was not up to date and has been updated, simply re-run the tests")
+}
+
+fn normalize_newlines(s: &str) -> String {
+    s.replace("\r\n", "\n")
+}
+
+pub fn project_root() -> PathBuf {
+    let dir = env!("CARGO_MANIFEST_DIR");
+    PathBuf::from(dir).parent().unwrap().parent().unwrap().to_owned()
+}
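The whole crate boils down to five helpers: list files, extract tagged comment blocks, reformat through stable rustfmt, stamp a preamble, and assert generated file contents. A minimal sketch of a consumer `sourcegen_` test, assuming a crate that depends on `sourcegen` (the paths and tag are illustrative):

    #[test]
    fn sourcegen_example() {
        let dir = sourcegen::project_root().join("crates/ide_diagnostics/src/handlers");
        let mut docs = String::new();
        for file in sourcegen::list_rust_files(&dir) {
            let text = std::fs::read_to_string(&file).unwrap();
            // Collect every `// Diagnostic: ...` block into one document.
            for block in sourcegen::CommentBlock::extract("Diagnostic", &text) {
                docs.push_str(&block.contents.join("\n"));
                docs.push_str("\n\n");
            }
        }
        let contents = sourcegen::add_preamble("sourcegen_example", docs);
        // Updates the file and panics when the checked-in copy drifts.
        sourcegen::ensure_file_contents(
            &sourcegen::project_root().join("docs/user/example.adoc"),
            &contents,
        );
    }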
@@ -28,7 +28,11 @@ parser = { path = "../parser", version = "0.0.0" }
 profile = { path = "../profile", version = "0.0.0" }
 
 [dev-dependencies]
-test_utils = { path = "../test_utils" }
-walkdir = "2.3.1"
 rayon = "1"
 expect-test = "1.1"
+proc-macro2 = "1.0.8"
+quote = "1.0.2"
+ungrammar = "=1.14"
+
+test_utils = { path = "../test_utils" }
+sourcegen = { path = "../sourcegen" }
@@ -1,4 +1,4 @@
-//! Generated file, do not edit by hand, see `xtask/src/codegen`
+//! Generated by `sourcegen_ast`, do not edit by hand.
 
 use crate::{
     ast::{self, support, AstChildren, AstNode},
@@ -1,4 +1,4 @@
-//! Generated file, do not edit by hand, see `xtask/src/codegen`
+//! Generated by `sourcegen_ast`, do not edit by hand.
 
 use crate::{
     ast::AstToken,
@@ -1,3 +1,7 @@
+mod sourcegen_tests;
+mod sourcegen_ast;
+mod ast_src;
+
 use std::{
     fmt::Write,
     fs,
@@ -152,20 +156,14 @@ fn reparse_fuzz_tests() {
 /// Test that Rust-analyzer can parse and validate the rust-analyzer
 #[test]
 fn self_hosting_parsing() {
-    let dir = project_root().join("crates");
-    let files = walkdir::WalkDir::new(dir)
-        .into_iter()
-        .filter_entry(|entry| {
-            // Get all files which are not in the crates/syntax/test_data folder
-            !entry.path().components().any(|component| component.as_os_str() == "test_data")
-        })
-        .map(|e| e.unwrap())
-        .filter(|entry| {
-            // Get all `.rs ` files
-            !entry.path().is_dir() && (entry.path().extension().unwrap_or_default() == "rs")
-        })
-        .map(|entry| entry.into_path())
-        .collect::<Vec<_>>();
+    let crates_dir = project_root().join("crates");
+
+    let mut files = ::sourcegen::list_rust_files(&crates_dir);
+    files.retain(|path| {
+        // Get all files which are not in the crates/syntax/test_data folder
+        !path.components().any(|component| component.as_os_str() == "test_data")
+    });
+
     assert!(
         files.len() > 100,
         "self_hosting_parsing found too few files - is it running in the right directory?"
@@ -12,32 +12,31 @@ use proc_macro2::{Punct, Spacing};
 use quote::{format_ident, quote};
 use ungrammar::{rust_grammar, Grammar, Rule};
 
-use crate::{
-    ast_src::{AstEnumSrc, AstNodeSrc, AstSrc, Cardinality, Field, KindsSrc, KINDS_SRC},
-    codegen::{ensure_file_contents, reformat},
-    project_root, Result,
-};
+use crate::tests::ast_src::{
+    AstEnumSrc, AstNodeSrc, AstSrc, Cardinality, Field, KindsSrc, KINDS_SRC,
+};
 
-pub(crate) fn generate_syntax() -> Result<()> {
+#[test]
+fn sourcegen_ast() {
     let grammar = rust_grammar();
     let ast = lower(&grammar);
 
-    let syntax_kinds_file = project_root().join("crates/parser/src/syntax_kind/generated.rs");
-    let syntax_kinds = generate_syntax_kinds(KINDS_SRC)?;
-    ensure_file_contents(syntax_kinds_file.as_path(), &syntax_kinds)?;
+    let syntax_kinds_file =
+        sourcegen::project_root().join("crates/parser/src/syntax_kind/generated.rs");
+    let syntax_kinds = generate_syntax_kinds(KINDS_SRC);
+    sourcegen::ensure_file_contents(syntax_kinds_file.as_path(), &syntax_kinds);
 
-    let ast_tokens_file = project_root().join("crates/syntax/src/ast/generated/tokens.rs");
-    let contents = generate_tokens(&ast)?;
-    ensure_file_contents(ast_tokens_file.as_path(), &contents)?;
+    let ast_tokens_file =
+        sourcegen::project_root().join("crates/syntax/src/ast/generated/tokens.rs");
+    let contents = generate_tokens(&ast);
+    sourcegen::ensure_file_contents(ast_tokens_file.as_path(), &contents);
 
-    let ast_nodes_file = project_root().join("crates/syntax/src/ast/generated/nodes.rs");
-    let contents = generate_nodes(KINDS_SRC, &ast)?;
-    ensure_file_contents(ast_nodes_file.as_path(), &contents)?;
-
-    Ok(())
+    let ast_nodes_file = sourcegen::project_root().join("crates/syntax/src/ast/generated/nodes.rs");
+    let contents = generate_nodes(KINDS_SRC, &ast);
+    sourcegen::ensure_file_contents(ast_nodes_file.as_path(), &contents);
 }
 
-fn generate_tokens(grammar: &AstSrc) -> Result<String> {
+fn generate_tokens(grammar: &AstSrc) -> String {
     let tokens = grammar.tokens.iter().map(|token| {
         let name = format_ident!("{}", token);
         let kind = format_ident!("{}", to_upper_snake_case(token));
@@ -61,18 +60,20 @@ fn generate_tokens(grammar: &AstSrc) -> Result<String> {
         }
     });
 
-    let pretty = reformat(
-        quote! {
-            use crate::{SyntaxKind::{self, *}, SyntaxToken, ast::AstToken};
-            #(#tokens)*
-        }
-        .to_string(),
-    )?
-    .replace("#[derive", "\n#[derive");
-    Ok(pretty)
+    sourcegen::add_preamble(
+        "sourcegen_ast",
+        sourcegen::reformat(
+            quote! {
+                use crate::{SyntaxKind::{self, *}, SyntaxToken, ast::AstToken};
+                #(#tokens)*
+            }
+            .to_string(),
+        ),
+    )
+    .replace("#[derive", "\n#[derive")
 }
 
-fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> Result<String> {
+fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> String {
     let (node_defs, node_boilerplate_impls): (Vec<_>, Vec<_>) = grammar
         .nodes
         .iter()
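The nesting order in the new version is deliberate: `sourcegen::reformat` pipes the raw token text through stable rustfmt first, and `sourcegen::add_preamble` prepends the marker comment afterwards, so rustfmt never sees or reflows the preamble. A tiny illustration under that assumption (the struct is a made-up input):

    #[test]
    fn preamble_is_added_after_formatting() {
        let raw = quote::quote! { pub struct Demo; }.to_string();
        let pretty = sourcegen::add_preamble("sourcegen_ast", sourcegen::reformat(raw));
        // The preamble text comes from add_preamble in crates/sourcegen/src/lib.rs.
        assert!(pretty.starts_with("//! Generated by `sourcegen_ast`, do not edit by hand."));
    }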
@@ -230,7 +231,7 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> String {
         .filter(|name| !defined_nodes.iter().any(|&it| it == name))
     {
         drop(node)
-        // TODO: restore this
+        // FIXME: restore this
         // eprintln!("Warning: node {} not defined in ast source", node);
     }
 
@@ -262,8 +263,7 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> String {
         }
     }
 
-    let pretty = reformat(&res)?;
-    Ok(pretty)
+    sourcegen::add_preamble("sourcegen_ast", sourcegen::reformat(res))
 }
 
 fn write_doc_comment(contents: &[String], dest: &mut String) {
@@ -272,7 +272,7 @@ fn write_doc_comment(contents: &[String], dest: &mut String) {
     }
 }
 
-fn generate_syntax_kinds(grammar: KindsSrc<'_>) -> Result<String> {
+fn generate_syntax_kinds(grammar: KindsSrc<'_>) -> String {
     let (single_byte_tokens_values, single_byte_tokens): (Vec<_>, Vec<_>) = grammar
         .punct
         .iter()
@@ -384,7 +384,7 @@ fn generate_syntax_kinds(grammar: KindsSrc<'_>) -> String {
     }
     };
 
-    reformat(&ast.to_string())
+    sourcegen::add_preamble("sourcegen_ast", sourcegen::reformat(ast.to_string()))
 }
 
 fn to_upper_snake_case(s: &str) -> String {
|
||||||
acc.push(field);
|
acc.push(field);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
todo!("{:?}", rule)
|
panic!("unhandled rule: {:?}", rule)
|
||||||
}
|
}
|
||||||
Rule::Labeled { label: l, rule } => {
|
Rule::Labeled { label: l, rule } => {
|
||||||
assert!(label.is_none());
|
assert!(label.is_none());
|
|
@@ -1,26 +1,28 @@
-//! This module greps parser's code for specially formatted comments and turnes
+//! This module greps parser's code for specially formatted comments and turns
 //! them into tests.
 
 use std::{
-    collections::HashMap,
     fs, iter,
     path::{Path, PathBuf},
 };
 
-use crate::{
-    codegen::{ensure_file_contents, extract_comment_blocks},
-    project_root, Result,
-};
+use rustc_hash::FxHashMap;
 
-pub(crate) fn generate_parser_tests() -> Result<()> {
-    let tests = tests_from_dir(&project_root().join(Path::new("crates/parser/src/grammar")))?;
-    fn install_tests(tests: &HashMap<String, Test>, into: &str) -> Result<()> {
-        let tests_dir = project_root().join(into);
+#[test]
+fn sourcegen_parser_tests() {
+    let grammar_dir = sourcegen::project_root().join(Path::new("crates/parser/src/grammar"));
+    let tests = tests_from_dir(&grammar_dir);
+
+    install_tests(&tests.ok, "crates/syntax/test_data/parser/inline/ok");
+    install_tests(&tests.err, "crates/syntax/test_data/parser/inline/err");
+
+    fn install_tests(tests: &FxHashMap<String, Test>, into: &str) {
+        let tests_dir = sourcegen::project_root().join(into);
         if !tests_dir.is_dir() {
-            fs::create_dir_all(&tests_dir)?;
+            fs::create_dir_all(&tests_dir).unwrap();
         }
         // ok is never actually read, but it needs to be specified to create a Test in existing_tests
-        let existing = existing_tests(&tests_dir, true)?;
+        let existing = existing_tests(&tests_dir, true);
         for t in existing.keys().filter(|&t| !tests.contains_key(t)) {
             panic!("Test is deleted: {}", t);
         }
@@ -35,12 +37,9 @@ pub(crate) fn generate_parser_tests() -> Result<()> {
                 tests_dir.join(file_name)
             }
         };
-        ensure_file_contents(&path, &test.text)?;
+        sourcegen::ensure_file_contents(&path, &test.text);
     }
-    Ok(())
     }
-    install_tests(&tests.ok, "crates/syntax/test_data/parser/inline/ok")?;
-    install_tests(&tests.err, "crates/syntax/test_data/parser/inline/err")
 }
 
 #[derive(Debug)]
@@ -52,14 +51,14 @@ struct Test {
 
 #[derive(Default, Debug)]
 struct Tests {
-    ok: HashMap<String, Test>,
-    err: HashMap<String, Test>,
+    ok: FxHashMap<String, Test>,
+    err: FxHashMap<String, Test>,
 }
 
 fn collect_tests(s: &str) -> Vec<Test> {
     let mut res = Vec::new();
-    for comment_block in extract_comment_blocks(s) {
-        let first_line = &comment_block[0];
+    for comment_block in sourcegen::CommentBlock::extract_untagged(s) {
+        let first_line = &comment_block.contents[0];
         let (name, ok) = if let Some(name) = first_line.strip_prefix("test ") {
             (name.to_string(), true)
         } else if let Some(name) = first_line.strip_prefix("test_err ") {
@@ -67,7 +66,7 @@ fn collect_tests(s: &str) -> Vec<Test> {
         } else {
             continue;
         };
-        let text: String = comment_block[1..]
+        let text: String = comment_block.contents[1..]
             .iter()
             .cloned()
             .chain(iter::once(String::new()))
@@ -79,41 +78,34 @@ fn collect_tests(s: &str) -> Vec<Test> {
     res
 }
 
-fn tests_from_dir(dir: &Path) -> Result<Tests> {
+fn tests_from_dir(dir: &Path) -> Tests {
     let mut res = Tests::default();
-    for entry in ::walkdir::WalkDir::new(dir) {
-        let entry = entry.unwrap();
-        if !entry.file_type().is_file() {
-            continue;
-        }
-        if entry.path().extension().unwrap_or_default() != "rs" {
-            continue;
-        }
-        process_file(&mut res, entry.path())?;
+    for entry in sourcegen::list_rust_files(dir) {
+        process_file(&mut res, entry.as_path());
     }
     let grammar_rs = dir.parent().unwrap().join("grammar.rs");
-    process_file(&mut res, &grammar_rs)?;
-    return Ok(res);
-    fn process_file(res: &mut Tests, path: &Path) -> Result<()> {
-        let text = fs::read_to_string(path)?;
+    process_file(&mut res, &grammar_rs);
+    return res;
+
+    fn process_file(res: &mut Tests, path: &Path) {
+        let text = fs::read_to_string(path).unwrap();
 
         for test in collect_tests(&text) {
             if test.ok {
                 if let Some(old_test) = res.ok.insert(test.name.clone(), test) {
-                    anyhow::bail!("Duplicate test: {}", old_test.name);
+                    panic!("Duplicate test: {}", old_test.name);
                 }
             } else if let Some(old_test) = res.err.insert(test.name.clone(), test) {
-                anyhow::bail!("Duplicate test: {}", old_test.name);
+                panic!("Duplicate test: {}", old_test.name);
             }
         }
-        Ok(())
     }
 }
 
-fn existing_tests(dir: &Path, ok: bool) -> Result<HashMap<String, (PathBuf, Test)>> {
-    let mut res = HashMap::new();
-    for file in fs::read_dir(dir)? {
-        let file = file?;
+fn existing_tests(dir: &Path, ok: bool) -> FxHashMap<String, (PathBuf, Test)> {
+    let mut res = FxHashMap::default();
+    for file in fs::read_dir(dir).unwrap() {
+        let file = file.unwrap();
         let path = file.path();
         if path.extension().unwrap_or_default() != "rs" {
             continue;
@@ -122,11 +114,11 @@ fn existing_tests(dir: &Path, ok: bool) -> Result<HashMap<String, (PathBuf, Test
         let file_name = path.file_name().unwrap().to_str().unwrap();
         file_name[5..file_name.len() - 3].to_string()
     };
-    let text = xshell::read_file(&path)?;
+    let text = fs::read_to_string(&path).unwrap();
     let test = Test { name: name.clone(), text, ok };
     if let Some(old) = res.insert(name, (path, test)) {
         println!("Duplicate test: {:?}", old);
     }
 }
-    Ok(res)
+    res
 }
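For reference, the comment convention these functions parse is unchanged: a `// test <name>` (or `// test_err <name>`) line, followed by commented-out source, marks an inline parser test. A sketch of a block `collect_tests` would pick up (the name is illustrative):

    // test example_fn_item
    // fn foo() {}
    fn grammar_code_goes_here() {}

Judging by the `file_name[5..file_name.len() - 3]` slice in `existing_tests`, each test lives as a numbered file such as `0000_example_fn_item.rs` under the `inline/ok` or `inline/err` directory.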
@@ -325,6 +325,8 @@ In particular, we generate:
 
 * Documentation tests for assists
 
+See the `sourcegen` crate for details.
+
 **Architecture Invariant:** we avoid bootstrapping.
 For codegen we need to parse Rust code.
 Using rust-analyzer for that would work and would be fun, but it would also complicate the build process a lot.
@@ -9,9 +9,6 @@ license = "MIT OR Apache-2.0"
 [dependencies]
 anyhow = "1.0.26"
 flate2 = "1.0"
-proc-macro2 = "1.0.8"
-quote = "1.0.2"
-ungrammar = "=1.14"
 walkdir = "2.3.1"
 write-json = "0.1.0"
 xshell = "0.1"
@@ -1,166 +0,0 @@
-//! We use code generation heavily in rust-analyzer.
-//!
-//! Rather then doing it via proc-macros, we use old-school way of just dumping
-//! the source code.
-//!
-//! This module's submodules define specific bits that we generate.
-
-mod gen_syntax;
-mod gen_parser_tests;
-mod gen_lint_completions;
-mod gen_assists_docs;
-mod gen_feature_docs;
-mod gen_diagnostic_docs;
-
-use std::{
-    fmt, mem,
-    path::{Path, PathBuf},
-};
-use xshell::{cmd, pushenv};
-
-use crate::{ensure_rustfmt, project_root, Result};
-
-pub(crate) use self::{
-    gen_assists_docs::generate_assists_tests, gen_lint_completions::generate_lint_completions,
-    gen_parser_tests::generate_parser_tests, gen_syntax::generate_syntax,
-};
-
-pub(crate) fn docs() -> Result<()> {
-    // We don't commit docs to the repo, so we can just overwrite them.
-    gen_assists_docs::generate_assists_docs()?;
-    gen_feature_docs::generate_feature_docs()?;
-    gen_diagnostic_docs::generate_diagnostic_docs()?;
-    Ok(())
-}
-
-#[allow(unused)]
-fn used() {
-    generate_parser_tests();
-    generate_assists_tests();
-    generate_syntax();
-    generate_lint_completions();
-}
-
-/// Checks that the `file` has the specified `contents`. If that is not the
-/// case, updates the file and then fails the test.
-pub(crate) fn ensure_file_contents(file: &Path, contents: &str) -> Result<()> {
-    match std::fs::read_to_string(file) {
-        Ok(old_contents) if normalize_newlines(&old_contents) == normalize_newlines(contents) => {
-            return Ok(())
-        }
-        _ => (),
-    }
-    let display_path = file.strip_prefix(&project_root()).unwrap_or(file);
-    eprintln!(
-        "\n\x1b[31;1merror\x1b[0m: {} was not up-to-date, updating\n",
-        display_path.display()
-    );
-    if std::env::var("CI").is_ok() {
-        eprintln!("    NOTE: run `cargo test` locally and commit the updated files\n");
-    }
-    if let Some(parent) = file.parent() {
-        let _ = std::fs::create_dir_all(parent);
-    }
-    std::fs::write(file, contents).unwrap();
-    anyhow::bail!("some file was not up to date and has been updated, simply re-run the tests")
-}
-
-fn normalize_newlines(s: &str) -> String {
-    s.replace("\r\n", "\n")
-}
-
-const PREAMBLE: &str = "Generated file, do not edit by hand, see `xtask/src/codegen`";
-
-fn reformat(text: &str) -> Result<String> {
-    let _e = pushenv("RUSTUP_TOOLCHAIN", "stable");
-    ensure_rustfmt()?;
-    let rustfmt_toml = project_root().join("rustfmt.toml");
-    let stdout = cmd!("rustfmt --config-path {rustfmt_toml} --config fn_single_line=true")
-        .stdin(text)
-        .read()?;
-    Ok(format!("//! {}\n\n{}\n", PREAMBLE, stdout))
-}
-
-fn extract_comment_blocks(text: &str) -> Vec<Vec<String>> {
-    do_extract_comment_blocks(text, false).into_iter().map(|(_line, block)| block).collect()
-}
-
-fn extract_comment_blocks_with_empty_lines(tag: &str, text: &str) -> Vec<CommentBlock> {
-    assert!(tag.starts_with(char::is_uppercase));
-    let tag = format!("{}:", tag);
-    let mut res = Vec::new();
-    for (line, mut block) in do_extract_comment_blocks(text, true) {
-        let first = block.remove(0);
-        if first.starts_with(&tag) {
-            let id = first[tag.len()..].trim().to_string();
-            let block = CommentBlock { id, line, contents: block };
-            res.push(block);
-        }
-    }
-    res
-}
-
-struct CommentBlock {
-    id: String,
-    line: usize,
-    contents: Vec<String>,
-}
-
-fn do_extract_comment_blocks(
-    text: &str,
-    allow_blocks_with_empty_lines: bool,
-) -> Vec<(usize, Vec<String>)> {
-    let mut res = Vec::new();
-
-    let prefix = "// ";
-    let lines = text.lines().map(str::trim_start);
-
-    let mut block = (0, vec![]);
-    for (line_num, line) in lines.enumerate() {
-        if line == "//" && allow_blocks_with_empty_lines {
-            block.1.push(String::new());
-            continue;
-        }
-
-        let is_comment = line.starts_with(prefix);
-        if is_comment {
-            block.1.push(line[prefix.len()..].to_string());
-        } else {
-            if !block.1.is_empty() {
-                res.push(mem::take(&mut block));
-            }
-            block.0 = line_num + 2;
-        }
-    }
-    if !block.1.is_empty() {
-        res.push(block)
-    }
-    res
-}
-
-#[derive(Debug)]
-struct Location {
-    file: PathBuf,
-    line: usize,
-}
-
-impl Location {
-    fn new(file: PathBuf, line: usize) -> Self {
-        Self { file, line }
-    }
-}
-
-impl fmt::Display for Location {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        let path = self.file.strip_prefix(&project_root()).unwrap().display().to_string();
-        let path = path.replace('\\', "/");
-        let name = self.file.file_name().unwrap();
-        write!(
-            f,
-            "https://github.com/rust-analyzer/rust-analyzer/blob/master/{}#L{}[{}]",
-            path,
-            self.line,
-            name.to_str().unwrap()
-        )
-    }
-}
@@ -9,8 +9,6 @@
 //! `.cargo/config`.
 mod flags;
 
-mod codegen;
-mod ast_src;
 #[cfg(test)]
 mod tidy;
 
@@ -24,7 +22,6 @@ use std::{
     env,
     path::{Path, PathBuf},
 };
-use walkdir::{DirEntry, WalkDir};
 use xshell::{cmd, cp, pushd, pushenv};
 
 fn main() -> Result<()> {
@@ -63,31 +60,6 @@ fn project_root() -> PathBuf {
     .to_path_buf()
 }
 
-fn rust_files() -> impl Iterator<Item = PathBuf> {
-    rust_files_in(&project_root().join("crates"))
-}
-
-#[cfg(test)]
-fn cargo_files() -> impl Iterator<Item = PathBuf> {
-    files_in(&project_root(), "toml")
-        .filter(|path| path.file_name().map(|it| it == "Cargo.toml").unwrap_or(false))
-}
-
-fn rust_files_in(path: &Path) -> impl Iterator<Item = PathBuf> {
-    files_in(path, "rs")
-}
-
-fn ensure_rustfmt() -> Result<()> {
-    let out = cmd!("rustfmt --version").read()?;
-    if !out.contains("stable") {
-        bail!(
-            "Failed to run rustfmt from toolchain 'stable'. \
-             Please run `rustup component add rustfmt --toolchain stable` to install it.",
-        )
-    }
-    Ok(())
-}
-
 fn run_fuzzer() -> Result<()> {
     let _d = pushd("./crates/syntax")?;
     let _e = pushenv("RUSTUP_TOOLCHAIN", "nightly");
@@ -113,18 +85,3 @@ fn date_iso() -> Result<String> {
 fn is_release_tag(tag: &str) -> bool {
     tag.len() == "2020-02-24".len() && tag.starts_with(|c: char| c.is_ascii_digit())
 }
-
-fn files_in(path: &Path, ext: &'static str) -> impl Iterator<Item = PathBuf> {
-    let iter = WalkDir::new(path);
-    return iter
-        .into_iter()
-        .filter_entry(|e| !is_hidden(e))
-        .map(|e| e.unwrap())
-        .filter(|e| !e.file_type().is_dir())
-        .map(|e| e.into_path())
-        .filter(move |path| path.extension().map(|it| it == ext).unwrap_or(false));
-
-    fn is_hidden(entry: &DirEntry) -> bool {
-        entry.file_name().to_str().map(|s| s.starts_with('.')).unwrap_or(false)
-    }
-}
@@ -2,7 +2,7 @@ mod changelog;
 
 use xshell::{cmd, pushd, read_dir, read_file, write_file};
 
-use crate::{codegen, date_iso, flags, is_release_tag, project_root, Result};
+use crate::{date_iso, flags, is_release_tag, project_root, Result};
 
 impl flags::Release {
     pub(crate) fn run(self) -> Result<()> {
@@ -21,7 +21,10 @@ impl flags::Release {
         // to delete old tags.
         cmd!("git push --force").run()?;
     }
-    codegen::docs()?;
+
+    // Generates bits of manual.adoc.
+    cmd!("cargo test -p ide_assists -p ide_diagnostics -p rust-analyzer -- sourcegen_")
+        .run()?;
 
     let website_root = project_root().join("../rust-analyzer.github.io");
     let changelog_dir = website_root.join("./thisweek/_posts");
@@ -3,38 +3,24 @@ use std::{
     path::{Path, PathBuf},
 };
 
+use walkdir::{DirEntry, WalkDir};
 use xshell::{cmd, pushd, pushenv, read_file};
 
-use crate::{cargo_files, codegen, project_root, rust_files};
-
-#[test]
-fn generate_grammar() {
-    codegen::generate_syntax().unwrap()
-}
-
-#[test]
-fn generate_parser_tests() {
-    codegen::generate_parser_tests().unwrap()
-}
-
-#[test]
-fn generate_assists_tests() {
-    codegen::generate_assists_tests().unwrap();
-}
-
-/// This clones rustc repo, and so is not worth to keep up-to-date. We update
-/// manually by un-ignoring the test from time to time.
-#[test]
-#[ignore]
-fn generate_lint_completions() {
-    codegen::generate_lint_completions().unwrap()
-}
+use crate::project_root;
 
 #[test]
 fn check_code_formatting() {
     let _dir = pushd(project_root()).unwrap();
     let _e = pushenv("RUSTUP_TOOLCHAIN", "stable");
-    crate::ensure_rustfmt().unwrap();
+
+    let out = cmd!("rustfmt --version").read().unwrap();
+    if !out.contains("stable") {
+        panic!(
+            "Failed to run rustfmt from toolchain 'stable'. \
+             Please run `rustup component add rustfmt --toolchain stable` to install it.",
+        )
+    }
+
     let res = cmd!("cargo fmt -- --check").run();
     if res.is_err() {
         let _ = cmd!("cargo fmt").run();
@@ -42,11 +28,6 @@ fn check_code_formatting() {
     res.unwrap()
 }
 
-#[test]
-fn smoke_test_generate_documentation() {
-    codegen::docs().unwrap()
-}
-
 #[test]
 fn check_lsp_extensions_docs() {
     let expected_hash = {
@@ -344,6 +325,8 @@ fn check_test_attrs(path: &Path, text: &str) {
     // A legit test which needs to be ignored, as it takes too long to run
     // :(
     "hir_def/src/nameres/collector.rs",
+    // Long sourcegen test to generate lint completions.
+    "ide_completion/src/tests/sourcegen.rs",
     // Obviously needs ignore.
     "ide_assists/src/handlers/toggle_ignore.rs",
     // See above.
@@ -498,3 +481,31 @@ fn find_mark<'a>(text: &'a str, mark: &'static str) -> Option<&'a str> {
     let text = &text[..idx];
     Some(text)
 }
+
+fn rust_files() -> impl Iterator<Item = PathBuf> {
+    rust_files_in(&project_root().join("crates"))
+}
+
+fn cargo_files() -> impl Iterator<Item = PathBuf> {
+    files_in(&project_root(), "toml")
+        .filter(|path| path.file_name().map(|it| it == "Cargo.toml").unwrap_or(false))
+}
+
+fn rust_files_in(path: &Path) -> impl Iterator<Item = PathBuf> {
+    files_in(path, "rs")
+}
+
+fn files_in(path: &Path, ext: &'static str) -> impl Iterator<Item = PathBuf> {
+    let iter = WalkDir::new(path);
+    return iter
+        .into_iter()
+        .filter_entry(|e| !is_hidden(e))
+        .map(|e| e.unwrap())
+        .filter(|e| !e.file_type().is_dir())
+        .map(|e| e.into_path())
+        .filter(move |path| path.extension().map(|it| it == ext).unwrap_or(false));
+
+    fn is_hidden(entry: &DirEntry) -> bool {
+        entry.file_name().to_str().map(|s| s.starts_with('.')).unwrap_or(false)
+    }
+}