internal: overhaul code generation

* Keep codegen adjacent to the relevant crates.
* Remove codegen deps from xtask, speeding up from-source installation.

This regresses the release process a bit, as it now needs to run the
tests (and, by extension, compile the code).
Author: Aleksey Kladov
Date:   2021-07-03 22:11:03 +03:00
Parent: 668d061245
Commit: 58d2ece88a
31 changed files with 686 additions and 659 deletions
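
The gist of the new pattern: code generation lives next to the crate it serves, as an ordinary `#[test]` named `sourcegen_*`, built on the new `sourcegen` crate added below. A minimal sketch of such a test, assuming a hypothetical `example` crate (the test name and output path are illustrative, not part of this commit):

    #[test]
    fn sourcegen_example() {
        // Build the generated text however the crate needs.
        let text = String::from("pub const ANSWER: u8 = 42;\n");
        // rustfmt the output and stamp it with a "do not edit" preamble.
        let contents = sourcegen::add_preamble("sourcegen_example", sourcegen::reformat(text));
        // Update the checked-in file, failing the test if it was stale.
        sourcegen::ensure_file_contents(
            &sourcegen::project_root().join("crates/example/src/generated.rs"),
            &contents,
        );
    }

Running `cargo test` locally thus regenerates stale files in place; on CI the test simply fails.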

Cargo.lock (generated)

@@ -600,6 +600,7 @@ dependencies = [
"itertools",
"profile",
"rustc-hash",
"sourcegen",
"stdx",
"syntax",
"test_utils",
@@ -621,10 +622,12 @@ dependencies = [
"once_cell",
"profile",
"rustc-hash",
"sourcegen",
"stdx",
"syntax",
"test_utils",
"text_edit",
"xshell",
]
[[package]]
@@ -662,6 +665,7 @@ dependencies = [
"itertools",
"profile",
"rustc-hash",
"sourcegen",
"stdx",
"syntax",
"test_utils",
@@ -1323,6 +1327,7 @@ dependencies = [
"serde",
"serde_json",
"serde_path_to_error",
"sourcegen",
"stdx",
"syntax",
"test_utils",
@@ -1518,6 +1523,13 @@ version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "45456094d1983e2ee2a18fdfebce3189fa451699d0502cb8e3b49dba5ba41451"
[[package]]
name = "sourcegen"
version = "0.0.0"
dependencies = [
"xshell",
]
[[package]]
name = "stdx"
version = "0.0.0"
@@ -1563,17 +1575,20 @@ dependencies = [
"itertools",
"once_cell",
"parser",
"proc-macro2",
"profile",
"quote",
"rayon",
"rowan",
"rustc-ap-rustc_lexer",
"rustc-hash",
"serde",
"smol_str",
"sourcegen",
"stdx",
"test_utils",
"text_edit",
"walkdir",
"ungrammar",
]
[[package]]
@@ -1942,9 +1957,6 @@ version = "0.1.0"
dependencies = [
"anyhow",
"flate2",
"proc-macro2",
"quote",
"ungrammar",
"walkdir",
"write-json",
"xflags",


@@ -24,4 +24,5 @@ hir = { path = "../hir", version = "0.0.0" }
[dev-dependencies]
test_utils = { path = "../test_utils" }
sourcegen = { path = "../sourcegen" }
expect-test = "1.1"


@@ -1,3 +1,4 @@
mod sourcegen;
mod generated;
use expect_test::expect;


@@ -1,4 +1,4 @@
//! Generated file, do not edit by hand, see `xtask/src/codegen`
//! Generated by `sourcegen_assists_docs`, do not edit by hand.
use super::check_doc_test;


@@ -1,49 +1,84 @@
//! Generates `assists.md` documentation.
use std::{fmt, path::Path};
use std::{fmt, fs, path::Path};
use xshell::write_file;
use test_utils::project_root;
use crate::{
codegen::{self, extract_comment_blocks_with_empty_lines, reformat, Location, PREAMBLE},
project_root, rust_files_in, Result,
};
#[test]
fn sourcegen_assists_docs() {
let assists = Assist::collect();
pub(crate) fn generate_assists_tests() -> Result<()> {
let assists = Assist::collect()?;
generate_tests(&assists)
}
{
// Generate doctests.
pub(crate) fn generate_assists_docs() -> Result<()> {
let assists = Assist::collect()?;
let contents = assists.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n");
let contents = format!("//{}\n{}\n", PREAMBLE, contents.trim());
let dst = project_root().join("docs/user/generated_assists.adoc");
write_file(dst, &contents)?;
Ok(())
let mut buf = "
use super::check_doc_test;
"
.to_string();
for assist in assists.iter() {
let test = format!(
r######"
#[test]
fn doctest_{}() {{
check_doc_test(
"{}",
r#####"
{}"#####, r#####"
{}"#####)
}}
"######,
assist.id,
assist.id,
reveal_hash_comments(&assist.before),
reveal_hash_comments(&assist.after)
);
buf.push_str(&test)
}
let buf = sourcegen::add_preamble("sourcegen_assists_docs", sourcegen::reformat(buf));
sourcegen::ensure_file_contents(
&project_root().join("crates/ide_assists/src/tests/generated.rs"),
&buf,
);
}
{
// Generate assists manual. Note that we do _not_ commit manual to the
// git repo. Instead, `cargo xtask release` runs this test before making
// a release.
let contents = sourcegen::add_preamble(
"sourcegen_assists_docs",
assists.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n"),
);
let dst = project_root().join("docs/user/generated_assists.adoc");
fs::write(dst, contents).unwrap();
}
}
#[derive(Debug)]
struct Assist {
id: String,
location: Location,
location: sourcegen::Location,
doc: String,
before: String,
after: String,
}
impl Assist {
fn collect() -> Result<Vec<Assist>> {
fn collect() -> Vec<Assist> {
let handlers_dir = project_root().join("crates/ide_assists/src/handlers");
let mut res = Vec::new();
for path in rust_files_in(&project_root().join("crates/ide_assists/src/handlers")) {
collect_file(&mut res, path.as_path())?;
for path in sourcegen::list_rust_files(&handlers_dir) {
collect_file(&mut res, path.as_path());
}
res.sort_by(|lhs, rhs| lhs.id.cmp(&rhs.id));
return Ok(res);
return res;
fn collect_file(acc: &mut Vec<Assist>, path: &Path) -> Result<()> {
let text = xshell::read_file(path)?;
let comment_blocks = extract_comment_blocks_with_empty_lines("Assist", &text);
fn collect_file(acc: &mut Vec<Assist>, path: &Path) {
let text = fs::read_to_string(path).unwrap();
let comment_blocks = sourcegen::CommentBlock::extract("Assist", &text);
for block in comment_blocks {
// FIXME: doesn't support blank lines yet, need to tweak
@@ -68,21 +103,20 @@ impl Assist {
assert_eq!(lines.next().unwrap().as_str(), "->");
assert_eq!(lines.next().unwrap().as_str(), "```");
let after = take_until(lines.by_ref(), "```");
let location = Location::new(path.to_path_buf(), block.line);
let location = sourcegen::Location { file: path.to_path_buf(), line: block.line };
acc.push(Assist { id, location, doc, before, after })
}
}
fn take_until<'a>(lines: impl Iterator<Item = &'a String>, marker: &str) -> String {
let mut buf = Vec::new();
for line in lines {
if line == marker {
break;
}
buf.push(line.clone());
fn take_until<'a>(lines: impl Iterator<Item = &'a String>, marker: &str) -> String {
let mut buf = Vec::new();
for line in lines {
if line == marker {
break;
}
buf.join("\n")
buf.push(line.clone());
}
Ok(())
buf.join("\n")
}
}
}
@@ -114,36 +148,6 @@ impl fmt::Display for Assist {
}
}
fn generate_tests(assists: &[Assist]) -> Result<()> {
let mut buf = String::from("use super::check_doc_test;\n");
for assist in assists.iter() {
let test = format!(
r######"
#[test]
fn doctest_{}() {{
check_doc_test(
"{}",
r#####"
{}"#####, r#####"
{}"#####)
}}
"######,
assist.id,
assist.id,
reveal_hash_comments(&assist.before),
reveal_hash_comments(&assist.after)
);
buf.push_str(&test)
}
let buf = reformat(&buf)?;
codegen::ensure_file_contents(
&project_root().join("crates/ide_assists/src/tests/generated.rs"),
&buf,
)
}
fn hide_hash_comments(text: &str) -> String {
text.split('\n') // want final newline
.filter(|&it| !(it.starts_with("# ") || it == "#"))


@@ -29,5 +29,8 @@ profile = { path = "../profile", version = "0.0.0" }
hir = { path = "../hir", version = "0.0.0" }
[dev-dependencies]
test_utils = { path = "../test_utils" }
expect-test = "1.1"
xshell = "0.1"
test_utils = { path = "../test_utils" }
sourcegen = { path = "../sourcegen" }


@@ -10,6 +10,7 @@ mod items;
mod pattern;
mod type_pos;
mod predicate;
mod sourcegen;
use std::mem;


@@ -1,53 +1,53 @@
//! Generates descriptors structure for unstable feature from Unstable Book
use std::borrow::Cow;
use std::fmt::Write;
use std::path::{Path, PathBuf};
use std::{
borrow::Cow,
fs,
path::{Path, PathBuf},
};
use walkdir::WalkDir;
use xshell::{cmd, read_file};
use stdx::format_to;
use test_utils::project_root;
use xshell::cmd;
use crate::codegen::{ensure_file_contents, project_root, reformat, Result};
pub(crate) fn generate_lint_completions() -> Result<()> {
/// This clones rustc repo, and so is not worth to keep up-to-date. We update
/// manually by un-ignoring the test from time to time.
#[test]
#[ignore]
fn sourcegen_lint_completions() {
if !project_root().join("./target/rust").exists() {
cmd!("git clone --depth=1 https://github.com/rust-lang/rust ./target/rust").run()?;
cmd!("git clone --depth=1 https://github.com/rust-lang/rust ./target/rust").run().unwrap();
}
let mut contents = String::from(
r#"pub struct Lint {
let mut contents = r"
pub struct Lint {
pub label: &'static str,
pub description: &'static str,
}
"#,
);
generate_lint_descriptor(&mut contents)?;
"
.to_string();
generate_lint_descriptor(&mut contents);
contents.push('\n');
generate_feature_descriptor(&mut contents, "./target/rust/src/doc/unstable-book/src".into())?;
generate_feature_descriptor(&mut contents, "./target/rust/src/doc/unstable-book/src".into());
contents.push('\n');
cmd!("curl https://rust-lang.github.io/rust-clippy/master/lints.json --output ./target/clippy_lints.json").run()?;
generate_descriptor_clippy(&mut contents, Path::new("./target/clippy_lints.json"))?;
let contents = reformat(&contents)?;
cmd!("curl https://rust-lang.github.io/rust-clippy/master/lints.json --output ./target/clippy_lints.json").run().unwrap();
generate_descriptor_clippy(&mut contents, Path::new("./target/clippy_lints.json"));
let contents =
sourcegen::add_preamble("sourcegen_lint_completions", sourcegen::reformat(contents));
let destination = project_root().join("crates/ide_db/src/helpers/generated_lints.rs");
ensure_file_contents(destination.as_path(), &contents)?;
Ok(())
sourcegen::ensure_file_contents(destination.as_path(), &contents);
}
fn generate_lint_descriptor(buf: &mut String) -> Result<()> {
let stdout = cmd!("rustc -W help").read()?;
let start_lints =
stdout.find("---- ------- -------").ok_or_else(|| anyhow::format_err!(""))?;
let start_lint_groups =
stdout.find("---- ---------").ok_or_else(|| anyhow::format_err!(""))?;
let end_lints =
stdout.find("Lint groups provided by rustc:").ok_or_else(|| anyhow::format_err!(""))?;
fn generate_lint_descriptor(buf: &mut String) {
let stdout = cmd!("rustc -W help").read().unwrap();
let start_lints = stdout.find("---- ------- -------").unwrap();
let start_lint_groups = stdout.find("---- ---------").unwrap();
let end_lints = stdout.find("Lint groups provided by rustc:").unwrap();
let end_lint_groups = stdout
.find("Lint tools like Clippy can provide additional lints and lint groups.")
.ok_or_else(|| anyhow::format_err!(""))?;
.unwrap();
buf.push_str(r#"pub const DEFAULT_LINTS: &[Lint] = &["#);
buf.push('\n');
let mut lints = stdout[start_lints..end_lints]
@@ -75,32 +75,31 @@ fn generate_lint_descriptor(buf: &mut String) -> Result<()> {
push_lint_completion(buf, &name.replace("-", "_"), &description)
});
buf.push_str("];\n");
Ok(())
}
fn generate_feature_descriptor(buf: &mut String, src_dir: PathBuf) -> Result<()> {
buf.push_str(r#"pub const FEATURES: &[Lint] = &["#);
buf.push('\n');
let mut vec = ["language-features", "library-features"]
fn generate_feature_descriptor(buf: &mut String, src_dir: PathBuf) {
let mut features = ["language-features", "library-features"]
.iter()
.flat_map(|it| WalkDir::new(src_dir.join(it)))
.filter_map(|e| e.ok())
.filter(|entry| {
.flat_map(|it| sourcegen::list_files(&src_dir.join(it)))
.filter(|path| {
// Get all `.md ` files
entry.file_type().is_file() && entry.path().extension().unwrap_or_default() == "md"
path.extension().unwrap_or_default().to_str().unwrap_or_default() == "md"
})
.map(|entry| {
let path = entry.path();
.map(|path| {
let feature_ident = path.file_stem().unwrap().to_str().unwrap().replace("-", "_");
let doc = read_file(path).unwrap();
let doc = fs::read_to_string(path).unwrap();
(feature_ident, doc)
})
.collect::<Vec<_>>();
vec.sort_by(|(feature_ident, _), (feature_ident2, _)| feature_ident.cmp(feature_ident2));
vec.into_iter()
.for_each(|(feature_ident, doc)| push_lint_completion(buf, &feature_ident, &doc));
features.sort_by(|(feature_ident, _), (feature_ident2, _)| feature_ident.cmp(feature_ident2));
for (feature_ident, doc) in features.into_iter() {
push_lint_completion(buf, &feature_ident, &doc)
}
buf.push_str(r#"pub const FEATURES: &[Lint] = &["#);
buf.push('\n');
buf.push_str("];\n");
Ok(())
}
#[derive(Default)]
@@ -113,9 +112,9 @@ fn unescape(s: &str) -> String {
s.replace(r#"\""#, "").replace(r#"\n"#, "\n").replace(r#"\r"#, "")
}
fn generate_descriptor_clippy(buf: &mut String, path: &Path) -> Result<()> {
let file_content = read_file(path)?;
let mut clippy_lints: Vec<ClippyLint> = vec![];
fn generate_descriptor_clippy(buf: &mut String, path: &Path) {
let file_content = std::fs::read_to_string(path).unwrap();
let mut clippy_lints: Vec<ClippyLint> = Vec::new();
for line in file_content.lines().map(|line| line.trim()) {
if line.starts_with(r#""id":"#) {
@@ -144,27 +143,25 @@ fn generate_descriptor_clippy(buf: &mut String, path: &Path) -> Result<()> {
}
}
clippy_lints.sort_by(|lint, lint2| lint.id.cmp(&lint2.id));
buf.push_str(r#"pub const CLIPPY_LINTS: &[Lint] = &["#);
buf.push('\n');
clippy_lints.into_iter().for_each(|clippy_lint| {
for clippy_lint in clippy_lints.into_iter() {
let lint_ident = format!("clippy::{}", clippy_lint.id);
let doc = clippy_lint.help;
push_lint_completion(buf, &lint_ident, &doc);
});
}
buf.push_str("];\n");
Ok(())
}
fn push_lint_completion(buf: &mut String, label: &str, description: &str) {
writeln!(
format_to!(
buf,
r###" Lint {{
label: "{}",
description: r##"{}"##
}},"###,
label, description
)
.unwrap();
label,
description
);
}
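
For reference, each `push_lint_completion` call above appends one entry to the tables in the generated `generated_lints.rs`; the emitted text has this shape (the lint name and description below are illustrative):

    pub const DEFAULT_LINTS: &[Lint] = &[
        Lint {
            label: "dead_code",
            description: r##"detect unused, unexported items"##
        },
        // ...
    ];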


@@ -27,3 +27,4 @@ ide_db = { path = "../ide_db", version = "0.0.0" }
expect-test = "1.1"
test_utils = { path = "../test_utils" }
sourcegen = { path = "../sourcegen" }


@@ -49,6 +49,9 @@ mod handlers {
pub(crate) mod unlinked_file;
}
#[cfg(test)]
mod tests;
use hir::{diagnostics::AnyDiagnostic, Semantics};
use ide_db::{
assists::{Assist, AssistId, AssistKind, AssistResolveStrategy},
@@ -223,152 +226,3 @@ fn unresolved_fix(id: &'static str, label: &str, target: TextRange) -> Assist {
source_change: None,
}
}
#[cfg(test)]
mod tests {
use expect_test::Expect;
use ide_db::{
assists::AssistResolveStrategy,
base_db::{fixture::WithFixture, SourceDatabaseExt},
RootDatabase,
};
use stdx::trim_indent;
use test_utils::{assert_eq_text, extract_annotations};
use crate::{DiagnosticsConfig, Severity};
/// Takes a multi-file input fixture with annotated cursor positions,
/// and checks that:
/// * a diagnostic is produced
/// * the first diagnostic fix trigger range touches the input cursor position
/// * that the contents of the file containing the cursor match `after` after the diagnostic fix is applied
#[track_caller]
pub(crate) fn check_fix(ra_fixture_before: &str, ra_fixture_after: &str) {
check_nth_fix(0, ra_fixture_before, ra_fixture_after);
}
/// Takes a multi-file input fixture with annotated cursor positions,
/// and checks that:
/// * a diagnostic is produced
/// * every diagnostic fixes trigger range touches the input cursor position
/// * that the contents of the file containing the cursor match `after` after each diagnostic fix is applied
pub(crate) fn check_fixes(ra_fixture_before: &str, ra_fixtures_after: Vec<&str>) {
for (i, ra_fixture_after) in ra_fixtures_after.iter().enumerate() {
check_nth_fix(i, ra_fixture_before, ra_fixture_after)
}
}
#[track_caller]
fn check_nth_fix(nth: usize, ra_fixture_before: &str, ra_fixture_after: &str) {
let after = trim_indent(ra_fixture_after);
let (db, file_position) = RootDatabase::with_position(ra_fixture_before);
let diagnostic = super::diagnostics(
&db,
&DiagnosticsConfig::default(),
&AssistResolveStrategy::All,
file_position.file_id,
)
.pop()
.expect("no diagnostics");
let fix = &diagnostic.fixes.expect("diagnostic misses fixes")[nth];
let actual = {
let source_change = fix.source_change.as_ref().unwrap();
let file_id = *source_change.source_file_edits.keys().next().unwrap();
let mut actual = db.file_text(file_id).to_string();
for edit in source_change.source_file_edits.values() {
edit.apply(&mut actual);
}
actual
};
assert_eq_text!(&after, &actual);
assert!(
fix.target.contains_inclusive(file_position.offset),
"diagnostic fix range {:?} does not touch cursor position {:?}",
fix.target,
file_position.offset
);
}
/// Checks that there's a diagnostic *without* fix at `$0`.
pub(crate) fn check_no_fix(ra_fixture: &str) {
let (db, file_position) = RootDatabase::with_position(ra_fixture);
let diagnostic = super::diagnostics(
&db,
&DiagnosticsConfig::default(),
&AssistResolveStrategy::All,
file_position.file_id,
)
.pop()
.unwrap();
assert!(diagnostic.fixes.is_none(), "got a fix when none was expected: {:?}", diagnostic);
}
pub(crate) fn check_expect(ra_fixture: &str, expect: Expect) {
let (db, file_id) = RootDatabase::with_single_file(ra_fixture);
let diagnostics = super::diagnostics(
&db,
&DiagnosticsConfig::default(),
&AssistResolveStrategy::All,
file_id,
);
expect.assert_debug_eq(&diagnostics)
}
#[track_caller]
pub(crate) fn check_diagnostics(ra_fixture: &str) {
let mut config = DiagnosticsConfig::default();
config.disabled.insert("inactive-code".to_string());
check_diagnostics_with_config(config, ra_fixture)
}
#[track_caller]
pub(crate) fn check_diagnostics_with_config(config: DiagnosticsConfig, ra_fixture: &str) {
let (db, files) = RootDatabase::with_many_files(ra_fixture);
for file_id in files {
let diagnostics =
super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_id);
let expected = extract_annotations(&*db.file_text(file_id));
let mut actual = diagnostics
.into_iter()
.map(|d| {
let mut annotation = String::new();
if let Some(fixes) = &d.fixes {
assert!(!fixes.is_empty());
annotation.push_str("💡 ")
}
annotation.push_str(match d.severity {
Severity::Error => "error",
Severity::WeakWarning => "weak",
});
annotation.push_str(": ");
annotation.push_str(&d.message);
(d.range, annotation)
})
.collect::<Vec<_>>();
actual.sort_by_key(|(range, _)| range.start());
assert_eq!(expected, actual);
}
}
#[test]
fn test_disabled_diagnostics() {
let mut config = DiagnosticsConfig::default();
config.disabled.insert("unresolved-module".into());
let (db, file_id) = RootDatabase::with_single_file(r#"mod foo;"#);
let diagnostics = super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_id);
assert!(diagnostics.is_empty());
let diagnostics = super::diagnostics(
&db,
&DiagnosticsConfig::default(),
&AssistResolveStrategy::All,
file_id,
);
assert!(!diagnostics.is_empty());
}
}


@@ -0,0 +1,146 @@
mod sourcegen;
use expect_test::Expect;
use ide_db::{
assists::AssistResolveStrategy,
base_db::{fixture::WithFixture, SourceDatabaseExt},
RootDatabase,
};
use stdx::trim_indent;
use test_utils::{assert_eq_text, extract_annotations};
use crate::{DiagnosticsConfig, Severity};
/// Takes a multi-file input fixture with annotated cursor positions,
/// and checks that:
/// * a diagnostic is produced
/// * the first diagnostic fix trigger range touches the input cursor position
/// * that the contents of the file containing the cursor match `after` after the diagnostic fix is applied
#[track_caller]
pub(crate) fn check_fix(ra_fixture_before: &str, ra_fixture_after: &str) {
check_nth_fix(0, ra_fixture_before, ra_fixture_after);
}
/// Takes a multi-file input fixture with annotated cursor positions,
/// and checks that:
/// * a diagnostic is produced
/// * every diagnostic fixes trigger range touches the input cursor position
/// * that the contents of the file containing the cursor match `after` after each diagnostic fix is applied
pub(crate) fn check_fixes(ra_fixture_before: &str, ra_fixtures_after: Vec<&str>) {
for (i, ra_fixture_after) in ra_fixtures_after.iter().enumerate() {
check_nth_fix(i, ra_fixture_before, ra_fixture_after)
}
}
#[track_caller]
fn check_nth_fix(nth: usize, ra_fixture_before: &str, ra_fixture_after: &str) {
let after = trim_indent(ra_fixture_after);
let (db, file_position) = RootDatabase::with_position(ra_fixture_before);
let diagnostic = super::diagnostics(
&db,
&DiagnosticsConfig::default(),
&AssistResolveStrategy::All,
file_position.file_id,
)
.pop()
.expect("no diagnostics");
let fix = &diagnostic.fixes.expect("diagnostic misses fixes")[nth];
let actual = {
let source_change = fix.source_change.as_ref().unwrap();
let file_id = *source_change.source_file_edits.keys().next().unwrap();
let mut actual = db.file_text(file_id).to_string();
for edit in source_change.source_file_edits.values() {
edit.apply(&mut actual);
}
actual
};
assert_eq_text!(&after, &actual);
assert!(
fix.target.contains_inclusive(file_position.offset),
"diagnostic fix range {:?} does not touch cursor position {:?}",
fix.target,
file_position.offset
);
}
/// Checks that there's a diagnostic *without* fix at `$0`.
pub(crate) fn check_no_fix(ra_fixture: &str) {
let (db, file_position) = RootDatabase::with_position(ra_fixture);
let diagnostic = super::diagnostics(
&db,
&DiagnosticsConfig::default(),
&AssistResolveStrategy::All,
file_position.file_id,
)
.pop()
.unwrap();
assert!(diagnostic.fixes.is_none(), "got a fix when none was expected: {:?}", diagnostic);
}
pub(crate) fn check_expect(ra_fixture: &str, expect: Expect) {
let (db, file_id) = RootDatabase::with_single_file(ra_fixture);
let diagnostics = super::diagnostics(
&db,
&DiagnosticsConfig::default(),
&AssistResolveStrategy::All,
file_id,
);
expect.assert_debug_eq(&diagnostics)
}
#[track_caller]
pub(crate) fn check_diagnostics(ra_fixture: &str) {
let mut config = DiagnosticsConfig::default();
config.disabled.insert("inactive-code".to_string());
check_diagnostics_with_config(config, ra_fixture)
}
#[track_caller]
pub(crate) fn check_diagnostics_with_config(config: DiagnosticsConfig, ra_fixture: &str) {
let (db, files) = RootDatabase::with_many_files(ra_fixture);
for file_id in files {
let diagnostics = super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_id);
let expected = extract_annotations(&*db.file_text(file_id));
let mut actual = diagnostics
.into_iter()
.map(|d| {
let mut annotation = String::new();
if let Some(fixes) = &d.fixes {
assert!(!fixes.is_empty());
annotation.push_str("💡 ")
}
annotation.push_str(match d.severity {
Severity::Error => "error",
Severity::WeakWarning => "weak",
});
annotation.push_str(": ");
annotation.push_str(&d.message);
(d.range, annotation)
})
.collect::<Vec<_>>();
actual.sort_by_key(|(range, _)| range.start());
assert_eq!(expected, actual);
}
}
#[test]
fn test_disabled_diagnostics() {
let mut config = DiagnosticsConfig::default();
config.disabled.insert("unresolved-module".into());
let (db, file_id) = RootDatabase::with_single_file(r#"mod foo;"#);
let diagnostics = super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_id);
assert!(diagnostics.is_empty());
let diagnostics = super::diagnostics(
&db,
&DiagnosticsConfig::default(),
&AssistResolveStrategy::All,
file_id,
);
assert!(!diagnostics.is_empty());
}
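
For context, fixtures for `check_diagnostics` carry their expectations as inline annotations, which `extract_annotations` collects; the `💡 ` prefix marks a diagnostic that offers a fix. A sketch of such a fixture (the exact range and message are illustrative, not taken from this commit):

    check_diagnostics(
        r#"
  mod foo;
//^^^^^^^^ 💡 error: unresolved module
"#,
    );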


@@ -1,43 +1,40 @@
//! Generates `assists.md` documentation.
use std::{fmt, path::PathBuf};
use std::{fmt, fs, io, path::PathBuf};
use xshell::write_file;
use sourcegen::project_root;
use crate::{
codegen::{extract_comment_blocks_with_empty_lines, Location, PREAMBLE},
project_root, rust_files, Result,
};
pub(crate) fn generate_diagnostic_docs() -> Result<()> {
let diagnostics = Diagnostic::collect()?;
#[test]
fn sourcegen_diagnostic_docs() {
let diagnostics = Diagnostic::collect().unwrap();
let contents =
diagnostics.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n");
let contents = format!("//{}\n{}\n", PREAMBLE, contents.trim());
let contents = sourcegen::add_preamble("sourcegen_diagnostic_docs", contents);
let dst = project_root().join("docs/user/generated_diagnostic.adoc");
write_file(&dst, &contents)?;
Ok(())
fs::write(&dst, &contents).unwrap();
}
#[derive(Debug)]
struct Diagnostic {
id: String,
location: Location,
location: sourcegen::Location,
doc: String,
}
impl Diagnostic {
fn collect() -> Result<Vec<Diagnostic>> {
fn collect() -> io::Result<Vec<Diagnostic>> {
let handlers_dir = project_root().join("crates/ide_diagnostics/src/handlers");
let mut res = Vec::new();
for path in rust_files() {
for path in sourcegen::list_rust_files(&handlers_dir) {
collect_file(&mut res, path)?;
}
res.sort_by(|lhs, rhs| lhs.id.cmp(&rhs.id));
return Ok(res);
fn collect_file(acc: &mut Vec<Diagnostic>, path: PathBuf) -> Result<()> {
let text = xshell::read_file(&path)?;
let comment_blocks = extract_comment_blocks_with_empty_lines("Diagnostic", &text);
fn collect_file(acc: &mut Vec<Diagnostic>, path: PathBuf) -> io::Result<()> {
let text = fs::read_to_string(&path)?;
let comment_blocks = sourcegen::CommentBlock::extract("Diagnostic", &text);
for block in comment_blocks {
let id = block.id;
@@ -45,7 +42,7 @@ impl Diagnostic {
panic!("invalid diagnostic name: {:?}:\n {}", id, msg)
}
let doc = block.contents.join("\n");
let location = Location::new(path.clone(), block.line);
let location = sourcegen::Location { file: path.clone(), line: block.line };
acc.push(Diagnostic { id, location, doc })
}


@@ -1,4 +1,4 @@
//! Generated file, do not edit by hand, see `xtask/src/codegen`
//! Generated by `sourcegen_ast`, do not edit by hand.
#![allow(bad_style, missing_docs, unreachable_pub)]
#[doc = r" The kind of syntax node, e.g. `IDENT`, `USE_KW`, or `STRUCT`."]


@@ -66,7 +66,9 @@ jemallocator = { version = "0.4.1", package = "tikv-jemallocator", optional = tr
[dev-dependencies]
expect-test = "1.1"
test_utils = { path = "../test_utils" }
sourcegen = { path = "../sourcegen" }
mbe = { path = "../mbe" }
tt = { path = "../tt" }


@@ -8,6 +8,7 @@
//! specific JSON shapes here -- there's little value in such tests, as we can't
//! be sure without a real client anyway.
mod sourcegen;
mod testdir;
mod support;


@@ -1,42 +1,43 @@
//! Generates `assists.md` documentation.
use std::{fmt, path::PathBuf};
use std::{fmt, fs, io, path::PathBuf};
use xshell::write_file;
use crate::{
codegen::{extract_comment_blocks_with_empty_lines, Location, PREAMBLE},
project_root, rust_files, Result,
};
pub(crate) fn generate_feature_docs() -> Result<()> {
let features = Feature::collect()?;
#[test]
fn sourcegen_feature_docs() {
let features = Feature::collect().unwrap();
let contents = features.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n");
let contents = format!("//{}\n{}\n", PREAMBLE, contents.trim());
let dst = project_root().join("docs/user/generated_features.adoc");
write_file(&dst, &contents)?;
Ok(())
let contents = format!(
"
// Generated file, do not edit by hand, see `sourcegen_feature_docs`.
{}
",
contents.trim()
);
let dst = sourcegen::project_root().join("docs/user/generated_features.adoc");
fs::write(&dst, &contents).unwrap();
}
#[derive(Debug)]
struct Feature {
id: String,
location: Location,
location: sourcegen::Location,
doc: String,
}
impl Feature {
fn collect() -> Result<Vec<Feature>> {
fn collect() -> io::Result<Vec<Feature>> {
let crates_dir = sourcegen::project_root().join("crates");
let mut res = Vec::new();
for path in rust_files() {
for path in sourcegen::list_rust_files(&crates_dir) {
collect_file(&mut res, path)?;
}
res.sort_by(|lhs, rhs| lhs.id.cmp(&rhs.id));
return Ok(res);
fn collect_file(acc: &mut Vec<Feature>, path: PathBuf) -> Result<()> {
let text = xshell::read_file(&path)?;
let comment_blocks = extract_comment_blocks_with_empty_lines("Feature", &text);
fn collect_file(acc: &mut Vec<Feature>, path: PathBuf) -> io::Result<()> {
let text = std::fs::read_to_string(&path)?;
let comment_blocks = sourcegen::CommentBlock::extract("Feature", &text);
for block in comment_blocks {
let id = block.id;
@@ -44,7 +45,7 @@ impl Feature {
panic!("invalid feature name: {:?}:\n {}", id, msg)
}
let doc = block.contents.join("\n");
let location = Location::new(path.clone(), block.line);
let location = sourcegen::Location { file: path.clone(), line: block.line };
acc.push(Feature { id, location, doc })
}


@@ -0,0 +1,13 @@
[package]
name = "sourcegen"
version = "0.0.0"
description = "TBD"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer developers"]
edition = "2018"
[lib]
doctest = false
[dependencies]
xshell = "0.1"

crates/sourcegen/src/lib.rs (new file)

@@ -0,0 +1,195 @@
//! rust-analyzer relies heavily on source code generation.
//!
//! Things like feature documentation or assist tests are implemented by
//! processing rust-analyzer's own source code and generating the appropriate
//! output. See `sourcegen_` tests in various crates.
//!
//! This crate contains utilities to make this kind of source-gen easy.
use std::{
fmt, fs, mem,
path::{Path, PathBuf},
};
use xshell::{cmd, pushenv};
pub fn list_rust_files(dir: &Path) -> Vec<PathBuf> {
let mut res = list_files(dir);
res.retain(|it| {
it.file_name().unwrap_or_default().to_str().unwrap_or_default().ends_with(".rs")
});
res
}
pub fn list_files(dir: &Path) -> Vec<PathBuf> {
let mut res = Vec::new();
let mut work = vec![dir.to_path_buf()];
while let Some(dir) = work.pop() {
for entry in dir.read_dir().unwrap() {
let entry = entry.unwrap();
let file_type = entry.file_type().unwrap();
let path = entry.path();
let is_hidden =
path.file_name().unwrap_or_default().to_str().unwrap_or_default().starts_with('.');
if !is_hidden {
if file_type.is_dir() {
work.push(path)
} else if file_type.is_file() {
res.push(path)
}
}
}
}
res
}
pub struct CommentBlock {
pub id: String,
pub line: usize,
pub contents: Vec<String>,
}
impl CommentBlock {
pub fn extract(tag: &str, text: &str) -> Vec<CommentBlock> {
assert!(tag.starts_with(char::is_uppercase));
let tag = format!("{}:", tag);
let mut res = Vec::new();
for (line, mut block) in do_extract_comment_blocks(text, true) {
let first = block.remove(0);
if let Some(id) = first.strip_prefix(&tag) {
let id = id.trim().to_string();
let block = CommentBlock { id, line, contents: block };
res.push(block);
}
}
res
}
pub fn extract_untagged(text: &str) -> Vec<CommentBlock> {
let mut res = Vec::new();
for (line, block) in do_extract_comment_blocks(text, false) {
let id = String::new();
let block = CommentBlock { id, line, contents: block };
res.push(block);
}
res
}
}
fn do_extract_comment_blocks(
text: &str,
allow_blocks_with_empty_lines: bool,
) -> Vec<(usize, Vec<String>)> {
let mut res = Vec::new();
let prefix = "// ";
let lines = text.lines().map(str::trim_start);
let mut block = (0, vec![]);
for (line_num, line) in lines.enumerate() {
if line == "//" && allow_blocks_with_empty_lines {
block.1.push(String::new());
continue;
}
let is_comment = line.starts_with(prefix);
if is_comment {
block.1.push(line[prefix.len()..].to_string());
} else {
if !block.1.is_empty() {
res.push(mem::take(&mut block));
}
block.0 = line_num + 2;
}
}
if !block.1.is_empty() {
res.push(block)
}
res
}
#[derive(Debug)]
pub struct Location {
pub file: PathBuf,
pub line: usize,
}
impl fmt::Display for Location {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let path = self.file.strip_prefix(&project_root()).unwrap().display().to_string();
let path = path.replace('\\', "/");
let name = self.file.file_name().unwrap();
write!(
f,
"https://github.com/rust-analyzer/rust-analyzer/blob/master/{}#L{}[{}]",
path,
self.line,
name.to_str().unwrap()
)
}
}
fn ensure_rustfmt() {
let version = cmd!("rustfmt --version").read().unwrap_or_default();
if !version.contains("stable") {
panic!(
"Failed to run rustfmt from toolchain 'stable'. \
Please run `rustup component add rustfmt --toolchain stable` to install it.",
)
}
}
pub fn reformat(text: String) -> String {
let _e = pushenv("RUSTUP_TOOLCHAIN", "stable");
ensure_rustfmt();
let rustfmt_toml = project_root().join("rustfmt.toml");
let mut stdout = cmd!("rustfmt --config-path {rustfmt_toml} --config fn_single_line=true")
.stdin(text)
.read()
.unwrap();
if !stdout.ends_with('\n') {
stdout.push('\n');
}
stdout
}
pub fn add_preamble(generator: &'static str, mut text: String) -> String {
let preamble = format!("//! Generated by `{}`, do not edit by hand.\n\n", generator);
text.insert_str(0, &preamble);
text
}
/// Checks that the `file` has the specified `contents`. If that is not the
/// case, updates the file and then fails the test.
pub fn ensure_file_contents(file: &Path, contents: &str) {
if let Ok(old_contents) = fs::read_to_string(file) {
if normalize_newlines(&old_contents) == normalize_newlines(contents) {
// File is already up to date.
return;
}
}
let display_path = file.strip_prefix(&project_root()).unwrap_or(file);
eprintln!(
"\n\x1b[31;1merror\x1b[0m: {} was not up-to-date, updating\n",
display_path.display()
);
if std::env::var("CI").is_ok() {
eprintln!(" NOTE: run `cargo test` locally and commit the updated files\n");
}
if let Some(parent) = file.parent() {
let _ = fs::create_dir_all(parent);
}
fs::write(file, contents).unwrap();
panic!("some file was not up to date and has been updated, simply re-run the tests")
}
fn normalize_newlines(s: &str) -> String {
s.replace("\r\n", "\n")
}
pub fn project_root() -> PathBuf {
let dir = env!("CARGO_MANIFEST_DIR");
PathBuf::from(dir).parent().unwrap().parent().unwrap().to_owned()
}
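
A small usage sketch of the comment-block API above (the tag and text here are made up):

    let text = "
// Feature: Magic Completion
//
// Types for you.
fn magic() {}
";
    let blocks = sourcegen::CommentBlock::extract("Feature", text);
    assert_eq!(blocks[0].id, "Magic Completion");
    assert_eq!(blocks[0].contents, vec!["".to_string(), "Types for you.".to_string()]);

The accompanying `Location` renders as a GitHub permalink via its `Display` impl, which is what the documentation generators above interpolate into the manual.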


@@ -28,7 +28,11 @@ parser = { path = "../parser", version = "0.0.0" }
profile = { path = "../profile", version = "0.0.0" }
[dev-dependencies]
test_utils = { path = "../test_utils" }
walkdir = "2.3.1"
rayon = "1"
expect-test = "1.1"
proc-macro2 = "1.0.8"
quote = "1.0.2"
ungrammar = "=1.14"
test_utils = { path = "../test_utils" }
sourcegen = { path = "../sourcegen" }


@@ -1,4 +1,4 @@
//! Generated file, do not edit by hand, see `xtask/src/codegen`
//! Generated by `sourcegen_ast`, do not edit by hand.
use crate::{
ast::{self, support, AstChildren, AstNode},


@@ -1,4 +1,4 @@
//! Generated file, do not edit by hand, see `xtask/src/codegen`
//! Generated by `sourcegen_ast`, do not edit by hand.
use crate::{
ast::AstToken,


@@ -1,3 +1,7 @@
mod sourcegen_tests;
mod sourcegen_ast;
mod ast_src;
use std::{
fmt::Write,
fs,
@@ -152,20 +156,14 @@ fn reparse_fuzz_tests() {
/// Test that Rust-analyzer can parse and validate the rust-analyzer
#[test]
fn self_hosting_parsing() {
let dir = project_root().join("crates");
let files = walkdir::WalkDir::new(dir)
.into_iter()
.filter_entry(|entry| {
// Get all files which are not in the crates/syntax/test_data folder
!entry.path().components().any(|component| component.as_os_str() == "test_data")
})
.map(|e| e.unwrap())
.filter(|entry| {
// Get all `.rs ` files
!entry.path().is_dir() && (entry.path().extension().unwrap_or_default() == "rs")
})
.map(|entry| entry.into_path())
.collect::<Vec<_>>();
let crates_dir = project_root().join("crates");
let mut files = ::sourcegen::list_rust_files(&crates_dir);
files.retain(|path| {
// Get all files which are not in the crates/syntax/test_data folder
!path.components().any(|component| component.as_os_str() == "test_data")
});
assert!(
files.len() > 100,
"self_hosting_parsing found too few files - is it running in the right directory?"


@@ -12,32 +12,31 @@ use proc_macro2::{Punct, Spacing};
use quote::{format_ident, quote};
use ungrammar::{rust_grammar, Grammar, Rule};
use crate::{
ast_src::{AstEnumSrc, AstNodeSrc, AstSrc, Cardinality, Field, KindsSrc, KINDS_SRC},
codegen::{ensure_file_contents, reformat},
project_root, Result,
use crate::tests::ast_src::{
AstEnumSrc, AstNodeSrc, AstSrc, Cardinality, Field, KindsSrc, KINDS_SRC,
};
pub(crate) fn generate_syntax() -> Result<()> {
#[test]
fn sourcegen_ast() {
let grammar = rust_grammar();
let ast = lower(&grammar);
let syntax_kinds_file = project_root().join("crates/parser/src/syntax_kind/generated.rs");
let syntax_kinds = generate_syntax_kinds(KINDS_SRC)?;
ensure_file_contents(syntax_kinds_file.as_path(), &syntax_kinds)?;
let syntax_kinds_file =
sourcegen::project_root().join("crates/parser/src/syntax_kind/generated.rs");
let syntax_kinds = generate_syntax_kinds(KINDS_SRC);
sourcegen::ensure_file_contents(syntax_kinds_file.as_path(), &syntax_kinds);
let ast_tokens_file = project_root().join("crates/syntax/src/ast/generated/tokens.rs");
let contents = generate_tokens(&ast)?;
ensure_file_contents(ast_tokens_file.as_path(), &contents)?;
let ast_tokens_file =
sourcegen::project_root().join("crates/syntax/src/ast/generated/tokens.rs");
let contents = generate_tokens(&ast);
sourcegen::ensure_file_contents(ast_tokens_file.as_path(), &contents);
let ast_nodes_file = project_root().join("crates/syntax/src/ast/generated/nodes.rs");
let contents = generate_nodes(KINDS_SRC, &ast)?;
ensure_file_contents(ast_nodes_file.as_path(), &contents)?;
Ok(())
let ast_nodes_file = sourcegen::project_root().join("crates/syntax/src/ast/generated/nodes.rs");
let contents = generate_nodes(KINDS_SRC, &ast);
sourcegen::ensure_file_contents(ast_nodes_file.as_path(), &contents);
}
fn generate_tokens(grammar: &AstSrc) -> Result<String> {
fn generate_tokens(grammar: &AstSrc) -> String {
let tokens = grammar.tokens.iter().map(|token| {
let name = format_ident!("{}", token);
let kind = format_ident!("{}", to_upper_snake_case(token));
@@ -61,18 +60,20 @@ fn generate_tokens(grammar: &AstSrc) -> Result<String> {
}
});
let pretty = reformat(
&quote! {
use crate::{SyntaxKind::{self, *}, SyntaxToken, ast::AstToken};
#(#tokens)*
}
.to_string(),
)?
.replace("#[derive", "\n#[derive");
Ok(pretty)
sourcegen::add_preamble(
"sourcegen_ast",
sourcegen::reformat(
quote! {
use crate::{SyntaxKind::{self, *}, SyntaxToken, ast::AstToken};
#(#tokens)*
}
.to_string(),
),
)
.replace("#[derive", "\n#[derive")
}
fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> Result<String> {
fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> String {
let (node_defs, node_boilerplate_impls): (Vec<_>, Vec<_>) = grammar
.nodes
.iter()
@@ -230,7 +231,7 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> Result<String> {
.filter(|name| !defined_nodes.iter().any(|&it| it == name))
{
drop(node)
// TODO: restore this
// FIXME: restore this
// eprintln!("Warning: node {} not defined in ast source", node);
}
@@ -262,8 +263,7 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> Result<String> {
}
}
let pretty = reformat(&res)?;
Ok(pretty)
sourcegen::add_preamble("sourcegen_ast", sourcegen::reformat(res))
}
fn write_doc_comment(contents: &[String], dest: &mut String) {
@@ -272,7 +272,7 @@ fn write_doc_comment(contents: &[String], dest: &mut String) {
}
}
fn generate_syntax_kinds(grammar: KindsSrc<'_>) -> Result<String> {
fn generate_syntax_kinds(grammar: KindsSrc<'_>) -> String {
let (single_byte_tokens_values, single_byte_tokens): (Vec<_>, Vec<_>) = grammar
.punct
.iter()
@@ -384,7 +384,7 @@ fn generate_syntax_kinds(grammar: KindsSrc<'_>) -> Result<String> {
}
};
reformat(&ast.to_string())
sourcegen::add_preamble("sourcegen_ast", sourcegen::reformat(ast.to_string()))
}
fn to_upper_snake_case(s: &str) -> String {
@@ -580,7 +580,7 @@ fn lower_rule(acc: &mut Vec<Field>, grammar: &Grammar, label: Option<&String>, r
acc.push(field);
return;
}
todo!("{:?}", rule)
panic!("unhandled rule: {:?}", rule)
}
Rule::Labeled { label: l, rule } => {
assert!(label.is_none());


@@ -1,26 +1,28 @@
//! This module greps parser's code for specially formatted comments and turnes
//! This module greps parser's code for specially formatted comments and turns
//! them into tests.
use std::{
collections::HashMap,
fs, iter,
path::{Path, PathBuf},
};
use crate::{
codegen::{ensure_file_contents, extract_comment_blocks},
project_root, Result,
};
use rustc_hash::FxHashMap;
pub(crate) fn generate_parser_tests() -> Result<()> {
let tests = tests_from_dir(&project_root().join(Path::new("crates/parser/src/grammar")))?;
fn install_tests(tests: &HashMap<String, Test>, into: &str) -> Result<()> {
let tests_dir = project_root().join(into);
#[test]
fn sourcegen_parser_tests() {
let grammar_dir = sourcegen::project_root().join(Path::new("crates/parser/src/grammar"));
let tests = tests_from_dir(&grammar_dir);
install_tests(&tests.ok, "crates/syntax/test_data/parser/inline/ok");
install_tests(&tests.err, "crates/syntax/test_data/parser/inline/err");
fn install_tests(tests: &FxHashMap<String, Test>, into: &str) {
let tests_dir = sourcegen::project_root().join(into);
if !tests_dir.is_dir() {
fs::create_dir_all(&tests_dir)?;
fs::create_dir_all(&tests_dir).unwrap();
}
// ok is never actually read, but it needs to be specified to create a Test in existing_tests
let existing = existing_tests(&tests_dir, true)?;
let existing = existing_tests(&tests_dir, true);
for t in existing.keys().filter(|&t| !tests.contains_key(t)) {
panic!("Test is deleted: {}", t);
}
@@ -35,12 +37,9 @@ pub(crate) fn generate_parser_tests() -> Result<()> {
tests_dir.join(file_name)
}
};
ensure_file_contents(&path, &test.text)?;
sourcegen::ensure_file_contents(&path, &test.text);
}
Ok(())
}
install_tests(&tests.ok, "crates/syntax/test_data/parser/inline/ok")?;
install_tests(&tests.err, "crates/syntax/test_data/parser/inline/err")
}
#[derive(Debug)]
@@ -52,14 +51,14 @@ struct Test {
#[derive(Default, Debug)]
struct Tests {
ok: HashMap<String, Test>,
err: HashMap<String, Test>,
ok: FxHashMap<String, Test>,
err: FxHashMap<String, Test>,
}
fn collect_tests(s: &str) -> Vec<Test> {
let mut res = Vec::new();
for comment_block in extract_comment_blocks(s) {
let first_line = &comment_block[0];
for comment_block in sourcegen::CommentBlock::extract_untagged(s) {
let first_line = &comment_block.contents[0];
let (name, ok) = if let Some(name) = first_line.strip_prefix("test ") {
(name.to_string(), true)
} else if let Some(name) = first_line.strip_prefix("test_err ") {
@ -67,7 +66,7 @@ fn collect_tests(s: &str) -> Vec<Test> {
} else {
continue;
};
let text: String = comment_block[1..]
let text: String = comment_block.contents[1..]
.iter()
.cloned()
.chain(iter::once(String::new()))
@@ -79,41 +78,34 @@ fn collect_tests(s: &str) -> Vec<Test> {
res
}
fn tests_from_dir(dir: &Path) -> Result<Tests> {
fn tests_from_dir(dir: &Path) -> Tests {
let mut res = Tests::default();
for entry in ::walkdir::WalkDir::new(dir) {
let entry = entry.unwrap();
if !entry.file_type().is_file() {
continue;
}
if entry.path().extension().unwrap_or_default() != "rs" {
continue;
}
process_file(&mut res, entry.path())?;
for entry in sourcegen::list_rust_files(dir) {
process_file(&mut res, entry.as_path());
}
let grammar_rs = dir.parent().unwrap().join("grammar.rs");
process_file(&mut res, &grammar_rs)?;
return Ok(res);
fn process_file(res: &mut Tests, path: &Path) -> Result<()> {
let text = fs::read_to_string(path)?;
process_file(&mut res, &grammar_rs);
return res;
fn process_file(res: &mut Tests, path: &Path) {
let text = fs::read_to_string(path).unwrap();
for test in collect_tests(&text) {
if test.ok {
if let Some(old_test) = res.ok.insert(test.name.clone(), test) {
anyhow::bail!("Duplicate test: {}", old_test.name);
panic!("Duplicate test: {}", old_test.name);
}
} else if let Some(old_test) = res.err.insert(test.name.clone(), test) {
anyhow::bail!("Duplicate test: {}", old_test.name);
panic!("Duplicate test: {}", old_test.name);
}
}
Ok(())
}
}
fn existing_tests(dir: &Path, ok: bool) -> Result<HashMap<String, (PathBuf, Test)>> {
let mut res = HashMap::new();
for file in fs::read_dir(dir)? {
let file = file?;
fn existing_tests(dir: &Path, ok: bool) -> FxHashMap<String, (PathBuf, Test)> {
let mut res = FxHashMap::default();
for file in fs::read_dir(dir).unwrap() {
let file = file.unwrap();
let path = file.path();
if path.extension().unwrap_or_default() != "rs" {
continue;
@@ -122,11 +114,11 @@ fn existing_tests(dir: &Path, ok: bool) -> Result<HashMap<String, (PathBuf, Test
let file_name = path.file_name().unwrap().to_str().unwrap();
file_name[5..file_name.len() - 3].to_string()
};
let text = xshell::read_file(&path)?;
let text = fs::read_to_string(&path).unwrap();
let test = Test { name: name.clone(), text, ok };
if let Some(old) = res.insert(name, (path, test)) {
println!("Duplicate test: {:?}", old);
}
}
Ok(res)
res
}
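
As a reminder of the convention this file consumes, an inline parser test is just a specially formatted comment next to the grammar code (the name and body below are illustrative):

    // test type_item
    // type Foo = Bar;
    fn type_item(p: &mut Parser) { /* ... */ }

`collect_tests` picks the block up as a test named `type_item` with the remaining comment lines as its body, and `install_tests` materializes it as a file under `crates/syntax/test_data/parser/inline/ok` (`test_err` blocks go to `.../err`), so `sourcegen_parser_tests` fails whenever the checked-in files drift from the comments.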


@@ -325,6 +325,8 @@ In particular, we generate:
* Documentation tests for assists
See the `sourcegen` crate for details.
**Architecture Invariant:** we avoid bootstrapping.
For codegen we need to parse Rust code.
Using rust-analyzer for that would work and would be fun, but it would also complicate the build process a lot.


@@ -9,9 +9,6 @@ license = "MIT OR Apache-2.0"
[dependencies]
anyhow = "1.0.26"
flate2 = "1.0"
proc-macro2 = "1.0.8"
quote = "1.0.2"
ungrammar = "=1.14"
walkdir = "2.3.1"
write-json = "0.1.0"
xshell = "0.1"


@@ -1,166 +0,0 @@
//! We use code generation heavily in rust-analyzer.
//!
//! Rather then doing it via proc-macros, we use old-school way of just dumping
//! the source code.
//!
//! This module's submodules define specific bits that we generate.
mod gen_syntax;
mod gen_parser_tests;
mod gen_lint_completions;
mod gen_assists_docs;
mod gen_feature_docs;
mod gen_diagnostic_docs;
use std::{
fmt, mem,
path::{Path, PathBuf},
};
use xshell::{cmd, pushenv};
use crate::{ensure_rustfmt, project_root, Result};
pub(crate) use self::{
gen_assists_docs::generate_assists_tests, gen_lint_completions::generate_lint_completions,
gen_parser_tests::generate_parser_tests, gen_syntax::generate_syntax,
};
pub(crate) fn docs() -> Result<()> {
// We don't commit docs to the repo, so we can just overwrite them.
gen_assists_docs::generate_assists_docs()?;
gen_feature_docs::generate_feature_docs()?;
gen_diagnostic_docs::generate_diagnostic_docs()?;
Ok(())
}
#[allow(unused)]
fn used() {
generate_parser_tests();
generate_assists_tests();
generate_syntax();
generate_lint_completions();
}
/// Checks that the `file` has the specified `contents`. If that is not the
/// case, updates the file and then fails the test.
pub(crate) fn ensure_file_contents(file: &Path, contents: &str) -> Result<()> {
match std::fs::read_to_string(file) {
Ok(old_contents) if normalize_newlines(&old_contents) == normalize_newlines(contents) => {
return Ok(())
}
_ => (),
}
let display_path = file.strip_prefix(&project_root()).unwrap_or(file);
eprintln!(
"\n\x1b[31;1merror\x1b[0m: {} was not up-to-date, updating\n",
display_path.display()
);
if std::env::var("CI").is_ok() {
eprintln!(" NOTE: run `cargo test` locally and commit the updated files\n");
}
if let Some(parent) = file.parent() {
let _ = std::fs::create_dir_all(parent);
}
std::fs::write(file, contents).unwrap();
anyhow::bail!("some file was not up to date and has been updated, simply re-run the tests")
}
fn normalize_newlines(s: &str) -> String {
s.replace("\r\n", "\n")
}
const PREAMBLE: &str = "Generated file, do not edit by hand, see `xtask/src/codegen`";
fn reformat(text: &str) -> Result<String> {
let _e = pushenv("RUSTUP_TOOLCHAIN", "stable");
ensure_rustfmt()?;
let rustfmt_toml = project_root().join("rustfmt.toml");
let stdout = cmd!("rustfmt --config-path {rustfmt_toml} --config fn_single_line=true")
.stdin(text)
.read()?;
Ok(format!("//! {}\n\n{}\n", PREAMBLE, stdout))
}
fn extract_comment_blocks(text: &str) -> Vec<Vec<String>> {
do_extract_comment_blocks(text, false).into_iter().map(|(_line, block)| block).collect()
}
fn extract_comment_blocks_with_empty_lines(tag: &str, text: &str) -> Vec<CommentBlock> {
assert!(tag.starts_with(char::is_uppercase));
let tag = format!("{}:", tag);
let mut res = Vec::new();
for (line, mut block) in do_extract_comment_blocks(text, true) {
let first = block.remove(0);
if first.starts_with(&tag) {
let id = first[tag.len()..].trim().to_string();
let block = CommentBlock { id, line, contents: block };
res.push(block);
}
}
res
}
struct CommentBlock {
id: String,
line: usize,
contents: Vec<String>,
}
fn do_extract_comment_blocks(
text: &str,
allow_blocks_with_empty_lines: bool,
) -> Vec<(usize, Vec<String>)> {
let mut res = Vec::new();
let prefix = "// ";
let lines = text.lines().map(str::trim_start);
let mut block = (0, vec![]);
for (line_num, line) in lines.enumerate() {
if line == "//" && allow_blocks_with_empty_lines {
block.1.push(String::new());
continue;
}
let is_comment = line.starts_with(prefix);
if is_comment {
block.1.push(line[prefix.len()..].to_string());
} else {
if !block.1.is_empty() {
res.push(mem::take(&mut block));
}
block.0 = line_num + 2;
}
}
if !block.1.is_empty() {
res.push(block)
}
res
}
#[derive(Debug)]
struct Location {
file: PathBuf,
line: usize,
}
impl Location {
fn new(file: PathBuf, line: usize) -> Self {
Self { file, line }
}
}
impl fmt::Display for Location {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let path = self.file.strip_prefix(&project_root()).unwrap().display().to_string();
let path = path.replace('\\', "/");
let name = self.file.file_name().unwrap();
write!(
f,
"https://github.com/rust-analyzer/rust-analyzer/blob/master/{}#L{}[{}]",
path,
self.line,
name.to_str().unwrap()
)
}
}


@@ -9,8 +9,6 @@
//! `.cargo/config`.
mod flags;
mod codegen;
mod ast_src;
#[cfg(test)]
mod tidy;
@@ -24,7 +22,6 @@ use std::{
env,
path::{Path, PathBuf},
};
use walkdir::{DirEntry, WalkDir};
use xshell::{cmd, cp, pushd, pushenv};
fn main() -> Result<()> {
@@ -63,31 +60,6 @@ fn project_root() -> PathBuf {
.to_path_buf()
}
fn rust_files() -> impl Iterator<Item = PathBuf> {
rust_files_in(&project_root().join("crates"))
}
#[cfg(test)]
fn cargo_files() -> impl Iterator<Item = PathBuf> {
files_in(&project_root(), "toml")
.filter(|path| path.file_name().map(|it| it == "Cargo.toml").unwrap_or(false))
}
fn rust_files_in(path: &Path) -> impl Iterator<Item = PathBuf> {
files_in(path, "rs")
}
fn ensure_rustfmt() -> Result<()> {
let out = cmd!("rustfmt --version").read()?;
if !out.contains("stable") {
bail!(
"Failed to run rustfmt from toolchain 'stable'. \
Please run `rustup component add rustfmt --toolchain stable` to install it.",
)
}
Ok(())
}
fn run_fuzzer() -> Result<()> {
let _d = pushd("./crates/syntax")?;
let _e = pushenv("RUSTUP_TOOLCHAIN", "nightly");
@@ -113,18 +85,3 @@ fn date_iso() -> Result<String> {
fn is_release_tag(tag: &str) -> bool {
tag.len() == "2020-02-24".len() && tag.starts_with(|c: char| c.is_ascii_digit())
}
fn files_in(path: &Path, ext: &'static str) -> impl Iterator<Item = PathBuf> {
let iter = WalkDir::new(path);
return iter
.into_iter()
.filter_entry(|e| !is_hidden(e))
.map(|e| e.unwrap())
.filter(|e| !e.file_type().is_dir())
.map(|e| e.into_path())
.filter(move |path| path.extension().map(|it| it == ext).unwrap_or(false));
fn is_hidden(entry: &DirEntry) -> bool {
entry.file_name().to_str().map(|s| s.starts_with('.')).unwrap_or(false)
}
}


@@ -2,7 +2,7 @@ mod changelog;
use xshell::{cmd, pushd, read_dir, read_file, write_file};
use crate::{codegen, date_iso, flags, is_release_tag, project_root, Result};
use crate::{date_iso, flags, is_release_tag, project_root, Result};
impl flags::Release {
pub(crate) fn run(self) -> Result<()> {
@@ -21,7 +21,10 @@ impl flags::Release {
// to delete old tags.
cmd!("git push --force").run()?;
}
codegen::docs()?;
// Generates bits of manual.adoc.
cmd!("cargo test -p ide_assists -p ide_diagnostics -p rust-analyzer -- sourcegen_")
.run()?;
let website_root = project_root().join("../rust-analyzer.github.io");
let changelog_dir = website_root.join("./thisweek/_posts");


@@ -3,38 +3,24 @@ use std::{
path::{Path, PathBuf},
};
use walkdir::{DirEntry, WalkDir};
use xshell::{cmd, pushd, pushenv, read_file};
use crate::{cargo_files, codegen, project_root, rust_files};
#[test]
fn generate_grammar() {
codegen::generate_syntax().unwrap()
}
#[test]
fn generate_parser_tests() {
codegen::generate_parser_tests().unwrap()
}
#[test]
fn generate_assists_tests() {
codegen::generate_assists_tests().unwrap();
}
/// This clones rustc repo, and so is not worth to keep up-to-date. We update
/// manually by un-ignoring the test from time to time.
#[test]
#[ignore]
fn generate_lint_completions() {
codegen::generate_lint_completions().unwrap()
}
use crate::project_root;
#[test]
fn check_code_formatting() {
let _dir = pushd(project_root()).unwrap();
let _e = pushenv("RUSTUP_TOOLCHAIN", "stable");
crate::ensure_rustfmt().unwrap();
let out = cmd!("rustfmt --version").read().unwrap();
if !out.contains("stable") {
panic!(
"Failed to run rustfmt from toolchain 'stable'. \
Please run `rustup component add rustfmt --toolchain stable` to install it.",
)
}
let res = cmd!("cargo fmt -- --check").run();
if res.is_err() {
let _ = cmd!("cargo fmt").run();
@@ -42,11 +28,6 @@ fn check_code_formatting() {
res.unwrap()
}
#[test]
fn smoke_test_generate_documentation() {
codegen::docs().unwrap()
}
#[test]
fn check_lsp_extensions_docs() {
let expected_hash = {
@@ -344,6 +325,8 @@ fn check_test_attrs(path: &Path, text: &str) {
// A legit test which needs to be ignored, as it takes too long to run
// :(
"hir_def/src/nameres/collector.rs",
// Long sourcegen test to generate lint completions.
"ide_completion/src/tests/sourcegen.rs",
// Obviously needs ignore.
"ide_assists/src/handlers/toggle_ignore.rs",
// See above.
@@ -498,3 +481,31 @@ fn find_mark<'a>(text: &'a str, mark: &'static str) -> Option<&'a str> {
let text = &text[..idx];
Some(text)
}
fn rust_files() -> impl Iterator<Item = PathBuf> {
rust_files_in(&project_root().join("crates"))
}
fn cargo_files() -> impl Iterator<Item = PathBuf> {
files_in(&project_root(), "toml")
.filter(|path| path.file_name().map(|it| it == "Cargo.toml").unwrap_or(false))
}
fn rust_files_in(path: &Path) -> impl Iterator<Item = PathBuf> {
files_in(path, "rs")
}
fn files_in(path: &Path, ext: &'static str) -> impl Iterator<Item = PathBuf> {
let iter = WalkDir::new(path);
return iter
.into_iter()
.filter_entry(|e| !is_hidden(e))
.map(|e| e.unwrap())
.filter(|e| !e.file_type().is_dir())
.map(|e| e.into_path())
.filter(move |path| path.extension().map(|it| it == ext).unwrap_or(false));
fn is_hidden(entry: &DirEntry) -> bool {
entry.file_name().to_str().map(|s| s.starts_with('.')).unwrap_or(false)
}
}