// rust-analyzer/xtask/tests/tidy.rs
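//
// "Tidy" checks that keep the repository healthy: generated code is fresh,
// docs generation still runs, the tree is rustfmt-clean, the history has no
// merge commits, crate licenses stay on an expected list, and sources under
// `crates/` follow local conventions (no TODO markers, no trailing
// whitespace, no clippy `allow`s, module-level doc comments).
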
use std::{
    collections::HashMap,
    path::{Path, PathBuf},
};

use xtask::{
    codegen::{self, Mode},
    not_bash::{fs2, run},
    project_root, run_rustfmt, rust_files,
};

#[test]
fn generated_grammar_is_fresh() {
    if let Err(error) = codegen::generate_syntax(Mode::Verify) {
        panic!("{}. Please update it by running `cargo xtask codegen`", error);
    }
}

#[test]
fn generated_tests_are_fresh() {
    if let Err(error) = codegen::generate_parser_tests(Mode::Verify) {
        panic!("{}. Please update tests by running `cargo xtask codegen`", error);
    }
}

#[test]
fn generated_assists_are_fresh() {
    if let Err(error) = codegen::generate_assists_tests(Mode::Verify) {
        panic!("{}. Please update assists by running `cargo xtask codegen`", error);
    }
}

#[test]
fn check_code_formatting() {
    if let Err(error) = run_rustfmt(Mode::Verify) {
        panic!("{}. Please format the code by running `cargo format`", error);
    }
}

#[test]
fn smoke_test_docs_generation() {
    // We don't commit docs to the repo, so we can just overwrite them in tests.
    codegen::generate_assists_docs(Mode::Overwrite).unwrap();
    codegen::generate_feature_docs(Mode::Overwrite).unwrap();
}
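
// Walks every Rust file under `crates/` and applies the per-file checks
// defined below: TODO markers, trailing whitespace, clippy `allow`s, and
// module-level docs.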
#[test]
fn rust_files_are_tidy() {
    let mut tidy_docs = TidyDocs::default();
    for path in rust_files(&project_root().join("crates")) {
        let text = fs2::read_to_string(&path).unwrap();
        check_todo(&path, &text);
        check_trailing_ws(&path, &text);
        deny_clippy(&path, &text);
        tidy_docs.visit(&path, &text);
    }
    tidy_docs.finish();
}
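
// `--merges` selects only merge commits, the `--invert-grep`/`--author` pair
// is there to skip the merges created by bors[bot], and `HEAD~19..` limits the
// check to recent history so old merge commits are not flagged retroactively.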
#[test]
fn check_merge_commits() {
    let stdout = run!("git rev-list --merges --invert-grep --author 'bors\\[bot\\]' HEAD~19.."; echo = false)
        .unwrap();
    if !stdout.is_empty() {
        panic!(
            "
Merge commits are not allowed in the history.

When updating a pull-request, please rebase your feature branch
on top of master by running `git rebase master`. If the rebase fails,
you can re-apply your changes like this:

  # Abort the in-progress rebase, if any.
  $ git rebase --abort

  # Make the branch point to the latest commit from master,
  # while keeping your local changes uncommitted.
  $ git reset --soft origin/master

  # Commit all changes in a single batch.
  $ git commit -am'My changes'

  # Push the changes. We did a rebase, so we need the `--force` option.
  # `--force-with-lease` is a safer (Rusty) version of `--force`.
  $ git push --force-with-lease

And don't be afraid of messing something up during a rebase -- you can
always restore the previous state using `git reflog`:
https://github.blog/2015-06-08-how-to-undo-almost-anything-with-git/#redo-after-undo-local
"
        );
    }
}
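
// The `\u{61}` escape below is just the letter `a`; spelling the needle that
// way keeps this check's own source from matching the pattern it searches for.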
fn deny_clippy(path: &Path, text: &str) {
    if text.contains("[\u{61}llow(clippy") {
        panic!(
            "\n\nallowing lints is forbidden: {}.
rust-analyzer intentionally doesn't check clippy on CI.
You can allow a lint globally via `xtask clippy`.
See https://github.com/rust-lang/rust-clippy/issues/5537 for discussion.
",
            path.display()
        )
    }
}
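
// `cargo metadata` emits JSON, but this test gets away with crude string
// splitting instead of a JSON parser: it only needs the values of the
// `"license"` fields. A dependency with a license that is not in the expected
// list below makes the assertion fail until the list is updated.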
#[test]
fn check_licenses() {
    let expected = "
0BSD OR MIT OR Apache-2.0
Apache-2.0
Apache-2.0 OR BSL-1.0
Apache-2.0 OR MIT
Apache-2.0 WITH LLVM-exception OR Apache-2.0 OR MIT
Apache-2.0/MIT
BSD-2-Clause
BSD-3-Clause
CC0-1.0
ISC
MIT
MIT / Apache-2.0
MIT OR Apache-2.0
MIT/Apache-2.0
Unlicense OR MIT
Unlicense/MIT
Zlib OR Apache-2.0 OR MIT
"
    .lines()
    .filter(|it| !it.is_empty())
    .collect::<Vec<_>>();

    let meta = run!("cargo metadata --format-version 1"; echo = false).unwrap();
    let mut licenses = meta
        .split(|c| c == ',' || c == '{' || c == '}')
        .filter(|it| it.contains(r#""license""#))
        .map(|it| it.trim())
        .map(|it| it[r#""license":"#.len()..].trim_matches('"'))
        .collect::<Vec<_>>();
    licenses.sort();
    licenses.dedup();

    assert_eq!(licenses, expected);
}
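
// Committed code should use FIXME rather than TODO; the paths in `need_todo`
// legitimately contain the string, mostly because they generate `todo!()` as
// part of assists.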
fn check_todo(path: &Path, text: &str) {
    let need_todo = &[
        // This file itself obviously needs to use todo (<- like this!).
        "tests/cli.rs",
        // Some of our assists generate `todo!()`.
        "tests/generated.rs",
        "handlers/add_missing_impl_members.rs",
        "handlers/add_turbo_fish.rs",
        "handlers/generate_function.rs",
        // To support generating `todo!()` in assists, we have `expr_todo()` in
        // `ast::make`.
        "ast/make.rs",
        // The documentation in string literals may contain anything for its own purposes.
        "completion/generated_features.rs",
    ];
    if need_todo.iter().any(|p| path.ends_with(p)) {
        return;
    }
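    // "TOOD" is a frequent transposition typo of "TODO", so catch it as well.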
    if text.contains("TODO") || text.contains("TOOD") || text.contains("todo!") {
        panic!(
            "\nTODO markers or todo! macros should not be committed to the master branch,\n\
             use FIXME instead\n\
             {}\n",
            path.display(),
        )
    }
}
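
// Fixture files under `test_data` may intentionally end lines with whitespace,
// so those directories are skipped.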
fn check_trailing_ws(path: &Path, text: &str) {
    if is_exclude_dir(path, &["test_data"]) {
        return;
    }
    for (line_number, line) in text.lines().enumerate() {
        if line.chars().last().map(char::is_whitespace) == Some(true) {
            // `enumerate` is zero-based; report a one-based line number.
            panic!("Trailing whitespace in {} at line {}", path.display(), line_number + 1)
        }
    }
}
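
// State collected while visiting files: `missing_docs` records modules whose
// first line is not a `//!` doc comment, `contains_fixme` records modules
// whose module doc comment still contains a FIXME.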
#[derive(Default)]
struct TidyDocs {
    missing_docs: Vec<String>,
    contains_fixme: Vec<PathBuf>,
}

impl TidyDocs {
    fn visit(&mut self, path: &Path, text: &str) {
        // Tests hopefully don't really need comments, and for assists we already
        // have special comments which are the source of doc tests and user docs.
        if is_exclude_dir(path, &["tests", "test_data"]) {
            return;
        }

        if is_exclude_file(path) {
            return;
        }

        let first_line = match text.lines().next() {
            Some(it) => it,
            None => return,
        };

        if first_line.starts_with("//!") {
            if first_line.contains("FIXME") {
                self.contains_fixme.push(path.to_path_buf());
            }
        } else {
            if text.contains("// Feature:") || text.contains("// Assist:") {
                return;
            }
            self.missing_docs.push(path.display().to_string());
        }

        fn is_exclude_file(d: &Path) -> bool {
            let file_names = ["tests.rs"];

            d.file_name()
                .unwrap_or_default()
                .to_str()
                .map(|f_n| file_names.iter().any(|name| *name == f_n))
                .unwrap_or(false)
        }
    }
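
    // Crates in `poorly_documented` are allowed to keep a FIXME in their
    // module docs. A module-doc FIXME in any other crate, or a listed crate
    // with no FIXMEs left, fails the test so the list stays up to date.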
    fn finish(self) {
        if !self.missing_docs.is_empty() {
            panic!(
                "\nMissing doc strings\n\n\
                 modules:\n{}\n\n",
                self.missing_docs.join("\n")
            )
        }

        let poorly_documented = [
            "hir",
            "hir_expand",
            "ide",
            "mbe",
            "parser",
            "profile",
            "project_model",
            "syntax",
            "tt",
            "hir_ty",
        ];

        let mut has_fixmes =
            poorly_documented.iter().map(|it| (*it, false)).collect::<HashMap<&str, bool>>();
        'outer: for path in self.contains_fixme {
            for krate in poorly_documented.iter() {
                if path.components().any(|it| it.as_os_str() == *krate) {
                    has_fixmes.insert(krate, true);
                    continue 'outer;
                }
            }
            panic!("FIXME doc in a fully-documented crate: {}", path.display())
        }

        for (krate, has_fixme) in has_fixmes.iter() {
            if !has_fixme {
                panic!("crate {} is fully documented :tada:, remove it from the list of poorly documented crates", krate)
            }
        }
    }
}
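
// Returns true if any directory component of `p`, relative to the project
// root and excluding the file name itself (hence `rev().skip(1)`), is in
// `dirs_to_exclude`.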
fn is_exclude_dir(p: &Path, dirs_to_exclude: &[&str]) -> bool {
    p.strip_prefix(project_root())
        .unwrap()
        .components()
        .rev()
        .skip(1)
        .filter_map(|it| it.as_os_str().to_str())
        .any(|it| dirs_to_exclude.contains(&it))
}