Mirror of https://github.com/rust-lang/rust-analyzer (synced 2024-12-27 05:23:24 +00:00)
Move lint definition generation into xtask/codegen
Parent: 0964374274
Commit: 76463eee48

12 changed files with 90 additions and 86 deletions
Cargo.lock (generated): 4 changes
@@ -717,12 +717,10 @@ dependencies = [
  "memchr",
  "nohash-hasher",
  "once_cell",
- "oorandom",
  "parser",
  "profile",
  "rayon",
  "rustc-hash",
- "sourcegen",
  "span",
  "stdx",
  "syntax",
@@ -731,7 +729,6 @@ dependencies = [
  "text-edit",
  "tracing",
  "triomphe",
- "xshell",
 ]

 [[package]]
@@ -916,6 +913,7 @@ name = "line-index"
 version = "0.1.1"
 dependencies = [
  "nohash-hasher",
+ "oorandom",
  "text-size",
 ]

@@ -44,13 +44,10 @@ line-index.workspace = true

 [dev-dependencies]
 expect-test = "1.4.0"
-oorandom = "11.1.3"
-xshell.workspace = true

 # local deps
 test-utils.workspace = true
 test-fixture.workspace = true
-sourcegen.workspace = true

 [lints]
 workspace = true

@@ -412,9 +412,3 @@ impl SnippetCap {
         }
     }
 }
-
-#[cfg(test)]
-mod tests {
-    mod line_index;
-    mod sourcegen_lints;
-}

@@ -1,49 +0,0 @@
-use line_index::{LineCol, LineIndex, WideEncoding};
-use test_utils::skip_slow_tests;
-
-#[test]
-fn test_every_chars() {
-    if skip_slow_tests() {
-        return;
-    }
-
-    let text: String = {
-        let mut chars: Vec<char> = ((0 as char)..char::MAX).collect(); // Neat!
-        chars.extend("\n".repeat(chars.len() / 16).chars());
-        let mut rng = oorandom::Rand32::new(stdx::rand::seed());
-        stdx::rand::shuffle(&mut chars, |i| rng.rand_range(0..i as u32) as usize);
-        chars.into_iter().collect()
-    };
-    assert!(text.contains('💩')); // Sanity check.
-
-    let line_index = LineIndex::new(&text);
-
-    let mut lin_col = LineCol { line: 0, col: 0 };
-    let mut col_utf16 = 0;
-    let mut col_utf32 = 0;
-    for (offset, c) in text.char_indices() {
-        let got_offset = line_index.offset(lin_col).unwrap();
-        assert_eq!(usize::from(got_offset), offset);
-
-        let got_lin_col = line_index.line_col(got_offset);
-        assert_eq!(got_lin_col, lin_col);
-
-        for (enc, col) in [(WideEncoding::Utf16, col_utf16), (WideEncoding::Utf32, col_utf32)] {
-            let wide_lin_col = line_index.to_wide(enc, lin_col).unwrap();
-            let got_lin_col = line_index.to_utf8(enc, wide_lin_col).unwrap();
-            assert_eq!(got_lin_col, lin_col);
-            assert_eq!(wide_lin_col.col, col)
-        }
-
-        if c == '\n' {
-            lin_col.line += 1;
-            lin_col.col = 0;
-            col_utf16 = 0;
-            col_utf32 = 0;
-        } else {
-            lin_col.col += c.len_utf8() as u32;
-            col_utf16 += c.len_utf16() as u32;
-            col_utf32 += 1;
-        }
-    }
-}

@@ -1,6 +1,4 @@
 #![allow(clippy::print_stderr)]
-#[cfg(not(feature = "in-rust-tree"))]
-mod sourcegen;

 use ide_db::{
     assists::AssistResolveStrategy, base_db::SourceDatabaseExt, LineIndexDatabase, RootDatabase,

@@ -10,5 +10,8 @@ edition = "2021"
 text-size = "1.1.1"
 nohash-hasher = "0.2.0"

+[dev-dependencies]
+oorandom = "11.1.3"
+
 [lints]
 workspace = true

@@ -142,3 +142,56 @@ fn test_to_wide() {
     let wide_line_col = line_index.to_wide(WideEncoding::Utf16, line_col.unwrap());
     assert_eq!(wide_line_col, Some(WideLineCol { line: 5, col: 4 }));
 }
+
+#[test]
+fn test_every_chars() {
+    let text: String = {
+        let mut chars: Vec<char> = ((0 as char)..char::MAX).collect(); // Neat!
+        chars.extend("\n".repeat(chars.len() / 16).chars());
+        let seed = std::hash::Hasher::finish(&std::hash::BuildHasher::build_hasher(
+            #[allow(clippy::disallowed_types)]
+            &std::collections::hash_map::RandomState::new(),
+        ));
+        let mut rng = oorandom::Rand32::new(seed);
+        let mut rand_index = |i| rng.rand_range(0..i as u32) as usize;
+        let mut remaining = chars.len() - 1;
+        while remaining > 0 {
+            let index = rand_index(remaining);
+            chars.swap(remaining, index);
+            remaining -= 1;
+        }
+        chars.into_iter().collect()
+    };
+    assert!(text.contains('💩')); // Sanity check.
+
+    let line_index = LineIndex::new(&text);
+
+    let mut lin_col = LineCol { line: 0, col: 0 };
+    let mut col_utf16 = 0;
+    let mut col_utf32 = 0;
+    for (offset, c) in text.char_indices() {
+        let got_offset = line_index.offset(lin_col).unwrap();
+        assert_eq!(usize::from(got_offset), offset);
+
+        let got_lin_col = line_index.line_col(got_offset);
+        assert_eq!(got_lin_col, lin_col);
+
+        for (enc, col) in [(WideEncoding::Utf16, col_utf16), (WideEncoding::Utf32, col_utf32)] {
+            let wide_lin_col = line_index.to_wide(enc, lin_col).unwrap();
+            let got_lin_col = line_index.to_utf8(enc, wide_lin_col).unwrap();
+            assert_eq!(got_lin_col, lin_col);
+            assert_eq!(wide_lin_col.col, col)
+        }
+
+        if c == '\n' {
+            lin_col.line += 1;
+            lin_col.col = 0;
+            col_utf16 = 0;
+            col_utf32 = 0;
+        } else {
+            lin_col.col += c.len_utf8() as u32;
+            col_utf16 += c.len_utf16() as u32;
+            col_utf32 += 1;
+        }
+    }
+}

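Note: the re-added test above inlines its shuffle loop instead of calling stdx::rand::shuffle the way the deleted ide-db copy did, presumably so the standalone line-index crate keeps oorandom as its only test dependency. For comparison, a hypothetical reconstruction of that stdx helper from its call site in the deleted test (an assumption, not the actual stdx source):

// Hypothetical sketch of stdx::rand::shuffle, reconstructed from the call
// `stdx::rand::shuffle(&mut chars, |i| rng.rand_range(0..i as u32) as usize)`.
// Fisher-Yates style: walk from the back, swapping each slot with a random
// earlier index. Assumes a non-empty slice, as in the test above.
pub fn shuffle<T>(slice: &mut [T], mut rand_index: impl FnMut(usize) -> usize) {
    let mut remaining = slice.len() - 1;
    while remaining > 0 {
        let index = rand_index(remaining);
        slice.swap(remaining, index);
        remaining -= 1;
    }
}
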
@@ -9,14 +9,20 @@ use crate::{flags, project_root};

 pub(crate) mod assists_doc_tests;
 pub(crate) mod diagnostics_docs;
+mod lints;

 impl flags::Codegen {
     pub(crate) fn run(self, _sh: &Shell) -> anyhow::Result<()> {
         match self.codegen_type.unwrap_or_default() {
             flags::CodegenType::All => {
+                diagnostics_docs::generate(self.check);
                 assists_doc_tests::generate(self.check);
+                // lints::generate(self.check) Updating clones the rust repo, so don't run it unless
+                // explicitly asked for
             }
             flags::CodegenType::AssistsDocTests => assists_doc_tests::generate(self.check),
+            flags::CodegenType::DiagnosticsDocs => diagnostics_docs::generate(self.check),
+            flags::CodegenType::LintDefinitions => lints::generate(self.check),
         }
         Ok(())
     }

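For orientation, a hedged sketch of driving the new match arm programmatically. The codegen_type and check field names and the run(&Shell) signature come from the hunk above, but the exact xflags-generated struct layout and the surrounding setup are assumptions:

// Hypothetical caller, not part of this commit.
use xshell::Shell;

fn regenerate_lints() -> anyhow::Result<()> {
    let sh = Shell::new()?;
    flags::Codegen {
        codegen_type: Some(flags::CodegenType::LintDefinitions),
        // false = write crates/ide-db/src/generated/lints.rs; true would only verify it.
        check: false,
    }
    .run(&sh)
}
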
@@ -7,7 +7,7 @@ use crate::{
     project_root,
 };

-fn generate(check: bool) {
+pub(crate) fn generate(check: bool) {
     let diagnostics = Diagnostic::collect().unwrap();
     if !check {
         let contents =

@@ -2,18 +2,18 @@
 //! and lints from rustc, rustdoc, and clippy.
 use std::{borrow::Cow, fs, path::Path};

-use itertools::Itertools;
 use stdx::format_to;
-use test_utils::project_root;
 use xshell::{cmd, Shell};

+use crate::{
+    codegen::{add_preamble, ensure_file_contents, list_files, reformat},
+    project_root,
+};
+
 const DESTINATION: &str = "crates/ide-db/src/generated/lints.rs";

-/// This clones rustc repo, and so is not worth to keep up-to-date. We update
-/// manually by un-ignoring the test from time to time.
-#[test]
-#[ignore]
-fn sourcegen_lint_completions() {
+/// This clones rustc repo, and so is not worth to keep up-to-date on a constant basis.
+pub(crate) fn generate(check: bool) {
     let sh = &Shell::new().unwrap();

     let rust_repo = project_root().join("./target/rust");

@@ -73,10 +73,10 @@ pub struct LintGroup {
         .unwrap();
     generate_descriptor_clippy(&mut contents, &lints_json);

-    let contents = sourcegen::add_preamble("sourcegen_lints", sourcegen::reformat(contents));
+    let contents = add_preamble("sourcegen_lints", reformat(contents));

     let destination = project_root().join(DESTINATION);
-    sourcegen::ensure_file_contents(destination.as_path(), &contents);
+    ensure_file_contents(destination.as_path(), &contents, check);
 }

 /// Parses the output of `rustdoc -Whelp` and prints `Lint` and `LintGroup` constants into `buf`.

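The check flag threaded into ensure_file_contents above is what separates verification from regeneration. That helper itself is not part of this diff; a minimal sketch of the behaviour its name and call site suggest (an assumption, not the actual xtask implementation):

// Hypothetical sketch of an ensure_file_contents helper; details may differ.
use std::{fs, path::Path};

fn ensure_file_contents(file: &Path, contents: &str, check: bool) {
    let current = fs::read_to_string(file).unwrap_or_default();
    if current == contents {
        return; // already up to date
    }
    if check {
        // Verification mode: a stale generated file is an error, nothing is written.
        panic!("{} is out of date; re-run the codegen to update it", file.display());
    }
    // Regeneration mode: (re)write the file.
    fs::create_dir_all(file.parent().unwrap()).unwrap();
    fs::write(file, contents).unwrap();
}
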
@@ -130,10 +130,9 @@ fn generate_lint_descriptor(sh: &Shell, buf: &mut String) {
         )
     });

-    let lints = lints
-        .chain(lint_groups)
-        .sorted_by(|(ident, ..), (ident2, ..)| ident.cmp(ident2))
-        .collect::<Vec<_>>();
+    let mut lints = lints.chain(lint_groups).collect::<Vec<_>>();
+    lints.sort_by(|(ident, ..), (ident2, ..)| ident.cmp(ident2));
+
     for (name, description, ..) in &lints {
         push_lint_completion(buf, &name.replace('-', "_"), description);
     }

@@ -177,10 +176,8 @@ fn generate_lint_descriptor(sh: &Shell, buf: &mut String) {
         )
     });

-    let lints_rustdoc = lints_rustdoc
-        .chain(lint_groups_rustdoc)
-        .sorted_by(|(ident, ..), (ident2, ..)| ident.cmp(ident2))
-        .collect::<Vec<_>>();
+    let mut lints_rustdoc = lints_rustdoc.chain(lint_groups_rustdoc).collect::<Vec<_>>();
+    lints_rustdoc.sort_by(|(ident, ..), (ident2, ..)| ident.cmp(ident2));

     for (name, description, ..) in &lints_rustdoc {
         push_lint_completion(buf, &name.replace('-', "_"), description)

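Both hunks above swap itertools' sorted_by for a plain Vec::sort_by, which is what lets the moved module drop its itertools import. A small sketch of the equivalence, assuming the itertools crate is available and using made-up example data:

// Hypothetical comparison, not from the diff.
#[test]
fn sorted_by_matches_collect_then_sort_by() {
    use itertools::Itertools;
    // Old style: sort lazily through the iterator adapter.
    let old: Vec<i32> = vec![3, 1, 2].into_iter().sorted_by(|a, b| a.cmp(b)).collect();
    // New style: collect first, then sort the Vec with the std method.
    let mut new: Vec<i32> = vec![3, 1, 2];
    new.sort_by(|a, b| a.cmp(b));
    assert_eq!(old, new);
}
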
@@ -212,7 +209,7 @@ fn find_and_slice<'a>(i: &'a str, p: &str) -> &'a str {
 fn generate_feature_descriptor(buf: &mut String, src_dir: &Path) {
     let mut features = ["language-features", "library-features"]
         .into_iter()
-        .flat_map(|it| sourcegen::list_files(&src_dir.join(it)))
+        .flat_map(|it| list_files(&src_dir.join(it)))
         // Get all `.md` files
         .filter(|path| path.extension() == Some("md".as_ref()))
         .map(|path| {

@@ -302,7 +299,7 @@ fn generate_descriptor_clippy(buf: &mut String, path: &Path) {
         let children = children.iter().map(|id| format!("clippy::{id}")).collect::<Vec<_>>();
         if !children.is_empty() {
             let lint_ident = format!("clippy::{id}");
-            let description = format!("lint group for: {}", children.iter().join(", "));
+            let description = format!("lint group for: {}", children.join(", "));
             push_lint_group(buf, &lint_ident, &description, &children);
         }
     }

@@ -331,7 +328,10 @@ fn push_lint_group(buf: &mut String, label: &str, description: &str, children: &

     push_lint_completion(buf, label, description);

-    let children = format!("&[{}]", children.iter().map(|it| format!("\"{it}\"")).join(", "));
+    let children = format!(
+        "&[{}]",
+        children.iter().map(|it| format!("\"{it}\"")).collect::<Vec<_>>().join(", ")
+    );
     format_to!(
         buf,
         r###"

@@ -92,6 +92,8 @@ pub enum CodegenType {
     #[default]
     All,
     AssistsDocTests,
+    DiagnosticsDocs,
+    LintDefinitions,
 }

 impl FromStr for CodegenType {

@@ -100,6 +102,8 @@ impl FromStr for CodegenType {
         match s {
             "all" => Ok(Self::All),
             "assists-doc-tests" => Ok(Self::AssistsDocTests),
+            "diagnostics-docs" => Ok(Self::DiagnosticsDocs),
+            "lints-definitions" => Ok(Self::LintDefinitions),
             _ => Err("Invalid option".to_owned()),
         }
     }

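A small usage sketch of the new parsing arms, using only the strings and variants added above; the test scaffolding and its placement next to CodegenType are assumed:

// Hypothetical test, not part of this commit.
#[test]
fn parses_new_codegen_types() {
    use std::str::FromStr;
    // Strings and variants come straight from the match arms above.
    assert!(matches!(CodegenType::from_str("diagnostics-docs"), Ok(CodegenType::DiagnosticsDocs)));
    assert!(matches!(CodegenType::from_str("lints-definitions"), Ok(CodegenType::LintDefinitions)));
    assert!(CodegenType::from_str("unknown").is_err());
}
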
@@ -23,7 +23,7 @@ impl flags::Release {
         }

         // Generates bits of manual.adoc.
-        cmd!(sh, "cargo test -p ide-diagnostics -p rust-analyzer -- sourcegen_").run()?;
+        codegen::diagnostics_docs::generate(false);
         codegen::assists_doc_tests::generate(false);

         let website_root = project_root().join("../rust-analyzer.github.io");