Mirror of https://github.com/rust-lang/rust-analyzer, synced 2025-01-14 22:24:14 +00:00 (5075c77957)
This builds on #2231 but was actually done before that. You see, the cause for #2231 was that I got this error message:

    Error: Error { kind: Io(Os { code: 2, kind: NotFound, message: "No such file or directory" }) }

Just switching to `anyhow::Result` got me stack traces (when setting `RUST_LIB_BACKTRACE=1`) that at least showed

    stack backtrace:
       0: std::backtrace::Backtrace::create
       1: std::backtrace::Backtrace::capture
       2: anyhow::error::<impl core::convert::From<E> for anyhow::Error>::from
       3: xtask::install_server
       4: xtask::install
       5: xtask::main
       6: std::rt::lang_start::{{closure}}
       7: std::panicking::try::do_call
       8: __rust_maybe_catch_panic
       9: std::rt::lang_start_internal
      10: std::rt::lang_start
      11: main

With the added contexts (not at all exhaustive), the error became

    Error: install server

    Caused by:
        0: build AutoCfg with target directory
        1: No such file or directory (os error 2)

Since anyhow is such a small thing (no new transitive dependencies!), and in general gives you `Result<T, Box<dyn Error>>` on steroids, I think this is a nice small change.

The only slightly annoying thing was to replace all the `Err(format!(…))?` calls (haven't even looked at whether we can make it support wrapping strings though), but the `bail!` macro is shorter anyway :)
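To make the pattern concrete, here is a minimal sketch of how `.context(...)` and `bail!` are used once `anyhow` is in place. The function, path, and messages below are made up for illustration; this is not the actual xtask code.

```rust
use anyhow::{bail, Context, Result};

// Hypothetical example, only meant to show the anyhow idioms used in this PR.
fn install_server(target_dir: &std::path::Path) -> Result<()> {
    let manifest = std::fs::read_to_string(target_dir.join("Cargo.toml"))
        // Each `context` call adds one layer to the "Caused by:" chain.
        .context("read Cargo.toml")?;

    if manifest.is_empty() {
        // Replaces the old `Err(format!("..."))?` pattern.
        bail!("empty manifest in {}", target_dir.display());
    }
    Ok(())
}
```

Each `context` layer then shows up as one numbered entry under `Caused by:` in output like the one above.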
134 lines
4.3 KiB
Rust
//! This module greps parser's code for specially formatted comments and turns
//! them into tests.
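//!
//! Illustrative example (the exact comment format is inferred from
//! `collect_tests` below; the name and snippet are made up): a comment block in
//! the grammar code such as
//!
//! ```text
//! // test fn_item
//! // fn foo() {}
//! ```
//!
//! is written out as a numbered file (e.g. `0001_fn_item.rs`) in the OK tests
//! directory, while blocks starting with `test_err` go to the error tests
//! directory.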

use std::{
    collections::HashMap,
    fs, iter,
    path::{Path, PathBuf},
};

use crate::{
    codegen::{self, extract_comment_blocks, update, Mode},
    project_root, Result,
};

pub fn generate_parser_tests(mode: Mode) -> Result<()> {
    let tests = tests_from_dir(&project_root().join(Path::new(codegen::GRAMMAR_DIR)))?;
    fn install_tests(tests: &HashMap<String, Test>, into: &str, mode: Mode) -> Result<()> {
        let tests_dir = project_root().join(into);
        if !tests_dir.is_dir() {
            fs::create_dir_all(&tests_dir)?;
        }
        // ok is never actually read, but it needs to be specified to create a Test in existing_tests
        let existing = existing_tests(&tests_dir, true)?;
        for t in existing.keys().filter(|&t| !tests.contains_key(t)) {
            panic!("Test is deleted: {}", t);
        }

        let mut new_idx = existing.len() + 1;
        for (name, test) in tests {
            let path = match existing.get(name) {
                Some((path, _test)) => path.clone(),
                None => {
                    let file_name = format!("{:04}_{}.rs", new_idx, name);
                    new_idx += 1;
                    tests_dir.join(file_name)
                }
            };
            update(&path, &test.text, mode)?;
        }
        Ok(())
    }
    install_tests(&tests.ok, codegen::OK_INLINE_TESTS_DIR, mode)?;
    install_tests(&tests.err, codegen::ERR_INLINE_TESTS_DIR, mode)
}

#[derive(Debug)]
struct Test {
    pub name: String,
    pub text: String,
    pub ok: bool,
}

#[derive(Default, Debug)]
struct Tests {
    pub ok: HashMap<String, Test>,
    pub err: HashMap<String, Test>,
}

fn collect_tests(s: &str) -> Vec<Test> {
    let mut res = Vec::new();
    for comment_block in extract_comment_blocks(s) {
        let first_line = &comment_block[0];
        let (name, ok) = if first_line.starts_with("test ") {
            let name = first_line["test ".len()..].to_string();
            (name, true)
        } else if first_line.starts_with("test_err ") {
            let name = first_line["test_err ".len()..].to_string();
            (name, false)
        } else {
            continue;
        };
        let text: String = comment_block[1..]
            .iter()
            .cloned()
            .chain(iter::once(String::new()))
            .collect::<Vec<_>>()
            .join("\n");
        assert!(!text.trim().is_empty() && text.ends_with('\n'));
        res.push(Test { name, text, ok })
    }
    res
}

fn tests_from_dir(dir: &Path) -> Result<Tests> {
    let mut res = Tests::default();
    for entry in ::walkdir::WalkDir::new(dir) {
        let entry = entry.unwrap();
        if !entry.file_type().is_file() {
            continue;
        }
        if entry.path().extension().unwrap_or_default() != "rs" {
            continue;
        }
        process_file(&mut res, entry.path())?;
    }
    let grammar_rs = dir.parent().unwrap().join("grammar.rs");
    process_file(&mut res, &grammar_rs)?;
    return Ok(res);
    fn process_file(res: &mut Tests, path: &Path) -> Result<()> {
        let text = fs::read_to_string(path)?;

        for test in collect_tests(&text) {
            if test.ok {
                if let Some(old_test) = res.ok.insert(test.name.clone(), test) {
                    anyhow::bail!("Duplicate test: {}", old_test.name);
                }
            } else if let Some(old_test) = res.err.insert(test.name.clone(), test) {
                anyhow::bail!("Duplicate test: {}", old_test.name);
            }
        }
        Ok(())
    }
}

fn existing_tests(dir: &Path, ok: bool) -> Result<HashMap<String, (PathBuf, Test)>> {
    let mut res = HashMap::new();
    for file in fs::read_dir(dir)? {
        let file = file?;
        let path = file.path();
        if path.extension().unwrap_or_default() != "rs" {
            continue;
        }
        let name = {
            let file_name = path.file_name().unwrap().to_str().unwrap();
            // Strip the `NNNN_` index prefix and the `.rs` suffix.
            file_name[5..file_name.len() - 3].to_string()
        };
        let text = fs::read_to_string(&path)?;
        let test = Test { name: name.clone(), text, ok };
        if let Some(old) = res.insert(name, (path, test)) {
            println!("Duplicate test: {:?}", old);
        }
    }
    Ok(res)
}