mirror of
https://github.com/rust-lang/rust-analyzer
synced 2024-12-25 20:43:21 +00:00
Add infra for inline tests
This commit is contained in:
parent
9435ea4b8e
commit
9b3d806b0d
14 changed files with 173 additions and 35 deletions
|
@ -1,3 +1,4 @@
|
|||
[alias]
# Cargo command aliases for the helper binaries in the `tools` package.
parse = "run --package tools --bin parse"
gen = "run --package tools --bin gen"
# Trailing `--` forwards extra flags (e.g. `--verify`) to the binary itself.
collect-tests = "run --package tools --bin collect-tests --"
|
||||
|
|
|
@ -10,6 +10,7 @@ install:
|
|||
build: false
|
||||
|
||||
test_script:
|
||||
- cargo collect-tests --verify
|
||||
- cargo test
|
||||
|
||||
branches:
|
||||
|
|
|
@ -52,11 +52,15 @@ fn item(p: &mut Parser) {
|
|||
STATIC_ITEM
|
||||
}
|
||||
CONST_KW => match p.nth(1) {
|
||||
// test const_fn
|
||||
// const fn foo() {}
|
||||
FN_KW => {
|
||||
p.bump();
|
||||
fn_item(p);
|
||||
FN_ITEM
|
||||
}
|
||||
// test const_unsafe_fn
|
||||
// const unsafe fn foo() {}
|
||||
UNSAFE_KW if p.nth(2) == FN_KW => {
|
||||
p.bump();
|
||||
p.bump();
|
||||
|
|
1
tests/data/parser/inline/0001_const_unsafe_fn.rs
Normal file
1
tests/data/parser/inline/0001_const_unsafe_fn.rs
Normal file
|
@ -0,0 +1 @@
|
|||
const unsafe fn foo() {}
|
15
tests/data/parser/inline/0001_const_unsafe_fn.txt
Normal file
15
tests/data/parser/inline/0001_const_unsafe_fn.txt
Normal file
|
@ -0,0 +1,15 @@
|
|||
FILE@[0; 25)
|
||||
FN_ITEM@[0; 25)
|
||||
CONST_KW@[0; 5)
|
||||
WHITESPACE@[5; 6)
|
||||
UNSAFE_KW@[6; 12)
|
||||
WHITESPACE@[12; 13)
|
||||
FN_KW@[13; 15)
|
||||
WHITESPACE@[15; 16)
|
||||
IDENT@[16; 19) "foo"
|
||||
L_PAREN@[19; 20)
|
||||
R_PAREN@[20; 21)
|
||||
WHITESPACE@[21; 22)
|
||||
L_CURLY@[22; 23)
|
||||
R_CURLY@[23; 24)
|
||||
WHITESPACE@[24; 25)
|
1
tests/data/parser/inline/0002_const_fn.rs
Normal file
1
tests/data/parser/inline/0002_const_fn.rs
Normal file
|
@ -0,0 +1 @@
|
|||
const fn foo() {}
|
13
tests/data/parser/inline/0002_const_fn.txt
Normal file
13
tests/data/parser/inline/0002_const_fn.txt
Normal file
|
@ -0,0 +1,13 @@
|
|||
FILE@[0; 18)
|
||||
FN_ITEM@[0; 18)
|
||||
CONST_KW@[0; 5)
|
||||
WHITESPACE@[5; 6)
|
||||
FN_KW@[6; 8)
|
||||
WHITESPACE@[8; 9)
|
||||
IDENT@[9; 12) "foo"
|
||||
L_PAREN@[12; 13)
|
||||
R_PAREN@[13; 14)
|
||||
WHITESPACE@[14; 15)
|
||||
L_CURLY@[15; 16)
|
||||
R_CURLY@[16; 17)
|
||||
WHITESPACE@[17; 18)
|
|
@ -1,5 +0,0 @@
|
|||
const fn foo() {
|
||||
}
|
||||
|
||||
const unsafe fn foo() {
|
||||
}
|
|
@ -1,29 +0,0 @@
|
|||
FILE@[0; 46)
|
||||
FN_ITEM@[0; 20)
|
||||
CONST_KW@[0; 5)
|
||||
WHITESPACE@[5; 6)
|
||||
FN_KW@[6; 8)
|
||||
WHITESPACE@[8; 9)
|
||||
IDENT@[9; 12) "foo"
|
||||
L_PAREN@[12; 13)
|
||||
R_PAREN@[13; 14)
|
||||
WHITESPACE@[14; 15)
|
||||
L_CURLY@[15; 16)
|
||||
WHITESPACE@[16; 17)
|
||||
R_CURLY@[17; 18)
|
||||
WHITESPACE@[18; 20)
|
||||
FN_ITEM@[20; 46)
|
||||
CONST_KW@[20; 25)
|
||||
WHITESPACE@[25; 26)
|
||||
UNSAFE_KW@[26; 32)
|
||||
WHITESPACE@[32; 33)
|
||||
FN_KW@[33; 35)
|
||||
WHITESPACE@[35; 36)
|
||||
IDENT@[36; 39) "foo"
|
||||
L_PAREN@[39; 40)
|
||||
R_PAREN@[40; 41)
|
||||
WHITESPACE@[41; 42)
|
||||
L_CURLY@[42; 43)
|
||||
WHITESPACE@[43; 44)
|
||||
R_CURLY@[44; 45)
|
||||
WHITESPACE@[45; 46)
|
|
@ -7,7 +7,7 @@ use testutils::dir_tests;
|
|||
|
||||
#[test]
|
||||
fn parser_tests() {
|
||||
dir_tests(&["parser/ok", "parser/err"], |text| {
|
||||
dir_tests(&["parser/inline", "parser/ok", "parser/err"], |text| {
|
||||
let tokens = tokenize(text);
|
||||
let file = parse(text.to_string(), &tokens);
|
||||
dump_tree(&file)
|
||||
|
|
|
@ -9,4 +9,6 @@ serde = "1.0.26"
|
|||
serde_derive = "1.0.26"
|
||||
file = "1.1.1"
|
||||
ron = "0.1.5"
|
||||
walkdir = "2"
|
||||
itertools = "0.7"
|
||||
libsyntax2 = { path = "../" }
|
||||
|
|
134
tools/src/bin/collect-tests.rs
Normal file
134
tools/src/bin/collect-tests.rs
Normal file
|
@ -0,0 +1,134 @@
|
|||
extern crate file;
|
||||
extern crate walkdir;
|
||||
extern crate itertools;
|
||||
|
||||
use walkdir::WalkDir;
|
||||
use itertools::Itertools;
|
||||
|
||||
use std::path::{PathBuf, Path};
|
||||
use std::collections::HashSet;
|
||||
use std::fs;
|
||||
|
||||
fn main() {
|
||||
let verify = ::std::env::args().any(|arg| arg == "--verify");
|
||||
|
||||
let d = grammar_dir();
|
||||
let tests = tests_from_dir(&d);
|
||||
let existing = existing_tests();
|
||||
|
||||
for t in existing.difference(&tests) {
|
||||
panic!("Test is deleted: {}\n{}", t.name, t.text);
|
||||
}
|
||||
|
||||
let new_tests = tests.difference(&existing);
|
||||
for (i, t) in new_tests.enumerate() {
|
||||
if verify {
|
||||
panic!("Inline test is not recorded: {}", t.name);
|
||||
}
|
||||
|
||||
let name = format!("{:04}_{}.rs", existing.len() + i + 1, t.name);
|
||||
println!("Creating {}", name);
|
||||
let path = inline_tests_dir().join(name);
|
||||
file::put_text(&path, &t.text).unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/// A single inline parser test: the name taken from a `// test <name>`
/// comment in the grammar sources, plus the code lines that followed it.
///
/// Identity is the test *name* only — two `Test`s with the same name but
/// different text compare equal and hash alike, so a `HashSet<Test>` keeps
/// at most one entry per name and `HashSet::replace` surfaces name clashes.
#[derive(Debug, Eq)]
struct Test {
    name: String,
    text: String,
}

impl PartialEq for Test {
    // Compare by name alone; `text` is deliberately ignored.
    fn eq(&self, other: &Test) -> bool {
        self.name == other.name
    }
}

impl ::std::hash::Hash for Test {
    // Must stay consistent with `PartialEq`: hash the name only.
    fn hash<H: ::std::hash::Hasher>(&self, state: &mut H) {
        self.name.hash(state)
    }
}
|
||||
|
||||
fn tests_from_dir(dir: &Path) -> HashSet<Test> {
|
||||
let mut res = HashSet::new();
|
||||
for entry in WalkDir::new(dir) {
|
||||
let entry = entry.unwrap();
|
||||
if !entry.file_type().is_file() {
|
||||
continue
|
||||
}
|
||||
if entry.path().extension().unwrap_or_default() != "rs" {
|
||||
continue
|
||||
}
|
||||
let text = file::get_text(entry.path())
|
||||
.unwrap();
|
||||
|
||||
for test in collect_tests(&text) {
|
||||
if let Some(old_test) = res.replace(test) {
|
||||
panic!("Duplicate test: {}", old_test.name)
|
||||
}
|
||||
}
|
||||
}
|
||||
res
|
||||
}
|
||||
|
||||
/// Extracts inline tests from one grammar source file.
///
/// A test is a consecutive run of `// `-prefixed comment lines whose first
/// line reads `// test <name>`; the remaining lines (with the `// ` prefix
/// stripped) become the test's text, always terminated by a newline.
fn collect_tests(s: &str) -> Vec<Test> {
    let mut res = vec![];
    let prefix = "// ";
    // Group consecutive (left-trimmed) lines by whether they carry the
    // comment prefix; each `true` group is a candidate comment block.
    let comment_blocks = s.lines()
        .map(str::trim_left)
        .group_by(|line| line.starts_with(prefix));

    for (is_comment, block) in comment_blocks.into_iter() {
        if !is_comment {
            continue;
        }
        // Slicing is safe: every line in this group starts with `prefix`.
        let mut block = block.map(|line| &line[prefix.len()..]);
        let first = block.next().unwrap();
        // Only blocks whose first line is `test <name>` are inline tests.
        if !first.starts_with("test ") {
            continue
        }
        let name = first["test ".len()..].to_string();
        // Chaining one empty string makes the joined text end with '\n'.
        let text: String = itertools::join(block.chain(::std::iter::once("")), "\n");
        assert!(!text.trim().is_empty() && text.ends_with("\n"));
        res.push(Test { name, text })
    }
    res
}
|
||||
|
||||
fn existing_tests() -> HashSet<Test> {
|
||||
let mut res = HashSet::new();
|
||||
for file in fs::read_dir(&inline_tests_dir()).unwrap() {
|
||||
let file = file.unwrap();
|
||||
let path = file.path();
|
||||
if path.extension().unwrap_or_default() != "rs" {
|
||||
continue
|
||||
}
|
||||
let name = path.file_name().unwrap().to_str().unwrap();
|
||||
let name = name["0000_".len()..name.len() - 3].to_string();
|
||||
let text = file::get_text(&path).unwrap();
|
||||
res.insert(Test { name, text });
|
||||
}
|
||||
res
|
||||
}
|
||||
|
||||
fn inline_tests_dir() -> PathBuf {
|
||||
let res = base_dir().join("tests/data/parser/inline");
|
||||
if !res.is_dir() {
|
||||
fs::create_dir_all(&res).unwrap();
|
||||
}
|
||||
res
|
||||
}
|
||||
|
||||
fn grammar_dir() -> PathBuf {
|
||||
base_dir().join("src/parser/event_parser/grammar")
|
||||
}
|
||||
|
||||
fn base_dir() -> PathBuf {
|
||||
let dir = env!("CARGO_MANIFEST_DIR");
|
||||
PathBuf::from(dir).parent().unwrap().to_owned()
|
||||
}
|
||||
|
||||
|
Loading…
Reference in a new issue