39: Tools r=matklad a=matklad

closes #34 

bors r+
This commit is contained in:
bors[bot] 2018-02-03 19:44:37 +00:00
commit 75b59bf519
16 changed files with 192 additions and 38 deletions

View file

@ -1,3 +1,4 @@
[alias]
parse = "run --package tools --bin parse"
gen = "run --package tools --bin gen"
collect-tests = "run --package tools --bin collect-tests --"

View file

@ -10,6 +10,7 @@ install:
build: false
test_script:
- cargo collect-tests --verify
- cargo test
branches:

View file

@ -19,12 +19,26 @@ files to have the same name except for the leading number. In general,
the test suite should be append-only: old tests should not be modified,
new tests should be created instead.
Note that only `ok` tests are normative: `err` tests test error
recovery and it is totally ok for a parser to not implement any error
recovery at all. However, for libsyntax2.0 we do care about error
recovery, and we do care about precise and useful error messages.
There are also so-called "inline tests". They appear as the comments
with a `test` header in the source code, like this:
```rust
// test fn_basic
// fn foo() {}
fn fn_item(p: &mut Parser) {
// ...
}
```
You can run the `cargo collect-tests` command to collect all inline tests
into the `tests/data/parser/inline` directory. The main advantage of inline
tests is that they help to illustrate what the relevant code is doing.
Contribution opportunity: design and implement testing infrastructure
for validators.

View file

@ -17,14 +17,20 @@ cargo tool
```
# Tool: `gen`
## Tool: `gen`
This tool reads a "grammar" from [grammar.ron](../grammar.ron) and
generates the `syntax_kinds.rs` file. You should run this tool if you
add new keywords or syntax elements.
# Tool: 'parse'
## Tool: `parse`
This tool reads rust source code from the standard input, parses it,
and prints the result to stdout.
## Tool: `collect-tests`
This tool collects inline tests from comments in the libsyntax2 source code
and places them into the `tests/data/parser/inline` directory.

View file

@ -52,11 +52,15 @@ fn item(p: &mut Parser) {
STATIC_ITEM
}
CONST_KW => match p.nth(1) {
// test const_fn
// const fn foo() {}
FN_KW => {
p.bump();
fn_item(p);
FN_ITEM
}
// test const_unsafe_fn
// const unsafe fn foo() {}
UNSAFE_KW if p.nth(2) == FN_KW => {
p.bump();
p.bump();

View file

@ -0,0 +1 @@
const unsafe fn foo() {}

View file

@ -0,0 +1,15 @@
FILE@[0; 25)
FN_ITEM@[0; 25)
CONST_KW@[0; 5)
WHITESPACE@[5; 6)
UNSAFE_KW@[6; 12)
WHITESPACE@[12; 13)
FN_KW@[13; 15)
WHITESPACE@[15; 16)
IDENT@[16; 19) "foo"
L_PAREN@[19; 20)
R_PAREN@[20; 21)
WHITESPACE@[21; 22)
L_CURLY@[22; 23)
R_CURLY@[23; 24)
WHITESPACE@[24; 25)

View file

@ -0,0 +1 @@
const fn foo() {}

View file

@ -0,0 +1,13 @@
FILE@[0; 18)
FN_ITEM@[0; 18)
CONST_KW@[0; 5)
WHITESPACE@[5; 6)
FN_KW@[6; 8)
WHITESPACE@[8; 9)
IDENT@[9; 12) "foo"
L_PAREN@[12; 13)
R_PAREN@[13; 14)
WHITESPACE@[14; 15)
L_CURLY@[15; 16)
R_CURLY@[16; 17)
WHITESPACE@[17; 18)

View file

@ -1,5 +0,0 @@
const fn foo() {
}
const unsafe fn foo() {
}

View file

@ -1,29 +0,0 @@
FILE@[0; 46)
FN_ITEM@[0; 20)
CONST_KW@[0; 5)
WHITESPACE@[5; 6)
FN_KW@[6; 8)
WHITESPACE@[8; 9)
IDENT@[9; 12) "foo"
L_PAREN@[12; 13)
R_PAREN@[13; 14)
WHITESPACE@[14; 15)
L_CURLY@[15; 16)
WHITESPACE@[16; 17)
R_CURLY@[17; 18)
WHITESPACE@[18; 20)
FN_ITEM@[20; 46)
CONST_KW@[20; 25)
WHITESPACE@[25; 26)
UNSAFE_KW@[26; 32)
WHITESPACE@[32; 33)
FN_KW@[33; 35)
WHITESPACE@[35; 36)
IDENT@[36; 39) "foo"
L_PAREN@[39; 40)
R_PAREN@[40; 41)
WHITESPACE@[41; 42)
L_CURLY@[42; 43)
WHITESPACE@[43; 44)
R_CURLY@[44; 45)
WHITESPACE@[45; 46)

View file

@ -7,7 +7,7 @@ use testutils::dir_tests;
#[test]
fn parser_tests() {
dir_tests(&["parser/ok", "parser/err"], |text| {
dir_tests(&["parser/inline", "parser/ok", "parser/err"], |text| {
let tokens = tokenize(text);
let file = parse(text.to_string(), &tokens);
dump_tree(&file)

View file

@ -9,4 +9,6 @@ serde = "1.0.26"
serde_derive = "1.0.26"
file = "1.1.1"
ron = "0.1.5"
walkdir = "2"
itertools = "0.7"
libsyntax2 = { path = "../" }

View file

@ -0,0 +1,130 @@
extern crate file;
extern crate itertools;
extern crate walkdir;
use walkdir::WalkDir;
use itertools::Itertools;
use std::path::{Path, PathBuf};
use std::collections::HashSet;
use std::fs;
fn main() {
let verify = ::std::env::args().any(|arg| arg == "--verify");
let d = grammar_dir();
let tests = tests_from_dir(&d);
let existing = existing_tests();
for t in existing.difference(&tests) {
panic!("Test is deleted: {}\n{}", t.name, t.text);
}
let new_tests = tests.difference(&existing);
for (i, t) in new_tests.enumerate() {
if verify {
panic!("Inline test is not recorded: {}", t.name);
}
let name = format!("{:04}_{}.rs", existing.len() + i + 1, t.name);
println!("Creating {}", name);
let path = inline_tests_dir().join(name);
file::put_text(&path, &t.text).unwrap();
}
}
/// A single inline test: the name taken from its `// test <name>` header
/// and the source text of the lines that follow it.
///
/// Identity is determined by `name` alone — two tests with the same name
/// compare equal and hash identically even when their text differs. This
/// is what lets the collector detect duplicates and diff sets of tests
/// purely by name.
#[derive(Debug, Eq)]
struct Test {
    name: String,
    text: String,
}

impl PartialEq for Test {
    /// Equality by name only; `text` is deliberately ignored.
    fn eq(&self, other: &Test) -> bool {
        self.name == other.name
    }
}

impl ::std::hash::Hash for Test {
    /// Hash by name only, keeping `Hash` consistent with `PartialEq`.
    fn hash<H: ::std::hash::Hasher>(&self, state: &mut H) {
        ::std::hash::Hash::hash(&self.name, state)
    }
}
/// Walks `dir` recursively and gathers every inline test found in the
/// `.rs` files below it.
///
/// Panics if two inline tests share a name, since test names double as
/// file names in the recorded-tests directory.
fn tests_from_dir(dir: &Path) -> HashSet<Test> {
    let mut found = HashSet::new();
    for entry in WalkDir::new(dir) {
        let entry = entry.unwrap();
        let is_rs_file = entry.file_type().is_file()
            && entry.path().extension().unwrap_or_default() == "rs";
        if !is_rs_file {
            continue;
        }
        let source = file::get_text(entry.path()).unwrap();
        for test in collect_tests(&source) {
            // `HashSet::replace` hands back the previous occupant, if any —
            // which here means two inline tests carry the same name.
            if let Some(clash) = found.replace(test) {
                panic!("Duplicate test: {}", clash.name)
            }
        }
    }
    found
}
/// Extracts inline tests from Rust source text.
///
/// An inline test is a contiguous run of `// `-prefixed comment lines whose
/// first line reads `// test <name>`; the remaining lines of the run form
/// the test's text (with the comment prefix stripped and a trailing newline
/// appended). Comment runs that do not start with a `test` header are
/// ignored, as is everything else.
fn collect_tests(s: &str) -> Vec<Test> {
    let prefix = "// ";
    let mut found = Vec::new();
    let mut lines = s.lines().map(str::trim_left).peekable();
    while let Some(first) = lines.next() {
        if !first.starts_with(prefix) {
            continue;
        }
        // Pull in the rest of this contiguous comment run, stripping the
        // leading `// ` from each line. Note a bare `//` (no space) ends
        // the run, exactly like any non-comment line.
        let mut body: Vec<&str> = Vec::new();
        while lines.peek().map_or(false, |l| l.starts_with(prefix)) {
            body.push(&lines.next().unwrap()[prefix.len()..]);
        }
        let header = &first[prefix.len()..];
        if !header.starts_with("test ") {
            continue;
        }
        let name = header["test ".len()..].to_string();
        let mut text = body.join("\n");
        text.push('\n');
        assert!(!text.trim().is_empty() && text.ends_with("\n"));
        found.push(Test { name, text })
    }
    found
}
/// Reads back the tests already recorded in the inline-tests directory.
///
/// Recorded files are named `NNNN_<name>.rs`; the serial-number prefix and
/// the `.rs` suffix are stripped to recover the test name.
fn existing_tests() -> HashSet<Test> {
    let mut recorded = HashSet::new();
    for entry in fs::read_dir(&inline_tests_dir()).unwrap() {
        let path = entry.unwrap().path();
        if path.extension().unwrap_or_default() != "rs" {
            continue;
        }
        let file_name = path.file_name().unwrap().to_str().unwrap();
        // NOTE(review): assumes every `.rs` file here follows the
        // `NNNN_name.rs` pattern; a shorter name would panic on this slice.
        let name = file_name["0000_".len()..file_name.len() - 3].to_string();
        let text = file::get_text(&path).unwrap();
        recorded.insert(Test { name, text });
    }
    recorded
}
/// Directory holding the recorded inline tests, created (with any missing
/// parents) on first use.
fn inline_tests_dir() -> PathBuf {
    let mut dir = base_dir();
    dir.push("tests/data/parser/inline");
    if !dir.is_dir() {
        fs::create_dir_all(&dir).unwrap();
    }
    dir
}
/// Location of the grammar sources scanned for inline tests.
fn grammar_dir() -> PathBuf {
    let mut dir = base_dir();
    dir.push("src/parser/event_parser/grammar");
    dir
}
fn base_dir() -> PathBuf {
let dir = env!("CARGO_MANIFEST_DIR");
PathBuf::from(dir).parent().unwrap().to_owned()
}