From 39024fdc14703e3936f88d9ec938aad55085d05a Mon Sep 17 00:00:00 2001
From: Aleksey Kladov
Date: Sun, 31 Dec 2017 23:27:36 +0300
Subject: [PATCH] Parser test boilerplate

---
 grammar.ron                      |  5 ++-
 src/bin/gen.rs                   | 14 ++++--
 src/lib.rs                       |  2 +-
 src/syntax_kinds.rs              |  4 +-
 tests/data/parser/0000_empty.rs  |  0
 tests/data/parser/0000_empty.txt |  1 +
 tests/parser.rs                  | 76 ++++++++++++++++++++++++++++++++
 7 files changed, 95 insertions(+), 7 deletions(-)
 create mode 100644 tests/data/parser/0000_empty.rs
 create mode 100644 tests/data/parser/0000_empty.txt
 create mode 100644 tests/parser.rs

diff --git a/grammar.ron b/grammar.ron
index 71e354dacf..439c4ef9c8 100644
--- a/grammar.ron
+++ b/grammar.ron
@@ -1,5 +1,5 @@
 Grammar(
-    syntax_kinds: [
+    tokens: [
         "ERROR",
         "IDENT",
         "UNDERSCORE",
@@ -51,5 +51,8 @@ Grammar(
         "COMMENT",
         "DOC_COMMENT",
         "SHEBANG",
+    ],
+    nodes: [
+        "FILE"
     ]
 )
\ No newline at end of file
diff --git a/src/bin/gen.rs b/src/bin/gen.rs
index 4acf381e3c..f5a66d9f25 100644
--- a/src/bin/gen.rs
+++ b/src/bin/gen.rs
@@ -17,7 +17,8 @@ fn main() {
 
 #[derive(Deserialize)]
 struct Grammar {
-    syntax_kinds: Vec<String>,
+    tokens: Vec<String>,
+    nodes: Vec<String>,
 }
 
 impl Grammar {
@@ -31,7 +32,12 @@ impl Grammar {
         acc.push_str("// Generated from grammar.ron\n");
         acc.push_str("use tree::{SyntaxKind, SyntaxInfo};\n");
         acc.push_str("\n");
-        for (idx, kind) in self.syntax_kinds.iter().enumerate() {
+
+        let syntax_kinds: Vec<&String> =
+            self.tokens.iter().chain(self.nodes.iter())
+            .collect();
+
+        for (idx, kind) in syntax_kinds.iter().enumerate() {
             let sname = scream(kind);
             write!(
                 acc,
@@ -40,8 +46,8 @@ impl Grammar {
             ).unwrap();
         }
         acc.push_str("\n");
-        write!(acc, "static INFOS: [SyntaxInfo; {}] = [\n", self.syntax_kinds.len()).unwrap();
-        for kind in self.syntax_kinds.iter() {
+        write!(acc, "static INFOS: [SyntaxInfo; {}] = [\n", syntax_kinds.len()).unwrap();
+        for kind in syntax_kinds.iter() {
             let sname = scream(kind);
             write!(
                 acc,
diff --git a/src/lib.rs b/src/lib.rs
index 82213e2b39..0685e3f2ca 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -6,5 +6,5 @@ mod lexer;
 
 pub mod syntax_kinds;
 pub use text::{TextUnit, TextRange};
-pub use tree::{SyntaxKind, Token};
+pub use tree::{SyntaxKind, Token, FileBuilder, File, Node};
 pub use lexer::{next_token, tokenize};
diff --git a/src/syntax_kinds.rs b/src/syntax_kinds.rs
index ec2a036b9e..b83f48dd8c 100644
--- a/src/syntax_kinds.rs
+++ b/src/syntax_kinds.rs
@@ -52,8 +52,9 @@ pub const THIN_ARROW: SyntaxKind = SyntaxKind(47);
 pub const COMMENT: SyntaxKind = SyntaxKind(48);
 pub const DOC_COMMENT: SyntaxKind = SyntaxKind(49);
 pub const SHEBANG: SyntaxKind = SyntaxKind(50);
+pub const FILE: SyntaxKind = SyntaxKind(51);
 
-static INFOS: [SyntaxInfo; 51] = [
+static INFOS: [SyntaxInfo; 52] = [
     SyntaxInfo { name: "ERROR" },
     SyntaxInfo { name: "IDENT" },
     SyntaxInfo { name: "UNDERSCORE" },
@@ -105,6 +106,7 @@ static INFOS: [SyntaxInfo; 51] = [
     SyntaxInfo { name: "COMMENT" },
     SyntaxInfo { name: "DOC_COMMENT" },
     SyntaxInfo { name: "SHEBANG" },
+    SyntaxInfo { name: "FILE" },
 ];
 
 pub(crate) fn syntax_info(kind: SyntaxKind) -> &'static SyntaxInfo {
diff --git a/tests/data/parser/0000_empty.rs b/tests/data/parser/0000_empty.rs
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/data/parser/0000_empty.txt b/tests/data/parser/0000_empty.txt
new file mode 100644
index 0000000000..843ea118d0
--- /dev/null
+++ b/tests/data/parser/0000_empty.txt
@@ -0,0 +1 @@
+FILE@[0; 0)
\ No newline at end of file
diff --git a/tests/parser.rs b/tests/parser.rs
new file mode 100644
index 0000000000..f61b6830b6
--- /dev/null
+++ b/tests/parser.rs
@@ -0,0 +1,76 @@
+extern crate file;
+#[macro_use(assert_diff)]
+extern crate difference;
+extern crate libsyntax2;
+
+use std::path::{PathBuf, Path};
+use std::fs::read_dir;
+use std::fmt::Write;
+
+use libsyntax2::{tokenize, Token, Node, File, FileBuilder};
+
+#[test]
+fn parser_tests() {
+    for test_case in parser_test_cases() {
+        parser_test_case(&test_case);
+    }
+}
+
+fn parser_test_dir() -> PathBuf {
+    let dir = env!("CARGO_MANIFEST_DIR");
+    PathBuf::from(dir).join("tests/data/parser")
+}
+
+fn parser_test_cases() -> Vec<PathBuf> {
+    let mut acc = Vec::new();
+    let dir = parser_test_dir();
+    for file in read_dir(&dir).unwrap() {
+        let file = file.unwrap();
+        let path = file.path();
+        if path.extension().unwrap_or_default() == "rs" {
+            acc.push(path);
+        }
+    }
+    acc.sort();
+    acc
+}
+
+fn parser_test_case(path: &Path) {
+    let actual = {
+        let text = file::get_text(path).unwrap();
+        let tokens = tokenize(&text);
+        let file = parse(text, &tokens);
+        dump_tree(&file)
+    };
+    let expected = file::get_text(&path.with_extension("txt")).unwrap();
+    let expected = expected.as_str();
+    let actual = actual.as_str();
+    if expected == actual {
+        return
+    }
+    if expected.trim() == actual.trim() {
+        panic!("Whitespace difference!")
+    }
+    assert_diff!(expected, actual, "\n", 0)
+}
+
+fn dump_tree(file: &File) -> String {
+    let mut result = String::new();
+    go(file.root(), &mut result, 0);
+    return result;
+
+    fn go(node: Node, buff: &mut String, level: usize) {
+        buff.push_str(&String::from(" ").repeat(level));
+        write!(buff, "{:?}\n", node);
+        for child in node.children() {
+            go(child, buff, level + 1)
+        }
+    }
+}
+
+fn parse(text: String, tokens: &[Token]) -> File {
+    let mut builder = FileBuilder::new(text);
+    builder.start_internal(libsyntax2::syntax_kinds::FILE);
+    builder.finish_internal();
+    builder.finish()
+}
\ No newline at end of file