G: shebang

Aleksey Kladov 2018-01-07 15:34:11 +03:00
parent 9e4052cc2e
commit 7c6f0f9128
8 changed files with 64 additions and 47 deletions

View file

@@ -6,7 +6,7 @@ use syntax_kinds::*;
 pub fn file(p: &mut Parser) {
     node(p, FILE, |p| {
-        shebang(p);
+        p.optional(SHEBANG);
         inner_attributes(p);
         many(p, |p| {
             skip_to_first(
@@ -17,11 +17,6 @@ pub fn file(p: &mut Parser) {
     })
 }
 
-fn shebang(_: &mut Parser) {
-    //TODO
-}
-
 fn inner_attributes(_: &mut Parser) {
     //TODO
 }
@@ -136,4 +131,10 @@ impl<'p> Parser<'p> {
             false
         }
     }
+
+    pub(crate) fn optional(&mut self, kind: SyntaxKind) {
+        if self.current_is(kind) {
+            self.bump();
+        }
+    }
 }
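
The grammar change and the parser change above belong together: the stub `shebang()` rule is gone, and `file()` now calls the new `Parser::optional` helper, which consumes the current token when it has the requested kind and does nothing otherwise, exactly the at-most-once semantics a leading shebang needs. Below is a minimal standalone sketch of that behaviour; the toy `SyntaxKind` enum and the `tokens`/`pos` fields are stand-ins, since the real `Parser` internals are not part of this diff.

// Sketch only: the real parser works over lexer tokens and emits events,
// not a plain Vec of kinds.
#[derive(Clone, Copy, PartialEq, Eq)]
enum SyntaxKind {
    Shebang,
    Item,
}

struct Parser {
    tokens: Vec<SyntaxKind>, // assumed representation for this sketch
    pos: usize,
}

impl Parser {
    fn current_is(&self, kind: SyntaxKind) -> bool {
        self.tokens.get(self.pos) == Some(&kind)
    }

    fn bump(&mut self) {
        self.pos += 1;
    }

    // The helper added in this commit: eat the token if it matches, otherwise no-op.
    fn optional(&mut self, kind: SyntaxKind) {
        if self.current_is(kind) {
            self.bump();
        }
    }
}

fn main() {
    // A file that starts with a shebang: the token is consumed.
    let mut p = Parser { tokens: vec![SyntaxKind::Shebang, SyntaxKind::Item], pos: 0 };
    p.optional(SyntaxKind::Shebang);
    assert_eq!(p.pos, 1);

    // A file without one: `optional` leaves the position untouched.
    let mut p = Parser { tokens: vec![SyntaxKind::Item], pos: 0 };
    p.optional(SyntaxKind::Shebang);
    assert_eq!(p.pos, 0);
}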

View file

@@ -0,0 +1,2 @@
+#!/use/bin/env rusti
+#!/use/bin/env rusti

View file

@@ -0,0 +1,7 @@
+FILE@[0; 42)
+  SHEBANG@[0; 20)
+  ERROR@[20; 42)
+    err: `expected item`
+    WHITESPACE@[20; 21)
+    SHEBANG@[21; 41)
+    WHITESPACE@[41; 42)

View file

@@ -0,0 +1 @@
+#!/use/bin/env rusti

View file

@@ -0,0 +1,2 @@
+FILE@[0; 20)
+  SHEBANG@[0; 20)
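
The ranges in these expected trees are half-open byte ranges into the corresponding test file: `#!/use/bin/env rusti` is 20 bytes, so the single-shebang case (which has no trailing newline) is just FILE@[0; 20), while in the two-line case the newline occupies [20; 21), the second shebang spans [21; 41) and lands in an ERROR node because the parser expected an item there, and the final newline is [41; 42). A quick sanity check of that arithmetic, with string literals mirroring the test data above:

fn main() {
    let shebang = "#!/use/bin/env rusti";
    assert_eq!(shebang.len(), 20); // SHEBANG@[0; 20)

    // Two shebang lines plus a trailing newline, as in the error test case.
    let text = format!("{}\n{}\n", shebang, shebang);
    assert_eq!(text.len(), 42);         // FILE@[0; 42)
    assert_eq!(&text[20..21], "\n");    // WHITESPACE@[20; 21)
    assert_eq!(&text[21..41], shebang); // SHEBANG@[21; 41)
    assert_eq!(&text[41..42], "\n");    // WHITESPACE@[41; 42)
}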

View file

@@ -2,30 +2,20 @@ extern crate file;
 extern crate libsyntax2;
 extern crate testutils;
 
-use std::path::{Path};
 use std::fmt::Write;
 
 use libsyntax2::{Token, tokenize};
-use testutils::{assert_equal_text, collect_tests};
+use testutils::dir_tests;
 
 #[test]
 fn lexer_tests() {
-    for test_case in collect_tests(&["lexer"]) {
-        lexer_test_case(&test_case);
-    }
-}
-
-fn lexer_test_case(path: &Path) {
-    let actual = {
-        let text = file::get_text(path).unwrap();
-        let tokens = tokenize(&text);
-        dump_tokens(&tokens, &text)
-    };
-    let path = path.with_extension("txt");
-    let expected = file::get_text(&path).unwrap();
-    let expected = expected.as_str();
-    let actual = actual.as_str();
-    assert_equal_text(expected, actual, &path)
+    dir_tests(
+        &["lexer"],
+        |text| {
+            let tokens = tokenize(text);
+            dump_tokens(&tokens, text)
+        }
+    )
 }
 
 fn dump_tokens(tokens: &[Token], text: &str) -> String {

View file

@@ -2,33 +2,21 @@ extern crate file;
 extern crate libsyntax2;
 extern crate testutils;
 
-use std::path::{Path};
 use std::fmt::Write;
 
 use libsyntax2::{tokenize, parse, Node, File};
-use testutils::{collect_tests, assert_equal_text};
+use testutils::dir_tests;
 
 #[test]
 fn parser_tests() {
-    for test_case in collect_tests(&["parser/ok", "parser/err"]) {
-        parser_test_case(&test_case);
-    }
-}
-
-fn parser_test_case(path: &Path) {
-    let actual = {
-        let text = file::get_text(path).unwrap();
-        let tokens = tokenize(&text);
-        let file = parse(text, &tokens);
-        dump_tree(&file)
-    };
-    let expected_path = path.with_extension("txt");
-    let expected = file::get_text(&expected_path).expect(
-        &format!("Can't read {}", expected_path.display())
-    );
-    let expected = expected.as_str();
-    let actual = actual.as_str();
-    assert_equal_text(expected, actual, &expected_path);
+    dir_tests(
+        &["parser/ok", "parser/err"],
+        |text| {
+            let tokens = tokenize(text);
+            let file = parse(text.to_string(), &tokens);
+            dump_tree(&file)
+        }
+    )
 }
 
 fn dump_tree(file: &File) -> String {

View file

@@ -6,7 +6,33 @@ use std::fs::read_dir;
 use difference::Changeset;
 
-pub fn assert_equal_text(
+pub fn dir_tests<F>(
+    paths: &[&str],
+    f: F
+)
+where
+    F: Fn(&str) -> String
+{
+    for path in collect_tests(paths) {
+        let actual = {
+            let text = file::get_text(&path).unwrap();
+            f(&text)
+        };
+        let path = path.with_extension("txt");
+        if !path.exists() {
+            println!("\nfile: {}", path.display());
+            println!("No .txt file with expected result, creating...");
+            file::put_text(&path, actual).unwrap();
+            panic!("No expected result")
+        }
+        let expected = file::get_text(&path).unwrap();
+        let expected = expected.as_str();
+        let actual = actual.as_str();
+        assert_equal_text(expected, actual, &path);
+    }
+}
+
+fn assert_equal_text(
     expected: &str,
     actual: &str,
     path: &Path
@@ -16,7 +42,7 @@ pub fn assert_equal_text(
     }
 }
 
-pub fn collect_tests(paths: &[&str]) -> Vec<PathBuf> {
+fn collect_tests(paths: &[&str]) -> Vec<PathBuf> {
     paths.iter().flat_map(|path| {
         let path = test_data_dir().join(path);
         test_from_dir(&path).into_iter()
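
Taken together, the testutils change turns both test crates into data-driven golden-file tests: dir_tests reads every input collected by collect_tests, runs the supplied closure over its text, and compares the result with a sibling .txt file, creating that file and panicking when no expectation exists yet. The sketch below reproduces the same pattern self-contained on top of std::fs; the `process` closure and the hard-coded input path are hypothetical stand-ins for the real closure and for collect_tests / the `file` crate helpers.

use std::fs;
use std::path::{Path, PathBuf};

// Hypothetical stand-in for the closure a test crate would pass to dir_tests.
fn process(text: &str) -> String {
    format!("{} lines\n", text.lines().count())
}

// Same shape as dir_tests: run `f` over the input, compare against the .txt golden
// file, and create that file (then fail) when it does not exist yet.
fn golden_test(input: &Path, f: impl Fn(&str) -> String) {
    let text = fs::read_to_string(input).unwrap();
    let actual = f(&text);
    let expected_path: PathBuf = input.with_extension("txt");
    if !expected_path.exists() {
        println!("No .txt file with expected result, creating {}", expected_path.display());
        fs::write(&expected_path, &actual).unwrap();
        panic!("No expected result");
    }
    let expected = fs::read_to_string(&expected_path).unwrap();
    assert_eq!(expected, actual, "mismatch for {}", input.display());
}

fn main() {
    // Hypothetical path; the real harness discovers its inputs via collect_tests.
    golden_test(Path::new("tests/data/example.rs"), process);
}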