Mirror of https://github.com/rust-lang/rust-analyzer
Lexer testing infra
parent e132280844
commit 45fce4b3ef
4 changed files with 62 additions and 1 deletion
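In short: this commit adds a data-driven test harness for the (not yet written) lexer. Every .rs file under tests/data/lexer is tokenized, the tokens are dumped one per line, and the dump is compared against a .txt golden file with the same stem.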
Cargo.toml
@@ -3,4 +3,6 @@ name = "libsyntax2"
 version = "0.1.0"
 authors = ["Aleksey Kladov <aleksey.kladov@gmail.com>"]
 
-[dependencies]
+[dev-dependencies]
+file = "1.1.1"
+difference = "1.0.0"
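Both new dev-dependencies serve the test harness added below: the file crate provides file::get_text for reading the fixture files, and difference provides the assert_diff! macro used to compare the actual and expected token dumps.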
tests/data/lexer/0001_hello.rs (new file)
@@ -0,0 +1 @@
+hello world
tests/data/lexer/0001_hello.txt (new file)
@@ -0,0 +1,3 @@
+IDENT 5
+WHITESPACE 1
+IDENT 5
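A test case is a pair of files with the same stem: the .rs file holds the input text, and the .txt file holds the expected token dump, one KIND length entry per token. For the input hello world: hello is a five-byte identifier (IDENT 5), the single space is one byte of whitespace (WHITESPACE 1), and world is another five-byte identifier (IDENT 5).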
tests/lexer.rs (new file)
@@ -0,0 +1,55 @@
+extern crate file;
+#[macro_use(assert_diff)]
+extern crate difference;
+
+use std::path::{PathBuf, Path};
+use std::fs::read_dir;
+
+#[test]
+fn lexer_tests() {
+    for test_case in lexer_test_cases() {
+        lexer_test_case(&test_case);
+    }
+}
+
+fn lexer_test_dir() -> PathBuf {
+    let dir = env!("CARGO_MANIFEST_DIR");
+    PathBuf::from(dir).join("tests/data/lexer")
+}
+
+fn lexer_test_cases() -> Vec<PathBuf> {
+    let mut acc = Vec::new();
+    let dir = lexer_test_dir();
+    for file in read_dir(&dir).unwrap() {
+        let file = file.unwrap();
+        let path = file.path();
+        if path.extension().unwrap_or_default() == "rs" {
+            acc.push(path);
+        }
+    }
+    acc
+}
+
+fn lexer_test_case(path: &Path) {
+    let actual = {
+        let text = file::get_text(path).unwrap();
+        let tokens = tokenize(&text);
+        dump_tokens(&tokens)
+    };
+    let expected = file::get_text(&path.with_extension("txt")).unwrap();
+
+    assert_diff!(
+        expected.as_str(),
+        actual.as_str(),
+        "\n",
+        0
+    )
+}
+
+fn tokenize(text: &str) -> Vec<()> {
+    Vec::new()
+}
+
+fn dump_tokens(tokens: &[()]) -> String {
+    "IDENT 5\nKEYWORD 1\nIDENT 5\n".to_string()
+}
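Note that tokenize and dump_tokens are stubs: the hard-coded dump says KEYWORD 1 where the golden file expects WHITESPACE 1, so the new test starts out failing, presumably on purpose, until a real lexer lands. As a rough sketch of the shape the stubs might eventually take (the Token and TokenKind types below are hypothetical, not part of this commit):

// Hypothetical sketch, not part of this commit: a minimal token type plus a
// dump_tokens that emits the KIND length lines the golden files expect.
#[derive(Clone, Copy)]
enum TokenKind {
    Ident,
    Whitespace,
}

struct Token {
    kind: TokenKind,
    len: usize, // length of the token's text, in bytes
}

fn kind_name(kind: TokenKind) -> &'static str {
    match kind {
        TokenKind::Ident => "IDENT",
        TokenKind::Whitespace => "WHITESPACE",
    }
}

fn dump_tokens(tokens: &[Token]) -> String {
    // One "KIND length" line per token, e.g. "IDENT 5\nWHITESPACE 1\nIDENT 5\n".
    tokens
        .iter()
        .map(|t| format!("{} {}\n", kind_name(t.kind), t.len))
        .collect()
}

With this layout, adding a lexer test means dropping a numbered .rs/.txt pair into tests/data/lexer; cargo test discovers the new case automatically through lexer_test_cases.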