mirror of
https://github.com/rust-lang/rust-analyzer
synced 2025-01-12 13:18:47 +00:00
Parser test boilerplate
This commit is contained in:
parent
8e42f26965
commit
39024fdc14
7 changed files with 95 additions and 7 deletions
|
@ -1,5 +1,5 @@
|
|||
Grammar(
|
||||
syntax_kinds: [
|
||||
tokens: [
|
||||
"ERROR",
|
||||
"IDENT",
|
||||
"UNDERSCORE",
|
||||
|
@ -51,5 +51,8 @@ Grammar(
|
|||
"COMMENT",
|
||||
"DOC_COMMENT",
|
||||
"SHEBANG",
|
||||
],
|
||||
nodes: [
|
||||
"FILE"
|
||||
]
|
||||
)
|
|
@ -17,7 +17,8 @@ fn main() {
|
|||
|
||||
#[derive(Deserialize)]
|
||||
struct Grammar {
|
||||
syntax_kinds: Vec<String>,
|
||||
tokens: Vec<String>,
|
||||
nodes: Vec<String>,
|
||||
}
|
||||
|
||||
impl Grammar {
|
||||
|
@ -31,7 +32,12 @@ impl Grammar {
|
|||
acc.push_str("// Generated from grammar.ron\n");
|
||||
acc.push_str("use tree::{SyntaxKind, SyntaxInfo};\n");
|
||||
acc.push_str("\n");
|
||||
for (idx, kind) in self.syntax_kinds.iter().enumerate() {
|
||||
|
||||
let syntax_kinds: Vec<&String> =
|
||||
self.tokens.iter().chain(self.nodes.iter())
|
||||
.collect();
|
||||
|
||||
for (idx, kind) in syntax_kinds.iter().enumerate() {
|
||||
let sname = scream(kind);
|
||||
write!(
|
||||
acc,
|
||||
|
@ -40,8 +46,8 @@ impl Grammar {
|
|||
).unwrap();
|
||||
}
|
||||
acc.push_str("\n");
|
||||
write!(acc, "static INFOS: [SyntaxInfo; {}] = [\n", self.syntax_kinds.len()).unwrap();
|
||||
for kind in self.syntax_kinds.iter() {
|
||||
write!(acc, "static INFOS: [SyntaxInfo; {}] = [\n", syntax_kinds.len()).unwrap();
|
||||
for kind in syntax_kinds.iter() {
|
||||
let sname = scream(kind);
|
||||
write!(
|
||||
acc,
|
||||
|
|
|
@ -6,5 +6,5 @@ mod lexer;
|
|||
|
||||
pub mod syntax_kinds;
|
||||
pub use text::{TextUnit, TextRange};
|
||||
pub use tree::{SyntaxKind, Token};
|
||||
pub use tree::{SyntaxKind, Token, FileBuilder, File, Node};
|
||||
pub use lexer::{next_token, tokenize};
|
||||
|
|
|
@ -52,8 +52,9 @@ pub const THIN_ARROW: SyntaxKind = SyntaxKind(47);
|
|||
pub const COMMENT: SyntaxKind = SyntaxKind(48);
|
||||
pub const DOC_COMMENT: SyntaxKind = SyntaxKind(49);
|
||||
pub const SHEBANG: SyntaxKind = SyntaxKind(50);
|
||||
pub const FILE: SyntaxKind = SyntaxKind(51);
|
||||
|
||||
static INFOS: [SyntaxInfo; 51] = [
|
||||
static INFOS: [SyntaxInfo; 52] = [
|
||||
SyntaxInfo { name: "ERROR" },
|
||||
SyntaxInfo { name: "IDENT" },
|
||||
SyntaxInfo { name: "UNDERSCORE" },
|
||||
|
@ -105,6 +106,7 @@ static INFOS: [SyntaxInfo; 51] = [
|
|||
SyntaxInfo { name: "COMMENT" },
|
||||
SyntaxInfo { name: "DOC_COMMENT" },
|
||||
SyntaxInfo { name: "SHEBANG" },
|
||||
SyntaxInfo { name: "FILE" },
|
||||
];
|
||||
|
||||
pub(crate) fn syntax_info(kind: SyntaxKind) -> &'static SyntaxInfo {
|
||||
|
|
0
tests/data/parser/0000_empty.rs
Normal file
0
tests/data/parser/0000_empty.rs
Normal file
1
tests/data/parser/0000_empty.txt
Normal file
1
tests/data/parser/0000_empty.txt
Normal file
|
@ -0,0 +1 @@
|
|||
FILE@[0; 0)
|
76
tests/parser.rs
Normal file
76
tests/parser.rs
Normal file
|
@ -0,0 +1,76 @@
|
|||
extern crate file;
|
||||
#[macro_use(assert_diff)]
|
||||
extern crate difference;
|
||||
extern crate libsyntax2;
|
||||
|
||||
use std::path::{PathBuf, Path};
|
||||
use std::fs::read_dir;
|
||||
use std::fmt::Write;
|
||||
|
||||
use libsyntax2::{tokenize, Token, Node, File, FileBuilder};
|
||||
|
||||
/// Entry point for the data-driven parser tests: runs every fixture
/// discovered under `tests/data/parser`.
#[test]
fn parser_tests() {
    parser_test_cases()
        .iter()
        .for_each(|case| parser_test_case(case));
}
|
||||
|
||||
fn parser_test_dir() -> PathBuf {
|
||||
let dir = env!("CARGO_MANIFEST_DIR");
|
||||
PathBuf::from(dir).join("tests/data/parser")
|
||||
}
|
||||
|
||||
fn parser_test_cases() -> Vec<PathBuf> {
|
||||
let mut acc = Vec::new();
|
||||
let dir = parser_test_dir();
|
||||
for file in read_dir(&dir).unwrap() {
|
||||
let file = file.unwrap();
|
||||
let path = file.path();
|
||||
if path.extension().unwrap_or_default() == "rs" {
|
||||
acc.push(path);
|
||||
}
|
||||
}
|
||||
acc.sort();
|
||||
acc
|
||||
}
|
||||
|
||||
fn parser_test_case(path: &Path) {
|
||||
let actual = {
|
||||
let text = file::get_text(path).unwrap();
|
||||
let tokens = tokenize(&text);
|
||||
let file = parse(text, &tokens);
|
||||
dump_tree(&file)
|
||||
};
|
||||
let expected = file::get_text(&path.with_extension("txt")).unwrap();
|
||||
let expected = expected.as_str();
|
||||
let actual = actual.as_str();
|
||||
if expected == actual {
|
||||
return
|
||||
}
|
||||
if expected.trim() == actual.trim() {
|
||||
panic!("Whitespace difference!")
|
||||
}
|
||||
assert_diff!(expected, actual, "\n", 0)
|
||||
}
|
||||
|
||||
fn dump_tree(file: &File) -> String {
|
||||
let mut result = String::new();
|
||||
go(file.root(), &mut result, 0);
|
||||
return result;
|
||||
|
||||
fn go(node: Node, buff: &mut String, level: usize) {
|
||||
buff.push_str(&String::from(" ").repeat(level));
|
||||
write!(buff, "{:?}\n", node);
|
||||
for child in node.children() {
|
||||
go(child, buff, level + 1)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn parse(text: String, tokens: &[Token]) -> File {
|
||||
let mut builder = FileBuilder::new(text);
|
||||
builder.start_internal(libsyntax2::syntax_kinds::FILE);
|
||||
builder.finish_internal();
|
||||
builder.finish()
|
||||
}
|
Loading…
Reference in a new issue