// rust-analyzer/tests/lexer.rs — golden-file tests for the lexer.
extern crate libsyntax2;
extern crate testutils;
use std::fmt::Write;
use libsyntax2::{tokenize, Token};
use testutils::dir_tests;
#[test]
fn lexer_tests() {
    // Data-driven golden test: for every fixture under the `lexer` test-data
    // directory, tokenize the input text and render the token listing;
    // `dir_tests` compares the result against the stored expected output.
    dir_tests(&["lexer"], |text| dump_tokens(&tokenize(text), text))
}
2017-12-30 12:29:09 +00:00
fn dump_tokens(tokens: &[Token], text: &str) -> String {
2017-12-28 21:56:36 +00:00
let mut acc = String::new();
2017-12-30 12:29:09 +00:00
let mut offset = 0;
2017-12-28 21:56:36 +00:00
for token in tokens {
2017-12-30 12:29:09 +00:00
let len: u32 = token.len.into();
let len = len as usize;
let token_text = &text[offset..offset + len];
offset += len;
write!(acc, "{:?} {} {:?}\n", token.kind, token.len, token_text).unwrap()
2017-12-28 21:56:36 +00:00
}
acc
2018-01-27 23:31:23 +00:00
}