parser tests work

Aleksey Kladov 2021-12-12 17:58:45 +03:00
parent 26bfd6023f
commit 6ce587ba5a
8 changed files with 92 additions and 140 deletions


@@ -24,40 +24,11 @@ mod tokens;
 pub(crate) use token_set::TokenSet;
 
-pub use syntax_kind::SyntaxKind;
-use crate::tokens::Tokens;
+pub use crate::{syntax_kind::SyntaxKind, tokens::Tokens};
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct ParseError(pub Box<String>);
 
-/// `TokenSource` abstracts the source of the tokens the parser operates on.
-///
-/// Hopefully this will allow us to treat text and token trees in the same way!
-pub trait TokenSource {
-    fn current(&self) -> Token;
-
-    /// Look ahead n tokens.
-    fn lookahead_nth(&self, n: usize) -> Token;
-
-    /// Bump the cursor to the next token.
-    fn bump(&mut self);
-
-    /// Is the current token a specified keyword?
-    fn is_keyword(&self, kw: &str) -> bool;
-}
-
-/// `Token` abstracts the cursor that `TokenSource` operates on.
-#[derive(Debug, Copy, Clone, Eq, PartialEq)]
-pub struct Token {
-    /// What is the current token?
-    pub kind: SyntaxKind,
-
-    /// Is the current token joined to the next one (`> >` vs `>>`).
-    pub is_jointed_to_next: bool,
-    pub contextual_kw: SyntaxKind,
-}
-
 /// `TreeSink` abstracts details of a particular syntax tree implementation.
 pub trait TreeSink {
     /// Adds new token to the current branch.
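The `Token` struct (with its `contextual_kw` field) does not disappear; it moves into the parser's private `tokens` module below. `contextual_kw` exists because words such as `union`, `raw`, or `macro_rules` are keywords only in certain positions: the lexer always classifies them as `IDENT`, and the token additionally records which contextual keyword the identifier could be, for the parser to consult. A hand-written illustration, not code from this commit:

    // In `let union = 92;` the word `union` is a plain identifier; in
    // `union U { f: u32 }` the parser treats the same token as a keyword.
    let t = Token {
        kind: SyntaxKind::IDENT,             // lexically always an identifier
        is_jointed_to_next: false,
        contextual_kw: SyntaxKind::UNION_KW, // "could be the `union` keyword"
    };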


@@ -334,6 +334,18 @@ impl SyntaxKind {
         };
         Some(kw)
     }
+
+    pub fn from_contextual_keyword(ident: &str) -> Option<SyntaxKind> {
+        let kw = match ident {
+            "auto" => AUTO_KW,
+            "default" => DEFAULT_KW,
+            "existential" => EXISTENTIAL_KW,
+            "union" => UNION_KW,
+            "raw" => RAW_KW,
+            "macro_rules" => MACRO_RULES_KW,
+            _ => return None,
+        };
+        Some(kw)
+    }
+
     pub fn from_char(c: char) -> Option<SyntaxKind> {
         let tok = match c {
             ';' => SEMICOLON,
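A minimal usage sketch of the generated helper (hand-written here, assuming the `SyntaxKind` constants shown above):

    assert_eq!(SyntaxKind::from_contextual_keyword("union"), Some(SyntaxKind::UNION_KW));
    assert_eq!(SyntaxKind::from_contextual_keyword("struct"), None); // hard keyword, not contextual
    assert_eq!(SyntaxKind::from_contextual_keyword("foo"), None);    // ordinary identifier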


@@ -1,8 +1,19 @@
-use crate::{SyntaxKind, Token};
+use crate::SyntaxKind;
 
 #[allow(non_camel_case_types)]
 type bits = u64;
 
+/// `Token` abstracts the cursor that `TokenSource` operates on.
+#[derive(Debug, Copy, Clone, Eq, PartialEq)]
+pub(crate) struct Token {
+    /// What is the current token?
+    pub(crate) kind: SyntaxKind,
+
+    /// Is the current token joined to the next one (`> >` vs `>>`).
+    pub(crate) is_jointed_to_next: bool,
+    pub(crate) contextual_kw: SyntaxKind,
+}
+
 /// Main input to the parser.
 ///
 /// A sequence of tokens represented internally as a struct of arrays.
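"Struct of arrays" means each token attribute lives in its own dense vector rather than in one `Vec<Token>`; the accessors used in `get` below suggest a shape along these lines (the field names are an assumption, not part of this diff):

    pub(crate) struct Tokens {
        kind: Vec<SyntaxKind>,          // one entry per non-trivia token
        joint: Vec<bits>,               // bitset: is token i joined to token i + 1?
        contextual_kw: Vec<SyntaxKind>, // an IDENT's potential contextual keyword
    }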
@@ -49,13 +60,14 @@ impl Tokens {
         self.kind.len()
     }
 
     pub(crate) fn get(&self, idx: usize) -> Token {
-        if idx > self.len() {
-            return self.eof();
-        }
-        let kind = self.kind[idx];
-        let is_jointed_to_next = self.get_joint(idx);
-        let contextual_kw = self.contextual_kw[idx];
-        Token { kind, is_jointed_to_next, contextual_kw }
+        if idx < self.len() {
+            let kind = self.kind[idx];
+            let is_jointed_to_next = self.get_joint(idx);
+            let contextual_kw = self.contextual_kw[idx];
+            Token { kind, is_jointed_to_next, contextual_kw }
+        } else {
+            self.eof()
+        }
     }
 
     #[cold]
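The rewrite of `get` also fixes a latent off-by-one: the old guard only caught `idx > self.len()`, so `idx == self.len()` fell through to `self.kind[idx]` and panicked. Now every out-of-range index yields the EOF token. A sketch of the boundary behavior, assuming `eof()` returns an `EOF`-kinded token:

    let tokens = Tokens::default();                  // empty input, len() == 0
    assert_eq!(tokens.get(0).kind, SyntaxKind::EOF); // previously an out-of-bounds index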


@@ -2,12 +2,10 @@
 //! incremental reparsing.
 
 pub(crate) mod lexer;
-mod text_token_source;
 mod text_tree_sink;
 mod reparsing;
 
 use parser::SyntaxKind;
-use text_token_source::TextTokenSource;
 use text_tree_sink::TextTreeSink;
 
 use crate::{syntax_node::GreenNode, AstNode, SyntaxError, SyntaxNode};
@@ -15,12 +13,12 @@ use crate::{syntax_node::GreenNode, AstNode, SyntaxError, SyntaxNode};
 pub(crate) use crate::parsing::{lexer::*, reparsing::incremental_reparse};
 
 pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) {
-    let (tokens, lexer_errors) = tokenize(text);
+    let (lexer_tokens, lexer_errors) = tokenize(text);
+    let parser_tokens = to_parser_tokens(text, &lexer_tokens);
 
-    let mut token_source = TextTokenSource::new(text, &tokens);
-    let mut tree_sink = TextTreeSink::new(text, &tokens);
+    let mut tree_sink = TextTreeSink::new(text, &lexer_tokens);
 
-    parser::parse_source_file(&mut token_source, &mut tree_sink);
+    parser::parse_source_file(&parser_tokens, &mut tree_sink);
 
     let (tree, mut parser_errors) = tree_sink.finish();
     parser_errors.extend(lexer_errors);
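The shape of the pipeline changes here: instead of handing the parser a stateful `TokenSource` cursor, the caller pre-builds an immutable `Tokens` input and lends it out, so only the `TreeSink` output side stays stateful. Roughly:

    // before: lexer tokens -> TextTokenSource (cursor) -> parser -> TreeSink -> green tree
    // after:  lexer tokens -> to_parser_tokens -> Tokens -> parser -> TreeSink -> green tree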
@@ -33,26 +31,47 @@ pub(crate) fn parse_text_as<T: AstNode>(
     text: &str,
     entry_point: parser::ParserEntryPoint,
 ) -> Result<T, ()> {
-    let (tokens, lexer_errors) = tokenize(text);
+    let (lexer_tokens, lexer_errors) = tokenize(text);
     if !lexer_errors.is_empty() {
         return Err(());
     }
 
-    let mut token_source = TextTokenSource::new(text, &tokens);
-    let mut tree_sink = TextTreeSink::new(text, &tokens);
+    let parser_tokens = to_parser_tokens(text, &lexer_tokens);
+    let mut tree_sink = TextTreeSink::new(text, &lexer_tokens);
 
     // TextTreeSink assumes that there's at least some root node to which it can attach errors and
     // tokens. We arbitrarily give it a SourceFile.
     use parser::TreeSink;
     tree_sink.start_node(SyntaxKind::SOURCE_FILE);
-    parser::parse(&mut token_source, &mut tree_sink, entry_point);
+    parser::parse(&parser_tokens, &mut tree_sink, entry_point);
     tree_sink.finish_node();
 
-    let (tree, parser_errors) = tree_sink.finish();
-    use parser::TokenSource;
-    if !parser_errors.is_empty() || token_source.current().kind != SyntaxKind::EOF {
+    let (tree, parser_errors, eof) = tree_sink.finish_eof();
+    if !parser_errors.is_empty() || !eof {
         return Err(());
     }
 
     SyntaxNode::new_root(tree).first_child().and_then(T::cast).ok_or(())
 }
+
+pub(crate) fn to_parser_tokens(text: &str, lexer_tokens: &[lexer::Token]) -> ::parser::Tokens {
+    let mut off = 0;
+    let mut res = parser::Tokens::default();
+    let mut was_joint = true;
+    for t in lexer_tokens {
+        if t.kind.is_trivia() {
+            was_joint = false;
+        } else if t.kind == SyntaxKind::IDENT {
+            let token_text = &text[off..][..usize::from(t.len)];
+            let contextual_kw =
+                SyntaxKind::from_contextual_keyword(token_text).unwrap_or(SyntaxKind::IDENT);
+            res.push_ident(contextual_kw);
+        } else {
+            res.push(was_joint, t.kind);
+            was_joint = true;
+        }
+        off += usize::from(t.len);
+    }
+    res
+}
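To see what the conversion does: trivia (whitespace and comments) is never pushed into `Tokens`, but it resets `was_joint`, which is how the parser can still tell `>>` from `> >`. A sketch using the crate-internal calls above:

    let text = "a >> b > > c";
    let (lexer_tokens, _errors) = tokenize(text);
    let tokens = to_parser_tokens(text, &lexer_tokens);
    // In `>>` the two `>` tokens are adjacent, so the first is pushed with the
    // joint bit set; the space inside `> >` resets `was_joint`, so there it is not.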


@@ -12,8 +12,8 @@ use text_edit::Indel;
 use crate::{
     parsing::{
         lexer::{lex_single_syntax_kind, tokenize, Token},
-        text_token_source::TextTokenSource,
         text_tree_sink::TextTreeSink,
+        to_parser_tokens,
     },
     syntax_node::{GreenNode, GreenToken, NodeOrToken, SyntaxElement, SyntaxNode},
     SyntaxError,
@@ -91,14 +91,14 @@ fn reparse_block(
     let (node, reparser) = find_reparsable_node(root, edit.delete)?;
     let text = get_text_after_edit(node.clone().into(), edit);
 
-    let (tokens, new_lexer_errors) = tokenize(&text);
-    if !is_balanced(&tokens) {
+    let (lexer_tokens, new_lexer_errors) = tokenize(&text);
+    if !is_balanced(&lexer_tokens) {
         return None;
     }
 
-    let mut token_source = TextTokenSource::new(&text, &tokens);
-    let mut tree_sink = TextTreeSink::new(&text, &tokens);
-    reparser.parse(&mut token_source, &mut tree_sink);
+    let parser_tokens = to_parser_tokens(&text, &lexer_tokens);
+    let mut tree_sink = TextTreeSink::new(&text, &lexer_tokens);
+    reparser.parse(&parser_tokens, &mut tree_sink);
 
     let (green, mut new_parser_errors) = tree_sink.finish();
     new_parser_errors.extend(new_lexer_errors);
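Note that `reparse_block` re-lexes and re-parses only the edited node's text, reusing the rest of the tree; the `is_balanced` check bails out when the new token stream's braces do not pair up, since an unbalanced block could change the node's extent and invalidate the incremental reuse.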


@@ -1,82 +0,0 @@
-//! See `TextTokenSource` docs.
-
-use parser::TokenSource;
-
-use crate::{parsing::lexer::Token, SyntaxKind::EOF, TextRange, TextSize};
-
-/// Implementation of `parser::TokenSource` that takes tokens from source code text.
-pub(crate) struct TextTokenSource<'t> {
-    text: &'t str,
-    /// token and its start position (non-whitespace/comment tokens)
-    /// ```non-rust
-    ///   struct Foo;
-    ///   ^------^--^-
-    ///   |      |    \________
-    ///   |      \____         \
-    ///   |           \         |
-    ///   (struct, 0) (Foo, 7) (;, 10)
-    /// ```
-    /// `[(struct, 0), (Foo, 7), (;, 10)]`
-    token_offset_pairs: Vec<(Token, TextSize)>,
-
-    /// Current token and position
-    curr: (parser::Token, usize),
-}
-
-impl<'t> TokenSource for TextTokenSource<'t> {
-    fn current(&self) -> parser::Token {
-        self.curr.0
-    }
-
-    fn lookahead_nth(&self, n: usize) -> parser::Token {
-        mk_token(self.curr.1 + n, &self.token_offset_pairs)
-    }
-
-    fn bump(&mut self) {
-        if self.curr.0.kind == EOF {
-            return;
-        }
-
-        let pos = self.curr.1 + 1;
-        self.curr = (mk_token(pos, &self.token_offset_pairs), pos);
-    }
-
-    fn is_keyword(&self, kw: &str) -> bool {
-        self.token_offset_pairs
-            .get(self.curr.1)
-            .map_or(false, |(token, offset)| &self.text[TextRange::at(*offset, token.len)] == kw)
-    }
-}
-
-fn mk_token(pos: usize, token_offset_pairs: &[(Token, TextSize)]) -> parser::Token {
-    let (kind, is_jointed_to_next) = match token_offset_pairs.get(pos) {
-        Some((token, offset)) => (
-            token.kind,
-            token_offset_pairs
-                .get(pos + 1)
-                .map_or(false, |(_, next_offset)| offset + token.len == *next_offset),
-        ),
-        None => (EOF, false),
-    };
-    parser::Token { kind, is_jointed_to_next }
-}
-
-impl<'t> TextTokenSource<'t> {
-    /// Generate input from tokens (except comments and whitespace).
-    pub(crate) fn new(text: &'t str, raw_tokens: &'t [Token]) -> TextTokenSource<'t> {
-        let token_offset_pairs: Vec<_> = raw_tokens
-            .iter()
-            .filter_map({
-                let mut len = 0.into();
-                move |token| {
-                    let pair = if token.kind.is_trivia() { None } else { Some((*token, len)) };
-                    len += token.len;
-                    pair
-                }
-            })
-            .collect();
-
-        let first = mk_token(0, &token_offset_pairs);
-        TextTokenSource { text, token_offset_pairs, curr: (first, 0) }
-    }
-}
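The deleted `mk_token` derived jointness lazily, by comparing offsets: a token is joint to its successor exactly when it ends where the successor begins. The new `Tokens` input precomputes the same fact once, in `to_parser_tokens`. The core rule, extracted for illustration:

    // Two consecutive non-trivia tokens are joint iff there is no gap between them.
    fn is_joint(offset: TextSize, len: TextSize, next_offset: TextSize) -> bool {
        offset + len == next_offset
    }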


@@ -104,7 +104,7 @@ impl<'a> TextTreeSink<'a> {
         }
     }
 
-    pub(super) fn finish(mut self) -> (GreenNode, Vec<SyntaxError>) {
+    pub(super) fn finish_eof(mut self) -> (GreenNode, Vec<SyntaxError>, bool) {
         match mem::replace(&mut self.state, State::Normal) {
             State::PendingFinish => {
                 self.eat_trivias();
@@ -113,7 +113,15 @@ impl<'a> TextTreeSink<'a> {
             State::PendingStart | State::Normal => unreachable!(),
         }
 
-        self.inner.finish_raw()
+        let (node, errors) = self.inner.finish_raw();
+        let is_eof = self.token_pos == self.tokens.len();
+
+        (node, errors, is_eof)
     }
 
+    pub(super) fn finish(self) -> (GreenNode, Vec<SyntaxError>) {
+        let (node, errors, _eof) = self.finish_eof();
+        (node, errors)
+    }
+
     fn eat_trivias(&mut self) {
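`finish_eof` lets `parse_text_as` (above) reject inputs that parse without errors but are not consumed to the end. For example (a hedged sketch; the `Expr` entry point name is an assumption):

    // "1 + 1, 2" parses cleanly as the expression `1 + 1`, but ", 2" is never
    // consumed, so finish_eof() reports eof == false and the caller returns Err(()).
    let res: Result<ast::Expr, ()> = parse_text_as("1 + 1, 2", parser::ParserEntryPoint::Expr);
    assert!(res.is_err());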


@@ -359,6 +359,10 @@ fn generate_syntax_kinds(grammar: KindsSrc<'_>) -> String {
     let full_keywords =
         full_keywords_values.iter().map(|kw| format_ident!("{}_KW", to_upper_snake_case(kw)));
 
+    let contextual_keywords_values = &grammar.contextual_keywords;
+    let contextual_keywords =
+        contextual_keywords_values.iter().map(|kw| format_ident!("{}_KW", to_upper_snake_case(kw)));
+
     let all_keywords_values =
         grammar.keywords.iter().chain(grammar.contextual_keywords.iter()).collect::<Vec<_>>();
     let all_keywords_idents = all_keywords_values.iter().map(|kw| format_ident!("{}", kw));
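Each contextual keyword is mapped to its `SyntaxKind` variant by name: `to_upper_snake_case("macro_rules")` yields `MACRO_RULES`, so `format_ident!("{}_KW", ...)` produces `MACRO_RULES_KW`, and the generated match arm becomes `"macro_rules" => MACRO_RULES_KW`, which is exactly the hand-expanded form visible in the generated `SyntaxKind` hunk earlier in this commit.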
@@ -428,6 +432,14 @@ fn generate_syntax_kinds(grammar: KindsSrc<'_>) -> String {
             Some(kw)
         }
 
+        pub fn from_contextual_keyword(ident: &str) -> Option<SyntaxKind> {
+            let kw = match ident {
+                #(#contextual_keywords_values => #contextual_keywords,)*
+                _ => return None,
+            };
+            Some(kw)
+        }
+
         pub fn from_char(c: char) -> Option<SyntaxKind> {
             let tok = match c {
                 #(#single_byte_tokens_values => #single_byte_tokens,)*