Extract parser input into a separate struct

Aleksey Kladov 2018-02-04 14:35:59 +03:00
parent aa36ad008e
commit 852543212b
3 changed files with 89 additions and 28 deletions

src/parser/input.rs (new file, 77 additions)

@@ -0,0 +1,77 @@
use {SyntaxKind, TextRange, TextUnit, Token};
use syntax_kinds::EOF;
use super::is_insignificant;

use std::ops::{Add, AddAssign};

pub(crate) struct ParserInput<'t> {
    #[allow(unused)]
    text: &'t str,
    #[allow(unused)]
    start_offsets: Vec<TextUnit>,
    tokens: Vec<Token>, // non-whitespace tokens
}

impl<'t> ParserInput<'t> {
    pub fn new(text: &'t str, raw_tokens: &'t [Token]) -> ParserInput<'t> {
        let mut tokens = Vec::new();
        let mut start_offsets = Vec::new();
        let mut len = TextUnit::new(0);
        for &token in raw_tokens.iter() {
            if !is_insignificant(token.kind) {
                tokens.push(token);
                start_offsets.push(len);
            }
            len += token.len;
        }

        ParserInput {
            text,
            start_offsets,
            tokens,
        }
    }

    pub fn kind(&self, pos: InputPosition) -> SyntaxKind {
        let idx = pos.0 as usize;
        if !(idx < self.tokens.len()) {
            return EOF;
        }
        self.tokens[idx].kind
    }

    #[allow(unused)]
    pub fn text(&self, pos: InputPosition) -> &'t str {
        let idx = pos.0 as usize;
        if !(idx < self.tokens.len()) {
            return "";
        }
        let start_offset = self.start_offsets[idx];
        let end_offset = self.tokens[idx].len;
        let range = TextRange::from_to(start_offset, end_offset);
        &self.text[range]
    }
}

#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq)]
pub(crate) struct InputPosition(u32);

impl InputPosition {
    pub fn new() -> Self {
        InputPosition(0)
    }
}

impl Add<u32> for InputPosition {
    type Output = InputPosition;

    fn add(self, rhs: u32) -> InputPosition {
        InputPosition(self.0 + rhs)
    }
}

impl AddAssign<u32> for InputPosition {
    fn add_assign(&mut self, rhs: u32) {
        self.0 += rhs
    }
}
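Two ideas in the new file lend themselves to a compact illustration: significant tokens are copied into a dense list while skipped tokens only advance a running offset (so out-of-range positions read as EOF), and the position into that list is a newtype with just enough arithmetic for lookahead and bumping. The sketch below is a self-contained approximation, not the repository's code; Kind, Tok, Input, and Pos are illustrative stand-ins.

use std::ops::{Add, AddAssign};

// Stand-ins for the repository's SyntaxKind / Token types.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum Kind { Ident, Whitespace, Eof }

#[derive(Copy, Clone)]
struct Tok { kind: Kind, len: u32 }

fn is_insignificant(kind: Kind) -> bool {
    kind == Kind::Whitespace
}

/// Newtype index into the dense token list (cf. InputPosition above).
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
struct Pos(u32);

impl Add<u32> for Pos {
    type Output = Pos;
    fn add(self, rhs: u32) -> Pos { Pos(self.0 + rhs) }
}

impl AddAssign<u32> for Pos {
    fn add_assign(&mut self, rhs: u32) { self.0 += rhs }
}

/// Dense view over significant tokens (cf. ParserInput above): skipped
/// tokens still advance the running offset, so each kept token remembers
/// where it starts in the original text.
struct Input {
    start_offsets: Vec<u32>,
    tokens: Vec<Tok>,
}

impl Input {
    fn new(raw: &[Tok]) -> Input {
        let mut tokens = Vec::new();
        let mut start_offsets = Vec::new();
        let mut len = 0u32;
        for &t in raw {
            if !is_insignificant(t.kind) {
                tokens.push(t);
                start_offsets.push(len);
            }
            len += t.len;
        }
        Input { start_offsets, tokens }
    }

    /// Out-of-range positions report EOF instead of panicking.
    fn kind(&self, pos: Pos) -> Kind {
        self.tokens.get(pos.0 as usize).map(|t| t.kind).unwrap_or(Kind::Eof)
    }
}

fn main() {
    let raw = [
        Tok { kind: Kind::Ident, len: 3 },
        Tok { kind: Kind::Whitespace, len: 1 },
        Tok { kind: Kind::Ident, len: 4 },
    ];
    let inp = Input::new(&raw);

    let mut pos = Pos(0);
    assert_eq!(inp.kind(pos), Kind::Ident);
    assert_eq!(inp.kind(pos + 2), Kind::Eof);   // lookahead past the end -> EOF
    assert_eq!(inp.start_offsets, vec![0, 4]);  // whitespace still advanced the offset

    pos += 1;                                   // bump-style advance
    assert_eq!(inp.kind(pos), Kind::Ident);
}

Handing positions around as a dedicated type rather than a bare usize is what lets the parser diff below change nth to take a u32 offset and delegate bounds handling to the input.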

src/parser/mod.rs

@@ -4,6 +4,7 @@ use syntax_kinds::*;
 #[macro_use]
 mod parser;
+mod input;
 mod event;
 mod grammar;

 use self::event::Event;
@@ -11,7 +12,8 @@ use self::event::Event;
 /// Parse a sequence of tokens into the representative node tree
 pub fn parse(text: String, tokens: &[Token]) -> File {
     let events = {
-        let mut parser = parser::Parser::new(&text, tokens);
+        let input = input::ParserInput::new(&text, tokens);
+        let mut parser = parser::Parser::new(&input);
         grammar::file(&mut parser);
         parser.into_events()
     };
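The rewired parse above follows a common ownership shape: the owned input is built first, and the short-lived parser only borrows it, so only the events outlive the block. A stand-alone sketch of that shape, with placeholder types rather than the repository's API:

/// Owned, preprocessed input (stand-in for ParserInput).
struct Input {
    tokens: Vec<u32>,
}

/// Short-lived cursor that borrows the input (stand-in for Parser).
struct Parser<'t> {
    inp: &'t Input,
    pos: usize,
    events: Vec<String>,
}

impl<'t> Parser<'t> {
    fn new(inp: &'t Input) -> Parser<'t> {
        Parser { inp, pos: 0, events: Vec::new() }
    }

    fn run(&mut self) {
        while self.pos < self.inp.tokens.len() {
            self.events.push(format!("token {}", self.inp.tokens[self.pos]));
            self.pos += 1;
        }
    }

    fn into_events(self) -> Vec<String> {
        self.events
    }
}

fn main() {
    let events = {
        let input = Input { tokens: vec![10, 20, 30] }; // owned, built once
        let mut parser = Parser::new(&input);           // parser only borrows it
        parser.run();
        parser.into_events()
    }; // input and parser are dropped here; only the events escape the block
    assert_eq!(events, vec!["token 10", "token 20", "token 30"]);
}

The 't lifetime on Parser in the diff below plays the same role: the parser cannot outlive the ParserInput it borrows.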

src/parser/parser.rs

@@ -1,7 +1,6 @@
-use {SyntaxKind, TextUnit, Token};
 use super::Event;
-use super::is_insignificant;
-use SyntaxKind::{EOF, TOMBSTONE};
+use super::input::{InputPosition, ParserInput};
+use SyntaxKind::{self, EOF, TOMBSTONE};

 pub(crate) struct Marker {
     pos: u32,
@@ -98,35 +97,18 @@ macro_rules! token_set {
 }

 pub(crate) struct Parser<'t> {
-    #[allow(unused)]
-    text: &'t str,
-    #[allow(unused)]
-    start_offsets: Vec<TextUnit>,
-    tokens: Vec<Token>, // non-whitespace tokens
-    pos: usize,
+    inp: &'t ParserInput<'t>,
+    pos: InputPosition,
     events: Vec<Event>,
 }

 impl<'t> Parser<'t> {
-    pub(crate) fn new(text: &'t str, raw_tokens: &'t [Token]) -> Parser<'t> {
-        let mut tokens = Vec::new();
-        let mut start_offsets = Vec::new();
-        let mut len = TextUnit::new(0);
-        for &token in raw_tokens.iter() {
-            if !is_insignificant(token.kind) {
-                tokens.push(token);
-                start_offsets.push(len);
-            }
-            len += token.len;
-        }
-
+    pub(crate) fn new(inp: &'t ParserInput<'t>) -> Parser<'t> {
         Parser {
-            text,
-            start_offsets,
-            tokens,
-            pos: 0,
+            inp,
+            pos: InputPosition::new(),
             events: Vec::new(),
         }
     }
@@ -163,8 +145,8 @@ impl<'t> Parser<'t> {
         });
     }

-    pub(crate) fn nth(&self, n: usize) -> SyntaxKind {
-        self.tokens.get(self.pos + n).map(|t| t.kind).unwrap_or(EOF)
+    pub(crate) fn nth(&self, n: u32) -> SyntaxKind {
+        self.inp.kind(self.pos + n)
     }

     pub(crate) fn current(&self) -> SyntaxKind {