Mirror of https://github.com/rust-lang/rust-analyzer, synced 2024-12-26 13:03:31 +00:00
Parser: even more groundwork
parent e24cadb490
commit 4cda325530
3 changed files with 43 additions and 28 deletions
src/parser/event_parser/grammar.rs (new file, 9 additions)
@@ -0,0 +1,9 @@
+use super::Event;
+use super::parser::Parser;
+
+use syntax_kinds::*;
+
+pub fn parse_file(p: &mut Parser) {
+    p.start(FILE);
+    p.finish();
+}
src/parser/event_parser/mod.rs (new file, 20 additions)
@@ -0,0 +1,20 @@
+use {Token, TextUnit, SyntaxKind};
+
+use syntax_kinds::*;
+mod grammar;
+mod parser;
+
+pub(crate) enum Event {
+    Start { kind: SyntaxKind },
+    Finish,
+    Token {
+        kind: SyntaxKind,
+        n_raw_tokens: u8,
+    }
+}
+
+pub(crate) fn parse<'t>(text: &'t str, raw_tokens: &'t [Token]) -> Vec<Event> {
+    let mut parser = parser::Parser::new(text, raw_tokens);
+    grammar::parse_file(&mut parser);
+    parser.into_events()
+}
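The Event enum above is the commit's central data structure: rather than building a tree directly, the grammar appends a flat log of Start/Token/Finish instructions, and a later pass can replay that log into a nested structure. What follows is an illustrative, self-contained sketch of such a replay, not code from the commit; SyntaxKind is stubbed as a plain &str and the replay simply prints an indented outline.

// Illustrative sketch, not part of the commit: replaying a flat event stream
// as an indented outline. SyntaxKind is stubbed as a plain &str.
type SyntaxKind = &'static str;

enum Event {
    Start { kind: SyntaxKind },
    Finish,
    Token { kind: SyntaxKind, n_raw_tokens: u8 },
}

fn replay(events: &[Event]) {
    let mut depth = 0usize;
    for e in events {
        match e {
            Event::Start { kind } => {
                println!("{}start {}", "  ".repeat(depth), kind);
                depth += 1;
            }
            Event::Finish => {
                depth -= 1;
                println!("{}finish", "  ".repeat(depth));
            }
            Event::Token { kind, n_raw_tokens } => {
                println!("{}token {} x{}", "  ".repeat(depth), kind, n_raw_tokens);
            }
        }
    }
}

fn main() {
    // What parse_file in grammar.rs emits today is just Start(FILE) + Finish;
    // the extra Token event here is hypothetical, to exercise all variants.
    let events = vec![
        Event::Start { kind: "FILE" },
        Event::Token { kind: "IDENT", n_raw_tokens: 1 },
        Event::Finish,
    ];
    replay(&events);
}

For the parse_file added in grammar.rs, the stream is just Start(FILE) followed by Finish, i.e. an empty FILE node.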
src/parser/event_parser/parser.rs
@@ -1,24 +1,8 @@
-use {Token, TextUnit, SyntaxKind};
+use {Token, SyntaxKind, TextUnit};
+use super::Event;
+use syntax_kinds::{WHITESPACE, COMMENT};
 
-use syntax_kinds::*;
-
-
-pub(crate) enum Event {
-    Start { kind: SyntaxKind },
-    Finish,
-    Token {
-        kind: SyntaxKind,
-        n_raw_tokens: u8,
-    }
-}
-
-pub(crate) fn parse<'t>(text: &'t str, raw_tokens: &'t [Token]) -> Vec<Event> {
-    let mut parser = Parser::new(text, raw_tokens);
-    parse_file(&mut parser);
-    parser.events
-}
-
-struct Parser<'t> {
+pub struct Parser<'t> {
     text: &'t str,
     raw_tokens: &'t [Token],
     non_ws_tokens: Vec<(usize, TextUnit)>,
@@ -28,7 +12,7 @@ struct Parser<'t> {
 }
 
 impl<'t> Parser<'t> {
-    fn new(text: &'t str, raw_tokens: &'t [Token]) -> Parser<'t> {
+    pub(crate) fn new(text: &'t str, raw_tokens: &'t [Token]) -> Parser<'t> {
         let mut non_ws_tokens = Vec::new();
         let mut len = TextUnit::new(0);
         for (idx, &token) in raw_tokens.iter().enumerate() {
@@ -49,18 +33,20 @@ impl<'t> Parser<'t> {
         }
     }
-    fn start(&mut self, kind: SyntaxKind) {
+
+    pub(crate) fn into_events(self) -> Vec<Event> {
+        assert!(self.pos == self.non_ws_tokens.len());
+        self.events
+    }
+
+    pub(crate) fn start(&mut self, kind: SyntaxKind) {
         self.event(Event::Start { kind });
     }
-    fn finish(&mut self) {
+
+    pub(crate) fn finish(&mut self) {
         self.event(Event::Finish);
     }
 
     fn event(&mut self, event: Event) {
         self.events.push(event)
     }
 }
-
-fn parse_file(p: &mut Parser) {
-    p.start(FILE);
-    p.finish();
-}
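A note on the constructor shown in the middle hunk: new() walks raw_tokens once, and the explicit use syntax_kinds::{WHITESPACE, COMMENT}; import suggests the loop skips those trivia kinds while recording an (index, start offset) pair for every other token in non_ws_tokens, so later lookahead can ignore whitespace and comments cheaply. Below is a self-contained sketch of that bookkeeping, using stand-in types rather than the crate's SyntaxKind, Token and TextUnit.

// Sketch only: Kind, Tok and u32 offsets stand in for the crate's
// SyntaxKind, Token and TextUnit.
#[derive(Clone, Copy)]
enum Kind { Whitespace, Comment, Ident }

#[derive(Clone, Copy)]
struct Tok { kind: Kind, len: u32 }

// Record (token index, start offset) for every non-trivia token; this is the
// same shape as the `non_ws_tokens: Vec<(usize, TextUnit)>` field in the diff.
fn non_ws_index(raw_tokens: &[Tok]) -> Vec<(usize, u32)> {
    let mut non_ws_tokens = Vec::new();
    let mut len = 0u32; // running offset from the start of the text
    for (idx, &token) in raw_tokens.iter().enumerate() {
        match token.kind {
            Kind::Whitespace | Kind::Comment => (),
            Kind::Ident => non_ws_tokens.push((idx, len)),
        }
        len += token.len;
    }
    non_ws_tokens
}

fn main() {
    // "foo /*c*/ bar": IDENT(3) WS(1) COMMENT(5) WS(1) IDENT(3)
    let toks = [
        Tok { kind: Kind::Ident, len: 3 },
        Tok { kind: Kind::Whitespace, len: 1 },
        Tok { kind: Kind::Comment, len: 5 },
        Tok { kind: Kind::Whitespace, len: 1 },
        Tok { kind: Kind::Ident, len: 3 },
    ];
    assert_eq!(non_ws_index(&toks), [(0, 0), (4, 10)]);
}

The new into_events(), with its assert that self.pos equals self.non_ws_tokens.len(), then reads as a sanity check that the grammar consumed every significant token before the events are handed back.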