commit 351107d0b1
parent c119e8fd8b
Author: Aleksey Kladov
Date:   2018-02-04 17:06:43 +03:00
5 changed files with 7 additions and 9 deletions

[file 1 of 5]

@@ -1,4 +1,4 @@
-use {File, FileBuilder, Sink, SyntaxKind, Token, TextUnit};
+use {File, FileBuilder, Sink, SyntaxKind, TextUnit, Token};
 use syntax_kinds::TOMBSTONE;
 use super::is_insignificant;
@@ -139,7 +139,7 @@ pub(super) fn to_file(text: String, tokens: &[Token], events: Vec<Event>) -> File
                 idx += 1;
             }
             builder.leaf(kind, len);
-        },
+        }
         &Event::Error { ref message } => builder.error().message(message.clone()).emit(),
     }
 }
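
Note: aside from the reordered import list, the only change in this file is dropping the comma after a block-bodied match arm. Rust requires the comma only on expression-bodied arms; a minimal standalone illustration (the function below is invented for the example):

    // Block-bodied match arms need no trailing comma; expression-bodied arms do.
    fn describe(n: u32) -> &'static str {
        match n {
            0 => "zero", // expression arm: comma required
            _ => {
                "nonzero"
            } // block arm: no comma needed after the closing brace
        }
    }

    fn main() {
        assert_eq!(describe(0), "zero");
        assert_eq!(describe(7), "nonzero");
    }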

[file 2 of 5]

@@ -122,7 +122,7 @@ fn item(p: &mut Parser) {
     if !p.at(FN_KW) {
         item.abandon(p);
         p.error().message("expected function").emit();
-        return
+        return;
     }
     fn_item(p);
     FN_ITEM

[file 3 of 5]

@@ -46,10 +46,7 @@ impl<'t> ParserInput<'t> {
         if !(idx < self.tokens.len()) {
             return "";
         }
-        let range = TextRange::from_len(
-            self.start_offsets[idx],
-            self.tokens[idx].len
-        );
+        let range = TextRange::from_len(self.start_offsets[idx], self.tokens[idx].len);
         &self.text[range]
     }
 }
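
The change above collapses a multi-line call into one line; behavior is identical. For readers unfamiliar with the API, `TextRange::from_len(start, len)` denotes the half-open range `start..start + len`, which is then used to slice the source text. A rough sketch of the same lookup using plain `usize` offsets (the types below are simplified stand-ins, not the crate's `TextRange` or `Token`):

    // Simplified stand-in for the parser's token table.
    struct Token {
        len: usize, // byte length of the token's text
    }

    // Slice out token `idx`'s text, or return "" when out of bounds.
    fn token_text<'t>(text: &'t str, start_offsets: &[usize], tokens: &[Token], idx: usize) -> &'t str {
        if idx >= tokens.len() {
            return "";
        }
        // TextRange::from_len(start, len) corresponds to start..start + len.
        let start = start_offsets[idx];
        &text[start..start + tokens[idx].len]
    }

    fn main() {
        let text = "fn main";
        let tokens = [Token { len: 2 }, Token { len: 1 }, Token { len: 4 }];
        let start_offsets = [0, 2, 3];
        assert_eq!(token_text(text, &start_offsets, &tokens, 0), "fn");
        assert_eq!(token_text(text, &start_offsets, &tokens, 2), "main");
        assert_eq!(token_text(text, &start_offsets, &tokens, 9), "");
    }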

[file 4 of 5]

@@ -169,7 +169,6 @@ impl<'t> Parser<'t> {
         self.nth(0)
     }
-
 
     fn event(&mut self, event: Event) {
         self.events.push(event)
     }
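
`event` is the parser's single funnel for output: rather than building a tree as it goes, the parser appends `Event`s to a buffer, and a separate pass (such as `to_file` in the first hunk) replays them against a builder. A minimal sketch of that event-buffer pattern, with invented `Event` variants:

    // Invented, simplified Event variants; only the buffer-and-replay
    // shape mirrors the parser above.
    #[derive(Debug, PartialEq)]
    enum Event {
        Token { len: u32 },
        Error { message: String },
    }

    struct Parser {
        events: Vec<Event>,
    }

    impl Parser {
        // Every production funnels through one method, so the whole
        // parse is recorded as a flat, replayable sequence.
        fn event(&mut self, event: Event) {
            self.events.push(event)
        }
    }

    fn main() {
        let mut p = Parser { events: Vec::new() };
        p.event(Event::Token { len: 2 });
        p.event(Event::Error { message: "expected function".to_string() });
        assert_eq!(p.events.len(), 2);
        assert_eq!(p.events[1], Event::Error { message: "expected function".to_string() });
    }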

[file 5 of 5]

@@ -39,7 +39,9 @@ impl Grammar {
         acc.push_str("use tree::SyntaxInfo;\n");
         acc.push_str("\n");
-        let syntax_kinds: Vec<String> =self.tokens.iter().cloned()
+        let syntax_kinds: Vec<String> = self.tokens
+            .iter()
+            .cloned()
             .chain(self.keywords.iter().map(|kw| kw_token(kw)))
             .chain(self.contextual_keywords.iter().map(|kw| kw_token(kw)))
             .chain(self.nodes.iter().cloned())
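
The reformatting breaks the iterator chain into one adapter per line; the result is unchanged. As a self-contained illustration of the `.chain(...)`-and-collect pattern used here (all inputs below are invented for the sketch, not the generator's actual fields):

    // Invented inputs; only the chain-and-collect shape mirrors the
    // generator code above.
    fn kw_token(kw: &str) -> String {
        format!("{}_KW", kw.to_uppercase())
    }

    fn main() {
        let tokens = vec!["L_PAREN".to_string(), "R_PAREN".to_string()];
        let keywords = vec!["fn", "use"];
        let nodes = vec!["FILE".to_string(), "FN_ITEM".to_string()];

        // chain() glues the three sources into one iterator of String.
        let syntax_kinds: Vec<String> = tokens
            .iter()
            .cloned()
            .chain(keywords.iter().map(|kw| kw_token(kw)))
            .chain(nodes.iter().cloned())
            .collect();

        assert_eq!(
            syntax_kinds,
            ["L_PAREN", "R_PAREN", "FN_KW", "USE_KW", "FILE", "FN_ITEM"]
        );
    }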