//! See `TextTokenSource` docs.
use parser::TokenSource;
use crate::{parsing::lexer::Token, SyntaxKind::EOF, TextRange, TextSize};
/// Implementation of `parser::TokenSource` that takes tokens from source code text.
2019-02-23 13:07:29 +00:00
pub(crate) struct TextTokenSource<'t> {
2019-02-21 12:24:42 +00:00
text: &'t str,
/// token and its start position (non-whitespace/comment tokens)
2019-02-21 12:24:42 +00:00
/// ```non-rust
/// struct Foo;
/// ^------^--^-
/// | | \________
/// | \____ \
/// | \ |
/// (struct, 0) (Foo, 7) (;, 10)
2019-02-21 12:24:42 +00:00
/// ```
/// `[(struct, 0), (Foo, 7), (;, 10)]`
token_offset_pairs: Vec<(Token, TextSize)>,
2019-05-25 12:31:53 +00:00
/// Current token and position
2020-08-12 15:06:49 +00:00
curr: (parser::Token, usize),
2019-02-21 12:24:42 +00:00
}
impl<'t> TokenSource for TextTokenSource<'t> {
2020-08-12 15:06:49 +00:00
fn current(&self) -> parser::Token {
2019-07-04 17:26:44 +00:00
self.curr.0
2019-02-20 18:50:07 +00:00
}
2019-05-25 12:31:53 +00:00
2020-08-12 15:06:49 +00:00
fn lookahead_nth(&self, n: usize) -> parser::Token {
mk_token(self.curr.1 + n, &self.token_offset_pairs)
2019-05-25 12:31:53 +00:00
}
fn bump(&mut self) {
if self.curr.0.kind == EOF {
return;
2019-02-20 18:50:07 +00:00
}
2019-05-25 12:31:53 +00:00
let pos = self.curr.1 + 1;
self.curr = (mk_token(pos, &self.token_offset_pairs), pos);
2019-02-20 18:50:07 +00:00
}
2019-05-25 12:31:53 +00:00
fn is_keyword(&self, kw: &str) -> bool {
self.token_offset_pairs
.get(self.curr.1)
.map(|(token, offset)| &self.text[TextRange::at(*offset, token.len)] == kw)
.unwrap_or(false)
2019-02-20 18:50:07 +00:00
}
}
/// Builds the `parser::Token` at index `pos` of `token_offset_pairs`.
///
/// A token is "jointed" to the next one when the next token starts exactly
/// where this one ends (i.e. no trivia was skipped between them). Positions
/// past the end of the slice produce an `EOF` token that is never jointed.
fn mk_token(pos: usize, token_offset_pairs: &[(Token, TextSize)]) -> parser::Token {
    let (kind, is_jointed_to_next) = match token_offset_pairs.get(pos) {
        Some((token, offset)) => (
            token.kind,
            token_offset_pairs
                .get(pos + 1)
                .map(|(_, next_offset)| offset + token.len == *next_offset)
                .unwrap_or(false),
        ),
        None => (EOF, false),
    };
    parser::Token { kind, is_jointed_to_next }
}
impl<'t> TextTokenSource<'t> {
2018-12-31 13:30:37 +00:00
/// Generate input from tokens(expect comment and whitespace).
pub(crate) fn new(text: &'t str, raw_tokens: &'t [Token]) -> TextTokenSource<'t> {
let token_offset_pairs: Vec<_> = raw_tokens
.iter()
.filter_map({
let mut len = 0.into();
move |token| {
let pair = if token.kind.is_trivia() { None } else { Some((*token, len)) };
len += token.len;
pair
}
})
.collect();
let first = mk_token(0, &token_offset_pairs);
TextTokenSource { text, token_offset_pairs, curr: (first, 0) }
}
}