rust-analyzer/crates/ra_parser/src/lib.rs

//! The Rust parser.
//!
//! The parser doesn't know about concrete representation of tokens and syntax
//! trees. Abstract `TokenSource` and `TreeSink` traits are used instead. As a
//! consequence, this crate does not contain a lexer.
//!
//! The `Parser` struct from the `parser` module is a cursor into the sequence
//! of tokens. Parsing routines use `Parser` to inspect current state and
//! advance the parsing.
//!
//! The actual parsing happens in the `grammar` module.
//!
//! Tests for this crate live in `ra_syntax` crate.
#[macro_use]
mod token_set;
mod syntax_kind;
mod event;
mod parser;
mod grammar;
pub(crate) use token_set::TokenSet;
pub use syntax_kind::SyntaxKind;
/// A parse error, carrying a human-readable message.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ParseError(pub String);
/// `TokenSource` abstracts the source of tokens that the parser operates on.
///
/// Hopefully this will allow us to treat text and token trees in the same way!
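///
/// A minimal sketch of an implementation backed by a pre-lexed token list.
/// The `FlatTokens` name, the stored keyword texts, and the `EOF` fallback
/// are illustrative assumptions, not an API provided by this crate:
///
/// ```ignore
/// struct FlatTokens {
///     kinds: Vec<SyntaxKind>,
///     /// Original text of each token, used only to answer `is_keyword`.
///     texts: Vec<String>,
///     /// `true` if no whitespace separates token `i` from token `i + 1`.
///     joint: Vec<bool>,
/// }
///
/// impl TokenSource for FlatTokens {
///     fn token_kind(&self, pos: usize) -> SyntaxKind {
///         // Past the end of input, keep reporting EOF.
///         self.kinds.get(pos).cloned().unwrap_or(SyntaxKind::EOF)
///     }
///     fn is_token_joint_to_next(&self, pos: usize) -> bool {
///         self.joint.get(pos).cloned().unwrap_or(false)
///     }
///     fn is_keyword(&self, pos: usize, kw: &str) -> bool {
///         self.texts.get(pos).map(|t| t.as_str() == kw).unwrap_or(false)
///     }
/// }
/// ```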
pub trait TokenSource {
    /// What is the current token?
    fn token_kind(&self, pos: usize) -> SyntaxKind;
    /// Is the current token joined to the next one (`> >` vs `>>`)?
    fn is_token_joint_to_next(&self, pos: usize) -> bool;
    /// Is the current token a specified keyword?
    fn is_keyword(&self, pos: usize, kw: &str) -> bool;
}
/// `TreeSink` abstracts details of a particular syntax tree implementation.
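///
/// A minimal sketch of a sink that just logs what the parser produces and
/// collects errors; the `LoggingSink` name is illustrative, not an API
/// provided by this crate:
///
/// ```ignore
/// struct LoggingSink {
///     errors: Vec<ParseError>,
/// }
///
/// impl TreeSink for LoggingSink {
///     fn token(&mut self, kind: SyntaxKind, n_tokens: u8) {
///         // `n_tokens` > 1 signals that several source tokens were glued
///         // into a single token here (e.g. `>` `>` reported as `>>`).
///         eprintln!("token {:?} ({} source tokens)", kind, n_tokens);
///     }
///     fn start_node(&mut self, kind: SyntaxKind) {
///         eprintln!("start {:?}", kind);
///     }
///     fn finish_node(&mut self) {
///         eprintln!("finish");
///     }
///     fn error(&mut self, error: ParseError) {
///         self.errors.push(error);
///     }
/// }
/// ```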
pub trait TreeSink {
    /// Adds a new token to the current branch.
    fn token(&mut self, kind: SyntaxKind, n_tokens: u8);
    /// Starts a new branch and makes it current.
    fn start_node(&mut self, kind: SyntaxKind);
    /// Finishes the current branch and restores the previous
    /// branch as current.
    fn finish_node(&mut self);
    /// Emits a parse error.
    fn error(&mut self, error: ParseError);
}
fn parse_from_tokens<F>(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink, f: F)
where
    F: FnOnce(&mut parser::Parser),
{
    let mut p = parser::Parser::new(token_source);
    f(&mut p);
    let events = p.finish();
    event::process(tree_sink, events);
}
/// Parse given tokens into the given sink as a Rust file.
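///
/// A rough end-to-end sketch, assuming the illustrative `FlatTokens` and
/// `LoggingSink` types from the trait docs above; `lex` stands in for a
/// hypothetical lexer, which this crate deliberately does not provide:
///
/// ```ignore
/// let tokens: FlatTokens = lex("fn main() {}");
/// let mut sink = LoggingSink { errors: Vec::new() };
/// parse(&tokens, &mut sink);
/// assert!(sink.errors.is_empty());
/// ```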
pub fn parse(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) {
    parse_from_tokens(token_source, tree_sink, grammar::root);
}
/// Parse given tokens into the given sink as a path.
pub fn parse_path(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) {
    parse_from_tokens(token_source, tree_sink, grammar::path);
}
/// Parse given tokens into the given sink as an expression.
pub fn parse_expr(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) {
    parse_from_tokens(token_source, tree_sink, grammar::expr);
}
/// Parse given tokens into the given sink as a type.
pub fn parse_ty(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) {
    parse_from_tokens(token_source, tree_sink, grammar::type_);
}
/// Parse given tokens into the given sink as a pattern.
pub fn parse_pat(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) {
    parse_from_tokens(token_source, tree_sink, grammar::pattern);
}
/// Parse given tokens into the given sink as a statement.
pub fn parse_stmt(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink, with_semi: bool) {
    parse_from_tokens(token_source, tree_sink, |p| grammar::stmt(p, with_semi));
}
/// Parse given tokens into the given sink as a block.
pub fn parse_block(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) {
    parse_from_tokens(token_source, tree_sink, grammar::block);
}
/// Parse given tokens into the given sink as a meta item.
pub fn parse_meta(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) {
    parse_from_tokens(token_source, tree_sink, grammar::meta_item);
}
/// Parse given tokens into the given sink as an item.
pub fn parse_item(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) {
    parse_from_tokens(token_source, tree_sink, grammar::item);
}
/// Parse given tokens into the given sink as a visibility qualifier.
pub fn parse_vis(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) {
    parse_from_tokens(token_source, tree_sink, |p| {
        grammar::opt_visibility(p);
    });
}
/// Parse given tokens into the given sink as a sequence of items.
pub fn parse_macro_items(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) {
    parse_from_tokens(token_source, tree_sink, grammar::macro_items);
}
/// Parse given tokens into the given sink as a sequence of statements.
pub fn parse_macro_stmts(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) {
    parse_from_tokens(token_source, tree_sink, grammar::macro_stmts);
}
/// A parsing function for a specific braced block.
pub struct Reparser(fn(&mut parser::Parser));
impl Reparser {
    /// If the node is a braced block, return the corresponding `Reparser`.
    pub fn for_node(
        node: SyntaxKind,
        first_child: Option<SyntaxKind>,
        parent: Option<SyntaxKind>,
    ) -> Option<Reparser> {
        grammar::reparser(node, first_child, parent).map(Reparser)
    }
    /// Re-parse given tokens using this `Reparser`.
    ///
    /// Tokens must start with `{`, end with `}` and form a valid brace
    /// sequence.
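    ///
    /// A rough sketch of the intended incremental-reparse flow; the `BLOCK`
    /// kind, `block_tokens`, and `sink` are illustrative placeholders:
    ///
    /// ```ignore
    /// // Ask whether this node kind can be reparsed in isolation.
    /// if let Some(reparser) = Reparser::for_node(SyntaxKind::BLOCK, None, None) {
    ///     // `block_tokens` must cover exactly one balanced `{ ... }` sequence.
    ///     reparser.parse(&block_tokens, &mut sink);
    /// }
    /// ```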
    pub fn parse(self, token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) {
        let Reparser(r) = self;
        let mut p = parser::Parser::new(token_source);
        r(&mut p);
        let events = p.finish();
        event::process(tree_sink, events);
    }
}