rust-analyzer/crates/ra_syntax/src/lib.rs

//! An experimental implementation of [Rust RFC#2256 libsyntax2.0][rfc#2256].
//!
//! The intent is to be an IDE-ready parser, i.e. one that offers
//!
//! - easy and fast incremental re-parsing,
//! - graceful handling of errors, and
//! - full retention of all information in the source file.
//!
//! For more information, see [the RFC][rfc#2256], or [the working draft][RFC.md].
//!
//! [rfc#2256]: <https://github.com/rust-lang/rfcs/pull/2256>
//! [RFC.md]: <https://github.com/matklad/libsyntax2/blob/master/docs/RFC.md>
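//!
//! # Example
//!
//! A minimal usage sketch (fenced as `ignore`; it only uses items that this
//! file re-exports below):
//!
//! ```ignore
//! use ra_syntax::{AstNode, SourceFile};
//!
//! let file = SourceFile::parse("fn main() { 1 + 1; }");
//! assert!(file.errors().is_empty());
//! assert_eq!(file.syntax().text().to_string(), "fn main() { 1 + 1; }");
//! ```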

#![forbid(missing_debug_implementations, unconditional_recursion, future_incompatible)]
#![deny(bad_style, missing_docs)]
#![allow(missing_docs)]
//#![warn(unreachable_pub)] // rust-lang/rust#47816

pub mod algo;
pub mod ast;
mod lexer;
#[macro_use]
mod token_set;
mod grammar;
mod parser_api;
mod parser_impl;
mod reparsing;
mod string_lexing;
mod syntax_kinds;
/// Utilities for simple uses of the parser.
pub mod utils;
mod validation;
mod syntax_node;
mod ptr;

pub use rowan::{SmolStr, TextRange, TextUnit};
pub use crate::{
    ast::AstNode,
    lexer::{tokenize, Token},
    syntax_kinds::SyntaxKind,
    syntax_node::{Direction, SyntaxError, SyntaxNode, WalkEvent, Location, TreeArc},
    ptr::{SyntaxNodePtr, AstPtr},
};
use ra_text_edit::AtomTextEdit;
use crate::syntax_node::GreenNode;

/// `SourceFile` represents a parse tree for a single Rust file.
pub use crate::ast::SourceFile;

impl SourceFile {
    fn new(green: GreenNode, errors: Vec<SyntaxError>) -> TreeArc<SourceFile> {
        let root = SyntaxNode::new(green, errors);
        if cfg!(debug_assertions) {
            utils::validate_block_structure(&root);
        }
        assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE);
        TreeArc::cast(root)
    }
    pub fn parse(text: &str) -> TreeArc<SourceFile> {
        let tokens = tokenize(&text);
        let (green, errors) =
            parser_impl::parse_with(syntax_node::GreenBuilder::new(), text, &tokens, grammar::root);
        SourceFile::new(green, errors)
    }
    pub fn reparse(&self, edit: &AtomTextEdit) -> TreeArc<SourceFile> {
        // Try to reuse the existing tree; fall back to a full reparse when the
        // edit cannot be applied incrementally.
        self.incremental_reparse(edit).unwrap_or_else(|| self.full_reparse(edit))
    }
    pub fn incremental_reparse(&self, edit: &AtomTextEdit) -> Option<TreeArc<SourceFile>> {
        reparsing::incremental_reparse(self.syntax(), edit, self.errors())
            .map(|(green_node, errors)| SourceFile::new(green_node, errors))
    }
    fn full_reparse(&self, edit: &AtomTextEdit) -> TreeArc<SourceFile> {
        let text = edit.apply(self.syntax().text().to_string());
        SourceFile::parse(&text)
    }
    pub fn errors(&self) -> Vec<SyntaxError> {
        // Parse errors stored on the tree root, plus validation errors
        // computed on demand.
        let mut errors = self.syntax.root_data().clone();
        errors.extend(validation::validate(self));
        errors
    }
}
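
// A minimal reparse sketch, not part of the upstream file. It assumes
// `ra_text_edit::AtomTextEdit::insert(offset, text)` is available as a
// constructor; adjust the edit construction if that signature differs.
#[cfg(test)]
mod reparse_example {
    use super::*;

    #[test]
    fn reparse_falls_back_when_needed() {
        let file = SourceFile::parse("fn foo() {}");
        // Insert a statement into the block body. `reparse` tries the
        // incremental path first and silently falls back to a full parse.
        let edit = AtomTextEdit::insert(TextUnit::from(10u32), " 1 + 1; ".to_string());
        let new_file = file.reparse(&edit);
        assert_eq!(new_file.syntax().text().to_string(), "fn foo() { 1 + 1; }");
        assert!(new_file.errors().is_empty());
    }
}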