//! An experimental implementation of [Rust RFC#2256 libsyntax2.0][rfc#2256].
//!
//! The intent is to be an IDE-ready parser, i.e. one that offers
//!
//! - easy and fast incremental re-parsing,
//! - graceful handling of errors, and
//! - preservation of all information in the source file.
//!
//! For more information, see [the RFC][rfc#2256], or [the working draft][RFC.md].
//!
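//! A minimal usage sketch; the crate name `libsyntax2` is assumed here from
//! the repository URL, so the example is marked `ignore`:
//!
//! ```ignore
//! use libsyntax2::File;
//! use libsyntax2::utils::dump_tree;
//!
//! // Parsing never fails: syntax errors are collected inside the tree.
//! let file = File::parse("fn main() {}");
//! println!("{}", dump_tree(file.syntax()));
//! assert!(file.errors().is_empty());
//! ```
//!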
//! [rfc#2256]: <https://github.com/rust-lang/rfcs/pull/2256>
//! [RFC.md]: <https://github.com/matklad/libsyntax2/blob/master/docs/RFC.md>

#![forbid(
    missing_debug_implementations,
    unconditional_recursion,
    future_incompatible
)]
#![deny(bad_style, missing_docs)]
#![allow(missing_docs)]
//#![warn(unreachable_pub)] // rust-lang/rust#47816

extern crate itertools;
extern crate unicode_xid;
extern crate drop_bomb;
extern crate parking_lot;
extern crate smol_str;
extern crate text_unit;

pub mod algo;
pub mod ast;
mod lexer;

#[macro_use]
mod token_set;
mod parser_api;
mod grammar;
mod parser_impl;

mod syntax_kinds;
mod yellow;
/// Utilities for simple uses of the parser.
pub mod utils;
pub mod text_utils;

pub use {
    text_unit::{TextRange, TextUnit},
    smol_str::SmolStr,
    ast::AstNode,
    lexer::{tokenize, Token},
    syntax_kinds::SyntaxKind,
    yellow::{SyntaxNode, SyntaxNodeRef, OwnedRoot, RefRoot, TreeRoot, SyntaxError},
};

use {
    SyntaxKind::*,
    yellow::{GreenNode, SyntaxRoot},
    parser_api::Parser,
};
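
/// A parsed source file: an owned syntax tree together with the syntax
/// errors collected while parsing it.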
#[derive(Clone, Debug)]
pub struct File {
    root: SyntaxNode
}

impl File {
    fn new(green: GreenNode, errors: Vec<SyntaxError>) -> File {
        let root = SyntaxRoot::new(green, errors);
        let root = SyntaxNode::new_owned(root);
        validate_block_structure(root.borrowed());
        File { root }
    }
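    /// Parse `text` into a `File`: tokenize it and run the `root` grammar
    /// over the tokens, collecting errors in the tree instead of failing.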
    pub fn parse(text: &str) -> File {
        let tokens = tokenize(&text);
        let (green, errors) = parser_impl::parse_with::<yellow::GreenBuilder>(
            text, &tokens, grammar::root,
        );
        File::new(green, errors)
    }
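    /// Apply `edit` and return the re-parsed file, using the incremental
    /// path when possible and falling back to a full re-parse otherwise.
    ///
    /// A minimal usage sketch; the crate name `libsyntax2` and the offsets
    /// are assumptions for illustration, so the example is marked `ignore`:
    ///
    /// ```ignore
    /// use libsyntax2::{File, AtomEdit, TextRange, TextUnit};
    ///
    /// let file = File::parse("fn foo() { 1 }");
    /// // Replace the single byte `1` inside the block with `2`.
    /// let edit = AtomEdit::replace(
    ///     TextRange::offset_len(TextUnit::from(11), TextUnit::from(1)),
    ///     "2".to_string(),
    /// );
    /// let new_file = file.reparse(&edit);
    /// assert!(new_file.errors().is_empty());
    /// ```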
    pub fn reparse(&self, edit: &AtomEdit) -> File {
        self.incremental_reparse(edit).unwrap_or_else(|| self.full_reparse(edit))
    }
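    /// Try to re-parse only the smallest reparsable node covering the edit:
    /// splice the edit into that node's text, bail out with `None` if the
    /// result is not a balanced block, re-run the node's parser, and merge
    /// the old and new error lists.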
    pub fn incremental_reparse(&self, edit: &AtomEdit) -> Option<File> {
        let (node, reparser) = find_reparsable_node(self.syntax(), edit.delete)?;
        let text = replace_range(
            node.text().to_string(),
            edit.delete - node.range().start(),
            &edit.insert,
        );
        let tokens = tokenize(&text);
        if !is_balanced(&tokens) {
            return None;
        }
        let (green, new_errors) = parser_impl::parse_with::<yellow::GreenBuilder>(
            &text, &tokens, reparser,
        );
        let green_root = node.replace_with(green);
        let errors = merge_errors(self.errors(), new_errors, node, edit);
        Some(File::new(green_root, errors))
    }
    fn full_reparse(&self, edit: &AtomEdit) -> File {
        let text = replace_range(self.syntax().text().to_string(), edit.delete, &edit.insert);
        File::parse(&text)
    }
    pub fn ast(&self) -> ast::Root {
        ast::Root::cast(self.syntax()).unwrap()
    }
    pub fn syntax(&self) -> SyntaxNodeRef {
        self.root.borrowed()
    }
    pub fn errors(&self) -> Vec<SyntaxError> {
        self.syntax().root.syntax_root().errors.clone()
    }
}
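
// In debug builds, sanity-check the tree structure: each `}` must close a
// `{` with the same parent node, and the two braces must be the first and
// last children of that parent.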
#[cfg(not(debug_assertions))]
fn validate_block_structure(_: SyntaxNodeRef) {}

#[cfg(debug_assertions)]
fn validate_block_structure(root: SyntaxNodeRef) {
    let mut stack = Vec::new();
    for node in algo::walk::preorder(root) {
        match node.kind() {
            SyntaxKind::L_CURLY => {
                stack.push(node)
            }
            SyntaxKind::R_CURLY => {
                if let Some(pair) = stack.pop() {
                    assert_eq!(
                        node.parent(),
                        pair.parent(),
                        "\nunpaired curly braces:\n{}\n{}\n",
                        root.text(),
                        utils::dump_tree(root),
                    );
                    assert!(
                        node.next_sibling().is_none() && pair.prev_sibling().is_none(),
                        "\nfloating curly braces at {:?}\nfile:\n{}\nerror:\n{}\n",
                        node,
                        root.text(),
                        node.text(),
                    );
                }
            }
            _ => (),
        }
    }
}
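
/// A single atomic text edit: the `delete` range is removed from the text
/// and `insert` is put in its place.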
#[derive(Debug, Clone)]
pub struct AtomEdit {
    pub delete: TextRange,
    pub insert: String,
}

impl AtomEdit {
    pub fn replace(range: TextRange, replace_with: String) -> AtomEdit {
        AtomEdit { delete: range, insert: replace_with }
    }

    pub fn delete(range: TextRange) -> AtomEdit {
        AtomEdit::replace(range, String::new())
    }

    pub fn insert(offset: TextUnit, text: String) -> AtomEdit {
        AtomEdit::replace(TextRange::offset_len(offset, 0.into()), text)
    }
}
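
// Starting from the node covering `range`, walk up the ancestors and return
// the first node that has a dedicated re-parser, together with that parser.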
fn find_reparsable_node(node: SyntaxNodeRef, range: TextRange) -> Option<(SyntaxNodeRef, fn(&mut Parser))> {
    let node = algo::find_covering_node(node, range);
    return algo::ancestors(node)
        .filter_map(|node| reparser(node).map(|r| (node, r)))
        .next();

    fn reparser(node: SyntaxNodeRef) -> Option<fn(&mut Parser)> {
        let res = match node.kind() {
            BLOCK => grammar::block,
            NAMED_FIELD_DEF_LIST => grammar::named_field_def_list,
            _ => return None,
        };
        Some(res)
    }
}
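
// Replace the `range` slice of `text` with `replace_with` and return the result.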
pub /*(meh)*/ fn replace_range(mut text: String, range: TextRange, replace_with: &str) -> String {
    let start = u32::from(range.start()) as usize;
    let end = u32::from(range.end()) as usize;
    text.replace_range(start..end, replace_with);
    text
}
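
// A token sequence qualifies for incremental re-parsing only if it is a
// single balanced `{ ... }` block: it must start with `{`, end with `}`, and
// keep the curly-brace nesting non-negative throughout.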
fn is_balanced(tokens: &[Token]) -> bool {
    if tokens.len() == 0
        || tokens.first().unwrap().kind != L_CURLY
        || tokens.last().unwrap().kind != R_CURLY {
        return false
    }
    let mut balance = 0usize;
    for t in tokens.iter() {
        match t.kind {
            L_CURLY => balance += 1,
            R_CURLY => balance = match balance.checked_sub(1) {
                Some(b) => b,
                None => return false,
            },
            _ => (),
        }
    }
    balance == 0
}
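
// Combine the error lists after an incremental re-parse: old errors before
// the reparsed node are kept, old errors after it are shifted by the edit's
// length delta, errors inside it are replaced by the new ones, and the new
// errors are translated from node-relative to file-relative offsets.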
fn merge_errors(
    old_errors: Vec<SyntaxError>,
    new_errors: Vec<SyntaxError>,
    old_node: SyntaxNodeRef,
    edit: &AtomEdit,
) -> Vec<SyntaxError> {
    let mut res = Vec::new();
    for e in old_errors {
        if e.offset < old_node.range().start() {
            res.push(e)
        } else if e.offset > old_node.range().end() {
            res.push(SyntaxError {
                msg: e.msg,
                offset: e.offset + TextUnit::of_str(&edit.insert) - edit.delete.len(),
            })
        }
    }
    for e in new_errors {
        res.push(SyntaxError {
            msg: e.msg,
            offset: e.offset + old_node.range().start(),
        })
    }
    res
}