Mirror of https://github.com/rust-lang/rust-analyzer (synced 2024-11-10 07:04:22 +00:00)
fix compilation
This commit is contained in: parent d334b5a1db, commit c47f9e2d37
9 changed files with 39 additions and 59 deletions
Cargo.lock (generated)
@@ -1100,6 +1100,7 @@ dependencies = [
  "drop_bomb 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
  "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "ra_parser 0.1.0",
  "ra_text_edit 0.1.0",
  "rowan 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "smol_str 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",

@@ -53,12 +53,12 @@ impl Reparser {
     ) -> Option<Reparser> {
         grammar::reparser(node, first_child, parent).map(Reparser)
     }
-}
 
-pub fn reparse(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink, reparser: Reparser) {
-    let Reparser(r) = reparser;
+    pub fn parse(self, token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) {
+        let Reparser(r) = self;
         let mut p = parser::Parser::new(token_source);
         r(&mut p);
         let events = p.finish();
         event::process(tree_sink, events);
+    }
 }

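This hunk turns the free `reparse` function into a `Reparser::parse` method. A minimal sketch of how a caller can drive the new API, assuming only the signatures visible in this commit (`Reparser::for_node` appears in the reparsing hunk further down; the helper `try_reparse` itself is illustrative, not part of the change):

use ra_parser::{Reparser, SyntaxKind, TokenSource, TreeSink};

// Illustrative helper: look up a reparser for a node kind and, if one
// exists, let it re-drive the given token source into the given sink.
fn try_reparse(
    node: SyntaxKind,
    first_child: Option<SyntaxKind>,
    parent: Option<SyntaxKind>,
    token_source: &dyn TokenSource,
    tree_sink: &mut dyn TreeSink,
) -> bool {
    match Reparser::for_node(node, first_child, parent) {
        Some(reparser) => {
            // `parse` consumes the Reparser and emits events into the sink.
            reparser.parse(token_source, tree_sink);
            true
        }
        None => false,
    }
}
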
@@ -21,6 +21,7 @@ text_unit = { version = "0.1.6", features = ["serde"] }
 smol_str = { version = "0.1.9", features = ["serde"] }
 
 ra_text_edit = { path = "../ra_text_edit" }
+ra_parser = { path = "../ra_parser" }
 
 [dev-dependencies]
 test_utils = { path = "../test_utils" }

@@ -16,7 +16,6 @@
 #![allow(missing_docs)]
 //#![warn(unreachable_pub)] // rust-lang/rust#47816
 
-mod syntax_kinds;
 mod syntax_node;
 mod syntax_text;
 mod syntax_error;

@@ -31,9 +30,9 @@ pub mod ast;
 pub mod utils;
 
 pub use rowan::{SmolStr, TextRange, TextUnit};
+pub use ra_parser::SyntaxKind;
 pub use crate::{
     ast::AstNode,
-    syntax_kinds::SyntaxKind,
     syntax_error::{SyntaxError, SyntaxErrorKind, Location},
     syntax_text::SyntaxText,
     syntax_node::{Direction, SyntaxNode, WalkEvent, TreeArc},

@@ -1,50 +1,28 @@
-#[macro_use]
-mod token_set;
 mod builder;
 mod lexer;
-mod event;
 mod input;
-mod parser;
-mod grammar;
 mod reparsing;
 
+use ra_parser::{parse, ParseError};
+
 use crate::{
     SyntaxKind, SyntaxError,
     parsing::{
         builder::TreeBuilder,
         input::ParserInput,
-        event::process,
-        parser::Parser,
     },
     syntax_node::GreenNode,
 };
 
 pub use self::lexer::{tokenize, Token};
 
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub struct ParseError(pub String);
-
 pub(crate) use self::reparsing::incremental_reparse;
 
 pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) {
     let tokens = tokenize(&text);
-    let tree_sink = TreeBuilder::new(text, &tokens);
-    parse_with(tree_sink, text, &tokens, grammar::root)
-}
-
-fn parse_with<S: TreeSink>(
-    mut tree_sink: S,
-    text: &str,
-    tokens: &[Token],
-    f: fn(&mut Parser),
-) -> S::Tree {
-    let events = {
-        let input = ParserInput::new(text, &tokens);
-        let mut p = Parser::new(&input);
-        f(&mut p);
-        p.finish()
-    };
-    process(&mut tree_sink, events);
+    let token_source = ParserInput::new(text, &tokens);
+    let mut tree_sink = TreeBuilder::new(text, &tokens);
+    parse(&token_source, &mut tree_sink);
     tree_sink.finish()
 }

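Taken together, the hunk above and the builder/input hunks below mean ra_syntax now drives ra_parser only through its two traits: `ParserInput` plays the `TokenSource`, `TreeBuilder` plays the `TreeSink`. A sketch of the resulting full-parse pipeline in one place, written as if it sat next to `parse_text` inside the same `parsing` module and assuming the constructor and `finish` signatures shown in this commit:

use ra_parser::parse;

use crate::{
    SyntaxError,
    parsing::{builder::TreeBuilder, input::ParserInput, lexer::tokenize},
    syntax_node::GreenNode,
};

// Sketch of the new full-parse flow: lex the text, expose the tokens as a
// TokenSource, collect parser events into a TreeSink, then finish the tree.
pub(crate) fn parse_to_green(text: &str) -> (GreenNode, Vec<SyntaxError>) {
    let tokens = tokenize(text);
    let token_source = ParserInput::new(text, &tokens);
    let mut tree_sink = TreeBuilder::new(text, &tokens);
    parse(&token_source, &mut tree_sink);
    tree_sink.finish()
}
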
@@ -1,7 +1,9 @@
+use ra_parser::{TreeSink, ParseError};
+
 use crate::{
     SmolStr, SyntaxError, SyntaxErrorKind, TextUnit, TextRange,
     SyntaxKind::{self, *},
-    parsing::{TreeSink, ParseError, Token},
+    parsing::Token,
     syntax_node::{GreenNode, RaTypes},
 };
 
@@ -17,8 +19,6 @@ pub(crate) struct TreeBuilder<'a> {
 }
 
 impl<'a> TreeSink for TreeBuilder<'a> {
-    type Tree = (GreenNode, Vec<SyntaxError>);
-
     fn leaf(&mut self, kind: SyntaxKind, n_tokens: u8) {
         self.eat_trivias();
         let n_tokens = n_tokens as usize;

@@ -65,10 +65,6 @@ impl<'a> TreeSink for TreeBuilder<'a> {
         let error = SyntaxError::new(SyntaxErrorKind::ParseError(error), self.text_pos);
         self.errors.push(error)
     }
-
-    fn finish(self) -> (GreenNode, Vec<SyntaxError>) {
-        (self.inner.finish(), self.errors)
-    }
 }
 
 impl<'a> TreeBuilder<'a> {

@@ -82,6 +78,11 @@ impl<'a> TreeBuilder<'a> {
             inner: GreenNodeBuilder::new(),
         }
     }
+
+    pub(super) fn finish(self) -> (GreenNode, Vec<SyntaxError>) {
+        (self.inner.finish(), self.errors)
+    }
+
     fn eat_trivias(&mut self) {
         while let Some(&token) = self.tokens.get(self.token_pos) {
             if !token.kind.is_trivia() {

@@ -1,9 +1,8 @@
+use ra_parser::TokenSource;
+
 use crate::{
     SyntaxKind, SyntaxKind::EOF, TextRange, TextUnit,
-    parsing::{
-        TokenSource,
-        lexer::Token,
-    },
+    parsing::lexer::Token,
 };
 
 impl<'t> TokenSource for ParserInput<'t> {

@@ -1,18 +1,18 @@
+use ra_text_edit::AtomTextEdit;
+use ra_parser::Reparser;
+
 use crate::{
     SyntaxKind::*, TextRange, TextUnit,
     algo,
     syntax_node::{GreenNode, SyntaxNode},
     syntax_error::SyntaxError,
     parsing::{
-        grammar, parse_with,
+        input::ParserInput,
         builder::TreeBuilder,
-        parser::Parser,
         lexer::{tokenize, Token},
     }
 };
 
-use ra_text_edit::AtomTextEdit;
-
 pub(crate) fn incremental_reparse(
     node: &SyntaxNode,
     edit: &AtomTextEdit,

@@ -61,8 +61,10 @@ fn reparse_block<'node>(
     if !is_balanced(&tokens) {
         return None;
     }
-    let tree_sink = TreeBuilder::new(&text, &tokens);
-    let (green, new_errors) = parse_with(tree_sink, &text, &tokens, reparser);
+    let token_source = ParserInput::new(&text, &tokens);
+    let mut tree_sink = TreeBuilder::new(&text, &tokens);
+    reparser.parse(&token_source, &mut tree_sink);
+    let (green, new_errors) = tree_sink.finish();
     Some((node, green, new_errors))
 }
 
@@ -78,15 +80,12 @@ fn is_contextual_kw(text: &str) -> bool {
     }
 }
 
-fn find_reparsable_node(
-    node: &SyntaxNode,
-    range: TextRange,
-) -> Option<(&SyntaxNode, fn(&mut Parser))> {
+fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> Option<(&SyntaxNode, Reparser)> {
     let node = algo::find_covering_node(node, range);
     node.ancestors().find_map(|node| {
         let first_child = node.first_child().map(|it| it.kind());
         let parent = node.parent().map(|it| it.kind());
-        grammar::reparser(node.kind(), first_child, parent).map(|r| (node, r))
+        Reparser::for_node(node.kind(), first_child, parent).map(|r| (node, r))
     })
 }
 
@@ -1,6 +1,8 @@
 use std::fmt;
 
-use crate::{TextRange, TextUnit, parsing::ParseError};
+use ra_parser::ParseError;
+
+use crate::{TextRange, TextUnit};
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct SyntaxError {