5727: Rename ra_parser -> parser
 r=matklad a=matklad

bors r+
🤖

Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
This commit is contained in:
bors[bot] 2020-08-12 15:15:00 +00:00 committed by GitHub
commit 3d6889cba7
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
47 changed files with 65 additions and 65 deletions

20
Cargo.lock generated
View file

@ -834,6 +834,13 @@ dependencies = [
"winapi 0.3.9",
]
[[package]]
name = "parser"
version = "0.0.0"
dependencies = [
"drop_bomb",
]
[[package]]
name = "paths"
version = "0.1.0"
@ -1018,10 +1025,10 @@ dependencies = [
"arena",
"either",
"log",
"parser",
"profile",
"ra_db",
"ra_mbe",
"ra_parser",
"ra_syntax",
"rustc-hash",
"test_utils",
@ -1105,7 +1112,7 @@ name = "ra_mbe"
version = "0.1.0"
dependencies = [
"log",
"ra_parser",
"parser",
"ra_syntax",
"rustc-hash",
"smallvec",
@ -1113,13 +1120,6 @@ dependencies = [
"tt",
]
[[package]]
name = "ra_parser"
version = "0.1.0"
dependencies = [
"drop_bomb",
]
[[package]]
name = "ra_proc_macro"
version = "0.1.0"
@ -1190,7 +1190,7 @@ dependencies = [
"expect",
"itertools",
"once_cell",
"ra_parser",
"parser",
"rayon",
"rowan",
"rustc-ap-rustc_lexer",

View file

@ -1,10 +1,9 @@
[package]
edition = "2018"
name = "ra_parser"
version = "0.1.0"
authors = ["rust-analyzer developers"]
publish = false
name = "parser"
version = "0.0.0"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer developers"]
edition = "2018"
[lib]
doctest = false

View file

@ -16,7 +16,7 @@ rustc-hash = "1.0.0"
arena = { path = "../arena" }
ra_db = { path = "../ra_db" }
ra_syntax = { path = "../ra_syntax" }
ra_parser = { path = "../ra_parser" }
parser = { path = "../parser" }
profile = { path = "../profile" }
tt = { path = "../tt" }
mbe = { path = "../ra_mbe", package = "ra_mbe" }

View file

@ -2,7 +2,7 @@
use log::debug;
use ra_parser::FragmentKind;
use parser::FragmentKind;
use ra_syntax::{
ast::{self, AstNode, GenericParamsOwner, ModuleItemOwner, NameOwner},
match_ast,

View file

@ -6,8 +6,8 @@ use crate::{
use either::Either;
use mbe::parse_to_token_tree;
use parser::FragmentKind;
use ra_db::FileId;
use ra_parser::FragmentKind;
use ra_syntax::ast::{self, AstToken, HasStringValue};
macro_rules! register_builtin {

View file

@ -3,8 +3,8 @@
use std::sync::Arc;
use mbe::{ExpandResult, MacroRules};
use parser::FragmentKind;
use ra_db::{salsa, SourceDatabase};
use ra_parser::FragmentKind;
use ra_syntax::{algo::diff, AstNode, GreenNode, Parse, SyntaxKind::*, SyntaxNode};
use crate::{

View file

@ -25,8 +25,8 @@ use crate::{
EagerCallLoc, EagerMacroId, InFile, MacroCallId, MacroCallKind, MacroDefId, MacroDefKind,
};
use parser::FragmentKind;
use ra_db::CrateId;
use ra_parser::FragmentKind;
use ra_syntax::{algo::SyntaxRewriter, SyntaxNode};
use std::sync::Arc;

View file

@ -317,7 +317,7 @@ pub struct ExpansionInfo {
}
pub use mbe::Origin;
use ra_parser::FragmentKind;
use parser::FragmentKind;
impl ExpansionInfo {
pub fn call_node(&self) -> Option<InFile<SyntaxNode>> {

View file

@ -10,7 +10,7 @@ doctest = false
[dependencies]
ra_syntax = { path = "../ra_syntax" }
ra_parser = { path = "../ra_parser" }
parser = { path = "../parser" }
tt = { path = "../tt" }
rustc-hash = "1.1.0"
smallvec = "1.2.0"

View file

@ -9,7 +9,7 @@ use crate::{
};
use super::ExpandResult;
use ra_parser::{FragmentKind::*, TreeSink};
use parser::{FragmentKind::*, TreeSink};
use ra_syntax::{SmolStr, SyntaxKind};
use tt::buffer::{Cursor, TokenBuffer};
@ -285,7 +285,7 @@ impl<'a> TtIter<'a> {
pub(crate) fn expect_fragment(
&mut self,
fragment_kind: ra_parser::FragmentKind,
fragment_kind: parser::FragmentKind,
) -> ExpandResult<Option<tt::TokenTree>> {
pub(crate) struct OffsetTokenSink<'a> {
pub(crate) cursor: Cursor<'a>,
@ -303,7 +303,7 @@ impl<'a> TtIter<'a> {
}
fn start_node(&mut self, _kind: SyntaxKind) {}
fn finish_node(&mut self) {}
fn error(&mut self, _error: ra_parser::ParseError) {
fn error(&mut self, _error: parser::ParseError) {
self.error = true;
}
}
@ -312,7 +312,7 @@ impl<'a> TtIter<'a> {
let mut src = SubtreeTokenSource::new(&buffer);
let mut sink = OffsetTokenSink { cursor: buffer.begin(), error: false };
ra_parser::parse_fragment(&mut src, &mut sink, fragment_kind);
parser::parse_fragment(&mut src, &mut sink, fragment_kind);
let mut err = None;
if !sink.cursor.is_root() || sink.error {

View file

@ -1,6 +1,6 @@
//! FIXME: write short doc here
use ra_parser::{Token, TokenSource};
use parser::{Token, TokenSource};
use ra_syntax::{lex_single_syntax_kind, SmolStr, SyntaxKind, SyntaxKind::*, T};
use std::cell::{Cell, Ref, RefCell};
use tt::buffer::{Cursor, TokenBuffer};

View file

@ -1,6 +1,6 @@
//! FIXME: write short doc here
use ra_parser::{FragmentKind, ParseError, TreeSink};
use parser::{FragmentKind, ParseError, TreeSink};
use ra_syntax::{
ast::{self, make::tokens::doc_comment},
tokenize, AstToken, Parse, SmolStr, SyntaxKind,
@ -81,7 +81,7 @@ pub fn token_tree_to_syntax_node(
let buffer = TokenBuffer::new(&tokens);
let mut token_source = SubtreeTokenSource::new(&buffer);
let mut tree_sink = TtTreeSink::new(buffer.begin());
ra_parser::parse_fragment(&mut token_source, &mut tree_sink, fragment_kind);
parser::parse_fragment(&mut token_source, &mut tree_sink, fragment_kind);
if tree_sink.roots.len() != 1 {
return Err(ExpandError::ConversionError);
}
@ -715,7 +715,7 @@ impl<'a> TreeSink for TtTreeSink<'a> {
mod tests {
use super::*;
use crate::tests::parse_macro;
use ra_parser::TokenSource;
use parser::TokenSource;
use ra_syntax::{
algo::{insert_children, InsertPosition},
ast::AstNode,

View file

@ -1,6 +1,6 @@
use std::fmt::Write;
use ra_parser::FragmentKind;
use ::parser::FragmentKind;
use ra_syntax::{ast, AstNode, NodeOrToken, SyntaxKind::IDENT, SyntaxNode, WalkEvent, T};
use test_utils::assert_eq_text;
@ -9,9 +9,10 @@ use super::*;
mod rule_parsing {
use ra_syntax::{ast, AstNode};
use super::*;
use crate::ast_to_token_tree;
use super::*;
#[test]
fn test_valid_arms() {
fn check(macro_body: &str) {

View file

@ -21,7 +21,7 @@ once_cell = "1.3.1"
stdx = { path = "../stdx" }
text_edit = { path = "../text_edit" }
ra_parser = { path = "../ra_parser" }
parser = { path = "../parser" }
# This crate transitively depends on `smol_str` via `rowan`.
# ideally, `serde` should be enabled by `rust-analyzer`, but we enable it here

View file

@ -4,7 +4,7 @@
use std::fmt;
use itertools::Itertools;
use ra_parser::SyntaxKind;
use parser::SyntaxKind;
use crate::{
ast::{self, support, AstNode, NameOwner, SyntaxNode},

View file

@ -11,7 +11,7 @@
//!
//! The most interesting modules here are `syntax_node` (which defines concrete
//! syntax tree) and `ast` (which defines abstract syntax tree on top of the
//! CST). The actual parser live in a separate `ra_parser` crate, though the
//! CST). The actual parser lives in a separate `parser` crate, though the
//! lexer lives in this crate.
//!
//! See `api_walkthrough` test in this file for a quick API tour!
@ -53,7 +53,7 @@ pub use crate::{
SyntaxNodeChildren, SyntaxToken, SyntaxTreeBuilder,
},
};
pub use ra_parser::{SyntaxKind, T};
pub use parser::{SyntaxKind, T};
pub use rowan::{SmolStr, SyntaxText, TextRange, TextSize, TokenAtOffset, WalkEvent};
/// `Parse` is the result of the parsing: a syntax tree and a collection of
@ -169,35 +169,35 @@ impl SourceFile {
impl ast::Path {
/// Returns `text`, parsed as a path, but only if it has no errors.
pub fn parse(text: &str) -> Result<Self, ()> {
parsing::parse_text_fragment(text, ra_parser::FragmentKind::Path)
parsing::parse_text_fragment(text, parser::FragmentKind::Path)
}
}
impl ast::Pat {
/// Returns `text`, parsed as a pattern, but only if it has no errors.
pub fn parse(text: &str) -> Result<Self, ()> {
parsing::parse_text_fragment(text, ra_parser::FragmentKind::Pattern)
parsing::parse_text_fragment(text, parser::FragmentKind::Pattern)
}
}
impl ast::Expr {
/// Returns `text`, parsed as an expression, but only if it has no errors.
pub fn parse(text: &str) -> Result<Self, ()> {
parsing::parse_text_fragment(text, ra_parser::FragmentKind::Expr)
parsing::parse_text_fragment(text, parser::FragmentKind::Expr)
}
}
impl ast::Item {
/// Returns `text`, parsed as an item, but only if it has no errors.
pub fn parse(text: &str) -> Result<Self, ()> {
parsing::parse_text_fragment(text, ra_parser::FragmentKind::Item)
parsing::parse_text_fragment(text, parser::FragmentKind::Item)
}
}
impl ast::Type {
/// Returns `text`, parsed as a type reference, but only if it has no errors.
pub fn parse(text: &str) -> Result<Self, ()> {
parsing::parse_text_fragment(text, ra_parser::FragmentKind::Type)
parsing::parse_text_fragment(text, parser::FragmentKind::Type)
}
}

View file

@ -1,4 +1,4 @@
//! Lexing, bridging to ra_parser (which does the actual parsing) and
//! Lexing, bridging to parser (which does the actual parsing) and
//! incremental reparsing.
mod lexer;
@ -13,7 +13,7 @@ use text_tree_sink::TextTreeSink;
pub use lexer::*;
pub(crate) use self::reparsing::incremental_reparse;
use ra_parser::SyntaxKind;
use parser::SyntaxKind;
pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) {
let (tokens, lexer_errors) = tokenize(&text);
@ -21,7 +21,7 @@ pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) {
let mut token_source = TextTokenSource::new(text, &tokens);
let mut tree_sink = TextTreeSink::new(text, &tokens);
ra_parser::parse(&mut token_source, &mut tree_sink);
parser::parse(&mut token_source, &mut tree_sink);
let (tree, mut parser_errors) = tree_sink.finish();
parser_errors.extend(lexer_errors);
@ -32,7 +32,7 @@ pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) {
/// Returns `text` parsed as a `T` provided there are no parse errors.
pub(crate) fn parse_text_fragment<T: AstNode>(
text: &str,
fragment_kind: ra_parser::FragmentKind,
fragment_kind: parser::FragmentKind,
) -> Result<T, ()> {
let (tokens, lexer_errors) = tokenize(&text);
if !lexer_errors.is_empty() {
@ -44,13 +44,13 @@ pub(crate) fn parse_text_fragment<T: AstNode>(
// TextTreeSink assumes that there's at least some root node to which it can attach errors and
// tokens. We arbitrarily give it a SourceFile.
use ra_parser::TreeSink;
use parser::TreeSink;
tree_sink.start_node(SyntaxKind::SOURCE_FILE);
ra_parser::parse_fragment(&mut token_source, &mut tree_sink, fragment_kind);
parser::parse_fragment(&mut token_source, &mut tree_sink, fragment_kind);
tree_sink.finish_node();
let (tree, parser_errors) = tree_sink.finish();
use ra_parser::TokenSource;
use parser::TokenSource;
if !parser_errors.is_empty() || token_source.current().kind != SyntaxKind::EOF {
return Err(());
}

View file

@ -6,7 +6,7 @@
//! - otherwise, we search for the nearest `{}` block which contains the edit
//! and try to parse only this block.
use ra_parser::Reparser;
use parser::Reparser;
use text_edit::Indel;
use crate::{

View file

@ -1,10 +1,10 @@
//! See `TextTokenSource` docs.
use ra_parser::TokenSource;
use parser::TokenSource;
use crate::{parsing::lexer::Token, SyntaxKind::EOF, TextRange, TextSize};
/// Implementation of `ra_parser::TokenSource` that takes tokens from source code text.
/// Implementation of `parser::TokenSource` that takes tokens from source code text.
pub(crate) struct TextTokenSource<'t> {
text: &'t str,
/// token and its start position (non-whitespace/comment tokens)
@ -20,15 +20,15 @@ pub(crate) struct TextTokenSource<'t> {
token_offset_pairs: Vec<(Token, TextSize)>,
/// Current token and position
curr: (ra_parser::Token, usize),
curr: (parser::Token, usize),
}
impl<'t> TokenSource for TextTokenSource<'t> {
fn current(&self) -> ra_parser::Token {
fn current(&self) -> parser::Token {
self.curr.0
}
fn lookahead_nth(&self, n: usize) -> ra_parser::Token {
fn lookahead_nth(&self, n: usize) -> parser::Token {
mk_token(self.curr.1 + n, &self.token_offset_pairs)
}
@ -49,7 +49,7 @@ impl<'t> TokenSource for TextTokenSource<'t> {
}
}
fn mk_token(pos: usize, token_offset_pairs: &[(Token, TextSize)]) -> ra_parser::Token {
fn mk_token(pos: usize, token_offset_pairs: &[(Token, TextSize)]) -> parser::Token {
let (kind, is_jointed_to_next) = match token_offset_pairs.get(pos) {
Some((token, offset)) => (
token.kind,
@ -60,7 +60,7 @@ fn mk_token(pos: usize, token_offset_pairs: &[(Token, TextSize)]) -> ra_parser::
),
None => (EOF, false),
};
ra_parser::Token { kind, is_jointed_to_next }
parser::Token { kind, is_jointed_to_next }
}
impl<'t> TextTokenSource<'t> {

View file

@ -2,7 +2,7 @@
use std::mem;
use ra_parser::{ParseError, TreeSink};
use parser::{ParseError, TreeSink};
use crate::{
parsing::Token,

View file

@ -71,7 +71,7 @@ impl SyntaxTreeBuilder {
self.inner.finish_node()
}
pub fn error(&mut self, error: ra_parser::ParseError, text_pos: TextSize) {
pub fn error(&mut self, error: parser::ParseError, text_pos: TextSize) {
self.errors.push(SyntaxError::new_at_offset(*error.0, text_pos))
}
}

View file

@ -92,11 +92,11 @@ This is primarily useful for performance optimizations, or for bug minimization.
## Parser Tests
Tests for the parser (`ra_parser`) live in the `ra_syntax` crate (see `test_data` directory).
Tests for the parser (`parser`) live in the `ra_syntax` crate (see `test_data` directory).
There are two kinds of tests:
* Manually written test cases in `parser/ok` and `parser/err`
* "Inline" tests in `parser/inline` (these are generated) from comments in `ra_parser` crate.
* "Inline" tests in `parser/inline` (these are generated from comments in the `parser` crate).
The purpose of inline tests is not to achieve full coverage by test cases, but to explain to the reader of the code what each particular `if` and `match` is responsible for.
If you are tempted to add a large inline test, it might be a good idea to leave only the simplest example in place, and move the test to a manual `parser/ok` test.

View file

@ -64,7 +64,7 @@ The source for 1 and 2 is in [`ast_src.rs`](https://github.com/rust-analyzer/rus
## Code Walk-Through
### `crates/ra_syntax`, `crates/ra_parser`
### `crates/ra_syntax`, `crates/parser`
Rust syntax tree structure and parser. See
[RFC](https://github.com/rust-lang/rfcs/pull/2256) and [./syntax.md](./syntax.md) for some design notes.

View file

@ -11,7 +11,7 @@ The things described are implemented in two places
* [rowan](https://github.com/rust-analyzer/rowan/tree/v0.9.0) -- a generic library for rowan syntax trees.
* [ra_syntax](https://github.com/rust-analyzer/rust-analyzer/tree/cf5bdf464cad7ceb9a67e07985a3f4d3799ec0b6/crates/ra_syntax) crate inside rust-analyzer which wraps `rowan` into rust-analyzer specific API.
Nothing in rust-analyzer except this crate knows about `rowan`.
* [ra_parser](https://github.com/rust-analyzer/rust-analyzer/tree/cf5bdf464cad7ceb9a67e07985a3f4d3799ec0b6/crates/ra_parser) crate parses input tokens into an `ra_syntax` tree
* [parser](https://github.com/rust-analyzer/rust-analyzer/tree/cf5bdf464cad7ceb9a67e07985a3f4d3799ec0b6/crates/parser) crate parses input tokens into an `ra_syntax` tree
## Design Goals

View file

@ -24,11 +24,11 @@ pub use self::{
gen_syntax::generate_syntax,
};
const GRAMMAR_DIR: &str = "crates/ra_parser/src/grammar";
const GRAMMAR_DIR: &str = "crates/parser/src/grammar";
const OK_INLINE_TESTS_DIR: &str = "crates/ra_syntax/test_data/parser/inline/ok";
const ERR_INLINE_TESTS_DIR: &str = "crates/ra_syntax/test_data/parser/inline/err";
const SYNTAX_KINDS: &str = "crates/ra_parser/src/syntax_kind/generated.rs";
const SYNTAX_KINDS: &str = "crates/parser/src/syntax_kind/generated.rs";
const AST_NODES: &str = "crates/ra_syntax/src/ast/generated/nodes.rs";
const AST_TOKENS: &str = "crates/ra_syntax/src/ast/generated/tokens.rs";

View file

@ -196,7 +196,7 @@ impl TidyDocs {
"ra_hir_expand",
"ra_ide",
"ra_mbe",
"ra_parser",
"parser",
"profile",
"ra_project_model",
"ra_syntax",