migrate ra_syntax to the new rowan API

Aleksey Kladov 2019-07-18 19:23:05 +03:00
parent 58d4983ba5
commit d402974aa0
20 changed files with 1189 additions and 2352 deletions

Cargo.lock (generated): 6 changed lines
View file

@ -1313,7 +1313,7 @@ dependencies = [
"itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
"ra_parser 0.1.0", "ra_parser 0.1.0",
"ra_text_edit 0.1.0", "ra_text_edit 0.1.0",
"rowan 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)", "rowan 0.5.5 (git+https://github.com/rust-analyzer/rowan?branch=cursor)",
"smol_str 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", "smol_str 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
"test_utils 0.1.0", "test_utils 0.1.0",
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1585,7 +1585,7 @@ dependencies = [
[[package]] [[package]]
name = "rowan" name = "rowan"
version = "0.5.5" version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "git+https://github.com/rust-analyzer/rowan?branch=cursor#d41b21587487f8b372ee779e37c557b873ba0715"
dependencies = [ dependencies = [
"colosseum 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "colosseum 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -2275,7 +2275,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum relative-path 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0e7790c7f1cc73d831d28dc5a7deb316a006e7848e6a7f467cdb10a0a9e0fb1c" "checksum relative-path 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0e7790c7f1cc73d831d28dc5a7deb316a006e7848e6a7f467cdb10a0a9e0fb1c"
"checksum remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4a83fa3702a688b9359eccba92d153ac33fd2e8462f9e0e3fdf155239ea7792e" "checksum remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4a83fa3702a688b9359eccba92d153ac33fd2e8462f9e0e3fdf155239ea7792e"
"checksum ron 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "17f52a24414403f81528b67488cf8edc4eda977d3af1646bb6b106a600ead78f" "checksum ron 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "17f52a24414403f81528b67488cf8edc4eda977d3af1646bb6b106a600ead78f"
"checksum rowan 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)" = "500ba7550373d42593a5228085bad391517378fa31ad2a84defe100dd8259fef" "checksum rowan 0.5.5 (git+https://github.com/rust-analyzer/rowan?branch=cursor)" = "<none>"
"checksum rustc-demangle 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "a7f4dccf6f4891ebcc0c39f9b6eb1a83b9bf5d747cb439ec6fba4f3b977038af" "checksum rustc-demangle 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "a7f4dccf6f4891ebcc0c39f9b6eb1a83b9bf5d747cb439ec6fba4f3b977038af"
"checksum rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7540fc8b0c49f096ee9c961cda096467dce8084bec6bdca2fc83895fd9b28cb8" "checksum rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7540fc8b0c49f096ee9c961cda096467dce8084bec6bdca2fc83895fd9b28cb8"
"checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" "checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a"

View file

@ -6,3 +6,4 @@ incremental = true
debug = true debug = true
[patch.'crates-io'] [patch.'crates-io']
rowan = { git = "https://github.com/rust-analyzer/rowan", branch = "cursor" }

View file

@ -9,8 +9,8 @@ pub use rowan::TokenAtOffset;
pub fn find_token_at_offset(node: &SyntaxNode, offset: TextUnit) -> TokenAtOffset<SyntaxToken> { pub fn find_token_at_offset(node: &SyntaxNode, offset: TextUnit) -> TokenAtOffset<SyntaxToken> {
match node.0.token_at_offset(offset) { match node.0.token_at_offset(offset) {
TokenAtOffset::None => TokenAtOffset::None, TokenAtOffset::None => TokenAtOffset::None,
TokenAtOffset::Single(n) => TokenAtOffset::Single(n.into()), TokenAtOffset::Single(n) => TokenAtOffset::Single(SyntaxToken(n)),
TokenAtOffset::Between(l, r) => TokenAtOffset::Between(l.into(), r.into()), TokenAtOffset::Between(l, r) => TokenAtOffset::Between(SyntaxToken(l), SyntaxToken(r)),
} }
} }
@ -22,7 +22,7 @@ pub fn find_token_at_offset(node: &SyntaxNode, offset: TextUnit) -> TokenAtOffse
pub fn ancestors_at_offset( pub fn ancestors_at_offset(
node: &SyntaxNode, node: &SyntaxNode,
offset: TextUnit, offset: TextUnit,
) -> impl Iterator<Item = &SyntaxNode> { ) -> impl Iterator<Item = SyntaxNode> {
find_token_at_offset(node, offset) find_token_at_offset(node, offset)
.map(|token| token.parent().ancestors()) .map(|token| token.parent().ancestors())
.kmerge_by(|node1, node2| node1.range().len() < node2.range().len()) .kmerge_by(|node1, node2| node1.range().len() < node2.range().len())
@ -37,7 +37,7 @@ pub fn ancestors_at_offset(
/// ``` /// ```
/// ///
/// then the shorter node will be silently preferred. /// then the shorter node will be silently preferred.
pub fn find_node_at_offset<N: AstNode>(syntax: &SyntaxNode, offset: TextUnit) -> Option<&N> { pub fn find_node_at_offset<N: AstNode>(syntax: &SyntaxNode, offset: TextUnit) -> Option<N> {
ancestors_at_offset(syntax, offset).find_map(N::cast) ancestors_at_offset(syntax, offset).find_map(N::cast)
} }
@ -59,5 +59,5 @@ pub fn non_trivia_sibling(element: SyntaxElement, direction: Direction) -> Optio
} }
pub fn find_covering_element(root: &SyntaxNode, range: TextRange) -> SyntaxElement { pub fn find_covering_element(root: &SyntaxNode, range: TextRange) -> SyntaxElement {
root.0.covering_node(range).into() SyntaxElement::new(root.0.covering_node(range))
} }
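
With the cursor-based rowan API, the traversal helpers in this file hand back owned SyntaxNode / SyntaxToken / SyntaxElement values instead of references borrowed from the tree, so a result from ancestors_at_offset or find_covering_element can be stored without being tied to the root's lifetime. Below is a minimal, self-contained sketch of that ownership shift; it uses a toy Rc-backed Node, not the real ra_syntax types.

    // Toy stand-in for the cursor-style API: nodes are cheap-to-clone handles
    // (here a plain `Rc`), so traversal helpers can return owned values instead
    // of references tied to the lifetime of the tree root.
    use std::iter::successors;
    use std::rc::Rc;

    #[derive(Debug, Clone, PartialEq)]
    struct Node(Rc<NodeData>);

    #[derive(Debug, PartialEq)]
    struct NodeData {
        name: &'static str,
        parent: Option<Node>,
    }

    impl Node {
        fn new(name: &'static str, parent: Option<Node>) -> Node {
            Node(Rc::new(NodeData { name, parent }))
        }
        fn parent(&self) -> Option<Node> {
            self.0.parent.clone()
        }
        // Mirrors the new `ancestors(&self) -> impl Iterator<Item = SyntaxNode>`:
        // it starts from a clone of `self`, so the iterator owns its items.
        fn ancestors(&self) -> impl Iterator<Item = Node> {
            successors(Some(self.clone()), |node| node.parent())
        }
    }

    fn main() {
        let root = Node::new("root", None);
        let leaf = Node::new("leaf", Some(root.clone()));
        // The found ancestor is owned; no lifetime ties it to `leaf` or `root`.
        let found: Node = leaf.ancestors().find(|n| n.0.name == "root").unwrap();
        assert_eq!(found, root);
    }

Cloning such a handle only bumps a reference count, which is why the migrated helpers can return owned values without becoming expensive.
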

View file

@ -16,7 +16,7 @@ pub trait Visitor<'a>: Sized {
fn visit<N, F>(self, f: F) -> Vis<Self, N, F> fn visit<N, F>(self, f: F) -> Vis<Self, N, F>
where where
N: AstNode + 'a, N: AstNode + 'a,
F: FnOnce(&'a N) -> Self::Output, F: FnOnce(N) -> Self::Output,
{ {
Vis { inner: self, f, ph: PhantomData } Vis { inner: self, f, ph: PhantomData }
} }
@ -29,7 +29,7 @@ pub trait VisitorCtx<'a>: Sized {
fn visit<N, F>(self, f: F) -> VisCtx<Self, N, F> fn visit<N, F>(self, f: F) -> VisCtx<Self, N, F>
where where
N: AstNode + 'a, N: AstNode + 'a,
F: FnOnce(&'a N, Self::Ctx) -> Self::Output, F: FnOnce(N, Self::Ctx) -> Self::Output,
{ {
VisCtx { inner: self, f, ph: PhantomData } VisCtx { inner: self, f, ph: PhantomData }
} }
@ -74,13 +74,13 @@ impl<'a, V, N, F> Visitor<'a> for Vis<V, N, F>
where where
V: Visitor<'a>, V: Visitor<'a>,
N: AstNode + 'a, N: AstNode + 'a,
F: FnOnce(&'a N) -> <V as Visitor<'a>>::Output, F: FnOnce(N) -> <V as Visitor<'a>>::Output,
{ {
type Output = <V as Visitor<'a>>::Output; type Output = <V as Visitor<'a>>::Output;
fn accept(self, node: &'a SyntaxNode) -> Option<Self::Output> { fn accept(self, node: &'a SyntaxNode) -> Option<Self::Output> {
let Vis { inner, f, .. } = self; let Vis { inner, f, .. } = self;
inner.accept(node).or_else(|| N::cast(node).map(f)) inner.accept(node).or_else(|| N::cast(node.clone()).map(f))
} }
} }
@ -95,14 +95,14 @@ impl<'a, V, N, F> VisitorCtx<'a> for VisCtx<V, N, F>
where where
V: VisitorCtx<'a>, V: VisitorCtx<'a>,
N: AstNode + 'a, N: AstNode + 'a,
F: FnOnce(&'a N, <V as VisitorCtx<'a>>::Ctx) -> <V as VisitorCtx<'a>>::Output, F: FnOnce(N, <V as VisitorCtx<'a>>::Ctx) -> <V as VisitorCtx<'a>>::Output,
{ {
type Output = <V as VisitorCtx<'a>>::Output; type Output = <V as VisitorCtx<'a>>::Output;
type Ctx = <V as VisitorCtx<'a>>::Ctx; type Ctx = <V as VisitorCtx<'a>>::Ctx;
fn accept(self, node: &'a SyntaxNode) -> Result<Self::Output, Self::Ctx> { fn accept(self, node: &'a SyntaxNode) -> Result<Self::Output, Self::Ctx> {
let VisCtx { inner, f, .. } = self; let VisCtx { inner, f, .. } = self;
inner.accept(node).or_else(|ctx| match N::cast(node) { inner.accept(node).or_else(|ctx| match N::cast(node.clone()) {
None => Err(ctx), None => Err(ctx),
Some(node) => Ok(f(node, ctx)), Some(node) => Ok(f(node, ctx)),
}) })

View file

@ -9,7 +9,7 @@ mod expr_extensions;
use std::marker::PhantomData; use std::marker::PhantomData;
use crate::{ use crate::{
syntax_node::{SyntaxNode, SyntaxNodeChildren, SyntaxToken, TreeArc}, syntax_node::{SyntaxNode, SyntaxNodeChildren, SyntaxToken},
SmolStr, SmolStr,
}; };
@ -25,51 +25,49 @@ pub use self::{
/// conversion itself has zero runtime cost: ast and syntax nodes have exactly /// conversion itself has zero runtime cost: ast and syntax nodes have exactly
/// the same representation: a pointer to the tree root and a pointer to the /// the same representation: a pointer to the tree root and a pointer to the
/// node itself. /// node itself.
pub trait AstNode: pub trait AstNode {
rowan::TransparentNewType<Repr = rowan::SyntaxNode> + ToOwned<Owned = TreeArc<Self>> fn cast(syntax: SyntaxNode) -> Option<Self>
{
fn cast(syntax: &SyntaxNode) -> Option<&Self>
where where
Self: Sized; Self: Sized;
fn syntax(&self) -> &SyntaxNode; fn syntax(&self) -> &SyntaxNode;
} }
/// Like `AstNode`, but wraps tokens rather than interior nodes. /// Like `AstNode`, but wraps tokens rather than interior nodes.
pub trait AstToken<'a> { pub trait AstToken {
fn cast(token: SyntaxToken<'a>) -> Option<Self> fn cast(token: SyntaxToken) -> Option<Self>
where where
Self: Sized; Self: Sized;
fn syntax(&self) -> SyntaxToken<'a>; fn syntax(&self) -> &SyntaxToken;
fn text(&self) -> &'a SmolStr { fn text(&self) -> &SmolStr {
self.syntax().text() self.syntax().text()
} }
} }
/// An iterator over `SyntaxNode` children of a particular AST type. /// An iterator over `SyntaxNode` children of a particular AST type.
#[derive(Debug)] #[derive(Debug)]
pub struct AstChildren<'a, N> { pub struct AstChildren<N> {
inner: SyntaxNodeChildren<'a>, inner: SyntaxNodeChildren,
ph: PhantomData<N>, ph: PhantomData<N>,
} }
impl<'a, N> AstChildren<'a, N> { impl<N> AstChildren<N> {
fn new(parent: &'a SyntaxNode) -> Self { fn new(parent: &SyntaxNode) -> Self {
AstChildren { inner: parent.children(), ph: PhantomData } AstChildren { inner: parent.children(), ph: PhantomData }
} }
} }
impl<'a, N: AstNode + 'a> Iterator for AstChildren<'a, N> { impl<N: AstNode> Iterator for AstChildren<N> {
type Item = &'a N; type Item = N;
fn next(&mut self) -> Option<&'a N> { fn next(&mut self) -> Option<N> {
self.inner.by_ref().find_map(N::cast) self.inner.by_ref().find_map(N::cast)
} }
} }
fn child_opt<P: AstNode, C: AstNode>(parent: &P) -> Option<&C> { fn child_opt<P: AstNode + ?Sized, C: AstNode>(parent: &P) -> Option<C> {
children(parent).next() children(parent).next()
} }
fn children<P: AstNode, C: AstNode>(parent: &P) -> AstChildren<C> { fn children<P: AstNode + ?Sized, C: AstNode>(parent: &P) -> AstChildren<C> {
AstChildren::new(parent.syntax()) AstChildren::new(parent.syntax())
} }
@ -123,7 +121,7 @@ fn test_doc_comment_preserves_indents() {
#[test] #[test]
fn test_where_predicates() { fn test_where_predicates() {
fn assert_bound(text: &str, bound: Option<&TypeBound>) { fn assert_bound(text: &str, bound: Option<TypeBound>) {
assert_eq!(text, bound.unwrap().syntax().text().to_string()); assert_eq!(text, bound.unwrap().syntax().text().to_string());
} }
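
The reworked AstNode trait is the core of this change: cast consumes an owned SyntaxNode and returns an owned typed wrapper, syntax() borrows the node stored inside, and AstChildren<N> loses its lifetime parameter because it yields owned nodes. A rough, self-contained sketch of that shape follows; SyntaxKind and SyntaxNode are toys, and the children iterator is generic over any owned-node iterator rather than the real SyntaxNodeChildren.

    use std::marker::PhantomData;

    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    enum SyntaxKind { FnDef, Name, Whitespace }

    #[derive(Debug, Clone, PartialEq, Eq)]
    struct SyntaxNode {
        kind: SyntaxKind,
        children: Vec<SyntaxNode>,
    }

    impl SyntaxNode {
        fn kind(&self) -> SyntaxKind { self.kind }
        fn children(&self) -> impl Iterator<Item = SyntaxNode> + '_ {
            self.children.iter().cloned()
        }
    }

    // `cast` takes the node by value; the typed wrapper owns it.
    trait AstNode {
        fn cast(syntax: SyntaxNode) -> Option<Self> where Self: Sized;
        fn syntax(&self) -> &SyntaxNode;
    }

    #[derive(Debug, Clone, PartialEq, Eq)]
    struct Name { syntax: SyntaxNode }

    impl AstNode for Name {
        fn cast(syntax: SyntaxNode) -> Option<Self> {
            if syntax.kind() == SyntaxKind::Name { Some(Name { syntax }) } else { None }
        }
        fn syntax(&self) -> &SyntaxNode { &self.syntax }
    }

    // `AstChildren<N>` no longer carries a lifetime: it owns its inner iterator
    // and yields owned typed nodes.
    struct AstChildren<I, N> { inner: I, ph: PhantomData<N> }

    impl<I: Iterator<Item = SyntaxNode>, N: AstNode> Iterator for AstChildren<I, N> {
        type Item = N;
        fn next(&mut self) -> Option<N> {
            self.inner.by_ref().find_map(N::cast)
        }
    }

    fn main() {
        let fn_def = SyntaxNode {
            kind: SyntaxKind::FnDef,
            children: vec![
                SyntaxNode { kind: SyntaxKind::Whitespace, children: vec![] },
                SyntaxNode { kind: SyntaxKind::Name, children: vec![] },
            ],
        };
        let mut names = AstChildren { inner: fn_def.children(), ph: PhantomData::<Name> };
        let name: Name = names.next().unwrap();
        assert_eq!(name.syntax().kind(), SyntaxKind::Name);
    }

Call sites that previously held `&'a N` references can now keep an N by value and clone it freely; as the updated walkthrough below notes, owned nodes are cheap handles to shared data rather than deep copies.
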

View file

@ -8,20 +8,20 @@ use crate::{
}; };
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub enum ElseBranch<'a> { pub enum ElseBranch {
Block(&'a ast::Block), Block(ast::Block),
IfExpr(&'a ast::IfExpr), IfExpr(ast::IfExpr),
} }
impl ast::IfExpr { impl ast::IfExpr {
pub fn then_branch(&self) -> Option<&ast::Block> { pub fn then_branch(&self) -> Option<ast::Block> {
self.blocks().nth(0) self.blocks().nth(0)
} }
pub fn else_branch(&self) -> Option<ElseBranch> { pub fn else_branch(&self) -> Option<ElseBranch> {
let res = match self.blocks().nth(1) { let res = match self.blocks().nth(1) {
Some(block) => ElseBranch::Block(block), Some(block) => ElseBranch::Block(block),
None => { None => {
let elif: &ast::IfExpr = child_opt(self)?; let elif: ast::IfExpr = child_opt(self)?;
ElseBranch::IfExpr(elif) ElseBranch::IfExpr(elif)
} }
}; };
@ -60,7 +60,7 @@ impl ast::PrefixExpr {
} }
pub fn op_token(&self) -> Option<SyntaxToken> { pub fn op_token(&self) -> Option<SyntaxToken> {
self.syntax().first_child_or_token()?.as_token() self.syntax().first_child_or_token()?.as_token().cloned()
} }
} }
@ -132,7 +132,7 @@ pub enum BinOp {
impl ast::BinExpr { impl ast::BinExpr {
fn op_details(&self) -> Option<(SyntaxToken, BinOp)> { fn op_details(&self) -> Option<(SyntaxToken, BinOp)> {
self.syntax().children_with_tokens().filter_map(|it| it.as_token()).find_map(|c| { self.syntax().children_with_tokens().filter_map(|it| it.as_token().cloned()).find_map(|c| {
match c.kind() { match c.kind() {
T![||] => Some((c, BinOp::BooleanOr)), T![||] => Some((c, BinOp::BooleanOr)),
T![&&] => Some((c, BinOp::BooleanAnd)), T![&&] => Some((c, BinOp::BooleanAnd)),
@ -178,15 +178,15 @@ impl ast::BinExpr {
self.op_details().map(|t| t.0) self.op_details().map(|t| t.0)
} }
pub fn lhs(&self) -> Option<&ast::Expr> { pub fn lhs(&self) -> Option<ast::Expr> {
children(self).nth(0) children(self).nth(0)
} }
pub fn rhs(&self) -> Option<&ast::Expr> { pub fn rhs(&self) -> Option<ast::Expr> {
children(self).nth(1) children(self).nth(1)
} }
pub fn sub_exprs(&self) -> (Option<&ast::Expr>, Option<&ast::Expr>) { pub fn sub_exprs(&self) -> (Option<ast::Expr>, Option<ast::Expr>) {
let mut children = children(self); let mut children = children(self);
let first = children.next(); let first = children.next();
let second = children.next(); let second = children.next();
@ -194,9 +194,9 @@ impl ast::BinExpr {
} }
} }
pub enum ArrayExprKind<'a> { pub enum ArrayExprKind {
Repeat { initializer: Option<&'a ast::Expr>, repeat: Option<&'a ast::Expr> }, Repeat { initializer: Option<ast::Expr>, repeat: Option<ast::Expr> },
ElementList(AstChildren<'a, ast::Expr>), ElementList(AstChildren<ast::Expr>),
} }
impl ast::ArrayExpr { impl ast::ArrayExpr {
@ -275,12 +275,12 @@ impl ast::Literal {
#[test] #[test]
fn test_literal_with_attr() { fn test_literal_with_attr() {
let parse = ast::SourceFile::parse(r#"const _: &str = { #[attr] "Hello" };"#); let parse = ast::SourceFile::parse(r#"const _: &str = { #[attr] "Hello" };"#);
let lit = parse.tree.syntax().descendants().find_map(ast::Literal::cast).unwrap(); let lit = parse.tree().syntax().descendants().find_map(ast::Literal::cast).unwrap();
assert_eq!(lit.token().text(), r#""Hello""#); assert_eq!(lit.token().text(), r#""Hello""#);
} }
impl ast::NamedField { impl ast::NamedField {
pub fn parent_struct_lit(&self) -> &ast::StructLit { pub fn parent_struct_lit(&self) -> ast::StructLit {
self.syntax().ancestors().find_map(ast::StructLit::cast).unwrap() self.syntax().ancestors().find_map(ast::StructLit::cast).unwrap()
} }
} }

View file

@ -4,7 +4,7 @@
use itertools::Itertools; use itertools::Itertools;
use crate::{ use crate::{
ast::{self, child_opt, children, AstNode}, ast::{self, child_opt, children, AstNode, SyntaxNode},
SmolStr, SyntaxElement, SmolStr, SyntaxElement,
SyntaxKind::*, SyntaxKind::*,
SyntaxToken, T, SyntaxToken, T,
@ -13,15 +13,20 @@ use ra_parser::SyntaxKind;
impl ast::Name { impl ast::Name {
pub fn text(&self) -> &SmolStr { pub fn text(&self) -> &SmolStr {
let ident = self.syntax().first_child_or_token().unwrap().as_token().unwrap(); text_of_first_token(self.syntax())
ident.text()
} }
} }
impl ast::NameRef { impl ast::NameRef {
pub fn text(&self) -> &SmolStr { pub fn text(&self) -> &SmolStr {
let ident = self.syntax().first_child_or_token().unwrap().as_token().unwrap(); text_of_first_token(self.syntax())
ident.text() }
}
fn text_of_first_token(node: &SyntaxNode) -> &SmolStr {
match node.0.green().children().first() {
Some(rowan::GreenElement::Token(it)) => it.text(),
_ => panic!(),
} }
} }
@ -50,10 +55,10 @@ impl ast::Attr {
} }
} }
pub fn as_call(&self) -> Option<(SmolStr, &ast::TokenTree)> { pub fn as_call(&self) -> Option<(SmolStr, ast::TokenTree)> {
let tt = self.value()?; let tt = self.value()?;
let (_bra, attr, args, _ket) = tt.syntax().children_with_tokens().collect_tuple()?; let (_bra, attr, args, _ket) = tt.syntax().children_with_tokens().collect_tuple()?;
let args = ast::TokenTree::cast(args.as_node()?)?; let args = ast::TokenTree::cast(args.as_node()?.clone())?;
if attr.kind() == IDENT { if attr.kind() == IDENT {
Some((attr.as_token()?.text().clone(), args)) Some((attr.as_token()?.text().clone(), args))
} else { } else {
@ -86,16 +91,16 @@ impl ast::Attr {
} }
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub enum PathSegmentKind<'a> { pub enum PathSegmentKind {
Name(&'a ast::NameRef), Name(ast::NameRef),
SelfKw, SelfKw,
SuperKw, SuperKw,
CrateKw, CrateKw,
} }
impl ast::PathSegment { impl ast::PathSegment {
pub fn parent_path(&self) -> &ast::Path { pub fn parent_path(&self) -> ast::Path {
self.syntax() self.syntax()
.parent() .parent()
.and_then(ast::Path::cast) .and_then(ast::Path::cast)
@ -125,7 +130,7 @@ impl ast::PathSegment {
} }
impl ast::Path { impl ast::Path {
pub fn parent_path(&self) -> Option<&ast::Path> { pub fn parent_path(&self) -> Option<ast::Path> {
self.syntax().parent().and_then(ast::Path::cast) self.syntax().parent().and_then(ast::Path::cast)
} }
} }
@ -146,7 +151,7 @@ impl ast::UseTree {
} }
impl ast::UseTreeList { impl ast::UseTreeList {
pub fn parent_use_tree(&self) -> &ast::UseTree { pub fn parent_use_tree(&self) -> ast::UseTree {
self.syntax() self.syntax()
.parent() .parent()
.and_then(ast::UseTree::cast) .and_then(ast::UseTree::cast)
@ -155,21 +160,21 @@ impl ast::UseTreeList {
} }
impl ast::ImplBlock { impl ast::ImplBlock {
pub fn target_type(&self) -> Option<&ast::TypeRef> { pub fn target_type(&self) -> Option<ast::TypeRef> {
match self.target() { match self.target() {
(Some(t), None) | (_, Some(t)) => Some(t), (Some(t), None) | (_, Some(t)) => Some(t),
_ => None, _ => None,
} }
} }
pub fn target_trait(&self) -> Option<&ast::TypeRef> { pub fn target_trait(&self) -> Option<ast::TypeRef> {
match self.target() { match self.target() {
(Some(t), Some(_)) => Some(t), (Some(t), Some(_)) => Some(t),
_ => None, _ => None,
} }
} }
fn target(&self) -> (Option<&ast::TypeRef>, Option<&ast::TypeRef>) { fn target(&self) -> (Option<ast::TypeRef>, Option<ast::TypeRef>) {
let mut types = children(self); let mut types = children(self);
let first = types.next(); let first = types.next();
let second = types.next(); let second = types.next();
@ -182,13 +187,13 @@ impl ast::ImplBlock {
} }
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub enum StructKind<'a> { pub enum StructKind {
Tuple(&'a ast::PosFieldDefList), Tuple(ast::PosFieldDefList),
Named(&'a ast::NamedFieldDefList), Named(ast::NamedFieldDefList),
Unit, Unit,
} }
impl StructKind<'_> { impl StructKind {
fn from_node<N: AstNode>(node: &N) -> StructKind { fn from_node<N: AstNode>(node: &N) -> StructKind {
if let Some(nfdl) = child_opt::<_, ast::NamedFieldDefList>(node) { if let Some(nfdl) = child_opt::<_, ast::NamedFieldDefList>(node) {
StructKind::Named(nfdl) StructKind::Named(nfdl)
@ -218,7 +223,7 @@ impl ast::StructDef {
} }
impl ast::EnumVariant { impl ast::EnumVariant {
pub fn parent_enum(&self) -> &ast::EnumDef { pub fn parent_enum(&self) -> ast::EnumDef {
self.syntax() self.syntax()
.parent() .parent()
.and_then(|it| it.parent()) .and_then(|it| it.parent())
@ -231,10 +236,10 @@ impl ast::EnumVariant {
} }
impl ast::FnDef { impl ast::FnDef {
pub fn semicolon_token(&self) -> Option<SyntaxToken<'_>> { pub fn semicolon_token(&self) -> Option<SyntaxToken> {
self.syntax() self.syntax()
.last_child_or_token() .last_child_or_token()
.and_then(|it| it.as_token()) .and_then(|it| it.as_token().cloned())
.filter(|it| it.kind() == T![;]) .filter(|it| it.kind() == T![;])
} }
} }
@ -258,9 +263,9 @@ impl ast::ExprStmt {
} }
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub enum FieldKind<'a> { pub enum FieldKind {
Name(&'a ast::NameRef), Name(ast::NameRef),
Index(SyntaxToken<'a>), Index(SyntaxToken),
} }
impl ast::FieldExpr { impl ast::FieldExpr {
@ -271,6 +276,7 @@ impl ast::FieldExpr {
.find(|c| c.kind() == SyntaxKind::INT_NUMBER || c.kind() == SyntaxKind::FLOAT_NUMBER) .find(|c| c.kind() == SyntaxKind::INT_NUMBER || c.kind() == SyntaxKind::FLOAT_NUMBER)
.as_ref() .as_ref()
.and_then(SyntaxElement::as_token) .and_then(SyntaxElement::as_token)
.cloned()
} }
pub fn field_access(&self) -> Option<FieldKind> { pub fn field_access(&self) -> Option<FieldKind> {
@ -326,7 +332,7 @@ impl ast::SelfParam {
pub fn self_kw_token(&self) -> SyntaxToken { pub fn self_kw_token(&self) -> SyntaxToken {
self.syntax() self.syntax()
.children_with_tokens() .children_with_tokens()
.filter_map(|it| it.as_token()) .filter_map(|it| it.as_token().cloned())
.find(|it| it.kind() == T![self]) .find(|it| it.kind() == T![self])
.expect("invalid tree: self param must have self") .expect("invalid tree: self param must have self")
} }
@ -355,7 +361,7 @@ impl ast::LifetimeParam {
pub fn lifetime_token(&self) -> Option<SyntaxToken> { pub fn lifetime_token(&self) -> Option<SyntaxToken> {
self.syntax() self.syntax()
.children_with_tokens() .children_with_tokens()
.filter_map(|it| it.as_token()) .filter_map(|it| it.as_token().cloned())
.find(|it| it.kind() == LIFETIME) .find(|it| it.kind() == LIFETIME)
} }
} }
@ -364,7 +370,7 @@ impl ast::WherePred {
pub fn lifetime_token(&self) -> Option<SyntaxToken> { pub fn lifetime_token(&self) -> Option<SyntaxToken> {
self.syntax() self.syntax()
.children_with_tokens() .children_with_tokens()
.filter_map(|it| it.as_token()) .filter_map(|it| it.as_token().cloned())
.find(|it| it.kind() == LIFETIME) .find(|it| it.kind() == LIFETIME)
} }
} }

File diff suppressed because it is too large

View file

@ -11,94 +11,73 @@ the below applies to the result of this template
#![cfg_attr(rustfmt, rustfmt_skip)] #![cfg_attr(rustfmt, rustfmt_skip)]
use rowan::TransparentNewType;
use crate::{ use crate::{
SyntaxNode, SyntaxKind::*, SyntaxNode, SyntaxKind::*,
syntax_node::{TreeArc},
ast::{self, AstNode}, ast::{self, AstNode},
}; };
{% for node, methods in ast %} {% for node, methods in ast %}
// {{ node }} // {{ node }}
{%- if methods.enum %} {%- if methods.enum %}
#[derive(Debug, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
#[repr(transparent)]
pub struct {{ node }} { pub struct {{ node }} {
pub(crate) syntax: SyntaxNode, pub(crate) syntax: SyntaxNode,
} }
unsafe impl TransparentNewType for {{ node }} {
type Repr = rowan::SyntaxNode;
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub enum {{ node }}Kind<'a> { pub enum {{ node }}Kind {
{%- for kind in methods.enum %} {%- for kind in methods.enum %}
{{ kind }}(&'a {{ kind }}), {{ kind }}({{ kind }}),
{%- endfor %} {%- endfor %}
} }
{%- for kind in methods.enum %} {%- for kind in methods.enum %}
impl<'a> From<&'a {{ kind }}> for &'a {{ node }} { impl From<{{ kind }}> for {{ node }} {
fn from(n: &'a {{ kind }}) -> &'a {{ node }} { fn from(n: {{ kind }}) -> {{ node }} {
{{ node }}::cast(&n.syntax).unwrap() {{ node }}::cast(n.syntax).unwrap()
} }
} }
{%- endfor %} {%- endfor %}
impl AstNode for {{ node }} { impl AstNode for {{ node }} {
fn cast(syntax: &SyntaxNode) -> Option<&Self> { fn cast(syntax: SyntaxNode) -> Option<Self> {
match syntax.kind() { match syntax.kind() {
{%- for kind in methods.enum %} {%- for kind in methods.enum %}
| {{ kind | SCREAM }} | {{ kind | SCREAM }}
{%- endfor %} => Some({{ node }}::from_repr(syntax.into_repr())), {%- endfor %} => Some({{ node }} { syntax }),
_ => None, _ => None,
} }
} }
fn syntax(&self) -> &SyntaxNode { &self.syntax } fn syntax(&self) -> &SyntaxNode { &self.syntax }
} }
impl ToOwned for {{ node }} {
type Owned = TreeArc<{{ node }}>;
fn to_owned(&self) -> TreeArc<{{ node }}> { TreeArc::cast(self.syntax.to_owned()) }
}
impl {{ node }} { impl {{ node }} {
pub fn kind(&self) -> {{ node }}Kind { pub fn kind(&self) -> {{ node }}Kind {
match self.syntax.kind() { match self.syntax.kind() {
{%- for kind in methods.enum %} {%- for kind in methods.enum %}
{{ kind | SCREAM }} => {{ node }}Kind::{{ kind }}({{ kind }}::cast(&self.syntax).unwrap()), {{ kind | SCREAM }} => {{ node }}Kind::{{ kind }}({{ kind }}::cast(self.syntax.clone()).unwrap()),
{%- endfor %} {%- endfor %}
_ => unreachable!(), _ => unreachable!(),
} }
} }
} }
{% else %} {% else %}
#[derive(Debug, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
#[repr(transparent)]
pub struct {{ node }} { pub struct {{ node }} {
pub(crate) syntax: SyntaxNode, pub(crate) syntax: SyntaxNode,
} }
unsafe impl TransparentNewType for {{ node }} {
type Repr = rowan::SyntaxNode;
}
impl AstNode for {{ node }} { impl AstNode for {{ node }} {
fn cast(syntax: &SyntaxNode) -> Option<&Self> { fn cast(syntax: SyntaxNode) -> Option<Self> {
match syntax.kind() { match syntax.kind() {
{{ node | SCREAM }} => Some({{ node }}::from_repr(syntax.into_repr())), {{ node | SCREAM }} => Some({{ node }} { syntax }),
_ => None, _ => None,
} }
} }
fn syntax(&self) -> &SyntaxNode { &self.syntax } fn syntax(&self) -> &SyntaxNode { &self.syntax }
} }
impl ToOwned for {{ node }} {
type Owned = TreeArc<{{ node }}>;
fn to_owned(&self) -> TreeArc<{{ node }}> { TreeArc::cast(self.syntax.to_owned()) }
}
{% endif %} {% endif %}
{% if methods.traits -%} {% if methods.traits -%}
@ -113,7 +92,7 @@ impl {{ node }} {
{%- for m in methods.collections -%} {%- for m in methods.collections -%}
{%- set method_name = m.0 -%} {%- set method_name = m.0 -%}
{%- set ChildName = m.1 %} {%- set ChildName = m.1 %}
pub fn {{ method_name }}(&self) -> impl Iterator<Item = &{{ ChildName }}> { pub fn {{ method_name }}(&self) -> impl Iterator<Item = {{ ChildName }}> {
super::children(self) super::children(self)
} }
{% endfor -%} {% endfor -%}
@ -129,7 +108,7 @@ impl {{ node }} {
{%- set method_name = m.0 -%} {%- set method_name = m.0 -%}
{%- set ChildName = m.1 %} {%- set ChildName = m.1 %}
{%- endif %} {%- endif %}
pub fn {{ method_name }}(&self) -> Option<&{{ ChildName }}> { pub fn {{ method_name }}(&self) -> Option<{{ ChildName }}> {
super::child_opt(self) super::child_opt(self)
} }
{% endfor -%} {% endfor -%}
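
For enum-like nodes the template now expands to an owned wrapper struct plus a lifetime-free Kind enum, with From impls and a kind() dispatcher that re-casts a clone of the owned syntax node. Roughly what such an expansion looks like for a hypothetical Stmt node with LetStmt and ExprStmt variants (names invented for illustration, toy syntax types, Hash derives omitted):

    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    enum SyntaxKind { LetStmt, ExprStmt, Whitespace }

    #[derive(Debug, Clone, PartialEq, Eq)]
    struct SyntaxNode { kind: SyntaxKind }

    impl SyntaxNode {
        fn kind(&self) -> SyntaxKind { self.kind }
    }

    trait AstNode: Sized {
        fn cast(syntax: SyntaxNode) -> Option<Self>;
        fn syntax(&self) -> &SyntaxNode;
    }

    #[derive(Debug, Clone, PartialEq, Eq)]
    struct LetStmt { syntax: SyntaxNode }

    #[derive(Debug, Clone, PartialEq, Eq)]
    struct ExprStmt { syntax: SyntaxNode }

    impl AstNode for LetStmt {
        fn cast(syntax: SyntaxNode) -> Option<Self> {
            match syntax.kind() { SyntaxKind::LetStmt => Some(LetStmt { syntax }), _ => None }
        }
        fn syntax(&self) -> &SyntaxNode { &self.syntax }
    }

    impl AstNode for ExprStmt {
        fn cast(syntax: SyntaxNode) -> Option<Self> {
            match syntax.kind() { SyntaxKind::ExprStmt => Some(ExprStmt { syntax }), _ => None }
        }
        fn syntax(&self) -> &SyntaxNode { &self.syntax }
    }

    // The "enum" node owns its syntax node like any other node...
    #[derive(Debug, Clone, PartialEq, Eq)]
    struct Stmt { syntax: SyntaxNode }

    // ...and the Kind enum owns the variant wrappers instead of borrowing them.
    #[derive(Debug, Clone, PartialEq, Eq)]
    enum StmtKind { LetStmt(LetStmt), ExprStmt(ExprStmt) }

    impl From<LetStmt> for Stmt {
        fn from(n: LetStmt) -> Stmt { Stmt::cast(n.syntax).unwrap() }
    }

    impl From<ExprStmt> for Stmt {
        fn from(n: ExprStmt) -> Stmt { Stmt::cast(n.syntax).unwrap() }
    }

    impl AstNode for Stmt {
        fn cast(syntax: SyntaxNode) -> Option<Self> {
            match syntax.kind() {
                SyntaxKind::LetStmt | SyntaxKind::ExprStmt => Some(Stmt { syntax }),
                _ => None,
            }
        }
        fn syntax(&self) -> &SyntaxNode { &self.syntax }
    }

    impl Stmt {
        fn kind(&self) -> StmtKind {
            match self.syntax.kind() {
                // `clone()` replaces the old zero-cost `&self.syntax` re-cast.
                SyntaxKind::LetStmt => StmtKind::LetStmt(LetStmt::cast(self.syntax.clone()).unwrap()),
                SyntaxKind::ExprStmt => StmtKind::ExprStmt(ExprStmt::cast(self.syntax.clone()).unwrap()),
                _ => unreachable!(),
            }
        }
    }

    fn main() {
        let node = SyntaxNode { kind: SyntaxKind::LetStmt };
        let stmt = Stmt::cast(node).unwrap();
        match stmt.kind() {
            StmtKind::LetStmt(_) => {}
            StmtKind::ExprStmt(_) => unreachable!(),
        }
    }
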

View file

@ -6,23 +6,23 @@ use crate::{
SyntaxToken, SyntaxToken,
}; };
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Comment<'a>(SyntaxToken<'a>); pub struct Comment(SyntaxToken);
impl<'a> AstToken<'a> for Comment<'a> { impl AstToken for Comment {
fn cast(token: SyntaxToken<'a>) -> Option<Self> { fn cast(token: SyntaxToken) -> Option<Self> {
if token.kind() == COMMENT { if token.kind() == COMMENT {
Some(Comment(token)) Some(Comment(token))
} else { } else {
None None
} }
} }
fn syntax(&self) -> SyntaxToken<'a> { fn syntax(&self) -> &SyntaxToken {
self.0 &self.0
} }
} }
impl<'a> Comment<'a> { impl Comment {
pub fn kind(&self) -> CommentKind { pub fn kind(&self) -> CommentKind {
kind_by_prefix(self.text()) kind_by_prefix(self.text())
} }
@ -90,22 +90,22 @@ fn prefix_by_kind(kind: CommentKind) -> &'static str {
unreachable!() unreachable!()
} }
pub struct Whitespace<'a>(SyntaxToken<'a>); pub struct Whitespace(SyntaxToken);
impl<'a> AstToken<'a> for Whitespace<'a> { impl AstToken for Whitespace {
fn cast(token: SyntaxToken<'a>) -> Option<Self> { fn cast(token: SyntaxToken) -> Option<Self> {
if token.kind() == WHITESPACE { if token.kind() == WHITESPACE {
Some(Whitespace(token)) Some(Whitespace(token))
} else { } else {
None None
} }
} }
fn syntax(&self) -> SyntaxToken<'a> { fn syntax(&self) -> &SyntaxToken {
self.0 &self.0
} }
} }
impl<'a> Whitespace<'a> { impl Whitespace {
pub fn spans_multiple_lines(&self) -> bool { pub fn spans_multiple_lines(&self) -> bool {
let text = self.text(); let text = self.text();
text.find('\n').map_or(false, |idx| text[idx + 1..].contains('\n')) text.find('\n').map_or(false, |idx| text[idx + 1..].contains('\n'))
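
AstToken follows the same pattern as AstNode: the lifetime parameter disappears, wrappers like Comment and Whitespace own their SyntaxToken, and syntax() now hands out a reference to that owned field. A small sketch of the shape with toy types, using a plain String for the text instead of SmolStr:

    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    enum SyntaxKind { Comment, Whitespace }

    #[derive(Debug, Clone, PartialEq, Eq)]
    struct SyntaxToken { kind: SyntaxKind, text: String }

    impl SyntaxToken {
        fn kind(&self) -> SyntaxKind { self.kind }
        fn text(&self) -> &str { &self.text }
    }

    // Lifetime-free token wrapper: `cast` checks the kind, `syntax()` borrows
    // the owned token, and `text()` is a default method on the trait.
    trait AstToken: Sized {
        fn cast(token: SyntaxToken) -> Option<Self>;
        fn syntax(&self) -> &SyntaxToken;
        fn text(&self) -> &str { self.syntax().text() }
    }

    #[derive(Debug, Clone, PartialEq, Eq)]
    struct Comment(SyntaxToken);

    impl AstToken for Comment {
        fn cast(token: SyntaxToken) -> Option<Self> {
            if token.kind() == SyntaxKind::Comment { Some(Comment(token)) } else { None }
        }
        fn syntax(&self) -> &SyntaxToken { &self.0 }
    }

    fn main() {
        let token = SyntaxToken { kind: SyntaxKind::Comment, text: "// hi".to_string() };
        let comment = Comment::cast(token).expect("kind matched");
        assert_eq!(comment.text(), "// hi");
    }
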

View file

@ -10,37 +10,37 @@ use crate::{
}; };
pub trait TypeAscriptionOwner: AstNode { pub trait TypeAscriptionOwner: AstNode {
fn ascribed_type(&self) -> Option<&ast::TypeRef> { fn ascribed_type(&self) -> Option<ast::TypeRef> {
child_opt(self) child_opt(self)
} }
} }
pub trait NameOwner: AstNode { pub trait NameOwner: AstNode {
fn name(&self) -> Option<&ast::Name> { fn name(&self) -> Option<ast::Name> {
child_opt(self) child_opt(self)
} }
} }
pub trait VisibilityOwner: AstNode { pub trait VisibilityOwner: AstNode {
fn visibility(&self) -> Option<&ast::Visibility> { fn visibility(&self) -> Option<ast::Visibility> {
child_opt(self) child_opt(self)
} }
} }
pub trait LoopBodyOwner: AstNode { pub trait LoopBodyOwner: AstNode {
fn loop_body(&self) -> Option<&ast::Block> { fn loop_body(&self) -> Option<ast::Block> {
child_opt(self) child_opt(self)
} }
} }
pub trait TryBlockBodyOwner: AstNode { pub trait TryBlockBodyOwner: AstNode {
fn try_body(&self) -> Option<&ast::Block> { fn try_body(&self) -> Option<ast::Block> {
child_opt(self) child_opt(self)
} }
} }
pub trait ArgListOwner: AstNode { pub trait ArgListOwner: AstNode {
fn arg_list(&self) -> Option<&ast::ArgList> { fn arg_list(&self) -> Option<ast::ArgList> {
child_opt(self) child_opt(self)
} }
} }
@ -51,10 +51,10 @@ pub trait FnDefOwner: AstNode {
} }
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub enum ItemOrMacro<'a> { pub enum ItemOrMacro {
Item(&'a ast::ModuleItem), Item(ast::ModuleItem),
Macro(&'a ast::MacroCall), Macro(ast::MacroCall),
} }
pub trait ModuleItemOwner: AstNode { pub trait ModuleItemOwner: AstNode {
@ -67,14 +67,14 @@ pub trait ModuleItemOwner: AstNode {
} }
#[derive(Debug)] #[derive(Debug)]
pub struct ItemOrMacroIter<'a>(SyntaxNodeChildren<'a>); pub struct ItemOrMacroIter(SyntaxNodeChildren);
impl<'a> Iterator for ItemOrMacroIter<'a> { impl Iterator for ItemOrMacroIter {
type Item = ItemOrMacro<'a>; type Item = ItemOrMacro;
fn next(&mut self) -> Option<ItemOrMacro<'a>> { fn next(&mut self) -> Option<ItemOrMacro> {
loop { loop {
let n = self.0.next()?; let n = self.0.next()?;
if let Some(item) = ast::ModuleItem::cast(n) { if let Some(item) = ast::ModuleItem::cast(n.clone()) {
return Some(ItemOrMacro::Item(item)); return Some(ItemOrMacro::Item(item));
} }
if let Some(call) = ast::MacroCall::cast(n) { if let Some(call) = ast::MacroCall::cast(n) {
@ -85,17 +85,17 @@ impl<'a> Iterator for ItemOrMacroIter<'a> {
} }
pub trait TypeParamsOwner: AstNode { pub trait TypeParamsOwner: AstNode {
fn type_param_list(&self) -> Option<&ast::TypeParamList> { fn type_param_list(&self) -> Option<ast::TypeParamList> {
child_opt(self) child_opt(self)
} }
fn where_clause(&self) -> Option<&ast::WhereClause> { fn where_clause(&self) -> Option<ast::WhereClause> {
child_opt(self) child_opt(self)
} }
} }
pub trait TypeBoundsOwner: AstNode { pub trait TypeBoundsOwner: AstNode {
fn type_bound_list(&self) -> Option<&ast::TypeBoundList> { fn type_bound_list(&self) -> Option<ast::TypeBoundList> {
child_opt(self) child_opt(self)
} }
} }
@ -148,19 +148,19 @@ pub trait DocCommentsOwner: AstNode {
} }
} }
pub struct CommentIter<'a> { pub struct CommentIter {
iter: SyntaxElementChildren<'a>, iter: SyntaxElementChildren,
} }
impl<'a> Iterator for CommentIter<'a> { impl Iterator for CommentIter {
type Item = ast::Comment<'a>; type Item = ast::Comment;
fn next(&mut self) -> Option<ast::Comment<'a>> { fn next(&mut self) -> Option<ast::Comment> {
self.iter.by_ref().find_map(|el| el.as_token().and_then(ast::Comment::cast)) self.iter.by_ref().find_map(|el| el.as_token().cloned().and_then(ast::Comment::cast))
} }
} }
pub trait DefaultTypeParamOwner: AstNode { pub trait DefaultTypeParamOwner: AstNode {
fn default_type(&self) -> Option<&ast::PathType> { fn default_type(&self) -> Option<ast::PathType> {
child_opt(self) child_opt(self)
} }
} }

View file

@ -9,7 +9,7 @@ fn check_file_invariants(file: &SourceFile) {
pub fn check_parser(text: &str) { pub fn check_parser(text: &str) {
let file = SourceFile::parse(text); let file = SourceFile::parse(text);
check_file_invariants(&file.tree); check_file_invariants(&file.tree());
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
@ -45,16 +45,16 @@ impl CheckReparse {
pub fn run(&self) { pub fn run(&self) {
let parse = SourceFile::parse(&self.text); let parse = SourceFile::parse(&self.text);
let new_parse = parse.reparse(&self.edit); let new_parse = parse.reparse(&self.edit);
check_file_invariants(&new_parse.tree); check_file_invariants(&new_parse.tree());
assert_eq!(&new_parse.tree.syntax().text().to_string(), &self.edited_text); assert_eq!(&new_parse.tree().syntax().text().to_string(), &self.edited_text);
let full_reparse = SourceFile::parse(&self.edited_text); let full_reparse = SourceFile::parse(&self.edited_text);
for (a, b) in for (a, b) in
new_parse.tree.syntax().descendants().zip(full_reparse.tree.syntax().descendants()) new_parse.tree().syntax().descendants().zip(full_reparse.tree().syntax().descendants())
{ {
if (a.kind(), a.range()) != (b.kind(), b.range()) { if (a.kind(), a.range()) != (b.kind(), b.range()) {
eprint!("original:\n{}", parse.tree.syntax().debug_dump()); eprint!("original:\n{}", parse.tree().syntax().debug_dump());
eprint!("reparsed:\n{}", new_parse.tree.syntax().debug_dump()); eprint!("reparsed:\n{}", new_parse.tree().syntax().debug_dump());
eprint!("full reparse:\n{}", full_reparse.tree.syntax().debug_dump()); eprint!("full reparse:\n{}", full_reparse.tree().syntax().debug_dump());
assert_eq!( assert_eq!(
format!("{:?}", a), format!("{:?}", a),
format!("{:?}", b), format!("{:?}", b),

View file

@ -31,7 +31,7 @@ pub mod ast;
#[doc(hidden)] #[doc(hidden)]
pub mod fuzz; pub mod fuzz;
use std::{fmt::Write, sync::Arc}; use std::{fmt::Write, marker::PhantomData, sync::Arc};
use ra_text_edit::AtomTextEdit; use ra_text_edit::AtomTextEdit;
@ -43,8 +43,8 @@ pub use crate::{
ptr::{AstPtr, SyntaxNodePtr}, ptr::{AstPtr, SyntaxNodePtr},
syntax_error::{Location, SyntaxError, SyntaxErrorKind}, syntax_error::{Location, SyntaxError, SyntaxErrorKind},
syntax_node::{ syntax_node::{
Direction, InsertPosition, SyntaxElement, SyntaxNode, SyntaxNodeWrapper, SyntaxToken, Direction, InsertPosition, SyntaxElement, SyntaxNode, SyntaxToken, SyntaxTreeBuilder,
SyntaxTreeBuilder, TreeArc, WalkEvent, WalkEvent,
}, },
syntax_text::SyntaxText, syntax_text::SyntaxText,
}; };
@ -58,48 +58,63 @@ pub use rowan::{SmolStr, TextRange, TextUnit};
/// Note that we always produce a syntax tree, even for completely invalid /// Note that we always produce a syntax tree, even for completely invalid
/// files. /// files.
#[derive(Debug, PartialEq, Eq)] #[derive(Debug, PartialEq, Eq)]
pub struct Parse<T: SyntaxNodeWrapper> { pub struct Parse<T> {
tree: TreeArc<T>, green: GreenNode,
errors: Arc<Vec<SyntaxError>>, errors: Arc<Vec<SyntaxError>>,
_ty: PhantomData<fn() -> T>,
} }
impl<T: SyntaxNodeWrapper> Clone for Parse<T> { impl<T> Clone for Parse<T> {
fn clone(&self) -> Parse<T> { fn clone(&self) -> Parse<T> {
Parse { tree: self.tree.clone(), errors: self.errors.clone() } Parse { green: self.green.clone(), errors: self.errors.clone(), _ty: PhantomData }
} }
} }
impl<T: SyntaxNodeWrapper> Parse<T> { impl<T> Parse<T> {
fn new(tree: TreeArc<T>, errors: Vec<SyntaxError>) -> Parse<T> { fn new(green: GreenNode, errors: Vec<SyntaxError>) -> Parse<T> {
Parse { tree, errors: Arc::new(errors) } Parse { green, errors: Arc::new(errors), _ty: PhantomData }
} }
pub fn tree(&self) -> &T { fn syntax_node(&self) -> SyntaxNode {
&*self.tree SyntaxNode::new(self.green.clone())
}
}
impl<T: AstNode> Parse<T> {
pub fn to_syntax(self) -> Parse<SyntaxNode> {
Parse { green: self.green, errors: self.errors, _ty: PhantomData }
}
pub fn tree(&self) -> T {
T::cast(self.syntax_node()).unwrap()
} }
pub fn errors(&self) -> &[SyntaxError] { pub fn errors(&self) -> &[SyntaxError] {
&*self.errors &*self.errors
} }
pub fn ok(self) -> Result<TreeArc<T>, Arc<Vec<SyntaxError>>> { pub fn ok(self) -> Result<T, Arc<Vec<SyntaxError>>> {
if self.errors.is_empty() { if self.errors.is_empty() {
Ok(self.tree) Ok(self.tree())
} else { } else {
Err(self.errors) Err(self.errors)
} }
} }
} }
impl<T: AstNode> Parse<T> { impl Parse<SyntaxNode> {
pub fn to_syntax(this: Self) -> Parse<SyntaxNode> { pub fn cast<N: AstNode>(self) -> Option<Parse<N>> {
Parse { tree: this.tree().syntax().to_owned(), errors: this.errors } if N::cast(self.syntax_node()).is_some() {
Some(Parse { green: self.green, errors: self.errors, _ty: PhantomData })
} else {
None
}
} }
} }
impl Parse<SourceFile> { impl Parse<SourceFile> {
pub fn debug_dump(&self) -> String { pub fn debug_dump(&self) -> String {
let mut buf = self.tree.syntax().debug_dump(); let mut buf = self.tree().syntax().debug_dump();
for err in self.errors.iter() { for err in self.errors.iter() {
writeln!(buf, "error {:?}: {}", err.location(), err.kind()).unwrap(); writeln!(buf, "error {:?}: {}", err.location(), err.kind()).unwrap();
} }
@ -112,45 +127,38 @@ impl Parse<SourceFile> {
fn incremental_reparse(&self, edit: &AtomTextEdit) -> Option<Parse<SourceFile>> { fn incremental_reparse(&self, edit: &AtomTextEdit) -> Option<Parse<SourceFile>> {
// FIXME: validation errors are not handled here // FIXME: validation errors are not handled here
parsing::incremental_reparse(self.tree.syntax(), edit, self.errors.to_vec()).map( parsing::incremental_reparse(self.tree().syntax(), edit, self.errors.to_vec()).map(
|(green_node, errors, _reparsed_range)| Parse { |(green_node, errors, _reparsed_range)| Parse {
tree: SourceFile::new(green_node), green: green_node,
errors: Arc::new(errors), errors: Arc::new(errors),
_ty: PhantomData,
}, },
) )
} }
fn full_reparse(&self, edit: &AtomTextEdit) -> Parse<SourceFile> { fn full_reparse(&self, edit: &AtomTextEdit) -> Parse<SourceFile> {
let text = edit.apply(self.tree.syntax().text().to_string()); let text = edit.apply(self.tree().syntax().text().to_string());
SourceFile::parse(&text) SourceFile::parse(&text)
} }
} }
impl Parse<SyntaxNode> {
pub fn cast<T: AstNode>(self) -> Option<Parse<T>> {
let node = T::cast(&self.tree)?;
Some(Parse { tree: node.to_owned(), errors: self.errors })
}
}
/// `SourceFile` represents a parse tree for a single Rust file. /// `SourceFile` represents a parse tree for a single Rust file.
pub use crate::ast::SourceFile; pub use crate::ast::SourceFile;
impl SourceFile { impl SourceFile {
fn new(green: GreenNode) -> TreeArc<SourceFile> { fn new(green: GreenNode) -> SourceFile {
let root = SyntaxNode::new(green); let root = SyntaxNode::new(green);
if cfg!(debug_assertions) { if cfg!(debug_assertions) {
validation::validate_block_structure(&root); validation::validate_block_structure(&root);
} }
assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE); assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE);
TreeArc::cast(root) SourceFile::cast(root).unwrap()
} }
pub fn parse(text: &str) -> Parse<SourceFile> { pub fn parse(text: &str) -> Parse<SourceFile> {
let (green, mut errors) = parsing::parse_text(text); let (green, mut errors) = parsing::parse_text(text);
let tree = SourceFile::new(green); errors.extend(validation::validate(&SourceFile::new(green.clone())));
errors.extend(validation::validate(&tree)); Parse { green, errors: Arc::new(errors), _ty: PhantomData }
Parse { tree, errors: Arc::new(errors) }
} }
} }
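
Parse<T> is the other half of the migration: instead of holding a TreeArc<T> it keeps only the immutable green tree plus the error list, with a PhantomData<fn() -> T> marker, and tree() re-casts a freshly built root on every call. A toy sketch of that phantom-typed handle pattern, with the green tree modelled as a shared string and all helper types invented:

    use std::marker::PhantomData;
    use std::rc::Rc;

    // Toy stand-ins: the real GreenNode is rowan's immutable tree, cheap to clone.
    #[derive(Debug, Clone, PartialEq, Eq)]
    struct GreenNode(Rc<String>);

    #[derive(Debug, Clone, PartialEq, Eq)]
    struct SyntaxNode(GreenNode);

    trait AstNode: Sized {
        fn cast(syntax: SyntaxNode) -> Option<Self>;
    }

    #[derive(Debug, PartialEq, Eq)]
    struct SourceFile(SyntaxNode);

    impl AstNode for SourceFile {
        fn cast(syntax: SyntaxNode) -> Option<Self> {
            Some(SourceFile(syntax)) // a real impl would check the root's kind
        }
    }

    struct Parse<T> {
        green: GreenNode,
        errors: Rc<Vec<String>>,
        // `fn() -> T` marks the result type without owning a `T`: the marker
        // stays covariant and never drags `Send`/`Sync` or drop-check
        // obligations for `T` into the handle.
        _ty: PhantomData<fn() -> T>,
    }

    // Manual Clone, as in the diff, so cloning never requires `T: Clone`.
    impl<T> Clone for Parse<T> {
        fn clone(&self) -> Parse<T> {
            Parse { green: self.green.clone(), errors: self.errors.clone(), _ty: PhantomData }
        }
    }

    impl<T: AstNode> Parse<T> {
        fn syntax_node(&self) -> SyntaxNode {
            SyntaxNode(self.green.clone())
        }
        // The typed root is rebuilt on demand; with reference-counted trees
        // this is a cheap handle construction, not a reparse.
        fn tree(&self) -> T {
            T::cast(self.syntax_node()).unwrap()
        }
    }

    fn main() {
        let parse: Parse<SourceFile> = Parse {
            green: GreenNode(Rc::new("fn foo() {}".to_string())),
            errors: Rc::new(Vec::new()),
            _ty: PhantomData,
        };
        assert!(parse.errors.is_empty());
        let a = parse.tree();
        let b = parse.clone().tree();
        assert_eq!(a, b); // every call hands back an equivalent owned root
    }
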
@ -170,14 +178,14 @@ fn api_walkthrough() {
// The `parse` method returns a `Parse` -- a pair of syntax tree and a list // The `parse` method returns a `Parse` -- a pair of syntax tree and a list
// of errors. That is, syntax tree is constructed even in presence of errors. // of errors. That is, syntax tree is constructed even in presence of errors.
let parse = SourceFile::parse(source_code); let parse = SourceFile::parse(source_code);
assert!(parse.errors.is_empty()); assert!(parse.errors().is_empty());
// Due to the way ownership is set up, owned syntax Nodes always live behind // The `tree` method returns an owned syntax node of type `SourceFile`.
// a `TreeArc` smart pointer. `TreeArc` is roughly an `std::sync::Arc` which // Owned nodes are cheap: inside, they are `Rc` handles to the underlying data.
// points to the whole file instead of an individual node. let file: SourceFile = parse.tree();
let file: TreeArc<SourceFile> = parse.tree;
// `SourceFile` is the root of the syntax tree. We can iterate file's items: // `SourceFile` is the root of the syntax tree. We can iterate file's items.
// Let's fetch the `foo` function.
let mut func = None; let mut func = None;
for item in file.items() { for item in file.items() {
match item.kind() { match item.kind() {
@ -185,31 +193,26 @@ fn api_walkthrough() {
_ => unreachable!(), _ => unreachable!(),
} }
} }
// The returned items are always references. let func: ast::FnDef = func.unwrap();
let func: &ast::FnDef = func.unwrap();
// All nodes implement `ToOwned` trait, with `Owned = TreeArc<Self>`.
// `to_owned` is a cheap operation: atomic increment.
let _owned_func: TreeArc<ast::FnDef> = func.to_owned();
// Each AST node has a bunch of getters for children. All getters return // Each AST node has a bunch of getters for children. All getters return
// `Option`s though, to account for incomplete code. Some getters are common // `Option`s though, to account for incomplete code. Some getters are common
// for several kinds of node. In this case, a trait like `ast::NameOwner` // for several kinds of node. In this case, a trait like `ast::NameOwner`
// usually exists. By convention, all ast types should be used with `ast::` // usually exists. By convention, all ast types should be used with `ast::`
// qualifier. // qualifier.
let name: Option<&ast::Name> = func.name(); let name: Option<ast::Name> = func.name();
let name = name.unwrap(); let name = name.unwrap();
assert_eq!(name.text(), "foo"); assert_eq!(name.text(), "foo");
// Let's get the `1 + 1` expression! // Let's get the `1 + 1` expression!
let block: &ast::Block = func.body().unwrap(); let block: ast::Block = func.body().unwrap();
let expr: &ast::Expr = block.expr().unwrap(); let expr: ast::Expr = block.expr().unwrap();
// "Enum"-like nodes are represented using the "kind" pattern. It allows us // "Enum"-like nodes are represented using the "kind" pattern. It allows us
// to match exhaustively against all flavors of nodes, while maintaining // to match exhaustively against all flavors of nodes, while maintaining
// internal representation flexibility. The drawback is that one can't write // internal representation flexibility. The drawback is that one can't write
// nested matches as one pattern. // nested matches as one pattern.
let bin_expr: &ast::BinExpr = match expr.kind() { let bin_expr: ast::BinExpr = match expr.kind() {
ast::ExprKind::BinExpr(e) => e, ast::ExprKind::BinExpr(e) => e,
_ => unreachable!(), _ => unreachable!(),
}; };
@ -219,23 +222,14 @@ fn api_walkthrough() {
let expr_syntax: &SyntaxNode = expr.syntax(); let expr_syntax: &SyntaxNode = expr.syntax();
// Note how `expr` and `bin_expr` are in fact the same node underneath: // Note how `expr` and `bin_expr` are in fact the same node underneath:
assert!(std::ptr::eq(expr_syntax, bin_expr.syntax())); assert!(expr_syntax == bin_expr.syntax());
// To go from CST to AST, `AstNode::cast` function is used: // To go from CST to AST, `AstNode::cast` function is used:
let expr = match ast::Expr::cast(expr_syntax) { let _expr: ast::Expr = match ast::Expr::cast(expr_syntax.clone()) {
Some(e) => e, Some(e) => e,
None => unreachable!(), None => unreachable!(),
}; };
// Note how expr is also a reference!
let expr: &ast::Expr = expr;
// This is possible because the underlying representation is the same:
assert_eq!(
expr as *const ast::Expr as *const u8,
expr_syntax as *const SyntaxNode as *const u8
);
// The two properties each syntax node has is a `SyntaxKind`: // The two properties each syntax node has is a `SyntaxKind`:
assert_eq!(expr_syntax.kind(), SyntaxKind::BIN_EXPR); assert_eq!(expr_syntax.kind(), SyntaxKind::BIN_EXPR);
@ -248,7 +242,7 @@ fn api_walkthrough() {
assert_eq!(text.to_string(), "1 + 1"); assert_eq!(text.to_string(), "1 + 1");
// There's a bunch of traversal methods on `SyntaxNode`: // There's a bunch of traversal methods on `SyntaxNode`:
assert_eq!(expr_syntax.parent(), Some(block.syntax())); assert_eq!(expr_syntax.parent().as_ref(), Some(block.syntax()));
assert_eq!(block.syntax().first_child_or_token().map(|it| it.kind()), Some(T!['{'])); assert_eq!(block.syntax().first_child_or_token().map(|it| it.kind()), Some(T!['{']));
assert_eq!( assert_eq!(
expr_syntax.next_sibling_or_token().map(|it| it.kind()), expr_syntax.next_sibling_or_token().map(|it| it.kind()),
@ -257,7 +251,7 @@ fn api_walkthrough() {
// As well as some iterator helpers: // As well as some iterator helpers:
let f = expr_syntax.ancestors().find_map(ast::FnDef::cast); let f = expr_syntax.ancestors().find_map(ast::FnDef::cast);
assert_eq!(f, Some(&*func)); assert_eq!(f, Some(func));
assert!(expr_syntax.siblings_with_tokens(Direction::Next).any(|it| it.kind() == T!['}'])); assert!(expr_syntax.siblings_with_tokens(Direction::Next).any(|it| it.kind() == T!['}']));
assert_eq!( assert_eq!(
expr_syntax.descendants_with_tokens().count(), expr_syntax.descendants_with_tokens().count(),
@ -272,7 +266,7 @@ fn api_walkthrough() {
for event in expr_syntax.preorder_with_tokens() { for event in expr_syntax.preorder_with_tokens() {
match event { match event {
WalkEvent::Enter(node) => { WalkEvent::Enter(node) => {
let text = match node { let text = match &node {
SyntaxElement::Node(it) => it.text().to_string(), SyntaxElement::Node(it) => it.text().to_string(),
SyntaxElement::Token(it) => it.text().to_string(), SyntaxElement::Token(it) => it.text().to_string(),
}; };
@ -319,7 +313,7 @@ fn api_walkthrough() {
let mut exprs_visit = Vec::new(); let mut exprs_visit = Vec::new();
for node in file.syntax().descendants() { for node in file.syntax().descendants() {
if let Some(result) = if let Some(result) =
visitor().visit::<ast::Expr, _>(|expr| expr.syntax().text().to_string()).accept(node) visitor().visit::<ast::Expr, _>(|expr| expr.syntax().text().to_string()).accept(&node)
{ {
exprs_visit.push(result); exprs_visit.push(result);
} }

View file

@ -41,7 +41,7 @@ fn reparse_token<'node>(
root: &'node SyntaxNode, root: &'node SyntaxNode,
edit: &AtomTextEdit, edit: &AtomTextEdit,
) -> Option<(GreenNode, TextRange)> { ) -> Option<(GreenNode, TextRange)> {
let token = algo::find_covering_element(root, edit.delete).as_token()?; let token = algo::find_covering_element(root, edit.delete).as_token()?.clone();
match token.kind() { match token.kind() {
WHITESPACE | COMMENT | IDENT | STRING | RAW_STRING => { WHITESPACE | COMMENT | IDENT | STRING | RAW_STRING => {
if token.kind() == WHITESPACE || token.kind() == COMMENT { if token.kind() == WHITESPACE || token.kind() == COMMENT {
@ -51,7 +51,7 @@ fn reparse_token<'node>(
} }
} }
let text = get_text_after_edit(token.into(), &edit); let text = get_text_after_edit(token.clone().into(), &edit);
let lex_tokens = tokenize(&text); let lex_tokens = tokenize(&text);
let lex_token = match lex_tokens[..] { let lex_token = match lex_tokens[..] {
[lex_token] if lex_token.kind == token.kind() => lex_token, [lex_token] if lex_token.kind == token.kind() => lex_token,
@ -81,7 +81,7 @@ fn reparse_block<'node>(
edit: &AtomTextEdit, edit: &AtomTextEdit,
) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> { ) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
let (node, reparser) = find_reparsable_node(root, edit.delete)?; let (node, reparser) = find_reparsable_node(root, edit.delete)?;
let text = get_text_after_edit(node.into(), &edit); let text = get_text_after_edit(node.clone().into(), &edit);
let tokens = tokenize(&text); let tokens = tokenize(&text);
if !is_balanced(&tokens) { if !is_balanced(&tokens) {
return None; return None;
@ -109,7 +109,7 @@ fn is_contextual_kw(text: &str) -> bool {
} }
} }
fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> Option<(&SyntaxNode, Reparser)> { fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> Option<(SyntaxNode, Reparser)> {
let node = algo::find_covering_element(node, range); let node = algo::find_covering_element(node, range);
let mut ancestors = match node { let mut ancestors = match node {
SyntaxElement::Token(it) => it.parent().ancestors(), SyntaxElement::Token(it) => it.parent().ancestors(),
@ -167,8 +167,6 @@ fn merge_errors(
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use std::sync::Arc;
use test_utils::{assert_eq_text, extract_range}; use test_utils::{assert_eq_text, extract_range};
use super::*; use super::*;
@ -180,18 +178,18 @@ mod tests {
let after = edit.apply(before.clone()); let after = edit.apply(before.clone());
let fully_reparsed = SourceFile::parse(&after); let fully_reparsed = SourceFile::parse(&after);
let incrementally_reparsed = { let incrementally_reparsed: Parse<SourceFile> = {
let f = SourceFile::parse(&before); let f = SourceFile::parse(&before);
let edit = AtomTextEdit { delete: range, insert: replace_with.to_string() }; let edit = AtomTextEdit { delete: range, insert: replace_with.to_string() };
let (green, new_errors, range) = let (green, new_errors, range) =
incremental_reparse(f.tree.syntax(), &edit, f.errors.to_vec()).unwrap(); incremental_reparse(f.tree().syntax(), &edit, f.errors.to_vec()).unwrap();
assert_eq!(range.len(), reparsed_len.into(), "reparsed fragment has wrong length"); assert_eq!(range.len(), reparsed_len.into(), "reparsed fragment has wrong length");
Parse { tree: SourceFile::new(green), errors: Arc::new(new_errors) } Parse::new(green, new_errors)
}; };
assert_eq_text!( assert_eq_text!(
&fully_reparsed.tree.syntax().debug_dump(), &fully_reparsed.tree().syntax().debug_dump(),
&incrementally_reparsed.tree.syntax().debug_dump(), &incrementally_reparsed.tree().syntax().debug_dump(),
); );
} }

View file

@ -1,6 +1,7 @@
use crate::{AstNode, SyntaxKind, SyntaxNode, TextRange};
use std::{iter::successors, marker::PhantomData}; use std::{iter::successors, marker::PhantomData};
use crate::{AstNode, SyntaxKind, SyntaxNode, TextRange};
/// A pointer to a syntax node inside a file. It can be used to remember a /// A pointer to a syntax node inside a file. It can be used to remember a
/// specific node across reparses of the same file. /// specific node across reparses of the same file.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@ -14,9 +15,9 @@ impl SyntaxNodePtr {
SyntaxNodePtr { range: node.range(), kind: node.kind() } SyntaxNodePtr { range: node.range(), kind: node.kind() }
} }
pub fn to_node(self, root: &SyntaxNode) -> &SyntaxNode { pub fn to_node(self, root: &SyntaxNode) -> SyntaxNode {
assert!(root.parent().is_none()); assert!(root.parent().is_none());
successors(Some(root), |&node| { successors(Some(root.clone()), |node| {
node.children().find(|it| self.range.is_subrange(&it.range())) node.children().find(|it| self.range.is_subrange(&it.range()))
}) })
.find(|it| it.range() == self.range && it.kind() == self.kind) .find(|it| it.range() == self.range && it.kind() == self.kind)
@ -51,7 +52,7 @@ impl<N: AstNode> AstPtr<N> {
AstPtr { raw: SyntaxNodePtr::new(node.syntax()), _ty: PhantomData } AstPtr { raw: SyntaxNodePtr::new(node.syntax()), _ty: PhantomData }
} }
pub fn to_node(self, root: &SyntaxNode) -> &N { pub fn to_node(self, root: &SyntaxNode) -> N {
let syntax_node = self.raw.to_node(root); let syntax_node = self.raw.to_node(root);
N::cast(syntax_node).unwrap() N::cast(syntax_node).unwrap()
} }
@ -75,5 +76,5 @@ fn test_local_syntax_ptr() {
let field = file.syntax().descendants().find_map(ast::NamedFieldDef::cast).unwrap(); let field = file.syntax().descendants().find_map(ast::NamedFieldDef::cast).unwrap();
let ptr = SyntaxNodePtr::new(field.syntax()); let ptr = SyntaxNodePtr::new(field.syntax());
let field_syntax = ptr.to_node(file.syntax()); let field_syntax = ptr.to_node(file.syntax());
assert_eq!(field.syntax(), &*field_syntax); assert_eq!(field.syntax(), &field_syntax);
} }
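
SyntaxNodePtr keeps its old representation, a (range, kind) pair, but to_node now returns an owned SyntaxNode, re-resolving the pointer by walking down from the root through the child whose range still contains the remembered range. A self-contained toy version of that resolution walk, with an invented node type and std::ops::Range<u32> standing in for TextRange:

    use std::iter::successors;
    use std::ops::Range;
    use std::rc::Rc;

    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    enum SyntaxKind { Root, Item, Name }

    #[derive(Debug, Clone, PartialEq, Eq)]
    struct SyntaxNode(Rc<NodeData>);

    #[derive(Debug, PartialEq, Eq)]
    struct NodeData {
        kind: SyntaxKind,
        range: Range<u32>,
        children: Vec<SyntaxNode>,
    }

    impl SyntaxNode {
        fn kind(&self) -> SyntaxKind { self.0.kind }
        fn range(&self) -> Range<u32> { self.0.range.clone() }
        fn children(&self) -> impl Iterator<Item = SyntaxNode> + '_ {
            self.0.children.iter().cloned()
        }
    }

    // A pointer is just (range, kind); it survives reparses of the same file.
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct SyntaxNodePtr { range: (u32, u32), kind: SyntaxKind }

    impl SyntaxNodePtr {
        fn new(node: &SyntaxNode) -> SyntaxNodePtr {
            SyntaxNodePtr { range: (node.range().start, node.range().end), kind: node.kind() }
        }
        // Returns an owned node: walk from the root, at each step descending
        // into the child whose range is a superset of the remembered range.
        fn to_node(self, root: &SyntaxNode) -> SyntaxNode {
            successors(Some(root.clone()), |node| {
                node.children().find(|it| {
                    it.range().start <= self.range.0 && self.range.1 <= it.range().end
                })
            })
            .find(|it| (it.range().start, it.range().end) == self.range && it.kind() == self.kind)
            .unwrap()
        }
    }

    fn main() {
        let name = SyntaxNode(Rc::new(NodeData { kind: SyntaxKind::Name, range: 3..6, children: vec![] }));
        let item = SyntaxNode(Rc::new(NodeData { kind: SyntaxKind::Item, range: 0..10, children: vec![name.clone()] }));
        let root = SyntaxNode(Rc::new(NodeData { kind: SyntaxKind::Root, range: 0..10, children: vec![item] }));

        let ptr = SyntaxNodePtr::new(&name);
        assert_eq!(ptr.to_node(&root), name);
    }
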

View file

@ -7,14 +7,13 @@
//! modules just wraps its API. //! modules just wraps its API.
use std::{ use std::{
borrow::Borrow,
fmt::{self, Write}, fmt::{self, Write},
iter::successors, iter::successors,
ops::RangeInclusive, ops::RangeInclusive,
}; };
use ra_parser::ParseError; use ra_parser::ParseError;
use rowan::{GreenNodeBuilder, TransparentNewType}; use rowan::GreenNodeBuilder;
use crate::{ use crate::{
syntax_error::{SyntaxError, SyntaxErrorKind}, syntax_error::{SyntaxError, SyntaxErrorKind},
@ -33,86 +32,8 @@ pub enum InsertPosition<T> {
After(T), After(T),
} }
/// Marker trait for CST and AST nodes #[derive(PartialEq, Eq, Hash, Clone)]
pub trait SyntaxNodeWrapper: TransparentNewType<Repr = rowan::SyntaxNode> {} pub struct SyntaxNode(pub(crate) rowan::cursor::SyntaxNode);
impl<T: TransparentNewType<Repr = rowan::SyntaxNode>> SyntaxNodeWrapper for T {}
/// An owning smart pointer for CST or AST node.
#[derive(PartialEq, Eq, Hash)]
pub struct TreeArc<T: SyntaxNodeWrapper>(pub(crate) rowan::TreeArc<T>);
impl<T: SyntaxNodeWrapper> Borrow<T> for TreeArc<T> {
fn borrow(&self) -> &T {
&*self
}
}
impl<T> TreeArc<T>
where
T: SyntaxNodeWrapper,
{
pub(crate) fn cast<U>(this: TreeArc<T>) -> TreeArc<U>
where
U: SyntaxNodeWrapper,
{
TreeArc(rowan::TreeArc::cast(this.0))
}
}
impl<T> std::ops::Deref for TreeArc<T>
where
T: SyntaxNodeWrapper,
{
type Target = T;
fn deref(&self) -> &T {
self.0.deref()
}
}
impl<T> PartialEq<T> for TreeArc<T>
where
T: SyntaxNodeWrapper,
T: PartialEq<T>,
{
fn eq(&self, other: &T) -> bool {
let t: &T = self;
t == other
}
}
impl<T> Clone for TreeArc<T>
where
T: SyntaxNodeWrapper,
{
fn clone(&self) -> TreeArc<T> {
TreeArc(self.0.clone())
}
}
impl<T> fmt::Debug for TreeArc<T>
where
T: SyntaxNodeWrapper,
T: fmt::Debug,
{
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(&self.0, fmt)
}
}
#[derive(PartialEq, Eq, Hash)]
#[repr(transparent)]
pub struct SyntaxNode(pub(crate) rowan::SyntaxNode);
unsafe impl TransparentNewType for SyntaxNode {
type Repr = rowan::SyntaxNode;
}
impl ToOwned for SyntaxNode {
type Owned = TreeArc<SyntaxNode>;
fn to_owned(&self) -> TreeArc<SyntaxNode> {
let ptr = TreeArc(self.0.to_owned());
TreeArc::cast(ptr)
}
}
impl fmt::Debug for SyntaxNode { impl fmt::Debug for SyntaxNode {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
@ -133,9 +54,9 @@ pub enum Direction {
} }
impl SyntaxNode { impl SyntaxNode {
pub(crate) fn new(green: GreenNode) -> TreeArc<SyntaxNode> { pub(crate) fn new(green: GreenNode) -> SyntaxNode {
let ptr = TreeArc(rowan::SyntaxNode::new(green, None)); let inner = rowan::cursor::SyntaxNode::new_root(green);
TreeArc::cast(ptr) SyntaxNode(inner)
} }
pub fn kind(&self) -> SyntaxKind { pub fn kind(&self) -> SyntaxKind {
@ -143,47 +64,47 @@ impl SyntaxNode {
} }
pub fn range(&self) -> TextRange { pub fn range(&self) -> TextRange {
self.0.range() self.0.text_range()
} }
pub fn text(&self) -> SyntaxText { pub fn text(&self) -> SyntaxText {
SyntaxText::new(self) SyntaxText::new(self)
} }
pub fn parent(&self) -> Option<&SyntaxNode> { pub fn parent(&self) -> Option<SyntaxNode> {
self.0.parent().map(SyntaxNode::from_repr) self.0.parent().map(SyntaxNode)
} }
pub fn first_child(&self) -> Option<&SyntaxNode> { pub fn first_child(&self) -> Option<SyntaxNode> {
self.0.first_child().map(SyntaxNode::from_repr) self.0.first_child().map(SyntaxNode)
} }
pub fn first_child_or_token(&self) -> Option<SyntaxElement> { pub fn first_child_or_token(&self) -> Option<SyntaxElement> {
self.0.first_child_or_token().map(SyntaxElement::from) self.0.first_child_or_token().map(SyntaxElement::new)
} }
pub fn last_child(&self) -> Option<&SyntaxNode> { pub fn last_child(&self) -> Option<SyntaxNode> {
self.0.last_child().map(SyntaxNode::from_repr) self.0.last_child().map(SyntaxNode)
} }
pub fn last_child_or_token(&self) -> Option<SyntaxElement> { pub fn last_child_or_token(&self) -> Option<SyntaxElement> {
self.0.last_child_or_token().map(SyntaxElement::from) self.0.last_child_or_token().map(SyntaxElement::new)
} }
pub fn next_sibling(&self) -> Option<&SyntaxNode> { pub fn next_sibling(&self) -> Option<SyntaxNode> {
self.0.next_sibling().map(SyntaxNode::from_repr) self.0.next_sibling().map(SyntaxNode)
} }
pub fn next_sibling_or_token(&self) -> Option<SyntaxElement> { pub fn next_sibling_or_token(&self) -> Option<SyntaxElement> {
self.0.next_sibling_or_token().map(SyntaxElement::from) self.0.next_sibling_or_token().map(SyntaxElement::new)
} }
pub fn prev_sibling(&self) -> Option<&SyntaxNode> { pub fn prev_sibling(&self) -> Option<SyntaxNode> {
self.0.prev_sibling().map(SyntaxNode::from_repr) self.0.prev_sibling().map(SyntaxNode)
} }
pub fn prev_sibling_or_token(&self) -> Option<SyntaxElement> { pub fn prev_sibling_or_token(&self) -> Option<SyntaxElement> {
self.0.prev_sibling_or_token().map(SyntaxElement::from) self.0.prev_sibling_or_token().map(SyntaxElement::new)
} }
pub fn children(&self) -> SyntaxNodeChildren { pub fn children(&self) -> SyntaxNodeChildren {
@ -195,18 +116,18 @@ impl SyntaxNode {
} }
pub fn first_token(&self) -> Option<SyntaxToken> { pub fn first_token(&self) -> Option<SyntaxToken> {
self.0.first_token().map(SyntaxToken::from) self.0.first_token().map(SyntaxToken)
} }
pub fn last_token(&self) -> Option<SyntaxToken> { pub fn last_token(&self) -> Option<SyntaxToken> {
self.0.last_token().map(SyntaxToken::from) self.0.last_token().map(SyntaxToken)
} }
pub fn ancestors(&self) -> impl Iterator<Item = &SyntaxNode> { pub fn ancestors(&self) -> impl Iterator<Item = SyntaxNode> {
successors(Some(self), |&node| node.parent()) successors(Some(self.clone()), |node| node.parent())
} }
pub fn descendants(&self) -> impl Iterator<Item = &SyntaxNode> { pub fn descendants(&self) -> impl Iterator<Item = SyntaxNode> {
self.preorder().filter_map(|event| match event { self.preorder().filter_map(|event| match event {
WalkEvent::Enter(node) => Some(node), WalkEvent::Enter(node) => Some(node),
WalkEvent::Leave(_) => None, WalkEvent::Leave(_) => None,
@ -220,8 +141,8 @@ impl SyntaxNode {
}) })
} }
pub fn siblings(&self, direction: Direction) -> impl Iterator<Item = &SyntaxNode> { pub fn siblings(&self, direction: Direction) -> impl Iterator<Item = SyntaxNode> {
successors(Some(self), move |&node| match direction { successors(Some(self.clone()), move |node| match direction {
Direction::Next => node.next_sibling(), Direction::Next => node.next_sibling(),
Direction::Prev => node.prev_sibling(), Direction::Prev => node.prev_sibling(),
}) })
@ -231,29 +152,29 @@ impl SyntaxNode {
&self, &self,
direction: Direction, direction: Direction,
) -> impl Iterator<Item = SyntaxElement> { ) -> impl Iterator<Item = SyntaxElement> {
let me: SyntaxElement = self.into(); let me: SyntaxElement = self.clone().into();
successors(Some(me), move |el| match direction { successors(Some(me), move |el| match direction {
Direction::Next => el.next_sibling_or_token(), Direction::Next => el.next_sibling_or_token(),
Direction::Prev => el.prev_sibling_or_token(), Direction::Prev => el.prev_sibling_or_token(),
}) })
} }
pub fn preorder(&self) -> impl Iterator<Item = WalkEvent<&SyntaxNode>> { pub fn preorder(&self) -> impl Iterator<Item = WalkEvent<SyntaxNode>> {
self.0.preorder().map(|event| match event { self.0.preorder().map(|event| match event {
WalkEvent::Enter(n) => WalkEvent::Enter(SyntaxNode::from_repr(n)), WalkEvent::Enter(n) => WalkEvent::Enter(SyntaxNode(n)),
WalkEvent::Leave(n) => WalkEvent::Leave(SyntaxNode::from_repr(n)), WalkEvent::Leave(n) => WalkEvent::Leave(SyntaxNode(n)),
}) })
} }
pub fn preorder_with_tokens(&self) -> impl Iterator<Item = WalkEvent<SyntaxElement>> { pub fn preorder_with_tokens(&self) -> impl Iterator<Item = WalkEvent<SyntaxElement>> {
self.0.preorder_with_tokens().map(|event| match event { self.0.preorder_with_tokens().map(|event| match event {
WalkEvent::Enter(n) => WalkEvent::Enter(n.into()), WalkEvent::Enter(n) => WalkEvent::Enter(SyntaxElement::new(n)),
WalkEvent::Leave(n) => WalkEvent::Leave(n.into()), WalkEvent::Leave(n) => WalkEvent::Leave(SyntaxElement::new(n)),
}) })
} }
pub fn memory_size_of_subtree(&self) -> usize { pub fn memory_size_of_subtree(&self) -> usize {
self.0.memory_size_of_subtree() 0
} }
pub fn debug_dump(&self) -> String { pub fn debug_dump(&self) -> String {
@ -290,11 +211,11 @@ impl SyntaxNode {
/// ///
/// This is a type-unsafe low-level editing API, if you need to use it, /// This is a type-unsafe low-level editing API, if you need to use it,
/// prefer to create a type-safe abstraction on top of it instead. /// prefer to create a type-safe abstraction on top of it instead.
pub fn insert_children<'a>( pub fn insert_children(
&self, &self,
position: InsertPosition<SyntaxElement<'_>>, position: InsertPosition<SyntaxElement>,
to_insert: impl Iterator<Item = SyntaxElement<'a>>, to_insert: impl Iterator<Item = SyntaxElement>,
) -> TreeArc<SyntaxNode> { ) -> SyntaxNode {
let mut delta = TextUnit::default(); let mut delta = TextUnit::default();
let to_insert = to_insert.map(|element| { let to_insert = to_insert.map(|element| {
delta += element.text_len(); delta += element.text_len();
@ -303,7 +224,7 @@ impl SyntaxNode {
let old_children = self.0.green().children(); let old_children = self.0.green().children();
let new_children = match position { let new_children = match &position {
InsertPosition::First => { InsertPosition::First => {
to_insert.chain(old_children.iter().cloned()).collect::<Box<[_]>>() to_insert.chain(old_children.iter().cloned()).collect::<Box<[_]>>()
} }
@ -312,7 +233,7 @@ impl SyntaxNode {
} }
InsertPosition::Before(anchor) | InsertPosition::After(anchor) => { InsertPosition::Before(anchor) | InsertPosition::After(anchor) => {
let take_anchor = if let InsertPosition::After(_) = position { 1 } else { 0 }; let take_anchor = if let InsertPosition::After(_) = position { 1 } else { 0 };
let split_at = self.position_of_child(anchor) + take_anchor; let split_at = self.position_of_child(anchor.clone()) + take_anchor;
let (before, after) = old_children.split_at(split_at); let (before, after) = old_children.split_at(split_at);
before before
.iter() .iter()
@ -330,13 +251,13 @@ impl SyntaxNode {
/// ///
/// This is a type-unsafe low-level editing API, if you need to use it, /// This is a type-unsafe low-level editing API, if you need to use it,
/// prefer to create a type-safe abstraction on top of it instead. /// prefer to create a type-safe abstraction on top of it instead.
pub fn replace_children<'a>( pub fn replace_children(
&self, &self,
to_delete: RangeInclusive<SyntaxElement<'_>>, to_delete: RangeInclusive<SyntaxElement>,
to_insert: impl Iterator<Item = SyntaxElement<'a>>, to_insert: impl Iterator<Item = SyntaxElement>,
) -> TreeArc<SyntaxNode> { ) -> SyntaxNode {
let start = self.position_of_child(*to_delete.start()); let start = self.position_of_child(to_delete.start().clone());
let end = self.position_of_child(*to_delete.end()); let end = self.position_of_child(to_delete.end().clone());
let old_children = self.0.green().children(); let old_children = self.0.green().children();
let new_children = old_children[..start] let new_children = old_children[..start]
@ -348,7 +269,7 @@ impl SyntaxNode {
self.with_children(new_children) self.with_children(new_children)
} }
fn with_children(&self, new_children: Box<[rowan::GreenElement]>) -> TreeArc<SyntaxNode> { fn with_children(&self, new_children: Box<[rowan::GreenElement]>) -> SyntaxNode {
let len = new_children.iter().map(|it| it.text_len()).sum::<TextUnit>(); let len = new_children.iter().map(|it| it.text_len()).sum::<TextUnit>();
let new_node = GreenNode::new(rowan::SyntaxKind(self.kind() as u16), new_children); let new_node = GreenNode::new(rowan::SyntaxKind(self.kind() as u16), new_children);
let new_file_node = self.replace_with(new_node); let new_file_node = self.replace_with(new_node);
@ -364,7 +285,7 @@ impl SyntaxNode {
fn position_of_child(&self, child: SyntaxElement) -> usize { fn position_of_child(&self, child: SyntaxElement) -> usize {
self.children_with_tokens() self.children_with_tokens()
.position(|it| it == child) .position(|it| it == child)
.expect("elemetn is not a child of current element") .expect("element is not a child of current element")
} }
} }
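A minimal sketch (not part of this commit, helper name hypothetical) of how traversal reads after the migration: the cursor-based SyntaxNode is an owned, clonable handle, so helpers can return nodes instead of references tied to a TreeArc. Only ancestors() and kind(), shown in the hunks above, are assumed.

fn enclosing_node_of_kind(node: &SyntaxNode, kind: SyntaxKind) -> Option<SyntaxNode> {
    // ancestors() starts from a clone of the node itself and walks parents,
    // yielding owned SyntaxNode values instead of &SyntaxNode.
    node.ancestors().find(|n| n.kind() == kind)
}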
@ -377,11 +298,11 @@ fn to_green_element(element: SyntaxElement) -> rowan::GreenElement {
} }
} }
#[derive(Clone, Copy, PartialEq, Eq, Hash)] #[derive(Clone, PartialEq, Eq, Hash)]
pub struct SyntaxToken<'a>(pub(crate) rowan::SyntaxToken<'a>); pub struct SyntaxToken(pub(crate) rowan::cursor::SyntaxToken);
//FIXME: always output text //FIXME: always output text
impl<'a> fmt::Debug for SyntaxToken<'a> { impl fmt::Debug for SyntaxToken {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
write!(fmt, "{:?}@{:?}", self.kind(), self.range())?; write!(fmt, "{:?}@{:?}", self.kind(), self.range())?;
if self.text().len() < 25 { if self.text().len() < 25 {
@ -398,60 +319,54 @@ impl<'a> fmt::Debug for SyntaxToken<'a> {
} }
} }
impl<'a> fmt::Display for SyntaxToken<'a> { impl fmt::Display for SyntaxToken {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(self.text(), fmt) fmt::Display::fmt(self.text(), fmt)
} }
} }
impl<'a> From<rowan::SyntaxToken<'a>> for SyntaxToken<'a> { impl SyntaxToken {
fn from(t: rowan::SyntaxToken<'a>) -> Self {
SyntaxToken(t)
}
}
impl<'a> SyntaxToken<'a> {
pub fn kind(&self) -> SyntaxKind { pub fn kind(&self) -> SyntaxKind {
self.0.kind().0.into() self.0.kind().0.into()
} }
pub fn text(&self) -> &'a SmolStr { pub fn text(&self) -> &SmolStr {
self.0.text() self.0.text()
} }
pub fn range(&self) -> TextRange { pub fn range(&self) -> TextRange {
self.0.range() self.0.text_range()
} }
pub fn parent(&self) -> &'a SyntaxNode { pub fn parent(&self) -> SyntaxNode {
SyntaxNode::from_repr(self.0.parent()) SyntaxNode(self.0.parent())
} }
pub fn next_sibling_or_token(&self) -> Option<SyntaxElement<'a>> { pub fn next_sibling_or_token(&self) -> Option<SyntaxElement> {
self.0.next_sibling_or_token().map(SyntaxElement::from) self.0.next_sibling_or_token().map(SyntaxElement::new)
} }
pub fn prev_sibling_or_token(&self) -> Option<SyntaxElement<'a>> { pub fn prev_sibling_or_token(&self) -> Option<SyntaxElement> {
self.0.prev_sibling_or_token().map(SyntaxElement::from) self.0.prev_sibling_or_token().map(SyntaxElement::new)
} }
pub fn siblings_with_tokens( pub fn siblings_with_tokens(
&self, &self,
direction: Direction, direction: Direction,
) -> impl Iterator<Item = SyntaxElement<'a>> { ) -> impl Iterator<Item = SyntaxElement> {
let me: SyntaxElement = (*self).into(); let me: SyntaxElement = self.clone().into();
successors(Some(me), move |el| match direction { successors(Some(me), move |el| match direction {
Direction::Next => el.next_sibling_or_token(), Direction::Next => el.next_sibling_or_token(),
Direction::Prev => el.prev_sibling_or_token(), Direction::Prev => el.prev_sibling_or_token(),
}) })
} }
pub fn next_token(&self) -> Option<SyntaxToken<'a>> { pub fn next_token(&self) -> Option<SyntaxToken> {
self.0.next_token().map(SyntaxToken::from) self.0.next_token().map(SyntaxToken)
} }
pub fn prev_token(&self) -> Option<SyntaxToken<'a>> { pub fn prev_token(&self) -> Option<SyntaxToken> {
self.0.prev_token().map(SyntaxToken::from) self.0.prev_token().map(SyntaxToken)
} }
pub(crate) fn replace_with(&self, new_token: GreenToken) -> GreenNode { pub(crate) fn replace_with(&self, new_token: GreenToken) -> GreenNode {
@ -459,13 +374,25 @@ impl<'a> SyntaxToken<'a> {
} }
} }
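As an illustration only (the function name is made up), owned token handles can now be chained into iterators without borrowing from the tree; successors is std::iter::successors, which this module already uses, and next_token() is the method shown above.

fn tokens_from(token: SyntaxToken) -> impl Iterator<Item = SyntaxToken> {
    // Walk forward in document order, starting with the given token.
    successors(Some(token), |tok| tok.next_token())
}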
#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)] #[derive(Debug, PartialEq, Eq, Hash, Clone)]
pub enum SyntaxElement<'a> { pub enum SyntaxElement {
Node(&'a SyntaxNode), Node(SyntaxNode),
Token(SyntaxToken<'a>), Token(SyntaxToken),
} }
impl<'a> fmt::Display for SyntaxElement<'a> { impl From<SyntaxNode> for SyntaxElement {
fn from(node: SyntaxNode) -> Self {
SyntaxElement::Node(node)
}
}
impl From<SyntaxToken> for SyntaxElement {
fn from(token: SyntaxToken) -> Self {
SyntaxElement::Token(token)
}
}
impl fmt::Display for SyntaxElement {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
match self { match self {
SyntaxElement::Node(it) => fmt::Display::fmt(it, fmt), SyntaxElement::Node(it) => fmt::Display::fmt(it, fmt),
@ -474,7 +401,14 @@ impl<'a> fmt::Display for SyntaxElement<'a> {
} }
} }
impl<'a> SyntaxElement<'a> { impl SyntaxElement {
pub(crate) fn new(el: rowan::cursor::SyntaxElement) -> Self {
match el {
rowan::cursor::SyntaxElement::Node(it) => SyntaxElement::Node(SyntaxNode(it)),
rowan::cursor::SyntaxElement::Token(it) => SyntaxElement::Token(SyntaxToken(it)),
}
}
pub fn kind(&self) -> SyntaxKind { pub fn kind(&self) -> SyntaxKind {
match self { match self {
SyntaxElement::Node(it) => it.kind(), SyntaxElement::Node(it) => it.kind(),
@ -482,42 +416,49 @@ impl<'a> SyntaxElement<'a> {
} }
} }
pub fn as_node(&self) -> Option<&'a SyntaxNode> { pub fn as_node(&self) -> Option<&SyntaxNode> {
match self { match self {
SyntaxElement::Node(node) => Some(*node), SyntaxElement::Node(node) => Some(node),
SyntaxElement::Token(_) => None, SyntaxElement::Token(_) => None,
} }
} }
pub fn as_token(&self) -> Option<SyntaxToken<'a>> { pub fn as_token(&self) -> Option<&SyntaxToken> {
match self { match self {
SyntaxElement::Node(_) => None, SyntaxElement::Node(_) => None,
SyntaxElement::Token(token) => Some(*token), SyntaxElement::Token(token) => Some(token),
} }
} }
pub fn next_sibling_or_token(&self) -> Option<SyntaxElement<'a>> { pub fn next_sibling_or_token(&self) -> Option<SyntaxElement> {
match self { match self {
SyntaxElement::Node(it) => it.next_sibling_or_token(), SyntaxElement::Node(it) => it.next_sibling_or_token(),
SyntaxElement::Token(it) => it.next_sibling_or_token(), SyntaxElement::Token(it) => it.next_sibling_or_token(),
} }
} }
pub fn prev_sibling_or_token(&self) -> Option<SyntaxElement<'a>> { pub fn prev_sibling_or_token(&self) -> Option<SyntaxElement> {
match self { match self {
SyntaxElement::Node(it) => it.prev_sibling_or_token(), SyntaxElement::Node(it) => it.prev_sibling_or_token(),
SyntaxElement::Token(it) => it.prev_sibling_or_token(), SyntaxElement::Token(it) => it.prev_sibling_or_token(),
} }
} }
pub fn ancestors(&self) -> impl Iterator<Item = &'a SyntaxNode> { pub fn ancestors(&self) -> impl Iterator<Item = SyntaxNode> {
match self { match self {
SyntaxElement::Node(it) => it, SyntaxElement::Node(it) => it.clone(),
SyntaxElement::Token(it) => it.parent(), SyntaxElement::Token(it) => it.parent(),
} }
.ancestors() .ancestors()
} }
pub fn range(&self) -> TextRange {
match self {
SyntaxElement::Node(it) => it.range(),
SyntaxElement::Token(it) => it.range(),
}
}
fn text_len(&self) -> TextUnit { fn text_len(&self) -> TextUnit {
match self { match self {
SyntaxElement::Node(node) => node.0.green().text_len(), SyntaxElement::Node(node) => node.0.green().text_len(),
@ -526,55 +467,23 @@ impl<'a> SyntaxElement<'a> {
} }
} }
impl<'a> From<rowan::SyntaxElement<'a>> for SyntaxElement<'a> { #[derive(Clone, Debug)]
fn from(el: rowan::SyntaxElement<'a>) -> Self { pub struct SyntaxNodeChildren(rowan::cursor::SyntaxNodeChildren);
match el {
rowan::SyntaxElement::Node(n) => SyntaxElement::Node(SyntaxNode::from_repr(n)), impl Iterator for SyntaxNodeChildren {
rowan::SyntaxElement::Token(t) => SyntaxElement::Token(t.into()), type Item = SyntaxNode;
} fn next(&mut self) -> Option<SyntaxNode> {
self.0.next().map(SyntaxNode)
} }
} }
impl<'a> From<&'a SyntaxNode> for SyntaxElement<'a> { #[derive(Clone, Debug)]
fn from(node: &'a SyntaxNode) -> SyntaxElement<'a> { pub struct SyntaxElementChildren(rowan::cursor::SyntaxElementChildren);
SyntaxElement::Node(node)
}
}
impl<'a> From<SyntaxToken<'a>> for SyntaxElement<'a> { impl Iterator for SyntaxElementChildren {
fn from(token: SyntaxToken<'a>) -> SyntaxElement<'a> { type Item = SyntaxElement;
SyntaxElement::Token(token) fn next(&mut self) -> Option<SyntaxElement> {
} self.0.next().map(SyntaxElement::new)
}
impl<'a> SyntaxElement<'a> {
pub fn range(&self) -> TextRange {
match self {
SyntaxElement::Node(it) => it.range(),
SyntaxElement::Token(it) => it.range(),
}
}
}
#[derive(Debug)]
pub struct SyntaxNodeChildren<'a>(rowan::SyntaxNodeChildren<'a>);
impl<'a> Iterator for SyntaxNodeChildren<'a> {
type Item = &'a SyntaxNode;
fn next(&mut self) -> Option<&'a SyntaxNode> {
self.0.next().map(SyntaxNode::from_repr)
}
}
#[derive(Debug)]
pub struct SyntaxElementChildren<'a>(rowan::SyntaxElementChildren<'a>);
impl<'a> Iterator for SyntaxElementChildren<'a> {
type Item = SyntaxElement<'a>;
fn next(&mut self) -> Option<SyntaxElement<'a>> {
self.0.next().map(SyntaxElement::from)
} }
} }
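A small sketch, again not from this commit, of how callers match on the new owned SyntaxElement; kind() and range() are the accessors shown above, and the helper name is invented for illustration.

fn element_kind_and_range(el: &SyntaxElement) -> (SyntaxKind, TextRange) {
    match el {
        // Both variants now hold owned handles, so the match borrows them
        // rather than copying lifetime-bound references.
        SyntaxElement::Node(node) => (node.kind(), node.range()),
        SyntaxElement::Token(token) => (token.kind(), token.range()),
    }
}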
@ -601,7 +510,7 @@ impl SyntaxTreeBuilder {
if cfg!(debug_assertions) { if cfg!(debug_assertions) {
crate::validation::validate_block_structure(&node); crate::validation::validate_block_structure(&node);
} }
Parse::new(node, errors) Parse::new(node.0.green().clone(), errors)
} }
pub fn token(&mut self, kind: SyntaxKind, text: SmolStr) { pub fn token(&mut self, kind: SyntaxKind, text: SmolStr) {

View file

@ -16,29 +16,36 @@ impl<'a> SyntaxText<'a> {
SyntaxText { node, range: node.range() } SyntaxText { node, range: node.range() }
} }
pub fn chunks(&self) -> impl Iterator<Item = &'a str> { pub fn chunks(&self) -> impl Iterator<Item = SmolStr> {
let range = self.range; let range = self.range;
self.node.descendants_with_tokens().filter_map(move |el| match el { self.node.descendants_with_tokens().filter_map(move |el| match el {
SyntaxElement::Token(t) => { SyntaxElement::Token(t) => {
let text = t.text(); let text = t.text();
let range = range.intersection(&t.range())?; let range = range.intersection(&t.range())?;
let range = range - t.range().start(); let res = if range == t.range() {
Some(&text[range]) t.text().clone()
} else {
let range = range - t.range().start();
text[range].into()
};
Some(res)
} }
SyntaxElement::Node(_) => None, SyntaxElement::Node(_) => None,
}) })
} }
pub fn push_to(&self, buf: &mut String) { pub fn push_to(&self, buf: &mut String) {
self.chunks().for_each(|it| buf.push_str(it)); self.chunks().for_each(|it| buf.push_str(it.as_str()));
} }
pub fn to_string(&self) -> String { pub fn to_string(&self) -> String {
self.chunks().collect() let mut buf = String::new();
self.push_to(&mut buf);
buf
} }
pub fn to_smol_string(&self) -> SmolStr { pub fn to_smol_string(&self) -> SmolStr {
self.chunks().collect() self.to_string().into()
} }
pub fn contains(&self, c: char) -> bool { pub fn contains(&self, c: char) -> bool {
@ -52,7 +59,7 @@ impl<'a> SyntaxText<'a> {
let pos: TextUnit = (pos as u32).into(); let pos: TextUnit = (pos as u32).into();
return Some(acc + pos); return Some(acc + pos);
} }
acc += TextUnit::of_str(chunk); acc += TextUnit::of_str(chunk.as_str());
} }
None None
} }
@ -97,7 +104,7 @@ impl<'a> SyntaxText<'a> {
let mut start: TextUnit = 0.into(); let mut start: TextUnit = 0.into();
let offset = offset.into(); let offset = offset.into();
for chunk in self.chunks() { for chunk in self.chunks() {
let end = start + TextUnit::of_str(chunk); let end = start + TextUnit::of_str(chunk.as_str());
if start <= offset && offset < end { if start <= offset && offset < end {
let off: usize = u32::from(offset - start) as usize; let off: usize = u32::from(offset - start) as usize;
return Some(chunk[off..].chars().next().unwrap()); return Some(chunk[off..].chars().next().unwrap());
@ -129,7 +136,7 @@ impl From<SyntaxText<'_>> for String {
impl PartialEq<str> for SyntaxText<'_> { impl PartialEq<str> for SyntaxText<'_> {
fn eq(&self, mut rhs: &str) -> bool { fn eq(&self, mut rhs: &str) -> bool {
for chunk in self.chunks() { for chunk in self.chunks() {
if !rhs.starts_with(chunk) { if !rhs.starts_with(chunk.as_str()) {
return false; return false;
} }
rhs = &rhs[chunk.len()..]; rhs = &rhs[chunk.len()..];
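A brief usage sketch under the new chunks() signature (illustrative, not from the commit; the function name is hypothetical): chunks() now yields owned SmolStr values, so callers go through as_str() where they previously consumed &str directly.

fn char_count(text: &SyntaxText<'_>) -> usize {
    // Sum character counts chunk by chunk; each chunk is an owned SmolStr.
    text.chunks().map(|chunk| chunk.as_str().chars().count()).sum()
}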

View file

@ -19,13 +19,13 @@ pub(crate) fn validate(file: &SourceFile) -> Vec<SyntaxError> {
.visit::<ast::Literal, _>(validate_literal) .visit::<ast::Literal, _>(validate_literal)
.visit::<ast::Block, _>(block::validate_block_node) .visit::<ast::Block, _>(block::validate_block_node)
.visit::<ast::FieldExpr, _>(field_expr::validate_field_expr_node) .visit::<ast::FieldExpr, _>(field_expr::validate_field_expr_node)
.accept(node); .accept(&node);
} }
errors errors
} }
// FIXME: kill duplication // FIXME: kill duplication
fn validate_literal(literal: &ast::Literal, acc: &mut Vec<SyntaxError>) { fn validate_literal(literal: ast::Literal, acc: &mut Vec<SyntaxError>) {
let token = literal.token(); let token = literal.token();
let text = token.text().as_str(); let text = token.text().as_str();
match token.kind() { match token.kind() {

View file

@ -5,7 +5,7 @@ use crate::{
SyntaxKind::*, SyntaxKind::*,
}; };
pub(crate) fn validate_block_node(node: &ast::Block, errors: &mut Vec<SyntaxError>) { pub(crate) fn validate_block_node(node: ast::Block, errors: &mut Vec<SyntaxError>) {
if let Some(parent) = node.syntax().parent() { if let Some(parent) = node.syntax().parent() {
match parent.kind() { match parent.kind() {
FN_DEF => return, FN_DEF => return,

View file

@ -4,7 +4,7 @@ use crate::{
SyntaxErrorKind::*, SyntaxErrorKind::*,
}; };
pub(crate) fn validate_field_expr_node(node: &ast::FieldExpr, errors: &mut Vec<SyntaxError>) { pub(crate) fn validate_field_expr_node(node: ast::FieldExpr, errors: &mut Vec<SyntaxError>) {
if let Some(FieldKind::Index(idx)) = node.field_access() { if let Some(FieldKind::Index(idx)) = node.field_access() {
if idx.text().chars().any(|c| c < '0' || c > '9') { if idx.text().chars().any(|c| c < '0' || c > '9') {
errors.push(SyntaxError::new(InvalidTupleIndexFormat, idx.range())); errors.push(SyntaxError::new(InvalidTupleIndexFormat, idx.range()));