Mirror of https://github.com/rust-lang/rust-analyzer, synced 2024-12-27 05:23:24 +00:00
Merge #3909
3909: Generate tokens r=matklad a=matklad
bors r+
🤖
Co-authored-by: Luca Barbieri <luca@luca-barbieri.com>
Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
commit 8595693287
4 changed files with 3176 additions and 74 deletions

@@ -30,7 +30,7 @@ pub use self::{
 /// conversion itself has zero runtime cost: ast and syntax nodes have exactly
 /// the same representation: a pointer to the tree root and a pointer to the
 /// node itself.
-pub trait AstNode: std::fmt::Display {
+pub trait AstNode {
     fn can_cast(kind: SyntaxKind) -> bool
     where
         Self: Sized;

@@ -49,10 +49,16 @@ fn assert_ast_is_object_safe() {
 /// Like `AstNode`, but wraps tokens rather than interior nodes.
 pub trait AstToken {
-    fn cast(token: SyntaxToken) -> Option<Self>
+    fn can_cast(token: SyntaxKind) -> bool
     where
         Self: Sized;

+    fn cast(syntax: SyntaxToken) -> Option<Self>
+    where
+        Self: Sized;
+
     fn syntax(&self) -> &SyntaxToken;

     fn text(&self) -> &SmolStr {
         self.syntax().text()
     }

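The reworked `AstToken` now mirrors `AstNode`: `can_cast` is a cheap check on a `SyntaxKind`, and `cast` wraps the token only when that check passes; the generated `impl AstToken for ...` blocks further down implement exactly this pair. A minimal, self-contained sketch of that contract, using stand-in `SyntaxKind` and `SyntaxToken` types rather than the real ra_syntax ones:

// Stand-in types for illustration only; the real ones live in ra_syntax.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum SyntaxKind {
    Whitespace,
    Comment,
}

#[derive(Debug, Clone)]
struct SyntaxToken {
    kind: SyntaxKind,
    text: String,
}

trait AstToken: Sized {
    // Cheap check: does this kind correspond to the wrapper type?
    fn can_cast(kind: SyntaxKind) -> bool;
    // Wrap the token, consuming it, only if the kind matches.
    fn cast(syntax: SyntaxToken) -> Option<Self>;
    fn syntax(&self) -> &SyntaxToken;
}

struct Comment {
    syntax: SyntaxToken,
}

impl AstToken for Comment {
    fn can_cast(kind: SyntaxKind) -> bool {
        kind == SyntaxKind::Comment
    }
    fn cast(syntax: SyntaxToken) -> Option<Self> {
        if Self::can_cast(syntax.kind) { Some(Comment { syntax }) } else { None }
    }
    fn syntax(&self) -> &SyntaxToken {
        &self.syntax
    }
}

fn main() {
    let token = SyntaxToken { kind: SyntaxKind::Comment, text: "// a comment".to_string() };
    // `can_cast` lets a caller check the kind without giving up ownership of the token.
    assert!(Comment::can_cast(token.kind));
    let comment = Comment::cast(token).unwrap();
    assert_eq!(comment.syntax().text, "// a comment");

    // A token of the wrong kind simply fails to cast.
    let ws = SyntaxToken { kind: SyntaxKind::Whitespace, text: "  ".to_string() };
    assert!(Comment::cast(ws).is_none());
}
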
File diff suppressed because it is too large.

@@ -1,26 +1,10 @@
 //! There are many AstNodes, but only a few tokens, so we hand-write them here.

 use crate::{
-    ast::AstToken,
-    SyntaxKind::{COMMENT, RAW_STRING, STRING, WHITESPACE},
-    SyntaxToken, TextRange, TextUnit,
+    ast::{AstToken, Comment, RawString, String, Whitespace},
+    TextRange, TextUnit,
 };

-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub struct Comment(SyntaxToken);
-
-impl AstToken for Comment {
-    fn cast(token: SyntaxToken) -> Option<Self> {
-        match token.kind() {
-            COMMENT => Some(Comment(token)),
-            _ => None,
-        }
-    }
-    fn syntax(&self) -> &SyntaxToken {
-        &self.0
-    }
-}
-
 impl Comment {
     pub fn kind(&self) -> CommentKind {
         kind_by_prefix(self.text())

@@ -89,20 +73,6 @@ fn prefix_by_kind(kind: CommentKind) -> &'static str {
     unreachable!()
 }

-pub struct Whitespace(SyntaxToken);
-
-impl AstToken for Whitespace {
-    fn cast(token: SyntaxToken) -> Option<Self> {
-        match token.kind() {
-            WHITESPACE => Some(Whitespace(token)),
-            _ => None,
-        }
-    }
-    fn syntax(&self) -> &SyntaxToken {
-        &self.0
-    }
-}
-
 impl Whitespace {
     pub fn spans_multiple_lines(&self) -> bool {
         let text = self.text();

@@ -168,20 +138,6 @@ pub trait HasStringValue: HasQuotes {
     fn value(&self) -> Option<std::string::String>;
 }

-pub struct String(SyntaxToken);
-
-impl AstToken for String {
-    fn cast(token: SyntaxToken) -> Option<Self> {
-        match token.kind() {
-            STRING => Some(String(token)),
-            _ => None,
-        }
-    }
-    fn syntax(&self) -> &SyntaxToken {
-        &self.0
-    }
-}
-
 impl HasStringValue for String {
     fn value(&self) -> Option<std::string::String> {
         let text = self.text().as_str();

@@ -201,20 +157,6 @@ impl HasStringValue for String {
     }
 }

-pub struct RawString(SyntaxToken);
-
-impl AstToken for RawString {
-    fn cast(token: SyntaxToken) -> Option<Self> {
-        match token.kind() {
-            RAW_STRING => Some(RawString(token)),
-            _ => None,
-        }
-    }
-    fn syntax(&self) -> &SyntaxToken {
-        &self.0
-    }
-}
-
 impl HasStringValue for RawString {
     fn value(&self) -> Option<std::string::String> {
         let text = self.text().as_str();

@@ -5,6 +5,8 @@

 use proc_macro2::{Punct, Spacing};
 use quote::{format_ident, quote};
+use std::borrow::Cow;
+use std::collections::{BTreeSet, HashMap, HashSet};

 use crate::{
     ast_src::{AstSrc, FieldSrc, KindsSrc, AST_SRC, KINDS_SRC},

@@ -18,13 +20,108 @@ pub fn generate_syntax(mode: Mode) -> Result<()> {
     update(syntax_kinds_file.as_path(), &syntax_kinds, mode)?;

     let ast_file = project_root().join(codegen::AST);
-    let ast = generate_ast(AST_SRC)?;
+    let ast = generate_ast(KINDS_SRC, AST_SRC)?;
     update(ast_file.as_path(), &ast, mode)?;

     Ok(())
 }

-fn generate_ast(grammar: AstSrc<'_>) -> Result<String> {
+#[derive(Debug, Default, Clone)]
+struct ElementKinds {
+    kinds: BTreeSet<proc_macro2::Ident>,
+    has_nodes: bool,
+    has_tokens: bool,
+}
+
+fn generate_ast(kinds: KindsSrc<'_>, grammar: AstSrc<'_>) -> Result<String> {
+    let all_token_kinds: Vec<_> = kinds
+        .punct
+        .into_iter()
+        .map(|(_, kind)| kind)
+        .copied()
+        .map(|x| x.into())
+        .chain(
+            kinds
+                .keywords
+                .into_iter()
+                .chain(kinds.contextual_keywords.into_iter())
+                .map(|name| Cow::Owned(format!("{}_KW", to_upper_snake_case(&name)))),
+        )
+        .chain(kinds.literals.into_iter().copied().map(|x| x.into()))
+        .chain(kinds.tokens.into_iter().copied().map(|x| x.into()))
+        .collect();
+
+    let mut element_kinds_map = HashMap::new();
+    for kind in &all_token_kinds {
+        let kind = &**kind;
+        let name = to_pascal_case(kind);
+        element_kinds_map.insert(
+            name,
+            ElementKinds {
+                kinds: Some(format_ident!("{}", kind)).into_iter().collect(),
+                has_nodes: false,
+                has_tokens: true,
+            },
+        );
+    }
+
+    for kind in kinds.nodes {
+        let name = to_pascal_case(kind);
+        element_kinds_map.insert(
+            name,
+            ElementKinds {
+                kinds: Some(format_ident!("{}", *kind)).into_iter().collect(),
+                has_nodes: true,
+                has_tokens: false,
+            },
+        );
+    }
+
+    for en in grammar.enums {
+        let mut element_kinds: ElementKinds = Default::default();
+        for variant in en.variants {
+            if let Some(variant_element_kinds) = element_kinds_map.get(*variant) {
+                element_kinds.kinds.extend(variant_element_kinds.kinds.iter().cloned());
+                element_kinds.has_tokens |= variant_element_kinds.has_tokens;
+                element_kinds.has_nodes |= variant_element_kinds.has_nodes;
+            } else {
+                panic!("Enum variant has type that does not exist or was not declared before the enum: {}", *variant);
+            }
+        }
+        element_kinds_map.insert(en.name.to_string(), element_kinds);
+    }
+
+    let tokens = all_token_kinds.iter().map(|kind_str| {
+        let kind_str = &**kind_str;
+        let kind = format_ident!("{}", kind_str);
+        let name = format_ident!("{}", to_pascal_case(kind_str));
+        quote! {
+            #[derive(Debug, Clone, PartialEq, Eq, Hash)]
+            pub struct #name {
+                pub(crate) syntax: SyntaxToken,
+            }
+
+            impl std::fmt::Display for #name {
+                fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+                    std::fmt::Display::fmt(&self.syntax, f)
+                }
+            }
+
+            impl AstToken for #name {
+                fn can_cast(kind: SyntaxKind) -> bool {
+                    match kind {
+                        #kind => true,
+                        _ => false,
+                    }
+                }
+                fn cast(syntax: SyntaxToken) -> Option<Self> {
+                    if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
+                }
+                fn syntax(&self) -> &SyntaxToken { &self.syntax }
+            }
+        }
+    });
+
     let nodes = grammar.nodes.iter().map(|node| {
         let name = format_ident!("{}", node.name);
         let kind = format_ident!("{}", to_upper_snake_case(&name.to_string()));

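The `tokens` map above is the heart of the change: for every token kind it emits a wrapper struct, a `Display` impl that forwards to the underlying token, and an `AstToken` impl. A stand-alone sketch of the same `quote!`-based approach, runnable with the `quote` crate as a dependency; it prints the (unformatted) code the template would produce for one kind, here `WHITESPACE`:

// Stand-alone sketch of the quote!-based codegen for a single kind.
// The interpolated types (SyntaxToken, SyntaxKind, AstToken) are only quoted
// as tokens here; they are resolved in the crate the output is written into.
use quote::{format_ident, quote};

fn main() {
    let kind_str = "WHITESPACE";
    let kind = format_ident!("{}", kind_str);
    let name = format_ident!("{}", "Whitespace"); // what to_pascal_case(kind_str) produces

    let tokens = quote! {
        #[derive(Debug, Clone, PartialEq, Eq, Hash)]
        pub struct #name {
            pub(crate) syntax: SyntaxToken,
        }

        impl AstToken for #name {
            fn can_cast(kind: SyntaxKind) -> bool {
                match kind {
                    #kind => true,
                    _ => false,
                }
            }
            fn cast(syntax: SyntaxToken) -> Option<Self> {
                if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
            }
            fn syntax(&self) -> &SyntaxToken { &self.syntax }
        }
    };

    // In the real xtask the per-kind streams are collected together with the
    // node and enum definitions and written out to the generated source file.
    println!("{}", tokens);
}
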
@@ -42,6 +139,7 @@ fn generate_ast(grammar: AstSrc<'_>) -> Result<String> {
                 FieldSrc::Optional(ty) | FieldSrc::Many(ty) => ty,
                 FieldSrc::Shorthand => name,
             };

             let ty = format_ident!("{}", ty);

             match field {

@@ -86,6 +184,7 @@ fn generate_ast(grammar: AstSrc<'_>) -> Result<String> {
                 }
                 fn syntax(&self) -> &SyntaxNode { &self.syntax }
             }

             #(#traits)*

             impl #name {

@@ -154,12 +253,25 @@ fn generate_ast(grammar: AstSrc<'_>) -> Result<String> {
         }
     });

+    let defined_nodes: HashSet<_> = grammar.nodes.iter().map(|node| node.name).collect();
+
+    for node in kinds
+        .nodes
+        .iter()
+        .map(|kind| to_pascal_case(*kind))
+        .filter(|name| !defined_nodes.contains(&**name))
+    {
+        eprintln!("Warning: node {} not defined in ast source", node);
+    }
+
     let ast = quote! {
+        #[allow(unused_imports)]
         use crate::{
-            SyntaxNode, SyntaxKind::{self, *},
-            ast::{self, AstNode, AstChildren},
+            SyntaxNode, SyntaxToken, SyntaxElement, NodeOrToken, SyntaxKind::{self, *},
+            ast::{self, AstNode, AstToken, AstChildren},
         };

+        #(#tokens)*
         #(#nodes)*
         #(#enums)*
     };

@@ -282,12 +394,12 @@ fn generate_syntax_kinds(grammar: KindsSrc<'_>) -> Result<String> {

 fn to_upper_snake_case(s: &str) -> String {
     let mut buf = String::with_capacity(s.len());
-    let mut prev_is_upper = None;
+    let mut prev = false;
     for c in s.chars() {
-        if c.is_ascii_uppercase() && prev_is_upper == Some(false) {
+        if c.is_ascii_uppercase() && prev {
             buf.push('_')
         }
-        prev_is_upper = Some(c.is_ascii_uppercase());
+        prev = true;

         buf.push(c.to_ascii_uppercase());
     }

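The case-conversion helpers are what connect grammar names to `SyntaxKind` identifiers; `to_upper_snake_case` turns PascalCase node names (and keyword names, which then get a `_KW` suffix) into SCREAMING_SNAKE kinds. A self-contained copy of the boolean-flag version shown in the hunk, with a couple of sanity checks on the kind of input the generator feeds it:

// Self-contained copy of the boolean-flag version above, plus example inputs.
fn to_upper_snake_case(s: &str) -> String {
    let mut buf = String::with_capacity(s.len());
    let mut prev = false;
    for c in s.chars() {
        // Insert an underscore before an uppercase letter, except at the start.
        if c.is_ascii_uppercase() && prev {
            buf.push('_')
        }
        prev = true;

        buf.push(c.to_ascii_uppercase());
    }
    buf
}

fn main() {
    assert_eq!(to_upper_snake_case("SourceFile"), "SOURCE_FILE");
    assert_eq!(to_upper_snake_case("await"), "AWAIT"); // keywords then become `AWAIT_KW`
}
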
@@ -296,14 +408,30 @@ fn to_upper_snake_case(s: &str) -> String {

 fn to_lower_snake_case(s: &str) -> String {
     let mut buf = String::with_capacity(s.len());
-    let mut prev_is_upper = None;
+    let mut prev = false;
     for c in s.chars() {
-        if c.is_ascii_uppercase() && prev_is_upper == Some(false) {
+        if c.is_ascii_uppercase() && prev {
             buf.push('_')
         }
-        prev_is_upper = Some(c.is_ascii_uppercase());
+        prev = true;

         buf.push(c.to_ascii_lowercase());
     }
     buf
 }

+fn to_pascal_case(s: &str) -> String {
+    let mut buf = String::with_capacity(s.len());
+    let mut prev_is_underscore = true;
+    for c in s.chars() {
+        if c == '_' {
+            prev_is_underscore = true;
+        } else if prev_is_underscore {
+            buf.push(c.to_ascii_uppercase());
+            prev_is_underscore = false;
+        } else {
+            buf.push(c.to_ascii_lowercase());
+        }
+    }
+    buf
+}

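`to_pascal_case` goes the other way: it is new in this change and produces the struct names for the generated token wrappers from SCREAMING_SNAKE kind names. A self-contained copy with example conversions:

// Self-contained copy of `to_pascal_case` above, with example kind names.
fn to_pascal_case(s: &str) -> String {
    let mut buf = String::with_capacity(s.len());
    let mut prev_is_underscore = true;
    for c in s.chars() {
        if c == '_' {
            // Drop the underscore and uppercase the next character.
            prev_is_underscore = true;
        } else if prev_is_underscore {
            buf.push(c.to_ascii_uppercase());
            prev_is_underscore = false;
        } else {
            buf.push(c.to_ascii_lowercase());
        }
    }
    buf
}

fn main() {
    assert_eq!(to_pascal_case("WHITESPACE"), "Whitespace");
    assert_eq!(to_pascal_case("SOURCE_FILE"), "SourceFile");
}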