deserialize grammar

Aleksey Kladov 2019-08-18 21:54:51 +03:00
parent 229d7943d8
commit d545a5c75c
4 changed files with 88 additions and 56 deletions

Cargo.lock (generated)
View file

@@ -1365,6 +1365,7 @@ dependencies = [
  "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "ron 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.99 (registry+https://github.com/rust-lang/crates.io-index)",
  "teraron 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "walkdir 2.2.9 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

View file

@@ -2,61 +2,61 @@
 // See `cargo gen-syntax` (defined in crates/tools/src/main.rs)
 Grammar(
     single_byte_tokens: [
-        [";", "SEMI"],
-        [",", "COMMA"],
-        ["(", "L_PAREN"],
-        [")", "R_PAREN"],
-        ["{", "L_CURLY"],
-        ["}", "R_CURLY"],
-        ["[", "L_BRACK"],
-        ["]", "R_BRACK"],
-        ["<", "L_ANGLE"],
-        [">", "R_ANGLE"],
-        ["@", "AT"],
-        ["#", "POUND"],
-        ["~", "TILDE"],
-        ["?", "QUESTION"],
-        ["$", "DOLLAR"],
-        ["&", "AMP"],
-        ["|", "PIPE"],
-        ["+", "PLUS"],
-        ["*", "STAR"],
-        ["/", "SLASH"],
-        ["^", "CARET"],
-        ["%", "PERCENT"],
-        ["_", "UNDERSCORE"],
+        (";", "SEMI"),
+        (",", "COMMA"),
+        ("(", "L_PAREN"),
+        (")", "R_PAREN"),
+        ("{", "L_CURLY"),
+        ("}", "R_CURLY"),
+        ("[", "L_BRACK"),
+        ("]", "R_BRACK"),
+        ("<", "L_ANGLE"),
+        (">", "R_ANGLE"),
+        ("@", "AT"),
+        ("#", "POUND"),
+        ("~", "TILDE"),
+        ("?", "QUESTION"),
+        ("$", "DOLLAR"),
+        ("&", "AMP"),
+        ("|", "PIPE"),
+        ("+", "PLUS"),
+        ("*", "STAR"),
+        ("/", "SLASH"),
+        ("^", "CARET"),
+        ("%", "PERCENT"),
+        ("_", "UNDERSCORE"),
     ],
     // Tokens for which the longest match must be chosen (e.g. `..` is a DOTDOT, but `.` is a DOT)
     multi_byte_tokens: [
-        [".", "DOT"],
-        ["..", "DOTDOT"],
-        ["...", "DOTDOTDOT"],
-        ["..=", "DOTDOTEQ"],
-        [":", "COLON"],
-        ["::", "COLONCOLON"],
-        ["=", "EQ"],
-        ["==", "EQEQ"],
-        ["=>", "FAT_ARROW"],
-        ["!", "EXCL"],
-        ["!=", "NEQ"],
-        ["-", "MINUS"],
-        ["->", "THIN_ARROW"],
-        ["<=", "LTEQ"],
-        [">=", "GTEQ"],
-        ["+=", "PLUSEQ"],
-        ["-=", "MINUSEQ"],
-        ["|=", "PIPEEQ"],
-        ["&=", "AMPEQ"],
-        ["^=", "CARETEQ"],
-        ["/=", "SLASHEQ"],
-        ["*=", "STAREQ"],
-        ["%=", "PERCENTEQ"],
-        ["&&", "AMPAMP"],
-        ["||", "PIPEPIPE"],
-        ["<<", "SHL"],
-        [">>", "SHR"],
-        ["<<=", "SHLEQ"],
-        [">>=", "SHREQ"],
+        (".", "DOT"),
+        ("..", "DOTDOT"),
+        ("...", "DOTDOTDOT"),
+        ("..=", "DOTDOTEQ"),
+        (":", "COLON"),
+        ("::", "COLONCOLON"),
+        ("=", "EQ"),
+        ("==", "EQEQ"),
+        ("=>", "FAT_ARROW"),
+        ("!", "EXCL"),
+        ("!=", "NEQ"),
+        ("-", "MINUS"),
+        ("->", "THIN_ARROW"),
+        ("<=", "LTEQ"),
+        (">=", "GTEQ"),
+        ("+=", "PLUSEQ"),
+        ("-=", "MINUSEQ"),
+        ("|=", "PIPEEQ"),
+        ("&=", "AMPEQ"),
+        ("^=", "CARETEQ"),
+        ("/=", "SLASHEQ"),
+        ("*=", "STAREQ"),
+        ("%=", "PERCENTEQ"),
+        ("&&", "AMPAMP"),
+        ("||", "PIPEPIPE"),
+        ("<<", "SHL"),
+        (">>", "SHR"),
+        ("<<=", "SHLEQ"),
+        (">>=", "SHREQ"),
     ],
     keywords: [
         "async",
@@ -692,7 +692,7 @@ Grammar(
     "LifetimeArg": (),

     "MacroItems": (
         traits: [ "ModuleItemOwner", "FnDefOwner" ],
     ),

     "MacroStmts" : (

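Aside, not part of the commit: the only change in the token tables above is the bracket style; every pair flips from a RON list `[...]` to a RON tuple `(...)`, presumably so the data matches the `Vec<(String, String)>` fields of the `Grammar` struct introduced in the codegen diff below (ron expects parentheses rather than brackets when deserializing into a tuple type). A minimal round-trip sketch, with a made-up `TokenTable` wrapper standing in for the real struct:

use serde::Deserialize;

// Hypothetical wrapper mirroring just one token field of the real `Grammar`
// struct introduced in codegen.rs below.
#[derive(Deserialize, Debug)]
struct TokenTable {
    single_byte_tokens: Vec<(String, String)>,
}

fn main() {
    // Tuple syntax, as grammar.ron uses after this commit.
    let src = r#"TokenTable(
        single_byte_tokens: [
            (";", "SEMI"),
            (",", "COMMA"),
        ],
    )"#;
    let table: TokenTable = ron::de::from_str(src).unwrap();
    assert_eq!(table.single_byte_tokens[0].1, "SEMI");
    println!("{:#?}", table);
}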
View file

@@ -12,3 +12,4 @@ itertools = "0.8.0"
 clap = "2.32.0"
 quote = "1.0.2"
 ron = "0.5.1"
+serde = { version = "1.0.0", features = ["derive"] }

View file

@@ -1,6 +1,8 @@
-use std::{fs, path::Path};
+use std::{collections::BTreeMap, fs, path::Path};

+use quote::quote;
 use ron;
+use serde::Deserialize;

 use crate::{project_root, Mode, Result, AST, GRAMMAR};
@@ -12,10 +14,38 @@ pub fn generate(mode: Mode) -> Result<()> {
 }

 fn generate_ast(grammar_src: &Path, dst: &Path, mode: Mode) -> Result<()> {
-    let src: ron::Value = {
+    let src: Grammar = {
         let text = fs::read_to_string(grammar_src)?;
         ron::de::from_str(&text)?
     };
-    eprintln!("{:?}", src);
+    eprintln!("{:#?}", src);
     Ok(())
 }
+
+#[derive(Deserialize, Debug)]
+struct Grammar {
+    single_byte_tokens: Vec<(String, String)>,
+    multi_byte_tokens: Vec<(String, String)>,
+    keywords: Vec<String>,
+    contextual_keywords: Vec<String>,
+    literals: Vec<String>,
+    tokens: Vec<String>,
+    ast: BTreeMap<String, AstNode>,
+}
+
+#[derive(Deserialize, Debug)]
+struct AstNode {
+    #[serde(default)]
+    traits: Vec<String>,
+    #[serde(default)]
+    collections: Vec<Attr>,
+    #[serde(default)]
+    options: Vec<Attr>,
+}
+
+#[derive(Deserialize, Debug)]
+#[serde(untagged)]
+enum Attr {
+    Type(String),
+    NameType(String, String),
+}
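
Aside, not part of the commit: `#[serde(untagged)]` is what lets `Attr` accept the two spellings grammar.ron uses for a node attribute, either a bare type name or a (field name, type name) pair; serde tries the variants in order until one fits the value. A minimal sketch of the expected behaviour, using illustrative values rather than ones copied from grammar.ron:

use serde::Deserialize;

// Same shape as the `Attr` enum added above.
#[derive(Deserialize, Debug)]
#[serde(untagged)]
enum Attr {
    Type(String),
    NameType(String, String),
}

fn main() {
    // A bare string matches the `Type` variant...
    let short: Attr = ron::de::from_str(r#""Expr""#).unwrap();
    // ...while a two-element sequence matches `NameType`.
    let named: Attr = ron::de::from_str(r#"["statements", "Stmt"]"#).unwrap();
    println!("{:?} / {:?}", short, named);
}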