Mirror of https://github.com/rust-lang/rust-analyzer (synced 2024-12-26 21:13:37 +00:00)
Commit 3c70ae2e26: 9 changed files with 75 additions and 30 deletions
.cargo/config (new file, 3 lines)

@@ -0,0 +1,3 @@
+[alias]
+parse = "run --package tools --bin parse"
+gen = "run --package tools --bin gen"
@@ -4,13 +4,11 @@ version = "0.1.0"
 authors = ["Aleksey Kladov <aleksey.kladov@gmail.com>"]
 license = "MIT OR Apache-2.0"
 
+[workspace]
+members = [ "tools" ]
+
 [dependencies]
 unicode-xid = "0.1.0"
 
-serde = "1.0.26"
-serde_derive = "1.0.26"
-file = "1.1.1"
-ron = "0.1.5"
-
 [dev-dependencies]
 testutils = { path = "./tests/testutils" }
docs/TOOLS.md (new file, 30 lines)

@@ -0,0 +1,30 @@
+# Tools used to implement libsyntax
+
+libsyntax uses several tools to help with development.
+
+Each tool is a binary in the [tools/](../tools) package.
+You can run them via `cargo run` command.
+
+```
+cargo run --package tools --bin tool
+```
+
+There are also aliases in [./cargo/config](../.cargo/config),
+so the following also works:
+
+```
+cargo tool
+```
+
+
+# Tool: `gen`
+
+This tool reads a "grammar" from [grammar.ron](../grammar.ron) and
+generates the `syntax_kinds.rs` file. You should run this tool if you
+add new keywords or syntax elements.
+
+
+# Tool: 'parse'
+
+This tool reads rust source code from the standard input, parses it,
+and prints the result to stdout.
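The `parse` tool described above is a plain stdin-to-stdout filter: read all of standard input, hand the text to the parser, print the result. A minimal sketch of that shape follows; `parse_to_debug_string` is a hypothetical stand-in for whatever entry point libsyntax2 actually exposes, which this diff does not show.

```rust
// Sketch of a stdin -> stdout filter in the shape of the `parse` tool.
use std::io::{self, Read};

// Hypothetical placeholder: a real tool would call into libsyntax2 here
// and render the parse tree; the crate's API is not part of this diff.
fn parse_to_debug_string(text: &str) -> String {
    format!("read {} bytes of Rust source\n", text.len())
}

fn main() {
    let mut text = String::new();
    io::stdin()
        .read_to_string(&mut text)
        .expect("failed to read stdin");
    print!("{}", parse_to_debug_string(&text));
}
```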
@@ -1,4 +1,3 @@
-extern crate file;
 extern crate libsyntax2;
 extern crate testutils;
 

@@ -1,4 +1,3 @@
-extern crate file;
 extern crate libsyntax2;
 extern crate testutils;
 
@@ -1,7 +1,7 @@
 extern crate difference;
 extern crate file;
 
-use std::path::{PathBuf, Path};
+use std::path::{Path, PathBuf};
 use std::fs::read_dir;
 
 use difference::Changeset;
@@ -21,12 +21,9 @@ fn read_text(path: &Path) -> String {
     file::get_text(path).unwrap().replace("\r\n", "\n")
 }
 
-pub fn dir_tests<F>(
-    paths: &[&str],
-    f: F
-)
+pub fn dir_tests<F>(paths: &[&str], f: F)
 where
-    F: Fn(&str) -> String
+    F: Fn(&str) -> String,
 {
     for path in collect_tests(paths) {
         let actual = {
@@ -47,21 +44,20 @@ where
     }
 }
 
-fn assert_equal_text(
-    expected: &str,
-    actual: &str,
-    path: &Path
-) {
+fn assert_equal_text(expected: &str, actual: &str, path: &Path) {
     if expected != actual {
         print_difference(expected, actual, path)
     }
 }
 
 fn collect_tests(paths: &[&str]) -> Vec<PathBuf> {
-    paths.iter().flat_map(|path| {
+    paths
+        .iter()
+        .flat_map(|path| {
            let path = test_data_dir().join(path);
            test_from_dir(&path).into_iter()
-    }).collect()
+        })
+        .collect()
 }
 
 fn test_from_dir(dir: &Path) -> Vec<PathBuf> {
@@ -95,8 +91,10 @@ fn print_difference(expected: &str, actual: &str, path: &Path) {
 fn project_dir() -> PathBuf {
     let dir = env!("CARGO_MANIFEST_DIR");
     PathBuf::from(dir)
-        .parent().unwrap()
-        .parent().unwrap()
+        .parent()
+        .unwrap()
+        .parent()
+        .unwrap()
         .to_owned()
 }
 
tools/Cargo.toml (new file, 12 lines)

@@ -0,0 +1,12 @@
+[package]
+name = "tools"
+version = "0.1.0"
+authors = ["Aleksey Kladov <aleksey.kladov@gmail.com>"]
+publish = false
+
+[dependencies]
+serde = "1.0.26"
+serde_derive = "1.0.26"
+file = "1.1.1"
+ron = "0.1.5"
+libsyntax2 = { path = "../" }
@@ -11,7 +11,10 @@ use std::fmt::Write;
 fn main() {
     let grammar = Grammar::read();
     let text = grammar.to_syntax_kinds();
-    file::put_text(&generated_file(), &text).unwrap();
+    let target = generated_file();
+    if text != file::get_text(&target).unwrap_or_default() {
+        file::put_text(&target, &text).unwrap();
+    }
 }
 
 #[derive(Deserialize)]
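The change to `main` above swaps an unconditional write for a compare-before-write: the generated text is only written back when it differs from what is already on disk, presumably so an unchanged `syntax_kinds.rs` is not rewritten needlessly. A small self-contained sketch of the same pattern using only the standard library is below; the helper name and paths are illustrative, not part of the diff.

```rust
// Compare-before-write, mirroring the `get_text`/`put_text` logic above
// but using only std; names and paths here are illustrative.
use std::fs;
use std::io;
use std::path::Path;

fn update_if_changed(path: &Path, contents: &str) -> io::Result<()> {
    // A missing or unreadable file counts as "different", like the
    // `unwrap_or_default()` in the diff.
    let old = fs::read_to_string(path).unwrap_or_default();
    if old != contents {
        fs::write(path, contents)?;
    }
    Ok(())
}

fn main() -> io::Result<()> {
    update_if_changed(Path::new("src/syntax_kinds.rs"), "// generated\n")
}
```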
@@ -94,13 +97,11 @@ impl Grammar {
 }
 
 fn grammar_file() -> PathBuf {
-    let dir = env!("CARGO_MANIFEST_DIR");
-    PathBuf::from(dir).join("grammar.ron")
+    base_dir().join("grammar.ron")
 }
 
 fn generated_file() -> PathBuf {
-    let dir = env!("CARGO_MANIFEST_DIR");
-    PathBuf::from(dir).join("src/syntax_kinds.rs")
+    base_dir().join("src/syntax_kinds.rs")
 }
 
 fn scream(word: &str) -> String {
@@ -110,3 +111,8 @@ fn scream(word: &str) -> String {
 fn kw_token(keyword: &str) -> String {
     format!("{}_KW", scream(keyword))
 }
+
+fn base_dir() -> PathBuf {
+    let dir = env!("CARGO_MANIFEST_DIR");
+    PathBuf::from(dir).parent().unwrap().to_owned()
+}
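`base_dir` here and `project_dir` in testutils use the same trick: `env!("CARGO_MANIFEST_DIR")` captures the crate's directory at compile time, and `.parent()` steps climb from there to the repository root. A small illustration is below; only `grammar.ron` comes from the diff, the function name and printout are illustrative.

```rust
use std::path::PathBuf;

// Resolve the workspace root from a member crate, in the spirit of
// `base_dir` above: CARGO_MANIFEST_DIR is this crate's directory at
// compile time, and one `parent()` steps up to the workspace.
fn workspace_root() -> PathBuf {
    PathBuf::from(env!("CARGO_MANIFEST_DIR"))
        .parent()
        .expect("crate directory has a parent")
        .to_owned()
}

fn main() {
    // e.g. <workspace>/grammar.ron, the file the `gen` tool reads.
    println!("{}", workspace_root().join("grammar.ron").display());
}
```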