rust-analyzer/crates/ra_syntax/src/validation.rs

mod unescape;
mod block;
mod field_expr;
use crate::{
    algo::visit::{visitor_ctx, VisitorCtx},
    ast, SyntaxError,
    SyntaxKind::{BYTE, BYTE_STRING, CHAR, STRING},
    SyntaxNode, TextUnit, T,
};
pub(crate) use unescape::EscapeError;

/// Walks the syntax tree and collects validation errors: invalid escape
/// sequences in literals, plus the block and field-expression checks that live
/// in the submodules.
pub(crate) fn validate(root: &SyntaxNode) -> Vec<SyntaxError> {
    let mut errors = Vec::new();
    for node in root.descendants() {
        let _ = visitor_ctx(&mut errors)
            .visit::<ast::Literal, _>(validate_literal)
            .visit::<ast::Block, _>(block::validate_block_node)
            .visit::<ast::FieldExpr, _>(field_expr::validate_field_expr_node)
            .accept(&node);
    }
    errors
}
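
// Illustrative usage sketch (not part of the original source): callers normally go
// through the crate's parsing entry point rather than calling `validate` directly.
// The names used below (`SourceFile::parse`, `errors()`) are assumptions about the
// surrounding crate, shown only to make the data flow concrete:
//
//     let parse = SourceFile::parse(r"const C: char = '\x';");
//     assert!(!parse.errors().is_empty()); // the bad `\x` escape is reported here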

// FIXME: kill duplication
fn validate_literal(literal: ast::Literal, acc: &mut Vec<SyntaxError>) {
    let token = literal.token();
    let text = token.text().as_str();
    // Strip the literal's delimiters, run the `unescape` checks on the inner text,
    // and map any error offset back into the file by re-adding the delimiter length.
    match token.kind() {
        BYTE => {
            if let Some(end) = text.rfind('\'') {
                if let Some(without_quotes) = text.get(2..end) {
                    if let Err((off, err)) = unescape::unescape_byte(without_quotes) {
                        let off = token.text_range().start() + TextUnit::from_usize(off + 2);
                        acc.push(SyntaxError::new(err.into(), off))
                    }
                }
            }
        }
        CHAR => {
            if let Some(end) = text.rfind('\'') {
                if let Some(without_quotes) = text.get(1..end) {
                    if let Err((off, err)) = unescape::unescape_char(without_quotes) {
                        let off = token.text_range().start() + TextUnit::from_usize(off + 1);
                        acc.push(SyntaxError::new(err.into(), off))
                    }
                }
            }
        }
        BYTE_STRING => {
            if let Some(end) = text.rfind('\"') {
                if let Some(without_quotes) = text.get(2..end) {
                    unescape::unescape_byte_str(without_quotes, &mut |range, char| {
                        if let Err(err) = char {
                            let off = range.start;
                            let off = token.text_range().start() + TextUnit::from_usize(off + 2);
                            acc.push(SyntaxError::new(err.into(), off))
                        }
                    })
                }
            }
        }
        STRING => {
            if let Some(end) = text.rfind('\"') {
                if let Some(without_quotes) = text.get(1..end) {
                    unescape::unescape_str(without_quotes, &mut |range, char| {
                        if let Err(err) = char {
                            let off = range.start;
                            let off = token.text_range().start() + TextUnit::from_usize(off + 1);
                            acc.push(SyntaxError::new(err.into(), off))
                        }
                    })
                }
            }
        }
        _ => (),
    }
}
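
// Worked example (added for illustration; not part of the original source): for a
// byte literal like `b'\z'`, the text handed to `unescape_byte` starts after the
// two-character prefix `b'`, so an error reported at inner offset `off` is pushed
// at `token.text_range().start() + off + 2`, i.e. it points back into the literal
// in the original file. The char and string arms do the same with a one-character
// prefix (`'` or `"`).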

/// Sanity check: matched `{`/`}` tokens must share a parent node and sit as that
/// node's first and last children.
pub(crate) fn validate_block_structure(root: &SyntaxNode) {
    let mut stack = Vec::new();
    for node in root.descendants() {
        match node.kind() {
            T!['{'] => stack.push(node),
            T!['}'] => {
                if let Some(pair) = stack.pop() {
                    assert_eq!(
                        node.parent(),
                        pair.parent(),
                        "\nunpaired curlies:\n{}\n{:#?}\n",
                        root.text(),
                        root,
                    );
                    assert!(
                        node.next_sibling().is_none() && pair.prev_sibling().is_none(),
                        "\nfloating curlies at {:?}\nfile:\n{}\nerror:\n{}\n",
                        node,
                        root.text(),
                        node.text(),
                    );
                }
            }
            _ => (),
        }
    }
}