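//! Fixes up syntax errors in a syntax tree so that the corresponding token tree
//! still parses, while recording enough information to undo the fixups again
//! later (see `reverse_fixups`).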
use mbe::{SyntheticToken, SyntheticTokenId, TokenMap};
use rustc_hash::FxHashMap;
use syntax::{
    ast::{self, AstNode},
    match_ast, SyntaxKind, SyntaxNode, TextRange,
};
use tt::Subtree;
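
/// The fixups to apply to a syntax tree: synthetic tokens to append after
/// certain nodes, nodes to replace wholesale, plus the data needed to reverse
/// the changes afterwards.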
#[derive(Debug)]
pub struct SyntaxFixups {
    pub append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
    pub replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
    pub map: SyntaxFixupMap,
}
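
/// Records the original token trees (and their token maps) of the nodes that
/// were replaced, indexed by the replacement token's `SyntheticTokenId`.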
#[derive(Debug, PartialEq, Eq)]
pub struct SyntaxFixupMap {
    original: Vec<(Subtree, TokenMap)>,
}
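
// Sentinel id for synthetic tokens that are pure padding (appended idents and
// semicolons); `reverse_fixups` simply drops these again.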
const EMPTY_ID: SyntheticTokenId = SyntheticTokenId(!0);
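
/// Computes the fixups needed to make `node` convert to a parseable token tree:
/// broken expressions are replaced by a `__ra_fixup` placeholder ident, and
/// missing tokens (e.g. a field name or a trailing semicolon) are appended.
///
/// For example, `fn foo() { a. }` is fixed up to `fn foo () {a . __ra_fixup}`
/// (see the tests below).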
pub fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
    let mut append = FxHashMap::default();
    let mut replace = FxHashMap::default();
    let mut preorder = node.preorder();
    let mut original = Vec::new();
    while let Some(event) = preorder.next() {
        let node = match event {
            syntax::WalkEvent::Enter(node) => node,
            syntax::WalkEvent::Leave(_) => continue,
        };
        if can_handle_error(&node) && has_error_to_handle(&node) {
            // the node contains an error node, we have to completely replace it by something valid
            let original_tree = mbe::syntax_node_to_token_tree(&node);
            // TODO handle token ids / token map
            let idx = original.len() as u32;
            original.push(original_tree);
            let replacement = SyntheticToken {
                kind: SyntaxKind::IDENT,
                text: "__ra_fixup".into(),
                range: node.text_range(),
                id: SyntheticTokenId(idx),
            };
            replace.insert(node.clone(), vec![replacement]);
            preorder.skip_subtree();
            continue;
        }
        let end_range = TextRange::empty(node.text_range().end());
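        // the node itself is fine (or not replaceable); check for specific
        // incomplete constructs that can be fixed by appending tokens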
        match_ast! {
            match node {
                ast::FieldExpr(it) => {
                    if it.name_ref().is_none() {
                        // incomplete field access: some_expr.|
                        append.insert(node.clone(), vec![
                            SyntheticToken {
                                kind: SyntaxKind::IDENT,
                                text: "__ra_fixup".into(),
                                range: end_range,
                                id: EMPTY_ID,
                            },
                        ]);
                    }
                },
                ast::ExprStmt(it) => {
                    if it.semicolon_token().is_none() {
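                        // expression statement without a trailing semicolon: append one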
                        append.insert(node.clone(), vec![
                            SyntheticToken {
                                kind: SyntaxKind::SEMICOLON,
                                text: ";".into(),
                                range: end_range,
                                id: EMPTY_ID,
                            },
                        ]);
                    }
                },
                _ => (),
            }
        }
    }
    SyntaxFixups { append, replace, map: SyntaxFixupMap { original } }
}

fn has_error(node: &SyntaxNode) -> bool {
    node.children().any(|c| c.kind() == SyntaxKind::ERROR)
}
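
// We can only replace whole expressions with the placeholder, so only
// expression nodes are candidates for wholesale replacement.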
fn can_handle_error(node: &SyntaxNode) -> bool {
    ast::Expr::can_cast(node.kind())
}
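
// Is there an error in this node or in a descendant that won't already be
// handled by replacing a smaller expression?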
fn has_error_to_handle(node: &SyntaxNode) -> bool {
    has_error(node) || node.children().any(|c| !can_handle_error(&c) && has_error_to_handle(&c))
}
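
/// Undoes the fixups applied by `fixup_syntax`: removes the padding tokens and
/// splices the original token trees back in place of the placeholders.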
pub fn reverse_fixups(tt: &mut Subtree, token_map: &TokenMap, fixup_map: &SyntaxFixupMap) {
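    // drop the synthetic padding tokens (those with EMPTY_ID)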
    tt.token_trees.retain(|tt| match tt {
        tt::TokenTree::Leaf(leaf) => {
            token_map.synthetic_token_id(leaf.id()).is_none()
                || token_map.synthetic_token_id(leaf.id()) != Some(EMPTY_ID)
        }
        _ => true,
    });
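    // replace each remaining synthetic token (a placeholder for a broken node)
    // with the original token tree it stands for, and recurse into subtrees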
    tt.token_trees.iter_mut().for_each(|tt| match tt {
        tt::TokenTree::Subtree(tt) => reverse_fixups(tt, token_map, fixup_map),
        tt::TokenTree::Leaf(leaf) => {
            if let Some(id) = token_map.synthetic_token_id(leaf.id()) {
                let (original, _original_tmap) = &fixup_map.original[id.0 as usize];
                *tt = tt::TokenTree::Subtree(original.clone());
            }
        }
    });
}

#[cfg(test)]
mod tests {
    use expect_test::{expect, Expect};

    use super::reverse_fixups;
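
    // Parses the fixture, applies the fixups, compares the fixed-up token tree
    // against `expect`, checks that it parses without errors, and finally
    // verifies that reversing the fixups yields the original token tree again.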
    #[track_caller]
    fn check(ra_fixture: &str, mut expect: Expect) {
        let parsed = syntax::SourceFile::parse(ra_fixture);
        eprintln!("parse: {:#?}", parsed.syntax_node());
        let fixups = super::fixup_syntax(&parsed.syntax_node());
        let (mut tt, tmap) = mbe::syntax_node_to_token_tree_with_modifications(
            &parsed.syntax_node(),
            fixups.replace,
            fixups.append,
        );

        let mut actual = tt.to_string();
        actual.push_str("\n");

        expect.indent(false);
        expect.assert_eq(&actual);

        // the fixed-up tree should be syntactically valid
        let (parse, _) = mbe::token_tree_to_syntax_node(&tt, ::mbe::TopEntryPoint::MacroItems);
        assert_eq!(
            parse.errors(),
            &[],
            "parse has syntax errors. parse tree:\n{:#?}",
            parse.syntax_node()
        );

        reverse_fixups(&mut tt, &tmap, &fixups.map);

        // the fixed-up + reversed version should be equivalent to the original input
        // (but token IDs don't matter)
        let (original_as_tt, _) = mbe::syntax_node_to_token_tree(&parsed.syntax_node());
        assert_eq!(tt.to_string(), original_as_tt.to_string());
    }

    #[test]
    fn incomplete_field_expr_1() {
        check(
            r#"
fn foo() {
    a.
}
"#,
            expect![[r#"
fn foo () {a . __ra_fixup}
"#]],
        )
    }

    #[test]
    fn incomplete_field_expr_2() {
        check(
            r#"
fn foo() {
    a. ;
}
"#,
            expect![[r#"
fn foo () {a . __ra_fixup ;}
"#]],
        )
    }

    #[test]
    fn incomplete_field_expr_3() {
        check(
            r#"
fn foo() {
    a. ;
    bar();
}
"#,
            expect![[r#"
fn foo () {a . __ra_fixup ; bar () ;}
"#]],
        )
    }

    #[test]
    fn field_expr_before_call() {
        // another case that easily happens while typing
        check(
            r#"
fn foo() {
    a.b
    bar();
}
"#,
            expect![[r#"
fn foo () {a . b ; bar () ;}
"#]],
        )
    }

    #[test]
    fn extraneous_comma() {
        check(
            r#"
fn foo() {
    bar(,);
}
"#,
            expect![[r#"
fn foo () {__ra_fixup ;}
"#]],
        )
    }
}