Auto merge of #12775 - flodiebold:syntax-fixup-if, r=flodiebold

fix: Improve syntax fixup a bit, handle incomplete `if`

- allow appending tokens after a token, not just a node
- allow inserting delimiters (and remove them again)
- fix up `if {}` and `if` without anything following
bors 2022-07-16 11:05:19 +00:00
commit 3d7da510f9
3 changed files with 120 additions and 34 deletions
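To illustrate what the bullets above describe: for an incomplete `if`, the fixup pass appends a placeholder `__ra_fixup` condition after the `if` token and a synthetic `{ }` block, all tagged with `EMPTY_ID`, so that the undo step can later strip both the synthetic tokens and any subtree whose delimiters were inserted this way. Below is a minimal, self-contained sketch of that round trip; the `Tok`, `fixup_bare_if`, and `undo_fixups` names are made up for illustration and are not rust-analyzer's actual `SyntheticToken`/`tt::Subtree` machinery (see the real diff below).

```rust
// Illustrative sketch only: simplified stand-ins for the fixup machinery
// touched in this commit, not rust-analyzer's actual types.
#[derive(Debug, PartialEq)]
enum Tok {
    // `synthetic` marks tokens inserted by the fixup pass (tagged EMPTY_ID in the real code).
    Leaf { text: String, synthetic: bool },
    // A delimited group; `synthetic_delim` marks `{ }` delimiters inserted by the fixup pass.
    Group { children: Vec<Tok>, synthetic_delim: bool },
}

// Fix up a bare `if` (no condition, no then-branch): append a placeholder
// condition and an empty `{ }` body, both marked as synthetic.
fn fixup_bare_if() -> Vec<Tok> {
    vec![
        Tok::Leaf { text: "if".into(), synthetic: false },
        Tok::Leaf { text: "__ra_fixup".into(), synthetic: true },
        Tok::Group { children: Vec::new(), synthetic_delim: true },
    ]
}

// Undo the fixups after expansion: drop synthetic leaves and any group whose
// delimiters were inserted by the fixup pass, then recurse into what remains.
fn undo_fixups(tokens: &mut Vec<Tok>) {
    tokens.retain(|t| match t {
        Tok::Leaf { synthetic, .. } => !synthetic,
        Tok::Group { synthetic_delim, .. } => !synthetic_delim,
    });
    for t in tokens.iter_mut() {
        if let Tok::Group { children, .. } = t {
            undo_fixups(children);
        }
    }
}

fn main() {
    let mut toks = fixup_bare_if();
    assert_eq!(toks.len(), 3); // macro expansion sees `if __ra_fixup { }`
    undo_fixups(&mut toks);
    // Only the original `if` token survives the undo step.
    assert_eq!(toks, vec![Tok::Leaf { text: "if".into(), synthetic: false }]);
}
```

In the commit itself, the same idea is carried by keying the `append`/`replace` maps on `SyntaxElement` instead of `SyntaxNode` (so synthetic tokens can be appended after a plain token such as `if`), and by recording a `SyntheticTokenId` when a delimiter is opened so that `reverse_fixups` can recognize the inserted `{ }` and drop it again.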

View file

@@ -150,7 +150,7 @@ pub fn expand_speculative(
// Build the subtree and token mapping for the speculative args
let censor = censor_for_macro_input(&loc, speculative_args);
let mut fixups = fixup::fixup_syntax(speculative_args);
-fixups.replace.extend(censor.into_iter().map(|node| (node, Vec::new())));
+fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
let (mut tt, spec_args_tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
speculative_args,
fixups.token_map,
@@ -295,7 +295,7 @@ fn macro_arg(
let node = SyntaxNode::new_root(arg);
let censor = censor_for_macro_input(&loc, &node);
let mut fixups = fixup::fixup_syntax(&node);
-fixups.replace.extend(censor.into_iter().map(|node| (node, Vec::new())));
+fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
&node,
fixups.token_map,

View file

@@ -6,7 +6,7 @@ use mbe::{SyntheticToken, SyntheticTokenId, TokenMap};
use rustc_hash::FxHashMap;
use syntax::{
ast::{self, AstNode},
-match_ast, SyntaxKind, SyntaxNode, TextRange,
+match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange,
};
use tt::Subtree;
@@ -15,8 +15,8 @@ use tt::Subtree;
/// reverse those changes afterwards, and a token map.
#[derive(Debug)]
pub(crate) struct SyntaxFixups {
-pub(crate) append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
-pub(crate) replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+pub(crate) append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+pub(crate) replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
pub(crate) undo_info: SyntaxFixupUndoInfo,
pub(crate) token_map: TokenMap,
pub(crate) next_id: u32,
@@ -31,8 +31,8 @@ pub struct SyntaxFixupUndoInfo {
const EMPTY_ID: SyntheticTokenId = SyntheticTokenId(!0);
pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
-let mut append = FxHashMap::default();
-let mut replace = FxHashMap::default();
+let mut append = FxHashMap::<SyntaxElement, _>::default();
+let mut replace = FxHashMap::<SyntaxElement, _>::default();
let mut preorder = node.preorder();
let mut original = Vec::new();
let mut token_map = TokenMap::default();
@@ -63,7 +63,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
range: node.text_range(),
id: SyntheticTokenId(idx),
};
-replace.insert(node.clone(), vec![replacement]);
+replace.insert(node.clone().into(), vec![replacement]);
preorder.skip_subtree();
continue;
}
@@ -75,7 +75,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
ast::FieldExpr(it) => {
if it.name_ref().is_none() {
// incomplete field access: some_expr.|
-append.insert(node.clone(), vec![
+append.insert(node.clone().into(), vec![
SyntheticToken {
kind: SyntaxKind::IDENT,
text: "__ra_fixup".into(),
@@ -87,7 +87,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
},
ast::ExprStmt(it) => {
if it.semicolon_token().is_none() {
-append.insert(node.clone(), vec![
+append.insert(node.clone().into(), vec![
SyntheticToken {
kind: SyntaxKind::SEMICOLON,
text: ";".into(),
@@ -99,7 +99,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
},
ast::LetStmt(it) => {
if it.semicolon_token().is_none() {
-append.insert(node.clone(), vec![
+append.insert(node.clone().into(), vec![
SyntheticToken {
kind: SyntaxKind::SEMICOLON,
text: ";".into(),
@@ -109,6 +109,41 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
]);
}
},
+ast::IfExpr(it) => {
+if it.condition().is_none() {
+// insert placeholder token after the if token
+let if_token = match it.if_token() {
+Some(t) => t,
+None => continue,
+};
+append.insert(if_token.into(), vec![
+SyntheticToken {
+kind: SyntaxKind::IDENT,
+text: "__ra_fixup".into(),
+range: end_range,
+id: EMPTY_ID,
+},
+]);
+}
+if it.then_branch().is_none() {
+append.insert(node.clone().into(), vec![
+SyntheticToken {
+kind: SyntaxKind::L_CURLY,
+text: "{".into(),
+range: end_range,
+id: EMPTY_ID,
+},
+SyntheticToken {
+kind: SyntaxKind::R_CURLY,
+text: "}".into(),
+range: end_range,
+id: EMPTY_ID,
+},
+]);
+}
+},
// FIXME: foo::
// FIXME: for, loop, match etc.
_ => (),
}
}
@@ -144,7 +179,10 @@ pub(crate) fn reverse_fixups(
token_map.synthetic_token_id(leaf.id()).is_none()
|| token_map.synthetic_token_id(leaf.id()) != Some(EMPTY_ID)
}
-_ => true,
+tt::TokenTree::Subtree(st) => st.delimiter.map_or(true, |d| {
+token_map.synthetic_token_id(d.id).is_none()
+|| token_map.synthetic_token_id(d.id) != Some(EMPTY_ID)
+}),
});
tt.token_trees.iter_mut().for_each(|tt| match tt {
tt::TokenTree::Subtree(tt) => reverse_fixups(tt, token_map, undo_info),
@@ -295,6 +333,49 @@ fn foo() {
"#,
expect![[r#"
fn foo () {__ra_fixup ;}
"#]],
)
}
+#[test]
+fn fixup_if_1() {
+check(
+r#"
+fn foo() {
+if a
+}
+"#,
+expect![[r#"
+fn foo () {if a {}}
+"#]],
+)
+}
+#[test]
+fn fixup_if_2() {
+check(
+r#"
+fn foo() {
+if
+}
+"#,
+expect![[r#"
+fn foo () {if __ra_fixup {}}
+"#]],
+)
+}
+#[test]
+fn fixup_if_3() {
+check(
+r#"
+fn foo() {
+if {}
+}
+"#,
+// the {} gets parsed as the condition, I think?
+expect![[r#"
+fn foo () {if {} {}}
+"#]],
+)
+}

View file

@@ -31,8 +31,8 @@ pub fn syntax_node_to_token_tree_with_modifications(
node: &SyntaxNode,
existing_token_map: TokenMap,
next_id: u32,
-replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
-append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
) -> (tt::Subtree, TokenMap, u32) {
let global_offset = node.text_range().start();
let mut c = Convertor::new(node, global_offset, existing_token_map, next_id, replace, append);
@@ -221,7 +221,7 @@ fn convert_tokens<C: TokenConvertor>(conv: &mut C) -> tt::Subtree {
if let Some(kind) = delim {
let mut subtree = tt::Subtree::default();
-let (id, idx) = conv.id_alloc().open_delim(range);
+let (id, idx) = conv.id_alloc().open_delim(range, synth_id);
subtree.delimiter = Some(tt::Delimiter { id, kind });
stack.push(StackEntry { subtree, idx, open_range: range });
continue;
@@ -404,7 +404,11 @@ impl TokenIdAlloc {
token_id
}
-fn open_delim(&mut self, open_abs_range: TextRange) -> (tt::TokenId, usize) {
+fn open_delim(
+&mut self,
+open_abs_range: TextRange,
+synthetic_id: Option<SyntheticTokenId>,
+) -> (tt::TokenId, usize) {
let token_id = tt::TokenId(self.next_id);
self.next_id += 1;
let idx = self.map.insert_delim(
@@ -412,6 +416,9 @@ impl TokenIdAlloc {
open_abs_range - self.global_offset,
open_abs_range - self.global_offset,
);
+if let Some(id) = synthetic_id {
+self.map.insert_synthetic(token_id, id);
+}
(token_id, idx)
}
@@ -511,8 +518,8 @@ struct Convertor {
current: Option<SyntaxToken>,
current_synthetic: Vec<SyntheticToken>,
preorder: PreorderWithTokens,
-replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
-append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
range: TextRange,
punct_offset: Option<(SyntaxToken, TextSize)>,
}
@@ -523,8 +530,8 @@ impl Convertor {
global_offset: TextSize,
existing_token_map: TokenMap,
next_id: u32,
-mut replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
-mut append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+mut replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+mut append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
) -> Convertor {
let range = node.text_range();
let mut preorder = node.preorder_with_tokens();
@@ -543,14 +550,14 @@
fn next_token(
preorder: &mut PreorderWithTokens,
-replace: &mut FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
-append: &mut FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+replace: &mut FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+append: &mut FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
) -> (Option<SyntaxToken>, Vec<SyntheticToken>) {
while let Some(ev) = preorder.next() {
let ele = match ev {
WalkEvent::Enter(ele) => ele,
-WalkEvent::Leave(SyntaxElement::Node(node)) => {
-if let Some(mut v) = append.remove(&node) {
+WalkEvent::Leave(ele) => {
+if let Some(mut v) = append.remove(&ele) {
if !v.is_empty() {
v.reverse();
return (None, v);
@@ -558,19 +565,17 @@ impl Convertor {
}
continue;
}
-_ => continue,
};
+if let Some(mut v) = replace.remove(&ele) {
+preorder.skip_subtree();
+if !v.is_empty() {
+v.reverse();
+return (None, v);
+}
+}
match ele {
SyntaxElement::Token(t) => return (Some(t), Vec::new()),
-SyntaxElement::Node(node) => {
-if let Some(mut v) = replace.remove(&node) {
-preorder.skip_subtree();
-if !v.is_empty() {
-v.reverse();
-return (None, v);
-}
-}
-}
+_ => {}
}
}
(None, Vec::new())