From 0a4065d12df88409da6767c398e6c837f53844d6 Mon Sep 17 00:00:00 2001
From: Florian Diebold
Date: Sat, 16 Jul 2022 12:38:33 +0200
Subject: [PATCH] Improve syntax fixup a bit, handle incomplete `if`

- allow appending tokens after a token, not just a node
- allow inserting delimiters (and remove them again)
- fix up `if {}` and `if` without anything following
---
 crates/hir-expand/src/db.rs     |   4 +-
 crates/hir-expand/src/fixup.rs  | 101 ++++++++++++++++++++++++++++----
 crates/mbe/src/syntax_bridge.rs |  49 +++++++++-------
 3 files changed, 120 insertions(+), 34 deletions(-)

diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs
index 8c08fdcd2e..bd60c3d268 100644
--- a/crates/hir-expand/src/db.rs
+++ b/crates/hir-expand/src/db.rs
@@ -150,7 +150,7 @@ pub fn expand_speculative(
     // Build the subtree and token mapping for the speculative args
     let censor = censor_for_macro_input(&loc, speculative_args);
     let mut fixups = fixup::fixup_syntax(speculative_args);
-    fixups.replace.extend(censor.into_iter().map(|node| (node, Vec::new())));
+    fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
     let (mut tt, spec_args_tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
         speculative_args,
         fixups.token_map,
@@ -295,7 +295,7 @@ fn macro_arg(
     let node = SyntaxNode::new_root(arg);
     let censor = censor_for_macro_input(&loc, &node);
     let mut fixups = fixup::fixup_syntax(&node);
-    fixups.replace.extend(censor.into_iter().map(|node| (node, Vec::new())));
+    fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
     let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
         &node,
         fixups.token_map,
diff --git a/crates/hir-expand/src/fixup.rs b/crates/hir-expand/src/fixup.rs
index c924478cec..9999790fae 100644
--- a/crates/hir-expand/src/fixup.rs
+++ b/crates/hir-expand/src/fixup.rs
@@ -6,7 +6,7 @@ use mbe::{SyntheticToken, SyntheticTokenId, TokenMap};
 use rustc_hash::FxHashMap;
 use syntax::{
     ast::{self, AstNode},
-    match_ast, SyntaxKind, SyntaxNode, TextRange,
+    match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange,
 };
 use tt::Subtree;
 
@@ -15,8 +15,8 @@
 /// reverse those changes afterwards, and a token map.
 #[derive(Debug)]
 pub(crate) struct SyntaxFixups {
-    pub(crate) append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
-    pub(crate) replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+    pub(crate) append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+    pub(crate) replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
     pub(crate) undo_info: SyntaxFixupUndoInfo,
     pub(crate) token_map: TokenMap,
     pub(crate) next_id: u32,
@@ -31,8 +31,8 @@ pub struct SyntaxFixupUndoInfo {
 const EMPTY_ID: SyntheticTokenId = SyntheticTokenId(!0);
 
 pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
-    let mut append = FxHashMap::default();
-    let mut replace = FxHashMap::default();
+    let mut append = FxHashMap::<SyntaxElement, _>::default();
+    let mut replace = FxHashMap::<SyntaxElement, _>::default();
     let mut preorder = node.preorder();
     let mut original = Vec::new();
     let mut token_map = TokenMap::default();
@@ -63,7 +63,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
                 range: node.text_range(),
                 id: SyntheticTokenId(idx),
             };
-            replace.insert(node.clone(), vec![replacement]);
+            replace.insert(node.clone().into(), vec![replacement]);
             preorder.skip_subtree();
             continue;
         }
@@ -75,7 +75,7 @@
                 ast::FieldExpr(it) => {
                     if it.name_ref().is_none() {
                         // incomplete field access: some_expr.|
-                        append.insert(node.clone(), vec![
+                        append.insert(node.clone().into(), vec![
                             SyntheticToken {
                                 kind: SyntaxKind::IDENT,
                                 text: "__ra_fixup".into(),
@@ -87,7 +87,7 @@
                 },
                 ast::ExprStmt(it) => {
                     if it.semicolon_token().is_none() {
-                        append.insert(node.clone(), vec![
+                        append.insert(node.clone().into(), vec![
                             SyntheticToken {
                                 kind: SyntaxKind::SEMICOLON,
                                 text: ";".into(),
@@ -99,7 +99,7 @@
                 },
                 ast::LetStmt(it) => {
                     if it.semicolon_token().is_none() {
-                        append.insert(node.clone(), vec![
+                        append.insert(node.clone().into(), vec![
                             SyntheticToken {
                                 kind: SyntaxKind::SEMICOLON,
                                 text: ";".into(),
@@ -109,6 +109,41 @@
                         ]);
                     }
                 },
+                ast::IfExpr(it) => {
+                    if it.condition().is_none() {
+                        // insert placeholder token after the if token
+                        let if_token = match it.if_token() {
+                            Some(t) => t,
+                            None => continue,
+                        };
+                        append.insert(if_token.into(), vec![
+                            SyntheticToken {
+                                kind: SyntaxKind::IDENT,
+                                text: "__ra_fixup".into(),
+                                range: end_range,
+                                id: EMPTY_ID,
+                            },
+                        ]);
+                    }
+                    if it.then_branch().is_none() {
+                        append.insert(node.clone().into(), vec![
+                            SyntheticToken {
+                                kind: SyntaxKind::L_CURLY,
+                                text: "{".into(),
+                                range: end_range,
+                                id: EMPTY_ID,
+                            },
+                            SyntheticToken {
+                                kind: SyntaxKind::R_CURLY,
+                                text: "}".into(),
+                                range: end_range,
+                                id: EMPTY_ID,
+                            },
+                        ]);
+                    }
+                },
+                // FIXME: foo::
+                // FIXME: for, loop, match etc.
                 _ => (),
             }
         }
@@ -144,7 +179,10 @@ pub(crate) fn reverse_fixups(
             token_map.synthetic_token_id(leaf.id()).is_none()
                 || token_map.synthetic_token_id(leaf.id()) != Some(EMPTY_ID)
         }
-        _ => true,
+        tt::TokenTree::Subtree(st) => st.delimiter.map_or(true, |d| {
+            token_map.synthetic_token_id(d.id).is_none()
+                || token_map.synthetic_token_id(d.id) != Some(EMPTY_ID)
+        }),
     });
     tt.token_trees.iter_mut().for_each(|tt| match tt {
         tt::TokenTree::Subtree(tt) => reverse_fixups(tt, token_map, undo_info),
@@ -295,6 +333,49 @@ fn foo() {
 "#,
             expect![[r#"
 fn foo () {__ra_fixup ;}
+"#]],
+        )
+    }
+
+    #[test]
+    fn fixup_if_1() {
+        check(
+            r#"
+fn foo() {
+    if a
+}
+"#,
+            expect![[r#"
+fn foo () {if a {}}
+"#]],
+        )
+    }
+
+    #[test]
+    fn fixup_if_2() {
+        check(
+            r#"
+fn foo() {
+    if
+}
+"#,
+            expect![[r#"
+fn foo () {if __ra_fixup {}}
+"#]],
+        )
+    }
+
+    #[test]
+    fn fixup_if_3() {
+        check(
+            r#"
+fn foo() {
+    if {}
+}
+"#,
+            // the {} gets parsed as the condition, I think?
+            expect![[r#"
+fn foo () {if {} {}}
 "#]],
         )
     }
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs
index 21a0aa4284..aca6ecd424 100644
--- a/crates/mbe/src/syntax_bridge.rs
+++ b/crates/mbe/src/syntax_bridge.rs
@@ -31,8 +31,8 @@ pub fn syntax_node_to_token_tree_with_modifications(
     node: &SyntaxNode,
     existing_token_map: TokenMap,
     next_id: u32,
-    replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
-    append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+    replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+    append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
 ) -> (tt::Subtree, TokenMap, u32) {
     let global_offset = node.text_range().start();
     let mut c = Convertor::new(node, global_offset, existing_token_map, next_id, replace, append);
@@ -221,7 +221,7 @@ fn convert_tokens<C: TokenConvertor>(conv: &mut C) -> tt::Subtree {
                 if let Some(kind) = delim {
                     let mut subtree = tt::Subtree::default();
-                    let (id, idx) = conv.id_alloc().open_delim(range);
+                    let (id, idx) = conv.id_alloc().open_delim(range, synth_id);
                     subtree.delimiter = Some(tt::Delimiter { id, kind });
                     stack.push(StackEntry { subtree, idx, open_range: range });
                     continue;
                 }
@@ -404,7 +404,11 @@ impl TokenIdAlloc {
         token_id
     }
 
-    fn open_delim(&mut self, open_abs_range: TextRange) -> (tt::TokenId, usize) {
+    fn open_delim(
+        &mut self,
+        open_abs_range: TextRange,
+        synthetic_id: Option<SyntheticTokenId>,
+    ) -> (tt::TokenId, usize) {
         let token_id = tt::TokenId(self.next_id);
         self.next_id += 1;
         let idx = self.map.insert_delim(
@@ -412,6 +416,9 @@ impl TokenIdAlloc {
             open_abs_range - self.global_offset,
             open_abs_range - self.global_offset,
         );
+        if let Some(id) = synthetic_id {
+            self.map.insert_synthetic(token_id, id);
+        }
         (token_id, idx)
     }
 
@@ -511,8 +518,8 @@ struct Convertor {
     current: Option<SyntaxToken>,
     current_synthetic: Vec<SyntheticToken>,
     preorder: PreorderWithTokens,
-    replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
-    append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+    replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+    append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
     range: TextRange,
     punct_offset: Option<(SyntaxToken, TextSize)>,
 }
@@ -523,8 +530,8 @@ impl Convertor {
         global_offset: TextSize,
         existing_token_map: TokenMap,
         next_id: u32,
-        mut replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
-        mut append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+        mut replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+        mut append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
     ) -> Convertor {
         let range = node.text_range();
         let mut preorder = node.preorder_with_tokens();
@@ -543,14 +550,14 @@ impl Convertor {
     fn next_token(
         preorder: &mut PreorderWithTokens,
-        replace: &mut FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
-        append: &mut FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+        replace: &mut FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+        append: &mut FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
     ) -> (Option<SyntaxToken>, Vec<SyntheticToken>) {
         while let Some(ev) = preorder.next() {
             let ele = match ev {
                 WalkEvent::Enter(ele) => ele,
-                WalkEvent::Leave(SyntaxElement::Node(node)) => {
-                    if let Some(mut v) = append.remove(&node) {
+                WalkEvent::Leave(ele) => {
+                    if let Some(mut v) = append.remove(&ele) {
                         if !v.is_empty() {
                             v.reverse();
                             return (None, v);
                         }
                     }
                     continue;
                 }
-                _ => continue,
             };
+            if let Some(mut v) = replace.remove(&ele) {
+                preorder.skip_subtree();
+                if !v.is_empty() {
+                    v.reverse();
+                    return (None, v);
+                }
+            }
             match ele {
                 SyntaxElement::Token(t) => return (Some(t), Vec::new()),
-                SyntaxElement::Node(node) => {
-                    if let Some(mut v) = replace.remove(&node) {
-                        preorder.skip_subtree();
-                        if !v.is_empty() {
-                            v.reverse();
-                            return (None, v);
-                        }
-                    }
-                }
+                _ => {}
             }
         }
         (None, Vec::new())
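
A rough, self-contained sketch of the append-after-token mechanic this patch introduces (the `Element` and `WalkEvent` types below are made-up stand-ins, not the real rowan or `syntax_bridge` API): synthetic tokens are keyed by a syntax element and flushed whenever the traversal leaves that element, so a placeholder identifier can now follow the bare `if` token while the `{}` block is still appended after the enclosing `IF_EXPR` node.

    use std::collections::HashMap;

    // Stand-ins for rowan's `SyntaxElement`/`WalkEvent`; illustrative only.
    #[derive(Hash, PartialEq, Eq)]
    enum Element {
        Node(&'static str),
        Token(&'static str),
    }

    enum WalkEvent {
        Enter(Element),
        Leave(Element),
    }

    fn main() {
        // For `if` with a missing condition, register a placeholder after the
        // `if` *token*; for a missing then-branch, register `{` `}` after the
        // whole `IF_EXPR` *node*.
        let mut append: HashMap<Element, Vec<&'static str>> = HashMap::new();
        append.insert(Element::Token("if"), vec!["__ra_fixup"]);
        append.insert(Element::Node("IF_EXPR"), vec!["{", "}"]);

        // A hand-written preorder walk over the tree for a lone `if`.
        let walk = vec![
            WalkEvent::Enter(Element::Node("IF_EXPR")),
            WalkEvent::Enter(Element::Token("if")),
            WalkEvent::Leave(Element::Token("if")),
            WalkEvent::Leave(Element::Node("IF_EXPR")),
        ];

        // Appended tokens are flushed on *every* Leave event; before the patch
        // only Leave(Node) was handled, so nothing could follow a bare token.
        let mut out: Vec<&'static str> = Vec::new();
        for ev in walk {
            match ev {
                WalkEvent::Enter(Element::Token(text)) => out.push(text),
                WalkEvent::Enter(Element::Node(_)) => {}
                WalkEvent::Leave(ele) => {
                    if let Some(tokens) = append.remove(&ele) {
                        out.extend(tokens);
                    }
                }
            }
        }

        assert_eq!(out, ["if", "__ra_fixup", "{", "}"]);
        println!("{out:?}");
    }

Keying the maps by element rather than node is what lets the `if`-without-condition case insert `__ra_fixup` right after the keyword, while `reverse_fixups` can later drop both the synthetic leaves and any synthetic delimiters (via the new subtree-delimiter check) so the original, unfixed token tree is restored.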