rust-lang/rust-analyzer, commit 3d7da510f9
Auto merge of #12775 - flodiebold:syntax-fixup-if, r=flodiebold

fix: Improve syntax fixup a bit, handle incomplete `if`
- allow appending tokens after a token, not just a node
- allow inserting delimiters (and removing them again)
- fix up `if {}` and `if` without anything following

3 changed files with 120 additions and 34 deletions
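For context: the fixup pass patches incomplete syntax with synthetic tokens before a macro sees its input, so the input still parses, and records ids so the patches can be stripped out again after expansion. A minimal string-level sketch of the new `if` behavior (hypothetical helper, illustration only: the real pass inspects the parse tree, not strings):

    // Illustration only: the real pass checks ast::IfExpr's condition()
    // and then_branch() instead of pattern-matching strings.
    fn fixup_if(src: &str) -> String {
        let rest = src.trim_start_matches("if").trim();
        if rest.is_empty() {
            // bare `if`: synthesize a placeholder condition and an empty body
            return "if __ra_fixup {}".to_string();
        }
        if !rest.ends_with('}') {
            // `if cond` without a then-branch: synthesize an empty body
            return format!("{} {{}}", src.trim_end());
        }
        src.to_string()
    }

    fn main() {
        assert_eq!(fixup_if("if"), "if __ra_fixup {}");
        assert_eq!(fixup_if("if a"), "if a {}");
    }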
crates/hir-expand/src/db.rs

@@ -150,7 +150,7 @@ pub fn expand_speculative(
     // Build the subtree and token mapping for the speculative args
     let censor = censor_for_macro_input(&loc, speculative_args);
     let mut fixups = fixup::fixup_syntax(speculative_args);
-    fixups.replace.extend(censor.into_iter().map(|node| (node, Vec::new())));
+    fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
     let (mut tt, spec_args_tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
         speculative_args,
         fixups.token_map,
@@ -295,7 +295,7 @@ fn macro_arg(
     let node = SyntaxNode::new_root(arg);
     let censor = censor_for_macro_input(&loc, &node);
     let mut fixups = fixup::fixup_syntax(&node);
-    fixups.replace.extend(censor.into_iter().map(|node| (node, Vec::new())));
+    fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
     let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
         &node,
         fixups.token_map,
crates/hir-expand/src/fixup.rs

@@ -6,7 +6,7 @@ use mbe::{SyntheticToken, SyntheticTokenId, TokenMap};
 use rustc_hash::FxHashMap;
 use syntax::{
     ast::{self, AstNode},
-    match_ast, SyntaxKind, SyntaxNode, TextRange,
+    match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange,
 };
 use tt::Subtree;
@@ -15,8 +15,8 @@ use tt::Subtree;
 /// reverse those changes afterwards, and a token map.
 #[derive(Debug)]
 pub(crate) struct SyntaxFixups {
-    pub(crate) append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
-    pub(crate) replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+    pub(crate) append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+    pub(crate) replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
     pub(crate) undo_info: SyntaxFixupUndoInfo,
     pub(crate) token_map: TokenMap,
     pub(crate) next_id: u32,
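The key change in this struct is `SyntaxNode` to `SyntaxElement` for the map keys. `SyntaxElement` is rowan's node-or-token sum type, so fixups can now target tokens (for example the `if` keyword) as well as nodes, and the `.into()` calls seen throughout the diff are node/token-to-element conversions. A self-contained sketch of the pattern (stand-in types, not rowan's real definitions):

    use std::collections::HashMap;

    // Stand-ins for rowan's SyntaxNode / SyntaxToken / SyntaxElement.
    #[derive(Clone, PartialEq, Eq, Hash, Debug)]
    struct Node(u32);
    #[derive(Clone, PartialEq, Eq, Hash, Debug)]
    struct Token(u32);
    #[derive(Clone, PartialEq, Eq, Hash, Debug)]
    enum Element {
        Node(Node),
        Token(Token),
    }

    impl From<Node> for Element {
        fn from(n: Node) -> Self {
            Element::Node(n)
        }
    }
    impl From<Token> for Element {
        fn from(t: Token) -> Self {
            Element::Token(t)
        }
    }

    fn main() {
        // Widening the key type lets both nodes and tokens be append
        // targets, which is exactly what the incomplete-`if` fixup needs.
        let mut append: HashMap<Element, Vec<&str>> = HashMap::new();
        append.insert(Node(1).into(), vec!["{", "}"]);
        append.insert(Token(2).into(), vec!["__ra_fixup"]);
        assert_eq!(append.len(), 2);
    }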
@@ -31,8 +31,8 @@ pub struct SyntaxFixupUndoInfo {
 const EMPTY_ID: SyntheticTokenId = SyntheticTokenId(!0);

 pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
-    let mut append = FxHashMap::default();
-    let mut replace = FxHashMap::default();
+    let mut append = FxHashMap::<SyntaxElement, _>::default();
+    let mut replace = FxHashMap::<SyntaxElement, _>::default();
     let mut preorder = node.preorder();
     let mut original = Vec::new();
     let mut token_map = TokenMap::default();
@@ -63,7 +63,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
                 range: node.text_range(),
                 id: SyntheticTokenId(idx),
             };
-            replace.insert(node.clone(), vec![replacement]);
+            replace.insert(node.clone().into(), vec![replacement]);
             preorder.skip_subtree();
             continue;
         }
@@ -75,7 +75,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
             ast::FieldExpr(it) => {
                 if it.name_ref().is_none() {
                     // incomplete field access: some_expr.|
-                    append.insert(node.clone(), vec![
+                    append.insert(node.clone().into(), vec![
                         SyntheticToken {
                             kind: SyntaxKind::IDENT,
                             text: "__ra_fixup".into(),
@@ -87,7 +87,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
             },
             ast::ExprStmt(it) => {
                 if it.semicolon_token().is_none() {
-                    append.insert(node.clone(), vec![
+                    append.insert(node.clone().into(), vec![
                         SyntheticToken {
                             kind: SyntaxKind::SEMICOLON,
                             text: ";".into(),
@@ -99,7 +99,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
             },
             ast::LetStmt(it) => {
                 if it.semicolon_token().is_none() {
-                    append.insert(node.clone(), vec![
+                    append.insert(node.clone().into(), vec![
                         SyntheticToken {
                             kind: SyntaxKind::SEMICOLON,
                             text: ";".into(),
@@ -109,6 +109,41 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
                     ]);
                 }
             },
+            ast::IfExpr(it) => {
+                if it.condition().is_none() {
+                    // insert placeholder token after the if token
+                    let if_token = match it.if_token() {
+                        Some(t) => t,
+                        None => continue,
+                    };
+                    append.insert(if_token.into(), vec![
+                        SyntheticToken {
+                            kind: SyntaxKind::IDENT,
+                            text: "__ra_fixup".into(),
+                            range: end_range,
+                            id: EMPTY_ID,
+                        },
+                    ]);
+                }
+                if it.then_branch().is_none() {
+                    append.insert(node.clone().into(), vec![
+                        SyntheticToken {
+                            kind: SyntaxKind::L_CURLY,
+                            text: "{".into(),
+                            range: end_range,
+                            id: EMPTY_ID,
+                        },
+                        SyntheticToken {
+                            kind: SyntaxKind::R_CURLY,
+                            text: "}".into(),
+                            range: end_range,
+                            id: EMPTY_ID,
+                        },
+                    ]);
+                }
+            },
+            // FIXME: foo::
+            // FIXME: for, loop, match etc.
             _ => (),
         }
     }
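Note the two distinct anchors above: the placeholder condition is appended after the `if` token itself, while the synthetic braces are appended after the whole `IfExpr` node. Every synthetic token carries `id: EMPTY_ID`, which is what later lets `reverse_fixups` recognize and drop them. A simplified sketch of that marker round trip (hypothetical types, not rust-analyzer's real definitions):

    #[derive(Clone, Copy, PartialEq, Debug)]
    struct SyntheticTokenId(u32);
    const EMPTY_ID: SyntheticTokenId = SyntheticTokenId(!0);

    struct Tok {
        text: &'static str,
        // None: the token existed in the original source.
        synthetic: Option<SyntheticTokenId>,
    }

    fn main() {
        // After fixup, a bare `if` has gained a condition and a body.
        let mut tokens = vec![
            Tok { text: "if", synthetic: None },
            Tok { text: "__ra_fixup", synthetic: Some(EMPTY_ID) },
            Tok { text: "{", synthetic: Some(EMPTY_ID) },
            Tok { text: "}", synthetic: Some(EMPTY_ID) },
        ];
        // reverse_fixups-style cleanup: drop the EMPTY_ID markers again.
        tokens.retain(|t| t.synthetic != Some(EMPTY_ID));
        assert_eq!(tokens.len(), 1);
        assert_eq!(tokens[0].text, "if");
    }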
@@ -144,7 +179,10 @@ pub(crate) fn reverse_fixups(
             token_map.synthetic_token_id(leaf.id()).is_none()
                 || token_map.synthetic_token_id(leaf.id()) != Some(EMPTY_ID)
         }
-        _ => true,
+        tt::TokenTree::Subtree(st) => st.delimiter.map_or(true, |d| {
+            token_map.synthetic_token_id(d.id).is_none()
+                || token_map.synthetic_token_id(d.id) != Some(EMPTY_ID)
+        }),
     });
     tt.token_trees.iter_mut().for_each(|tt| match tt {
         tt::TokenTree::Subtree(tt) => reverse_fixups(tt, token_map, undo_info),
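The old `_ => true` arm kept every subtree; the new arm applies the same EMPTY_ID test to a subtree's delimiter, so the `{}` synthesized for an incomplete `if` is removed again on reversal. Note the `map_or(true, …)`: delimiter-less subtrees are always kept. A sketch of the predicate's shape (stand-in types):

    #[derive(Clone, Copy, PartialEq)]
    struct SyntheticTokenId(u32);
    const EMPTY_ID: SyntheticTokenId = SyntheticTokenId(!0);

    struct Delimiter {
        // What token_map.synthetic_token_id(d.id) would report.
        synthetic_id: Option<SyntheticTokenId>,
    }

    fn keep_subtree(delimiter: Option<&Delimiter>) -> bool {
        // Keep undelimited subtrees; drop delimited ones whose delimiter
        // was synthesized with the EMPTY_ID placeholder.
        delimiter.map_or(true, |d| {
            d.synthetic_id.is_none() || d.synthetic_id != Some(EMPTY_ID)
        })
    }

    fn main() {
        let real = Delimiter { synthetic_id: None };
        let synth = Delimiter { synthetic_id: Some(EMPTY_ID) };
        assert!(keep_subtree(None));
        assert!(keep_subtree(Some(&real)));
        assert!(!keep_subtree(Some(&synth)));
    }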
@@ -295,6 +333,49 @@ fn foo() {
 "#,
             expect![[r#"
 fn foo () {__ra_fixup ;}
+"#]],
+        )
+    }
+
+    #[test]
+    fn fixup_if_1() {
+        check(
+            r#"
+fn foo() {
+    if a
+}
+"#,
+            expect![[r#"
+fn foo () {if a {}}
+"#]],
+        )
+    }
+
+    #[test]
+    fn fixup_if_2() {
+        check(
+            r#"
+fn foo() {
+    if
+}
+"#,
+            expect![[r#"
+fn foo () {if __ra_fixup {}}
+"#]],
+        )
+    }
+
+    #[test]
+    fn fixup_if_3() {
+        check(
+            r#"
+fn foo() {
+    if {}
+}
+"#,
+            // the {} gets parsed as the condition, I think?
+            expect![[r#"
+fn foo () {if {} {}}
 "#]],
         )
     }
crates/mbe/src/syntax_bridge.rs

@@ -31,8 +31,8 @@ pub fn syntax_node_to_token_tree_with_modifications(
     node: &SyntaxNode,
     existing_token_map: TokenMap,
     next_id: u32,
-    replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
-    append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+    replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+    append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
 ) -> (tt::Subtree, TokenMap, u32) {
     let global_offset = node.text_range().start();
     let mut c = Convertor::new(node, global_offset, existing_token_map, next_id, replace, append);
@@ -221,7 +221,7 @@ fn convert_tokens<C: TokenConvertor>(conv: &mut C) -> tt::Subtree {

                 if let Some(kind) = delim {
                     let mut subtree = tt::Subtree::default();
-                    let (id, idx) = conv.id_alloc().open_delim(range);
+                    let (id, idx) = conv.id_alloc().open_delim(range, synth_id);
                     subtree.delimiter = Some(tt::Delimiter { id, kind });
                     stack.push(StackEntry { subtree, idx, open_range: range });
                     continue;
@@ -404,7 +404,11 @@ impl TokenIdAlloc {
         token_id
     }

-    fn open_delim(&mut self, open_abs_range: TextRange) -> (tt::TokenId, usize) {
+    fn open_delim(
+        &mut self,
+        open_abs_range: TextRange,
+        synthetic_id: Option<SyntheticTokenId>,
+    ) -> (tt::TokenId, usize) {
         let token_id = tt::TokenId(self.next_id);
         self.next_id += 1;
         let idx = self.map.insert_delim(
@@ -412,6 +416,9 @@ impl TokenIdAlloc {
             open_abs_range - self.global_offset,
             open_abs_range - self.global_offset,
         );
+        if let Some(id) = synthetic_id {
+            self.map.insert_synthetic(token_id, id);
+        }
         (token_id, idx)
     }

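Taken together, the two hunks above thread an optional synthetic id into delimiter allocation: when `open_delim` hands out a fresh token id, it can immediately record that the delimiter has no counterpart in the original text. A minimal sketch of that threading (stand-in types; the real allocator also tracks text ranges and a stack of open delimiters):

    #[derive(Clone, Copy, PartialEq, Debug)]
    struct SyntheticTokenId(u32);

    #[derive(Default)]
    struct TokenMap {
        synthetic: Vec<(u32, SyntheticTokenId)>,
    }

    impl TokenMap {
        fn insert_synthetic(&mut self, token_id: u32, id: SyntheticTokenId) {
            self.synthetic.push((token_id, id));
        }
    }

    #[derive(Default)]
    struct TokenIdAlloc {
        map: TokenMap,
        next_id: u32,
    }

    impl TokenIdAlloc {
        // Mirrors the new open_delim shape: the optional synthetic id is
        // recorded against the freshly allocated token id.
        fn open_delim(&mut self, synthetic_id: Option<SyntheticTokenId>) -> u32 {
            let token_id = self.next_id;
            self.next_id += 1;
            if let Some(id) = synthetic_id {
                self.map.insert_synthetic(token_id, id);
            }
            token_id
        }
    }

    fn main() {
        let mut alloc = TokenIdAlloc::default();
        let real = alloc.open_delim(None);
        let synth = alloc.open_delim(Some(SyntheticTokenId(0)));
        assert_eq!((real, synth), (0, 1));
        assert_eq!(alloc.map.synthetic.len(), 1);
    }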
@@ -511,8 +518,8 @@ struct Convertor {
     current: Option<SyntaxToken>,
     current_synthetic: Vec<SyntheticToken>,
     preorder: PreorderWithTokens,
-    replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
-    append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+    replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+    append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
     range: TextRange,
     punct_offset: Option<(SyntaxToken, TextSize)>,
 }
@@ -523,8 +530,8 @@ impl Convertor {
         global_offset: TextSize,
         existing_token_map: TokenMap,
         next_id: u32,
-        mut replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
-        mut append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+        mut replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+        mut append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
     ) -> Convertor {
         let range = node.text_range();
         let mut preorder = node.preorder_with_tokens();
@@ -543,14 +550,14 @@ impl Convertor {

     fn next_token(
         preorder: &mut PreorderWithTokens,
-        replace: &mut FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
-        append: &mut FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+        replace: &mut FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+        append: &mut FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
     ) -> (Option<SyntaxToken>, Vec<SyntheticToken>) {
         while let Some(ev) = preorder.next() {
             let ele = match ev {
                 WalkEvent::Enter(ele) => ele,
-                WalkEvent::Leave(SyntaxElement::Node(node)) => {
-                    if let Some(mut v) = append.remove(&node) {
+                WalkEvent::Leave(ele) => {
+                    if let Some(mut v) = append.remove(&ele) {
                         if !v.is_empty() {
                             v.reverse();
                             return (None, v);
@@ -558,19 +565,17 @@ impl Convertor {
                         }
                     }
                     continue;
                 }
-                _ => continue,
             };
-            match ele {
-                SyntaxElement::Token(t) => return (Some(t), Vec::new()),
-                SyntaxElement::Node(node) => {
-                    if let Some(mut v) = replace.remove(&node) {
-                        preorder.skip_subtree();
-                        if !v.is_empty() {
-                            v.reverse();
-                            return (None, v);
-                        }
-                    }
-                }
-            }
+            if let Some(mut v) = replace.remove(&ele) {
+                preorder.skip_subtree();
+                if !v.is_empty() {
+                    v.reverse();
+                    return (None, v);
+                }
+            }
+            match ele {
+                SyntaxElement::Token(t) => return (Some(t), Vec::new()),
+                _ => {}
+            }
         }
         (None, Vec::new())
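The restructuring in the last two hunks is what makes token-targeted fixups work: `append` is now consulted on every `Leave` event (tokens included) and `replace` on every element, instead of only on nodes. A self-contained sketch of the resulting control flow over a toy event stream (hypothetical types):

    use std::collections::HashMap;

    #[derive(PartialEq, Eq, Hash)]
    enum Element {
        Node(&'static str),
        Token(&'static str),
    }

    enum WalkEvent {
        Enter(Element),
        Leave(Element),
    }

    // Mirrors the shape of Convertor::next_token: replacements are checked
    // on Enter, appends on Leave, and only tokens are yielded as real
    // output. (The real code also reverses `v`, since callers pop from the
    // end, and skips replaced subtrees in the preorder walk.)
    fn next_token(
        events: &mut impl Iterator<Item = WalkEvent>,
        replace: &mut HashMap<Element, Vec<&'static str>>,
        append: &mut HashMap<Element, Vec<&'static str>>,
    ) -> (Option<&'static str>, Vec<&'static str>) {
        while let Some(ev) = events.next() {
            let ele = match ev {
                WalkEvent::Enter(ele) => ele,
                WalkEvent::Leave(ele) => {
                    if let Some(v) = append.remove(&ele) {
                        if !v.is_empty() {
                            return (None, v);
                        }
                    }
                    continue;
                }
            };
            if let Some(v) = replace.remove(&ele) {
                if !v.is_empty() {
                    return (None, v);
                }
            }
            if let Element::Token(t) = ele {
                return (Some(t), Vec::new());
            }
        }
        (None, Vec::new())
    }

    fn main() {
        let mut replace = HashMap::new();
        let mut append = HashMap::new();
        // Synthetic tokens appended after the `if` *token*, as in the fixup.
        append.insert(Element::Token("if"), vec!["__ra_fixup"]);
        let mut events = vec![
            WalkEvent::Enter(Element::Token("if")),
            WalkEvent::Leave(Element::Token("if")),
        ]
        .into_iter();
        // First call yields the real `if` token...
        assert_eq!(next_token(&mut events, &mut replace, &mut append).0, Some("if"));
        // ...the second yields the synthetic tokens appended after it.
        assert_eq!(next_token(&mut events, &mut replace, &mut append).1, vec!["__ra_fixup"]);
    }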