2022-02-09 15:30:10 +00:00
|
|
|
//! To make attribute macros work reliably when typing, we need to take care to
|
|
|
|
//! fix up syntax errors in the code we're passing to them.
|
|
|
|
use std::mem;
|
|
|
|
|
2022-02-08 17:13:18 +00:00
|
|
|
use mbe::{SyntheticToken, SyntheticTokenId, TokenMap};
|
2022-02-07 19:30:28 +00:00
|
|
|
use rustc_hash::FxHashMap;
|
2022-11-10 10:22:20 +00:00
|
|
|
use smallvec::SmallVec;
|
2022-02-07 19:30:28 +00:00
|
|
|
use syntax::{
|
2022-07-26 14:10:26 +00:00
|
|
|
ast::{self, AstNode, HasLoopBody},
|
2022-07-16 10:38:33 +00:00
|
|
|
match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange,
|
2022-02-07 19:30:28 +00:00
|
|
|
};
|
2023-01-31 10:49:49 +00:00
|
|
|
use tt::token_id::Subtree;
|
2022-02-07 19:30:28 +00:00
|
|
|
|
2022-02-09 15:30:10 +00:00
|
|
|
/// The result of calculating fixes for a syntax node -- a bunch of changes
/// (appending to and replacing nodes), the information that is needed to
/// reverse those changes afterwards, and a token map.
#[derive(Debug)]
pub(crate) struct SyntaxFixups {
    // Synthetic tokens to insert after the keyed element (e.g. a missing `;`
    // or `{}` body) — consumed by `syntax_node_to_token_tree_with_modifications`.
    pub(crate) append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
    // Synthetic tokens that replace the keyed element entirely (used for
    // erroneous subtrees that are swapped out for a `__ra_fixup` identifier).
    pub(crate) replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
    // Everything needed to undo the fixups again, see `reverse_fixups`.
    pub(crate) undo_info: SyntaxFixupUndoInfo,
    // Token map accumulated while lowering the replaced error subtrees.
    pub(crate) token_map: TokenMap,
    // Next free token id, to be passed on to the final
    // `syntax_node_to_token_tree_with_modifications` call so ids stay unique.
    pub(crate) next_id: u32,
}
|
|
|
|
|
2022-02-09 15:30:10 +00:00
|
|
|
/// This is the information needed to reverse the fixups.
#[derive(Debug, PartialEq, Eq)]
pub struct SyntaxFixupUndoInfo {
    // The original subtrees that were replaced by `__ra_fixup` identifiers,
    // indexed by the `SyntheticTokenId` stored on the replacement token.
    original: Vec<Subtree>,
}

// Sentinel id (`!0`) for synthetic tokens that have no original text at all;
// such tokens are simply dropped again by `reverse_fixups`.
const EMPTY_ID: SyntheticTokenId = SyntheticTokenId(!0);
|
|
|
|
|
2022-02-09 16:43:37 +00:00
|
|
|
pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
|
2022-07-16 10:38:33 +00:00
|
|
|
let mut append = FxHashMap::<SyntaxElement, _>::default();
|
|
|
|
let mut replace = FxHashMap::<SyntaxElement, _>::default();
|
2022-02-07 19:30:28 +00:00
|
|
|
let mut preorder = node.preorder();
|
2022-02-09 10:58:52 +00:00
|
|
|
let mut original = Vec::new();
|
2022-02-09 15:30:10 +00:00
|
|
|
let mut token_map = TokenMap::default();
|
|
|
|
let mut next_id = 0;
|
2022-02-07 19:30:28 +00:00
|
|
|
while let Some(event) = preorder.next() {
|
|
|
|
let node = match event {
|
|
|
|
syntax::WalkEvent::Enter(node) => node,
|
|
|
|
syntax::WalkEvent::Leave(_) => continue,
|
|
|
|
};
|
2022-02-09 15:30:10 +00:00
|
|
|
|
2022-02-09 10:58:52 +00:00
|
|
|
if can_handle_error(&node) && has_error_to_handle(&node) {
|
|
|
|
// the node contains an error node, we have to completely replace it by something valid
|
2022-02-09 15:30:10 +00:00
|
|
|
let (original_tree, new_tmap, new_next_id) =
|
|
|
|
mbe::syntax_node_to_token_tree_with_modifications(
|
|
|
|
&node,
|
|
|
|
mem::take(&mut token_map),
|
|
|
|
next_id,
|
|
|
|
Default::default(),
|
|
|
|
Default::default(),
|
|
|
|
);
|
|
|
|
token_map = new_tmap;
|
|
|
|
next_id = new_next_id;
|
2022-02-09 10:58:52 +00:00
|
|
|
let idx = original.len() as u32;
|
|
|
|
original.push(original_tree);
|
|
|
|
let replacement = SyntheticToken {
|
|
|
|
kind: SyntaxKind::IDENT,
|
|
|
|
text: "__ra_fixup".into(),
|
|
|
|
range: node.text_range(),
|
|
|
|
id: SyntheticTokenId(idx),
|
|
|
|
};
|
2022-07-16 10:38:33 +00:00
|
|
|
replace.insert(node.clone().into(), vec![replacement]);
|
2022-02-07 19:30:28 +00:00
|
|
|
preorder.skip_subtree();
|
|
|
|
continue;
|
|
|
|
}
|
2022-02-09 15:30:10 +00:00
|
|
|
// In some other situations, we can fix things by just appending some tokens.
|
2022-02-08 17:13:18 +00:00
|
|
|
let end_range = TextRange::empty(node.text_range().end());
|
2022-02-07 19:30:28 +00:00
|
|
|
match_ast! {
|
|
|
|
match node {
|
|
|
|
ast::FieldExpr(it) => {
|
|
|
|
if it.name_ref().is_none() {
|
|
|
|
// incomplete field access: some_expr.|
|
2022-07-16 10:38:33 +00:00
|
|
|
append.insert(node.clone().into(), vec![
|
2022-02-08 17:13:18 +00:00
|
|
|
SyntheticToken {
|
|
|
|
kind: SyntaxKind::IDENT,
|
|
|
|
text: "__ra_fixup".into(),
|
|
|
|
range: end_range,
|
2022-02-09 10:58:52 +00:00
|
|
|
id: EMPTY_ID,
|
2022-02-08 17:13:18 +00:00
|
|
|
},
|
|
|
|
]);
|
|
|
|
}
|
|
|
|
},
|
|
|
|
ast::ExprStmt(it) => {
|
|
|
|
if it.semicolon_token().is_none() {
|
2022-07-16 10:38:33 +00:00
|
|
|
append.insert(node.clone().into(), vec![
|
2022-02-08 17:13:18 +00:00
|
|
|
SyntheticToken {
|
|
|
|
kind: SyntaxKind::SEMICOLON,
|
|
|
|
text: ";".into(),
|
|
|
|
range: end_range,
|
2022-02-09 10:58:52 +00:00
|
|
|
id: EMPTY_ID,
|
2022-02-08 17:13:18 +00:00
|
|
|
},
|
|
|
|
]);
|
2022-02-07 19:30:28 +00:00
|
|
|
}
|
|
|
|
},
|
2022-03-03 17:29:40 +00:00
|
|
|
ast::LetStmt(it) => {
|
|
|
|
if it.semicolon_token().is_none() {
|
2022-07-16 10:38:33 +00:00
|
|
|
append.insert(node.clone().into(), vec![
|
2022-03-03 17:29:40 +00:00
|
|
|
SyntheticToken {
|
|
|
|
kind: SyntaxKind::SEMICOLON,
|
|
|
|
text: ";".into(),
|
|
|
|
range: end_range,
|
|
|
|
id: EMPTY_ID,
|
|
|
|
},
|
|
|
|
]);
|
|
|
|
}
|
|
|
|
},
|
2022-07-16 10:38:33 +00:00
|
|
|
ast::IfExpr(it) => {
|
|
|
|
if it.condition().is_none() {
|
|
|
|
// insert placeholder token after the if token
|
|
|
|
let if_token = match it.if_token() {
|
|
|
|
Some(t) => t,
|
|
|
|
None => continue,
|
|
|
|
};
|
|
|
|
append.insert(if_token.into(), vec![
|
|
|
|
SyntheticToken {
|
|
|
|
kind: SyntaxKind::IDENT,
|
|
|
|
text: "__ra_fixup".into(),
|
|
|
|
range: end_range,
|
|
|
|
id: EMPTY_ID,
|
|
|
|
},
|
|
|
|
]);
|
|
|
|
}
|
|
|
|
if it.then_branch().is_none() {
|
|
|
|
append.insert(node.clone().into(), vec![
|
|
|
|
SyntheticToken {
|
|
|
|
kind: SyntaxKind::L_CURLY,
|
|
|
|
text: "{".into(),
|
|
|
|
range: end_range,
|
|
|
|
id: EMPTY_ID,
|
|
|
|
},
|
|
|
|
SyntheticToken {
|
|
|
|
kind: SyntaxKind::R_CURLY,
|
|
|
|
text: "}".into(),
|
|
|
|
range: end_range,
|
|
|
|
id: EMPTY_ID,
|
|
|
|
},
|
|
|
|
]);
|
|
|
|
}
|
|
|
|
},
|
2022-07-26 14:10:26 +00:00
|
|
|
ast::WhileExpr(it) => {
|
|
|
|
if it.condition().is_none() {
|
|
|
|
// insert placeholder token after the while token
|
|
|
|
let while_token = match it.while_token() {
|
|
|
|
Some(t) => t,
|
|
|
|
None => continue,
|
|
|
|
};
|
|
|
|
append.insert(while_token.into(), vec![
|
|
|
|
SyntheticToken {
|
|
|
|
kind: SyntaxKind::IDENT,
|
|
|
|
text: "__ra_fixup".into(),
|
|
|
|
range: end_range,
|
|
|
|
id: EMPTY_ID,
|
|
|
|
},
|
|
|
|
]);
|
|
|
|
}
|
|
|
|
if it.loop_body().is_none() {
|
|
|
|
append.insert(node.clone().into(), vec![
|
|
|
|
SyntheticToken {
|
|
|
|
kind: SyntaxKind::L_CURLY,
|
|
|
|
text: "{".into(),
|
|
|
|
range: end_range,
|
|
|
|
id: EMPTY_ID,
|
|
|
|
},
|
|
|
|
SyntheticToken {
|
|
|
|
kind: SyntaxKind::R_CURLY,
|
|
|
|
text: "}".into(),
|
|
|
|
range: end_range,
|
|
|
|
id: EMPTY_ID,
|
|
|
|
},
|
|
|
|
]);
|
|
|
|
}
|
|
|
|
},
|
2022-07-27 16:18:51 +00:00
|
|
|
ast::LoopExpr(it) => {
|
|
|
|
if it.loop_body().is_none() {
|
|
|
|
append.insert(node.clone().into(), vec![
|
|
|
|
SyntheticToken {
|
|
|
|
kind: SyntaxKind::L_CURLY,
|
|
|
|
text: "{".into(),
|
|
|
|
range: end_range,
|
|
|
|
id: EMPTY_ID,
|
|
|
|
},
|
|
|
|
SyntheticToken {
|
|
|
|
kind: SyntaxKind::R_CURLY,
|
|
|
|
text: "}".into(),
|
|
|
|
range: end_range,
|
|
|
|
id: EMPTY_ID,
|
|
|
|
},
|
|
|
|
]);
|
|
|
|
}
|
|
|
|
},
|
2022-07-16 10:38:33 +00:00
|
|
|
// FIXME: foo::
|
2022-08-03 18:44:21 +00:00
|
|
|
ast::MatchExpr(it) => {
|
|
|
|
if it.expr().is_none() {
|
|
|
|
let match_token = match it.match_token() {
|
|
|
|
Some(t) => t,
|
|
|
|
None => continue
|
|
|
|
};
|
|
|
|
append.insert(match_token.into(), vec![
|
|
|
|
SyntheticToken {
|
|
|
|
kind: SyntaxKind::IDENT,
|
|
|
|
text: "__ra_fixup".into(),
|
|
|
|
range: end_range,
|
|
|
|
id: EMPTY_ID
|
|
|
|
},
|
|
|
|
]);
|
|
|
|
}
|
|
|
|
if it.match_arm_list().is_none() {
|
|
|
|
// No match arms
|
|
|
|
append.insert(node.clone().into(), vec![
|
|
|
|
SyntheticToken {
|
|
|
|
kind: SyntaxKind::L_CURLY,
|
|
|
|
text: "{".into(),
|
|
|
|
range: end_range,
|
|
|
|
id: EMPTY_ID,
|
|
|
|
},
|
|
|
|
SyntheticToken {
|
|
|
|
kind: SyntaxKind::R_CURLY,
|
|
|
|
text: "}".into(),
|
|
|
|
range: end_range,
|
|
|
|
id: EMPTY_ID,
|
|
|
|
},
|
|
|
|
]);
|
|
|
|
}
|
|
|
|
},
|
2022-08-03 19:51:30 +00:00
|
|
|
ast::ForExpr(it) => {
|
|
|
|
let for_token = match it.for_token() {
|
|
|
|
Some(token) => token,
|
|
|
|
None => continue
|
|
|
|
};
|
|
|
|
|
|
|
|
let [pat, in_token, iter] = [
|
2022-08-03 20:27:43 +00:00
|
|
|
(SyntaxKind::UNDERSCORE, "_"),
|
|
|
|
(SyntaxKind::IN_KW, "in"),
|
2022-08-03 19:51:30 +00:00
|
|
|
(SyntaxKind::IDENT, "__ra_fixup")
|
|
|
|
].map(|(kind, text)| SyntheticToken { kind, text: text.into(), range: end_range, id: EMPTY_ID});
|
|
|
|
|
|
|
|
if it.pat().is_none() && it.in_token().is_none() && it.iterable().is_none() {
|
|
|
|
append.insert(for_token.into(), vec![pat, in_token, iter]);
|
2022-08-04 14:43:09 +00:00
|
|
|
// does something funky -- see test case for_no_pat
|
2022-08-04 13:28:25 +00:00
|
|
|
} else if it.pat().is_none() {
|
|
|
|
append.insert(for_token.into(), vec![pat]);
|
2022-08-03 20:27:43 +00:00
|
|
|
}
|
2022-08-03 19:51:30 +00:00
|
|
|
|
|
|
|
if it.loop_body().is_none() {
|
|
|
|
append.insert(node.clone().into(), vec![
|
|
|
|
SyntheticToken {
|
|
|
|
kind: SyntaxKind::L_CURLY,
|
|
|
|
text: "{".into(),
|
|
|
|
range: end_range,
|
|
|
|
id: EMPTY_ID,
|
|
|
|
},
|
|
|
|
SyntheticToken {
|
|
|
|
kind: SyntaxKind::R_CURLY,
|
|
|
|
text: "}".into(),
|
|
|
|
range: end_range,
|
|
|
|
id: EMPTY_ID,
|
|
|
|
},
|
|
|
|
]);
|
|
|
|
}
|
|
|
|
},
|
2022-02-07 19:30:28 +00:00
|
|
|
_ => (),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2022-02-09 15:30:10 +00:00
|
|
|
SyntaxFixups {
|
|
|
|
append,
|
|
|
|
replace,
|
|
|
|
token_map,
|
|
|
|
next_id,
|
|
|
|
undo_info: SyntaxFixupUndoInfo { original },
|
|
|
|
}
|
2022-02-09 10:58:52 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
fn has_error(node: &SyntaxNode) -> bool {
|
|
|
|
node.children().any(|c| c.kind() == SyntaxKind::ERROR)
|
|
|
|
}
|
|
|
|
|
|
|
|
fn can_handle_error(node: &SyntaxNode) -> bool {
|
|
|
|
ast::Expr::can_cast(node.kind())
|
|
|
|
}
|
|
|
|
|
|
|
|
fn has_error_to_handle(node: &SyntaxNode) -> bool {
|
|
|
|
has_error(node) || node.children().any(|c| !can_handle_error(&c) && has_error_to_handle(&c))
|
2022-02-07 19:30:28 +00:00
|
|
|
}
|
|
|
|
|
2022-02-09 16:52:15 +00:00
|
|
|
pub(crate) fn reverse_fixups(
|
|
|
|
tt: &mut Subtree,
|
|
|
|
token_map: &TokenMap,
|
|
|
|
undo_info: &SyntaxFixupUndoInfo,
|
|
|
|
) {
|
2022-11-10 10:22:20 +00:00
|
|
|
let tts = std::mem::take(&mut tt.token_trees);
|
|
|
|
tt.token_trees = tts
|
|
|
|
.into_iter()
|
|
|
|
.filter(|tt| match tt {
|
2023-01-31 10:49:49 +00:00
|
|
|
tt::TokenTree::Leaf(leaf) => {
|
|
|
|
token_map.synthetic_token_id(*leaf.span()) != Some(EMPTY_ID)
|
|
|
|
}
|
2022-11-10 10:22:20 +00:00
|
|
|
tt::TokenTree::Subtree(st) => {
|
2023-01-31 10:49:49 +00:00
|
|
|
token_map.synthetic_token_id(st.delimiter.open) != Some(EMPTY_ID)
|
2022-02-09 10:58:52 +00:00
|
|
|
}
|
2022-11-10 10:22:20 +00:00
|
|
|
})
|
|
|
|
.flat_map(|tt| match tt {
|
|
|
|
tt::TokenTree::Subtree(mut tt) => {
|
|
|
|
reverse_fixups(&mut tt, token_map, undo_info);
|
|
|
|
SmallVec::from_const([tt.into()])
|
|
|
|
}
|
|
|
|
tt::TokenTree::Leaf(leaf) => {
|
2023-01-31 10:49:49 +00:00
|
|
|
if let Some(id) = token_map.synthetic_token_id(*leaf.span()) {
|
2022-11-10 10:22:20 +00:00
|
|
|
let original = undo_info.original[id.0 as usize].clone();
|
2023-01-31 10:49:49 +00:00
|
|
|
if original.delimiter.kind == tt::DelimiterKind::Invisible {
|
2022-11-10 10:22:20 +00:00
|
|
|
original.token_trees.into()
|
|
|
|
} else {
|
|
|
|
SmallVec::from_const([original.into()])
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
SmallVec::from_const([leaf.into()])
|
|
|
|
}
|
|
|
|
}
|
|
|
|
})
|
|
|
|
.collect();
|
2022-02-07 19:30:28 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
#[cfg(test)]
mod tests {
    use expect_test::{expect, Expect};

    use crate::tt;

    use super::reverse_fixups;

    // The following three functions are only meant to check partial structural equivalence of
    // `TokenTree`s, see the last assertion in `check()`.
    fn check_leaf_eq(a: &tt::Leaf, b: &tt::Leaf) -> bool {
        match (a, b) {
            (tt::Leaf::Literal(a), tt::Leaf::Literal(b)) => a.text == b.text,
            (tt::Leaf::Punct(a), tt::Leaf::Punct(b)) => a.char == b.char,
            (tt::Leaf::Ident(a), tt::Leaf::Ident(b)) => a.text == b.text,
            _ => false,
        }
    }

    fn check_subtree_eq(a: &tt::Subtree, b: &tt::Subtree) -> bool {
        a.delimiter.kind == b.delimiter.kind
            && a.token_trees.len() == b.token_trees.len()
            && a.token_trees.iter().zip(&b.token_trees).all(|(a, b)| check_tt_eq(a, b))
    }

    fn check_tt_eq(a: &tt::TokenTree, b: &tt::TokenTree) -> bool {
        match (a, b) {
            (tt::TokenTree::Leaf(a), tt::TokenTree::Leaf(b)) => check_leaf_eq(a, b),
            (tt::TokenTree::Subtree(a), tt::TokenTree::Subtree(b)) => check_subtree_eq(a, b),
            _ => false,
        }
    }

    // Parses `ra_fixture`, applies the computed fixups, and checks that:
    // 1. the fixed-up token tree renders as `expect`,
    // 2. the fixed-up tree parses without syntax errors, and
    // 3. reversing the fixups yields a tree structurally equivalent to the
    //    original input (modulo token ids and punct spacing).
    #[track_caller]
    fn check(ra_fixture: &str, mut expect: Expect) {
        let parsed = syntax::SourceFile::parse(ra_fixture);
        let fixups = super::fixup_syntax(&parsed.syntax_node());
        let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
            &parsed.syntax_node(),
            fixups.token_map,
            fixups.next_id,
            fixups.replace,
            fixups.append,
        );

        let actual = format!("{tt}\n");

        expect.indent(false);
        expect.assert_eq(&actual);

        // the fixed-up tree should be syntactically valid
        let (parse, _) = mbe::token_tree_to_syntax_node(&tt, ::mbe::TopEntryPoint::MacroItems);
        assert!(
            parse.errors().is_empty(),
            "parse has syntax errors. parse tree:\n{:#?}",
            parse.syntax_node()
        );

        reverse_fixups(&mut tt, &tmap, &fixups.undo_info);

        // the fixed-up + reversed version should be equivalent to the original input
        // modulo token IDs and `Punct`s' spacing.
        let (original_as_tt, _) = mbe::syntax_node_to_token_tree(&parsed.syntax_node());
        assert!(
            check_subtree_eq(&tt, &original_as_tt),
            "different token tree: {tt:?},\n{original_as_tt:?}"
        );
    }

    #[test]
    fn just_for_token() {
        check(
            r#"
fn foo() {
    for
}
"#,
            expect![[r#"
fn foo () {for _ in __ra_fixup {}}
"#]],
        )
    }

    #[test]
    fn for_no_iter_pattern() {
        check(
            r#"
fn foo() {
    for {}
}
"#,
            expect![[r#"
fn foo () {for _ in __ra_fixup {}}
"#]],
        )
    }

    #[test]
    fn for_no_body() {
        check(
            r#"
fn foo() {
    for bar in qux
}
"#,
            expect![[r#"
fn foo () {for bar in qux {}}
"#]],
        )
    }

    // FIXME: https://github.com/rust-lang/rust-analyzer/pull/12937#discussion_r937633695
    #[test]
    fn for_no_pat() {
        check(
            r#"
fn foo() {
    for in qux {

    }
}
"#,
            expect![[r#"
fn foo () {__ra_fixup}
"#]],
        )
    }

    #[test]
    fn match_no_expr_no_arms() {
        check(
            r#"
fn foo() {
    match
}
"#,
            expect![[r#"
fn foo () {match __ra_fixup {}}
"#]],
        )
    }

    #[test]
    fn match_expr_no_arms() {
        check(
            r#"
fn foo() {
    match x {

    }
}
"#,
            expect![[r#"
fn foo () {match x {}}
"#]],
        )
    }

    #[test]
    fn match_no_expr() {
        check(
            r#"
fn foo() {
    match {
        _ => {}
    }
}
"#,
            expect![[r#"
fn foo () {match __ra_fixup {}}
"#]],
        )
    }

    #[test]
    fn incomplete_field_expr_1() {
        check(
            r#"
fn foo() {
    a.
}
"#,
            expect![[r#"
fn foo () {a . __ra_fixup}
"#]],
        )
    }

    #[test]
    fn incomplete_field_expr_2() {
        check(
            r#"
fn foo() {
    a.;
}
"#,
            expect![[r#"
fn foo () {a . __ra_fixup ;}
"#]],
        )
    }

    #[test]
    fn incomplete_field_expr_3() {
        check(
            r#"
fn foo() {
    a.;
    bar();
}
"#,
            expect![[r#"
fn foo () {a . __ra_fixup ; bar () ;}
"#]],
        )
    }

    #[test]
    fn incomplete_let() {
        check(
            r#"
fn foo() {
    let x = a
}
"#,
            expect![[r#"
fn foo () {let x = a ;}
"#]],
        )
    }

    #[test]
    fn incomplete_field_expr_in_let() {
        check(
            r#"
fn foo() {
    let x = a.
}
"#,
            expect![[r#"
fn foo () {let x = a . __ra_fixup ;}
"#]],
        )
    }

    #[test]
    fn field_expr_before_call() {
        // another case that easily happens while typing
        check(
            r#"
fn foo() {
    a.b
    bar();
}
"#,
            expect![[r#"
fn foo () {a . b ; bar () ;}
"#]],
        )
    }

    #[test]
    fn extraneous_comma() {
        check(
            r#"
fn foo() {
    bar(,);
}
"#,
            expect![[r#"
fn foo () {__ra_fixup ;}
"#]],
        )
    }

    #[test]
    fn fixup_if_1() {
        check(
            r#"
fn foo() {
    if a
}
"#,
            expect![[r#"
fn foo () {if a {}}
"#]],
        )
    }

    #[test]
    fn fixup_if_2() {
        check(
            r#"
fn foo() {
    if
}
"#,
            expect![[r#"
fn foo () {if __ra_fixup {}}
"#]],
        )
    }

    #[test]
    fn fixup_if_3() {
        check(
            r#"
fn foo() {
    if {}
}
"#,
            expect![[r#"
fn foo () {if __ra_fixup {} {}}
"#]],
        )
    }

    #[test]
    fn fixup_while_1() {
        check(
            r#"
fn foo() {
    while
}
"#,
            expect![[r#"
fn foo () {while __ra_fixup {}}
"#]],
        )
    }

    #[test]
    fn fixup_while_2() {
        check(
            r#"
fn foo() {
    while foo
}
"#,
            expect![[r#"
fn foo () {while foo {}}
"#]],
        )
    }

    #[test]
    fn fixup_while_3() {
        check(
            r#"
fn foo() {
    while {}
}
"#,
            expect![[r#"
fn foo () {while __ra_fixup {}}
"#]],
        )
    }

    #[test]
    fn fixup_loop() {
        check(
            r#"
fn foo() {
    loop
}
"#,
            expect![[r#"
fn foo () {loop {}}
"#]],
        )
    }
}
|