Rename syntax_node_to_token_tree_censored

Florian Diebold 2022-02-09 12:00:03 +01:00
parent ecf3cff4a6
commit bdb7ae5dd0
4 changed files with 10 additions and 9 deletions


@@ -151,8 +151,11 @@ pub fn expand_speculative(
     let censor = censor_for_macro_input(&loc, &speculative_args);
     let mut fixups = fixup::fixup_syntax(&speculative_args);
     fixups.replace.extend(censor.into_iter().map(|node| (node, Vec::new())));
-    let (mut tt, spec_args_tmap) =
-        mbe::syntax_node_to_token_tree_censored(&speculative_args, fixups.replace, fixups.append);
+    let (mut tt, spec_args_tmap) = mbe::syntax_node_to_token_tree_with_modifications(
+        &speculative_args,
+        fixups.replace,
+        fixups.append,
+    );
 
     let (attr_arg, token_id) = match loc.kind {
         MacroCallKind::Attr { invoc_attr_index, .. } => {
@@ -303,11 +306,10 @@ fn macro_arg(
     let node = SyntaxNode::new_root(arg);
     let censor = censor_for_macro_input(&loc, &node);
-    // TODO only fixup for attribute macro input
     let mut fixups = fixup::fixup_syntax(&node);
     fixups.replace.extend(censor.into_iter().map(|node| (node, Vec::new())));
     let (mut tt, tmap) =
-        mbe::syntax_node_to_token_tree_censored(&node, fixups.replace, fixups.append);
+        mbe::syntax_node_to_token_tree_with_modifications(&node, fixups.replace, fixups.append);
 
     if loc.def.is_proc_macro() {
         // proc macros expect their inputs without parentheses, MBEs expect it with them included
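Note: both call sites above now follow the same pattern: run the fixups, fold the censored nodes into the replace map as "replace with nothing", and pass both maps to the renamed function. A minimal sketch of that pattern is below; the helper `lower_macro_input` is invented for illustration, and the `syntax`, `tt`, `mbe`, and `fixup` paths are assumed to be in scope as in rust-analyzer's workspace.

```rust
// Sketch of the caller pattern after the rename (illustrative only, not part of the commit).
fn lower_macro_input(
    node: &syntax::SyntaxNode,
    censor: Vec<syntax::SyntaxNode>,
) -> (tt::Subtree, mbe::TokenMap) {
    // Collect syntax fixups: nodes to replace and synthetic tokens to append.
    let mut fixups = fixup::fixup_syntax(node);
    // Censored nodes become "replace with nothing" entries.
    fixups.replace.extend(censor.into_iter().map(|node| (node, Vec::new())));
    // The renamed entry point takes the node plus both modification maps.
    mbe::syntax_node_to_token_tree_with_modifications(node, fixups.replace, fixups.append)
}
```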


@@ -123,7 +123,7 @@ mod tests {
         let parsed = syntax::SourceFile::parse(ra_fixture);
         eprintln!("parse: {:#?}", parsed.syntax_node());
         let fixups = super::fixup_syntax(&parsed.syntax_node());
-        let (mut tt, tmap) = mbe::syntax_node_to_token_tree_censored(
+        let (mut tt, tmap) = mbe::syntax_node_to_token_tree_with_modifications(
             &parsed.syntax_node(),
             fixups.replace,
             fixups.append,
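The test above already exercises the new name end to end; roughly the same usage outside the test module might look like the sketch below. The fixture string and the `fixup::` path are assumptions for illustration, not taken from the commit.

```rust
// Illustrative usage mirroring the test above; the input string is made up.
fn example() {
    let parsed = syntax::SourceFile::parse("fn foo() { a. }");
    let fixups = fixup::fixup_syntax(&parsed.syntax_node());
    let (tt, _tmap) = mbe::syntax_node_to_token_tree_with_modifications(
        &parsed.syntax_node(),
        fixups.replace,
        fixups.append,
    );
    // `tt` is the token tree a macro would consume, with the fixups applied.
    let _ = tt;
}
```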


@@ -30,7 +30,7 @@ pub use tt::{Delimiter, DelimiterKind, Punct};
 pub use crate::{
     syntax_bridge::{
         parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_tree,
-        syntax_node_to_token_tree_censored, token_tree_to_syntax_node, SyntheticToken,
+        syntax_node_to_token_tree_with_modifications, token_tree_to_syntax_node, SyntheticToken,
         SyntheticTokenId,
     },
     token_map::TokenMap,


@@ -15,13 +15,12 @@ use crate::{to_parser_input::to_parser_input, tt_iter::TtIter, TokenMap};
 /// Convert the syntax node to a `TokenTree` (what macro
 /// will consume).
 pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) {
-    syntax_node_to_token_tree_censored(node, Default::default(), Default::default())
+    syntax_node_to_token_tree_with_modifications(node, Default::default(), Default::default())
 }
 
-// TODO rename
 /// Convert the syntax node to a `TokenTree` (what macro will consume)
 /// with the censored range excluded.
-pub fn syntax_node_to_token_tree_censored(
+pub fn syntax_node_to_token_tree_with_modifications(
     node: &SyntaxNode,
     replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
     append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
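The first change in this hunk shows that plain `syntax_node_to_token_tree` is now just a thin wrapper forwarding empty maps. Passing the empty maps explicitly is equivalent; a sketch, assuming `FxHashMap` from `rustc-hash` and the `SyntheticToken` type re-exported by `mbe`, with `syntax` and `tt` in scope as in rust-analyzer:

```rust
use rustc_hash::FxHashMap;

// Lowering with no modifications is the same as calling
// `mbe::syntax_node_to_token_tree(node)`, which forwards `Default::default()`
// for both maps. Sketch only; `plain_lowering` is a hypothetical helper.
fn plain_lowering(node: &syntax::SyntaxNode) -> (tt::Subtree, mbe::TokenMap) {
    let replace: FxHashMap<syntax::SyntaxNode, Vec<mbe::SyntheticToken>> = FxHashMap::default();
    let append: FxHashMap<syntax::SyntaxNode, Vec<mbe::SyntheticToken>> = FxHashMap::default();
    mbe::syntax_node_to_token_tree_with_modifications(node, replace, append)
}
```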