internal: remove useless helpers

We generally avoid "syntax only" helper wrappers, which don't do much:
they make code easier to write, but harder to read. They also make
investigations harder, as "find_usages" needs to be invoked for both the
wrapped and unwrapped APIs.
Aleksey Kladov 2021-08-09 15:41:19 +03:00
parent 977fef713e
commit 9aa6be71a5
12 changed files with 32 additions and 36 deletions
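
For context, the helper removed here was a pure forwarding function: the mbe::syntax_bridge hunk below deletes ast_to_token_tree, whose entire body was a call to syntax_node_to_token_tree. A minimal sketch of the pattern being removed and the resulting call-site change (the wrapper is copied from the diff below; the before/after call-site lines are illustrative):

// The "syntax only" wrapper this commit deletes: it adds no behaviour of its
// own, it only hides the `.syntax()` call at each use site.
pub fn ast_to_token_tree(ast: &impl ast::AstNode) -> (tt::Subtree, TokenMap) {
    syntax_node_to_token_tree(ast.syntax())
}

// Typical call site before: let (tt, _) = ast_to_token_tree(&token_tree);
// Typical call site after:  let (tt, _) = syntax_node_to_token_tree(token_tree.syntax());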


@@ -1,5 +1,5 @@
 use expect_test::{expect, Expect};
-use mbe::ast_to_token_tree;
+use mbe::syntax_node_to_token_tree;
 use syntax::{ast, AstNode};
 
 use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr};
@@ -8,7 +8,7 @@ fn assert_parse_result(input: &str, expected: CfgExpr) {
     let (tt, _) = {
         let source_file = ast::SourceFile::parse(input).ok().unwrap();
         let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-        ast_to_token_tree(&tt)
+        syntax_node_to_token_tree(tt.syntax())
     };
     let cfg = CfgExpr::parse(&tt);
     assert_eq!(cfg, expected);
@@ -18,7 +18,7 @@ fn check_dnf(input: &str, expect: Expect) {
     let (tt, _) = {
         let source_file = ast::SourceFile::parse(input).ok().unwrap();
         let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-        ast_to_token_tree(&tt)
+        syntax_node_to_token_tree(tt.syntax())
     };
     let cfg = CfgExpr::parse(&tt);
     let actual = format!("#![cfg({})]", DnfExpr::new(cfg));
@@ -29,7 +29,7 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
     let (tt, _) = {
         let source_file = ast::SourceFile::parse(input).ok().unwrap();
         let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-        ast_to_token_tree(&tt)
+        syntax_node_to_token_tree(tt.syntax())
     };
     let cfg = CfgExpr::parse(&tt);
     let dnf = DnfExpr::new(cfg);
@@ -42,7 +42,7 @@ fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) {
     let (tt, _) = {
         let source_file = ast::SourceFile::parse(input).ok().unwrap();
         let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-        ast_to_token_tree(&tt)
+        syntax_node_to_token_tree(tt.syntax())
     };
     let cfg = CfgExpr::parse(&tt);
     let dnf = DnfExpr::new(cfg);


@@ -12,7 +12,7 @@ use either::Either;
 use hir_expand::{hygiene::Hygiene, name::AsName, AstId, InFile};
 use itertools::Itertools;
 use la_arena::ArenaMap;
-use mbe::{ast_to_token_tree, DelimiterKind};
+use mbe::{syntax_node_to_token_tree, DelimiterKind};
 use smallvec::{smallvec, SmallVec};
 use syntax::{
     ast::{self, AstNode, AttrsOwner},
@@ -679,7 +679,7 @@ impl Attr {
             };
             Some(Interned::new(AttrInput::Literal(value)))
         } else if let Some(tt) = ast.token_tree() {
-            Some(Interned::new(AttrInput::TokenTree(ast_to_token_tree(&tt).0)))
+            Some(Interned::new(AttrInput::TokenTree(syntax_node_to_token_tree(tt.syntax()).0)))
         } else {
             None
         };


@@ -610,7 +610,7 @@ mod tests {
 
         let fragment = crate::to_fragment_kind(&macro_call);
         let args = macro_call.token_tree().unwrap();
-        let parsed_args = mbe::ast_to_token_tree(&args).0;
+        let parsed_args = mbe::syntax_node_to_token_tree(args.syntax()).0;
         let call_id = AstId::new(file_id.into(), ast_id_map.ast_id(&macro_call));
 
         let arg_id = db.intern_macro(MacroCallLoc {


@@ -281,7 +281,7 @@ fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option<Arc<TokenExpander>>
         MacroDefKind::Declarative(ast_id) => match ast_id.to_node(db) {
             ast::Macro::MacroRules(macro_rules) => {
                 let arg = macro_rules.token_tree()?;
-                let (tt, def_site_token_map) = mbe::ast_to_token_tree(&arg);
+                let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
                 let mac = match mbe::MacroRules::parse(&tt) {
                     Ok(it) => it,
                     Err(err) => {
@@ -294,7 +294,7 @@ fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option<Arc<TokenExpander>>
             }
             ast::Macro::MacroDef(macro_def) => {
                 let arg = macro_def.body()?;
-                let (tt, def_site_token_map) = mbe::ast_to_token_tree(&arg);
+                let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
                 let mac = match mbe::MacroDef::parse(&tt) {
                     Ok(it) => it,
                     Err(err) => {


@@ -107,7 +107,7 @@ pub fn expand_eager_macro(
     mut diagnostic_sink: &mut dyn FnMut(mbe::ExpandError),
 ) -> Result<MacroCallId, ErrorEmitted> {
     let parsed_args = diagnostic_sink.option_with(
-        || Some(mbe::ast_to_token_tree(&macro_call.value.token_tree()?).0),
+        || Some(mbe::syntax_node_to_token_tree(&macro_call.value.token_tree()?.syntax()).0),
         || err("malformed macro invocation"),
     )?;
 


@@ -8,9 +8,8 @@ use syntax::{
 use test_utils::{bench, bench_fixture, skip_slow_tests};
 
 use crate::{
-    ast_to_token_tree,
     parser::{Op, RepeatKind, Separator},
-    MacroRules,
+    syntax_node_to_token_tree, MacroRules,
 };
 
 #[test]
@@ -65,7 +64,7 @@ fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree> {
         .filter_map(ast::MacroRules::cast)
         .map(|rule| {
             let id = rule.name().unwrap().to_string();
-            let (def_tt, _) = ast_to_token_tree(&rule.token_tree().unwrap());
+            let (def_tt, _) = syntax_node_to_token_tree(rule.token_tree().unwrap().syntax());
             (id, def_tt)
         })
         .collect()


@@ -120,7 +120,7 @@ mod tests {
     use syntax::{ast, AstNode};
 
     use super::*;
-    use crate::ast_to_token_tree;
+    use crate::syntax_node_to_token_tree;
 
     #[test]
     fn test_expand_rule() {
@@ -159,7 +159,8 @@ mod tests {
         let macro_definition =
             source_file.syntax().descendants().find_map(ast::MacroRules::cast).unwrap();
 
-        let (definition_tt, _) = ast_to_token_tree(&macro_definition.token_tree().unwrap());
+        let (definition_tt, _) =
+            syntax_node_to_token_tree(macro_definition.token_tree().unwrap().syntax());
         crate::MacroRules::parse(&definition_tt).unwrap()
     }
 
@@ -168,7 +169,8 @@ mod tests {
         let macro_invocation =
             source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
 
-        let (invocation_tt, _) = ast_to_token_tree(&macro_invocation.token_tree().unwrap());
+        let (invocation_tt, _) =
+            syntax_node_to_token_tree(macro_invocation.token_tree().unwrap().syntax());
         expand_rules(&rules.rules, &invocation_tt)
     }
 


@@ -66,7 +66,7 @@ impl fmt::Display for ExpandError {
 
 pub use crate::{
     syntax_bridge::{
-        ast_to_token_tree, parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_tree,
+        parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_tree,
         token_tree_to_syntax_node,
     },
     token_map::TokenMap,


@@ -13,12 +13,6 @@ use tt::buffer::{Cursor, TokenBuffer};
 use crate::{subtree_source::SubtreeTokenSource, tt_iter::TtIter};
 use crate::{ExpandError, TokenMap};
 
-/// Convert the syntax tree (what user has written) to a `TokenTree` (what macro
-/// will consume).
-pub fn ast_to_token_tree(ast: &impl ast::AstNode) -> (tt::Subtree, TokenMap) {
-    syntax_node_to_token_tree(ast.syntax())
-}
-
 /// Convert the syntax node to a `TokenTree` (what macro
 /// will consume).
 pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) {
@@ -812,7 +806,7 @@ mod tests {
         // - T!['}']
         // - WHITE_SPACE
         let token_tree = ast::TokenTree::cast(token_tree).unwrap();
-        let tt = ast_to_token_tree(&token_tree).0;
+        let tt = syntax_node_to_token_tree(token_tree.syntax()).0;
 
         assert_eq!(tt.delimiter_kind(), Some(tt::DelimiterKind::Brace));
     }
@@ -821,7 +815,7 @@
     fn test_token_tree_multi_char_punct() {
         let source_file = ast::SourceFile::parse("struct Foo { a: x::Y }").ok().unwrap();
         let struct_def = source_file.syntax().descendants().find_map(ast::Struct::cast).unwrap();
-        let tt = ast_to_token_tree(&struct_def).0;
+        let tt = syntax_node_to_token_tree(struct_def.syntax()).0;
         token_tree_to_syntax_node(&tt, FragmentKind::Item).unwrap();
     }
 
@@ -829,7 +823,7 @@
     fn test_missing_closing_delim() {
         let source_file = ast::SourceFile::parse("m!(x").tree();
         let node = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-        let tt = ast_to_token_tree(&node).0.to_string();
+        let tt = syntax_node_to_token_tree(node.syntax()).0.to_string();
         assert_eq_text!(&*tt, "( x");
     }
 }


@@ -29,7 +29,8 @@ macro_rules! impl_fixture {
                let macro_invocation =
                    source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
 
-                let (invocation_tt, _) = ast_to_token_tree(&macro_invocation.token_tree().unwrap());
+                let (invocation_tt, _) =
+                    syntax_node_to_token_tree(macro_invocation.token_tree().unwrap().syntax());
                self.rules.expand(&invocation_tt).result()
            }
 
@@ -100,7 +101,7 @@ macro_rules! impl_fixture {
                    .descendants()
                    .find_map(ast::TokenTree::cast)
                    .unwrap();
-                let mut wrapped = ast_to_token_tree(&wrapped).0;
+                let mut wrapped = syntax_node_to_token_tree(wrapped.syntax()).0;
                wrapped.delimiter = None;
                wrapped
            };
@@ -163,7 +164,8 @@ fn parse_macro_rules_to_tt(ra_fixture: &str) -> tt::Subtree {
     let macro_definition =
         source_file.syntax().descendants().find_map(ast::MacroRules::cast).unwrap();
 
-    let (definition_tt, _) = ast_to_token_tree(&macro_definition.token_tree().unwrap());
+    let (definition_tt, _) =
+        syntax_node_to_token_tree(macro_definition.token_tree().unwrap().syntax());
 
     let parsed = parse_to_token_tree(
         &ra_fixture[macro_definition.token_tree().unwrap().syntax().text_range()],
@@ -180,7 +182,7 @@ fn parse_macro_def_to_tt(ra_fixture: &str) -> tt::Subtree {
     let macro_definition =
         source_file.syntax().descendants().find_map(ast::MacroDef::cast).unwrap();
 
-    let (definition_tt, _) = ast_to_token_tree(&macro_definition.body().unwrap());
+    let (definition_tt, _) = syntax_node_to_token_tree(macro_definition.body().unwrap().syntax());
 
     let parsed =
         parse_to_token_tree(&ra_fixture[macro_definition.body().unwrap().syntax().text_range()])


@@ -1,7 +1,5 @@
 use syntax::{ast, AstNode};
 
-use crate::ast_to_token_tree;
-
 use super::*;
 
 #[test]
@@ -44,6 +42,7 @@ fn parse_macro_arm(arm_definition: &str) -> Result<crate::MacroRules, ParseError
     let macro_definition =
         source_file.syntax().descendants().find_map(ast::MacroRules::cast).unwrap();
 
-    let (definition_tt, _) = ast_to_token_tree(&macro_definition.token_tree().unwrap());
+    let (definition_tt, _) =
+        syntax_node_to_token_tree(macro_definition.token_tree().unwrap().syntax());
     crate::MacroRules::parse(&definition_tt)
 }


@@ -191,7 +191,7 @@ mod tests {
     use super::*;
 
     use cfg::CfgExpr;
-    use mbe::ast_to_token_tree;
+    use mbe::syntax_node_to_token_tree;
     use syntax::{
         ast::{self, AstNode},
         SmolStr,
@@ -201,7 +201,7 @@ mod tests {
         let cfg_expr = {
             let source_file = ast::SourceFile::parse(cfg).ok().unwrap();
             let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-            let (tt, _) = ast_to_token_tree(&tt);
+            let (tt, _) = syntax_node_to_token_tree(tt.syntax());
             CfgExpr::parse(&tt)
         };