Make ast_to_token_tree infallible

It could never return `None`, so reflect that in the return type.
This commit is contained in:
Jonas Schievink 2021-04-04 01:46:45 +02:00
parent bcf600fc88
commit 3abcdc03ba
11 changed files with 32 additions and 41 deletions

View file

@ -8,7 +8,7 @@ fn assert_parse_result(input: &str, expected: CfgExpr) {
let (tt, _) = { let (tt, _) = {
let source_file = ast::SourceFile::parse(input).ok().unwrap(); let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
ast_to_token_tree(&tt).unwrap() ast_to_token_tree(&tt)
}; };
let cfg = CfgExpr::parse(&tt); let cfg = CfgExpr::parse(&tt);
assert_eq!(cfg, expected); assert_eq!(cfg, expected);
@ -18,7 +18,7 @@ fn check_dnf(input: &str, expect: Expect) {
let (tt, _) = { let (tt, _) = {
let source_file = ast::SourceFile::parse(input).ok().unwrap(); let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
ast_to_token_tree(&tt).unwrap() ast_to_token_tree(&tt)
}; };
let cfg = CfgExpr::parse(&tt); let cfg = CfgExpr::parse(&tt);
let actual = format!("#![cfg({})]", DnfExpr::new(cfg)); let actual = format!("#![cfg({})]", DnfExpr::new(cfg));
@ -29,7 +29,7 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
let (tt, _) = { let (tt, _) = {
let source_file = ast::SourceFile::parse(input).ok().unwrap(); let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
ast_to_token_tree(&tt).unwrap() ast_to_token_tree(&tt)
}; };
let cfg = CfgExpr::parse(&tt); let cfg = CfgExpr::parse(&tt);
let dnf = DnfExpr::new(cfg); let dnf = DnfExpr::new(cfg);
@ -42,7 +42,7 @@ fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) {
let (tt, _) = { let (tt, _) = {
let source_file = ast::SourceFile::parse(input).ok().unwrap(); let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
ast_to_token_tree(&tt).unwrap() ast_to_token_tree(&tt)
}; };
let cfg = CfgExpr::parse(&tt); let cfg = CfgExpr::parse(&tt);
let dnf = DnfExpr::new(cfg); let dnf = DnfExpr::new(cfg);

View file

@ -533,7 +533,7 @@ impl Attr {
}; };
Some(AttrInput::Literal(value)) Some(AttrInput::Literal(value))
} else if let Some(tt) = ast.token_tree() { } else if let Some(tt) = ast.token_tree() {
Some(AttrInput::TokenTree(ast_to_token_tree(&tt)?.0)) Some(AttrInput::TokenTree(ast_to_token_tree(&tt).0))
} else { } else {
None None
}; };

View file

@ -584,7 +584,7 @@ mod tests {
}; };
let args = macro_call.token_tree().unwrap(); let args = macro_call.token_tree().unwrap();
let parsed_args = mbe::ast_to_token_tree(&args).unwrap().0; let parsed_args = mbe::ast_to_token_tree(&args).0;
let call_id = AstId::new(file_id.into(), ast_id_map.ast_id(&macro_call)); let call_id = AstId::new(file_id.into(), ast_id_map.ast_id(&macro_call));
let arg_id = db.intern_eager_expansion({ let arg_id = db.intern_eager_expansion({

View file

@ -119,7 +119,7 @@ pub fn expand_hypothetical(
token_to_map: syntax::SyntaxToken, token_to_map: syntax::SyntaxToken,
) -> Option<(SyntaxNode, syntax::SyntaxToken)> { ) -> Option<(SyntaxNode, syntax::SyntaxToken)> {
let macro_file = MacroFile { macro_call_id: actual_macro_call }; let macro_file = MacroFile { macro_call_id: actual_macro_call };
let (tt, tmap_1) = mbe::syntax_node_to_token_tree(hypothetical_args.syntax()).unwrap(); let (tt, tmap_1) = mbe::syntax_node_to_token_tree(hypothetical_args.syntax());
let range = let range =
token_to_map.text_range().checked_sub(hypothetical_args.syntax().text_range().start())?; token_to_map.text_range().checked_sub(hypothetical_args.syntax().text_range().start())?;
let token_id = tmap_1.token_by_range(range)?; let token_id = tmap_1.token_by_range(range)?;
@ -143,10 +143,7 @@ fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option<Arc<(TokenExpander,
MacroDefKind::Declarative(ast_id) => match ast_id.to_node(db) { MacroDefKind::Declarative(ast_id) => match ast_id.to_node(db) {
syntax::ast::Macro::MacroRules(macro_rules) => { syntax::ast::Macro::MacroRules(macro_rules) => {
let arg = macro_rules.token_tree()?; let arg = macro_rules.token_tree()?;
let (tt, tmap) = mbe::ast_to_token_tree(&arg).or_else(|| { let (tt, tmap) = mbe::ast_to_token_tree(&arg);
log::warn!("fail on macro_rules to token tree: {:#?}", arg);
None
})?;
let rules = match MacroRules::parse(&tt) { let rules = match MacroRules::parse(&tt) {
Ok(it) => it, Ok(it) => it,
Err(err) => { Err(err) => {
@ -159,10 +156,7 @@ fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option<Arc<(TokenExpander,
} }
syntax::ast::Macro::MacroDef(macro_def) => { syntax::ast::Macro::MacroDef(macro_def) => {
let arg = macro_def.body()?; let arg = macro_def.body()?;
let (tt, tmap) = mbe::ast_to_token_tree(&arg).or_else(|| { let (tt, tmap) = mbe::ast_to_token_tree(&arg);
log::warn!("fail on macro_def to token tree: {:#?}", arg);
None
})?;
let rules = match MacroDef::parse(&tt) { let rules = match MacroDef::parse(&tt) {
Ok(it) => it, Ok(it) => it,
Err(err) => { Err(err) => {
@ -202,7 +196,7 @@ fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option<GreenNode> {
fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>> { fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>> {
let arg = db.macro_arg_text(id)?; let arg = db.macro_arg_text(id)?;
let (tt, tmap) = mbe::syntax_node_to_token_tree(&SyntaxNode::new_root(arg))?; let (tt, tmap) = mbe::syntax_node_to_token_tree(&SyntaxNode::new_root(arg));
Some(Arc::new((tt, tmap))) Some(Arc::new((tt, tmap)))
} }

View file

@ -106,7 +106,7 @@ pub fn expand_eager_macro(
mut diagnostic_sink: &mut dyn FnMut(mbe::ExpandError), mut diagnostic_sink: &mut dyn FnMut(mbe::ExpandError),
) -> Result<EagerMacroId, ErrorEmitted> { ) -> Result<EagerMacroId, ErrorEmitted> {
let parsed_args = diagnostic_sink.option_with( let parsed_args = diagnostic_sink.option_with(
|| Some(mbe::ast_to_token_tree(&macro_call.value.token_tree()?)?.0), || Some(mbe::ast_to_token_tree(&macro_call.value.token_tree()?).0),
|| err("malformed macro invocation"), || err("malformed macro invocation"),
)?; )?;
@ -161,7 +161,7 @@ pub fn expand_eager_macro(
} }
fn to_subtree(node: &SyntaxNode) -> Option<tt::Subtree> { fn to_subtree(node: &SyntaxNode) -> Option<tt::Subtree> {
let mut subtree = mbe::syntax_node_to_token_tree(node)?.0; let mut subtree = mbe::syntax_node_to_token_tree(node).0;
subtree.delimiter = None; subtree.delimiter = None;
Some(subtree) Some(subtree)
} }

View file

@ -65,7 +65,7 @@ fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree> {
.filter_map(ast::MacroRules::cast) .filter_map(ast::MacroRules::cast)
.map(|rule| { .map(|rule| {
let id = rule.name().unwrap().to_string(); let id = rule.name().unwrap().to_string();
let (def_tt, _) = ast_to_token_tree(&rule.token_tree().unwrap()).unwrap(); let (def_tt, _) = ast_to_token_tree(&rule.token_tree().unwrap());
(id, def_tt) (id, def_tt)
}) })
.collect() .collect()

View file

@ -159,8 +159,7 @@ mod tests {
let macro_definition = let macro_definition =
source_file.syntax().descendants().find_map(ast::MacroRules::cast).unwrap(); source_file.syntax().descendants().find_map(ast::MacroRules::cast).unwrap();
let (definition_tt, _) = let (definition_tt, _) = ast_to_token_tree(&macro_definition.token_tree().unwrap());
ast_to_token_tree(&macro_definition.token_tree().unwrap()).unwrap();
crate::MacroRules::parse(&definition_tt).unwrap() crate::MacroRules::parse(&definition_tt).unwrap()
} }
@ -169,8 +168,7 @@ mod tests {
let macro_invocation = let macro_invocation =
source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
let (invocation_tt, _) = let (invocation_tt, _) = ast_to_token_tree(&macro_invocation.token_tree().unwrap());
ast_to_token_tree(&macro_invocation.token_tree().unwrap()).unwrap();
expand_rules(&rules.rules, &invocation_tt) expand_rules(&rules.rules, &invocation_tt)
} }

View file

@ -43,18 +43,18 @@ pub struct TokenMap {
/// Convert the syntax tree (what user has written) to a `TokenTree` (what macro /// Convert the syntax tree (what user has written) to a `TokenTree` (what macro
/// will consume). /// will consume).
pub fn ast_to_token_tree(ast: &impl ast::AstNode) -> Option<(tt::Subtree, TokenMap)> { pub fn ast_to_token_tree(ast: &impl ast::AstNode) -> (tt::Subtree, TokenMap) {
syntax_node_to_token_tree(ast.syntax()) syntax_node_to_token_tree(ast.syntax())
} }
/// Convert the syntax node to a `TokenTree` (what macro /// Convert the syntax node to a `TokenTree` (what macro
/// will consume). /// will consume).
pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> Option<(tt::Subtree, TokenMap)> { pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) {
let global_offset = node.text_range().start(); let global_offset = node.text_range().start();
let mut c = Convertor::new(node, global_offset); let mut c = Convertor::new(node, global_offset);
let subtree = c.go()?; let subtree = c.go();
c.id_alloc.map.entries.shrink_to_fit(); c.id_alloc.map.entries.shrink_to_fit();
Some((subtree, c.id_alloc.map)) (subtree, c.id_alloc.map)
} }
// The following items are what `rustc` macro can be parsed into : // The following items are what `rustc` macro can be parsed into :
@ -108,7 +108,7 @@ pub fn parse_to_token_tree(text: &str) -> Option<(tt::Subtree, TokenMap)> {
}, },
}; };
let subtree = conv.go()?; let subtree = conv.go();
Some((subtree, conv.id_alloc.map)) Some((subtree, conv.id_alloc.map))
} }
@ -319,7 +319,7 @@ trait SrcToken: std::fmt::Debug {
trait TokenConvertor { trait TokenConvertor {
type Token: SrcToken; type Token: SrcToken;
fn go(&mut self) -> Option<tt::Subtree> { fn go(&mut self) -> tt::Subtree {
let mut subtree = tt::Subtree::default(); let mut subtree = tt::Subtree::default();
subtree.delimiter = None; subtree.delimiter = None;
while self.peek().is_some() { while self.peek().is_some() {
@ -327,10 +327,10 @@ trait TokenConvertor {
} }
if subtree.token_trees.len() == 1 { if subtree.token_trees.len() == 1 {
if let tt::TokenTree::Subtree(first) = &subtree.token_trees[0] { if let tt::TokenTree::Subtree(first) = &subtree.token_trees[0] {
return Some(first.clone()); return first.clone();
} }
} }
Some(subtree) subtree
} }
fn collect_leaf(&mut self, result: &mut Vec<tt::TokenTree>) { fn collect_leaf(&mut self, result: &mut Vec<tt::TokenTree>) {
@ -858,7 +858,7 @@ mod tests {
// - T!['}'] // - T!['}']
// - WHITE_SPACE // - WHITE_SPACE
let token_tree = ast::TokenTree::cast(token_tree).unwrap(); let token_tree = ast::TokenTree::cast(token_tree).unwrap();
let tt = ast_to_token_tree(&token_tree).unwrap().0; let tt = ast_to_token_tree(&token_tree).0;
assert_eq!(tt.delimiter_kind(), Some(tt::DelimiterKind::Brace)); assert_eq!(tt.delimiter_kind(), Some(tt::DelimiterKind::Brace));
} }
@ -867,7 +867,7 @@ mod tests {
fn test_token_tree_multi_char_punct() { fn test_token_tree_multi_char_punct() {
let source_file = ast::SourceFile::parse("struct Foo { a: x::Y }").ok().unwrap(); let source_file = ast::SourceFile::parse("struct Foo { a: x::Y }").ok().unwrap();
let struct_def = source_file.syntax().descendants().find_map(ast::Struct::cast).unwrap(); let struct_def = source_file.syntax().descendants().find_map(ast::Struct::cast).unwrap();
let tt = ast_to_token_tree(&struct_def).unwrap().0; let tt = ast_to_token_tree(&struct_def).0;
token_tree_to_syntax_node(&tt, FragmentKind::Item).unwrap(); token_tree_to_syntax_node(&tt, FragmentKind::Item).unwrap();
} }
} }

View file

@ -29,8 +29,7 @@ macro_rules! impl_fixture {
let macro_invocation = let macro_invocation =
source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
let (invocation_tt, _) = ast_to_token_tree(&macro_invocation.token_tree().unwrap()) let (invocation_tt, _) = ast_to_token_tree(&macro_invocation.token_tree().unwrap());
.ok_or_else(|| ExpandError::ConversionError)?;
self.rules.expand(&invocation_tt).result() self.rules.expand(&invocation_tt).result()
} }
@ -101,7 +100,7 @@ macro_rules! impl_fixture {
.descendants() .descendants()
.find_map(ast::TokenTree::cast) .find_map(ast::TokenTree::cast)
.unwrap(); .unwrap();
let mut wrapped = ast_to_token_tree(&wrapped).unwrap().0; let mut wrapped = ast_to_token_tree(&wrapped).0;
wrapped.delimiter = None; wrapped.delimiter = None;
wrapped wrapped
}; };
@ -151,7 +150,7 @@ pub(crate) fn parse_macro_error(ra_fixture: &str) -> ParseError {
pub(crate) fn parse_to_token_tree_by_syntax(ra_fixture: &str) -> tt::Subtree { pub(crate) fn parse_to_token_tree_by_syntax(ra_fixture: &str) -> tt::Subtree {
let source_file = ast::SourceFile::parse(ra_fixture).ok().unwrap(); let source_file = ast::SourceFile::parse(ra_fixture).ok().unwrap();
let tt = syntax_node_to_token_tree(source_file.syntax()).unwrap().0; let tt = syntax_node_to_token_tree(source_file.syntax()).0;
let parsed = parse_to_token_tree(ra_fixture).unwrap().0; let parsed = parse_to_token_tree(ra_fixture).unwrap().0;
assert_eq!(tt, parsed); assert_eq!(tt, parsed);
@ -164,7 +163,7 @@ fn parse_macro_rules_to_tt(ra_fixture: &str) -> tt::Subtree {
let macro_definition = let macro_definition =
source_file.syntax().descendants().find_map(ast::MacroRules::cast).unwrap(); source_file.syntax().descendants().find_map(ast::MacroRules::cast).unwrap();
let (definition_tt, _) = ast_to_token_tree(&macro_definition.token_tree().unwrap()).unwrap(); let (definition_tt, _) = ast_to_token_tree(&macro_definition.token_tree().unwrap());
let parsed = parse_to_token_tree( let parsed = parse_to_token_tree(
&ra_fixture[macro_definition.token_tree().unwrap().syntax().text_range()], &ra_fixture[macro_definition.token_tree().unwrap().syntax().text_range()],
@ -181,7 +180,7 @@ fn parse_macro_def_to_tt(ra_fixture: &str) -> tt::Subtree {
let macro_definition = let macro_definition =
source_file.syntax().descendants().find_map(ast::MacroDef::cast).unwrap(); source_file.syntax().descendants().find_map(ast::MacroDef::cast).unwrap();
let (definition_tt, _) = ast_to_token_tree(&macro_definition.body().unwrap()).unwrap(); let (definition_tt, _) = ast_to_token_tree(&macro_definition.body().unwrap());
let parsed = let parsed =
parse_to_token_tree(&ra_fixture[macro_definition.body().unwrap().syntax().text_range()]) parse_to_token_tree(&ra_fixture[macro_definition.body().unwrap().syntax().text_range()])

View file

@ -44,6 +44,6 @@ fn parse_macro_arm(arm_definition: &str) -> Result<crate::MacroRules, ParseError
let macro_definition = let macro_definition =
source_file.syntax().descendants().find_map(ast::MacroRules::cast).unwrap(); source_file.syntax().descendants().find_map(ast::MacroRules::cast).unwrap();
let (definition_tt, _) = ast_to_token_tree(&macro_definition.token_tree().unwrap()).unwrap(); let (definition_tt, _) = ast_to_token_tree(&macro_definition.token_tree().unwrap());
crate::MacroRules::parse(&definition_tt) crate::MacroRules::parse(&definition_tt)
} }

View file

@ -201,7 +201,7 @@ mod tests {
let cfg_expr = { let cfg_expr = {
let source_file = ast::SourceFile::parse(cfg).ok().unwrap(); let source_file = ast::SourceFile::parse(cfg).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let (tt, _) = ast_to_token_tree(&tt).unwrap(); let (tt, _) = ast_to_token_tree(&tt);
CfgExpr::parse(&tt) CfgExpr::parse(&tt)
}; };