Make syntax bridge fully infallible

Jonas Schievink 2022-01-24 17:27:39 +01:00
parent 17afa2e778
commit 5088926ec3
5 changed files with 18 additions and 31 deletions
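The core of the change is the signature of token_tree_to_syntax_node — both the mbe bridge itself and the local wrapper that takes an ExpandTo — which no longer returns a Result, so every caller below drops its `?` / `.ok()?` handling. A minimal standalone sketch of that shape follows; it uses stand-in types, not rust-analyzer's real Parse<SyntaxNode>, TokenMap, or tt::Subtree.

// Illustration only: stand-in types so the sketch compiles on its own.
struct Parse;    // stands in for Parse<SyntaxNode>
struct TokenMap; // stands in for mbe::TokenMap
struct Subtree;  // stands in for tt::Subtree

// Before this commit (fallible): every caller needed `?` or `.ok()?`.
#[allow(dead_code)]
fn token_tree_to_syntax_node_old(_tt: &Subtree) -> Result<(Parse, TokenMap), String> {
    Ok((Parse, TokenMap))
}

// After this commit (infallible): the parse and token map come back directly.
fn token_tree_to_syntax_node(_tt: &Subtree) -> (Parse, TokenMap) {
    (Parse, TokenMap)
}

fn main() {
    let tt = Subtree;
    // Call sites simplify to a plain destructuring, as in the hunks below.
    let (_parse, _map) = token_tree_to_syntax_node(&tt);
}

In the real API the returned Parse still records any syntax errors it encountered; the point of the commit is that producing it never fails, so the error plumbing at the call sites becomes dead code and is removed.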

@@ -735,7 +735,7 @@ impl Attr {
hygiene: &Hygiene,
id: AttrId,
) -> Option<Attr> {
-let (parse, _) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem).ok()?;
+let (parse, _) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem);
let ast = ast::Meta::cast(parse.syntax_node())?;
Self::from_src(db, ast, hygiene, id)

@@ -72,7 +72,7 @@ struct BasicAdtInfo {
}
fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, mbe::ExpandError> {
-let (parsed, token_map) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MacroItems)?; // FragmentKind::Items doesn't parse attrs?
+let (parsed, token_map) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MacroItems); // FragmentKind::Items doesn't parse attrs?
let macro_items = ast::MacroItems::cast(parsed.syntax_node()).ok_or_else(|| {
debug!("derive node didn't parse");
mbe::ExpandError::UnexpectedToken

@@ -202,8 +202,7 @@ pub fn expand_speculative(
};
let expand_to = macro_expand_to(db, actual_macro_call);
-let (node, rev_tmap) =
-token_tree_to_syntax_node(&speculative_expansion.value, expand_to).ok()?;
+let (node, rev_tmap) = token_tree_to_syntax_node(&speculative_expansion.value, expand_to);
let range = rev_tmap.first_range_by_token(token_id, token_to_map.kind())?;
let token = node.syntax_node().covering_element(range).into_token()?;
@@ -264,17 +263,7 @@ fn parse_macro_expansion(
tracing::debug!("expanded = {}", tt.as_debug_string());
tracing::debug!("kind = {:?}", expand_to);
-let (parse, rev_token_map) = match token_tree_to_syntax_node(&tt, expand_to) {
-Ok(it) => it,
-Err(err) => {
-tracing::debug!(
-"failed to parse expansion to {:?} = {}",
-expand_to,
-tt.as_debug_string()
-);
-return ExpandResult::only_err(err);
-}
-};
+let (parse, rev_token_map) = token_tree_to_syntax_node(&tt, expand_to);
match result.err {
Some(err) => {
@@ -502,7 +491,7 @@ fn macro_expand_to(db: &dyn AstDatabase, id: MacroCallId) -> ExpandTo {
fn token_tree_to_syntax_node(
tt: &tt::Subtree,
expand_to: ExpandTo,
-) -> Result<(Parse<SyntaxNode>, mbe::TokenMap), ExpandError> {
+) -> (Parse<SyntaxNode>, mbe::TokenMap) {
let entry_point = match expand_to {
ExpandTo::Statements => mbe::TopEntryPoint::MacroStmts,
ExpandTo::Items => mbe::TopEntryPoint::MacroItems,

@@ -104,12 +104,13 @@ pub fn expand_eager_macro(
macro_call: InFile<ast::MacroCall>,
def: MacroDefId,
resolver: &dyn Fn(ast::Path) -> Option<MacroDefId>,
-mut diagnostic_sink: &mut dyn FnMut(mbe::ExpandError),
+diagnostic_sink: &mut dyn FnMut(mbe::ExpandError),
) -> Result<MacroCallId, ErrorEmitted> {
-let parsed_args = diagnostic_sink.option_with(
-|| Some(mbe::syntax_node_to_token_tree(macro_call.value.token_tree()?.syntax()).0),
-|| err("malformed macro invocation"),
-)?;
+let parsed_args = macro_call
+.value
+.token_tree()
+.map(|tt| mbe::syntax_node_to_token_tree(tt.syntax()).0)
+.unwrap_or_default();
let ast_map = db.ast_id_map(macro_call.file_id);
let call_id = InFile::new(macro_call.file_id, ast_map.ast_id(&macro_call.value));
@@ -130,9 +131,7 @@ pub fn expand_eager_macro(
});
let arg_file_id = arg_id;
-let parsed_args = diagnostic_sink
-.result(mbe::token_tree_to_syntax_node(&parsed_args, mbe::TopEntryPoint::Expr))?
-.0;
+let parsed_args = mbe::token_tree_to_syntax_node(&parsed_args, mbe::TopEntryPoint::Expr).0;
let result = eager_macro_recur(
db,
InFile::new(arg_file_id.as_file(), parsed_args.syntax_node()),
@@ -140,8 +139,7 @@ pub fn expand_eager_macro(
resolver,
diagnostic_sink,
)?;
-let subtree =
-diagnostic_sink.option(to_subtree(&result), || err("failed to parse macro result"))?;
+let subtree = to_subtree(&result);
if let MacroDefKind::BuiltInEager(eager, _) = def.kind {
let res = eager.expand(db, arg_id, &subtree);
@@ -165,10 +163,10 @@ pub fn expand_eager_macro(
}
}
-fn to_subtree(node: &SyntaxNode) -> Option<tt::Subtree> {
+fn to_subtree(node: &SyntaxNode) -> tt::Subtree {
let mut subtree = mbe::syntax_node_to_token_tree(node).0;
subtree.delimiter = None;
-Some(subtree)
+subtree
}
fn lazy_expand(

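One behavioral consequence in the expand_eager_macro hunks above: a call without a token tree used to surface "malformed macro invocation" through the diagnostic sink, and now falls back to an empty default subtree. Below is a hedged, self-contained sketch of that unwrap_or_default fallback, with a simplified stand-in Subtree rather than the real tt::Subtree.

#[derive(Default, Debug)]
struct Subtree {
    token_trees: Vec<String>, // simplified stand-in for the real token tree contents
}

// Mirrors the new `parsed_args` construction: map over an optional token
// tree, or fall back to an empty subtree instead of reporting an error.
fn parsed_args(token_tree: Option<&str>) -> Subtree {
    token_tree
        .map(|src| Subtree { token_trees: vec![src.to_owned()] })
        .unwrap_or_default()
}

fn main() {
    println!("{:?}", parsed_args(Some("(1, 2)"))); // a well-formed call keeps its arguments
    println!("{:?}", parsed_args(None));           // a missing token tree now expands with empty args
}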
@@ -10,7 +10,7 @@ use syntax::{
};
use tt::buffer::{Cursor, TokenBuffer};
-use crate::{to_parser_input::to_parser_input, tt_iter::TtIter, ExpandError, TokenMap};
+use crate::{to_parser_input::to_parser_input, tt_iter::TtIter, TokenMap};
/// Convert the syntax node to a `TokenTree` (what macro
/// will consume).
@@ -46,7 +46,7 @@ pub fn syntax_node_to_token_tree_censored(
pub fn token_tree_to_syntax_node(
tt: &tt::Subtree,
entry_point: parser::TopEntryPoint,
-) -> Result<(Parse<SyntaxNode>, TokenMap), ExpandError> {
+) -> (Parse<SyntaxNode>, TokenMap) {
let buffer = match tt {
tt::Subtree { delimiter: None, token_trees } => {
TokenBuffer::from_tokens(token_trees.as_slice())
@@ -67,7 +67,7 @@ pub fn token_tree_to_syntax_node(
}
}
let (parse, range_map) = tree_sink.finish();
-Ok((parse, range_map))
+(parse, range_map)
}
/// Convert a string to a `TokenTree`