Mirror of https://github.com/rust-lang/rust-analyzer (synced 2024-12-26 13:03:31 +00:00)

Make basic use of spans for macro expansion errors

This commit is contained in:
parent 7beac14cba
commit ae9c553902

24 changed files with 392 additions and 333 deletions
@@ -661,6 +661,7 @@ impl<'a> AssocItemCollector<'a> {
                        self.diagnostics.push(DefDiagnostic::macro_error(
                            self.module_id.local_id,
                            ast_id,
                            (*attr.path).clone(),
                            err,
                        ));
                        continue 'attrs;
@@ -6,8 +6,8 @@ use base_db::CrateId;
use cfg::CfgOptions;
use drop_bomb::DropBomb;
use hir_expand::{
    attrs::RawAttrs, mod_path::ModPath, span_map::SpanMap, ExpandError, ExpandResult, HirFileId,
    InFile, MacroCallId,
    attrs::RawAttrs, mod_path::ModPath, span_map::SpanMap, ExpandError, ExpandErrorKind,
    ExpandResult, HirFileId, InFile, Lookup, MacroCallId,
};
use limit::Limit;
use span::SyntaxContextId;
@@ -160,26 +160,30 @@ impl Expander {
            // so don't return overflow error here to avoid diagnostics duplication.
            cov_mark::hit!(overflow_but_not_me);
            return ExpandResult::ok(None);
        } else if self.recursion_limit.check(self.recursion_depth as usize + 1).is_err() {
            self.recursion_depth = u32::MAX;
            cov_mark::hit!(your_stack_belongs_to_me);
            return ExpandResult::only_err(ExpandError::RecursionOverflow);
        }

        let ExpandResult { value, err } = op(self);
        let Some(call_id) = value else {
            return ExpandResult { value: None, err };
        };
        if self.recursion_limit.check(self.recursion_depth as usize + 1).is_err() {
            self.recursion_depth = u32::MAX;
            cov_mark::hit!(your_stack_belongs_to_me);
            return ExpandResult::only_err(ExpandError::new(
                db.macro_arg_considering_derives(call_id, &call_id.lookup(db.upcast()).kind).2,
                ExpandErrorKind::RecursionOverflow,
            ));
        }

        let macro_file = call_id.as_macro_file();
        let res = db.parse_macro_expansion(macro_file);

        let err = err.or(res.err);
        ExpandResult {
            value: match err {
            value: match &err {
                // If proc-macro is disabled or unresolved, we want to expand to a missing expression
                // instead of an empty tree which might end up in an empty block.
                Some(ExpandError::MissingProcMacroExpander(_)) => None,
                Some(e) if matches!(e.kind(), ExpandErrorKind::MissingProcMacroExpander(_)) => None,
                _ => (|| {
                    let parse = res.value.0.cast::<T>()?;
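Aside: the guard above now has to pick a span for the recursion-overflow error instead of returning a span-less variant. A minimal, self-contained sketch of that pattern — `Span`, `ExpandError`, and `check_recursion` here are simplified stand-ins, not rust-analyzer's real API:

```rust
// Illustrative sketch only: all types are stand-ins, not rust-analyzer's.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct Span {
    start: u32,
    end: u32,
}

#[derive(Debug)]
enum ExpandErrorKind {
    RecursionOverflow,
}

#[derive(Debug)]
struct ExpandError {
    span: Span,
    kind: ExpandErrorKind,
}

struct Expander {
    recursion_depth: u32,
    recursion_limit: u32,
}

impl Expander {
    /// Returns an error pointing at the macro call site once the limit is hit,
    /// instead of a span-less error as before.
    fn check_recursion(&mut self, call_site: Span) -> Result<(), ExpandError> {
        if self.recursion_depth as u64 + 1 > self.recursion_limit as u64 {
            self.recursion_depth = u32::MAX; // poison further expansion
            return Err(ExpandError { span: call_site, kind: ExpandErrorKind::RecursionOverflow });
        }
        self.recursion_depth += 1;
        Ok(())
    }
}

fn main() {
    let mut expander = Expander { recursion_depth: 0, recursion_limit: 1 };
    let call_site = Span { start: 10, end: 20 };
    assert!(expander.check_recursion(call_site).is_ok());
    let err = expander.check_recursion(call_site).unwrap_err();
    println!("error at {:?}: {:?}", err.span, err.kind);
}
```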
@@ -1434,7 +1434,10 @@ impl AsMacroCall for InFile<&ast::MacroCall> {
        });

        let Some((call_site, path)) = path else {
            return Ok(ExpandResult::only_err(ExpandError::other("malformed macro invocation")));
            return Ok(ExpandResult::only_err(ExpandError::other(
                span_map.span_for_range(self.value.syntax().text_range()),
                "malformed macro invocation",
            )));
        };

        macro_call_as_call_id_with_eager(
@@ -1084,7 +1084,7 @@ fn main() {
macro_rules! concat_bytes {}

fn main() {
    let x = /* error: unexpected token in input */b"";
    let x = /* error: unexpected token */b"";
}

"#]],
@@ -1324,6 +1324,7 @@ impl DefCollector<'_> {
                self.def_map.diagnostics.push(DefDiagnostic::macro_error(
                    directive.module_id,
                    ast_id,
                    (**path).clone(),
                    err,
                ));
                return recollect_without(self);
@@ -3,7 +3,7 @@
use std::ops::Not;

use cfg::{CfgExpr, CfgOptions};
use hir_expand::{attrs::AttrId, ExpandError, MacroCallKind};
use hir_expand::{attrs::AttrId, ExpandErrorKind, MacroCallKind};
use la_arena::Idx;
use syntax::ast;
@@ -25,7 +25,7 @@ pub enum DefDiagnosticKind {
    InvalidDeriveTarget { ast: AstId<ast::Item>, id: usize },
    MalformedDerive { ast: AstId<ast::Adt>, id: usize },
    MacroDefError { ast: AstId<ast::Macro>, message: String },
    MacroError { ast: AstId<ast::Item>, err: ExpandError },
    MacroError { ast: AstId<ast::Item>, path: ModPath, err: ExpandErrorKind },
}

#[derive(Clone, Debug, PartialEq, Eq)]
@@ -82,8 +82,13 @@ impl DefDiagnostic {
        Self { in_module: container, kind: DefDiagnosticKind::UnresolvedImport { id, index } }
    }

    pub fn macro_error(container: LocalModuleId, ast: AstId<ast::Item>, err: ExpandError) -> Self {
        Self { in_module: container, kind: DefDiagnosticKind::MacroError { ast, err } }
    pub fn macro_error(
        container: LocalModuleId,
        ast: AstId<ast::Item>,
        path: ModPath,
        err: ExpandErrorKind,
    ) -> Self {
        Self { in_module: container, kind: DefDiagnosticKind::MacroError { ast, path, err } }
    }

    pub fn unconfigured_code(
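Aside: threading the macro path into the diagnostic lets the rendered message be prefixed with the offending macro's name (see the `format!("{}: {message}", path.display(..))` change further down in this diff). A self-contained sketch of that idea, with `ModPath` and the diagnostic type as simplified stand-ins:

```rust
// Sketch only: `ModPath` and `MacroErrorDiagnostic` are plain stand-ins.
#[derive(Debug, Clone)]
struct ModPath(Vec<String>);

impl ModPath {
    fn display(&self) -> String {
        self.0.join("::")
    }
}

#[derive(Debug)]
enum ExpandErrorKind {
    Other(Box<str>),
}

#[derive(Debug)]
struct MacroErrorDiagnostic {
    path: ModPath,
    err: ExpandErrorKind,
}

impl MacroErrorDiagnostic {
    fn message(&self) -> String {
        let rendered = match &self.err {
            ExpandErrorKind::Other(msg) => msg.to_string(),
        };
        // Mirrors the `format!("{}: {message}", path.display(..))` change below.
        format!("{}: {}", self.path.display(), rendered)
    }
}

fn main() {
    let diag = MacroErrorDiagnostic {
        path: ModPath(vec!["serde".into(), "Serialize".into()]),
        err: ExpandErrorKind::Other("proc-macro panicked".into()),
    };
    println!("{}", diag.message());
}
```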
@ -12,8 +12,7 @@ use crate::{
|
|||
builtin::quote::{dollar_crate, quote},
|
||||
db::ExpandDatabase,
|
||||
hygiene::span_with_def_site_ctxt,
|
||||
name,
|
||||
name::{AsName, Name},
|
||||
name::{self, AsName, Name},
|
||||
span_map::ExpansionSpanMap,
|
||||
tt, ExpandError, ExpandResult,
|
||||
};
|
||||
|
@ -129,13 +128,17 @@ impl VariantShape {
|
|||
}
|
||||
}
|
||||
|
||||
fn from(tm: &ExpansionSpanMap, value: Option<FieldList>) -> Result<Self, ExpandError> {
|
||||
fn from(
|
||||
call_site: Span,
|
||||
tm: &ExpansionSpanMap,
|
||||
value: Option<FieldList>,
|
||||
) -> Result<Self, ExpandError> {
|
||||
let r = match value {
|
||||
None => VariantShape::Unit,
|
||||
Some(FieldList::RecordFieldList(it)) => VariantShape::Struct(
|
||||
it.fields()
|
||||
.map(|it| it.name())
|
||||
.map(|it| name_to_token(tm, it))
|
||||
.map(|it| name_to_token(call_site, tm, it))
|
||||
.collect::<Result<_, _>>()?,
|
||||
),
|
||||
Some(FieldList::TupleFieldList(it)) => VariantShape::Tuple(it.fields().count()),
|
||||
|
@ -212,16 +215,17 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandEr
|
|||
parser::Edition::CURRENT_FIXME,
|
||||
);
|
||||
let macro_items = ast::MacroItems::cast(parsed.syntax_node())
|
||||
.ok_or_else(|| ExpandError::other("invalid item definition"))?;
|
||||
let item = macro_items.items().next().ok_or_else(|| ExpandError::other("no item found"))?;
|
||||
.ok_or_else(|| ExpandError::other(call_site, "invalid item definition"))?;
|
||||
let item =
|
||||
macro_items.items().next().ok_or_else(|| ExpandError::other(call_site, "no item found"))?;
|
||||
let adt = &ast::Adt::cast(item.syntax().clone())
|
||||
.ok_or_else(|| ExpandError::other("expected struct, enum or union"))?;
|
||||
.ok_or_else(|| ExpandError::other(call_site, "expected struct, enum or union"))?;
|
||||
let (name, generic_param_list, where_clause, shape) = match adt {
|
||||
ast::Adt::Struct(it) => (
|
||||
it.name(),
|
||||
it.generic_param_list(),
|
||||
it.where_clause(),
|
||||
AdtShape::Struct(VariantShape::from(tm, it.field_list())?),
|
||||
AdtShape::Struct(VariantShape::from(call_site, tm, it.field_list())?),
|
||||
),
|
||||
ast::Adt::Enum(it) => {
|
||||
let default_variant = it
|
||||
|
@ -241,8 +245,8 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandEr
|
|||
.flat_map(|it| it.variants())
|
||||
.map(|it| {
|
||||
Ok((
|
||||
name_to_token(tm, it.name())?,
|
||||
VariantShape::from(tm, it.field_list())?,
|
||||
name_to_token(call_site, tm, it.name())?,
|
||||
VariantShape::from(call_site, tm, it.field_list())?,
|
||||
))
|
||||
})
|
||||
.collect::<Result<_, ExpandError>>()?,
|
||||
|
@ -357,17 +361,18 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandEr
|
|||
)
|
||||
})
|
||||
.collect();
|
||||
let name_token = name_to_token(tm, name)?;
|
||||
let name_token = name_to_token(call_site, tm, name)?;
|
||||
Ok(BasicAdtInfo { name: name_token, shape, param_types, where_clause, associated_types })
|
||||
}
|
||||
|
||||
fn name_to_token(
|
||||
call_site: Span,
|
||||
token_map: &ExpansionSpanMap,
|
||||
name: Option<ast::Name>,
|
||||
) -> Result<tt::Ident, ExpandError> {
|
||||
let name = name.ok_or_else(|| {
|
||||
debug!("parsed item has no name");
|
||||
ExpandError::other("missing name")
|
||||
ExpandError::other(call_site, "missing name")
|
||||
})?;
|
||||
let span = token_map.span_at(name.syntax().text_range().start());
|
||||
|
||||
|
|
|
@@ -460,15 +460,11 @@ fn compile_error_expand(
    let err = match &*tt.token_trees {
        [tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
            symbol: text,
            span: _,
            span,
            kind: tt::LitKind::Str | tt::LitKind::StrRaw(_),
            suffix: _,
        }))] =>
        // FIXME: Use the span here!
        {
            ExpandError::other(Box::from(unescape_str(text).as_str()))
        }
        _ => ExpandError::other("`compile_error!` argument must be a string"),
        }))] => ExpandError::other(*span, Box::from(unescape_str(text).as_str())),
        _ => ExpandError::other(span, "`compile_error!` argument must be a string"),
    };

    ExpandResult { value: quote! {span =>}, err: Some(err) }
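Aside: the point of this hunk is to blame the argument's own span when there is one, rather than the whole invocation. A stand-alone sketch of that choice, with all types as simplified stand-ins:

```rust
// Sketch only: `Span` and `Literal` are stand-ins for the real token types.
#[derive(Debug, Clone, Copy)]
struct Span {
    start: u32,
    end: u32,
}

struct Literal {
    text: String,
    span: Span,
}

fn compile_error_message(arg: Option<&Literal>, call_site: Span) -> (String, Span) {
    match arg {
        // Use the literal's span so the squiggle lands on the string itself.
        Some(lit) => (lit.text.clone(), lit.span),
        // No usable argument: the best we can do is the call site.
        None => ("`compile_error!` argument must be a string".to_owned(), call_site),
    }
}

fn main() {
    let call_site = Span { start: 0, end: 30 };
    let lit = Literal { text: "boom".to_owned(), span: Span { start: 15, end: 21 } };
    println!("{:?}", compile_error_message(Some(&lit), call_site));
    println!("{:?}", compile_error_message(None, call_site));
}
```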
@ -478,7 +474,7 @@ fn concat_expand(
|
|||
_db: &dyn ExpandDatabase,
|
||||
_arg_id: MacroCallId,
|
||||
tt: &tt::Subtree,
|
||||
_: Span,
|
||||
call_site: Span,
|
||||
) -> ExpandResult<tt::Subtree> {
|
||||
let mut err = None;
|
||||
let mut text = String::new();
|
||||
|
@ -527,7 +523,9 @@ fn concat_expand(
|
|||
| tt::LitKind::ByteStrRaw(_)
|
||||
| tt::LitKind::CStr
|
||||
| tt::LitKind::CStrRaw(_)
|
||||
| tt::LitKind::Err(_) => err = Some(ExpandError::other("unexpected literal")),
|
||||
| tt::LitKind::Err(_) => {
|
||||
err = Some(ExpandError::other(it.span, "unexpected literal"))
|
||||
}
|
||||
}
|
||||
}
|
||||
// handle boolean literals
|
||||
|
@ -539,7 +537,7 @@ fn concat_expand(
|
|||
}
|
||||
tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (),
|
||||
_ => {
|
||||
err.get_or_insert(mbe::ExpandError::UnexpectedToken.into());
|
||||
err.get_or_insert(ExpandError::other(call_site, "unexpected token"));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -551,7 +549,7 @@ fn concat_bytes_expand(
|
|||
_db: &dyn ExpandDatabase,
|
||||
_arg_id: MacroCallId,
|
||||
tt: &tt::Subtree,
|
||||
_: Span,
|
||||
call_site: Span,
|
||||
) -> ExpandResult<tt::Subtree> {
|
||||
let mut bytes = String::new();
|
||||
let mut err = None;
|
||||
|
@ -585,20 +583,22 @@ fn concat_bytes_expand(
|
|||
bytes.extend(text.as_str().escape_debug());
|
||||
}
|
||||
_ => {
|
||||
err.get_or_insert(mbe::ExpandError::UnexpectedToken.into());
|
||||
err.get_or_insert(ExpandError::other(*span, "unexpected token"));
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (),
|
||||
tt::TokenTree::Subtree(tree) if tree.delimiter.kind == tt::DelimiterKind::Bracket => {
|
||||
if let Err(e) = concat_bytes_expand_subtree(tree, &mut bytes, &mut record_span) {
|
||||
if let Err(e) =
|
||||
concat_bytes_expand_subtree(tree, &mut bytes, &mut record_span, call_site)
|
||||
{
|
||||
err.get_or_insert(e);
|
||||
break;
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
err.get_or_insert(mbe::ExpandError::UnexpectedToken.into());
|
||||
err.get_or_insert(ExpandError::other(call_site, "unexpected token"));
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
@ -623,6 +623,7 @@ fn concat_bytes_expand_subtree(
|
|||
tree: &tt::Subtree,
|
||||
bytes: &mut String,
|
||||
mut record_span: impl FnMut(Span),
|
||||
err_span: Span,
|
||||
) -> Result<(), ExpandError> {
|
||||
for (ti, tt) in tree.token_trees.iter().enumerate() {
|
||||
match tt {
|
||||
|
@ -650,7 +651,7 @@ fn concat_bytes_expand_subtree(
|
|||
}
|
||||
tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if ti % 2 == 1 && punct.char == ',' => (),
|
||||
_ => {
|
||||
return Err(mbe::ExpandError::UnexpectedToken.into());
|
||||
return Err(ExpandError::other(err_span, "unexpected token"));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -672,7 +673,7 @@ fn concat_idents_expand(
|
|||
}
|
||||
tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (),
|
||||
_ => {
|
||||
err.get_or_insert(mbe::ExpandError::UnexpectedToken.into());
|
||||
err.get_or_insert(ExpandError::other(span, "unexpected token"));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -686,16 +687,17 @@ fn relative_file(
|
|||
call_id: MacroCallId,
|
||||
path_str: &str,
|
||||
allow_recursion: bool,
|
||||
err_span: Span,
|
||||
) -> Result<EditionedFileId, ExpandError> {
|
||||
let lookup = call_id.lookup(db);
|
||||
let call_site = lookup.kind.file_id().original_file_respecting_includes(db).file_id();
|
||||
let path = AnchoredPath { anchor: call_site, path: path_str };
|
||||
let res = db
|
||||
.resolve_path(path)
|
||||
.ok_or_else(|| ExpandError::other(format!("failed to load file `{path_str}`")))?;
|
||||
.ok_or_else(|| ExpandError::other(err_span, format!("failed to load file `{path_str}`")))?;
|
||||
// Prevent include itself
|
||||
if res == call_site && !allow_recursion {
|
||||
Err(ExpandError::other(format!("recursive inclusion of `{path_str}`")))
|
||||
Err(ExpandError::other(err_span, format!("recursive inclusion of `{path_str}`")))
|
||||
} else {
|
||||
Ok(EditionedFileId::new(res, db.crate_graph()[lookup.krate].edition))
|
||||
}
|
||||
|
@ -727,7 +729,7 @@ fn parse_string(tt: &tt::Subtree) -> Result<(Symbol, Span), ExpandError> {
|
|||
}
|
||||
_ => None,
|
||||
})
|
||||
.ok_or(mbe::ExpandError::ConversionError.into())
|
||||
.ok_or(ExpandError::other(tt.delimiter.open, "expected string literal"))
|
||||
}
|
||||
|
||||
fn include_expand(
|
||||
|
@ -751,7 +753,7 @@ fn include_expand(
|
|||
Some(it) => ExpandResult::ok(it),
|
||||
None => ExpandResult::new(
|
||||
tt::Subtree::empty(DelimSpan { open: span, close: span }),
|
||||
ExpandError::other("failed to parse included file"),
|
||||
ExpandError::other(span, "failed to parse included file"),
|
||||
),
|
||||
}
|
||||
}
|
||||
|
@ -761,7 +763,7 @@ pub fn include_input_to_file_id(
|
|||
arg_id: MacroCallId,
|
||||
arg: &tt::Subtree,
|
||||
) -> Result<EditionedFileId, ExpandError> {
|
||||
relative_file(db, arg_id, parse_string(arg)?.0.as_str(), false)
|
||||
relative_file(db, arg_id, parse_string(arg)?.0.as_str(), false, arg.delimiter.open)
|
||||
}
|
||||
|
||||
fn include_bytes_expand(
|
||||
|
@ -800,7 +802,7 @@ fn include_str_expand(
|
|||
// it's unusual to `include_str!` a Rust file), but we can return an empty string.
|
||||
// Ideally, we'd be able to offer a precise expansion if the user asks for macro
|
||||
// expansion.
|
||||
let file_id = match relative_file(db, arg_id, path.as_str(), true) {
|
||||
let file_id = match relative_file(db, arg_id, path.as_str(), true, span) {
|
||||
Ok(file_id) => file_id,
|
||||
Err(_) => {
|
||||
return ExpandResult::ok(quote!(span =>""));
|
||||
|
@ -836,7 +838,10 @@ fn env_expand(
|
|||
// The only variable rust-analyzer ever sets is `OUT_DIR`, so only diagnose that to avoid
|
||||
// unnecessary diagnostics for eg. `CARGO_PKG_NAME`.
|
||||
if key.as_str() == "OUT_DIR" {
|
||||
err = Some(ExpandError::other(r#"`OUT_DIR` not set, enable "build scripts" to fix"#));
|
||||
err = Some(ExpandError::other(
|
||||
span,
|
||||
r#"`OUT_DIR` not set, enable "build scripts" to fix"#,
|
||||
));
|
||||
}
|
||||
|
||||
// If the variable is unset, still return a dummy string to help type inference along.
|
||||
|
@ -885,7 +890,7 @@ fn quote_expand(
|
|||
) -> ExpandResult<tt::Subtree> {
|
||||
ExpandResult::new(
|
||||
tt::Subtree::empty(tt::DelimSpan { open: span, close: span }),
|
||||
ExpandError::other("quote! is not implemented"),
|
||||
ExpandError::other(span, "quote! is not implemented"),
|
||||
)
|
||||
}
|
||||
|
||||
|
|
|
@ -259,39 +259,38 @@ pub fn expand_speculative(
|
|||
|
||||
// Do the actual expansion, we need to directly expand the proc macro due to the attribute args
|
||||
// Otherwise the expand query will fetch the non speculative attribute args and pass those instead.
|
||||
let mut speculative_expansion =
|
||||
match loc.def.kind {
|
||||
MacroDefKind::ProcMacro(ast, expander, _) => {
|
||||
let span = db.proc_macro_span(ast);
|
||||
tt.delimiter = tt::Delimiter::invisible_spanned(span);
|
||||
expander.expand(
|
||||
db,
|
||||
loc.def.krate,
|
||||
loc.krate,
|
||||
&tt,
|
||||
attr_arg.as_ref(),
|
||||
span_with_def_site_ctxt(db, span, actual_macro_call),
|
||||
span_with_call_site_ctxt(db, span, actual_macro_call),
|
||||
span_with_mixed_site_ctxt(db, span, actual_macro_call),
|
||||
)
|
||||
}
|
||||
MacroDefKind::BuiltInAttr(_, it) if it.is_derive() => {
|
||||
pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?, span)
|
||||
}
|
||||
MacroDefKind::Declarative(it) => db
|
||||
.decl_macro_expander(loc.krate, it)
|
||||
.expand_unhygienic(db, tt, loc.def.krate, span, loc.def.edition),
|
||||
MacroDefKind::BuiltIn(_, it) => {
|
||||
it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
|
||||
}
|
||||
MacroDefKind::BuiltInDerive(_, it) => {
|
||||
it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
|
||||
}
|
||||
MacroDefKind::BuiltInEager(_, it) => {
|
||||
it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
|
||||
}
|
||||
MacroDefKind::BuiltInAttr(_, it) => it.expand(db, actual_macro_call, &tt, span),
|
||||
};
|
||||
let mut speculative_expansion = match loc.def.kind {
|
||||
MacroDefKind::ProcMacro(ast, expander, _) => {
|
||||
let span = db.proc_macro_span(ast);
|
||||
tt.delimiter = tt::Delimiter::invisible_spanned(span);
|
||||
expander.expand(
|
||||
db,
|
||||
loc.def.krate,
|
||||
loc.krate,
|
||||
&tt,
|
||||
attr_arg.as_ref(),
|
||||
span_with_def_site_ctxt(db, span, actual_macro_call),
|
||||
span_with_call_site_ctxt(db, span, actual_macro_call),
|
||||
span_with_mixed_site_ctxt(db, span, actual_macro_call),
|
||||
)
|
||||
}
|
||||
MacroDefKind::BuiltInAttr(_, it) if it.is_derive() => {
|
||||
pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?, span)
|
||||
}
|
||||
MacroDefKind::Declarative(it) => {
|
||||
db.decl_macro_expander(loc.krate, it).expand_unhygienic(tt, span, loc.def.edition)
|
||||
}
|
||||
MacroDefKind::BuiltIn(_, it) => {
|
||||
it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
|
||||
}
|
||||
MacroDefKind::BuiltInDerive(_, it) => {
|
||||
it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
|
||||
}
|
||||
MacroDefKind::BuiltInEager(_, it) => {
|
||||
it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
|
||||
}
|
||||
MacroDefKind::BuiltInAttr(_, it) => it.expand(db, actual_macro_call, &tt, span),
|
||||
};
|
||||
|
||||
let expand_to = loc.expand_to();
|
||||
|
||||
|
@ -735,11 +734,14 @@ fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<()>> {
|
|||
if TOKEN_LIMIT.check(count).is_err() {
|
||||
Err(ExpandResult {
|
||||
value: (),
|
||||
err: Some(ExpandError::other(format!(
|
||||
"macro invocation exceeds token limit: produced {} tokens, limit is {}",
|
||||
count,
|
||||
TOKEN_LIMIT.inner(),
|
||||
))),
|
||||
err: Some(ExpandError::other(
|
||||
tt.delimiter.open,
|
||||
format!(
|
||||
"macro invocation exceeds token limit: produced {} tokens, limit is {}",
|
||||
count,
|
||||
TOKEN_LIMIT.inner(),
|
||||
),
|
||||
)),
|
||||
})
|
||||
} else {
|
||||
Ok(())
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
//! Compiled declarative macro expanders (`macro_rules!`` and `macro`)
|
||||
use std::sync::OnceLock;
|
||||
|
||||
use base_db::{CrateId, VersionReq};
|
||||
use base_db::CrateId;
|
||||
use intern::sym;
|
||||
use mbe::DocCommentDesugarMode;
|
||||
use span::{Edition, MacroCallId, Span, SyntaxContextId};
|
||||
|
@ -13,7 +12,7 @@ use crate::{
|
|||
attrs::RawAttrs,
|
||||
db::ExpandDatabase,
|
||||
hygiene::{apply_mark, Transparency},
|
||||
tt, AstId, ExpandError, ExpandResult, Lookup,
|
||||
tt, AstId, ExpandError, ExpandErrorKind, ExpandResult, Lookup,
|
||||
};
|
||||
|
||||
/// Old-style `macro_rules` or the new macros 2.0
|
||||
|
@ -23,9 +22,6 @@ pub struct DeclarativeMacroExpander {
|
|||
pub transparency: Transparency,
|
||||
}
|
||||
|
||||
// FIXME: Remove this once we drop support for 1.76
|
||||
static REQUIREMENT: OnceLock<VersionReq> = OnceLock::new();
|
||||
|
||||
impl DeclarativeMacroExpander {
|
||||
pub fn expand(
|
||||
&self,
|
||||
|
@ -35,29 +31,16 @@ impl DeclarativeMacroExpander {
|
|||
span: Span,
|
||||
) -> ExpandResult<(tt::Subtree, Option<u32>)> {
|
||||
let loc = db.lookup_intern_macro_call(call_id);
|
||||
let toolchain = db.toolchain(loc.def.krate);
|
||||
let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
|
||||
REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(
|
||||
&base_db::Version {
|
||||
pre: base_db::Prerelease::EMPTY,
|
||||
build: base_db::BuildMetadata::EMPTY,
|
||||
major: version.major,
|
||||
minor: version.minor,
|
||||
patch: version.patch,
|
||||
},
|
||||
)
|
||||
});
|
||||
match self.mac.err() {
|
||||
Some(_) => ExpandResult::new(
|
||||
(tt::Subtree::empty(tt::DelimSpan { open: span, close: span }), None),
|
||||
ExpandError::MacroDefinition,
|
||||
ExpandError::new(span, ExpandErrorKind::MacroDefinition),
|
||||
),
|
||||
None => self
|
||||
.mac
|
||||
.expand(
|
||||
&tt,
|
||||
|s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency),
|
||||
new_meta_vars,
|
||||
span,
|
||||
loc.def.edition,
|
||||
)
|
||||
|
@ -67,32 +50,18 @@ impl DeclarativeMacroExpander {
|
|||
|
||||
pub fn expand_unhygienic(
|
||||
&self,
|
||||
db: &dyn ExpandDatabase,
|
||||
tt: tt::Subtree,
|
||||
krate: CrateId,
|
||||
call_site: Span,
|
||||
def_site_edition: Edition,
|
||||
) -> ExpandResult<tt::Subtree> {
|
||||
let toolchain = db.toolchain(krate);
|
||||
let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
|
||||
REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(
|
||||
&base_db::Version {
|
||||
pre: base_db::Prerelease::EMPTY,
|
||||
build: base_db::BuildMetadata::EMPTY,
|
||||
major: version.major,
|
||||
minor: version.minor,
|
||||
patch: version.patch,
|
||||
},
|
||||
)
|
||||
});
|
||||
match self.mac.err() {
|
||||
Some(_) => ExpandResult::new(
|
||||
tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
|
||||
ExpandError::MacroDefinition,
|
||||
ExpandError::new(call_site, ExpandErrorKind::MacroDefinition),
|
||||
),
|
||||
None => self
|
||||
.mac
|
||||
.expand(&tt, |_| (), new_meta_vars, call_site, def_site_edition)
|
||||
.expand(&tt, |_| (), call_site, def_site_edition)
|
||||
.map(TupleExt::head)
|
||||
.map_err(Into::into),
|
||||
}
|
||||
|
|
|
@ -176,14 +176,19 @@ fn eager_macro_recur(
|
|||
Some(path) => match macro_resolver(&path) {
|
||||
Some(def) => def,
|
||||
None => {
|
||||
error =
|
||||
Some(ExpandError::other(format!("unresolved macro {}", path.display(db))));
|
||||
error = Some(ExpandError::other(
|
||||
span_map.span_at(call.syntax().text_range().start()),
|
||||
format!("unresolved macro {}", path.display(db)),
|
||||
));
|
||||
offset += call.syntax().text_range().len();
|
||||
continue;
|
||||
}
|
||||
},
|
||||
None => {
|
||||
error = Some(ExpandError::other("malformed macro invocation"));
|
||||
error = Some(ExpandError::other(
|
||||
span_map.span_at(call.syntax().text_range().start()),
|
||||
"malformed macro invocation",
|
||||
));
|
||||
offset += call.syntax().text_range().len();
|
||||
continue;
|
||||
}
|
||||
|
|
|
@@ -124,47 +124,77 @@ impl_intern_lookup!(
pub type ExpandResult<T> = ValueResult<T, ExpandError>;

#[derive(Debug, PartialEq, Eq, Clone, Hash)]
pub enum ExpandError {
    ProcMacroAttrExpansionDisabled,
    MissingProcMacroExpander(CrateId),
    /// The macro expansion is disabled.
    MacroDisabled,
    MacroDefinition,
    Mbe(mbe::ExpandError),
    RecursionOverflow,
    Other(Arc<Box<str>>),
    ProcMacroPanic(Arc<Box<str>>),
pub struct ExpandError {
    inner: Arc<(ExpandErrorKind, Span)>,
}

impl ExpandError {
    pub fn other(msg: impl Into<Box<str>>) -> Self {
        ExpandError::Other(Arc::new(msg.into()))
    pub fn new(span: Span, kind: ExpandErrorKind) -> Self {
        ExpandError { inner: Arc::new((kind, span)) }
    }
    pub fn other(span: Span, msg: impl Into<Box<str>>) -> Self {
        ExpandError { inner: Arc::new((ExpandErrorKind::Other(msg.into()), span)) }
    }
    pub fn kind(&self) -> &ExpandErrorKind {
        &self.inner.0
    }
    pub fn span(&self) -> Span {
        self.inner.1
    }
}

#[derive(Debug, PartialEq, Eq, Clone, Hash)]
pub enum ExpandErrorKind {
    /// Attribute macro expansion is disabled.
    ProcMacroAttrExpansionDisabled,
    MissingProcMacroExpander(CrateId),
    /// The macro for this call is disabled.
    MacroDisabled,
    /// The macro definition has errors.
    MacroDefinition,
    Mbe(mbe::ExpandErrorKind),
    RecursionOverflow,
    Other(Box<str>),
    ProcMacroPanic(Box<str>),
}

impl ExpandError {
    pub fn render_to_string(&self, db: &dyn ExpandDatabase) -> (String, bool) {
        self.inner.0.render_to_string(db)
    }
}

impl ExpandErrorKind {
    pub fn render_to_string(&self, db: &dyn ExpandDatabase) -> (String, bool) {
        match self {
            Self::ProcMacroAttrExpansionDisabled => {
            ExpandErrorKind::ProcMacroAttrExpansionDisabled => {
                ("procedural attribute macro expansion is disabled".to_owned(), false)
            }
            Self::MacroDisabled => ("proc-macro is explicitly disabled".to_owned(), false),
            &Self::MissingProcMacroExpander(def_crate) => {
            ExpandErrorKind::MacroDisabled => {
                ("proc-macro is explicitly disabled".to_owned(), false)
            }
            &ExpandErrorKind::MissingProcMacroExpander(def_crate) => {
                match db.proc_macros().get_error_for_crate(def_crate) {
                    Some((e, hard_err)) => (e.to_owned(), hard_err),
                    None => ("missing expander".to_owned(), true),
                }
            }
            Self::MacroDefinition => ("macro definition has parse errors".to_owned(), true),
            Self::Mbe(e) => (e.to_string(), true),
            Self::RecursionOverflow => ("overflow expanding the original macro".to_owned(), true),
            Self::Other(e) => ((***e).to_owned(), true),
            Self::ProcMacroPanic(e) => ((***e).to_owned(), true),
            ExpandErrorKind::MacroDefinition => {
                ("macro definition has parse errors".to_owned(), true)
            }
            ExpandErrorKind::Mbe(e) => (e.to_string(), true),
            ExpandErrorKind::RecursionOverflow => {
                ("overflow expanding the original macro".to_owned(), true)
            }
            ExpandErrorKind::Other(e) => ((**e).to_owned(), true),
            ExpandErrorKind::ProcMacroPanic(e) => ((**e).to_owned(), true),
        }
    }
}

impl From<mbe::ExpandError> for ExpandError {
    fn from(mbe: mbe::ExpandError) -> Self {
        Self::Mbe(mbe)
        ExpandError { inner: Arc::new((ExpandErrorKind::Mbe(mbe.inner.1.clone()), mbe.inner.0)) }
    }
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
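Aside: for orientation, here is a self-contained sketch of the error shape introduced above — a cheaply clonable `(kind, span)` pair behind an `Arc` — with a stand-in `Span` type and none of the database-backed rendering the real `hir-expand` type has:

```rust
use std::sync::Arc;

// Stand-in span; the real one is `span::Span` with an anchor and a relative range.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
struct Span {
    start: u32,
    end: u32,
}

#[derive(Debug, PartialEq, Eq, Clone, Hash)]
enum ExpandErrorKind {
    RecursionOverflow,
    Other(Box<str>),
}

// Cloning is cheap: only the `Arc` pointer is copied.
#[derive(Debug, PartialEq, Eq, Clone, Hash)]
struct ExpandError {
    inner: Arc<(ExpandErrorKind, Span)>,
}

impl ExpandError {
    fn new(span: Span, kind: ExpandErrorKind) -> Self {
        ExpandError { inner: Arc::new((kind, span)) }
    }
    fn other(span: Span, msg: impl Into<Box<str>>) -> Self {
        ExpandError { inner: Arc::new((ExpandErrorKind::Other(msg.into()), span)) }
    }
    fn kind(&self) -> &ExpandErrorKind {
        &self.inner.0
    }
    fn span(&self) -> Span {
        self.inner.1
    }
}

fn main() {
    let err = ExpandError::other(Span { start: 4, end: 9 }, "unexpected token");
    // Diagnostics can now point at `err.span()` instead of the whole macro call.
    println!("{:?} at {:?}", err.kind(), err.span());
    let _ = ExpandError::new(Span { start: 0, end: 1 }, ExpandErrorKind::RecursionOverflow);
}
```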
@ -7,9 +7,8 @@ use base_db::{CrateId, Env};
|
|||
use intern::Symbol;
|
||||
use rustc_hash::FxHashMap;
|
||||
use span::Span;
|
||||
use triomphe::Arc;
|
||||
|
||||
use crate::{db::ExpandDatabase, tt, ExpandError, ExpandResult};
|
||||
use crate::{db::ExpandDatabase, tt, ExpandError, ExpandErrorKind, ExpandResult};
|
||||
|
||||
#[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)]
|
||||
pub enum ProcMacroKind {
|
||||
|
@ -76,15 +75,18 @@ impl FromIterator<(CrateId, ProcMacroLoadResult)> for ProcMacros {
|
|||
}
|
||||
|
||||
impl ProcMacros {
|
||||
fn get(&self, krate: CrateId, idx: u32) -> Result<&ProcMacro, ExpandError> {
|
||||
fn get(&self, krate: CrateId, idx: u32, err_span: Span) -> Result<&ProcMacro, ExpandError> {
|
||||
let proc_macros = match self.0.get(&krate) {
|
||||
Some(Ok(proc_macros)) => proc_macros,
|
||||
Some(Err(_)) | None => {
|
||||
return Err(ExpandError::other("internal error: no proc macros for crate"));
|
||||
return Err(ExpandError::other(
|
||||
err_span,
|
||||
"internal error: no proc macros for crate",
|
||||
));
|
||||
}
|
||||
};
|
||||
proc_macros.get(idx as usize).ok_or_else(|| {
|
||||
ExpandError::other(
|
||||
ExpandError::other(err_span,
|
||||
format!(
|
||||
"internal error: proc-macro index out of bounds: the length is {} but the index is {}",
|
||||
proc_macros.len(),
|
||||
|
@ -184,11 +186,11 @@ impl CustomProcMacroExpander {
|
|||
}
|
||||
|
||||
/// The macro is explicitly disabled due to proc-macro attribute expansion being disabled.
|
||||
pub const fn as_expand_error(&self, def_crate: CrateId) -> Option<ExpandError> {
|
||||
pub fn as_expand_error(&self, def_crate: CrateId) -> Option<ExpandErrorKind> {
|
||||
match self.proc_macro_id {
|
||||
Self::PROC_MACRO_ATTR_DISABLED => Some(ExpandError::ProcMacroAttrExpansionDisabled),
|
||||
Self::DISABLED_ID => Some(ExpandError::MacroDisabled),
|
||||
Self::MISSING_EXPANDER => Some(ExpandError::MissingProcMacroExpander(def_crate)),
|
||||
Self::PROC_MACRO_ATTR_DISABLED => Some(ExpandErrorKind::ProcMacroAttrExpansionDisabled),
|
||||
Self::DISABLED_ID => Some(ExpandErrorKind::MacroDisabled),
|
||||
Self::MISSING_EXPANDER => Some(ExpandErrorKind::MissingProcMacroExpander(def_crate)),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
@ -207,19 +209,19 @@ impl CustomProcMacroExpander {
|
|||
match self.proc_macro_id {
|
||||
Self::PROC_MACRO_ATTR_DISABLED => ExpandResult::new(
|
||||
tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
|
||||
ExpandError::ProcMacroAttrExpansionDisabled,
|
||||
ExpandError::new(call_site, ExpandErrorKind::ProcMacroAttrExpansionDisabled),
|
||||
),
|
||||
Self::MISSING_EXPANDER => ExpandResult::new(
|
||||
tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
|
||||
ExpandError::MissingProcMacroExpander(def_crate),
|
||||
ExpandError::new(call_site, ExpandErrorKind::MissingProcMacroExpander(def_crate)),
|
||||
),
|
||||
Self::DISABLED_ID => ExpandResult::new(
|
||||
tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
|
||||
ExpandError::MacroDisabled,
|
||||
ExpandError::new(call_site, ExpandErrorKind::MacroDisabled),
|
||||
),
|
||||
id => {
|
||||
let proc_macros = db.proc_macros();
|
||||
let proc_macro = match proc_macros.get(def_crate, id) {
|
||||
let proc_macro = match proc_macros.get(def_crate, id, call_site) {
|
||||
Ok(proc_macro) => proc_macro,
|
||||
Err(e) => {
|
||||
return ExpandResult::new(
|
||||
|
@ -240,12 +242,18 @@ impl CustomProcMacroExpander {
|
|||
ProcMacroExpansionError::System(text)
|
||||
if proc_macro.kind == ProcMacroKind::Attr =>
|
||||
{
|
||||
ExpandResult { value: tt.clone(), err: Some(ExpandError::other(text)) }
|
||||
ExpandResult {
|
||||
value: tt.clone(),
|
||||
err: Some(ExpandError::other(call_site, text)),
|
||||
}
|
||||
}
|
||||
ProcMacroExpansionError::System(text)
|
||||
| ProcMacroExpansionError::Panic(text) => ExpandResult::new(
|
||||
tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
|
||||
ExpandError::ProcMacroPanic(Arc::new(text.into_boxed_str())),
|
||||
ExpandError::new(
|
||||
call_site,
|
||||
ExpandErrorKind::ProcMacroPanic(text.into_boxed_str()),
|
||||
),
|
||||
),
|
||||
},
|
||||
}
|
||||
|
|
|
@ -833,15 +833,23 @@ fn macro_call_diagnostics(
|
|||
let ValueResult { value: parse_errors, err } = &*e;
|
||||
if let Some(err) = err {
|
||||
let loc = db.lookup_intern_macro_call(macro_call_id);
|
||||
let (node, precise_location, _macro_name, _kind) =
|
||||
precise_macro_call_location(&loc.kind, db);
|
||||
let file_id = loc.kind.file_id();
|
||||
let node =
|
||||
InFile::new(file_id, db.ast_id_map(file_id).get_erased(loc.kind.erased_ast_id()));
|
||||
let (message, error) = err.render_to_string(db.upcast());
|
||||
let precise_location = Some(
|
||||
err.span().range
|
||||
+ db.ast_id_map(err.span().anchor.file_id.into())
|
||||
.get_erased(err.span().anchor.ast_id)
|
||||
.text_range()
|
||||
.start(),
|
||||
);
|
||||
acc.push(MacroError { node, precise_location, message, error }.into());
|
||||
}
|
||||
|
||||
if !parse_errors.is_empty() {
|
||||
let loc = db.lookup_intern_macro_call(macro_call_id);
|
||||
let (node, precise_location, _, _) = precise_macro_call_location(&loc.kind, db);
|
||||
let (node, precise_location) = precise_macro_call_location(&loc.kind, db);
|
||||
acc.push(
|
||||
MacroExpansionParseError { node, precise_location, errors: parse_errors.clone() }
|
||||
.into(),
|
||||
|
@ -891,14 +899,14 @@ fn emit_def_diagnostic_(
|
|||
acc.push(UnresolvedExternCrate { decl: InFile::new(ast.file_id, item) }.into());
|
||||
}
|
||||
|
||||
DefDiagnosticKind::MacroError { ast, err } => {
|
||||
DefDiagnosticKind::MacroError { ast, path, err } => {
|
||||
let item = ast.to_ptr(db.upcast());
|
||||
let (message, error) = err.render_to_string(db.upcast());
|
||||
acc.push(
|
||||
MacroError {
|
||||
node: InFile::new(ast.file_id, item.syntax_node_ptr()),
|
||||
precise_location: None,
|
||||
message,
|
||||
message: format!("{}: {message}", path.display(db.upcast())),
|
||||
error,
|
||||
}
|
||||
.into(),
|
||||
|
@ -1001,7 +1009,7 @@ fn emit_def_diagnostic_(
|
|||
})();
|
||||
}
|
||||
DefDiagnosticKind::UnresolvedMacroCall { ast, path } => {
|
||||
let (node, precise_location, _, _) = precise_macro_call_location(ast, db);
|
||||
let (node, precise_location) = precise_macro_call_location(ast, db);
|
||||
acc.push(
|
||||
UnresolvedMacroCall {
|
||||
macro_call: node,
|
||||
|
@ -1070,7 +1078,7 @@ fn emit_def_diagnostic_(
|
|||
fn precise_macro_call_location(
|
||||
ast: &MacroCallKind,
|
||||
db: &dyn HirDatabase,
|
||||
) -> (InFile<SyntaxNodePtr>, Option<TextRange>, Option<String>, MacroKind) {
|
||||
) -> (InFile<SyntaxNodePtr>, Option<TextRange>) {
|
||||
// FIXME: maybe we actually want slightly different ranges for the different macro diagnostics
|
||||
// - e.g. the full attribute for macro errors, but only the name for name resolution
|
||||
match ast {
|
||||
|
@ -1082,8 +1090,6 @@ fn precise_macro_call_location(
|
|||
.and_then(|it| it.segment())
|
||||
.and_then(|it| it.name_ref())
|
||||
.map(|it| it.syntax().text_range()),
|
||||
node.path().and_then(|it| it.segment()).map(|it| it.to_string()),
|
||||
MacroKind::ProcMacro,
|
||||
)
|
||||
}
|
||||
MacroCallKind::Derive { ast_id, derive_attr_index, derive_index, .. } => {
|
||||
|
@ -1112,8 +1118,6 @@ fn precise_macro_call_location(
|
|||
(
|
||||
ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node))),
|
||||
token.as_ref().map(|tok| tok.text_range()),
|
||||
token.as_ref().map(ToString::to_string),
|
||||
MacroKind::Derive,
|
||||
)
|
||||
}
|
||||
MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
|
||||
|
@ -1128,12 +1132,6 @@ fn precise_macro_call_location(
|
|||
(
|
||||
ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&attr))),
|
||||
Some(attr.syntax().text_range()),
|
||||
attr.path()
|
||||
.and_then(|path| path.segment())
|
||||
.and_then(|seg| seg.name_ref())
|
||||
.as_ref()
|
||||
.map(ToString::to_string),
|
||||
MacroKind::Attr,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
@ -1800,9 +1798,16 @@ impl DefWithBody {
|
|||
BodyDiagnostic::MacroError { node, err } => {
|
||||
let (message, error) = err.render_to_string(db.upcast());
|
||||
|
||||
let precise_location = Some(
|
||||
err.span().range
|
||||
+ db.ast_id_map(err.span().anchor.file_id.into())
|
||||
.get_erased(err.span().anchor.ast_id)
|
||||
.text_range()
|
||||
.start(),
|
||||
);
|
||||
MacroError {
|
||||
node: (*node).map(|it| it.into()),
|
||||
precise_location: None,
|
||||
precise_location,
|
||||
message,
|
||||
error,
|
||||
}
|
||||
|
|
|
@@ -168,12 +168,12 @@ fn main() {
    // Test a handful of built-in (eager) macros:

    include!(invalid);
    //^^^^^^^ error: could not convert tokens
    //^^^^^^^ error: expected string literal
    include!("does not exist");
    //^^^^^^^ error: failed to load file `does not exist`

    env!(invalid);
    //^^^ error: could not convert tokens
    //^^^ error: expected string literal

    env!("OUT_DIR");
    //^^^ error: `OUT_DIR` not set, enable "build scripts" to fix
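Aside: the `//^^^` ranges above come straight out of the span attached to each error. The diagnostics hunks earlier in this diff compute the highlight by offsetting the span's anchor-relative range by the anchor node's start; a simplified, self-contained sketch of that arithmetic (`TextRange` and `Span` are stand-ins):

```rust
// Sketch of turning an anchor-relative span into an absolute text range,
// mirroring `err.span().range + ast_id_map.get_erased(anchor).text_range().start()`.
#[derive(Debug, Clone, Copy)]
struct TextRange {
    start: u32,
    end: u32,
}

impl TextRange {
    fn offset(self, by: u32) -> TextRange {
        TextRange { start: self.start + by, end: self.end + by }
    }
}

struct Span {
    // Range relative to the anchor node.
    range: TextRange,
    // Start of the anchor node in the file.
    anchor_start: u32,
}

fn precise_location(err_span: &Span) -> TextRange {
    err_span.range.offset(err_span.anchor_start)
}

fn main() {
    let span = Span { range: TextRange { start: 2, end: 9 }, anchor_start: 120 };
    // Prints TextRange { start: 122, end: 129 }.
    println!("{:?}", precise_location(&span));
}
```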
@@ -45,7 +45,7 @@ fn benchmark_expand_macro_rules() {
        invocations
            .into_iter()
            .map(|(id, tt)| {
                let res = rules[&id].expand(&tt, |_| (), true, DUMMY, Edition::CURRENT);
                let res = rules[&id].expand(&tt, |_| (), DUMMY, Edition::CURRENT);
                assert!(res.err.is_none());
                res.value.0.token_trees.len()
            })
@@ -118,7 +118,7 @@ fn invocation_fixtures(
            },
            token_trees: token_trees.into_boxed_slice(),
        };
        if it.expand(&subtree, |_| (), true, DUMMY, Edition::CURRENT).err.is_none() {
        if it.expand(&subtree, |_| (), DUMMY, Edition::CURRENT).err.is_none() {
            res.push((name.clone(), subtree));
            break;
        }
@ -9,13 +9,12 @@ use intern::Symbol;
|
|||
use rustc_hash::FxHashMap;
|
||||
use span::{Edition, Span};
|
||||
|
||||
use crate::{parser::MetaVarKind, ExpandError, ExpandResult, MatchedArmIndex};
|
||||
use crate::{parser::MetaVarKind, ExpandError, ExpandErrorKind, ExpandResult, MatchedArmIndex};
|
||||
|
||||
pub(crate) fn expand_rules(
|
||||
rules: &[crate::Rule],
|
||||
input: &tt::Subtree<Span>,
|
||||
marker: impl Fn(&mut Span) + Copy,
|
||||
new_meta_vars: bool,
|
||||
call_site: Span,
|
||||
def_site_edition: Edition,
|
||||
) -> ExpandResult<(tt::Subtree<Span>, MatchedArmIndex)> {
|
||||
|
@ -27,13 +26,8 @@ pub(crate) fn expand_rules(
|
|||
// If we find a rule that applies without errors, we're done.
|
||||
// Unconditionally returning the transcription here makes the
|
||||
// `test_repeat_bad_var` test fail.
|
||||
let ExpandResult { value, err: transcribe_err } = transcriber::transcribe(
|
||||
&rule.rhs,
|
||||
&new_match.bindings,
|
||||
marker,
|
||||
new_meta_vars,
|
||||
call_site,
|
||||
);
|
||||
let ExpandResult { value, err: transcribe_err } =
|
||||
transcriber::transcribe(&rule.rhs, &new_match.bindings, marker, call_site);
|
||||
if transcribe_err.is_none() {
|
||||
return ExpandResult::ok((value, Some(idx as u32)));
|
||||
}
|
||||
|
@ -52,7 +46,7 @@ pub(crate) fn expand_rules(
|
|||
if let Some((match_, rule, idx)) = match_ {
|
||||
// if we got here, there was no match without errors
|
||||
let ExpandResult { value, err: transcribe_err } =
|
||||
transcriber::transcribe(&rule.rhs, &match_.bindings, marker, new_meta_vars, call_site);
|
||||
transcriber::transcribe(&rule.rhs, &match_.bindings, marker, call_site);
|
||||
ExpandResult { value: (value, idx.try_into().ok()), err: match_.err.or(transcribe_err) }
|
||||
} else {
|
||||
ExpandResult::new(
|
||||
|
@ -63,7 +57,7 @@ pub(crate) fn expand_rules(
|
|||
},
|
||||
None,
|
||||
),
|
||||
ExpandError::NoMatchingRule,
|
||||
ExpandError::new(call_site, ExpandErrorKind::NoMatchingRule),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -70,7 +70,7 @@ use crate::{
|
|||
expander::{Binding, Bindings, ExpandResult, Fragment},
|
||||
expect_fragment,
|
||||
parser::{MetaVarKind, Op, RepeatKind, Separator},
|
||||
ExpandError, MetaTemplate, ValueResult,
|
||||
ExpandError, ExpandErrorKind, MetaTemplate, ValueResult,
|
||||
};
|
||||
|
||||
impl Bindings {
|
||||
|
@ -510,11 +510,17 @@ fn match_loop_inner<'t>(
|
|||
if matches!(rhs, tt::Leaf::Literal(it) if it.symbol == lhs.symbol) {
|
||||
item.dot.next();
|
||||
} else {
|
||||
res.add_err(ExpandError::UnexpectedToken);
|
||||
res.add_err(ExpandError::new(
|
||||
*rhs.span(),
|
||||
ExpandErrorKind::UnexpectedToken,
|
||||
));
|
||||
item.is_error = true;
|
||||
}
|
||||
} else {
|
||||
res.add_err(ExpandError::binding_error(format!("expected literal: `{lhs}`")));
|
||||
res.add_err(ExpandError::binding_error(
|
||||
src.clone().next().map_or(delim_span.close, |it| it.first_span()),
|
||||
format!("expected literal: `{lhs}`"),
|
||||
));
|
||||
item.is_error = true;
|
||||
}
|
||||
try_push!(next_items, item);
|
||||
|
@ -524,11 +530,17 @@ fn match_loop_inner<'t>(
|
|||
if matches!(rhs, tt::Leaf::Ident(it) if it.sym == lhs.sym) {
|
||||
item.dot.next();
|
||||
} else {
|
||||
res.add_err(ExpandError::UnexpectedToken);
|
||||
res.add_err(ExpandError::new(
|
||||
*rhs.span(),
|
||||
ExpandErrorKind::UnexpectedToken,
|
||||
));
|
||||
item.is_error = true;
|
||||
}
|
||||
} else {
|
||||
res.add_err(ExpandError::binding_error(format!("expected ident: `{lhs}`")));
|
||||
res.add_err(ExpandError::binding_error(
|
||||
src.clone().next().map_or(delim_span.close, |it| it.first_span()),
|
||||
format!("expected ident: `{lhs}`"),
|
||||
));
|
||||
item.is_error = true;
|
||||
}
|
||||
try_push!(next_items, item);
|
||||
|
@ -538,8 +550,8 @@ fn match_loop_inner<'t>(
|
|||
let error = if let Ok(rhs) = fork.expect_glued_punct() {
|
||||
let first_is_single_quote = rhs[0].char == '\'';
|
||||
let lhs = lhs.iter().map(|it| it.char);
|
||||
let rhs = rhs.iter().map(|it| it.char);
|
||||
if lhs.clone().eq(rhs) {
|
||||
let rhs_ = rhs.iter().map(|it| it.char);
|
||||
if lhs.clone().eq(rhs_) {
|
||||
// HACK: here we use `meta_result` to pass `TtIter` back to caller because
|
||||
// it might have been advanced multiple times. `ValueResult` is
|
||||
// insignificant.
|
||||
|
@ -552,13 +564,19 @@ fn match_loop_inner<'t>(
|
|||
if first_is_single_quote {
|
||||
// If the first punct token is a single quote, that's a part of a lifetime
|
||||
// ident, not a punct.
|
||||
ExpandError::UnexpectedToken
|
||||
ExpandError::new(
|
||||
rhs.get(1).map_or(rhs[0].span, |it| it.span),
|
||||
ExpandErrorKind::UnexpectedToken,
|
||||
)
|
||||
} else {
|
||||
let lhs = lhs.collect::<String>();
|
||||
ExpandError::binding_error(format!("expected punct: `{lhs}`"))
|
||||
ExpandError::binding_error(rhs[0].span, format!("expected punct: `{lhs}`"))
|
||||
}
|
||||
} else {
|
||||
ExpandError::UnexpectedToken
|
||||
ExpandError::new(
|
||||
src.clone().next().map_or(delim_span.close, |it| it.first_span()),
|
||||
ExpandErrorKind::UnexpectedToken,
|
||||
)
|
||||
};
|
||||
|
||||
res.add_err(error);
|
||||
|
@ -651,7 +669,7 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree<Span>, edition: Edition)
|
|||
if let Some(item) = error_recover_item {
|
||||
res.bindings = bindings_builder.build(&item);
|
||||
}
|
||||
res.add_err(ExpandError::UnexpectedToken);
|
||||
res.add_err(ExpandError::new(span.open, ExpandErrorKind::UnexpectedToken));
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
@ -670,7 +688,7 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree<Span>, edition: Edition)
|
|||
src = it;
|
||||
res.unmatched_tts += src.len();
|
||||
}
|
||||
res.add_err(ExpandError::LeftoverTokens);
|
||||
res.add_err(ExpandError::new(span.open, ExpandErrorKind::LeftoverTokens));
|
||||
|
||||
if let Some(error_recover_item) = error_recover_item {
|
||||
res.bindings = bindings_builder.build(&error_recover_item);
|
||||
|
@ -746,9 +764,10 @@ fn match_meta_var(
|
|||
) -> ExpandResult<Option<Fragment>> {
|
||||
let fragment = match kind {
|
||||
MetaVarKind::Path => {
|
||||
return expect_fragment(input, parser::PrefixEntryPoint::Path, edition).map(|it| {
|
||||
it.map(|it| tt::TokenTree::subtree_or_wrap(it, delim_span)).map(Fragment::Path)
|
||||
});
|
||||
return expect_fragment(input, parser::PrefixEntryPoint::Path, edition, delim_span)
|
||||
.map(|it| {
|
||||
it.map(|it| tt::TokenTree::subtree_or_wrap(it, delim_span)).map(Fragment::Path)
|
||||
});
|
||||
}
|
||||
MetaVarKind::Expr => {
|
||||
// `expr` should not match underscores, let expressions, or inline const. The latter
|
||||
|
@ -763,37 +782,54 @@ fn match_meta_var(
|
|||
|| it.sym == sym::let_
|
||||
|| it.sym == sym::const_ =>
|
||||
{
|
||||
return ExpandResult::only_err(ExpandError::NoMatchingRule)
|
||||
return ExpandResult::only_err(ExpandError::new(
|
||||
it.span,
|
||||
ExpandErrorKind::NoMatchingRule,
|
||||
))
|
||||
}
|
||||
_ => {}
|
||||
};
|
||||
return expect_fragment(input, parser::PrefixEntryPoint::Expr, edition).map(|tt| {
|
||||
tt.map(|tt| match tt {
|
||||
tt::TokenTree::Leaf(leaf) => tt::Subtree {
|
||||
delimiter: tt::Delimiter::invisible_spanned(*leaf.span()),
|
||||
token_trees: Box::new([leaf.into()]),
|
||||
},
|
||||
tt::TokenTree::Subtree(mut s) => {
|
||||
if s.delimiter.kind == tt::DelimiterKind::Invisible {
|
||||
s.delimiter.kind = tt::DelimiterKind::Parenthesis;
|
||||
return expect_fragment(input, parser::PrefixEntryPoint::Expr, edition, delim_span)
|
||||
.map(|tt| {
|
||||
tt.map(|tt| match tt {
|
||||
tt::TokenTree::Leaf(leaf) => tt::Subtree {
|
||||
delimiter: tt::Delimiter::invisible_spanned(*leaf.span()),
|
||||
token_trees: Box::new([leaf.into()]),
|
||||
},
|
||||
tt::TokenTree::Subtree(mut s) => {
|
||||
if s.delimiter.kind == tt::DelimiterKind::Invisible {
|
||||
s.delimiter.kind = tt::DelimiterKind::Parenthesis;
|
||||
}
|
||||
s
|
||||
}
|
||||
s
|
||||
}
|
||||
})
|
||||
.map(Fragment::Expr)
|
||||
});
|
||||
})
|
||||
.map(Fragment::Expr)
|
||||
});
|
||||
}
|
||||
MetaVarKind::Ident | MetaVarKind::Tt | MetaVarKind::Lifetime | MetaVarKind::Literal => {
|
||||
let span = input.next_span();
|
||||
let tt_result = match kind {
|
||||
MetaVarKind::Ident => input
|
||||
.expect_ident()
|
||||
.map(|ident| tt::Leaf::from(ident.clone()).into())
|
||||
.map_err(|()| ExpandError::binding_error("expected ident")),
|
||||
MetaVarKind::Tt => {
|
||||
expect_tt(input).map_err(|()| ExpandError::binding_error("expected token tree"))
|
||||
}
|
||||
MetaVarKind::Lifetime => expect_lifetime(input)
|
||||
.map_err(|()| ExpandError::binding_error("expected lifetime")),
|
||||
.map_err(|()| {
|
||||
ExpandError::binding_error(
|
||||
span.unwrap_or(delim_span.close),
|
||||
"expected ident",
|
||||
)
|
||||
}),
|
||||
MetaVarKind::Tt => expect_tt(input).map_err(|()| {
|
||||
ExpandError::binding_error(
|
||||
span.unwrap_or(delim_span.close),
|
||||
"expected token tree",
|
||||
)
|
||||
}),
|
||||
MetaVarKind::Lifetime => expect_lifetime(input).map_err(|()| {
|
||||
ExpandError::binding_error(
|
||||
span.unwrap_or(delim_span.close),
|
||||
"expected lifetime",
|
||||
)
|
||||
}),
|
||||
MetaVarKind::Literal => {
|
||||
let neg = eat_char(input, '-');
|
||||
input
|
||||
|
@ -808,7 +844,12 @@ fn match_meta_var(
|
|||
}),
|
||||
}
|
||||
})
|
||||
.map_err(|()| ExpandError::binding_error("expected literal"))
|
||||
.map_err(|()| {
|
||||
ExpandError::binding_error(
|
||||
span.unwrap_or(delim_span.close),
|
||||
"expected literal",
|
||||
)
|
||||
})
|
||||
}
|
||||
_ => unreachable!(),
|
||||
};
|
||||
|
@ -823,7 +864,7 @@ fn match_meta_var(
|
|||
MetaVarKind::Item => parser::PrefixEntryPoint::Item,
|
||||
MetaVarKind::Vis => parser::PrefixEntryPoint::Vis,
|
||||
};
|
||||
expect_fragment(input, fragment, edition).map(|it| it.map(Fragment::Tokens))
|
||||
expect_fragment(input, fragment, edition, delim_span).map(|it| it.map(Fragment::Tokens))
|
||||
}
|
||||
|
||||
fn collect_vars(collector_fun: &mut impl FnMut(Symbol), pattern: &MetaTemplate) {
|
||||
|
|
|
@ -8,14 +8,17 @@ use tt::Delimiter;
|
|||
use crate::{
|
||||
expander::{Binding, Bindings, Fragment},
|
||||
parser::{MetaVarKind, Op, RepeatKind, Separator},
|
||||
CountError, ExpandError, ExpandResult, MetaTemplate,
|
||||
ExpandError, ExpandErrorKind, ExpandResult, MetaTemplate,
|
||||
};
|
||||
|
||||
impl Bindings {
|
||||
fn get(&self, name: &Symbol) -> Result<&Binding, ExpandError> {
|
||||
fn get(&self, name: &Symbol, span: Span) -> Result<&Binding, ExpandError> {
|
||||
match self.inner.get(name) {
|
||||
Some(binding) => Ok(binding),
|
||||
None => Err(ExpandError::UnresolvedBinding(Box::new(Box::from(name.as_str())))),
|
||||
None => Err(ExpandError::new(
|
||||
span,
|
||||
ExpandErrorKind::UnresolvedBinding(Box::new(Box::from(name.as_str()))),
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -27,10 +30,10 @@ impl Bindings {
|
|||
marker: impl Fn(&mut Span),
|
||||
) -> Result<Fragment, ExpandError> {
|
||||
macro_rules! binding_err {
|
||||
($($arg:tt)*) => { ExpandError::binding_error(format!($($arg)*)) };
|
||||
($($arg:tt)*) => { ExpandError::binding_error(span, format!($($arg)*)) };
|
||||
}
|
||||
|
||||
let mut b = self.get(name)?;
|
||||
let mut b = self.get(name, span)?;
|
||||
for nesting_state in nesting.iter_mut() {
|
||||
nesting_state.hit = true;
|
||||
b = match b {
|
||||
|
@ -142,10 +145,9 @@ pub(super) fn transcribe(
|
|||
template: &MetaTemplate,
|
||||
bindings: &Bindings,
|
||||
marker: impl Fn(&mut Span) + Copy,
|
||||
new_meta_vars: bool,
|
||||
call_site: Span,
|
||||
) -> ExpandResult<tt::Subtree<Span>> {
|
||||
let mut ctx = ExpandCtx { bindings, nesting: Vec::new(), new_meta_vars, call_site };
|
||||
let mut ctx = ExpandCtx { bindings, nesting: Vec::new(), call_site };
|
||||
let mut arena: Vec<tt::TokenTree<Span>> = Vec::new();
|
||||
expand_subtree(&mut ctx, template, None, &mut arena, marker)
|
||||
}
|
||||
|
@ -165,7 +167,6 @@ struct NestingState {
|
|||
struct ExpandCtx<'a> {
|
||||
bindings: &'a Bindings,
|
||||
nesting: Vec<NestingState>,
|
||||
new_meta_vars: bool,
|
||||
call_site: Span,
|
||||
}
|
||||
|
||||
|
@ -263,7 +264,7 @@ fn expand_subtree(
|
|||
);
|
||||
}
|
||||
Op::Count { name, depth } => {
|
||||
let mut binding = match ctx.bindings.get(name) {
|
||||
let mut binding = match ctx.bindings.get(name, ctx.call_site) {
|
||||
Ok(b) => b,
|
||||
Err(e) => {
|
||||
if err.is_none() {
|
||||
|
@ -299,29 +300,11 @@ fn expand_subtree(
|
|||
}
|
||||
}
|
||||
|
||||
let res = if ctx.new_meta_vars {
|
||||
count(binding, 0, depth.unwrap_or(0))
|
||||
} else {
|
||||
count_old(binding, 0, *depth)
|
||||
};
|
||||
let res = count(binding, 0, depth.unwrap_or(0));
|
||||
|
||||
let c = match res {
|
||||
Ok(c) => c,
|
||||
Err(e) => {
|
||||
// XXX: It *might* make sense to emit a dummy integer value like `0` here.
|
||||
// That would type inference a bit more robust in cases like
|
||||
// `v[${count(t)}]` where index doesn't matter, but also could lead to
|
||||
// wrong infefrence for cases like `tup.${count(t)}` where index itself
|
||||
// does matter.
|
||||
if err.is_none() {
|
||||
err = Some(e.into());
|
||||
}
|
||||
continue;
|
||||
}
|
||||
};
|
||||
arena.push(
|
||||
tt::Leaf::Literal(tt::Literal {
|
||||
symbol: Symbol::integer(c),
|
||||
symbol: Symbol::integer(res),
|
||||
span: ctx.call_site,
|
||||
suffix: None,
|
||||
kind: tt::LitKind::Integer,
|
||||
|
@ -353,7 +336,7 @@ fn expand_var(
|
|||
|
||||
match ctx.bindings.get_fragment(v, id, &mut ctx.nesting, marker) {
|
||||
Ok(it) => ExpandResult::ok(it),
|
||||
Err(ExpandError::UnresolvedBinding(_)) => {
|
||||
Err(e) if matches!(e.inner.1, ExpandErrorKind::UnresolvedBinding(_)) => {
|
||||
// Note that it is possible to have a `$var` inside a macro which is not bound.
|
||||
// For example:
|
||||
// ```
|
||||
|
@ -435,7 +418,7 @@ fn expand_repeat(
|
|||
}
|
||||
.into(),
|
||||
),
|
||||
err: Some(ExpandError::LimitExceeded),
|
||||
err: Some(ExpandError::new(ctx.call_site, ExpandErrorKind::LimitExceeded)),
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -481,16 +464,16 @@ fn expand_repeat(
|
|||
let tt = tt::Subtree {
|
||||
delimiter: tt::Delimiter::invisible_spanned(ctx.call_site),
|
||||
token_trees: buf.into_boxed_slice(),
|
||||
}
|
||||
.into();
|
||||
};
|
||||
|
||||
if RepeatKind::OneOrMore == kind && counter == 0 {
|
||||
let span = tt.delimiter.open;
|
||||
return ExpandResult {
|
||||
value: Fragment::Tokens(tt),
|
||||
err: Some(ExpandError::UnexpectedToken),
|
||||
value: Fragment::Tokens(tt.into()),
|
||||
err: Some(ExpandError::new(span, ExpandErrorKind::UnexpectedToken)),
|
||||
};
|
||||
}
|
||||
ExpandResult { value: Fragment::Tokens(tt), err }
|
||||
ExpandResult { value: Fragment::Tokens(tt.into()), err }
|
||||
}
|
||||
|
||||
fn push_fragment(ctx: &ExpandCtx<'_>, buf: &mut Vec<tt::TokenTree<Span>>, fragment: Fragment) {
|
||||
|
@ -557,44 +540,16 @@ fn fix_up_and_push_path_tt(
|
|||
|
||||
/// Handles `${count(t, depth)}`. `our_depth` is the recursion depth and `count_depth` is the depth
|
||||
/// defined by the metavar expression.
|
||||
fn count(binding: &Binding, depth_curr: usize, depth_max: usize) -> Result<usize, CountError> {
|
||||
fn count(binding: &Binding, depth_curr: usize, depth_max: usize) -> usize {
|
||||
match binding {
|
||||
Binding::Nested(bs) => {
|
||||
if depth_curr == depth_max {
|
||||
Ok(bs.len())
|
||||
bs.len()
|
||||
} else {
|
||||
bs.iter().map(|b| count(b, depth_curr + 1, depth_max)).sum()
|
||||
}
|
||||
}
|
||||
Binding::Empty => Ok(0),
|
||||
Binding::Fragment(_) | Binding::Missing(_) => Ok(1),
|
||||
}
|
||||
}
|
||||
|
||||
fn count_old(
|
||||
binding: &Binding,
|
||||
our_depth: usize,
|
||||
count_depth: Option<usize>,
|
||||
) -> Result<usize, CountError> {
|
||||
match binding {
|
||||
Binding::Nested(bs) => match count_depth {
|
||||
None => bs.iter().map(|b| count_old(b, our_depth + 1, None)).sum(),
|
||||
Some(0) => Ok(bs.len()),
|
||||
Some(d) => bs.iter().map(|b| count_old(b, our_depth + 1, Some(d - 1))).sum(),
|
||||
},
|
||||
Binding::Empty => Ok(0),
|
||||
Binding::Fragment(_) | Binding::Missing(_) => {
|
||||
if our_depth == 0 {
|
||||
// `${count(t)}` is placed inside the innermost repetition. This includes cases
|
||||
// where `t` is not a repeated fragment.
|
||||
Err(CountError::Misplaced)
|
||||
} else if count_depth.is_none() {
|
||||
Ok(1)
|
||||
} else {
|
||||
// We've reached at the innermost repeated fragment, but the user wants us to go
|
||||
// further!
|
||||
Err(CountError::OutOfBounds)
|
||||
}
|
||||
}
|
||||
Binding::Empty => 0,
|
||||
Binding::Fragment(_) | Binding::Missing(_) => 1,
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -15,10 +15,11 @@ mod to_parser_input;
mod benchmark;

use span::{Edition, Span, SyntaxContextId};
use stdx::impl_from;
use tt::iter::TtIter;
use tt::DelimSpan;

use std::fmt;
use std::sync::Arc;

use crate::parser::{MetaTemplate, MetaVarKind, Op};

@@ -64,39 +65,45 @@ impl fmt::Display for ParseError {
}

#[derive(Debug, PartialEq, Eq, Clone, Hash)]
pub enum ExpandError {
pub struct ExpandError {
    pub inner: Arc<(Span, ExpandErrorKind)>,
}
#[derive(Debug, PartialEq, Eq, Clone, Hash)]
pub enum ExpandErrorKind {
    BindingError(Box<Box<str>>),
    UnresolvedBinding(Box<Box<str>>),
    LeftoverTokens,
    ConversionError,
    LimitExceeded,
    NoMatchingRule,
    UnexpectedToken,
    CountError(CountError),
}

impl_from!(CountError for ExpandError);

impl ExpandError {
    fn binding_error(e: impl Into<Box<str>>) -> ExpandError {
        ExpandError::BindingError(Box::new(e.into()))
    fn new(span: Span, kind: ExpandErrorKind) -> ExpandError {
        ExpandError { inner: Arc::new((span, kind)) }
    }
    fn binding_error(span: Span, e: impl Into<Box<str>>) -> ExpandError {
        ExpandError { inner: Arc::new((span, ExpandErrorKind::BindingError(Box::new(e.into())))) }
    }
}
impl fmt::Display for ExpandError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.inner.1.fmt(f)
    }
}

impl fmt::Display for ExpandError {
impl fmt::Display for ExpandErrorKind {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            ExpandError::NoMatchingRule => f.write_str("no rule matches input tokens"),
            ExpandError::UnexpectedToken => f.write_str("unexpected token in input"),
            ExpandError::BindingError(e) => f.write_str(e),
            ExpandError::UnresolvedBinding(binding) => {
            ExpandErrorKind::NoMatchingRule => f.write_str("no rule matches input tokens"),
            ExpandErrorKind::UnexpectedToken => f.write_str("unexpected token in input"),
            ExpandErrorKind::BindingError(e) => f.write_str(e),
            ExpandErrorKind::UnresolvedBinding(binding) => {
                f.write_str("could not find binding ")?;
                f.write_str(binding)
            }
            ExpandError::ConversionError => f.write_str("could not convert tokens"),
            ExpandError::LimitExceeded => f.write_str("Expand exceed limit"),
            ExpandError::LeftoverTokens => f.write_str("leftover tokens"),
            ExpandError::CountError(e) => e.fmt(f),
            ExpandErrorKind::LimitExceeded => f.write_str("Expand exceed limit"),
            ExpandErrorKind::LeftoverTokens => f.write_str("leftover tokens"),
        }
    }
}
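Aside: a pattern that recurs in the matcher hunks above is choosing an error span from the next unmatched token and falling back to the closing delimiter when the input is exhausted (`src.clone().next().map_or(delim_span.close, |it| it.first_span())`). A self-contained sketch of that fallback, with simplified stand-in types:

```rust
// Sketch only: `Span`, `DelimSpan`, and `TokenTree` are stand-ins.
#[derive(Debug, Clone, Copy)]
struct Span {
    start: u32,
    end: u32,
}

#[derive(Debug, Clone, Copy)]
struct DelimSpan {
    open: Span,
    close: Span,
}

#[derive(Debug)]
enum TokenTree {
    Leaf(Span),
    Subtree(DelimSpan),
}

impl TokenTree {
    // Mirrors `first_span()`: a subtree is reported at its opening delimiter.
    fn first_span(&self) -> Span {
        match self {
            TokenTree::Leaf(span) => *span,
            TokenTree::Subtree(delim) => delim.open,
        }
    }
}

/// Span to blame for "unexpected token" / "leftover tokens" style errors.
fn error_span(rest: &[TokenTree], delim: DelimSpan) -> Span {
    rest.first().map_or(delim.close, TokenTree::first_span)
}

fn main() {
    let delim = DelimSpan { open: Span { start: 0, end: 1 }, close: Span { start: 40, end: 41 } };
    let rest = [TokenTree::Leaf(Span { start: 10, end: 14 })];
    println!("{:?}", error_span(&rest, delim)); // points at the leftover token
    println!("{:?}", error_span(&[], delim)); // falls back to the closing delimiter
}
```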
@@ -248,11 +255,10 @@ impl DeclarativeMacro {
        &self,
        tt: &tt::Subtree<Span>,
        marker: impl Fn(&mut Span) + Copy,
        new_meta_vars: bool,
        call_site: Span,
        def_site_edition: Edition,
    ) -> ExpandResult<(tt::Subtree<Span>, MatchedArmIndex)> {
        expander::expand_rules(&self.rules, tt, marker, new_meta_vars, call_site, def_site_edition)
        expander::expand_rules(&self.rules, tt, marker, call_site, def_site_edition)
    }
}
@ -355,11 +361,12 @@ impl<T: Default, E> From<Result<T, E>> for ValueResult<T, E> {
|
|||
}
|
||||
}
|
||||
|
||||
fn expect_fragment<S: Copy + fmt::Debug>(
|
||||
tt_iter: &mut TtIter<'_, S>,
|
||||
fn expect_fragment(
|
||||
tt_iter: &mut TtIter<'_, Span>,
|
||||
entry_point: ::parser::PrefixEntryPoint,
|
||||
edition: ::parser::Edition,
|
||||
) -> ExpandResult<Option<tt::TokenTree<S>>> {
|
||||
delim_span: DelimSpan<Span>,
|
||||
) -> ExpandResult<Option<tt::TokenTree<Span>>> {
|
||||
use ::parser;
|
||||
let buffer = tt::buffer::TokenBuffer::from_tokens(tt_iter.as_slice());
|
||||
let parser_input = to_parser_input::to_parser_input(edition, &buffer);
|
||||
|
@ -387,7 +394,10 @@ fn expect_fragment<S: Copy + fmt::Debug>(
|
|||
}
|
||||
|
||||
let err = if error || !cursor.is_root() {
|
||||
Some(ExpandError::binding_error(format!("expected {entry_point:?}")))
|
||||
Some(ExpandError::binding_error(
|
||||
buffer.begin().token_tree().map_or(delim_span.close, |tt| tt.span()),
|
||||
format!("expected {entry_point:?}"),
|
||||
))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
|
|
@ -212,15 +212,12 @@ where
|
|||
}
|
||||
|
||||
/// Split token tree with separate expr: $($e:expr)SEP*
|
||||
pub fn parse_exprs_with_sep<S>(
|
||||
tt: &tt::Subtree<S>,
|
||||
pub fn parse_exprs_with_sep(
|
||||
tt: &tt::Subtree<span::Span>,
|
||||
sep: char,
|
||||
span: S,
|
||||
span: span::Span,
|
||||
edition: Edition,
|
||||
) -> Vec<tt::Subtree<S>>
|
||||
where
|
||||
S: Copy + fmt::Debug,
|
||||
{
|
||||
) -> Vec<tt::Subtree<span::Span>> {
|
||||
if tt.token_trees.is_empty() {
|
||||
return Vec::new();
|
||||
}
|
||||
|
@ -229,7 +226,12 @@ where
|
|||
let mut res = Vec::new();
|
||||
|
||||
while iter.peek_n(0).is_some() {
|
||||
let expanded = crate::expect_fragment(&mut iter, parser::PrefixEntryPoint::Expr, edition);
|
||||
let expanded = crate::expect_fragment(
|
||||
&mut iter,
|
||||
parser::PrefixEntryPoint::Expr,
|
||||
edition,
|
||||
tt::DelimSpan { open: tt.delimiter.open, close: tt.delimiter.close },
|
||||
);
|
||||
|
||||
res.push(match expanded.value {
|
||||
None => break,
|
||||
|
|
|
@@ -1053,6 +1053,7 @@ impl GlobalState {
            .on::<NO_RETRY, lsp_request::GotoDeclaration>(handlers::handle_goto_declaration)
            .on::<NO_RETRY, lsp_request::GotoImplementation>(handlers::handle_goto_implementation)
            .on::<NO_RETRY, lsp_request::GotoTypeDefinition>(handlers::handle_goto_type_definition)
            // FIXME: This should not be tried as it contains offsets that can get outdated!
            .on::<RETRY, lsp_request::InlayHintRequest>(handlers::handle_inlay_hints)
            .on::<RETRY, lsp_request::InlayHintResolveRequest>(handlers::handle_inlay_hints_resolve)
            .on::<NO_RETRY, lsp_request::CodeLensRequest>(handlers::handle_code_lens)
@@ -134,6 +134,15 @@ pub enum TokenTreeRef<'a, Span> {
    Leaf(&'a Leaf<Span>, &'a TokenTree<Span>),
}

impl<'a, Span: Copy> TokenTreeRef<'a, Span> {
    pub fn span(&self) -> Span {
        match self {
            TokenTreeRef::Subtree(subtree, _) => subtree.delimiter.open,
            TokenTreeRef::Leaf(leaf, _) => *leaf.span(),
        }
    }
}

impl<Span: Clone> TokenTreeRef<'_, Span> {
    pub fn cloned(&self) -> TokenTree<Span> {
        match self {
@@ -143,6 +143,10 @@ impl<'a, S: Copy> TtIter<'a, S> {
        self.inner.as_slice().get(n)
    }

    pub fn next_span(&self) -> Option<S> {
        Some(self.inner.as_slice().first()?.first_span())
    }

    pub fn as_slice(&self) -> &'a [TokenTree<S>] {
        self.inner.as_slice()
    }