Mirror of https://github.com/rust-lang/rust-analyzer (synced 2024-12-27 05:23:24 +00:00)

commit ae9c553902 (parent 7beac14cba)

    Make basic use of spans for macro expansion errors

24 changed files with 392 additions and 333 deletions
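The gist of the change, before the per-file hunks below: `ExpandError` stops being a bare enum and becomes a struct that pairs an `ExpandErrorKind` with the `Span` of the offending code, so every construction site now supplies a span (`ExpandError::new(span, kind)`, `ExpandError::other(span, msg)`) and diagnostics can read it back via `kind()`/`span()`. A minimal, self-contained sketch of that shape — `Span` is stubbed out here as a placeholder, since the real type lives in the `span` crate:

```rust
use std::sync::Arc;

// Stand-in for the real `span::Span`; only here so the sketch compiles on its own.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Span(pub u32);

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum ExpandErrorKind {
    RecursionOverflow,
    UnexpectedToken,
    Other(Box<str>),
}

// After this commit an expansion error always carries the span it points at.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ExpandError {
    inner: Arc<(ExpandErrorKind, Span)>,
}

impl ExpandError {
    pub fn new(span: Span, kind: ExpandErrorKind) -> Self {
        ExpandError { inner: Arc::new((kind, span)) }
    }
    pub fn other(span: Span, msg: impl Into<Box<str>>) -> Self {
        Self::new(span, ExpandErrorKind::Other(msg.into()))
    }
    pub fn kind(&self) -> &ExpandErrorKind {
        &self.inner.0
    }
    pub fn span(&self) -> Span {
        self.inner.1
    }
}

fn main() {
    let err = ExpandError::other(Span(42), "unexpected token");
    // Downstream diagnostics can now point at the precise location
    // instead of highlighting the whole macro call.
    println!("{:?} at {:?}", err.kind(), err.span());
}
```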
@@ -661,6 +661,7 @@ impl<'a> AssocItemCollector<'a> {
             self.diagnostics.push(DefDiagnostic::macro_error(
                 self.module_id.local_id,
                 ast_id,
+                (*attr.path).clone(),
                 err,
             ));
             continue 'attrs;
@@ -6,8 +6,8 @@ use base_db::CrateId;
 use cfg::CfgOptions;
 use drop_bomb::DropBomb;
 use hir_expand::{
-    attrs::RawAttrs, mod_path::ModPath, span_map::SpanMap, ExpandError, ExpandResult, HirFileId,
-    InFile, MacroCallId,
+    attrs::RawAttrs, mod_path::ModPath, span_map::SpanMap, ExpandError, ExpandErrorKind,
+    ExpandResult, HirFileId, InFile, Lookup, MacroCallId,
 };
 use limit::Limit;
 use span::SyntaxContextId;
@@ -160,26 +160,30 @@ impl Expander {
             // so don't return overflow error here to avoid diagnostics duplication.
             cov_mark::hit!(overflow_but_not_me);
             return ExpandResult::ok(None);
-        } else if self.recursion_limit.check(self.recursion_depth as usize + 1).is_err() {
-            self.recursion_depth = u32::MAX;
-            cov_mark::hit!(your_stack_belongs_to_me);
-            return ExpandResult::only_err(ExpandError::RecursionOverflow);
         }

         let ExpandResult { value, err } = op(self);
         let Some(call_id) = value else {
             return ExpandResult { value: None, err };
         };
+        if self.recursion_limit.check(self.recursion_depth as usize + 1).is_err() {
+            self.recursion_depth = u32::MAX;
+            cov_mark::hit!(your_stack_belongs_to_me);
+            return ExpandResult::only_err(ExpandError::new(
+                db.macro_arg_considering_derives(call_id, &call_id.lookup(db.upcast()).kind).2,
+                ExpandErrorKind::RecursionOverflow,
+            ));
+        }

         let macro_file = call_id.as_macro_file();
         let res = db.parse_macro_expansion(macro_file);

         let err = err.or(res.err);
         ExpandResult {
-            value: match err {
+            value: match &err {
                 // If proc-macro is disabled or unresolved, we want to expand to a missing expression
                 // instead of an empty tree which might end up in an empty block.
-                Some(ExpandError::MissingProcMacroExpander(_)) => None,
+                Some(e) if matches!(e.kind(), ExpandErrorKind::MissingProcMacroExpander(_)) => None,
                 _ => (|| {
                     let parse = res.value.0.cast::<T>()?;

@@ -1434,7 +1434,10 @@ impl AsMacroCall for InFile<&ast::MacroCall> {
        });

        let Some((call_site, path)) = path else {
-            return Ok(ExpandResult::only_err(ExpandError::other("malformed macro invocation")));
+            return Ok(ExpandResult::only_err(ExpandError::other(
+                span_map.span_for_range(self.value.syntax().text_range()),
+                "malformed macro invocation",
+            )));
        };

        macro_call_as_call_id_with_eager(
@@ -1084,7 +1084,7 @@ fn main() {
 macro_rules! concat_bytes {}

 fn main() {
-    let x = /* error: unexpected token in input */b"";
+    let x = /* error: unexpected token */b"";
 }

 "#]],
@@ -1324,6 +1324,7 @@ impl DefCollector<'_> {
                 self.def_map.diagnostics.push(DefDiagnostic::macro_error(
                     directive.module_id,
                     ast_id,
+                    (**path).clone(),
                     err,
                 ));
                 return recollect_without(self);
@@ -3,7 +3,7 @@
 use std::ops::Not;

 use cfg::{CfgExpr, CfgOptions};
-use hir_expand::{attrs::AttrId, ExpandError, MacroCallKind};
+use hir_expand::{attrs::AttrId, ExpandErrorKind, MacroCallKind};
 use la_arena::Idx;
 use syntax::ast;

@@ -25,7 +25,7 @@ pub enum DefDiagnosticKind {
     InvalidDeriveTarget { ast: AstId<ast::Item>, id: usize },
     MalformedDerive { ast: AstId<ast::Adt>, id: usize },
     MacroDefError { ast: AstId<ast::Macro>, message: String },
-    MacroError { ast: AstId<ast::Item>, err: ExpandError },
+    MacroError { ast: AstId<ast::Item>, path: ModPath, err: ExpandErrorKind },
 }

 #[derive(Clone, Debug, PartialEq, Eq)]
@@ -82,8 +82,13 @@ impl DefDiagnostic {
         Self { in_module: container, kind: DefDiagnosticKind::UnresolvedImport { id, index } }
     }

-    pub fn macro_error(container: LocalModuleId, ast: AstId<ast::Item>, err: ExpandError) -> Self {
-        Self { in_module: container, kind: DefDiagnosticKind::MacroError { ast, err } }
+    pub fn macro_error(
+        container: LocalModuleId,
+        ast: AstId<ast::Item>,
+        path: ModPath,
+        err: ExpandErrorKind,
+    ) -> Self {
+        Self { in_module: container, kind: DefDiagnosticKind::MacroError { ast, path, err } }
     }

     pub fn unconfigured_code(
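The call sites patched above and below (`AssocItemCollector` and `DefCollector`) now hand the macro's path to `macro_error`, and a later hunk in `emit_def_diagnostic_` prefixes the rendered message with it (`format!("{}: {message}", path.display(db.upcast()))`). A small standalone sketch of that flow, with hypothetical stand-ins for `ModPath`, `ExpandErrorKind`, and the diagnostic type rather than the real hir-def definitions:

```rust
// Hypothetical, simplified stand-ins; only the shape of the call matters here.
#[derive(Clone, Debug)]
struct ModPath(String);

impl ModPath {
    fn display(&self) -> &str {
        &self.0
    }
}

#[derive(Clone, Debug)]
enum ExpandErrorKind {
    RecursionOverflow,
    Other(Box<str>),
}

#[derive(Debug)]
struct MacroErrorDiagnostic {
    path: ModPath,
    err: ExpandErrorKind,
}

impl MacroErrorDiagnostic {
    // Mirrors the new `DefDiagnostic::macro_error(container, ast, path, err)` shape:
    // the macro path travels with the error kind instead of being dropped.
    fn new(path: ModPath, err: ExpandErrorKind) -> Self {
        MacroErrorDiagnostic { path, err }
    }

    fn render(&self) -> String {
        let message = match &self.err {
            ExpandErrorKind::RecursionOverflow => {
                "overflow expanding the original macro".to_owned()
            }
            ExpandErrorKind::Other(msg) => msg.to_string(),
        };
        // The user-facing text now names the macro that failed.
        format!("{}: {message}", self.path.display())
    }
}

fn main() {
    let diag = MacroErrorDiagnostic::new(
        ModPath("serde::Serialize".to_owned()),
        ExpandErrorKind::Other("proc macro panicked".into()),
    );
    println!("{}", diag.render());
}
```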
@@ -12,8 +12,7 @@ use crate::{
     builtin::quote::{dollar_crate, quote},
     db::ExpandDatabase,
     hygiene::span_with_def_site_ctxt,
-    name,
-    name::{AsName, Name},
+    name::{self, AsName, Name},
     span_map::ExpansionSpanMap,
     tt, ExpandError, ExpandResult,
 };
@@ -129,13 +128,17 @@ impl VariantShape {
         }
     }

-    fn from(tm: &ExpansionSpanMap, value: Option<FieldList>) -> Result<Self, ExpandError> {
+    fn from(
+        call_site: Span,
+        tm: &ExpansionSpanMap,
+        value: Option<FieldList>,
+    ) -> Result<Self, ExpandError> {
         let r = match value {
             None => VariantShape::Unit,
             Some(FieldList::RecordFieldList(it)) => VariantShape::Struct(
                 it.fields()
                     .map(|it| it.name())
-                    .map(|it| name_to_token(tm, it))
+                    .map(|it| name_to_token(call_site, tm, it))
                     .collect::<Result<_, _>>()?,
             ),
             Some(FieldList::TupleFieldList(it)) => VariantShape::Tuple(it.fields().count()),
@@ -212,16 +215,17 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandEr
         parser::Edition::CURRENT_FIXME,
     );
     let macro_items = ast::MacroItems::cast(parsed.syntax_node())
-        .ok_or_else(|| ExpandError::other("invalid item definition"))?;
-    let item = macro_items.items().next().ok_or_else(|| ExpandError::other("no item found"))?;
+        .ok_or_else(|| ExpandError::other(call_site, "invalid item definition"))?;
+    let item =
+        macro_items.items().next().ok_or_else(|| ExpandError::other(call_site, "no item found"))?;
     let adt = &ast::Adt::cast(item.syntax().clone())
-        .ok_or_else(|| ExpandError::other("expected struct, enum or union"))?;
+        .ok_or_else(|| ExpandError::other(call_site, "expected struct, enum or union"))?;
     let (name, generic_param_list, where_clause, shape) = match adt {
         ast::Adt::Struct(it) => (
             it.name(),
             it.generic_param_list(),
             it.where_clause(),
-            AdtShape::Struct(VariantShape::from(tm, it.field_list())?),
+            AdtShape::Struct(VariantShape::from(call_site, tm, it.field_list())?),
         ),
         ast::Adt::Enum(it) => {
             let default_variant = it
@@ -241,8 +245,8 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandEr
                 .flat_map(|it| it.variants())
                 .map(|it| {
                     Ok((
-                        name_to_token(tm, it.name())?,
-                        VariantShape::from(tm, it.field_list())?,
+                        name_to_token(call_site, tm, it.name())?,
+                        VariantShape::from(call_site, tm, it.field_list())?,
                     ))
                 })
                 .collect::<Result<_, ExpandError>>()?,
@@ -357,17 +361,18 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandEr
             )
         })
         .collect();
-    let name_token = name_to_token(tm, name)?;
+    let name_token = name_to_token(call_site, tm, name)?;
     Ok(BasicAdtInfo { name: name_token, shape, param_types, where_clause, associated_types })
 }

 fn name_to_token(
+    call_site: Span,
     token_map: &ExpansionSpanMap,
     name: Option<ast::Name>,
 ) -> Result<tt::Ident, ExpandError> {
     let name = name.ok_or_else(|| {
         debug!("parsed item has no name");
-        ExpandError::other("missing name")
+        ExpandError::other(call_site, "missing name")
     })?;
     let span = token_map.span_at(name.syntax().text_range().start());

@@ -460,15 +460,11 @@ fn compile_error_expand(
     let err = match &*tt.token_trees {
         [tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
             symbol: text,
-            span: _,
+            span,
             kind: tt::LitKind::Str | tt::LitKind::StrRaw(_),
             suffix: _,
-        }))] =>
-        // FIXME: Use the span here!
-        {
-            ExpandError::other(Box::from(unescape_str(text).as_str()))
-        }
-        _ => ExpandError::other("`compile_error!` argument must be a string"),
+        }))] => ExpandError::other(*span, Box::from(unescape_str(text).as_str())),
+        _ => ExpandError::other(span, "`compile_error!` argument must be a string"),
     };

     ExpandResult { value: quote! {span =>}, err: Some(err) }
@@ -478,7 +474,7 @@ fn concat_expand(
     _db: &dyn ExpandDatabase,
     _arg_id: MacroCallId,
     tt: &tt::Subtree,
-    _: Span,
+    call_site: Span,
 ) -> ExpandResult<tt::Subtree> {
     let mut err = None;
     let mut text = String::new();
@@ -527,7 +523,9 @@ fn concat_expand(
                     | tt::LitKind::ByteStrRaw(_)
                     | tt::LitKind::CStr
                     | tt::LitKind::CStrRaw(_)
-                    | tt::LitKind::Err(_) => err = Some(ExpandError::other("unexpected literal")),
+                    | tt::LitKind::Err(_) => {
+                        err = Some(ExpandError::other(it.span, "unexpected literal"))
+                    }
                 }
             }
             // handle boolean literals
@@ -539,7 +537,7 @@ fn concat_expand(
             }
             tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (),
             _ => {
-                err.get_or_insert(mbe::ExpandError::UnexpectedToken.into());
+                err.get_or_insert(ExpandError::other(call_site, "unexpected token"));
             }
         }
     }
@@ -551,7 +549,7 @@ fn concat_bytes_expand(
     _db: &dyn ExpandDatabase,
     _arg_id: MacroCallId,
     tt: &tt::Subtree,
-    _: Span,
+    call_site: Span,
 ) -> ExpandResult<tt::Subtree> {
     let mut bytes = String::new();
     let mut err = None;
@@ -585,20 +583,22 @@ fn concat_bytes_expand(
                         bytes.extend(text.as_str().escape_debug());
                     }
                     _ => {
-                        err.get_or_insert(mbe::ExpandError::UnexpectedToken.into());
+                        err.get_or_insert(ExpandError::other(*span, "unexpected token"));
                         break;
                     }
                 }
             }
             tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (),
             tt::TokenTree::Subtree(tree) if tree.delimiter.kind == tt::DelimiterKind::Bracket => {
-                if let Err(e) = concat_bytes_expand_subtree(tree, &mut bytes, &mut record_span) {
+                if let Err(e) =
+                    concat_bytes_expand_subtree(tree, &mut bytes, &mut record_span, call_site)
+                {
                     err.get_or_insert(e);
                     break;
                 }
             }
             _ => {
-                err.get_or_insert(mbe::ExpandError::UnexpectedToken.into());
+                err.get_or_insert(ExpandError::other(call_site, "unexpected token"));
                 break;
             }
         }
@@ -623,6 +623,7 @@ fn concat_bytes_expand_subtree(
     tree: &tt::Subtree,
     bytes: &mut String,
     mut record_span: impl FnMut(Span),
+    err_span: Span,
 ) -> Result<(), ExpandError> {
     for (ti, tt) in tree.token_trees.iter().enumerate() {
         match tt {
@@ -650,7 +651,7 @@ fn concat_bytes_expand_subtree(
             }
             tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if ti % 2 == 1 && punct.char == ',' => (),
             _ => {
-                return Err(mbe::ExpandError::UnexpectedToken.into());
+                return Err(ExpandError::other(err_span, "unexpected token"));
             }
         }
     }
@@ -672,7 +673,7 @@ fn concat_idents_expand(
             }
             tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (),
             _ => {
-                err.get_or_insert(mbe::ExpandError::UnexpectedToken.into());
+                err.get_or_insert(ExpandError::other(span, "unexpected token"));
             }
         }
     }
@@ -686,16 +687,17 @@ fn relative_file(
     call_id: MacroCallId,
     path_str: &str,
     allow_recursion: bool,
+    err_span: Span,
 ) -> Result<EditionedFileId, ExpandError> {
     let lookup = call_id.lookup(db);
     let call_site = lookup.kind.file_id().original_file_respecting_includes(db).file_id();
     let path = AnchoredPath { anchor: call_site, path: path_str };
     let res = db
         .resolve_path(path)
-        .ok_or_else(|| ExpandError::other(format!("failed to load file `{path_str}`")))?;
+        .ok_or_else(|| ExpandError::other(err_span, format!("failed to load file `{path_str}`")))?;
     // Prevent include itself
     if res == call_site && !allow_recursion {
-        Err(ExpandError::other(format!("recursive inclusion of `{path_str}`")))
+        Err(ExpandError::other(err_span, format!("recursive inclusion of `{path_str}`")))
     } else {
         Ok(EditionedFileId::new(res, db.crate_graph()[lookup.krate].edition))
     }
@@ -727,7 +729,7 @@ fn parse_string(tt: &tt::Subtree) -> Result<(Symbol, Span), ExpandError> {
             }
             _ => None,
         })
-        .ok_or(mbe::ExpandError::ConversionError.into())
+        .ok_or(ExpandError::other(tt.delimiter.open, "expected string literal"))
 }

 fn include_expand(
|
@ -751,7 +753,7 @@ fn include_expand(
|
||||||
Some(it) => ExpandResult::ok(it),
|
Some(it) => ExpandResult::ok(it),
|
||||||
None => ExpandResult::new(
|
None => ExpandResult::new(
|
||||||
tt::Subtree::empty(DelimSpan { open: span, close: span }),
|
tt::Subtree::empty(DelimSpan { open: span, close: span }),
|
||||||
ExpandError::other("failed to parse included file"),
|
ExpandError::other(span, "failed to parse included file"),
|
||||||
),
|
),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@@ -761,7 +763,7 @@ pub fn include_input_to_file_id(
     arg_id: MacroCallId,
     arg: &tt::Subtree,
 ) -> Result<EditionedFileId, ExpandError> {
-    relative_file(db, arg_id, parse_string(arg)?.0.as_str(), false)
+    relative_file(db, arg_id, parse_string(arg)?.0.as_str(), false, arg.delimiter.open)
 }

 fn include_bytes_expand(
|
@ -800,7 +802,7 @@ fn include_str_expand(
|
||||||
// it's unusual to `include_str!` a Rust file), but we can return an empty string.
|
// it's unusual to `include_str!` a Rust file), but we can return an empty string.
|
||||||
// Ideally, we'd be able to offer a precise expansion if the user asks for macro
|
// Ideally, we'd be able to offer a precise expansion if the user asks for macro
|
||||||
// expansion.
|
// expansion.
|
||||||
let file_id = match relative_file(db, arg_id, path.as_str(), true) {
|
let file_id = match relative_file(db, arg_id, path.as_str(), true, span) {
|
||||||
Ok(file_id) => file_id,
|
Ok(file_id) => file_id,
|
||||||
Err(_) => {
|
Err(_) => {
|
||||||
return ExpandResult::ok(quote!(span =>""));
|
return ExpandResult::ok(quote!(span =>""));
|
||||||
|
@@ -836,7 +838,10 @@ fn env_expand(
         // The only variable rust-analyzer ever sets is `OUT_DIR`, so only diagnose that to avoid
        // unnecessary diagnostics for eg. `CARGO_PKG_NAME`.
        if key.as_str() == "OUT_DIR" {
-            err = Some(ExpandError::other(r#"`OUT_DIR` not set, enable "build scripts" to fix"#));
+            err = Some(ExpandError::other(
+                span,
+                r#"`OUT_DIR` not set, enable "build scripts" to fix"#,
+            ));
        }

        // If the variable is unset, still return a dummy string to help type inference along.
|
@ -885,7 +890,7 @@ fn quote_expand(
|
||||||
) -> ExpandResult<tt::Subtree> {
|
) -> ExpandResult<tt::Subtree> {
|
||||||
ExpandResult::new(
|
ExpandResult::new(
|
||||||
tt::Subtree::empty(tt::DelimSpan { open: span, close: span }),
|
tt::Subtree::empty(tt::DelimSpan { open: span, close: span }),
|
||||||
ExpandError::other("quote! is not implemented"),
|
ExpandError::other(span, "quote! is not implemented"),
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -259,8 +259,7 @@ pub fn expand_speculative(
|
||||||
|
|
||||||
// Do the actual expansion, we need to directly expand the proc macro due to the attribute args
|
// Do the actual expansion, we need to directly expand the proc macro due to the attribute args
|
||||||
// Otherwise the expand query will fetch the non speculative attribute args and pass those instead.
|
// Otherwise the expand query will fetch the non speculative attribute args and pass those instead.
|
||||||
let mut speculative_expansion =
|
let mut speculative_expansion = match loc.def.kind {
|
||||||
match loc.def.kind {
|
|
||||||
MacroDefKind::ProcMacro(ast, expander, _) => {
|
MacroDefKind::ProcMacro(ast, expander, _) => {
|
||||||
let span = db.proc_macro_span(ast);
|
let span = db.proc_macro_span(ast);
|
||||||
tt.delimiter = tt::Delimiter::invisible_spanned(span);
|
tt.delimiter = tt::Delimiter::invisible_spanned(span);
|
||||||
|
@@ -278,9 +277,9 @@ pub fn expand_speculative(
         MacroDefKind::BuiltInAttr(_, it) if it.is_derive() => {
             pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?, span)
         }
-        MacroDefKind::Declarative(it) => db
-            .decl_macro_expander(loc.krate, it)
-            .expand_unhygienic(db, tt, loc.def.krate, span, loc.def.edition),
+        MacroDefKind::Declarative(it) => {
+            db.decl_macro_expander(loc.krate, it).expand_unhygienic(tt, span, loc.def.edition)
+        }
         MacroDefKind::BuiltIn(_, it) => {
             it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
         }
@@ -735,11 +734,14 @@ fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<()>> {
     if TOKEN_LIMIT.check(count).is_err() {
         Err(ExpandResult {
             value: (),
-            err: Some(ExpandError::other(format!(
+            err: Some(ExpandError::other(
+                tt.delimiter.open,
+                format!(
                 "macro invocation exceeds token limit: produced {} tokens, limit is {}",
                 count,
                 TOKEN_LIMIT.inner(),
-            ))),
+                ),
+            )),
         })
     } else {
         Ok(())
@@ -1,7 +1,6 @@
 //! Compiled declarative macro expanders (`macro_rules!`` and `macro`)
-use std::sync::OnceLock;

-use base_db::{CrateId, VersionReq};
+use base_db::CrateId;
 use intern::sym;
 use mbe::DocCommentDesugarMode;
 use span::{Edition, MacroCallId, Span, SyntaxContextId};
@@ -13,7 +12,7 @@ use crate::{
     attrs::RawAttrs,
     db::ExpandDatabase,
     hygiene::{apply_mark, Transparency},
-    tt, AstId, ExpandError, ExpandResult, Lookup,
+    tt, AstId, ExpandError, ExpandErrorKind, ExpandResult, Lookup,
 };

 /// Old-style `macro_rules` or the new macros 2.0
@@ -23,9 +22,6 @@ pub struct DeclarativeMacroExpander {
     pub transparency: Transparency,
 }

-// FIXME: Remove this once we drop support for 1.76
-static REQUIREMENT: OnceLock<VersionReq> = OnceLock::new();
-
 impl DeclarativeMacroExpander {
     pub fn expand(
         &self,
@@ -35,29 +31,16 @@ impl DeclarativeMacroExpander {
         span: Span,
     ) -> ExpandResult<(tt::Subtree, Option<u32>)> {
         let loc = db.lookup_intern_macro_call(call_id);
-        let toolchain = db.toolchain(loc.def.krate);
-        let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
-            REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(
-                &base_db::Version {
-                    pre: base_db::Prerelease::EMPTY,
-                    build: base_db::BuildMetadata::EMPTY,
-                    major: version.major,
-                    minor: version.minor,
-                    patch: version.patch,
-                },
-            )
-        });
         match self.mac.err() {
             Some(_) => ExpandResult::new(
                 (tt::Subtree::empty(tt::DelimSpan { open: span, close: span }), None),
-                ExpandError::MacroDefinition,
+                ExpandError::new(span, ExpandErrorKind::MacroDefinition),
             ),
             None => self
                 .mac
                 .expand(
                     &tt,
                     |s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency),
-                    new_meta_vars,
                     span,
                     loc.def.edition,
                 )
@@ -67,32 +50,18 @@

     pub fn expand_unhygienic(
         &self,
-        db: &dyn ExpandDatabase,
         tt: tt::Subtree,
-        krate: CrateId,
         call_site: Span,
         def_site_edition: Edition,
     ) -> ExpandResult<tt::Subtree> {
-        let toolchain = db.toolchain(krate);
-        let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
-            REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(
-                &base_db::Version {
-                    pre: base_db::Prerelease::EMPTY,
-                    build: base_db::BuildMetadata::EMPTY,
-                    major: version.major,
-                    minor: version.minor,
-                    patch: version.patch,
-                },
-            )
-        });
         match self.mac.err() {
             Some(_) => ExpandResult::new(
                 tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
-                ExpandError::MacroDefinition,
+                ExpandError::new(call_site, ExpandErrorKind::MacroDefinition),
             ),
             None => self
                 .mac
-                .expand(&tt, |_| (), new_meta_vars, call_site, def_site_edition)
+                .expand(&tt, |_| (), call_site, def_site_edition)
                 .map(TupleExt::head)
                 .map_err(Into::into),
         }
@@ -176,14 +176,19 @@ fn eager_macro_recur(
             Some(path) => match macro_resolver(&path) {
                 Some(def) => def,
                 None => {
-                    error =
-                        Some(ExpandError::other(format!("unresolved macro {}", path.display(db))));
+                    error = Some(ExpandError::other(
+                        span_map.span_at(call.syntax().text_range().start()),
+                        format!("unresolved macro {}", path.display(db)),
+                    ));
                     offset += call.syntax().text_range().len();
                     continue;
                 }
             },
             None => {
-                error = Some(ExpandError::other("malformed macro invocation"));
+                error = Some(ExpandError::other(
+                    span_map.span_at(call.syntax().text_range().start()),
+                    "malformed macro invocation",
+                ));
                 offset += call.syntax().text_range().len();
                 continue;
             }
@@ -124,47 +124,77 @@ impl_intern_lookup!(
 pub type ExpandResult<T> = ValueResult<T, ExpandError>;

 #[derive(Debug, PartialEq, Eq, Clone, Hash)]
-pub enum ExpandError {
-    ProcMacroAttrExpansionDisabled,
-    MissingProcMacroExpander(CrateId),
-    /// The macro expansion is disabled.
-    MacroDisabled,
-    MacroDefinition,
-    Mbe(mbe::ExpandError),
-    RecursionOverflow,
-    Other(Arc<Box<str>>),
-    ProcMacroPanic(Arc<Box<str>>),
+pub struct ExpandError {
+    inner: Arc<(ExpandErrorKind, Span)>,
 }

 impl ExpandError {
-    pub fn other(msg: impl Into<Box<str>>) -> Self {
-        ExpandError::Other(Arc::new(msg.into()))
+    pub fn new(span: Span, kind: ExpandErrorKind) -> Self {
+        ExpandError { inner: Arc::new((kind, span)) }
     }
+    pub fn other(span: Span, msg: impl Into<Box<str>>) -> Self {
+        ExpandError { inner: Arc::new((ExpandErrorKind::Other(msg.into()), span)) }
+    }
+    pub fn kind(&self) -> &ExpandErrorKind {
+        &self.inner.0
+    }
+    pub fn span(&self) -> Span {
+        self.inner.1
+    }
+}
+
+#[derive(Debug, PartialEq, Eq, Clone, Hash)]
+pub enum ExpandErrorKind {
+    /// Attribute macro expansion is disabled.
+    ProcMacroAttrExpansionDisabled,
+    MissingProcMacroExpander(CrateId),
+    /// The macro for this call is disabled.
+    MacroDisabled,
+    /// The macro definition has errors.
+    MacroDefinition,
+    Mbe(mbe::ExpandErrorKind),
+    RecursionOverflow,
+    Other(Box<str>),
+    ProcMacroPanic(Box<str>),
+}
+
+impl ExpandError {
+    pub fn render_to_string(&self, db: &dyn ExpandDatabase) -> (String, bool) {
+        self.inner.0.render_to_string(db)
+    }
+}
+
+impl ExpandErrorKind {
     pub fn render_to_string(&self, db: &dyn ExpandDatabase) -> (String, bool) {
         match self {
-            Self::ProcMacroAttrExpansionDisabled => {
+            ExpandErrorKind::ProcMacroAttrExpansionDisabled => {
                 ("procedural attribute macro expansion is disabled".to_owned(), false)
             }
-            Self::MacroDisabled => ("proc-macro is explicitly disabled".to_owned(), false),
-            &Self::MissingProcMacroExpander(def_crate) => {
+            ExpandErrorKind::MacroDisabled => {
+                ("proc-macro is explicitly disabled".to_owned(), false)
+            }
+            &ExpandErrorKind::MissingProcMacroExpander(def_crate) => {
                 match db.proc_macros().get_error_for_crate(def_crate) {
                     Some((e, hard_err)) => (e.to_owned(), hard_err),
                     None => ("missing expander".to_owned(), true),
                 }
             }
-            Self::MacroDefinition => ("macro definition has parse errors".to_owned(), true),
-            Self::Mbe(e) => (e.to_string(), true),
-            Self::RecursionOverflow => ("overflow expanding the original macro".to_owned(), true),
-            Self::Other(e) => ((***e).to_owned(), true),
-            Self::ProcMacroPanic(e) => ((***e).to_owned(), true),
+            ExpandErrorKind::MacroDefinition => {
+                ("macro definition has parse errors".to_owned(), true)
+            }
+            ExpandErrorKind::Mbe(e) => (e.to_string(), true),
+            ExpandErrorKind::RecursionOverflow => {
+                ("overflow expanding the original macro".to_owned(), true)
+            }
+            ExpandErrorKind::Other(e) => ((**e).to_owned(), true),
+            ExpandErrorKind::ProcMacroPanic(e) => ((**e).to_owned(), true),
         }
     }
 }

 impl From<mbe::ExpandError> for ExpandError {
     fn from(mbe: mbe::ExpandError) -> Self {
-        Self::Mbe(mbe)
+        ExpandError { inner: Arc::new((ExpandErrorKind::Mbe(mbe.inner.1.clone()), mbe.inner.0)) }
     }
 }
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
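A hedged sketch of how callers consume the reworked type, mirroring the `Expander` hunk near the top of the diff, which now matches on `err.kind()` instead of on enum variants. Everything here is a simplified stand-in (no real `Span`, `CrateId`, or salsa database), not the crate's actual definitions:

```rust
use std::sync::Arc;

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
struct Span(u32);

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
enum ExpandErrorKind {
    MissingProcMacroExpander(u32), // the u32 stands in for a CrateId
    RecursionOverflow,
    Other(Box<str>),
}

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct ExpandError {
    inner: Arc<(ExpandErrorKind, Span)>,
}

impl ExpandError {
    fn new(span: Span, kind: ExpandErrorKind) -> Self {
        ExpandError { inner: Arc::new((kind, span)) }
    }
    fn kind(&self) -> &ExpandErrorKind {
        &self.inner.0
    }
}

// Callers that previously matched `Some(ExpandError::MissingProcMacroExpander(_))`
// now go through `kind()`, as the Expander hunk above does.
fn expands_to_missing_expr(err: &Option<ExpandError>) -> bool {
    matches!(err, Some(e) if matches!(e.kind(), ExpandErrorKind::MissingProcMacroExpander(_)))
}

fn main() {
    let err = Some(ExpandError::new(
        Span(7),
        ExpandErrorKind::MissingProcMacroExpander(0),
    ));
    println!("missing expander handled: {}", expands_to_missing_expr(&err));
}
```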
@@ -7,9 +7,8 @@ use base_db::{CrateId, Env};
 use intern::Symbol;
 use rustc_hash::FxHashMap;
 use span::Span;
-use triomphe::Arc;

-use crate::{db::ExpandDatabase, tt, ExpandError, ExpandResult};
+use crate::{db::ExpandDatabase, tt, ExpandError, ExpandErrorKind, ExpandResult};

 #[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)]
 pub enum ProcMacroKind {
@@ -76,15 +75,18 @@ impl FromIterator<(CrateId, ProcMacroLoadResult)> for ProcMacros {
 }

 impl ProcMacros {
-    fn get(&self, krate: CrateId, idx: u32) -> Result<&ProcMacro, ExpandError> {
+    fn get(&self, krate: CrateId, idx: u32, err_span: Span) -> Result<&ProcMacro, ExpandError> {
         let proc_macros = match self.0.get(&krate) {
             Some(Ok(proc_macros)) => proc_macros,
             Some(Err(_)) | None => {
-                return Err(ExpandError::other("internal error: no proc macros for crate"));
+                return Err(ExpandError::other(
+                    err_span,
+                    "internal error: no proc macros for crate",
+                ));
             }
         };
         proc_macros.get(idx as usize).ok_or_else(|| {
-            ExpandError::other(
+            ExpandError::other(err_span,
                 format!(
                     "internal error: proc-macro index out of bounds: the length is {} but the index is {}",
                     proc_macros.len(),
@@ -184,11 +186,11 @@ impl CustomProcMacroExpander {
     }

     /// The macro is explicitly disabled due to proc-macro attribute expansion being disabled.
-    pub const fn as_expand_error(&self, def_crate: CrateId) -> Option<ExpandError> {
+    pub fn as_expand_error(&self, def_crate: CrateId) -> Option<ExpandErrorKind> {
         match self.proc_macro_id {
-            Self::PROC_MACRO_ATTR_DISABLED => Some(ExpandError::ProcMacroAttrExpansionDisabled),
-            Self::DISABLED_ID => Some(ExpandError::MacroDisabled),
-            Self::MISSING_EXPANDER => Some(ExpandError::MissingProcMacroExpander(def_crate)),
+            Self::PROC_MACRO_ATTR_DISABLED => Some(ExpandErrorKind::ProcMacroAttrExpansionDisabled),
+            Self::DISABLED_ID => Some(ExpandErrorKind::MacroDisabled),
+            Self::MISSING_EXPANDER => Some(ExpandErrorKind::MissingProcMacroExpander(def_crate)),
             _ => None,
         }
     }
@@ -207,19 +209,19 @@ impl CustomProcMacroExpander {
         match self.proc_macro_id {
             Self::PROC_MACRO_ATTR_DISABLED => ExpandResult::new(
                 tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
-                ExpandError::ProcMacroAttrExpansionDisabled,
+                ExpandError::new(call_site, ExpandErrorKind::ProcMacroAttrExpansionDisabled),
             ),
             Self::MISSING_EXPANDER => ExpandResult::new(
                 tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
-                ExpandError::MissingProcMacroExpander(def_crate),
+                ExpandError::new(call_site, ExpandErrorKind::MissingProcMacroExpander(def_crate)),
             ),
             Self::DISABLED_ID => ExpandResult::new(
                 tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
-                ExpandError::MacroDisabled,
+                ExpandError::new(call_site, ExpandErrorKind::MacroDisabled),
             ),
             id => {
                 let proc_macros = db.proc_macros();
-                let proc_macro = match proc_macros.get(def_crate, id) {
+                let proc_macro = match proc_macros.get(def_crate, id, call_site) {
                     Ok(proc_macro) => proc_macro,
                     Err(e) => {
                         return ExpandResult::new(
@@ -240,12 +242,18 @@ impl CustomProcMacroExpander {
                     ProcMacroExpansionError::System(text)
                         if proc_macro.kind == ProcMacroKind::Attr =>
                     {
-                        ExpandResult { value: tt.clone(), err: Some(ExpandError::other(text)) }
+                        ExpandResult {
+                            value: tt.clone(),
+                            err: Some(ExpandError::other(call_site, text)),
+                        }
                     }
                     ProcMacroExpansionError::System(text)
                     | ProcMacroExpansionError::Panic(text) => ExpandResult::new(
                         tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
-                        ExpandError::ProcMacroPanic(Arc::new(text.into_boxed_str())),
+                        ExpandError::new(
+                            call_site,
+                            ExpandErrorKind::ProcMacroPanic(text.into_boxed_str()),
+                        ),
                     ),
                 },
             }
@@ -833,15 +833,23 @@ fn macro_call_diagnostics(
     let ValueResult { value: parse_errors, err } = &*e;
     if let Some(err) = err {
         let loc = db.lookup_intern_macro_call(macro_call_id);
-        let (node, precise_location, _macro_name, _kind) =
-            precise_macro_call_location(&loc.kind, db);
+        let file_id = loc.kind.file_id();
+        let node =
+            InFile::new(file_id, db.ast_id_map(file_id).get_erased(loc.kind.erased_ast_id()));
         let (message, error) = err.render_to_string(db.upcast());
+        let precise_location = Some(
+            err.span().range
+                + db.ast_id_map(err.span().anchor.file_id.into())
+                    .get_erased(err.span().anchor.ast_id)
+                    .text_range()
+                    .start(),
+        );
         acc.push(MacroError { node, precise_location, message, error }.into());
     }

     if !parse_errors.is_empty() {
         let loc = db.lookup_intern_macro_call(macro_call_id);
-        let (node, precise_location, _, _) = precise_macro_call_location(&loc.kind, db);
+        let (node, precise_location) = precise_macro_call_location(&loc.kind, db);
         acc.push(
             MacroExpansionParseError { node, precise_location, errors: parse_errors.clone() }
                 .into(),
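The `precise_location` arithmetic above is the payoff of carrying spans: an expansion span stores a range relative to an anchoring AST node, so the absolute offset in the file is that node's start plus the span's range. A toy illustration of that offset addition with plain integers — the real code uses `TextRange`/`TextSize` and the `ast_id_map`, so every name here is a hypothetical stand-in:

```rust
// Hypothetical miniature of the anchor-relative span used above:
// `range` is relative to the start of the node identified by `anchor_ast_id`.
struct SpanData {
    range: (u32, u32), // (start, end) relative to the anchor node
    anchor_ast_id: u32,
}

// Stand-in for `db.ast_id_map(..).get_erased(..).text_range().start()`.
fn anchor_node_start(anchor_ast_id: u32) -> u32 {
    // Pretend the anchoring item begins at byte 120 of the file.
    let _ = anchor_ast_id;
    120
}

fn absolute_range(span: &SpanData) -> (u32, u32) {
    let base = anchor_node_start(span.anchor_ast_id);
    // Same shape as `err.span().range + ...text_range().start()` in the hunk above.
    (base + span.range.0, base + span.range.1)
}

fn main() {
    let span = SpanData { range: (8, 14), anchor_ast_id: 42 };
    // The diagnostic can highlight bytes 128..134 instead of the whole call.
    println!("{:?}", absolute_range(&span));
}
```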
@@ -891,14 +899,14 @@ fn emit_def_diagnostic_(
             acc.push(UnresolvedExternCrate { decl: InFile::new(ast.file_id, item) }.into());
         }

-        DefDiagnosticKind::MacroError { ast, err } => {
+        DefDiagnosticKind::MacroError { ast, path, err } => {
             let item = ast.to_ptr(db.upcast());
             let (message, error) = err.render_to_string(db.upcast());
             acc.push(
                 MacroError {
                     node: InFile::new(ast.file_id, item.syntax_node_ptr()),
                     precise_location: None,
-                    message,
+                    message: format!("{}: {message}", path.display(db.upcast())),
                     error,
                 }
                 .into(),
@@ -1001,7 +1009,7 @@ fn emit_def_diagnostic_(
             })();
         }
         DefDiagnosticKind::UnresolvedMacroCall { ast, path } => {
-            let (node, precise_location, _, _) = precise_macro_call_location(ast, db);
+            let (node, precise_location) = precise_macro_call_location(ast, db);
             acc.push(
                 UnresolvedMacroCall {
                     macro_call: node,
@@ -1070,7 +1078,7 @@ fn emit_def_diagnostic_(
 fn precise_macro_call_location(
     ast: &MacroCallKind,
     db: &dyn HirDatabase,
-) -> (InFile<SyntaxNodePtr>, Option<TextRange>, Option<String>, MacroKind) {
+) -> (InFile<SyntaxNodePtr>, Option<TextRange>) {
     // FIXME: maybe we actually want slightly different ranges for the different macro diagnostics
     // - e.g. the full attribute for macro errors, but only the name for name resolution
     match ast {
@@ -1082,8 +1090,6 @@ fn precise_macro_call_location(
                     .and_then(|it| it.segment())
                     .and_then(|it| it.name_ref())
                     .map(|it| it.syntax().text_range()),
-                node.path().and_then(|it| it.segment()).map(|it| it.to_string()),
-                MacroKind::ProcMacro,
             )
         }
         MacroCallKind::Derive { ast_id, derive_attr_index, derive_index, .. } => {
@@ -1112,8 +1118,6 @@ fn precise_macro_call_location(
             (
                 ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node))),
                 token.as_ref().map(|tok| tok.text_range()),
-                token.as_ref().map(ToString::to_string),
-                MacroKind::Derive,
             )
         }
         MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
@@ -1128,12 +1132,6 @@ fn precise_macro_call_location(
             (
                 ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&attr))),
                 Some(attr.syntax().text_range()),
-                attr.path()
-                    .and_then(|path| path.segment())
-                    .and_then(|seg| seg.name_ref())
-                    .as_ref()
-                    .map(ToString::to_string),
-                MacroKind::Attr,
             )
         }
     }
@@ -1800,9 +1798,16 @@ impl DefWithBody {
                 BodyDiagnostic::MacroError { node, err } => {
                     let (message, error) = err.render_to_string(db.upcast());

+                    let precise_location = Some(
+                        err.span().range
+                            + db.ast_id_map(err.span().anchor.file_id.into())
+                                .get_erased(err.span().anchor.ast_id)
+                                .text_range()
+                                .start(),
+                    );
                     MacroError {
                         node: (*node).map(|it| it.into()),
-                        precise_location: None,
+                        precise_location,
                         message,
                         error,
                     }
@@ -168,12 +168,12 @@ fn main() {
     // Test a handful of built-in (eager) macros:

     include!(invalid);
-  //^^^^^^^ error: could not convert tokens
+  //^^^^^^^ error: expected string literal
     include!("does not exist");
   //^^^^^^^ error: failed to load file `does not exist`

     env!(invalid);
-  //^^^ error: could not convert tokens
+  //^^^ error: expected string literal

     env!("OUT_DIR");
   //^^^ error: `OUT_DIR` not set, enable "build scripts" to fix
@@ -45,7 +45,7 @@ fn benchmark_expand_macro_rules() {
         invocations
             .into_iter()
             .map(|(id, tt)| {
-                let res = rules[&id].expand(&tt, |_| (), true, DUMMY, Edition::CURRENT);
+                let res = rules[&id].expand(&tt, |_| (), DUMMY, Edition::CURRENT);
                 assert!(res.err.is_none());
                 res.value.0.token_trees.len()
             })
@@ -118,7 +118,7 @@ fn invocation_fixtures(
             },
             token_trees: token_trees.into_boxed_slice(),
         };
-        if it.expand(&subtree, |_| (), true, DUMMY, Edition::CURRENT).err.is_none() {
+        if it.expand(&subtree, |_| (), DUMMY, Edition::CURRENT).err.is_none() {
            res.push((name.clone(), subtree));
            break;
        }
@@ -9,13 +9,12 @@ use intern::Symbol;
 use rustc_hash::FxHashMap;
 use span::{Edition, Span};

-use crate::{parser::MetaVarKind, ExpandError, ExpandResult, MatchedArmIndex};
+use crate::{parser::MetaVarKind, ExpandError, ExpandErrorKind, ExpandResult, MatchedArmIndex};

 pub(crate) fn expand_rules(
     rules: &[crate::Rule],
     input: &tt::Subtree<Span>,
     marker: impl Fn(&mut Span) + Copy,
-    new_meta_vars: bool,
     call_site: Span,
     def_site_edition: Edition,
 ) -> ExpandResult<(tt::Subtree<Span>, MatchedArmIndex)> {
@@ -27,13 +26,8 @@ pub(crate) fn expand_rules(
             // If we find a rule that applies without errors, we're done.
             // Unconditionally returning the transcription here makes the
             // `test_repeat_bad_var` test fail.
-            let ExpandResult { value, err: transcribe_err } = transcriber::transcribe(
-                &rule.rhs,
-                &new_match.bindings,
-                marker,
-                new_meta_vars,
-                call_site,
-            );
+            let ExpandResult { value, err: transcribe_err } =
+                transcriber::transcribe(&rule.rhs, &new_match.bindings, marker, call_site);
             if transcribe_err.is_none() {
                 return ExpandResult::ok((value, Some(idx as u32)));
             }
@@ -52,7 +46,7 @@ pub(crate) fn expand_rules(
     if let Some((match_, rule, idx)) = match_ {
         // if we got here, there was no match without errors
         let ExpandResult { value, err: transcribe_err } =
-            transcriber::transcribe(&rule.rhs, &match_.bindings, marker, new_meta_vars, call_site);
+            transcriber::transcribe(&rule.rhs, &match_.bindings, marker, call_site);
         ExpandResult { value: (value, idx.try_into().ok()), err: match_.err.or(transcribe_err) }
     } else {
         ExpandResult::new(
@@ -63,7 +57,7 @@
                 },
                 None,
             ),
-            ExpandError::NoMatchingRule,
+            ExpandError::new(call_site, ExpandErrorKind::NoMatchingRule),
         )
     }
 }
|
|
|
@ -70,7 +70,7 @@ use crate::{
|
||||||
expander::{Binding, Bindings, ExpandResult, Fragment},
|
expander::{Binding, Bindings, ExpandResult, Fragment},
|
||||||
expect_fragment,
|
expect_fragment,
|
||||||
parser::{MetaVarKind, Op, RepeatKind, Separator},
|
parser::{MetaVarKind, Op, RepeatKind, Separator},
|
||||||
ExpandError, MetaTemplate, ValueResult,
|
ExpandError, ExpandErrorKind, MetaTemplate, ValueResult,
|
||||||
};
|
};
|
||||||
|
|
||||||
impl Bindings {
|
impl Bindings {
|
||||||
|
@ -510,11 +510,17 @@ fn match_loop_inner<'t>(
|
||||||
if matches!(rhs, tt::Leaf::Literal(it) if it.symbol == lhs.symbol) {
|
if matches!(rhs, tt::Leaf::Literal(it) if it.symbol == lhs.symbol) {
|
||||||
item.dot.next();
|
item.dot.next();
|
||||||
} else {
|
} else {
|
||||||
res.add_err(ExpandError::UnexpectedToken);
|
res.add_err(ExpandError::new(
|
||||||
|
*rhs.span(),
|
||||||
|
ExpandErrorKind::UnexpectedToken,
|
||||||
|
));
|
||||||
item.is_error = true;
|
item.is_error = true;
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
res.add_err(ExpandError::binding_error(format!("expected literal: `{lhs}`")));
|
res.add_err(ExpandError::binding_error(
|
||||||
|
src.clone().next().map_or(delim_span.close, |it| it.first_span()),
|
||||||
|
format!("expected literal: `{lhs}`"),
|
||||||
|
));
|
||||||
item.is_error = true;
|
item.is_error = true;
|
||||||
}
|
}
|
||||||
try_push!(next_items, item);
|
try_push!(next_items, item);
|
||||||
|
@ -524,11 +530,17 @@ fn match_loop_inner<'t>(
|
||||||
if matches!(rhs, tt::Leaf::Ident(it) if it.sym == lhs.sym) {
|
if matches!(rhs, tt::Leaf::Ident(it) if it.sym == lhs.sym) {
|
||||||
item.dot.next();
|
item.dot.next();
|
||||||
} else {
|
} else {
|
||||||
res.add_err(ExpandError::UnexpectedToken);
|
res.add_err(ExpandError::new(
|
||||||
|
*rhs.span(),
|
||||||
|
ExpandErrorKind::UnexpectedToken,
|
||||||
|
));
|
||||||
item.is_error = true;
|
item.is_error = true;
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
res.add_err(ExpandError::binding_error(format!("expected ident: `{lhs}`")));
|
res.add_err(ExpandError::binding_error(
|
||||||
|
src.clone().next().map_or(delim_span.close, |it| it.first_span()),
|
||||||
|
format!("expected ident: `{lhs}`"),
|
||||||
|
));
|
||||||
item.is_error = true;
|
item.is_error = true;
|
||||||
}
|
}
|
||||||
try_push!(next_items, item);
|
try_push!(next_items, item);
|
||||||
|
@ -538,8 +550,8 @@ fn match_loop_inner<'t>(
|
||||||
let error = if let Ok(rhs) = fork.expect_glued_punct() {
|
let error = if let Ok(rhs) = fork.expect_glued_punct() {
|
||||||
let first_is_single_quote = rhs[0].char == '\'';
|
let first_is_single_quote = rhs[0].char == '\'';
|
||||||
let lhs = lhs.iter().map(|it| it.char);
|
let lhs = lhs.iter().map(|it| it.char);
|
||||||
let rhs = rhs.iter().map(|it| it.char);
|
let rhs_ = rhs.iter().map(|it| it.char);
|
||||||
if lhs.clone().eq(rhs) {
|
if lhs.clone().eq(rhs_) {
|
||||||
// HACK: here we use `meta_result` to pass `TtIter` back to caller because
|
// HACK: here we use `meta_result` to pass `TtIter` back to caller because
|
||||||
// it might have been advanced multiple times. `ValueResult` is
|
// it might have been advanced multiple times. `ValueResult` is
|
||||||
// insignificant.
|
// insignificant.
|
||||||
|
@ -552,13 +564,19 @@ fn match_loop_inner<'t>(
|
||||||
if first_is_single_quote {
|
if first_is_single_quote {
|
||||||
// If the first punct token is a single quote, that's a part of a lifetime
|
// If the first punct token is a single quote, that's a part of a lifetime
|
||||||
// ident, not a punct.
|
// ident, not a punct.
|
||||||
ExpandError::UnexpectedToken
|
ExpandError::new(
|
||||||
|
rhs.get(1).map_or(rhs[0].span, |it| it.span),
|
||||||
|
ExpandErrorKind::UnexpectedToken,
|
||||||
|
)
|
||||||
} else {
|
} else {
|
||||||
let lhs = lhs.collect::<String>();
|
let lhs = lhs.collect::<String>();
|
||||||
ExpandError::binding_error(format!("expected punct: `{lhs}`"))
|
ExpandError::binding_error(rhs[0].span, format!("expected punct: `{lhs}`"))
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
ExpandError::UnexpectedToken
|
ExpandError::new(
|
||||||
|
src.clone().next().map_or(delim_span.close, |it| it.first_span()),
|
||||||
|
ExpandErrorKind::UnexpectedToken,
|
||||||
|
)
|
||||||
};
|
};
|
||||||
|
|
||||||
res.add_err(error);
|
res.add_err(error);
|
||||||
|
@@ -651,7 +669,7 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree<Span>, edition: Edition)
            if let Some(item) = error_recover_item {
                res.bindings = bindings_builder.build(&item);
            }
-           res.add_err(ExpandError::UnexpectedToken);
+           res.add_err(ExpandError::new(span.open, ExpandErrorKind::UnexpectedToken));
        }
        return res;
    }
@@ -670,7 +688,7 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree<Span>, edition: Edition)
                src = it;
                res.unmatched_tts += src.len();
            }
-           res.add_err(ExpandError::LeftoverTokens);
+           res.add_err(ExpandError::new(span.open, ExpandErrorKind::LeftoverTokens));

            if let Some(error_recover_item) = error_recover_item {
                res.bindings = bindings_builder.build(&error_recover_item);
@@ -746,7 +764,8 @@ fn match_meta_var(
 ) -> ExpandResult<Option<Fragment>> {
     let fragment = match kind {
         MetaVarKind::Path => {
-            return expect_fragment(input, parser::PrefixEntryPoint::Path, edition).map(|it| {
+            return expect_fragment(input, parser::PrefixEntryPoint::Path, edition, delim_span)
+                .map(|it| {
                 it.map(|it| tt::TokenTree::subtree_or_wrap(it, delim_span)).map(Fragment::Path)
             });
         }
@@ -763,11 +782,15 @@ fn match_meta_var(
                     || it.sym == sym::let_
                     || it.sym == sym::const_ =>
                 {
-                    return ExpandResult::only_err(ExpandError::NoMatchingRule)
+                    return ExpandResult::only_err(ExpandError::new(
+                        it.span,
+                        ExpandErrorKind::NoMatchingRule,
+                    ))
                 }
                 _ => {}
             };
-            return expect_fragment(input, parser::PrefixEntryPoint::Expr, edition).map(|tt| {
+            return expect_fragment(input, parser::PrefixEntryPoint::Expr, edition, delim_span)
+                .map(|tt| {
                 tt.map(|tt| match tt {
                     tt::TokenTree::Leaf(leaf) => tt::Subtree {
                         delimiter: tt::Delimiter::invisible_spanned(*leaf.span()),
@@ -784,16 +807,29 @@ fn match_meta_var(
             });
         }
         MetaVarKind::Ident | MetaVarKind::Tt | MetaVarKind::Lifetime | MetaVarKind::Literal => {
+            let span = input.next_span();
             let tt_result = match kind {
                 MetaVarKind::Ident => input
                     .expect_ident()
                     .map(|ident| tt::Leaf::from(ident.clone()).into())
-                    .map_err(|()| ExpandError::binding_error("expected ident")),
-                MetaVarKind::Tt => {
-                    expect_tt(input).map_err(|()| ExpandError::binding_error("expected token tree"))
-                }
-                MetaVarKind::Lifetime => expect_lifetime(input)
-                    .map_err(|()| ExpandError::binding_error("expected lifetime")),
+                    .map_err(|()| {
+                        ExpandError::binding_error(
+                            span.unwrap_or(delim_span.close),
+                            "expected ident",
+                        )
+                    }),
+                MetaVarKind::Tt => expect_tt(input).map_err(|()| {
+                    ExpandError::binding_error(
+                        span.unwrap_or(delim_span.close),
+                        "expected token tree",
+                    )
+                }),
+                MetaVarKind::Lifetime => expect_lifetime(input).map_err(|()| {
+                    ExpandError::binding_error(
+                        span.unwrap_or(delim_span.close),
+                        "expected lifetime",
+                    )
+                }),
                 MetaVarKind::Literal => {
                     let neg = eat_char(input, '-');
                     input
@@ -808,7 +844,12 @@ fn match_meta_var(
                            }),
                        }
                    })
-                   .map_err(|()| ExpandError::binding_error("expected literal"))
+                   .map_err(|()| {
+                       ExpandError::binding_error(
+                           span.unwrap_or(delim_span.close),
+                           "expected literal",
+                       )
+                   })
                }
                _ => unreachable!(),
            };
@@ -823,7 +864,7 @@ fn match_meta_var(
         MetaVarKind::Item => parser::PrefixEntryPoint::Item,
         MetaVarKind::Vis => parser::PrefixEntryPoint::Vis,
     };
-    expect_fragment(input, fragment, edition).map(|it| it.map(Fragment::Tokens))
+    expect_fragment(input, fragment, edition, delim_span).map(|it| it.map(Fragment::Tokens))
 }

 fn collect_vars(collector_fun: &mut impl FnMut(Symbol), pattern: &MetaTemplate) {
@@ -8,14 +8,17 @@ use tt::Delimiter;
 use crate::{
     expander::{Binding, Bindings, Fragment},
     parser::{MetaVarKind, Op, RepeatKind, Separator},
-    CountError, ExpandError, ExpandResult, MetaTemplate,
+    ExpandError, ExpandErrorKind, ExpandResult, MetaTemplate,
 };

 impl Bindings {
-    fn get(&self, name: &Symbol) -> Result<&Binding, ExpandError> {
+    fn get(&self, name: &Symbol, span: Span) -> Result<&Binding, ExpandError> {
         match self.inner.get(name) {
             Some(binding) => Ok(binding),
-            None => Err(ExpandError::UnresolvedBinding(Box::new(Box::from(name.as_str())))),
+            None => Err(ExpandError::new(
+                span,
+                ExpandErrorKind::UnresolvedBinding(Box::new(Box::from(name.as_str()))),
+            )),
         }
     }

@@ -27,10 +30,10 @@ impl Bindings {
         marker: impl Fn(&mut Span),
     ) -> Result<Fragment, ExpandError> {
         macro_rules! binding_err {
-            ($($arg:tt)*) => { ExpandError::binding_error(format!($($arg)*)) };
+            ($($arg:tt)*) => { ExpandError::binding_error(span, format!($($arg)*)) };
         }

-        let mut b = self.get(name)?;
+        let mut b = self.get(name, span)?;
         for nesting_state in nesting.iter_mut() {
             nesting_state.hit = true;
             b = match b {
@@ -142,10 +145,9 @@ pub(super) fn transcribe(
     template: &MetaTemplate,
     bindings: &Bindings,
     marker: impl Fn(&mut Span) + Copy,
-    new_meta_vars: bool,
     call_site: Span,
 ) -> ExpandResult<tt::Subtree<Span>> {
-    let mut ctx = ExpandCtx { bindings, nesting: Vec::new(), new_meta_vars, call_site };
+    let mut ctx = ExpandCtx { bindings, nesting: Vec::new(), call_site };
     let mut arena: Vec<tt::TokenTree<Span>> = Vec::new();
     expand_subtree(&mut ctx, template, None, &mut arena, marker)
 }
@@ -165,7 +167,6 @@ struct NestingState {
 struct ExpandCtx<'a> {
     bindings: &'a Bindings,
     nesting: Vec<NestingState>,
-    new_meta_vars: bool,
     call_site: Span,
 }

@@ -263,7 +264,7 @@ fn expand_subtree(
                 );
             }
             Op::Count { name, depth } => {
-                let mut binding = match ctx.bindings.get(name) {
+                let mut binding = match ctx.bindings.get(name, ctx.call_site) {
                     Ok(b) => b,
                     Err(e) => {
                         if err.is_none() {
@@ -299,29 +300,11 @@ fn expand_subtree(
                    }
                }

-                let res = if ctx.new_meta_vars {
-                    count(binding, 0, depth.unwrap_or(0))
-                } else {
-                    count_old(binding, 0, *depth)
-                };
-
-                let c = match res {
-                    Ok(c) => c,
-                    Err(e) => {
-                        // XXX: It *might* make sense to emit a dummy integer value like `0` here.
-                        // That would type inference a bit more robust in cases like
-                        // `v[${count(t)}]` where index doesn't matter, but also could lead to
-                        // wrong infefrence for cases like `tup.${count(t)}` where index itself
-                        // does matter.
-                        if err.is_none() {
-                            err = Some(e.into());
-                        }
-                        continue;
-                    }
-                };
+                let res = count(binding, 0, depth.unwrap_or(0));
+
                 arena.push(
                     tt::Leaf::Literal(tt::Literal {
-                        symbol: Symbol::integer(c),
+                        symbol: Symbol::integer(res),
                         span: ctx.call_site,
                         suffix: None,
                         kind: tt::LitKind::Integer,
@@ -353,7 +336,7 @@ fn expand_var(

     match ctx.bindings.get_fragment(v, id, &mut ctx.nesting, marker) {
         Ok(it) => ExpandResult::ok(it),
-        Err(ExpandError::UnresolvedBinding(_)) => {
+        Err(e) if matches!(e.inner.1, ExpandErrorKind::UnresolvedBinding(_)) => {
            // Note that it is possible to have a `$var` inside a macro which is not bound.
            // For example:
            // ```
@@ -435,7 +418,7 @@ fn expand_repeat(
                }
                .into(),
            ),
-           err: Some(ExpandError::LimitExceeded),
+           err: Some(ExpandError::new(ctx.call_site, ExpandErrorKind::LimitExceeded)),
        };
    }

@@ -481,16 +464,16 @@ fn expand_repeat(
    let tt = tt::Subtree {
        delimiter: tt::Delimiter::invisible_spanned(ctx.call_site),
        token_trees: buf.into_boxed_slice(),
-   }
-   .into();
+   };

    if RepeatKind::OneOrMore == kind && counter == 0 {
+       let span = tt.delimiter.open;
        return ExpandResult {
-           value: Fragment::Tokens(tt),
-           err: Some(ExpandError::UnexpectedToken),
+           value: Fragment::Tokens(tt.into()),
+           err: Some(ExpandError::new(span, ExpandErrorKind::UnexpectedToken)),
        };
    }
-   ExpandResult { value: Fragment::Tokens(tt), err }
+   ExpandResult { value: Fragment::Tokens(tt.into()), err }
 }

 fn push_fragment(ctx: &ExpandCtx<'_>, buf: &mut Vec<tt::TokenTree<Span>>, fragment: Fragment) {
@@ -557,44 +540,16 @@ fn fix_up_and_push_path_tt(

 /// Handles `${count(t, depth)}`. `our_depth` is the recursion depth and `count_depth` is the depth
 /// defined by the metavar expression.
-fn count(binding: &Binding, depth_curr: usize, depth_max: usize) -> Result<usize, CountError> {
+fn count(binding: &Binding, depth_curr: usize, depth_max: usize) -> usize {
     match binding {
         Binding::Nested(bs) => {
             if depth_curr == depth_max {
-                Ok(bs.len())
+                bs.len()
             } else {
                 bs.iter().map(|b| count(b, depth_curr + 1, depth_max)).sum()
             }
         }
-        Binding::Empty => Ok(0),
-        Binding::Fragment(_) | Binding::Missing(_) => Ok(1),
+        Binding::Empty => 0,
+        Binding::Fragment(_) | Binding::Missing(_) => 1,
-    }
-}
-
-fn count_old(
-    binding: &Binding,
-    our_depth: usize,
-    count_depth: Option<usize>,
-) -> Result<usize, CountError> {
-    match binding {
-        Binding::Nested(bs) => match count_depth {
-            None => bs.iter().map(|b| count_old(b, our_depth + 1, None)).sum(),
-            Some(0) => Ok(bs.len()),
-            Some(d) => bs.iter().map(|b| count_old(b, our_depth + 1, Some(d - 1))).sum(),
-        },
-        Binding::Empty => Ok(0),
-        Binding::Fragment(_) | Binding::Missing(_) => {
-            if our_depth == 0 {
-                // `${count(t)}` is placed inside the innermost repetition. This includes cases
-                // where `t` is not a repeated fragment.
-                Err(CountError::Misplaced)
-            } else if count_depth.is_none() {
-                Ok(1)
-            } else {
-                // We've reached at the innermost repeated fragment, but the user wants us to go
-                // further!
-                Err(CountError::OutOfBounds)
-            }
-        }
     }
 }
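With the `count_old` path gone, `${count(...)}` counting is infallible: the `CountError::Misplaced`/`OutOfBounds` cases disappear and the recursion simply returns a number. A self-contained sketch of that recursion over a reduced `Binding` tree follows; the enum keeps only the shape the function touches and is not the real `mbe` type.

    // Reduced stand-in for mbe's Binding tree.
    enum Binding {
        Nested(Vec<Binding>),
        Empty,
        Leaf, // stands in for Binding::Fragment(_) / Binding::Missing(_)
    }

    /// Count bindings at `depth_max`: a nested level either is the requested
    /// depth (count its children) or recurses one level deeper; leaves count
    /// as one and empty bindings as zero.
    fn count(binding: &Binding, depth_curr: usize, depth_max: usize) -> usize {
        match binding {
            Binding::Nested(bs) => {
                if depth_curr == depth_max {
                    bs.len()
                } else {
                    bs.iter().map(|b| count(b, depth_curr + 1, depth_max)).sum()
                }
            }
            Binding::Empty => 0,
            Binding::Leaf => 1,
        }
    }

    fn main() {
        // Two outer repetitions holding 2 and 3 inner bindings respectively.
        let b = Binding::Nested(vec![
            Binding::Nested(vec![Binding::Leaf, Binding::Leaf]),
            Binding::Nested(vec![Binding::Leaf, Binding::Leaf, Binding::Leaf]),
        ]);
        assert_eq!(count(&b, 0, 0), 2); // outer repetitions
        assert_eq!(count(&b, 0, 1), 5); // all inner bindings
        assert_eq!(count(&Binding::Empty, 0, 0), 0);
    }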
@@ -15,10 +15,11 @@ mod to_parser_input;
 mod benchmark;

 use span::{Edition, Span, SyntaxContextId};
-use stdx::impl_from;
 use tt::iter::TtIter;
+use tt::DelimSpan;

 use std::fmt;
+use std::sync::Arc;

 use crate::parser::{MetaTemplate, MetaVarKind, Op};

@@ -64,39 +65,45 @@ impl fmt::Display for ParseError {
 }

 #[derive(Debug, PartialEq, Eq, Clone, Hash)]
-pub enum ExpandError {
+pub struct ExpandError {
+    pub inner: Arc<(Span, ExpandErrorKind)>,
+}
+
+#[derive(Debug, PartialEq, Eq, Clone, Hash)]
+pub enum ExpandErrorKind {
     BindingError(Box<Box<str>>),
     UnresolvedBinding(Box<Box<str>>),
     LeftoverTokens,
-    ConversionError,
     LimitExceeded,
     NoMatchingRule,
     UnexpectedToken,
-    CountError(CountError),
 }

-impl_from!(CountError for ExpandError);
-
 impl ExpandError {
-    fn binding_error(e: impl Into<Box<str>>) -> ExpandError {
-        ExpandError::BindingError(Box::new(e.into()))
+    fn new(span: Span, kind: ExpandErrorKind) -> ExpandError {
+        ExpandError { inner: Arc::new((span, kind)) }
+    }
+    fn binding_error(span: Span, e: impl Into<Box<str>>) -> ExpandError {
+        ExpandError { inner: Arc::new((span, ExpandErrorKind::BindingError(Box::new(e.into())))) }
+    }
+}
+
+impl fmt::Display for ExpandError {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        self.inner.1.fmt(f)
     }
 }

-impl fmt::Display for ExpandError {
+impl fmt::Display for ExpandErrorKind {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
-            ExpandError::NoMatchingRule => f.write_str("no rule matches input tokens"),
-            ExpandError::UnexpectedToken => f.write_str("unexpected token in input"),
-            ExpandError::BindingError(e) => f.write_str(e),
-            ExpandError::UnresolvedBinding(binding) => {
+            ExpandErrorKind::NoMatchingRule => f.write_str("no rule matches input tokens"),
+            ExpandErrorKind::UnexpectedToken => f.write_str("unexpected token in input"),
+            ExpandErrorKind::BindingError(e) => f.write_str(e),
+            ExpandErrorKind::UnresolvedBinding(binding) => {
                 f.write_str("could not find binding ")?;
                 f.write_str(binding)
             }
-            ExpandError::ConversionError => f.write_str("could not convert tokens"),
-            ExpandError::LimitExceeded => f.write_str("Expand exceed limit"),
-            ExpandError::LeftoverTokens => f.write_str("leftover tokens"),
-            ExpandError::CountError(e) => e.fmt(f),
+            ExpandErrorKind::LimitExceeded => f.write_str("Expand exceed limit"),
+            ExpandErrorKind::LeftoverTokens => f.write_str("leftover tokens"),
         }
     }
 }
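The core of the change is the error type's new shape: a bare enum becomes a cheaply clonable wrapper around a `(Span, ExpandErrorKind)` pair, with `Display` delegating to the kind. The following is a self-contained sketch of that shape, using a placeholder `Span` and only two of the kinds so it compiles on its own; it illustrates the layout shown in the hunk above and is not the crate itself.

    use std::fmt;
    use std::sync::Arc;

    // Placeholder for span::Span.
    #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
    struct Span(u32);

    #[derive(Debug, PartialEq, Eq, Clone, Hash)]
    struct ExpandError {
        // Arc keeps the error one pointer wide and cheap to clone while it
        // travels through ExpandResult, yet it still carries its span.
        inner: Arc<(Span, ExpandErrorKind)>,
    }

    #[derive(Debug, PartialEq, Eq, Clone, Hash)]
    enum ExpandErrorKind {
        BindingError(Box<Box<str>>),
        UnexpectedToken,
    }

    impl ExpandError {
        fn new(span: Span, kind: ExpandErrorKind) -> ExpandError {
            ExpandError { inner: Arc::new((span, kind)) }
        }
        fn binding_error(span: Span, e: impl Into<Box<str>>) -> ExpandError {
            ExpandError::new(span, ExpandErrorKind::BindingError(Box::new(e.into())))
        }
    }

    // Display shows only the kind; the span is consumed by diagnostics.
    impl fmt::Display for ExpandError {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            self.inner.1.fmt(f)
        }
    }

    impl fmt::Display for ExpandErrorKind {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            match self {
                ExpandErrorKind::BindingError(e) => f.write_str(e),
                ExpandErrorKind::UnexpectedToken => f.write_str("unexpected token in input"),
            }
        }
    }

    fn main() {
        let err = ExpandError::binding_error(Span(42), "expected ident");
        assert_eq!(err.inner.0, Span(42)); // the span travels with the error
        assert_eq!(err.to_string(), "expected ident");

        let err2 = ExpandError::new(Span(7), ExpandErrorKind::UnexpectedToken);
        assert_eq!(err2.to_string(), "unexpected token in input");
    }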
@@ -248,11 +255,10 @@ impl DeclarativeMacro {
         &self,
         tt: &tt::Subtree<Span>,
         marker: impl Fn(&mut Span) + Copy,
-        new_meta_vars: bool,
         call_site: Span,
         def_site_edition: Edition,
     ) -> ExpandResult<(tt::Subtree<Span>, MatchedArmIndex)> {
-        expander::expand_rules(&self.rules, tt, marker, new_meta_vars, call_site, def_site_edition)
+        expander::expand_rules(&self.rules, tt, marker, call_site, def_site_edition)
     }
 }

@@ -355,11 +361,12 @@ impl<T: Default, E> From<Result<T, E>> for ValueResult<T, E> {
     }
 }

-fn expect_fragment<S: Copy + fmt::Debug>(
-    tt_iter: &mut TtIter<'_, S>,
+fn expect_fragment(
+    tt_iter: &mut TtIter<'_, Span>,
     entry_point: ::parser::PrefixEntryPoint,
     edition: ::parser::Edition,
-) -> ExpandResult<Option<tt::TokenTree<S>>> {
+    delim_span: DelimSpan<Span>,
+) -> ExpandResult<Option<tt::TokenTree<Span>>> {
     use ::parser;
     let buffer = tt::buffer::TokenBuffer::from_tokens(tt_iter.as_slice());
     let parser_input = to_parser_input::to_parser_input(edition, &buffer);
@@ -387,7 +394,10 @@ fn expect_fragment<S: Copy + fmt::Debug>(
     }

     let err = if error || !cursor.is_root() {
-        Some(ExpandError::binding_error(format!("expected {entry_point:?}")))
+        Some(ExpandError::binding_error(
+            buffer.begin().token_tree().map_or(delim_span.close, |tt| tt.span()),
+            format!("expected {entry_point:?}"),
+        ))
     } else {
         None
     };
@@ -212,15 +212,12 @@ where
 }

 /// Split token tree with separate expr: $($e:expr)SEP*
-pub fn parse_exprs_with_sep<S>(
-    tt: &tt::Subtree<S>,
+pub fn parse_exprs_with_sep(
+    tt: &tt::Subtree<span::Span>,
     sep: char,
-    span: S,
+    span: span::Span,
     edition: Edition,
-) -> Vec<tt::Subtree<S>>
-where
-    S: Copy + fmt::Debug,
-{
+) -> Vec<tt::Subtree<span::Span>> {
     if tt.token_trees.is_empty() {
         return Vec::new();
     }
@@ -229,7 +226,12 @@ where
     let mut res = Vec::new();

     while iter.peek_n(0).is_some() {
-        let expanded = crate::expect_fragment(&mut iter, parser::PrefixEntryPoint::Expr, edition);
+        let expanded = crate::expect_fragment(
+            &mut iter,
+            parser::PrefixEntryPoint::Expr,
+            edition,
+            tt::DelimSpan { open: tt.delimiter.open, close: tt.delimiter.close },
+        );

         res.push(match expanded.value {
             None => break,
@@ -1053,6 +1053,7 @@ impl GlobalState {
             .on::<NO_RETRY, lsp_request::GotoDeclaration>(handlers::handle_goto_declaration)
             .on::<NO_RETRY, lsp_request::GotoImplementation>(handlers::handle_goto_implementation)
             .on::<NO_RETRY, lsp_request::GotoTypeDefinition>(handlers::handle_goto_type_definition)
+            // FIXME: This should not be tried as it contains offsets that can get outdated!
             .on::<RETRY, lsp_request::InlayHintRequest>(handlers::handle_inlay_hints)
             .on::<RETRY, lsp_request::InlayHintResolveRequest>(handlers::handle_inlay_hints_resolve)
             .on::<NO_RETRY, lsp_request::CodeLensRequest>(handlers::handle_code_lens)
@@ -134,6 +134,15 @@ pub enum TokenTreeRef<'a, Span> {
     Leaf(&'a Leaf<Span>, &'a TokenTree<Span>),
 }

+impl<'a, Span: Copy> TokenTreeRef<'a, Span> {
+    pub fn span(&self) -> Span {
+        match self {
+            TokenTreeRef::Subtree(subtree, _) => subtree.delimiter.open,
+            TokenTreeRef::Leaf(leaf, _) => *leaf.span(),
+        }
+    }
+}
+
 impl<Span: Clone> TokenTreeRef<'_, Span> {
     pub fn cloned(&self) -> TokenTree<Span> {
         match self {
@@ -143,6 +143,10 @@ impl<'a, S: Copy> TtIter<'a, S> {
         self.inner.as_slice().get(n)
     }

+    pub fn next_span(&self) -> Option<S> {
+        Some(self.inner.as_slice().first()?.first_span())
+    }
+
     pub fn as_slice(&self) -> &'a [TokenTree<S>] {
         self.inner.as_slice()
     }
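`TtIter::next_span` is a non-consuming peek: it reports the span of the next token tree without advancing the iterator, which is what `match_meta_var` uses to anchor its "expected ..." errors before falling back to `delim_span.close`. Below is a self-contained sketch of the same idea over a plain slice, with toy `TokenTree`/`Span` types standing in for the real `tt` crate.

    #[derive(Debug, Clone, Copy, PartialEq)]
    struct Span(u32);

    // Toy token tree: just a leaf carrying a span.
    #[derive(Debug, Clone, Copy)]
    struct TokenTree {
        span: Span,
    }

    struct TtIter<'a> {
        inner: std::slice::Iter<'a, TokenTree>,
    }

    impl<'a> TtIter<'a> {
        fn new(tts: &'a [TokenTree]) -> Self {
            TtIter { inner: tts.iter() }
        }

        /// Span of the next token tree, if any, without consuming it.
        fn next_span(&self) -> Option<Span> {
            Some(self.inner.as_slice().first()?.span)
        }

        fn next(&mut self) -> Option<&'a TokenTree> {
            self.inner.next()
        }
    }

    fn main() {
        let tts = [TokenTree { span: Span(1) }, TokenTree { span: Span(2) }];
        let mut iter = TtIter::new(&tts);
        assert_eq!(iter.next_span(), Some(Span(1))); // peeking does not advance
        let _ = iter.next();
        assert_eq!(iter.next_span(), Some(Span(2)));
        let _ = iter.next();
        assert_eq!(iter.next_span(), None); // exhausted: callers fall back to delim_span.close
    }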