//! Defines database & queries for macro expansion.

use base_db::{salsa, CrateId, FileId, SourceDatabase};
use either::Either;
use limit::Limit;
use mbe::syntax_node_to_token_tree;
use rustc_hash::FxHashSet;
use span::{AstIdMap, Span, SyntaxContextData, SyntaxContextId};
use syntax::{ast, AstNode, Parse, SyntaxElement, SyntaxError, SyntaxNode, SyntaxToken, T};
use triomphe::Arc;

use crate::{
    attrs::{collect_attrs, AttrId},
    builtin_attr_macro::pseudo_derive_attr_expansion,
    builtin_fn_macro::EagerExpander,
    cfg_process,
    declarative::DeclarativeMacroExpander,
    fixup::{self, SyntaxFixupUndoInfo},
    hygiene::{span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt},
    proc_macro::ProcMacros,
    span_map::{RealSpanMap, SpanMap, SpanMapRef},
    tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander,
    CustomProcMacroExpander, EagerCallInfo, ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap,
    HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind,
    MacroFileId,
};

/// This is just to ensure the types of smart_macro_arg and macro_arg are the same
type MacroArgResult = (Arc<tt::Subtree>, SyntaxFixupUndoInfo, Span);

/// Total limit on the number of tokens produced by any macro invocation.
///
/// If an invocation produces more tokens than this limit, it will not be stored in the database and
/// an error will be emitted.
///
/// Actual max for `analysis-stats .` at some point: 30672.
static TOKEN_LIMIT: Limit = Limit::new(1_048_576);

#[derive(Debug, Clone, Eq, PartialEq)]
pub enum TokenExpander {
    /// Old-style `macro_rules` or the new macros 2.0
    DeclarativeMacro(Arc<DeclarativeMacroExpander>),
    /// Stuff like `line!` and `file!`.
    BuiltIn(BuiltinFnLikeExpander),
    /// Built-in eagerly expanded fn-like macros (`include!`, `concat!`, etc.)
    BuiltInEager(EagerExpander),
    /// `global_allocator` and such.
    BuiltInAttr(BuiltinAttrExpander),
    /// `derive(Copy)` and such.
    BuiltInDerive(BuiltinDeriveExpander),
    /// The thing we love the most here in rust-analyzer -- procedural macros.
    ProcMacro(CustomProcMacroExpander),
}

#[salsa::query_group(ExpandDatabaseStorage)]
pub trait ExpandDatabase: SourceDatabase {
    /// The proc macros.
    #[salsa::input]
    fn proc_macros(&self) -> Arc<ProcMacros>;

    fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;

    /// Main public API -- parses a hir file, not caring whether it's a real
    /// file or a macro expansion.
    #[salsa::transparent]
    fn parse_or_expand(&self, file_id: HirFileId) -> SyntaxNode;
    #[salsa::transparent]
    fn parse_or_expand_with_err(&self, file_id: HirFileId) -> ExpandResult<Parse<SyntaxNode>>;
    /// Implementation for the macro case.
    // This query is LRU cached
    fn parse_macro_expansion(
        &self,
        macro_file: MacroFileId,
    ) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)>;
    #[salsa::transparent]
    #[salsa::invoke(SpanMap::new)]
    fn span_map(&self, file_id: HirFileId) -> SpanMap;

    #[salsa::transparent]
    #[salsa::invoke(crate::span_map::expansion_span_map)]
    fn expansion_span_map(&self, file_id: MacroFileId) -> Arc<ExpansionSpanMap>;
    #[salsa::invoke(crate::span_map::real_span_map)]
    fn real_span_map(&self, file_id: FileId) -> Arc<RealSpanMap>;

    /// Macro ids. That's probably the tricksiest bit in rust-analyzer, and the
    /// reason why we use salsa at all.
    ///
    /// We encode macro definitions into ids of macro calls; this is what allows us
    /// to be incremental.
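    ///
    /// A minimal sketch of the round-trip (assumes a `db` and a `loc: MacroCallLoc` in scope;
    /// not compiled here):
    ///
    /// ```ignore
    /// let id: MacroCallId = db.intern_macro_call(loc);
    /// // The interned id can later be resolved back to its location.
    /// let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
    /// ```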
    #[salsa::interned]
    fn intern_macro_call(&self, macro_call: MacroCallLoc) -> MacroCallId;
    #[salsa::interned]
    fn intern_syntax_context(&self, ctx: SyntaxContextData) -> SyntaxContextId;

    #[salsa::transparent]
    fn setup_syntax_context_root(&self) -> ();
    #[salsa::transparent]
    #[salsa::invoke(crate::hygiene::dump_syntax_contexts)]
    fn dump_syntax_contexts(&self) -> String;

    /// Lowers a syntactic macro call to a token tree representation. That's a firewall
    /// query: only typing in the macro call itself changes the returned
    /// subtree.
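    ///
    /// Illustrative sketch of what the lowering produces (not compiled here):
    ///
    /// ```ignore
    /// // For a fn-like call such as ...
    /// m!(1 + 2);
    /// // ... the returned subtree corresponds to the argument token tree `(1 + 2)`
    /// // (proc-macro inputs get an invisible delimiter instead), together with
    /// // fixup undo info and the span of the macro path `m`.
    /// ```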
    fn macro_arg(&self, id: MacroCallId) -> MacroArgResult;
    #[salsa::transparent]
    fn macro_arg_considering_derives(
        &self,
        id: MacroCallId,
        kind: &MacroCallKind,
    ) -> MacroArgResult;
    /// Fetches the expander for this macro.
    #[salsa::transparent]
    #[salsa::invoke(TokenExpander::macro_expander)]
    fn macro_expander(&self, id: MacroDefId) -> TokenExpander;
    /// Fetches (and compiles) the expander of this decl macro.
    #[salsa::invoke(DeclarativeMacroExpander::expander)]
    fn decl_macro_expander(
        &self,
        def_crate: CrateId,
        id: AstId<ast::Macro>,
    ) -> Arc<DeclarativeMacroExpander>;
    /// Special case of the previous query for procedural macros. We can't LRU
    /// proc macros, since they are not deterministic in general, and
    /// non-determinism breaks salsa in a very, very, very bad way.
    /// @edwin0cheng heroically debugged this once! See #4315 for details
    fn expand_proc_macro(&self, call: MacroCallId) -> ExpandResult<Arc<tt::Subtree>>;
    /// Retrieves the span to be used for a proc-macro expansion's spans.
    /// This is a firewall query as it requires parsing the file, which we don't want proc-macros to
    /// directly depend on as that would cause too frequent invalidations, mainly because of the
    /// parse queries being LRU cached. If they weren't, the invalidations would only happen if the
    /// user edited the file that defines the proc-macro.
    fn proc_macro_span(&self, fun: AstId<ast::Fn>) -> Span;
    /// Firewall query that returns the errors from the `parse_macro_expansion` query.
    fn parse_macro_expansion_error(
        &self,
        macro_call: MacroCallId,
    ) -> ExpandResult<Box<[SyntaxError]>>;
}
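
// Illustrative sketch of driving the queries above from a caller (assumes a `db`
// implementing `ExpandDatabase` and a `macro_file: MacroFileId` in scope; not
// compiled here):
//
//     let ExpandResult { value: (parse, span_map), err } =
//         db.parse_macro_expansion(macro_file);
//     if let Some(err) = err {
//         // report the expansion error rather than dropping it
//     }
//     let expansion: SyntaxNode = parse.syntax_node();
//     // `span_map` maps ranges inside `expansion` back to spans of the input.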

/// This expands the given macro call, but with different arguments. This is
/// used for completion, where we want to see what 'would happen' if we insert a
/// token. The `token_to_map` is mapped down into the expansion, with the mapped
/// token returned.
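///
/// Illustrative sketch of a call site (assumes `db`, the macro call id, a modified copy of the
/// arguments, and the inserted token are in scope; not compiled here):
///
/// ```ignore
/// let (expansion, mapped) =
///     expand_speculative(db, macro_call_id, &modified_args, fake_ident_token)?;
/// // `mapped` is the token inside `expansion` that corresponds to `fake_ident_token`.
/// ```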
pub fn expand_speculative(
    db: &dyn ExpandDatabase,
    actual_macro_call: MacroCallId,
    speculative_args: &SyntaxNode,
    token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, SyntaxToken)> {
    let loc = db.lookup_intern_macro_call(actual_macro_call);

    // FIXME: This BOGUS here is dangerous once the proc-macro server can call back into the database!
    let span_map = RealSpanMap::absolute(FileId::BOGUS);
    let span_map = SpanMapRef::RealSpanMap(&span_map);

    let (_, _, span) = db.macro_arg(actual_macro_call);

    // Build the subtree and token mapping for the speculative args
    let (mut tt, undo_info) = match loc.kind {
        MacroCallKind::FnLike { .. } => (
            mbe::syntax_node_to_token_tree(speculative_args, span_map, span),
            SyntaxFixupUndoInfo::NONE,
        ),
        MacroCallKind::Attr { .. } if loc.def.is_attribute_derive() => (
            mbe::syntax_node_to_token_tree(speculative_args, span_map, span),
            SyntaxFixupUndoInfo::NONE,
        ),
        MacroCallKind::Derive { derive_attr_index: index, .. }
        | MacroCallKind::Attr { invoc_attr_index: index, .. } => {
            let censor = if let MacroCallKind::Derive { .. } = loc.kind {
                censor_derive_input(index, &ast::Adt::cast(speculative_args.clone())?)
            } else {
                attr_source(index, &ast::Item::cast(speculative_args.clone())?)
                    .into_iter()
                    .map(|it| it.syntax().clone().into())
                    .collect()
            };

            let censor_cfg =
                cfg_process::process_cfg_attrs(speculative_args, &loc, db).unwrap_or_default();
            let mut fixups = fixup::fixup_syntax(span_map, speculative_args, span);
            fixups.append.retain(|it, _| match it {
                syntax::NodeOrToken::Token(_) => true,
                it => !censor.contains(it) && !censor_cfg.contains(it),
            });
            fixups.remove.extend(censor);
            fixups.remove.extend(censor_cfg);

            (
                mbe::syntax_node_to_token_tree_modified(
                    speculative_args,
                    span_map,
                    fixups.append,
                    fixups.remove,
                    span,
                ),
                fixups.undo_info,
            )
        }
    };

    let attr_arg = match loc.kind {
        MacroCallKind::Attr { invoc_attr_index, .. } => {
            let attr = if loc.def.is_attribute_derive() {
                // for pseudo-derive expansion we actually pass the attribute itself only
                ast::Attr::cast(speculative_args.clone())
            } else {
                // Attributes may have an input token tree: build the subtree and map for this as well,
                // then try finding a token id for our token if it is inside this input subtree.
                let item = ast::Item::cast(speculative_args.clone())?;
                collect_attrs(&item)
                    .nth(invoc_attr_index.ast_index())
                    .and_then(|x| Either::left(x.1))
            }?;
            match attr.token_tree() {
                Some(token_tree) => {
                    let mut tree = syntax_node_to_token_tree(token_tree.syntax(), span_map, span);
                    tree.delimiter = tt::Delimiter::invisible_spanned(span);

                    Some(tree)
                }
                _ => None,
            }
        }
        _ => None,
    };

    // Do the actual expansion; we need to directly expand the proc macro due to the attribute args.
    // Otherwise the expand query will fetch the non-speculative attribute args and pass those instead.
    let mut speculative_expansion = match loc.def.kind {
        MacroDefKind::ProcMacro(expander, _, ast) => {
            let span = db.proc_macro_span(ast);
            tt.delimiter = tt::Delimiter::invisible_spanned(span);
            expander.expand(
                db,
                loc.def.krate,
                loc.krate,
                &tt,
                attr_arg.as_ref(),
                span_with_def_site_ctxt(db, span, actual_macro_call),
                span_with_call_site_ctxt(db, span, actual_macro_call),
                span_with_mixed_site_ctxt(db, span, actual_macro_call),
            )
        }
        MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => {
            pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?, span)
        }
        MacroDefKind::Declarative(it) => {
            db.decl_macro_expander(loc.krate, it).expand_unhygienic(db, tt, loc.def.krate, span)
        }
        MacroDefKind::BuiltIn(it, _) => {
            it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
        }
        MacroDefKind::BuiltInDerive(it, ..) => {
            it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
        }
        MacroDefKind::BuiltInEager(it, _) => {
            it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
        }
        MacroDefKind::BuiltInAttr(it, _) => it.expand(db, actual_macro_call, &tt, span),
    };

    let expand_to = loc.expand_to();

    fixup::reverse_fixups(&mut speculative_expansion.value, &undo_info);
    let (node, rev_tmap) = token_tree_to_syntax_node(&speculative_expansion.value, expand_to);

    let syntax_node = node.syntax_node();
    let token = rev_tmap
        .ranges_with_span(span_map.span_for_range(token_to_map.text_range()))
        .filter_map(|range| syntax_node.covering_element(range).into_token())
        .min_by_key(|t| {
            // prefer tokens of the same kind and text
            // Note the inversion of the score here, as we want to prefer the first token in case
            // of all tokens having the same score
            (t.kind() != token_to_map.kind()) as u8 + (t.text() != token_to_map.text()) as u8
        })?;
    Some((node.syntax_node(), token))
}

fn ast_id_map(db: &dyn ExpandDatabase, file_id: span::HirFileId) -> triomphe::Arc<AstIdMap> {
    triomphe::Arc::new(AstIdMap::from_source(&db.parse_or_expand(file_id)))
}

fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode {
    match file_id.repr() {
        HirFileIdRepr::FileId(file_id) => db.parse(file_id).syntax_node(),
        HirFileIdRepr::MacroFile(macro_file) => {
            db.parse_macro_expansion(macro_file).value.0.syntax_node()
        }
    }
}

fn parse_or_expand_with_err(
    db: &dyn ExpandDatabase,
    file_id: HirFileId,
) -> ExpandResult<Parse<SyntaxNode>> {
    match file_id.repr() {
        HirFileIdRepr::FileId(file_id) => ExpandResult::ok(db.parse(file_id).to_syntax()),
        HirFileIdRepr::MacroFile(macro_file) => {
            db.parse_macro_expansion(macro_file).map(|(it, _)| it)
        }
    }
}

// FIXME: We should verify that the parsed node is one of the many macro node variants we expect
// instead of having it be untyped
fn parse_macro_expansion(
    db: &dyn ExpandDatabase,
    macro_file: MacroFileId,
) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)> {
    let _p = tracing::span!(tracing::Level::INFO, "parse_macro_expansion").entered();
    let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
    let expand_to = loc.expand_to();
    let mbe::ValueResult { value: tt, err } = macro_expand(db, macro_file.macro_call_id, loc);

    let (parse, rev_token_map) = token_tree_to_syntax_node(
        match &tt {
            CowArc::Arc(it) => it,
            CowArc::Owned(it) => it,
        },
        expand_to,
    );

    ExpandResult { value: (parse, Arc::new(rev_token_map)), err }
}

fn parse_macro_expansion_error(
    db: &dyn ExpandDatabase,
    macro_call_id: MacroCallId,
) -> ExpandResult<Box<[SyntaxError]>> {
    db.parse_macro_expansion(MacroFileId { macro_call_id })
        .map(|it| it.0.errors().into_boxed_slice())
}

pub(crate) fn parse_with_map(
    db: &dyn ExpandDatabase,
    file_id: HirFileId,
) -> (Parse<SyntaxNode>, SpanMap) {
    match file_id.repr() {
        HirFileIdRepr::FileId(file_id) => {
            (db.parse(file_id).to_syntax(), SpanMap::RealSpanMap(db.real_span_map(file_id)))
        }
        HirFileIdRepr::MacroFile(macro_file) => {
            let (parse, map) = db.parse_macro_expansion(macro_file).value;
            (parse, SpanMap::ExpansionSpanMap(map))
        }
    }
}
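
// Illustrative sketch (assumes `db` and a `file_id: HirFileId` in scope; not compiled here):
// callers pair the returned parse with the span map to resolve spans for ranges in that file.
//
//     let (parse, map) = parse_with_map(db, file_id);
//     let node = parse.syntax_node();
//     let span = map.span_for_range(node.text_range());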

/// Imagine the word smart in quotes.
///
/// This resolves the [MacroCallId] to check whether it is a derive macro; if so, it gets the
/// [macro_arg] for the derive. Otherwise it returns the [macro_arg] for the macro_call_id.
///
/// This is not connected to the database, so it does not cache the result. However, the inner
/// [macro_arg] query is cached.
fn macro_arg_considering_derives(
    db: &dyn ExpandDatabase,
    id: MacroCallId,
    kind: &MacroCallKind,
) -> MacroArgResult {
    match kind {
        // Get the macro arg for the derive macro
        MacroCallKind::Derive { derive_macro_id, .. } => db.macro_arg(*derive_macro_id),
        // Normal macro arg
        _ => db.macro_arg(id),
    }
}

fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult {
    let loc = db.lookup_intern_macro_call(id);

    if let MacroCallLoc {
        def: MacroDefId { kind: MacroDefKind::BuiltInEager(..), .. },
        kind: MacroCallKind::FnLike { eager: Some(eager), .. },
        ..
    } = &loc
    {
        return (eager.arg.clone(), SyntaxFixupUndoInfo::NONE, eager.span);
    }

    let (parse, map) = parse_with_map(db, loc.kind.file_id());
    let root = parse.syntax_node();

    let (censor, item_node, span) = match loc.kind {
        MacroCallKind::FnLike { ast_id, .. } => {
            let node = &ast_id.to_ptr(db).to_node(&root);
            let path_range = node
                .path()
                .map_or_else(|| node.syntax().text_range(), |path| path.syntax().text_range());
            let span = map.span_for_range(path_range);

            let dummy_tt = |kind| {
                (
                    Arc::new(tt::Subtree {
                        delimiter: tt::Delimiter { open: span, close: span, kind },
                        token_trees: Box::default(),
                    }),
                    SyntaxFixupUndoInfo::default(),
                    span,
                )
            };

            let Some(tt) = node.token_tree() else {
                return dummy_tt(tt::DelimiterKind::Invisible);
            };
            let first = tt.left_delimiter_token().map(|it| it.kind()).unwrap_or(T!['(']);
            let last = tt.right_delimiter_token().map(|it| it.kind()).unwrap_or(T![.]);

            let mismatched_delimiters = !matches!(
                (first, last),
                (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}'])
            );
            if mismatched_delimiters {
                // Don't expand malformed (unbalanced) macro invocations. This is
                // less than ideal, but trying to expand unbalanced macro calls
                // sometimes produces pathological, deeply nested code which breaks
                // all kinds of things.
                //
                // So instead, we'll return an empty subtree here
                cov_mark::hit!(issue9358_bad_macro_stack_overflow);

                let kind = match first {
                    _ if loc.def.is_proc_macro() => tt::DelimiterKind::Invisible,
                    T!['('] => tt::DelimiterKind::Parenthesis,
                    T!['['] => tt::DelimiterKind::Bracket,
                    T!['{'] => tt::DelimiterKind::Brace,
                    _ => tt::DelimiterKind::Invisible,
                };
                return dummy_tt(kind);
            }

            let mut tt = mbe::syntax_node_to_token_tree(tt.syntax(), map.as_ref(), span);
            if loc.def.is_proc_macro() {
                // proc macros expect their inputs without parentheses, MBEs expect it with them included
                tt.delimiter.kind = tt::DelimiterKind::Invisible;
            }
            return (Arc::new(tt), SyntaxFixupUndoInfo::NONE, span);
        }

        // `MacroCallKind::Derive` should not be here, as we get the argument for the derive macro
        // via `macro_arg_considering_derives`.
        MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
            let node = ast_id.to_ptr(db).to_node(&root);
            let censor_derive_input = censor_derive_input(derive_attr_index, &node);
            let item_node = node.into();
            let attr_source = attr_source(derive_attr_index, &item_node);
            // FIXME: This is wrong, this should point to the path of the derive attribute
            let span =
                map.span_for_range(attr_source.as_ref().and_then(|it| it.path()).map_or_else(
                    || item_node.syntax().text_range(),
                    |it| it.syntax().text_range(),
                ));
            (censor_derive_input, item_node, span)
        }
        MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
            let node = ast_id.to_ptr(db).to_node(&root);
            let attr_source = attr_source(invoc_attr_index, &node);

            let span = map.span_for_range(
                attr_source
                    .as_ref()
                    .and_then(|it| it.path())
                    .map_or_else(|| node.syntax().text_range(), |it| it.syntax().text_range()),
            );
            // If this is a derive attribute, we need to censor the derive input
            if matches!(loc.def.kind, MacroDefKind::BuiltInAttr(expander, ..) if expander.is_derive())
                && ast::Adt::can_cast(node.syntax().kind())
            {
                let adt = ast::Adt::cast(node.syntax().clone()).unwrap();
                let censor_derive_input = censor_derive_input(invoc_attr_index, &adt);
                (censor_derive_input, node, span)
            } else {
                (attr_source.into_iter().map(|it| it.syntax().clone().into()).collect(), node, span)
            }
        }
    };

    let (mut tt, undo_info) = {
        let syntax = item_node.syntax();
        let censor_cfg = cfg_process::process_cfg_attrs(syntax, &loc, db).unwrap_or_default();
        let mut fixups = fixup::fixup_syntax(map.as_ref(), syntax, span);
        fixups.append.retain(|it, _| match it {
            syntax::NodeOrToken::Token(_) => true,
            it => !censor.contains(it) && !censor_cfg.contains(it),
        });
        fixups.remove.extend(censor);
        fixups.remove.extend(censor_cfg);

        (
            mbe::syntax_node_to_token_tree_modified(
                syntax,
                map,
                fixups.append,
                fixups.remove,
                span,
            ),
            fixups.undo_info,
        )
    };

    if loc.def.is_proc_macro() {
        // proc macros expect their inputs without parentheses, MBEs expect it with them included
        tt.delimiter.kind = tt::DelimiterKind::Invisible;
    }

    (Arc::new(tt), undo_info, span)
}

// FIXME: Censoring info should be calculated by the caller! Namely by name resolution
/// Derives expect all `#[derive(..)]` invocations up to (and including) the currently invoked one to be stripped
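///
/// Illustrative example (not compiled here):
///
/// ```ignore
/// // When the derive at attribute index 1 (`#[derive(Eq)]`) is being expanded,
/// // both `#[derive(Clone)]` and `#[derive(Eq)]` are censored, while the
/// // unrelated `#[repr(C)]` attribute is left untouched.
/// #[derive(Clone)]
/// #[derive(Eq)]
/// #[repr(C)]
/// struct S;
/// ```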
fn censor_derive_input(derive_attr_index: AttrId, node: &ast::Adt) -> FxHashSet<SyntaxElement> {
    // FIXME: handle `cfg_attr`
    cov_mark::hit!(derive_censoring);
    collect_attrs(node)
        .take(derive_attr_index.ast_index() + 1)
        .filter_map(|(_, attr)| Either::left(attr))
        // FIXME, this resolution should not be done syntactically
        // derive is a proper macro now, no longer builtin
        // But we do not have resolution at this stage, this means
        // we need to know about all macro calls for the given ast item here
        // so we require some kind of mapping...
        .filter(|attr| attr.simple_name().as_deref() == Some("derive"))
        .map(|it| it.syntax().clone().into())
        .collect()
}

/// Attributes expect the invoking attribute to be stripped
fn attr_source(invoc_attr_index: AttrId, node: &ast::Item) -> Option<ast::Attr> {
    // FIXME: handle `cfg_attr`
    cov_mark::hit!(attribute_macro_attr_censoring);
    collect_attrs(node).nth(invoc_attr_index.ast_index()).and_then(|(_, attr)| Either::left(attr))
}

impl TokenExpander {
    fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
        match id.kind {
            MacroDefKind::Declarative(ast_id) => {
                TokenExpander::DeclarativeMacro(db.decl_macro_expander(id.krate, ast_id))
            }
            MacroDefKind::BuiltIn(expander, _) => TokenExpander::BuiltIn(expander),
            MacroDefKind::BuiltInAttr(expander, _) => TokenExpander::BuiltInAttr(expander),
            MacroDefKind::BuiltInDerive(expander, _) => TokenExpander::BuiltInDerive(expander),
            MacroDefKind::BuiltInEager(expander, ..) => TokenExpander::BuiltInEager(expander),
            MacroDefKind::ProcMacro(expander, ..) => TokenExpander::ProcMacro(expander),
        }
    }
}
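
/// Either a shared [`Arc`] or an owned value. This lets [`macro_expand`] pass through
/// already-`Arc`'d subtrees (a proc-macro expansion or an unexpanded eager input) without
/// cloning them, while freshly produced expansions are returned as `Owned`.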
enum CowArc<T> {
    Arc(Arc<T>),
    Owned(T),
}

fn macro_expand(
    db: &dyn ExpandDatabase,
    macro_call_id: MacroCallId,
    loc: MacroCallLoc,
) -> ExpandResult<CowArc<tt::Subtree>> {
    let _p = tracing::span!(tracing::Level::INFO, "macro_expand").entered();

    let (ExpandResult { value: tt, err }, span) = match loc.def.kind {
        MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(macro_call_id).map(CowArc::Arc),
        _ => {
            let (macro_arg, undo_info, span) =
                db.macro_arg_considering_derives(macro_call_id, &loc.kind);

            let arg = &*macro_arg;
            let res =
                match loc.def.kind {
                    MacroDefKind::Declarative(id) => db
                        .decl_macro_expander(loc.def.krate, id)
                        .expand(db, arg.clone(), macro_call_id, span),
                    MacroDefKind::BuiltIn(it, _) => {
                        it.expand(db, macro_call_id, arg, span).map_err(Into::into)
                    }
                    MacroDefKind::BuiltInDerive(it, _) => {
                        it.expand(db, macro_call_id, arg, span).map_err(Into::into)
                    }
                    MacroDefKind::BuiltInEager(it, _) => {
                        // This might look a bit odd, but we do not expand the inputs to eager macros here.
                        // Eager macro inputs are expanded, well, eagerly when we collect the macro calls.
                        // That kind of expansion uses the ast id map of an eager macro's input though, which goes
                        // through the HirFileId machinery. As eager macro inputs are assigned a macro file id, that
                        // query will end up going through here again, whereas we just want to inspect the raw input.
                        // As such we just return the input subtree here.
                        let eager = match &loc.kind {
                            MacroCallKind::FnLike { eager: None, .. } => {
                                return ExpandResult::ok(CowArc::Arc(macro_arg.clone()));
                            }
                            MacroCallKind::FnLike { eager: Some(eager), .. } => Some(&**eager),
                            _ => None,
                        };

                        let mut res = it.expand(db, macro_call_id, arg, span).map_err(Into::into);

                        if let Some(EagerCallInfo { error, .. }) = eager {
                            // FIXME: We should report both errors!
                            res.err = error.clone().or(res.err);
                        }
                        res
                    }
                    MacroDefKind::BuiltInAttr(it, _) => {
                        let mut res = it.expand(db, macro_call_id, arg, span);
                        fixup::reverse_fixups(&mut res.value, &undo_info);
                        res
                    }
                    _ => unreachable!(),
                };
            (ExpandResult { value: res.value, err: res.err }, span)
        }
    };

    // Skip checking token tree limit for include! macro call
    if !loc.def.is_include() {
        // Set a hard limit for the expanded tt
        if let Err(value) = check_tt_count(&tt) {
            return value.map(|()| {
                CowArc::Owned(tt::Subtree {
                    delimiter: tt::Delimiter::invisible_spanned(span),
                    token_trees: Box::new([]),
                })
            });
        }
    }

    ExpandResult { value: CowArc::Owned(tt), err }
}

fn proc_macro_span(db: &dyn ExpandDatabase, ast: AstId<ast::Fn>) -> Span {
    let root = db.parse_or_expand(ast.file_id);
    let ast_id_map = &db.ast_id_map(ast.file_id);
    let span_map = &db.span_map(ast.file_id);

    let node = ast_id_map.get(ast.value).to_node(&root);
    let range = ast::HasName::name(&node)
        .map_or_else(|| node.syntax().text_range(), |name| name.syntax().text_range());
    span_map.span_for_range(range)
}

fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
    let loc = db.lookup_intern_macro_call(id);
    let (macro_arg, undo_info, span) = db.macro_arg_considering_derives(id, &loc.kind.clone());

    let (expander, ast) = match loc.def.kind {
        MacroDefKind::ProcMacro(expander, _, ast) => (expander, ast),
        _ => unreachable!(),
    };

    let attr_arg = match &loc.kind {
        MacroCallKind::Attr { attr_args: Some(attr_args), .. } => Some(&**attr_args),
        _ => None,
    };

    let ExpandResult { value: mut tt, err } = {
        let span = db.proc_macro_span(ast);
        expander.expand(
            db,
            loc.def.krate,
            loc.krate,
            &macro_arg,
            attr_arg,
            span_with_def_site_ctxt(db, span, id),
            span_with_call_site_ctxt(db, span, id),
            span_with_mixed_site_ctxt(db, span, id),
        )
    };

    // Set a hard limit for the expanded tt
    if let Err(value) = check_tt_count(&tt) {
        return value.map(|()| {
            Arc::new(tt::Subtree {
                delimiter: tt::Delimiter::invisible_spanned(span),
                token_trees: Box::new([]),
            })
        });
    }

    fixup::reverse_fixups(&mut tt, &undo_info);

    ExpandResult { value: Arc::new(tt), err }
}

fn token_tree_to_syntax_node(
    tt: &tt::Subtree,
    expand_to: ExpandTo,
) -> (Parse<SyntaxNode>, ExpansionSpanMap) {
    let entry_point = match expand_to {
        ExpandTo::Statements => mbe::TopEntryPoint::MacroStmts,
        ExpandTo::Items => mbe::TopEntryPoint::MacroItems,
        ExpandTo::Pattern => mbe::TopEntryPoint::Pattern,
        ExpandTo::Type => mbe::TopEntryPoint::Type,
        ExpandTo::Expr => mbe::TopEntryPoint::Expr,
    };
    mbe::token_tree_to_syntax_node(tt, entry_point)
}

fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<()>> {
    let count = tt.count();
    if TOKEN_LIMIT.check(count).is_err() {
        Err(ExpandResult {
            value: (),
            err: Some(ExpandError::other(format!(
                "macro invocation exceeds token limit: produced {} tokens, limit is {}",
                count,
                TOKEN_LIMIT.inner(),
            ))),
        })
    } else {
        Ok(())
    }
}

fn setup_syntax_context_root(db: &dyn ExpandDatabase) {
    db.intern_syntax_context(SyntaxContextData::root());
}