//! Defines database & queries for macro expansion.

use base_db::{salsa, CrateId, Edition, SourceDatabase};
use either::Either;
use limit::Limit;
use mbe::{syntax_node_to_token_tree, ValueResult};
use rustc_hash::FxHashSet;
use syntax::{
    ast::{self, HasAttrs, HasDocComments},
    AstNode, GreenNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T,
};
use triomphe::Arc;

use crate::{
    ast_id_map::AstIdMap, builtin_attr_macro::pseudo_derive_attr_expansion,
    builtin_fn_macro::EagerExpander, fixup, hygiene::HygieneFrame, tt, AstId, BuiltinAttrExpander,
    BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo, ExpandError, ExpandResult,
    ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId,
    MacroDefKind, MacroFile, ProcMacroExpander,
};

/// Total limit on the number of tokens produced by any macro invocation.
///
/// If an invocation produces more tokens than this limit, it will not be stored in the database and
/// an error will be emitted.
///
/// Actual max for `analysis-stats .` at some point: 30672.
static TOKEN_LIMIT: Limit = Limit::new(1_048_576);
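
// NOTE: this limit is enforced by `check_tt_count` near the bottom of this file; roughly
// (see that function for the real error construction):
//
//     if TOKEN_LIMIT.check(tt.count()).is_err() {
//         // replace the expansion with an empty subtree and report an `ExpandError`
//     }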
|
2020-12-10 16:50:56 +00:00
|
|
|
|
2023-07-10 14:23:29 +00:00
|
|
|
#[derive(Debug, Clone, Eq, PartialEq)]
|
|
|
|
/// Old-style `macro_rules` or the new macros 2.0
|
|
|
|
pub struct DeclarativeMacroExpander {
|
|
|
|
pub mac: mbe::DeclarativeMacro,
|
|
|
|
pub def_site_token_map: mbe::TokenMap,
|
|
|
|
}
|
|
|
|
|
|
|
|
impl DeclarativeMacroExpander {
|
2023-07-10 14:28:23 +00:00
|
|
|
pub fn expand(&self, tt: tt::Subtree) -> ExpandResult<tt::Subtree> {
|
2023-07-10 14:23:29 +00:00
|
|
|
match self.mac.err() {
|
|
|
|
Some(e) => ExpandResult::new(
|
|
|
|
tt::Subtree::empty(),
|
|
|
|
ExpandError::other(format!("invalid macro definition: {e}")),
|
|
|
|
),
|
|
|
|
None => self.mac.expand(tt).map_err(Into::into),
|
|
|
|
}
|
|
|
|
}
|
2023-07-10 14:28:23 +00:00
|
|
|
|
|
|
|
pub fn map_id_down(&self, token_id: tt::TokenId) -> tt::TokenId {
|
|
|
|
self.mac.map_id_down(token_id)
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn map_id_up(&self, token_id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
|
|
|
|
self.mac.map_id_up(token_id)
|
|
|
|
}
|
2023-07-10 14:23:29 +00:00
|
|
|
}
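
// A minimal usage sketch: callers obtain a `DeclarativeMacroExpander` through the
// `decl_macro_expander` query declared below and feed it the call-site token tree,
// as `macro_expand` does further down:
//
//     db.decl_macro_expander(loc.def.krate, id).expand(arg.clone())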

#[derive(Debug, Clone, Eq, PartialEq)]
pub enum TokenExpander {
    DeclarativeMacro(Arc<DeclarativeMacroExpander>),
    /// Stuff like `line!` and `file!`.
    BuiltIn(BuiltinFnLikeExpander),
    /// Built-in eagerly expanded fn-like macros (`include!`, `concat!`, etc.)
    BuiltInEager(EagerExpander),
    /// `global_allocator` and such.
    BuiltInAttr(BuiltinAttrExpander),
    /// `derive(Copy)` and such.
    BuiltInDerive(BuiltinDeriveExpander),
    /// The thing we love the most here in rust-analyzer -- procedural macros.
    ProcMacro(ProcMacroExpander),
}

// FIXME: Get rid of these methods
impl TokenExpander {
    pub(crate) fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
        match self {
            TokenExpander::DeclarativeMacro(expander) => expander.map_id_down(id),
            TokenExpander::BuiltIn(..)
            | TokenExpander::BuiltInEager(..)
            | TokenExpander::BuiltInAttr(..)
            | TokenExpander::BuiltInDerive(..)
            | TokenExpander::ProcMacro(..) => id,
        }
    }

    pub(crate) fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
        match self {
            TokenExpander::DeclarativeMacro(expander) => expander.map_id_up(id),
            TokenExpander::BuiltIn(..)
            | TokenExpander::BuiltInEager(..)
            | TokenExpander::BuiltInAttr(..)
            | TokenExpander::BuiltInDerive(..)
            | TokenExpander::ProcMacro(..) => (id, mbe::Origin::Call),
        }
    }
}

#[salsa::query_group(ExpandDatabaseStorage)]
pub trait ExpandDatabase: SourceDatabase {
    fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;

    /// Main public API -- parses a hir file, not caring whether it's a real
    /// file or a macro expansion.
    #[salsa::transparent]
    fn parse_or_expand(&self, file_id: HirFileId) -> SyntaxNode;
    #[salsa::transparent]
    fn parse_or_expand_with_err(&self, file_id: HirFileId) -> ExpandResult<Parse<SyntaxNode>>;
    /// Implementation for the macro case.
    // This query is LRU cached
    fn parse_macro_expansion(
        &self,
        macro_file: MacroFile,
    ) -> ExpandResult<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>;

    /// Macro ids. That's probably the trickiest bit in rust-analyzer, and the
    /// reason why we use salsa at all.
    ///
    /// We encode macro definitions into ids of macro calls; this is what allows us
    /// to be incremental.
    #[salsa::interned]
    fn intern_macro_call(&self, macro_call: MacroCallLoc) -> MacroCallId;

    /// Lowers a syntactic macro call to a token tree representation.
    #[salsa::transparent]
    fn macro_arg(
        &self,
        id: MacroCallId,
    ) -> ValueResult<
        Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>,
        Arc<Box<[SyntaxError]>>,
    >;
    /// Extracts the syntax node corresponding to a macro call. That's a firewall
    /// query: only typing in the macro call itself changes the returned
    /// subtree.
    fn macro_arg_node(
        &self,
        id: MacroCallId,
    ) -> ValueResult<Option<GreenNode>, Arc<Box<[SyntaxError]>>>;
    /// Fetches the expander for this macro.
    #[salsa::transparent]
    fn macro_expander(&self, id: MacroDefId) -> TokenExpander;
    /// Fetches (and compiles) the expander of this decl macro.
    fn decl_macro_expander(
        &self,
        def_crate: CrateId,
        id: AstId<ast::Macro>,
    ) -> Arc<DeclarativeMacroExpander>;

    /// Expands a macro call to a token tree.
    // This query is LRU cached
    fn macro_expand(&self, macro_call: MacroCallId) -> ExpandResult<Arc<tt::Subtree>>;
    #[salsa::invoke(crate::builtin_fn_macro::include_arg_to_tt)]
    fn include_expand(
        &self,
        arg_id: MacroCallId,
    ) -> Result<
        (triomphe::Arc<(::tt::Subtree<::tt::TokenId>, mbe::TokenMap)>, base_db::FileId),
        ExpandError,
    >;
    /// Special case of the previous query for procedural macros. We can't LRU
    /// proc macros, since they are not deterministic in general, and
    /// non-determinism breaks salsa in a very, very, very bad way.
    /// @edwin0cheng heroically debugged this once! See #4315 for details.
    fn expand_proc_macro(&self, call: MacroCallId) -> ExpandResult<Arc<tt::Subtree>>;
    /// Firewall query that returns the errors from the `parse_macro_expansion` query.
    fn parse_macro_expansion_error(
        &self,
        macro_call: MacroCallId,
    ) -> ExpandResult<Box<[SyntaxError]>>;

    fn hygiene_frame(&self, file_id: HirFileId) -> Arc<HygieneFrame>;
}
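
// A rough sketch of how these queries fit together; variable names here are just for
// illustration. A `MacroCallLoc` is interned into a `MacroCallId`, and everything
// downstream keys off that id:
//
//     let id: MacroCallId = db.intern_macro_call(loc);
//     let loc: MacroCallLoc = db.lookup_intern_macro_call(id); // salsa-generated inverse
//     let expansion = db.macro_expand(id);                     // ExpandResult<Arc<tt::Subtree>>
//     let parsed = db.parse_macro_expansion(MacroFile { macro_call_id: id });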

/// This expands the given macro call, but with different arguments. This is
/// used for completion, where we want to see what 'would happen' if we insert a
/// token. The `token_to_map` is mapped down into the expansion, and the mapped
/// token is returned.
pub fn expand_speculative(
    db: &dyn ExpandDatabase,
    actual_macro_call: MacroCallId,
    speculative_args: &SyntaxNode,
    token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, SyntaxToken)> {
    let loc = db.lookup_intern_macro_call(actual_macro_call);
    let token_range = token_to_map.text_range();

    // Build the subtree and token mapping for the speculative args
    let censor = censor_for_macro_input(&loc, speculative_args);
    let mut fixups = fixup::fixup_syntax(speculative_args);
    fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
    let (mut tt, spec_args_tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
        speculative_args,
        fixups.token_map,
        fixups.next_id,
        fixups.replace,
        fixups.append,
    );

    let (attr_arg, token_id) = match loc.kind {
        MacroCallKind::Attr { invoc_attr_index, .. } => {
            let attr = if loc.def.is_attribute_derive() {
                // for pseudo-derive expansion we actually pass the attribute itself only
                ast::Attr::cast(speculative_args.clone())
            } else {
                // Attributes may have an input token tree: build the subtree and map for this as
                // well, then try to find a token id for our token if it is inside this input subtree.
                let item = ast::Item::cast(speculative_args.clone())?;
                item.doc_comments_and_attrs()
                    .nth(invoc_attr_index.ast_index())
                    .and_then(Either::left)
            }?;
            match attr.token_tree() {
                Some(token_tree) => {
                    let (mut tree, map) = syntax_node_to_token_tree(attr.token_tree()?.syntax());
                    tree.delimiter = tt::Delimiter::unspecified();

                    let shift = mbe::Shift::new(&tt);
                    shift.shift_all(&mut tree);

                    let token_id = if token_tree.syntax().text_range().contains_range(token_range) {
                        let attr_input_start =
                            token_tree.left_delimiter_token()?.text_range().start();
                        let range = token_range.checked_sub(attr_input_start)?;
                        let token_id = shift.shift(map.token_by_range(range)?);
                        Some(token_id)
                    } else {
                        None
                    };
                    (Some(tree), token_id)
                }
                _ => (None, None),
            }
        }
        _ => (None, None),
    };
    let token_id = match token_id {
        Some(token_id) => token_id,
        // token wasn't inside an attribute input, so it has to be in the general macro input
        None => {
            let range = token_range.checked_sub(speculative_args.text_range().start())?;
            let token_id = spec_args_tmap.token_by_range(range)?;
            match loc.def.kind {
                MacroDefKind::Declarative(it) => {
                    db.decl_macro_expander(loc.krate, it).map_id_down(token_id)
                }
                _ => token_id,
            }
        }
    };

    // Do the actual expansion. We need to directly expand the proc macro due to the attribute args;
    // otherwise the expand query will fetch the non-speculative attribute args and pass those instead.
    let mut speculative_expansion = match loc.def.kind {
        MacroDefKind::ProcMacro(expander, ..) => {
            tt.delimiter = tt::Delimiter::unspecified();
            expander.expand(db, loc.def.krate, loc.krate, &tt, attr_arg.as_ref())
        }
        MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => {
            pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?)
        }
        MacroDefKind::BuiltInDerive(expander, ..) => {
            // this cast is a bit sus, can we avoid losing the typedness here?
            let adt = ast::Adt::cast(speculative_args.clone()).unwrap();
            expander.expand(db, actual_macro_call, &adt, &spec_args_tmap)
        }
        MacroDefKind::Declarative(it) => db.decl_macro_expander(loc.krate, it).expand(tt),
        MacroDefKind::BuiltIn(it, _) => it.expand(db, actual_macro_call, &tt).map_err(Into::into),
        MacroDefKind::BuiltInEager(it, _) => {
            it.expand(db, actual_macro_call, &tt).map_err(Into::into)
        }
        MacroDefKind::BuiltInAttr(it, _) => it.expand(db, actual_macro_call, &tt),
    };

    let expand_to = macro_expand_to(db, actual_macro_call);
    fixup::reverse_fixups(&mut speculative_expansion.value, &spec_args_tmap, &fixups.undo_info);
    let (node, rev_tmap) = token_tree_to_syntax_node(&speculative_expansion.value, expand_to);

    let syntax_node = node.syntax_node();
    let token = rev_tmap
        .ranges_by_token(token_id, token_to_map.kind())
        .filter_map(|range| syntax_node.covering_element(range).into_token())
        .min_by_key(|t| {
            // Prefer tokens of the same kind and text.
            // Note the inversion of the score here, as we want to prefer the first token in case
            // of all tokens having the same score.
            (t.kind() != token_to_map.kind()) as u8 + (t.text() != token_to_map.text()) as u8
        })?;
    Some((node.syntax_node(), token))
}
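
// An illustrative usage sketch for `expand_speculative` (variable names invented for
// illustration): completion passes a copy of the macro call's argument with the extra
// token already inserted, plus that token, and gets back the expansion together with
// the token it was mapped to inside the expansion:
//
//     let (expansion, mapped_token) =
//         expand_speculative(db, macro_call_id, &modified_args, inserted_token)?;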

fn ast_id_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
    Arc::new(AstIdMap::from_source(&db.parse_or_expand(file_id)))
}

fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode {
    match file_id.repr() {
        HirFileIdRepr::FileId(file_id) => db.parse(file_id).tree().syntax().clone(),
        HirFileIdRepr::MacroFile(macro_file) => {
            db.parse_macro_expansion(macro_file).value.0.syntax_node()
        }
    }
}

fn parse_or_expand_with_err(
    db: &dyn ExpandDatabase,
    file_id: HirFileId,
) -> ExpandResult<Parse<SyntaxNode>> {
    match file_id.repr() {
        HirFileIdRepr::FileId(file_id) => ExpandResult::ok(db.parse(file_id).to_syntax()),
        HirFileIdRepr::MacroFile(macro_file) => {
            db.parse_macro_expansion(macro_file).map(|(it, _)| it)
        }
    }
}

fn parse_macro_expansion(
    db: &dyn ExpandDatabase,
    macro_file: MacroFile,
) -> ExpandResult<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)> {
    let _p = profile::span("parse_macro_expansion");
    let mbe::ValueResult { value: tt, err } = db.macro_expand(macro_file.macro_call_id);

    let expand_to = macro_expand_to(db, macro_file.macro_call_id);

    tracing::debug!("expanded = {}", tt.as_debug_string());
    tracing::debug!("kind = {:?}", expand_to);

    let (parse, rev_token_map) = token_tree_to_syntax_node(&tt, expand_to);

    ExpandResult { value: (parse, Arc::new(rev_token_map)), err }
}

fn parse_macro_expansion_error(
    db: &dyn ExpandDatabase,
    macro_call_id: MacroCallId,
) -> ExpandResult<Box<[SyntaxError]>> {
    db.parse_macro_expansion(MacroFile { macro_call_id })
        .map(|it| it.0.errors().to_vec().into_boxed_slice())
}

fn macro_arg(
    db: &dyn ExpandDatabase,
    id: MacroCallId,
) -> ValueResult<
    Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>,
    Arc<Box<[SyntaxError]>>,
> {
    let loc = db.lookup_intern_macro_call(id);

    if let Some(EagerCallInfo { arg, arg_id: _, error: _ }) = loc.eager.as_deref() {
        return ValueResult::ok(Some(Arc::new((arg.0.clone(), arg.1.clone(), Default::default()))));
    }

    let ValueResult { value, err } = db.macro_arg_node(id);
    let Some(arg) = value else {
        return ValueResult { value: None, err };
    };

    let node = SyntaxNode::new_root(arg);
    let censor = censor_for_macro_input(&loc, &node);
    let mut fixups = fixup::fixup_syntax(&node);
    fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
    let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
        &node,
        fixups.token_map,
        fixups.next_id,
        fixups.replace,
        fixups.append,
    );

    if loc.def.is_proc_macro() {
        // proc macros expect their inputs without the surrounding delimiters, MBEs expect them included
        tt.delimiter = tt::Delimiter::unspecified();
    }
    let val = Some(Arc::new((tt, tmap, fixups.undo_info)));
    match err {
        Some(err) => ValueResult::new(val, err),
        None => ValueResult::ok(val),
    }
}

/// Certain macro calls expect some nodes in the input to be preprocessed away, namely:
/// - derives expect all `#[derive(..)]` invocations up to the currently invoked one to be stripped
/// - attributes expect the invoking attribute to be stripped
fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<SyntaxNode> {
    // FIXME: handle `cfg_attr`
    (|| {
        let censor = match loc.kind {
            MacroCallKind::FnLike { .. } => return None,
            MacroCallKind::Derive { derive_attr_index, .. } => {
                cov_mark::hit!(derive_censoring);
                ast::Item::cast(node.clone())?
                    .attrs()
                    .take(derive_attr_index.ast_index() + 1)
                    // FIXME, this resolution should not be done syntactically
                    // derive is a proper macro now, no longer builtin
                    // But we do not have resolution at this stage, this means
                    // we need to know about all macro calls for the given ast item here
                    // so we require some kind of mapping...
                    .filter(|attr| attr.simple_name().as_deref() == Some("derive"))
                    .map(|it| it.syntax().clone())
                    .collect()
            }
            MacroCallKind::Attr { .. } if loc.def.is_attribute_derive() => return None,
            MacroCallKind::Attr { invoc_attr_index, .. } => {
                cov_mark::hit!(attribute_macro_attr_censoring);
                ast::Item::cast(node.clone())?
                    .doc_comments_and_attrs()
                    .nth(invoc_attr_index.ast_index())
                    .and_then(Either::left)
                    .map(|attr| attr.syntax().clone())
                    .into_iter()
                    .collect()
            }
        };
        Some(censor)
    })()
    .unwrap_or_default()
}
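
// A small illustration of the censoring above (hypothetical input): when expanding the
// `Clone` derive in
//
//     #[derive(Debug)]
//     #[derive(Clone)]
//     struct S;
//
// both `#[derive(..)]` attributes are censored out of the input handed to the derive
// expander, while for an attribute macro only the invoking attribute itself is removed.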

fn macro_arg_node(
    db: &dyn ExpandDatabase,
    id: MacroCallId,
) -> ValueResult<Option<GreenNode>, Arc<Box<[SyntaxError]>>> {
    let err = || -> Arc<Box<[_]>> {
        Arc::new(Box::new([SyntaxError::new_at_offset(
            "invalid macro call".to_owned(),
            syntax::TextSize::from(0),
        )]))
    };
    let loc = db.lookup_intern_macro_call(id);
    let arg = if let MacroDefKind::BuiltInEager(..) = loc.def.kind {
        let res = if let Some(EagerCallInfo { arg, .. }) = loc.eager.as_deref() {
            Some(mbe::token_tree_to_syntax_node(&arg.0, mbe::TopEntryPoint::Expr).0)
        } else {
            loc.kind
                .arg(db)
                .and_then(|arg| ast::TokenTree::cast(arg.value))
                .map(|tt| tt.reparse_as_expr().to_syntax())
        };

        match res {
            Some(res) if res.errors().is_empty() => res.syntax_node(),
            Some(res) => {
                return ValueResult::new(
                    Some(res.syntax_node().green().into()),
                    // Box::<[_]>::from(res.errors()), not stable yet
                    Arc::new(res.errors().to_vec().into_boxed_slice()),
                );
            }
            None => return ValueResult::only_err(err()),
        }
    } else {
        match loc.kind.arg(db) {
            Some(res) => res.value,
            None => return ValueResult::only_err(err()),
        }
    };

    if matches!(loc.kind, MacroCallKind::FnLike { .. }) {
        let first = arg.first_child_or_token().map_or(T![.], |it| it.kind());
        let last = arg.last_child_or_token().map_or(T![.], |it| it.kind());
        let well_formed_tt =
            matches!((first, last), (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}']));
        if !well_formed_tt {
            // Don't expand malformed (unbalanced) macro invocations. This is
            // less than ideal, but trying to expand unbalanced macro calls
            // sometimes produces pathological, deeply nested code which breaks
            // all kinds of things.
            //
            // Some day, we'll have explicit recursion counters for all
            // recursive things, at which point this code might be removed.
            cov_mark::hit!(issue9358_bad_macro_stack_overflow);
            return ValueResult::only_err(Arc::new(Box::new([SyntaxError::new(
                "unbalanced token tree".to_owned(),
                arg.text_range(),
            )])));
        }
    }
    ValueResult::ok(Some(arg.green().into()))
}
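
// For instance (an illustrative input, not from the test suite): a fn-like call whose
// argument token tree is unbalanced, such as `m!(foo`, is rejected here with an
// "unbalanced token tree" error instead of being handed to the expander.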

fn decl_macro_expander(
    db: &dyn ExpandDatabase,
    def_crate: CrateId,
    id: AstId<ast::Macro>,
) -> Arc<DeclarativeMacroExpander> {
    let is_2021 = db.crate_graph()[def_crate].edition >= Edition::Edition2021;
    let (mac, def_site_token_map) = match id.to_node(db) {
        ast::Macro::MacroRules(macro_rules) => match macro_rules.token_tree() {
            Some(arg) => {
                let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
                let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021);
                (mac, def_site_token_map)
            }
            None => (
                mbe::DeclarativeMacro::from_err(
                    mbe::ParseError::Expected("expected a token tree".into()),
                    is_2021,
                ),
                Default::default(),
            ),
        },
        ast::Macro::MacroDef(macro_def) => match macro_def.body() {
            Some(arg) => {
                let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
                let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021);
                (mac, def_site_token_map)
            }
            None => (
                mbe::DeclarativeMacro::from_err(
                    mbe::ParseError::Expected("expected a token tree".into()),
                    is_2021,
                ),
                Default::default(),
            ),
        },
    };
    Arc::new(DeclarativeMacroExpander { mac, def_site_token_map })
}
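
// The two arms above correspond to the two surface syntaxes for declarative macros,
// e.g. (illustrative snippets):
//
//     macro_rules! m { () => {} }    // ast::Macro::MacroRules, parsed with `parse_macro_rules`
//     pub macro m() {}               // ast::Macro::MacroDef (macros 2.0), parsed with `parse_macro2`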

fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
    match id.kind {
        MacroDefKind::Declarative(ast_id) => {
            TokenExpander::DeclarativeMacro(db.decl_macro_expander(id.krate, ast_id))
        }
        MacroDefKind::BuiltIn(expander, _) => TokenExpander::BuiltIn(expander),
        MacroDefKind::BuiltInAttr(expander, _) => TokenExpander::BuiltInAttr(expander),
        MacroDefKind::BuiltInDerive(expander, _) => TokenExpander::BuiltInDerive(expander),
        MacroDefKind::BuiltInEager(expander, ..) => TokenExpander::BuiltInEager(expander),
        MacroDefKind::ProcMacro(expander, ..) => TokenExpander::ProcMacro(expander),
    }
}

fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
    let _p = profile::span("macro_expand");
    let loc = db.lookup_intern_macro_call(id);

    let ExpandResult { value: tt, mut err } = match loc.def.kind {
        MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(id),
        MacroDefKind::BuiltInDerive(expander, ..) => {
            let arg = db.macro_arg_node(id).value.unwrap();

            let node = SyntaxNode::new_root(arg);
            let censor = censor_for_macro_input(&loc, &node);
            let mut fixups = fixup::fixup_syntax(&node);
            fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
            let (tmap, _) = mbe::syntax_node_to_token_map_with_modifications(
                &node,
                fixups.token_map,
                fixups.next_id,
                fixups.replace,
                fixups.append,
            );

            // this cast is a bit sus, can we avoid losing the typedness here?
            let adt = ast::Adt::cast(node).unwrap();
            let mut res = expander.expand(db, id, &adt, &tmap);
            fixup::reverse_fixups(&mut res.value, &tmap, &fixups.undo_info);
            res
        }
        _ => {
            let ValueResult { value, err } = db.macro_arg(id);
            let Some(macro_arg) = value else {
                return ExpandResult {
                    value: Arc::new(tt::Subtree {
                        delimiter: tt::Delimiter::UNSPECIFIED,
                        token_trees: Vec::new(),
                    }),
                    // FIXME: We should make sure to enforce an invariant that invalid macro
                    // calls do not reach this call path!
                    err: Some(ExpandError::other("invalid token tree")),
                };
            };

            let (arg, arg_tm, undo_info) = &*macro_arg;
            let mut res = match loc.def.kind {
                MacroDefKind::Declarative(id) => {
                    db.decl_macro_expander(loc.def.krate, id).expand(arg.clone())
                }
                MacroDefKind::BuiltIn(it, _) => it.expand(db, id, &arg).map_err(Into::into),
                // This might look a bit odd, but we do not expand the inputs to eager macros here.
                // Eager macro inputs are expanded, well, eagerly when we collect the macro calls.
                // That kind of expansion uses the ast id map of an eager macro's input, which goes
                // through the HirFileId machinery. As eager macro inputs are assigned a macro file
                // id, that query will end up going through here again, whereas we just want to
                // inspect the raw input. As such we just return the input subtree here.
                MacroDefKind::BuiltInEager(..) if loc.eager.is_none() => {
                    let mut arg = arg.clone();
                    fixup::reverse_fixups(&mut arg, arg_tm, undo_info);

                    return ExpandResult {
                        value: Arc::new(arg),
                        err: err.map(|err| {
                            let mut buf = String::new();
                            for err in &**err {
                                use std::fmt::Write;
                                _ = write!(buf, "{}, ", err);
                            }
                            buf.pop();
                            buf.pop();
                            ExpandError::other(buf)
                        }),
                    };
                }
                MacroDefKind::BuiltInEager(it, _) => it.expand(db, id, &arg).map_err(Into::into),
                MacroDefKind::BuiltInAttr(it, _) => it.expand(db, id, &arg),
                _ => unreachable!(),
            };
            fixup::reverse_fixups(&mut res.value, arg_tm, undo_info);
            res
        }
    };

    if let Some(EagerCallInfo { error, .. }) = loc.eager.as_deref() {
        // FIXME: We should report both errors!
        err = error.clone().or(err);
    }

    // Set a hard limit for the expanded tt
    if let Err(value) = check_tt_count(&tt) {
        return value;
    }

    ExpandResult { value: Arc::new(tt), err }
}
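
// For orientation, a sketch of the dispatch above: proc macros are routed to
// `expand_proc_macro`, builtin derives re-lower their input themselves so that other
// `#[derive]` attributes can be censored, and everything else goes through the shared
// `macro_arg` query. Callers typically only see the query result, e.g. (illustrative):
//
//     let ExpandResult { value, err } = db.macro_expand(macro_call_id);
//     // `value` is the expanded token tree (`Arc<tt::Subtree>`), `err` any expansion diagnostic.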

fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
    let loc = db.lookup_intern_macro_call(id);
    let Some(macro_arg) = db.macro_arg(id).value else {
        return ExpandResult {
            value: Arc::new(tt::Subtree {
                delimiter: tt::Delimiter::UNSPECIFIED,
                token_trees: Vec::new(),
            }),
            // FIXME: We should make sure to enforce an invariant that invalid macro
            // calls do not reach this call path!
            err: Some(ExpandError::other("invalid token tree")),
        };
    };

    let (arg_tt, arg_tm, undo_info) = &*macro_arg;

    let expander = match loc.def.kind {
        MacroDefKind::ProcMacro(expander, ..) => expander,
        _ => unreachable!(),
    };

    let attr_arg = match &loc.kind {
        MacroCallKind::Attr { attr_args, .. } => {
            let mut attr_args = attr_args.0.clone();
            mbe::Shift::new(arg_tt).shift_all(&mut attr_args);
            Some(attr_args)
        }
        _ => None,
    };

    let ExpandResult { value: mut tt, err } =
        expander.expand(db, loc.def.krate, loc.krate, arg_tt, attr_arg.as_ref());

    // Set a hard limit for the expanded tt
    if let Err(value) = check_tt_count(&tt) {
        return value;
    }

    fixup::reverse_fixups(&mut tt, arg_tm, undo_info);

    ExpandResult { value: Arc::new(tt), err }
}

fn hygiene_frame(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<HygieneFrame> {
    Arc::new(HygieneFrame::new(db, file_id))
}

fn macro_expand_to(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandTo {
    db.lookup_intern_macro_call(id).expand_to()
}

fn token_tree_to_syntax_node(
    tt: &tt::Subtree,
    expand_to: ExpandTo,
) -> (Parse<SyntaxNode>, mbe::TokenMap) {
    let entry_point = match expand_to {
        ExpandTo::Statements => mbe::TopEntryPoint::MacroStmts,
        ExpandTo::Items => mbe::TopEntryPoint::MacroItems,
        ExpandTo::Pattern => mbe::TopEntryPoint::Pattern,
        ExpandTo::Type => mbe::TopEntryPoint::Type,
        ExpandTo::Expr => mbe::TopEntryPoint::Expr,
    };
    mbe::token_tree_to_syntax_node(tt, entry_point)
}

fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<Arc<tt::Subtree>>> {
    let count = tt.count();
    if TOKEN_LIMIT.check(count).is_err() {
        Err(ExpandResult {
            value: Arc::new(tt::Subtree {
                delimiter: tt::Delimiter::UNSPECIFIED,
                token_trees: vec![],
            }),
            err: Some(ExpandError::other(format!(
                "macro invocation exceeds token limit: produced {} tokens, limit is {}",
                count,
                TOKEN_LIMIT.inner(),
            ))),
        })
    } else {
        Ok(())
    }
}