rust-analyzer/crates/hir-expand/src/db.rs

//! Defines database & queries for macro expansion.
use std::sync::Arc;
use base_db::{salsa, SourceDatabase};
use either::Either;
use limit::Limit;
use mbe::syntax_node_to_token_tree;
use rustc_hash::FxHashSet;
use syntax::{
ast::{self, HasAttrs, HasDocComments},
AstNode, GreenNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T,
};
use crate::{
ast_id_map::AstIdMap, builtin_attr_macro::pseudo_derive_attr_expansion,
builtin_fn_macro::EagerExpander, fixup, hygiene::HygieneFrame, tt, BuiltinAttrExpander,
BuiltinDeriveExpander, BuiltinFnLikeExpander, ExpandError, ExpandResult, ExpandTo, HirFileId,
HirFileIdRepr, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, MacroFile,
ProcMacroExpander,
};
/// Total limit on the number of tokens produced by any macro invocation.
///
/// If an invocation produces more tokens than this limit, it will not be stored in the database and
/// an error will be emitted.
///
/// Actual max for `analysis-stats .` at some point: 30672.
static TOKEN_LIMIT: Limit = Limit::new(1_048_576);
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum TokenExpander {
/// Old-style `macro_rules` or the new macros 2.0
DeclarativeMacro { mac: mbe::DeclarativeMacro, def_site_token_map: mbe::TokenMap },
/// Stuff like `line!` and `file!`.
Builtin(BuiltinFnLikeExpander),
/// Built-in eagerly expanded fn-like macros (`include!`, `concat!`, etc.)
BuiltinEager(EagerExpander),
/// `global_allocator` and such.
BuiltinAttr(BuiltinAttrExpander),
/// `derive(Copy)` and such.
BuiltinDerive(BuiltinDeriveExpander),
/// The thing we love the most here in rust-analyzer -- procedural macros.
ProcMacro(ProcMacroExpander),
}
impl TokenExpander {
fn expand(
&self,
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
match self {
TokenExpander::DeclarativeMacro { mac, .. } => mac.expand(tt).map_err(Into::into),
TokenExpander::Builtin(it) => it.expand(db, id, tt).map_err(Into::into),
TokenExpander::BuiltinEager(it) => {
it.expand(db, id, tt).map_err(Into::into).map(|res| res.subtree)
}
TokenExpander::BuiltinAttr(it) => it.expand(db, id, tt),
TokenExpander::BuiltinDerive(it) => it.expand(db, id, tt),
TokenExpander::ProcMacro(_) => {
// We store the result in the salsa db to prevent non-deterministic behavior in
// some proc-macro implementations.
// See #4315 for details
db.expand_proc_macro(id)
}
}
}
pub(crate) fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
match self {
TokenExpander::DeclarativeMacro { mac, .. } => mac.map_id_down(id),
TokenExpander::Builtin(..)
| TokenExpander::BuiltinEager(..)
| TokenExpander::BuiltinAttr(..)
| TokenExpander::BuiltinDerive(..)
| TokenExpander::ProcMacro(..) => id,
}
}
pub(crate) fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
match self {
TokenExpander::DeclarativeMacro { mac, .. } => mac.map_id_up(id),
TokenExpander::Builtin(..)
| TokenExpander::BuiltinEager(..)
| TokenExpander::BuiltinAttr(..)
| TokenExpander::BuiltinDerive(..)
| TokenExpander::ProcMacro(..) => (id, mbe::Origin::Call),
}
}
}
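// Illustrative sketch (assumption: caller-side pseudo-usage, not an API defined here):
// the `macro_expand` query below drives a `TokenExpander` roughly like this, once the
// `macro_def` and `macro_arg` queries have produced the expander and the lowered input:
//
//     let expander = db.macro_def(loc.def)?;           // compiled/fetched expander
//     let (arg_tt, _tmap, _undo) = &*db.macro_arg(id); // lowered call-site token tree
//     let ExpandResult { value, err } = expander.expand(db, id, arg_tt);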
#[salsa::query_group(ExpandDatabaseStorage)]
pub trait ExpandDatabase: SourceDatabase {
fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;
/// Main public API -- parses a hir file, not caring whether it's a real
/// file or a macro expansion.
#[salsa::transparent]
fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode>;
#[salsa::transparent]
fn parse_or_expand_with_err(
&self,
file_id: HirFileId,
) -> ExpandResult<Option<Parse<SyntaxNode>>>;
/// Implementation for the macro case.
fn parse_macro_expansion(
&self,
macro_file: MacroFile,
) -> ExpandResult<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>;
/// Macro ids. That's probably the trickiest bit in rust-analyzer, and the
/// reason why we use salsa at all.
///
/// We encode macro definitions into ids of macro calls; this is what allows us
/// to be incremental.
#[salsa::interned]
fn intern_macro_call(&self, macro_call: MacroCallLoc) -> MacroCallId;
/// Lowers a syntactic macro call to a token tree representation.
#[salsa::transparent]
fn macro_arg(
&self,
id: MacroCallId,
) -> Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>;
/// Extracts the syntax node corresponding to a macro call. That's a firewall
/// query: only typing inside the macro call itself changes the returned
/// subtree.
fn macro_arg_text(&self, id: MacroCallId) -> Option<GreenNode>;
/// Gets the expander for this macro. This compiles declarative macros, and
/// just fetches procedural ones.
fn macro_def(&self, id: MacroDefId) -> Result<Arc<TokenExpander>, mbe::ParseError>;
/// Expand macro call to a token tree.
fn macro_expand(&self, macro_call: MacroCallId) -> ExpandResult<Arc<tt::Subtree>>;
/// Special case of the previous query for procedural macros. We can't LRU
/// proc macros, since they are not deterministic in general, and
/// non-determinism breaks salsa in a very, very, very bad way. @edwin0cheng
/// heroically debugged this once!
fn expand_proc_macro(&self, call: MacroCallId) -> ExpandResult<tt::Subtree>;
/// Firewall query that returns the errors from the `parse_macro_expansion` query.
fn parse_macro_expansion_error(
&self,
macro_call: MacroCallId,
) -> ExpandResult<Box<[SyntaxError]>>;
fn hygiene_frame(&self, file_id: HirFileId) -> Arc<HygieneFrame>;
}
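// Hedged usage sketch for the query API above (`db` and `macro_file` are hypothetical
// values owned by a caller; this helper does not exist in this module):
//
//     fn dump_expansion(db: &dyn ExpandDatabase, macro_file: MacroFile) {
//         let ExpandResult { value: (parse, _token_map), err } =
//             db.parse_macro_expansion(macro_file);
//         if let Some(err) = err {
//             tracing::debug!("expansion error: {err:?}");
//         }
//         tracing::debug!("expanded syntax: {:#}", parse.syntax_node());
//     }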
/// This expands the given macro call, but with different arguments. This is
/// used for completion, where we want to see what 'would happen' if we insert a
/// token. The `token_to_map` is mapped down into the expansion, and the mapped
/// token is returned.
pub fn expand_speculative(
db: &dyn ExpandDatabase,
actual_macro_call: MacroCallId,
speculative_args: &SyntaxNode,
token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, SyntaxToken)> {
let loc = db.lookup_intern_macro_call(actual_macro_call);
let macro_def = db.macro_def(loc.def).ok()?;
let token_range = token_to_map.text_range();
// Build the subtree and token mapping for the speculative args
let censor = censor_for_macro_input(&loc, speculative_args);
let mut fixups = fixup::fixup_syntax(speculative_args);
fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
let (mut tt, spec_args_tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
speculative_args,
fixups.token_map,
fixups.next_id,
fixups.replace,
fixups.append,
);
let (attr_arg, token_id) = match loc.kind {
MacroCallKind::Attr { invoc_attr_index, is_derive, .. } => {
let attr = if is_derive {
// for pseudo-derive expansion we actually pass only the attribute itself
ast::Attr::cast(speculative_args.clone())
} else {
// Attributes may have an input token tree: build the subtree and map for this as well,
// then try to find a token id for our token if it is inside this input subtree.
let item = ast::Item::cast(speculative_args.clone())?;
item.doc_comments_and_attrs()
.nth(invoc_attr_index.ast_index())
.and_then(Either::left)
}?;
match attr.token_tree() {
Some(token_tree) => {
let (mut tree, map) = syntax_node_to_token_tree(attr.token_tree()?.syntax());
tree.delimiter = tt::Delimiter::unspecified();
let shift = mbe::Shift::new(&tt);
shift.shift_all(&mut tree);
let token_id = if token_tree.syntax().text_range().contains_range(token_range) {
let attr_input_start =
token_tree.left_delimiter_token()?.text_range().start();
let range = token_range.checked_sub(attr_input_start)?;
let token_id = shift.shift(map.token_by_range(range)?);
Some(token_id)
} else {
None
};
(Some(tree), token_id)
}
_ => (None, None),
}
}
_ => (None, None),
};
let token_id = match token_id {
Some(token_id) => token_id,
// token wasn't inside an attribute input so it has to be in the general macro input
None => {
let range = token_range.checked_sub(speculative_args.text_range().start())?;
let token_id = spec_args_tmap.token_by_range(range)?;
macro_def.map_id_down(token_id)
}
};
// Do the actual expansion. We need to directly expand the proc macro here because of the
// attribute args; otherwise the expand query would fetch the non-speculative attribute
// args and pass those instead.
let mut speculative_expansion = match loc.def.kind {
MacroDefKind::ProcMacro(expander, ..) => {
tt.delimiter = tt::Delimiter::unspecified();
expander.expand(db, loc.def.krate, loc.krate, &tt, attr_arg.as_ref())
}
MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => {
pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?)
}
_ => macro_def.expand(db, actual_macro_call, &tt),
};
let expand_to = macro_expand_to(db, actual_macro_call);
fixup::reverse_fixups(&mut speculative_expansion.value, &spec_args_tmap, &fixups.undo_info);
let (node, rev_tmap) = token_tree_to_syntax_node(&speculative_expansion.value, expand_to);
let syntax_node = node.syntax_node();
let token = rev_tmap
.ranges_by_token(token_id, token_to_map.kind())
.filter_map(|range| syntax_node.covering_element(range).into_token())
.min_by_key(|t| {
// prefer tokens of the same kind and text
// Note the inversion of the score here, as we want to prefer the first token in case
// all tokens have the same score
(t.kind() != token_to_map.kind()) as u8 + (t.text() != token_to_map.text()) as u8
})?;
Some((node.syntax_node(), token))
}
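// Hedged example of how a completion-style caller might use `expand_speculative` above
// (`db`, `call_id`, `edited_args`, and `token` are hypothetical caller-provided values):
//
//     if let Some((expansion, mapped)) =
//         expand_speculative(db, call_id, &edited_args, token)
//     {
//         // `mapped` is the token inside `expansion` that corresponds to `token`
//         // inside `edited_args`.
//     }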
fn ast_id_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
let map = db.parse_or_expand(file_id).map(|it| AstIdMap::from_source(&it)).unwrap_or_default();
Arc::new(map)
}
fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> Option<SyntaxNode> {
Some(match file_id.repr() {
HirFileIdRepr::FileId(file_id) => db.parse(file_id).tree().syntax().clone(),
HirFileIdRepr::MacroFile(macro_file) => {
db.parse_macro_expansion(macro_file).value.0.syntax_node()
}
})
}
fn parse_or_expand_with_err(
db: &dyn ExpandDatabase,
file_id: HirFileId,
) -> ExpandResult<Option<Parse<SyntaxNode>>> {
match file_id.repr() {
HirFileIdRepr::FileId(file_id) => ExpandResult::ok(Some(db.parse(file_id).to_syntax())),
HirFileIdRepr::MacroFile(macro_file) => {
db.parse_macro_expansion(macro_file).map(|it| Some(it.0))
}
}
}
fn parse_macro_expansion(
db: &dyn ExpandDatabase,
macro_file: MacroFile,
) -> ExpandResult<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)> {
let _p = profile::span("parse_macro_expansion");
let mbe::ValueResult { value: tt, err } = db.macro_expand(macro_file.macro_call_id);
if let Some(err) = &err {
if tracing::enabled!(tracing::Level::DEBUG) {
// Note:
// Ideally all macro expansions would parse successfully, at which point this
// logging would never fire at all.
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
let node = loc.kind.to_node(db);
// collect parent information for the debug log
let parents = std::iter::successors(loc.kind.file_id().call_node(db), |it| {
it.file_id.call_node(db)
})
.map(|n| format!("{:#}", n.value))
.collect::<Vec<_>>()
.join("\n");
tracing::debug!(
"fail on macro_parse: (reason: {:?} macro_call: {:#}) parents: {}",
err,
node.value,
parents
);
}
}
let expand_to = macro_expand_to(db, macro_file.macro_call_id);
tracing::debug!("expanded = {}", tt.as_debug_string());
tracing::debug!("kind = {:?}", expand_to);
let (parse, rev_token_map) = token_tree_to_syntax_node(&tt, expand_to);
ExpandResult { value: (parse, Arc::new(rev_token_map)), err }
}
fn macro_arg(
db: &dyn ExpandDatabase,
id: MacroCallId,
) -> Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)> {
let Some(arg) = db.macro_arg_text(id) else {
return Arc::new((
tt::Subtree {
delimiter: tt::Delimiter::UNSPECIFIED,
token_trees: Vec::new(),
},
Default::default(),
Default::default())
);
};
let loc = db.lookup_intern_macro_call(id);
let node = SyntaxNode::new_root(arg);
let censor = censor_for_macro_input(&loc, &node);
let mut fixups = fixup::fixup_syntax(&node);
fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
&node,
fixups.token_map,
fixups.next_id,
fixups.replace,
fixups.append,
);
if loc.def.is_proc_macro() {
// proc macros expect their input without the surrounding delimiters (parentheses etc.);
// MBEs expect them to be included
tt.delimiter = tt::Delimiter::unspecified();
}
Arc::new((tt, tmap, fixups.undo_info))
}
fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<SyntaxNode> {
// FIXME: handle `cfg_attr`
(|| {
let censor = match loc.kind {
MacroCallKind::FnLike { .. } => return None,
MacroCallKind::Derive { derive_attr_index, .. } => {
cov_mark::hit!(derive_censoring);
ast::Item::cast(node.clone())?
.attrs()
.take(derive_attr_index.ast_index() + 1)
// FIXME: this resolution should not be done syntactically.
// `derive` is a proper macro now, no longer a builtin, but we do not have
// name resolution at this stage. This means we would need to know about all
// macro calls for the given ast item here, so we require some kind of
// mapping...
.filter(|attr| attr.simple_name().as_deref() == Some("derive"))
.map(|it| it.syntax().clone())
.collect()
}
MacroCallKind::Attr { is_derive: true, .. } => return None,
MacroCallKind::Attr { invoc_attr_index, .. } => {
cov_mark::hit!(attribute_macro_attr_censoring);
ast::Item::cast(node.clone())?
.doc_comments_and_attrs()
.nth(invoc_attr_index.ast_index())
.and_then(Either::left)
.map(|attr| attr.syntax().clone())
.into_iter()
.collect()
}
};
Some(censor)
})()
.unwrap_or_default()
}
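// Worked example for `censor_for_macro_input` above (illustrative assumption, not
// asserted anywhere in this file): for a derive expansion of
// `#[derive(Clone)] #[repr(C)] struct S;` invoked via the first attribute, only
// `#[derive(Clone)]` is censored and `#[repr(C)]` is kept, while for an attribute macro
// such as `#[my_attr] fn f() {}` only the invoking `#[my_attr]` itself is removed from
// the item before it is lowered to a token tree.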
fn macro_arg_text(db: &dyn ExpandDatabase, id: MacroCallId) -> Option<GreenNode> {
let loc = db.lookup_intern_macro_call(id);
let arg = loc.kind.arg(db)?;
if matches!(loc.kind, MacroCallKind::FnLike { .. }) {
let first = arg.first_child_or_token().map_or(T![.], |it| it.kind());
let last = arg.last_child_or_token().map_or(T![.], |it| it.kind());
let well_formed_tt =
matches!((first, last), (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}']));
if !well_formed_tt {
// Don't expand malformed (unbalanced) macro invocations. This is
// less than ideal, but trying to expand unbalanced macro calls
// sometimes produces pathological, deeply nested code which breaks
// all kinds of things.
//
// Some day, we'll have explicit recursion counters for all
// recursive things, at which point this code might be removed.
cov_mark::hit!(issue9358_bad_macro_stack_overflow);
return None;
}
}
Some(arg.green().into())
}
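// Illustrative example for `macro_arg_text` above (assumption): a fn-like call whose
// argument token tree is not wrapped in a matching `()`, `[]`, or `{}` pair -- e.g. one
// that lost its closing parenthesis while being typed -- fails the well-formedness check
// above, so `macro_arg_text` returns `None` and the call is simply not expanded.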
fn macro_def(
db: &dyn ExpandDatabase,
id: MacroDefId,
) -> Result<Arc<TokenExpander>, mbe::ParseError> {
match id.kind {
MacroDefKind::Declarative(ast_id) => {
let (mac, def_site_token_map) = match ast_id.to_node(db) {
ast::Macro::MacroRules(macro_rules) => {
let arg = macro_rules
.token_tree()
.ok_or_else(|| mbe::ParseError::Expected("expected a token tree".into()))?;
let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt)?;
(mac, def_site_token_map)
}
ast::Macro::MacroDef(macro_def) => {
let arg = macro_def
.body()
.ok_or_else(|| mbe::ParseError::Expected("expected a token tree".into()))?;
let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
let mac = mbe::DeclarativeMacro::parse_macro2(&tt)?;
(mac, def_site_token_map)
}
};
Ok(Arc::new(TokenExpander::DeclarativeMacro { mac, def_site_token_map }))
}
MacroDefKind::BuiltIn(expander, _) => Ok(Arc::new(TokenExpander::Builtin(expander))),
MacroDefKind::BuiltInAttr(expander, _) => {
Ok(Arc::new(TokenExpander::BuiltinAttr(expander)))
}
MacroDefKind::BuiltInDerive(expander, _) => {
Ok(Arc::new(TokenExpander::BuiltinDerive(expander)))
}
MacroDefKind::BuiltInEager(expander, ..) => {
Ok(Arc::new(TokenExpander::BuiltinEager(expander)))
}
MacroDefKind::ProcMacro(expander, ..) => Ok(Arc::new(TokenExpander::ProcMacro(expander))),
}
}
fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
let _p = profile::span("macro_expand");
let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
if let Some(eager) = &loc.eager {
return ExpandResult { value: eager.arg_or_expansion.clone(), err: eager.error.clone() };
}
let expander = match db.macro_def(loc.def) {
Ok(it) => it,
// FIXME: This is weird -- we effectively report macro *definition*
// errors lazily, when we try to expand the macro. Instead, they should
// be reported at the definition site when we construct a def map.
// (Note that we also report them at the definition site in the late diagnostic pass.)
Err(err) => {
return ExpandResult {
value: Arc::new(tt::Subtree {
delimiter: tt::Delimiter::UNSPECIFIED,
token_trees: vec![],
}),
err: Some(ExpandError::Other(format!("invalid macro definition: {err}").into())),
}
}
};
let macro_arg = db.macro_arg(id);
let ExpandResult { value: mut tt, err } = expander.expand(db, id, &macro_arg.0);
// Enforce a hard limit on the number of tokens in the expanded tt
let count = tt.count();
if TOKEN_LIMIT.check(count).is_err() {
return ExpandResult {
value: Arc::new(tt::Subtree {
delimiter: tt::Delimiter::UNSPECIFIED,
token_trees: vec![],
}),
err: Some(ExpandError::Other(
format!(
"macro invocation exceeds token limit: produced {} tokens, limit is {}",
count,
TOKEN_LIMIT.inner(),
)
.into(),
)),
};
}
fixup::reverse_fixups(&mut tt, &macro_arg.1, &macro_arg.2);
ExpandResult { value: Arc::new(tt), err }
2019-10-29 12:11:42 +00:00
}
fn parse_macro_expansion_error(
db: &dyn ExpandDatabase,
macro_call_id: MacroCallId,
) -> ExpandResult<Box<[SyntaxError]>> {
db.parse_macro_expansion(MacroFile { macro_call_id })
.map(|it| it.0.errors().to_vec().into_boxed_slice())
}
fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<tt::Subtree> {
let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
let macro_arg = db.macro_arg(id);
let expander = match loc.def.kind {
MacroDefKind::ProcMacro(expander, ..) => expander,
_ => unreachable!(),
};
let attr_arg = match &loc.kind {
MacroCallKind::Attr { attr_args, .. } => {
let mut attr_args = attr_args.0.clone();
mbe::Shift::new(&macro_arg.0).shift_all(&mut attr_args);
Some(attr_args)
}
_ => None,
};
expander.expand(db, loc.def.krate, loc.krate, &macro_arg.0, attr_arg.as_ref())
}
fn hygiene_frame(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<HygieneFrame> {
Arc::new(HygieneFrame::new(db, file_id))
}
fn macro_expand_to(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandTo {
let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
loc.kind.expand_to()
}
fn token_tree_to_syntax_node(
tt: &tt::Subtree,
expand_to: ExpandTo,
) -> (Parse<SyntaxNode>, mbe::TokenMap) {
let entry_point = match expand_to {
ExpandTo::Statements => mbe::TopEntryPoint::MacroStmts,
ExpandTo::Items => mbe::TopEntryPoint::MacroItems,
ExpandTo::Pattern => mbe::TopEntryPoint::Pattern,
ExpandTo::Type => mbe::TopEntryPoint::Type,
ExpandTo::Expr => mbe::TopEntryPoint::Expr,
};
mbe::token_tree_to_syntax_node(tt, entry_point)
}