internal: Clean up Expander a bit

Lukas Wirth 2023-12-22 12:46:22 +01:00
parent 2a5b60b186
commit 9d24764624
7 changed files with 68 additions and 91 deletions

View file

@@ -8,7 +8,7 @@ use either::Either;
use hir_expand::{
ast_id_map::AstIdMap,
name::{name, AsName, Name},
AstId, ExpandError, InFile,
ExpandError, InFile,
};
use intern::Interned;
use profile::Count;
@@ -66,7 +66,7 @@ pub(super) fn lower(
krate,
def_map: expander.module.def_map(db),
source_map: BodySourceMap::default(),
ast_id_map: db.ast_id_map(expander.current_file_id),
ast_id_map: db.ast_id_map(expander.current_file_id()),
body: Body {
exprs: Default::default(),
pats: Default::default(),
@@ -408,7 +408,7 @@ impl ExprCollector<'_> {
ast::Expr::ParenExpr(e) => {
let inner = self.collect_expr_opt(e.expr());
// make the paren expr point to the inner expression as well
let src = self.expander.to_source(syntax_ptr);
let src = self.expander.in_file(syntax_ptr);
self.source_map.expr_map.insert(src, inner);
inner
}
@@ -441,7 +441,7 @@ impl ExprCollector<'_> {
Some(e) => self.collect_expr(e),
None => self.missing_expr(),
};
let src = self.expander.to_source(AstPtr::new(&field));
let src = self.expander.in_file(AstPtr::new(&field));
self.source_map.field_map_back.insert(expr, src);
Some(RecordLitField { name, expr })
})
@@ -644,7 +644,7 @@ impl ExprCollector<'_> {
Some(id) => {
// Make the macro-call point to its expanded expression so we can query
// semantics on syntax pointers to the macro
let src = self.expander.to_source(syntax_ptr);
let src = self.expander.in_file(syntax_ptr);
self.source_map.expr_map.insert(src, id);
id
}
@@ -957,9 +957,9 @@ impl ExprCollector<'_> {
T: ast::AstNode,
{
// File containing the macro call. Expansion errors will be attached here.
let outer_file = self.expander.current_file_id;
let outer_file = self.expander.current_file_id();
let macro_call_ptr = self.expander.to_source(syntax_ptr);
let macro_call_ptr = self.expander.in_file(syntax_ptr);
let module = self.expander.module.local_id;
let res = match self.def_map.modules[module]
@@ -1021,10 +1021,10 @@ impl ExprCollector<'_> {
Some((mark, expansion)) => {
// Keep collecting even with expansion errors so we can provide completions and
// other services in incomplete macro expressions.
self.source_map.expansions.insert(macro_call_ptr, self.expander.current_file_id);
self.source_map.expansions.insert(macro_call_ptr, self.expander.current_file_id());
let prev_ast_id_map = mem::replace(
&mut self.ast_id_map,
self.db.ast_id_map(self.expander.current_file_id),
self.db.ast_id_map(self.expander.current_file_id()),
);
if record_diagnostics {
@@ -1074,7 +1074,7 @@ impl ExprCollector<'_> {
Some(tail) => {
// Make the macro-call point to its expanded expression so we can query
// semantics on syntax pointers to the macro
let src = self.expander.to_source(syntax_ptr);
let src = self.expander.in_file(syntax_ptr);
self.source_map.expr_map.insert(src, tail);
Some(tail)
}
@@ -1148,7 +1148,7 @@ impl ExprCollector<'_> {
let block_id = if block_has_items {
let file_local_id = self.ast_id_map.ast_id(&block);
let ast_id = AstId::new(self.expander.current_file_id, file_local_id);
let ast_id = self.expander.in_file(file_local_id);
Some(self.db.intern_block(BlockLoc { ast_id, module: self.expander.module }))
} else {
None
@@ -1341,7 +1341,7 @@ impl ExprCollector<'_> {
let ast_pat = f.pat()?;
let pat = self.collect_pat(ast_pat, binding_list);
let name = f.field_name()?.as_name();
let src = self.expander.to_source(AstPtr::new(&f));
let src = self.expander.in_file(AstPtr::new(&f));
self.source_map.pat_field_map_back.insert(pat, src);
Some(RecordFieldPat { name, pat })
})
@@ -1399,7 +1399,7 @@ impl ExprCollector<'_> {
ast::Pat::MacroPat(mac) => match mac.macro_call() {
Some(call) => {
let macro_ptr = AstPtr::new(&call);
let src = self.expander.to_source(AstPtr::new(&Either::Left(pat)));
let src = self.expander.in_file(AstPtr::new(&Either::Left(pat)));
let pat =
self.collect_macro_call(call, macro_ptr, true, |this, expanded_pat| {
this.collect_pat_opt(expanded_pat, binding_list)
@@ -1480,10 +1480,7 @@ impl ExprCollector<'_> {
}
self.source_map.diagnostics.push(BodyDiagnostic::InactiveCode {
node: InFile::new(
self.expander.current_file_id,
SyntaxNodePtr::new(owner.syntax()),
),
node: self.expander.in_file(SyntaxNodePtr::new(owner.syntax())),
cfg,
opts: self.expander.cfg_options().clone(),
});
@@ -1522,10 +1519,7 @@ impl ExprCollector<'_> {
} else {
Err(BodyDiagnostic::UnreachableLabel {
name,
node: InFile::new(
self.expander.current_file_id,
AstPtr::new(&lifetime),
),
node: self.expander.in_file(AstPtr::new(&lifetime)),
})
};
}
@@ -1534,7 +1528,7 @@ impl ExprCollector<'_> {
Err(BodyDiagnostic::UndeclaredLabel {
name,
node: InFile::new(self.expander.current_file_id, AstPtr::new(&lifetime)),
node: self.expander.in_file(AstPtr::new(&lifetime)),
})
}
@@ -1998,7 +1992,7 @@ fn pat_literal_to_hir(lit: &ast::LiteralPat) -> Option<(Literal, ast::Literal)>
impl ExprCollector<'_> {
fn alloc_expr(&mut self, expr: Expr, ptr: ExprPtr) -> ExprId {
let src = self.expander.to_source(ptr);
let src = self.expander.in_file(ptr);
let id = self.body.exprs.alloc(expr);
self.source_map.expr_map_back.insert(id, src.clone());
self.source_map.expr_map.insert(src, id);
@@ -2026,7 +2020,7 @@ impl ExprCollector<'_> {
}
fn alloc_pat(&mut self, pat: Pat, ptr: PatPtr) -> PatId {
let src = self.expander.to_source(ptr);
let src = self.expander.in_file(ptr);
let id = self.body.pats.alloc(pat);
self.source_map.pat_map_back.insert(id, src.clone());
self.source_map.pat_map.insert(src, id);
@@ -2041,7 +2035,7 @@ impl ExprCollector<'_> {
}
fn alloc_label(&mut self, label: Label, ptr: LabelPtr) -> LabelId {
let src = self.expander.to_source(ptr);
let src = self.expander.in_file(ptr);
let id = self.body.labels.alloc(label);
self.source_map.label_map_back.insert(id, src.clone());
self.source_map.label_map.insert(src, id);
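
All of the changes in this file swap direct `InFile::new(self.expander.current_file_id, ...)` constructions and the old `to_source` calls for the renamed `Expander::in_file` helper (defined in the expander hunk further down), which pairs a value, typically a syntax pointer, with the expander's current file id before it goes into the body source map. A minimal sketch of that pairing, using simplified stand-ins for `HirFileId` and `InFile` rather than the real `hir_expand` types:

    // Simplified stand-ins; the real types live in hir_expand.
    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    struct FileId(u32); // stand-in for HirFileId

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    struct InFile<T> {
        file_id: FileId,
        value: T,
    }

    struct Expander {
        current_file_id: FileId,
    }

    impl Expander {
        // Mirrors the `in_file` helper: attach the current file id to any value
        // (e.g. an AstPtr) so the source map can round-trip between HIR ids and syntax.
        fn in_file<T>(&self, value: T) -> InFile<T> {
            InFile { file_id: self.current_file_id, value }
        }
    }

    fn main() {
        let expander = Expander { current_file_id: FileId(0) };
        let ptr = 42usize; // stand-in for an AstPtr / SyntaxNodePtr
        assert_eq!(expander.in_file(ptr), InFile { file_id: FileId(0), value: 42 });
    }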

View file

@@ -242,12 +242,6 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDataba
#[salsa::invoke(LangItems::crate_lang_items_query)]
fn crate_lang_items(&self, krate: CrateId) -> Arc<LangItems>;
#[salsa::transparent]
fn crate_limits(&self, crate_id: CrateId) -> CrateLimits;
#[salsa::transparent]
fn recursion_limit(&self, crate_id: CrateId) -> u32;
fn crate_supports_no_std(&self, crate_id: CrateId) -> bool;
}
@@ -256,24 +250,6 @@ fn crate_def_map_wait(db: &dyn DefDatabase, krate: CrateId) -> Arc<DefMap> {
db.crate_def_map_query(krate)
}
pub struct CrateLimits {
/// The maximum depth for potentially infinitely-recursive compile-time operations like macro expansion or auto-dereference.
pub recursion_limit: u32,
}
fn crate_limits(db: &dyn DefDatabase, crate_id: CrateId) -> CrateLimits {
let def_map = db.crate_def_map(crate_id);
CrateLimits {
// 128 is the default in rustc.
recursion_limit: def_map.recursion_limit().unwrap_or(128),
}
}
fn recursion_limit(db: &dyn DefDatabase, crate_id: CrateId) -> u32 {
db.crate_limits(crate_id).recursion_limit
}
fn crate_supports_no_std(db: &dyn DefDatabase, crate_id: CrateId) -> bool {
let file = db.crate_graph()[crate_id].root_file_id;
let item_tree = db.file_item_tree(file.into());

View file

@@ -8,7 +8,7 @@ use hir_expand::{
InFile, MacroCallId,
};
use limit::Limit;
use syntax::{ast, Parse, SyntaxNode};
use syntax::{ast, Parse};
use crate::{
attr::Attrs, db::DefDatabase, lower::LowerCtx, path::Path, AsMacroCall, MacroId, ModuleId,
@@ -20,7 +20,7 @@ pub struct Expander {
cfg_options: CfgOptions,
span_map: SpanMap,
krate: CrateId,
pub(crate) current_file_id: HirFileId,
current_file_id: HirFileId,
pub(crate) module: ModuleId,
/// `recursion_depth == usize::MAX` indicates that the recursion limit has been reached.
recursion_depth: u32,
@@ -29,12 +29,13 @@ pub struct Expander {
impl Expander {
pub fn new(db: &dyn DefDatabase, current_file_id: HirFileId, module: ModuleId) -> Expander {
let recursion_limit = db.recursion_limit(module.krate);
#[cfg(not(test))]
let recursion_limit = Limit::new(recursion_limit as usize);
// Without this, `body::tests::your_stack_belongs_to_me` stack-overflows in debug
#[cfg(test)]
let recursion_limit = Limit::new(std::cmp::min(32, recursion_limit as usize));
let recursion_limit = module.def_map(db).recursion_limit() as usize;
let recursion_limit = Limit::new(if cfg!(test) {
// Without this, `body::tests::your_stack_belongs_to_me` stack-overflows in debug
std::cmp::min(32, recursion_limit)
} else {
recursion_limit
});
Expander {
current_file_id,
module,
@@ -56,7 +57,7 @@ impl Expander {
let mut unresolved_macro_err = None;
let result = self.within_limit(db, |this| {
let macro_call = InFile::new(this.current_file_id, &macro_call);
let macro_call = this.in_file(&macro_call);
match macro_call.as_call_id_with_errors(db.upcast(), this.module.krate(), |path| {
resolver(path).map(|it| db.macro_def(it))
}) {
@@ -83,17 +84,6 @@ impl Expander {
self.within_limit(db, |_this| ExpandResult::ok(Some(call_id)))
}
fn enter_expand_inner(
db: &dyn DefDatabase,
call_id: MacroCallId,
error: Option<ExpandError>,
) -> ExpandResult<Option<InFile<Parse<SyntaxNode>>>> {
let macro_file = call_id.as_macro_file();
let ExpandResult { value, err } = db.parse_macro_expansion(macro_file);
ExpandResult { value: Some(InFile::new(macro_file.into(), value.0)), err: error.or(err) }
}
pub fn exit(&mut self, mut mark: Mark) {
self.span_map = mark.span_map;
self.current_file_id = mark.file_id;
@@ -113,7 +103,7 @@ impl Expander {
LowerCtx::new(db, self.span_map.clone(), self.current_file_id)
}
pub(crate) fn to_source<T>(&self, value: T) -> InFile<T> {
pub(crate) fn in_file<T>(&self, value: T) -> InFile<T> {
InFile { file_id: self.current_file_id, value }
}
@@ -164,26 +154,34 @@ impl Expander {
return ExpandResult { value: None, err };
};
let res = Self::enter_expand_inner(db, call_id, err);
match res.err {
// If proc-macro is disabled or unresolved, we want to expand to a missing expression
// instead of an empty tree which might end up in an empty block.
Some(ExpandError::UnresolvedProcMacro(_)) => res.map(|_| None),
_ => res.map(|value| {
value.and_then(|InFile { file_id, value }| {
let parse = value.cast::<T>()?;
let macro_file = call_id.as_macro_file();
let res = db.parse_macro_expansion(macro_file);
let err = err.or(res.err);
ExpandResult {
value: match err {
// If proc-macro is disabled or unresolved, we want to expand to a missing expression
// instead of an empty tree which might end up in an empty block.
Some(ExpandError::UnresolvedProcMacro(_)) => None,
_ => (|| {
let parse = res.value.0.cast::<T>()?;
self.recursion_depth += 1;
let old_span_map = std::mem::replace(&mut self.span_map, db.span_map(file_id));
let old_file_id = std::mem::replace(&mut self.current_file_id, file_id);
let old_span_map = std::mem::replace(
&mut self.span_map,
SpanMap::ExpansionSpanMap(res.value.1),
);
let old_file_id =
std::mem::replace(&mut self.current_file_id, macro_file.into());
let mark = Mark {
file_id: old_file_id,
span_map: old_span_map,
bomb: DropBomb::new("expansion mark dropped"),
};
Some((mark, parse))
})
}),
})(),
},
err,
}
}
}
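
Two patterns in this file are worth illustrating: the recursion limit is now clamped at runtime with `cfg!(test)` instead of two `#[cfg]`-gated bindings, and entering an expansion hands back a `Mark` carrying the saved state plus a `DropBomb`, so a `Mark` that never reaches `exit` panics loudly. A minimal sketch of both, assuming the `drop_bomb` crate (which the real code also uses) and plain `u32`/`usize` values in place of the real `HirFileId` and `Limit` types; the span-map bookkeeping is omitted:

    use drop_bomb::DropBomb;

    struct Expander {
        current_file_id: u32, // stand-in for HirFileId
        recursion_depth: u32,
        recursion_limit: usize,
    }

    struct Mark {
        file_id: u32,
        bomb: DropBomb,
    }

    impl Expander {
        fn new(current_file_id: u32, crate_limit: usize) -> Expander {
            // Same cfg!(test) clamp as in the diff: keep the limit small under tests so
            // deeply recursive fixtures don't overflow the stack in debug builds.
            let recursion_limit =
                if cfg!(test) { std::cmp::min(32, crate_limit) } else { crate_limit };
            Expander { current_file_id, recursion_depth: 0, recursion_limit }
        }

        // Entering an expansion bumps the depth, swaps in the expansion's file id, and
        // returns a Mark guarding the old state; the DropBomb panics if the Mark is
        // dropped without going through `exit`. (The real code tracks the limit via
        // `within_limit`; this is a simplified version of the same idea.)
        fn enter(&mut self, expansion_file_id: u32) -> Option<Mark> {
            if self.recursion_depth as usize >= self.recursion_limit {
                return None; // recursion limit reached
            }
            self.recursion_depth += 1;
            let file_id = std::mem::replace(&mut self.current_file_id, expansion_file_id);
            Some(Mark { file_id, bomb: DropBomb::new("expansion mark dropped") })
        }

        fn exit(&mut self, mut mark: Mark) {
            self.current_file_id = mark.file_id;
            self.recursion_depth -= 1;
            mark.bomb.defuse();
        }
    }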

View file

@@ -626,8 +626,9 @@ impl DefMap {
self.diagnostics.as_slice()
}
pub fn recursion_limit(&self) -> Option<u32> {
self.data.recursion_limit
pub fn recursion_limit(&self) -> u32 {
// 128 is the default in rustc
self.data.recursion_limit.unwrap_or(128)
}
}
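
With the database queries gone, the fallback lives in this single accessor: the private `Option<u32>` (set when the crate declares an explicit limit, presumably via a crate-level `#![recursion_limit = "N"]` attribute) is resolved to rustc's default of 128 in one place instead of at every caller. A minimal sketch with a hypothetical `DefMapData` stand-in:

    // `DefMapData` is a simplified stand-in, not the real rust-analyzer type.
    struct DefMapData {
        recursion_limit: Option<u32>,
    }

    impl DefMapData {
        fn recursion_limit(&self) -> u32 {
            // 128 is the default in rustc
            self.recursion_limit.unwrap_or(128)
        }
    }

    fn main() {
        assert_eq!(DefMapData { recursion_limit: None }.recursion_limit(), 128);
        assert_eq!(DefMapData { recursion_limit: Some(256) }.recursion_limit(), 256);
    }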

View file

@@ -2,7 +2,10 @@
use std::{fmt, hash::BuildHasherDefault};
use base_db::CrateId;
use hir_expand::name::{name, Name};
use hir_expand::{
name::{name, Name},
MacroDefId,
};
use indexmap::IndexMap;
use intern::Interned;
use rustc_hash::FxHashSet;
@@ -406,6 +409,15 @@ impl Resolver {
.take_macros_import()
}
pub fn resolve_path_as_macro_def(
&self,
db: &dyn DefDatabase,
path: &ModPath,
expected_macro_kind: Option<MacroSubNs>,
) -> Option<MacroDefId> {
self.resolve_path_as_macro(db, path, expected_macro_kind).map(|(it, _)| db.macro_def(it))
}
/// Returns a set of names available in the current scope.
///
/// Note that this is a somewhat fuzzy concept -- internally, the compiler

View file

@@ -340,9 +340,7 @@ impl<'db> SemanticsImpl<'db> {
let macro_call = InFile::new(file_id, actual_macro_call);
let krate = resolver.krate();
let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| {
resolver
.resolve_path_as_macro(self.db.upcast(), &path, Some(MacroSubNs::Bang))
.map(|(it, _)| self.db.macro_def(it))
resolver.resolve_path_as_macro_def(self.db.upcast(), &path, Some(MacroSubNs::Bang))
})?;
hir_expand::db::expand_speculative(
self.db.upcast(),

View file

@@ -770,9 +770,7 @@ impl SourceAnalyzer {
) -> Option<MacroFileId> {
let krate = self.resolver.krate();
let macro_call_id = macro_call.as_call_id(db.upcast(), krate, |path| {
self.resolver
.resolve_path_as_macro(db.upcast(), &path, Some(MacroSubNs::Bang))
.map(|(it, _)| db.macro_def(it))
self.resolver.resolve_path_as_macro_def(db.upcast(), &path, Some(MacroSubNs::Bang))
})?;
// why the 64?
Some(macro_call_id.as_macro_file()).filter(|it| it.expansion_level(db.upcast()) < 64)
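
The two call-site hunks above shrink because the new `resolve_path_as_macro_def` on `Resolver` (added in the resolver hunk earlier) folds the `.map(|(it, _)| db.macro_def(it))` step into the resolver itself. A minimal sketch of that wrapper shape, using hypothetical stand-in types rather than the real rust-analyzer ones:

    // Hypothetical stand-ins for the real database, Resolver, MacroId, and MacroDefId.
    struct Db;
    #[derive(Clone, Copy)]
    struct MacroId(u32);
    #[derive(Debug, PartialEq)]
    struct MacroDefId(u32);

    impl Db {
        fn macro_def(&self, id: MacroId) -> MacroDefId {
            MacroDefId(id.0)
        }
    }

    struct Resolver;

    impl Resolver {
        fn resolve_path_as_macro(&self, _db: &Db, path: &str) -> Option<(MacroId, ())> {
            // Pretend only `vec` resolves; the `()` stands in for the extra
            // information the real method also returns.
            (path == "vec").then_some((MacroId(0), ()))
        }

        // The wrapper: callers that only need the MacroDefId no longer repeat the map step.
        fn resolve_path_as_macro_def(&self, db: &Db, path: &str) -> Option<MacroDefId> {
            self.resolve_path_as_macro(db, path).map(|(it, _)| db.macro_def(it))
        }
    }

    fn main() {
        let (db, resolver) = (Db, Resolver);
        assert_eq!(resolver.resolve_path_as_macro_def(&db, "vec"), Some(MacroDefId(0)));
        assert_eq!(resolver.resolve_path_as_macro_def(&db, "nope"), None);
    }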