Mirror of https://github.com/rust-lang/rust-analyzer, synced 2024-12-25 12:33:33 +00:00
Auto merge of #17707 - Veykril:proc-macro-err-cleanup, r=Veykril
feat: Use spans for builtin and declarative macro expansion errors

This should generally improve some error reporting for macro expansion errors, especially for `compile_error!` within proc-macros.
This commit is contained in: commit a021b85be5
51 changed files with 776 additions and 831 deletions
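The change is easiest to see in the new shape of `ExpandError`: an expansion error now carries the span it should be reported at together with a kind, instead of being a span-less enum rendered as a plain string. Below is a minimal, self-contained sketch of that shape; the `Span(u32)` type is a stand-in for the real `span::Span`, and only a subset of the actual `ExpandErrorKind` variants is shown.

```rust
use std::sync::Arc;

// Stand-in for `span::Span`; the real type identifies a file anchor and text range.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
struct Span(u32);

// Subset of the error kinds introduced by the diff.
#[derive(Debug, PartialEq, Eq, Clone, Hash)]
enum ExpandErrorKind {
    RecursionOverflow,
    Other(Box<str>),
}

// Errors now pair a kind with the span they should be attached to.
#[derive(Debug, PartialEq, Eq, Clone, Hash)]
struct ExpandError {
    inner: Arc<(ExpandErrorKind, Span)>,
}

impl ExpandError {
    fn new(span: Span, kind: ExpandErrorKind) -> Self {
        ExpandError { inner: Arc::new((kind, span)) }
    }
    fn other(span: Span, msg: impl Into<Box<str>>) -> Self {
        Self::new(span, ExpandErrorKind::Other(msg.into()))
    }
    fn kind(&self) -> &ExpandErrorKind {
        &self.inner.0
    }
    fn span(&self) -> Span {
        self.inner.1
    }
}

fn main() {
    // A `compile_error!` raised inside a proc-macro can now point at a concrete span.
    let err = ExpandError::other(Span(42), "`compile_error!` argument must be a string");
    assert_eq!(err.span(), Span(42));

    let overflow = ExpandError::new(Span(7), ExpandErrorKind::RecursionOverflow);
    assert_eq!(overflow.span(), Span(7));
    println!("{:?} at {:?}", err.kind(), err.span());
}
```

In the real crate the message text is produced by `render_to_string(&db)` (visible in the diff) while the attached span drives where the diagnostic is shown, which is what the commit message means by improved reporting for `compile_error!` inside proc-macros.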
@@ -16,9 +16,7 @@ use span::{Edition, EditionedFileId};
use triomphe::Arc;
use vfs::{file_set::FileSet, AbsPathBuf, AnchoredPath, FileId, VfsPath};

// Map from crate id to the name of the crate and path of the proc-macro. If the value is `None`,
// then the crate for the proc-macro hasn't been build yet as the build data is missing.
pub type ProcMacroPaths = FxHashMap<CrateId, Result<(Option<String>, AbsPathBuf), String>>;
pub type ProcMacroPaths = FxHashMap<CrateId, Result<(String, AbsPathBuf), String>>;

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct SourceRootId(pub u32);

@@ -10,7 +10,7 @@ use std::ops::{Deref, Index};

use base_db::CrateId;
use cfg::{CfgExpr, CfgOptions};
use hir_expand::{name::Name, InFile};
use hir_expand::{name::Name, ExpandError, InFile};
use la_arena::{Arena, ArenaMap, Idx, RawIdx};
use rustc_hash::FxHashMap;
use smallvec::SmallVec;

@@ -115,8 +115,7 @@ pub struct SyntheticSyntax;
#[derive(Debug, Eq, PartialEq)]
pub enum BodyDiagnostic {
InactiveCode { node: InFile<SyntaxNodePtr>, cfg: CfgExpr, opts: CfgOptions },
MacroError { node: InFile<AstPtr<ast::MacroCall>>, message: String },
UnresolvedProcMacro { node: InFile<AstPtr<ast::MacroCall>>, krate: CrateId },
MacroError { node: InFile<AstPtr<ast::MacroCall>>, err: ExpandError },
UnresolvedMacroCall { node: InFile<AstPtr<ast::MacroCall>>, path: ModPath },
UnreachableLabel { node: InFile<AstPtr<ast::Lifetime>>, name: Name },
UndeclaredLabel { node: InFile<AstPtr<ast::Lifetime>>, name: Name },

@@ -7,7 +7,7 @@ use base_db::CrateId;
use either::Either;
use hir_expand::{
name::{AsName, Name},
ExpandError, InFile,
InFile,
};
use intern::{sym, Interned, Symbol};
use rustc_hash::FxHashMap;

@@ -992,20 +992,11 @@ impl ExprCollector<'_> {
}
};
if record_diagnostics {
match &res.err {
Some(ExpandError::UnresolvedProcMacro(krate)) => {
self.source_map.diagnostics.push(BodyDiagnostic::UnresolvedProcMacro {
node: InFile::new(outer_file, syntax_ptr),
krate: *krate,
});
}
Some(err) => {
self.source_map.diagnostics.push(BodyDiagnostic::MacroError {
node: InFile::new(outer_file, syntax_ptr),
message: err.to_string(),
});
}
None => {}
if let Some(err) = res.err {
self.source_map.diagnostics.push(BodyDiagnostic::MacroError {
node: InFile::new(outer_file, syntax_ptr),
err,
});
}
}

@@ -657,22 +657,18 @@ impl<'a> AssocItemCollector<'a> {
// crate failed), skip expansion like we would if it was
// disabled. This is analogous to the handling in
// `DefCollector::collect_macros`.
if exp.is_dummy() {
self.diagnostics.push(DefDiagnostic::unresolved_proc_macro(
if let Some(err) = exp.as_expand_error(self.module_id.krate) {
self.diagnostics.push(DefDiagnostic::macro_error(
self.module_id.local_id,
loc.kind,
loc.def.krate,
ast_id,
(*attr.path).clone(),
err,
));

continue 'attrs;
}
if exp.is_disabled() {
continue 'attrs;
}
}

self.macro_calls.push((ast_id, call_id));

let res =
self.expander.enter_expand_id::<ast::MacroItems>(self.db, call_id);
self.collect_macro_items(res);

@@ -6,8 +6,8 @@ use base_db::CrateId;
use cfg::CfgOptions;
use drop_bomb::DropBomb;
use hir_expand::{
attrs::RawAttrs, mod_path::ModPath, span_map::SpanMap, ExpandError, ExpandResult, HirFileId,
InFile, MacroCallId,
attrs::RawAttrs, mod_path::ModPath, span_map::SpanMap, ExpandError, ExpandErrorKind,
ExpandResult, HirFileId, InFile, Lookup, MacroCallId,
};
use limit::Limit;
use span::SyntaxContextId;

@@ -160,26 +160,30 @@ impl Expander {
// so don't return overflow error here to avoid diagnostics duplication.
cov_mark::hit!(overflow_but_not_me);
return ExpandResult::ok(None);
} else if self.recursion_limit.check(self.recursion_depth as usize + 1).is_err() {
self.recursion_depth = u32::MAX;
cov_mark::hit!(your_stack_belongs_to_me);
return ExpandResult::only_err(ExpandError::RecursionOverflow);
}

let ExpandResult { value, err } = op(self);
let Some(call_id) = value else {
return ExpandResult { value: None, err };
};
if self.recursion_limit.check(self.recursion_depth as usize + 1).is_err() {
self.recursion_depth = u32::MAX;
cov_mark::hit!(your_stack_belongs_to_me);
return ExpandResult::only_err(ExpandError::new(
db.macro_arg_considering_derives(call_id, &call_id.lookup(db.upcast()).kind).2,
ExpandErrorKind::RecursionOverflow,
));
}

let macro_file = call_id.as_macro_file();
let res = db.parse_macro_expansion(macro_file);

let err = err.or(res.err);
ExpandResult {
value: match err {
value: match &err {
// If proc-macro is disabled or unresolved, we want to expand to a missing expression
// instead of an empty tree which might end up in an empty block.
Some(ExpandError::UnresolvedProcMacro(_)) => None,
Some(e) if matches!(e.kind(), ExpandErrorKind::MissingProcMacroExpander(_)) => None,
_ => (|| {
let parse = res.value.0.cast::<T>()?;
@@ -75,9 +75,7 @@ use base_db::{
CrateId,
};
use hir_expand::{
builtin_attr_macro::BuiltinAttrExpander,
builtin_derive_macro::BuiltinDeriveExpander,
builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander},
builtin::{BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerExpander},
db::ExpandDatabase,
eager::expand_eager_macro_input,
impl_intern_lookup,

@@ -1436,7 +1434,10 @@ impl AsMacroCall for InFile<&ast::MacroCall> {
});

let Some((call_site, path)) = path else {
return Ok(ExpandResult::only_err(ExpandError::other("malformed macro invocation")));
return Ok(ExpandResult::only_err(ExpandError::other(
span_map.span_for_range(self.value.syntax().text_range()),
"malformed macro invocation",
)));
};

macro_call_as_call_id_with_eager(

@@ -1084,7 +1084,7 @@ fn main() {
macro_rules! concat_bytes {}

fn main() {
let x = /* error: unexpected token in input */b"";
let x = /* error: unexpected token */b"";
}

"#]],

@@ -122,7 +122,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream

let mut expn_text = String::new();
if let Some(err) = exp.err {
format_to!(expn_text, "/* error: {} */", err);
format_to!(expn_text, "/* error: {} */", err.render_to_string(&db).0);
}
let (parse, token_map) = exp.value;
if expect_errors {
@@ -145,8 +145,6 @@ struct DefMapCrateData {
/// Side table for resolving derive helpers.
exported_derives: FxHashMap<MacroDefId, Box<[Name]>>,
fn_proc_macro_mapping: FxHashMap<FunctionId, ProcMacroId>,
/// The error that occurred when failing to load the proc-macro dll.
proc_macro_loading_error: Option<Box<str>>,

/// Custom attributes registered with `#![register_attr]`.
registered_attrs: Vec<Symbol>,

@@ -169,7 +167,6 @@ impl DefMapCrateData {
extern_prelude: FxIndexMap::default(),
exported_derives: FxHashMap::default(),
fn_proc_macro_mapping: FxHashMap::default(),
proc_macro_loading_error: None,
registered_attrs: Vec::new(),
registered_tools: PREDEFINED_TOOLS.iter().map(|it| Symbol::intern(it)).collect(),
unstable_features: FxHashSet::default(),

@@ -189,7 +186,6 @@ impl DefMapCrateData {
registered_attrs,
registered_tools,
unstable_features,
proc_macro_loading_error: _,
rustc_coherence_is_core: _,
no_core: _,
no_std: _,

@@ -474,10 +470,6 @@ impl DefMap {
self.data.fn_proc_macro_mapping.get(&id).copied()
}

pub fn proc_macro_loading_error(&self) -> Option<&str> {
self.data.proc_macro_loading_error.as_deref()
}

pub fn krate(&self) -> CrateId {
self.krate
}
@@ -10,9 +10,7 @@ use cfg::{CfgExpr, CfgOptions};
use either::Either;
use hir_expand::{
attrs::{Attr, AttrId},
builtin_attr_macro::find_builtin_attr,
builtin_derive_macro::find_builtin_derive,
builtin_fn_macro::find_builtin_macro,
builtin::{find_builtin_attr, find_builtin_derive, find_builtin_macro},
name::{AsName, Name},
proc_macro::CustomProcMacroExpander,
ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroDefId, MacroDefKind,

@@ -76,34 +74,11 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI
}

let proc_macros = if krate.is_proc_macro {
match db.proc_macros().get(&def_map.krate) {
Some(Ok(proc_macros)) => Ok({
let ctx = db.syntax_context(tree_id.file_id());
proc_macros
.iter()
.enumerate()
.map(|(idx, it)| {
let name = Name::new_symbol(it.name.clone(), ctx);
(
name,
if !db.expand_proc_attr_macros() {
CustomProcMacroExpander::dummy()
} else if it.disabled {
CustomProcMacroExpander::disabled()
} else {
CustomProcMacroExpander::new(
hir_expand::proc_macro::ProcMacroId::new(idx as u32),
)
},
)
})
.collect()
}),
Some(Err(e)) => Err(e.clone().into_boxed_str()),
None => Err("No proc-macros present for crate".to_owned().into_boxed_str()),
}
db.proc_macros()
.for_crate(def_map.krate, db.syntax_context(tree_id.file_id()))
.unwrap_or_default()
} else {
Ok(vec![])
Default::default()
};

let mut collector = DefCollector {

@@ -252,10 +227,10 @@ struct DefCollector<'a> {
mod_dirs: FxHashMap<LocalModuleId, ModDir>,
cfg_options: &'a CfgOptions,
/// List of procedural macros defined by this crate. This is read from the dynamic library
/// built by the build system, and is the list of proc. macros we can actually expand. It is
/// empty when proc. macro support is disabled (in which case we still do name resolution for
/// them).
proc_macros: Result<Vec<(Name, CustomProcMacroExpander)>, Box<str>>,
/// built by the build system, and is the list of proc-macros we can actually expand. It is
/// empty when proc-macro support is disabled (in which case we still do name resolution for
/// them). The bool signals whether the proc-macro has been explicitly disabled for name-resolution.
proc_macros: Box<[(Name, CustomProcMacroExpander, bool)]>,
is_proc_macro: bool,
from_glob_import: PerNsGlobImports,
/// If we fail to resolve an attribute on a `ModItem`, we fall back to ignoring the attribute.

@@ -278,10 +253,6 @@ impl DefCollector<'_> {
let attrs = item_tree.top_level_attrs(self.db, self.def_map.krate);
let crate_data = Arc::get_mut(&mut self.def_map.data).unwrap();

if let Err(e) = &self.proc_macros {
crate_data.proc_macro_loading_error = Some(e.clone());
}

let mut process = true;

// Process other crate-level attributes.

@@ -608,11 +579,17 @@ impl DefCollector<'_> {
fn_id: FunctionId,
) {
let kind = def.kind.to_basedb_kind();
let (expander, kind) =
match self.proc_macros.as_ref().map(|it| it.iter().find(|(n, _)| n == &def.name)) {
Ok(Some(&(_, expander))) => (expander, kind),
_ => (CustomProcMacroExpander::dummy(), kind),
};
let (expander, kind) = match self.proc_macros.iter().find(|(n, _, _)| n == &def.name) {
Some(_)
if kind == hir_expand::proc_macro::ProcMacroKind::Attr
&& !self.db.expand_proc_attr_macros() =>
{
(CustomProcMacroExpander::disabled_proc_attr(), kind)
}
Some(&(_, _, true)) => (CustomProcMacroExpander::disabled(), kind),
Some(&(_, expander, false)) => (expander, kind),
None => (CustomProcMacroExpander::missing_expander(), kind),
};

let proc_macro_id = ProcMacroLoc {
container: self.def_map.crate_root(),

@@ -1415,25 +1392,23 @@ impl DefCollector<'_> {
return recollect_without(self);
}

let call_id = call_id();
if let MacroDefKind::ProcMacro(_, exp, _) = def.kind {
// If there's no expander for the proc macro (e.g.
// because proc macros are disabled, or building the
// proc macro crate failed), report this and skip
// expansion like we would if it was disabled
if exp.is_dummy() {
self.def_map.diagnostics.push(DefDiagnostic::unresolved_proc_macro(
if let Some(err) = exp.as_expand_error(def.krate) {
self.def_map.diagnostics.push(DefDiagnostic::macro_error(
directive.module_id,
self.db.lookup_intern_macro_call(call_id).kind,
def.krate,
ast_id,
(**path).clone(),
err,
));
return recollect_without(self);
}
if exp.is_disabled() {
return recollect_without(self);
}
}

let call_id = call_id();
self.def_map.modules[directive.module_id]
.scope
.add_attr_macro_invoc(ast_id, call_id);

@@ -1472,7 +1447,6 @@ impl DefCollector<'_> {
}
let file_id = macro_call_id.as_file();

// Then, fetch and process the item tree. This will reuse the expansion result from above.
let item_tree = self.db.file_item_tree(file_id);

let mod_dir = if macro_call_id.as_macro_file().is_include_macro(self.db.upcast()) {

@@ -2510,7 +2484,7 @@ mod tests {
unresolved_macros: Vec::new(),
mod_dirs: FxHashMap::default(),
cfg_options: &CfgOptions::default(),
proc_macros: Ok(vec![]),
proc_macros: Default::default(),
from_glob_import: Default::default(),
skip_attrs: Default::default(),
is_proc_macro: false,
@@ -2,9 +2,8 @@

use std::ops::Not;

use base_db::CrateId;
use cfg::{CfgExpr, CfgOptions};
use hir_expand::{attrs::AttrId, MacroCallKind};
use hir_expand::{attrs::AttrId, ExpandErrorKind, MacroCallKind};
use la_arena::Idx;
use syntax::ast;

@@ -17,48 +16,16 @@ use crate::{

#[derive(Debug, PartialEq, Eq)]
pub enum DefDiagnosticKind {
UnresolvedModule {
ast: AstId<ast::Module>,
candidates: Box<[String]>,
},
UnresolvedExternCrate {
ast: AstId<ast::ExternCrate>,
},
UnresolvedImport {
id: ItemTreeId<item_tree::Use>,
index: Idx<ast::UseTree>,
},
UnconfiguredCode {
tree: TreeId,
item: AttrOwner,
cfg: CfgExpr,
opts: CfgOptions,
},
/// A proc-macro that is lacking an expander, this might be due to build scripts not yet having
/// run or proc-macro expansion being disabled.
UnresolvedProcMacro {
ast: MacroCallKind,
krate: CrateId,
},
UnresolvedMacroCall {
ast: MacroCallKind,
path: ModPath,
},
UnimplementedBuiltinMacro {
ast: AstId<ast::Macro>,
},
InvalidDeriveTarget {
ast: AstId<ast::Item>,
id: usize,
},
MalformedDerive {
ast: AstId<ast::Adt>,
id: usize,
},
MacroDefError {
ast: AstId<ast::Macro>,
message: String,
},
UnresolvedModule { ast: AstId<ast::Module>, candidates: Box<[String]> },
UnresolvedExternCrate { ast: AstId<ast::ExternCrate> },
UnresolvedImport { id: ItemTreeId<item_tree::Use>, index: Idx<ast::UseTree> },
UnconfiguredCode { tree: TreeId, item: AttrOwner, cfg: CfgExpr, opts: CfgOptions },
UnresolvedMacroCall { ast: MacroCallKind, path: ModPath },
UnimplementedBuiltinMacro { ast: AstId<ast::Macro> },
InvalidDeriveTarget { ast: AstId<ast::Item>, id: usize },
MalformedDerive { ast: AstId<ast::Adt>, id: usize },
MacroDefError { ast: AstId<ast::Macro>, message: String },
MacroError { ast: AstId<ast::Item>, path: ModPath, err: ExpandErrorKind },
}

#[derive(Clone, Debug, PartialEq, Eq)]

@@ -115,6 +82,15 @@ impl DefDiagnostic {
Self { in_module: container, kind: DefDiagnosticKind::UnresolvedImport { id, index } }
}

pub fn macro_error(
container: LocalModuleId,
ast: AstId<ast::Item>,
path: ModPath,
err: ExpandErrorKind,
) -> Self {
Self { in_module: container, kind: DefDiagnosticKind::MacroError { ast, path, err } }
}

pub fn unconfigured_code(
container: LocalModuleId,
tree: TreeId,

@@ -128,14 +104,6 @@ impl DefDiagnostic {
}
}

pub fn unresolved_proc_macro(
container: LocalModuleId,
ast: MacroCallKind,
krate: CrateId,
) -> Self {
Self { in_module: container, kind: DefDiagnosticKind::UnresolvedProcMacro { ast, krate } }
}

// FIXME: Whats the difference between this and unresolved_proc_macro
pub(crate) fn unresolved_macro_call(
container: LocalModuleId,
crates/hir-expand/src/builtin.rs (new file, 15 additions)

@@ -0,0 +1,15 @@
//! Builtin macros and attributes
#[macro_use]
mod quote;

mod attr_macro;
mod derive_macro;
mod fn_macro;

pub use self::{
attr_macro::{find_builtin_attr, pseudo_derive_attr_expansion, BuiltinAttrExpander},
derive_macro::{find_builtin_derive, BuiltinDeriveExpander},
fn_macro::{
find_builtin_macro, include_input_to_file_id, BuiltinFnLikeExpander, EagerExpander,
},
};
@@ -9,18 +9,17 @@ use stdx::never;
use tracing::debug;

use crate::{
builtin::quote::{dollar_crate, quote},
db::ExpandDatabase,
hygiene::span_with_def_site_ctxt,
name::{AsName, Name},
quote::dollar_crate,
name::{self, AsName, Name},
span_map::ExpansionSpanMap,
tt,
tt, ExpandError, ExpandResult,
};
use syntax::ast::{
self, AstNode, FieldList, HasAttrs, HasGenericParams, HasModuleItem, HasName, HasTypeBounds,
};

use crate::{db::ExpandDatabase, name, quote, ExpandError, ExpandResult};

macro_rules! register_builtin {
( $($trait:ident => $expand:ident),* ) => {
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]

@@ -129,13 +128,17 @@ impl VariantShape {
}
}

fn from(tm: &ExpansionSpanMap, value: Option<FieldList>) -> Result<Self, ExpandError> {
fn from(
call_site: Span,
tm: &ExpansionSpanMap,
value: Option<FieldList>,
) -> Result<Self, ExpandError> {
let r = match value {
None => VariantShape::Unit,
Some(FieldList::RecordFieldList(it)) => VariantShape::Struct(
it.fields()
.map(|it| it.name())
.map(|it| name_to_token(tm, it))
.map(|it| name_to_token(call_site, tm, it))
.collect::<Result<_, _>>()?,
),
Some(FieldList::TupleFieldList(it)) => VariantShape::Tuple(it.fields().count()),

@@ -212,16 +215,17 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandEr
parser::Edition::CURRENT_FIXME,
);
let macro_items = ast::MacroItems::cast(parsed.syntax_node())
.ok_or_else(|| ExpandError::other("invalid item definition"))?;
let item = macro_items.items().next().ok_or_else(|| ExpandError::other("no item found"))?;
.ok_or_else(|| ExpandError::other(call_site, "invalid item definition"))?;
let item =
macro_items.items().next().ok_or_else(|| ExpandError::other(call_site, "no item found"))?;
let adt = &ast::Adt::cast(item.syntax().clone())
.ok_or_else(|| ExpandError::other("expected struct, enum or union"))?;
.ok_or_else(|| ExpandError::other(call_site, "expected struct, enum or union"))?;
let (name, generic_param_list, where_clause, shape) = match adt {
ast::Adt::Struct(it) => (
it.name(),
it.generic_param_list(),
it.where_clause(),
AdtShape::Struct(VariantShape::from(tm, it.field_list())?),
AdtShape::Struct(VariantShape::from(call_site, tm, it.field_list())?),
),
ast::Adt::Enum(it) => {
let default_variant = it

@@ -241,8 +245,8 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandEr
.flat_map(|it| it.variants())
.map(|it| {
Ok((
name_to_token(tm, it.name())?,
VariantShape::from(tm, it.field_list())?,
name_to_token(call_site, tm, it.name())?,
VariantShape::from(call_site, tm, it.field_list())?,
))
})
.collect::<Result<_, ExpandError>>()?,

@@ -357,17 +361,18 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandEr
)
})
.collect();
let name_token = name_to_token(tm, name)?;
let name_token = name_to_token(call_site, tm, name)?;
Ok(BasicAdtInfo { name: name_token, shape, param_types, where_clause, associated_types })
}

fn name_to_token(
call_site: Span,
token_map: &ExpansionSpanMap,
name: Option<ast::Name>,
) -> Result<tt::Ident, ExpandError> {
let name = name.ok_or_else(|| {
debug!("parsed item has no name");
ExpandError::other("missing name")
ExpandError::other(call_site, "missing name")
})?;
let span = token_map.span_at(name.syntax().text_range().start());
@@ -13,10 +13,10 @@ use syntax::{
};

use crate::{
builtin::quote::{dollar_crate, quote},
db::ExpandDatabase,
hygiene::{span_with_call_site_ctxt, span_with_def_site_ctxt},
name, quote,
quote::dollar_crate,
name,
tt::{self, DelimSpan},
ExpandError, ExpandResult, HirFileIdExt, Lookup as _, MacroCallId,
};

@@ -145,7 +145,7 @@ register_builtin! {
}

fn mk_pound(span: Span) -> tt::Subtree {
crate::quote::IntoTt::to_subtree(
crate::builtin::quote::IntoTt::to_subtree(
vec![crate::tt::Leaf::Punct(crate::tt::Punct {
char: '#',
spacing: crate::tt::Spacing::Alone,

@@ -463,12 +463,8 @@ fn compile_error_expand(
span: _,
kind: tt::LitKind::Str | tt::LitKind::StrRaw(_),
suffix: _,
}))] =>
// FIXME: Use the span here!
{
ExpandError::other(Box::from(unescape_str(text).as_str()))
}
_ => ExpandError::other("`compile_error!` argument must be a string"),
}))] => ExpandError::other(span, Box::from(unescape_str(text).as_str())),
_ => ExpandError::other(span, "`compile_error!` argument must be a string"),
};

ExpandResult { value: quote! {span =>}, err: Some(err) }

@@ -478,7 +474,7 @@ fn concat_expand(
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
tt: &tt::Subtree,
_: Span,
call_site: Span,
) -> ExpandResult<tt::Subtree> {
let mut err = None;
let mut text = String::new();

@@ -527,7 +523,9 @@ fn concat_expand(
| tt::LitKind::ByteStrRaw(_)
| tt::LitKind::CStr
| tt::LitKind::CStrRaw(_)
| tt::LitKind::Err(_) => err = Some(ExpandError::other("unexpected literal")),
| tt::LitKind::Err(_) => {
err = Some(ExpandError::other(it.span, "unexpected literal"))
}
}
}
// handle boolean literals

@@ -539,7 +537,7 @@ fn concat_expand(
}
tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (),
_ => {
err.get_or_insert(mbe::ExpandError::UnexpectedToken.into());
err.get_or_insert(ExpandError::other(call_site, "unexpected token"));
}
}
}

@@ -551,7 +549,7 @@ fn concat_bytes_expand(
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
tt: &tt::Subtree,
_: Span,
call_site: Span,
) -> ExpandResult<tt::Subtree> {
let mut bytes = String::new();
let mut err = None;

@@ -585,20 +583,22 @@ fn concat_bytes_expand(
bytes.extend(text.as_str().escape_debug());
}
_ => {
err.get_or_insert(mbe::ExpandError::UnexpectedToken.into());
err.get_or_insert(ExpandError::other(*span, "unexpected token"));
break;
}
}
}
tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (),
tt::TokenTree::Subtree(tree) if tree.delimiter.kind == tt::DelimiterKind::Bracket => {
if let Err(e) = concat_bytes_expand_subtree(tree, &mut bytes, &mut record_span) {
if let Err(e) =
concat_bytes_expand_subtree(tree, &mut bytes, &mut record_span, call_site)
{
err.get_or_insert(e);
break;
}
}
_ => {
err.get_or_insert(mbe::ExpandError::UnexpectedToken.into());
err.get_or_insert(ExpandError::other(call_site, "unexpected token"));
break;
}
}

@@ -623,6 +623,7 @@ fn concat_bytes_expand_subtree(
tree: &tt::Subtree,
bytes: &mut String,
mut record_span: impl FnMut(Span),
err_span: Span,
) -> Result<(), ExpandError> {
for (ti, tt) in tree.token_trees.iter().enumerate() {
match tt {

@@ -650,7 +651,7 @@ fn concat_bytes_expand_subtree(
}
tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if ti % 2 == 1 && punct.char == ',' => (),
_ => {
return Err(mbe::ExpandError::UnexpectedToken.into());
return Err(ExpandError::other(err_span, "unexpected token"));
}
}
}

@@ -672,7 +673,7 @@ fn concat_idents_expand(
}
tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (),
_ => {
err.get_or_insert(mbe::ExpandError::UnexpectedToken.into());
err.get_or_insert(ExpandError::other(span, "unexpected token"));
}
}
}

@@ -686,16 +687,17 @@ fn relative_file(
call_id: MacroCallId,
path_str: &str,
allow_recursion: bool,
err_span: Span,
) -> Result<EditionedFileId, ExpandError> {
let lookup = call_id.lookup(db);
let call_site = lookup.kind.file_id().original_file_respecting_includes(db).file_id();
let path = AnchoredPath { anchor: call_site, path: path_str };
let res = db
.resolve_path(path)
.ok_or_else(|| ExpandError::other(format!("failed to load file `{path_str}`")))?;
.ok_or_else(|| ExpandError::other(err_span, format!("failed to load file `{path_str}`")))?;
// Prevent include itself
if res == call_site && !allow_recursion {
Err(ExpandError::other(format!("recursive inclusion of `{path_str}`")))
Err(ExpandError::other(err_span, format!("recursive inclusion of `{path_str}`")))
} else {
Ok(EditionedFileId::new(res, db.crate_graph()[lookup.krate].edition))
}

@@ -704,18 +706,19 @@ fn relative_file(
fn parse_string(tt: &tt::Subtree) -> Result<(Symbol, Span), ExpandError> {
tt.token_trees
.first()
.ok_or(tt.delimiter.open.cover(tt.delimiter.close))
.and_then(|tt| match tt {
tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
symbol: text,
span,
kind: tt::LitKind::Str,
suffix: _,
})) => Some((unescape_str(text), *span)),
})) => Ok((unescape_str(text), *span)),
// FIXME: We wrap expression fragments in parentheses which can break this expectation
// here
// Remove this once we handle none delims correctly
tt::TokenTree::Subtree(t) if t.delimiter.kind == DelimiterKind::Parenthesis => {
t.token_trees.first().and_then(|tt| match tt {
tt::TokenTree::Subtree(tt) if tt.delimiter.kind == DelimiterKind::Parenthesis => {
tt.token_trees.first().and_then(|tt| match tt {
tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
symbol: text,
span,

@@ -725,9 +728,11 @@ fn parse_string(tt: &tt::Subtree) -> Result<(Symbol, Span), ExpandError> {
_ => None,
})
}
_ => None,
.ok_or(tt.delimiter.open.cover(tt.delimiter.close)),
::tt::TokenTree::Leaf(l) => Err(*l.span()),
::tt::TokenTree::Subtree(tt) => Err(tt.delimiter.open.cover(tt.delimiter.close)),
})
.ok_or(mbe::ExpandError::ConversionError.into())
.map_err(|span| ExpandError::other(span, "expected string literal"))
}

fn include_expand(

@@ -751,7 +756,7 @@ fn include_expand(
Some(it) => ExpandResult::ok(it),
None => ExpandResult::new(
tt::Subtree::empty(DelimSpan { open: span, close: span }),
ExpandError::other("failed to parse included file"),
ExpandError::other(span, "failed to parse included file"),
),
}
}

@@ -761,7 +766,8 @@ pub fn include_input_to_file_id(
arg_id: MacroCallId,
arg: &tt::Subtree,
) -> Result<EditionedFileId, ExpandError> {
relative_file(db, arg_id, parse_string(arg)?.0.as_str(), false)
let (s, span) = parse_string(arg)?;
relative_file(db, arg_id, s.as_str(), false, span)
}

fn include_bytes_expand(

@@ -800,7 +806,7 @@ fn include_str_expand(
// it's unusual to `include_str!` a Rust file), but we can return an empty string.
// Ideally, we'd be able to offer a precise expansion if the user asks for macro
// expansion.
let file_id = match relative_file(db, arg_id, path.as_str(), true) {
let file_id = match relative_file(db, arg_id, path.as_str(), true, span) {
Ok(file_id) => file_id,
Err(_) => {
return ExpandResult::ok(quote!(span =>""));

@@ -836,7 +842,10 @@ fn env_expand(
// The only variable rust-analyzer ever sets is `OUT_DIR`, so only diagnose that to avoid
// unnecessary diagnostics for eg. `CARGO_PKG_NAME`.
if key.as_str() == "OUT_DIR" {
err = Some(ExpandError::other(r#"`OUT_DIR` not set, enable "build scripts" to fix"#));
err = Some(ExpandError::other(
span,
r#"`OUT_DIR` not set, enable "build scripts" to fix"#,
));
}

// If the variable is unset, still return a dummy string to help type inference along.

@@ -885,7 +894,7 @@ fn quote_expand(
) -> ExpandResult<tt::Subtree> {
ExpandResult::new(
tt::Subtree::empty(tt::DelimSpan { open: span, close: span }),
ExpandError::other("quote! is not implemented"),
ExpandError::other(span, "quote! is not implemented"),
)
}
@@ -17,22 +17,21 @@ pub(crate) fn dollar_crate(span: Span) -> tt::Ident<Span> {
// 2. #()* pattern repetition not supported now
// * But we can do it manually, see `test_quote_derive_copy_hack`
#[doc(hidden)]
#[macro_export]
macro_rules! __quote {
macro_rules! quote_impl__ {
($span:ident) => {
Vec::<$crate::tt::TokenTree>::new()
};

( @SUBTREE($span:ident) $delim:ident $($tt:tt)* ) => {
{
let children = $crate::__quote!($span $($tt)*);
let children = $crate::builtin::quote::__quote!($span $($tt)*);
$crate::tt::Subtree {
delimiter: crate::tt::Delimiter {
kind: crate::tt::DelimiterKind::$delim,
open: $span,
close: $span,
},
token_trees: $crate::quote::IntoTt::to_tokens(children).into_boxed_slice(),
token_trees: $crate::builtin::quote::IntoTt::to_tokens(children).into_boxed_slice(),
}
}
};

@@ -69,9 +68,9 @@ macro_rules! __quote {
// hash variable
($span:ident # $first:ident $($tail:tt)* ) => {
{
let token = $crate::quote::ToTokenTree::to_token($first, $span);
let token = $crate::builtin::quote::ToTokenTree::to_token($first, $span);
let mut tokens = vec![token.into()];
let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $($tail)*));
let mut tail_tokens = $crate::builtin::quote::IntoTt::to_tokens($crate::builtin::quote::__quote!($span $($tail)*));
tokens.append(&mut tail_tokens);
tokens
}

@@ -79,22 +78,22 @@ macro_rules! __quote {

($span:ident ## $first:ident $($tail:tt)* ) => {
{
let mut tokens = $first.into_iter().map(|it| $crate::quote::ToTokenTree::to_token(it, $span)).collect::<Vec<crate::tt::TokenTree>>();
let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $($tail)*));
let mut tokens = $first.into_iter().map(|it| $crate::builtin::quote::ToTokenTree::to_token(it, $span)).collect::<Vec<crate::tt::TokenTree>>();
let mut tail_tokens = $crate::builtin::quote::IntoTt::to_tokens($crate::builtin::quote::__quote!($span $($tail)*));
tokens.append(&mut tail_tokens);
tokens
}
};

// Brace
($span:ident { $($tt:tt)* } ) => { $crate::__quote!(@SUBTREE($span) Brace $($tt)*) };
($span:ident { $($tt:tt)* } ) => { $crate::builtin::quote::__quote!(@SUBTREE($span) Brace $($tt)*) };
// Bracket
($span:ident [ $($tt:tt)* ] ) => { $crate::__quote!(@SUBTREE($span) Bracket $($tt)*) };
($span:ident [ $($tt:tt)* ] ) => { $crate::builtin::quote::__quote!(@SUBTREE($span) Bracket $($tt)*) };
// Parenthesis
($span:ident ( $($tt:tt)* ) ) => { $crate::__quote!(@SUBTREE($span) Parenthesis $($tt)*) };
($span:ident ( $($tt:tt)* ) ) => { $crate::builtin::quote::__quote!(@SUBTREE($span) Parenthesis $($tt)*) };

// Literal
($span:ident $tt:literal ) => { vec![$crate::quote::ToTokenTree::to_token($tt, $span).into()] };
($span:ident $tt:literal ) => { vec![$crate::builtin::quote::ToTokenTree::to_token($tt, $span).into()] };
// Ident
($span:ident $tt:ident ) => {
vec![ {

@@ -108,36 +107,37 @@ macro_rules! __quote {

// Puncts
// FIXME: Not all puncts are handled
($span:ident -> ) => {$crate::__quote!(@PUNCT($span) '-', '>')};
($span:ident & ) => {$crate::__quote!(@PUNCT($span) '&')};
($span:ident , ) => {$crate::__quote!(@PUNCT($span) ',')};
($span:ident : ) => {$crate::__quote!(@PUNCT($span) ':')};
($span:ident ; ) => {$crate::__quote!(@PUNCT($span) ';')};
($span:ident :: ) => {$crate::__quote!(@PUNCT($span) ':', ':')};
($span:ident . ) => {$crate::__quote!(@PUNCT($span) '.')};
($span:ident < ) => {$crate::__quote!(@PUNCT($span) '<')};
($span:ident > ) => {$crate::__quote!(@PUNCT($span) '>')};
($span:ident ! ) => {$crate::__quote!(@PUNCT($span) '!')};
($span:ident -> ) => {$crate::builtin::quote::__quote!(@PUNCT($span) '-', '>')};
($span:ident & ) => {$crate::builtin::quote::__quote!(@PUNCT($span) '&')};
($span:ident , ) => {$crate::builtin::quote::__quote!(@PUNCT($span) ',')};
($span:ident : ) => {$crate::builtin::quote::__quote!(@PUNCT($span) ':')};
($span:ident ; ) => {$crate::builtin::quote::__quote!(@PUNCT($span) ';')};
($span:ident :: ) => {$crate::builtin::quote::__quote!(@PUNCT($span) ':', ':')};
($span:ident . ) => {$crate::builtin::quote::__quote!(@PUNCT($span) '.')};
($span:ident < ) => {$crate::builtin::quote::__quote!(@PUNCT($span) '<')};
($span:ident > ) => {$crate::builtin::quote::__quote!(@PUNCT($span) '>')};
($span:ident ! ) => {$crate::builtin::quote::__quote!(@PUNCT($span) '!')};

($span:ident $first:tt $($tail:tt)+ ) => {
{
let mut tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $first ));
let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $($tail)*));
let mut tokens = $crate::builtin::quote::IntoTt::to_tokens($crate::builtin::quote::__quote!($span $first ));
let mut tail_tokens = $crate::builtin::quote::IntoTt::to_tokens($crate::builtin::quote::__quote!($span $($tail)*));

tokens.append(&mut tail_tokens);
tokens
}
};
}
pub(super) use quote_impl__ as __quote;

/// FIXME:
/// It probably should implement in proc-macro
#[macro_export]
macro_rules! quote {
macro_rules! quote_impl {
($span:ident=> $($tt:tt)* ) => {
$crate::quote::IntoTt::to_subtree($crate::__quote!($span $($tt)*), $span)
$crate::builtin::quote::IntoTt::to_subtree($crate::builtin::quote::__quote!($span $($tt)*), $span)
}
}
pub(super) use quote_impl as quote;

pub(crate) trait IntoTt {
fn to_subtree(self, span: Span) -> crate::tt::Subtree;

@@ -232,6 +232,8 @@ mod tests {
use span::{SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID};
use syntax::{TextRange, TextSize};

use super::quote;

const DUMMY: tt::Span = tt::Span {
range: TextRange::empty(TextSize::new(0)),
anchor: SpanAnchor {
@@ -25,8 +25,7 @@ impl ChangeWithProcMacros {

pub fn apply(self, db: &mut (impl ExpandDatabase + SourceDatabaseExt)) {
self.source_change.apply(db);
if let Some(mut proc_macros) = self.proc_macros {
proc_macros.shrink_to_fit();
if let Some(proc_macros) = self.proc_macros {
db.set_proc_macros_with_durability(Arc::new(proc_macros), Durability::HIGH);
}
if let Some(target_data_layouts) = self.target_data_layouts {

@@ -11,8 +11,7 @@ use triomphe::Arc;

use crate::{
attrs::{collect_attrs, AttrId},
builtin_attr_macro::pseudo_derive_attr_expansion,
builtin_fn_macro::EagerExpander,
builtin::pseudo_derive_attr_expansion,
cfg_process,
declarative::DeclarativeMacroExpander,
fixup::{self, SyntaxFixupUndoInfo},

@@ -20,9 +19,9 @@ use crate::{
proc_macro::ProcMacros,
span_map::{RealSpanMap, SpanMap, SpanMapRef},
tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander,
CustomProcMacroExpander, EagerCallInfo, ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap,
HirFileId, HirFileIdRepr, Lookup, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId,
MacroDefKind, MacroFileId,
CustomProcMacroExpander, EagerCallInfo, EagerExpander, ExpandError, ExpandResult, ExpandTo,
ExpansionSpanMap, HirFileId, HirFileIdRepr, Lookup, MacroCallId, MacroCallKind, MacroCallLoc,
MacroDefId, MacroDefKind, MacroFileId,
};
/// This is just to ensure the types of smart_macro_arg and macro_arg are the same
type MacroArgResult = (Arc<tt::Subtree>, SyntaxFixupUndoInfo, Span);

@@ -260,39 +259,38 @@ pub fn expand_speculative(

// Do the actual expansion, we need to directly expand the proc macro due to the attribute args
// Otherwise the expand query will fetch the non speculative attribute args and pass those instead.
let mut speculative_expansion =
match loc.def.kind {
MacroDefKind::ProcMacro(ast, expander, _) => {
let span = db.proc_macro_span(ast);
tt.delimiter = tt::Delimiter::invisible_spanned(span);
expander.expand(
db,
loc.def.krate,
loc.krate,
&tt,
attr_arg.as_ref(),
span_with_def_site_ctxt(db, span, actual_macro_call),
span_with_call_site_ctxt(db, span, actual_macro_call),
span_with_mixed_site_ctxt(db, span, actual_macro_call),
)
}
MacroDefKind::BuiltInAttr(_, it) if it.is_derive() => {
pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?, span)
}
MacroDefKind::Declarative(it) => db
.decl_macro_expander(loc.krate, it)
.expand_unhygienic(db, tt, loc.def.krate, span, loc.def.edition),
MacroDefKind::BuiltIn(_, it) => {
it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
}
MacroDefKind::BuiltInDerive(_, it) => {
it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
}
MacroDefKind::BuiltInEager(_, it) => {
it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
}
MacroDefKind::BuiltInAttr(_, it) => it.expand(db, actual_macro_call, &tt, span),
};
let mut speculative_expansion = match loc.def.kind {
MacroDefKind::ProcMacro(ast, expander, _) => {
let span = db.proc_macro_span(ast);
tt.delimiter = tt::Delimiter::invisible_spanned(span);
expander.expand(
db,
loc.def.krate,
loc.krate,
&tt,
attr_arg.as_ref(),
span_with_def_site_ctxt(db, span, actual_macro_call),
span_with_call_site_ctxt(db, span, actual_macro_call),
span_with_mixed_site_ctxt(db, span, actual_macro_call),
)
}
MacroDefKind::BuiltInAttr(_, it) if it.is_derive() => {
pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?, span)
}
MacroDefKind::Declarative(it) => {
db.decl_macro_expander(loc.krate, it).expand_unhygienic(tt, span, loc.def.edition)
}
MacroDefKind::BuiltIn(_, it) => {
it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
}
MacroDefKind::BuiltInDerive(_, it) => {
it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
}
MacroDefKind::BuiltInEager(_, it) => {
it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
}
MacroDefKind::BuiltInAttr(_, it) => it.expand(db, actual_macro_call, &tt, span),
};

let expand_to = loc.expand_to();

@@ -736,11 +734,14 @@ fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<()>> {
if TOKEN_LIMIT.check(count).is_err() {
Err(ExpandResult {
value: (),
err: Some(ExpandError::other(format!(
"macro invocation exceeds token limit: produced {} tokens, limit is {}",
count,
TOKEN_LIMIT.inner(),
))),
err: Some(ExpandError::other(
tt.delimiter.open,
format!(
"macro invocation exceeds token limit: produced {} tokens, limit is {}",
count,
TOKEN_LIMIT.inner(),
),
)),
})
} else {
Ok(())
@@ -1,7 +1,6 @@
//! Compiled declarative macro expanders (`macro_rules!`` and `macro`)
use std::sync::OnceLock;

use base_db::{CrateId, VersionReq};
use base_db::CrateId;
use intern::sym;
use mbe::DocCommentDesugarMode;
use span::{Edition, MacroCallId, Span, SyntaxContextId};

@@ -13,7 +12,7 @@ use crate::{
attrs::RawAttrs,
db::ExpandDatabase,
hygiene::{apply_mark, Transparency},
tt, AstId, ExpandError, ExpandResult, Lookup,
tt, AstId, ExpandError, ExpandErrorKind, ExpandResult, Lookup,
};

/// Old-style `macro_rules` or the new macros 2.0

@@ -23,9 +22,6 @@ pub struct DeclarativeMacroExpander {
pub transparency: Transparency,
}

// FIXME: Remove this once we drop support for 1.76
static REQUIREMENT: OnceLock<VersionReq> = OnceLock::new();

impl DeclarativeMacroExpander {
pub fn expand(
&self,

@@ -35,29 +31,16 @@ impl DeclarativeMacroExpander {
span: Span,
) -> ExpandResult<(tt::Subtree, Option<u32>)> {
let loc = db.lookup_intern_macro_call(call_id);
let toolchain = db.toolchain(loc.def.krate);
let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(
&base_db::Version {
pre: base_db::Prerelease::EMPTY,
build: base_db::BuildMetadata::EMPTY,
major: version.major,
minor: version.minor,
patch: version.patch,
},
)
});
match self.mac.err() {
Some(_) => ExpandResult::new(
(tt::Subtree::empty(tt::DelimSpan { open: span, close: span }), None),
ExpandError::MacroDefinition,
ExpandError::new(span, ExpandErrorKind::MacroDefinition),
),
None => self
.mac
.expand(
&tt,
|s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency),
new_meta_vars,
span,
loc.def.edition,
)

@@ -67,32 +50,18 @@ impl DeclarativeMacroExpander {

pub fn expand_unhygienic(
&self,
db: &dyn ExpandDatabase,
tt: tt::Subtree,
krate: CrateId,
call_site: Span,
def_site_edition: Edition,
) -> ExpandResult<tt::Subtree> {
let toolchain = db.toolchain(krate);
let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(
&base_db::Version {
pre: base_db::Prerelease::EMPTY,
build: base_db::BuildMetadata::EMPTY,
major: version.major,
minor: version.minor,
patch: version.patch,
},
)
});
match self.mac.err() {
Some(_) => ExpandResult::new(
tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
ExpandError::MacroDefinition,
ExpandError::new(call_site, ExpandErrorKind::MacroDefinition),
),
None => self
.mac
.expand(&tt, |_| (), new_meta_vars, call_site, def_site_edition)
.expand(&tt, |_| (), call_site, def_site_edition)
.map(TupleExt::head)
.map_err(Into::into),
}

@@ -176,14 +176,19 @@ fn eager_macro_recur(
Some(path) => match macro_resolver(&path) {
Some(def) => def,
None => {
error =
Some(ExpandError::other(format!("unresolved macro {}", path.display(db))));
error = Some(ExpandError::other(
span_map.span_at(call.syntax().text_range().start()),
format!("unresolved macro {}", path.display(db)),
));
offset += call.syntax().text_range().len();
continue;
}
},
None => {
error = Some(ExpandError::other("malformed macro invocation"));
error = Some(ExpandError::other(
span_map.span_at(call.syntax().text_range().start()),
"malformed macro invocation",
));
offset += call.syntax().text_range().len();
continue;
}
@@ -6,9 +6,7 @@
#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]

pub mod attrs;
pub mod builtin_attr_macro;
pub mod builtin_derive_macro;
pub mod builtin_fn_macro;
pub mod builtin;
pub mod change;
pub mod db;
pub mod declarative;

@@ -19,7 +17,6 @@ pub mod inert_attr_macro;
pub mod mod_path;
pub mod name;
pub mod proc_macro;
pub mod quote;
pub mod span_map;

mod cfg_process;

@@ -29,7 +26,7 @@ use attrs::collect_attrs;
use rustc_hash::FxHashMap;
use triomphe::Arc;

use std::{fmt, hash::Hash};
use std::hash::Hash;

use base_db::{salsa::InternValueTrivial, CrateId};
use either::Either;

@@ -44,9 +41,10 @@ use syntax::{

use crate::{
attrs::AttrId,
builtin_attr_macro::BuiltinAttrExpander,
builtin_derive_macro::BuiltinDeriveExpander,
builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander},
builtin::{
include_input_to_file_id, BuiltinAttrExpander, BuiltinDeriveExpander,
BuiltinFnLikeExpander, EagerExpander,
},
db::ExpandDatabase,
mod_path::ModPath,
proc_macro::{CustomProcMacroExpander, ProcMacroKind},

@@ -126,46 +124,79 @@ impl_intern_lookup!(
pub type ExpandResult<T> = ValueResult<T, ExpandError>;

#[derive(Debug, PartialEq, Eq, Clone, Hash)]
pub enum ExpandError {
UnresolvedProcMacro(CrateId),
/// The macro expansion is disabled.
MacroDisabled,
MacroDefinition,
Mbe(mbe::ExpandError),
RecursionOverflow,
Other(Arc<Box<str>>),
ProcMacroPanic(Arc<Box<str>>),
pub struct ExpandError {
inner: Arc<(ExpandErrorKind, Span)>,
}

impl ExpandError {
pub fn other(msg: impl Into<Box<str>>) -> Self {
ExpandError::Other(Arc::new(msg.into()))
pub fn new(span: Span, kind: ExpandErrorKind) -> Self {
ExpandError { inner: Arc::new((kind, span)) }
}
pub fn other(span: Span, msg: impl Into<Box<str>>) -> Self {
ExpandError { inner: Arc::new((ExpandErrorKind::Other(msg.into()), span)) }
}
pub fn kind(&self) -> &ExpandErrorKind {
&self.inner.0
}
pub fn span(&self) -> Span {
self.inner.1
}
}

#[derive(Debug, PartialEq, Eq, Clone, Hash)]
pub enum ExpandErrorKind {
/// Attribute macro expansion is disabled.
ProcMacroAttrExpansionDisabled,
MissingProcMacroExpander(CrateId),
/// The macro for this call is disabled.
MacroDisabled,
/// The macro definition has errors.
MacroDefinition,
Mbe(mbe::ExpandErrorKind),
RecursionOverflow,
Other(Box<str>),
ProcMacroPanic(Box<str>),
}

impl ExpandError {
pub fn render_to_string(&self, db: &dyn ExpandDatabase) -> (String, bool) {
self.inner.0.render_to_string(db)
}
}

impl ExpandErrorKind {
pub fn render_to_string(&self, db: &dyn ExpandDatabase) -> (String, bool) {
match self {
ExpandErrorKind::ProcMacroAttrExpansionDisabled => {
("procedural attribute macro expansion is disabled".to_owned(), false)
}
ExpandErrorKind::MacroDisabled => {
("proc-macro is explicitly disabled".to_owned(), false)
}
&ExpandErrorKind::MissingProcMacroExpander(def_crate) => {
match db.proc_macros().get_error_for_crate(def_crate) {
Some((e, hard_err)) => (e.to_owned(), hard_err),
None => ("missing expander".to_owned(), true),
}
}
ExpandErrorKind::MacroDefinition => {
("macro definition has parse errors".to_owned(), true)
}
ExpandErrorKind::Mbe(e) => (e.to_string(), true),
ExpandErrorKind::RecursionOverflow => {
("overflow expanding the original macro".to_owned(), true)
}
ExpandErrorKind::Other(e) => ((**e).to_owned(), true),
ExpandErrorKind::ProcMacroPanic(e) => ((**e).to_owned(), true),
}
}
}

impl From<mbe::ExpandError> for ExpandError {
fn from(mbe: mbe::ExpandError) -> Self {
Self::Mbe(mbe)
ExpandError { inner: Arc::new((ExpandErrorKind::Mbe(mbe.inner.1.clone()), mbe.inner.0)) }
}
}

impl fmt::Display for ExpandError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
ExpandError::UnresolvedProcMacro(_) => f.write_str("unresolved proc-macro"),
ExpandError::Mbe(it) => it.fmt(f),
ExpandError::RecursionOverflow => f.write_str("overflow expanding the original macro"),
ExpandError::ProcMacroPanic(it) => {
f.write_str("proc-macro panicked: ")?;
f.write_str(it)
}
ExpandError::Other(it) => f.write_str(it),
ExpandError::MacroDisabled => f.write_str("macro disabled"),
ExpandError::MacroDefinition => f.write_str("macro definition has parse errors"),
}
}
}

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct MacroCallLoc {
pub def: MacroDefId,

@@ -277,11 +308,9 @@ impl HirFileIdExt for HirFileId {
let loc = db.lookup_intern_macro_call(file.macro_call_id);
if loc.def.is_include() {
if let MacroCallKind::FnLike { eager: Some(eager), .. } = &loc.kind {
if let Ok(it) = builtin_fn_macro::include_input_to_file_id(
db,
file.macro_call_id,
&eager.arg,
) {
if let Ok(it) =
include_input_to_file_id(db, file.macro_call_id, &eager.arg)
{
break it;
}
}

@@ -572,9 +601,7 @@ impl MacroCallLoc {
) -> Option<EditionedFileId> {
if self.def.is_include() {
if let MacroCallKind::FnLike { eager: Some(eager), .. } = &self.kind {
if let Ok(it) =
builtin_fn_macro::include_input_to_file_id(db, macro_call_id, &eager.arg)
{
if let Ok(it) = include_input_to_file_id(db, macro_call_id, &eager.arg) {
return Some(it);
}
}
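The final file section reworks `CustomProcMacroExpander` into a plain `u32` handle whose special states (missing expander, disabled macro, attribute expansion disabled) are reserved sentinel ids rather than enum variants. A small stand-alone sketch of that pattern follows; the declarations are stand-ins and only the constructors visible in the diff are reproduced.

```rust
// Stand-alone sketch: special expander states are encoded as reserved
// sentinel ids on a plain `u32` handle, keeping the handle `Copy` and cheap.
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
pub struct CustomProcMacroExpander {
    proc_macro_id: u32,
}

impl CustomProcMacroExpander {
    const MISSING_EXPANDER: u32 = !0;
    const DISABLED_ID: u32 = !1;
    const PROC_MACRO_ATTR_DISABLED: u32 = !2;

    pub fn new(proc_macro_id: u32) -> Self {
        // Real ids must never collide with the sentinels.
        assert_ne!(proc_macro_id, Self::MISSING_EXPANDER);
        assert_ne!(proc_macro_id, Self::DISABLED_ID);
        assert_ne!(proc_macro_id, Self::PROC_MACRO_ATTR_DISABLED);
        Self { proc_macro_id }
    }

    /// The proc-macro crate has no loaded expander (e.g. not built yet).
    pub const fn missing_expander() -> Self {
        Self { proc_macro_id: Self::MISSING_EXPANDER }
    }

    /// The proc-macro is explicitly disabled.
    pub const fn disabled() -> Self {
        Self { proc_macro_id: Self::DISABLED_ID }
    }

    pub const fn is_disabled(&self) -> bool {
        self.proc_macro_id == Self::DISABLED_ID
    }
}

fn main() {
    let real = CustomProcMacroExpander::new(0);
    assert!(!real.is_disabled());
    assert!(CustomProcMacroExpander::disabled().is_disabled());
    assert_eq!(CustomProcMacroExpander::missing_expander().proc_macro_id, u32::MAX);
}
```

Presumably the point of the sentinel encoding is that the handle stays a word-sized `Copy` value stored directly in `MacroDefKind::ProcMacro`, while each sentinel can still be translated into a specific `ExpandErrorKind` (as `as_expand_error` does in the hunks above).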
@@ -7,19 +7,8 @@ use base_db::{CrateId, Env};
use intern::Symbol;
use rustc_hash::FxHashMap;
use span::Span;
use stdx::never;
use triomphe::Arc;

use crate::{db::ExpandDatabase, tt, ExpandError, ExpandResult};

#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct ProcMacroId(u32);

impl ProcMacroId {
pub fn new(u32: u32) -> Self {
ProcMacroId(u32)
}
}
use crate::{db::ExpandDatabase, tt, ExpandError, ExpandErrorKind, ExpandResult};

#[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)]
pub enum ProcMacroKind {

@@ -28,7 +17,10 @@ pub enum ProcMacroKind {
Attr,
}

/// A proc-macro expander implementation.
pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe {
/// Run the expander with the given input subtree, optional attribute input subtree (for
/// [`ProcMacroKind::Attr`]), environment variables, and span information.
fn expand(
&self,
subtree: &tt::Subtree,

@@ -42,57 +34,165 @@ pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe {

#[derive(Debug)]
pub enum ProcMacroExpansionError {
/// The proc-macro panicked.
Panic(String),
/// Things like "proc macro server was killed by OOM".
/// The server itself errored out.
System(String),
}

pub type ProcMacroLoadResult = Result<Vec<ProcMacro>, String>;
pub type ProcMacroLoadResult = Result<Vec<ProcMacro>, (String, bool)>;
type StoredProcMacroLoadResult = Result<Box<[ProcMacro]>, (Box<str>, bool)>;

pub type ProcMacros = FxHashMap<CrateId, ProcMacroLoadResult>;
#[derive(Default, Debug)]
pub struct ProcMacrosBuilder(FxHashMap<CrateId, StoredProcMacroLoadResult>);
impl ProcMacrosBuilder {
pub fn insert(&mut self, proc_macros_crate: CrateId, proc_macro: ProcMacroLoadResult) {
self.0.insert(
proc_macros_crate,
match proc_macro {
Ok(it) => Ok(it.into_boxed_slice()),
Err((e, hard_err)) => Err((e.into_boxed_str(), hard_err)),
},
);
}
pub fn build(mut self) -> ProcMacros {
self.0.shrink_to_fit();
ProcMacros(self.0)
}
}

#[derive(Default, Debug)]
pub struct ProcMacros(FxHashMap<CrateId, StoredProcMacroLoadResult>);

impl FromIterator<(CrateId, ProcMacroLoadResult)> for ProcMacros {
fn from_iter<T: IntoIterator<Item = (CrateId, ProcMacroLoadResult)>>(iter: T) -> Self {
let mut builder = ProcMacrosBuilder::default();
for (k, v) in iter {
builder.insert(k, v);
}
builder.build()
}
}

impl ProcMacros {
fn get(&self, krate: CrateId, idx: u32, err_span: Span) -> Result<&ProcMacro, ExpandError> {
let proc_macros = match self.0.get(&krate) {
Some(Ok(proc_macros)) => proc_macros,
Some(Err(_)) | None => {
return Err(ExpandError::other(
err_span,
"internal error: no proc macros for crate",
));
}
};
proc_macros.get(idx as usize).ok_or_else(|| {
ExpandError::other(err_span,
format!(
"internal error: proc-macro index out of bounds: the length is {} but the index is {}",
proc_macros.len(),
idx
)
)
}
)
}

pub fn get_error_for_crate(&self, krate: CrateId) -> Option<(&str, bool)> {
self.0.get(&krate).and_then(|it| it.as_ref().err()).map(|(e, hard_err)| (&**e, *hard_err))
}

/// Fetch the [`CustomProcMacroExpander`]s and their corresponding names for the given crate.
pub fn for_crate(
&self,
krate: CrateId,
def_site_ctx: span::SyntaxContextId,
) -> Option<Box<[(crate::name::Name, CustomProcMacroExpander, bool)]>> {
match self.0.get(&krate) {
Some(Ok(proc_macros)) => Some({
proc_macros
.iter()
.enumerate()
.map(|(idx, it)| {
let name = crate::name::Name::new_symbol(it.name.clone(), def_site_ctx);
(name, CustomProcMacroExpander::new(idx as u32), it.disabled)
})
.collect()
}),
_ => None,
}
}
}

/// A loaded proc-macro.
#[derive(Debug, Clone)]
pub struct ProcMacro {
/// The name of the proc macro.
pub name: Symbol,
pub kind: ProcMacroKind,
/// The expander handle for this proc macro.
pub expander: sync::Arc<dyn ProcMacroExpander>,
/// Whether this proc-macro is disabled for early name resolution. Notably, the
/// [`Self::expander`] is still usable.
pub disabled: bool,
}

/// A custom proc-macro expander handle. This handle together with its crate resolves to a [`ProcMacro`]
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
pub struct CustomProcMacroExpander {
proc_macro_id: ProcMacroId,
proc_macro_id: u32,
}

impl CustomProcMacroExpander {
const DUMMY_ID: u32 = !0;
const MISSING_EXPANDER: u32 = !0;
const DISABLED_ID: u32 = !1;
const PROC_MACRO_ATTR_DISABLED: u32 = !2;

pub fn new(proc_macro_id: ProcMacroId) -> Self {
assert_ne!(proc_macro_id.0, Self::DUMMY_ID);
assert_ne!(proc_macro_id.0, Self::DISABLED_ID);
pub fn new(proc_macro_id: u32) -> Self {
assert_ne!(proc_macro_id, Self::MISSING_EXPANDER);
assert_ne!(proc_macro_id, Self::DISABLED_ID);
assert_ne!(proc_macro_id, Self::PROC_MACRO_ATTR_DISABLED);
Self { proc_macro_id }
}

/// A dummy expander that always errors. This is used for proc-macros that are missing, usually
/// due to them not being built yet.
pub const fn dummy() -> Self {
Self { proc_macro_id: ProcMacroId(Self::DUMMY_ID) }
}

/// The macro was not yet resolved.
pub const fn is_dummy(&self) -> bool {
self.proc_macro_id.0 == Self::DUMMY_ID
/// An expander that always errors due to the actual proc-macro expander missing.
|
||||
pub const fn missing_expander() -> Self {
|
||||
Self { proc_macro_id: Self::MISSING_EXPANDER }
|
||||
}
|
||||
|
||||
/// A dummy expander that always errors. This expander is used for macros that have been disabled.
|
||||
pub const fn disabled() -> Self {
|
||||
Self { proc_macro_id: ProcMacroId(Self::DISABLED_ID) }
|
||||
Self { proc_macro_id: Self::DISABLED_ID }
|
||||
}
|
||||
|
||||
/// A dummy expander that always errors. This expander is used for attribute macros when
|
||||
/// proc-macro attribute expansion is disabled.
|
||||
pub const fn disabled_proc_attr() -> Self {
|
||||
Self { proc_macro_id: Self::PROC_MACRO_ATTR_DISABLED }
|
||||
}
|
||||
|
||||
/// The macro-expander is missing or has yet to be built.
|
||||
pub const fn is_missing(&self) -> bool {
|
||||
self.proc_macro_id == Self::MISSING_EXPANDER
|
||||
}
|
||||
|
||||
/// The macro is explicitly disabled and cannot be expanded.
|
||||
pub const fn is_disabled(&self) -> bool {
|
||||
self.proc_macro_id.0 == Self::DISABLED_ID
|
||||
self.proc_macro_id == Self::DISABLED_ID
|
||||
}
|
||||
|
||||
/// The macro is explicitly disabled due to proc-macro attribute expansion being disabled.
|
||||
pub const fn is_disabled_proc_attr(&self) -> bool {
|
||||
self.proc_macro_id == Self::PROC_MACRO_ATTR_DISABLED
|
||||
}
|
||||
|
||||
/// Returns the corresponding expansion error if this is one of the dummy (missing or disabled) expanders.
|
||||
pub fn as_expand_error(&self, def_crate: CrateId) -> Option<ExpandErrorKind> {
|
||||
match self.proc_macro_id {
|
||||
Self::PROC_MACRO_ATTR_DISABLED => Some(ExpandErrorKind::ProcMacroAttrExpansionDisabled),
|
||||
Self::DISABLED_ID => Some(ExpandErrorKind::MacroDisabled),
|
||||
Self::MISSING_EXPANDER => Some(ExpandErrorKind::MissingProcMacroExpander(def_crate)),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn expand(
|
||||
|
@ -107,38 +207,27 @@ impl CustomProcMacroExpander {
|
|||
mixed_site: Span,
|
||||
) -> ExpandResult<tt::Subtree> {
|
||||
match self.proc_macro_id {
|
||||
ProcMacroId(Self::DUMMY_ID) => ExpandResult::new(
|
||||
Self::PROC_MACRO_ATTR_DISABLED => ExpandResult::new(
|
||||
tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
|
||||
ExpandError::UnresolvedProcMacro(def_crate),
|
||||
ExpandError::new(call_site, ExpandErrorKind::ProcMacroAttrExpansionDisabled),
|
||||
),
|
||||
ProcMacroId(Self::DISABLED_ID) => ExpandResult::new(
|
||||
Self::MISSING_EXPANDER => ExpandResult::new(
|
||||
tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
|
||||
ExpandError::MacroDisabled,
|
||||
ExpandError::new(call_site, ExpandErrorKind::MissingProcMacroExpander(def_crate)),
|
||||
),
|
||||
ProcMacroId(id) => {
|
||||
Self::DISABLED_ID => ExpandResult::new(
|
||||
tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
|
||||
ExpandError::new(call_site, ExpandErrorKind::MacroDisabled),
|
||||
),
|
||||
id => {
|
||||
let proc_macros = db.proc_macros();
|
||||
let proc_macros = match proc_macros.get(&def_crate) {
|
||||
Some(Ok(proc_macros)) => proc_macros,
|
||||
Some(Err(_)) | None => {
|
||||
never!("Non-dummy expander even though there are no proc macros");
|
||||
let proc_macro = match proc_macros.get(def_crate, id, call_site) {
|
||||
Ok(proc_macro) => proc_macro,
|
||||
Err(e) => {
|
||||
return ExpandResult::new(
|
||||
tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
|
||||
ExpandError::other("Internal error"),
|
||||
);
|
||||
}
|
||||
};
|
||||
let proc_macro = match proc_macros.get(id as usize) {
|
||||
Some(proc_macro) => proc_macro,
|
||||
None => {
|
||||
never!(
|
||||
"Proc macro index out of bounds: the length is {} but the index is {}",
|
||||
proc_macros.len(),
|
||||
id
|
||||
);
|
||||
return ExpandResult::new(
|
||||
tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
|
||||
ExpandError::other("Internal error: proc-macro index out of bounds"),
|
||||
);
|
||||
e,
|
||||
)
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -153,12 +242,18 @@ impl CustomProcMacroExpander {
|
|||
ProcMacroExpansionError::System(text)
|
||||
if proc_macro.kind == ProcMacroKind::Attr =>
|
||||
{
|
||||
ExpandResult { value: tt.clone(), err: Some(ExpandError::other(text)) }
|
||||
ExpandResult {
|
||||
value: tt.clone(),
|
||||
err: Some(ExpandError::other(call_site, text)),
|
||||
}
|
||||
}
|
||||
ProcMacroExpansionError::System(text)
|
||||
| ProcMacroExpansionError::Panic(text) => ExpandResult::new(
|
||||
tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
|
||||
ExpandError::ProcMacroPanic(Arc::new(text.into_boxed_str())),
|
||||
ExpandError::new(
|
||||
call_site,
|
||||
ExpandErrorKind::ProcMacroPanic(text.into_boxed_str()),
|
||||
),
|
||||
),
|
||||
},
|
||||
}
|
||||
|
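The reworked `CustomProcMacroExpander` drops the `ProcMacroId` wrapper in favour of a bare `u32` whose top values are reserved as sentinels for the missing/disabled states, and `as_expand_error` turns those sentinels into error kinds. A minimal sketch of that sentinel-value pattern (names simplified; not the crate's actual types):

```rust
/// The error kinds the sentinel states map to (simplified).
#[derive(Debug, PartialEq)]
enum ErrorKind {
    MissingExpander,
    MacroDisabled,
    AttrExpansionDisabled,
}

#[derive(Clone, Copy)]
struct Expander(u32);

impl Expander {
    // The highest u32 values are reserved; real indices stay below them.
    const MISSING: u32 = !0; // 0xFFFF_FFFF
    const DISABLED: u32 = !1; // 0xFFFF_FFFE
    const ATTR_DISABLED: u32 = !2; // 0xFFFF_FFFD

    fn new(idx: u32) -> Self {
        assert!(idx < Self::ATTR_DISABLED, "index collides with a sentinel");
        Expander(idx)
    }

    /// Mirrors `as_expand_error`: sentinels become errors, real indices do not.
    fn as_error(self) -> Option<ErrorKind> {
        match self.0 {
            Self::MISSING => Some(ErrorKind::MissingExpander),
            Self::DISABLED => Some(ErrorKind::MacroDisabled),
            Self::ATTR_DISABLED => Some(ErrorKind::AttrExpansionDisabled),
            _ => None,
        }
    }
}

fn main() {
    assert_eq!(Expander(Expander::DISABLED).as_error(), Some(ErrorKind::MacroDisabled));
    assert_eq!(Expander::new(3).as_error(), None);
}
```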
|
|
@ -6,7 +6,6 @@
|
|||
pub use hir_ty::diagnostics::{CaseType, IncorrectCase};
|
||||
use hir_ty::{db::HirDatabase, diagnostics::BodyValidationDiagnostic, InferenceDiagnostic};
|
||||
|
||||
use base_db::CrateId;
|
||||
use cfg::{CfgExpr, CfgOptions};
|
||||
use either::Either;
|
||||
pub use hir_def::VariantId;
|
||||
|
@ -15,7 +14,7 @@ use hir_expand::{name::Name, HirFileId, InFile};
|
|||
use syntax::{ast, AstPtr, SyntaxError, SyntaxNodePtr, TextRange};
|
||||
use triomphe::Arc;
|
||||
|
||||
use crate::{AssocItem, Field, Local, MacroKind, Trait, Type};
|
||||
use crate::{AssocItem, Field, Local, Trait, Type};
|
||||
|
||||
macro_rules! diagnostics {
|
||||
($($diag:ident,)*) => {
|
||||
|
@ -90,7 +89,6 @@ diagnostics![
|
|||
UnresolvedMethodCall,
|
||||
UnresolvedModule,
|
||||
UnresolvedIdent,
|
||||
UnresolvedProcMacro,
|
||||
UnusedMut,
|
||||
UnusedVariable,
|
||||
];
|
||||
|
@ -150,23 +148,12 @@ pub struct InactiveCode {
|
|||
pub opts: CfgOptions,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct UnresolvedProcMacro {
|
||||
pub node: InFile<SyntaxNodePtr>,
|
||||
/// If the diagnostic can be pinpointed more accurately than via `node`, this is the `TextRange`
|
||||
/// to use instead.
|
||||
pub precise_location: Option<TextRange>,
|
||||
pub macro_name: Option<String>,
|
||||
pub kind: MacroKind,
|
||||
/// The crate id of the proc-macro this macro belongs to, or `None` if the proc-macro can't be found.
|
||||
pub krate: CrateId,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct MacroError {
|
||||
pub node: InFile<SyntaxNodePtr>,
|
||||
pub precise_location: Option<TextRange>,
|
||||
pub message: String,
|
||||
pub error: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
|
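With `UnresolvedProcMacro` removed, `MacroError` gains an `error` flag so consumers can choose between a hard error and a weak warning, which is what the `ide-diagnostics` handler later does. A small illustrative sketch of that mapping (the `Severity` values mirror the ones used further down, everything else is made up for the example):

```rust
#[derive(Debug, PartialEq)]
enum Severity {
    Error,
    WeakWarning,
}

struct MacroError {
    message: String,
    /// `true` for hard errors, `false` for soft "expansion is disabled"-style problems.
    error: bool,
}

fn severity_of(d: &MacroError) -> Severity {
    if d.error { Severity::Error } else { Severity::WeakWarning }
}

fn main() {
    let soft = MacroError { message: "proc-macro expansion is disabled".to_owned(), error: false };
    println!("{} ({:?})", soft.message, severity_of(&soft));
    assert_eq!(severity_of(&soft), Severity::WeakWarning);
}
```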
|
|
@ -137,7 +137,7 @@ pub use {
|
|||
hygiene::{marks_rev, SyntaxContextExt},
|
||||
inert_attr_macro::AttributeTemplate,
|
||||
name::Name,
|
||||
proc_macro::ProcMacros,
|
||||
proc_macro::{ProcMacros, ProcMacrosBuilder},
|
||||
tt, ExpandResult, HirFileId, HirFileIdExt, MacroFileId, MacroFileIdExt,
|
||||
},
|
||||
hir_ty::{
|
||||
|
@ -833,19 +833,27 @@ fn macro_call_diagnostics(
|
|||
let ValueResult { value: parse_errors, err } = &*e;
|
||||
if let Some(err) = err {
|
||||
let loc = db.lookup_intern_macro_call(macro_call_id);
|
||||
let (node, precise_location, macro_name, kind) = precise_macro_call_location(&loc.kind, db);
|
||||
let diag = match err {
|
||||
&hir_expand::ExpandError::UnresolvedProcMacro(krate) => {
|
||||
UnresolvedProcMacro { node, precise_location, macro_name, kind, krate }.into()
|
||||
}
|
||||
err => MacroError { node, precise_location, message: err.to_string() }.into(),
|
||||
let file_id = loc.kind.file_id();
|
||||
let node =
|
||||
InFile::new(file_id, db.ast_id_map(file_id).get_erased(loc.kind.erased_ast_id()));
|
||||
let (message, error) = err.render_to_string(db.upcast());
|
||||
let precise_location = if err.span().anchor.file_id == file_id {
|
||||
Some(
|
||||
err.span().range
|
||||
+ db.ast_id_map(err.span().anchor.file_id.into())
|
||||
.get_erased(err.span().anchor.ast_id)
|
||||
.text_range()
|
||||
.start(),
|
||||
)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
acc.push(diag);
|
||||
acc.push(MacroError { node, precise_location, message, error }.into());
|
||||
}
|
||||
|
||||
if !parse_errors.is_empty() {
|
||||
let loc = db.lookup_intern_macro_call(macro_call_id);
|
||||
let (node, precise_location, _, _) = precise_macro_call_location(&loc.kind, db);
|
||||
let (node, precise_location) = precise_macro_call_location(&loc.kind, db);
|
||||
acc.push(
|
||||
MacroExpansionParseError { node, precise_location, errors: parse_errors.clone() }
|
||||
.into(),
|
||||
|
@ -895,6 +903,19 @@ fn emit_def_diagnostic_(
|
|||
acc.push(UnresolvedExternCrate { decl: InFile::new(ast.file_id, item) }.into());
|
||||
}
|
||||
|
||||
DefDiagnosticKind::MacroError { ast, path, err } => {
|
||||
let item = ast.to_ptr(db.upcast());
|
||||
let (message, error) = err.render_to_string(db.upcast());
|
||||
acc.push(
|
||||
MacroError {
|
||||
node: InFile::new(ast.file_id, item.syntax_node_ptr()),
|
||||
precise_location: None,
|
||||
message: format!("{}: {message}", path.display(db.upcast())),
|
||||
error,
|
||||
}
|
||||
.into(),
|
||||
)
|
||||
}
|
||||
DefDiagnosticKind::UnresolvedImport { id, index } => {
|
||||
let file_id = id.file_id();
|
||||
let item_tree = id.item_tree(db.upcast());
|
||||
|
@ -991,15 +1012,8 @@ fn emit_def_diagnostic_(
|
|||
Some(())
|
||||
})();
|
||||
}
|
||||
DefDiagnosticKind::UnresolvedProcMacro { ast, krate } => {
|
||||
let (node, precise_location, macro_name, kind) = precise_macro_call_location(ast, db);
|
||||
acc.push(
|
||||
UnresolvedProcMacro { node, precise_location, macro_name, kind, krate: *krate }
|
||||
.into(),
|
||||
);
|
||||
}
|
||||
DefDiagnosticKind::UnresolvedMacroCall { ast, path } => {
|
||||
let (node, precise_location, _, _) = precise_macro_call_location(ast, db);
|
||||
let (node, precise_location) = precise_macro_call_location(ast, db);
|
||||
acc.push(
|
||||
UnresolvedMacroCall {
|
||||
macro_call: node,
|
||||
|
@ -1068,7 +1082,7 @@ fn emit_def_diagnostic_(
|
|||
fn precise_macro_call_location(
|
||||
ast: &MacroCallKind,
|
||||
db: &dyn HirDatabase,
|
||||
) -> (InFile<SyntaxNodePtr>, Option<TextRange>, Option<String>, MacroKind) {
|
||||
) -> (InFile<SyntaxNodePtr>, Option<TextRange>) {
|
||||
// FIXME: maybe we actually want slightly different ranges for the different macro diagnostics
|
||||
// - e.g. the full attribute for macro errors, but only the name for name resolution
|
||||
match ast {
|
||||
|
@ -1080,8 +1094,6 @@ fn precise_macro_call_location(
|
|||
.and_then(|it| it.segment())
|
||||
.and_then(|it| it.name_ref())
|
||||
.map(|it| it.syntax().text_range()),
|
||||
node.path().and_then(|it| it.segment()).map(|it| it.to_string()),
|
||||
MacroKind::ProcMacro,
|
||||
)
|
||||
}
|
||||
MacroCallKind::Derive { ast_id, derive_attr_index, derive_index, .. } => {
|
||||
|
@ -1110,8 +1122,6 @@ fn precise_macro_call_location(
|
|||
(
|
||||
ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node))),
|
||||
token.as_ref().map(|tok| tok.text_range()),
|
||||
token.as_ref().map(ToString::to_string),
|
||||
MacroKind::Derive,
|
||||
)
|
||||
}
|
||||
MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
|
||||
|
@ -1126,12 +1136,6 @@ fn precise_macro_call_location(
|
|||
(
|
||||
ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&attr))),
|
||||
Some(attr.syntax().text_range()),
|
||||
attr.path()
|
||||
.and_then(|path| path.segment())
|
||||
.and_then(|seg| seg.name_ref())
|
||||
.as_ref()
|
||||
.map(ToString::to_string),
|
||||
MacroKind::Attr,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
@ -1795,20 +1799,28 @@ impl DefWithBody {
|
|||
BodyDiagnostic::InactiveCode { node, cfg, opts } => {
|
||||
InactiveCode { node: *node, cfg: cfg.clone(), opts: opts.clone() }.into()
|
||||
}
|
||||
BodyDiagnostic::MacroError { node, message } => MacroError {
|
||||
node: (*node).map(|it| it.into()),
|
||||
precise_location: None,
|
||||
message: message.to_string(),
|
||||
BodyDiagnostic::MacroError { node, err } => {
|
||||
let (message, error) = err.render_to_string(db.upcast());
|
||||
|
||||
let precise_location = if err.span().anchor.file_id == node.file_id {
|
||||
Some(
|
||||
err.span().range
|
||||
+ db.ast_id_map(err.span().anchor.file_id.into())
|
||||
.get_erased(err.span().anchor.ast_id)
|
||||
.text_range()
|
||||
.start(),
|
||||
)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
MacroError {
|
||||
node: (*node).map(|it| it.into()),
|
||||
precise_location,
|
||||
message,
|
||||
error,
|
||||
}
|
||||
.into()
|
||||
}
|
||||
.into(),
|
||||
BodyDiagnostic::UnresolvedProcMacro { node, krate } => UnresolvedProcMacro {
|
||||
node: (*node).map(|it| it.into()),
|
||||
precise_location: None,
|
||||
macro_name: None,
|
||||
kind: MacroKind::ProcMacro,
|
||||
krate: *krate,
|
||||
}
|
||||
.into(),
|
||||
BodyDiagnostic::UnresolvedMacroCall { node, path } => UnresolvedMacroCall {
|
||||
macro_call: (*node).map(|ast_ptr| ast_ptr.into()),
|
||||
precise_location: None,
|
||||
|
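Both diagnostic paths in this file compute `precise_location` the same way: the error span is relative to its AST anchor, so it becomes an absolute range by adding the anchor node's start offset, and only when the span's anchor file matches the node being reported on. A simplified sketch of that arithmetic using plain offsets instead of `TextRange`/`AstIdMap` (hypothetical types):

```rust
#[derive(Debug, Clone, Copy, PartialEq)]
struct Range { start: u32, end: u32 }

/// A span whose `range` is relative to the start of some anchor node.
struct AnchoredSpan { anchor_file: u32, anchor_start: u32, range: Range }

/// Absolute range in `node_file`, or `None` if the span points into another file.
fn precise_location(node_file: u32, span: &AnchoredSpan) -> Option<Range> {
    if span.anchor_file != node_file {
        return None;
    }
    Some(Range {
        start: span.range.start + span.anchor_start,
        end: span.range.end + span.anchor_start,
    })
}

fn main() {
    let span = AnchoredSpan { anchor_file: 1, anchor_start: 100, range: Range { start: 4, end: 9 } };
    assert_eq!(precise_location(1, &span), Some(Range { start: 104, end: 109 }));
    assert_eq!(precise_location(2, &span), None);
}
```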
|
|
@ -19,7 +19,7 @@ use hir_def::{
|
|||
};
|
||||
use hir_expand::{
|
||||
attrs::collect_attrs,
|
||||
builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander},
|
||||
builtin::{BuiltinFnLikeExpander, EagerExpander},
|
||||
db::ExpandDatabase,
|
||||
files::InRealFile,
|
||||
name::AsName,
|
||||
|
|
|
@ -7,7 +7,10 @@ pub(crate) fn macro_error(ctx: &DiagnosticsContext<'_>, d: &hir::MacroError) ->
|
|||
// Use more accurate position if available.
|
||||
let display_range = ctx.resolve_precise_location(&d.node, d.precise_location);
|
||||
Diagnostic::new(
|
||||
DiagnosticCode::Ra("macro-error", Severity::Error),
|
||||
DiagnosticCode::Ra(
|
||||
"macro-error",
|
||||
if d.error { Severity::Error } else { Severity::WeakWarning },
|
||||
),
|
||||
d.message.clone(),
|
||||
display_range,
|
||||
)
|
||||
|
@ -45,7 +48,7 @@ macro_rules! include { () => {} }
|
|||
macro_rules! compile_error { () => {} }
|
||||
|
||||
include!("doesntexist");
|
||||
//^^^^^^^ error: failed to load file `doesntexist`
|
||||
//^^^^^^^^^^^^^ error: failed to load file `doesntexist`
|
||||
|
||||
compile_error!("compile_error macro works");
|
||||
//^^^^^^^^^^^^^ error: compile_error macro works
|
||||
|
@ -125,7 +128,7 @@ macro_rules! env { () => {} }
|
|||
macro_rules! concat { () => {} }
|
||||
|
||||
include!(concat!(env!("OUT_DIR"), "/out.rs"));
|
||||
//^^^^^^^ error: `OUT_DIR` not set, enable "build scripts" to fix
|
||||
//^^^^^^^^^ error: `OUT_DIR` not set, enable "build scripts" to fix
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
@ -160,20 +163,25 @@ macro_rules! include {}
|
|||
|
||||
#[rustc_builtin_macro]
|
||||
macro_rules! compile_error {}
|
||||
#[rustc_builtin_macro]
|
||||
macro_rules! concat {}
|
||||
|
||||
fn main() {
|
||||
// Test a handful of built-in (eager) macros:
|
||||
|
||||
include!(invalid);
|
||||
//^^^^^^^ error: could not convert tokens
|
||||
//^^^^^^^ error: expected string literal
|
||||
include!("does not exist");
|
||||
//^^^^^^^ error: failed to load file `does not exist`
|
||||
//^^^^^^^^^^^^^^^^ error: failed to load file `does not exist`
|
||||
|
||||
include!(concat!("does ", "not ", "exist"));
|
||||
//^^^^^^^^^^^^^^^^^^^^^^^^^^ error: failed to load file `does not exist`
|
||||
|
||||
env!(invalid);
|
||||
//^^^ error: could not convert tokens
|
||||
//^^^^^^^ error: expected string literal
|
||||
|
||||
env!("OUT_DIR");
|
||||
//^^^ error: `OUT_DIR` not set, enable "build scripts" to fix
|
||||
//^^^^^^^^^ error: `OUT_DIR` not set, enable "build scripts" to fix
|
||||
|
||||
compile_error!("compile_error works");
|
||||
//^^^^^^^^^^^^^ error: compile_error works
|
||||
|
@ -198,7 +206,7 @@ fn f() {
|
|||
m!();
|
||||
|
||||
m!(hi);
|
||||
//^ error: leftover tokens
|
||||
//^ error: leftover tokens
|
||||
}
|
||||
"#,
|
||||
);
|
||||
|
|
|
@ -1,45 +0,0 @@
|
|||
use hir::db::DefDatabase;
|
||||
|
||||
use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, Severity};
|
||||
|
||||
// Diagnostic: unresolved-proc-macro
|
||||
//
|
||||
// This diagnostic is shown when a procedural macro cannot be found. This usually means that
|
||||
// procedural macro support is simply disabled (and hence is only a weak hint instead of an error),
|
||||
// but can also indicate project setup problems.
|
||||
//
|
||||
// If you are seeing a lot of "proc macro not expanded" warnings, you can add this option to the
|
||||
// `rust-analyzer.diagnostics.disabled` list to prevent them from showing. Alternatively you can
|
||||
// enable support for procedural macros (see `rust-analyzer.procMacro.attributes.enable`).
|
||||
pub(crate) fn unresolved_proc_macro(
|
||||
ctx: &DiagnosticsContext<'_>,
|
||||
d: &hir::UnresolvedProcMacro,
|
||||
proc_macros_enabled: bool,
|
||||
proc_attr_macros_enabled: bool,
|
||||
) -> Diagnostic {
|
||||
// Use more accurate position if available.
|
||||
let display_range = ctx.resolve_precise_location(&d.node, d.precise_location);
|
||||
|
||||
let config_enabled = match d.kind {
|
||||
hir::MacroKind::Attr => proc_macros_enabled && proc_attr_macros_enabled,
|
||||
_ => proc_macros_enabled,
|
||||
};
|
||||
|
||||
let not_expanded_message = match &d.macro_name {
|
||||
Some(name) => format!("proc macro `{name}` not expanded"),
|
||||
None => "proc macro not expanded".to_owned(),
|
||||
};
|
||||
let severity = if config_enabled { Severity::Error } else { Severity::WeakWarning };
|
||||
let def_map = ctx.sema.db.crate_def_map(d.krate);
|
||||
let message = if config_enabled {
|
||||
def_map.proc_macro_loading_error().unwrap_or("internal error")
|
||||
} else {
|
||||
match d.kind {
|
||||
hir::MacroKind::Attr if proc_macros_enabled => "attribute macro expansion is disabled",
|
||||
_ => "proc-macro expansion is disabled",
|
||||
}
|
||||
};
|
||||
let message = format!("{not_expanded_message}: {message}");
|
||||
|
||||
Diagnostic::new(DiagnosticCode::Ra("unresolved-proc-macro", severity), message, display_range)
|
||||
}
|
|
@ -62,7 +62,6 @@ mod handlers {
|
|||
pub(crate) mod unresolved_macro_call;
|
||||
pub(crate) mod unresolved_method;
|
||||
pub(crate) mod unresolved_module;
|
||||
pub(crate) mod unresolved_proc_macro;
|
||||
pub(crate) mod unused_variables;
|
||||
|
||||
// The handlers below are unusual: they implement the diagnostics as well.
|
||||
|
@ -405,7 +404,6 @@ pub fn diagnostics(
|
|||
AnyDiagnostic::UnresolvedMacroCall(d) => handlers::unresolved_macro_call::unresolved_macro_call(&ctx, &d),
|
||||
AnyDiagnostic::UnresolvedMethodCall(d) => handlers::unresolved_method::unresolved_method(&ctx, &d),
|
||||
AnyDiagnostic::UnresolvedModule(d) => handlers::unresolved_module::unresolved_module(&ctx, &d),
|
||||
AnyDiagnostic::UnresolvedProcMacro(d) => handlers::unresolved_proc_macro::unresolved_proc_macro(&ctx, &d, config.proc_macros_enabled, config.proc_attr_macros_enabled),
|
||||
AnyDiagnostic::UnusedMut(d) => match handlers::mutability_errors::unused_mut(&ctx, &d) {
|
||||
Some(it) => it,
|
||||
None => continue,
|
||||
|
|
|
@ -43,7 +43,6 @@ mod parent_module;
|
|||
mod references;
|
||||
mod rename;
|
||||
mod runnables;
|
||||
mod shuffle_crate_graph;
|
||||
mod signature_help;
|
||||
mod ssr;
|
||||
mod static_index;
|
||||
|
@ -202,10 +201,6 @@ impl AnalysisHost {
|
|||
pub fn raw_database_mut(&mut self) -> &mut RootDatabase {
|
||||
&mut self.db
|
||||
}
|
||||
|
||||
pub fn shuffle_crate_graph(&mut self) {
|
||||
shuffle_crate_graph::shuffle_crate_graph(&mut self.db);
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for AnalysisHost {
|
||||
|
|
|
@ -1,58 +0,0 @@
|
|||
use hir::{db::ExpandDatabase, ProcMacros};
|
||||
use ide_db::{
|
||||
base_db::{salsa::Durability, CrateGraph, SourceDatabase},
|
||||
FxHashMap, RootDatabase,
|
||||
};
|
||||
use triomphe::Arc;
|
||||
|
||||
// Feature: Shuffle Crate Graph
|
||||
//
|
||||
// Randomizes all crate IDs in the crate graph, for debugging.
|
||||
//
|
||||
// |===
|
||||
// | Editor | Action Name
|
||||
//
|
||||
// | VS Code | **rust-analyzer: Shuffle Crate Graph**
|
||||
// |===
|
||||
pub(crate) fn shuffle_crate_graph(db: &mut RootDatabase) {
|
||||
let crate_graph = db.crate_graph();
|
||||
let proc_macros = db.proc_macros();
|
||||
|
||||
let mut shuffled_ids = crate_graph.iter().collect::<Vec<_>>();
|
||||
|
||||
let mut rng = oorandom::Rand32::new(stdx::rand::seed());
|
||||
stdx::rand::shuffle(&mut shuffled_ids, |i| rng.rand_range(0..i as u32) as usize);
|
||||
|
||||
let mut new_graph = CrateGraph::default();
|
||||
let mut new_proc_macros = ProcMacros::default();
|
||||
|
||||
let mut map = FxHashMap::default();
|
||||
for old_id in shuffled_ids.iter().copied() {
|
||||
let data = &crate_graph[old_id];
|
||||
let new_id = new_graph.add_crate_root(
|
||||
data.root_file_id,
|
||||
data.edition,
|
||||
data.display_name.clone(),
|
||||
data.version.clone(),
|
||||
data.cfg_options.clone(),
|
||||
data.potential_cfg_options.clone(),
|
||||
data.env.clone(),
|
||||
data.is_proc_macro,
|
||||
data.origin.clone(),
|
||||
);
|
||||
new_proc_macros.insert(new_id, proc_macros[&old_id].clone());
|
||||
map.insert(old_id, new_id);
|
||||
}
|
||||
|
||||
for old_id in shuffled_ids.iter().copied() {
|
||||
let data = &crate_graph[old_id];
|
||||
for dep in &data.dependencies {
|
||||
let mut new_dep = dep.clone();
|
||||
new_dep.crate_id = map[&dep.crate_id];
|
||||
new_graph.add_dep(map[&old_id], new_dep).unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
db.set_crate_graph_with_durability(Arc::new(new_graph), Durability::HIGH);
|
||||
db.set_proc_macros_with_durability(Arc::new(new_proc_macros), Durability::HIGH);
|
||||
}
|
|
@ -68,11 +68,14 @@ pub fn load_workspace(
|
|||
let proc_macro_server = match &load_config.with_proc_macro_server {
|
||||
ProcMacroServerChoice::Sysroot => ws
|
||||
.find_sysroot_proc_macro_srv()
|
||||
.and_then(|it| ProcMacroServer::spawn(&it, extra_env).map_err(Into::into)),
|
||||
.and_then(|it| ProcMacroServer::spawn(&it, extra_env).map_err(Into::into))
|
||||
.map_err(|e| (e, true)),
|
||||
ProcMacroServerChoice::Explicit(path) => {
|
||||
ProcMacroServer::spawn(path, extra_env).map_err(Into::into)
|
||||
ProcMacroServer::spawn(path, extra_env).map_err(Into::into).map_err(|e| (e, true))
|
||||
}
|
||||
ProcMacroServerChoice::None => {
|
||||
Err((anyhow::format_err!("proc macro server disabled"), false))
|
||||
}
|
||||
ProcMacroServerChoice::None => Err(anyhow::format_err!("proc macro server disabled")),
|
||||
};
|
||||
|
||||
let (crate_graph, proc_macros) = ws.to_crate_graph(
|
||||
|
@ -87,7 +90,7 @@ pub fn load_workspace(
|
|||
let proc_macros = {
|
||||
let proc_macro_server = match &proc_macro_server {
|
||||
Ok(it) => Ok(it),
|
||||
Err(e) => Err(e.to_string()),
|
||||
Err((e, hard_err)) => Err((e.to_string(), *hard_err)),
|
||||
};
|
||||
proc_macros
|
||||
.into_iter()
|
||||
|
@ -95,7 +98,7 @@ pub fn load_workspace(
|
|||
(
|
||||
crate_id,
|
||||
path.map_or_else(
|
||||
|_| Err("proc macro crate is missing dylib".to_owned()),
|
||||
|e| Err((e, true)),
|
||||
|(_, path)| {
|
||||
proc_macro_server.as_ref().map_err(Clone::clone).and_then(
|
||||
|proc_macro_server| load_proc_macro(proc_macro_server, &path, &[]),
|
||||
|
@ -355,8 +358,7 @@ impl SourceRootConfig {
|
|||
}
|
||||
}
|
||||
|
||||
/// Load the proc-macros for the given lib path, replacing all expanders whose names are in `dummy_replace`
|
||||
/// with an identity dummy expander.
|
||||
/// Load the proc-macros for the given lib path, disabling all expanders whose names are in `ignored_macros`.
|
||||
pub fn load_proc_macro(
|
||||
server: &ProcMacroServer,
|
||||
path: &AbsPath,
|
||||
|
@ -383,7 +385,7 @@ pub fn load_proc_macro(
|
|||
}
|
||||
Err(e) => {
|
||||
tracing::warn!("proc-macro loading for {path} failed: {e}");
|
||||
Err(e)
|
||||
Err((e, true))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
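Proc-macro load errors now carry a boolean next to the message, distinguishing hard failures (a server that could not be spawned, a missing dylib) from expected soft ones (the server deliberately disabled). A small sketch of how a consumer might classify such a result (purely illustrative):

```rust
/// Mirrors the shape of the new load result: either the loaded macros, or a
/// message plus a flag saying whether the failure should surface as an error.
type LoadResult = Result<Vec<String>, (String, bool)>;

fn describe(res: &LoadResult) -> String {
    match res {
        Ok(macros) => format!("loaded {} proc-macros", macros.len()),
        Err((msg, true)) => format!("error: {msg}"),
        Err((msg, false)) => format!("warning: {msg}"),
    }
}

fn main() {
    let disabled: LoadResult = Err(("proc macro server disabled".to_owned(), false));
    let failed: LoadResult = Err(("failed to spawn proc-macro server".to_owned(), true));
    println!("{}", describe(&disabled)); // warning: proc macro server disabled
    println!("{}", describe(&failed)); // error: failed to spawn proc-macro server
}
```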
|
|
@ -45,7 +45,7 @@ fn benchmark_expand_macro_rules() {
|
|||
invocations
|
||||
.into_iter()
|
||||
.map(|(id, tt)| {
|
||||
let res = rules[&id].expand(&tt, |_| (), true, DUMMY, Edition::CURRENT);
|
||||
let res = rules[&id].expand(&tt, |_| (), DUMMY, Edition::CURRENT);
|
||||
assert!(res.err.is_none());
|
||||
res.value.0.token_trees.len()
|
||||
})
|
||||
|
@ -118,7 +118,7 @@ fn invocation_fixtures(
|
|||
},
|
||||
token_trees: token_trees.into_boxed_slice(),
|
||||
};
|
||||
if it.expand(&subtree, |_| (), true, DUMMY, Edition::CURRENT).err.is_none() {
|
||||
if it.expand(&subtree, |_| (), DUMMY, Edition::CURRENT).err.is_none() {
|
||||
res.push((name.clone(), subtree));
|
||||
break;
|
||||
}
|
||||
|
|
|
@ -9,13 +9,12 @@ use intern::Symbol;
|
|||
use rustc_hash::FxHashMap;
|
||||
use span::{Edition, Span};
|
||||
|
||||
use crate::{parser::MetaVarKind, ExpandError, ExpandResult, MatchedArmIndex};
|
||||
use crate::{parser::MetaVarKind, ExpandError, ExpandErrorKind, ExpandResult, MatchedArmIndex};
|
||||
|
||||
pub(crate) fn expand_rules(
|
||||
rules: &[crate::Rule],
|
||||
input: &tt::Subtree<Span>,
|
||||
marker: impl Fn(&mut Span) + Copy,
|
||||
new_meta_vars: bool,
|
||||
call_site: Span,
|
||||
def_site_edition: Edition,
|
||||
) -> ExpandResult<(tt::Subtree<Span>, MatchedArmIndex)> {
|
||||
|
@ -27,13 +26,8 @@ pub(crate) fn expand_rules(
|
|||
// If we find a rule that applies without errors, we're done.
|
||||
// Unconditionally returning the transcription here makes the
|
||||
// `test_repeat_bad_var` test fail.
|
||||
let ExpandResult { value, err: transcribe_err } = transcriber::transcribe(
|
||||
&rule.rhs,
|
||||
&new_match.bindings,
|
||||
marker,
|
||||
new_meta_vars,
|
||||
call_site,
|
||||
);
|
||||
let ExpandResult { value, err: transcribe_err } =
|
||||
transcriber::transcribe(&rule.rhs, &new_match.bindings, marker, call_site);
|
||||
if transcribe_err.is_none() {
|
||||
return ExpandResult::ok((value, Some(idx as u32)));
|
||||
}
|
||||
|
@ -52,7 +46,7 @@ pub(crate) fn expand_rules(
|
|||
if let Some((match_, rule, idx)) = match_ {
|
||||
// if we got here, there was no match without errors
|
||||
let ExpandResult { value, err: transcribe_err } =
|
||||
transcriber::transcribe(&rule.rhs, &match_.bindings, marker, new_meta_vars, call_site);
|
||||
transcriber::transcribe(&rule.rhs, &match_.bindings, marker, call_site);
|
||||
ExpandResult { value: (value, idx.try_into().ok()), err: match_.err.or(transcribe_err) }
|
||||
} else {
|
||||
ExpandResult::new(
|
||||
|
@ -63,7 +57,7 @@ pub(crate) fn expand_rules(
|
|||
},
|
||||
None,
|
||||
),
|
||||
ExpandError::NoMatchingRule,
|
||||
ExpandError::new(call_site, ExpandErrorKind::NoMatchingRule),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
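`expand_rules` still tries each declarative rule in order and takes the first transcription that succeeds, falling back to the best partial match and, failing that, a `NoMatchingRule` error now anchored at the call site. A condensed, self-contained sketch of the try-in-order strategy (toy rule type, not mbe's real signatures):

```rust
#[derive(Debug, PartialEq)]
enum ExpandError {
    NoMatchingRule,
}

type Rule = fn(&str) -> Result<String, ExpandError>;

fn rule_a(input: &str) -> Result<String, ExpandError> {
    if input == "a" { Ok("matched a".to_owned()) } else { Err(ExpandError::NoMatchingRule) }
}

fn rule_b(input: &str) -> Result<String, ExpandError> {
    if input == "b" { Ok("matched b".to_owned()) } else { Err(ExpandError::NoMatchingRule) }
}

/// Try the rules in order; the first one that expands without an error wins.
/// (The real implementation additionally keeps the best partial match around
/// for error recovery before giving up with `NoMatchingRule`.)
fn expand_rules(rules: &[Rule], input: &str) -> Result<String, ExpandError> {
    for rule in rules {
        if let Ok(expansion) = rule(input) {
            return Ok(expansion);
        }
    }
    Err(ExpandError::NoMatchingRule)
}

fn main() {
    let rules: &[Rule] = &[rule_a, rule_b];
    assert_eq!(expand_rules(rules, "b"), Ok("matched b".to_owned()));
    assert_eq!(expand_rules(rules, "c"), Err(ExpandError::NoMatchingRule));
}
```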
|
|
@ -70,7 +70,7 @@ use crate::{
|
|||
expander::{Binding, Bindings, ExpandResult, Fragment},
|
||||
expect_fragment,
|
||||
parser::{MetaVarKind, Op, RepeatKind, Separator},
|
||||
ExpandError, MetaTemplate, ValueResult,
|
||||
ExpandError, ExpandErrorKind, MetaTemplate, ValueResult,
|
||||
};
|
||||
|
||||
impl Bindings {
|
||||
|
@ -510,11 +510,17 @@ fn match_loop_inner<'t>(
|
|||
if matches!(rhs, tt::Leaf::Literal(it) if it.symbol == lhs.symbol) {
|
||||
item.dot.next();
|
||||
} else {
|
||||
res.add_err(ExpandError::UnexpectedToken);
|
||||
res.add_err(ExpandError::new(
|
||||
*rhs.span(),
|
||||
ExpandErrorKind::UnexpectedToken,
|
||||
));
|
||||
item.is_error = true;
|
||||
}
|
||||
} else {
|
||||
res.add_err(ExpandError::binding_error(format!("expected literal: `{lhs}`")));
|
||||
res.add_err(ExpandError::binding_error(
|
||||
src.clone().next().map_or(delim_span.close, |it| it.first_span()),
|
||||
format!("expected literal: `{lhs}`"),
|
||||
));
|
||||
item.is_error = true;
|
||||
}
|
||||
try_push!(next_items, item);
|
||||
|
@ -524,11 +530,17 @@ fn match_loop_inner<'t>(
|
|||
if matches!(rhs, tt::Leaf::Ident(it) if it.sym == lhs.sym) {
|
||||
item.dot.next();
|
||||
} else {
|
||||
res.add_err(ExpandError::UnexpectedToken);
|
||||
res.add_err(ExpandError::new(
|
||||
*rhs.span(),
|
||||
ExpandErrorKind::UnexpectedToken,
|
||||
));
|
||||
item.is_error = true;
|
||||
}
|
||||
} else {
|
||||
res.add_err(ExpandError::binding_error(format!("expected ident: `{lhs}`")));
|
||||
res.add_err(ExpandError::binding_error(
|
||||
src.clone().next().map_or(delim_span.close, |it| it.first_span()),
|
||||
format!("expected ident: `{lhs}`"),
|
||||
));
|
||||
item.is_error = true;
|
||||
}
|
||||
try_push!(next_items, item);
|
||||
|
@ -538,8 +550,8 @@ fn match_loop_inner<'t>(
|
|||
let error = if let Ok(rhs) = fork.expect_glued_punct() {
|
||||
let first_is_single_quote = rhs[0].char == '\'';
|
||||
let lhs = lhs.iter().map(|it| it.char);
|
||||
let rhs = rhs.iter().map(|it| it.char);
|
||||
if lhs.clone().eq(rhs) {
|
||||
let rhs_ = rhs.iter().map(|it| it.char);
|
||||
if lhs.clone().eq(rhs_) {
|
||||
// HACK: here we use `meta_result` to pass `TtIter` back to caller because
|
||||
// it might have been advanced multiple times. `ValueResult` is
|
||||
// insignificant.
|
||||
|
@ -552,13 +564,19 @@ fn match_loop_inner<'t>(
|
|||
if first_is_single_quote {
|
||||
// If the first punct token is a single quote, that's a part of a lifetime
|
||||
// ident, not a punct.
|
||||
ExpandError::UnexpectedToken
|
||||
ExpandError::new(
|
||||
rhs.get(1).map_or(rhs[0].span, |it| it.span),
|
||||
ExpandErrorKind::UnexpectedToken,
|
||||
)
|
||||
} else {
|
||||
let lhs = lhs.collect::<String>();
|
||||
ExpandError::binding_error(format!("expected punct: `{lhs}`"))
|
||||
ExpandError::binding_error(rhs[0].span, format!("expected punct: `{lhs}`"))
|
||||
}
|
||||
} else {
|
||||
ExpandError::UnexpectedToken
|
||||
ExpandError::new(
|
||||
src.clone().next().map_or(delim_span.close, |it| it.first_span()),
|
||||
ExpandErrorKind::UnexpectedToken,
|
||||
)
|
||||
};
|
||||
|
||||
res.add_err(error);
|
||||
|
@ -651,7 +669,7 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree<Span>, edition: Edition)
|
|||
if let Some(item) = error_recover_item {
|
||||
res.bindings = bindings_builder.build(&item);
|
||||
}
|
||||
res.add_err(ExpandError::UnexpectedToken);
|
||||
res.add_err(ExpandError::new(span.open, ExpandErrorKind::UnexpectedToken));
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
@ -670,7 +688,7 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree<Span>, edition: Edition)
|
|||
src = it;
|
||||
res.unmatched_tts += src.len();
|
||||
}
|
||||
res.add_err(ExpandError::LeftoverTokens);
|
||||
res.add_err(ExpandError::new(span.open, ExpandErrorKind::LeftoverTokens));
|
||||
|
||||
if let Some(error_recover_item) = error_recover_item {
|
||||
res.bindings = bindings_builder.build(&error_recover_item);
|
||||
|
@ -746,9 +764,10 @@ fn match_meta_var(
|
|||
) -> ExpandResult<Option<Fragment>> {
|
||||
let fragment = match kind {
|
||||
MetaVarKind::Path => {
|
||||
return expect_fragment(input, parser::PrefixEntryPoint::Path, edition).map(|it| {
|
||||
it.map(|it| tt::TokenTree::subtree_or_wrap(it, delim_span)).map(Fragment::Path)
|
||||
});
|
||||
return expect_fragment(input, parser::PrefixEntryPoint::Path, edition, delim_span)
|
||||
.map(|it| {
|
||||
it.map(|it| tt::TokenTree::subtree_or_wrap(it, delim_span)).map(Fragment::Path)
|
||||
});
|
||||
}
|
||||
MetaVarKind::Expr => {
|
||||
// `expr` should not match underscores, let expressions, or inline const. The latter
|
||||
|
@ -763,37 +782,54 @@ fn match_meta_var(
|
|||
|| it.sym == sym::let_
|
||||
|| it.sym == sym::const_ =>
|
||||
{
|
||||
return ExpandResult::only_err(ExpandError::NoMatchingRule)
|
||||
return ExpandResult::only_err(ExpandError::new(
|
||||
it.span,
|
||||
ExpandErrorKind::NoMatchingRule,
|
||||
))
|
||||
}
|
||||
_ => {}
|
||||
};
|
||||
return expect_fragment(input, parser::PrefixEntryPoint::Expr, edition).map(|tt| {
|
||||
tt.map(|tt| match tt {
|
||||
tt::TokenTree::Leaf(leaf) => tt::Subtree {
|
||||
delimiter: tt::Delimiter::invisible_spanned(*leaf.span()),
|
||||
token_trees: Box::new([leaf.into()]),
|
||||
},
|
||||
tt::TokenTree::Subtree(mut s) => {
|
||||
if s.delimiter.kind == tt::DelimiterKind::Invisible {
|
||||
s.delimiter.kind = tt::DelimiterKind::Parenthesis;
|
||||
return expect_fragment(input, parser::PrefixEntryPoint::Expr, edition, delim_span)
|
||||
.map(|tt| {
|
||||
tt.map(|tt| match tt {
|
||||
tt::TokenTree::Leaf(leaf) => tt::Subtree {
|
||||
delimiter: tt::Delimiter::invisible_spanned(*leaf.span()),
|
||||
token_trees: Box::new([leaf.into()]),
|
||||
},
|
||||
tt::TokenTree::Subtree(mut s) => {
|
||||
if s.delimiter.kind == tt::DelimiterKind::Invisible {
|
||||
s.delimiter.kind = tt::DelimiterKind::Parenthesis;
|
||||
}
|
||||
s
|
||||
}
|
||||
s
|
||||
}
|
||||
})
|
||||
.map(Fragment::Expr)
|
||||
});
|
||||
})
|
||||
.map(Fragment::Expr)
|
||||
});
|
||||
}
|
||||
MetaVarKind::Ident | MetaVarKind::Tt | MetaVarKind::Lifetime | MetaVarKind::Literal => {
|
||||
let span = input.next_span();
|
||||
let tt_result = match kind {
|
||||
MetaVarKind::Ident => input
|
||||
.expect_ident()
|
||||
.map(|ident| tt::Leaf::from(ident.clone()).into())
|
||||
.map_err(|()| ExpandError::binding_error("expected ident")),
|
||||
MetaVarKind::Tt => {
|
||||
expect_tt(input).map_err(|()| ExpandError::binding_error("expected token tree"))
|
||||
}
|
||||
MetaVarKind::Lifetime => expect_lifetime(input)
|
||||
.map_err(|()| ExpandError::binding_error("expected lifetime")),
|
||||
.map_err(|()| {
|
||||
ExpandError::binding_error(
|
||||
span.unwrap_or(delim_span.close),
|
||||
"expected ident",
|
||||
)
|
||||
}),
|
||||
MetaVarKind::Tt => expect_tt(input).map_err(|()| {
|
||||
ExpandError::binding_error(
|
||||
span.unwrap_or(delim_span.close),
|
||||
"expected token tree",
|
||||
)
|
||||
}),
|
||||
MetaVarKind::Lifetime => expect_lifetime(input).map_err(|()| {
|
||||
ExpandError::binding_error(
|
||||
span.unwrap_or(delim_span.close),
|
||||
"expected lifetime",
|
||||
)
|
||||
}),
|
||||
MetaVarKind::Literal => {
|
||||
let neg = eat_char(input, '-');
|
||||
input
|
||||
|
@ -808,7 +844,12 @@ fn match_meta_var(
|
|||
}),
|
||||
}
|
||||
})
|
||||
.map_err(|()| ExpandError::binding_error("expected literal"))
|
||||
.map_err(|()| {
|
||||
ExpandError::binding_error(
|
||||
span.unwrap_or(delim_span.close),
|
||||
"expected literal",
|
||||
)
|
||||
})
|
||||
}
|
||||
_ => unreachable!(),
|
||||
};
|
||||
|
@ -823,7 +864,7 @@ fn match_meta_var(
|
|||
MetaVarKind::Item => parser::PrefixEntryPoint::Item,
|
||||
MetaVarKind::Vis => parser::PrefixEntryPoint::Vis,
|
||||
};
|
||||
expect_fragment(input, fragment, edition).map(|it| it.map(Fragment::Tokens))
|
||||
expect_fragment(input, fragment, edition, delim_span).map(|it| it.map(Fragment::Tokens))
|
||||
}
|
||||
|
||||
fn collect_vars(collector_fun: &mut impl FnMut(Symbol), pattern: &MetaTemplate) {
|
||||
|
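A recurring pattern in the matcher changes above: attach the error to the span of the next unconsumed token, and fall back to the macro call's closing delimiter when the input is exhausted. A stand-alone version of that `map_or` fallback (toy types):

```rust
#[derive(Debug, Clone, Copy, PartialEq)]
struct Span(u32);

struct Token {
    span: Span,
}

/// Span to attach to an error at the current match position: the next token's
/// span if there is one, otherwise the closing delimiter of the macro call.
fn error_span(next: Option<&Token>, delim_close: Span) -> Span {
    next.map_or(delim_close, |tok| tok.span)
}

fn main() {
    let toks = [Token { span: Span(10) }, Token { span: Span(11) }];
    assert_eq!(error_span(toks.first(), Span(99)), Span(10));
    assert_eq!(error_span(None, Span(99)), Span(99));
}
```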
|
|
@ -8,14 +8,17 @@ use tt::Delimiter;
|
|||
use crate::{
|
||||
expander::{Binding, Bindings, Fragment},
|
||||
parser::{MetaVarKind, Op, RepeatKind, Separator},
|
||||
CountError, ExpandError, ExpandResult, MetaTemplate,
|
||||
ExpandError, ExpandErrorKind, ExpandResult, MetaTemplate,
|
||||
};
|
||||
|
||||
impl Bindings {
|
||||
fn get(&self, name: &Symbol) -> Result<&Binding, ExpandError> {
|
||||
fn get(&self, name: &Symbol, span: Span) -> Result<&Binding, ExpandError> {
|
||||
match self.inner.get(name) {
|
||||
Some(binding) => Ok(binding),
|
||||
None => Err(ExpandError::UnresolvedBinding(Box::new(Box::from(name.as_str())))),
|
||||
None => Err(ExpandError::new(
|
||||
span,
|
||||
ExpandErrorKind::UnresolvedBinding(Box::new(Box::from(name.as_str()))),
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -27,10 +30,10 @@ impl Bindings {
|
|||
marker: impl Fn(&mut Span),
|
||||
) -> Result<Fragment, ExpandError> {
|
||||
macro_rules! binding_err {
|
||||
($($arg:tt)*) => { ExpandError::binding_error(format!($($arg)*)) };
|
||||
($($arg:tt)*) => { ExpandError::binding_error(span, format!($($arg)*)) };
|
||||
}
|
||||
|
||||
let mut b = self.get(name)?;
|
||||
let mut b = self.get(name, span)?;
|
||||
for nesting_state in nesting.iter_mut() {
|
||||
nesting_state.hit = true;
|
||||
b = match b {
|
||||
|
@ -142,10 +145,9 @@ pub(super) fn transcribe(
|
|||
template: &MetaTemplate,
|
||||
bindings: &Bindings,
|
||||
marker: impl Fn(&mut Span) + Copy,
|
||||
new_meta_vars: bool,
|
||||
call_site: Span,
|
||||
) -> ExpandResult<tt::Subtree<Span>> {
|
||||
let mut ctx = ExpandCtx { bindings, nesting: Vec::new(), new_meta_vars, call_site };
|
||||
let mut ctx = ExpandCtx { bindings, nesting: Vec::new(), call_site };
|
||||
let mut arena: Vec<tt::TokenTree<Span>> = Vec::new();
|
||||
expand_subtree(&mut ctx, template, None, &mut arena, marker)
|
||||
}
|
||||
|
@ -165,7 +167,6 @@ struct NestingState {
|
|||
struct ExpandCtx<'a> {
|
||||
bindings: &'a Bindings,
|
||||
nesting: Vec<NestingState>,
|
||||
new_meta_vars: bool,
|
||||
call_site: Span,
|
||||
}
|
||||
|
||||
|
@ -263,7 +264,7 @@ fn expand_subtree(
|
|||
);
|
||||
}
|
||||
Op::Count { name, depth } => {
|
||||
let mut binding = match ctx.bindings.get(name) {
|
||||
let mut binding = match ctx.bindings.get(name, ctx.call_site) {
|
||||
Ok(b) => b,
|
||||
Err(e) => {
|
||||
if err.is_none() {
|
||||
|
@ -299,29 +300,11 @@ fn expand_subtree(
|
|||
}
|
||||
}
|
||||
|
||||
let res = if ctx.new_meta_vars {
|
||||
count(binding, 0, depth.unwrap_or(0))
|
||||
} else {
|
||||
count_old(binding, 0, *depth)
|
||||
};
|
||||
let res = count(binding, 0, depth.unwrap_or(0));
|
||||
|
||||
let c = match res {
|
||||
Ok(c) => c,
|
||||
Err(e) => {
|
||||
// XXX: It *might* make sense to emit a dummy integer value like `0` here.
|
||||
// That would make type inference a bit more robust in cases like
|
||||
// `v[${count(t)}]` where index doesn't matter, but also could lead to
|
||||
// wrong inference for cases like `tup.${count(t)}` where index itself
|
||||
// does matter.
|
||||
if err.is_none() {
|
||||
err = Some(e.into());
|
||||
}
|
||||
continue;
|
||||
}
|
||||
};
|
||||
arena.push(
|
||||
tt::Leaf::Literal(tt::Literal {
|
||||
symbol: Symbol::integer(c),
|
||||
symbol: Symbol::integer(res),
|
||||
span: ctx.call_site,
|
||||
suffix: None,
|
||||
kind: tt::LitKind::Integer,
|
||||
|
@ -353,7 +336,7 @@ fn expand_var(
|
|||
|
||||
match ctx.bindings.get_fragment(v, id, &mut ctx.nesting, marker) {
|
||||
Ok(it) => ExpandResult::ok(it),
|
||||
Err(ExpandError::UnresolvedBinding(_)) => {
|
||||
Err(e) if matches!(e.inner.1, ExpandErrorKind::UnresolvedBinding(_)) => {
|
||||
// Note that it is possible to have a `$var` inside a macro which is not bound.
|
||||
// For example:
|
||||
// ```
|
||||
|
@ -435,7 +418,7 @@ fn expand_repeat(
|
|||
}
|
||||
.into(),
|
||||
),
|
||||
err: Some(ExpandError::LimitExceeded),
|
||||
err: Some(ExpandError::new(ctx.call_site, ExpandErrorKind::LimitExceeded)),
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -481,16 +464,16 @@ fn expand_repeat(
|
|||
let tt = tt::Subtree {
|
||||
delimiter: tt::Delimiter::invisible_spanned(ctx.call_site),
|
||||
token_trees: buf.into_boxed_slice(),
|
||||
}
|
||||
.into();
|
||||
};
|
||||
|
||||
if RepeatKind::OneOrMore == kind && counter == 0 {
|
||||
let span = tt.delimiter.open;
|
||||
return ExpandResult {
|
||||
value: Fragment::Tokens(tt),
|
||||
err: Some(ExpandError::UnexpectedToken),
|
||||
value: Fragment::Tokens(tt.into()),
|
||||
err: Some(ExpandError::new(span, ExpandErrorKind::UnexpectedToken)),
|
||||
};
|
||||
}
|
||||
ExpandResult { value: Fragment::Tokens(tt), err }
|
||||
ExpandResult { value: Fragment::Tokens(tt.into()), err }
|
||||
}
|
||||
|
||||
fn push_fragment(ctx: &ExpandCtx<'_>, buf: &mut Vec<tt::TokenTree<Span>>, fragment: Fragment) {
|
||||
|
@ -557,44 +540,16 @@ fn fix_up_and_push_path_tt(
|
|||
|
||||
/// Handles `${count(t, depth)}`. `our_depth` is the recursion depth and `count_depth` is the depth
|
||||
/// defined by the metavar expression.
|
||||
fn count(binding: &Binding, depth_curr: usize, depth_max: usize) -> Result<usize, CountError> {
|
||||
fn count(binding: &Binding, depth_curr: usize, depth_max: usize) -> usize {
|
||||
match binding {
|
||||
Binding::Nested(bs) => {
|
||||
if depth_curr == depth_max {
|
||||
Ok(bs.len())
|
||||
bs.len()
|
||||
} else {
|
||||
bs.iter().map(|b| count(b, depth_curr + 1, depth_max)).sum()
|
||||
}
|
||||
}
|
||||
Binding::Empty => Ok(0),
|
||||
Binding::Fragment(_) | Binding::Missing(_) => Ok(1),
|
||||
}
|
||||
}
|
||||
|
||||
fn count_old(
|
||||
binding: &Binding,
|
||||
our_depth: usize,
|
||||
count_depth: Option<usize>,
|
||||
) -> Result<usize, CountError> {
|
||||
match binding {
|
||||
Binding::Nested(bs) => match count_depth {
|
||||
None => bs.iter().map(|b| count_old(b, our_depth + 1, None)).sum(),
|
||||
Some(0) => Ok(bs.len()),
|
||||
Some(d) => bs.iter().map(|b| count_old(b, our_depth + 1, Some(d - 1))).sum(),
|
||||
},
|
||||
Binding::Empty => Ok(0),
|
||||
Binding::Fragment(_) | Binding::Missing(_) => {
|
||||
if our_depth == 0 {
|
||||
// `${count(t)}` is placed inside the innermost repetition. This includes cases
|
||||
// where `t` is not a repeated fragment.
|
||||
Err(CountError::Misplaced)
|
||||
} else if count_depth.is_none() {
|
||||
Ok(1)
|
||||
} else {
|
||||
// We've reached the innermost repeated fragment, but the user wants us to go
|
||||
// further!
|
||||
Err(CountError::OutOfBounds)
|
||||
}
|
||||
}
|
||||
Binding::Empty => 0,
|
||||
Binding::Fragment(_) | Binding::Missing(_) => 1,
|
||||
}
|
||||
}
|
||||
|
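With the old `count_old`/`CountError` path gone, `count` simply sums the bindings at the requested depth, recursing through nested repetitions and never failing. A self-contained sketch of the same recursion over a toy binding tree (`Leaf` stands in for the real `Fragment`/`Missing` variants):

```rust
/// Toy version of the transcriber's binding tree.
enum Binding {
    /// A repetition level containing nested bindings.
    Nested(Vec<Binding>),
    /// A single captured fragment.
    Leaf,
    /// No captures at all.
    Empty,
}

/// Count bindings at `depth_max`: at the target depth return the number of
/// children, otherwise recurse into each child and sum.
fn count(binding: &Binding, depth_curr: usize, depth_max: usize) -> usize {
    match binding {
        Binding::Nested(bs) => {
            if depth_curr == depth_max {
                bs.len()
            } else {
                bs.iter().map(|b| count(b, depth_curr + 1, depth_max)).sum()
            }
        }
        Binding::Empty => 0,
        Binding::Leaf => 1,
    }
}

fn main() {
    // Two outer repetitions with 2 and 3 inner captures respectively.
    let b = Binding::Nested(vec![
        Binding::Nested(vec![Binding::Leaf, Binding::Leaf]),
        Binding::Nested(vec![Binding::Leaf, Binding::Leaf, Binding::Leaf]),
    ]);
    assert_eq!(count(&b, 0, 0), 2); // `${count(t)}`: number of outer repetitions
    assert_eq!(count(&b, 0, 1), 5); // `${count(t, 1)}`: total inner captures
}
```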
|
|
@ -15,10 +15,11 @@ mod to_parser_input;
|
|||
mod benchmark;
|
||||
|
||||
use span::{Edition, Span, SyntaxContextId};
|
||||
use stdx::impl_from;
|
||||
use tt::iter::TtIter;
|
||||
use tt::DelimSpan;
|
||||
|
||||
use std::fmt;
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::parser::{MetaTemplate, MetaVarKind, Op};
|
||||
|
||||
|
@ -64,39 +65,45 @@ impl fmt::Display for ParseError {
|
|||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Hash)]
|
||||
pub enum ExpandError {
|
||||
pub struct ExpandError {
|
||||
pub inner: Arc<(Span, ExpandErrorKind)>,
|
||||
}
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Hash)]
|
||||
pub enum ExpandErrorKind {
|
||||
BindingError(Box<Box<str>>),
|
||||
UnresolvedBinding(Box<Box<str>>),
|
||||
LeftoverTokens,
|
||||
ConversionError,
|
||||
LimitExceeded,
|
||||
NoMatchingRule,
|
||||
UnexpectedToken,
|
||||
CountError(CountError),
|
||||
}
|
||||
|
||||
impl_from!(CountError for ExpandError);
|
||||
|
||||
impl ExpandError {
|
||||
fn binding_error(e: impl Into<Box<str>>) -> ExpandError {
|
||||
ExpandError::BindingError(Box::new(e.into()))
|
||||
fn new(span: Span, kind: ExpandErrorKind) -> ExpandError {
|
||||
ExpandError { inner: Arc::new((span, kind)) }
|
||||
}
|
||||
fn binding_error(span: Span, e: impl Into<Box<str>>) -> ExpandError {
|
||||
ExpandError { inner: Arc::new((span, ExpandErrorKind::BindingError(Box::new(e.into())))) }
|
||||
}
|
||||
}
|
||||
impl fmt::Display for ExpandError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
self.inner.1.fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for ExpandError {
|
||||
impl fmt::Display for ExpandErrorKind {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
ExpandError::NoMatchingRule => f.write_str("no rule matches input tokens"),
|
||||
ExpandError::UnexpectedToken => f.write_str("unexpected token in input"),
|
||||
ExpandError::BindingError(e) => f.write_str(e),
|
||||
ExpandError::UnresolvedBinding(binding) => {
|
||||
ExpandErrorKind::NoMatchingRule => f.write_str("no rule matches input tokens"),
|
||||
ExpandErrorKind::UnexpectedToken => f.write_str("unexpected token in input"),
|
||||
ExpandErrorKind::BindingError(e) => f.write_str(e),
|
||||
ExpandErrorKind::UnresolvedBinding(binding) => {
|
||||
f.write_str("could not find binding ")?;
|
||||
f.write_str(binding)
|
||||
}
|
||||
ExpandError::ConversionError => f.write_str("could not convert tokens"),
|
||||
ExpandError::LimitExceeded => f.write_str("Expand exceed limit"),
|
||||
ExpandError::LeftoverTokens => f.write_str("leftover tokens"),
|
||||
ExpandError::CountError(e) => e.fmt(f),
|
||||
ExpandErrorKind::LimitExceeded => f.write_str("Expand exceed limit"),
|
||||
ExpandErrorKind::LeftoverTokens => f.write_str("leftover tokens"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -248,11 +255,10 @@ impl DeclarativeMacro {
|
|||
&self,
|
||||
tt: &tt::Subtree<Span>,
|
||||
marker: impl Fn(&mut Span) + Copy,
|
||||
new_meta_vars: bool,
|
||||
call_site: Span,
|
||||
def_site_edition: Edition,
|
||||
) -> ExpandResult<(tt::Subtree<Span>, MatchedArmIndex)> {
|
||||
expander::expand_rules(&self.rules, tt, marker, new_meta_vars, call_site, def_site_edition)
|
||||
expander::expand_rules(&self.rules, tt, marker, call_site, def_site_edition)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -355,11 +361,12 @@ impl<T: Default, E> From<Result<T, E>> for ValueResult<T, E> {
|
|||
}
|
||||
}
|
||||
|
||||
fn expect_fragment<S: Copy + fmt::Debug>(
|
||||
tt_iter: &mut TtIter<'_, S>,
|
||||
fn expect_fragment(
|
||||
tt_iter: &mut TtIter<'_, Span>,
|
||||
entry_point: ::parser::PrefixEntryPoint,
|
||||
edition: ::parser::Edition,
|
||||
) -> ExpandResult<Option<tt::TokenTree<S>>> {
|
||||
delim_span: DelimSpan<Span>,
|
||||
) -> ExpandResult<Option<tt::TokenTree<Span>>> {
|
||||
use ::parser;
|
||||
let buffer = tt::buffer::TokenBuffer::from_tokens(tt_iter.as_slice());
|
||||
let parser_input = to_parser_input::to_parser_input(edition, &buffer);
|
||||
|
@ -387,7 +394,10 @@ fn expect_fragment<S: Copy + fmt::Debug>(
|
|||
}
|
||||
|
||||
let err = if error || !cursor.is_root() {
|
||||
Some(ExpandError::binding_error(format!("expected {entry_point:?}")))
|
||||
Some(ExpandError::binding_error(
|
||||
buffer.begin().token_tree().map_or(delim_span.close, |tt| tt.span()),
|
||||
format!("expected {entry_point:?}"),
|
||||
))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
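`ExpandError` is now a struct around `Arc<(Span, ExpandErrorKind)>`: clones stay cheap, every error carries the span it should be reported at, and `Display` defers to the kind. A reduced sketch of that shape (variant and field names simplified):

```rust
use std::fmt;
use std::sync::Arc;

#[derive(Debug, Clone, Copy, PartialEq)]
struct Span(u32);

#[derive(Debug)]
enum ErrorKind {
    NoMatchingRule,
    BindingError(String),
}

/// Cheap to clone: all clones share one allocation holding the span and kind.
#[derive(Debug, Clone)]
struct ExpandError {
    inner: Arc<(Span, ErrorKind)>,
}

impl ExpandError {
    fn new(span: Span, kind: ErrorKind) -> Self {
        ExpandError { inner: Arc::new((span, kind)) }
    }

    fn span(&self) -> Span {
        self.inner.0
    }
}

impl fmt::Display for ExpandError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Rendering shows only the kind; the span is consumed by the diagnostics machinery.
        match &self.inner.1 {
            ErrorKind::NoMatchingRule => f.write_str("no rule matches input tokens"),
            ErrorKind::BindingError(e) => f.write_str(e),
        }
    }
}

fn main() {
    let err = ExpandError::new(Span(42), ErrorKind::NoMatchingRule);
    let copy = err.clone(); // shares the Arc; the payload is not duplicated
    assert_eq!(copy.span(), Span(42));
    println!("{err}");
    println!("{}", ExpandError::new(Span(7), ErrorKind::BindingError("expected ident".to_owned())));
}
```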
|
|
@ -212,15 +212,12 @@ where
|
|||
}
|
||||
|
||||
/// Split token tree with separate expr: $($e:expr)SEP*
|
||||
pub fn parse_exprs_with_sep<S>(
|
||||
tt: &tt::Subtree<S>,
|
||||
pub fn parse_exprs_with_sep(
|
||||
tt: &tt::Subtree<span::Span>,
|
||||
sep: char,
|
||||
span: S,
|
||||
span: span::Span,
|
||||
edition: Edition,
|
||||
) -> Vec<tt::Subtree<S>>
|
||||
where
|
||||
S: Copy + fmt::Debug,
|
||||
{
|
||||
) -> Vec<tt::Subtree<span::Span>> {
|
||||
if tt.token_trees.is_empty() {
|
||||
return Vec::new();
|
||||
}
|
||||
|
@ -229,7 +226,12 @@ where
|
|||
let mut res = Vec::new();
|
||||
|
||||
while iter.peek_n(0).is_some() {
|
||||
let expanded = crate::expect_fragment(&mut iter, parser::PrefixEntryPoint::Expr, edition);
|
||||
let expanded = crate::expect_fragment(
|
||||
&mut iter,
|
||||
parser::PrefixEntryPoint::Expr,
|
||||
edition,
|
||||
tt::DelimSpan { open: tt.delimiter.open, close: tt.delimiter.close },
|
||||
);
|
||||
|
||||
res.push(match expanded.value {
|
||||
None => break,
|
||||
|
|
|
@ -934,7 +934,10 @@ fn project_json_to_crate_graph(
|
|||
if *is_proc_macro {
|
||||
if let Some(path) = proc_macro_dylib_path.clone() {
|
||||
let node = Ok((
|
||||
display_name.as_ref().map(|it| it.canonical_name().as_str().to_owned()),
|
||||
display_name
|
||||
.as_ref()
|
||||
.map(|it| it.canonical_name().as_str().to_owned())
|
||||
.unwrap_or_else(|| format!("crate{}", idx.0)),
|
||||
path,
|
||||
));
|
||||
proc_macros.insert(crate_graph_crate_id, node);
|
||||
|
@ -1355,8 +1358,8 @@ fn add_target_crate_root(
|
|||
);
|
||||
if let TargetKind::Lib { is_proc_macro: true } = kind {
|
||||
let proc_macro = match build_data.as_ref().map(|it| it.proc_macro_dylib_path.as_ref()) {
|
||||
Some(it) => it.cloned().map(|path| Ok((Some(cargo_name.to_owned()), path))),
|
||||
None => Some(Err("crate has not yet been built".to_owned())),
|
||||
Some(it) => it.cloned().map(|path| Ok((cargo_name.to_owned(), path))),
|
||||
None => Some(Err("proc-macro crate is missing its build data".to_owned())),
|
||||
};
|
||||
if let Some(proc_macro) = proc_macro {
|
||||
proc_macros.insert(crate_id, proc_macro);
|
||||
|
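Because `ProcMacroPaths` now requires a crate name rather than an `Option<String>`, the project-JSON path synthesizes one from the crate index when no display name is available, mirroring the `format!("crate{}", idx.0)` fallback above. A tiny sketch of that fallback (hypothetical helper):

```rust
/// Pick a proc-macro crate name, falling back to a synthetic one derived from
/// the crate's index when the project manifest provides no display name.
fn proc_macro_crate_name(display_name: Option<&str>, idx: u32) -> String {
    display_name.map(|it| it.to_owned()).unwrap_or_else(|| format!("crate{idx}"))
}

fn main() {
    assert_eq!(proc_macro_crate_name(Some("serde_derive"), 0), "serde_derive");
    assert_eq!(proc_macro_crate_name(None, 7), "crate7");
}
```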
|
|
@ -134,11 +134,6 @@ pub(crate) fn handle_memory_usage(state: &mut GlobalState, _: ()) -> anyhow::Res
|
|||
Ok(out)
|
||||
}
|
||||
|
||||
pub(crate) fn handle_shuffle_crate_graph(state: &mut GlobalState, _: ()) -> anyhow::Result<()> {
|
||||
state.analysis_host.shuffle_crate_graph();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub(crate) fn handle_syntax_tree(
|
||||
snap: GlobalStateSnapshot,
|
||||
params: lsp_ext::SyntaxTreeParams,
|
||||
|
|
|
@ -75,14 +75,6 @@ impl Request for MemoryUsage {
|
|||
const METHOD: &'static str = "rust-analyzer/memoryUsage";
|
||||
}
|
||||
|
||||
pub enum ShuffleCrateGraph {}
|
||||
|
||||
impl Request for ShuffleCrateGraph {
|
||||
type Params = ();
|
||||
type Result = ();
|
||||
const METHOD: &'static str = "rust-analyzer/shuffleCrateGraph";
|
||||
}
|
||||
|
||||
pub enum ReloadWorkspace {}
|
||||
|
||||
impl Request for ReloadWorkspace {
|
||||
|
|
|
@ -1018,7 +1018,6 @@ impl GlobalState {
|
|||
.on_sync_mut::<lsp_ext::ReloadWorkspace>(handlers::handle_workspace_reload)
|
||||
.on_sync_mut::<lsp_ext::RebuildProcMacros>(handlers::handle_proc_macros_rebuild)
|
||||
.on_sync_mut::<lsp_ext::MemoryUsage>(handlers::handle_memory_usage)
|
||||
.on_sync_mut::<lsp_ext::ShuffleCrateGraph>(handlers::handle_shuffle_crate_graph)
|
||||
.on_sync_mut::<lsp_ext::RunTest>(handlers::handle_run_test)
|
||||
// Request handlers which are related to the user typing
|
||||
// are run on the main thread to reduce latency:
|
||||
|
@ -1054,6 +1053,7 @@ impl GlobalState {
|
|||
.on::<NO_RETRY, lsp_request::GotoDeclaration>(handlers::handle_goto_declaration)
|
||||
.on::<NO_RETRY, lsp_request::GotoImplementation>(handlers::handle_goto_implementation)
|
||||
.on::<NO_RETRY, lsp_request::GotoTypeDefinition>(handlers::handle_goto_type_definition)
|
||||
// FIXME: This should not be tried as it contains offsets that can get outdated!
|
||||
.on::<RETRY, lsp_request::InlayHintRequest>(handlers::handle_inlay_hints)
|
||||
.on::<RETRY, lsp_request::InlayHintResolveRequest>(handlers::handle_inlay_hints_resolve)
|
||||
.on::<NO_RETRY, lsp_request::CodeLensRequest>(handlers::handle_code_lens)
|
||||
|
|
|
@ -16,8 +16,7 @@
|
|||
use std::{iter, mem};
|
||||
|
||||
use flycheck::{FlycheckConfig, FlycheckHandle};
|
||||
use hir::{db::DefDatabase, ChangeWithProcMacros, ProcMacros};
|
||||
use ide::CrateId;
|
||||
use hir::{db::DefDatabase, ChangeWithProcMacros, ProcMacros, ProcMacrosBuilder};
|
||||
use ide_db::{
|
||||
base_db::{salsa::Durability, CrateGraph, ProcMacroPaths, Version},
|
||||
FxHashMap,
|
||||
|
@ -371,43 +370,44 @@ impl GlobalState {
|
|||
}
|
||||
};
|
||||
|
||||
let mut res = FxHashMap::default();
|
||||
let mut builder = ProcMacrosBuilder::default();
|
||||
let chain = proc_macro_clients
|
||||
.iter()
|
||||
.map(|res| res.as_ref().map_err(|e| e.to_string()))
|
||||
.chain(iter::repeat_with(|| Err("Proc macros servers are not running".into())));
|
||||
.chain(iter::repeat_with(|| Err("proc-macro-srv is not running".into())));
|
||||
for (client, paths) in chain.zip(paths) {
|
||||
res.extend(paths.into_iter().map(move |(crate_id, res)| {
|
||||
(
|
||||
crate_id,
|
||||
res.map_or_else(
|
||||
|_| Err("proc macro crate is missing dylib".to_owned()),
|
||||
|(crate_name, path)| {
|
||||
progress(path.to_string());
|
||||
client.as_ref().map_err(Clone::clone).and_then(|client| {
|
||||
load_proc_macro(
|
||||
client,
|
||||
&path,
|
||||
crate_name
|
||||
.as_deref()
|
||||
.and_then(|crate_name| {
|
||||
ignored_proc_macros.iter().find_map(
|
||||
|(name, macros)| {
|
||||
eq_ignore_underscore(name, crate_name)
|
||||
paths
|
||||
.into_iter()
|
||||
.map(move |(crate_id, res)| {
|
||||
(
|
||||
crate_id,
|
||||
res.map_or_else(
|
||||
|e| Err((e, true)),
|
||||
|(crate_name, path)| {
|
||||
progress(path.to_string());
|
||||
client.as_ref().map_err(|it| (it.clone(), true)).and_then(
|
||||
|client| {
|
||||
load_proc_macro(
|
||||
client,
|
||||
&path,
|
||||
ignored_proc_macros
|
||||
.iter()
|
||||
.find_map(|(name, macros)| {
|
||||
eq_ignore_underscore(name, &crate_name)
|
||||
.then_some(&**macros)
|
||||
},
|
||||
)
|
||||
})
|
||||
.unwrap_or_default(),
|
||||
})
|
||||
.unwrap_or_default(),
|
||||
)
|
||||
},
|
||||
)
|
||||
})
|
||||
},
|
||||
),
|
||||
)
|
||||
}));
|
||||
},
|
||||
),
|
||||
)
|
||||
})
|
||||
.for_each(|(krate, res)| builder.insert(krate, res));
|
||||
}
|
||||
|
||||
sender.send(Task::LoadProcMacros(ProcMacroProgress::End(res))).unwrap();
|
||||
sender.send(Task::LoadProcMacros(ProcMacroProgress::End(builder.build()))).unwrap();
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -667,10 +667,17 @@ impl GlobalState {
|
|||
change.set_proc_macros(
|
||||
crate_graph
|
||||
.iter()
|
||||
.map(|id| (id, Err("Proc-macros have not been built yet".to_owned())))
|
||||
.map(|id| (id, Err(("proc-macro has not been built yet".to_owned(), true))))
|
||||
.collect(),
|
||||
);
|
||||
self.fetch_proc_macros_queue.request_op(cause, proc_macro_paths);
|
||||
} else {
|
||||
change.set_proc_macros(
|
||||
crate_graph
|
||||
.iter()
|
||||
.map(|id| (id, Err(("proc-macro expansion is disabled".to_owned(), false))))
|
||||
.collect(),
|
||||
);
|
||||
}
|
||||
change.set_crate_graph(crate_graph);
|
||||
change.set_target_data_layouts(layouts);
|
||||
|
@ -809,12 +816,7 @@ pub fn ws_to_crate_graph(
    workspaces: &[ProjectWorkspace],
    extra_env: &FxHashMap<String, String>,
    mut load: impl FnMut(&AbsPath) -> Option<vfs::FileId>,
) -> (
    CrateGraph,
    Vec<FxHashMap<CrateId, Result<(Option<String>, AbsPathBuf), String>>>,
    Vec<Result<Arc<str>, Arc<str>>>,
    Vec<Option<Version>>,
) {
) -> (CrateGraph, Vec<ProcMacroPaths>, Vec<Result<Arc<str>, Arc<str>>>, Vec<Option<Version>>) {
    let mut crate_graph = CrateGraph::default();
    let mut proc_macro_paths = Vec::default();
    let mut layouts = Vec::default();

@ -33,6 +33,16 @@ pub const FIXUP_ERASED_FILE_AST_ID_MARKER: ErasedFileAstId =

pub type Span = SpanData<SyntaxContextId>;

impl Span {
    pub fn cover(self, other: Span) -> Span {
        if self.anchor != other.anchor {
            return self;
        }
        let range = self.range.cover(other.range);
        Span { range, ..self }
    }
}

/// Spans represent a region of code, used by the IDE to be able link macro inputs and outputs
/// together. Positions in spans are relative to some [`SpanAnchor`] to make them more incremental
/// friendly.

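The new `Span::cover` helper merges two spans into one that encloses both, but only when they share the same anchor; spans relative to different anchors cannot be combined meaningfully, so the left-hand span is returned unchanged. A self-contained sketch of the same rule with simplified stand-in types (`MiniSpan` and its `u32` anchor are illustrative, not the real `SpanData`/`SpanAnchor`):

use std::ops::Range;

// Simplified stand-ins for the real `SpanData`/`SpanAnchor` types.
#[derive(Clone, Debug, PartialEq)]
struct MiniSpan {
    anchor: u32,       // which item the range is relative to
    range: Range<u32>, // text range relative to that anchor
}

impl MiniSpan {
    // Mirror of the `cover` logic above: only merge ranges with the same anchor.
    fn cover(self, other: MiniSpan) -> MiniSpan {
        if self.anchor != other.anchor {
            return self;
        }
        let start = self.range.start.min(other.range.start);
        let end = self.range.end.max(other.range.end);
        MiniSpan { range: start..end, ..self }
    }
}

fn main() {
    let a = MiniSpan { anchor: 1, range: 4..10 };
    let b = MiniSpan { anchor: 1, range: 8..20 };
    assert_eq!(a.clone().cover(b).range, 4..20);

    // Different anchors: the left-hand span wins unchanged.
    let c = MiniSpan { anchor: 2, range: 0..3 };
    assert_eq!(a.clone().cover(c).range, 4..10);
}
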
@ -1,5 +1,5 @@
//! A set of high-level utility fixture methods to use in tests.
use std::{iter, mem, ops::Not, str::FromStr, sync};
use std::{iter, mem, str::FromStr, sync};

use base_db::{
    CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Env, FileChange,

@ -11,7 +11,7 @@ use hir_expand::{
    db::ExpandDatabase,
    files::FilePosition,
    proc_macro::{
        ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind, ProcMacros,
        ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind, ProcMacrosBuilder,
    },
    FileRange,
};

@ -303,7 +303,7 @@ impl ChangeFixture {
            }
        }

        let mut proc_macros = ProcMacros::default();
        let mut proc_macros = ProcMacrosBuilder::default();
        if !proc_macro_names.is_empty() {
            let proc_lib_file = file_id;

@ -354,7 +354,7 @@ impl ChangeFixture {

        let mut change = ChangeWithProcMacros {
            source_change,
            proc_macros: proc_macros.is_empty().not().then_some(proc_macros),
            proc_macros: Some(proc_macros.build()),
            toolchains: Some(iter::repeat(toolchain).take(crate_graph.len()).collect()),
            target_data_layouts: Some(
                iter::repeat(target_data_layout).take(crate_graph.len()).collect(),

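The fixture now always goes through `ProcMacrosBuilder` and finishes with `.build()` instead of conditionally wrapping a raw map. A rough sketch of that builder-over-map shape, using placeholder types (the real builder lives in hir-expand and collects per-crate load results keyed by `CrateId`):

use std::collections::HashMap;

// Placeholder stand-ins; the real keys are `CrateId`s and the real values are
// proc-macro load results.
type CrateId = u32;
type LoadResult = Result<Vec<String>, String>;

#[derive(Default)]
struct ProcMacrosBuilder {
    map: HashMap<CrateId, LoadResult>,
}

struct ProcMacros {
    map: HashMap<CrateId, LoadResult>,
}

impl ProcMacrosBuilder {
    // Record the load result for one crate.
    fn insert(&mut self, krate: CrateId, res: LoadResult) {
        self.map.insert(krate, res);
    }
    // Freeze the accumulated results into the final, immutable map.
    fn build(self) -> ProcMacros {
        ProcMacros { map: self.map }
    }
}

fn main() {
    let mut builder = ProcMacrosBuilder::default();
    builder.insert(0, Ok(vec!["derive_thing".to_owned()]));
    builder.insert(1, Err("proc-macro has not been built yet".to_owned()));
    let proc_macros = builder.build();
    assert_eq!(proc_macros.map.len(), 2);
}
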
@ -134,6 +134,15 @@ pub enum TokenTreeRef<'a, Span> {
    Leaf(&'a Leaf<Span>, &'a TokenTree<Span>),
}

impl<'a, Span: Copy> TokenTreeRef<'a, Span> {
    pub fn span(&self) -> Span {
        match self {
            TokenTreeRef::Subtree(subtree, _) => subtree.delimiter.open,
            TokenTreeRef::Leaf(leaf, _) => *leaf.span(),
        }
    }
}

impl<Span: Clone> TokenTreeRef<'_, Span> {
    pub fn cloned(&self) -> TokenTree<Span> {
        match self {

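The new `TokenTreeRef::span` accessor picks one representative span for a token tree: the opening delimiter's span for a subtree, the leaf's own span for a leaf. A simplified, self-contained sketch of that rule (`Tok` and its fields are illustrative stand-ins, not the real `tt` types):

// Illustrative miniature of the token-tree shape.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Span(u32);

enum Tok {
    // A subtree carries the span of its opening delimiter plus children.
    Subtree { open_span: Span, children: Vec<Tok> },
    // A leaf (ident, literal, punct) carries its own span.
    Leaf { span: Span },
}

impl Tok {
    // Same rule as `TokenTreeRef::span` above: delimiter span for subtrees,
    // leaf span for leaves.
    fn span(&self) -> Span {
        match self {
            Tok::Subtree { open_span, .. } => *open_span,
            Tok::Leaf { span } => *span,
        }
    }
}

fn main() {
    let tree = Tok::Subtree {
        open_span: Span(7),
        children: vec![Tok::Leaf { span: Span(8) }],
    };
    assert_eq!(tree.span(), Span(7));
}
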
@ -143,6 +143,10 @@ impl<'a, S: Copy> TtIter<'a, S> {
        self.inner.as_slice().get(n)
    }

    pub fn next_span(&self) -> Option<S> {
        Some(self.inner.as_slice().first()?.first_span())
    }

    pub fn as_slice(&self) -> &'a [TokenTree<S>] {
        self.inner.as_slice()
    }

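`next_span` peeks at the span of the upcoming token tree without advancing the iterator, which lets expansion code point an error at the position it is about to parse. A tiny sketch of the same peek-without-consume pattern over a plain slice iterator, with `u32` spans standing in for the real span type:

use std::slice;

// Minimal peeking wrapper mirroring the `next_span` idea: look at the span of
// the next token without consuming it.
struct MiniIter<'a> {
    inner: slice::Iter<'a, u32>,
}

impl MiniIter<'_> {
    fn next_span(&self) -> Option<u32> {
        self.inner.as_slice().first().copied()
    }
}

fn main() {
    let spans: [u32; 3] = [10, 11, 12];
    let iter = MiniIter { inner: spans.iter() };
    // Peeking twice returns the same span; nothing was consumed.
    assert_eq!(iter.next_span(), Some(10));
    assert_eq!(iter.next_span(), Some(10));
}
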
@ -1,5 +1,5 @@
<!---
lsp/ext.rs hash: f41950db4c7b3a5a
lsp/ext.rs hash: e92e1f12229b0071

If you need to change the above hash to make the test pass, please check if you
need to adjust this doc as well and ping this issue:

@ -789,14 +789,6 @@ Renders rust-analyzer's crate graph as an SVG image.

If `full` is `true`, the graph includes non-workspace crates (crates.io dependencies as well as sysroot crates).

## Shuffle Crate Graph

**Method:** `rust-analyzer/shuffleCrateGraph`

**Request:** `null`

Shuffles the crate IDs in the crate graph, for debugging purposes.

## Expand Macro

**Method:** `rust-analyzer/expandMacro`

@ -136,11 +136,6 @@
            "title": "Debug ItemTree",
            "category": "rust-analyzer (debug command)"
        },
        {
            "command": "rust-analyzer.shuffleCrateGraph",
            "title": "Shuffle Crate Graph",
            "category": "rust-analyzer (debug command)"
        },
        {
            "command": "rust-analyzer.memoryUsage",
            "title": "Memory Usage (Clears Database)",

@ -100,12 +100,6 @@ export function memoryUsage(ctx: CtxInit): Cmd {
    };
}

export function shuffleCrateGraph(ctx: CtxInit): Cmd {
    return async () => {
        return ctx.client.sendRequest(ra.shuffleCrateGraph);
    };
}

export function triggerParameterHints(_: CtxInit): Cmd {
    return async () => {
        const parameterHintsEnabled = vscode.workspace

@ -45,7 +45,6 @@ export const rebuildProcMacros = new lc.RequestType0<null, void>("rust-analyzer/
export const runFlycheck = new lc.NotificationType<{
    textDocument: lc.TextDocumentIdentifier | null;
}>("rust-analyzer/runFlycheck");
export const shuffleCrateGraph = new lc.RequestType0<null, void>("rust-analyzer/shuffleCrateGraph");
export const syntaxTree = new lc.RequestType<SyntaxTreeParams, string, void>(
    "rust-analyzer/syntaxTree",
);

@ -141,7 +141,6 @@ function createCommands(): Record<string, CommandFactory> {

        analyzerStatus: { enabled: commands.analyzerStatus },
        memoryUsage: { enabled: commands.memoryUsage },
        shuffleCrateGraph: { enabled: commands.shuffleCrateGraph },
        reloadWorkspace: { enabled: commands.reloadWorkspace },
        rebuildProcMacros: { enabled: commands.rebuildProcMacros },
        matchingBrace: { enabled: commands.matchingBrace },