Mirror of https://github.com/rust-lang/rust-analyzer, synced 2024-12-27 05:23:24 +00:00
Merge #2479
2479: Add expansion infrastructure for derive macros r=matklad a=flodiebold

I thought I'd experiment a bit with attribute macro/derive expansion, and here's what I've got so far. It has dummy implementations of the Copy / Clone derives, to show that the approach works; it doesn't add any attribute macro support, but I think that fits into the architecture.

Basically, during raw item collection, we look at the attributes and generate macro calls for them if necessary. Currently I only do this for derives, and just add the derive macro calls as separate calls next to the item. I think for derives, it's important that they don't obscure the actual item, since they can't actually change it (e.g. sending the item token tree through macro expansion unnecessarily might make completion within it more complicated). Attribute macros would have to be recognized at that stage and replace the item (i.e., the raw item collector will just emit an attribute macro call, and not the item). When we implement this, we should try to recognize known inert attributes, so that we don't do macro expansion unnecessarily; anything that isn't known needs to be treated as a possible attribute macro call (since the raw item collector can't resolve the macro yet).

There's basically no name resolution for attribute macros implemented; I just hardcoded the built-in derives. In the future, the built-ins should work within the normal name resolution infrastructure; the problem there is that the builtin stubs in `std` use macros 2.0, which we don't support yet (and adding support for those is outside the scope of this change).

One aspect that I don't really have a solution for, but don't know how important it is, is removing the attribute itself from its input. I'm pretty sure rustc leaves the attribute macro out of the input, but to do that we'd have to create a completely new syntax node; I guess we could do it when / after converting to a token tree.

Co-authored-by: Florian Diebold <flodiebold@gmail.com>
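To make the approach concrete, here is a small illustration based on the tests added in this PR (see test_copy_expand_simple in crates/ra_hir_expand/src/builtin_derive.rs below); the dummy built-in derives emit only an empty impl, and the output is rendered straight from a token tree, which is why it has no whitespace:

// Test fixture fed to the dummy Copy derive expander:
#[derive(Copy)]
struct Foo;

// Expansion the test asserts, verbatim:
//     impl <>std::marker::CopyforFoo <>{}
// i.e. conceptually `impl std::marker::Copy for Foo {}`: an empty impl, just
// enough for the type-inference tests further down to see the derived trait
// on the type.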
Commit 6e10a9f578: 20 changed files with 632 additions and 81 deletions
@@ -105,7 +105,10 @@ impl HasSource for TypeAlias {
 impl HasSource for MacroDef {
     type Ast = ast::MacroCall;
     fn source(self, db: &impl DefDatabase) -> InFile<ast::MacroCall> {
-        InFile { file_id: self.id.ast_id.file_id, value: self.id.ast_id.to_node(db) }
+        InFile {
+            file_id: self.id.ast_id.expect("MacroDef without ast_id").file_id,
+            value: self.id.ast_id.expect("MacroDef without ast_id").to_node(db),
+        }
     }
 }
 impl HasSource for ImplBlock {
@@ -93,9 +93,9 @@ impl FromSource for MacroDef {

         let module_src = ModuleSource::from_child_node(db, src.as_ref().map(|it| it.syntax()));
         let module = Module::from_definition(db, InFile::new(src.file_id, module_src))?;
-        let krate = module.krate().crate_id();
+        let krate = Some(module.krate().crate_id());

-        let ast_id = AstId::new(src.file_id, db.ast_id_map(src.file_id).ast_id(&src.value));
+        let ast_id = Some(AstId::new(src.file_id, db.ast_id_map(src.file_id).ast_id(&src.value)));

         let id: MacroDefId = MacroDefId { krate, ast_id, kind };
         Some(MacroDef { id })
@@ -20,7 +20,8 @@ use hir_def::{
     AssocItemId, DefWithBodyId,
 };
 use hir_expand::{
-    hygiene::Hygiene, name::AsName, AstId, HirFileId, InFile, MacroCallId, MacroFileKind,
+    hygiene::Hygiene, name::AsName, AstId, HirFileId, InFile, MacroCallId, MacroCallKind,
+    MacroFileKind,
 };
 use ra_syntax::{
     ast::{self, AstNode},
@@ -456,7 +457,7 @@ impl SourceAnalyzer {
             db.ast_id_map(macro_call.file_id).ast_id(macro_call.value),
         );
         Some(Expansion {
-            macro_call_id: def.as_call_id(db, ast_id),
+            macro_call_id: def.as_call_id(db, MacroCallKind::FnLike(ast_id)),
             macro_file_kind: to_macro_file_kind(macro_call.value),
         })
     }
@@ -61,7 +61,9 @@ impl Attrs {
                 AdtId::UnionId(it) => attrs_from_ast(it.lookup_intern(db).ast_id, db),
             },
             AttrDefId::TraitId(it) => attrs_from_ast(it.lookup_intern(db).ast_id, db),
-            AttrDefId::MacroDefId(it) => attrs_from_ast(it.ast_id, db),
+            AttrDefId::MacroDefId(it) => {
+                it.ast_id.map_or_else(Default::default, |ast_id| attrs_from_ast(ast_id, db))
+            }
             AttrDefId::ImplId(it) => attrs_from_ast(it.lookup_intern(db).ast_id, db),
             AttrDefId::ConstId(it) => attrs_from_loc(it.lookup(db), db),
             AttrDefId::StaticId(it) => attrs_from_loc(it.lookup(db), db),
@@ -6,7 +6,9 @@ pub mod scope;
 use std::{ops::Index, sync::Arc};

 use either::Either;
-use hir_expand::{hygiene::Hygiene, AstId, HirFileId, InFile, MacroDefId, MacroFileKind};
+use hir_expand::{
+    hygiene::Hygiene, AstId, HirFileId, InFile, MacroCallKind, MacroDefId, MacroFileKind,
+};
 use ra_arena::{map::ArenaMap, Arena};
 use ra_syntax::{ast, AstNode, AstPtr};
 use rustc_hash::FxHashMap;
@@ -46,7 +48,7 @@ impl Expander {

         if let Some(path) = macro_call.path().and_then(|path| self.parse_path(path)) {
             if let Some(def) = self.resolve_path_as_macro(db, &path) {
-                let call_id = def.as_call_id(db, ast_id);
+                let call_id = def.as_call_id(db, MacroCallKind::FnLike(ast_id));
                 let file_id = call_id.as_file(MacroFileKind::Expr);
                 if let Some(node) = db.parse_or_expand(file_id) {
                     if let Some(expr) = ast::Expr::cast(node) {
@@ -60,7 +60,7 @@ impl Documentation {
                 docs_from_ast(&src.value[it.local_id])
             }
             AttrDefId::TraitId(it) => docs_from_ast(&it.source(db).value),
-            AttrDefId::MacroDefId(it) => docs_from_ast(&it.ast_id.to_node(db)),
+            AttrDefId::MacroDefId(it) => docs_from_ast(&it.ast_id?.to_node(db)),
             AttrDefId::ConstId(it) => docs_from_ast(&it.lookup(db).source(db).value),
             AttrDefId::StaticId(it) => docs_from_ast(&it.lookup(db).source(db).value),
             AttrDefId::FunctionId(it) => docs_from_ast(&it.lookup(db).source(db).value),
@@ -4,9 +4,10 @@
 //! resolves imports and expands macros.

 use hir_expand::{
+    builtin_derive::find_builtin_derive,
     builtin_macro::find_builtin_macro,
     name::{self, AsName, Name},
-    HirFileId, MacroCallId, MacroDefId, MacroDefKind, MacroFileKind,
+    HirFileId, MacroCallId, MacroCallKind, MacroDefId, MacroDefKind, MacroFileKind,
 };
 use ra_cfg::CfgOptions;
 use ra_db::{CrateId, FileId};
@@ -58,6 +59,7 @@ pub(super) fn collect_defs(db: &impl DefDatabase, mut def_map: CrateDefMap) -> C
         glob_imports: FxHashMap::default(),
         unresolved_imports: Vec::new(),
         unexpanded_macros: Vec::new(),
+        unexpanded_attribute_macros: Vec::new(),
         mod_dirs: FxHashMap::default(),
         macro_stack_monitor: MacroStackMonitor::default(),
         poison_macros: FxHashSet::default(),
@@ -102,6 +104,7 @@ struct DefCollector<'a, DB> {
     glob_imports: FxHashMap<LocalModuleId, Vec<(LocalModuleId, LocalImportId)>>,
     unresolved_imports: Vec<(LocalModuleId, LocalImportId, raw::ImportData)>,
     unexpanded_macros: Vec<(LocalModuleId, AstId<ast::MacroCall>, Path)>,
+    unexpanded_attribute_macros: Vec<(LocalModuleId, AstId<ast::ModuleItem>, Path)>,
     mod_dirs: FxHashMap<LocalModuleId, ModDir>,

     /// Some macro use `$tt:tt which mean we have to handle the macro perfectly
@@ -470,6 +473,8 @@

     fn resolve_macros(&mut self) -> ReachedFixedPoint {
         let mut macros = std::mem::replace(&mut self.unexpanded_macros, Vec::new());
+        let mut attribute_macros =
+            std::mem::replace(&mut self.unexpanded_attribute_macros, Vec::new());
         let mut resolved = Vec::new();
         let mut res = ReachedFixedPoint::Yes;
         macros.retain(|(module_id, ast_id, path)| {
@@ -482,7 +487,19 @@
             );

             if let Some(def) = resolved_res.resolved_def.take_macros() {
-                let call_id = def.as_call_id(self.db, *ast_id);
+                let call_id = def.as_call_id(self.db, MacroCallKind::FnLike(*ast_id));
+                resolved.push((*module_id, call_id, def));
+                res = ReachedFixedPoint::No;
+                return false;
+            }
+
+            true
+        });
+        attribute_macros.retain(|(module_id, ast_id, path)| {
+            let resolved_res = self.resolve_attribute_macro(path);
+
+            if let Some(def) = resolved_res {
+                let call_id = def.as_call_id(self.db, MacroCallKind::Attr(*ast_id));
                 resolved.push((*module_id, call_id, def));
                 res = ReachedFixedPoint::No;
                 return false;
@@ -492,6 +509,7 @@
         });

         self.unexpanded_macros = macros;
+        self.unexpanded_attribute_macros = attribute_macros;

         for (module_id, macro_call_id, macro_def_id) in resolved {
             self.collect_macro_expansion(module_id, macro_call_id, macro_def_id);
@@ -500,6 +518,20 @@
         res
     }

+    fn resolve_attribute_macro(&self, path: &Path) -> Option<MacroDefId> {
+        // FIXME this is currently super hacky, just enough to support the
+        // built-in derives
+        if let Some(name) = path.as_ident() {
+            // FIXME this should actually be handled with the normal name
+            // resolution; the std lib defines built-in stubs for the derives,
+            // but these are new-style `macro`s, which we don't support yet
+            if let Some(def_id) = find_builtin_derive(name) {
+                return Some(def_id);
+            }
+        }
+        None
+    }
+
     fn collect_macro_expansion(
         &mut self,
         module_id: LocalModuleId,
@@ -587,7 +619,9 @@
                 .def_collector
                 .unresolved_imports
                 .push((self.module_id, import_id, self.raw_items[import_id].clone())),
-            raw::RawItemKind::Def(def) => self.define_def(&self.raw_items[def]),
+            raw::RawItemKind::Def(def) => {
+                self.define_def(&self.raw_items[def], &item.attrs)
+            }
             raw::RawItemKind::Macro(mac) => self.collect_macro(&self.raw_items[mac]),
             raw::RawItemKind::Impl(imp) => {
                 let module = ModuleId {
@@ -682,10 +716,16 @@
         res
     }

-    fn define_def(&mut self, def: &raw::DefData) {
+    fn define_def(&mut self, def: &raw::DefData, attrs: &Attrs) {
         let module = ModuleId { krate: self.def_collector.def_map.krate, local_id: self.module_id };
         let ctx = LocationCtx::new(self.def_collector.db, module, self.file_id);

+        // FIXME: check attrs to see if this is an attribute macro invocation;
+        // in which case we don't add the invocation, just a single attribute
+        // macro invocation
+
+        self.collect_derives(attrs, def);
+
         let name = def.name.clone();
         let def: PerNs = match def.kind {
             raw::DefKind::Function(ast_id) => {
@@ -736,6 +776,23 @@
         self.def_collector.update(self.module_id, None, &[(name, resolution)])
     }

+    fn collect_derives(&mut self, attrs: &Attrs, def: &raw::DefData) {
+        for derive_subtree in attrs.by_key("derive").tt_values() {
+            // for #[derive(Copy, Clone)], `derive_subtree` is the `(Copy, Clone)` subtree
+            for tt in &derive_subtree.token_trees {
+                let ident = match &tt {
+                    tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => ident,
+                    tt::TokenTree::Leaf(tt::Leaf::Punct(_)) => continue, // , is ok
+                    _ => continue, // anything else would be an error (which we currently ignore)
+                };
+                let path = Path::from_tt_ident(ident);
+
+                let ast_id = AstId::new(self.file_id, def.kind.ast_id());
+                self.def_collector.unexpanded_attribute_macros.push((self.module_id, ast_id, path));
+            }
+        }
+    }
+
     fn collect_macro(&mut self, mac: &raw::MacroData) {
         let ast_id = AstId::new(self.file_id, mac.ast_id);

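To make the collection step concrete: for `#[derive(Copy, Clone)]`, collect_derives above walks the `(Copy, Clone)` subtree, skips the comma punct, and queues one unexpanded attribute-macro call per identifier. A minimal standalone analogue of that filtering, using plain strings instead of the crate's `tt` types (the helper name `derive_idents` is hypothetical, not part of the PR):

// Toy analogue of collect_derives: keep only the identifiers from a derive
// list, skipping separator punctuation. In the real collector each surviving
// name becomes one pending (module_id, ast_id, path) entry in
// `unexpanded_attribute_macros`.
fn derive_idents(tokens: &[&str]) -> Vec<String> {
    tokens
        .iter()
        .filter(|tok| tok.chars().all(|c| c.is_alphanumeric() || c == '_'))
        .map(|tok| tok.to_string())
        .collect()
}

fn main() {
    // #[derive(Copy, Clone)] arrives as the token list ["Copy", ",", "Clone"]
    assert_eq!(derive_idents(&["Copy", ",", "Clone"]), vec!["Copy", "Clone"]);
}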
@@ -759,8 +816,8 @@
         if is_macro_rules(&mac.path) {
             if let Some(name) = &mac.name {
                 let macro_id = MacroDefId {
-                    ast_id,
-                    krate: self.def_collector.def_map.krate,
+                    ast_id: Some(ast_id),
+                    krate: Some(self.def_collector.def_map.krate),
                     kind: MacroDefKind::Declarative,
                 };
                 self.def_collector.define_macro(self.module_id, name.clone(), macro_id, mac.export);
@@ -773,7 +830,8 @@
         if let Some(macro_def) = mac.path.as_ident().and_then(|name| {
             self.def_collector.def_map[self.module_id].scope.get_legacy_macro(&name)
         }) {
-            let macro_call_id = macro_def.as_call_id(self.def_collector.db, ast_id);
+            let macro_call_id =
+                macro_def.as_call_id(self.def_collector.db, MacroCallKind::FnLike(ast_id));

             self.def_collector.collect_macro_expansion(self.module_id, macro_call_id, macro_def);
             return;
@@ -829,6 +887,7 @@ mod tests {
             glob_imports: FxHashMap::default(),
             unresolved_imports: Vec::new(),
             unexpanded_macros: Vec::new(),
+            unexpanded_attribute_macros: Vec::new(),
             mod_dirs: FxHashMap::default(),
             macro_stack_monitor: monitor,
             poison_macros: FxHashSet::default(),
@@ -184,6 +184,21 @@ pub(super) enum DefKind {
     TypeAlias(FileAstId<ast::TypeAliasDef>),
 }

+impl DefKind {
+    pub fn ast_id(&self) -> FileAstId<ast::ModuleItem> {
+        match self {
+            DefKind::Function(it) => it.upcast(),
+            DefKind::Struct(it) => it.upcast(),
+            DefKind::Union(it) => it.upcast(),
+            DefKind::Enum(it) => it.upcast(),
+            DefKind::Const(it) => it.upcast(),
+            DefKind::Static(it) => it.upcast(),
+            DefKind::Trait(it) => it.upcast(),
+            DefKind::TypeAlias(it) => it.upcast(),
+        }
+    }
+}
+
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub(super) struct Macro(RawId);
 impl_arena_id!(Macro);
@@ -600,3 +600,27 @@ fn macro_dollar_crate_is_correct_in_indirect_deps() {
    ⋮bar: t v
    "###);
 }
+
+#[test]
+fn expand_derive() {
+    let map = compute_crate_def_map(
+        "
+        //- /main.rs
+        #[derive(Clone)]
+        struct Foo;
+        ",
+    );
+    assert_eq!(map.modules[map.root].impls.len(), 1);
+}
+
+#[test]
+fn expand_multiple_derive() {
+    let map = compute_crate_def_map(
+        "
+        //- /main.rs
+        #[derive(Copy, Clone)]
+        struct Foo;
+        ",
+    );
+    assert_eq!(map.modules[map.root].impls.len(), 2);
+}
@@ -199,6 +199,11 @@ impl Path {
         name_ref.as_name().into()
     }

+    /// Converts an `tt::Ident` into a single-identifier `Path`.
+    pub(crate) fn from_tt_ident(ident: &tt::Ident) -> Path {
+        ident.as_name().into()
+    }
+
     /// `true` is this path is a single identifier, like `foo`
     pub fn is_ident(&self) -> bool {
         self.kind == PathKind::Plain && self.segments.len() == 1
@@ -39,6 +39,16 @@ impl<N: AstNode> Hash for FileAstId<N> {
     }
 }

+impl<N: AstNode> FileAstId<N> {
+    // Can't make this a From implementation because of coherence
+    pub fn upcast<M: AstNode>(self) -> FileAstId<M>
+    where
+        M: From<N>,
+    {
+        FileAstId { raw: self.raw, _ty: PhantomData }
+    }
+}
+
 #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
 struct ErasedFileAstId(RawId);
 impl_arena_id!(ErasedFileAstId);
@@ -53,7 +63,7 @@ impl AstIdMap {
     pub(crate) fn from_source(node: &SyntaxNode) -> AstIdMap {
         assert!(node.parent().is_none());
         let mut res = AstIdMap { arena: Arena::default() };
-        // By walking the tree in bread-first order we make sure that parents
+        // By walking the tree in breadth-first order we make sure that parents
        // get lower ids then children. That is, adding a new child does not
        // change parent's id. This means that, say, adding a new function to a
        // trait does not change ids of top-level items, which helps caching.
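The comment fixed above states the key invariant: because the tree is walked breadth-first, every parent is numbered before any of its children, so adding a nested item only appends new ids and never renumbers existing top-level items. A small self-contained sketch of that numbering (a toy Node type, not the real AstIdMap):

use std::collections::VecDeque;

// Toy illustration of breadth-first numbering: the position at which a node
// is pushed into `order` plays the role of its id.
struct Node {
    name: &'static str,
    children: Vec<Node>,
}

fn bfs_order(root: &Node) -> Vec<&'static str> {
    let mut order = Vec::new();
    let mut queue = VecDeque::new();
    queue.push_back(root);
    while let Some(node) = queue.pop_front() {
        order.push(node.name);
        queue.extend(node.children.iter());
    }
    order
}

fn main() {
    let file = Node {
        name: "file",
        children: vec![
            Node {
                name: "trait Foo",
                children: vec![Node { name: "fn a", children: vec![] }],
            },
            Node { name: "fn main", children: vec![] },
        ],
    };
    // "fn main" keeps id 2 even if "trait Foo" later gains more methods,
    // because new children are only visited after all existing items.
    assert_eq!(bfs_order(&file), ["file", "trait Foo", "fn main", "fn a"]);
}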
crates/ra_hir_expand/src/builtin_derive.rs (new file, 301 lines)
@ -0,0 +1,301 @@
|
|||
//! Builtin derives.
|
||||
|
||||
use log::debug;
|
||||
|
||||
use ra_parser::FragmentKind;
|
||||
use ra_syntax::{
|
||||
ast::{self, AstNode, ModuleItemOwner, NameOwner, TypeParamsOwner},
|
||||
match_ast,
|
||||
};
|
||||
|
||||
use crate::db::AstDatabase;
|
||||
use crate::{name, quote, MacroCallId, MacroDefId, MacroDefKind};
|
||||
|
||||
macro_rules! register_builtin {
|
||||
( $(($name:ident, $kind: ident) => $expand:ident),* ) => {
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub enum BuiltinDeriveExpander {
|
||||
$($kind),*
|
||||
}
|
||||
|
||||
impl BuiltinDeriveExpander {
|
||||
pub fn expand(
|
||||
&self,
|
||||
db: &dyn AstDatabase,
|
||||
id: MacroCallId,
|
||||
tt: &tt::Subtree,
|
||||
) -> Result<tt::Subtree, mbe::ExpandError> {
|
||||
let expander = match *self {
|
||||
$( BuiltinDeriveExpander::$kind => $expand, )*
|
||||
};
|
||||
expander(db, id, tt)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn find_builtin_derive(ident: &name::Name) -> Option<MacroDefId> {
|
||||
let kind = match ident {
|
||||
$( id if id == &name::$name => BuiltinDeriveExpander::$kind, )*
|
||||
_ => return None,
|
||||
};
|
||||
|
||||
Some(MacroDefId { krate: None, ast_id: None, kind: MacroDefKind::BuiltInDerive(kind) })
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
register_builtin! {
|
||||
(COPY_TRAIT, Copy) => copy_expand,
|
||||
(CLONE_TRAIT, Clone) => clone_expand,
|
||||
(DEFAULT_TRAIT, Default) => default_expand,
|
||||
(DEBUG_TRAIT, Debug) => debug_expand,
|
||||
(HASH_TRAIT, Hash) => hash_expand,
|
||||
(ORD_TRAIT, Ord) => ord_expand,
|
||||
(PARTIAL_ORD_TRAIT, PartialOrd) => partial_ord_expand,
|
||||
(EQ_TRAIT, Eq) => eq_expand,
|
||||
(PARTIAL_EQ_TRAIT, PartialEq) => partial_eq_expand
|
||||
}
|
||||
|
||||
struct BasicAdtInfo {
|
||||
name: tt::Ident,
|
||||
type_params: usize,
|
||||
}
|
||||
|
||||
fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, mbe::ExpandError> {
|
||||
let (parsed, token_map) = mbe::token_tree_to_syntax_node(tt, FragmentKind::Items)?; // FragmentKind::Items doesn't parse attrs?
|
||||
let macro_items = ast::MacroItems::cast(parsed.syntax_node()).ok_or_else(|| {
|
||||
debug!("derive node didn't parse");
|
||||
mbe::ExpandError::UnexpectedToken
|
||||
})?;
|
||||
let item = macro_items.items().next().ok_or_else(|| {
|
||||
debug!("no module item parsed");
|
||||
mbe::ExpandError::NoMatchingRule
|
||||
})?;
|
||||
let node = item.syntax();
|
||||
let (name, params) = match_ast! {
|
||||
match node {
|
||||
ast::StructDef(it) => { (it.name(), it.type_param_list()) },
|
||||
ast::EnumDef(it) => { (it.name(), it.type_param_list()) },
|
||||
ast::UnionDef(it) => { (it.name(), it.type_param_list()) },
|
||||
_ => {
|
||||
debug!("unexpected node is {:?}", node);
|
||||
return Err(mbe::ExpandError::ConversionError)
|
||||
},
|
||||
}
|
||||
};
|
||||
let name = name.ok_or_else(|| {
|
||||
debug!("parsed item has no name");
|
||||
mbe::ExpandError::NoMatchingRule
|
||||
})?;
|
||||
let name_token_id = token_map.token_by_range(name.syntax().text_range()).ok_or_else(|| {
|
||||
debug!("name token not found");
|
||||
mbe::ExpandError::ConversionError
|
||||
})?;
|
||||
let name_token = tt::Ident { id: name_token_id, text: name.text().clone() };
|
||||
let type_params = params.map_or(0, |type_param_list| type_param_list.type_params().count());
|
||||
Ok(BasicAdtInfo { name: name_token, type_params })
|
||||
}
|
||||
|
||||
fn make_type_args(n: usize, bound: Vec<tt::TokenTree>) -> Vec<tt::TokenTree> {
|
||||
let mut result = Vec::<tt::TokenTree>::new();
|
||||
result.push(tt::Leaf::Punct(tt::Punct { char: '<', spacing: tt::Spacing::Alone }).into());
|
||||
for i in 0..n {
|
||||
if i > 0 {
|
||||
result
|
||||
.push(tt::Leaf::Punct(tt::Punct { char: ',', spacing: tt::Spacing::Alone }).into());
|
||||
}
|
||||
result.push(
|
||||
tt::Leaf::Ident(tt::Ident {
|
||||
id: tt::TokenId::unspecified(),
|
||||
text: format!("T{}", i).into(),
|
||||
})
|
||||
.into(),
|
||||
);
|
||||
result.extend(bound.iter().cloned());
|
||||
}
|
||||
result.push(tt::Leaf::Punct(tt::Punct { char: '>', spacing: tt::Spacing::Alone }).into());
|
||||
result
|
||||
}
|
||||
|
||||
fn expand_simple_derive(
|
||||
tt: &tt::Subtree,
|
||||
trait_path: tt::Subtree,
|
||||
) -> Result<tt::Subtree, mbe::ExpandError> {
|
||||
let info = parse_adt(tt)?;
|
||||
let name = info.name;
|
||||
let trait_path_clone = trait_path.token_trees.clone();
|
||||
let bound = (quote! { : ##trait_path_clone }).token_trees;
|
||||
let type_params = make_type_args(info.type_params, bound);
|
||||
let type_args = make_type_args(info.type_params, Vec::new());
|
||||
let trait_path = trait_path.token_trees;
|
||||
let expanded = quote! {
|
||||
impl ##type_params ##trait_path for #name ##type_args {}
|
||||
};
|
||||
Ok(expanded)
|
||||
}
|
||||
|
||||
fn copy_expand(
|
||||
_db: &dyn AstDatabase,
|
||||
_id: MacroCallId,
|
||||
tt: &tt::Subtree,
|
||||
) -> Result<tt::Subtree, mbe::ExpandError> {
|
||||
expand_simple_derive(tt, quote! { std::marker::Copy })
|
||||
}
|
||||
|
||||
fn clone_expand(
|
||||
_db: &dyn AstDatabase,
|
||||
_id: MacroCallId,
|
||||
tt: &tt::Subtree,
|
||||
) -> Result<tt::Subtree, mbe::ExpandError> {
|
||||
expand_simple_derive(tt, quote! { std::clone::Clone })
|
||||
}
|
||||
|
||||
fn default_expand(
|
||||
_db: &dyn AstDatabase,
|
||||
_id: MacroCallId,
|
||||
tt: &tt::Subtree,
|
||||
) -> Result<tt::Subtree, mbe::ExpandError> {
|
||||
expand_simple_derive(tt, quote! { std::default::Default })
|
||||
}
|
||||
|
||||
fn debug_expand(
|
||||
_db: &dyn AstDatabase,
|
||||
_id: MacroCallId,
|
||||
tt: &tt::Subtree,
|
||||
) -> Result<tt::Subtree, mbe::ExpandError> {
|
||||
expand_simple_derive(tt, quote! { std::fmt::Debug })
|
||||
}
|
||||
|
||||
fn hash_expand(
|
||||
_db: &dyn AstDatabase,
|
||||
_id: MacroCallId,
|
||||
tt: &tt::Subtree,
|
||||
) -> Result<tt::Subtree, mbe::ExpandError> {
|
||||
expand_simple_derive(tt, quote! { std::hash::Hash })
|
||||
}
|
||||
|
||||
fn eq_expand(
|
||||
_db: &dyn AstDatabase,
|
||||
_id: MacroCallId,
|
||||
tt: &tt::Subtree,
|
||||
) -> Result<tt::Subtree, mbe::ExpandError> {
|
||||
expand_simple_derive(tt, quote! { std::cmp::Eq })
|
||||
}
|
||||
|
||||
fn partial_eq_expand(
|
||||
_db: &dyn AstDatabase,
|
||||
_id: MacroCallId,
|
||||
tt: &tt::Subtree,
|
||||
) -> Result<tt::Subtree, mbe::ExpandError> {
|
||||
expand_simple_derive(tt, quote! { std::cmp::PartialEq })
|
||||
}
|
||||
|
||||
fn ord_expand(
|
||||
_db: &dyn AstDatabase,
|
||||
_id: MacroCallId,
|
||||
tt: &tt::Subtree,
|
||||
) -> Result<tt::Subtree, mbe::ExpandError> {
|
||||
expand_simple_derive(tt, quote! { std::cmp::Ord })
|
||||
}
|
||||
|
||||
fn partial_ord_expand(
|
||||
_db: &dyn AstDatabase,
|
||||
_id: MacroCallId,
|
||||
tt: &tt::Subtree,
|
||||
) -> Result<tt::Subtree, mbe::ExpandError> {
|
||||
expand_simple_derive(tt, quote! { std::cmp::PartialOrd })
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::{test_db::TestDB, AstId, MacroCallKind, MacroCallLoc, MacroFileKind};
|
||||
use ra_db::{fixture::WithFixture, SourceDatabase};
|
||||
|
||||
fn expand_builtin_derive(s: &str, expander: BuiltinDeriveExpander) -> String {
|
||||
let (db, file_id) = TestDB::with_single_file(&s);
|
||||
let parsed = db.parse(file_id);
|
||||
let items: Vec<_> =
|
||||
parsed.syntax_node().descendants().filter_map(|it| ast::ModuleItem::cast(it)).collect();
|
||||
|
||||
let ast_id_map = db.ast_id_map(file_id.into());
|
||||
|
||||
// the first one should be a macro_rules
|
||||
let def =
|
||||
MacroDefId { krate: None, ast_id: None, kind: MacroDefKind::BuiltInDerive(expander) };
|
||||
|
||||
let loc = MacroCallLoc {
|
||||
def,
|
||||
kind: MacroCallKind::Attr(AstId::new(file_id.into(), ast_id_map.ast_id(&items[0]))),
|
||||
};
|
||||
|
||||
let id = db.intern_macro(loc);
|
||||
let parsed = db.parse_or_expand(id.as_file(MacroFileKind::Items)).unwrap();
|
||||
|
||||
// FIXME text() for syntax nodes parsed from token tree looks weird
|
||||
// because there's no whitespace, see below
|
||||
parsed.text().to_string()
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_copy_expand_simple() {
|
||||
let expanded = expand_builtin_derive(
|
||||
r#"
|
||||
#[derive(Copy)]
|
||||
struct Foo;
|
||||
"#,
|
||||
BuiltinDeriveExpander::Copy,
|
||||
);
|
||||
|
||||
assert_eq!(expanded, "impl <>std::marker::CopyforFoo <>{}");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_copy_expand_with_type_params() {
|
||||
let expanded = expand_builtin_derive(
|
||||
r#"
|
||||
#[derive(Copy)]
|
||||
struct Foo<A, B>;
|
||||
"#,
|
||||
BuiltinDeriveExpander::Copy,
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
expanded,
|
||||
"impl<T0:std::marker::Copy,T1:std::marker::Copy>std::marker::CopyforFoo<T0,T1>{}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_copy_expand_with_lifetimes() {
|
||||
let expanded = expand_builtin_derive(
|
||||
r#"
|
||||
#[derive(Copy)]
|
||||
struct Foo<A, B, 'a, 'b>;
|
||||
"#,
|
||||
BuiltinDeriveExpander::Copy,
|
||||
);
|
||||
|
||||
// We currently just ignore lifetimes
|
||||
|
||||
assert_eq!(
|
||||
expanded,
|
||||
"impl<T0:std::marker::Copy,T1:std::marker::Copy>std::marker::CopyforFoo<T0,T1>{}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_clone_expand() {
|
||||
let expanded = expand_builtin_derive(
|
||||
r#"
|
||||
#[derive(Clone)]
|
||||
struct Foo<A, B>;
|
||||
"#,
|
||||
BuiltinDeriveExpander::Clone,
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
expanded,
|
||||
"impl<T0:std::clone::Clone,T1:std::clone::Clone>std::clone::CloneforFoo<T0,T1>{}"
|
||||
);
|
||||
}
|
||||
}
|
|
@ -39,7 +39,7 @@ macro_rules! register_builtin {
|
|||
_ => return None,
|
||||
};
|
||||
|
||||
Some(MacroDefId { krate, ast_id, kind: MacroDefKind::BuiltIn(kind) })
|
||||
Some(MacroDefId { krate: Some(krate), ast_id: Some(ast_id), kind: MacroDefKind::BuiltIn(kind) })
|
||||
}
|
||||
};
|
||||
}
|
||||
|
@ -82,10 +82,9 @@ fn line_expand(
|
|||
_tt: &tt::Subtree,
|
||||
) -> Result<tt::Subtree, mbe::ExpandError> {
|
||||
let loc = db.lookup_intern_macro(id);
|
||||
let macro_call = loc.ast_id.to_node(db);
|
||||
|
||||
let arg = macro_call.token_tree().ok_or_else(|| mbe::ExpandError::UnexpectedToken)?;
|
||||
let arg_start = arg.syntax().text_range().start();
|
||||
let arg = loc.kind.arg(db).ok_or_else(|| mbe::ExpandError::UnexpectedToken)?;
|
||||
let arg_start = arg.text_range().start();
|
||||
|
||||
let file = id.as_file(MacroFileKind::Expr);
|
||||
let line_num = to_line_number(db, file, arg_start);
|
||||
|
@ -103,11 +102,10 @@ fn stringify_expand(
|
|||
_tt: &tt::Subtree,
|
||||
) -> Result<tt::Subtree, mbe::ExpandError> {
|
||||
let loc = db.lookup_intern_macro(id);
|
||||
let macro_call = loc.ast_id.to_node(db);
|
||||
|
||||
let macro_content = {
|
||||
let arg = macro_call.token_tree().ok_or_else(|| mbe::ExpandError::UnexpectedToken)?;
|
||||
let macro_args = arg.syntax().clone();
|
||||
let arg = loc.kind.arg(db).ok_or_else(|| mbe::ExpandError::UnexpectedToken)?;
|
||||
let macro_args = arg.clone();
|
||||
let text = macro_args.text();
|
||||
let without_parens = TextUnit::of_char('(')..text.len() - TextUnit::of_char(')');
|
||||
text.slice(without_parens).to_string()
|
||||
|
@ -148,7 +146,10 @@ fn column_expand(
|
|||
_tt: &tt::Subtree,
|
||||
) -> Result<tt::Subtree, mbe::ExpandError> {
|
||||
let loc = db.lookup_intern_macro(id);
|
||||
let macro_call = loc.ast_id.to_node(db);
|
||||
let macro_call = match loc.kind {
|
||||
crate::MacroCallKind::FnLike(ast_id) => ast_id.to_node(db),
|
||||
_ => panic!("column macro called as attr"),
|
||||
};
|
||||
|
||||
let _arg = macro_call.token_tree().ok_or_else(|| mbe::ExpandError::UnexpectedToken)?;
|
||||
let col_start = macro_call.syntax().text_range().start();
|
||||
|
@ -164,15 +165,10 @@ fn column_expand(
|
|||
}
|
||||
|
||||
fn file_expand(
|
||||
db: &dyn AstDatabase,
|
||||
id: MacroCallId,
|
||||
_db: &dyn AstDatabase,
|
||||
_id: MacroCallId,
|
||||
_tt: &tt::Subtree,
|
||||
) -> Result<tt::Subtree, mbe::ExpandError> {
|
||||
let loc = db.lookup_intern_macro(id);
|
||||
let macro_call = loc.ast_id.to_node(db);
|
||||
|
||||
let _ = macro_call.token_tree().ok_or_else(|| mbe::ExpandError::UnexpectedToken)?;
|
||||
|
||||
// FIXME: RA purposefully lacks knowledge of absolute file names
|
||||
// so just return "".
|
||||
let file_name = "";
|
||||
|
@ -207,7 +203,7 @@ fn compile_error_expand(
|
|||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::{test_db::TestDB, MacroCallLoc};
|
||||
use crate::{test_db::TestDB, MacroCallKind, MacroCallLoc};
|
||||
use ra_db::{fixture::WithFixture, SourceDatabase};
|
||||
|
||||
fn expand_builtin_macro(s: &str, expander: BuiltinFnLikeExpander) -> String {
|
||||
|
@ -220,14 +216,17 @@ mod tests {
|
|||
|
||||
// the first one should be a macro_rules
|
||||
let def = MacroDefId {
|
||||
krate: CrateId(0),
|
||||
ast_id: AstId::new(file_id.into(), ast_id_map.ast_id(¯o_calls[0])),
|
||||
krate: Some(CrateId(0)),
|
||||
ast_id: Some(AstId::new(file_id.into(), ast_id_map.ast_id(¯o_calls[0]))),
|
||||
kind: MacroDefKind::BuiltIn(expander),
|
||||
};
|
||||
|
||||
let loc = MacroCallLoc {
|
||||
def,
|
||||
ast_id: AstId::new(file_id.into(), ast_id_map.ast_id(¯o_calls[1])),
|
||||
kind: MacroCallKind::FnLike(AstId::new(
|
||||
file_id.into(),
|
||||
ast_id_map.ast_id(¯o_calls[1]),
|
||||
)),
|
||||
};
|
||||
|
||||
let id = db.intern_macro(loc);
|
||||
|
|
|
@ -9,14 +9,15 @@ use ra_prof::profile;
|
|||
use ra_syntax::{AstNode, Parse, SyntaxNode};
|
||||
|
||||
use crate::{
|
||||
ast_id_map::AstIdMap, BuiltinFnLikeExpander, HirFileId, HirFileIdRepr, MacroCallId,
|
||||
MacroCallLoc, MacroDefId, MacroDefKind, MacroFile, MacroFileKind,
|
||||
ast_id_map::AstIdMap, BuiltinDeriveExpander, BuiltinFnLikeExpander, HirFileId, HirFileIdRepr,
|
||||
MacroCallId, MacroCallLoc, MacroDefId, MacroDefKind, MacroFile, MacroFileKind,
|
||||
};
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub enum TokenExpander {
|
||||
MacroRules(mbe::MacroRules),
|
||||
Builtin(BuiltinFnLikeExpander),
|
||||
BuiltinDerive(BuiltinDeriveExpander),
|
||||
}
|
||||
|
||||
impl TokenExpander {
|
||||
|
@ -29,6 +30,7 @@ impl TokenExpander {
|
|||
match self {
|
||||
TokenExpander::MacroRules(it) => it.expand(tt),
|
||||
TokenExpander::Builtin(it) => it.expand(db, id, tt),
|
||||
TokenExpander::BuiltinDerive(it) => it.expand(db, id, tt),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -36,6 +38,7 @@ impl TokenExpander {
|
|||
match self {
|
||||
TokenExpander::MacroRules(it) => it.map_id_down(id),
|
||||
TokenExpander::Builtin(..) => id,
|
||||
TokenExpander::BuiltinDerive(..) => id,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -43,6 +46,7 @@ impl TokenExpander {
|
|||
match self {
|
||||
TokenExpander::MacroRules(it) => it.map_id_up(id),
|
||||
TokenExpander::Builtin(..) => (id, mbe::Origin::Def),
|
||||
TokenExpander::BuiltinDerive(..) => (id, mbe::Origin::Def),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -76,7 +80,7 @@ pub(crate) fn macro_def(
|
|||
) -> Option<Arc<(TokenExpander, mbe::TokenMap)>> {
|
||||
match id.kind {
|
||||
MacroDefKind::Declarative => {
|
||||
let macro_call = id.ast_id.to_node(db);
|
||||
let macro_call = id.ast_id?.to_node(db);
|
||||
let arg = macro_call.token_tree()?;
|
||||
let (tt, tmap) = mbe::ast_to_token_tree(&arg).or_else(|| {
|
||||
log::warn!("fail on macro_def to token tree: {:#?}", arg);
|
||||
|
@ -91,6 +95,10 @@ pub(crate) fn macro_def(
|
|||
MacroDefKind::BuiltIn(expander) => {
|
||||
Some(Arc::new((TokenExpander::Builtin(expander.clone()), mbe::TokenMap::default())))
|
||||
}
|
||||
MacroDefKind::BuiltInDerive(expander) => Some(Arc::new((
|
||||
TokenExpander::BuiltinDerive(expander.clone()),
|
||||
mbe::TokenMap::default(),
|
||||
))),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -99,9 +107,8 @@ pub(crate) fn macro_arg(
|
|||
id: MacroCallId,
|
||||
) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>> {
|
||||
let loc = db.lookup_intern_macro(id);
|
||||
let macro_call = loc.ast_id.to_node(db);
|
||||
let arg = macro_call.token_tree()?;
|
||||
let (tt, tmap) = mbe::ast_to_token_tree(&arg)?;
|
||||
let arg = loc.kind.arg(db)?;
|
||||
let (tt, tmap) = mbe::syntax_node_to_token_tree(&arg)?;
|
||||
Some(Arc::new((tt, tmap)))
|
||||
}
|
||||
|
||||
|
|
|
@ -25,8 +25,9 @@ impl Hygiene {
|
|||
HirFileIdRepr::MacroFile(macro_file) => {
|
||||
let loc = db.lookup_intern_macro(macro_file.macro_call_id);
|
||||
match loc.def.kind {
|
||||
MacroDefKind::Declarative => Some(loc.def.krate),
|
||||
MacroDefKind::Declarative => loc.def.krate,
|
||||
MacroDefKind::BuiltIn(_) => None,
|
||||
MacroDefKind::BuiltInDerive(_) => None,
|
||||
}
|
||||
}
|
||||
};
|
||||
|
|
|
@ -9,6 +9,7 @@ pub mod ast_id_map;
|
|||
pub mod name;
|
||||
pub mod hygiene;
|
||||
pub mod diagnostics;
|
||||
pub mod builtin_derive;
|
||||
pub mod builtin_macro;
|
||||
pub mod quote;
|
||||
|
||||
|
@ -23,6 +24,7 @@ use ra_syntax::{
|
|||
};
|
||||
|
||||
use crate::ast_id_map::FileAstId;
|
||||
use crate::builtin_derive::BuiltinDeriveExpander;
|
||||
use crate::builtin_macro::BuiltinFnLikeExpander;
|
||||
|
||||
#[cfg(test)]
|
||||
|
@ -69,7 +71,7 @@ impl HirFileId {
|
|||
HirFileIdRepr::FileId(file_id) => file_id,
|
||||
HirFileIdRepr::MacroFile(macro_file) => {
|
||||
let loc = db.lookup_intern_macro(macro_file.macro_call_id);
|
||||
loc.ast_id.file_id.original_file(db)
|
||||
loc.kind.file_id().original_file(db)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -81,8 +83,8 @@ impl HirFileId {
|
|||
HirFileIdRepr::MacroFile(macro_file) => {
|
||||
let loc: MacroCallLoc = db.lookup_intern_macro(macro_file.macro_call_id);
|
||||
|
||||
let arg_tt = loc.ast_id.to_node(db).token_tree()?;
|
||||
let def_tt = loc.def.ast_id.to_node(db).token_tree()?;
|
||||
let arg_tt = loc.kind.arg(db)?;
|
||||
let def_tt = loc.def.ast_id?.to_node(db).token_tree()?;
|
||||
|
||||
let macro_def = db.macro_def(loc.def)?;
|
||||
let (parse, exp_map) = db.parse_macro(macro_file)?;
|
||||
|
@ -90,8 +92,8 @@ impl HirFileId {
|
|||
|
||||
Some(ExpansionInfo {
|
||||
expanded: InFile::new(self, parse.syntax_node()),
|
||||
arg: InFile::new(loc.ast_id.file_id, arg_tt),
|
||||
def: InFile::new(loc.ast_id.file_id, def_tt),
|
||||
arg: InFile::new(loc.kind.file_id(), arg_tt),
|
||||
def: InFile::new(loc.def.ast_id?.file_id, def_tt),
|
||||
macro_arg,
|
||||
macro_def,
|
||||
exp_map,
|
||||
|
@ -129,18 +131,20 @@ impl salsa::InternKey for MacroCallId {
|
|||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub struct MacroDefId {
|
||||
pub krate: CrateId,
|
||||
pub ast_id: AstId<ast::MacroCall>,
|
||||
// FIXME: krate and ast_id are currently optional because we don't have a
|
||||
// definition location for built-in derives. There is one, though: the
|
||||
// standard library defines them. The problem is that it uses the new
|
||||
// `macro` syntax for this, which we don't support yet. As soon as we do
|
||||
// (which will probably require touching this code), we can instead use
|
||||
// that (and also remove the hacks for resolving built-in derives).
|
||||
pub krate: Option<CrateId>,
|
||||
pub ast_id: Option<AstId<ast::MacroCall>>,
|
||||
pub kind: MacroDefKind,
|
||||
}
|
||||
|
||||
impl MacroDefId {
|
||||
pub fn as_call_id(
|
||||
self,
|
||||
db: &dyn db::AstDatabase,
|
||||
ast_id: AstId<ast::MacroCall>,
|
||||
) -> MacroCallId {
|
||||
db.intern_macro(MacroCallLoc { def: self, ast_id })
|
||||
pub fn as_call_id(self, db: &dyn db::AstDatabase, kind: MacroCallKind) -> MacroCallId {
|
||||
db.intern_macro(MacroCallLoc { def: self, kind })
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -148,12 +152,38 @@ impl MacroDefId {
|
|||
pub enum MacroDefKind {
|
||||
Declarative,
|
||||
BuiltIn(BuiltinFnLikeExpander),
|
||||
// FIXME: maybe just Builtin and rename BuiltinFnLikeExpander to BuiltinExpander
|
||||
BuiltInDerive(BuiltinDeriveExpander),
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct MacroCallLoc {
|
||||
pub(crate) def: MacroDefId,
|
||||
pub(crate) ast_id: AstId<ast::MacroCall>,
|
||||
pub(crate) kind: MacroCallKind,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
pub enum MacroCallKind {
|
||||
FnLike(AstId<ast::MacroCall>),
|
||||
Attr(AstId<ast::ModuleItem>),
|
||||
}
|
||||
|
||||
impl MacroCallKind {
|
||||
pub fn file_id(&self) -> HirFileId {
|
||||
match self {
|
||||
MacroCallKind::FnLike(ast_id) => ast_id.file_id,
|
||||
MacroCallKind::Attr(ast_id) => ast_id.file_id,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn arg(&self, db: &dyn db::AstDatabase) -> Option<SyntaxNode> {
|
||||
match self {
|
||||
MacroCallKind::FnLike(ast_id) => {
|
||||
Some(ast_id.to_node(db).token_tree()?.syntax().clone())
|
||||
}
|
||||
MacroCallKind::Attr(ast_id) => Some(ast_id.to_node(db).syntax().clone()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl MacroCallId {
|
||||
|
@ -167,7 +197,7 @@ impl MacroCallId {
|
|||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub struct ExpansionInfo {
|
||||
expanded: InFile<SyntaxNode>,
|
||||
arg: InFile<ast::TokenTree>,
|
||||
arg: InFile<SyntaxNode>,
|
||||
def: InFile<ast::TokenTree>,
|
||||
|
||||
macro_def: Arc<(db::TokenExpander, mbe::TokenMap)>,
|
||||
|
@ -178,8 +208,7 @@ pub struct ExpansionInfo {
|
|||
impl ExpansionInfo {
|
||||
pub fn map_token_down(&self, token: InFile<&SyntaxToken>) -> Option<InFile<SyntaxToken>> {
|
||||
assert_eq!(token.file_id, self.arg.file_id);
|
||||
let range =
|
||||
token.value.text_range().checked_sub(self.arg.value.syntax().text_range().start())?;
|
||||
let range = token.value.text_range().checked_sub(self.arg.value.text_range().start())?;
|
||||
let token_id = self.macro_arg.1.token_by_range(range)?;
|
||||
let token_id = self.macro_def.0.map_id_down(token_id);
|
||||
|
||||
|
@ -195,15 +224,14 @@ impl ExpansionInfo {
|
|||
|
||||
let (token_id, origin) = self.macro_def.0.map_id_up(token_id);
|
||||
let (token_map, tt) = match origin {
|
||||
mbe::Origin::Call => (&self.macro_arg.1, &self.arg),
|
||||
mbe::Origin::Def => (&self.macro_def.1, &self.def),
|
||||
mbe::Origin::Call => (&self.macro_arg.1, self.arg.clone()),
|
||||
mbe::Origin::Def => {
|
||||
(&self.macro_def.1, self.def.as_ref().map(|tt| tt.syntax().clone()))
|
||||
}
|
||||
};
|
||||
|
||||
let range = token_map.range_by_token(token_id)?;
|
||||
let token = algo::find_covering_element(
|
||||
tt.value.syntax(),
|
||||
range + tt.value.syntax().text_range().start(),
|
||||
)
|
||||
let token = algo::find_covering_element(&tt.value, range + tt.value.text_range().start())
|
||||
.into_token()?;
|
||||
Some(tt.with_value(token))
|
||||
}
|
||||
|
|
|
@ -83,6 +83,12 @@ impl AsName for ast::Name {
|
|||
}
|
||||
}
|
||||
|
||||
impl AsName for tt::Ident {
|
||||
fn as_name(&self) -> Name {
|
||||
Name::resolve(&self.text)
|
||||
}
|
||||
}
|
||||
|
||||
impl AsName for ast::FieldKind {
|
||||
fn as_name(&self) -> Name {
|
||||
match self {
|
||||
|
@ -153,3 +159,14 @@ pub const COLUMN_MACRO: Name = Name::new_inline_ascii(6, b"column");
|
|||
pub const COMPILE_ERROR_MACRO: Name = Name::new_inline_ascii(13, b"compile_error");
|
||||
pub const LINE_MACRO: Name = Name::new_inline_ascii(4, b"line");
|
||||
pub const STRINGIFY_MACRO: Name = Name::new_inline_ascii(9, b"stringify");
|
||||
|
||||
// Builtin derives
|
||||
pub const COPY_TRAIT: Name = Name::new_inline_ascii(4, b"Copy");
|
||||
pub const CLONE_TRAIT: Name = Name::new_inline_ascii(5, b"Clone");
|
||||
pub const DEFAULT_TRAIT: Name = Name::new_inline_ascii(7, b"Default");
|
||||
pub const DEBUG_TRAIT: Name = Name::new_inline_ascii(5, b"Debug");
|
||||
pub const HASH_TRAIT: Name = Name::new_inline_ascii(4, b"Hash");
|
||||
pub const ORD_TRAIT: Name = Name::new_inline_ascii(3, b"Ord");
|
||||
pub const PARTIAL_ORD_TRAIT: Name = Name::new_inline_ascii(10, b"PartialOrd");
|
||||
pub const EQ_TRAIT: Name = Name::new_inline_ascii(2, b"Eq");
|
||||
pub const PARTIAL_EQ_TRAIT: Name = Name::new_inline_ascii(9, b"PartialEq");
|
||||
|
|
|
@ -60,6 +60,15 @@ macro_rules! __quote {
|
|||
}
|
||||
};
|
||||
|
||||
( ## $first:ident $($tail:tt)* ) => {
|
||||
{
|
||||
let mut tokens = $first.into_iter().map($crate::quote::ToTokenTree::to_token).collect::<Vec<tt::TokenTree>>();
|
||||
let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*));
|
||||
tokens.append(&mut tail_tokens);
|
||||
tokens
|
||||
}
|
||||
};
|
||||
|
||||
// Brace
|
||||
( { $($tt:tt)* } ) => { $crate::__quote!(@SUBTREE Brace $($tt)*) };
|
||||
// Bracket
|
||||
|
@ -85,6 +94,7 @@ macro_rules! __quote {
|
|||
( & ) => {$crate::__quote!(@PUNCT '&')};
|
||||
( , ) => {$crate::__quote!(@PUNCT ',')};
|
||||
( : ) => {$crate::__quote!(@PUNCT ':')};
|
||||
( :: ) => {$crate::__quote!(@PUNCT ':', ':')};
|
||||
( . ) => {$crate::__quote!(@PUNCT '.')};
|
||||
|
||||
( $first:tt $($tail:tt)+ ) => {
|
||||
|
|
|
@ -266,3 +266,54 @@ fn main() {
|
|||
"###
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn infer_derive_clone_simple() {
|
||||
let (db, pos) = TestDB::with_position(
|
||||
r#"
|
||||
//- /main.rs crate:main deps:std
|
||||
#[derive(Clone)]
|
||||
struct S;
|
||||
fn test() {
|
||||
S.clone()<|>;
|
||||
}
|
||||
|
||||
//- /lib.rs crate:std
|
||||
#[prelude_import]
|
||||
use clone::*;
|
||||
mod clone {
|
||||
trait Clone {
|
||||
fn clone(&self) -> Self;
|
||||
}
|
||||
}
|
||||
"#,
|
||||
);
|
||||
assert_eq!("S", type_at_pos(&db, pos));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn infer_derive_clone_with_params() {
|
||||
let (db, pos) = TestDB::with_position(
|
||||
r#"
|
||||
//- /main.rs crate:main deps:std
|
||||
#[derive(Clone)]
|
||||
struct S;
|
||||
#[derive(Clone)]
|
||||
struct Wrapper<T>(T);
|
||||
struct NonClone;
|
||||
fn test() {
|
||||
(Wrapper(S).clone(), Wrapper(NonClone).clone())<|>;
|
||||
}
|
||||
|
||||
//- /lib.rs crate:std
|
||||
#[prelude_import]
|
||||
use clone::*;
|
||||
mod clone {
|
||||
trait Clone {
|
||||
fn clone(&self) -> Self;
|
||||
}
|
||||
}
|
||||
"#,
|
||||
);
|
||||
assert_eq!("(Wrapper<S>, {unknown})", type_at_pos(&db, pos));
|
||||
}
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
use ra_parser::{FragmentKind, ParseError, TreeSink};
|
||||
use ra_syntax::{
|
||||
ast, AstNode, AstToken, NodeOrToken, Parse, SmolStr, SyntaxKind, SyntaxKind::*, SyntaxNode,
|
||||
ast, AstToken, NodeOrToken, Parse, SmolStr, SyntaxKind, SyntaxKind::*, SyntaxNode,
|
||||
SyntaxTreeBuilder, TextRange, TextUnit, T,
|
||||
};
|
||||
use std::iter::successors;
|
||||
|
@ -20,7 +20,7 @@ pub struct TokenMap {
|
|||
|
||||
/// Convert the syntax tree (what user has written) to a `TokenTree` (what macro
|
||||
/// will consume).
|
||||
pub fn ast_to_token_tree(ast: &ast::TokenTree) -> Option<(tt::Subtree, TokenMap)> {
|
||||
pub fn ast_to_token_tree(ast: &impl ast::AstNode) -> Option<(tt::Subtree, TokenMap)> {
|
||||
syntax_node_to_token_tree(ast.syntax())
|
||||
}
|
||||
|
||||
|
@ -208,13 +208,8 @@ impl Convertor {
|
|||
} else if token.kind().is_trivia() {
|
||||
continue;
|
||||
} else if token.kind().is_punct() {
|
||||
assert!(
|
||||
token.text().len() == 1,
|
||||
"Input ast::token punct must be single char."
|
||||
);
|
||||
let char = token.text().chars().next().unwrap();
|
||||
|
||||
let spacing = match child_iter.peek() {
|
||||
// we need to pull apart joined punctuation tokens
|
||||
let last_spacing = match child_iter.peek() {
|
||||
Some(NodeOrToken::Token(token)) => {
|
||||
if token.kind().is_punct() {
|
||||
tt::Spacing::Joint
|
||||
|
@ -224,8 +219,12 @@ impl Convertor {
|
|||
}
|
||||
_ => tt::Spacing::Alone,
|
||||
};
|
||||
|
||||
let spacing_iter = std::iter::repeat(tt::Spacing::Joint)
|
||||
.take(token.text().len() - 1)
|
||||
.chain(std::iter::once(last_spacing));
|
||||
for (char, spacing) in token.text().chars().zip(spacing_iter) {
|
||||
token_trees.push(tt::Leaf::from(tt::Punct { char, spacing }).into());
|
||||
}
|
||||
} else {
|
||||
let child: tt::TokenTree =
|
||||
if token.kind() == T![true] || token.kind() == T![false] {
|
||||
|
@ -246,8 +245,14 @@ impl Convertor {
|
|||
}
|
||||
}
|
||||
NodeOrToken::Node(node) => {
|
||||
let child = self.go(&node)?.into();
|
||||
token_trees.push(child);
|
||||
let child_subtree = self.go(&node)?;
|
||||
if child_subtree.delimiter == tt::Delimiter::None
|
||||
&& node.kind() != SyntaxKind::TOKEN_TREE
|
||||
{
|
||||
token_trees.extend(child_subtree.token_trees);
|
||||
} else {
|
||||
token_trees.push(child_subtree.into());
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
@ -389,7 +394,10 @@ mod tests {
|
|||
use super::*;
|
||||
use crate::tests::{create_rules, expand};
|
||||
use ra_parser::TokenSource;
|
||||
use ra_syntax::algo::{insert_children, InsertPosition};
|
||||
use ra_syntax::{
|
||||
algo::{insert_children, InsertPosition},
|
||||
ast::AstNode,
|
||||
};
|
||||
|
||||
#[test]
|
||||
fn convert_tt_token_source() {
|
||||
|
@ -491,4 +499,12 @@ mod tests {
|
|||
|
||||
assert_eq!(tt.delimiter, tt::Delimiter::Brace);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_token_tree_multi_char_punct() {
|
||||
let source_file = ast::SourceFile::parse("struct Foo { a: x::Y }").ok().unwrap();
|
||||
let struct_def = source_file.syntax().descendants().find_map(ast::StructDef::cast).unwrap();
|
||||
let tt = ast_to_token_tree(&struct_def).unwrap().0;
|
||||
token_tree_to_syntax_node(&tt, FragmentKind::Item).unwrap();
|
||||
}
|
||||
}
|
||||
|
|