hir_ty: Expand macros at type position

cynecx 2021-04-10 17:49:12 +02:00
parent fb2d284f28
commit cf3b4f1e20
17 changed files with 434 additions and 81 deletions
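What this change enables, as exercised by the new inference tests further down in the diff, is that a macro invocation may appear in type position (type aliases, associated types, let-binding annotations) and is expanded during type lowering. A minimal illustration, reusing the names from the added tests:

macro_rules! U32 {
    () => { u32 };
}

type TayTo = U32!();   // macro call in type position, now lowered to `u32`

fn testy() {
    let a: TayTo;      // understood as `u32`
    let b: U32!();     // also allowed directly in a let-binding type annotation
}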

View file

@@ -6,6 +6,7 @@ use std::{cell::RefCell, fmt, iter::successors};
 use base_db::{FileId, FileRange};
 use hir_def::{
+    body,
     resolver::{self, HasResolver, Resolver, TypeNs},
     AsMacroCall, FunctionId, TraitId, VariantId,
 };
@@ -854,7 +855,8 @@ impl<'a> SemanticsScope<'a> {
     /// necessary a heuristic, as it doesn't take hygiene into account.
     pub fn speculative_resolve(&self, path: &ast::Path) -> Option<PathResolution> {
         let hygiene = Hygiene::new(self.db.upcast(), self.file_id);
-        let path = Path::from_src(path.clone(), &hygiene)?;
+        let ctx = body::LowerCtx::with_hygiene(&hygiene);
+        let path = Path::from_src(path.clone(), &ctx)?;
         resolve_hir_path(self.db, &self.resolver, &path)
     }
 }

View file

@@ -9,6 +9,7 @@ use std::{iter::once, sync::Arc};
 use hir_def::{
     body::{
+        self,
         scope::{ExprScopes, ScopeId},
         Body, BodySourceMap,
     },
@@ -202,8 +203,8 @@ impl SourceAnalyzer {
         db: &dyn HirDatabase,
         macro_call: InFile<&ast::MacroCall>,
     ) -> Option<MacroDef> {
-        let hygiene = Hygiene::new(db.upcast(), macro_call.file_id);
-        let path = macro_call.value.path().and_then(|ast| Path::from_src(ast, &hygiene))?;
+        let ctx = body::LowerCtx::new(db.upcast(), macro_call.file_id);
+        let path = macro_call.value.path().and_then(|ast| Path::from_src(ast, &ctx))?;
         self.resolver.resolve_path_as_macro(db.upcast(), path.mod_path()).map(|it| it.into())
     }
@@ -281,7 +282,9 @@ impl SourceAnalyzer {
         }
         // This must be a normal source file rather than macro file.
-        let hir_path = Path::from_src(path.clone(), &Hygiene::new(db.upcast(), self.file_id))?;
+        let hygiene = Hygiene::new(db.upcast(), self.file_id);
+        let ctx = body::LowerCtx::with_hygiene(&hygiene);
+        let hir_path = Path::from_src(path.clone(), &ctx)?;
         // Case where path is a qualifier of another path, e.g. foo::bar::Baz where we
         // trying to resolve foo::bar.

View file

@@ -19,9 +19,9 @@ use hir_expand::{
 use la_arena::{Arena, ArenaMap};
 use profile::Count;
 use rustc_hash::FxHashMap;
-use syntax::{ast, AstNode, AstPtr};
+use syntax::{ast, AstNode, AstPtr, SyntaxNode};

-pub(crate) use lower::LowerCtx;
+pub use lower::LowerCtx;

 use crate::{
     attr::{Attrs, RawAttrs},
@@ -98,11 +98,14 @@ impl Expander {
         }
     }

-    pub(crate) fn enter_expand<T: ast::AstNode>(
+    fn enter_expand_intern(
         &mut self,
         db: &dyn DefDatabase,
         macro_call: ast::MacroCall,
-    ) -> Result<ExpandResult<Option<(Mark, T)>>, UnresolvedMacro> {
+    ) -> Result<
+        ExpandResult<Option<(SyntaxNode, impl FnMut(&dyn DefDatabase) -> Mark + '_)>>,
+        UnresolvedMacro,
+    > {
         if self.recursion_limit + 1 > EXPANSION_RECURSION_LIMIT {
             cov_mark::hit!(your_stack_belongs_to_me);
             return Ok(ExpandResult::str_err(
@@ -147,6 +150,55 @@ impl Expander {
             }
         };

+        let this = self;
+
+        let advance_state = move |db: &dyn DefDatabase| {
+            this.recursion_limit += 1;
+            let mark = Mark {
+                file_id: this.current_file_id,
+                ast_id_map: mem::take(&mut this.ast_id_map),
+                bomb: DropBomb::new("expansion mark dropped"),
+            };
+            this.cfg_expander.hygiene = Hygiene::new(db.upcast(), file_id);
+            this.current_file_id = file_id;
+            this.ast_id_map = db.ast_id_map(file_id);
+            mark
+        };
+
+        Ok(ExpandResult { value: Some((raw_node, advance_state)), err })
+    }
+
+    pub(crate) fn enter_expand_raw(
+        &mut self,
+        db: &dyn DefDatabase,
+        macro_call: ast::MacroCall,
+    ) -> Result<ExpandResult<Option<(Mark, SyntaxNode)>>, UnresolvedMacro> {
+        let (raw_node, mut advance_state, err) = match self.enter_expand_intern(db, macro_call)? {
+            ExpandResult { value: Some((raw_node, advance_state)), err } => {
+                (raw_node, advance_state, err)
+            }
+            ExpandResult { value: None, err } => return Ok(ExpandResult { value: None, err }),
+        };
+
+        log::debug!("macro expansion {:#?}", raw_node);
+
+        let mark = advance_state(db);
+
+        Ok(ExpandResult { value: Some((mark, raw_node)), err })
+    }
+
+    pub(crate) fn enter_expand<T: ast::AstNode>(
+        &mut self,
+        db: &dyn DefDatabase,
+        macro_call: ast::MacroCall,
+    ) -> Result<ExpandResult<Option<(Mark, T)>>, UnresolvedMacro> {
+        let (raw_node, mut advance_state, err) = match self.enter_expand_intern(db, macro_call)? {
+            ExpandResult { value: Some((raw_node, advance_state)), err } => {
+                (raw_node, advance_state, err)
+            }
+            ExpandResult { value: None, err } => return Ok(ExpandResult { value: None, err }),
+        };
+
         let node = match T::cast(raw_node) {
             Some(it) => it,
             None => {
@@ -157,15 +209,7 @@ impl Expander {
         log::debug!("macro expansion {:#?}", node.syntax());

-        self.recursion_limit += 1;
-        let mark = Mark {
-            file_id: self.current_file_id,
-            ast_id_map: mem::take(&mut self.ast_id_map),
-            bomb: DropBomb::new("expansion mark dropped"),
-        };
-        self.cfg_expander.hygiene = Hygiene::new(db.upcast(), file_id);
-        self.current_file_id = file_id;
-        self.ast_id_map = db.ast_id_map(file_id);
+        let mark = advance_state(db);

         Ok(ExpandResult { value: Some((mark, node)), err })
     }
@@ -191,7 +235,8 @@ impl Expander {
     }

     fn parse_path(&mut self, path: ast::Path) -> Option<Path> {
-        Path::from_src(path, &self.cfg_expander.hygiene)
+        let ctx = LowerCtx::with_hygiene(&self.cfg_expander.hygiene);
+        Path::from_src(path, &ctx)
     }

     fn resolve_path_as_macro(&self, db: &dyn DefDatabase, path: &ModPath) -> Option<MacroDefId> {
@@ -204,6 +249,7 @@ impl Expander {
     }
 }

+#[derive(Debug)]
 pub(crate) struct Mark {
     file_id: HirFileId,
     ast_id_map: Arc<AstIdMap>,

View file

@@ -1,10 +1,11 @@
 //! Transforms `ast::Expr` into an equivalent `hir_def::expr::Expr`
 //! representation.

-use std::mem;
+use std::{mem, sync::Arc};

 use either::Either;
 use hir_expand::{
+    ast_id_map::{AstIdMap, FileAstId},
     hygiene::Hygiene,
     name::{name, AsName, Name},
     ExpandError, HirFileId,
@@ -39,20 +40,39 @@ use crate::{
 use super::{diagnostics::BodyDiagnostic, ExprSource, PatSource};

-pub(crate) struct LowerCtx {
+pub struct LowerCtx {
     hygiene: Hygiene,
+    file_id: Option<HirFileId>,
+    source_ast_id_map: Option<Arc<AstIdMap>>,
 }

 impl LowerCtx {
-    pub(crate) fn new(db: &dyn DefDatabase, file_id: HirFileId) -> Self {
-        LowerCtx { hygiene: Hygiene::new(db.upcast(), file_id) }
+    pub fn new(db: &dyn DefDatabase, file_id: HirFileId) -> Self {
+        LowerCtx {
+            hygiene: Hygiene::new(db.upcast(), file_id),
+            file_id: Some(file_id),
+            source_ast_id_map: Some(db.ast_id_map(file_id)),
+        }
     }

-    pub(crate) fn with_hygiene(hygiene: &Hygiene) -> Self {
-        LowerCtx { hygiene: hygiene.clone() }
+    pub fn with_hygiene(hygiene: &Hygiene) -> Self {
+        LowerCtx { hygiene: hygiene.clone(), file_id: None, source_ast_id_map: None }
+    }
+
+    pub(crate) fn hygiene(&self) -> &Hygiene {
+        &self.hygiene
+    }
+
+    pub(crate) fn file_id(&self) -> HirFileId {
+        self.file_id.unwrap()
     }

     pub(crate) fn lower_path(&self, ast: ast::Path) -> Option<Path> {
-        Path::from_src(ast, &self.hygiene)
+        Path::from_src(ast, self)
+    }
+
+    pub(crate) fn ast_id<N: AstNode>(&self, item: &N) -> Option<FileAstId<N>> {
+        self.source_ast_id_map.as_ref().map(|ast_id_map| ast_id_map.ast_id(item))
     }
 }

View file

@@ -123,10 +123,11 @@ impl TypeAliasData {
         let loc = typ.lookup(db);
         let item_tree = loc.id.item_tree(db);
         let typ = &item_tree[loc.id.value];
+        let type_ref = typ.type_ref.clone();
         Arc::new(TypeAliasData {
             name: typ.name.clone(),
-            type_ref: typ.type_ref.clone(),
+            type_ref: type_ref,
             visibility: item_tree[typ.visibility].clone(),
             is_extern: typ.is_extern,
             bounds: typ.bounds.to_vec(),
@@ -202,12 +203,13 @@ impl ImplData {
         let item_tree = impl_loc.id.item_tree(db);
         let impl_def = &item_tree[impl_loc.id.value];
         let target_trait = impl_def.target_trait.clone();
-        let self_ty = impl_def.self_ty.clone();
         let is_negative = impl_def.is_negative;
         let module_id = impl_loc.container;
         let container = AssocContainerId::ImplId(id);
+        let file_id = impl_loc.id.file_id();
+        let self_ty = impl_def.self_ty.clone();

-        let mut expander = Expander::new(db, impl_loc.id.file_id(), module_id);
+        let mut expander = Expander::new(db, file_id, module_id);
         let items = collect_items(
             db,
             module_id,

View file

@@ -189,12 +189,16 @@ impl Ctx {
                     block_stack.push(self.source_ast_id_map.ast_id(&block));
                 },
                 ast::Item(item) => {
-                    // FIXME: This triggers for macro calls in expression/pattern/type position
-                    let mod_items = self.lower_mod_item(&item, true);
-                    let current_block = block_stack.last();
-                    if let (Some(mod_items), Some(block)) = (mod_items, current_block) {
-                        if !mod_items.0.is_empty() {
-                            self.data().inner_items.entry(*block).or_default().extend(mod_items.0.iter().copied());
+                    // FIXME: This triggers for macro calls in expression/pattern
+                    if let Some(SyntaxKind::MACRO_TYPE) = node.parent().map(|p| p.kind()) {
+                        // Ignore macros at type position
+                    } else {
+                        let mod_items = self.lower_mod_item(&item, true);
+                        let current_block = block_stack.last();
+                        if let (Some(mod_items), Some(block)) = (mod_items, current_block) {
+                            if !mod_items.0.is_empty() {
+                                self.data().inner_items.entry(*block).or_default().extend(mod_items.0.iter().copied());
+                            }
                         }
                     }
                 },

View file

@@ -676,6 +676,7 @@ impl<T: ast::AstNode> AstIdWithPath<T> {
     }
 }

+#[derive(Debug)]
 pub struct UnresolvedMacro {
     pub path: ModPath,
 }

View file

@@ -48,7 +48,8 @@ pub enum ImportAlias {
 impl ModPath {
     pub fn from_src(path: ast::Path, hygiene: &Hygiene) -> Option<ModPath> {
-        lower::lower_path(path, hygiene).map(|it| (*it.mod_path).clone())
+        let ctx = LowerCtx::with_hygiene(hygiene);
+        lower::lower_path(path, &ctx).map(|it| (*it.mod_path).clone())
     }

     pub fn from_segments(kind: PathKind, segments: impl IntoIterator<Item = Name>) -> ModPath {
@@ -167,8 +168,8 @@ pub enum GenericArg {
 impl Path {
     /// Converts an `ast::Path` to `Path`. Works with use trees.
     /// It correctly handles `$crate` based path from macro call.
-    pub fn from_src(path: ast::Path, hygiene: &Hygiene) -> Option<Path> {
-        lower::lower_path(path, hygiene)
+    pub fn from_src(path: ast::Path, ctx: &LowerCtx) -> Option<Path> {
+        lower::lower_path(path, ctx)
     }

     /// Converts a known mod path to `Path`.

View file

@@ -6,10 +6,7 @@ use crate::intern::Interned;
 use std::sync::Arc;

 use either::Either;
-use hir_expand::{
-    hygiene::Hygiene,
-    name::{name, AsName},
-};
+use hir_expand::name::{name, AsName};
 use syntax::ast::{self, AstNode, TypeBoundsOwner};

 use super::AssociatedTypeBinding;
@@ -23,12 +20,12 @@ pub(super) use lower_use::lower_use_tree;

 /// Converts an `ast::Path` to `Path`. Works with use trees.
 /// It correctly handles `$crate` based path from macro call.
-pub(super) fn lower_path(mut path: ast::Path, hygiene: &Hygiene) -> Option<Path> {
+pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx) -> Option<Path> {
     let mut kind = PathKind::Plain;
     let mut type_anchor = None;
     let mut segments = Vec::new();
     let mut generic_args = Vec::new();
-    let ctx = LowerCtx::with_hygiene(hygiene);
+    let hygiene = ctx.hygiene();
     loop {
         let segment = path.segment()?;
@@ -43,10 +40,10 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx) -> Option<Path> {
             Either::Left(name) => {
                 let args = segment
                     .generic_arg_list()
-                    .and_then(|it| lower_generic_args(&ctx, it))
+                    .and_then(|it| lower_generic_args(ctx, it))
                     .or_else(|| {
                         lower_generic_args_from_fn_path(
-                            &ctx,
+                            ctx,
                             segment.param_list(),
                             segment.ret_type(),
                         )
@@ -64,7 +61,7 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx) -> Option<Path> {
             ast::PathSegmentKind::Type { type_ref, trait_ref } => {
                 assert!(path.qualifier().is_none()); // this can only occur at the first segment

-                let self_type = TypeRef::from_ast(&ctx, type_ref?);
+                let self_type = TypeRef::from_ast(ctx, type_ref?);

                 match trait_ref {
                     // <T>::foo
@@ -74,7 +71,7 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx) -> Option<Path> {
                     }
                     // <T as Trait<A>>::Foo desugars to Trait<Self=T, A>::Foo
                     Some(trait_ref) => {
-                        let path = Path::from_src(trait_ref.path()?, hygiene)?;
+                        let path = Path::from_src(trait_ref.path()?, ctx)?;
                         let mod_path = (*path.mod_path).clone();
                         let num_segments = path.mod_path.segments.len();
                         kind = mod_path.kind;

View file

@@ -1,9 +1,16 @@
 //! HIR for references to types. Paths in these are not yet resolved. They can
 //! be directly created from an ast::TypeRef, without further queries.

-use hir_expand::name::Name;
-use syntax::ast;
+use std::borrow::Cow;

-use crate::{body::LowerCtx, path::Path};
+use hir_expand::{ast_id_map::FileAstId, name::Name, ExpandResult, InFile};
+use syntax::{algo::SyntaxRewriter, ast, AstNode, SyntaxKind, SyntaxNode};
+
+use crate::{
+    body::{Expander, LowerCtx},
+    db::DefDatabase,
+    path::Path,
+    ModuleId,
+};

 #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
 pub enum Mutability {
@@ -68,6 +75,7 @@ impl TraitRef {
         }
     }
 }
+
 /// Compare ty::Ty
 #[derive(Clone, PartialEq, Eq, Hash, Debug)]
 pub enum TypeRef {
@@ -84,6 +92,7 @@ pub enum TypeRef {
     // For
     ImplTrait(Vec<TypeBound>),
     DynTrait(Vec<TypeBound>),
+    Macro(InFile<FileAstId<ast::MacroCall>>),
     Error,
 }
@@ -176,8 +185,13 @@ impl TypeRef {
             ast::Type::DynTraitType(inner) => {
                 TypeRef::DynTrait(type_bounds_from_ast(ctx, inner.type_bound_list()))
             }
-            // FIXME: Macros in type position are not yet supported.
-            ast::Type::MacroType(_) => TypeRef::Error,
+            ast::Type::MacroType(mt) => match mt.macro_call() {
+                Some(mc) => ctx
+                    .ast_id(&mc)
+                    .map(|mc| TypeRef::Macro(InFile::new(ctx.file_id(), mc)))
+                    .unwrap_or(TypeRef::Error),
+                None => TypeRef::Error,
+            },
         }
     }
@@ -193,6 +207,16 @@ impl TypeRef {
         TypeRef::Tuple(Vec::new())
     }

+    pub fn has_macro_calls(&self) -> bool {
+        let mut has_macro_call = false;
+        self.walk(&mut |ty_ref| {
+            if let TypeRef::Macro(_) = ty_ref {
+                has_macro_call |= true
+            }
+        });
+        has_macro_call
+    }
+
     pub fn walk(&self, f: &mut impl FnMut(&TypeRef)) {
         go(self, f);
@@ -215,7 +239,7 @@ impl TypeRef {
                 }
             }
             TypeRef::Path(path) => go_path(path, f),
-            TypeRef::Never | TypeRef::Placeholder | TypeRef::Error => {}
+            TypeRef::Never | TypeRef::Placeholder | TypeRef::Macro(_) | TypeRef::Error => {}
         };
     }
@@ -290,3 +314,69 @@ impl TypeBound {
         }
     }
 }
+
+pub fn expand_type_ref<'a>(
+    db: &dyn DefDatabase,
+    module_id: ModuleId,
+    type_ref: &'a TypeRef,
+) -> Option<Cow<'a, TypeRef>> {
+    let macro_call = match type_ref {
+        TypeRef::Macro(macro_call) => macro_call,
+        _ => return Some(Cow::Borrowed(type_ref)),
+    };
+
+    let file_id = macro_call.file_id;
+    let macro_call = macro_call.to_node(db.upcast());
+
+    let mut expander = Expander::new(db, file_id, module_id);
+    let expanded = expand(db, &mut expander, &macro_call, true)?;
+
+    let node = ast::Type::cast(expanded)?;
+
+    let ctx = LowerCtx::new(db, file_id);
+    return Some(Cow::Owned(TypeRef::from_ast(&ctx, node)));
+
+    fn expand(
+        db: &dyn DefDatabase,
+        expander: &mut Expander,
+        macro_call: &ast::MacroCall,
+        expect_type: bool,
+    ) -> Option<SyntaxNode> {
+        let (mark, mut expanded) = match expander.enter_expand_raw(db, macro_call.clone()) {
+            Ok(ExpandResult { value: Some((mark, expanded)), .. }) => (mark, expanded),
+            _ => return None,
+        };
+
+        if expect_type && !ast::Type::can_cast(expanded.kind()) {
+            expander.exit(db, mark);
+            return None;
+        }
+
+        if ast::MacroType::can_cast(expanded.kind()) {
+            expanded = expanded.first_child()?; // MACRO_CALL
+        }
+
+        let mut rewriter = SyntaxRewriter::default();
+
+        let children = expanded.descendants().filter_map(ast::MacroCall::cast);
+        for child in children {
+            if let Some(new_node) = expand(db, expander, &child, false) {
+                if expanded == *child.syntax() {
+                    expanded = new_node;
+                } else {
+                    let parent = child.syntax().parent();
+                    let old_node = match &parent {
+                        Some(node) if node.kind() == SyntaxKind::MACRO_TYPE => node,
+                        _ => child.syntax(),
+                    };
+                    rewriter.replace(old_node, &new_node)
+                }
+            }
+        }
+
+        expander.exit(db, mark);
+
+        let res = rewriter.rewrite(&expanded);
+        Some(res)
+    }
+}

View file

@@ -440,6 +440,7 @@ fn to_fragment_kind(db: &dyn AstDatabase, id: MacroCallId) -> FragmentKind {
         MACRO_ITEMS | SOURCE_FILE => FragmentKind::Items,
         MACRO_STMTS => FragmentKind::Statements,
         MACRO_PAT => FragmentKind::Pattern,
+        MACRO_TYPE => FragmentKind::Type,
         ITEM_LIST => FragmentKind::Items,
         LET_STMT => {
             // FIXME: Handle LHS Pattern

View file

@@ -31,6 +31,7 @@ use parser::FragmentKind;
 use std::sync::Arc;
 use syntax::{algo::SyntaxRewriter, SyntaxNode};

+#[derive(Debug)]
 pub struct ErrorEmitted {
     _private: (),
 }

View file

@@ -997,7 +997,7 @@ impl HirDisplay for TypeRef {
                 write!(f, "dyn ")?;
                 f.write_joined(bounds, " + ")?;
             }
-            TypeRef::Error => write!(f, "{{error}}")?,
+            TypeRef::Error | TypeRef::Macro(_) => write!(f, "{{error}}")?,
         }
         Ok(())
     }

View file

@@ -15,7 +15,7 @@ use hir_def::{
     generics::{TypeParamProvenance, WherePredicate, WherePredicateTypeTarget},
     path::{GenericArg, Path, PathSegment, PathSegments},
     resolver::{HasResolver, Resolver, TypeNs},
-    type_ref::{TraitRef as HirTraitRef, TypeBound, TypeRef},
+    type_ref::{expand_type_ref, TraitRef as HirTraitRef, TypeBound, TypeRef},
     AdtId, AssocContainerId, AssocItemId, ConstId, ConstParamId, EnumId, EnumVariantId, FunctionId,
     GenericDefId, HasModule, ImplId, LocalFieldId, Lookup, StaticId, StructId, TraitId,
     TypeAliasId, TypeParamId, UnionId, VariantId,
@@ -287,6 +287,16 @@ impl<'a> TyLoweringContext<'a> {
                 }
             }
+            mt @ TypeRef::Macro(_) => {
+                if let Some(module_id) = self.resolver.module() {
+                    match expand_type_ref(self.db.upcast(), module_id, mt) {
+                        Some(type_ref) => self.lower_ty(type_ref.as_ref()),
+                        None => TyKind::Error.intern(&Interner),
+                    }
+                } else {
+                    TyKind::Error.intern(&Interner)
+                }
+            }
             TypeRef::Error => TyKind::Error.intern(&Interner),
         };
         (ty, res)

View file

@@ -1074,3 +1074,172 @@ fn macro_in_arm() {
         "#]],
     );
 }
+
+#[test]
+fn macro_in_type_alias_position() {
+    check_infer(
+        r#"
+        macro_rules! U32 {
+            () => { u32 };
+        }
+
+        trait Foo {
+            type Ty;
+        }
+
+        impl<T> Foo for T {
+            type Ty = U32!();
+        }
+
+        type TayTo = U32!();
+        fn testy() {
+            let a: <() as Foo>::Ty;
+            let b: TayTo;
+        }
+        "#,
+        expect![[r#"
+            147..196 '{ ...yTo; }': ()
+            157..158 'a': u32
+            185..186 'b': u32
+        "#]],
+    );
+}
+
+#[test]
+fn nested_macro_in_type_alias_position() {
+    check_infer(
+        r#"
+        macro_rules! U32Inner2 {
+            () => { u32 };
+        }
+
+        macro_rules! U32Inner1 {
+            () => { U32Inner2!() };
+        }
+
+        macro_rules! U32 {
+            () => { U32Inner1!() };
+        }
+
+        trait Foo {
+            type Ty;
+        }
+
+        impl<T> Foo for T {
+            type Ty = U32!();
+        }
+
+        type TayTo = U32!();
+        fn testy() {
+            let a: <() as Foo>::Ty;
+            let b: TayTo;
+        }
+        "#,
+        expect![[r#"
+            259..308 '{ ...yTo; }': ()
+            269..270 'a': u32
+            297..298 'b': u32
+        "#]],
+    );
+}
+
+#[test]
+fn macros_in_type_alias_position_generics() {
+    check_infer(
+        r#"
+        struct Foo<A, B>(A, B);
+
+        macro_rules! U32 {
+            () => { u32 };
+        }
+
+        macro_rules! Bar {
+            () => { Foo<U32!(), U32!()> };
+        }
+
+        trait Moo {
+            type Ty;
+        }
+
+        impl<T> Moo for T {
+            type Ty = Bar!();
+        }
+
+        type TayTo = Bar!();
+        fn main() {
+            let a: <() as Moo>::Ty;
+            let b: TayTo;
+        }
+        "#,
+        expect![[r#"
+            228..277 '{ ...yTo; }': ()
+            238..239 'a': Foo<u32, u32>
+            266..267 'b': Foo<u32, u32>
+        "#]],
+    );
+}
+
+#[test]
+fn macros_in_type_position() {
+    check_infer(
+        r#"
+        struct Foo<A, B>(A, B);
+
+        macro_rules! U32 {
+            () => { u32 };
+        }
+
+        macro_rules! Bar {
+            () => { Foo<U32!(), U32!()> };
+        }
+        fn main() {
+            let a: Bar!();
+        }
+        "#,
+        expect![[r#"
+            133..155 '{ ...!(); }': ()
+            143..144 'a': Foo<u32, u32>
+        "#]],
+    );
+}
+
+#[test]
+fn macros_in_type_generics() {
+    check_infer(
+        r#"
+        struct Foo<A, B>(A, B);
+
+        macro_rules! U32 {
+            () => { u32 };
+        }
+
+        macro_rules! Bar {
+            () => { Foo<U32!(), U32!()> };
+        }
+
+        trait Moo {
+            type Ty;
+        }
+
+        impl<T> Moo for T {
+            type Ty = Foo<Bar!(), Bar!()>;
+        }
+
+        type TayTo = Foo<Bar!(), U32!()>;
+        fn main() {
+            let a: <() as Moo>::Ty;
+            let b: TayTo;
+        }
+        "#,
+        expect![[r#"
+            254..303 '{ ...yTo; }': ()
+            264..265 'a': Foo<Foo<u32, u32>, Foo<u32, u32>>
+            292..293 'b': Foo<Foo<u32, u32>, u32>
+        "#]],
+    );
+}

View file

@@ -283,17 +283,21 @@ pub(super) fn path_type(p: &mut Parser) {
 // type B = crate::foo!();
 fn path_or_macro_type_(p: &mut Parser, allow_bounds: bool) {
     assert!(paths::is_path_start(p));
+    let r = p.start();
     let m = p.start();
+
     paths::type_path(p);

     let kind = if p.at(T![!]) && !p.at(T![!=]) {
         items::macro_call_after_excl(p);
-        MACRO_CALL
+        m.complete(p, MACRO_CALL);
+        MACRO_TYPE
     } else {
+        m.abandon(p);
         PATH_TYPE
     };

-    let path = m.complete(p, kind);
+    let path = r.complete(p, kind);

     if allow_bounds {
         opt_type_bounds_as_dyn_trait_type(p, path);
@@ -319,7 +323,7 @@ pub(super) fn path_type_(p: &mut Parser, allow_bounds: bool) {
 fn opt_type_bounds_as_dyn_trait_type(p: &mut Parser, type_marker: CompletedMarker) {
     assert!(matches!(
         type_marker.kind(),
-        SyntaxKind::PATH_TYPE | SyntaxKind::FOR_TYPE | SyntaxKind::MACRO_CALL
+        SyntaxKind::PATH_TYPE | SyntaxKind::FOR_TYPE | SyntaxKind::MACRO_TYPE
     ));
     if !p.at(T![+]) {
         return;

View file

@@ -7,15 +7,16 @@ SOURCE_FILE@0..41
     WHITESPACE@6..7 " "
     EQ@7..8 "="
     WHITESPACE@8..9 " "
-    MACRO_CALL@9..15
-      PATH@9..12
-        PATH_SEGMENT@9..12
-          NAME_REF@9..12
-            IDENT@9..12 "foo"
-      BANG@12..13 "!"
-      TOKEN_TREE@13..15
-        L_PAREN@13..14 "("
-        R_PAREN@14..15 ")"
+    MACRO_TYPE@9..15
+      MACRO_CALL@9..15
+        PATH@9..12
+          PATH_SEGMENT@9..12
+            NAME_REF@9..12
+              IDENT@9..12 "foo"
+        BANG@12..13 "!"
+        TOKEN_TREE@13..15
+          L_PAREN@13..14 "("
+          R_PAREN@14..15 ")"
     SEMICOLON@15..16 ";"
     WHITESPACE@16..17 "\n"
   TYPE_ALIAS@17..40
@@ -26,19 +27,20 @@ SOURCE_FILE@0..41
     WHITESPACE@23..24 " "
     EQ@24..25 "="
     WHITESPACE@25..26 " "
-    MACRO_CALL@26..39
-      PATH@26..36
-        PATH@26..31
-          PATH_SEGMENT@26..31
-            NAME_REF@26..31
-              CRATE_KW@26..31 "crate"
-        COLON2@31..33 "::"
-        PATH_SEGMENT@33..36
-          NAME_REF@33..36
-            IDENT@33..36 "foo"
-      BANG@36..37 "!"
-      TOKEN_TREE@37..39
-        L_PAREN@37..38 "("
-        R_PAREN@38..39 ")"
+    MACRO_TYPE@26..39
+      MACRO_CALL@26..39
+        PATH@26..36
+          PATH@26..31
+            PATH_SEGMENT@26..31
+              NAME_REF@26..31
+                CRATE_KW@26..31 "crate"
+          COLON2@31..33 "::"
+          PATH_SEGMENT@33..36
+            NAME_REF@33..36
+              IDENT@33..36 "foo"
+        BANG@36..37 "!"
+        TOKEN_TREE@37..39
+          L_PAREN@37..38 "("
+          R_PAREN@38..39 ")"
     SEMICOLON@39..40 ";"
     WHITESPACE@40..41 "\n"