Mirror of https://github.com/rust-lang/rust-analyzer, synced 2024-12-26 21:13:37 +00:00
Merge commit 'baee6b338b0ea076cd7a9f18d47f175dd2ba0e5d' into sync-from-ra
commit aa55ce9567
parent 0155385b57
139 changed files with 4248 additions and 1042 deletions
@@ -485,6 +485,7 @@ impl AttrsWithOwner {
            },
            AttrDefId::ExternBlockId(it) => attrs_from_item_tree_loc(db, it),
            AttrDefId::ExternCrateId(it) => attrs_from_item_tree_loc(db, it),
            AttrDefId::UseId(it) => attrs_from_item_tree_loc(db, it),
        };

        let attrs = raw_attrs.filter(db.upcast(), def.krate(db));

@@ -570,6 +571,7 @@ impl AttrsWithOwner {
            },
            AttrDefId::ExternBlockId(id) => any_has_attrs(db, id),
            AttrDefId::ExternCrateId(id) => any_has_attrs(db, id),
            AttrDefId::UseId(id) => any_has_attrs(db, id),
        };

        AttrSourceMap::new(owner.as_ref().map(|node| node as &dyn HasAttrs))
@@ -313,13 +313,7 @@ impl ExprCollector<'_> {
                let body = self.collect_labelled_block_opt(label, e.loop_body());
                self.alloc_expr(Expr::Loop { body, label }, syntax_ptr)
            }
            ast::Expr::WhileExpr(e) => {
                let label = e.label().map(|label| self.collect_label(label));
                let body = self.collect_labelled_block_opt(label, e.loop_body());
                let condition = self.collect_expr_opt(e.condition());

                self.alloc_expr(Expr::While { condition, body, label }, syntax_ptr)
            }
            ast::Expr::WhileExpr(e) => self.collect_while_loop(syntax_ptr, e),
            ast::Expr::ForExpr(e) => self.collect_for_loop(syntax_ptr, e),
            ast::Expr::CallExpr(e) => {
                let is_rustc_box = {

@@ -731,6 +725,32 @@ impl ExprCollector<'_> {
        expr_id
    }

    /// Desugar `ast::WhileExpr` from: `[opt_ident]: while <cond> <body>` into:
    /// ```ignore (pseudo-rust)
    /// [opt_ident]: loop {
    ///     if <cond> {
    ///         <body>
    ///     }
    ///     else {
    ///         break;
    ///     }
    /// }
    /// ```
    /// FIXME: Rustc wraps the condition in a construct equivalent to `{ let _t = <cond>; _t }`
    /// to preserve drop semantics. We should probably do the same in future.
    fn collect_while_loop(&mut self, syntax_ptr: AstPtr<ast::Expr>, e: ast::WhileExpr) -> ExprId {
        let label = e.label().map(|label| self.collect_label(label));
        let body = self.collect_labelled_block_opt(label, e.loop_body());
        let condition = self.collect_expr_opt(e.condition());
        let break_expr =
            self.alloc_expr(Expr::Break { expr: None, label: None }, syntax_ptr.clone());
        let if_expr = self.alloc_expr(
            Expr::If { condition, then_branch: body, else_branch: Some(break_expr) },
            syntax_ptr.clone(),
        );
        self.alloc_expr(Expr::Loop { body: if_expr, label }, syntax_ptr)
    }

    /// Desugar `ast::ForExpr` from: `[opt_ident]: for <pat> in <head> <body>` into:
    /// ```ignore (pseudo-rust)
    /// match IntoIterator::into_iter(<head>) {

@@ -893,15 +913,14 @@ impl ExprCollector<'_> {
        self.alloc_expr(Expr::Match { expr, arms }, syntax_ptr)
    }

    fn collect_macro_call<F, T, U>(
    fn collect_macro_call<T, U>(
        &mut self,
        mcall: ast::MacroCall,
        syntax_ptr: AstPtr<ast::MacroCall>,
        record_diagnostics: bool,
        collector: F,
        collector: impl FnOnce(&mut Self, Option<T>) -> U,
    ) -> U
    where
        F: FnOnce(&mut Self, Option<T>) -> U,
        T: ast::AstNode,
    {
        // File containing the macro call. Expansion errors will be attached here.
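The hunks above replace the dedicated `Expr::While` lowering with the desugaring into `Expr::Loop` described by the new doc comment. As a plain-Rust illustration of the equivalence the lowering relies on (a standalone sketch, not rust-analyzer code):

fn count_up(limit: u32) -> u32 {
    let mut n = 0;
    // Surface syntax: `while n < limit { n += 1; }`
    // Desugared shape produced by `collect_while_loop` above:
    loop {
        if n < limit {
            n += 1;
        } else {
            break;
        }
    }
    n
}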
@@ -178,14 +178,6 @@ impl Printer<'_> {
                w!(self, "loop ");
                self.print_expr(*body);
            }
            Expr::While { condition, body, label } => {
                if let Some(lbl) = label {
                    w!(self, "{}: ", self.body[*lbl].name.display(self.db));
                }
                w!(self, "while ");
                self.print_expr(*condition);
                self.print_expr(*body);
            }
            Expr::Call { callee, args, is_assignee_expr: _ } => {
                self.print_expr(*callee);
                w!(self, "(");
@@ -228,11 +228,6 @@ fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope
            scopes.set_scope(expr, scope);
            compute_block_scopes(statements, *tail, body, scopes, &mut scope);
        }
        Expr::While { condition, body: body_expr, label } => {
            let mut scope = scopes.new_labeled_scope(*scope, make_label(label));
            compute_expr_scopes(*condition, body, scopes, &mut scope);
            compute_expr_scopes(*body_expr, body, scopes, &mut scope);
        }
        Expr::Loop { body: body_expr, label } => {
            let mut scope = scopes.new_labeled_scope(*scope, make_label(label));
            compute_expr_scopes(*body_expr, body, scopes, &mut scope);
@@ -14,8 +14,8 @@ use crate::{
    item_scope::ItemScope,
    nameres::DefMap,
    src::{HasChildSource, HasSource},
    AdtId, AssocItemId, DefWithBodyId, EnumId, EnumVariantId, FieldId, ImplId, Lookup, MacroId,
    ModuleDefId, ModuleId, TraitId, VariantId,
    AdtId, AssocItemId, DefWithBodyId, EnumId, EnumVariantId, ExternCrateId, FieldId, ImplId,
    Lookup, MacroId, ModuleDefId, ModuleId, TraitId, UseId, VariantId,
};

pub trait ChildBySource {

@@ -91,6 +91,8 @@ impl ChildBySource for ItemScope {
    fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
        self.declarations().for_each(|item| add_module_def(db, res, file_id, item));
        self.impls().for_each(|imp| add_impl(db, res, file_id, imp));
        self.extern_crate_decls().for_each(|ext| add_extern_crate(db, res, file_id, ext));
        self.use_decls().for_each(|ext| add_use(db, res, file_id, ext));
        self.unnamed_consts().for_each(|konst| {
            let loc = konst.lookup(db);
            if loc.id.file_id() == file_id {

@@ -167,6 +169,23 @@ impl ChildBySource for ItemScope {
            map[keys::IMPL].insert(loc.source(db).value, imp)
        }
    }
    fn add_extern_crate(
        db: &dyn DefDatabase,
        map: &mut DynMap,
        file_id: HirFileId,
        ext: ExternCrateId,
    ) {
        let loc = ext.lookup(db);
        if loc.id.file_id() == file_id {
            map[keys::EXTERN_CRATE].insert(loc.source(db).value, ext)
        }
    }
    fn add_use(db: &dyn DefDatabase, map: &mut DynMap, file_id: HirFileId, ext: UseId) {
        let loc = ext.lookup(db);
        if loc.id.file_id() == file_id {
            map[keys::USE].insert(loc.source(db).value, ext)
        }
    }
    }
}
@@ -2,6 +2,7 @@

pub mod adt;

use base_db::CrateId;
use hir_expand::{
    name::Name, AstId, ExpandResult, HirFileId, InFile, MacroCallId, MacroCallKind, MacroDefKind,
};

@@ -467,6 +468,7 @@ pub struct ExternCrateDeclData {
    pub name: Name,
    pub alias: Option<ImportAlias>,
    pub visibility: RawVisibility,
    pub crate_id: Option<CrateId>,
}

impl ExternCrateDeclData {

@@ -478,10 +480,21 @@ impl ExternCrateDeclData {
        let item_tree = loc.id.item_tree(db);
        let extern_crate = &item_tree[loc.id.value];

        let name = extern_crate.name.clone();
        let crate_id = if name == hir_expand::name![self] {
            Some(loc.container.krate())
        } else {
            db.crate_def_map(loc.container.krate())
                .extern_prelude()
                .find(|&(prelude_name, ..)| *prelude_name == name)
                .map(|(_, root)| root.krate())
        };

        Arc::new(Self {
            name: extern_crate.name.clone(),
            visibility: item_tree[extern_crate.visibility].clone(),
            alias: extern_crate.alias.clone(),
            crate_id,
        })
    }
}
@@ -23,17 +23,17 @@ use crate::{
    visibility::{self, Visibility},
    AttrDefId, BlockId, BlockLoc, ConstBlockId, ConstBlockLoc, ConstId, ConstLoc, DefWithBodyId,
    EnumId, EnumLoc, ExternBlockId, ExternBlockLoc, ExternCrateId, ExternCrateLoc, FunctionId,
    FunctionLoc, GenericDefId, ImplId, ImplLoc, ImportId, ImportLoc, InTypeConstId, InTypeConstLoc,
    LocalEnumVariantId, LocalFieldId, Macro2Id, Macro2Loc, MacroRulesId, MacroRulesLoc,
    ProcMacroId, ProcMacroLoc, StaticId, StaticLoc, StructId, StructLoc, TraitAliasId,
    TraitAliasLoc, TraitId, TraitLoc, TypeAliasId, TypeAliasLoc, UnionId, UnionLoc, VariantId,
    FunctionLoc, GenericDefId, ImplId, ImplLoc, InTypeConstId, InTypeConstLoc, LocalEnumVariantId,
    LocalFieldId, Macro2Id, Macro2Loc, MacroRulesId, MacroRulesLoc, ProcMacroId, ProcMacroLoc,
    StaticId, StaticLoc, StructId, StructLoc, TraitAliasId, TraitAliasLoc, TraitId, TraitLoc,
    TypeAliasId, TypeAliasLoc, UnionId, UnionLoc, UseId, UseLoc, VariantId,
};

#[salsa::query_group(InternDatabaseStorage)]
pub trait InternDatabase: SourceDatabase {
    // region: items
    #[salsa::interned]
    fn intern_import(&self, loc: ImportLoc) -> ImportId;
    fn intern_use(&self, loc: UseLoc) -> UseId;
    #[salsa::interned]
    fn intern_extern_crate(&self, loc: ExternCrateLoc) -> ExternCrateId;
    #[salsa::interned]
@@ -10,7 +10,7 @@ use crate::{
    dyn_map::{DynMap, Policy},
    ConstId, EnumId, EnumVariantId, ExternCrateId, FieldId, FunctionId, ImplId, LifetimeParamId,
    Macro2Id, MacroRulesId, ProcMacroId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId,
    TypeOrConstParamId, UnionId,
    TypeOrConstParamId, UnionId, UseId,
};

pub type Key<K, V> = crate::dyn_map::Key<K, V, AstPtrPolicy<K, V>>;

@@ -26,6 +26,7 @@ pub const STRUCT: Key<ast::Struct, StructId> = Key::new();
pub const UNION: Key<ast::Union, UnionId> = Key::new();
pub const ENUM: Key<ast::Enum, EnumId> = Key::new();
pub const EXTERN_CRATE: Key<ast::ExternCrate, ExternCrateId> = Key::new();
pub const USE: Key<ast::Use, UseId> = Key::new();

pub const VARIANT: Key<ast::Variant, EnumVariantId> = Key::new();
pub const TUPLE_FIELD: Key<ast::TupleField, FieldId> = Key::new();
@@ -164,18 +164,26 @@ impl Expander {
            return ExpandResult { value: None, err };
        };

        Self::enter_expand_inner(db, call_id, err).map(|value| {
            value.and_then(|InFile { file_id, value }| {
                let parse = value.cast::<T>()?;
        let res = Self::enter_expand_inner(db, call_id, err);
        match res.err {
            // If proc-macro is disabled or unresolved, we want to expand to a missing expression
            // instead of an empty tree which might end up in an empty block.
            Some(ExpandError::UnresolvedProcMacro(_)) => res.map(|_| None),
            _ => res.map(|value| {
                value.and_then(|InFile { file_id, value }| {
                    let parse = value.cast::<T>()?;

                self.recursion_depth += 1;
                self.hygiene = Hygiene::new(db.upcast(), file_id);
                let old_file_id = std::mem::replace(&mut self.current_file_id, file_id);
                let mark =
                    Mark { file_id: old_file_id, bomb: DropBomb::new("expansion mark dropped") };
                Some((mark, parse))
            })
        })
                    self.recursion_depth += 1;
                    self.hygiene = Hygiene::new(db.upcast(), file_id);
                    let old_file_id = std::mem::replace(&mut self.current_file_id, file_id);
                    let mark = Mark {
                        file_id: old_file_id,
                        bomb: DropBomb::new("expansion mark dropped"),
                    };
                    Some((mark, parse))
                })
            }),
        }
    }
}
@@ -191,11 +191,6 @@ pub enum Expr {
        body: ExprId,
        label: Option<LabelId>,
    },
    While {
        condition: ExprId,
        body: ExprId,
        label: Option<LabelId>,
    },
    Call {
        callee: ExprId,
        args: Box<[ExprId]>,

@@ -379,10 +374,6 @@ impl Expr {
                    }
                }
                Expr::Loop { body, .. } => f(*body),
                Expr::While { condition, body, .. } => {
                    f(*condition);
                    f(*body);
                }
                Expr::Call { callee, args, .. } => {
                    f(*callee);
                    args.iter().copied().for_each(f);
@@ -16,6 +16,7 @@ use syntax::ast;
use crate::{
    db::DefDatabase, per_ns::PerNs, visibility::Visibility, AdtId, BuiltinType, ConstId,
    ExternCrateId, HasModule, ImplId, LocalModuleId, MacroId, ModuleDefId, ModuleId, TraitId,
    UseId,
};

#[derive(Copy, Clone, Debug)]

@@ -113,6 +114,17 @@ impl ItemScope {
        self.declarations.iter().copied()
    }

    pub fn extern_crate_decls(
        &self,
    ) -> impl Iterator<Item = ExternCrateId> + ExactSizeIterator + '_ {
        self.extern_crate_decls.iter().copied()
    }

    pub fn use_decls(&self) -> impl Iterator<Item = UseId> + ExactSizeIterator + '_ {
        // FIXME: to be implemented
        std::iter::empty()
    }

    pub fn impls(&self) -> impl Iterator<Item = ImplId> + ExactSizeIterator + '_ {
        self.impls.iter().copied()
    }
@@ -188,7 +188,7 @@ impl ItemTree {
    fn shrink_to_fit(&mut self) {
        if let Some(data) = &mut self.data {
            let ItemTreeData {
                imports,
                uses,
                extern_crates,
                extern_blocks,
                functions,

@@ -211,7 +211,7 @@ impl ItemTree {
                vis,
            } = &mut **data;

            imports.shrink_to_fit();
            uses.shrink_to_fit();
            extern_crates.shrink_to_fit();
            extern_blocks.shrink_to_fit();
            functions.shrink_to_fit();

@@ -262,7 +262,7 @@ static VIS_PUB_CRATE: RawVisibility = RawVisibility::Module(ModPath::from_kind(P

#[derive(Default, Debug, Eq, PartialEq)]
struct ItemTreeData {
    imports: Arena<Import>,
    uses: Arena<Use>,
    extern_crates: Arena<ExternCrate>,
    extern_blocks: Arena<ExternBlock>,
    functions: Arena<Function>,

@@ -486,7 +486,7 @@ macro_rules! mod_items {
}

mod_items! {
    Import in imports -> ast::Use,
    Use in uses -> ast::Use,
    ExternCrate in extern_crates -> ast::ExternCrate,
    ExternBlock in extern_blocks -> ast::ExternBlock,
    Function in functions -> ast::Fn,

@@ -541,7 +541,7 @@ impl<N: ItemTreeNode> Index<FileItemTreeId<N>> for ItemTree {
}

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Import {
pub struct Use {
    pub visibility: RawVisibilityId,
    pub ast_id: FileAstId<ast::Use>,
    pub use_tree: UseTree,

@@ -744,7 +744,7 @@ pub struct MacroDef {
    pub ast_id: FileAstId<ast::MacroDef>,
}

impl Import {
impl Use {
    /// Maps a `UseTree` contained in this import back to its AST node.
    pub fn use_tree_to_ast(
        &self,

@@ -870,7 +870,7 @@ macro_rules! impl_froms {
impl ModItem {
    pub fn as_assoc_item(&self) -> Option<AssocItem> {
        match self {
            ModItem::Import(_)
            ModItem::Use(_)
            | ModItem::ExternCrate(_)
            | ModItem::ExternBlock(_)
            | ModItem::Struct(_)

@@ -892,7 +892,7 @@ impl ModItem {

    pub fn ast_id(&self, tree: &ItemTree) -> FileAstId<ast::Item> {
        match self {
            ModItem::Import(it) => tree[it.index].ast_id().upcast(),
            ModItem::Use(it) => tree[it.index].ast_id().upcast(),
            ModItem::ExternCrate(it) => tree[it.index].ast_id().upcast(),
            ModItem::ExternBlock(it) => tree[it.index].ast_id().upcast(),
            ModItem::Function(it) => tree[it.index].ast_id().upcast(),

@@ -502,13 +502,13 @@ impl<'a> Ctx<'a> {
        Some(id(self.data().impls.alloc(res)))
    }

    fn lower_use(&mut self, use_item: &ast::Use) -> Option<FileItemTreeId<Import>> {
    fn lower_use(&mut self, use_item: &ast::Use) -> Option<FileItemTreeId<Use>> {
        let visibility = self.lower_visibility(use_item);
        let ast_id = self.source_ast_id_map.ast_id(use_item);
        let (use_tree, _) = lower_use_tree(self.db, self.hygiene(), use_item.use_tree()?)?;

        let res = Import { visibility, ast_id, use_tree };
        Some(id(self.data().imports.alloc(res)))
        let res = Use { visibility, ast_id, use_tree };
        Some(id(self.data().uses.alloc(res)))
    }

    fn lower_extern_crate(
@@ -198,8 +198,8 @@ impl Printer<'_> {
        self.print_attrs_of(item);

        match item {
            ModItem::Import(it) => {
                let Import { visibility, use_tree, ast_id: _ } = &self.tree[it];
            ModItem::Use(it) => {
                let Use { visibility, use_tree, ast_id: _ } = &self.tree[it];
                self.print_visibility(*visibility);
                w!(self, "use ");
                self.print_use_tree(use_tree);
@@ -88,8 +88,8 @@ use crate::{
    builtin_type::BuiltinType,
    data::adt::VariantData,
    item_tree::{
        Const, Enum, ExternCrate, Function, Impl, Import, ItemTreeId, ItemTreeNode, MacroDef,
        MacroRules, Static, Struct, Trait, TraitAlias, TypeAlias, Union,
        Const, Enum, ExternCrate, Function, Impl, ItemTreeId, ItemTreeNode, MacroDef, MacroRules,
        Static, Struct, Trait, TraitAlias, TypeAlias, Union, Use,
    },
};

@@ -121,6 +121,12 @@ impl From<CrateRootModuleId> for ModuleDefId {
    }
}

impl From<CrateId> for CrateRootModuleId {
    fn from(krate: CrateId) -> Self {
        CrateRootModuleId { krate }
    }
}

impl TryFrom<ModuleId> for CrateRootModuleId {
    type Error = ();

@@ -318,9 +324,9 @@ type ImplLoc = ItemLoc<Impl>;
impl_intern!(ImplId, ImplLoc, intern_impl, lookup_intern_impl);

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
pub struct ImportId(salsa::InternId);
type ImportLoc = ItemLoc<Import>;
impl_intern!(ImportId, ImportLoc, intern_import, lookup_intern_import);
pub struct UseId(salsa::InternId);
type UseLoc = ItemLoc<Use>;
impl_intern!(UseId, UseLoc, intern_use, lookup_intern_use);

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
pub struct ExternCrateId(salsa::InternId);

@@ -836,6 +842,7 @@ pub enum AttrDefId {
    GenericParamId(GenericParamId),
    ExternBlockId(ExternBlockId),
    ExternCrateId(ExternCrateId),
    UseId(UseId),
}

impl_from!(

@@ -1073,6 +1080,7 @@ impl AttrDefId {
            }
            AttrDefId::MacroId(it) => it.module(db).krate,
            AttrDefId::ExternCrateId(it) => it.lookup(db).container.krate,
            AttrDefId::UseId(it) => it.lookup(db).container.krate,
        }
    }
}

@@ -1083,7 +1091,7 @@ pub trait AsMacroCall {
        &self,
        db: &dyn ExpandDatabase,
        krate: CrateId,
        resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
        resolver: impl Fn(path::ModPath) -> Option<MacroDefId> + Copy,
    ) -> Option<MacroCallId> {
        self.as_call_id_with_errors(db, krate, resolver).ok()?.value
    }

@@ -1092,7 +1100,7 @@ pub trait AsMacroCall {
        &self,
        db: &dyn ExpandDatabase,
        krate: CrateId,
        resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
        resolver: impl Fn(path::ModPath) -> Option<MacroDefId> + Copy,
    ) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro>;
}

@@ -1101,7 +1109,7 @@ impl AsMacroCall for InFile<&ast::MacroCall> {
        &self,
        db: &dyn ExpandDatabase,
        krate: CrateId,
        resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
        resolver: impl Fn(path::ModPath) -> Option<MacroDefId> + Copy,
    ) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro> {
        let expands_to = hir_expand::ExpandTo::from_call_site(self.value);
        let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(self.value));

@@ -1112,12 +1120,13 @@ impl AsMacroCall for InFile<&ast::MacroCall> {
            return Ok(ExpandResult::only_err(ExpandError::other("malformed macro invocation")));
        };

        macro_call_as_call_id_(
        macro_call_as_call_id_with_eager(
            db,
            &AstIdWithPath::new(ast_id.file_id, ast_id.value, path),
            expands_to,
            krate,
            resolver,
            resolver,
        )
    }
}

@@ -1140,33 +1149,39 @@ fn macro_call_as_call_id(
    call: &AstIdWithPath<ast::MacroCall>,
    expand_to: ExpandTo,
    krate: CrateId,
    resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
    resolver: impl Fn(path::ModPath) -> Option<MacroDefId> + Copy,
) -> Result<Option<MacroCallId>, UnresolvedMacro> {
    macro_call_as_call_id_(db, call, expand_to, krate, resolver).map(|res| res.value)
    macro_call_as_call_id_with_eager(db, call, expand_to, krate, resolver, resolver)
        .map(|res| res.value)
}

fn macro_call_as_call_id_(
fn macro_call_as_call_id_with_eager(
    db: &dyn ExpandDatabase,
    call: &AstIdWithPath<ast::MacroCall>,
    expand_to: ExpandTo,
    krate: CrateId,
    resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
    resolver: impl FnOnce(path::ModPath) -> Option<MacroDefId>,
    eager_resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro> {
    let def =
        resolver(call.path.clone()).ok_or_else(|| UnresolvedMacro { path: call.path.clone() })?;

    let res = if let MacroDefKind::BuiltInEager(..) = def.kind {
        let macro_call = InFile::new(call.ast_id.file_id, call.ast_id.to_node(db));
        expand_eager_macro_input(db, krate, macro_call, def, &resolver)?
    } else {
        ExpandResult {
    let res = match def.kind {
        MacroDefKind::BuiltInEager(..) => {
            let macro_call = InFile::new(call.ast_id.file_id, call.ast_id.to_node(db));
            expand_eager_macro_input(db, krate, macro_call, def, &|path| {
                eager_resolver(path).filter(MacroDefId::is_fn_like)
            })
        }
        _ if def.is_fn_like() => ExpandResult {
            value: Some(def.as_lazy_macro(
                db,
                krate,
                MacroCallKind::FnLike { ast_id: call.ast_id, expand_to },
            )),
            err: None,
        }
        },
        _ => return Err(UnresolvedMacro { path: call.path.clone() }),
    };
    Ok(res)
}

@@ -1251,6 +1266,7 @@ fn derive_macro_as_call_id(
    resolver: impl Fn(path::ModPath) -> Option<(MacroId, MacroDefId)>,
) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> {
    let (macro_id, def_id) = resolver(item_attr.path.clone())
        .filter(|(_, def_id)| def_id.is_derive())
        .ok_or_else(|| UnresolvedMacro { path: item_attr.path.clone() })?;
    let call_id = def_id.as_lazy_macro(
        db.upcast(),
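The resolver closures above gain a `+ Copy` bound because `macro_call_as_call_id_with_eager` now needs the resolver twice: once for the lazy path and once for resolving macros encountered during eager expansion. A minimal sketch of that pattern (hypothetical names, not the actual rust-analyzer signatures):

fn with_both<R>(resolver: R) -> Option<u32>
where
    R: Fn(&str) -> Option<u32> + Copy,
{
    // The same closure can be handed by value to two independent consumers only because it is `Copy`.
    lazy_path(resolver).or_else(|| eager_path(resolver))
}

fn lazy_path(resolve: impl FnOnce(&str) -> Option<u32>) -> Option<u32> {
    resolve("outer")
}

fn eager_path(resolve: impl Fn(&str) -> Option<u32>) -> Option<u32> {
    resolve("inner")
}

fn main() {
    // Non-capturing closures (and closures capturing only `Copy` data) are themselves `Copy`.
    let hit = with_both(|name| (name == "outer").then_some(1));
    assert_eq!(hit, Some(1));
}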
@@ -238,7 +238,7 @@ fn main() {
    /* error: expected expression */;
    /* error: expected expression, expected COMMA */;
    /* error: expected expression */::core::fmt::Arguments::new_v1(&["", ], &[::core::fmt::ArgumentV1::new(&(), ::core::fmt::Display::fmt), ]);
    /* error: expected expression, expected R_PAREN */;
    /* error: expected expression, expected expression */;
    ::core::fmt::Arguments::new_v1(&["", ], &[::core::fmt::ArgumentV1::new(&(5), ::core::fmt::Display::fmt), ]);
}
"##]],
@@ -99,6 +99,30 @@ fn#19 main#20(#21)#21 {#22
    );
}

#[test]
fn eager_expands_with_unresolved_within() {
    check(
        r#"
#[rustc_builtin_macro]
#[macro_export]
macro_rules! format_args {}

fn main(foo: ()) {
    format_args!("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar")
}
"#,
        expect![[r##"
#[rustc_builtin_macro]
#[macro_export]
macro_rules! format_args {}

fn main(foo: ()) {
    /* error: unresolved macro identity */::core::fmt::Arguments::new_v1(&["", " ", " ", ], &[::core::fmt::ArgumentV1::new(&(::core::fmt::Arguments::new_v1(&["", ], &[::core::fmt::ArgumentV1::new(&(0), ::core::fmt::Display::fmt), ])), ::core::fmt::Display::fmt), ::core::fmt::ArgumentV1::new(&(foo), ::core::fmt::Display::fmt), ::core::fmt::ArgumentV1::new(&(identity!(10)), ::core::fmt::Display::fmt), ])
}
"##]],
    );
}

#[test]
fn token_mapping_eager() {
    check(

@@ -848,6 +872,37 @@ fn foo() {
    );
}

#[test]
fn test_type_path_is_transcribed_as_expr_path() {
    check(
        r#"
macro_rules! m {
    ($p:path) => { let $p; }
}
fn test() {
    m!(S)
    m!(S<i32>)
    m!(S<S<i32>>)
    m!(S<{ module::CONST < 42 }>)
}
"#,
        expect![[r#"
macro_rules! m {
    ($p:path) => { let $p; }
}
fn test() {
    let S;
    let S:: <i32> ;
    let S:: <S:: <i32>> ;
    let S:: < {
        module::CONST<42
    }
    > ;
}
"#]],
    );
}

#[test]
fn test_expr() {
    check(
@@ -38,7 +38,7 @@ use crate::{
        self, ExternCrate, Fields, FileItemTreeId, ImportKind, ItemTree, ItemTreeId, ItemTreeNode,
        MacroCall, MacroDef, MacroRules, Mod, ModItem, ModKind, TreeId,
    },
    macro_call_as_call_id, macro_id_to_def_id,
    macro_call_as_call_id, macro_call_as_call_id_with_eager, macro_id_to_def_id,
    nameres::{
        diagnostics::DefDiagnostic,
        mod_resolution::ModDir,

@@ -52,10 +52,10 @@ use crate::{
    tt,
    visibility::{RawVisibility, Visibility},
    AdtId, AstId, AstIdWithPath, ConstLoc, CrateRootModuleId, EnumLoc, EnumVariantId,
    ExternBlockLoc, ExternCrateLoc, FunctionId, FunctionLoc, ImplLoc, ImportLoc, Intern,
    ItemContainerId, LocalModuleId, Macro2Id, Macro2Loc, MacroExpander, MacroId, MacroRulesId,
    MacroRulesLoc, ModuleDefId, ModuleId, ProcMacroId, ProcMacroLoc, StaticLoc, StructLoc,
    TraitAliasLoc, TraitLoc, TypeAliasLoc, UnionLoc, UnresolvedMacro,
    ExternBlockLoc, ExternCrateLoc, FunctionId, FunctionLoc, ImplLoc, Intern, ItemContainerId,
    LocalModuleId, Macro2Id, Macro2Loc, MacroExpander, MacroId, MacroRulesId, MacroRulesLoc,
    ModuleDefId, ModuleId, ProcMacroId, ProcMacroLoc, StaticLoc, StructLoc, TraitAliasLoc,
    TraitLoc, TypeAliasLoc, UnionLoc, UnresolvedMacro, UseLoc,
};

static GLOB_RECURSION_LIMIT: Limit = Limit::new(100);

@@ -146,7 +146,7 @@ impl PartialResolvedImport {

#[derive(Clone, Debug, Eq, PartialEq)]
enum ImportSource {
    Import { id: ItemTreeId<item_tree::Import>, use_tree: Idx<ast::UseTree> },
    Use { id: ItemTreeId<item_tree::Use>, use_tree: Idx<ast::UseTree> },
    ExternCrate(ItemTreeId<item_tree::ExternCrate>),
}

@@ -166,7 +166,7 @@ impl Import {
        db: &dyn DefDatabase,
        krate: CrateId,
        tree: &ItemTree,
        id: ItemTreeId<item_tree::Import>,
        id: ItemTreeId<item_tree::Use>,
        mut cb: impl FnMut(Self),
    ) {
        let it = &tree[id.value];

@@ -181,7 +181,7 @@ impl Import {
                kind,
                is_prelude,
                is_macro_use: false,
                source: ImportSource::Import { id, use_tree: idx },
                source: ImportSource::Use { id, use_tree: idx },
            });
        });
    }

@@ -1474,7 +1474,7 @@ impl DefCollector<'_> {
        }

        for directive in &self.unresolved_imports {
            if let ImportSource::Import { id: import, use_tree } = directive.import.source {
            if let ImportSource::Use { id: import, use_tree } = directive.import.source {
                if matches!(
                    (directive.import.path.segments().first(), &directive.import.path.kind),
                    (Some(krate), PathKind::Plain | PathKind::Abs) if diagnosed_extern_crates.contains(krate)

@@ -1576,12 +1576,10 @@ impl ModCollector<'_, '_> {

        match item {
            ModItem::Mod(m) => self.collect_module(m, &attrs),
            ModItem::Import(import_id) => {
                let _import_id = ImportLoc {
                    container: module,
                    id: ItemTreeId::new(self.tree_id, import_id),
                }
                .intern(db);
            ModItem::Use(import_id) => {
                let _import_id =
                    UseLoc { container: module, id: ItemTreeId::new(self.tree_id, import_id) }
                        .intern(db);
                Import::from_use(
                    db,
                    krate,

@@ -2187,7 +2185,7 @@ impl ModCollector<'_, '_> {
        // scopes without eager expansion.

        // Case 1: try to resolve macro calls with single-segment name and expand macro_rules
        if let Ok(res) = macro_call_as_call_id(
        if let Ok(res) = macro_call_as_call_id_with_eager(
            db.upcast(),
            &ast_id,
            mac.expand_to,

@@ -2210,19 +2208,34 @@ impl ModCollector<'_, '_> {
                    .map(|it| macro_id_to_def_id(self.def_collector.db, it))
            })
            },
        ) {
            // Legacy macros need to be expanded immediately, so that any macros they produce
            // are in scope.
            if let Some(val) = res {
                self.def_collector.collect_macro_expansion(
            |path| {
                let resolved_res = self.def_collector.def_map.resolve_path_fp_with_macro(
                    db,
                    ResolveMode::Other,
                    self.module_id,
                    val,
                    self.macro_depth + 1,
                    container,
                    &path,
                    BuiltinShadowMode::Module,
                    Some(MacroSubNs::Bang),
                );
            }
                resolved_res.resolved_def.take_macros().map(|it| macro_id_to_def_id(db, it))
            },
        ) {
            // FIXME: if there were errors, this mightve been in the eager expansion from an
            // unresolved macro, so we need to push this into late macro resolution. see fixme above
            if res.err.is_none() {
                // Legacy macros need to be expanded immediately, so that any macros they produce
                // are in scope.
                if let Some(val) = res.value {
                    self.def_collector.collect_macro_expansion(
                        self.module_id,
                        val,
                        self.macro_depth + 1,
                        container,
                    );
                }

                return;
                return;
            }
        }

        // Case 2: resolve in module scope, expand during name resolution.

@@ -19,7 +19,7 @@ pub enum DefDiagnosticKind {

    UnresolvedExternCrate { ast: AstId<ast::ExternCrate> },

    UnresolvedImport { id: ItemTreeId<item_tree::Import>, index: Idx<ast::UseTree> },
    UnresolvedImport { id: ItemTreeId<item_tree::Use>, index: Idx<ast::UseTree> },

    UnconfiguredCode { ast: ErasedAstId, cfg: CfgExpr, opts: CfgOptions },

@@ -70,7 +70,7 @@ impl DefDiagnostic {

    pub(super) fn unresolved_import(
        container: LocalModuleId,
        id: ItemTreeId<item_tree::Import>,
        id: ItemTreeId<item_tree::Use>,
        index: Idx<ast::UseTree>,
    ) -> Self {
        Self { in_module: container, kind: DefDiagnosticKind::UnresolvedImport { id, index } }
@@ -213,17 +213,17 @@ pub type Ty = ();

    for (_, res) in module_data.scope.resolutions() {
        match res.values.or(res.types).unwrap().0 {
            ModuleDefId::FunctionId(f) => drop(db.function_data(f)),
            ModuleDefId::FunctionId(f) => _ = db.function_data(f),
            ModuleDefId::AdtId(adt) => match adt {
                AdtId::StructId(it) => drop(db.struct_data(it)),
                AdtId::UnionId(it) => drop(db.union_data(it)),
                AdtId::EnumId(it) => drop(db.enum_data(it)),
                AdtId::StructId(it) => _ = db.struct_data(it),
                AdtId::UnionId(it) => _ = db.union_data(it),
                AdtId::EnumId(it) => _ = db.enum_data(it),
            },
            ModuleDefId::ConstId(it) => drop(db.const_data(it)),
            ModuleDefId::StaticId(it) => drop(db.static_data(it)),
            ModuleDefId::TraitId(it) => drop(db.trait_data(it)),
            ModuleDefId::TraitAliasId(it) => drop(db.trait_alias_data(it)),
            ModuleDefId::TypeAliasId(it) => drop(db.type_alias_data(it)),
            ModuleDefId::ConstId(it) => _ = db.const_data(it),
            ModuleDefId::StaticId(it) => _ = db.static_data(it),
            ModuleDefId::TraitId(it) => _ = db.trait_data(it),
            ModuleDefId::TraitAliasId(it) => _ = db.trait_alias_data(it),
            ModuleDefId::TypeAliasId(it) => _ = db.type_alias_data(it),
            ModuleDefId::EnumVariantId(_)
            | ModuleDefId::ModuleId(_)
            | ModuleDefId::MacroId(_)
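This hunk swaps `drop(...)` for the `_ =` discard pattern when evaluating queries whose results are not otherwise used. Both forms run the call and throw the value away; a standalone sketch of the two styles:

fn compute() -> Vec<u32> {
    vec![1, 2, 3]
}

fn main() {
    // Equivalent ways to evaluate `compute()` only for its effects and discard the result.
    drop(compute());
    _ = compute();
}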
@@ -25,7 +25,7 @@ use crate::{
    EnumVariantId, ExternBlockId, ExternCrateId, FunctionId, GenericDefId, GenericParamId,
    HasModule, ImplId, ItemContainerId, LifetimeParamId, LocalModuleId, Lookup, Macro2Id, MacroId,
    MacroRulesId, ModuleDefId, ModuleId, ProcMacroId, StaticId, StructId, TraitAliasId, TraitId,
    TypeAliasId, TypeOrConstParamId, TypeOwnerId, TypeParamId, VariantId,
    TypeAliasId, TypeOrConstParamId, TypeOwnerId, TypeParamId, UseId, VariantId,
};

#[derive(Debug, Clone)]

@@ -1024,6 +1024,12 @@ impl HasResolver for ExternCrateId {
    }
}

impl HasResolver for UseId {
    fn resolver(self, db: &dyn DefDatabase) -> Resolver {
        self.lookup(db).container.resolver(db)
    }
}

impl HasResolver for TypeOwnerId {
    fn resolver(self, db: &dyn DefDatabase) -> Resolver {
        match self {
@@ -430,14 +430,13 @@ fn macro_arg_node(
    let loc = db.lookup_intern_macro_call(id);
    let arg = if let MacroDefKind::BuiltInEager(..) = loc.def.kind {
    let res = if let Some(EagerCallInfo { arg, .. }) = loc.eager.as_deref() {
        Some(mbe::token_tree_to_syntax_node(&arg.0, mbe::TopEntryPoint::Expr).0)
        Some(mbe::token_tree_to_syntax_node(&arg.0, mbe::TopEntryPoint::MacroEagerInput).0)
    } else {
        loc.kind
            .arg(db)
            .and_then(|arg| ast::TokenTree::cast(arg.value))
            .map(|tt| tt.reparse_as_expr().to_syntax())
            .map(|tt| tt.reparse_as_comma_separated_expr().to_syntax())
    };

    match res {
        Some(res) if res.errors().is_empty() => res.syntax_node(),
        Some(res) => {
@@ -19,7 +19,7 @@
//!
//! See the full discussion : <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros>
use base_db::CrateId;
use rustc_hash::FxHashMap;
use rustc_hash::{FxHashMap, FxHashSet};
use syntax::{ted, Parse, SyntaxNode, TextRange, TextSize, WalkEvent};
use triomphe::Arc;

@@ -29,7 +29,7 @@ use crate::{
    hygiene::Hygiene,
    mod_path::ModPath,
    EagerCallInfo, ExpandError, ExpandResult, ExpandTo, InFile, MacroCallId, MacroCallKind,
    MacroCallLoc, MacroDefId, MacroDefKind, UnresolvedMacro,
    MacroCallLoc, MacroDefId, MacroDefKind,
};

pub fn expand_eager_macro_input(

@@ -38,7 +38,7 @@ pub fn expand_eager_macro_input(
    macro_call: InFile<ast::MacroCall>,
    def: MacroDefId,
    resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro> {
) -> ExpandResult<Option<MacroCallId>> {
    let ast_map = db.ast_id_map(macro_call.file_id);
    // the expansion which the ast id map is built upon has no whitespace, so the offsets are wrong as macro_call is from the token tree that has whitespace!
    let call_id = InFile::new(macro_call.file_id, ast_map.ast_id(&macro_call.value));

@@ -71,41 +71,46 @@ pub fn expand_eager_macro_input(
            InFile::new(arg_id.as_file(), arg_exp.syntax_node()),
            krate,
            resolver,
        )?
        )
    };
    let err = parse_err.or(err);

    let Some((expanded_eager_input, mapping)) = expanded_eager_input else {
        return Ok(ExpandResult { value: None, err });
        return ExpandResult { value: None, err };
    };

    let og_tmap = mbe::syntax_node_to_token_map(
        macro_call.value.token_tree().expect("macro_arg_text succeeded").syntax(),
    );

    let (mut subtree, expanded_eager_input_token_map) =
        mbe::syntax_node_to_token_tree(&expanded_eager_input);

    // The tokenmap and ids of subtree point into the expanded syntax node, but that is inaccessible from the outside
    // so we need to remap them to the original input of the eager macro.
    subtree.visit_ids(&|id| {
        // Note: we discard all token ids of braces and the like here, but that's not too bad and only a temporary fix
    let og_tmap = if let Some(tt) = macro_call.value.token_tree() {
        let mut ids_used = FxHashSet::default();
        let mut og_tmap = mbe::syntax_node_to_token_map(tt.syntax());
        // The tokenmap and ids of subtree point into the expanded syntax node, but that is inaccessible from the outside
        // so we need to remap them to the original input of the eager macro.
        subtree.visit_ids(&mut |id| {
            // Note: we discard all token ids of braces and the like here, but that's not too bad and only a temporary fix

        if let Some(range) =
            expanded_eager_input_token_map.first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)
        {
            // remap from expanded eager input to eager input expansion
            if let Some(og_range) = mapping.get(&range) {
                // remap from eager input expansion to original eager input
                if let Some(&og_range) = ws_mapping.get(og_range) {
                    if let Some(og_token) = og_tmap.token_by_range(og_range) {
                        return og_token;
            if let Some(range) = expanded_eager_input_token_map
                .first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)
            {
                // remap from expanded eager input to eager input expansion
                if let Some(og_range) = mapping.get(&range) {
                    // remap from eager input expansion to original eager input
                    if let Some(&og_range) = ws_mapping.get(og_range) {
                        if let Some(og_token) = og_tmap.token_by_range(og_range) {
                            ids_used.insert(og_token);
                            return og_token;
                        }
                    }
                }
            }
        }
        tt::TokenId::UNSPECIFIED
    });
            tt::TokenId::UNSPECIFIED
        });
        og_tmap.filter(|id| ids_used.contains(&id));
        og_tmap
    } else {
        Default::default()
    };
    subtree.delimiter = crate::tt::Delimiter::unspecified();

    let loc = MacroCallLoc {

@@ -119,7 +124,7 @@ pub fn expand_eager_macro_input(
        kind: MacroCallKind::FnLike { ast_id: call_id, expand_to },
    };

    Ok(ExpandResult { value: Some(db.intern_macro_call(loc)), err })
    ExpandResult { value: Some(db.intern_macro_call(loc)), err }
}

fn lazy_expand(

@@ -145,13 +150,13 @@ fn eager_macro_recur(
    curr: InFile<SyntaxNode>,
    krate: CrateId,
    macro_resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
) -> Result<ExpandResult<Option<(SyntaxNode, FxHashMap<TextRange, TextRange>)>>, UnresolvedMacro> {
) -> ExpandResult<Option<(SyntaxNode, FxHashMap<TextRange, TextRange>)>> {
    let original = curr.value.clone_for_update();
    let mut mapping = FxHashMap::default();

    let mut replacements = Vec::new();

    // Note: We only report a single error inside of eager expansions
    // FIXME: We only report a single error inside of eager expansions
    let mut error = None;
    let mut offset = 0i32;
    let apply_offset = |it: TextSize, offset: i32| {

@@ -182,7 +187,14 @@ fn eager_macro_recur(
            }
        };
        let def = match call.path().and_then(|path| ModPath::from_src(db, path, hygiene)) {
            Some(path) => macro_resolver(path.clone()).ok_or(UnresolvedMacro { path })?,
            Some(path) => match macro_resolver(path.clone()) {
                Some(def) => def,
                None => {
                    error =
                        Some(ExpandError::other(format!("unresolved macro {}", path.display(db))));
                    continue;
                }
            },
            None => {
                error = Some(ExpandError::other("malformed macro invocation"));
                continue;

@@ -190,32 +202,31 @@ fn eager_macro_recur(
        };
        let ExpandResult { value, err } = match def.kind {
            MacroDefKind::BuiltInEager(..) => {
                let ExpandResult { value, err } = match expand_eager_macro_input(
                let ExpandResult { value, err } = expand_eager_macro_input(
                    db,
                    krate,
                    curr.with_value(call.clone()),
                    def,
                    macro_resolver,
                ) {
                    Ok(it) => it,
                    Err(err) => return Err(err),
                };
                );
                match value {
                    Some(call_id) => {
                        let ExpandResult { value, err: err2 } =
                            db.parse_macro_expansion(call_id.as_macro_file());

                        let call_tt_start =
                            call.token_tree().unwrap().syntax().text_range().start();
                        let call_start = apply_offset(call.syntax().text_range().start(), offset);
                        if let Some((_, arg_map, _)) = db.macro_arg(call_id).value.as_deref() {
                            mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
                                value
                                    .1
                                    .first_range_by_token(tid, syntax::SyntaxKind::TOMBSTONE)
                                    .map(|r| (r + call_start, range + call_tt_start))
                            }));
                        };
                        if let Some(tt) = call.token_tree() {
                            let call_tt_start = tt.syntax().text_range().start();
                            let call_start =
                                apply_offset(call.syntax().text_range().start(), offset);
                            if let Some((_, arg_map, _)) = db.macro_arg(call_id).value.as_deref() {
                                mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
                                    value
                                        .1
                                        .first_range_by_token(tid, syntax::SyntaxKind::TOMBSTONE)
                                        .map(|r| (r + call_start, range + call_tt_start))
                                }));
                            }
                        }

                        ExpandResult {
                            value: Some(value.0.syntax_node().clone_for_update()),

@@ -247,25 +258,27 @@ fn eager_macro_recur(
                    parse.as_ref().map(|it| it.syntax_node()),
                    krate,
                    macro_resolver,
                )?;
                );
                let err = err.or(error);

                let call_tt_start = call.token_tree().unwrap().syntax().text_range().start();
                let call_start = apply_offset(call.syntax().text_range().start(), offset);
                if let Some((_tt, arg_map, _)) = parse
                    .file_id
                    .macro_file()
                    .and_then(|id| db.macro_arg(id.macro_call_id).value)
                    .as_deref()
                {
                    mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
                        tm.first_range_by_token(
                            decl_mac.as_ref().map(|it| it.map_id_down(tid)).unwrap_or(tid),
                            syntax::SyntaxKind::TOMBSTONE,
                        )
                        .map(|r| (r + call_start, range + call_tt_start))
                    }));
                };
                if let Some(tt) = call.token_tree() {
                    let call_tt_start = tt.syntax().text_range().start();
                    let call_start = apply_offset(call.syntax().text_range().start(), offset);
                    if let Some((_tt, arg_map, _)) = parse
                        .file_id
                        .macro_file()
                        .and_then(|id| db.macro_arg(id.macro_call_id).value)
                        .as_deref()
                    {
                        mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
                            tm.first_range_by_token(
                                decl_mac.as_ref().map(|it| it.map_id_down(tid)).unwrap_or(tid),
                                syntax::SyntaxKind::TOMBSTONE,
                            )
                            .map(|r| (r + call_start, range + call_tt_start))
                        }));
                    }
                }
                // FIXME: Do we need to re-use _m here?
                ExpandResult { value: value.map(|(n, _m)| n), err }
            }

@@ -275,7 +288,7 @@ fn eager_macro_recur(
        }
        // check if the whole original syntax is replaced
        if call.syntax() == &original {
            return Ok(ExpandResult { value: value.zip(Some(mapping)), err: error });
            return ExpandResult { value: value.zip(Some(mapping)), err: error };
        }

        if let Some(insert) = value {

@@ -286,5 +299,5 @@ fn eager_macro_recur(
    }

    replacements.into_iter().rev().for_each(|(old, new)| ted::replace(old.syntax(), new));
    Ok(ExpandResult { value: Some((original, mapping)), err: error })
    ExpandResult { value: Some((original, mapping)), err: error }
}
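The rewritten `expand_eager_macro_input` and `eager_macro_recur` above now report unresolved macros as expansion errors instead of bailing out with `UnresolvedMacro`. For context, "eager" refers to the language behaviour being modeled here: certain built-in macros expand macro calls appearing in their arguments before consuming them. A small standalone reminder of that behaviour:

fn main() {
    // The format string of `format!` must be a literal, yet this compiles because the
    // built-in `concat!` call is expanded eagerly, yielding the literal "x = {}" first.
    let s = format!(concat!("x = ", "{}"), 42);
    assert_eq!(s, "x = 42");
}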
@@ -173,7 +173,7 @@ fn make_hygiene_info(
    db: &dyn ExpandDatabase,
    macro_file: MacroFile,
    loc: &MacroCallLoc,
) -> Option<HygieneInfo> {
) -> HygieneInfo {
    let def = loc.def.ast_id().left().and_then(|id| {
        let def_tt = match id.to_node(db) {
            ast::Macro::MacroRules(mac) => mac.token_tree()?,

@@ -204,7 +204,7 @@ fn make_hygiene_info(
        ))
    });

    Some(HygieneInfo {
    HygieneInfo {
        file: macro_file,
        attr_input_or_mac_def_start: attr_input_or_mac_def
            .map(|it| it.map(|tt| tt.syntax().text_range().start())),

@@ -212,7 +212,7 @@ fn make_hygiene_info(
        macro_arg,
        macro_def,
        exp_map,
    })
    }
}

impl HygieneFrame {

@@ -221,8 +221,7 @@ impl HygieneFrame {
            None => (None, None, false),
            Some(macro_file) => {
                let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
                let info =
                    make_hygiene_info(db, macro_file, &loc).map(|info| (loc.kind.file_id(), info));
                let info = Some((make_hygiene_info(db, macro_file, &loc), loc.kind.file_id()));
                match loc.def.kind {
                    MacroDefKind::Declarative(_) => {
                        (info, Some(loc.def.krate), loc.def.local_inner)

@@ -236,17 +235,14 @@ impl HygieneFrame {
            }
        };

        let (calling_file, info) = match info {
            None => {
                return HygieneFrame {
                    expansion: None,
                    local_inner,
                    krate,
                    call_site: None,
                    def_site: None,
                };
        let Some((info, calling_file)) = info else {
            return HygieneFrame {
                expansion: None,
                local_inner,
                krate,
                call_site: None,
                def_site: None,
            }
            Some(it) => it,
        };

        let def_site = info.attr_input_or_mac_def_start.map(|it| db.hygiene_frame(it.file_id));
@@ -415,6 +415,24 @@ impl MacroDefId {
        )
    }

    pub fn is_derive(&self) -> bool {
        matches!(
            self.kind,
            MacroDefKind::BuiltInDerive(..)
                | MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _)
        )
    }

    pub fn is_fn_like(&self) -> bool {
        matches!(
            self.kind,
            MacroDefKind::BuiltIn(..)
                | MacroDefKind::ProcMacro(_, ProcMacroKind::FuncLike, _)
                | MacroDefKind::BuiltInEager(..)
                | MacroDefKind::Declarative(..)
        )
    }

    pub fn is_attribute_derive(&self) -> bool {
        matches!(self.kind, MacroDefKind::BuiltInAttr(expander, ..) if expander.is_derive())
    }
@@ -1186,6 +1186,25 @@ fn pattern_matching_ergonomics() {
    );
}

#[test]
fn destructing_assignment() {
    check_number(
        r#"
        //- minicore: add
        const fn f(i: &mut u8) -> &mut u8 {
            *i += 1;
            i
        }
        const GOAL: u8 = {
            let mut i = 4;
            _ = f(&mut i);
            i
        };
        "#,
        5,
    );
}

#[test]
fn let_else() {
    check_number(

@@ -1428,14 +1447,14 @@ fn builtin_derive_macro() {
        #[derive(Clone)]
        struct Y {
            field1: i32,
            field2: u8,
            field2: ((i32, u8), i64),
        }

        const GOAL: u8 = {
            let x = X(2, Z::Foo(Y { field1: 4, field2: 5 }), 8);
            let x = X(2, Z::Foo(Y { field1: 4, field2: ((32, 5), 12) }), 8);
            let x = x.clone();
            let Z::Foo(t) = x.1;
            t.field2
            t.field2.0 .1
        };
        "#,
        5,

@@ -1632,6 +1651,34 @@ const GOAL: i32 = {
    );
}

#[test]
fn closure_capture_unsized_type() {
    check_number(
        r#"
        //- minicore: fn, copy, slice, index, coerce_unsized
        fn f<T: A>(x: &<T as A>::Ty) -> &<T as A>::Ty {
            let c = || &*x;
            c()
        }

        trait A {
            type Ty;
        }

        impl A for i32 {
            type Ty = [u8];
        }

        const GOAL: u8 = {
            let k: &[u8] = &[1, 2, 3];
            let k = f::<i32>(k);
            k[0] + k[1] + k[2]
        }
        "#,
        6,
    );
}

#[test]
fn closure_and_impl_fn() {
    check_number(

@@ -1717,6 +1764,24 @@ fn function_pointer_in_constants() {
    );
}

#[test]
fn function_pointer_and_niche_optimization() {
    check_number(
        r#"
        //- minicore: option
        const GOAL: i32 = {
            let f: fn(i32) -> i32 = |x| x + 2;
            let init = Some(f);
            match init {
                Some(t) => t(3),
                None => 222,
            }
        };
        "#,
        5,
    );
}

#[test]
fn function_pointer() {
    check_number(

@@ -2331,11 +2396,14 @@ fn const_loop() {
fn const_transfer_memory() {
    check_number(
        r#"
        const A1: &i32 = &2;
        const A2: &i32 = &5;
        const GOAL: i32 = *A1 + *A2;
        //- minicore: slice, index, coerce_unsized
        const A1: &i32 = &1;
        const A2: &i32 = &10;
        const A3: [&i32; 3] = [&1, &2, &100];
        const A4: (i32, &i32) = (1, &1000);
        const GOAL: i32 = *A1 + *A2 + *A3[2] + *A4.1;
        "#,
        7,
        1111,
    );
}

@@ -2521,12 +2589,16 @@ fn const_trait_assoc() {
    );
    check_number(
        r#"
        //- minicore: size_of
        //- minicore: size_of, fn
        //- /a/lib.rs crate:a
        use core::mem::size_of;
        pub struct S<T>(T);
        impl<T> S<T> {
            pub const X: usize = core::mem::size_of::<T>();
            pub const X: usize = {
                let k: T;
                let f = || core::mem::size_of::<T>();
                f()
            };
        }
        //- /main.rs crate:main deps:a
        use a::{S};

@@ -2602,9 +2674,9 @@ fn exec_limits() {
            }
            sum
        }
        const GOAL: i32 = f(10000);
        const GOAL: i32 = f(1000);
        "#,
        10000 * 10000,
        1000 * 1000,
    );
}

@@ -2651,7 +2723,7 @@ fn unsized_field() {
        //- minicore: coerce_unsized, index, slice, transmute
        use core::mem::transmute;

        struct Slice([u8]);
        struct Slice([usize]);
        struct Slice2(Slice);

        impl Slice2 {

@@ -2659,19 +2731,19 @@ fn unsized_field() {
            &self.0
        }

        fn as_bytes(&self) -> &[u8] {
        fn as_bytes(&self) -> &[usize] {
            &self.as_inner().0
        }
    }

    const GOAL: u8 = unsafe {
        let x: &[u8] = &[1, 2, 3];
    const GOAL: usize = unsafe {
        let x: &[usize] = &[1, 2, 3];
        let x: &Slice2 = transmute(x);
        let x = x.as_bytes();
        x[0] + x[1] + x[2]
        x[0] + x[1] + x[2] + x.len() * 100
    };
    "#,
    6,
    306,
    );
}
@@ -251,6 +251,28 @@ fn wrapping_add() {
    );
}

#[test]
fn ptr_offset_from() {
    check_number(
        r#"
        //- minicore: index, slice, coerce_unsized
        extern "rust-intrinsic" {
            pub fn ptr_offset_from<T>(ptr: *const T, base: *const T) -> isize;
            pub fn ptr_offset_from_unsigned<T>(ptr: *const T, base: *const T) -> usize;
        }

        const GOAL: isize = {
            let x = [1, 2, 3, 4, 5i32];
            let r1 = -ptr_offset_from(&x[0], &x[4]);
            let r2 = ptr_offset_from(&x[3], &x[1]);
            let r3 = ptr_offset_from_unsigned(&x[3], &x[0]) as isize;
            r3 * 100 + r2 * 10 + r1
        };
        "#,
        324,
    );
}

#[test]
fn saturating() {
    check_number(

@@ -438,6 +460,8 @@ fn atomic() {
            pub fn atomic_nand_seqcst<T: Copy>(dst: *mut T, src: T) -> T;
            pub fn atomic_or_release<T: Copy>(dst: *mut T, src: T) -> T;
            pub fn atomic_xor_seqcst<T: Copy>(dst: *mut T, src: T) -> T;
            pub fn atomic_fence_seqcst();
            pub fn atomic_singlethreadfence_acqrel();
        }

        fn should_not_reach() {

@@ -452,6 +476,7 @@ fn atomic() {
            if (30, true) != atomic_cxchg_release_seqcst(&mut y, 30, 40) {
                should_not_reach();
            }
            atomic_fence_seqcst();
            if (40, false) != atomic_cxchg_release_seqcst(&mut y, 30, 50) {
                should_not_reach();
            }

@@ -459,6 +484,7 @@ fn atomic() {
                should_not_reach();
            }
            let mut z = atomic_xsub_seqcst(&mut x, -200);
            atomic_singlethreadfence_acqrel();
            atomic_xor_seqcst(&mut x, 1024);
            atomic_load_seqcst(&x) + z * 3 + atomic_load_seqcst(&y) * 2
        };
@@ -176,6 +176,7 @@ impl<'a> DeclValidator<'a> {
            AttrDefId::ImplId(iid) => Some(iid.lookup(self.db.upcast()).container.into()),
            AttrDefId::ExternBlockId(id) => Some(id.lookup(self.db.upcast()).container.into()),
            AttrDefId::ExternCrateId(id) => Some(id.lookup(self.db.upcast()).container.into()),
            AttrDefId::UseId(id) => Some(id.lookup(self.db.upcast()).container.into()),
            // These warnings should not explore macro definitions at all
            AttrDefId::MacroId(_) => None,
            AttrDefId::AdtId(aid) => match aid {
@@ -48,22 +48,15 @@ use crate::{
};

pub trait HirWrite: fmt::Write {
    fn start_location_link(&mut self, location: ModuleDefId);
    fn end_location_link(&mut self);
    fn start_location_link(&mut self, _location: ModuleDefId) {}
    fn end_location_link(&mut self) {}
}

// String will ignore link metadata
impl HirWrite for String {
    fn start_location_link(&mut self, _: ModuleDefId) {}

    fn end_location_link(&mut self) {}
}
impl HirWrite for String {}

// `core::Formatter` will ignore metadata
impl HirWrite for fmt::Formatter<'_> {
    fn start_location_link(&mut self, _: ModuleDefId) {}
    fn end_location_link(&mut self) {}
}
impl HirWrite for fmt::Formatter<'_> {}

pub struct HirFormatter<'a> {
    pub db: &'a dyn HirDatabase,

@@ -885,6 +878,13 @@ impl HirDisplay for Ty {
            TyKind::FnDef(def, parameters) => {
                let def = from_chalk(db, *def);
                let sig = db.callable_item_signature(def).substitute(Interner, parameters);

                if f.display_target.is_source_code() {
                    // `FnDef` is anonymous and there's no surface syntax for it. Show it as a
                    // function pointer type.
                    return sig.hir_fmt(f);
                }

                f.start_location_link(def.into());
                match def {
                    CallableDefId::FunctionId(ff) => {
@@ -13,6 +13,15 @@
//! to certain types. To record this, we use the union-find implementation from
//! the `ena` crate, which is extracted from rustc.

mod cast;
pub(crate) mod closure;
mod coerce;
mod expr;
mod mutability;
mod pat;
mod path;
pub(crate) mod unify;

use std::{convert::identity, ops::Index};

use chalk_ir::{

@@ -60,15 +69,8 @@ pub use coerce::could_coerce;
#[allow(unreachable_pub)]
pub use unify::could_unify;

pub(crate) use self::closure::{CaptureKind, CapturedItem, CapturedItemWithoutTy};

pub(crate) mod unify;
mod path;
mod expr;
mod pat;
mod coerce;
pub(crate) mod closure;
mod mutability;
use cast::CastCheck;
pub(crate) use closure::{CaptureKind, CapturedItem, CapturedItemWithoutTy};

/// The entry point of type inference.
pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {

@@ -508,6 +510,8 @@ pub(crate) struct InferenceContext<'a> {
    diverges: Diverges,
    breakables: Vec<BreakableContext>,

    deferred_cast_checks: Vec<CastCheck>,

    // fields related to closure capture
    current_captures: Vec<CapturedItemWithoutTy>,
    current_closure: Option<ClosureId>,

@@ -582,7 +586,8 @@ impl<'a> InferenceContext<'a> {
            resolver,
            diverges: Diverges::Maybe,
            breakables: Vec::new(),
            current_captures: vec![],
            deferred_cast_checks: Vec::new(),
            current_captures: Vec::new(),
            current_closure: None,
            deferred_closures: FxHashMap::default(),
            closure_dependencies: FxHashMap::default(),

@@ -594,7 +599,7 @@ impl<'a> InferenceContext<'a> {
    // used this function for another workaround, mention it here. If you really need this function and believe that
    // there is no problem in it being `pub(crate)`, remove this comment.
    pub(crate) fn resolve_all(self) -> InferenceResult {
        let InferenceContext { mut table, mut result, .. } = self;
        let InferenceContext { mut table, mut result, deferred_cast_checks, .. } = self;
        // Destructure every single field so whenever new fields are added to `InferenceResult` we
        // don't forget to handle them here.
        let InferenceResult {

@@ -622,6 +627,13 @@ impl<'a> InferenceContext<'a> {

        table.fallback_if_possible();

        // Comment from rustc:
        // Even though coercion casts provide type hints, we check casts after fallback for
        // backwards compatibility. This makes fallback a stronger type hint than a cast coercion.
        for cast in deferred_cast_checks {
            cast.check(&mut table);
        }

        // FIXME resolve obligations as well (use Guidance if necessary)
        table.resolve_obligations_as_possible();
46
crates/hir-ty/src/infer/cast.rs
Normal file
|
@ -0,0 +1,46 @@
|
|||
//! Type cast logic. Basically coercion + additional casts.
|
||||
|
||||
use crate::{infer::unify::InferenceTable, Interner, Ty, TyExt, TyKind};
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub(super) struct CastCheck {
|
||||
expr_ty: Ty,
|
||||
cast_ty: Ty,
|
||||
}
|
||||
|
||||
impl CastCheck {
|
||||
pub(super) fn new(expr_ty: Ty, cast_ty: Ty) -> Self {
|
||||
Self { expr_ty, cast_ty }
|
||||
}
|
||||
|
||||
pub(super) fn check(self, table: &mut InferenceTable<'_>) {
|
||||
// FIXME: This function currently only implements the bits that influence the type
|
||||
// inference. We should return the adjustments on success and report diagnostics on error.
|
||||
let expr_ty = table.resolve_ty_shallow(&self.expr_ty);
|
||||
let cast_ty = table.resolve_ty_shallow(&self.cast_ty);
|
||||
|
||||
if expr_ty.contains_unknown() || cast_ty.contains_unknown() {
|
||||
return;
|
||||
}
|
||||
|
||||
if table.coerce(&expr_ty, &cast_ty).is_ok() {
|
||||
return;
|
||||
}
|
||||
|
||||
if check_ref_to_ptr_cast(expr_ty, cast_ty, table) {
|
||||
// Note that this type of cast is actually split into a coercion to a
|
||||
// pointer type and a cast:
|
||||
// &[T; N] -> *[T; N] -> *T
|
||||
return;
|
||||
}
|
||||
|
||||
// FIXME: Check other kinds of non-coercion casts and report error if any?
|
||||
}
|
||||
}
|
||||
|
||||
fn check_ref_to_ptr_cast(expr_ty: Ty, cast_ty: Ty, table: &mut InferenceTable<'_>) -> bool {
|
||||
let Some((expr_inner_ty, _, _)) = expr_ty.as_reference() else { return false; };
|
||||
let Some((cast_inner_ty, _)) = cast_ty.as_raw_ptr() else { return false; };
|
||||
let TyKind::Array(expr_elt_ty, _) = expr_inner_ty.kind(Interner) else { return false; };
|
||||
table.coerce(expr_elt_ty, cast_inner_ty).is_ok()
|
||||
}
|
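
For reference, here is the user-level pattern `check_ref_to_ptr_cast` accepts, written out as the two steps the comment above describes; this is a plain-Rust sketch, independent of the analyzer code.

```rust
fn main() {
    let arr: [u8; 4] = [1, 2, 3, 4];
    let r: &[u8; 4] = &arr;

    // What user code writes: one cast from a reference-to-array to a thin
    // element pointer.
    let direct: *const u8 = r as *const u8;

    // What it decomposes into: a coercion to an array pointer, then a cast
    // to the element type, i.e. &[T; N] -> *const [T; N] -> *const T.
    let step1: *const [u8; 4] = r;
    let step2: *const u8 = step1 as *const u8;

    assert_eq!(direct, step2);
    assert_eq!(unsafe { *direct }, 1);
}
```
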
|
@ -488,10 +488,6 @@ impl InferenceContext<'_> {
|
|||
self.consume_expr(*tail);
|
||||
}
|
||||
}
|
||||
Expr::While { condition, body, label: _ } => {
|
||||
self.consume_expr(*condition);
|
||||
self.consume_expr(*body);
|
||||
}
|
||||
Expr::Call { callee, args, is_assignee_expr: _ } => {
|
||||
self.consume_expr(*callee);
|
||||
self.consume_exprs(args.iter().copied());
|
||||
|
|
|
@ -46,8 +46,8 @@ use crate::{
|
|||
};
|
||||
|
||||
use super::{
|
||||
coerce::auto_deref_adjust_steps, find_breakable, BreakableContext, Diverges, Expectation,
|
||||
InferenceContext, InferenceDiagnostic, TypeMismatch,
|
||||
cast::CastCheck, coerce::auto_deref_adjust_steps, find_breakable, BreakableContext, Diverges,
|
||||
Expectation, InferenceContext, InferenceDiagnostic, TypeMismatch,
|
||||
};
|
||||
|
||||
impl InferenceContext<'_> {
|
||||
|
@ -198,19 +198,6 @@ impl InferenceContext<'_> {
|
|||
None => self.result.standard_types.never.clone(),
|
||||
}
|
||||
}
|
||||
&Expr::While { condition, body, label } => {
|
||||
self.with_breakable_ctx(BreakableKind::Loop, None, label, |this| {
|
||||
this.infer_expr(
|
||||
condition,
|
||||
&Expectation::HasType(this.result.standard_types.bool_.clone()),
|
||||
);
|
||||
this.infer_expr(body, &Expectation::HasType(TyBuilder::unit()));
|
||||
});
|
||||
|
||||
// the body may not run, so it diverging doesn't mean we diverge
|
||||
self.diverges = Diverges::Maybe;
|
||||
TyBuilder::unit()
|
||||
}
|
||||
Expr::Closure { body, args, ret_type, arg_types, closure_kind, capture_by: _ } => {
|
||||
assert_eq!(args.len(), arg_types.len());
|
||||
|
||||
|
@ -574,16 +561,8 @@ impl InferenceContext<'_> {
|
|||
}
|
||||
Expr::Cast { expr, type_ref } => {
|
||||
let cast_ty = self.make_ty(type_ref);
|
||||
// FIXME: propagate the "castable to" expectation
|
||||
let inner_ty = self.infer_expr_no_expect(*expr);
|
||||
match (inner_ty.kind(Interner), cast_ty.kind(Interner)) {
|
||||
(TyKind::Ref(_, _, inner), TyKind::Raw(_, cast)) => {
|
||||
// FIXME: record invalid cast diagnostic in case of mismatch
|
||||
self.unify(inner, cast);
|
||||
}
|
||||
// FIXME check the other kinds of cast...
|
||||
_ => (),
|
||||
}
|
||||
let expr_ty = self.infer_expr(*expr, &Expectation::Castable(cast_ty.clone()));
|
||||
self.deferred_cast_checks.push(CastCheck::new(expr_ty, cast_ty.clone()));
|
||||
cast_ty
|
||||
}
|
||||
Expr::Ref { expr, rawness, mutability } => {
|
||||
|
@ -1592,7 +1571,7 @@ impl InferenceContext<'_> {
|
|||
output: Ty,
|
||||
inputs: Vec<Ty>,
|
||||
) -> Vec<Ty> {
|
||||
if let Some(expected_ty) = expected_output.to_option(&mut self.table) {
|
||||
if let Some(expected_ty) = expected_output.only_has_type(&mut self.table) {
|
||||
self.table.fudge_inference(|table| {
|
||||
if table.try_unify(&expected_ty, &output).is_ok() {
|
||||
table.resolve_with_fallback(inputs, &|var, kind, _, _| match kind {
|
||||
|
|
|
@ -69,10 +69,6 @@ impl InferenceContext<'_> {
|
|||
self.infer_mut_expr(*tail, Mutability::Not);
|
||||
}
|
||||
}
|
||||
&Expr::While { condition: c, body, label: _ } => {
|
||||
self.infer_mut_expr(c, Mutability::Not);
|
||||
self.infer_mut_expr(body, Mutability::Not);
|
||||
}
|
||||
Expr::MethodCall { receiver: it, method_name: _, args, generic_args: _ }
|
||||
| Expr::Call { callee: it, args, is_assignee_expr: _ } => {
|
||||
self.infer_mut_not_expr_iter(args.iter().copied().chain(Some(*it)));
|
||||
|
|
|
@ -14,7 +14,7 @@ use triomphe::Arc;
|
|||
|
||||
use crate::{
|
||||
consteval::try_const_usize, db::HirDatabase, infer::normalize, layout::adt::struct_variant_idx,
|
||||
utils::ClosureSubst, Interner, Substitution, TraitEnvironment, Ty,
|
||||
utils::ClosureSubst, Interner, ProjectionTy, Substitution, TraitEnvironment, Ty,
|
||||
};
|
||||
|
||||
pub use self::{
|
||||
|
@ -279,7 +279,15 @@ pub fn layout_of_ty_query(
|
|||
// return Ok(tcx.mk_layout(LayoutS::scalar(cx, data_ptr)));
|
||||
// }
|
||||
|
||||
let unsized_part = struct_tail_erasing_lifetimes(db, pointee.clone());
|
||||
let mut unsized_part = struct_tail_erasing_lifetimes(db, pointee.clone());
|
||||
if let TyKind::AssociatedType(id, subst) = unsized_part.kind(Interner) {
|
||||
unsized_part = TyKind::Alias(chalk_ir::AliasTy::Projection(ProjectionTy {
|
||||
associated_ty_id: *id,
|
||||
substitution: subst.clone(),
|
||||
}))
|
||||
.intern(Interner);
|
||||
}
|
||||
unsized_part = normalize(db, trait_env.clone(), unsized_part);
|
||||
let metadata = match unsized_part.kind(Interner) {
|
||||
TyKind::Slice(_) | TyKind::Str => {
|
||||
scalar_unit(dl, Primitive::Int(dl.ptr_sized_integer(), false))
|
||||
|
@ -362,8 +370,16 @@ pub fn layout_of_ty_query(
|
|||
return Err(LayoutError::NotImplemented)
|
||||
}
|
||||
TyKind::Error => return Err(LayoutError::HasErrorType),
|
||||
TyKind::AssociatedType(_, _)
|
||||
| TyKind::Alias(_)
|
||||
TyKind::AssociatedType(id, subst) => {
|
||||
// Try again with `TyKind::Alias` to normalize the associated type.
|
||||
let ty = TyKind::Alias(chalk_ir::AliasTy::Projection(ProjectionTy {
|
||||
associated_ty_id: *id,
|
||||
substitution: subst.clone(),
|
||||
}))
|
||||
.intern(Interner);
|
||||
return db.layout_of_ty(ty, trait_env);
|
||||
}
|
||||
TyKind::Alias(_)
|
||||
| TyKind::Placeholder(_)
|
||||
| TyKind::BoundVar(_)
|
||||
| TyKind::InferenceVar(_, _) => return Err(LayoutError::HasPlaceholder),
|
||||
|
|
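
A small illustration, in plain Rust, of why the unsized tail handled above can show up as an associated type that must be normalized before the pointee metadata (and hence the layout of pointers to it) is known; the trait and types here are hypothetical.

```rust
use core::mem::size_of;

trait Collection {
    type Tail: ?Sized;
}

struct Bytes;

impl Collection for Bytes {
    type Tail = [u8];
}

fn main() {
    // The width of this reference depends on normalizing the projection:
    // <Bytes as Collection>::Tail = [u8], so the metadata is a length and
    // the reference is two words wide, not one.
    assert_eq!(
        size_of::<&<Bytes as Collection>::Tail>(),
        2 * size_of::<usize>(),
    );
    assert_eq!(size_of::<&u8>(), size_of::<usize>());
}
```
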
|
@ -234,6 +234,7 @@ impl Place {
|
|||
self.local == child.local && child.projection.starts_with(&self.projection)
|
||||
}
|
||||
|
||||
/// The place itself is not included
|
||||
fn iterate_over_parents(&self) -> impl Iterator<Item = Place> + '_ {
|
||||
(0..self.projection.len())
|
||||
.map(|x| &self.projection[0..x])
|
||||
|
|
|
@ -1,6 +1,13 @@
|
|||
//! This module provides a MIR interpreter, which is used in const eval.
|
||||
|
||||
use std::{borrow::Cow, cell::RefCell, collections::HashMap, fmt::Write, iter, mem, ops::Range};
|
||||
use std::{
|
||||
borrow::Cow,
|
||||
cell::RefCell,
|
||||
collections::{HashMap, HashSet},
|
||||
fmt::Write,
|
||||
iter, mem,
|
||||
ops::Range,
|
||||
};
|
||||
|
||||
use base_db::{CrateId, FileId};
|
||||
use chalk_ir::Mutability;
|
||||
|
@ -39,7 +46,8 @@ use crate::{
|
|||
|
||||
use super::{
|
||||
return_slot, AggregateKind, BasicBlockId, BinOp, CastKind, LocalId, MirBody, MirLowerError,
|
||||
MirSpan, Operand, Place, ProjectionElem, Rvalue, StatementKind, TerminatorKind, UnOp,
|
||||
MirSpan, Operand, Place, PlaceElem, ProjectionElem, Rvalue, StatementKind, TerminatorKind,
|
||||
UnOp,
|
||||
};
|
||||
|
||||
mod shim;
|
||||
|
@ -68,18 +76,22 @@ pub struct VTableMap {
|
|||
}
|
||||
|
||||
impl VTableMap {
|
||||
const OFFSET: usize = 1000; // We should add some offset to ids to make 0 (null) an invalid id.
|
||||
|
||||
fn id(&mut self, ty: Ty) -> usize {
|
||||
if let Some(it) = self.ty_to_id.get(&ty) {
|
||||
return *it;
|
||||
}
|
||||
let id = self.id_to_ty.len();
|
||||
let id = self.id_to_ty.len() + VTableMap::OFFSET;
|
||||
self.id_to_ty.push(ty.clone());
|
||||
self.ty_to_id.insert(ty, id);
|
||||
id
|
||||
}
|
||||
|
||||
pub(crate) fn ty(&self, id: usize) -> Result<&Ty> {
|
||||
self.id_to_ty.get(id).ok_or(MirEvalError::InvalidVTableId(id))
|
||||
id.checked_sub(VTableMap::OFFSET)
|
||||
.and_then(|id| self.id_to_ty.get(id))
|
||||
.ok_or(MirEvalError::InvalidVTableId(id))
|
||||
}
|
||||
|
||||
fn ty_of_bytes(&self, bytes: &[u8]) -> Result<&Ty> {
|
||||
|
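
A self-contained sketch of the id scheme used by `VTableMap` above: ids start at a fixed offset so that 0 can never round-trip back to a type, which makes accidental reads of zeroed memory fail instead of resolving to the first interned type. The key type here is `String` purely for illustration.

```rust
use std::collections::HashMap;

const OFFSET: usize = 1000;

#[derive(Default)]
struct Interner {
    to_id: HashMap<String, usize>,
    from_id: Vec<String>,
}

impl Interner {
    fn id(&mut self, key: String) -> usize {
        if let Some(&id) = self.to_id.get(&key) {
            return id;
        }
        // Offset the index so that 0 is never handed out as an id.
        let id = self.from_id.len() + OFFSET;
        self.from_id.push(key.clone());
        self.to_id.insert(key, id);
        id
    }

    fn get(&self, id: usize) -> Option<&str> {
        id.checked_sub(OFFSET)
            .and_then(|i| self.from_id.get(i))
            .map(String::as_str)
    }
}

fn main() {
    let mut interner = Interner::default();
    let id = interner.id("dyn Display".to_owned());
    assert_eq!(id, OFFSET);
    assert_eq!(interner.get(id), Some("dyn Display"));
    assert_eq!(interner.get(0), None); // a zeroed "vtable pointer" is rejected
}
```
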
@ -116,13 +128,18 @@ impl TlsData {
|
|||
}
|
||||
|
||||
struct StackFrame {
|
||||
body: Arc<MirBody>,
|
||||
locals: Locals,
|
||||
destination: Option<BasicBlockId>,
|
||||
prev_stack_ptr: usize,
|
||||
span: (MirSpan, DefWithBodyId),
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
enum MirOrDynIndex {
|
||||
Mir(Arc<MirBody>),
|
||||
Dyn(usize),
|
||||
}
|
||||
|
||||
pub struct Evaluator<'a> {
|
||||
db: &'a dyn HirDatabase,
|
||||
trait_env: Arc<TraitEnvironment>,
|
||||
|
@ -141,6 +158,17 @@ pub struct Evaluator<'a> {
|
|||
stdout: Vec<u8>,
|
||||
stderr: Vec<u8>,
|
||||
layout_cache: RefCell<FxHashMap<Ty, Arc<Layout>>>,
|
||||
projected_ty_cache: RefCell<FxHashMap<(Ty, PlaceElem), Ty>>,
|
||||
not_special_fn_cache: RefCell<FxHashSet<FunctionId>>,
|
||||
mir_or_dyn_index_cache: RefCell<FxHashMap<(FunctionId, Substitution), MirOrDynIndex>>,
|
||||
/// Constantly dropping and creating `Locals` is very costly. We store
|
||||
/// old locals that we normally want to drop here, to reuse their allocations
|
||||
/// later.
|
||||
unused_locals_store: RefCell<FxHashMap<DefWithBodyId, Vec<Locals>>>,
|
||||
cached_ptr_size: usize,
|
||||
cached_fn_trait_func: Option<FunctionId>,
|
||||
cached_fn_mut_trait_func: Option<FunctionId>,
|
||||
cached_fn_once_trait_func: Option<FunctionId>,
|
||||
crate_id: CrateId,
|
||||
// FIXME: This is a workaround, see the comment on `interpret_mir`
|
||||
assert_placeholder_ty_is_unused: bool,
|
||||
|
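
The `unused_locals_store` field added above is an allocation pool: instead of dropping a finished frame's `Locals`, the evaluator parks them per body and hands them back out on the next call. A hedged sketch of that idea with illustrative stand-in types (not the real rust-analyzer types):

```rust
use std::collections::HashMap;

// Illustrative stand-ins.
type DefWithBodyId = u32;

#[derive(Default)]
struct Locals {
    slots: Vec<u64>,
}

#[derive(Default)]
struct UnusedLocalsStore {
    unused: HashMap<DefWithBodyId, Vec<Locals>>,
}

impl UnusedLocalsStore {
    /// Reuse a parked `Locals` for this body if one exists, keeping its
    /// backing allocation; otherwise allocate fresh.
    fn take(&mut self, body: DefWithBodyId) -> Locals {
        self.unused.entry(body).or_default().pop().unwrap_or_default()
    }

    /// Park a finished frame's locals for later reuse instead of dropping them.
    fn give_back(&mut self, body: DefWithBodyId, mut locals: Locals) {
        locals.slots.clear(); // drop the contents, keep the capacity
        self.unused.entry(body).or_default().push(locals);
    }
}

fn main() {
    let mut store = UnusedLocalsStore::default();
    let mut locals = store.take(0);
    locals.slots.resize(1024, 0);
    let capacity = locals.slots.capacity();
    store.give_back(0, locals);
    // The next frame for the same body reuses the old allocation.
    assert!(store.take(0).slots.capacity() >= capacity);
}
```
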
@ -313,6 +341,7 @@ pub enum MirEvalError {
|
|||
InvalidVTableId(usize),
|
||||
CoerceUnsizedError(Ty),
|
||||
LangItemNotFound(LangItem),
|
||||
BrokenLayout(Layout),
|
||||
}
|
||||
|
||||
impl MirEvalError {
|
||||
|
@ -399,6 +428,7 @@ impl MirEvalError {
|
|||
| MirEvalError::TargetDataLayoutNotAvailable
|
||||
| MirEvalError::CoerceUnsizedError(_)
|
||||
| MirEvalError::LangItemNotFound(_)
|
||||
| MirEvalError::BrokenLayout(_)
|
||||
| MirEvalError::InvalidVTableId(_) => writeln!(f, "{:?}", err)?,
|
||||
}
|
||||
Ok(())
|
||||
|
@ -433,6 +463,7 @@ impl std::fmt::Debug for MirEvalError {
|
|||
Self::CoerceUnsizedError(arg0) => {
|
||||
f.debug_tuple("CoerceUnsizedError").field(arg0).finish()
|
||||
}
|
||||
Self::BrokenLayout(arg0) => f.debug_tuple("BrokenLayout").field(arg0).finish(),
|
||||
Self::InvalidVTableId(arg0) => f.debug_tuple("InvalidVTableId").field(arg0).finish(),
|
||||
Self::NotSupported(arg0) => f.debug_tuple("NotSupported").field(arg0).finish(),
|
||||
Self::InvalidConst(arg0) => {
|
||||
|
@ -464,8 +495,16 @@ impl DropFlags {
|
|||
|
||||
fn remove_place(&mut self, p: &Place) -> bool {
|
||||
// FIXME: replace parents with parts
|
||||
if let Some(parent) = p.iterate_over_parents().find(|it| self.need_drop.contains(&it)) {
|
||||
self.need_drop.remove(&parent);
|
||||
return true;
|
||||
}
|
||||
self.need_drop.remove(p)
|
||||
}
|
||||
|
||||
fn clear(&mut self) {
|
||||
self.need_drop.clear();
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
|
@ -508,6 +547,11 @@ pub fn interpret_mir(
|
|||
)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
const EXECUTION_LIMIT: usize = 100_000;
|
||||
#[cfg(not(test))]
|
||||
const EXECUTION_LIMIT: usize = 10_000_000;
|
||||
|
||||
impl Evaluator<'_> {
|
||||
pub fn new<'a>(
|
||||
db: &'a dyn HirDatabase,
|
||||
|
@ -531,9 +575,29 @@ impl Evaluator<'_> {
|
|||
stderr: vec![],
|
||||
assert_placeholder_ty_is_unused,
|
||||
stack_depth_limit: 100,
|
||||
execution_limit: 1000_000,
|
||||
execution_limit: EXECUTION_LIMIT,
|
||||
memory_limit: 1000_000_000, // 2GB, 1GB for stack and 1GB for heap
|
||||
layout_cache: RefCell::new(HashMap::default()),
|
||||
projected_ty_cache: RefCell::new(HashMap::default()),
|
||||
not_special_fn_cache: RefCell::new(HashSet::default()),
|
||||
mir_or_dyn_index_cache: RefCell::new(HashMap::default()),
|
||||
unused_locals_store: RefCell::new(HashMap::default()),
|
||||
cached_ptr_size: match db.target_data_layout(crate_id) {
|
||||
Some(it) => it.pointer_size.bytes_usize(),
|
||||
None => 8,
|
||||
},
|
||||
cached_fn_trait_func: db
|
||||
.lang_item(crate_id, LangItem::Fn)
|
||||
.and_then(|x| x.as_trait())
|
||||
.and_then(|x| db.trait_data(x).method_by_name(&name![call])),
|
||||
cached_fn_mut_trait_func: db
|
||||
.lang_item(crate_id, LangItem::FnMut)
|
||||
.and_then(|x| x.as_trait())
|
||||
.and_then(|x| db.trait_data(x).method_by_name(&name![call_mut])),
|
||||
cached_fn_once_trait_func: db
|
||||
.lang_item(crate_id, LangItem::FnOnce)
|
||||
.and_then(|x| x.as_trait())
|
||||
.and_then(|x| db.trait_data(x).method_by_name(&name![call_once])),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -554,10 +618,34 @@ impl Evaluator<'_> {
|
|||
}
|
||||
|
||||
fn ptr_size(&self) -> usize {
|
||||
match self.db.target_data_layout(self.crate_id) {
|
||||
Some(it) => it.pointer_size.bytes_usize(),
|
||||
None => 8,
|
||||
self.cached_ptr_size
|
||||
}
|
||||
|
||||
fn projected_ty(&self, ty: Ty, proj: PlaceElem) -> Ty {
|
||||
let pair = (ty, proj);
|
||||
if let Some(r) = self.projected_ty_cache.borrow().get(&pair) {
|
||||
return r.clone();
|
||||
}
|
||||
let (ty, proj) = pair;
|
||||
let r = proj.projected_ty(
|
||||
ty.clone(),
|
||||
self.db,
|
||||
|c, subst, f| {
|
||||
let (def, _) = self.db.lookup_intern_closure(c.into());
|
||||
let infer = self.db.infer(def);
|
||||
let (captures, _) = infer.closure_info(&c);
|
||||
let parent_subst = ClosureSubst(subst).parent_subst();
|
||||
captures
|
||||
.get(f)
|
||||
.expect("broken closure field")
|
||||
.ty
|
||||
.clone()
|
||||
.substitute(Interner, parent_subst)
|
||||
},
|
||||
self.crate_id,
|
||||
);
|
||||
self.projected_ty_cache.borrow_mut().insert((ty, proj), r.clone());
|
||||
r
|
||||
}
|
||||
|
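
`projected_ty_cache` (and the other `RefCell<FxHashMap<..>>` fields added in this change) follow one pattern: memoization behind interior mutability, so that `&self` methods can fill the cache. A minimal sketch of the pattern, with the expensive projection computation replaced by a placeholder:

```rust
use std::cell::RefCell;
use std::collections::HashMap;

struct Memo {
    cache: RefCell<HashMap<u64, u64>>,
}

impl Memo {
    fn projected(&self, key: u64) -> u64 {
        // The shared borrow is a temporary that ends with this `if let`,
        // so it is released before the mutable borrow below.
        if let Some(&cached) = self.cache.borrow().get(&key) {
            return cached;
        }
        let value = key.wrapping_mul(key); // stand-in for the real computation
        self.cache.borrow_mut().insert(key, value);
        value
    }
}

fn main() {
    let memo = Memo { cache: RefCell::new(HashMap::new()) };
    assert_eq!(memo.projected(7), 49);
    assert_eq!(memo.projected(7), 49); // second call is served from the cache
}
```
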
||||
fn place_addr_and_ty_and_metadata<'a>(
|
||||
|
@ -570,23 +658,7 @@ impl Evaluator<'_> {
|
|||
let mut metadata: Option<IntervalOrOwned> = None; // locals are always sized
|
||||
for proj in &*p.projection {
|
||||
let prev_ty = ty.clone();
|
||||
ty = proj.projected_ty(
|
||||
ty,
|
||||
self.db,
|
||||
|c, subst, f| {
|
||||
let (def, _) = self.db.lookup_intern_closure(c.into());
|
||||
let infer = self.db.infer(def);
|
||||
let (captures, _) = infer.closure_info(&c);
|
||||
let parent_subst = ClosureSubst(subst).parent_subst();
|
||||
captures
|
||||
.get(f)
|
||||
.expect("broken closure field")
|
||||
.ty
|
||||
.clone()
|
||||
.substitute(Interner, parent_subst)
|
||||
},
|
||||
self.crate_id,
|
||||
);
|
||||
ty = self.projected_ty(ty, proj.clone());
|
||||
match proj {
|
||||
ProjectionElem::Deref => {
|
||||
metadata = if self.size_align_of(&ty, locals)?.is_none() {
|
||||
|
@ -680,8 +752,10 @@ impl Evaluator<'_> {
|
|||
.offset(u32::from(f.local_id.into_raw()) as usize)
|
||||
.bytes_usize();
|
||||
addr = addr.offset(offset);
|
||||
// FIXME: support structs with unsized fields
|
||||
metadata = None;
|
||||
// Unsized field metadata is equal to the metadata of the struct
|
||||
if self.size_align_of(&ty, locals)?.is_some() {
|
||||
metadata = None;
|
||||
}
|
||||
}
|
||||
ProjectionElem::OpaqueCast(_) => not_supported!("opaque cast"),
|
||||
}
|
||||
|
@ -702,9 +776,7 @@ impl Evaluator<'_> {
|
|||
}
|
||||
|
||||
fn layout_adt(&self, adt: AdtId, subst: Substitution) -> Result<Arc<Layout>> {
|
||||
self.db.layout_of_adt(adt, subst.clone(), self.trait_env.clone()).map_err(|e| {
|
||||
MirEvalError::LayoutError(e, TyKind::Adt(chalk_ir::AdtId(adt), subst).intern(Interner))
|
||||
})
|
||||
self.layout(&TyKind::Adt(chalk_ir::AdtId(adt), subst).intern(Interner))
|
||||
}
|
||||
|
||||
fn place_ty<'a>(&'a self, p: &Place, locals: &'a Locals) -> Result<Ty> {
|
||||
|
@ -740,18 +812,18 @@ impl Evaluator<'_> {
|
|||
return Err(MirEvalError::StackOverflow);
|
||||
}
|
||||
let mut current_block_idx = body.start_block;
|
||||
let (mut locals, prev_stack_ptr) = self.create_locals_for_body(body.clone(), None)?;
|
||||
let (mut locals, prev_stack_ptr) = self.create_locals_for_body(&body, None)?;
|
||||
self.fill_locals_for_body(&body, &mut locals, args)?;
|
||||
let prev_code_stack = mem::take(&mut self.code_stack);
|
||||
let span = (MirSpan::Unknown, body.owner);
|
||||
self.code_stack.push(StackFrame { body, locals, destination: None, prev_stack_ptr, span });
|
||||
self.code_stack.push(StackFrame { locals, destination: None, prev_stack_ptr, span });
|
||||
'stack: loop {
|
||||
let Some(mut my_stack_frame) = self.code_stack.pop() else {
|
||||
not_supported!("missing stack frame");
|
||||
};
|
||||
let e = (|| {
|
||||
let mut locals = &mut my_stack_frame.locals;
|
||||
let body = &*my_stack_frame.body;
|
||||
let body = locals.body.clone();
|
||||
loop {
|
||||
let current_block = &body.basic_blocks[current_block_idx];
|
||||
if let Some(it) = self.execution_limit.checked_sub(1) {
|
||||
|
@ -820,7 +892,7 @@ impl Evaluator<'_> {
|
|||
locals.drop_flags.add_place(destination.clone());
|
||||
if let Some(stack_frame) = stack_frame {
|
||||
self.code_stack.push(my_stack_frame);
|
||||
current_block_idx = stack_frame.body.start_block;
|
||||
current_block_idx = stack_frame.locals.body.start_block;
|
||||
self.code_stack.push(stack_frame);
|
||||
return Ok(None);
|
||||
} else {
|
||||
|
@ -861,18 +933,24 @@ impl Evaluator<'_> {
|
|||
let my_code_stack = mem::replace(&mut self.code_stack, prev_code_stack);
|
||||
let mut error_stack = vec![];
|
||||
for frame in my_code_stack.into_iter().rev() {
|
||||
if let DefWithBodyId::FunctionId(f) = frame.body.owner {
|
||||
if let DefWithBodyId::FunctionId(f) = frame.locals.body.owner {
|
||||
error_stack.push((Either::Left(f), frame.span.0, frame.span.1));
|
||||
}
|
||||
}
|
||||
return Err(MirEvalError::InFunction(Box::new(e), error_stack));
|
||||
}
|
||||
};
|
||||
let return_interval = my_stack_frame.locals.ptr[return_slot()];
|
||||
self.unused_locals_store
|
||||
.borrow_mut()
|
||||
.entry(my_stack_frame.locals.body.owner)
|
||||
.or_default()
|
||||
.push(my_stack_frame.locals);
|
||||
match my_stack_frame.destination {
|
||||
None => {
|
||||
self.code_stack = prev_code_stack;
|
||||
self.stack_depth_limit += 1;
|
||||
return Ok(my_stack_frame.locals.ptr[return_slot()].get(self)?.to_vec());
|
||||
return Ok(return_interval.get(self)?.to_vec());
|
||||
}
|
||||
Some(bb) => {
|
||||
// We don't support const promotion, so we can't truncate the stack yet.
|
||||
|
@ -910,39 +988,45 @@ impl Evaluator<'_> {
|
|||
|
||||
fn create_locals_for_body(
|
||||
&mut self,
|
||||
body: Arc<MirBody>,
|
||||
body: &Arc<MirBody>,
|
||||
destination: Option<Interval>,
|
||||
) -> Result<(Locals, usize)> {
|
||||
let mut locals =
|
||||
Locals { ptr: ArenaMap::new(), body: body.clone(), drop_flags: DropFlags::default() };
|
||||
let (locals_ptr, stack_size) = {
|
||||
match self.unused_locals_store.borrow_mut().entry(body.owner).or_default().pop() {
|
||||
None => Locals {
|
||||
ptr: ArenaMap::new(),
|
||||
body: body.clone(),
|
||||
drop_flags: DropFlags::default(),
|
||||
},
|
||||
Some(mut l) => {
|
||||
l.drop_flags.clear();
|
||||
l.body = body.clone();
|
||||
l
|
||||
}
|
||||
};
|
||||
let stack_size = {
|
||||
let mut stack_ptr = self.stack.len();
|
||||
let addr = body
|
||||
.locals
|
||||
.iter()
|
||||
.map(|(id, it)| {
|
||||
if id == return_slot() {
|
||||
if let Some(destination) = destination {
|
||||
return Ok((id, destination));
|
||||
}
|
||||
for (id, it) in body.locals.iter() {
|
||||
if id == return_slot() {
|
||||
if let Some(destination) = destination {
|
||||
locals.ptr.insert(id, destination);
|
||||
continue;
|
||||
}
|
||||
let (size, align) = self.size_align_of_sized(
|
||||
&it.ty,
|
||||
&locals,
|
||||
"no unsized local in extending stack",
|
||||
)?;
|
||||
while stack_ptr % align != 0 {
|
||||
stack_ptr += 1;
|
||||
}
|
||||
let my_ptr = stack_ptr;
|
||||
stack_ptr += size;
|
||||
Ok((id, Interval { addr: Stack(my_ptr), size }))
|
||||
})
|
||||
.collect::<Result<ArenaMap<LocalId, _>>>()?;
|
||||
let stack_size = stack_ptr - self.stack.len();
|
||||
(addr, stack_size)
|
||||
}
|
||||
let (size, align) = self.size_align_of_sized(
|
||||
&it.ty,
|
||||
&locals,
|
||||
"no unsized local in extending stack",
|
||||
)?;
|
||||
while stack_ptr % align != 0 {
|
||||
stack_ptr += 1;
|
||||
}
|
||||
let my_ptr = stack_ptr;
|
||||
stack_ptr += size;
|
||||
locals.ptr.insert(id, Interval { addr: Stack(my_ptr), size });
|
||||
}
|
||||
stack_ptr - self.stack.len()
|
||||
};
|
||||
locals.ptr = locals_ptr;
|
||||
let prev_stack_pointer = self.stack.len();
|
||||
if stack_size > self.memory_limit {
|
||||
return Err(MirEvalError::Panic(format!(
|
||||
|
@ -1543,12 +1627,18 @@ impl Evaluator<'_> {
|
|||
) -> Result<Vec<u8>> {
|
||||
let mut result = vec![0; size];
|
||||
if let Some((offset, size, value)) = tag {
|
||||
result[offset..offset + size].copy_from_slice(&value.to_le_bytes()[0..size]);
|
||||
match result.get_mut(offset..offset + size) {
|
||||
Some(it) => it.copy_from_slice(&value.to_le_bytes()[0..size]),
|
||||
None => return Err(MirEvalError::BrokenLayout(variant_layout.clone())),
|
||||
}
|
||||
}
|
||||
for (i, op) in values.enumerate() {
|
||||
let offset = variant_layout.fields.offset(i).bytes_usize();
|
||||
let op = op.get(&self)?;
|
||||
result[offset..offset + op.len()].copy_from_slice(op);
|
||||
match result.get_mut(offset..offset + op.len()) {
|
||||
Some(it) => it.copy_from_slice(op),
|
||||
None => return Err(MirEvalError::BrokenLayout(variant_layout.clone())),
|
||||
}
|
||||
}
|
||||
Ok(result)
|
||||
}
|
||||
|
@ -1671,6 +1761,11 @@ impl Evaluator<'_> {
|
|||
}
|
||||
|
||||
fn size_align_of(&self, ty: &Ty, locals: &Locals) -> Result<Option<(usize, usize)>> {
|
||||
if let Some(layout) = self.layout_cache.borrow().get(ty) {
|
||||
return Ok(layout
|
||||
.is_sized()
|
||||
.then(|| (layout.size.bytes_usize(), layout.align.abi.bytes() as usize)));
|
||||
}
|
||||
if let DefWithBodyId::VariantId(f) = locals.body.owner {
|
||||
if let Some((adt, _)) = ty.as_adt() {
|
||||
if AdtId::from(f.parent) == adt {
|
||||
|
@ -1731,16 +1826,15 @@ impl Evaluator<'_> {
|
|||
}
|
||||
|
||||
fn detect_fn_trait(&self, def: FunctionId) -> Option<FnTrait> {
|
||||
use LangItem::*;
|
||||
let ItemContainerId::TraitId(parent) = self.db.lookup_intern_function(def).container else {
|
||||
return None;
|
||||
};
|
||||
let l = self.db.lang_attr(parent.into())?;
|
||||
match l {
|
||||
FnOnce => Some(FnTrait::FnOnce),
|
||||
FnMut => Some(FnTrait::FnMut),
|
||||
Fn => Some(FnTrait::Fn),
|
||||
_ => None,
|
||||
let def = Some(def);
|
||||
if def == self.cached_fn_trait_func {
|
||||
Some(FnTrait::Fn)
|
||||
} else if def == self.cached_fn_mut_trait_func {
|
||||
Some(FnTrait::FnMut)
|
||||
} else if def == self.cached_fn_once_trait_func {
|
||||
Some(FnTrait::FnOnce)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1796,6 +1890,17 @@ impl Evaluator<'_> {
|
|||
}
|
||||
}
|
||||
}
|
||||
chalk_ir::TyKind::Array(inner, len) => {
|
||||
let len = match try_const_usize(this.db, &len) {
|
||||
Some(it) => it as usize,
|
||||
None => not_supported!("non evaluatable array len in patching addresses"),
|
||||
};
|
||||
let size = this.size_of_sized(inner, locals, "inner of array")?;
|
||||
for i in 0..len {
|
||||
let offset = i * size;
|
||||
rec(this, &bytes[offset..offset + size], inner, locals, mm)?;
|
||||
}
|
||||
}
|
||||
chalk_ir::TyKind::Tuple(_, subst) => {
|
||||
let layout = this.layout(ty)?;
|
||||
for (id, ty) in subst.iter(Interner).enumerate() {
|
||||
|
@ -1904,10 +2009,31 @@ impl Evaluator<'_> {
|
|||
AdtId::UnionId(_) => (),
|
||||
AdtId::EnumId(_) => (),
|
||||
},
|
||||
TyKind::Tuple(_, subst) => {
|
||||
for (id, ty) in subst.iter(Interner).enumerate() {
|
||||
let ty = ty.assert_ty_ref(Interner); // Tuple only has type argument
|
||||
let offset = layout.fields.offset(id).bytes_usize();
|
||||
self.patch_addresses(patch_map, old_vtable, addr.offset(offset), ty, locals)?;
|
||||
}
|
||||
}
|
||||
TyKind::Array(inner, len) => {
|
||||
let len = match try_const_usize(self.db, &len) {
|
||||
Some(it) => it as usize,
|
||||
None => not_supported!("non evaluatable array len in patching addresses"),
|
||||
};
|
||||
let size = self.size_of_sized(inner, locals, "inner of array")?;
|
||||
for i in 0..len {
|
||||
self.patch_addresses(
|
||||
patch_map,
|
||||
old_vtable,
|
||||
addr.offset(i * size),
|
||||
inner,
|
||||
locals,
|
||||
)?;
|
||||
}
|
||||
}
|
||||
TyKind::AssociatedType(_, _)
|
||||
| TyKind::Scalar(_)
|
||||
| TyKind::Tuple(_, _)
|
||||
| TyKind::Array(_, _)
|
||||
| TyKind::Slice(_)
|
||||
| TyKind::Raw(_, _)
|
||||
| TyKind::OpaqueType(_, _)
|
||||
|
@ -2051,6 +2177,40 @@ impl Evaluator<'_> {
|
|||
}
|
||||
}
|
||||
|
||||
fn get_mir_or_dyn_index(
|
||||
&self,
|
||||
def: FunctionId,
|
||||
generic_args: Substitution,
|
||||
locals: &Locals,
|
||||
span: MirSpan,
|
||||
) -> Result<MirOrDynIndex> {
|
||||
let pair = (def, generic_args);
|
||||
if let Some(r) = self.mir_or_dyn_index_cache.borrow().get(&pair) {
|
||||
return Ok(r.clone());
|
||||
}
|
||||
let (def, generic_args) = pair;
|
||||
let r = if let Some(self_ty_idx) =
|
||||
is_dyn_method(self.db, self.trait_env.clone(), def, generic_args.clone())
|
||||
{
|
||||
MirOrDynIndex::Dyn(self_ty_idx)
|
||||
} else {
|
||||
let (imp, generic_args) =
|
||||
self.db.lookup_impl_method(self.trait_env.clone(), def, generic_args.clone());
|
||||
let mir_body = self
|
||||
.db
|
||||
.monomorphized_mir_body(imp.into(), generic_args, self.trait_env.clone())
|
||||
.map_err(|e| {
|
||||
MirEvalError::InFunction(
|
||||
Box::new(MirEvalError::MirLowerError(imp, e)),
|
||||
vec![(Either::Left(imp), span, locals.body.owner)],
|
||||
)
|
||||
})?;
|
||||
MirOrDynIndex::Mir(mir_body)
|
||||
};
|
||||
self.mir_or_dyn_index_cache.borrow_mut().insert((def, generic_args), r.clone());
|
||||
Ok(r)
|
||||
}
|
||||
|
||||
fn exec_fn_with_args(
|
||||
&mut self,
|
||||
def: FunctionId,
|
||||
|
@ -2072,93 +2232,76 @@ impl Evaluator<'_> {
|
|||
return Ok(None);
|
||||
}
|
||||
let arg_bytes = args.iter().map(|it| IntervalOrOwned::Borrowed(it.interval));
|
||||
if let Some(self_ty_idx) =
|
||||
is_dyn_method(self.db, self.trait_env.clone(), def, generic_args.clone())
|
||||
{
|
||||
// In the layout of current possible receiver, which at the moment of writing this code is one of
|
||||
// `&T`, `&mut T`, `Box<T>`, `Rc<T>`, `Arc<T>`, and `Pin<P>` where `P` is one of possible receivers,
|
||||
// the vtable is exactly in the `[ptr_size..2*ptr_size]` bytes. So we can use it without branching on
|
||||
// the type.
|
||||
let first_arg = arg_bytes.clone().next().unwrap();
|
||||
let first_arg = first_arg.get(self)?;
|
||||
let ty =
|
||||
self.vtable_map.ty_of_bytes(&first_arg[self.ptr_size()..self.ptr_size() * 2])?;
|
||||
let mut args_for_target = args.to_vec();
|
||||
args_for_target[0] = IntervalAndTy {
|
||||
interval: args_for_target[0].interval.slice(0..self.ptr_size()),
|
||||
ty: ty.clone(),
|
||||
};
|
||||
let ty = GenericArgData::Ty(ty.clone()).intern(Interner);
|
||||
let generics_for_target = Substitution::from_iter(
|
||||
Interner,
|
||||
generic_args.iter(Interner).enumerate().map(|(i, it)| {
|
||||
if i == self_ty_idx {
|
||||
&ty
|
||||
} else {
|
||||
it
|
||||
}
|
||||
}),
|
||||
);
|
||||
return self.exec_fn_with_args(
|
||||
def,
|
||||
&args_for_target,
|
||||
generics_for_target,
|
||||
match self.get_mir_or_dyn_index(def, generic_args.clone(), locals, span)? {
|
||||
MirOrDynIndex::Dyn(self_ty_idx) => {
|
||||
// In the layout of current possible receiver, which at the moment of writing this code is one of
|
||||
// `&T`, `&mut T`, `Box<T>`, `Rc<T>`, `Arc<T>`, and `Pin<P>` where `P` is one of possible receivers,
|
||||
// the vtable is exactly in the `[ptr_size..2*ptr_size]` bytes. So we can use it without branching on
|
||||
// the type.
|
||||
let first_arg = arg_bytes.clone().next().unwrap();
|
||||
let first_arg = first_arg.get(self)?;
|
||||
let ty = self
|
||||
.vtable_map
|
||||
.ty_of_bytes(&first_arg[self.ptr_size()..self.ptr_size() * 2])?;
|
||||
let mut args_for_target = args.to_vec();
|
||||
args_for_target[0] = IntervalAndTy {
|
||||
interval: args_for_target[0].interval.slice(0..self.ptr_size()),
|
||||
ty: ty.clone(),
|
||||
};
|
||||
let ty = GenericArgData::Ty(ty.clone()).intern(Interner);
|
||||
let generics_for_target = Substitution::from_iter(
|
||||
Interner,
|
||||
generic_args.iter(Interner).enumerate().map(|(i, it)| {
|
||||
if i == self_ty_idx {
|
||||
&ty
|
||||
} else {
|
||||
it
|
||||
}
|
||||
}),
|
||||
);
|
||||
return self.exec_fn_with_args(
|
||||
def,
|
||||
&args_for_target,
|
||||
generics_for_target,
|
||||
locals,
|
||||
destination,
|
||||
target_bb,
|
||||
span,
|
||||
);
|
||||
}
|
||||
MirOrDynIndex::Mir(body) => self.exec_looked_up_function(
|
||||
body,
|
||||
locals,
|
||||
def,
|
||||
arg_bytes,
|
||||
span,
|
||||
destination,
|
||||
target_bb,
|
||||
span,
|
||||
);
|
||||
),
|
||||
}
|
||||
let (imp, generic_args) =
|
||||
self.db.lookup_impl_method(self.trait_env.clone(), def, generic_args);
|
||||
self.exec_looked_up_function(
|
||||
generic_args,
|
||||
locals,
|
||||
imp,
|
||||
arg_bytes,
|
||||
span,
|
||||
destination,
|
||||
target_bb,
|
||||
)
|
||||
}
|
||||
|
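
The receiver handling above leans on all the listed receiver types being wide pointers. The snippet below checks only the part stable Rust does guarantee, that each of them is two words; the further assumption that the vtable half occupies the second word (`[ptr_size..2*ptr_size]`) is an implementation detail the interpreter relies on, not something this sketch can verify.

```rust
use std::mem::size_of;
use std::pin::Pin;
use std::rc::Rc;
use std::sync::Arc;

trait Draw {}

fn main() {
    let wide = 2 * size_of::<usize>(); // data pointer + vtable pointer
    assert_eq!(size_of::<&dyn Draw>(), wide);
    assert_eq!(size_of::<&mut dyn Draw>(), wide);
    assert_eq!(size_of::<Box<dyn Draw>>(), wide);
    assert_eq!(size_of::<Rc<dyn Draw>>(), wide);
    assert_eq!(size_of::<Arc<dyn Draw>>(), wide);
    assert_eq!(size_of::<Pin<Box<dyn Draw>>>(), wide);
}
```
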
||||
fn exec_looked_up_function(
|
||||
&mut self,
|
||||
generic_args: Substitution,
|
||||
mir_body: Arc<MirBody>,
|
||||
locals: &Locals,
|
||||
imp: FunctionId,
|
||||
def: FunctionId,
|
||||
arg_bytes: impl Iterator<Item = IntervalOrOwned>,
|
||||
span: MirSpan,
|
||||
destination: Interval,
|
||||
target_bb: Option<BasicBlockId>,
|
||||
) -> Result<Option<StackFrame>> {
|
||||
let def = imp.into();
|
||||
let mir_body = self
|
||||
.db
|
||||
.monomorphized_mir_body(def, generic_args, self.trait_env.clone())
|
||||
.map_err(|e| {
|
||||
MirEvalError::InFunction(
|
||||
Box::new(MirEvalError::MirLowerError(imp, e)),
|
||||
vec![(Either::Left(imp), span, locals.body.owner)],
|
||||
)
|
||||
})?;
|
||||
Ok(if let Some(target_bb) = target_bb {
|
||||
let (mut locals, prev_stack_ptr) =
|
||||
self.create_locals_for_body(mir_body.clone(), Some(destination))?;
|
||||
self.create_locals_for_body(&mir_body, Some(destination))?;
|
||||
self.fill_locals_for_body(&mir_body, &mut locals, arg_bytes.into_iter())?;
|
||||
let span = (span, locals.body.owner);
|
||||
Some(StackFrame {
|
||||
body: mir_body,
|
||||
locals,
|
||||
destination: Some(target_bb),
|
||||
prev_stack_ptr,
|
||||
span,
|
||||
})
|
||||
Some(StackFrame { locals, destination: Some(target_bb), prev_stack_ptr, span })
|
||||
} else {
|
||||
let result = self.interpret_mir(mir_body, arg_bytes).map_err(|e| {
|
||||
MirEvalError::InFunction(
|
||||
Box::new(e),
|
||||
vec![(Either::Left(imp), span, locals.body.owner)],
|
||||
vec![(Either::Left(def), span, locals.body.owner)],
|
||||
)
|
||||
})?;
|
||||
destination.write_from_bytes(self, &result)?;
|
||||
|
@ -2330,16 +2473,15 @@ impl Evaluator<'_> {
|
|||
// we can ignore drop in them.
|
||||
return Ok(());
|
||||
};
|
||||
let (impl_drop_candidate, subst) = self.db.lookup_impl_method(
|
||||
self.trait_env.clone(),
|
||||
drop_fn,
|
||||
Substitution::from1(Interner, ty.clone()),
|
||||
);
|
||||
if impl_drop_candidate != drop_fn {
|
||||
|
||||
let generic_args = Substitution::from1(Interner, ty.clone());
|
||||
if let Ok(MirOrDynIndex::Mir(body)) =
|
||||
self.get_mir_or_dyn_index(drop_fn, generic_args, locals, span)
|
||||
{
|
||||
self.exec_looked_up_function(
|
||||
subst,
|
||||
body,
|
||||
locals,
|
||||
impl_drop_candidate,
|
||||
drop_fn,
|
||||
[IntervalOrOwned::Owned(addr.to_bytes())].into_iter(),
|
||||
span,
|
||||
Interval { addr: Address::Invalid(0), size: 0 },
|
||||
|
|
|
@ -36,6 +36,9 @@ impl Evaluator<'_> {
|
|||
destination: Interval,
|
||||
span: MirSpan,
|
||||
) -> Result<bool> {
|
||||
if self.not_special_fn_cache.borrow().contains(&def) {
|
||||
return Ok(false);
|
||||
}
|
||||
let function_data = self.db.function_data(def);
|
||||
let is_intrinsic = match &function_data.abi {
|
||||
Some(abi) => *abi == Interned::new_str("rust-intrinsic"),
|
||||
|
@ -124,9 +127,88 @@ impl Evaluator<'_> {
|
|||
destination.write_from_bytes(self, &result)?;
|
||||
return Ok(true);
|
||||
}
|
||||
if let ItemContainerId::TraitId(t) = def.lookup(self.db.upcast()).container {
|
||||
if self.db.lang_attr(t.into()) == Some(LangItem::Clone) {
|
||||
let [self_ty] = generic_args.as_slice(Interner) else {
|
||||
not_supported!("wrong generic arg count for clone");
|
||||
};
|
||||
let Some(self_ty) = self_ty.ty(Interner) else {
|
||||
not_supported!("wrong generic arg kind for clone");
|
||||
};
|
||||
// Clone has special impls for tuples and function pointers
|
||||
if matches!(self_ty.kind(Interner), TyKind::Function(_) | TyKind::Tuple(..)) {
|
||||
self.exec_clone(def, args, self_ty.clone(), locals, destination, span)?;
|
||||
return Ok(true);
|
||||
}
|
||||
// Return early to prevent caching clone as non special fn.
|
||||
return Ok(false);
|
||||
}
|
||||
}
|
||||
self.not_special_fn_cache.borrow_mut().insert(def);
|
||||
Ok(false)
|
||||
}
|
||||
|
||||
/// Clone has special impls for tuples and function pointers
|
||||
fn exec_clone(
|
||||
&mut self,
|
||||
def: FunctionId,
|
||||
args: &[IntervalAndTy],
|
||||
self_ty: Ty,
|
||||
locals: &Locals,
|
||||
destination: Interval,
|
||||
span: MirSpan,
|
||||
) -> Result<()> {
|
||||
match self_ty.kind(Interner) {
|
||||
TyKind::Function(_) => {
|
||||
let [arg] = args else {
|
||||
not_supported!("wrong arg count for clone");
|
||||
};
|
||||
let addr = Address::from_bytes(arg.get(self)?)?;
|
||||
return destination
|
||||
.write_from_interval(self, Interval { addr, size: destination.size });
|
||||
}
|
||||
TyKind::Tuple(_, subst) => {
|
||||
let [arg] = args else {
|
||||
not_supported!("wrong arg count for clone");
|
||||
};
|
||||
let addr = Address::from_bytes(arg.get(self)?)?;
|
||||
let layout = self.layout(&self_ty)?;
|
||||
for (i, ty) in subst.iter(Interner).enumerate() {
|
||||
let ty = ty.assert_ty_ref(Interner);
|
||||
let size = self.layout(ty)?.size.bytes_usize();
|
||||
let tmp = self.heap_allocate(self.ptr_size(), self.ptr_size())?;
|
||||
let arg = IntervalAndTy {
|
||||
interval: Interval { addr: tmp, size: self.ptr_size() },
|
||||
ty: TyKind::Ref(Mutability::Not, static_lifetime(), ty.clone())
|
||||
.intern(Interner),
|
||||
};
|
||||
let offset = layout.fields.offset(i).bytes_usize();
|
||||
self.write_memory(tmp, &addr.offset(offset).to_bytes())?;
|
||||
self.exec_clone(
|
||||
def,
|
||||
&[arg],
|
||||
ty.clone(),
|
||||
locals,
|
||||
destination.slice(offset..offset + size),
|
||||
span,
|
||||
)?;
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
self.exec_fn_with_args(
|
||||
def,
|
||||
args,
|
||||
Substitution::from1(Interner, self_ty),
|
||||
locals,
|
||||
destination,
|
||||
None,
|
||||
span,
|
||||
)?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
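
For reference, this is the surface-level behaviour `exec_clone` reproduces: function pointers clone as a plain copy of the pointer, and tuples clone field by field, which is why the interpreter recurses into each element.

```rust
fn double(x: i32) -> i32 {
    x * 2
}

fn main() {
    // Function pointers: cloning just copies the pointer value.
    let f: fn(i32) -> i32 = double;
    let g = f.clone();
    assert_eq!(g(21), 42);

    // Tuples: cloning clones every element in order.
    let original = (String::from("interpreted"), 3u8);
    let copy = original.clone();
    assert_eq!(copy.0, "interpreted");
    assert_eq!(copy.1, 3);
    assert_eq!(original.0, "interpreted"); // original still usable
}
```
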
||||
fn exec_alloc_fn(
|
||||
&mut self,
|
||||
alloc_fn: &str,
|
||||
|
@ -618,12 +700,15 @@ impl Evaluator<'_> {
|
|||
else {
|
||||
return Err(MirEvalError::TypeError("type_name generic arg is not provided"));
|
||||
};
|
||||
let Ok(ty_name) = ty.display_source_code(
|
||||
let ty_name = match ty.display_source_code(
|
||||
self.db,
|
||||
locals.body.owner.module(self.db.upcast()),
|
||||
true,
|
||||
) else {
|
||||
not_supported!("fail in generating type_name using source code display");
|
||||
) {
|
||||
Ok(ty_name) => ty_name,
|
||||
// Fallback to human readable display in case of `Err`. Ideally we want to use `display_source_code` to
|
||||
// render full paths.
|
||||
Err(_) => ty.display(self.db).to_string(),
|
||||
};
|
||||
let len = ty_name.len();
|
||||
let addr = self.heap_allocate(len, 1)?;
|
||||
|
@ -679,7 +764,22 @@ impl Evaluator<'_> {
|
|||
let ans = lhs.wrapping_add(rhs);
|
||||
destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
|
||||
}
|
||||
"wrapping_sub" | "unchecked_sub" | "ptr_offset_from_unsigned" | "ptr_offset_from" => {
|
||||
"ptr_offset_from_unsigned" | "ptr_offset_from" => {
|
||||
let [lhs, rhs] = args else {
|
||||
return Err(MirEvalError::TypeError("wrapping_sub args are not provided"));
|
||||
};
|
||||
let lhs = i128::from_le_bytes(pad16(lhs.get(self)?, false));
|
||||
let rhs = i128::from_le_bytes(pad16(rhs.get(self)?, false));
|
||||
let ans = lhs.wrapping_sub(rhs);
|
||||
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
|
||||
else {
|
||||
return Err(MirEvalError::TypeError("ptr_offset_from generic arg is not provided"));
|
||||
};
|
||||
let size = self.size_of_sized(ty, locals, "ptr_offset_from arg")? as i128;
|
||||
let ans = ans / size;
|
||||
destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
|
||||
}
|
||||
"wrapping_sub" | "unchecked_sub" => {
|
||||
let [lhs, rhs] = args else {
|
||||
return Err(MirEvalError::TypeError("wrapping_sub args are not provided"));
|
||||
};
|
||||
|
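
The new `ptr_offset_from` arm above divides the byte difference by `size_of::<T>()` because the intrinsic counts in elements of the pointee type, not in bytes, matching the stable surface API:

```rust
fn main() {
    let data = [0u32; 8];
    let base = data.as_ptr();
    let third = unsafe { base.add(3) };

    // Both pointers are in bounds of the same array, so offset_from is fine.
    // The result is 3 *elements*, even though the pointers are 12 bytes apart.
    assert_eq!(unsafe { third.offset_from(base) }, 3);
    assert_eq!(third as usize - base as usize, 3 * std::mem::size_of::<u32>());
}
```
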
@ -1057,7 +1157,14 @@ impl Evaluator<'_> {
|
|||
_span: MirSpan,
|
||||
) -> Result<()> {
|
||||
// We are a single threaded runtime with no UB checking and no optimization, so
|
||||
// we can implement these as normal functions.
|
||||
// we can implement atomic intrinsics as normal functions.
|
||||
|
||||
if name.starts_with("singlethreadfence_") || name.starts_with("fence_") {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// The rest of atomic intrinsics have exactly one generic arg
|
||||
|
||||
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) else {
|
||||
return Err(MirEvalError::TypeError("atomic intrinsic generic arg is not provided"));
|
||||
};
|
||||
|
|
|
@ -182,6 +182,50 @@ fn main() {
|
|||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn drop_struct_field() {
|
||||
check_pass(
|
||||
r#"
|
||||
//- minicore: drop, add, option, cell, builtin_impls
|
||||
|
||||
use core::cell::Cell;
|
||||
|
||||
fn should_not_reach() {
|
||||
_ // FIXME: replace this function with panic when that works
|
||||
}
|
||||
|
||||
struct X<'a>(&'a Cell<i32>);
|
||||
impl<'a> Drop for X<'a> {
|
||||
fn drop(&mut self) {
|
||||
self.0.set(self.0.get() + 1)
|
||||
}
|
||||
}
|
||||
|
||||
struct Tuple<'a>(X<'a>, X<'a>, X<'a>);
|
||||
|
||||
fn main() {
|
||||
let s = Cell::new(0);
|
||||
{
|
||||
let x0 = X(&s);
|
||||
let xt = Tuple(x0, X(&s), X(&s));
|
||||
let x1 = xt.1;
|
||||
if s.get() != 0 {
|
||||
should_not_reach();
|
||||
}
|
||||
drop(xt.0);
|
||||
if s.get() != 1 {
|
||||
should_not_reach();
|
||||
}
|
||||
}
|
||||
// FIXME: this should be 3
|
||||
if s.get() != 2 {
|
||||
should_not_reach();
|
||||
}
|
||||
}
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn drop_in_place() {
|
||||
check_pass(
|
||||
|
@ -613,6 +657,50 @@ fn main() {
|
|||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn self_with_capital_s() {
|
||||
check_pass(
|
||||
r#"
|
||||
//- minicore: fn, add, copy
|
||||
|
||||
struct S1;
|
||||
|
||||
impl S1 {
|
||||
fn f() {
|
||||
Self;
|
||||
}
|
||||
}
|
||||
|
||||
struct S2 {
|
||||
f1: i32,
|
||||
}
|
||||
|
||||
impl S2 {
|
||||
fn f() {
|
||||
Self { f1: 5 };
|
||||
}
|
||||
}
|
||||
|
||||
struct S3(i32);
|
||||
|
||||
impl S3 {
|
||||
fn f() {
|
||||
Self(2);
|
||||
Self;
|
||||
let this = Self;
|
||||
this(2);
|
||||
}
|
||||
}
|
||||
|
||||
fn main() {
|
||||
S1::f();
|
||||
S2::f();
|
||||
S3::f();
|
||||
}
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn syscalls() {
|
||||
check_pass(
|
||||
|
|
|
@ -486,13 +486,10 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
|||
);
|
||||
Ok(Some(current))
|
||||
}
|
||||
ValueNs::FunctionId(_) | ValueNs::StructId(_) => {
|
||||
ValueNs::FunctionId(_) | ValueNs::StructId(_) | ValueNs::ImplSelf(_) => {
|
||||
// It's probably a unit struct or a zero sized function, so no action is needed.
|
||||
Ok(Some(current))
|
||||
}
|
||||
it => {
|
||||
not_supported!("unknown name {it:?} in value name space");
|
||||
}
|
||||
}
|
||||
}
|
||||
Expr::If { condition, then_branch, else_branch } => {
|
||||
|
@ -585,36 +582,6 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
|||
Ok(())
|
||||
})
|
||||
}
|
||||
Expr::While { condition, body, label } => {
|
||||
self.lower_loop(current, place, *label, expr_id.into(), |this, begin| {
|
||||
let scope = this.push_drop_scope();
|
||||
let Some((discr, to_switch)) =
|
||||
this.lower_expr_to_some_operand(*condition, begin)?
|
||||
else {
|
||||
return Ok(());
|
||||
};
|
||||
let fail_cond = this.new_basic_block();
|
||||
let after_cond = this.new_basic_block();
|
||||
this.set_terminator(
|
||||
to_switch,
|
||||
TerminatorKind::SwitchInt {
|
||||
discr,
|
||||
targets: SwitchTargets::static_if(1, after_cond, fail_cond),
|
||||
},
|
||||
expr_id.into(),
|
||||
);
|
||||
let fail_cond = this.drop_until_scope(this.drop_scopes.len() - 1, fail_cond);
|
||||
let end = this.current_loop_end()?;
|
||||
this.set_goto(fail_cond, end, expr_id.into());
|
||||
if let Some((_, block)) = this.lower_expr_as_place(after_cond, *body, true)? {
|
||||
let block = scope.pop_and_drop(this, block);
|
||||
this.set_goto(block, begin, expr_id.into());
|
||||
} else {
|
||||
scope.pop_assume_dropped(this);
|
||||
}
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
Expr::Call { callee, args, .. } => {
|
||||
if let Some((func_id, generic_args)) = self.infer.method_resolution(expr_id) {
|
||||
let ty = chalk_ir::TyKind::FnDef(
|
||||
|
@ -660,6 +627,11 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
|||
expr_id.into(),
|
||||
)
|
||||
}
|
||||
TyKind::Closure(_, _) => {
|
||||
not_supported!(
|
||||
"method resolution not emitted for closure (Are Fn traits available?)"
|
||||
);
|
||||
}
|
||||
TyKind::Error => {
|
||||
return Err(MirLowerError::MissingFunctionDefinition(self.owner, expr_id))
|
||||
}
|
||||
|
@ -1026,18 +998,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
|||
self.push_assignment(current, lhs_place, r_value, expr_id.into());
|
||||
return Ok(Some(current));
|
||||
} else {
|
||||
let Some((lhs_place, current)) =
|
||||
self.lower_expr_as_place(current, *lhs, false)?
|
||||
else {
|
||||
return Ok(None);
|
||||
};
|
||||
let Some((rhs_op, current)) =
|
||||
self.lower_expr_to_some_operand(*rhs, current)?
|
||||
else {
|
||||
return Ok(None);
|
||||
};
|
||||
self.push_assignment(current, lhs_place, rhs_op.into(), expr_id.into());
|
||||
return Ok(Some(current));
|
||||
return self.lower_assignment(current, *lhs, *rhs, expr_id.into());
|
||||
}
|
||||
}
|
||||
let Some((lhs_op, current)) = self.lower_expr_to_some_operand(*lhs, current)?
|
||||
|
@ -1283,6 +1244,30 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
|||
}
|
||||
}
|
||||
|
||||
fn lower_assignment(
|
||||
&mut self,
|
||||
current: BasicBlockId,
|
||||
lhs: ExprId,
|
||||
rhs: ExprId,
|
||||
span: MirSpan,
|
||||
) -> Result<Option<BasicBlockId>> {
|
||||
let Some((rhs_op, current)) =
|
||||
self.lower_expr_to_some_operand(rhs, current)?
|
||||
else {
|
||||
return Ok(None);
|
||||
};
|
||||
if matches!(&self.body.exprs[lhs], Expr::Underscore) {
|
||||
return Ok(Some(current));
|
||||
}
|
||||
let Some((lhs_place, current)) =
|
||||
self.lower_expr_as_place(current, lhs, false)?
|
||||
else {
|
||||
return Ok(None);
|
||||
};
|
||||
self.push_assignment(current, lhs_place, rhs_op.into(), span);
|
||||
Ok(Some(current))
|
||||
}
|
||||
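
The behaviour `lower_assignment` has to preserve, shown in plain Rust: the right-hand side is evaluated (for its side effects) even when the destination is `_`, which is why the helper lowers `rhs` before checking for `Expr::Underscore`.

```rust
fn side_effect() -> i32 {
    println!("rhs still runs");
    1
}

fn main() {
    // No place is written, but the call happens: `_ = expr` only discards
    // the value, it does not skip evaluating it.
    _ = side_effect();
}
```
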
|
||||
fn placeholder_subst(&mut self) -> Substitution {
|
||||
let placeholder_subst = match self.owner.as_generic_def_id() {
|
||||
Some(it) => TyBuilder::placeholder_subst(self.db, it),
|
||||
|
|
|
@ -227,3 +227,22 @@ fn f(a: impl Foo<i8, Assoc<i16> = i32>) {
|
|||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn fn_def_is_shown_as_fn_ptr() {
|
||||
check_types_source_code(
|
||||
r#"
|
||||
fn foo(_: i32) -> i64 { 42 }
|
||||
struct S<T>(T);
|
||||
enum E { A(usize) }
|
||||
fn test() {
|
||||
let f = foo;
|
||||
//^ fn(i32) -> i64
|
||||
let f = S::<i8>;
|
||||
//^ fn(i8) -> S<i8>
|
||||
let f = E::A;
|
||||
//^ fn(usize) -> E
|
||||
}
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
|
|
@ -209,6 +209,8 @@ fn expr_macro_def_expanded_in_various_places() {
|
|||
104..105 '_': IntoIterator::Item<isize>
|
||||
117..119 '{}': ()
|
||||
124..134 '|| spam!()': impl Fn() -> isize
|
||||
140..156 'while ...!() {}': !
|
||||
140..156 'while ...!() {}': ()
|
||||
140..156 'while ...!() {}': ()
|
||||
154..156 '{}': ()
|
||||
161..174 'break spam!()': !
|
||||
|
@ -300,6 +302,8 @@ fn expr_macro_rules_expanded_in_various_places() {
|
|||
118..119 '_': IntoIterator::Item<isize>
|
||||
131..133 '{}': ()
|
||||
138..148 '|| spam!()': impl Fn() -> isize
|
||||
154..170 'while ...!() {}': !
|
||||
154..170 'while ...!() {}': ()
|
||||
154..170 'while ...!() {}': ()
|
||||
168..170 '{}': ()
|
||||
175..188 'break spam!()': !
|
||||
|
|
|
@ -412,17 +412,23 @@ fn diverging_expression_3_break() {
|
|||
355..654 '{ ...; }; }': ()
|
||||
398..399 'x': u32
|
||||
407..433 '{ whil...; }; }': u32
|
||||
409..430 'while ...eak; }': !
|
||||
409..430 'while ...eak; }': ()
|
||||
409..430 'while ...eak; }': ()
|
||||
415..419 'true': bool
|
||||
420..430 '{ break; }': ()
|
||||
422..427 'break': !
|
||||
537..538 'x': u32
|
||||
546..564 '{ whil... {}; }': u32
|
||||
548..561 'while true {}': !
|
||||
548..561 'while true {}': ()
|
||||
548..561 'while true {}': ()
|
||||
554..558 'true': bool
|
||||
559..561 '{}': ()
|
||||
615..616 'x': u32
|
||||
624..651 '{ whil...; }; }': u32
|
||||
626..648 'while ...urn; }': !
|
||||
626..648 'while ...urn; }': ()
|
||||
626..648 'while ...urn; }': ()
|
||||
632..636 'true': bool
|
||||
637..648 '{ return; }': ()
|
||||
|
|
|
@ -1267,6 +1267,8 @@ fn test() {
|
|||
"#,
|
||||
expect![[r#"
|
||||
10..59 '{ ... } }': ()
|
||||
16..57 'while ... }': !
|
||||
16..57 'while ... }': ()
|
||||
16..57 'while ... }': ()
|
||||
22..30 '{ true }': bool
|
||||
24..28 'true': bool
|
||||
|
@ -1978,3 +1980,23 @@ fn x(a: [i32; 4]) {
|
|||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn dont_unify_on_casts() {
|
||||
// #15246
|
||||
check_types(
|
||||
r#"
|
||||
fn unify(_: [bool; 1]) {}
|
||||
fn casted(_: *const bool) {}
|
||||
fn default<T>() -> T { loop {} }
|
||||
|
||||
fn test() {
|
||||
let foo = default();
|
||||
//^^^ [bool; 1]
|
||||
|
||||
casted(&foo as *const _);
|
||||
unify(foo);
|
||||
}
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
|
|
@ -3513,7 +3513,6 @@ fn func() {
|
|||
);
|
||||
}
|
||||
|
||||
// FIXME
|
||||
#[test]
|
||||
fn castable_to() {
|
||||
check_infer(
|
||||
|
@ -3538,10 +3537,10 @@ fn func() {
|
|||
120..122 '{}': ()
|
||||
138..184 '{ ...0]>; }': ()
|
||||
148..149 'x': Box<[i32; 0]>
|
||||
152..160 'Box::new': fn new<[{unknown}; 0]>([{unknown}; 0]) -> Box<[{unknown}; 0]>
|
||||
152..164 'Box::new([])': Box<[{unknown}; 0]>
|
||||
152..160 'Box::new': fn new<[i32; 0]>([i32; 0]) -> Box<[i32; 0]>
|
||||
152..164 'Box::new([])': Box<[i32; 0]>
|
||||
152..181 'Box::n...2; 0]>': Box<[i32; 0]>
|
||||
161..163 '[]': [{unknown}; 0]
|
||||
161..163 '[]': [i32; 0]
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
@ -3577,6 +3576,21 @@ fn f<T>(t: Ark<T>) {
|
|||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ref_to_array_to_ptr_cast() {
|
||||
check_types(
|
||||
r#"
|
||||
fn default<T>() -> T { loop {} }
|
||||
fn foo() {
|
||||
let arr = [default()];
|
||||
//^^^ [i32; 1]
|
||||
let ref_to_arr = &arr;
|
||||
let casted = ref_to_arr as *const i32;
|
||||
}
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn const_dependent_on_local() {
|
||||
check_types(
|
||||
|
|
|
@ -12,9 +12,9 @@ use hir_ty::db::HirDatabase;
|
|||
use syntax::{ast, AstNode};
|
||||
|
||||
use crate::{
|
||||
Adt, AssocItem, Const, ConstParam, Enum, Field, Function, GenericParam, Impl, LifetimeParam,
|
||||
Macro, Module, ModuleDef, Static, Struct, Trait, TraitAlias, TypeAlias, TypeParam, Union,
|
||||
Variant,
|
||||
Adt, AssocItem, Const, ConstParam, Enum, ExternCrateDecl, Field, Function, GenericParam, Impl,
|
||||
LifetimeParam, Macro, Module, ModuleDef, Static, Struct, Trait, TraitAlias, TypeAlias,
|
||||
TypeParam, Union, Variant,
|
||||
};
|
||||
|
||||
pub trait HasAttrs {
|
||||
|
@ -120,6 +120,39 @@ impl HasAttrs for AssocItem {
|
|||
}
|
||||
}
|
||||
|
||||
impl HasAttrs for ExternCrateDecl {
|
||||
fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
|
||||
let def = AttrDefId::ExternCrateId(self.into());
|
||||
db.attrs_with_owner(def)
|
||||
}
|
||||
fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
|
||||
let crate_docs = self.resolved_crate(db)?.root_module().attrs(db).docs().map(String::from);
|
||||
let def = AttrDefId::ExternCrateId(self.into());
|
||||
let decl_docs = db.attrs(def).docs().map(String::from);
|
||||
match (decl_docs, crate_docs) {
|
||||
(None, None) => None,
|
||||
(Some(decl_docs), None) => Some(decl_docs),
|
||||
(None, Some(crate_docs)) => Some(crate_docs),
|
||||
(Some(mut decl_docs), Some(crate_docs)) => {
|
||||
decl_docs.push('\n');
|
||||
decl_docs.push('\n');
|
||||
decl_docs += &crate_docs;
|
||||
Some(decl_docs)
|
||||
}
|
||||
}
|
||||
.map(Documentation::new)
|
||||
}
|
||||
fn resolve_doc_path(
|
||||
self,
|
||||
db: &dyn HirDatabase,
|
||||
link: &str,
|
||||
ns: Option<Namespace>,
|
||||
) -> Option<ModuleDef> {
|
||||
let def = AttrDefId::ExternCrateId(self.into());
|
||||
resolve_doc_path(db, def, link, ns).map(ModuleDef::from)
|
||||
}
|
||||
}
|
||||
|
||||
/// Resolves the item `link` points to in the scope of `def`.
|
||||
fn resolve_doc_path(
|
||||
db: &dyn HirDatabase,
|
||||
|
@ -140,6 +173,7 @@ fn resolve_doc_path(
|
|||
AttrDefId::TypeAliasId(it) => it.resolver(db.upcast()),
|
||||
AttrDefId::ImplId(it) => it.resolver(db.upcast()),
|
||||
AttrDefId::ExternBlockId(it) => it.resolver(db.upcast()),
|
||||
AttrDefId::UseId(it) => it.resolver(db.upcast()),
|
||||
AttrDefId::MacroId(it) => it.resolver(db.upcast()),
|
||||
AttrDefId::ExternCrateId(it) => it.resolver(db.upcast()),
|
||||
AttrDefId::GenericParamId(it) => match it {
|
||||
|
|
|
@ -10,8 +10,3 @@ pub use hir_expand::db::{
|
|||
MacroExpandQuery, ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery,
|
||||
};
|
||||
pub use hir_ty::db::*;
|
||||
|
||||
#[test]
|
||||
fn hir_database_is_object_safe() {
|
||||
fn _assert_object_safe(_: &dyn HirDatabase) {}
|
||||
}
|
||||
|
|
|
@ -18,9 +18,9 @@ use hir_ty::{
|
|||
};
|
||||
|
||||
use crate::{
|
||||
Adt, AsAssocItem, AssocItemContainer, Const, ConstParam, Enum, Field, Function, GenericParam,
|
||||
HasCrate, HasVisibility, LifetimeParam, Macro, Module, Static, Struct, Trait, TraitAlias,
|
||||
TyBuilder, Type, TypeAlias, TypeOrConstParam, TypeParam, Union, Variant,
|
||||
Adt, AsAssocItem, AssocItemContainer, Const, ConstParam, Enum, ExternCrateDecl, Field,
|
||||
Function, GenericParam, HasCrate, HasVisibility, LifetimeParam, Macro, Module, Static, Struct,
|
||||
Trait, TraitAlias, TyBuilder, Type, TypeAlias, TypeOrConstParam, TypeParam, Union, Variant,
|
||||
};
|
||||
|
||||
impl HirDisplay for Function {
|
||||
|
@ -238,6 +238,18 @@ impl HirDisplay for Type {
|
|||
}
|
||||
}
|
||||
|
||||
impl HirDisplay for ExternCrateDecl {
|
||||
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
||||
write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
|
||||
f.write_str("extern crate ")?;
|
||||
write!(f, "{}", self.name(f.db).display(f.db.upcast()))?;
|
||||
if let Some(alias) = self.alias(f.db) {
|
||||
write!(f, " as {alias}",)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl HirDisplay for GenericParam {
|
||||
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
||||
match self {
|
||||
|
|
|
@ -15,7 +15,7 @@ use crate::{
|
|||
};
|
||||
|
||||
macro_rules! from_id {
|
||||
($(($id:path, $ty:path)),*) => {$(
|
||||
($(($id:path, $ty:path)),* $(,)?) => {$(
|
||||
impl From<$id> for $ty {
|
||||
fn from(id: $id) -> $ty {
|
||||
$ty { id }
|
||||
|
@ -47,7 +47,8 @@ from_id![
|
|||
(hir_def::TypeParamId, crate::TypeParam),
|
||||
(hir_def::ConstParamId, crate::ConstParam),
|
||||
(hir_def::LifetimeParamId, crate::LifetimeParam),
|
||||
(hir_def::MacroId, crate::Macro)
|
||||
(hir_def::MacroId, crate::Macro),
|
||||
(hir_def::ExternCrateId, crate::ExternCrateDecl),
|
||||
];
|
||||
|
||||
impl From<AdtId> for Adt {
|
||||
|
|
|
@ -11,9 +11,9 @@ use hir_expand::{HirFileId, InFile};
|
|||
use syntax::ast;
|
||||
|
||||
use crate::{
|
||||
db::HirDatabase, Adt, Const, Enum, Field, FieldSource, Function, Impl, LifetimeParam,
|
||||
LocalSource, Macro, Module, Static, Struct, Trait, TraitAlias, TypeAlias, TypeOrConstParam,
|
||||
Union, Variant,
|
||||
db::HirDatabase, Adt, Const, Enum, ExternCrateDecl, Field, FieldSource, Function, Impl,
|
||||
LifetimeParam, LocalSource, Macro, Module, Static, Struct, Trait, TraitAlias, TypeAlias,
|
||||
TypeOrConstParam, Union, Variant,
|
||||
};
|
||||
|
||||
pub trait HasSource {
|
||||
|
@ -207,3 +207,11 @@ impl HasSource for LocalSource {
|
|||
Some(self.source)
|
||||
}
|
||||
}
|
||||
|
||||
impl HasSource for ExternCrateDecl {
|
||||
type Ast = ast::ExternCrate;
|
||||
|
||||
fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
|
||||
Some(self.id.lookup(db.upcast()).source(db.upcast()))
|
||||
}
|
||||
}
|
||||
|
|
|
@ -48,14 +48,15 @@ use hir_def::{
layout::{self, ReprOptions, TargetDataLayout},
macro_id_to_def_id,
nameres::{self, diagnostics::DefDiagnostic},
path::ImportAlias,
per_ns::PerNs,
resolver::{HasResolver, Resolver},
src::HasSource as _,
AssocItemId, AssocItemLoc, AttrDefId, ConstId, ConstParamId, DefWithBodyId, EnumId,
EnumVariantId, FunctionId, GenericDefId, HasModule, ImplId, InTypeConstId, ItemContainerId,
LifetimeParamId, LocalEnumVariantId, LocalFieldId, Lookup, MacroExpander, MacroId, ModuleId,
StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId,
UnionId,
AssocItemId, AssocItemLoc, AttrDefId, ConstId, ConstParamId, CrateRootModuleId, DefWithBodyId,
EnumId, EnumVariantId, ExternCrateId, FunctionId, GenericDefId, HasModule, ImplId,
InTypeConstId, ItemContainerId, LifetimeParamId, LocalEnumVariantId, LocalFieldId, Lookup,
MacroExpander, MacroId, ModuleId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId,
TypeOrConstParamId, TypeParamId, UnionId,
};
use hir_expand::{name::name, MacroCallKind};
use hir_ty::{

@ -200,9 +201,8 @@ impl Crate {
db.crate_graph().transitive_rev_deps(self.id).map(|id| Crate { id })
}

pub fn root_module(self, db: &dyn HirDatabase) -> Module {
let def_map = db.crate_def_map(self.id);
Module { id: def_map.crate_root().into() }
pub fn root_module(self) -> Module {
Module { id: CrateRootModuleId::from(self.id).into() }
}

pub fn modules(self, db: &dyn HirDatabase) -> Vec<Module> {

@ -247,7 +247,7 @@ impl Crate {
/// Try to get the root URL of the documentation of a crate.
pub fn get_html_root_url(self: &Crate, db: &dyn HirDatabase) -> Option<String> {
// Look for #![doc(html_root_url = "...")]
let attrs = db.attrs(AttrDefId::ModuleId(self.root_module(db).into()));
let attrs = db.attrs(AttrDefId::ModuleId(self.root_module().into()));
let doc_url = attrs.by_key("doc").find_string_value_in_tt("html_root_url");
doc_url.map(|s| s.trim_matches('"').trim_end_matches('/').to_owned() + "/")
}

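The `get_html_root_url` hunk above only swaps `root_module(db)` for the new `root_module()`, but the URL normalization it performs is easy to miss. A minimal, self-contained sketch of just that string handling (the helper name is illustrative, not part of the commit):

/// Assumed helper, not rust-analyzer code: normalize a `#![doc(html_root_url = "...")]`
/// value the way the hunk above does.
fn normalize_html_root_url(raw: &str) -> String {
    // Strip surrounding quotes, drop any trailing slashes, then re-append exactly one.
    raw.trim_matches('"').trim_end_matches('/').to_owned() + "/"
}

fn main() {
    assert_eq!(normalize_html_root_url("\"https://docs.rs/serde/1.0.0/\""), "https://docs.rs/serde/1.0.0/");
    assert_eq!(normalize_html_root_url("https://example.com/docs"), "https://example.com/docs/");
}
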
@ -2128,6 +2128,47 @@ impl HasVisibility for Function {
}
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ExternCrateDecl {
pub(crate) id: ExternCrateId,
}

impl ExternCrateDecl {
pub fn module(self, db: &dyn HirDatabase) -> Module {
self.id.module(db.upcast()).into()
}

pub fn resolved_crate(self, db: &dyn HirDatabase) -> Option<Crate> {
db.extern_crate_decl_data(self.id).crate_id.map(Into::into)
}

pub fn name(self, db: &dyn HirDatabase) -> Name {
db.extern_crate_decl_data(self.id).name.clone()
}

pub fn alias(self, db: &dyn HirDatabase) -> Option<ImportAlias> {
db.extern_crate_decl_data(self.id).alias.clone()
}

/// Returns the name under which this crate is made accessible, taking `_` into account.
pub fn alias_or_name(self, db: &dyn HirDatabase) -> Option<Name> {
let extern_crate_decl_data = db.extern_crate_decl_data(self.id);
match &extern_crate_decl_data.alias {
Some(ImportAlias::Underscore) => None,
Some(ImportAlias::Alias(alias)) => Some(alias.clone()),
None => Some(extern_crate_decl_data.name.clone()),
}
}
}

impl HasVisibility for ExternCrateDecl {
fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
db.extern_crate_decl_data(self.id)
.visibility
.resolve(db.upcast(), &self.id.resolver(db.upcast()))
}
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct InTypeConst {
pub(crate) id: InTypeConstId,

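`alias_or_name` above encodes how `extern crate` renames behave. A small standalone sketch of the same decision table, using plain stand-in types rather than hir's `Name`/`ImportAlias` (illustrative only):

// Illustrative stand-ins, not hir types; they exist only to show the three cases.
enum Alias {
    Underscore,    // `extern crate foo as _;`
    Named(String), // `extern crate foo as bar;`
}

fn alias_or_name(declared_name: &str, alias: Option<&Alias>) -> Option<String> {
    match alias {
        // `as _` links the crate but introduces no usable name.
        Some(Alias::Underscore) => None,
        // `as bar` makes the crate accessible as `bar`.
        Some(Alias::Named(alias)) => Some(alias.clone()),
        // No alias: the crate keeps its declared name.
        None => Some(declared_name.to_owned()),
    }
}

fn main() {
    assert_eq!(alias_or_name("serde", None), Some("serde".to_owned()));
    assert_eq!(alias_or_name("serde", Some(&Alias::Named("sd".into()))), Some("sd".to_owned()));
    assert_eq!(alias_or_name("serde", Some(&Alias::Underscore)), None);
}
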
@ -4715,6 +4756,12 @@ pub trait HasContainer {
fn container(&self, db: &dyn HirDatabase) -> ItemContainer;
}

impl HasContainer for ExternCrateDecl {
fn container(&self, db: &dyn HirDatabase) -> ItemContainer {
container_id_to_hir(self.id.lookup(db.upcast()).container.into())
}
}

impl HasContainer for Module {
fn container(&self, db: &dyn HirDatabase) -> ItemContainer {
// FIXME: handle block expressions as modules (their parent is in a different DefMap)

@ -15,11 +15,7 @@ use hir_def::{
type_ref::Mutability,
AsMacroCall, DefWithBodyId, FieldId, FunctionId, MacroId, TraitId, VariantId,
};
use hir_expand::{
db::ExpandDatabase,
name::{known, AsName},
ExpansionInfo, MacroCallId,
};
use hir_expand::{db::ExpandDatabase, name::AsName, ExpansionInfo, MacroCallId};
use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::{smallvec, SmallVec};

@ -439,10 +435,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
self.imp.resolve_path(path)
}

pub fn resolve_extern_crate(&self, extern_crate: &ast::ExternCrate) -> Option<Crate> {
self.imp.resolve_extern_crate(extern_crate)
}

pub fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantDef> {
self.imp.resolve_variant(record_lit).map(VariantDef::from)
}

@ -1242,18 +1234,6 @@ impl<'db> SemanticsImpl<'db> {
self.analyze(path.syntax())?.resolve_path(self.db, path)
}

fn resolve_extern_crate(&self, extern_crate: &ast::ExternCrate) -> Option<Crate> {
let krate = self.scope(extern_crate.syntax())?.krate();
let name = extern_crate.name_ref()?.as_name();
if name == known::SELF_PARAM {
return Some(krate);
}
krate
.dependencies(self.db)
.into_iter()
.find_map(|dep| (dep.name == name).then_some(dep.krate))
}

fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantId> {
self.analyze(record_lit.syntax())?.resolve_variant(self.db, record_lit)
}

@ -1603,6 +1583,7 @@ to_def_impls![
(crate::Local, ast::SelfParam, self_param_to_def),
(crate::Label, ast::Label, label_to_def),
(crate::Adt, ast::Adt, adt_to_def),
(crate::ExternCrateDecl, ast::ExternCrate, extern_crate_to_def),
];

fn find_root(node: &SyntaxNode) -> SyntaxNode {

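The removed `resolve_extern_crate` resolved an `extern crate` item by name against the declaring crate's dependencies, with `self` mapping to the current crate; the `extern_crate_to_def` mapping added above replaces it. A rough standalone model of that lookup, with stand-in types instead of the real crate graph (assumed names, not rust-analyzer API):

// Illustrative model of the old lookup; CrateId/Dependency are stand-ins.
#[derive(Clone, Debug, PartialEq)]
struct CrateId(u32);

struct Dependency {
    name: String,
    krate: CrateId,
}

fn resolve_extern_crate(current: &CrateId, deps: &[Dependency], name: &str) -> Option<CrateId> {
    if name == "self" {
        // `extern crate self as foo;` refers to the declaring crate itself.
        return Some(current.clone());
    }
    // Otherwise the name must match one of the direct dependencies.
    deps.iter().find_map(|dep| (dep.name == name).then(|| dep.krate.clone()))
}

fn main() {
    let current = CrateId(0);
    let deps = vec![Dependency { name: "serde".into(), krate: CrateId(1) }];
    assert_eq!(resolve_extern_crate(&current, &deps, "serde"), Some(CrateId(1)));
    assert_eq!(resolve_extern_crate(&current, &deps, "self"), Some(CrateId(0)));
    assert_eq!(resolve_extern_crate(&current, &deps, "rand"), None);
}
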
@ -93,9 +93,9 @@ use hir_def::{
DynMap,
},
hir::{BindingId, LabelId},
AdtId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, FieldId, FunctionId,
GenericDefId, GenericParamId, ImplId, LifetimeParamId, MacroId, ModuleId, StaticId, StructId,
TraitAliasId, TraitId, TypeAliasId, TypeParamId, UnionId, VariantId,
AdtId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, ExternCrateId, FieldId,
FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId, MacroId, ModuleId, StaticId,
StructId, TraitAliasId, TraitId, TypeAliasId, TypeParamId, UnionId, UseId, VariantId,
};
use hir_expand::{attrs::AttrId, name::AsName, HirFileId, MacroCallId};
use rustc_hash::FxHashMap;

@ -203,6 +203,16 @@ impl SourceToDefCtx<'_, '_> {
) -> Option<EnumVariantId> {
self.to_def(src, keys::VARIANT)
}
pub(super) fn extern_crate_to_def(
&mut self,
src: InFile<ast::ExternCrate>,
) -> Option<ExternCrateId> {
self.to_def(src, keys::EXTERN_CRATE)
}
#[allow(dead_code)]
pub(super) fn use_to_def(&mut self, src: InFile<ast::Use>) -> Option<UseId> {
self.to_def(src, keys::USE)
}
pub(super) fn adt_to_def(
&mut self,
InFile { file_id, value }: InFile<ast::Adt>,

@ -3,7 +3,10 @@ use syntax::ast::{self, make, AstNode};
|
|||
|
||||
use crate::{
|
||||
assist_context::{AssistContext, Assists},
|
||||
utils::{add_trait_assoc_items_to_impl, filter_assoc_items, gen_trait_fn_body, DefaultMethods},
|
||||
utils::{
|
||||
add_trait_assoc_items_to_impl, filter_assoc_items, gen_trait_fn_body, DefaultMethods,
|
||||
IgnoreAssocItems,
|
||||
},
|
||||
AssistId, AssistKind,
|
||||
};
|
||||
|
||||
|
@ -43,6 +46,7 @@ pub(crate) fn add_missing_impl_members(acc: &mut Assists, ctx: &AssistContext<'_
|
|||
acc,
|
||||
ctx,
|
||||
DefaultMethods::No,
|
||||
IgnoreAssocItems::DocHiddenAttrPresent,
|
||||
"add_impl_missing_members",
|
||||
"Implement missing members",
|
||||
)
|
||||
|
@ -87,6 +91,7 @@ pub(crate) fn add_missing_default_members(
|
|||
acc,
|
||||
ctx,
|
||||
DefaultMethods::Only,
|
||||
IgnoreAssocItems::DocHiddenAttrPresent,
|
||||
"add_impl_default_members",
|
||||
"Implement default members",
|
||||
)
|
||||
|
@ -96,6 +101,7 @@ fn add_missing_impl_members_inner(
|
|||
acc: &mut Assists,
|
||||
ctx: &AssistContext<'_>,
|
||||
mode: DefaultMethods,
|
||||
ignore_items: IgnoreAssocItems,
|
||||
assist_id: &'static str,
|
||||
label: &'static str,
|
||||
) -> Option<()> {
|
||||
|
@ -115,10 +121,21 @@ fn add_missing_impl_members_inner(
|
|||
let trait_ref = impl_.trait_ref(ctx.db())?;
|
||||
let trait_ = trait_ref.trait_();
|
||||
|
||||
let mut ign_item = ignore_items;
|
||||
|
||||
if let IgnoreAssocItems::DocHiddenAttrPresent = ignore_items {
|
||||
// Relax condition for local crates.
|
||||
let db = ctx.db();
|
||||
if trait_.module(db).krate().origin(db).is_local() {
|
||||
ign_item = IgnoreAssocItems::No;
|
||||
}
|
||||
}
|
||||
|
||||
let missing_items = filter_assoc_items(
|
||||
&ctx.sema,
|
||||
&ide_db::traits::get_missing_assoc_items(&ctx.sema, &impl_def),
|
||||
mode,
|
||||
ign_item,
|
||||
);
|
||||
|
||||
if missing_items.is_empty() {
|
||||
|
@ -1966,4 +1983,169 @@ impl AnotherTrait<i32> for () {
|
|||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn doc_hidden_default_impls_ignored() {
|
||||
// doc(hidden) attr is ignored trait and impl both belong to the local crate.
|
||||
check_assist(
|
||||
add_missing_default_members,
|
||||
r#"
|
||||
struct Foo;
|
||||
trait Trait {
|
||||
#[doc(hidden)]
|
||||
fn func_with_default_impl() -> u32 {
|
||||
42
|
||||
}
|
||||
fn another_default_impl() -> u32 {
|
||||
43
|
||||
}
|
||||
}
|
||||
impl Tra$0it for Foo {}"#,
|
||||
r#"
|
||||
struct Foo;
|
||||
trait Trait {
|
||||
#[doc(hidden)]
|
||||
fn func_with_default_impl() -> u32 {
|
||||
42
|
||||
}
|
||||
fn another_default_impl() -> u32 {
|
||||
43
|
||||
}
|
||||
}
|
||||
impl Trait for Foo {
|
||||
$0fn func_with_default_impl() -> u32 {
|
||||
42
|
||||
}
|
||||
|
||||
fn another_default_impl() -> u32 {
|
||||
43
|
||||
}
|
||||
}"#,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn doc_hidden_default_impls_lang_crates() {
|
||||
// Not applicable because Eq has a single method and this has a #[doc(hidden)] attr set.
|
||||
check_assist_not_applicable(
|
||||
add_missing_default_members,
|
||||
r#"
|
||||
//- minicore: eq
|
||||
use core::cmp::Eq;
|
||||
struct Foo;
|
||||
impl E$0q for Foo { /* $0 */ }
|
||||
"#,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn doc_hidden_default_impls_lib_crates() {
|
||||
check_assist(
|
||||
add_missing_default_members,
|
||||
r#"
|
||||
//- /main.rs crate:a deps:b
|
||||
struct B;
|
||||
impl b::Exte$0rnTrait for B {}
|
||||
//- /lib.rs crate:b new_source_root:library
|
||||
pub trait ExternTrait {
|
||||
#[doc(hidden)]
|
||||
fn hidden_default() -> Option<()> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn unhidden_default() -> Option<()> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn unhidden_nondefault() -> Option<()>;
|
||||
}
|
||||
"#,
|
||||
r#"
|
||||
struct B;
|
||||
impl b::ExternTrait for B {
|
||||
$0fn unhidden_default() -> Option<()> {
|
||||
todo!()
|
||||
}
|
||||
}
|
||||
"#,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn doc_hidden_default_impls_local_crates() {
|
||||
check_assist(
|
||||
add_missing_default_members,
|
||||
r#"
|
||||
trait LocalTrait {
|
||||
#[doc(hidden)]
|
||||
fn no_skip_default() -> Option<()> {
|
||||
todo!()
|
||||
}
|
||||
fn no_skip_default_2() -> Option<()> {
|
||||
todo!()
|
||||
}
|
||||
}
|
||||
|
||||
struct B;
|
||||
impl Loc$0alTrait for B {}
|
||||
"#,
|
||||
r#"
|
||||
trait LocalTrait {
|
||||
#[doc(hidden)]
|
||||
fn no_skip_default() -> Option<()> {
|
||||
todo!()
|
||||
}
|
||||
fn no_skip_default_2() -> Option<()> {
|
||||
todo!()
|
||||
}
|
||||
}
|
||||
|
||||
struct B;
|
||||
impl LocalTrait for B {
|
||||
$0fn no_skip_default() -> Option<()> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn no_skip_default_2() -> Option<()> {
|
||||
todo!()
|
||||
}
|
||||
}
|
||||
"#,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn doc_hidden_default_impls_workspace_crates() {
|
||||
check_assist(
|
||||
add_missing_default_members,
|
||||
r#"
|
||||
//- /lib.rs crate:b new_source_root:local
|
||||
trait LocalTrait {
|
||||
#[doc(hidden)]
|
||||
fn no_skip_default() -> Option<()> {
|
||||
todo!()
|
||||
}
|
||||
fn no_skip_default_2() -> Option<()> {
|
||||
todo!()
|
||||
}
|
||||
}
|
||||
|
||||
//- /main.rs crate:a deps:b
|
||||
struct B;
|
||||
impl b::Loc$0alTrait for B {}
|
||||
"#,
|
||||
r#"
|
||||
struct B;
|
||||
impl b::LocalTrait for B {
|
||||
$0fn no_skip_default() -> Option<()> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn no_skip_default_2() -> Option<()> {
|
||||
todo!()
|
||||
}
|
||||
}
|
||||
"#,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -37,9 +37,9 @@ use crate::{utils, AssistContext, AssistId, AssistKind, Assists};
|
|||
pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
|
||||
let match_expr = ctx.find_node_at_offset_with_descend::<ast::MatchExpr>()?;
|
||||
let match_arm_list = match_expr.match_arm_list()?;
|
||||
let target_range = ctx.sema.original_range(match_expr.syntax()).range;
|
||||
let arm_list_range = ctx.sema.original_range_opt(match_arm_list.syntax())?;
|
||||
|
||||
if let None = cursor_at_trivial_match_arm_list(ctx, &match_expr, &match_arm_list) {
|
||||
if cursor_at_trivial_match_arm_list(ctx, &match_expr, &match_arm_list).is_none() {
|
||||
let arm_list_range = ctx.sema.original_range(match_arm_list.syntax()).range;
|
||||
let cursor_in_range = arm_list_range.contains_range(ctx.selection_trimmed());
|
||||
if cursor_in_range {
|
||||
|
@ -198,7 +198,7 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
|
|||
acc.add(
|
||||
AssistId("add_missing_match_arms", AssistKind::QuickFix),
|
||||
"Fill match arms",
|
||||
target_range,
|
||||
ctx.sema.original_range(match_expr.syntax()).range,
|
||||
|edit| {
|
||||
let new_match_arm_list = match_arm_list.clone_for_update();
|
||||
|
||||
|
@ -262,9 +262,8 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
|
|||
// Just replace the element that the original range came from
|
||||
let old_place = {
|
||||
// Find the original element
|
||||
let old_file_range = ctx.sema.original_range(match_arm_list.syntax());
|
||||
let file = ctx.sema.parse(old_file_range.file_id);
|
||||
let old_place = file.syntax().covering_element(old_file_range.range);
|
||||
let file = ctx.sema.parse(arm_list_range.file_id);
|
||||
let old_place = file.syntax().covering_element(arm_list_range.range);
|
||||
|
||||
// Make `old_place` mut
|
||||
match old_place {
|
||||
|
@ -1922,4 +1921,24 @@ fn foo(t: E) {
|
|||
}"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn not_applicable_when_match_arm_list_cannot_be_upmapped() {
|
||||
check_assist_not_applicable(
|
||||
add_missing_match_arms,
|
||||
r#"
|
||||
macro_rules! foo {
|
||||
($($t:tt)*) => {
|
||||
$($t)* {}
|
||||
}
|
||||
}
|
||||
|
||||
enum E { A }
|
||||
|
||||
fn main() {
|
||||
foo!(match E::A$0);
|
||||
}
|
||||
"#,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -42,7 +42,9 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
|
|||
let name_ref = ast::NameRef::cast(ident.parent()?)?;
|
||||
let def = match NameRefClass::classify(&ctx.sema, &name_ref)? {
|
||||
NameRefClass::Definition(def) => def,
|
||||
NameRefClass::FieldShorthand { .. } => return None,
|
||||
NameRefClass::FieldShorthand { .. } | NameRefClass::ExternCrateShorthand { .. } => {
|
||||
return None
|
||||
}
|
||||
};
|
||||
let fun = match def {
|
||||
Definition::Function(it) => it,
|
||||
|
|
|
@ -1,3 +1,6 @@
|
|||
use hir::Semantics;
|
||||
use ide_db::RootDatabase;
|
||||
use stdx::format_to;
|
||||
use syntax::ast::{self, AstNode};
|
||||
|
||||
use crate::{AssistContext, AssistId, AssistKind, Assists};
|
||||
|
@ -24,6 +27,7 @@ pub(crate) fn convert_two_arm_bool_match_to_matches_macro(
|
|||
acc: &mut Assists,
|
||||
ctx: &AssistContext<'_>,
|
||||
) -> Option<()> {
|
||||
use ArmBodyExpression::*;
|
||||
let match_expr = ctx.find_node_at_offset::<ast::MatchExpr>()?;
|
||||
let match_arm_list = match_expr.match_arm_list()?;
|
||||
let mut arms = match_arm_list.arms();
|
||||
|
@ -33,21 +37,20 @@ pub(crate) fn convert_two_arm_bool_match_to_matches_macro(
|
|||
cov_mark::hit!(non_two_arm_match);
|
||||
return None;
|
||||
}
|
||||
let first_arm_expr = first_arm.expr();
|
||||
let second_arm_expr = second_arm.expr();
|
||||
let first_arm_expr = first_arm.expr()?;
|
||||
let second_arm_expr = second_arm.expr()?;
|
||||
let first_arm_body = is_bool_literal_expr(&ctx.sema, &first_arm_expr)?;
|
||||
let second_arm_body = is_bool_literal_expr(&ctx.sema, &second_arm_expr)?;
|
||||
|
||||
let invert_matches = if is_bool_literal_expr(&first_arm_expr, true)
|
||||
&& is_bool_literal_expr(&second_arm_expr, false)
|
||||
{
|
||||
false
|
||||
} else if is_bool_literal_expr(&first_arm_expr, false)
|
||||
&& is_bool_literal_expr(&second_arm_expr, true)
|
||||
{
|
||||
true
|
||||
} else {
|
||||
if !matches!(
|
||||
(&first_arm_body, &second_arm_body),
|
||||
(Literal(true), Literal(false))
|
||||
| (Literal(false), Literal(true))
|
||||
| (Expression(_), Literal(false))
|
||||
) {
|
||||
cov_mark::hit!(non_invert_bool_literal_arms);
|
||||
return None;
|
||||
};
|
||||
}
|
||||
|
||||
let target_range = ctx.sema.original_range(match_expr.syntax()).range;
|
||||
let expr = match_expr.expr()?;
|
||||
|
@ -59,28 +62,55 @@ pub(crate) fn convert_two_arm_bool_match_to_matches_macro(
|
|||
|builder| {
|
||||
let mut arm_str = String::new();
|
||||
if let Some(pat) = &first_arm.pat() {
|
||||
arm_str += &pat.to_string();
|
||||
format_to!(arm_str, "{pat}");
|
||||
}
|
||||
if let Some(guard) = &first_arm.guard() {
|
||||
arm_str += &format!(" {guard}");
|
||||
}
|
||||
if invert_matches {
|
||||
builder.replace(target_range, format!("!matches!({expr}, {arm_str})"));
|
||||
} else {
|
||||
builder.replace(target_range, format!("matches!({expr}, {arm_str})"));
|
||||
}
|
||||
|
||||
let replace_with = match (first_arm_body, second_arm_body) {
|
||||
(Literal(true), Literal(false)) => {
|
||||
format!("matches!({expr}, {arm_str})")
|
||||
}
|
||||
(Literal(false), Literal(true)) => {
|
||||
format!("!matches!({expr}, {arm_str})")
|
||||
}
|
||||
(Expression(body_expr), Literal(false)) => {
|
||||
arm_str.push_str(match &first_arm.guard() {
|
||||
Some(_) => " && ",
|
||||
_ => " if ",
|
||||
});
|
||||
format!("matches!({expr}, {arm_str}{body_expr})")
|
||||
}
|
||||
_ => {
|
||||
unreachable!()
|
||||
}
|
||||
};
|
||||
builder.replace(target_range, replace_with);
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
fn is_bool_literal_expr(expr: &Option<ast::Expr>, expect_bool: bool) -> bool {
if let Some(ast::Expr::Literal(lit)) = expr {
enum ArmBodyExpression {
Literal(bool),
Expression(ast::Expr),
}

fn is_bool_literal_expr(
sema: &Semantics<'_, RootDatabase>,
expr: &ast::Expr,
) -> Option<ArmBodyExpression> {
if let ast::Expr::Literal(lit) = expr {
if let ast::LiteralKind::Bool(b) = lit.kind() {
return b == expect_bool;
return Some(ArmBodyExpression::Literal(b));
}
}

return false;
if !sema.type_of_expr(expr)?.original.is_bool() {
return None;
}

Some(ArmBodyExpression::Expression(expr.clone()))
}

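With arms now classified as `ArmBodyExpression::Literal` or `::Expression`, the assist rewrites three arm shapes into `matches!` calls. A compact sketch of that rewrite table, using plain strings in place of syntax nodes (illustrative stand-ins; only the no-guard case is shown):

// Illustrative rewrite table: which two-arm shapes become which macro call.
enum ArmBody {
    Literal(bool),
    Expression(String), // stand-in for an arbitrary bool-typed arm body
}

fn rewrite(scrutinee: &str, pat: &str, first: ArmBody, second: ArmBody) -> Option<String> {
    match (first, second) {
        // `pat => true, _ => false`  ->  matches!(expr, pat)
        (ArmBody::Literal(true), ArmBody::Literal(false)) => {
            Some(format!("matches!({scrutinee}, {pat})"))
        }
        // `pat => false, _ => true`  ->  !matches!(expr, pat)
        (ArmBody::Literal(false), ArmBody::Literal(true)) => {
            Some(format!("!matches!({scrutinee}, {pat})"))
        }
        // `pat => <bool expr>, _ => false`  ->  matches!(expr, pat if <bool expr>)
        (ArmBody::Expression(body), ArmBody::Literal(false)) => {
            Some(format!("matches!({scrutinee}, {pat} if {body})"))
        }
        _ => None,
    }
}

fn main() {
    let out = rewrite("a", "Some(val)", ArmBody::Expression("val == 3".into()), ArmBody::Literal(false));
    assert_eq!(out.as_deref(), Some("matches!(a, Some(val) if val == 3)"));
}
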
|
||||
#[cfg(test)]
|
||||
|
@ -121,21 +151,6 @@ fn foo(a: Option<u32>) -> bool {
|
|||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn not_applicable_non_bool_literal_arms() {
|
||||
cov_mark::check!(non_invert_bool_literal_arms);
|
||||
check_assist_not_applicable(
|
||||
convert_two_arm_bool_match_to_matches_macro,
|
||||
r#"
|
||||
fn foo(a: Option<u32>) -> bool {
|
||||
match a$0 {
|
||||
Some(val) => val == 3,
|
||||
_ => false
|
||||
}
|
||||
}
|
||||
"#,
|
||||
);
|
||||
}
|
||||
#[test]
|
||||
fn not_applicable_both_false_arms() {
|
||||
cov_mark::check!(non_invert_bool_literal_arms);
|
||||
|
@ -291,4 +306,40 @@ fn main() {
|
|||
}",
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn convert_non_literal_bool() {
|
||||
check_assist(
|
||||
convert_two_arm_bool_match_to_matches_macro,
|
||||
r#"
|
||||
fn main() {
|
||||
match 0$0 {
|
||||
a @ 0..15 => a == 0,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
"#,
|
||||
r#"
|
||||
fn main() {
|
||||
matches!(0, a @ 0..15 if a == 0)
|
||||
}
|
||||
"#,
|
||||
);
|
||||
check_assist(
|
||||
convert_two_arm_bool_match_to_matches_macro,
|
||||
r#"
|
||||
fn main() {
|
||||
match 0$0 {
|
||||
a @ 0..15 if thing() => a == 0,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
"#,
|
||||
r#"
|
||||
fn main() {
|
||||
matches!(0, a @ 0..15 if thing() && a == 0)
|
||||
}
|
||||
"#,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -114,7 +114,7 @@ fn collect_data(ident_pat: IdentPat, ctx: &AssistContext<'_>) -> Option<TupleDat
|
|||
let usages = ctx.sema.to_def(&ident_pat).map(|def| {
|
||||
Definition::Local(def)
|
||||
.usages(&ctx.sema)
|
||||
.in_scope(SearchScope::single_file(ctx.file_id()))
|
||||
.in_scope(&SearchScope::single_file(ctx.file_id()))
|
||||
.all()
|
||||
});
|
||||
|
||||
|
|
|
@ -120,7 +120,7 @@ fn find_parent_and_path(
|
|||
|
||||
fn def_is_referenced_in(def: Definition, ctx: &AssistContext<'_>) -> bool {
|
||||
let search_scope = SearchScope::single_file(ctx.file_id());
|
||||
def.usages(&ctx.sema).in_scope(search_scope).at_least_one()
|
||||
def.usages(&ctx.sema).in_scope(&search_scope).at_least_one()
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
|
|
|
@ -384,7 +384,7 @@ impl LocalUsages {
|
|||
Self(
|
||||
Definition::Local(var)
|
||||
.usages(&ctx.sema)
|
||||
.in_scope(SearchScope::single_file(ctx.file_id()))
|
||||
.in_scope(&SearchScope::single_file(ctx.file_id()))
|
||||
.all(),
|
||||
)
|
||||
}
|
||||
|
|
|
@ -478,7 +478,7 @@ impl Module {
|
|||
let selection_range = ctx.selection_trimmed();
|
||||
let curr_file_id = ctx.file_id();
|
||||
let search_scope = SearchScope::single_file(curr_file_id);
|
||||
let usage_res = def.usages(&ctx.sema).in_scope(search_scope).all();
|
||||
let usage_res = def.usages(&ctx.sema).in_scope(&search_scope).all();
|
||||
let file = ctx.sema.parse(curr_file_id);
|
||||
|
||||
let mut exists_inside_sel = false;
|
||||
|
|
|
@ -15,6 +15,7 @@ use crate::{
|
|||
// Generates default implementation from new method.
|
||||
//
|
||||
// ```
|
||||
// # //- minicore: default
|
||||
// struct Example { _inner: () }
|
||||
//
|
||||
// impl Example {
|
||||
|
@ -54,6 +55,7 @@ pub(crate) fn generate_default_from_new(acc: &mut Assists, ctx: &AssistContext<'
|
|||
}
|
||||
|
||||
let impl_ = fn_node.syntax().ancestors().find_map(ast::Impl::cast)?;
|
||||
let self_ty = impl_.self_ty()?;
|
||||
if is_default_implemented(ctx, &impl_) {
|
||||
cov_mark::hit!(default_block_is_already_present);
|
||||
cov_mark::hit!(struct_in_module_with_default);
|
||||
|
@ -70,15 +72,19 @@ pub(crate) fn generate_default_from_new(acc: &mut Assists, ctx: &AssistContext<'
|
|||
let default_code = " fn default() -> Self {
|
||||
Self::new()
|
||||
}";
|
||||
let code = generate_trait_impl_text_from_impl(&impl_, "Default", default_code);
|
||||
let code = generate_trait_impl_text_from_impl(&impl_, self_ty, "Default", default_code);
|
||||
builder.insert(insert_location.end(), code);
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
// FIXME: based on from utils::generate_impl_text_inner
|
||||
fn generate_trait_impl_text_from_impl(impl_: &ast::Impl, trait_text: &str, code: &str) -> String {
|
||||
let impl_ty = impl_.self_ty().unwrap();
|
||||
fn generate_trait_impl_text_from_impl(
|
||||
impl_: &ast::Impl,
|
||||
self_ty: ast::Type,
|
||||
trait_text: &str,
|
||||
code: &str,
|
||||
) -> String {
|
||||
let generic_params = impl_.generic_param_list().map(|generic_params| {
|
||||
let lifetime_params =
|
||||
generic_params.lifetime_params().map(ast::GenericParam::LifetimeParam);
|
||||
|
@ -109,7 +115,7 @@ fn generate_trait_impl_text_from_impl(impl_: &ast::Impl, trait_text: &str, code:
|
|||
if let Some(generic_params) = &generic_params {
|
||||
format_to!(buf, "{generic_params}")
|
||||
}
|
||||
format_to!(buf, " {trait_text} for {impl_ty}");
|
||||
format_to!(buf, " {trait_text} for {self_ty}");
|
||||
|
||||
match impl_.where_clause() {
|
||||
Some(where_clause) => {
|
||||
|
@ -136,7 +142,9 @@ fn is_default_implemented(ctx: &AssistContext<'_>, impl_: &Impl) -> bool {
|
|||
let default = FamousDefs(&ctx.sema, krate).core_default_Default();
|
||||
let default_trait = match default {
|
||||
Some(value) => value,
|
||||
None => return false,
|
||||
// Return `true` to avoid providing the assist because it makes no sense
|
||||
// to impl `Default` when it's missing.
|
||||
None => return true,
|
||||
};
|
||||
|
||||
ty.impls_trait(db, default_trait, &[])
|
||||
|
@ -480,6 +488,7 @@ impl Example {
|
|||
check_assist_not_applicable(
|
||||
generate_default_from_new,
|
||||
r#"
|
||||
//- minicore: default
|
||||
struct Example { _inner: () }
|
||||
|
||||
impl Example {
|
||||
|
@ -655,4 +664,23 @@ mod test {
|
|||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn not_applicable_when_default_lang_item_is_missing() {
|
||||
check_assist_not_applicable(
|
||||
generate_default_from_new,
|
||||
r#"
|
||||
struct S;
|
||||
impl S {
|
||||
fn new$0() -> Self {}
|
||||
}
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn not_applicable_for_missing_self_ty() {
|
||||
// Regression test for #15398.
|
||||
check_assist_not_applicable(generate_default_from_new, "impl { fn new$0() -> Self {} }");
|
||||
}
|
||||
}
|
||||
|
|
|
@ -213,7 +213,9 @@ impl Struct {
|
|||
// continue;
|
||||
// }
|
||||
let signature = delegee.signature(db);
|
||||
let delegate = generate_impl(ctx, self, &field.ty, &field.name, delegee);
|
||||
let Some(delegate) = generate_impl(ctx, self, &field.ty, &field.name, delegee) else {
|
||||
continue;
|
||||
};
|
||||
|
||||
acc.add_group(
|
||||
&GroupLabel("Delegate trait impl for field...".to_owned()),
|
||||
|
@ -237,7 +239,7 @@ fn generate_impl(
|
|||
field_ty: &ast::Type,
|
||||
field_name: &String,
|
||||
delegee: &Delegee,
|
||||
) -> ast::Impl {
|
||||
) -> Option<ast::Impl> {
|
||||
let delegate: ast::Impl;
|
||||
let source: ast::Impl;
|
||||
let genpar: Option<ast::GenericParamList>;
|
||||
|
@ -247,7 +249,7 @@ fn generate_impl(
|
|||
|
||||
match delegee {
|
||||
Delegee::Bound(delegee) => {
|
||||
let in_file = ctx.sema.source(delegee.0.to_owned()).unwrap();
|
||||
let in_file = ctx.sema.source(delegee.0.to_owned())?;
|
||||
let source: ast::Trait = in_file.value;
|
||||
|
||||
delegate = make::impl_trait(
|
||||
|
@ -293,15 +295,15 @@ fn generate_impl(
|
|||
None => {}
|
||||
};
|
||||
|
||||
let target = ctx.sema.scope(strukt.strukt.syntax()).unwrap();
|
||||
let source = ctx.sema.scope(source.syntax()).unwrap();
|
||||
let target = ctx.sema.scope(strukt.strukt.syntax())?;
|
||||
let source = ctx.sema.scope(source.syntax())?;
|
||||
|
||||
let transform =
|
||||
PathTransform::trait_impl(&target, &source, delegee.0, delegate.clone());
|
||||
transform.apply(&delegate.syntax());
|
||||
}
|
||||
Delegee::Impls(delegee) => {
|
||||
let in_file = ctx.sema.source(delegee.1.to_owned()).unwrap();
|
||||
let in_file = ctx.sema.source(delegee.1.to_owned())?;
|
||||
source = in_file.value;
|
||||
delegate = make::impl_trait(
|
||||
delegee.0.is_unsafe(db),
|
||||
|
@ -341,8 +343,8 @@ fn generate_impl(
|
|||
}
|
||||
});
|
||||
|
||||
let target = ctx.sema.scope(strukt.strukt.syntax()).unwrap();
|
||||
let source = ctx.sema.scope(source.syntax()).unwrap();
|
||||
let target = ctx.sema.scope(strukt.strukt.syntax())?;
|
||||
let source = ctx.sema.scope(source.syntax())?;
|
||||
|
||||
let transform =
|
||||
PathTransform::trait_impl(&target, &source, delegee.0, delegate.clone());
|
||||
|
@ -350,7 +352,7 @@ fn generate_impl(
|
|||
}
|
||||
}
|
||||
|
||||
delegate
|
||||
Some(delegate)
|
||||
}
|
||||
|
||||
fn process_assoc_item(
|
||||
|
@ -359,19 +361,19 @@ fn process_assoc_item(
|
|||
base_name: &str,
|
||||
) -> Option<ast::AssocItem> {
|
||||
match item {
|
||||
AssocItem::Const(c) => Some(const_assoc_item(c, qual_path_ty)),
|
||||
AssocItem::Fn(f) => Some(func_assoc_item(f, qual_path_ty, base_name)),
|
||||
AssocItem::Const(c) => const_assoc_item(c, qual_path_ty),
|
||||
AssocItem::Fn(f) => func_assoc_item(f, qual_path_ty, base_name),
|
||||
AssocItem::MacroCall(_) => {
|
||||
// FIXME : Handle MacroCall case.
|
||||
// return Some(macro_assoc_item(mac, qual_path_ty));
|
||||
// macro_assoc_item(mac, qual_path_ty)
|
||||
None
|
||||
}
|
||||
AssocItem::TypeAlias(ta) => Some(ty_assoc_item(ta, qual_path_ty)),
|
||||
AssocItem::TypeAlias(ta) => ty_assoc_item(ta, qual_path_ty),
|
||||
}
|
||||
}
|
||||
|
||||
fn const_assoc_item(item: syntax::ast::Const, qual_path_ty: ast::Path) -> AssocItem {
|
||||
let path_expr_segment = make::path_from_text(item.name().unwrap().to_string().as_str());
|
||||
fn const_assoc_item(item: syntax::ast::Const, qual_path_ty: ast::Path) -> Option<AssocItem> {
|
||||
let path_expr_segment = make::path_from_text(item.name()?.to_string().as_str());
|
||||
|
||||
// We want rhs of the const assignment to be a qualified path
|
||||
// The general case for const assigment can be found [here](`https://doc.rust-lang.org/reference/items/constant-items.html`)
|
||||
|
@ -380,19 +382,19 @@ fn const_assoc_item(item: syntax::ast::Const, qual_path_ty: ast::Path) -> AssocI
|
|||
// FIXME : We can't rely on `make::path_qualified` for now but it would be nice to replace the following with it.
|
||||
// make::path_qualified(qual_path_ty, path_expr_segment.as_single_segment().unwrap());
|
||||
let qualpath = qualpath(qual_path_ty, path_expr_segment);
|
||||
let inner = make::item_const(
|
||||
item.visibility(),
|
||||
item.name().unwrap(),
|
||||
item.ty().unwrap(),
|
||||
make::expr_path(qualpath),
|
||||
)
|
||||
.clone_for_update();
|
||||
let inner =
|
||||
make::item_const(item.visibility(), item.name()?, item.ty()?, make::expr_path(qualpath))
|
||||
.clone_for_update();
|
||||
|
||||
AssocItem::Const(inner)
|
||||
Some(AssocItem::Const(inner))
|
||||
}
|
||||
|
||||
fn func_assoc_item(item: syntax::ast::Fn, qual_path_ty: Path, base_name: &str) -> AssocItem {
|
||||
let path_expr_segment = make::path_from_text(item.name().unwrap().to_string().as_str());
|
||||
fn func_assoc_item(
|
||||
item: syntax::ast::Fn,
|
||||
qual_path_ty: Path,
|
||||
base_name: &str,
|
||||
) -> Option<AssocItem> {
|
||||
let path_expr_segment = make::path_from_text(item.name()?.to_string().as_str());
|
||||
let qualpath = qualpath(qual_path_ty, path_expr_segment);
|
||||
|
||||
let call = match item.param_list() {
|
||||
|
@ -415,7 +417,7 @@ fn func_assoc_item(item: syntax::ast::Fn, qual_path_ty: Path, base_name: &str) -
|
|||
if param_count > 0 {
|
||||
// Add SelfParam and a TOKEN::COMMA
|
||||
ted::insert_all(
|
||||
Position::after(args.l_paren_token().unwrap()),
|
||||
Position::after(args.l_paren_token()?),
|
||||
vec![
|
||||
NodeOrToken::Node(tail_expr_self.syntax().clone_for_update()),
|
||||
NodeOrToken::Token(make::token(SyntaxKind::WHITESPACE)),
|
||||
|
@ -425,7 +427,7 @@ fn func_assoc_item(item: syntax::ast::Fn, qual_path_ty: Path, base_name: &str) -
|
|||
} else {
|
||||
// Add SelfParam only
|
||||
ted::insert(
|
||||
Position::after(args.l_paren_token().unwrap()),
|
||||
Position::after(args.l_paren_token()?),
|
||||
NodeOrToken::Node(tail_expr_self.syntax().clone_for_update()),
|
||||
);
|
||||
}
|
||||
|
@ -444,10 +446,10 @@ fn func_assoc_item(item: syntax::ast::Fn, qual_path_ty: Path, base_name: &str) -
|
|||
let body = make::block_expr(vec![], Some(call)).clone_for_update();
|
||||
let func = make::fn_(
|
||||
item.visibility(),
|
||||
item.name().unwrap(),
|
||||
item.name()?,
|
||||
item.generic_param_list(),
|
||||
item.where_clause(),
|
||||
item.param_list().unwrap(),
|
||||
item.param_list()?,
|
||||
body,
|
||||
item.ret_type(),
|
||||
item.async_token().is_some(),
|
||||
|
@ -456,14 +458,14 @@ fn func_assoc_item(item: syntax::ast::Fn, qual_path_ty: Path, base_name: &str) -
|
|||
)
|
||||
.clone_for_update();
|
||||
|
||||
AssocItem::Fn(func.indent(edit::IndentLevel(1)).clone_for_update())
|
||||
Some(AssocItem::Fn(func.indent(edit::IndentLevel(1)).clone_for_update()))
|
||||
}
|
||||
|
||||
fn ty_assoc_item(item: syntax::ast::TypeAlias, qual_path_ty: Path) -> AssocItem {
|
||||
let path_expr_segment = make::path_from_text(item.name().unwrap().to_string().as_str());
|
||||
fn ty_assoc_item(item: syntax::ast::TypeAlias, qual_path_ty: Path) -> Option<AssocItem> {
|
||||
let path_expr_segment = make::path_from_text(item.name()?.to_string().as_str());
|
||||
let qualpath = qualpath(qual_path_ty, path_expr_segment);
|
||||
let ty = make::ty_path(qualpath);
|
||||
let ident = item.name().unwrap().to_string();
|
||||
let ident = item.name()?.to_string();
|
||||
|
||||
let alias = make::ty_alias(
|
||||
ident.as_str(),
|
||||
|
@ -474,7 +476,7 @@ fn ty_assoc_item(item: syntax::ast::TypeAlias, qual_path_ty: Path) -> AssocItem
|
|||
)
|
||||
.clone_for_update();
|
||||
|
||||
AssocItem::TypeAlias(alias)
|
||||
Some(AssocItem::TypeAlias(alias))
|
||||
}
|
||||
|
||||
fn qualpath(qual_path_ty: ast::Path, path_expr_seg: ast::Path) -> ast::Path {
|
||||
|
|
|
@ -1878,7 +1878,6 @@ where
|
|||
|
||||
#[test]
|
||||
fn add_function_with_fn_arg() {
|
||||
// FIXME: The argument in `bar` is wrong.
|
||||
check_assist(
|
||||
generate_function,
|
||||
r"
|
||||
|
@ -1899,7 +1898,7 @@ fn foo() {
|
|||
bar(Baz::new);
|
||||
}
|
||||
|
||||
fn bar(new: fn) ${0:-> _} {
|
||||
fn bar(new: fn() -> Baz) ${0:-> _} {
|
||||
todo!()
|
||||
}
|
||||
",
|
||||
|
|
|
@ -80,7 +80,7 @@ pub(crate) fn inline_into_callers(acc: &mut Assists, ctx: &AssistContext<'_>) ->
|
|||
|
||||
let is_recursive_fn = usages
|
||||
.clone()
|
||||
.in_scope(SearchScope::file_range(FileRange {
|
||||
.in_scope(&SearchScope::file_range(FileRange {
|
||||
file_id: def_file,
|
||||
range: func_body.syntax().text_range(),
|
||||
}))
|
||||
|
|
|
@ -37,11 +37,10 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
|
|||
pub(crate) fn inline_macro(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
|
||||
let unexpanded = ctx.find_node_at_offset::<ast::MacroCall>()?;
|
||||
let expanded = insert_ws_into(ctx.sema.expand(&unexpanded)?.clone_for_update());
|
||||
|
||||
let text_range = unexpanded.syntax().text_range();
|
||||
|
||||
acc.add(
|
||||
AssistId("inline_macro", AssistKind::RefactorRewrite),
|
||||
AssistId("inline_macro", AssistKind::RefactorInline),
|
||||
format!("Inline macro"),
|
||||
text_range,
|
||||
|builder| builder.replace(text_range, expanded.to_string()),
|
||||
|
|
|
@ -82,17 +82,19 @@ pub(crate) fn move_const_to_impl(acc: &mut Assists, ctx: &AssistContext<'_>) ->
|
|||
return None;
|
||||
}
|
||||
|
||||
let usages =
|
||||
Definition::Const(def).usages(&ctx.sema).in_scope(SearchScope::file_range(FileRange {
|
||||
file_id: ctx.file_id(),
|
||||
range: parent_fn.syntax().text_range(),
|
||||
}));
|
||||
|
||||
acc.add(
|
||||
AssistId("move_const_to_impl", crate::AssistKind::RefactorRewrite),
|
||||
"Move const to impl block",
|
||||
const_.syntax().text_range(),
|
||||
|builder| {
|
||||
let usages = Definition::Const(def)
|
||||
.usages(&ctx.sema)
|
||||
.in_scope(&SearchScope::file_range(FileRange {
|
||||
file_id: ctx.file_id(),
|
||||
range: parent_fn.syntax().text_range(),
|
||||
}))
|
||||
.all();
|
||||
|
||||
let range_to_delete = match const_.syntax().next_sibling_or_token() {
|
||||
Some(s) if matches!(s.kind(), SyntaxKind::WHITESPACE) => {
|
||||
// Remove following whitespaces too.
|
||||
|
@ -103,7 +105,7 @@ pub(crate) fn move_const_to_impl(acc: &mut Assists, ctx: &AssistContext<'_>) ->
|
|||
builder.delete(range_to_delete);
|
||||
|
||||
let const_ref = format!("Self::{}", name.display(ctx.db()));
|
||||
for range in usages.all().file_ranges().map(|it| it.range) {
|
||||
for range in usages.file_ranges().map(|it| it.range) {
|
||||
builder.replace(range, const_ref.clone());
|
||||
}
|
||||
|
||||
|
|
739
crates/ide-assists/src/handlers/remove_unused_imports.rs
Normal file
|
@ -0,0 +1,739 @@
|
|||
use std::collections::{hash_map::Entry, HashMap};

use hir::{InFile, Module, ModuleSource};
use ide_db::{
base_db::FileRange,
defs::Definition,
search::{FileReference, ReferenceCategory, SearchScope},
RootDatabase,
};
use syntax::{ast, AstNode};
use text_edit::TextRange;

use crate::{AssistContext, AssistId, AssistKind, Assists};

// Assist: remove_unused_imports
//
// Removes any use statements in the current selection that are unused.
//
// ```
// struct X();
// mod foo {
// use super::X$0;
// }
// ```
// ->
// ```
// struct X();
// mod foo {
// }
// ```
pub(crate) fn remove_unused_imports(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
// First, grab the uses that intersect with the current selection.
let selected_el = match ctx.covering_element() {
syntax::NodeOrToken::Node(n) => n,
syntax::NodeOrToken::Token(t) => t.parent()?,
};

// This applies to all uses that are selected, or are ancestors of our selection.
let uses_up = selected_el.ancestors().skip(1).filter_map(ast::Use::cast);
let uses_down = selected_el
.descendants()
.filter(|x| x.text_range().intersect(ctx.selection_trimmed()).is_some())
.filter_map(ast::Use::cast);
let uses = uses_up.chain(uses_down).collect::<Vec<_>>();

// Maps use nodes to the scope that we should search through to find
let mut search_scopes = HashMap::<Module, Vec<SearchScope>>::new();
|
||||
// iterator over all unused use trees
|
||||
let mut unused = uses
|
||||
.into_iter()
|
||||
.flat_map(|u| u.syntax().descendants().filter_map(ast::UseTree::cast))
|
||||
.filter(|u| u.use_tree_list().is_none())
|
||||
.filter_map(|u| {
|
||||
// Find any uses trees that are unused
|
||||
|
||||
let use_module = ctx.sema.scope(&u.syntax()).map(|s| s.module())?;
|
||||
let scope = match search_scopes.entry(use_module) {
|
||||
Entry::Occupied(o) => o.into_mut(),
|
||||
Entry::Vacant(v) => v.insert(module_search_scope(ctx.db(), use_module)),
|
||||
};
|
||||
|
||||
// Gets the path associated with this use tree. If there isn't one, then ignore this use tree.
|
||||
let path = if let Some(path) = u.path() {
|
||||
path
|
||||
} else if u.star_token().is_some() {
|
||||
// This case maps to the situation where the * token is braced.
|
||||
// In this case, the parent use tree's path is the one we should use to resolve the glob.
|
||||
match u.syntax().ancestors().skip(1).find_map(ast::UseTree::cast) {
|
||||
Some(parent_u) if parent_u.path().is_some() => parent_u.path().unwrap(),
|
||||
_ => return None,
|
||||
}
|
||||
} else {
|
||||
return None;
|
||||
};
|
||||
|
||||
// Get the actual definition associated with this use item.
|
||||
let res = match ctx.sema.resolve_path(&path) {
|
||||
Some(x) => x,
|
||||
None => {
|
||||
return None;
|
||||
}
|
||||
};
|
||||
|
||||
let def = match res {
|
||||
hir::PathResolution::Def(d) => Definition::from(d),
|
||||
_ => return None,
|
||||
};
|
||||
|
||||
if u.star_token().is_some() {
|
||||
// Check if any of the children of this module are used
|
||||
let def_mod = match def {
|
||||
Definition::Module(module) => module,
|
||||
_ => return None,
|
||||
};
|
||||
|
||||
if !def_mod
|
||||
.scope(ctx.db(), Some(use_module))
|
||||
.iter()
|
||||
.filter_map(|(_, x)| match x {
|
||||
hir::ScopeDef::ModuleDef(d) => Some(Definition::from(*d)),
|
||||
_ => None,
|
||||
})
|
||||
.any(|d| used_once_in_scope(ctx, d, scope))
|
||||
{
|
||||
return Some(u);
|
||||
}
|
||||
} else if let Definition::Trait(ref t) = def {
|
||||
// If the trait or any item is used.
|
||||
if !std::iter::once(def)
|
||||
.chain(t.items(ctx.db()).into_iter().map(Definition::from))
|
||||
.any(|d| used_once_in_scope(ctx, d, scope))
|
||||
{
|
||||
return Some(u);
|
||||
}
|
||||
} else {
|
||||
if !used_once_in_scope(ctx, def, &scope) {
|
||||
return Some(u);
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
})
|
||||
.peekable();
|
||||
|
||||
// Peek so we terminate early if an unused use is found. Only do the rest of the work if the user selects the assist.
|
||||
if unused.peek().is_some() {
|
||||
acc.add(
|
||||
AssistId("remove_unused_imports", AssistKind::QuickFix),
|
||||
"Remove all the unused imports",
|
||||
selected_el.text_range(),
|
||||
|builder| {
|
||||
let unused: Vec<ast::UseTree> = unused.map(|x| builder.make_mut(x)).collect();
|
||||
for node in unused {
|
||||
node.remove_recursive();
|
||||
}
|
||||
},
|
||||
)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn used_once_in_scope(ctx: &AssistContext<'_>, def: Definition, scopes: &Vec<SearchScope>) -> bool {
let mut found = false;

for scope in scopes {
let mut search_non_import = |_, r: FileReference| {
// The import itself is a use; we must skip that.
if r.category != Some(ReferenceCategory::Import) {
found = true;
true
} else {
false
}
};
def.usages(&ctx.sema).in_scope(scope).search(&mut search_non_import);
if found {
break;
}
}

found
}

/// Build a search scope spanning the given module but none of its submodules.
fn module_search_scope(db: &RootDatabase, module: hir::Module) -> Vec<SearchScope> {
let (file_id, range) = {
let InFile { file_id, value } = module.definition_source(db);
if let Some((file_id, call_source)) = file_id.original_call_node(db) {
(file_id, Some(call_source.text_range()))
} else {
(
file_id.original_file(db),
match value {
ModuleSource::SourceFile(_) => None,
ModuleSource::Module(it) => Some(it.syntax().text_range()),
ModuleSource::BlockExpr(it) => Some(it.syntax().text_range()),
},
)
}
};

fn split_at_subrange(first: TextRange, second: TextRange) -> (TextRange, Option<TextRange>) {
let intersect = first.intersect(second);
if let Some(intersect) = intersect {
let start_range = TextRange::new(first.start(), intersect.start());

if intersect.end() < first.end() {
(start_range, Some(TextRange::new(intersect.end(), first.end())))
} else {
(start_range, None)
}
} else {
(first, None)
}
}

let mut scopes = Vec::new();
if let Some(range) = range {
let mut ranges = vec![range];

for child in module.children(db) {
let rng = match child.definition_source(db).value {
ModuleSource::SourceFile(_) => continue,
ModuleSource::Module(it) => it.syntax().text_range(),
ModuleSource::BlockExpr(_) => continue,
};
let mut new_ranges = Vec::new();
for old_range in ranges.iter_mut() {
let split = split_at_subrange(old_range.clone(), rng);
*old_range = split.0;
new_ranges.extend(split.1);
}

ranges.append(&mut new_ranges);
}

for range in ranges {
scopes.push(SearchScope::file_range(FileRange { file_id, range }));
}
} else {
scopes.push(SearchScope::single_file(file_id));
}

scopes
}

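`split_at_subrange` above carves each child module's range out of the parent module's range so the search scope skips submodules. A small worked example with a numeric stand-in for `TextRange` (not the real type) showing both outcomes:

// Stand-in for text_edit::TextRange, only to illustrate split_at_subrange's behavior.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Range { start: u32, end: u32 }

fn split_at_subrange(first: Range, second: Range) -> (Range, Option<Range>) {
    // Overlap of the two ranges, if any.
    let start = first.start.max(second.start);
    let end = first.end.min(second.end);
    if start < end {
        let head = Range { start: first.start, end: start };
        // If the overlap ends before `first` does, a tail range remains after it.
        if end < first.end {
            (head, Some(Range { start: end, end: first.end }))
        } else {
            (head, None)
        }
    } else {
        (first, None)
    }
}

fn main() {
    // Child module range 40..60 inside parent 0..100: the parent is split around it.
    assert_eq!(
        split_at_subrange(Range { start: 0, end: 100 }, Range { start: 40, end: 60 }),
        (Range { start: 0, end: 40 }, Some(Range { start: 60, end: 100 }))
    );
    // No overlap: the parent range is returned untouched.
    assert_eq!(
        split_at_subrange(Range { start: 0, end: 30 }, Range { start: 40, end: 60 }),
        (Range { start: 0, end: 30 }, None)
    );
}
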
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::tests::{check_assist, check_assist_not_applicable};
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn remove_unused() {
|
||||
check_assist(
|
||||
remove_unused_imports,
|
||||
r#"
|
||||
struct X();
|
||||
struct Y();
|
||||
mod z {
|
||||
$0use super::X;
|
||||
use super::Y;$0
|
||||
}
|
||||
"#,
|
||||
r#"
|
||||
struct X();
|
||||
struct Y();
|
||||
mod z {
|
||||
}
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn remove_unused_is_precise() {
|
||||
check_assist(
|
||||
remove_unused_imports,
|
||||
r#"
|
||||
struct X();
|
||||
mod z {
|
||||
$0use super::X;$0
|
||||
|
||||
fn w() {
|
||||
struct X();
|
||||
let x = X();
|
||||
}
|
||||
}
|
||||
"#,
|
||||
r#"
|
||||
struct X();
|
||||
mod z {
|
||||
|
||||
fn w() {
|
||||
struct X();
|
||||
let x = X();
|
||||
}
|
||||
}
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn trait_name_use_is_use() {
|
||||
check_assist_not_applicable(
|
||||
remove_unused_imports,
|
||||
r#"
|
||||
struct X();
|
||||
trait Y {
|
||||
fn f();
|
||||
}
|
||||
|
||||
impl Y for X {
|
||||
fn f() {}
|
||||
}
|
||||
mod z {
|
||||
$0use super::X;
|
||||
use super::Y;$0
|
||||
|
||||
fn w() {
|
||||
X::f();
|
||||
}
|
||||
}
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn trait_item_use_is_use() {
|
||||
check_assist_not_applicable(
|
||||
remove_unused_imports,
|
||||
r#"
|
||||
struct X();
|
||||
trait Y {
|
||||
fn f(self);
|
||||
}
|
||||
|
||||
impl Y for X {
|
||||
fn f(self) {}
|
||||
}
|
||||
mod z {
|
||||
$0use super::X;
|
||||
use super::Y;$0
|
||||
|
||||
fn w() {
|
||||
let x = X();
|
||||
x.f();
|
||||
}
|
||||
}
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ranamed_trait_item_use_is_use() {
|
||||
check_assist_not_applicable(
|
||||
remove_unused_imports,
|
||||
r#"
|
||||
struct X();
|
||||
trait Y {
|
||||
fn f(self);
|
||||
}
|
||||
|
||||
impl Y for X {
|
||||
fn f(self) {}
|
||||
}
|
||||
mod z {
|
||||
$0use super::X;
|
||||
use super::Y as Z;$0
|
||||
|
||||
fn w() {
|
||||
let x = X();
|
||||
x.f();
|
||||
}
|
||||
}
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ranamed_underscore_trait_item_use_is_use() {
|
||||
check_assist_not_applicable(
|
||||
remove_unused_imports,
|
||||
r#"
|
||||
struct X();
|
||||
trait Y {
|
||||
fn f(self);
|
||||
}
|
||||
|
||||
impl Y for X {
|
||||
fn f(self) {}
|
||||
}
|
||||
mod z {
|
||||
$0use super::X;
|
||||
use super::Y as _;$0
|
||||
|
||||
fn w() {
|
||||
let x = X();
|
||||
x.f();
|
||||
}
|
||||
}
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn dont_remove_used() {
|
||||
check_assist_not_applicable(
|
||||
remove_unused_imports,
|
||||
r#"
|
||||
struct X();
|
||||
struct Y();
|
||||
mod z {
|
||||
$0use super::X;
|
||||
use super::Y;$0
|
||||
|
||||
fn w() {
|
||||
let x = X();
|
||||
let y = Y();
|
||||
}
|
||||
}
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn remove_unused_in_braces() {
|
||||
check_assist(
|
||||
remove_unused_imports,
|
||||
r#"
|
||||
struct X();
|
||||
struct Y();
|
||||
mod z {
|
||||
$0use super::{X, Y};$0
|
||||
|
||||
fn w() {
|
||||
let x = X();
|
||||
}
|
||||
}
|
||||
"#,
|
||||
r#"
|
||||
struct X();
|
||||
struct Y();
|
||||
mod z {
|
||||
use super::{X};
|
||||
|
||||
fn w() {
|
||||
let x = X();
|
||||
}
|
||||
}
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn remove_unused_under_cursor() {
|
||||
check_assist(
|
||||
remove_unused_imports,
|
||||
r#"
|
||||
struct X();
|
||||
mod z {
|
||||
use super::X$0;
|
||||
}
|
||||
"#,
|
||||
r#"
|
||||
struct X();
|
||||
mod z {
|
||||
}
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn remove_multi_use_block() {
|
||||
check_assist(
|
||||
remove_unused_imports,
|
||||
r#"
|
||||
struct X();
|
||||
$0mod y {
|
||||
use super::X;
|
||||
}
|
||||
mod z {
|
||||
use super::X;
|
||||
}$0
|
||||
"#,
|
||||
r#"
|
||||
struct X();
|
||||
mod y {
|
||||
}
|
||||
mod z {
|
||||
}
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn remove_nested() {
|
||||
check_assist(
|
||||
remove_unused_imports,
|
||||
r#"
|
||||
struct X();
|
||||
mod y {
|
||||
struct Y();
|
||||
mod z {
|
||||
use crate::{X, y::Y}$0;
|
||||
fn f() {
|
||||
let x = X();
|
||||
}
|
||||
}
|
||||
}
|
||||
"#,
|
||||
r#"
|
||||
struct X();
|
||||
mod y {
|
||||
struct Y();
|
||||
mod z {
|
||||
use crate::{X};
|
||||
fn f() {
|
||||
let x = X();
|
||||
}
|
||||
}
|
||||
}
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn remove_nested_first_item() {
|
||||
check_assist(
|
||||
remove_unused_imports,
|
||||
r#"
|
||||
struct X();
|
||||
mod y {
|
||||
struct Y();
|
||||
mod z {
|
||||
use crate::{X, y::Y}$0;
|
||||
fn f() {
|
||||
let y = Y();
|
||||
}
|
||||
}
|
||||
}
|
||||
"#,
|
||||
r#"
|
||||
struct X();
|
||||
mod y {
|
||||
struct Y();
|
||||
mod z {
|
||||
use crate::{y::Y};
|
||||
fn f() {
|
||||
let y = Y();
|
||||
}
|
||||
}
|
||||
}
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn remove_nested_all_unused() {
|
||||
check_assist(
|
||||
remove_unused_imports,
|
||||
r#"
|
||||
struct X();
|
||||
mod y {
|
||||
struct Y();
|
||||
mod z {
|
||||
use crate::{X, y::Y}$0;
|
||||
}
|
||||
}
|
||||
"#,
|
||||
r#"
|
||||
struct X();
|
||||
mod y {
|
||||
struct Y();
|
||||
mod z {
|
||||
}
|
||||
}
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn remove_unused_glob() {
|
||||
check_assist(
|
||||
remove_unused_imports,
|
||||
r#"
|
||||
struct X();
|
||||
struct Y();
|
||||
mod z {
|
||||
use super::*$0;
|
||||
}
|
||||
"#,
|
||||
r#"
|
||||
struct X();
|
||||
struct Y();
|
||||
mod z {
|
||||
}
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn remove_unused_braced_glob() {
|
||||
check_assist(
|
||||
remove_unused_imports,
|
||||
r#"
|
||||
struct X();
|
||||
struct Y();
|
||||
mod z {
|
||||
use super::{*}$0;
|
||||
}
|
||||
"#,
|
||||
r#"
|
||||
struct X();
|
||||
struct Y();
|
||||
mod z {
|
||||
}
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn dont_remove_used_glob() {
|
||||
check_assist_not_applicable(
|
||||
remove_unused_imports,
|
||||
r#"
|
||||
struct X();
|
||||
struct Y();
|
||||
mod z {
|
||||
use super::*$0;
|
||||
|
||||
fn f() {
|
||||
let x = X();
|
||||
}
|
||||
}
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn only_remove_from_selection() {
|
||||
check_assist(
|
||||
remove_unused_imports,
|
||||
r#"
|
||||
struct X();
|
||||
struct Y();
|
||||
mod z {
|
||||
$0use super::X;$0
|
||||
use super::Y;
|
||||
}
|
||||
mod w {
|
||||
use super::Y;
|
||||
}
|
||||
"#,
|
||||
r#"
|
||||
struct X();
|
||||
struct Y();
|
||||
mod z {
|
||||
use super::Y;
|
||||
}
|
||||
mod w {
|
||||
use super::Y;
|
||||
}
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_several_files() {
|
||||
check_assist(
|
||||
remove_unused_imports,
|
||||
r#"
|
||||
//- /foo.rs
|
||||
pub struct X();
|
||||
pub struct Y();
|
||||
|
||||
//- /main.rs
|
||||
$0use foo::X;
|
||||
use foo::Y;
|
||||
$0
|
||||
mod foo;
|
||||
mod z {
|
||||
use crate::foo::X;
|
||||
}
|
||||
"#,
|
||||
r#"
|
||||
|
||||
mod foo;
|
||||
mod z {
|
||||
use crate::foo::X;
|
||||
}
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn use_in_submodule_doesnt_count() {
|
||||
check_assist(
|
||||
remove_unused_imports,
|
||||
r#"
|
||||
struct X();
|
||||
mod z {
|
||||
use super::X$0;
|
||||
|
||||
mod w {
|
||||
use crate::X;
|
||||
|
||||
fn f() {
|
||||
let x = X();
|
||||
}
|
||||
}
|
||||
}
|
||||
"#,
|
||||
r#"
|
||||
struct X();
|
||||
mod z {
|
||||
|
||||
mod w {
|
||||
use crate::X;
|
||||
|
||||
fn f() {
|
||||
let x = X();
|
||||
}
|
||||
}
|
||||
}
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn use_in_submodule_file_doesnt_count() {
|
||||
check_assist(
|
||||
remove_unused_imports,
|
||||
r#"
|
||||
//- /z/foo.rs
|
||||
use crate::X;
|
||||
fn f() {
|
||||
let x = X();
|
||||
}
|
||||
|
||||
//- /main.rs
|
||||
pub struct X();
|
||||
|
||||
mod z {
|
||||
use crate::X$0;
|
||||
mod foo;
|
||||
}
|
||||
"#,
|
||||
r#"
|
||||
pub struct X();
|
||||
|
||||
mod z {
|
||||
mod foo;
|
||||
}
|
||||
"#,
|
||||
);
|
||||
}
|
||||
}
|
|
@ -10,7 +10,7 @@ use crate::{
|
|||
assist_context::{AssistContext, Assists, SourceChangeBuilder},
|
||||
utils::{
|
||||
add_trait_assoc_items_to_impl, filter_assoc_items, gen_trait_fn_body,
|
||||
generate_trait_impl_text, render_snippet, Cursor, DefaultMethods,
|
||||
generate_trait_impl_text, render_snippet, Cursor, DefaultMethods, IgnoreAssocItems,
|
||||
},
|
||||
AssistId, AssistKind,
|
||||
};
|
||||
|
@ -172,7 +172,17 @@ fn impl_def_from_trait(
|
|||
) -> Option<(ast::Impl, ast::AssocItem)> {
|
||||
let trait_ = trait_?;
|
||||
let target_scope = sema.scope(annotated_name.syntax())?;
|
||||
let trait_items = filter_assoc_items(sema, &trait_.items(sema.db), DefaultMethods::No);
|
||||
|
||||
// Keep assoc items of local crates even if they have #[doc(hidden)] attr.
|
||||
let ignore_items = if trait_.module(sema.db).krate().origin(sema.db).is_local() {
|
||||
IgnoreAssocItems::No
|
||||
} else {
|
||||
IgnoreAssocItems::DocHiddenAttrPresent
|
||||
};
|
||||
|
||||
let trait_items =
|
||||
filter_assoc_items(sema, &trait_.items(sema.db), DefaultMethods::No, ignore_items);
|
||||
|
||||
if trait_items.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
|
|
@ -157,7 +157,7 @@ fn find_usages(
|
|||
file_id: FileId,
|
||||
) -> UsageSearchResult {
|
||||
let file_range = FileRange { file_id, range: fn_.syntax().text_range() };
|
||||
type_param_def.usages(sema).in_scope(SearchScope::file_range(file_range)).all()
|
||||
type_param_def.usages(sema).in_scope(&SearchScope::file_range(file_range)).all()
|
||||
}
|
||||
|
||||
fn check_valid_usages(usages: &UsageSearchResult, param_list_range: TextRange) -> bool {
|
||||
|
|
|
@ -184,6 +184,7 @@ mod handlers {
|
|||
mod raw_string;
|
||||
mod remove_dbg;
|
||||
mod remove_mut;
|
||||
mod remove_unused_imports;
|
||||
mod remove_unused_param;
|
||||
mod remove_parentheses;
|
||||
mod reorder_fields;
|
||||
|
@ -294,6 +295,7 @@ mod handlers {
|
|||
raw_string::make_usual_string,
|
||||
raw_string::remove_hash,
|
||||
remove_mut::remove_mut,
|
||||
remove_unused_imports::remove_unused_imports,
|
||||
remove_unused_param::remove_unused_param,
|
||||
remove_parentheses::remove_parentheses,
|
||||
reorder_fields::reorder_fields,
|
||||
|
|
|
@ -132,8 +132,13 @@ fn check_doc_test(assist_id: &str, before: &str, after: &str) {
|
|||
.filter(|it| !it.source_file_edits.is_empty() || !it.file_system_edits.is_empty())
|
||||
.expect("Assist did not contain any source changes");
|
||||
let mut actual = before;
|
||||
if let Some(source_file_edit) = source_change.get_source_edit(file_id) {
|
||||
if let Some((source_file_edit, snippet_edit)) =
|
||||
source_change.get_source_and_snippet_edit(file_id)
|
||||
{
|
||||
source_file_edit.apply(&mut actual);
|
||||
if let Some(snippet_edit) = snippet_edit {
|
||||
snippet_edit.apply(&mut actual);
|
||||
}
|
||||
}
|
||||
actual
|
||||
};
|
||||
|
@ -191,9 +196,12 @@ fn check_with_config(
|
|||
&& source_change.file_system_edits.len() == 0;
|
||||
|
||||
let mut buf = String::new();
|
||||
for (file_id, edit) in source_change.source_file_edits {
|
||||
for (file_id, (edit, snippet_edit)) in source_change.source_file_edits {
|
||||
let mut text = db.file_text(file_id).as_ref().to_owned();
|
||||
edit.apply(&mut text);
|
||||
if let Some(snippet_edit) = snippet_edit {
|
||||
snippet_edit.apply(&mut text);
|
||||
}
|
||||
if !skip_header {
|
||||
let sr = db.file_source_root(file_id);
|
||||
let sr = db.source_root(sr);
|
||||
|
@ -485,18 +493,21 @@ pub fn test_some_range(a: int) -> bool {
|
|||
source_file_edits: {
|
||||
FileId(
|
||||
0,
|
||||
): TextEdit {
|
||||
indels: [
|
||||
Indel {
|
||||
insert: "let $0var_name = 5;\n ",
|
||||
delete: 45..45,
|
||||
},
|
||||
Indel {
|
||||
insert: "var_name",
|
||||
delete: 59..60,
|
||||
},
|
||||
],
|
||||
},
|
||||
): (
|
||||
TextEdit {
|
||||
indels: [
|
||||
Indel {
|
||||
insert: "let $0var_name = 5;\n ",
|
||||
delete: 45..45,
|
||||
},
|
||||
Indel {
|
||||
insert: "var_name",
|
||||
delete: 59..60,
|
||||
},
|
||||
],
|
||||
},
|
||||
None,
|
||||
),
|
||||
},
|
||||
file_system_edits: [],
|
||||
is_snippet: true,
|
||||
|
@ -544,18 +555,21 @@ pub fn test_some_range(a: int) -> bool {
|
|||
source_file_edits: {
|
||||
FileId(
|
||||
0,
|
||||
): TextEdit {
|
||||
indels: [
|
||||
Indel {
|
||||
insert: "let $0var_name = 5;\n ",
|
||||
delete: 45..45,
|
||||
},
|
||||
Indel {
|
||||
insert: "var_name",
|
||||
delete: 59..60,
|
||||
},
|
||||
],
|
||||
},
|
||||
): (
|
||||
TextEdit {
|
||||
indels: [
|
||||
Indel {
|
||||
insert: "let $0var_name = 5;\n ",
|
||||
delete: 45..45,
|
||||
},
|
||||
Indel {
|
||||
insert: "var_name",
|
||||
delete: 59..60,
|
||||
},
|
||||
],
|
||||
},
|
||||
None,
|
||||
),
|
||||
},
|
||||
file_system_edits: [],
|
||||
is_snippet: true,
|
||||
|
@ -581,18 +595,21 @@ pub fn test_some_range(a: int) -> bool {
|
|||
source_file_edits: {
|
||||
FileId(
|
||||
0,
|
||||
): TextEdit {
|
||||
indels: [
|
||||
Indel {
|
||||
insert: "fun_name()",
|
||||
delete: 59..60,
|
||||
},
|
||||
Indel {
|
||||
insert: "\n\nfn $0fun_name() -> i32 {\n 5\n}",
|
||||
delete: 110..110,
|
||||
},
|
||||
],
|
||||
},
|
||||
): (
|
||||
TextEdit {
|
||||
indels: [
|
||||
Indel {
|
||||
insert: "fun_name()",
|
||||
delete: 59..60,
|
||||
},
|
||||
Indel {
|
||||
insert: "\n\nfn $0fun_name() -> i32 {\n 5\n}",
|
||||
delete: 110..110,
|
||||
},
|
||||
],
|
||||
},
|
||||
None,
|
||||
),
|
||||
},
|
||||
file_system_edits: [],
|
||||
is_snippet: true,
|
||||
|
|
|
@ -952,6 +952,7 @@ fn doctest_generate_default_from_new() {
|
|||
check_doc_test(
|
||||
"generate_default_from_new",
|
||||
r#####"
|
||||
//- minicore: default
|
||||
struct Example { _inner: () }
|
||||
|
||||
impl Example {
@ -2233,6 +2234,24 @@ fn main() {
|
|||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn doctest_remove_unused_imports() {
|
||||
check_doc_test(
|
||||
"remove_unused_imports",
|
||||
r#####"
|
||||
struct X();
|
||||
mod foo {
|
||||
use super::X$0;
|
||||
}
|
||||
"#####,
|
||||
r#####"
|
||||
struct X();
|
||||
mod foo {
|
||||
}
|
||||
"#####,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn doctest_remove_unused_param() {
|
||||
check_doc_test(
@ -3,7 +3,7 @@
|
|||
use std::ops;
|
||||
|
||||
pub(crate) use gen_trait_fn_body::gen_trait_fn_body;
|
||||
use hir::{db::HirDatabase, HirDisplay, InFile, Semantics};
|
||||
use hir::{db::HirDatabase, HasAttrs as HirHasAttrs, HirDisplay, InFile, Semantics};
|
||||
use ide_db::{
|
||||
famous_defs::FamousDefs, path_transform::PathTransform,
|
||||
syntax_helpers::insert_whitespace_into_node::insert_ws_into, RootDatabase, SnippetCap,
@ -84,6 +84,12 @@ pub fn test_related_attribute(fn_def: &ast::Fn) -> Option<ast::Attr> {
|
|||
})
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, PartialEq)]
|
||||
pub enum IgnoreAssocItems {
|
||||
DocHiddenAttrPresent,
|
||||
No,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq)]
|
||||
pub enum DefaultMethods {
|
||||
Only,
@ -94,11 +100,16 @@ pub fn filter_assoc_items(
|
|||
sema: &Semantics<'_, RootDatabase>,
|
||||
items: &[hir::AssocItem],
|
||||
default_methods: DefaultMethods,
|
||||
ignore_items: IgnoreAssocItems,
|
||||
) -> Vec<InFile<ast::AssocItem>> {
|
||||
return items
|
||||
.iter()
|
||||
// Note: This throws away items with no source.
|
||||
.copied()
|
||||
.filter(|assoc_item| {
|
||||
!(ignore_items == IgnoreAssocItems::DocHiddenAttrPresent
|
||||
&& assoc_item.attrs(sema.db).has_doc_hidden())
|
||||
})
|
||||
// Note: This throws away items with no source.
|
||||
.filter_map(|assoc_item| {
|
||||
let item = match assoc_item {
|
||||
hir::AssocItem::Function(it) => sema.source(it)?.map(ast::AssocItem::Fn),
@ -427,9 +427,26 @@ impl Builder {
|
|||
let insert_text = self.insert_text.unwrap_or_else(|| label.to_string());
|
||||
|
||||
if !self.doc_aliases.is_empty() {
|
||||
let doc_aliases = self.doc_aliases.into_iter().join(", ");
|
||||
let doc_aliases = self.doc_aliases.iter().join(", ");
|
||||
label = SmolStr::from(format!("{label} (alias {doc_aliases})"));
|
||||
lookup = SmolStr::from(format!("{lookup} {doc_aliases}"));
|
||||
let lookup_doc_aliases = self
|
||||
.doc_aliases
|
||||
.iter()
|
||||
// Don't include aliases in `lookup` that aren't valid identifiers as including
|
||||
// them results in weird completion filtering behavior e.g. `Partial>` matching
|
||||
// `PartialOrd` because it has an alias of ">".
|
||||
.filter(|alias| {
|
||||
let mut chars = alias.chars();
|
||||
chars.next().is_some_and(char::is_alphabetic)
|
||||
&& chars.all(|c| c.is_alphanumeric() || c == '_')
|
||||
})
|
||||
// Deliberately concatenated without separators as adding separators e.g.
|
||||
// `alias1, alias2` results in LSP clients continuing to display the completion even
|
||||
// after typing a comma or space.
|
||||
.join("");
|
||||
if !lookup_doc_aliases.is_empty() {
|
||||
lookup = SmolStr::from(format!("{lookup}{lookup_doc_aliases}"));
|
||||
}
|
||||
}
|
||||
if let [import_edit] = &*self.imports_to_add {
|
||||
// snippets can have multiple imports, but normal completions only have up to one
@ -1280,3 +1280,26 @@ fn here_we_go() {
|
|||
"#]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn completion_filtering_excludes_non_identifier_doc_aliases() {
|
||||
check_edit(
|
||||
"PartialOrdcmporder",
|
||||
r#"
|
||||
#[doc(alias = ">")]
|
||||
#[doc(alias = "cmp")]
|
||||
#[doc(alias = "order")]
|
||||
trait PartialOrd {}
|
||||
|
||||
struct Foo<T: Partial$0
|
||||
"#,
|
||||
r#"
|
||||
#[doc(alias = ">")]
|
||||
#[doc(alias = "cmp")]
|
||||
#[doc(alias = "order")]
|
||||
trait PartialOrd {}
|
||||
|
||||
struct Foo<T: PartialOrd
|
||||
"#,
|
||||
);
|
||||
}
@ -7,10 +7,10 @@
|
|||
|
||||
use arrayvec::ArrayVec;
|
||||
use hir::{
|
||||
Adt, AsAssocItem, AssocItem, BuiltinAttr, BuiltinType, Const, Crate, DeriveHelper, Field,
|
||||
Function, GenericParam, HasVisibility, Impl, Label, Local, Macro, Module, ModuleDef, Name,
|
||||
PathResolution, Semantics, Static, ToolModule, Trait, TraitAlias, TypeAlias, Variant,
|
||||
Visibility,
|
||||
Adt, AsAssocItem, AssocItem, BuiltinAttr, BuiltinType, Const, Crate, DeriveHelper,
|
||||
ExternCrateDecl, Field, Function, GenericParam, HasVisibility, Impl, Label, Local, Macro,
|
||||
Module, ModuleDef, Name, PathResolution, Semantics, Static, ToolModule, Trait, TraitAlias,
|
||||
TypeAlias, Variant, Visibility,
|
||||
};
|
||||
use stdx::impl_from;
|
||||
use syntax::{
@ -42,6 +42,7 @@ pub enum Definition {
|
|||
DeriveHelper(DeriveHelper),
|
||||
BuiltinAttr(BuiltinAttr),
|
||||
ToolModule(ToolModule),
|
||||
ExternCrateDecl(ExternCrateDecl),
|
||||
}
|
||||
|
||||
impl Definition {
@ -73,6 +74,7 @@ impl Definition {
|
|||
Definition::Local(it) => it.module(db),
|
||||
Definition::GenericParam(it) => it.module(db),
|
||||
Definition::Label(it) => it.module(db),
|
||||
Definition::ExternCrateDecl(it) => it.module(db),
|
||||
Definition::DeriveHelper(it) => it.derive().module(db),
|
||||
Definition::BuiltinAttr(_) | Definition::BuiltinType(_) | Definition::ToolModule(_) => {
|
||||
return None
@ -93,6 +95,7 @@ impl Definition {
|
|||
Definition::TraitAlias(it) => it.visibility(db),
|
||||
Definition::TypeAlias(it) => it.visibility(db),
|
||||
Definition::Variant(it) => it.visibility(db),
|
||||
Definition::ExternCrateDecl(it) => it.visibility(db),
|
||||
Definition::BuiltinType(_) => Visibility::Public,
|
||||
Definition::Macro(_) => return None,
|
||||
Definition::BuiltinAttr(_)
@ -127,6 +130,7 @@ impl Definition {
|
|||
Definition::BuiltinAttr(_) => return None, // FIXME
|
||||
Definition::ToolModule(_) => return None, // FIXME
|
||||
Definition::DeriveHelper(it) => it.name(db),
|
||||
Definition::ExternCrateDecl(it) => return it.alias_or_name(db),
|
||||
};
|
||||
Some(name)
|
||||
}
@ -196,6 +200,10 @@ impl IdentClass {
|
|||
res.push(Definition::Local(local_ref));
|
||||
res.push(Definition::Field(field_ref));
|
||||
}
|
||||
IdentClass::NameRefClass(NameRefClass::ExternCrateShorthand { decl, krate }) => {
|
||||
res.push(Definition::ExternCrateDecl(decl));
|
||||
res.push(Definition::Module(krate.root_module()));
|
||||
}
|
||||
IdentClass::Operator(
|
||||
OperatorClass::Await(func)
|
||||
| OperatorClass::Prefix(func)
@ -222,6 +230,10 @@ impl IdentClass {
|
|||
res.push(Definition::Local(local_ref));
|
||||
res.push(Definition::Field(field_ref));
|
||||
}
|
||||
IdentClass::NameRefClass(NameRefClass::ExternCrateShorthand { decl, krate }) => {
|
||||
res.push(Definition::ExternCrateDecl(decl));
|
||||
res.push(Definition::Module(krate.root_module()));
|
||||
}
|
||||
IdentClass::Operator(_) => (),
|
||||
}
|
||||
res
@ -310,6 +322,7 @@ impl NameClass {
|
|||
ast::Item::Enum(it) => Definition::Adt(hir::Adt::Enum(sema.to_def(&it)?)),
|
||||
ast::Item::Struct(it) => Definition::Adt(hir::Adt::Struct(sema.to_def(&it)?)),
|
||||
ast::Item::Union(it) => Definition::Adt(hir::Adt::Union(sema.to_def(&it)?)),
|
||||
ast::Item::ExternCrate(it) => Definition::ExternCrateDecl(sema.to_def(&it)?),
|
||||
_ => return None,
|
||||
};
|
||||
Some(definition)
@ -346,10 +359,8 @@ impl NameClass {
|
|||
let path = use_tree.path()?;
|
||||
sema.resolve_path(&path).map(Definition::from)
|
||||
} else {
|
||||
let extern_crate = rename.syntax().parent().and_then(ast::ExternCrate::cast)?;
|
||||
let krate = sema.resolve_extern_crate(&extern_crate)?;
|
||||
let root_module = krate.root_module(sema.db);
|
||||
Some(Definition::Module(root_module))
|
||||
sema.to_def(&rename.syntax().parent().and_then(ast::ExternCrate::cast)?)
|
||||
.map(Definition::ExternCrateDecl)
|
||||
}
|
||||
}
|
||||
}
@ -427,7 +438,19 @@ impl OperatorClass {
|
|||
#[derive(Debug)]
|
||||
pub enum NameRefClass {
|
||||
Definition(Definition),
|
||||
FieldShorthand { local_ref: Local, field_ref: Field },
|
||||
FieldShorthand {
|
||||
local_ref: Local,
|
||||
field_ref: Field,
|
||||
},
|
||||
/// The specific situation where we have an extern crate decl without a rename
|
||||
/// Here we have both a declaration and a reference.
|
||||
/// ```rs
|
||||
/// extern crate foo;
|
||||
/// ```
|
||||
ExternCrateShorthand {
|
||||
decl: ExternCrateDecl,
|
||||
krate: Crate,
|
||||
},
|
||||
}
|
||||
|
||||
impl NameRefClass {
@ -513,10 +536,14 @@ impl NameRefClass {
|
|||
}
|
||||
None
|
||||
},
|
||||
ast::ExternCrate(extern_crate) => {
|
||||
let krate = sema.resolve_extern_crate(&extern_crate)?;
|
||||
let root_module = krate.root_module(sema.db);
|
||||
Some(NameRefClass::Definition(Definition::Module(root_module)))
|
||||
ast::ExternCrate(extern_crate_ast) => {
|
||||
let extern_crate = sema.to_def(&extern_crate_ast)?;
|
||||
let krate = extern_crate.resolved_crate(sema.db)?;
|
||||
Some(if extern_crate_ast.rename().is_some() {
|
||||
NameRefClass::Definition(Definition::Module(krate.root_module()))
|
||||
} else {
|
||||
NameRefClass::ExternCrateShorthand { krate, decl: extern_crate }
|
||||
})
|
||||
},
|
||||
_ => None
|
||||
}
@ -167,7 +167,7 @@ impl FamousDefs<'_, '_> {
|
|||
lang_crate => lang_crate,
|
||||
};
|
||||
let std_crate = self.find_lang_crate(lang_crate)?;
|
||||
let mut module = std_crate.root_module(db);
|
||||
let mut module = std_crate.root_module();
|
||||
for segment in path {
|
||||
module = module.children(db).find_map(|child| {
|
||||
let name = child.name(db)?;
@ -82,8 +82,9 @@ impl Definition {
|
|||
}
|
||||
|
||||
/// Textual range of the identifier which will change when renaming this
|
||||
/// `Definition`. Note that some definitions, like builtin types, can't be
|
||||
/// renamed.
|
||||
/// `Definition`. Note that builtin types can't be
|
||||
/// renamed and extern crate names will report its range, though a rename will introduce
|
||||
/// an alias instead.
|
||||
pub fn range_for_rename(self, sema: &Semantics<'_, RootDatabase>) -> Option<FileRange> {
|
||||
let res = match self {
|
||||
Definition::Macro(mac) => {
@ -146,6 +147,16 @@ impl Definition {
|
|||
let lifetime = src.value.lifetime()?;
|
||||
src.with_value(lifetime.syntax()).original_file_range_opt(sema.db)
|
||||
}
|
||||
Definition::ExternCrateDecl(it) => {
|
||||
let src = it.source(sema.db)?;
|
||||
if let Some(rename) = src.value.rename() {
|
||||
let name = rename.name()?;
|
||||
src.with_value(name.syntax()).original_file_range_opt(sema.db)
|
||||
} else {
|
||||
let name = src.value.name_ref()?;
|
||||
src.with_value(name.syntax()).original_file_range_opt(sema.db)
|
||||
}
|
||||
}
|
||||
Definition::BuiltinType(_) => return None,
|
||||
Definition::SelfType(_) => return None,
|
||||
Definition::BuiltinAttr(_) => return None,
@ -526,6 +537,9 @@ fn source_edit_from_def(
|
|||
TextRange::new(range.start() + syntax::TextSize::from(1), range.end()),
|
||||
new_name.strip_prefix('\'').unwrap_or(new_name).to_owned(),
|
||||
),
|
||||
Definition::ExternCrateDecl(decl) if decl.alias(sema.db).is_none() => {
|
||||
(TextRange::empty(range.end()), format!(" as {new_name}"))
|
||||
}
|
||||
_ => (range, new_name.to_owned()),
|
||||
};
|
||||
edit.replace(range, new_name);
@ -127,7 +127,7 @@ impl SearchScope {
|
|||
}
|
||||
|
||||
/// Build a search scope spanning the given module and all its submodules.
|
||||
fn module_and_children(db: &RootDatabase, module: hir::Module) -> SearchScope {
|
||||
pub fn module_and_children(db: &RootDatabase, module: hir::Module) -> SearchScope {
|
||||
let mut entries = IntMap::default();
|
||||
|
||||
let (file_id, range) = {
@ -329,7 +329,7 @@ impl Definition {
|
|||
pub struct FindUsages<'a> {
|
||||
def: Definition,
|
||||
sema: &'a Semantics<'a, RootDatabase>,
|
||||
scope: Option<SearchScope>,
|
||||
scope: Option<&'a SearchScope>,
|
||||
/// The container of our definition should it be an assoc item
|
||||
assoc_item_container: Option<hir::AssocItemContainer>,
|
||||
/// whether to search for the `Self` type of the definition
@ -338,7 +338,7 @@ pub struct FindUsages<'a> {
|
|||
search_self_mod: bool,
|
||||
}
|
||||
|
||||
impl FindUsages<'_> {
|
||||
impl<'a> FindUsages<'a> {
|
||||
/// Enable searching for `Self` when the definition is a type or `self` for modules.
|
||||
pub fn include_self_refs(mut self) -> Self {
|
||||
self.include_self_kw_refs = def_to_ty(self.sema, &self.def);
@ -347,12 +347,12 @@ impl FindUsages<'_> {
|
|||
}
|
||||
|
||||
/// Limit the search to a given [`SearchScope`].
|
||||
pub fn in_scope(self, scope: SearchScope) -> Self {
|
||||
pub fn in_scope(self, scope: &'a SearchScope) -> Self {
|
||||
self.set_scope(Some(scope))
|
||||
}
|
||||
|
||||
/// Limit the search to a given [`SearchScope`].
|
||||
pub fn set_scope(mut self, scope: Option<SearchScope>) -> Self {
|
||||
pub fn set_scope(mut self, scope: Option<&'a SearchScope>) -> Self {
|
||||
assert!(self.scope.is_none());
|
||||
self.scope = scope;
|
||||
self
@ -376,7 +376,7 @@ impl FindUsages<'_> {
|
|||
res
|
||||
}
|
||||
|
||||
fn search(&self, sink: &mut dyn FnMut(FileId, FileReference) -> bool) {
|
||||
pub fn search(&self, sink: &mut dyn FnMut(FileId, FileReference) -> bool) {
|
||||
let _p = profile::span("FindUsages:search");
|
||||
let sema = self.sema;
|
||||
@ -7,17 +7,17 @@ use std::{collections::hash_map::Entry, iter, mem};
|
|||
|
||||
use crate::SnippetCap;
|
||||
use base_db::{AnchoredPathBuf, FileId};
|
||||
use itertools::Itertools;
|
||||
use nohash_hasher::IntMap;
|
||||
use stdx::never;
|
||||
use syntax::{
|
||||
algo, ast, ted, AstNode, SyntaxElement, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange,
|
||||
TextSize,
|
||||
algo, AstNode, SyntaxElement, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, TextSize,
|
||||
};
|
||||
use text_edit::{TextEdit, TextEditBuilder};
|
||||
|
||||
#[derive(Default, Debug, Clone)]
|
||||
pub struct SourceChange {
|
||||
pub source_file_edits: IntMap<FileId, TextEdit>,
|
||||
pub source_file_edits: IntMap<FileId, (TextEdit, Option<SnippetEdit>)>,
|
||||
pub file_system_edits: Vec<FileSystemEdit>,
|
||||
pub is_snippet: bool,
|
||||
}
@ -26,7 +26,7 @@ impl SourceChange {
|
|||
/// Creates a new SourceChange with the given label
|
||||
/// from the edits.
|
||||
pub fn from_edits(
|
||||
source_file_edits: IntMap<FileId, TextEdit>,
|
||||
source_file_edits: IntMap<FileId, (TextEdit, Option<SnippetEdit>)>,
|
||||
file_system_edits: Vec<FileSystemEdit>,
|
||||
) -> Self {
|
||||
SourceChange { source_file_edits, file_system_edits, is_snippet: false }
@ -34,7 +34,7 @@ impl SourceChange {
|
|||
|
||||
pub fn from_text_edit(file_id: FileId, edit: TextEdit) -> Self {
|
||||
SourceChange {
|
||||
source_file_edits: iter::once((file_id, edit)).collect(),
|
||||
source_file_edits: iter::once((file_id, (edit, None))).collect(),
|
||||
..Default::default()
|
||||
}
|
||||
}
@ -42,12 +42,31 @@ impl SourceChange {
|
|||
/// Inserts a [`TextEdit`] for the given [`FileId`]. This properly handles merging existing
|
||||
/// edits for a file if some already exist.
|
||||
pub fn insert_source_edit(&mut self, file_id: FileId, edit: TextEdit) {
|
||||
self.insert_source_and_snippet_edit(file_id, edit, None)
|
||||
}
|
||||
|
||||
/// Inserts a [`TextEdit`] and potentially a [`SnippetEdit`] for the given [`FileId`].
|
||||
/// This properly handles merging existing edits for a file if some already exist.
|
||||
pub fn insert_source_and_snippet_edit(
|
||||
&mut self,
|
||||
file_id: FileId,
|
||||
edit: TextEdit,
|
||||
snippet_edit: Option<SnippetEdit>,
|
||||
) {
|
||||
match self.source_file_edits.entry(file_id) {
|
||||
Entry::Occupied(mut entry) => {
|
||||
never!(entry.get_mut().union(edit).is_err(), "overlapping edits for same file");
|
||||
let value = entry.get_mut();
|
||||
never!(value.0.union(edit).is_err(), "overlapping edits for same file");
|
||||
never!(
|
||||
value.1.is_some() && snippet_edit.is_some(),
|
||||
"overlapping snippet edits for same file"
|
||||
);
|
||||
if value.1.is_none() {
|
||||
value.1 = snippet_edit;
|
||||
}
|
||||
}
|
||||
Entry::Vacant(entry) => {
|
||||
entry.insert(edit);
|
||||
entry.insert((edit, snippet_edit));
|
||||
}
|
||||
}
|
||||
}
@ -56,7 +75,10 @@ impl SourceChange {
|
|||
self.file_system_edits.push(edit);
|
||||
}
|
||||
|
||||
pub fn get_source_edit(&self, file_id: FileId) -> Option<&TextEdit> {
|
||||
pub fn get_source_and_snippet_edit(
|
||||
&self,
|
||||
file_id: FileId,
|
||||
) -> Option<&(TextEdit, Option<SnippetEdit>)> {
|
||||
self.source_file_edits.get(&file_id)
|
||||
}
@ -70,7 +92,18 @@ impl SourceChange {
|
|||
|
||||
impl Extend<(FileId, TextEdit)> for SourceChange {
|
||||
fn extend<T: IntoIterator<Item = (FileId, TextEdit)>>(&mut self, iter: T) {
|
||||
iter.into_iter().for_each(|(file_id, edit)| self.insert_source_edit(file_id, edit));
|
||||
self.extend(iter.into_iter().map(|(file_id, edit)| (file_id, (edit, None))))
|
||||
}
|
||||
}
|
||||
|
||||
impl Extend<(FileId, (TextEdit, Option<SnippetEdit>))> for SourceChange {
|
||||
fn extend<T: IntoIterator<Item = (FileId, (TextEdit, Option<SnippetEdit>))>>(
|
||||
&mut self,
|
||||
iter: T,
|
||||
) {
|
||||
iter.into_iter().for_each(|(file_id, (edit, snippet_edit))| {
|
||||
self.insert_source_and_snippet_edit(file_id, edit, snippet_edit)
|
||||
});
|
||||
}
|
||||
}
@ -82,6 +115,8 @@ impl Extend<FileSystemEdit> for SourceChange {
|
|||
|
||||
impl From<IntMap<FileId, TextEdit>> for SourceChange {
|
||||
fn from(source_file_edits: IntMap<FileId, TextEdit>) -> SourceChange {
|
||||
let source_file_edits =
|
||||
source_file_edits.into_iter().map(|(file_id, edit)| (file_id, (edit, None))).collect();
|
||||
SourceChange { source_file_edits, file_system_edits: Vec::new(), is_snippet: false }
|
||||
}
|
||||
}
@ -94,6 +129,65 @@ impl FromIterator<(FileId, TextEdit)> for SourceChange {
|
|||
}
|
||||
}
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub struct SnippetEdit(Vec<(u32, TextRange)>);
|
||||
|
||||
impl SnippetEdit {
|
||||
pub fn new(snippets: Vec<Snippet>) -> Self {
|
||||
let mut snippet_ranges = snippets
|
||||
.into_iter()
|
||||
.zip(1..)
|
||||
.with_position()
|
||||
.map(|pos| {
|
||||
let (snippet, index) = match pos {
|
||||
itertools::Position::First(it) | itertools::Position::Middle(it) => it,
|
||||
// last/only snippet gets index 0
|
||||
itertools::Position::Last((snippet, _))
|
||||
| itertools::Position::Only((snippet, _)) => (snippet, 0),
|
||||
};
|
||||
|
||||
let range = match snippet {
|
||||
Snippet::Tabstop(pos) => TextRange::empty(pos),
|
||||
Snippet::Placeholder(range) => range,
|
||||
};
|
||||
(index, range)
|
||||
})
|
||||
.collect_vec();
|
||||
|
||||
snippet_ranges.sort_by_key(|(_, range)| range.start());
|
||||
|
||||
// Ensure that none of the ranges overlap
|
||||
let disjoint_ranges = snippet_ranges
|
||||
.iter()
|
||||
.zip(snippet_ranges.iter().skip(1))
|
||||
.all(|((_, left), (_, right))| left.end() <= right.start() || left == right);
|
||||
stdx::always!(disjoint_ranges);
|
||||
|
||||
SnippetEdit(snippet_ranges)
|
||||
}
|
||||
|
||||
/// Inserts all of the snippets into the given text.
|
||||
pub fn apply(&self, text: &mut String) {
|
||||
// Start from the back so that we don't have to adjust ranges
|
||||
for (index, range) in self.0.iter().rev() {
|
||||
if range.is_empty() {
|
||||
// is a tabstop
|
||||
text.insert_str(range.start().into(), &format!("${index}"));
|
||||
} else {
|
||||
// is a placeholder
|
||||
text.insert(range.end().into(), '}');
|
||||
text.insert_str(range.start().into(), &format!("${{{index}:"));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Gets the underlying snippet index + text range
|
||||
/// Tabstops are represented by an empty range, and placeholders use the range that they were given
|
||||
pub fn into_edit_ranges(self) -> Vec<(u32, TextRange)> {
|
||||
self.0
|
||||
}
|
||||
}
|
||||
|
||||
pub struct SourceChangeBuilder {
|
||||
pub edit: TextEditBuilder,
|
||||
pub file_id: FileId,
@ -152,24 +246,19 @@ impl SourceChangeBuilder {
|
|||
}
|
||||
|
||||
fn commit(&mut self) {
|
||||
// Render snippets first so that they get bundled into the tree diff
|
||||
if let Some(mut snippets) = self.snippet_builder.take() {
|
||||
// Last snippet always has stop index 0
|
||||
let last_stop = snippets.places.pop().unwrap();
|
||||
last_stop.place(0);
|
||||
|
||||
for (index, stop) in snippets.places.into_iter().enumerate() {
|
||||
stop.place(index + 1)
|
||||
}
|
||||
}
|
||||
let snippet_edit = self.snippet_builder.take().map(|builder| {
|
||||
SnippetEdit::new(
|
||||
builder.places.into_iter().map(PlaceSnippet::finalize_position).collect_vec(),
|
||||
)
|
||||
});
|
||||
|
||||
if let Some(tm) = self.mutated_tree.take() {
|
||||
algo::diff(&tm.immutable, &tm.mutable_clone).into_text_edit(&mut self.edit)
|
||||
algo::diff(&tm.immutable, &tm.mutable_clone).into_text_edit(&mut self.edit);
|
||||
}
|
||||
|
||||
let edit = mem::take(&mut self.edit).finish();
|
||||
if !edit.is_empty() {
|
||||
self.source_change.insert_source_edit(self.file_id, edit);
|
||||
if !edit.is_empty() || snippet_edit.is_some() {
|
||||
self.source_change.insert_source_and_snippet_edit(self.file_id, edit, snippet_edit);
|
||||
}
|
||||
}
|
||||
@ -275,6 +364,16 @@ impl SourceChangeBuilder {
|
|||
|
||||
pub fn finish(mut self) -> SourceChange {
|
||||
self.commit();
|
||||
|
||||
// Only one file can have snippet edits
|
||||
stdx::never!(self
|
||||
.source_change
|
||||
.source_file_edits
|
||||
.iter()
|
||||
.filter(|(_, (_, snippet_edit))| snippet_edit.is_some())
|
||||
.at_most_one()
|
||||
.is_err());
|
||||
|
||||
mem::take(&mut self.source_change)
|
||||
}
|
||||
}
@ -296,6 +395,13 @@ impl From<FileSystemEdit> for SourceChange {
|
|||
}
|
||||
}
|
||||
|
||||
pub enum Snippet {
|
||||
/// A tabstop snippet (e.g. `$0`).
|
||||
Tabstop(TextSize),
|
||||
/// A placeholder snippet (e.g. `${0:placeholder}`).
|
||||
Placeholder(TextRange),
|
||||
}
|
||||
|
||||
enum PlaceSnippet {
|
||||
/// Place a tabstop before an element
|
||||
Before(SyntaxElement),
@ -306,57 +412,11 @@ enum PlaceSnippet {
|
|||
}
|
||||
|
||||
impl PlaceSnippet {
|
||||
/// Places the snippet before or over an element with the given tab stop index
|
||||
fn place(self, order: usize) {
|
||||
// ensure the target element is still attached
|
||||
match &self {
|
||||
PlaceSnippet::Before(element)
|
||||
| PlaceSnippet::After(element)
|
||||
| PlaceSnippet::Over(element) => {
|
||||
// element should still be in the tree, but if it isn't
|
||||
// then it's okay to just ignore this place
|
||||
if stdx::never!(element.parent().is_none()) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn finalize_position(self) -> Snippet {
|
||||
match self {
|
||||
PlaceSnippet::Before(element) => {
|
||||
ted::insert_raw(ted::Position::before(&element), Self::make_tab_stop(order));
|
||||
}
|
||||
PlaceSnippet::After(element) => {
|
||||
ted::insert_raw(ted::Position::after(&element), Self::make_tab_stop(order));
|
||||
}
|
||||
PlaceSnippet::Over(element) => {
|
||||
let position = ted::Position::before(&element);
|
||||
element.detach();
|
||||
|
||||
let snippet = ast::SourceFile::parse(&format!("${{{order}:_}}"))
|
||||
.syntax_node()
|
||||
.clone_for_update();
|
||||
|
||||
let placeholder =
|
||||
snippet.descendants().find_map(ast::UnderscoreExpr::cast).unwrap();
|
||||
ted::replace(placeholder.syntax(), element);
|
||||
|
||||
ted::insert_raw(position, snippet);
|
||||
}
|
||||
PlaceSnippet::Before(it) => Snippet::Tabstop(it.text_range().start()),
|
||||
PlaceSnippet::After(it) => Snippet::Tabstop(it.text_range().end()),
|
||||
PlaceSnippet::Over(it) => Snippet::Placeholder(it.text_range()),
|
||||
}
|
||||
}
|
||||
|
||||
fn make_tab_stop(order: usize) -> SyntaxNode {
|
||||
let stop = ast::SourceFile::parse(&format!("stop!(${order})"))
|
||||
.syntax_node()
|
||||
.descendants()
|
||||
.find_map(ast::TokenTree::cast)
|
||||
.unwrap()
|
||||
.syntax()
|
||||
.clone_for_update();
|
||||
|
||||
stop.first_token().unwrap().detach();
|
||||
stop.last_token().unwrap().detach();
|
||||
|
||||
stop
|
||||
}
|
||||
}
@ -51,6 +51,9 @@ macro_rules! compile_error { () => {} }
|
|||
|
||||
compile_error!("compile_error macro works");
|
||||
//^^^^^^^^^^^^^ error: compile_error macro works
|
||||
|
||||
compile_error! { "compile_error macro braced works" }
|
||||
//^^^^^^^^^^^^^ error: compile_error macro braced works
|
||||
"#,
|
||||
);
|
||||
}
@ -77,7 +80,7 @@ macro_rules! m {
|
|||
|
||||
fn f() {
|
||||
m!();
|
||||
//^^^^ error: unresolved macro `$crate::private::concat!`
|
||||
//^^^^ error: unresolved macro $crate::private::concat
|
||||
}
|
||||
|
||||
//- /core.rs crate:core
@ -208,7 +208,7 @@ fn get_default_constructor(
|
|||
}
|
||||
|
||||
let krate = ctx.sema.to_module_def(d.file.original_file(ctx.sema.db))?.krate();
|
||||
let module = krate.root_module(ctx.sema.db);
|
||||
let module = krate.root_module();
|
||||
|
||||
// Look for a ::new() associated function
|
||||
let has_new_func = ty
@ -49,8 +49,11 @@ fn check_nth_fix(nth: usize, ra_fixture_before: &str, ra_fixture_after: &str) {
|
|||
let file_id = *source_change.source_file_edits.keys().next().unwrap();
|
||||
let mut actual = db.file_text(file_id).to_string();
|
||||
|
||||
for edit in source_change.source_file_edits.values() {
|
||||
for (edit, snippet_edit) in source_change.source_file_edits.values() {
|
||||
edit.apply(&mut actual);
|
||||
if let Some(snippet_edit) = snippet_edit {
|
||||
snippet_edit.apply(&mut actual);
|
||||
}
|
||||
}
|
||||
actual
|
||||
};
@ -121,7 +121,7 @@ impl MatchFinder<'_> {
|
|||
// cache miss. This is a limitation of NLL and is fixed with Polonius. For now we do two
|
||||
// lookups in the case of a cache hit.
|
||||
if usage_cache.find(&definition).is_none() {
|
||||
let usages = definition.usages(&self.sema).in_scope(self.search_scope()).all();
|
||||
let usages = definition.usages(&self.sema).in_scope(&self.search_scope()).all();
|
||||
usage_cache.usages.push((definition, usages));
|
||||
return &usage_cache.usages.last().unwrap().1;
|
||||
}
@ -153,6 +153,9 @@ pub(crate) fn external_docs(
|
|||
NameRefClass::FieldShorthand { local_ref: _, field_ref } => {
|
||||
Definition::Field(field_ref)
|
||||
}
|
||||
NameRefClass::ExternCrateShorthand { decl, .. } => {
|
||||
Definition::ExternCrateDecl(decl)
|
||||
}
|
||||
},
|
||||
ast::Name(name) => match NameClass::classify(sema, &name)? {
|
||||
NameClass::Definition(it) | NameClass::ConstReference(it) => it,
@ -209,6 +212,7 @@ pub(crate) fn resolve_doc_path_for_def(
|
|||
Definition::Macro(it) => it.resolve_doc_path(db, link, ns),
|
||||
Definition::Field(it) => it.resolve_doc_path(db, link, ns),
|
||||
Definition::SelfType(it) => it.resolve_doc_path(db, link, ns),
|
||||
Definition::ExternCrateDecl(it) => it.resolve_doc_path(db, link, ns),
|
||||
Definition::BuiltinAttr(_)
|
||||
| Definition::ToolModule(_)
|
||||
| Definition::BuiltinType(_)
@ -617,6 +621,9 @@ fn filename_and_frag_for_def(
|
|||
// FIXME fragment numbering
|
||||
return Some((adt, file, Some(String::from("impl"))));
|
||||
}
|
||||
Definition::ExternCrateDecl(it) => {
|
||||
format!("{}/index.html", it.name(db).display(db.upcast()))
|
||||
}
|
||||
Definition::Local(_)
|
||||
| Definition::GenericParam(_)
|
||||
| Definition::Label(_)
@ -37,11 +37,15 @@ pub(crate) fn goto_declaration(
|
|||
match parent {
|
||||
ast::NameRef(name_ref) => match NameRefClass::classify(&sema, &name_ref)? {
|
||||
NameRefClass::Definition(it) => Some(it),
|
||||
NameRefClass::FieldShorthand { field_ref, .. } => return field_ref.try_to_nav(db),
|
||||
NameRefClass::FieldShorthand { field_ref, .. } =>
|
||||
return field_ref.try_to_nav(db),
|
||||
NameRefClass::ExternCrateShorthand { decl, .. } =>
|
||||
return decl.try_to_nav(db),
|
||||
},
|
||||
ast::Name(name) => match NameClass::classify(&sema, &name)? {
|
||||
NameClass::Definition(it) | NameClass::ConstReference(it) => Some(it),
|
||||
NameClass::PatFieldShorthand { field_ref, .. } => return field_ref.try_to_nav(db),
|
||||
NameClass::PatFieldShorthand { field_ref, .. } =>
|
||||
return field_ref.try_to_nav(db),
|
||||
},
|
||||
_ => None
|
||||
}
@ -53,6 +57,7 @@ pub(crate) fn goto_declaration(
|
|||
Definition::Const(c) => c.as_assoc_item(db),
|
||||
Definition::TypeAlias(ta) => ta.as_assoc_item(db),
|
||||
Definition::Function(f) => f.as_assoc_item(db),
|
||||
Definition::ExternCrateDecl(it) => return it.try_to_nav(db),
|
||||
_ => None,
|
||||
}?;
@ -211,4 +216,30 @@ fn main() {
|
|||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn goto_decl_for_extern_crate() {
|
||||
check(
|
||||
r#"
|
||||
//- /main.rs crate:main deps:std
|
||||
extern crate std$0;
|
||||
/// ^^^
|
||||
//- /std/lib.rs crate:std
|
||||
// empty
|
||||
"#,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn goto_decl_for_renamed_extern_crate() {
|
||||
check(
|
||||
r#"
|
||||
//- /main.rs crate:main deps:std
|
||||
extern crate std as abc$0;
|
||||
/// ^^^
|
||||
//- /std/lib.rs crate:std
|
||||
// empty
|
||||
"#,
|
||||
)
|
||||
}
|
||||
}
@ -1,6 +1,9 @@
|
|||
use std::mem::discriminant;
|
||||
|
||||
use crate::{doc_links::token_as_doc_comment, FilePosition, NavigationTarget, RangeInfo, TryToNav};
|
||||
use crate::{
|
||||
doc_links::token_as_doc_comment, navigation_target::ToNav, FilePosition, NavigationTarget,
|
||||
RangeInfo, TryToNav,
|
||||
};
|
||||
use hir::{AsAssocItem, AssocItem, Semantics};
|
||||
use ide_db::{
|
||||
base_db::{AnchoredPath, FileId, FileLoader},
@ -73,6 +76,13 @@ pub(crate) fn goto_definition(
|
|||
.definitions()
|
||||
.into_iter()
|
||||
.flat_map(|def| {
|
||||
if let Definition::ExternCrateDecl(crate_def) = def {
|
||||
return crate_def
|
||||
.resolved_crate(db)
|
||||
.map(|it| it.root_module().to_nav(sema.db))
|
||||
.into_iter()
|
||||
.collect();
|
||||
}
|
||||
try_filter_trait_item_definition(sema, &def)
|
||||
.unwrap_or_else(|| def_to_nav(sema.db, def))
|
||||
})
@ -34,54 +34,50 @@ pub(crate) fn goto_implementation(
|
|||
_ => 0,
|
||||
})?;
|
||||
let range = original_token.text_range();
|
||||
let navs = sema
|
||||
.descend_into_macros(original_token)
|
||||
.into_iter()
|
||||
.filter_map(|token| token.parent().and_then(ast::NameLike::cast))
|
||||
.filter_map(|node| match &node {
|
||||
ast::NameLike::Name(name) => {
|
||||
NameClass::classify(&sema, name).map(|class| match class {
|
||||
NameClass::Definition(it) | NameClass::ConstReference(it) => it,
|
||||
NameClass::PatFieldShorthand { local_def, field_ref: _ } => {
|
||||
Definition::Local(local_def)
|
||||
}
|
||||
})
|
||||
}
|
||||
ast::NameLike::NameRef(name_ref) => {
|
||||
NameRefClass::classify(&sema, name_ref).map(|class| match class {
|
||||
NameRefClass::Definition(def) => def,
|
||||
NameRefClass::FieldShorthand { local_ref, field_ref: _ } => {
|
||||
Definition::Local(local_ref)
|
||||
}
|
||||
})
|
||||
}
|
||||
ast::NameLike::Lifetime(_) => None,
|
||||
})
|
||||
.unique()
|
||||
.filter_map(|def| {
|
||||
let navs = match def {
|
||||
Definition::Trait(trait_) => impls_for_trait(&sema, trait_),
|
||||
Definition::Adt(adt) => impls_for_ty(&sema, adt.ty(sema.db)),
|
||||
Definition::TypeAlias(alias) => impls_for_ty(&sema, alias.ty(sema.db)),
|
||||
Definition::BuiltinType(builtin) => impls_for_ty(&sema, builtin.ty(sema.db)),
|
||||
Definition::Function(f) => {
|
||||
let assoc = f.as_assoc_item(sema.db)?;
|
||||
let name = assoc.name(sema.db)?;
|
||||
let trait_ = assoc.containing_trait_or_trait_impl(sema.db)?;
|
||||
impls_for_trait_item(&sema, trait_, name)
|
||||
let navs =
|
||||
sema.descend_into_macros(original_token)
|
||||
.into_iter()
|
||||
.filter_map(|token| token.parent().and_then(ast::NameLike::cast))
|
||||
.filter_map(|node| match &node {
|
||||
ast::NameLike::Name(name) => {
|
||||
NameClass::classify(&sema, name).and_then(|class| match class {
|
||||
NameClass::Definition(it) | NameClass::ConstReference(it) => Some(it),
|
||||
NameClass::PatFieldShorthand { .. } => None,
|
||||
})
|
||||
}
|
||||
Definition::Const(c) => {
|
||||
let assoc = c.as_assoc_item(sema.db)?;
|
||||
let name = assoc.name(sema.db)?;
|
||||
let trait_ = assoc.containing_trait_or_trait_impl(sema.db)?;
|
||||
impls_for_trait_item(&sema, trait_, name)
|
||||
}
|
||||
_ => return None,
|
||||
};
|
||||
Some(navs)
|
||||
})
|
||||
.flatten()
|
||||
.collect();
|
||||
ast::NameLike::NameRef(name_ref) => NameRefClass::classify(&sema, name_ref)
|
||||
.and_then(|class| match class {
|
||||
NameRefClass::Definition(def) => Some(def),
|
||||
NameRefClass::FieldShorthand { .. }
|
||||
| NameRefClass::ExternCrateShorthand { .. } => None,
|
||||
}),
|
||||
ast::NameLike::Lifetime(_) => None,
|
||||
})
|
||||
.unique()
|
||||
.filter_map(|def| {
|
||||
let navs = match def {
|
||||
Definition::Trait(trait_) => impls_for_trait(&sema, trait_),
|
||||
Definition::Adt(adt) => impls_for_ty(&sema, adt.ty(sema.db)),
|
||||
Definition::TypeAlias(alias) => impls_for_ty(&sema, alias.ty(sema.db)),
|
||||
Definition::BuiltinType(builtin) => impls_for_ty(&sema, builtin.ty(sema.db)),
|
||||
Definition::Function(f) => {
|
||||
let assoc = f.as_assoc_item(sema.db)?;
|
||||
let name = assoc.name(sema.db)?;
|
||||
let trait_ = assoc.containing_trait_or_trait_impl(sema.db)?;
|
||||
impls_for_trait_item(&sema, trait_, name)
|
||||
}
|
||||
Definition::Const(c) => {
|
||||
let assoc = c.as_assoc_item(sema.db)?;
|
||||
let name = assoc.name(sema.db)?;
|
||||
let trait_ = assoc.containing_trait_or_trait_impl(sema.db)?;
|
||||
impls_for_trait_item(&sema, trait_, name)
|
||||
}
|
||||
_ => return None,
|
||||
};
|
||||
Some(navs)
|
||||
})
|
||||
.flatten()
|
||||
.collect();
|
||||
|
||||
Some(RangeInfo { range, info: navs })
|
||||
}
@ -100,10 +100,7 @@ fn highlight_closure_captures(
|
|||
.flat_map(|local| {
|
||||
let usages = Definition::Local(local)
|
||||
.usages(sema)
|
||||
.set_scope(Some(SearchScope::file_range(FileRange {
|
||||
file_id,
|
||||
range: search_range,
|
||||
})))
|
||||
.in_scope(&SearchScope::file_range(FileRange { file_id, range: search_range }))
|
||||
.include_self_refs()
|
||||
.all()
|
||||
.references
@ -139,7 +136,7 @@ fn highlight_references(
|
|||
.iter()
|
||||
.filter_map(|&d| {
|
||||
d.usages(sema)
|
||||
.set_scope(Some(SearchScope::single_file(file_id)))
|
||||
.in_scope(&SearchScope::single_file(file_id))
|
||||
.include_self_refs()
|
||||
.all()
|
||||
.references
@ -183,7 +180,7 @@ fn highlight_references(
|
|||
.filter_map(|item| {
|
||||
Definition::from(item)
|
||||
.usages(sema)
|
||||
.set_scope(Some(SearchScope::file_range(FileRange {
|
||||
.set_scope(Some(&SearchScope::file_range(FileRange {
|
||||
file_id,
|
||||
range: trait_item_use_scope.text_range(),
|
||||
})))
@ -9,7 +9,7 @@ use either::Either;
|
|||
use hir::{db::DefDatabase, HasSource, LangItem, Semantics};
|
||||
use ide_db::{
|
||||
base_db::FileRange,
|
||||
defs::{Definition, IdentClass, OperatorClass},
|
||||
defs::{Definition, IdentClass, NameRefClass, OperatorClass},
|
||||
famous_defs::FamousDefs,
|
||||
helpers::pick_best_token,
|
||||
FxIndexSet, RootDatabase,
@ -186,7 +186,20 @@ fn hover_simple(
|
|||
// rendering poll is very confusing
|
||||
return None;
|
||||
}
|
||||
Some(class.definitions().into_iter().zip(iter::once(node).cycle()))
|
||||
if let IdentClass::NameRefClass(NameRefClass::ExternCrateShorthand {
|
||||
decl,
|
||||
..
|
||||
}) = class
|
||||
{
|
||||
return Some(vec![(Definition::ExternCrateDecl(decl), node)]);
|
||||
}
|
||||
Some(
|
||||
class
|
||||
.definitions()
|
||||
.into_iter()
|
||||
.zip(iter::once(node).cycle())
|
||||
.collect::<Vec<_>>(),
|
||||
)
|
||||
})
|
||||
.flatten()
|
||||
.unique_by(|&(def, _)| def)
@ -257,7 +257,7 @@ pub(super) fn keyword(
|
|||
let KeywordHint { description, keyword_mod, actions } = keyword_hints(sema, token, parent);
|
||||
|
||||
let doc_owner = find_std_module(&famous_defs, &keyword_mod)?;
|
||||
let docs = doc_owner.attrs(sema.db).docs()?;
|
||||
let docs = doc_owner.docs(sema.db)?;
|
||||
let markup = process_markup(
|
||||
sema.db,
|
||||
Definition::Module(doc_owner),
@ -472,6 +472,7 @@ pub(super) fn definition(
|
|||
}
|
||||
Definition::GenericParam(it) => label_and_docs(db, it),
|
||||
Definition::Label(it) => return Some(Markup::fenced_block(&it.name(db).display(db))),
|
||||
Definition::ExternCrateDecl(it) => label_and_docs(db, it),
|
||||
// FIXME: We should be able to show more info about these
|
||||
Definition::BuiltinAttr(it) => return render_builtin_attr(db, it),
|
||||
Definition::ToolModule(it) => return Some(Markup::fenced_block(&it.name(db))),
@ -620,7 +621,7 @@ where
|
|||
D: HasAttrs + HirDisplay,
|
||||
{
|
||||
let label = def.display(db).to_string();
|
||||
let docs = def.attrs(db).docs();
|
||||
let docs = def.docs(db);
|
||||
(label, docs)
|
||||
}
@ -645,7 +646,7 @@ where
|
|||
) {
|
||||
format_to!(label, "{layout}");
|
||||
}
|
||||
let docs = def.attrs(db).docs();
|
||||
let docs = def.docs(db);
|
||||
(label, docs)
|
||||
}
@ -677,7 +678,7 @@ where
|
|||
) {
|
||||
format_to!(label, "{layout}");
|
||||
}
|
||||
let docs = def.attrs(db).docs();
|
||||
let docs = def.docs(db);
|
||||
(label, docs)
|
||||
}
@ -696,7 +697,7 @@ where
|
|||
} else {
|
||||
def.display(db).to_string()
|
||||
};
|
||||
let docs = def.attrs(db).docs();
|
||||
let docs = def.docs(db);
|
||||
(label, docs)
|
||||
}
@ -727,14 +728,14 @@ fn builtin(famous_defs: &FamousDefs<'_, '_>, builtin: hir::BuiltinType) -> Optio
|
|||
// std exposes prim_{} modules with docstrings on the root to document the builtins
|
||||
let primitive_mod = format!("prim_{}", builtin.name().display(famous_defs.0.db));
|
||||
let doc_owner = find_std_module(famous_defs, &primitive_mod)?;
|
||||
let docs = doc_owner.attrs(famous_defs.0.db).docs()?;
|
||||
let docs = doc_owner.docs(famous_defs.0.db)?;
|
||||
markup(Some(docs.into()), builtin.name().display(famous_defs.0.db).to_string(), None)
|
||||
}
|
||||
|
||||
fn find_std_module(famous_defs: &FamousDefs<'_, '_>, name: &str) -> Option<hir::Module> {
|
||||
let db = famous_defs.0.db;
|
||||
let std_crate = famous_defs.std()?;
|
||||
let std_root_module = std_crate.root_module(db);
|
||||
let std_root_module = std_crate.root_module();
|
||||
std_root_module.children(db).find(|module| {
|
||||
module.name(db).map_or(false, |module| module.display(db).to_string() == name)
|
||||
})
@ -1616,6 +1616,9 @@ fn test_hover_extern_crate() {
|
|||
check(
|
||||
r#"
|
||||
//- /main.rs crate:main deps:std
|
||||
//! Crate docs
|
||||
|
||||
/// Decl docs!
|
||||
extern crate st$0d;
|
||||
//- /std/lib.rs crate:std
|
||||
//! Standard library for this test
@ -1624,23 +1627,32 @@ extern crate st$0d;
|
|||
//! abc123
|
||||
"#,
|
||||
expect![[r#"
|
||||
*std*
|
||||
*std*
|
||||
|
||||
```rust
|
||||
extern crate std
|
||||
```
|
||||
```rust
|
||||
main
|
||||
```
|
||||
|
||||
---
|
||||
```rust
|
||||
extern crate std
|
||||
```
|
||||
|
||||
Standard library for this test
|
||||
---
|
||||
|
||||
Printed?
|
||||
abc123
|
||||
"#]],
|
||||
Decl docs!
|
||||
|
||||
Standard library for this test
|
||||
|
||||
Printed?
|
||||
abc123
|
||||
"#]],
|
||||
);
|
||||
check(
|
||||
r#"
|
||||
//- /main.rs crate:main deps:std
|
||||
//! Crate docs
|
||||
|
||||
/// Decl docs!
|
||||
extern crate std as ab$0c;
|
||||
//- /std/lib.rs crate:std
|
||||
//! Standard library for this test
@ -1649,19 +1661,25 @@ extern crate std as ab$0c;
|
|||
//! abc123
|
||||
"#,
|
||||
expect![[r#"
|
||||
*abc*
|
||||
*abc*
|
||||
|
||||
```rust
|
||||
extern crate std
|
||||
```
|
||||
```rust
|
||||
main
|
||||
```
|
||||
|
||||
---
|
||||
```rust
|
||||
extern crate std as abc
|
||||
```
|
||||
|
||||
Standard library for this test
|
||||
---
|
||||
|
||||
Printed?
|
||||
abc123
|
||||
"#]],
|
||||
Decl docs!
|
||||
|
||||
Standard library for this test
|
||||
|
||||
Printed?
|
||||
abc123
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
@ -127,7 +127,7 @@ pub use ide_db::{
|
|||
label::Label,
|
||||
line_index::{LineCol, LineIndex},
|
||||
search::{ReferenceCategory, SearchScope},
|
||||
source_change::{FileSystemEdit, SourceChange},
|
||||
source_change::{FileSystemEdit, SnippetEdit, SourceChange},
|
||||
symbol_index::Query,
|
||||
RootDatabase, SymbolKind,
|
||||
};
@ -177,6 +177,17 @@ pub(crate) fn def_to_moniker(
|
|||
});
|
||||
}
|
||||
|
||||
// Qualify locals/parameters by their parent definition name.
|
||||
if let Definition::Local(it) = def {
|
||||
let parent_name = it.parent(db).name(db);
|
||||
if let Some(name) = parent_name {
|
||||
description.push(MonikerDescriptor {
|
||||
name: name.display(db).to_string(),
|
||||
desc: MonikerDescriptorKind::Method,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
let name_desc = match def {
|
||||
// These are handled by top-level guard (for performance).
|
||||
Definition::GenericParam(_)
@ -247,6 +258,10 @@ pub(crate) fn def_to_moniker(
|
|||
name: s.name(db).display(db).to_string(),
|
||||
desc: MonikerDescriptorKind::Meta,
|
||||
},
|
||||
Definition::ExternCrateDecl(m) => MonikerDescriptor {
|
||||
name: m.name(db).display(db).to_string(),
|
||||
desc: MonikerDescriptorKind::Namespace,
|
||||
},
|
||||
};
|
||||
|
||||
description.push(name_desc);
@ -102,7 +102,7 @@ impl NavigationTarget {
|
|||
full_range,
|
||||
SymbolKind::Module,
|
||||
);
|
||||
res.docs = module.attrs(db).docs();
|
||||
res.docs = module.docs(db);
|
||||
res.description = Some(module.display(db).to_string());
|
||||
return res;
|
||||
}
@ -217,6 +217,7 @@ impl TryToNav for Definition {
|
|||
Definition::Trait(it) => it.try_to_nav(db),
|
||||
Definition::TraitAlias(it) => it.try_to_nav(db),
|
||||
Definition::TypeAlias(it) => it.try_to_nav(db),
|
||||
Definition::ExternCrateDecl(it) => Some(it.try_to_nav(db)?),
|
||||
Definition::BuiltinType(_) => None,
|
||||
Definition::ToolModule(_) => None,
|
||||
Definition::BuiltinAttr(_) => None,
@ -375,6 +376,30 @@ impl TryToNav for hir::Impl {
|
|||
}
|
||||
}
|
||||
|
||||
impl TryToNav for hir::ExternCrateDecl {
|
||||
fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
|
||||
let src = self.source(db)?;
|
||||
let InFile { file_id, value } = src;
|
||||
let focus = value
|
||||
.rename()
|
||||
.map_or_else(|| value.name_ref().map(Either::Left), |it| it.name().map(Either::Right));
|
||||
let (file_id, full_range, focus_range) =
|
||||
orig_range_with_focus(db, file_id, value.syntax(), focus);
|
||||
let mut res = NavigationTarget::from_syntax(
|
||||
file_id,
|
||||
self.alias_or_name(db).unwrap_or_else(|| self.name(db)).to_smol_str(),
|
||||
focus_range,
|
||||
full_range,
|
||||
SymbolKind::Module,
|
||||
);
|
||||
|
||||
res.docs = self.docs(db);
|
||||
res.description = Some(self.display(db).to_string());
|
||||
res.container_name = container_name(db, *self);
|
||||
Some(res)
|
||||
}
|
||||
}
|
||||
|
||||
impl TryToNav for hir::Field {
|
||||
fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
|
||||
let src = self.source(db)?;
@ -74,7 +74,7 @@ pub(crate) fn find_all_refs(
|
|||
}
|
||||
});
|
||||
let mut usages =
|
||||
def.usages(sema).set_scope(search_scope.clone()).include_self_refs().all();
|
||||
def.usages(sema).set_scope(search_scope.as_ref()).include_self_refs().all();
|
||||
|
||||
if literal_search {
|
||||
retain_adt_literal_usages(&mut usages, def, sema);
@ -137,6 +137,9 @@ pub(crate) fn find_defs<'a>(
|
|||
NameRefClass::FieldShorthand { local_ref, field_ref: _ } => {
|
||||
Definition::Local(local_ref)
|
||||
}
|
||||
NameRefClass::ExternCrateShorthand { decl, .. } => {
|
||||
Definition::ExternCrateDecl(decl)
|
||||
}
|
||||
}
|
||||
}
|
||||
ast::NameLike::Name(name) => match NameClass::classify(sema, &name)? {
@ -145,7 +145,14 @@ fn find_definitions(
|
|||
if name
|
||||
.syntax()
|
||||
.parent()
|
||||
.map_or(false, |it| ast::Rename::can_cast(it.kind())) =>
|
||||
.map_or(false, |it| ast::Rename::can_cast(it.kind()))
|
||||
// FIXME: uncomment this once we resolve to usages to extern crate declarations
|
||||
// && name
|
||||
// .syntax()
|
||||
// .ancestors()
|
||||
// .nth(2)
|
||||
// .map_or(true, |it| !ast::ExternCrate::can_cast(it.kind()))
|
||||
=>
|
||||
{
|
||||
bail!("Renaming aliases is currently unsupported")
|
||||
}
@ -165,7 +172,12 @@ fn find_definitions(
|
|||
NameRefClass::FieldShorthand { local_ref, field_ref: _ } => {
|
||||
Definition::Local(local_ref)
|
||||
}
|
||||
NameRefClass::ExternCrateShorthand { decl, .. } => {
|
||||
Definition::ExternCrateDecl(decl)
|
||||
}
|
||||
})
|
||||
// FIXME: uncomment this once we resolve to usages to extern crate declarations
|
||||
.filter(|def| !matches!(def, Definition::ExternCrateDecl(..)))
|
||||
.ok_or_else(|| format_err!("No references found at position"))
|
||||
.and_then(|def| {
|
||||
// if the name differs from the definitions name it has to be an alias
@ -367,7 +379,7 @@ mod tests {
|
|||
let mut file_id: Option<FileId> = None;
|
||||
for edit in source_change.source_file_edits {
|
||||
file_id = Some(edit.0);
|
||||
for indel in edit.1.into_iter() {
|
||||
for indel in edit.1 .0.into_iter() {
|
||||
text_edit_builder.replace(indel.delete, indel.insert);
|
||||
}
|
||||
}
@ -895,14 +907,17 @@ mod foo$0;
|
|||
source_file_edits: {
|
||||
FileId(
|
||||
1,
|
||||
): TextEdit {
|
||||
indels: [
|
||||
Indel {
|
||||
insert: "foo2",
|
||||
delete: 4..7,
|
||||
},
|
||||
],
|
||||
},
|
||||
): (
|
||||
TextEdit {
|
||||
indels: [
|
||||
Indel {
|
||||
insert: "foo2",
|
||||
delete: 4..7,
|
||||
},
|
||||
],
|
||||
},
|
||||
None,
|
||||
),
|
||||
},
|
||||
file_system_edits: [
|
||||
MoveFile {
@ -944,24 +959,30 @@ use crate::foo$0::FooContent;
|
|||
source_file_edits: {
|
||||
FileId(
|
||||
0,
|
||||
): TextEdit {
|
||||
indels: [
|
||||
Indel {
|
||||
insert: "quux",
|
||||
delete: 8..11,
|
||||
},
|
||||
],
|
||||
},
|
||||
): (
|
||||
TextEdit {
|
||||
indels: [
|
||||
Indel {
|
||||
insert: "quux",
|
||||
delete: 8..11,
|
||||
},
|
||||
],
|
||||
},
|
||||
None,
|
||||
),
|
||||
FileId(
|
||||
2,
|
||||
): TextEdit {
|
||||
indels: [
|
||||
Indel {
|
||||
insert: "quux",
|
||||
delete: 11..14,
|
||||
},
|
||||
],
|
||||
},
|
||||
): (
|
||||
TextEdit {
|
||||
indels: [
|
||||
Indel {
|
||||
insert: "quux",
|
||||
delete: 11..14,
|
||||
},
|
||||
],
|
||||
},
|
||||
None,
|
||||
),
|
||||
},
|
||||
file_system_edits: [
|
||||
MoveFile {
@ -997,14 +1018,17 @@ mod fo$0o;
|
|||
source_file_edits: {
|
||||
FileId(
|
||||
0,
|
||||
): TextEdit {
|
||||
indels: [
|
||||
Indel {
|
||||
insert: "foo2",
|
||||
delete: 4..7,
|
||||
},
|
||||
],
|
||||
},
|
||||
): (
|
||||
TextEdit {
|
||||
indels: [
|
||||
Indel {
|
||||
insert: "foo2",
|
||||
delete: 4..7,
|
||||
},
|
||||
],
|
||||
},
|
||||
None,
|
||||
),
|
||||
},
|
||||
file_system_edits: [
|
||||
MoveDir {
@ -1047,14 +1071,17 @@ mod outer { mod fo$0o; }
|
|||
source_file_edits: {
|
||||
FileId(
|
||||
0,
|
||||
): TextEdit {
|
||||
indels: [
|
||||
Indel {
|
||||
insert: "bar",
|
||||
delete: 16..19,
|
||||
},
|
||||
],
|
||||
},
|
||||
): (
|
||||
TextEdit {
|
||||
indels: [
|
||||
Indel {
|
||||
insert: "bar",
|
||||
delete: 16..19,
|
||||
},
|
||||
],
|
||||
},
|
||||
None,
|
||||
),
|
||||
},
|
||||
file_system_edits: [
|
||||
MoveFile {
@ -1120,24 +1147,30 @@ pub mod foo$0;
|
|||
source_file_edits: {
|
||||
FileId(
|
||||
0,
|
||||
): TextEdit {
|
||||
indels: [
|
||||
Indel {
|
||||
insert: "foo2",
|
||||
delete: 27..30,
|
||||
},
|
||||
],
|
||||
},
|
||||
): (
|
||||
TextEdit {
|
||||
indels: [
|
||||
Indel {
|
||||
insert: "foo2",
|
||||
delete: 27..30,
|
||||
},
|
||||
],
|
||||
},
|
||||
None,
|
||||
),
|
||||
FileId(
|
||||
1,
|
||||
): TextEdit {
|
||||
indels: [
|
||||
Indel {
|
||||
insert: "foo2",
|
||||
delete: 8..11,
|
||||
},
|
||||
],
|
||||
},
|
||||
): (
|
||||
TextEdit {
|
||||
indels: [
|
||||
Indel {
|
||||
insert: "foo2",
|
||||
delete: 8..11,
|
||||
},
|
||||
],
|
||||
},
|
||||
None,
|
||||
),
|
||||
},
|
||||
file_system_edits: [
|
||||
MoveFile {
@ -1187,14 +1220,17 @@ mod quux;
|
|||
source_file_edits: {
|
||||
FileId(
|
||||
0,
|
||||
): TextEdit {
|
||||
indels: [
|
||||
Indel {
|
||||
insert: "foo2",
|
||||
delete: 4..7,
|
||||
},
|
||||
],
|
||||
},
|
||||
): (
|
||||
TextEdit {
|
||||
indels: [
|
||||
Indel {
|
||||
insert: "foo2",
|
||||
delete: 4..7,
|
||||
},
|
||||
],
|
||||
},
|
||||
None,
|
||||
),
|
||||
},
|
||||
file_system_edits: [
|
||||
MoveFile {
@ -1325,18 +1361,21 @@ pub fn baz() {}
|
|||
source_file_edits: {
|
||||
FileId(
|
||||
0,
|
||||
): TextEdit {
|
||||
indels: [
|
||||
Indel {
|
||||
insert: "r#fn",
|
||||
delete: 4..7,
|
||||
},
|
||||
Indel {
|
||||
insert: "r#fn",
|
||||
delete: 22..25,
|
||||
},
|
||||
],
|
||||
},
|
||||
): (
|
||||
TextEdit {
|
||||
indels: [
|
||||
Indel {
|
||||
insert: "r#fn",
|
||||
delete: 4..7,
|
||||
},
|
||||
Indel {
|
||||
insert: "r#fn",
|
||||
delete: 22..25,
|
||||
},
|
||||
],
|
||||
},
|
||||
None,
|
||||
),
|
||||
},
|
||||
file_system_edits: [
|
||||
MoveFile {
@ -1395,18 +1434,21 @@ pub fn baz() {}
|
|||
source_file_edits: {
|
||||
FileId(
|
||||
0,
|
||||
): TextEdit {
|
||||
indels: [
|
||||
Indel {
|
||||
insert: "foo",
|
||||
delete: 4..8,
|
||||
},
|
||||
Indel {
|
||||
insert: "foo",
|
||||
delete: 23..27,
|
||||
},
|
||||
],
|
||||
},
|
||||
): (
|
||||
TextEdit {
|
||||
indels: [
|
||||
Indel {
|
||||
insert: "foo",
|
||||
delete: 4..8,
|
||||
},
|
||||
Indel {
|
||||
insert: "foo",
|
||||
delete: 23..27,
|
||||
},
|
||||
],
|
||||
},
|
||||
None,
|
||||
),
|
||||
},
|
||||
file_system_edits: [
|
||||
MoveFile {
@ -2487,4 +2529,109 @@ fn main() {
|
|||
",
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn extern_crate() {
|
||||
check_prepare(
|
||||
r"
|
||||
//- /lib.rs crate:main deps:foo
|
||||
extern crate foo$0;
|
||||
use foo as qux;
|
||||
//- /foo.rs crate:foo
|
||||
",
|
||||
expect![[r#"No references found at position"#]],
|
||||
);
|
||||
// FIXME: replace above check_prepare with this once we resolve to usages to extern crate declarations
|
||||
// check(
|
||||
// "bar",
|
||||
// r"
|
||||
// //- /lib.rs crate:main deps:foo
|
||||
// extern crate foo$0;
|
||||
// use foo as qux;
|
||||
// //- /foo.rs crate:foo
|
||||
// ",
|
||||
// r"
|
||||
// extern crate foo as bar;
|
||||
// use bar as qux;
|
||||
// ",
|
||||
// );
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn extern_crate_rename() {
|
||||
check_prepare(
|
||||
r"
|
||||
//- /lib.rs crate:main deps:foo
|
||||
extern crate foo as qux$0;
|
||||
use qux as frob;
|
||||
//- /foo.rs crate:foo
|
||||
",
|
||||
expect!["Renaming aliases is currently unsupported"],
|
||||
);
|
||||
// FIXME: replace above check_prepare with this once we resolve to usages to extern crate
|
||||
// declarations
|
||||
// check(
|
||||
// "bar",
|
||||
// r"
|
||||
// //- /lib.rs crate:main deps:foo
|
||||
// extern crate foo as qux$0;
|
||||
// use qux as frob;
|
||||
// //- /foo.rs crate:foo
|
||||
// ",
|
||||
// r"
|
||||
// extern crate foo as bar;
|
||||
// use bar as frob;
|
||||
// ",
|
||||
// );
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn extern_crate_self() {
|
||||
check_prepare(
|
||||
r"
|
||||
extern crate self$0;
|
||||
use self as qux;
|
||||
",
|
||||
expect!["No references found at position"],
|
||||
);
|
||||
// FIXME: replace above check_prepare with this once we resolve to usages to extern crate declarations
|
||||
// check(
|
||||
// "bar",
|
||||
// r"
|
||||
// extern crate self$0;
|
||||
// use self as qux;
|
||||
// ",
|
||||
// r"
|
||||
// extern crate self as bar;
|
||||
// use self as qux;
|
||||
// ",
|
||||
// );
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn extern_crate_self_rename() {
|
||||
check_prepare(
|
||||
r"
|
||||
//- /lib.rs crate:main deps:foo
|
||||
extern crate self as qux$0;
|
||||
use qux as frob;
|
||||
//- /foo.rs crate:foo
|
||||
",
|
||||
expect!["Renaming aliases is currently unsupported"],
|
||||
);
|
||||
// FIXME: replace above check_prepare with this once we resolve to usages to extern crate declarations
|
||||
// check(
|
||||
// "bar",
|
||||
// r"
|
||||
// //- /lib.rs crate:main deps:foo
|
||||
// extern crate self as qux$0;
|
||||
// use qux as frob;
|
||||
// //- /foo.rs crate:foo
|
||||
// ",
|
||||
// r"
|
||||
// extern crate self as bar;
|
||||
// use bar as frob;
|
||||
// ",
|
||||
// );
|
||||
}
|
||||
}
@ -232,7 +232,7 @@ fn find_related_tests(
|
|||
for def in defs {
|
||||
let defs = def
|
||||
.usages(sema)
|
||||
.set_scope(search_scope.clone())
|
||||
.set_scope(search_scope.as_ref())
|
||||
.all()
|
||||
.references
|
||||
.into_values()
@ -309,7 +309,7 @@ pub(crate) fn runnable_fn(
|
|||
) -> Option<Runnable> {
|
||||
let name = def.name(sema.db).to_smol_str();
|
||||
|
||||
let root = def.module(sema.db).krate().root_module(sema.db);
|
||||
let root = def.module(sema.db).krate().root_module();
|
||||
|
||||
let kind = if name == "main" && def.module(sema.db) == root {
|
||||
RunnableKind::Bin
@ -126,14 +126,17 @@ mod tests {
|
|||
source_file_edits: {
|
||||
FileId(
|
||||
0,
|
||||
): TextEdit {
|
||||
indels: [
|
||||
Indel {
|
||||
insert: "3",
|
||||
delete: 33..34,
|
||||
},
|
||||
],
|
||||
},
|
||||
): (
|
||||
TextEdit {
|
||||
indels: [
|
||||
Indel {
|
||||
insert: "3",
|
||||
delete: 33..34,
|
||||
},
|
||||
],
|
||||
},
|
||||
None,
|
||||
),
|
||||
},
|
||||
file_system_edits: [],
|
||||
is_snippet: false,
@ -163,24 +166,30 @@ mod tests {
|
|||
source_file_edits: {
|
||||
FileId(
|
||||
0,
|
||||
): TextEdit {
|
||||
indels: [
|
||||
Indel {
|
||||
insert: "3",
|
||||
delete: 33..34,
|
||||
},
|
||||
],
|
||||
},
|
||||
): (
|
||||
TextEdit {
|
||||
indels: [
|
||||
Indel {
|
||||
insert: "3",
|
||||
delete: 33..34,
|
||||
},
|
||||
],
|
||||
},
|
||||
None,
|
||||
),
|
||||
FileId(
|
||||
1,
|
||||
): TextEdit {
|
||||
indels: [
|
||||
Indel {
|
||||
insert: "3",
|
||||
delete: 11..12,
|
||||
},
|
||||
],
|
||||
},
|
||||
): (
|
||||
TextEdit {
|
||||
indels: [
|
||||
Indel {
|
||||
insert: "3",
|
||||
delete: 11..12,
|
||||
},
|
||||
],
|
||||
},
|
||||
None,
|
||||
),
|
||||
},
|
||||
file_system_edits: [],
|
||||
is_snippet: false,