2020-02-18 17:35:10 +00:00
|
|
|
//! See `Semantics`.
|
|
|
|
|
2020-02-29 17:32:18 +00:00
|
|
|
mod source_to_def;
|
|
|
|
|
2023-12-03 19:20:38 +00:00
|
|
|
use std::{
|
|
|
|
cell::RefCell,
|
|
|
|
fmt, iter, mem,
|
2023-12-08 14:26:38 +00:00
|
|
|
ops::{self, ControlFlow, Not},
|
2023-12-03 19:20:38 +00:00
|
|
|
};
|
2020-02-18 17:35:10 +00:00
|
|
|
|
2020-08-13 14:25:38 +00:00
|
|
|
use base_db::{FileId, FileRange};
|
2022-12-21 20:34:49 +00:00
|
|
|
use either::Either;
|
2020-02-18 17:35:10 +00:00
|
|
|
use hir_def::{
|
2023-04-06 17:36:25 +00:00
|
|
|
hir::Expr,
|
2023-04-17 15:31:39 +00:00
|
|
|
lower::LowerCtx,
|
2023-05-11 06:52:13 +00:00
|
|
|
nameres::MacroSubNs,
|
2020-08-15 16:50:41 +00:00
|
|
|
resolver::{self, HasResolver, Resolver, TypeNs},
|
2022-03-20 13:38:16 +00:00
|
|
|
type_ref::Mutability,
|
2023-12-08 15:36:41 +00:00
|
|
|
AsMacroCall, DefWithBodyId, FunctionId, MacroId, TraitId, VariantId,
|
2020-02-18 17:35:10 +00:00
|
|
|
};
|
2023-11-25 13:39:55 +00:00
|
|
|
use hir_expand::{
|
2023-12-11 19:26:50 +00:00
|
|
|
attrs::collect_attrs, db::ExpandDatabase, files::InRealFile, name::AsName, ExpansionInfo,
|
|
|
|
InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt,
|
2023-11-25 13:39:55 +00:00
|
|
|
};
|
2020-04-06 14:58:16 +00:00
|
|
|
use itertools::Itertools;
|
2020-08-12 16:26:51 +00:00
|
|
|
use rustc_hash::{FxHashMap, FxHashSet};
|
2021-08-28 19:18:56 +00:00
|
|
|
use smallvec::{smallvec, SmallVec};
|
2023-11-24 15:38:48 +00:00
|
|
|
use stdx::TupleExt;
|
2020-12-13 21:13:16 +00:00
|
|
|
use syntax::{
|
2021-09-18 11:19:29 +00:00
|
|
|
algo::skip_trivia_token,
|
2023-12-11 19:26:50 +00:00
|
|
|
ast::{self, HasAttrs as _, HasGenericParams, HasLoopBody, IsString as _},
|
2023-12-05 14:42:39 +00:00
|
|
|
match_ast, AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken,
|
|
|
|
TextRange, TextSize,
|
2020-12-13 21:13:16 +00:00
|
|
|
};
|
2020-02-18 17:35:10 +00:00
|
|
|
|
|
|
|
use crate::{
|
|
|
|
db::HirDatabase,
|
2020-02-29 17:32:18 +00:00
|
|
|
semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
|
2022-01-01 19:31:04 +00:00
|
|
|
source_analyzer::{resolve_hir_path, SourceAnalyzer},
|
2022-12-20 19:33:27 +00:00
|
|
|
Access, Adjust, Adjustment, AutoBorrow, BindingMode, BuiltinAttr, Callable, ConstParam, Crate,
|
2022-11-04 16:11:15 +00:00
|
|
|
DeriveHelper, Field, Function, HasSource, HirFileId, Impl, InFile, Label, LifetimeParam, Local,
|
2023-12-13 03:22:42 +00:00
|
|
|
Macro, Module, ModuleDef, Name, OverloadedDeref, Path, ScopeDef, Struct, ToolModule, Trait,
|
2024-01-06 14:04:58 +00:00
|
|
|
TupleField, Type, TypeAlias, TypeParam, VariantDef,
|
2020-02-18 17:35:10 +00:00
|
|
|
};
|
|
|
|
|
2023-12-03 19:20:38 +00:00
|
|
|
pub enum DescendPreference {
|
|
|
|
SameText,
|
|
|
|
SameKind,
|
|
|
|
None,
|
|
|
|
}
|
|
|
|
|
2023-12-05 14:42:39 +00:00
|
|
|
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
|
2020-03-05 10:08:31 +00:00
|
|
|
pub enum PathResolution {
|
|
|
|
/// An item
|
|
|
|
Def(ModuleDef),
|
|
|
|
/// A local binding (only value namespace)
|
|
|
|
Local(Local),
|
2021-06-08 14:50:10 +00:00
|
|
|
/// A type parameter
|
2020-03-05 10:08:31 +00:00
|
|
|
TypeParam(TypeParam),
|
2021-06-08 14:50:10 +00:00
|
|
|
/// A const parameter
|
2021-01-01 09:06:42 +00:00
|
|
|
ConstParam(ConstParam),
|
2020-12-17 11:36:15 +00:00
|
|
|
SelfType(Impl),
|
2021-12-03 15:32:14 +00:00
|
|
|
BuiltinAttr(BuiltinAttr),
|
2021-12-03 16:15:19 +00:00
|
|
|
ToolModule(ToolModule),
|
2022-07-24 12:05:37 +00:00
|
|
|
DeriveHelper(DeriveHelper),
|
2020-04-27 22:40:32 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
impl PathResolution {
|
2022-02-03 11:43:15 +00:00
|
|
|
pub(crate) fn in_type_ns(&self) -> Option<TypeNs> {
|
2020-04-27 22:40:32 +00:00
|
|
|
match self {
|
2020-04-29 22:05:03 +00:00
|
|
|
PathResolution::Def(ModuleDef::Adt(adt)) => Some(TypeNs::AdtId((*adt).into())),
|
2020-04-27 22:40:32 +00:00
|
|
|
PathResolution::Def(ModuleDef::BuiltinType(builtin)) => {
|
2021-02-11 18:52:33 +00:00
|
|
|
Some(TypeNs::BuiltinType((*builtin).into()))
|
2020-04-27 22:40:32 +00:00
|
|
|
}
|
2021-06-17 15:37:14 +00:00
|
|
|
PathResolution::Def(
|
|
|
|
ModuleDef::Const(_)
|
|
|
|
| ModuleDef::Variant(_)
|
2022-03-08 22:51:48 +00:00
|
|
|
| ModuleDef::Macro(_)
|
2021-06-17 15:37:14 +00:00
|
|
|
| ModuleDef::Function(_)
|
|
|
|
| ModuleDef::Module(_)
|
|
|
|
| ModuleDef::Static(_)
|
2023-03-03 15:24:07 +00:00
|
|
|
| ModuleDef::Trait(_)
|
|
|
|
| ModuleDef::TraitAlias(_),
|
2021-06-17 15:37:14 +00:00
|
|
|
) => None,
|
2020-04-27 22:40:32 +00:00
|
|
|
PathResolution::Def(ModuleDef::TypeAlias(alias)) => {
|
2020-04-29 22:05:03 +00:00
|
|
|
Some(TypeNs::TypeAliasId((*alias).into()))
|
2020-04-27 22:40:32 +00:00
|
|
|
}
|
2021-12-03 15:32:14 +00:00
|
|
|
PathResolution::BuiltinAttr(_)
|
2021-12-03 16:15:19 +00:00
|
|
|
| PathResolution::ToolModule(_)
|
2021-12-03 15:32:14 +00:00
|
|
|
| PathResolution::Local(_)
|
2022-07-24 12:05:37 +00:00
|
|
|
| PathResolution::DeriveHelper(_)
|
2021-12-03 15:32:14 +00:00
|
|
|
| PathResolution::ConstParam(_) => None,
|
2020-04-29 22:05:03 +00:00
|
|
|
PathResolution::TypeParam(param) => Some(TypeNs::GenericParam((*param).into())),
|
|
|
|
PathResolution::SelfType(impl_def) => Some(TypeNs::SelfType((*impl_def).into())),
|
2020-04-27 22:40:32 +00:00
|
|
|
}
|
|
|
|
}
|
2020-03-05 10:08:31 +00:00
|
|
|
}
|
|
|
|
|
2021-08-02 18:42:25 +00:00
|
|
|
#[derive(Debug)]
|
|
|
|
pub struct TypeInfo {
|
|
|
|
/// The original type of the expression or pattern.
|
2021-08-03 15:28:51 +00:00
|
|
|
pub original: Type,
|
|
|
|
/// The adjusted type, if an adjustment happened.
|
|
|
|
pub adjusted: Option<Type>,
|
2021-08-02 18:42:25 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
impl TypeInfo {
|
2021-08-03 15:28:51 +00:00
|
|
|
pub fn original(self) -> Type {
|
|
|
|
self.original
|
2021-08-02 18:42:25 +00:00
|
|
|
}
|
|
|
|
|
2021-08-03 15:28:51 +00:00
|
|
|
pub fn has_adjustment(&self) -> bool {
|
|
|
|
self.adjusted.is_some()
|
2021-08-02 18:42:25 +00:00
|
|
|
}
|
|
|
|
|
2021-08-03 15:28:51 +00:00
|
|
|
/// The adjusted type, or the original in case no adjustments occurred.
|
|
|
|
pub fn adjusted(self) -> Type {
|
|
|
|
self.adjusted.unwrap_or(self.original)
|
2021-08-02 18:42:25 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-02-18 17:35:10 +00:00
|
|
|
/// Primary API to get semantic information, like types, from syntax trees.
|
|
|
|
pub struct Semantics<'db, DB> {
|
|
|
|
pub db: &'db DB,
|
2020-07-01 11:32:18 +00:00
|
|
|
imp: SemanticsImpl<'db>,
|
2020-07-01 09:43:36 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
pub struct SemanticsImpl<'db> {
|
|
|
|
pub db: &'db dyn HirDatabase,
|
2020-02-29 17:32:18 +00:00
|
|
|
s2d_cache: RefCell<SourceToDefCache>,
|
2023-11-28 09:55:21 +00:00
|
|
|
/// Rootnode to HirFileId cache
|
2020-02-18 17:35:10 +00:00
|
|
|
cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
|
2023-12-08 14:26:38 +00:00
|
|
|
// These 2 caches are mainly useful for semantic highlighting as nothing else descends a lot of tokens
|
|
|
|
expansion_info_cache: RefCell<FxHashMap<MacroFileId, ExpansionInfo>>,
|
2023-11-28 09:55:21 +00:00
|
|
|
/// MacroCall to its expansion's MacroFileId cache
|
|
|
|
macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, MacroFileId>>,
|
2020-02-18 17:35:10 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
impl<DB> fmt::Debug for Semantics<'_, DB> {
|
|
|
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
|
|
write!(f, "Semantics {{ ... }}")
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
impl<'db, DB> ops::Deref for Semantics<'db, DB> {
|
|
|
|
type Target = SemanticsImpl<'db>;
|
|
|
|
|
|
|
|
fn deref(&self) -> &Self::Target {
|
|
|
|
&self.imp
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-02-18 17:35:10 +00:00
|
|
|
impl<'db, DB: HirDatabase> Semantics<'db, DB> {
|
2022-07-20 13:02:08 +00:00
|
|
|
pub fn new(db: &DB) -> Semantics<'_, DB> {
|
2020-07-01 09:43:36 +00:00
|
|
|
let impl_ = SemanticsImpl::new(db);
|
2020-07-01 11:32:18 +00:00
|
|
|
Semantics { db, imp: impl_ }
|
2020-07-01 09:43:36 +00:00
|
|
|
}
|
|
|
|
|
2021-08-20 11:49:28 +00:00
|
|
|
pub fn hir_file_for(&self, syntax_node: &SyntaxNode) -> HirFileId {
|
2021-12-21 12:38:58 +00:00
|
|
|
self.imp.find_file(syntax_node).file_id
|
2021-08-20 11:49:28 +00:00
|
|
|
}
|
|
|
|
|
2021-01-30 15:19:21 +00:00
|
|
|
pub fn token_ancestors_with_macros(
|
|
|
|
&self,
|
|
|
|
token: SyntaxToken,
|
|
|
|
) -> impl Iterator<Item = SyntaxNode> + '_ {
|
|
|
|
token.parent().into_iter().flat_map(move |it| self.ancestors_with_macros(it))
|
|
|
|
}
|
2021-09-13 23:59:45 +00:00
|
|
|
|
2021-08-22 12:31:37 +00:00
|
|
|
/// Find an AstNode by offset inside SyntaxNode, if it is inside *Macrofile*,
|
2020-07-01 09:43:36 +00:00
|
|
|
/// search up until it is of the target AstNode type
|
|
|
|
pub fn find_node_at_offset_with_macros<N: AstNode>(
|
|
|
|
&self,
|
|
|
|
node: &SyntaxNode,
|
|
|
|
offset: TextSize,
|
|
|
|
) -> Option<N> {
|
2020-07-01 11:32:18 +00:00
|
|
|
self.imp.ancestors_at_offset_with_macros(node, offset).find_map(N::cast)
|
2020-07-01 09:43:36 +00:00
|
|
|
}
|
|
|
|
|
2021-08-22 12:31:37 +00:00
|
|
|
/// Find an AstNode by offset inside SyntaxNode, if it is inside *MacroCall*,
|
2020-07-01 09:43:36 +00:00
|
|
|
/// descend it and find again
|
|
|
|
pub fn find_node_at_offset_with_descend<N: AstNode>(
|
|
|
|
&self,
|
|
|
|
node: &SyntaxNode,
|
|
|
|
offset: TextSize,
|
|
|
|
) -> Option<N> {
|
2021-08-28 22:36:26 +00:00
|
|
|
self.imp.descend_node_at_offset(node, offset).flatten().find_map(N::cast)
|
2020-07-01 09:43:36 +00:00
|
|
|
}
|
|
|
|
|
2021-08-28 19:37:27 +00:00
|
|
|
/// Find an AstNode by offset inside SyntaxNode, if it is inside *MacroCall*,
|
|
|
|
/// descend it and find again
|
|
|
|
pub fn find_nodes_at_offset_with_descend<'slf, N: AstNode + 'slf>(
|
|
|
|
&'slf self,
|
|
|
|
node: &SyntaxNode,
|
|
|
|
offset: TextSize,
|
|
|
|
) -> impl Iterator<Item = N> + 'slf {
|
2021-08-28 22:36:26 +00:00
|
|
|
self.imp.descend_node_at_offset(node, offset).filter_map(|mut it| it.find_map(N::cast))
|
2021-08-28 19:37:27 +00:00
|
|
|
}
|
|
|
|
|
2022-08-05 12:16:36 +00:00
|
|
|
pub fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option<Function> {
|
|
|
|
self.imp.resolve_await_to_poll(await_expr).map(Function::from)
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn resolve_prefix_expr(&self, prefix_expr: &ast::PrefixExpr) -> Option<Function> {
|
|
|
|
self.imp.resolve_prefix_expr(prefix_expr).map(Function::from)
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn resolve_index_expr(&self, index_expr: &ast::IndexExpr) -> Option<Function> {
|
|
|
|
self.imp.resolve_index_expr(index_expr).map(Function::from)
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn resolve_bin_expr(&self, bin_expr: &ast::BinExpr) -> Option<Function> {
|
|
|
|
self.imp.resolve_bin_expr(bin_expr).map(Function::from)
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn resolve_try_expr(&self, try_expr: &ast::TryExpr) -> Option<Function> {
|
|
|
|
self.imp.resolve_try_expr(try_expr).map(Function::from)
|
|
|
|
}
|
|
|
|
|
2020-07-30 14:21:30 +00:00
|
|
|
pub fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantDef> {
|
2020-07-10 12:11:31 +00:00
|
|
|
self.imp.resolve_variant(record_lit).map(VariantDef::from)
|
2020-07-01 09:43:36 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
pub fn to_module_def(&self, file: FileId) -> Option<Module> {
|
2021-03-15 13:51:20 +00:00
|
|
|
self.imp.to_module_def(file).next()
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn to_module_defs(&self, file: FileId) -> impl Iterator<Item = Module> {
|
2020-07-01 11:32:18 +00:00
|
|
|
self.imp.to_module_def(file)
|
2020-07-01 09:43:36 +00:00
|
|
|
}
|
2023-12-13 03:22:42 +00:00
|
|
|
|
|
|
|
pub fn to_struct_def(&self, s: &ast::Struct) -> Option<Struct> {
|
|
|
|
self.imp.to_def(s).map(Struct::from)
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn to_impl_def(&self, i: &ast::Impl) -> Option<Impl> {
|
|
|
|
self.imp.to_def(i).map(Impl::from)
|
|
|
|
}
|
2020-07-01 09:43:36 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
impl<'db> SemanticsImpl<'db> {
|
2020-07-11 10:31:50 +00:00
|
|
|
fn new(db: &'db dyn HirDatabase) -> Self {
|
2020-07-24 12:12:13 +00:00
|
|
|
SemanticsImpl {
|
|
|
|
db,
|
|
|
|
s2d_cache: Default::default(),
|
|
|
|
cache: Default::default(),
|
|
|
|
expansion_info_cache: Default::default(),
|
2021-11-05 13:25:47 +00:00
|
|
|
macro_call_cache: Default::default(),
|
2020-07-24 12:12:13 +00:00
|
|
|
}
|
2020-02-18 17:35:10 +00:00
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
pub fn parse(&self, file_id: FileId) -> ast::SourceFile {
|
2020-02-18 17:35:10 +00:00
|
|
|
let tree = self.db.parse(file_id).tree();
|
|
|
|
self.cache(tree.syntax().clone(), file_id.into());
|
|
|
|
tree
|
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
pub fn parse_or_expand(&self, file_id: HirFileId) -> SyntaxNode {
|
2023-04-16 17:20:48 +00:00
|
|
|
let node = self.db.parse_or_expand(file_id);
|
2021-11-27 11:49:51 +00:00
|
|
|
self.cache(node.clone(), file_id);
|
2023-04-16 17:20:48 +00:00
|
|
|
node
|
2021-11-27 11:49:51 +00:00
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
|
2022-03-31 09:12:08 +00:00
|
|
|
let sa = self.analyze_no_infer(macro_call.syntax())?;
|
2020-12-11 13:50:47 +00:00
|
|
|
let file_id = sa.expand(self.db, InFile::new(sa.file_id, macro_call))?;
|
2023-11-28 09:55:21 +00:00
|
|
|
let node = self.parse_or_expand(file_id.into());
|
2020-02-18 17:35:10 +00:00
|
|
|
Some(node)
|
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
/// If `item` has an attribute macro attached to it, expands it.
|
|
|
|
pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<SyntaxNode> {
|
2022-01-31 12:56:14 +00:00
|
|
|
let src = self.wrap_node_infile(item.clone());
|
2021-06-07 14:05:36 +00:00
|
|
|
let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src))?;
|
2023-04-16 17:20:48 +00:00
|
|
|
Some(self.parse_or_expand(macro_call_id.as_file()))
|
2022-03-10 19:53:50 +00:00
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
pub fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
|
2022-03-10 19:53:50 +00:00
|
|
|
let adt = attr.syntax().parent().and_then(ast::Adt::cast)?;
|
2023-03-08 12:51:14 +00:00
|
|
|
let src = self.wrap_node_infile(attr.clone());
|
2022-03-10 19:53:50 +00:00
|
|
|
let call_id = self.with_ctx(|ctx| {
|
|
|
|
ctx.attr_to_derive_macro_call(src.with_value(&adt), src).map(|(_, it, _)| it)
|
|
|
|
})?;
|
2023-04-16 17:20:48 +00:00
|
|
|
Some(self.parse_or_expand(call_id.as_file()))
|
2021-06-07 14:05:36 +00:00
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
pub fn resolve_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<Option<Macro>>> {
|
2022-03-08 22:51:48 +00:00
|
|
|
let calls = self.derive_macro_calls(attr)?;
|
|
|
|
self.with_ctx(|ctx| {
|
|
|
|
Some(
|
|
|
|
calls
|
|
|
|
.into_iter()
|
|
|
|
.map(|call| {
|
|
|
|
macro_call_to_macro_id(ctx, self.db.upcast(), call?).map(|id| Macro { id })
|
|
|
|
})
|
|
|
|
.collect(),
|
|
|
|
)
|
|
|
|
})
|
2021-12-28 13:47:21 +00:00
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
pub fn expand_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<SyntaxNode>> {
|
2021-12-28 13:51:17 +00:00
|
|
|
let res: Vec<_> = self
|
|
|
|
.derive_macro_calls(attr)?
|
2022-01-01 19:31:04 +00:00
|
|
|
.into_iter()
|
|
|
|
.flat_map(|call| {
|
|
|
|
let file_id = call?.as_file();
|
2023-04-16 17:20:48 +00:00
|
|
|
let node = self.db.parse_or_expand(file_id);
|
2021-12-28 13:50:13 +00:00
|
|
|
self.cache(node.clone(), file_id);
|
|
|
|
Some(node)
|
|
|
|
})
|
|
|
|
.collect();
|
2021-12-28 13:51:17 +00:00
|
|
|
Some(res)
|
2021-12-28 13:50:13 +00:00
|
|
|
}
|
|
|
|
|
2022-01-01 19:31:04 +00:00
|
|
|
fn derive_macro_calls(&self, attr: &ast::Attr) -> Option<Vec<Option<MacroCallId>>> {
|
2022-01-02 15:58:21 +00:00
|
|
|
let adt = attr.syntax().parent().and_then(ast::Adt::cast)?;
|
|
|
|
let file_id = self.find_file(adt.syntax()).file_id;
|
|
|
|
let adt = InFile::new(file_id, &adt);
|
2021-12-21 12:38:58 +00:00
|
|
|
let src = InFile::new(file_id, attr.clone());
|
2021-08-26 01:32:34 +00:00
|
|
|
self.with_ctx(|ctx| {
|
2022-02-21 01:42:58 +00:00
|
|
|
let (.., res) = ctx.attr_to_derive_macro_call(adt, src)?;
|
2021-12-28 13:50:13 +00:00
|
|
|
Some(res.to_vec())
|
2021-08-26 01:32:34 +00:00
|
|
|
})
|
2021-08-24 14:33:52 +00:00
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
pub fn is_derive_annotated(&self, adt: &ast::Adt) -> bool {
|
2022-02-21 12:21:25 +00:00
|
|
|
let file_id = self.find_file(adt.syntax()).file_id;
|
|
|
|
let adt = InFile::new(file_id, adt);
|
|
|
|
self.with_ctx(|ctx| ctx.has_derives(adt))
|
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
pub fn is_attr_macro_call(&self, item: &ast::Item) -> bool {
|
2021-12-21 12:38:58 +00:00
|
|
|
let file_id = self.find_file(item.syntax()).file_id;
|
|
|
|
let src = InFile::new(file_id, item.clone());
|
2021-06-07 17:32:28 +00:00
|
|
|
self.with_ctx(|ctx| ctx.item_to_macro_call(src).is_some())
|
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
/// Expand the macro call with a different token tree, mapping the `token_to_map` down into the
|
|
|
|
/// expansion. `token_to_map` should be a token from the `speculative args` node.
|
|
|
|
pub fn speculative_expand(
|
2020-03-07 14:27:03 +00:00
|
|
|
&self,
|
|
|
|
actual_macro_call: &ast::MacroCall,
|
2021-05-24 19:21:25 +00:00
|
|
|
speculative_args: &ast::TokenTree,
|
2020-03-07 14:27:03 +00:00
|
|
|
token_to_map: SyntaxToken,
|
|
|
|
) -> Option<(SyntaxNode, SyntaxToken)> {
|
2021-12-21 12:38:58 +00:00
|
|
|
let SourceAnalyzer { file_id, resolver, .. } =
|
2022-03-31 09:12:08 +00:00
|
|
|
self.analyze_no_infer(actual_macro_call.syntax())?;
|
2021-12-21 12:38:58 +00:00
|
|
|
let macro_call = InFile::new(file_id, actual_macro_call);
|
2022-03-31 09:12:08 +00:00
|
|
|
let krate = resolver.krate();
|
2020-07-01 09:43:36 +00:00
|
|
|
let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| {
|
2023-12-22 11:46:22 +00:00
|
|
|
resolver.resolve_path_as_macro_def(self.db.upcast(), &path, Some(MacroSubNs::Bang))
|
2020-07-01 09:43:36 +00:00
|
|
|
})?;
|
2021-05-24 19:21:25 +00:00
|
|
|
hir_expand::db::expand_speculative(
|
2020-07-01 09:43:36 +00:00
|
|
|
self.db.upcast(),
|
|
|
|
macro_call_id,
|
2021-09-02 16:54:09 +00:00
|
|
|
speculative_args.syntax(),
|
|
|
|
token_to_map,
|
|
|
|
)
|
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
/// Expand the macro call with a different item as the input, mapping the `token_to_map` down into the
|
|
|
|
/// expansion. `token_to_map` should be a token from the `speculative args` node.
|
|
|
|
pub fn speculative_expand_attr_macro(
|
2021-09-02 16:54:09 +00:00
|
|
|
&self,
|
|
|
|
actual_macro_call: &ast::Item,
|
|
|
|
speculative_args: &ast::Item,
|
|
|
|
token_to_map: SyntaxToken,
|
|
|
|
) -> Option<(SyntaxNode, SyntaxToken)> {
|
2022-01-31 12:56:14 +00:00
|
|
|
let macro_call = self.wrap_node_infile(actual_macro_call.clone());
|
2021-09-02 16:54:09 +00:00
|
|
|
let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(macro_call))?;
|
|
|
|
hir_expand::db::expand_speculative(
|
|
|
|
self.db.upcast(),
|
|
|
|
macro_call_id,
|
|
|
|
speculative_args.syntax(),
|
2020-07-01 09:43:36 +00:00
|
|
|
token_to_map,
|
|
|
|
)
|
2020-03-07 14:27:03 +00:00
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
pub fn speculative_expand_derive_as_pseudo_attr_macro(
|
2022-03-10 19:53:50 +00:00
|
|
|
&self,
|
|
|
|
actual_macro_call: &ast::Attr,
|
|
|
|
speculative_args: &ast::Attr,
|
|
|
|
token_to_map: SyntaxToken,
|
|
|
|
) -> Option<(SyntaxNode, SyntaxToken)> {
|
|
|
|
let attr = self.wrap_node_infile(actual_macro_call.clone());
|
|
|
|
let adt = actual_macro_call.syntax().parent().and_then(ast::Adt::cast)?;
|
|
|
|
let macro_call_id = self.with_ctx(|ctx| {
|
|
|
|
ctx.attr_to_derive_macro_call(attr.with_value(&adt), attr).map(|(_, it, _)| it)
|
|
|
|
})?;
|
|
|
|
hir_expand::db::expand_speculative(
|
|
|
|
self.db.upcast(),
|
|
|
|
macro_call_id,
|
|
|
|
speculative_args.syntax(),
|
|
|
|
token_to_map,
|
|
|
|
)
|
|
|
|
}
|
|
|
|
|
2023-12-05 15:30:57 +00:00
|
|
|
pub fn as_format_args_parts(
|
2023-12-05 14:42:39 +00:00
|
|
|
&self,
|
2023-12-05 15:30:57 +00:00
|
|
|
string: &ast::String,
|
|
|
|
) -> Option<Vec<(TextRange, Option<PathResolution>)>> {
|
|
|
|
if let Some(quote) = string.open_quote_text_range() {
|
|
|
|
return self
|
|
|
|
.descend_into_macros(DescendPreference::SameText, string.syntax().clone())
|
|
|
|
.into_iter()
|
|
|
|
.find_map(|token| {
|
|
|
|
let string = ast::String::cast(token)?;
|
|
|
|
let literal =
|
|
|
|
string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?;
|
|
|
|
let format_args = ast::FormatArgsExpr::cast(literal.parent()?)?;
|
|
|
|
let source_analyzer = self.analyze_no_infer(format_args.syntax())?;
|
|
|
|
let format_args = self.wrap_node_infile(format_args);
|
|
|
|
let res = source_analyzer
|
|
|
|
.as_format_args_parts(self.db, format_args.as_ref())?
|
|
|
|
.map(|(range, res)| (range + quote.end(), res))
|
|
|
|
.collect();
|
|
|
|
Some(res)
|
|
|
|
});
|
|
|
|
}
|
|
|
|
None
|
2023-12-05 14:42:39 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
pub fn check_for_format_args_template(
|
|
|
|
&self,
|
|
|
|
original_token: SyntaxToken,
|
|
|
|
offset: TextSize,
|
|
|
|
) -> Option<(TextRange, Option<PathResolution>)> {
|
|
|
|
if let Some(original_string) = ast::String::cast(original_token.clone()) {
|
|
|
|
if let Some(quote) = original_string.open_quote_text_range() {
|
|
|
|
return self
|
2024-01-06 23:17:48 +00:00
|
|
|
.descend_into_macros(DescendPreference::SameText, original_token)
|
2023-12-05 14:42:39 +00:00
|
|
|
.into_iter()
|
|
|
|
.find_map(|token| {
|
|
|
|
self.resolve_offset_in_format_args(
|
|
|
|
ast::String::cast(token)?,
|
|
|
|
offset - quote.end(),
|
|
|
|
)
|
|
|
|
})
|
|
|
|
.map(|(range, res)| (range + quote.end(), res));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
None
|
|
|
|
}
|
|
|
|
|
2023-12-05 15:30:57 +00:00
|
|
|
fn resolve_offset_in_format_args(
|
|
|
|
&self,
|
|
|
|
string: ast::String,
|
|
|
|
offset: TextSize,
|
|
|
|
) -> Option<(TextRange, Option<PathResolution>)> {
|
|
|
|
debug_assert!(offset <= string.syntax().text_range().len());
|
|
|
|
let literal = string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?;
|
|
|
|
let format_args = ast::FormatArgsExpr::cast(literal.parent()?)?;
|
|
|
|
let source_analyzer = &self.analyze_no_infer(format_args.syntax())?;
|
|
|
|
let format_args = self.wrap_node_infile(format_args);
|
|
|
|
source_analyzer.resolve_offset_in_format_args(self.db, format_args.as_ref(), offset)
|
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
/// Maps a node down by mapping its first and last token down.
|
|
|
|
pub fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> {
|
|
|
|
// This might not be the correct way to do this, but it works for now
|
2021-09-18 11:19:29 +00:00
|
|
|
let mut res = smallvec![];
|
|
|
|
let tokens = (|| {
|
2023-12-03 19:20:38 +00:00
|
|
|
// FIXME: the trivia skipping should not be necessary
|
2021-09-18 11:19:29 +00:00
|
|
|
let first = skip_trivia_token(node.syntax().first_token()?, Direction::Next)?;
|
|
|
|
let last = skip_trivia_token(node.syntax().last_token()?, Direction::Prev)?;
|
|
|
|
Some((first, last))
|
|
|
|
})();
|
|
|
|
let (first, last) = match tokens {
|
|
|
|
Some(it) => it,
|
|
|
|
None => return res,
|
|
|
|
};
|
|
|
|
|
|
|
|
if first == last {
|
2023-12-03 19:20:38 +00:00
|
|
|
// node is just the token, so descend the token
|
2023-12-05 14:42:39 +00:00
|
|
|
self.descend_into_macros_impl(first, &mut |InFile { value, .. }| {
|
|
|
|
if let Some(node) = value
|
|
|
|
.parent_ancestors()
|
|
|
|
.take_while(|it| it.text_range() == value.text_range())
|
|
|
|
.find_map(N::cast)
|
|
|
|
{
|
2022-04-15 17:42:48 +00:00
|
|
|
res.push(node)
|
|
|
|
}
|
2023-12-03 19:20:38 +00:00
|
|
|
ControlFlow::Continue(())
|
2022-04-15 17:42:48 +00:00
|
|
|
});
|
2021-09-18 11:19:29 +00:00
|
|
|
} else {
|
|
|
|
// Descend first and last token, then zip them to look for the node they belong to
|
|
|
|
let mut scratch: SmallVec<[_; 1]> = smallvec![];
|
2023-12-05 14:42:39 +00:00
|
|
|
self.descend_into_macros_impl(first, &mut |token| {
|
2022-04-15 17:42:48 +00:00
|
|
|
scratch.push(token);
|
2023-12-03 19:20:38 +00:00
|
|
|
ControlFlow::Continue(())
|
2022-04-15 17:42:48 +00:00
|
|
|
});
|
2021-09-18 11:19:29 +00:00
|
|
|
|
|
|
|
let mut scratch = scratch.into_iter();
|
2021-11-05 15:02:56 +00:00
|
|
|
self.descend_into_macros_impl(
|
|
|
|
last,
|
2021-12-20 12:19:48 +00:00
|
|
|
&mut |InFile { value: last, file_id: last_fid }| {
|
2021-11-05 15:02:56 +00:00
|
|
|
if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
|
|
|
|
if first_fid == last_fid {
|
|
|
|
if let Some(p) = first.parent() {
|
|
|
|
let range = first.text_range().cover(last.text_range());
|
|
|
|
let node = find_root(&p)
|
|
|
|
.covering_element(range)
|
|
|
|
.ancestors()
|
|
|
|
.take_while(|it| it.text_range() == range)
|
|
|
|
.find_map(N::cast);
|
|
|
|
if let Some(node) = node {
|
|
|
|
res.push(node);
|
|
|
|
}
|
2021-09-18 11:19:29 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2023-12-03 19:20:38 +00:00
|
|
|
ControlFlow::Continue(())
|
2021-11-05 15:02:56 +00:00
|
|
|
},
|
|
|
|
);
|
2021-09-18 11:19:29 +00:00
|
|
|
}
|
|
|
|
res
|
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
/// Descend the token into its macro call if it is part of one, returning the tokens in the
|
2023-12-19 07:30:48 +00:00
|
|
|
/// expansion that it is associated with.
|
2023-09-13 22:02:28 +00:00
|
|
|
pub fn descend_into_macros(
|
2023-08-16 08:07:18 +00:00
|
|
|
&self,
|
2023-12-03 19:20:38 +00:00
|
|
|
mode: DescendPreference,
|
2023-08-16 08:07:18 +00:00
|
|
|
token: SyntaxToken,
|
|
|
|
) -> SmallVec<[SyntaxToken; 1]> {
|
2023-12-03 19:20:38 +00:00
|
|
|
enum Dp<'t> {
|
|
|
|
SameText(&'t str),
|
|
|
|
SameKind(SyntaxKind),
|
|
|
|
None,
|
|
|
|
}
|
|
|
|
let fetch_kind = |token: &SyntaxToken| match token.parent() {
|
|
|
|
Some(node) => match node.kind() {
|
|
|
|
kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => kind,
|
|
|
|
_ => token.kind(),
|
|
|
|
},
|
|
|
|
None => token.kind(),
|
|
|
|
};
|
|
|
|
let mode = match mode {
|
|
|
|
DescendPreference::SameText => Dp::SameText(token.text()),
|
|
|
|
DescendPreference::SameKind => Dp::SameKind(fetch_kind(&token)),
|
|
|
|
DescendPreference::None => Dp::None,
|
|
|
|
};
|
2022-04-15 17:42:48 +00:00
|
|
|
let mut res = smallvec![];
|
2023-12-05 14:42:39 +00:00
|
|
|
self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| {
|
2023-12-03 19:20:38 +00:00
|
|
|
let is_a_match = match mode {
|
|
|
|
Dp::SameText(text) => value.text() == text,
|
|
|
|
Dp::SameKind(preferred_kind) => {
|
|
|
|
let kind = fetch_kind(&value);
|
|
|
|
kind == preferred_kind
|
|
|
|
// special case for derive macros
|
|
|
|
|| (preferred_kind == SyntaxKind::IDENT && kind == SyntaxKind::NAME_REF)
|
|
|
|
}
|
|
|
|
Dp::None => true,
|
|
|
|
};
|
|
|
|
if is_a_match {
|
2022-04-15 17:42:48 +00:00
|
|
|
res.push(value);
|
|
|
|
}
|
2023-12-03 19:20:38 +00:00
|
|
|
ControlFlow::Continue(())
|
2022-04-15 17:42:48 +00:00
|
|
|
});
|
|
|
|
if res.is_empty() {
|
|
|
|
res.push(token);
|
|
|
|
}
|
2021-09-18 11:19:29 +00:00
|
|
|
res
|
|
|
|
}
|
|
|
|
|
2023-12-03 19:20:38 +00:00
|
|
|
pub fn descend_into_macros_single(
|
2023-08-16 08:07:18 +00:00
|
|
|
&self,
|
2023-12-03 19:20:38 +00:00
|
|
|
mode: DescendPreference,
|
2023-08-16 08:07:18 +00:00
|
|
|
token: SyntaxToken,
|
|
|
|
) -> SyntaxToken {
|
2023-12-03 19:20:38 +00:00
|
|
|
enum Dp<'t> {
|
|
|
|
SameText(&'t str),
|
|
|
|
SameKind(SyntaxKind),
|
|
|
|
None,
|
|
|
|
}
|
2022-07-22 15:29:03 +00:00
|
|
|
let fetch_kind = |token: &SyntaxToken| match token.parent() {
|
|
|
|
Some(node) => match node.kind() {
|
2023-12-03 19:20:38 +00:00
|
|
|
kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => kind,
|
2022-07-22 15:29:03 +00:00
|
|
|
_ => token.kind(),
|
|
|
|
},
|
|
|
|
None => token.kind(),
|
|
|
|
};
|
2023-12-03 19:20:38 +00:00
|
|
|
let mode = match mode {
|
|
|
|
DescendPreference::SameText => Dp::SameText(token.text()),
|
|
|
|
DescendPreference::SameKind => Dp::SameKind(fetch_kind(&token)),
|
|
|
|
DescendPreference::None => Dp::None,
|
|
|
|
};
|
|
|
|
let mut res = token.clone();
|
2023-12-05 14:42:39 +00:00
|
|
|
self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| {
|
2023-12-03 19:20:38 +00:00
|
|
|
let is_a_match = match mode {
|
|
|
|
Dp::SameText(text) => value.text() == text,
|
|
|
|
Dp::SameKind(preferred_kind) => {
|
|
|
|
let kind = fetch_kind(&value);
|
|
|
|
kind == preferred_kind
|
|
|
|
// special case for derive macros
|
|
|
|
|| (preferred_kind == SyntaxKind::IDENT && kind == SyntaxKind::NAME_REF)
|
2022-07-22 15:29:03 +00:00
|
|
|
}
|
2023-12-03 19:20:38 +00:00
|
|
|
Dp::None => true,
|
|
|
|
};
|
2023-12-05 16:04:59 +00:00
|
|
|
res = value;
|
2023-12-03 19:20:38 +00:00
|
|
|
if is_a_match {
|
|
|
|
ControlFlow::Break(())
|
|
|
|
} else {
|
|
|
|
ControlFlow::Continue(())
|
2022-07-22 15:29:03 +00:00
|
|
|
}
|
|
|
|
});
|
2021-11-05 15:02:56 +00:00
|
|
|
res
|
|
|
|
}
|
|
|
|
|
|
|
|
fn descend_into_macros_impl(
|
|
|
|
&self,
|
|
|
|
token: SyntaxToken,
|
2023-12-03 19:20:38 +00:00
|
|
|
f: &mut dyn FnMut(InFile<SyntaxToken>) -> ControlFlow<()>,
|
2021-11-05 15:02:56 +00:00
|
|
|
) {
|
2020-08-12 14:32:36 +00:00
|
|
|
let _p = profile::span("descend_into_macros");
|
2023-11-28 09:55:21 +00:00
|
|
|
let sa = match token.parent().and_then(|parent| self.analyze_no_infer(&parent)) {
|
2021-01-30 15:19:21 +00:00
|
|
|
Some(it) => it,
|
2021-09-18 11:19:29 +00:00
|
|
|
None => return,
|
2021-01-30 15:19:21 +00:00
|
|
|
};
|
2023-11-28 09:55:21 +00:00
|
|
|
|
2023-12-08 14:26:38 +00:00
|
|
|
let span = match sa.file_id.file_id() {
|
|
|
|
Some(file_id) => self.db.real_span_map(file_id).span_for_range(token.text_range()),
|
|
|
|
None => {
|
|
|
|
stdx::never!();
|
|
|
|
return;
|
2023-11-24 15:38:48 +00:00
|
|
|
}
|
|
|
|
};
|
|
|
|
|
2023-12-08 14:26:38 +00:00
|
|
|
let mut cache = self.expansion_info_cache.borrow_mut();
|
|
|
|
let mut mcache = self.macro_call_cache.borrow_mut();
|
2023-11-28 09:55:21 +00:00
|
|
|
let def_map = sa.resolver.def_map();
|
2021-09-27 16:44:40 +00:00
|
|
|
|
2023-12-08 14:26:38 +00:00
|
|
|
let mut process_expansion_for_token = |stack: &mut Vec<_>, macro_file| {
|
2023-12-03 19:20:38 +00:00
|
|
|
let expansion_info = cache
|
|
|
|
.entry(macro_file)
|
|
|
|
.or_insert_with(|| macro_file.expansion_info(self.db.upcast()));
|
2021-11-05 14:52:10 +00:00
|
|
|
|
2023-12-03 19:20:38 +00:00
|
|
|
{
|
2023-12-08 14:26:38 +00:00
|
|
|
let InMacroFile { file_id, value } = expansion_info.expanded();
|
|
|
|
self.cache(value, file_id.into());
|
2023-12-03 19:20:38 +00:00
|
|
|
}
|
2021-11-05 14:52:10 +00:00
|
|
|
|
2023-12-08 14:26:38 +00:00
|
|
|
let InMacroFile { file_id, value: mapped_tokens } =
|
|
|
|
expansion_info.map_range_down(span)?;
|
|
|
|
let mapped_tokens: SmallVec<[_; 2]> = mapped_tokens.collect();
|
2022-04-15 17:42:48 +00:00
|
|
|
|
2023-12-03 19:20:38 +00:00
|
|
|
// if the length changed we have found a mapping for the token
|
2023-12-08 14:26:38 +00:00
|
|
|
let res = mapped_tokens.is_empty().not().then_some(());
|
|
|
|
// requeue the tokens we got from mapping our current token down
|
|
|
|
stack.push((HirFileId::from(file_id), mapped_tokens));
|
|
|
|
res
|
2023-12-03 19:20:38 +00:00
|
|
|
};
|
2021-09-27 16:44:40 +00:00
|
|
|
|
2023-12-08 14:26:38 +00:00
|
|
|
let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(sa.file_id, smallvec![token])];
|
|
|
|
|
|
|
|
while let Some((file_id, mut tokens)) = stack.pop() {
|
|
|
|
while let Some(token) = tokens.pop() {
|
|
|
|
let was_not_remapped = (|| {
|
|
|
|
// First expand into attribute invocations
|
|
|
|
let containing_attribute_macro_call = self.with_ctx(|ctx| {
|
|
|
|
token.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| {
|
|
|
|
if item.attrs().next().is_none() {
|
|
|
|
// Don't force populate the dyn cache for items that don't have an attribute anyways
|
|
|
|
return None;
|
|
|
|
}
|
|
|
|
Some((
|
|
|
|
ctx.item_to_macro_call(InFile::new(file_id, item.clone()))?,
|
|
|
|
item,
|
|
|
|
))
|
|
|
|
})
|
|
|
|
});
|
|
|
|
if let Some((call_id, item)) = containing_attribute_macro_call {
|
|
|
|
let file_id = call_id.as_macro_file();
|
|
|
|
let attr_id = match self.db.lookup_intern_macro_call(call_id).kind {
|
|
|
|
hir_expand::MacroCallKind::Attr { invoc_attr_index, .. } => {
|
|
|
|
invoc_attr_index.ast_index()
|
|
|
|
}
|
|
|
|
_ => 0,
|
|
|
|
};
|
2023-12-11 19:26:50 +00:00
|
|
|
// FIXME: here, the attribute's text range is used to strip away all
|
2023-12-29 03:50:24 +00:00
|
|
|
// entries from the start of the attribute "list" up the invoking
|
2023-12-11 19:26:50 +00:00
|
|
|
// attribute. But in
|
|
|
|
// ```
|
|
|
|
// mod foo {
|
|
|
|
// #![inner]
|
|
|
|
// }
|
|
|
|
// ```
|
|
|
|
// we don't wanna strip away stuff in the `mod foo {` range, that is
|
|
|
|
// here if the id corresponds to an inner attribute we got strip all
|
|
|
|
// text ranges of the outer ones, and then all of the inner ones up
|
|
|
|
// to the invoking attribute so that the inbetween is ignored.
|
2023-12-08 14:26:38 +00:00
|
|
|
let text_range = item.syntax().text_range();
|
2023-12-11 19:26:50 +00:00
|
|
|
let start = collect_attrs(&item)
|
2023-12-08 14:26:38 +00:00
|
|
|
.nth(attr_id)
|
2023-12-11 19:26:50 +00:00
|
|
|
.map(|attr| match attr.1 {
|
2023-12-08 14:26:38 +00:00
|
|
|
Either::Left(it) => it.syntax().text_range().start(),
|
|
|
|
Either::Right(it) => it.syntax().text_range().start(),
|
|
|
|
})
|
|
|
|
.unwrap_or_else(|| text_range.start());
|
|
|
|
let text_range = TextRange::new(start, text_range.end());
|
|
|
|
// remove any other token in this macro input, all their mappings are the
|
|
|
|
// same as this one
|
|
|
|
tokens.retain(|t| !text_range.contains_range(t.text_range()));
|
|
|
|
return process_expansion_for_token(&mut stack, file_id);
|
|
|
|
}
|
2021-09-13 23:20:43 +00:00
|
|
|
|
2023-12-08 14:26:38 +00:00
|
|
|
// Then check for token trees, that means we are either in a function-like macro or
|
|
|
|
// secondary attribute inputs
|
|
|
|
let tt = token.parent_ancestors().map_while(ast::TokenTree::cast).last()?;
|
|
|
|
let parent = tt.syntax().parent()?;
|
2022-02-21 01:42:58 +00:00
|
|
|
|
2023-12-08 14:26:38 +00:00
|
|
|
if tt.left_delimiter_token().map_or(false, |it| it == token) {
|
|
|
|
return None;
|
|
|
|
}
|
|
|
|
if tt.right_delimiter_token().map_or(false, |it| it == token) {
|
|
|
|
return None;
|
|
|
|
}
|
2021-09-27 16:44:40 +00:00
|
|
|
|
2023-12-08 14:26:38 +00:00
|
|
|
if let Some(macro_call) = ast::MacroCall::cast(parent.clone()) {
|
|
|
|
let mcall: hir_expand::files::InFileWrapper<HirFileId, ast::MacroCall> =
|
|
|
|
InFile::new(file_id, macro_call);
|
|
|
|
let file_id = match mcache.get(&mcall) {
|
|
|
|
Some(&it) => it,
|
|
|
|
None => {
|
|
|
|
let it = sa.expand(self.db, mcall.as_ref())?;
|
|
|
|
mcache.insert(mcall, it);
|
|
|
|
it
|
2022-07-24 10:04:15 +00:00
|
|
|
}
|
2023-12-08 14:26:38 +00:00
|
|
|
};
|
|
|
|
let text_range = tt.syntax().text_range();
|
|
|
|
// remove any other token in this macro input, all their mappings are the
|
|
|
|
// same as this one
|
|
|
|
tokens.retain(|t| !text_range.contains_range(t.text_range()));
|
|
|
|
process_expansion_for_token(&mut stack, file_id)
|
|
|
|
} else if let Some(meta) = ast::Meta::cast(parent) {
|
|
|
|
// attribute we failed expansion for earlier, this might be a derive invocation
|
|
|
|
// or derive helper attribute
|
|
|
|
let attr = meta.parent_attr()?;
|
|
|
|
|
|
|
|
let adt = if let Some(adt) = attr.syntax().parent().and_then(ast::Adt::cast)
|
2022-07-24 10:04:15 +00:00
|
|
|
{
|
2023-12-08 14:26:38 +00:00
|
|
|
// this might be a derive, or a derive helper on an ADT
|
|
|
|
let derive_call = self.with_ctx(|ctx| {
|
|
|
|
// so try downmapping the token into the pseudo derive expansion
|
|
|
|
// see [hir_expand::builtin_attr_macro] for how the pseudo derive expansion works
|
|
|
|
ctx.attr_to_derive_macro_call(
|
|
|
|
InFile::new(file_id, &adt),
|
|
|
|
InFile::new(file_id, attr.clone()),
|
|
|
|
)
|
|
|
|
.map(|(_, call_id, _)| call_id)
|
|
|
|
});
|
|
|
|
|
|
|
|
match derive_call {
|
|
|
|
Some(call_id) => {
|
|
|
|
// resolved to a derive
|
|
|
|
let file_id = call_id.as_macro_file();
|
|
|
|
let text_range = attr.syntax().text_range();
|
|
|
|
// remove any other token in this macro input, all their mappings are the
|
|
|
|
// same as this one
|
|
|
|
tokens.retain(|t| !text_range.contains_range(t.text_range()));
|
|
|
|
return process_expansion_for_token(&mut stack, file_id);
|
|
|
|
}
|
|
|
|
None => Some(adt),
|
|
|
|
}
|
2022-07-24 10:04:15 +00:00
|
|
|
} else {
|
2023-12-08 14:26:38 +00:00
|
|
|
// Otherwise this could be a derive helper on a variant or field
|
|
|
|
if let Some(field) =
|
|
|
|
attr.syntax().parent().and_then(ast::RecordField::cast)
|
|
|
|
{
|
|
|
|
field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
|
|
|
|
} else if let Some(field) =
|
|
|
|
attr.syntax().parent().and_then(ast::TupleField::cast)
|
|
|
|
{
|
|
|
|
field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
|
|
|
|
} else if let Some(variant) =
|
|
|
|
attr.syntax().parent().and_then(ast::Variant::cast)
|
|
|
|
{
|
|
|
|
variant.syntax().ancestors().nth(2).and_then(ast::Adt::cast)
|
|
|
|
} else {
|
|
|
|
None
|
|
|
|
}
|
|
|
|
}?;
|
|
|
|
if !self.with_ctx(|ctx| ctx.has_derives(InFile::new(file_id, &adt))) {
|
|
|
|
return None;
|
2022-07-24 10:04:15 +00:00
|
|
|
}
|
2023-12-08 14:26:38 +00:00
|
|
|
// Not an attribute, nor a derive, so it's either a builtin or a derive helper
|
|
|
|
// Try to resolve to a derive helper and downmap
|
|
|
|
let attr_name =
|
|
|
|
attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
|
|
|
|
let id = self.db.ast_id_map(file_id).ast_id(&adt);
|
|
|
|
let helpers = def_map.derive_helpers_in_scope(InFile::new(file_id, id))?;
|
|
|
|
let mut res = None;
|
|
|
|
for (.., derive) in
|
|
|
|
helpers.iter().filter(|(helper, ..)| *helper == attr_name)
|
|
|
|
{
|
|
|
|
res = res.or(process_expansion_for_token(
|
|
|
|
&mut stack,
|
|
|
|
derive.as_macro_file(),
|
|
|
|
));
|
|
|
|
}
|
|
|
|
res
|
|
|
|
} else {
|
|
|
|
None
|
2022-07-24 10:04:15 +00:00
|
|
|
}
|
2023-12-08 14:26:38 +00:00
|
|
|
})()
|
|
|
|
.is_none();
|
2021-09-13 23:20:43 +00:00
|
|
|
|
2023-12-08 14:26:38 +00:00
|
|
|
if was_not_remapped && f(InFile::new(file_id, token)).is_break() {
|
|
|
|
break;
|
|
|
|
}
|
2021-01-30 15:19:21 +00:00
|
|
|
}
|
2021-08-28 19:18:56 +00:00
|
|
|
}
|
2020-02-18 17:35:10 +00:00
|
|
|
}
|
|
|
|
|
2021-08-28 22:45:55 +00:00
|
|
|
// Note this return type is deliberate as [`find_nodes_at_offset_with_descend`] wants to stop
|
|
|
|
// traversing the inner iterator when it finds a node.
|
2021-09-14 12:09:52 +00:00
|
|
|
// The outer iterator is over the tokens descendants
|
|
|
|
// The inner iterator is the ancestors of a descendant
|
2020-07-11 10:31:50 +00:00
|
|
|
fn descend_node_at_offset(
|
2020-03-22 11:52:14 +00:00
|
|
|
&self,
|
|
|
|
node: &SyntaxNode,
|
2020-04-24 21:40:41 +00:00
|
|
|
offset: TextSize,
|
2021-08-28 22:36:26 +00:00
|
|
|
) -> impl Iterator<Item = impl Iterator<Item = SyntaxNode> + '_> + '_ {
|
2020-03-22 11:52:14 +00:00
|
|
|
node.token_at_offset(offset)
|
2023-12-05 14:42:39 +00:00
|
|
|
.map(move |token| self.descend_into_macros(DescendPreference::None, token))
|
2021-09-14 12:09:52 +00:00
|
|
|
.map(|descendants| {
|
|
|
|
descendants.into_iter().map(move |it| self.token_ancestors_with_macros(it))
|
|
|
|
})
|
|
|
|
// re-order the tokens from token_at_offset by returning the ancestors with the smaller first nodes first
|
|
|
|
// See algo::ancestors_at_offset, which uses the same approach
|
|
|
|
.kmerge_by(|left, right| {
|
|
|
|
left.clone()
|
|
|
|
.map(|node| node.text_range().len())
|
|
|
|
.lt(right.clone().map(|node| node.text_range().len()))
|
|
|
|
})
|
2020-03-22 11:52:14 +00:00
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
/// Attempts to map the node out of macro expanded files returning the original file range.
|
|
|
|
/// If upmapping is not possible, this will fall back to the range of the macro call of the
|
|
|
|
/// macro file the node resides in.
|
|
|
|
pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
|
2021-12-21 12:38:58 +00:00
|
|
|
let node = self.find_file(node);
|
|
|
|
node.original_file_range(self.db.upcast())
|
2020-02-18 17:35:10 +00:00
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
/// Attempts to map the node out of macro expanded files returning the original file range.
|
|
|
|
pub fn original_range_opt(&self, node: &SyntaxNode) -> Option<FileRange> {
|
2021-12-21 12:38:58 +00:00
|
|
|
let node = self.find_file(node);
|
|
|
|
node.original_file_range_opt(self.db.upcast())
|
2023-11-24 15:38:48 +00:00
|
|
|
.filter(|(_, ctx)| ctx.is_root())
|
|
|
|
.map(TupleExt::head)
|
2021-09-18 11:19:29 +00:00
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
/// Attempts to map the node out of macro expanded files.
|
|
|
|
/// This only work for attribute expansions, as other ones do not have nodes as input.
|
|
|
|
pub fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
|
2023-12-19 07:30:48 +00:00
|
|
|
self.wrap_node_infile(node).original_ast_node_rooted(self.db.upcast()).map(
|
2023-11-25 13:39:55 +00:00
|
|
|
|InRealFile { file_id, value }| {
|
|
|
|
self.cache(find_root(value.syntax()), file_id.into());
|
2022-07-27 10:45:29 +00:00
|
|
|
value
|
|
|
|
},
|
|
|
|
)
|
2021-11-03 20:12:36 +00:00
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
/// Attempts to map the node out of macro expanded files.
|
|
|
|
/// This only work for attribute expansions, as other ones do not have nodes as input.
|
|
|
|
pub fn original_syntax_node(&self, node: &SyntaxNode) -> Option<SyntaxNode> {
|
2022-10-01 13:34:45 +00:00
|
|
|
let InFile { file_id, .. } = self.find_file(node);
|
|
|
|
InFile::new(file_id, node).original_syntax_node(self.db.upcast()).map(
|
2023-11-25 13:39:55 +00:00
|
|
|
|InRealFile { file_id, value }| {
|
|
|
|
self.cache(find_root(&value), file_id.into());
|
2022-10-01 13:34:45 +00:00
|
|
|
value
|
|
|
|
},
|
|
|
|
)
|
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
pub fn diagnostics_display_range(&self, src: InFile<SyntaxNodePtr>) -> FileRange {
|
2023-04-16 17:20:48 +00:00
|
|
|
let root = self.parse_or_expand(src.file_id);
|
2022-01-14 10:07:53 +00:00
|
|
|
let node = src.map(|it| it.to_node(&root));
|
|
|
|
node.as_ref().original_file_range(self.db.upcast())
|
2020-04-17 11:06:02 +00:00
|
|
|
}
|
|
|
|
|
2021-01-30 15:19:21 +00:00
|
|
|
fn token_ancestors_with_macros(
|
|
|
|
&self,
|
|
|
|
token: SyntaxToken,
|
2021-09-14 12:09:52 +00:00
|
|
|
) -> impl Iterator<Item = SyntaxNode> + Clone + '_ {
|
2021-01-30 15:19:21 +00:00
|
|
|
token.parent().into_iter().flat_map(move |parent| self.ancestors_with_macros(parent))
|
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
/// Iterates the ancestors of the given node, climbing up macro expansions while doing so.
|
|
|
|
pub fn ancestors_with_macros(
|
2021-09-14 12:09:52 +00:00
|
|
|
&self,
|
|
|
|
node: SyntaxNode,
|
|
|
|
) -> impl Iterator<Item = SyntaxNode> + Clone + '_ {
|
2021-12-21 12:38:58 +00:00
|
|
|
let node = self.find_file(&node);
|
2022-01-14 10:07:53 +00:00
|
|
|
let db = self.db.upcast();
|
|
|
|
iter::successors(Some(node.cloned()), move |&InFile { file_id, ref value }| {
|
|
|
|
match value.parent() {
|
|
|
|
Some(parent) => Some(InFile::new(file_id, parent)),
|
|
|
|
None => {
|
|
|
|
self.cache(value.clone(), file_id);
|
2023-12-06 13:36:39 +00:00
|
|
|
Some(file_id.macro_file()?.call_node(db))
|
2022-01-14 10:07:53 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
})
|
|
|
|
.map(|it| it.value)
|
2020-02-18 17:35:10 +00:00
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
pub fn ancestors_at_offset_with_macros(
|
2020-03-07 14:27:03 +00:00
|
|
|
&self,
|
|
|
|
node: &SyntaxNode,
|
2020-04-24 21:40:41 +00:00
|
|
|
offset: TextSize,
|
2020-03-07 14:27:03 +00:00
|
|
|
) -> impl Iterator<Item = SyntaxNode> + '_ {
|
|
|
|
node.token_at_offset(offset)
|
2021-01-30 15:19:21 +00:00
|
|
|
.map(|token| self.token_ancestors_with_macros(token))
|
2020-03-07 14:27:03 +00:00
|
|
|
.kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len())
|
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
pub fn resolve_lifetime_param(&self, lifetime: &ast::Lifetime) -> Option<LifetimeParam> {
|
2020-12-15 18:23:51 +00:00
|
|
|
let text = lifetime.text();
|
|
|
|
let lifetime_param = lifetime.syntax().ancestors().find_map(|syn| {
|
2021-09-27 10:54:24 +00:00
|
|
|
let gpl = ast::AnyHasGenericParams::cast(syn)?.generic_param_list()?;
|
2020-12-13 21:13:16 +00:00
|
|
|
gpl.lifetime_params()
|
2021-03-26 17:30:59 +00:00
|
|
|
.find(|tp| tp.lifetime().as_ref().map(|lt| lt.text()).as_ref() == Some(&text))
|
2020-12-13 21:13:16 +00:00
|
|
|
})?;
|
2022-01-31 12:56:14 +00:00
|
|
|
let src = self.wrap_node_infile(lifetime_param);
|
2020-12-13 21:13:16 +00:00
|
|
|
ToDef::to_def(self, src)
|
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
pub fn resolve_label(&self, lifetime: &ast::Lifetime) -> Option<Label> {
|
2020-12-23 15:34:30 +00:00
|
|
|
let text = lifetime.text();
|
|
|
|
let label = lifetime.syntax().ancestors().find_map(|syn| {
|
|
|
|
let label = match_ast! {
|
|
|
|
match syn {
|
|
|
|
ast::ForExpr(it) => it.label(),
|
|
|
|
ast::WhileExpr(it) => it.label(),
|
|
|
|
ast::LoopExpr(it) => it.label(),
|
2021-09-26 09:12:57 +00:00
|
|
|
ast::BlockExpr(it) => it.label(),
|
2020-12-23 15:34:30 +00:00
|
|
|
_ => None,
|
|
|
|
}
|
|
|
|
};
|
|
|
|
label.filter(|l| {
|
|
|
|
l.lifetime()
|
|
|
|
.and_then(|lt| lt.lifetime_ident_token())
|
|
|
|
.map_or(false, |lt| lt.text() == text)
|
|
|
|
})
|
|
|
|
})?;
|
2022-01-31 12:56:14 +00:00
|
|
|
let src = self.wrap_node_infile(label);
|
2020-12-23 15:34:30 +00:00
|
|
|
ToDef::to_def(self, src)
|
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type> {
|
2022-03-31 09:12:08 +00:00
|
|
|
let analyze = self.analyze(ty.syntax())?;
|
2023-04-17 15:31:39 +00:00
|
|
|
let ctx = LowerCtx::with_file_id(self.db.upcast(), analyze.file_id);
|
2023-12-14 13:11:12 +00:00
|
|
|
let ty = hir_ty::TyLoweringContext::new_maybe_unowned(
|
2023-06-05 11:27:19 +00:00
|
|
|
self.db,
|
|
|
|
&analyze.resolver,
|
2023-12-14 13:11:12 +00:00
|
|
|
analyze.resolver.type_owner(),
|
2023-06-05 11:27:19 +00:00
|
|
|
)
|
|
|
|
.lower_ty(&crate::TypeRef::from_ast(&ctx, ty.clone()));
|
2022-03-31 09:12:08 +00:00
|
|
|
Some(Type::new_with_resolver(self.db, &analyze.resolver, ty))
|
2021-05-06 15:05:49 +00:00
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
pub fn resolve_trait(&self, path: &ast::Path) -> Option<Trait> {
|
2022-07-28 08:05:21 +00:00
|
|
|
let analyze = self.analyze(path.syntax())?;
|
2023-11-25 14:37:40 +00:00
|
|
|
let span_map = self.db.span_map(analyze.file_id);
|
|
|
|
let ctx = LowerCtx::with_span_map(self.db.upcast(), span_map);
|
2023-11-25 16:10:18 +00:00
|
|
|
let hir_path = Path::from_src(&ctx, path.clone())?;
|
2023-03-08 17:28:52 +00:00
|
|
|
match analyze.resolver.resolve_path_in_type_ns_fully(self.db.upcast(), &hir_path)? {
|
2022-07-28 08:05:21 +00:00
|
|
|
TypeNs::TraitId(id) => Some(Trait { id }),
|
|
|
|
_ => None,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
pub fn expr_adjustments(&self, expr: &ast::Expr) -> Option<Vec<Adjustment>> {
|
2022-11-04 16:11:15 +00:00
|
|
|
let mutability = |m| match m {
|
|
|
|
hir_ty::Mutability::Not => Mutability::Shared,
|
|
|
|
hir_ty::Mutability::Mut => Mutability::Mut,
|
|
|
|
};
|
2022-12-20 19:33:27 +00:00
|
|
|
|
|
|
|
let analyzer = self.analyze(expr.syntax())?;
|
|
|
|
|
|
|
|
let (mut source_ty, _) = analyzer.type_of_expr(self.db, expr)?;
|
|
|
|
|
|
|
|
analyzer.expr_adjustments(self.db, expr).map(|it| {
|
2022-11-04 16:11:15 +00:00
|
|
|
it.iter()
|
2022-12-20 19:33:27 +00:00
|
|
|
.map(|adjust| {
|
|
|
|
let target =
|
|
|
|
Type::new_with_resolver(self.db, &analyzer.resolver, adjust.target.clone());
|
|
|
|
let kind = match adjust.kind {
|
|
|
|
hir_ty::Adjust::NeverToAny => Adjust::NeverToAny,
|
|
|
|
hir_ty::Adjust::Deref(Some(hir_ty::OverloadedDeref(m))) => {
|
2023-03-04 20:08:04 +00:00
|
|
|
// FIXME: Should we handle unknown mutability better?
|
|
|
|
Adjust::Deref(Some(OverloadedDeref(
|
|
|
|
m.map(mutability).unwrap_or(Mutability::Shared),
|
|
|
|
)))
|
2022-12-20 19:33:27 +00:00
|
|
|
}
|
|
|
|
hir_ty::Adjust::Deref(None) => Adjust::Deref(None),
|
|
|
|
hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::RawPtr(m)) => {
|
|
|
|
Adjust::Borrow(AutoBorrow::RawPtr(mutability(m)))
|
|
|
|
}
|
|
|
|
hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::Ref(m)) => {
|
|
|
|
Adjust::Borrow(AutoBorrow::Ref(mutability(m)))
|
|
|
|
}
|
|
|
|
hir_ty::Adjust::Pointer(pc) => Adjust::Pointer(pc),
|
|
|
|
};
|
|
|
|
|
|
|
|
// Update `source_ty` for the next adjustment
|
|
|
|
let source = mem::replace(&mut source_ty, target.clone());
|
|
|
|
|
|
|
|
let adjustment = Adjustment { source, target, kind };
|
|
|
|
|
|
|
|
adjustment
|
2022-11-04 16:11:15 +00:00
|
|
|
})
|
|
|
|
.collect()
|
|
|
|
})
|
2022-03-20 13:38:16 +00:00
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<TypeInfo> {
|
2022-03-31 09:12:08 +00:00
|
|
|
self.analyze(expr.syntax())?
|
2021-08-02 18:42:25 +00:00
|
|
|
.type_of_expr(self.db, expr)
|
2021-08-03 15:28:51 +00:00
|
|
|
.map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced })
|
2020-02-18 17:35:10 +00:00
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
pub fn type_of_pat(&self, pat: &ast::Pat) -> Option<TypeInfo> {
|
2022-03-31 09:12:08 +00:00
|
|
|
self.analyze(pat.syntax())?
|
2021-08-02 18:42:25 +00:00
|
|
|
.type_of_pat(self.db, pat)
|
2021-08-03 15:28:51 +00:00
|
|
|
.map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced })
|
2021-07-10 17:03:46 +00:00
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
/// It also includes the changes that binding mode makes in the type. For example in
|
|
|
|
/// `let ref x @ Some(_) = None` the result of `type_of_pat` is `Option<T>` but the result
|
|
|
|
/// of this function is `&mut Option<T>`
|
|
|
|
pub fn type_of_binding_in_pat(&self, pat: &ast::IdentPat) -> Option<Type> {
|
2023-05-04 12:33:36 +00:00
|
|
|
self.analyze(pat.syntax())?.type_of_binding_in_pat(self.db, pat)
|
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
pub fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type> {
|
2022-03-31 09:12:08 +00:00
|
|
|
self.analyze(param.syntax())?.type_of_self(self.db, param)
|
2020-07-10 12:08:35 +00:00
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
pub fn pattern_adjustments(&self, pat: &ast::Pat) -> SmallVec<[Type; 1]> {
|
2022-05-14 12:26:08 +00:00
|
|
|
self.analyze(pat.syntax())
|
|
|
|
.and_then(|it| it.pattern_adjustments(self.db, pat))
|
|
|
|
.unwrap_or_default()
|
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
pub fn binding_mode_of_pat(&self, pat: &ast::IdentPat) -> Option<BindingMode> {
|
2022-05-14 12:26:08 +00:00
|
|
|
self.analyze(pat.syntax())?.binding_mode_of_pat(self.db, pat)
|
|
|
|
}
|
|
|
|
|
2023-12-08 15:36:41 +00:00
|
|
|
pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
|
2022-06-25 09:33:27 +00:00
|
|
|
self.analyze(call.syntax())?.resolve_method_call(self.db, call)
|
2020-02-18 17:35:10 +00:00
|
|
|
}
|
|
|
|
|
2023-12-08 15:36:41 +00:00
|
|
|
/// Attempts to resolve this call expression as a method call falling back to resolving it as a field.
|
|
|
|
pub fn resolve_method_call_fallback(
|
2023-03-04 19:33:28 +00:00
|
|
|
&self,
|
|
|
|
call: &ast::MethodCallExpr,
|
2023-12-08 15:36:41 +00:00
|
|
|
) -> Option<Either<Function, Field>> {
|
2023-03-04 19:33:28 +00:00
|
|
|
self.analyze(call.syntax())?.resolve_method_call_fallback(self.db, call)
|
|
|
|
}
|
|
|
|
|
2022-08-05 12:28:36 +00:00
|
|
|
fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option<FunctionId> {
|
2022-08-05 12:16:36 +00:00
|
|
|
self.analyze(await_expr.syntax())?.resolve_await_to_poll(self.db, await_expr)
|
|
|
|
}
|
|
|
|
|
2022-08-05 12:28:36 +00:00
|
|
|
fn resolve_prefix_expr(&self, prefix_expr: &ast::PrefixExpr) -> Option<FunctionId> {
|
2022-08-05 12:16:36 +00:00
|
|
|
self.analyze(prefix_expr.syntax())?.resolve_prefix_expr(self.db, prefix_expr)
|
|
|
|
}
|
|
|
|
|
2022-08-05 12:28:36 +00:00
|
|
|
fn resolve_index_expr(&self, index_expr: &ast::IndexExpr) -> Option<FunctionId> {
|
2022-08-05 12:16:36 +00:00
|
|
|
self.analyze(index_expr.syntax())?.resolve_index_expr(self.db, index_expr)
|
|
|
|
}
|
|
|
|
|
2022-08-05 12:28:36 +00:00
|
|
|
fn resolve_bin_expr(&self, bin_expr: &ast::BinExpr) -> Option<FunctionId> {
|
2022-08-05 12:16:36 +00:00
|
|
|
self.analyze(bin_expr.syntax())?.resolve_bin_expr(self.db, bin_expr)
|
|
|
|
}
|
|
|
|
|
2022-08-05 12:28:36 +00:00
|
|
|
fn resolve_try_expr(&self, try_expr: &ast::TryExpr) -> Option<FunctionId> {
|
2022-08-05 12:16:36 +00:00
|
|
|
self.analyze(try_expr.syntax())?.resolve_try_expr(self.db, try_expr)
|
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
pub fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option<Callable> {
|
2022-06-25 09:33:27 +00:00
|
|
|
self.analyze(call.syntax())?.resolve_method_call_as_callable(self.db, call)
|
2020-07-16 11:00:56 +00:00
|
|
|
}
|
|
|
|
|
2024-01-06 14:04:58 +00:00
|
|
|
pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Either<Field, TupleField>> {
|
2022-03-31 09:12:08 +00:00
|
|
|
self.analyze(field.syntax())?.resolve_field(self.db, field)
|
2020-02-18 17:35:10 +00:00
|
|
|
}
|
|
|
|
|
2023-12-08 15:36:41 +00:00
|
|
|
pub fn resolve_field_fallback(
|
|
|
|
&self,
|
|
|
|
field: &ast::FieldExpr,
|
2024-01-06 14:04:58 +00:00
|
|
|
) -> Option<Either<Either<Field, TupleField>, Function>> {
|
2023-12-08 15:36:41 +00:00
|
|
|
self.analyze(field.syntax())?.resolve_field_fallback(self.db, field)
|
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
pub fn resolve_record_field(
|
2021-05-23 21:54:35 +00:00
|
|
|
&self,
|
|
|
|
field: &ast::RecordExprField,
|
|
|
|
) -> Option<(Field, Option<Local>, Type)> {
|
2022-03-31 09:12:08 +00:00
|
|
|
self.analyze(field.syntax())?.resolve_record_field(self.db, field)
|
2020-02-18 17:35:10 +00:00
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
pub fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option<(Field, Type)> {
|
2022-03-31 09:12:08 +00:00
|
|
|
self.analyze(field.syntax())?.resolve_record_pat_field(self.db, field)
|
2020-04-18 20:05:06 +00:00
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<Macro> {
|
2022-03-31 09:12:08 +00:00
|
|
|
let sa = self.analyze(macro_call.syntax())?;
|
2021-12-21 12:38:58 +00:00
|
|
|
let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call);
|
2020-02-18 17:35:10 +00:00
|
|
|
sa.resolve_macro_call(self.db, macro_call)
|
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
pub fn is_unsafe_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
|
2022-03-31 09:12:08 +00:00
|
|
|
let sa = match self.analyze(macro_call.syntax()) {
|
|
|
|
Some(it) => it,
|
|
|
|
None => return false,
|
|
|
|
};
|
2022-03-20 18:07:44 +00:00
|
|
|
let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call);
|
|
|
|
sa.is_unsafe_macro_call(self.db, macro_call)
|
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
pub fn resolve_attr_macro_call(&self, item: &ast::Item) -> Option<Macro> {
|
2022-01-31 12:56:14 +00:00
|
|
|
let item_in_file = self.wrap_node_infile(item.clone());
|
2022-03-08 22:51:48 +00:00
|
|
|
let id = self.with_ctx(|ctx| {
|
|
|
|
let macro_call_id = ctx.item_to_macro_call(item_in_file)?;
|
|
|
|
macro_call_to_macro_id(ctx, self.db.upcast(), macro_call_id)
|
|
|
|
})?;
|
|
|
|
Some(Macro { id })
|
2021-08-21 21:24:12 +00:00
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
pub fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
|
2022-03-31 09:12:08 +00:00
|
|
|
self.analyze(path.syntax())?.resolve_path(self.db, path)
|
2020-02-18 17:35:10 +00:00
|
|
|
}
|
|
|
|
|
2020-07-30 14:21:30 +00:00
|
|
|
fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantId> {
|
2022-03-31 09:12:08 +00:00
|
|
|
self.analyze(record_lit.syntax())?.resolve_variant(self.db, record_lit)
|
2020-06-09 21:11:16 +00:00
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
pub fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option<ModuleDef> {
|
2022-03-31 09:12:08 +00:00
|
|
|
self.analyze(pat.syntax())?.resolve_bind_pat_to_const(self.db, pat)
|
2020-02-28 15:36:14 +00:00
|
|
|
}
|
|
|
|
|
2023-09-13 22:02:28 +00:00
|
|
|
    pub fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> {
        self.analyze(literal.syntax())
            .and_then(|it| it.record_literal_missing_fields(self.db, literal))
            .unwrap_or_default()
    }

    pub fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> {
        self.analyze(pattern.syntax())
            .and_then(|it| it.record_pattern_missing_fields(self.db, pattern))
            .unwrap_or_default()
    }

    fn with_ctx<F: FnOnce(&mut SourceToDefCtx<'_, '_>) -> T, T>(&self, f: F) -> T {
        let mut cache = self.s2d_cache.borrow_mut();
        let mut ctx = SourceToDefCtx { db: self.db, dynmap_cache: &mut cache };
        f(&mut ctx)
    }

    pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> {
        let src = self.find_file(src.syntax()).with_value(src).cloned();
        T::to_def(self, src)
    }

    fn to_module_def(&self, file: FileId) -> impl Iterator<Item = Module> {
        self.with_ctx(|ctx| ctx.file_to_def(file)).into_iter().map(Module::from)
    }

    pub fn scope(&self, node: &SyntaxNode) -> Option<SemanticsScope<'db>> {
        self.analyze_no_infer(node).map(|SourceAnalyzer { file_id, resolver, .. }| SemanticsScope {
            db: self.db,
            file_id,
            resolver,
        })
    }

    pub fn scope_at_offset(
        &self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> Option<SemanticsScope<'db>> {
        self.analyze_with_offset_no_infer(node, offset).map(
            |SourceAnalyzer { file_id, resolver, .. }| SemanticsScope {
                db: self.db,
                file_id,
                resolver,
            },
        )
    }

    /// Searches for a definition's source and caches its syntax tree.
    pub fn source<Def: HasSource>(&self, def: Def) -> Option<InFile<Def::Ast>>
    where
        Def::Ast: AstNode,
    {
        let res = def.source(self.db)?;
        self.cache(find_root(res.value.syntax()), res.file_id);
        Some(res)
    }

    /// Returns `None` if the file of the node is not part of a crate.
    fn analyze(&self, node: &SyntaxNode) -> Option<SourceAnalyzer> {
        self.analyze_impl(node, None, true)
    }

    /// Returns `None` if the file of the node is not part of a crate.
    fn analyze_no_infer(&self, node: &SyntaxNode) -> Option<SourceAnalyzer> {
        self.analyze_impl(node, None, false)
    }

    fn analyze_with_offset_no_infer(
        &self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> Option<SourceAnalyzer> {
        self.analyze_impl(node, Some(offset), false)
    }

    fn analyze_impl(
        &self,
        node: &SyntaxNode,
        offset: Option<TextSize>,
        infer_body: bool,
    ) -> Option<SourceAnalyzer> {
        let _p = profile::span("Semantics::analyze_impl");
        let node = self.find_file(node);

        let container = self.with_ctx(|ctx| ctx.find_container(node))?;

        let resolver = match container {
            ChildContainer::DefWithBodyId(def) => {
                return Some(if infer_body {
                    SourceAnalyzer::new_for_body(self.db, def, node, offset)
                } else {
                    SourceAnalyzer::new_for_body_no_infer(self.db, def, node, offset)
                })
            }
            ChildContainer::TraitId(it) => it.resolver(self.db.upcast()),
            ChildContainer::TraitAliasId(it) => it.resolver(self.db.upcast()),
            ChildContainer::ImplId(it) => it.resolver(self.db.upcast()),
            ChildContainer::ModuleId(it) => it.resolver(self.db.upcast()),
            ChildContainer::EnumId(it) => it.resolver(self.db.upcast()),
            ChildContainer::VariantId(it) => it.resolver(self.db.upcast()),
            ChildContainer::TypeAliasId(it) => it.resolver(self.db.upcast()),
            ChildContainer::GenericDefId(it) => it.resolver(self.db.upcast()),
        };
        Some(SourceAnalyzer::new_for_resolver(resolver, node))
    }

    fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) {
        assert!(root_node.parent().is_none());
        let mut cache = self.cache.borrow_mut();
        let prev = cache.insert(root_node, file_id);
        assert!(prev.is_none() || prev == Some(file_id))
    }

    pub fn assert_contains_node(&self, node: &SyntaxNode) {
        self.find_file(node);
    }

    fn lookup(&self, root_node: &SyntaxNode) -> Option<HirFileId> {
        let cache = self.cache.borrow();
        cache.get(root_node).copied()
    }

    fn wrap_node_infile<N: AstNode>(&self, node: N) -> InFile<N> {
        let InFile { file_id, .. } = self.find_file(node.syntax());
        InFile::new(file_id, node)
    }

    /// Wraps the node in a [`InFile`] with the file id it belongs to.
    fn find_file<'node>(&self, node: &'node SyntaxNode) -> InFile<&'node SyntaxNode> {
        let root_node = find_root(node);
        let file_id = self.lookup(&root_node).unwrap_or_else(|| {
            panic!(
                "\n\nFailed to lookup {:?} in this Semantics.\n\
                Make sure to use only query nodes, derived from this instance of Semantics.\n\
                root node: {:?}\n\
                known nodes: {}\n\n",
                node,
                root_node,
                self.cache
                    .borrow()
                    .keys()
                    .map(|it| format!("{it:?}"))
                    .collect::<Vec<_>>()
                    .join(", ")
            )
        });
        InFile::new(file_id, node)
    }

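    /// Returns `true` if this call would take a reference to an unaligned field: the
    /// receiver is a field of a `#[repr(packed)]` type and the resolved method takes
    /// `self` by shared or exclusive reference (by-value receivers are not flagged).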
    pub fn is_unsafe_method_call(&self, method_call_expr: &ast::MethodCallExpr) -> bool {
        method_call_expr
            .receiver()
            .and_then(|expr| {
                let field_expr = match expr {
                    ast::Expr::FieldExpr(field_expr) => field_expr,
                    _ => return None,
                };
                let ty = self.type_of_expr(&field_expr.expr()?)?.original;
                if !ty.is_packed(self.db) {
                    return None;
                }

                let func = self.resolve_method_call(method_call_expr)?;
                let res = match func.self_param(self.db)?.access(self.db) {
                    Access::Shared | Access::Exclusive => true,
                    Access::Owned => false,
                };
                Some(res)
            })
            .unwrap_or(false)
    }

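    /// Returns `true` if this `&`-expression takes a reference to a field of a
    /// `#[repr(packed)]` type and may therefore need `unsafe` (see the FIXME below:
    /// without layout information this over-approximates).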
    pub fn is_unsafe_ref_expr(&self, ref_expr: &ast::RefExpr) -> bool {
        ref_expr
            .expr()
            .and_then(|expr| {
                let field_expr = match expr {
                    ast::Expr::FieldExpr(field_expr) => field_expr,
                    _ => return None,
                };
                let expr = field_expr.expr()?;
                self.type_of_expr(&expr)
            })
            // Binding a reference to a packed type is possibly unsafe.
            .map(|ty| ty.original.is_packed(self.db))
            .unwrap_or(false)

        // FIXME This needs layout computation to be correct. It will highlight
        // more than it should with the current implementation.
    }

    pub fn is_unsafe_ident_pat(&self, ident_pat: &ast::IdentPat) -> bool {
        if ident_pat.ref_token().is_none() {
            return false;
        }

        ident_pat
            .syntax()
            .parent()
            .and_then(|parent| {
                // `IdentPat` can live under `RecordPat` directly under `RecordPatField` or
                // `RecordPatFieldList`. `RecordPatField` also lives under `RecordPatFieldList`,
                // so this tries to look up the `IdentPat` anywhere along that structure to the
                // `RecordPat` so we can get the containing type.
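                // For example, in `let S { ref x, field: ref y } = s;` both `ref x` and
                // `ref y` should resolve back to the `RecordPat` for `S`.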
                let record_pat = ast::RecordPatField::cast(parent.clone())
                    .and_then(|record_pat| record_pat.syntax().parent())
                    .or_else(|| Some(parent.clone()))
                    .and_then(|parent| {
                        ast::RecordPatFieldList::cast(parent)?
                            .syntax()
                            .parent()
                            .and_then(ast::RecordPat::cast)
                    });

                // If this doesn't match a `RecordPat`, fall back to a `LetStmt` to see if
                // this is initialized from a `FieldExpr`.
                if let Some(record_pat) = record_pat {
                    self.type_of_pat(&ast::Pat::RecordPat(record_pat))
                } else if let Some(let_stmt) = ast::LetStmt::cast(parent) {
                    let field_expr = match let_stmt.initializer()? {
                        ast::Expr::FieldExpr(field_expr) => field_expr,
                        _ => return None,
                    };

                    self.type_of_expr(&field_expr.expr()?)
                } else {
                    None
                }
            })
            // Binding a reference to a packed type is possibly unsafe.
            .map(|ty| ty.original.is_packed(self.db))
            .unwrap_or(false)
    }

    /// Returns `true` if the `expr` is inside an `unsafe` context.
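    /// An enclosing `unsafe fn` counts, as does any `unsafe { .. }` block between the
    /// expression and its enclosing body owner.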
    pub fn is_inside_unsafe(&self, expr: &ast::Expr) -> bool {
        let Some(enclosing_item) =
            expr.syntax().ancestors().find_map(Either::<ast::Item, ast::Variant>::cast)
        else {
            return false;
        };

        let def = match &enclosing_item {
            Either::Left(ast::Item::Fn(it)) if it.unsafe_token().is_some() => return true,
            Either::Left(ast::Item::Fn(it)) => {
                self.to_def(it).map(<_>::into).map(DefWithBodyId::FunctionId)
            }
            Either::Left(ast::Item::Const(it)) => {
                self.to_def(it).map(<_>::into).map(DefWithBodyId::ConstId)
            }
            Either::Left(ast::Item::Static(it)) => {
                self.to_def(it).map(<_>::into).map(DefWithBodyId::StaticId)
            }
            Either::Left(_) => None,
            Either::Right(it) => self.to_def(it).map(<_>::into).map(DefWithBodyId::VariantId),
        };
        let Some(def) = def else { return false };
        let enclosing_node = enclosing_item.as_ref().either(|i| i.syntax(), |v| v.syntax());

        let (body, source_map) = self.db.body_with_source_map(def);

        let file_id = self.find_file(expr.syntax()).file_id;

        let Some(mut parent) = expr.syntax().parent() else { return false };
        loop {
            if &parent == enclosing_node {
                break false;
            }

            if let Some(parent) = ast::Expr::cast(parent.clone()) {
                if let Some(expr_id) = source_map.node_expr(InFile { file_id, value: &parent }) {
                    if let Expr::Unsafe { .. } = body[expr_id] {
                        break true;
                    }
                }
            }

            let Some(parent_) = parent.parent() else { break false };
            parent = parent_;
        }
    }
}

fn macro_call_to_macro_id(
    ctx: &mut SourceToDefCtx<'_, '_>,
    db: &dyn ExpandDatabase,
    macro_call_id: MacroCallId,
) -> Option<MacroId> {
    let loc = db.lookup_intern_macro_call(macro_call_id);
    match loc.def.kind {
        hir_expand::MacroDefKind::Declarative(it)
        | hir_expand::MacroDefKind::BuiltIn(_, it)
        | hir_expand::MacroDefKind::BuiltInAttr(_, it)
        | hir_expand::MacroDefKind::BuiltInDerive(_, it)
        | hir_expand::MacroDefKind::BuiltInEager(_, it) => {
            ctx.macro_to_def(InFile::new(it.file_id, it.to_node(db)))
        }
        hir_expand::MacroDefKind::ProcMacro(_, _, it) => {
            ctx.proc_macro_to_def(InFile::new(it.file_id, it.to_node(db)))
        }
    }
}

pub trait ToDef: AstNode + Clone {
    type Def;

    fn to_def(sema: &SemanticsImpl<'_>, src: InFile<Self>) -> Option<Self::Def>;
}

macro_rules! to_def_impls {
    ($(($def:path, $ast:path, $meth:ident)),* ,) => {$(
        impl ToDef for $ast {
            type Def = $def;
            fn to_def(sema: &SemanticsImpl<'_>, src: InFile<Self>) -> Option<Self::Def> {
                sema.with_ctx(|ctx| ctx.$meth(src)).map(<$def>::from)
            }
        }
    )*}
}

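// Each `(Def, Ast, method)` triple below expands to an `impl ToDef for Ast` whose
// `Def` is the given hir type and whose `to_def` forwards to that `SourceToDefCtx`
// method; for instance the `ast::Fn` entry maps function syntax to `crate::Function`
// via `fn_to_def`.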
to_def_impls![
    (crate::Module, ast::Module, module_to_def),
    (crate::Module, ast::SourceFile, source_file_to_def),
    (crate::Struct, ast::Struct, struct_to_def),
    (crate::Enum, ast::Enum, enum_to_def),
    (crate::Union, ast::Union, union_to_def),
    (crate::Trait, ast::Trait, trait_to_def),
    (crate::TraitAlias, ast::TraitAlias, trait_alias_to_def),
    (crate::Impl, ast::Impl, impl_to_def),
    (crate::TypeAlias, ast::TypeAlias, type_alias_to_def),
    (crate::Const, ast::Const, const_to_def),
    (crate::Static, ast::Static, static_to_def),
    (crate::Function, ast::Fn, fn_to_def),
    (crate::Field, ast::RecordField, record_field_to_def),
    (crate::Field, ast::TupleField, tuple_field_to_def),
    (crate::Variant, ast::Variant, enum_variant_to_def),
    (crate::TypeParam, ast::TypeParam, type_param_to_def),
    (crate::LifetimeParam, ast::LifetimeParam, lifetime_param_to_def),
    (crate::ConstParam, ast::ConstParam, const_param_to_def),
    (crate::GenericParam, ast::GenericParam, generic_param_to_def),
    (crate::Macro, ast::Macro, macro_to_def),
    (crate::Local, ast::IdentPat, bind_pat_to_def),
    (crate::Local, ast::SelfParam, self_param_to_def),
    (crate::Label, ast::Label, label_to_def),
    (crate::Adt, ast::Adt, adt_to_def),
    (crate::ExternCrateDecl, ast::ExternCrate, extern_crate_to_def),
];

fn find_root(node: &SyntaxNode) -> SyntaxNode {
    node.ancestors().last().unwrap()
}

/// `SemanticsScope` encapsulates the notion of a scope (the set of visible
/// names) at a particular program point.
///
/// It is a bit tricky, as scopes do not really exist inside the compiler.
/// Rather, the compiler directly computes for each reference the definition it
/// refers to. It might transiently compute the explicit scope map while doing
/// so, but, generally, this is not something left after the analysis.
///
/// However, we do very much need explicit scopes for IDE purposes --
/// completion, at its core, lists the contents of the current scope. The notion
/// of scope is also useful to answer questions like "what would be the meaning
/// of this piece of code if we inserted it into this position?".
///
/// So `SemanticsScope` is constructed from a specific program point (a syntax
/// node or just a raw offset) and provides access to the set of visible names
/// on a somewhat best-effort basis.
///
/// Note that if you are wondering "what does this specific existing name mean?",
/// you'd better use the `resolve_` family of methods.
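///
/// A minimal usage sketch (illustrative only; assumes a `Semantics` instance `sema`
/// and a `SyntaxNode` `node` obtained from it):
///
/// ```ignore
/// if let Some(scope) = sema.scope(&node) {
///     scope.process_all_names(&mut |name, def| {
///         eprintln!("{name:?} -> {def:?}");
///     });
/// }
/// ```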
#[derive(Debug)]
pub struct SemanticsScope<'a> {
    pub db: &'a dyn HirDatabase,
    file_id: HirFileId,
    resolver: Resolver,
}

impl SemanticsScope<'_> {
    pub fn module(&self) -> Module {
        Module { id: self.resolver.module() }
    }

    pub fn krate(&self) -> Crate {
        Crate { id: self.resolver.krate() }
    }

    pub(crate) fn resolver(&self) -> &Resolver {
        &self.resolver
    }

    /// Note: `VisibleTraits` should be treated as an opaque type, passed into `Type
    pub fn visible_traits(&self) -> VisibleTraits {
        let resolver = &self.resolver;
        VisibleTraits(resolver.traits_in_scope(self.db.upcast()))
    }

    /// Calls the passed closure `f` on all names in scope.
    pub fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef)) {
        let scope = self.resolver.names_in_scope(self.db.upcast());
        for (name, entries) in scope {
            for entry in entries {
                let def = match entry {
                    resolver::ScopeDef::ModuleDef(it) => ScopeDef::ModuleDef(it.into()),
                    resolver::ScopeDef::Unknown => ScopeDef::Unknown,
                    resolver::ScopeDef::ImplSelfType(it) => ScopeDef::ImplSelfType(it.into()),
                    resolver::ScopeDef::AdtSelfType(it) => ScopeDef::AdtSelfType(it.into()),
                    resolver::ScopeDef::GenericParam(id) => ScopeDef::GenericParam(id.into()),
                    resolver::ScopeDef::Local(binding_id) => match self.resolver.body_owner() {
                        Some(parent) => ScopeDef::Local(Local { parent, binding_id }),
                        None => continue,
                    },
                    resolver::ScopeDef::Label(label_id) => match self.resolver.body_owner() {
                        Some(parent) => ScopeDef::Label(Label { parent, label_id }),
                        None => continue,
                    },
                };
                f(name.clone(), def)
            }
        }
    }

    /// Resolve a path as-if it was written at the given scope. This is
    /// necessarily a heuristic, as it doesn't take hygiene into account.
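    ///
    /// A hypothetical sketch (not taken from this crate's tests), resolving a path
    /// that is not actually present in the file:
    ///
    /// ```ignore
    /// let path = syntax::ast::make::path_from_text("core::option::Option");
    /// let resolution = scope.speculative_resolve(&path);
    /// ```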
    pub fn speculative_resolve(&self, path: &ast::Path) -> Option<PathResolution> {
        let ctx = LowerCtx::with_file_id(self.db.upcast(), self.file_id);
        let path = Path::from_src(&ctx, path.clone())?;
        resolve_hir_path(self.db, &self.resolver, &path)
    }

    /// Iterates over associated types that may be specified after the given path (using
    /// `Ty::Assoc` syntax).
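    ///
    /// For example, with `T: Iterator` in scope, the path `T` admits the shorthand
    /// associated type `T::Item`, so the callback would be invoked with `Item`.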
    pub fn assoc_type_shorthand_candidates<R>(
        &self,
        resolution: &PathResolution,
        mut cb: impl FnMut(&Name, TypeAlias) -> Option<R>,
    ) -> Option<R> {
        let def = self.resolver.generic_def()?;
        hir_ty::associated_type_shorthand_candidates(
            self.db,
            def,
            resolution.in_type_ns()?,
            |name, id| cb(name, id.into()),
        )
    }

    pub fn extern_crates(&self) -> impl Iterator<Item = (Name, Module)> + '_ {
        self.resolver.extern_crates_in_scope().map(|(name, id)| (name, Module { id }))
    }

    pub fn extern_crate_decls(&self) -> impl Iterator<Item = Name> + '_ {
        self.resolver.extern_crate_decls_in_scope(self.db.upcast())
    }

    pub fn has_same_self_type(&self, other: &SemanticsScope<'_>) -> bool {
        self.resolver.impl_def() == other.resolver.impl_def()
    }
}

#[derive(Debug)]
pub struct VisibleTraits(pub FxHashSet<TraitId>);

impl ops::Deref for VisibleTraits {
    type Target = FxHashSet<TraitId>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}