Auto merge of #3706 - rust-lang:rustup-2024-06-24, r=oli-obk

Automatic Rustup
This commit is contained in:
bors 2024-06-24 06:06:16 +00:00
commit a25b21e067
284 changed files with 8890 additions and 4841 deletions

View file

@ -4,6 +4,7 @@ tq = "test -- -q"
qt = "tq" qt = "tq"
lint = "clippy --all-targets -- --cap-lints warn" lint = "clippy --all-targets -- --cap-lints warn"
codegen = "run --package xtask --bin xtask -- codegen" codegen = "run --package xtask --bin xtask -- codegen"
dist = "run --package xtask --bin xtask -- dist"
[target.x86_64-pc-windows-msvc] [target.x86_64-pc-windows-msvc]
linker = "rust-lld" linker = "rust-lld"

15
Cargo.lock generated
View file

@ -328,6 +328,15 @@ dependencies = [
"dirs-sys", "dirs-sys",
] ]
[[package]]
name = "dirs"
version = "5.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "44c45a9d03d6676652bcb5e724c7e988de1acad23a711b5217ab9cbecbec2225"
dependencies = [
"dirs-sys",
]
[[package]] [[package]]
name = "dirs-sys" name = "dirs-sys"
version = "0.4.1" version = "0.4.1"
@ -503,6 +512,7 @@ dependencies = [
"hir-def", "hir-def",
"hir-expand", "hir-expand",
"hir-ty", "hir-ty",
"intern",
"itertools", "itertools",
"once_cell", "once_cell",
"rustc-hash", "rustc-hash",
@ -891,9 +901,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]] [[package]]
name = "libc" name = "libc"
version = "0.2.154" version = "0.2.155"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ae743338b92ff9146ce83992f766a31066a91a8c84a45e0e9f21e7cf6de6d346" checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c"
[[package]] [[package]]
name = "libloading" name = "libloading"
@ -1665,6 +1675,7 @@ dependencies = [
"anyhow", "anyhow",
"cfg", "cfg",
"crossbeam-channel", "crossbeam-channel",
"dirs",
"dissimilar", "dissimilar",
"expect-test", "expect-test",
"flycheck", "flycheck",

View file

@ -162,7 +162,11 @@ xshell = "0.2.5"
dashmap = { version = "=5.5.3", features = ["raw-api"] } dashmap = { version = "=5.5.3", features = ["raw-api"] }
[workspace.lints.rust] [workspace.lints.rust]
rust_2018_idioms = "warn" bare_trait_objects = "warn"
elided_lifetimes_in_paths = "warn"
ellipsis_inclusive_range_patterns = "warn"
explicit_outlives_requirements = "warn"
unused_extern_crates = "warn"
unused_lifetimes = "warn" unused_lifetimes = "warn"
unreachable_pub = "warn" unreachable_pub = "warn"
semicolon_in_expressions_from_macros = "warn" semicolon_in_expressions_from_macros = "warn"

View file

@ -51,7 +51,7 @@ impl FileChange {
} }
pub fn apply(self, db: &mut dyn SourceDatabaseExt) { pub fn apply(self, db: &mut dyn SourceDatabaseExt) {
let _p = tracing::span!(tracing::Level::INFO, "FileChange::apply").entered(); let _p = tracing::info_span!("FileChange::apply").entered();
if let Some(roots) = self.roots { if let Some(roots) = self.roots {
for (idx, root) in roots.into_iter().enumerate() { for (idx, root) in roots.into_iter().enumerate() {
let root_id = SourceRootId(idx as u32); let root_id = SourceRootId(idx as u32);

View file

@ -412,7 +412,7 @@ impl CrateGraph {
from: CrateId, from: CrateId,
dep: Dependency, dep: Dependency,
) -> Result<(), CyclicDependenciesError> { ) -> Result<(), CyclicDependenciesError> {
let _p = tracing::span!(tracing::Level::INFO, "add_dep").entered(); let _p = tracing::info_span!("add_dep").entered();
self.check_cycle_after_dependency(from, dep.crate_id)?; self.check_cycle_after_dependency(from, dep.crate_id)?;

View file

@ -85,7 +85,7 @@ fn toolchain_channel(db: &dyn SourceDatabase, krate: CrateId) -> Option<ReleaseC
} }
fn parse(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> { fn parse(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
let _p = tracing::span!(tracing::Level::INFO, "parse", ?file_id).entered(); let _p = tracing::info_span!("parse", ?file_id).entered();
let text = db.file_text(file_id); let text = db.file_text(file_id);
// FIXME: Edition based parsing // FIXME: Edition based parsing
SourceFile::parse(&text, span::Edition::CURRENT) SourceFile::parse(&text, span::Edition::CURRENT)
@ -187,7 +187,7 @@ impl<T: SourceDatabaseExt> FileLoader for FileLoaderDelegate<&'_ T> {
} }
fn relevant_crates(&self, file_id: FileId) -> Arc<[CrateId]> { fn relevant_crates(&self, file_id: FileId) -> Arc<[CrateId]> {
let _p = tracing::span!(tracing::Level::INFO, "relevant_crates").entered(); let _p = tracing::info_span!("relevant_crates").entered();
let source_root = self.0.file_source_root(file_id); let source_root = self.0.file_source_root(file_id);
self.0.source_root_crates(source_root) self.0.source_root_crates(source_root)
} }

View file

@ -304,7 +304,7 @@ impl FlycheckActor {
Some(c) => c, Some(c) => c,
None => continue, None => continue,
}; };
let formatted_command = format!("{:?}", command); let formatted_command = format!("{command:?}");
tracing::debug!(?command, "will restart flycheck"); tracing::debug!(?command, "will restart flycheck");
let (sender, receiver) = unbounded(); let (sender, receiver) = unbounded();
@ -318,8 +318,7 @@ impl FlycheckActor {
} }
Err(error) => { Err(error) => {
self.report_progress(Progress::DidFailToRestart(format!( self.report_progress(Progress::DidFailToRestart(format!(
"Failed to run the following command: {} error={}", "Failed to run the following command: {formatted_command} error={error}"
formatted_command, error
))); )));
self.status = FlycheckStatus::Finished; self.status = FlycheckStatus::Finished;
} }
@ -331,7 +330,7 @@ impl FlycheckActor {
// Watcher finished // Watcher finished
let command_handle = self.command_handle.take().unwrap(); let command_handle = self.command_handle.take().unwrap();
self.command_receiver.take(); self.command_receiver.take();
let formatted_handle = format!("{:?}", command_handle); let formatted_handle = format!("{command_handle:?}");
let res = command_handle.join(); let res = command_handle.join();
if let Err(error) = &res { if let Err(error) = &res {
@ -387,6 +386,7 @@ impl FlycheckActor {
"did cancel flycheck" "did cancel flycheck"
); );
command_handle.cancel(); command_handle.cancel();
self.command_receiver.take();
self.report_progress(Progress::DidCancel); self.report_progress(Progress::DidCancel);
self.status = FlycheckStatus::Finished; self.status = FlycheckStatus::Finished;
} }

View file

@ -1,10 +1,5 @@
//! A higher level attributes based on TokenTree, with also some shortcuts. //! A higher level attributes based on TokenTree, with also some shortcuts.
pub mod builtin;
#[cfg(test)]
mod tests;
use std::{borrow::Cow, hash::Hash, ops, slice::Iter as SliceIter}; use std::{borrow::Cow, hash::Hash, ops, slice::Iter as SliceIter};
use base_db::CrateId; use base_db::CrateId;
@ -75,7 +70,7 @@ impl Attrs {
db: &dyn DefDatabase, db: &dyn DefDatabase,
v: VariantId, v: VariantId,
) -> Arc<ArenaMap<LocalFieldId, Attrs>> { ) -> Arc<ArenaMap<LocalFieldId, Attrs>> {
let _p = tracing::span!(tracing::Level::INFO, "fields_attrs_query").entered(); let _p = tracing::info_span!("fields_attrs_query").entered();
// FIXME: There should be some proper form of mapping between item tree field ids and hir field ids // FIXME: There should be some proper form of mapping between item tree field ids and hir field ids
let mut res = ArenaMap::default(); let mut res = ArenaMap::default();
@ -326,7 +321,7 @@ impl AttrsWithOwner {
} }
pub(crate) fn attrs_query(db: &dyn DefDatabase, def: AttrDefId) -> Attrs { pub(crate) fn attrs_query(db: &dyn DefDatabase, def: AttrDefId) -> Attrs {
let _p = tracing::span!(tracing::Level::INFO, "attrs_query").entered(); let _p = tracing::info_span!("attrs_query").entered();
// FIXME: this should use `Trace` to avoid duplication in `source_map` below // FIXME: this should use `Trace` to avoid duplication in `source_map` below
let raw_attrs = match def { let raw_attrs = match def {
AttrDefId::ModuleId(module) => { AttrDefId::ModuleId(module) => {
@ -646,3 +641,55 @@ pub(crate) fn fields_attrs_source_map(
Arc::new(res) Arc::new(res)
} }
#[cfg(test)]
mod tests {
//! This module contains tests for doc-expression parsing.
//! Currently, it tests `#[doc(hidden)]` and `#[doc(alias)]`.
use triomphe::Arc;
use base_db::FileId;
use hir_expand::span_map::{RealSpanMap, SpanMap};
use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode};
use syntax::{ast, AstNode, TextRange};
use crate::attr::{DocAtom, DocExpr};
fn assert_parse_result(input: &str, expected: DocExpr) {
let source_file = ast::SourceFile::parse(input, span::Edition::CURRENT).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId::from_raw(0))));
let tt = syntax_node_to_token_tree(
tt.syntax(),
map.as_ref(),
map.span_for_range(TextRange::empty(0.into())),
DocCommentDesugarMode::ProcMacro,
);
let cfg = DocExpr::parse(&tt);
assert_eq!(cfg, expected);
}
#[test]
fn test_doc_expr_parser() {
assert_parse_result("#![doc(hidden)]", DocAtom::Flag("hidden".into()).into());
assert_parse_result(
r#"#![doc(alias = "foo")]"#,
DocAtom::KeyValue { key: "alias".into(), value: "foo".into() }.into(),
);
assert_parse_result(r#"#![doc(alias("foo"))]"#, DocExpr::Alias(["foo".into()].into()));
assert_parse_result(
r#"#![doc(alias("foo", "bar", "baz"))]"#,
DocExpr::Alias(["foo".into(), "bar".into(), "baz".into()].into()),
);
assert_parse_result(
r#"
#[doc(alias("Bar", "Qux"))]
struct Foo;"#,
DocExpr::Alias(["Bar".into(), "Qux".into()].into()),
);
}
}

View file

@ -1,48 +0,0 @@
//! This module contains tests for doc-expression parsing.
//! Currently, it tests `#[doc(hidden)]` and `#[doc(alias)]`.
use triomphe::Arc;
use base_db::FileId;
use hir_expand::span_map::{RealSpanMap, SpanMap};
use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode};
use syntax::{ast, AstNode, TextRange};
use crate::attr::{DocAtom, DocExpr};
fn assert_parse_result(input: &str, expected: DocExpr) {
let source_file = ast::SourceFile::parse(input, span::Edition::CURRENT).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId::from_raw(0))));
let tt = syntax_node_to_token_tree(
tt.syntax(),
map.as_ref(),
map.span_for_range(TextRange::empty(0.into())),
DocCommentDesugarMode::ProcMacro,
);
let cfg = DocExpr::parse(&tt);
assert_eq!(cfg, expected);
}
#[test]
fn test_doc_expr_parser() {
assert_parse_result("#![doc(hidden)]", DocAtom::Flag("hidden".into()).into());
assert_parse_result(
r#"#![doc(alias = "foo")]"#,
DocAtom::KeyValue { key: "alias".into(), value: "foo".into() }.into(),
);
assert_parse_result(r#"#![doc(alias("foo"))]"#, DocExpr::Alias(["foo".into()].into()));
assert_parse_result(
r#"#![doc(alias("foo", "bar", "baz"))]"#,
DocExpr::Alias(["foo".into(), "bar".into(), "baz".into()].into()),
);
assert_parse_result(
r#"
#[doc(alias("Bar", "Qux"))]
struct Foo;"#,
DocExpr::Alias(["Bar".into(), "Qux".into()].into()),
);
}

View file

@ -124,7 +124,7 @@ impl Body {
db: &dyn DefDatabase, db: &dyn DefDatabase,
def: DefWithBodyId, def: DefWithBodyId,
) -> (Arc<Body>, Arc<BodySourceMap>) { ) -> (Arc<Body>, Arc<BodySourceMap>) {
let _p = tracing::span!(tracing::Level::INFO, "body_with_source_map_query").entered(); let _p = tracing::info_span!("body_with_source_map_query").entered();
let mut params = None; let mut params = None;
let mut is_async_fn = false; let mut is_async_fn = false;
@ -395,6 +395,12 @@ impl BodySourceMap {
self.expr_map.get(&src).copied() self.expr_map.get(&src).copied()
} }
pub fn expansions(
&self,
) -> impl Iterator<Item = (&InFile<AstPtr<ast::MacroCall>>, &MacroFileId)> {
self.expansions.iter()
}
pub fn implicit_format_args( pub fn implicit_format_args(
&self, &self,
node: InFile<&ast::FormatArgsExpr>, node: InFile<&ast::FormatArgsExpr>,

View file

@ -12,6 +12,7 @@ use intern::Interned;
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use smallvec::SmallVec; use smallvec::SmallVec;
use span::AstIdMap; use span::AstIdMap;
use stdx::never;
use syntax::{ use syntax::{
ast::{ ast::{
self, ArrayExprKind, AstChildren, BlockExpr, HasArgList, HasAttrs, HasLoopBody, HasName, self, ArrayExprKind, AstChildren, BlockExpr, HasArgList, HasAttrs, HasLoopBody, HasName,
@ -480,7 +481,8 @@ impl ExprCollector<'_> {
} else if e.const_token().is_some() { } else if e.const_token().is_some() {
Mutability::Shared Mutability::Shared
} else { } else {
unreachable!("parser only remaps to raw_token() if matching mutability token follows") never!("parser only remaps to raw_token() if matching mutability token follows");
Mutability::Shared
} }
} else { } else {
Mutability::from_mutable(e.mut_token().is_some()) Mutability::from_mutable(e.mut_token().is_some())
@ -963,7 +965,7 @@ impl ExprCollector<'_> {
.resolve_path( .resolve_path(
self.db, self.db,
module, module,
&path, path,
crate::item_scope::BuiltinShadowMode::Other, crate::item_scope::BuiltinShadowMode::Other,
Some(MacroSubNs::Bang), Some(MacroSubNs::Bang),
) )
@ -1006,9 +1008,9 @@ impl ExprCollector<'_> {
Some((mark, expansion)) => { Some((mark, expansion)) => {
// Keep collecting even with expansion errors so we can provide completions and // Keep collecting even with expansion errors so we can provide completions and
// other services in incomplete macro expressions. // other services in incomplete macro expressions.
self.source_map if let Some(macro_file) = self.expander.current_file_id().macro_file() {
.expansions self.source_map.expansions.insert(macro_call_ptr, macro_file);
.insert(macro_call_ptr, self.expander.current_file_id().macro_file().unwrap()); }
let prev_ast_id_map = mem::replace( let prev_ast_id_map = mem::replace(
&mut self.ast_id_map, &mut self.ast_id_map,
self.db.ast_id_map(self.expander.current_file_id()), self.db.ast_id_map(self.expander.current_file_id()),

View file

@ -48,21 +48,30 @@ pub(super) fn print_body_hir(db: &dyn DefDatabase, body: &Body, owner: DefWithBo
let mut p = Printer { db, body, buf: header, indent_level: 0, needs_indent: false }; let mut p = Printer { db, body, buf: header, indent_level: 0, needs_indent: false };
if let DefWithBodyId::FunctionId(it) = owner { if let DefWithBodyId::FunctionId(it) = owner {
p.buf.push('('); p.buf.push('(');
let params = &db.function_data(it).params; let function_data = &db.function_data(it);
let mut params = params.iter(); let (mut params, ret_type) = (function_data.params.iter(), &function_data.ret_type);
if let Some(self_param) = body.self_param { if let Some(self_param) = body.self_param {
p.print_binding(self_param); p.print_binding(self_param);
p.buf.push(':'); p.buf.push_str(": ");
if let Some(ty) = params.next() { if let Some(ty) = params.next() {
p.print_type_ref(ty); p.print_type_ref(ty);
p.buf.push_str(", ");
} }
} }
body.params.iter().zip(params).for_each(|(&param, ty)| { body.params.iter().zip(params).for_each(|(&param, ty)| {
p.print_pat(param); p.print_pat(param);
p.buf.push(':'); p.buf.push_str(": ");
p.print_type_ref(ty); p.print_type_ref(ty);
p.buf.push_str(", ");
}); });
// remove the last ", " in param list
if body.params.len() > 0 {
p.buf.truncate(p.buf.len() - 2);
}
p.buf.push(')'); p.buf.push(')');
// return type
p.buf.push_str(" -> ");
p.print_type_ref(ret_type);
p.buf.push(' '); p.buf.push(' ');
} }
p.print_expr(body.body_expr); p.print_expr(body.body_expr);

View file

@ -156,7 +156,7 @@ fn main() {
); );
expect![[r#" expect![[r#"
fn main() { fn main() -> () {
let are = "are"; let are = "are";
let count = 10; let count = 10;
builtin#lang(Arguments::new_v1_formatted)( builtin#lang(Arguments::new_v1_formatted)(
@ -258,7 +258,7 @@ impl SsrError {
assert_eq!(db.body_with_source_map(def).1.diagnostics(), &[]); assert_eq!(db.body_with_source_map(def).1.diagnostics(), &[]);
expect![[r#" expect![[r#"
fn main() { fn main() -> () {
_ = $crate::error::SsrError::new( _ = $crate::error::SsrError::new(
builtin#lang(Arguments::new_v1_formatted)( builtin#lang(Arguments::new_v1_formatted)(
&[ &[
@ -303,7 +303,7 @@ macro_rules! m {
}; };
} }
fn f() { fn f(a: i32, b: u32) -> String {
m!(); m!();
} }
"#, "#,
@ -317,7 +317,7 @@ fn f() {
} }
expect![[r#" expect![[r#"
fn f() { fn f(a: i32, b: u32) -> String {
{ {
$crate::panicking::panic_fmt( $crate::panicking::panic_fmt(
builtin#lang(Arguments::new_v1_formatted)( builtin#lang(Arguments::new_v1_formatted)(

View file

@ -6,7 +6,7 @@
use either::Either; use either::Either;
use hir_expand::{attrs::collect_attrs, HirFileId}; use hir_expand::{attrs::collect_attrs, HirFileId};
use syntax::ast; use syntax::{ast, AstPtr};
use crate::{ use crate::{
db::DefDatabase, db::DefDatabase,
@ -38,7 +38,7 @@ impl ChildBySource for TraitId {
data.attribute_calls().filter(|(ast_id, _)| ast_id.file_id == file_id).for_each( data.attribute_calls().filter(|(ast_id, _)| ast_id.file_id == file_id).for_each(
|(ast_id, call_id)| { |(ast_id, call_id)| {
res[keys::ATTR_MACRO_CALL].insert(ast_id.to_node(db.upcast()), call_id); res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db.upcast()), call_id);
}, },
); );
data.items.iter().for_each(|&(_, item)| { data.items.iter().for_each(|&(_, item)| {
@ -50,9 +50,10 @@ impl ChildBySource for TraitId {
impl ChildBySource for ImplId { impl ChildBySource for ImplId {
fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) { fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
let data = db.impl_data(*self); let data = db.impl_data(*self);
// FIXME: Macro calls
data.attribute_calls().filter(|(ast_id, _)| ast_id.file_id == file_id).for_each( data.attribute_calls().filter(|(ast_id, _)| ast_id.file_id == file_id).for_each(
|(ast_id, call_id)| { |(ast_id, call_id)| {
res[keys::ATTR_MACRO_CALL].insert(ast_id.to_node(db.upcast()), call_id); res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db.upcast()), call_id);
}, },
); );
data.items.iter().for_each(|&item| { data.items.iter().for_each(|&item| {
@ -80,7 +81,7 @@ impl ChildBySource for ItemScope {
.for_each(|konst| insert_item_loc(db, res, file_id, konst, keys::CONST)); .for_each(|konst| insert_item_loc(db, res, file_id, konst, keys::CONST));
self.attr_macro_invocs().filter(|(id, _)| id.file_id == file_id).for_each( self.attr_macro_invocs().filter(|(id, _)| id.file_id == file_id).for_each(
|(ast_id, call_id)| { |(ast_id, call_id)| {
res[keys::ATTR_MACRO_CALL].insert(ast_id.to_node(db.upcast()), call_id); res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db.upcast()), call_id);
}, },
); );
self.legacy_macros().for_each(|(_, ids)| { self.legacy_macros().for_each(|(_, ids)| {
@ -88,7 +89,7 @@ impl ChildBySource for ItemScope {
if let MacroId::MacroRulesId(id) = id { if let MacroId::MacroRulesId(id) = id {
let loc = id.lookup(db); let loc = id.lookup(db);
if loc.id.file_id() == file_id { if loc.id.file_id() == file_id {
res[keys::MACRO_RULES].insert(loc.source(db).value, id); res[keys::MACRO_RULES].insert(loc.ast_ptr(db).value, id);
} }
} }
}) })
@ -100,12 +101,18 @@ impl ChildBySource for ItemScope {
if let Some((_, Either::Left(attr))) = if let Some((_, Either::Left(attr))) =
collect_attrs(&adt).nth(attr_id.ast_index()) collect_attrs(&adt).nth(attr_id.ast_index())
{ {
res[keys::DERIVE_MACRO_CALL].insert(attr, (attr_id, call_id, calls.into())); res[keys::DERIVE_MACRO_CALL]
.insert(AstPtr::new(&attr), (attr_id, call_id, calls.into()));
} }
}); });
}, },
); );
self.iter_macro_invoc().filter(|(id, _)| id.file_id == file_id).for_each(
|(ast_id, &call)| {
let ast = ast_id.to_ptr(db.upcast());
res[keys::MACRO_CALL].insert(ast, call);
},
);
fn add_module_def( fn add_module_def(
db: &dyn DefDatabase, db: &dyn DefDatabase,
map: &mut DynMap, map: &mut DynMap,
@ -155,8 +162,8 @@ impl ChildBySource for VariantId {
for (local_id, source) in arena_map.value.iter() { for (local_id, source) in arena_map.value.iter() {
let id = FieldId { parent, local_id }; let id = FieldId { parent, local_id };
match source.clone() { match source.clone() {
Either::Left(source) => res[keys::TUPLE_FIELD].insert(source, id), Either::Left(source) => res[keys::TUPLE_FIELD].insert(AstPtr::new(&source), id),
Either::Right(source) => res[keys::RECORD_FIELD].insert(source, id), Either::Right(source) => res[keys::RECORD_FIELD].insert(AstPtr::new(&source), id),
} }
} }
} }
@ -171,29 +178,30 @@ impl ChildBySource for EnumId {
let tree = loc.id.item_tree(db); let tree = loc.id.item_tree(db);
let ast_id_map = db.ast_id_map(loc.id.file_id()); let ast_id_map = db.ast_id_map(loc.id.file_id());
let root = db.parse_or_expand(loc.id.file_id());
db.enum_data(*self).variants.iter().for_each(|&(variant, _)| { db.enum_data(*self).variants.iter().for_each(|&(variant, _)| {
res[keys::ENUM_VARIANT].insert( res[keys::ENUM_VARIANT]
ast_id_map.get(tree[variant.lookup(db).id.value].ast_id).to_node(&root), .insert(ast_id_map.get(tree[variant.lookup(db).id.value].ast_id), variant);
variant,
);
}); });
} }
} }
impl ChildBySource for DefWithBodyId { impl ChildBySource for DefWithBodyId {
fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) { fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
let body = db.body(*self); let (body, sm) = db.body_with_source_map(*self);
if let &DefWithBodyId::VariantId(v) = self { if let &DefWithBodyId::VariantId(v) = self {
VariantId::EnumVariantId(v).child_by_source_to(db, res, file_id) VariantId::EnumVariantId(v).child_by_source_to(db, res, file_id)
} }
sm.expansions().filter(|(ast, _)| ast.file_id == file_id).for_each(|(ast, &exp_id)| {
res[keys::MACRO_CALL].insert(ast.value, exp_id.macro_call_id);
});
for (block, def_map) in body.blocks(db) { for (block, def_map) in body.blocks(db) {
// All block expressions are merged into the same map, because they logically all add // All block expressions are merged into the same map, because they logically all add
// inner items to the containing `DefWithBodyId`. // inner items to the containing `DefWithBodyId`.
def_map[DefMap::ROOT].scope.child_by_source_to(db, res, file_id); def_map[DefMap::ROOT].scope.child_by_source_to(db, res, file_id);
res[keys::BLOCK].insert(block.lookup(db).ast_id.to_node(db.upcast()), block); res[keys::BLOCK].insert(block.lookup(db).ast_id.to_ptr(db.upcast()), block);
} }
} }
} }
@ -220,13 +228,17 @@ impl ChildBySource for GenericDefId {
{ {
let id = TypeOrConstParamId { parent: *self, local_id }; let id = TypeOrConstParamId { parent: *self, local_id };
match ast_param { match ast_param {
ast::TypeOrConstParam::Type(a) => res[keys::TYPE_PARAM].insert(a, id), ast::TypeOrConstParam::Type(a) => {
ast::TypeOrConstParam::Const(a) => res[keys::CONST_PARAM].insert(a, id), res[keys::TYPE_PARAM].insert(AstPtr::new(&a), id)
}
ast::TypeOrConstParam::Const(a) => {
res[keys::CONST_PARAM].insert(AstPtr::new(&a), id)
}
} }
} }
for (local_id, ast_param) in lts_idx_iter.zip(generic_params_list.lifetime_params()) { for (local_id, ast_param) in lts_idx_iter.zip(generic_params_list.lifetime_params()) {
let id = LifetimeParamId { parent: *self, local_id }; let id = LifetimeParamId { parent: *self, local_id };
res[keys::LIFETIME_PARAM].insert(ast_param, id); res[keys::LIFETIME_PARAM].insert(AstPtr::new(&ast_param), id);
} }
} }
} }
@ -246,7 +258,7 @@ fn insert_item_loc<ID, N, Data>(
{ {
let loc = id.lookup(db); let loc = id.lookup(db);
if loc.item_tree_id().file_id() == file_id { if loc.item_tree_id().file_id() == file_id {
res[key].insert(loc.source(db).value, id) res[key].insert(loc.ast_ptr(db).value, id)
} }
} }

View file

@ -340,7 +340,7 @@ impl ImplData {
db: &dyn DefDatabase, db: &dyn DefDatabase,
id: ImplId, id: ImplId,
) -> (Arc<ImplData>, DefDiagnostics) { ) -> (Arc<ImplData>, DefDiagnostics) {
let _p = tracing::span!(tracing::Level::INFO, "impl_data_with_diagnostics_query").entered(); let _p = tracing::info_span!("impl_data_with_diagnostics_query").entered();
let ItemLoc { container: module_id, id: tree_id } = id.lookup(db); let ItemLoc { container: module_id, id: tree_id } = id.lookup(db);
let item_tree = tree_id.item_tree(db); let item_tree = tree_id.item_tree(db);
@ -628,7 +628,7 @@ impl<'a> AssocItemCollector<'a> {
'attrs: for attr in &*attrs { 'attrs: for attr in &*attrs {
let ast_id = let ast_id =
AstId::new(self.expander.current_file_id(), item.ast_id(item_tree).upcast()); AstId::new(self.expander.current_file_id(), item.ast_id(item_tree).upcast());
let ast_id_with_path = AstIdWithPath { path: (*attr.path).clone(), ast_id }; let ast_id_with_path = AstIdWithPath { path: attr.path.clone(), ast_id };
match self.def_map.resolve_attr_macro( match self.def_map.resolve_attr_macro(
self.db, self.db,
@ -642,7 +642,7 @@ impl<'a> AssocItemCollector<'a> {
continue 'attrs; continue 'attrs;
} }
let loc = self.db.lookup_intern_macro_call(call_id); let loc = self.db.lookup_intern_macro_call(call_id);
if let MacroDefKind::ProcMacro(exp, ..) = loc.def.kind { if let MacroDefKind::ProcMacro(_, exp, _) = loc.def.kind {
// If there's no expander for the proc macro (e.g. the // If there's no expander for the proc macro (e.g. the
// proc macro is ignored, or building the proc macro // proc macro is ignored, or building the proc macro
// crate failed), skip expansion like we would if it was // crate failed), skip expansion like we would if it was
@ -719,12 +719,12 @@ impl<'a> AssocItemCollector<'a> {
let MacroCall { ast_id, expand_to, ctxt, ref path } = item_tree[call]; let MacroCall { ast_id, expand_to, ctxt, ref path } = item_tree[call];
let module = self.expander.module.local_id; let module = self.expander.module.local_id;
let resolver = |path| { let resolver = |path: &_| {
self.def_map self.def_map
.resolve_path( .resolve_path(
self.db, self.db,
module, module,
&path, path,
crate::item_scope::BuiltinShadowMode::Other, crate::item_scope::BuiltinShadowMode::Other,
Some(MacroSubNs::Bang), Some(MacroSubNs::Bang),
) )

View file

@ -294,10 +294,10 @@ fn macro_def(db: &dyn DefDatabase, id: MacroId) -> MacroDefId {
let in_file = InFile::new(file_id, m); let in_file = InFile::new(file_id, m);
match expander { match expander {
MacroExpander::Declarative => MacroDefKind::Declarative(in_file), MacroExpander::Declarative => MacroDefKind::Declarative(in_file),
MacroExpander::BuiltIn(it) => MacroDefKind::BuiltIn(it, in_file), MacroExpander::BuiltIn(it) => MacroDefKind::BuiltIn(in_file, it),
MacroExpander::BuiltInAttr(it) => MacroDefKind::BuiltInAttr(it, in_file), MacroExpander::BuiltInAttr(it) => MacroDefKind::BuiltInAttr(in_file, it),
MacroExpander::BuiltInDerive(it) => MacroDefKind::BuiltInDerive(it, in_file), MacroExpander::BuiltInDerive(it) => MacroDefKind::BuiltInDerive(in_file, it),
MacroExpander::BuiltInEager(it) => MacroDefKind::BuiltInEager(it, in_file), MacroExpander::BuiltInEager(it) => MacroDefKind::BuiltInEager(in_file, it),
} }
}; };
@ -338,9 +338,9 @@ fn macro_def(db: &dyn DefDatabase, id: MacroId) -> MacroDefId {
MacroDefId { MacroDefId {
krate: loc.container.krate, krate: loc.container.krate,
kind: MacroDefKind::ProcMacro( kind: MacroDefKind::ProcMacro(
InFile::new(loc.id.file_id(), makro.ast_id),
loc.expander, loc.expander,
loc.kind, loc.kind,
InFile::new(loc.id.file_id(), makro.ast_id),
), ),
local_inner: false, local_inner: false,
allow_internal_unsafe: false, allow_internal_unsafe: false,

View file

@ -13,7 +13,7 @@ use crate::{
TraitId, TypeAliasId, TypeOrConstParamId, UnionId, UseId, TraitId, TypeAliasId, TypeOrConstParamId, UnionId, UseId,
}; };
pub type Key<K, V> = crate::dyn_map::Key<K, V, AstPtrPolicy<K, V>>; pub type Key<K, V> = crate::dyn_map::Key<AstPtr<K>, V, AstPtrPolicy<K, V>>;
pub const BLOCK: Key<ast::BlockExpr, BlockId> = Key::new(); pub const BLOCK: Key<ast::BlockExpr, BlockId> = Key::new();
pub const FUNCTION: Key<ast::Fn, FunctionId> = Key::new(); pub const FUNCTION: Key<ast::Fn, FunctionId> = Key::new();
@ -39,6 +39,7 @@ pub const LIFETIME_PARAM: Key<ast::LifetimeParam, LifetimeParamId> = Key::new();
pub const MACRO_RULES: Key<ast::MacroRules, MacroRulesId> = Key::new(); pub const MACRO_RULES: Key<ast::MacroRules, MacroRulesId> = Key::new();
pub const MACRO2: Key<ast::MacroDef, Macro2Id> = Key::new(); pub const MACRO2: Key<ast::MacroDef, Macro2Id> = Key::new();
pub const PROC_MACRO: Key<ast::Fn, ProcMacroId> = Key::new(); pub const PROC_MACRO: Key<ast::Fn, ProcMacroId> = Key::new();
pub const MACRO_CALL: Key<ast::MacroCall, MacroCallId> = Key::new();
pub const ATTR_MACRO_CALL: Key<ast::Item, MacroCallId> = Key::new(); pub const ATTR_MACRO_CALL: Key<ast::Item, MacroCallId> = Key::new();
pub const DERIVE_MACRO_CALL: Key<ast::Attr, (AttrId, MacroCallId, Box<[Option<MacroCallId>]>)> = pub const DERIVE_MACRO_CALL: Key<ast::Attr, (AttrId, MacroCallId, Box<[Option<MacroCallId>]>)> =
Key::new(); Key::new();
@ -54,18 +55,16 @@ pub struct AstPtrPolicy<AST, ID> {
} }
impl<AST: AstNode + 'static, ID: 'static> Policy for AstPtrPolicy<AST, ID> { impl<AST: AstNode + 'static, ID: 'static> Policy for AstPtrPolicy<AST, ID> {
type K = AST; type K = AstPtr<AST>;
type V = ID; type V = ID;
fn insert(map: &mut DynMap, key: AST, value: ID) { fn insert(map: &mut DynMap, key: AstPtr<AST>, value: ID) {
let key = AstPtr::new(&key);
map.map map.map
.entry::<FxHashMap<AstPtr<AST>, ID>>() .entry::<FxHashMap<AstPtr<AST>, ID>>()
.or_insert_with(Default::default) .or_insert_with(Default::default)
.insert(key, value); .insert(key, value);
} }
fn get<'a>(map: &'a DynMap, key: &AST) -> Option<&'a ID> { fn get<'a>(map: &'a DynMap, key: &AstPtr<AST>) -> Option<&'a ID> {
let key = AstPtr::new(key); map.map.get::<FxHashMap<AstPtr<AST>, ID>>()?.get(key)
map.map.get::<FxHashMap<AstPtr<AST>, ID>>()?.get(&key)
} }
fn is_empty(map: &DynMap) -> bool { fn is_empty(map: &DynMap) -> bool {
map.map.get::<FxHashMap<AstPtr<AST>, ID>>().map_or(true, |it| it.is_empty()) map.map.get::<FxHashMap<AstPtr<AST>, ID>>().map_or(true, |it| it.is_empty())

View file

@ -56,7 +56,7 @@ impl Expander {
&mut self, &mut self,
db: &dyn DefDatabase, db: &dyn DefDatabase,
macro_call: ast::MacroCall, macro_call: ast::MacroCall,
resolver: impl Fn(ModPath) -> Option<MacroId>, resolver: impl Fn(&ModPath) -> Option<MacroId>,
) -> Result<ExpandResult<Option<(Mark, Parse<T>)>>, UnresolvedMacro> { ) -> Result<ExpandResult<Option<(Mark, Parse<T>)>>, UnresolvedMacro> {
// FIXME: within_limit should support this, instead of us having to extract the error // FIXME: within_limit should support this, instead of us having to extract the error
let mut unresolved_macro_err = None; let mut unresolved_macro_err = None;

View file

@ -1,6 +1,6 @@
//! An algorithm to find a path to refer to a certain item. //! An algorithm to find a path to refer to a certain item.
use std::{cmp::Ordering, iter}; use std::{cell::Cell, cmp::Ordering, iter};
use hir_expand::{ use hir_expand::{
name::{known, AsName, Name}, name::{known, AsName, Name},
@ -23,15 +23,40 @@ pub fn find_path(
db: &dyn DefDatabase, db: &dyn DefDatabase,
item: ItemInNs, item: ItemInNs,
from: ModuleId, from: ModuleId,
prefix_kind: PrefixKind, mut prefix_kind: PrefixKind,
ignore_local_imports: bool, ignore_local_imports: bool,
cfg: ImportPathConfig, mut cfg: ImportPathConfig,
) -> Option<ModPath> { ) -> Option<ModPath> {
let _p = tracing::span!(tracing::Level::INFO, "find_path").entered(); let _p = tracing::info_span!("find_path").entered();
find_path_inner(FindPathCtx { db, prefix: prefix_kind, cfg, ignore_local_imports }, item, from)
// - if the item is a builtin, it's in scope
if let ItemInNs::Types(ModuleDefId::BuiltinType(builtin)) = item {
return Some(ModPath::from_segments(PathKind::Plain, iter::once(builtin.as_name())));
}
// within block modules, forcing a `self` or `crate` prefix will not allow using inner items, so
// default to plain paths.
if item.module(db).is_some_and(ModuleId::is_within_block) {
prefix_kind = PrefixKind::Plain;
}
cfg.prefer_no_std = cfg.prefer_no_std || db.crate_supports_no_std(from.krate());
find_path_inner(
&FindPathCtx {
db,
prefix: prefix_kind,
cfg,
ignore_local_imports,
from,
from_def_map: &from.def_map(db),
fuel: Cell::new(FIND_PATH_FUEL),
},
item,
MAX_PATH_LEN,
)
} }
#[derive(Copy, Clone, Debug)] #[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum Stability { enum Stability {
Unstable, Unstable,
Stable, Stable,
@ -46,6 +71,7 @@ fn zip_stability(a: Stability, b: Stability) -> Stability {
} }
const MAX_PATH_LEN: usize = 15; const MAX_PATH_LEN: usize = 15;
const FIND_PATH_FUEL: usize = 10000;
#[derive(Copy, Clone, Debug, PartialEq, Eq)] #[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum PrefixKind { pub enum PrefixKind {
@ -63,79 +89,54 @@ impl PrefixKind {
#[inline] #[inline]
fn path_kind(self) -> PathKind { fn path_kind(self) -> PathKind {
match self { match self {
PrefixKind::BySelf => PathKind::Super(0), PrefixKind::BySelf => PathKind::SELF,
PrefixKind::Plain => PathKind::Plain, PrefixKind::Plain => PathKind::Plain,
PrefixKind::ByCrate => PathKind::Crate, PrefixKind::ByCrate => PathKind::Crate,
} }
} }
} }
#[derive(Copy, Clone)]
struct FindPathCtx<'db> { struct FindPathCtx<'db> {
db: &'db dyn DefDatabase, db: &'db dyn DefDatabase,
prefix: PrefixKind, prefix: PrefixKind,
cfg: ImportPathConfig, cfg: ImportPathConfig,
ignore_local_imports: bool, ignore_local_imports: bool,
from: ModuleId,
from_def_map: &'db DefMap,
fuel: Cell<usize>,
} }
/// Attempts to find a path to refer to the given `item` visible from the `from` ModuleId /// Attempts to find a path to refer to the given `item` visible from the `from` ModuleId
fn find_path_inner(ctx: FindPathCtx<'_>, item: ItemInNs, from: ModuleId) -> Option<ModPath> { fn find_path_inner(ctx: &FindPathCtx<'_>, item: ItemInNs, max_len: usize) -> Option<ModPath> {
// - if the item is a builtin, it's in scope
if let ItemInNs::Types(ModuleDefId::BuiltinType(builtin)) = item {
return Some(ModPath::from_segments(PathKind::Plain, iter::once(builtin.as_name())));
}
let def_map = from.def_map(ctx.db);
let crate_root = from.derive_crate_root();
// - if the item is a module, jump straight to module search // - if the item is a module, jump straight to module search
if let ItemInNs::Types(ModuleDefId::ModuleId(module_id)) = item { if let ItemInNs::Types(ModuleDefId::ModuleId(module_id)) = item {
let mut visited_modules = FxHashSet::default(); let mut visited_modules = FxHashSet::default();
return find_path_for_module( return find_path_for_module(ctx, &mut visited_modules, module_id, max_len)
FindPathCtx { .map(|(item, _)| item);
cfg: ImportPathConfig {
prefer_no_std: ctx.cfg.prefer_no_std
|| ctx.db.crate_supports_no_std(crate_root.krate),
..ctx.cfg
},
..ctx
},
&def_map,
&mut visited_modules,
from,
module_id,
MAX_PATH_LEN,
)
.map(|(item, _)| item);
} }
let prefix = if item.module(ctx.db).is_some_and(|it| it.is_within_block()) { let may_be_in_scope = match ctx.prefix {
PrefixKind::Plain
} else {
ctx.prefix
};
let may_be_in_scope = match prefix {
PrefixKind::Plain | PrefixKind::BySelf => true, PrefixKind::Plain | PrefixKind::BySelf => true,
PrefixKind::ByCrate => from.is_crate_root(), PrefixKind::ByCrate => ctx.from.is_crate_root(),
}; };
if may_be_in_scope { if may_be_in_scope {
// - if the item is already in scope, return the name under which it is // - if the item is already in scope, return the name under which it is
let scope_name = find_in_scope(ctx.db, &def_map, from, item, ctx.ignore_local_imports); let scope_name =
find_in_scope(ctx.db, ctx.from_def_map, ctx.from, item, ctx.ignore_local_imports);
if let Some(scope_name) = scope_name { if let Some(scope_name) = scope_name {
return Some(ModPath::from_segments(prefix.path_kind(), iter::once(scope_name))); return Some(ModPath::from_segments(ctx.prefix.path_kind(), iter::once(scope_name)));
} }
} }
// - if the item is in the prelude, return the name from there // - if the item is in the prelude, return the name from there
if let value @ Some(_) = if let Some(value) = find_in_prelude(ctx.db, ctx.from_def_map, item, ctx.from) {
find_in_prelude(ctx.db, &crate_root.def_map(ctx.db), &def_map, item, from) return Some(value);
{
return value;
} }
if let Some(ModuleDefId::EnumVariantId(variant)) = item.as_module_def_id() { if let Some(ModuleDefId::EnumVariantId(variant)) = item.as_module_def_id() {
// - if the item is an enum variant, refer to it via the enum // - if the item is an enum variant, refer to it via the enum
if let Some(mut path) = if let Some(mut path) =
find_path_inner(ctx, ItemInNs::Types(variant.lookup(ctx.db).parent.into()), from) find_path_inner(ctx, ItemInNs::Types(variant.lookup(ctx.db).parent.into()), max_len)
{ {
path.push_segment(ctx.db.enum_variant_data(variant).name.clone()); path.push_segment(ctx.db.enum_variant_data(variant).name.clone());
return Some(path); return Some(path);
@ -147,53 +148,32 @@ fn find_path_inner(ctx: FindPathCtx<'_>, item: ItemInNs, from: ModuleId) -> Opti
let mut visited_modules = FxHashSet::default(); let mut visited_modules = FxHashSet::default();
calculate_best_path( calculate_best_path(ctx, &mut visited_modules, item, max_len).map(|(item, _)| item)
FindPathCtx {
cfg: ImportPathConfig {
prefer_no_std: ctx.cfg.prefer_no_std
|| ctx.db.crate_supports_no_std(crate_root.krate),
..ctx.cfg
},
..ctx
},
&def_map,
&mut visited_modules,
MAX_PATH_LEN,
item,
from,
)
.map(|(item, _)| item)
} }
#[tracing::instrument(skip_all)] #[tracing::instrument(skip_all)]
fn find_path_for_module( fn find_path_for_module(
ctx: FindPathCtx<'_>, ctx: &FindPathCtx<'_>,
def_map: &DefMap,
visited_modules: &mut FxHashSet<ModuleId>, visited_modules: &mut FxHashSet<ModuleId>,
from: ModuleId,
module_id: ModuleId, module_id: ModuleId,
max_len: usize, max_len: usize,
) -> Option<(ModPath, Stability)> { ) -> Option<(ModPath, Stability)> {
if max_len == 0 { if let Some(crate_root) = module_id.as_crate_root() {
return None; if crate_root == ctx.from.derive_crate_root() {
} // - if the item is the crate root, return `crate`
return Some((ModPath::from_segments(PathKind::Crate, None), Stable));
}
// - otherwise if the item is the crate root of a dependency crate, return the name from the extern prelude
let is_crate_root = module_id.as_crate_root(); let root_def_map = ctx.from.derive_crate_root().def_map(ctx.db);
// - if the item is the crate root, return `crate`
if is_crate_root.is_some_and(|it| it == from.derive_crate_root()) {
return Some((ModPath::from_segments(PathKind::Crate, None), Stable));
}
let root_def_map = from.derive_crate_root().def_map(ctx.db);
// - if the item is the crate root of a dependency crate, return the name from the extern prelude
if let Some(crate_root) = is_crate_root {
// rev here so we prefer looking at renamed extern decls first // rev here so we prefer looking at renamed extern decls first
for (name, (def_id, _extern_crate)) in root_def_map.extern_prelude().rev() { for (name, (def_id, _extern_crate)) in root_def_map.extern_prelude().rev() {
if crate_root != def_id { if crate_root != def_id {
continue; continue;
} }
let name_already_occupied_in_type_ns = def_map let name_already_occupied_in_type_ns = ctx
.with_ancestor_maps(ctx.db, from.local_id, &mut |def_map, local_id| { .from_def_map
.with_ancestor_maps(ctx.db, ctx.from.local_id, &mut |def_map, local_id| {
def_map[local_id] def_map[local_id]
.scope .scope
.type_(name) .type_(name)
@ -209,30 +189,30 @@ fn find_path_for_module(
return Some((ModPath::from_segments(kind, iter::once(name.clone())), Stable)); return Some((ModPath::from_segments(kind, iter::once(name.clone())), Stable));
} }
} }
let prefix = if module_id.is_within_block() { PrefixKind::Plain } else { ctx.prefix };
let may_be_in_scope = match prefix { let may_be_in_scope = match ctx.prefix {
PrefixKind::Plain | PrefixKind::BySelf => true, PrefixKind::Plain | PrefixKind::BySelf => true,
PrefixKind::ByCrate => from.is_crate_root(), PrefixKind::ByCrate => ctx.from.is_crate_root(),
}; };
if may_be_in_scope { if may_be_in_scope {
let scope_name = find_in_scope( let scope_name = find_in_scope(
ctx.db, ctx.db,
def_map, ctx.from_def_map,
from, ctx.from,
ItemInNs::Types(module_id.into()), ItemInNs::Types(module_id.into()),
ctx.ignore_local_imports, ctx.ignore_local_imports,
); );
if let Some(scope_name) = scope_name { if let Some(scope_name) = scope_name {
// - if the item is already in scope, return the name under which it is // - if the item is already in scope, return the name under which it is
return Some(( return Some((
ModPath::from_segments(prefix.path_kind(), iter::once(scope_name)), ModPath::from_segments(ctx.prefix.path_kind(), iter::once(scope_name)),
Stable, Stable,
)); ));
} }
} }
// - if the module can be referenced as self, super or crate, do that // - if the module can be referenced as self, super or crate, do that
if let Some(mod_path) = is_kw_kind_relative_to_from(def_map, module_id, from) { if let Some(mod_path) = is_kw_kind_relative_to_from(ctx.from_def_map, module_id, ctx.from) {
if ctx.prefix != PrefixKind::ByCrate || mod_path.kind == PathKind::Crate { if ctx.prefix != PrefixKind::ByCrate || mod_path.kind == PathKind::Crate {
return Some((mod_path, Stable)); return Some((mod_path, Stable));
} }
@ -240,21 +220,13 @@ fn find_path_for_module(
// - if the module is in the prelude, return it by that path // - if the module is in the prelude, return it by that path
if let Some(mod_path) = if let Some(mod_path) =
find_in_prelude(ctx.db, &root_def_map, def_map, ItemInNs::Types(module_id.into()), from) find_in_prelude(ctx.db, ctx.from_def_map, ItemInNs::Types(module_id.into()), ctx.from)
{ {
return Some((mod_path, Stable)); return Some((mod_path, Stable));
} }
calculate_best_path( calculate_best_path(ctx, visited_modules, ItemInNs::Types(module_id.into()), max_len)
ctx,
def_map,
visited_modules,
max_len,
ItemInNs::Types(module_id.into()),
from,
)
} }
// FIXME: Do we still need this now that we record import origins, and hence aliases?
fn find_in_scope( fn find_in_scope(
db: &dyn DefDatabase, db: &dyn DefDatabase,
def_map: &DefMap, def_map: &DefMap,
@ -274,13 +246,11 @@ fn find_in_scope(
/// name doesn't clash in current scope. /// name doesn't clash in current scope.
fn find_in_prelude( fn find_in_prelude(
db: &dyn DefDatabase, db: &dyn DefDatabase,
root_def_map: &DefMap,
local_def_map: &DefMap, local_def_map: &DefMap,
item: ItemInNs, item: ItemInNs,
from: ModuleId, from: ModuleId,
) -> Option<ModPath> { ) -> Option<ModPath> {
let (prelude_module, _) = root_def_map.prelude()?; let (prelude_module, _) = local_def_map.prelude()?;
// Preludes in block DefMaps are ignored, only the crate DefMap is searched
let prelude_def_map = prelude_module.def_map(db); let prelude_def_map = prelude_module.def_map(db);
let prelude_scope = &prelude_def_map[prelude_module.local_id].scope; let prelude_scope = &prelude_def_map[prelude_module.local_id].scope;
let (name, vis, _declared) = prelude_scope.name_of(item)?; let (name, vis, _declared) = prelude_scope.name_of(item)?;
@ -319,7 +289,7 @@ fn is_kw_kind_relative_to_from(
let from = from.local_id; let from = from.local_id;
if item == from { if item == from {
// - if the item is the module we're in, use `self` // - if the item is the module we're in, use `self`
Some(ModPath::from_segments(PathKind::Super(0), None)) Some(ModPath::from_segments(PathKind::SELF, None))
} else if let Some(parent_id) = def_map[from].parent { } else if let Some(parent_id) = def_map[from].parent {
if item == parent_id { if item == parent_id {
// - if the item is the parent module, use `super` (this is not used recursively, since `super::super` is ugly) // - if the item is the parent module, use `super` (this is not used recursively, since `super::super` is ugly)
@ -337,60 +307,71 @@ fn is_kw_kind_relative_to_from(
#[tracing::instrument(skip_all)] #[tracing::instrument(skip_all)]
fn calculate_best_path( fn calculate_best_path(
ctx: FindPathCtx<'_>, ctx: &FindPathCtx<'_>,
def_map: &DefMap,
visited_modules: &mut FxHashSet<ModuleId>, visited_modules: &mut FxHashSet<ModuleId>,
max_len: usize,
item: ItemInNs, item: ItemInNs,
from: ModuleId, max_len: usize,
) -> Option<(ModPath, Stability)> { ) -> Option<(ModPath, Stability)> {
if max_len <= 1 { if max_len <= 1 {
// recursive base case, we can't find a path prefix of length 0, one segment is occupied by
// the item's name itself.
return None; return None;
} }
let fuel = ctx.fuel.get();
if fuel == 0 {
// we ran out of fuel, so we stop searching here
tracing::warn!(
"ran out of fuel while searching for a path for item {item:?} of krate {:?} from krate {:?}",
item.krate(ctx.db),
ctx.from.krate()
);
return None;
}
ctx.fuel.set(fuel - 1);
let mut best_path = None; let mut best_path = None;
let update_best_path = let mut best_path_len = max_len;
|best_path: &mut Option<_>, new_path: (ModPath, Stability)| match best_path { let mut process = |mut path: (ModPath, Stability), name, best_path_len: &mut _| {
path.0.push_segment(name);
let new_path = match best_path.take() {
Some(best_path) => select_best_path(best_path, path, ctx.cfg),
None => path,
};
if new_path.1 == Stable {
*best_path_len = new_path.0.len();
}
match &mut best_path {
Some((old_path, old_stability)) => { Some((old_path, old_stability)) => {
*old_path = new_path.0; *old_path = new_path.0;
*old_stability = zip_stability(*old_stability, new_path.1); *old_stability = zip_stability(*old_stability, new_path.1);
} }
None => *best_path = Some(new_path), None => best_path = Some(new_path),
}; }
// Recursive case: };
// - otherwise, look for modules containing (reexporting) it and import it from one of those let db = ctx.db;
if item.krate(ctx.db) == Some(from.krate) { if item.krate(db) == Some(ctx.from.krate) {
let mut best_path_len = max_len;
// Item was defined in the same crate that wants to import it. It cannot be found in any // Item was defined in the same crate that wants to import it. It cannot be found in any
// dependency in this case. // dependency in this case.
for (module_id, name) in find_local_import_locations(ctx.db, item, from) { // FIXME: cache the `find_local_import_locations` output?
find_local_import_locations(db, item, ctx.from, ctx.from_def_map, |name, module_id| {
if !visited_modules.insert(module_id) { if !visited_modules.insert(module_id) {
continue; return;
} }
if let Some(mut path) = find_path_for_module( // we are looking for paths of length up to best_path_len, any longer will make it be
ctx, // less optimal. The -1 is due to us pushing name onto it afterwards.
def_map, if let Some(path) =
visited_modules, find_path_for_module(ctx, visited_modules, module_id, best_path_len - 1)
from, {
module_id, process(path, name.clone(), &mut best_path_len);
best_path_len - 1,
) {
path.0.push_segment(name);
let new_path = match best_path.take() {
Some(best_path) => select_best_path(best_path, path, ctx.cfg),
None => path,
};
best_path_len = new_path.0.len();
update_best_path(&mut best_path, new_path);
} }
} })
} else { } else {
// Item was defined in some upstream crate. This means that it must be exported from one, // Item was defined in some upstream crate. This means that it must be exported from one,
// too (unless we can't name it at all). It could *also* be (re)exported by the same crate // too (unless we can't name it at all). It could *also* be (re)exported by the same crate
// that wants to import it here, but we always prefer to use the external path here. // that wants to import it here, but we always prefer to use the external path here.
for dep in &ctx.db.crate_graph()[from.krate].dependencies { for dep in &db.crate_graph()[ctx.from.krate].dependencies {
let import_map = ctx.db.import_map(dep.crate_id); let import_map = db.import_map(dep.crate_id);
let Some(import_info_for) = import_map.import_info_for(item) else { continue }; let Some(import_info_for) = import_map.import_info_for(item) else { continue };
for info in import_info_for { for info in import_info_for {
if info.is_doc_hidden { if info.is_doc_hidden {
@ -400,29 +381,18 @@ fn calculate_best_path(
// Determine best path for containing module and append last segment from `info`. // Determine best path for containing module and append last segment from `info`.
// FIXME: we should guide this to look up the path locally, or from the same crate again? // FIXME: we should guide this to look up the path locally, or from the same crate again?
let Some((mut path, path_stability)) = find_path_for_module( let path =
ctx, find_path_for_module(ctx, visited_modules, info.container, best_path_len - 1);
def_map, let Some((path, path_stability)) = path else {
visited_modules,
from,
info.container,
max_len - 1,
) else {
continue; continue;
}; };
cov_mark::hit!(partially_imported); cov_mark::hit!(partially_imported);
path.push_segment(info.name.clone()); let path = (
let path_with_stab = (
path, path,
zip_stability(path_stability, if info.is_unstable { Unstable } else { Stable }), zip_stability(path_stability, if info.is_unstable { Unstable } else { Stable }),
); );
let new_path_with_stab = match best_path.take() { process(path, info.name.clone(), &mut best_path_len);
Some(best_path) => select_best_path(best_path, path_with_stab, ctx.cfg),
None => path_with_stab,
};
update_best_path(&mut best_path, new_path_with_stab);
} }
} }
} }
@ -430,7 +400,7 @@ fn calculate_best_path(
} }
/// Select the best (most relevant) path between two paths. /// Select the best (most relevant) path between two paths.
/// This accounts for stability, path length whether std should be chosen over alloc/core paths as /// This accounts for stability, path length whether, std should be chosen over alloc/core paths as
/// well as ignoring prelude like paths or not. /// well as ignoring prelude like paths or not.
fn select_best_path( fn select_best_path(
old_path @ (_, old_stability): (ModPath, Stability), old_path @ (_, old_stability): (ModPath, Stability),
@ -496,36 +466,33 @@ fn select_best_path(
} }
} }
// FIXME: Remove allocations
/// Finds locations in `from.krate` from which `item` can be imported by `from`. /// Finds locations in `from.krate` from which `item` can be imported by `from`.
fn find_local_import_locations( fn find_local_import_locations(
db: &dyn DefDatabase, db: &dyn DefDatabase,
item: ItemInNs, item: ItemInNs,
from: ModuleId, from: ModuleId,
) -> Vec<(ModuleId, Name)> { def_map: &DefMap,
let _p = tracing::span!(tracing::Level::INFO, "find_local_import_locations").entered(); mut cb: impl FnMut(&Name, ModuleId),
) {
let _p = tracing::info_span!("find_local_import_locations").entered();
// `from` can import anything below `from` with visibility of at least `from`, and anything // `from` can import anything below `from` with visibility of at least `from`, and anything
// above `from` with any visibility. That means we do not need to descend into private siblings // above `from` with any visibility. That means we do not need to descend into private siblings
// of `from` (and similar). // of `from` (and similar).
let def_map = from.def_map(db);
// Compute the initial worklist. We start with all direct child modules of `from` as well as all // Compute the initial worklist. We start with all direct child modules of `from` as well as all
// of its (recursive) parent modules. // of its (recursive) parent modules.
let data = &def_map[from.local_id]; let mut worklist = def_map[from.local_id]
let mut worklist = .children
data.children.values().map(|child| def_map.module_id(*child)).collect::<Vec<_>>(); .values()
// FIXME: do we need to traverse out of block expressions here? .map(|child| def_map.module_id(*child))
for ancestor in iter::successors(from.containing_module(db), |m| m.containing_module(db)) { // FIXME: do we need to traverse out of block expressions here?
worklist.push(ancestor); .chain(iter::successors(from.containing_module(db), |m| m.containing_module(db)))
} .collect::<Vec<_>>();
let mut seen: FxHashSet<_> = FxHashSet::default();
let def_map = def_map.crate_root().def_map(db); let def_map = def_map.crate_root().def_map(db);
let mut seen: FxHashSet<_> = FxHashSet::default();
let mut locations = Vec::new();
while let Some(module) = worklist.pop() { while let Some(module) = worklist.pop() {
if !seen.insert(module) { if !seen.insert(module) {
continue; // already processed this module continue; // already processed this module
@ -566,7 +533,7 @@ fn find_local_import_locations(
// the item and we're a submodule of it, so can we. // the item and we're a submodule of it, so can we.
// Also this keeps the cached data smaller. // Also this keeps the cached data smaller.
if declared || is_pub_or_explicit { if declared || is_pub_or_explicit {
locations.push((module, name.clone())); cb(name, module);
} }
} }
} }
@ -578,8 +545,6 @@ fn find_local_import_locations(
} }
} }
} }
locations
} }
#[cfg(test)] #[cfg(test)]
@ -633,15 +598,13 @@ mod tests {
.into_iter() .into_iter()
.cartesian_product([false, true]) .cartesian_product([false, true])
{ {
let found_path = find_path_inner( let found_path = find_path(
FindPathCtx { &db,
db: &db,
prefix,
cfg: ImportPathConfig { prefer_no_std: false, prefer_prelude },
ignore_local_imports,
},
resolved, resolved,
module, module,
prefix,
ignore_local_imports,
ImportPathConfig { prefer_no_std: false, prefer_prelude },
); );
format_to!( format_to!(
res, res,

View file

@ -11,7 +11,7 @@ use hir_expand::{
ExpandResult, ExpandResult,
}; };
use intern::Interned; use intern::Interned;
use la_arena::Arena; use la_arena::{Arena, RawIdx};
use once_cell::unsync::Lazy; use once_cell::unsync::Lazy;
use stdx::impl_from; use stdx::impl_from;
use syntax::ast::{self, HasGenericParams, HasName, HasTypeBounds}; use syntax::ast::{self, HasGenericParams, HasName, HasTypeBounds};
@ -28,6 +28,9 @@ use crate::{
LocalLifetimeParamId, LocalTypeOrConstParamId, Lookup, TypeOrConstParamId, TypeParamId, LocalLifetimeParamId, LocalTypeOrConstParamId, Lookup, TypeOrConstParamId, TypeParamId,
}; };
const SELF_PARAM_ID_IN_SELF: la_arena::Idx<TypeOrConstParamData> =
LocalTypeOrConstParamId::from_raw(RawIdx::from_u32(0));
/// Data about a generic type parameter (to a function, struct, impl, ...). /// Data about a generic type parameter (to a function, struct, impl, ...).
#[derive(Clone, PartialEq, Eq, Debug, Hash)] #[derive(Clone, PartialEq, Eq, Debug, Hash)]
pub struct TypeParamData { pub struct TypeParamData {
@ -403,12 +406,12 @@ impl GenericParamsCollector {
let (def_map, expander) = &mut **exp; let (def_map, expander) = &mut **exp;
let module = expander.module.local_id; let module = expander.module.local_id;
let resolver = |path| { let resolver = |path: &_| {
def_map def_map
.resolve_path( .resolve_path(
db, db,
module, module,
&path, path,
crate::item_scope::BuiltinShadowMode::Other, crate::item_scope::BuiltinShadowMode::Other,
Some(MacroSubNs::Bang), Some(MacroSubNs::Bang),
) )
@ -441,15 +444,18 @@ impl GenericParamsCollector {
impl GenericParams { impl GenericParams {
/// Number of Generic parameters (type_or_consts + lifetimes) /// Number of Generic parameters (type_or_consts + lifetimes)
#[inline]
pub fn len(&self) -> usize { pub fn len(&self) -> usize {
self.type_or_consts.len() + self.lifetimes.len() self.type_or_consts.len() + self.lifetimes.len()
} }
#[inline]
pub fn is_empty(&self) -> bool { pub fn is_empty(&self) -> bool {
self.len() == 0 self.len() == 0
} }
/// Iterator of type_or_consts field /// Iterator of type_or_consts field
#[inline]
pub fn iter_type_or_consts( pub fn iter_type_or_consts(
&self, &self,
) -> impl DoubleEndedIterator<Item = (LocalTypeOrConstParamId, &TypeOrConstParamData)> { ) -> impl DoubleEndedIterator<Item = (LocalTypeOrConstParamId, &TypeOrConstParamData)> {
@ -457,6 +463,7 @@ impl GenericParams {
} }
/// Iterator of lifetimes field /// Iterator of lifetimes field
#[inline]
pub fn iter_lt( pub fn iter_lt(
&self, &self,
) -> impl DoubleEndedIterator<Item = (LocalLifetimeParamId, &LifetimeParamData)> { ) -> impl DoubleEndedIterator<Item = (LocalLifetimeParamId, &LifetimeParamData)> {
@ -467,7 +474,7 @@ impl GenericParams {
db: &dyn DefDatabase, db: &dyn DefDatabase,
def: GenericDefId, def: GenericDefId,
) -> Interned<GenericParams> { ) -> Interned<GenericParams> {
let _p = tracing::span!(tracing::Level::INFO, "generic_params_query").entered(); let _p = tracing::info_span!("generic_params_query").entered();
let krate = def.module(db).krate; let krate = def.module(db).krate;
let cfg_options = db.crate_graph(); let cfg_options = db.crate_graph();
@ -605,17 +612,18 @@ impl GenericParams {
}) })
} }
pub fn find_trait_self_param(&self) -> Option<LocalTypeOrConstParamId> { pub fn trait_self_param(&self) -> Option<LocalTypeOrConstParamId> {
self.type_or_consts.iter().find_map(|(id, p)| { if self.type_or_consts.is_empty() {
matches!( return None;
p, }
TypeOrConstParamData::TypeParamData(TypeParamData { matches!(
provenance: TypeParamProvenance::TraitSelf, self.type_or_consts[SELF_PARAM_ID_IN_SELF],
.. TypeOrConstParamData::TypeParamData(TypeParamData {
}) provenance: TypeParamProvenance::TraitSelf,
) ..
.then(|| id) })
}) )
.then(|| SELF_PARAM_ID_IN_SELF)
} }
pub fn find_lifetime_by_name( pub fn find_lifetime_by_name(

View file

@ -503,11 +503,11 @@ impl BindingAnnotation {
#[derive(Debug, Clone, Eq, PartialEq)] #[derive(Debug, Clone, Eq, PartialEq)]
pub enum BindingProblems { pub enum BindingProblems {
/// https://doc.rust-lang.org/stable/error_codes/E0416.html /// <https://doc.rust-lang.org/stable/error_codes/E0416.html>
BoundMoreThanOnce, BoundMoreThanOnce,
/// https://doc.rust-lang.org/stable/error_codes/E0409.html /// <https://doc.rust-lang.org/stable/error_codes/E0409.html>
BoundInconsistently, BoundInconsistently,
/// https://doc.rust-lang.org/stable/error_codes/E0408.html /// <https://doc.rust-lang.org/stable/error_codes/E0408.html>
NotBoundAcrossAll, NotBoundAcrossAll,
} }

View file

@ -73,7 +73,7 @@ impl ImportMap {
} }
pub(crate) fn import_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<Self> { pub(crate) fn import_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<Self> {
let _p = tracing::span!(tracing::Level::INFO, "import_map_query").entered(); let _p = tracing::info_span!("import_map_query").entered();
let map = Self::collect_import_map(db, krate); let map = Self::collect_import_map(db, krate);
@ -124,7 +124,7 @@ impl ImportMap {
} }
fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> ImportMapIndex { fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> ImportMapIndex {
let _p = tracing::span!(tracing::Level::INFO, "collect_import_map").entered(); let _p = tracing::info_span!("collect_import_map").entered();
let def_map = db.crate_def_map(krate); let def_map = db.crate_def_map(krate);
let mut map = FxIndexMap::default(); let mut map = FxIndexMap::default();
@ -214,7 +214,7 @@ impl ImportMap {
is_type_in_ns: bool, is_type_in_ns: bool,
trait_import_info: &ImportInfo, trait_import_info: &ImportInfo,
) { ) {
let _p = tracing::span!(tracing::Level::INFO, "collect_trait_assoc_items").entered(); let _p = tracing::info_span!("collect_trait_assoc_items").entered();
for &(ref assoc_item_name, item) in &db.trait_data(tr).items { for &(ref assoc_item_name, item) in &db.trait_data(tr).items {
let module_def_id = match item { let module_def_id = match item {
AssocItemId::FunctionId(f) => ModuleDefId::from(f), AssocItemId::FunctionId(f) => ModuleDefId::from(f),
@ -396,7 +396,7 @@ pub fn search_dependencies(
krate: CrateId, krate: CrateId,
query: &Query, query: &Query,
) -> FxHashSet<ItemInNs> { ) -> FxHashSet<ItemInNs> {
let _p = tracing::span!(tracing::Level::INFO, "search_dependencies", ?query).entered(); let _p = tracing::info_span!("search_dependencies", ?query).entered();
let graph = db.crate_graph(); let graph = db.crate_graph();

View file

@ -99,7 +99,7 @@ pub struct ItemTree {
impl ItemTree { impl ItemTree {
pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc<ItemTree> { pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc<ItemTree> {
let _p = tracing::span!(tracing::Level::INFO, "file_item_tree_query", ?file_id).entered(); let _p = tracing::info_span!("file_item_tree_query", ?file_id).entered();
let syntax = db.parse_or_expand(file_id); let syntax = db.parse_or_expand(file_id);
@ -242,11 +242,11 @@ impl ItemVisibilities {
match &vis { match &vis {
RawVisibility::Public => RawVisibilityId::PUB, RawVisibility::Public => RawVisibilityId::PUB,
RawVisibility::Module(path, explicitiy) if path.segments().is_empty() => { RawVisibility::Module(path, explicitiy) if path.segments().is_empty() => {
match (&path.kind, explicitiy) { match (path.kind, explicitiy) {
(PathKind::Super(0), VisibilityExplicitness::Explicit) => { (PathKind::SELF, VisibilityExplicitness::Explicit) => {
RawVisibilityId::PRIV_EXPLICIT RawVisibilityId::PRIV_EXPLICIT
} }
(PathKind::Super(0), VisibilityExplicitness::Implicit) => { (PathKind::SELF, VisibilityExplicitness::Implicit) => {
RawVisibilityId::PRIV_IMPLICIT RawVisibilityId::PRIV_IMPLICIT
} }
(PathKind::Crate, _) => RawVisibilityId::PUB_CRATE, (PathKind::Crate, _) => RawVisibilityId::PUB_CRATE,
@ -586,11 +586,11 @@ impl Index<RawVisibilityId> for ItemTree {
fn index(&self, index: RawVisibilityId) -> &Self::Output { fn index(&self, index: RawVisibilityId) -> &Self::Output {
static VIS_PUB: RawVisibility = RawVisibility::Public; static VIS_PUB: RawVisibility = RawVisibility::Public;
static VIS_PRIV_IMPLICIT: RawVisibility = RawVisibility::Module( static VIS_PRIV_IMPLICIT: RawVisibility = RawVisibility::Module(
ModPath::from_kind(PathKind::Super(0)), ModPath::from_kind(PathKind::SELF),
VisibilityExplicitness::Implicit, VisibilityExplicitness::Implicit,
); );
static VIS_PRIV_EXPLICIT: RawVisibility = RawVisibility::Module( static VIS_PRIV_EXPLICIT: RawVisibility = RawVisibility::Module(
ModPath::from_kind(PathKind::Super(0)), ModPath::from_kind(PathKind::SELF),
VisibilityExplicitness::Explicit, VisibilityExplicitness::Explicit,
); );
static VIS_PUB_CRATE: RawVisibility = RawVisibility::Module( static VIS_PUB_CRATE: RawVisibility = RawVisibility::Module(
@ -928,7 +928,7 @@ impl UseTree {
_ => None, _ => None,
} }
} }
(Some(prefix), PathKind::Super(0)) if path.segments().is_empty() => { (Some(prefix), PathKind::SELF) if path.segments().is_empty() => {
// `some::path::self` == `some::path` // `some::path::self` == `some::path`
Some((prefix, ImportKind::TypeOnly)) Some((prefix, ImportKind::TypeOnly))
} }

View file

@ -91,7 +91,7 @@ impl LangItems {
db: &dyn DefDatabase, db: &dyn DefDatabase,
krate: CrateId, krate: CrateId,
) -> Option<Arc<LangItems>> { ) -> Option<Arc<LangItems>> {
let _p = tracing::span!(tracing::Level::INFO, "crate_lang_items_query").entered(); let _p = tracing::info_span!("crate_lang_items_query").entered();
let mut lang_items = LangItems::default(); let mut lang_items = LangItems::default();
@ -163,7 +163,7 @@ impl LangItems {
start_crate: CrateId, start_crate: CrateId,
item: LangItem, item: LangItem,
) -> Option<LangItemTarget> { ) -> Option<LangItemTarget> {
let _p = tracing::span!(tracing::Level::INFO, "lang_item_query").entered(); let _p = tracing::info_span!("lang_item_query").entered();
if let Some(target) = if let Some(target) =
db.crate_lang_items(start_crate).and_then(|it| it.items.get(&item).copied()) db.crate_lang_items(start_crate).and_then(|it| it.items.get(&item).copied())
{ {
@ -183,7 +183,7 @@ impl LangItems {
) where ) where
T: Into<AttrDefId> + Copy, T: Into<AttrDefId> + Copy,
{ {
let _p = tracing::span!(tracing::Level::INFO, "collect_lang_item").entered(); let _p = tracing::info_span!("collect_lang_item").entered();
if let Some(lang_item) = lang_attr(db, item.into()) { if let Some(lang_item) = lang_attr(db, item.into()) {
self.items.entry(lang_item).or_insert_with(|| constructor(item)); self.items.entry(lang_item).or_insert_with(|| constructor(item));
} }
@ -199,7 +199,7 @@ pub(crate) fn notable_traits_in_deps(
db: &dyn DefDatabase, db: &dyn DefDatabase,
krate: CrateId, krate: CrateId,
) -> Arc<[Arc<[TraitId]>]> { ) -> Arc<[Arc<[TraitId]>]> {
let _p = tracing::span!(tracing::Level::INFO, "notable_traits_in_deps", ?krate).entered(); let _p = tracing::info_span!("notable_traits_in_deps", ?krate).entered();
let crate_graph = db.crate_graph(); let crate_graph = db.crate_graph();
Arc::from_iter( Arc::from_iter(
@ -208,7 +208,7 @@ pub(crate) fn notable_traits_in_deps(
} }
pub(crate) fn crate_notable_traits(db: &dyn DefDatabase, krate: CrateId) -> Option<Arc<[TraitId]>> { pub(crate) fn crate_notable_traits(db: &dyn DefDatabase, krate: CrateId) -> Option<Arc<[TraitId]>> {
let _p = tracing::span!(tracing::Level::INFO, "crate_notable_traits", ?krate).entered(); let _p = tracing::info_span!("crate_notable_traits", ?krate).entered();
let mut traits = Vec::new(); let mut traits = Vec::new();

View file

@ -56,6 +56,7 @@ pub mod find_path;
pub mod import_map; pub mod import_map;
pub mod visibility; pub mod visibility;
use intern::Interned;
pub use rustc_abi as layout; pub use rustc_abi as layout;
use triomphe::Arc; use triomphe::Arc;
@ -72,7 +73,7 @@ use std::{
use base_db::{ use base_db::{
impl_intern_key, impl_intern_key,
salsa::{self, impl_intern_value_trivial}, salsa::{self, InternValueTrivial},
CrateId, CrateId,
}; };
use hir_expand::{ use hir_expand::{
@ -90,7 +91,7 @@ use hir_expand::{
use item_tree::ExternBlock; use item_tree::ExternBlock;
use la_arena::Idx; use la_arena::Idx;
use nameres::DefMap; use nameres::DefMap;
use span::{AstIdNode, Edition, FileAstId, FileId, SyntaxContextId}; use span::{AstIdNode, Edition, FileAstId, SyntaxContextId};
use stdx::impl_from; use stdx::impl_from;
use syntax::{ast, AstNode}; use syntax::{ast, AstNode};
@ -186,7 +187,7 @@ pub trait ItemTreeLoc {
macro_rules! impl_intern { macro_rules! impl_intern {
($id:ident, $loc:ident, $intern:ident, $lookup:ident) => { ($id:ident, $loc:ident, $intern:ident, $lookup:ident) => {
impl_intern_key!($id); impl_intern_key!($id);
impl_intern_value_trivial!($loc); impl InternValueTrivial for $loc {}
impl_intern_lookup!(DefDatabase, $id, $loc, $intern, $lookup); impl_intern_lookup!(DefDatabase, $id, $loc, $intern, $lookup);
}; };
} }
@ -396,6 +397,23 @@ impl PartialEq<ModuleId> for CrateRootModuleId {
other.block.is_none() && other.local_id == DefMap::ROOT && self.krate == other.krate other.block.is_none() && other.local_id == DefMap::ROOT && self.krate == other.krate
} }
} }
impl PartialEq<CrateRootModuleId> for ModuleId {
fn eq(&self, other: &CrateRootModuleId) -> bool {
other == self
}
}
impl From<CrateRootModuleId> for ModuleId {
fn from(CrateRootModuleId { krate }: CrateRootModuleId) -> Self {
ModuleId { krate, block: None, local_id: DefMap::ROOT }
}
}
impl From<CrateRootModuleId> for ModuleDefId {
fn from(value: CrateRootModuleId) -> Self {
ModuleDefId::ModuleId(value.into())
}
}
impl From<CrateId> for CrateRootModuleId { impl From<CrateId> for CrateRootModuleId {
fn from(krate: CrateId) -> Self { fn from(krate: CrateId) -> Self {
@ -472,6 +490,7 @@ impl ModuleId {
self.block.is_some() self.block.is_some()
} }
/// Returns the [`CrateRootModuleId`] for this module if it is the crate root module.
pub fn as_crate_root(&self) -> Option<CrateRootModuleId> { pub fn as_crate_root(&self) -> Option<CrateRootModuleId> {
if self.local_id == DefMap::ROOT && self.block.is_none() { if self.local_id == DefMap::ROOT && self.block.is_none() {
Some(CrateRootModuleId { krate: self.krate }) Some(CrateRootModuleId { krate: self.krate })
@ -480,33 +499,17 @@ impl ModuleId {
} }
} }
/// Returns the [`CrateRootModuleId`] for this module.
pub fn derive_crate_root(&self) -> CrateRootModuleId { pub fn derive_crate_root(&self) -> CrateRootModuleId {
CrateRootModuleId { krate: self.krate } CrateRootModuleId { krate: self.krate }
} }
/// Whether this module represents the crate root module
fn is_crate_root(&self) -> bool { fn is_crate_root(&self) -> bool {
self.local_id == DefMap::ROOT && self.block.is_none() self.local_id == DefMap::ROOT && self.block.is_none()
} }
} }
impl PartialEq<CrateRootModuleId> for ModuleId {
fn eq(&self, other: &CrateRootModuleId) -> bool {
other == self
}
}
impl From<CrateRootModuleId> for ModuleId {
fn from(CrateRootModuleId { krate }: CrateRootModuleId) -> Self {
ModuleId { krate, block: None, local_id: DefMap::ROOT }
}
}
impl From<CrateRootModuleId> for ModuleDefId {
fn from(value: CrateRootModuleId) -> Self {
ModuleDefId::ModuleId(value.into())
}
}
/// An ID of a module, **local** to a `DefMap`. /// An ID of a module, **local** to a `DefMap`.
pub type LocalModuleId = Idx<nameres::ModuleData>; pub type LocalModuleId = Idx<nameres::ModuleData>;
@ -532,7 +535,7 @@ pub struct TypeOrConstParamId {
pub parent: GenericDefId, pub parent: GenericDefId,
pub local_id: LocalTypeOrConstParamId, pub local_id: LocalTypeOrConstParamId,
} }
impl_intern_value_trivial!(TypeOrConstParamId); impl InternValueTrivial for TypeOrConstParamId {}
/// A TypeOrConstParamId with an invariant that it actually belongs to a type /// A TypeOrConstParamId with an invariant that it actually belongs to a type
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@ -594,7 +597,7 @@ pub struct LifetimeParamId {
pub local_id: LocalLifetimeParamId, pub local_id: LocalLifetimeParamId,
} }
pub type LocalLifetimeParamId = Idx<generics::LifetimeParamData>; pub type LocalLifetimeParamId = Idx<generics::LifetimeParamData>;
impl_intern_value_trivial!(LifetimeParamId); impl InternValueTrivial for LifetimeParamId {}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum ItemContainerId { pub enum ItemContainerId {
@ -920,6 +923,7 @@ pub enum GenericDefId {
ImplId(ImplId), ImplId(ImplId),
// enum variants cannot have generics themselves, but their parent enums // enum variants cannot have generics themselves, but their parent enums
// can, and this makes some code easier to write // can, and this makes some code easier to write
// FIXME: Try to remove this as that will reduce the amount of query slots generated per enum?
EnumVariantId(EnumVariantId), EnumVariantId(EnumVariantId),
// consts can have type parameters from their parents (i.e. associated consts of traits) // consts can have type parameters from their parents (i.e. associated consts of traits)
ConstId(ConstId), ConstId(ConstId),
@ -956,15 +960,14 @@ impl GenericDefId {
match self { match self {
GenericDefId::FunctionId(it) => file_id_and_params_of_item_loc(db, it), GenericDefId::FunctionId(it) => file_id_and_params_of_item_loc(db, it),
GenericDefId::TypeAliasId(it) => file_id_and_params_of_item_loc(db, it), GenericDefId::TypeAliasId(it) => file_id_and_params_of_item_loc(db, it),
GenericDefId::ConstId(_) => (FileId::BOGUS.into(), None),
GenericDefId::AdtId(AdtId::StructId(it)) => file_id_and_params_of_item_loc(db, it), GenericDefId::AdtId(AdtId::StructId(it)) => file_id_and_params_of_item_loc(db, it),
GenericDefId::AdtId(AdtId::UnionId(it)) => file_id_and_params_of_item_loc(db, it), GenericDefId::AdtId(AdtId::UnionId(it)) => file_id_and_params_of_item_loc(db, it),
GenericDefId::AdtId(AdtId::EnumId(it)) => file_id_and_params_of_item_loc(db, it), GenericDefId::AdtId(AdtId::EnumId(it)) => file_id_and_params_of_item_loc(db, it),
GenericDefId::TraitId(it) => file_id_and_params_of_item_loc(db, it), GenericDefId::TraitId(it) => file_id_and_params_of_item_loc(db, it),
GenericDefId::TraitAliasId(it) => file_id_and_params_of_item_loc(db, it), GenericDefId::TraitAliasId(it) => file_id_and_params_of_item_loc(db, it),
GenericDefId::ImplId(it) => file_id_and_params_of_item_loc(db, it), GenericDefId::ImplId(it) => file_id_and_params_of_item_loc(db, it),
// We won't be using this ID anyway GenericDefId::ConstId(it) => (it.lookup(db).id.file_id(), None),
GenericDefId::EnumVariantId(_) => (FileId::BOGUS.into(), None), GenericDefId::EnumVariantId(it) => (it.lookup(db).id.file_id(), None),
} }
} }
@ -1368,7 +1371,7 @@ pub trait AsMacroCall {
&self, &self,
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
krate: CrateId, krate: CrateId,
resolver: impl Fn(path::ModPath) -> Option<MacroDefId> + Copy, resolver: impl Fn(&path::ModPath) -> Option<MacroDefId> + Copy,
) -> Option<MacroCallId> { ) -> Option<MacroCallId> {
self.as_call_id_with_errors(db, krate, resolver).ok()?.value self.as_call_id_with_errors(db, krate, resolver).ok()?.value
} }
@ -1377,7 +1380,7 @@ pub trait AsMacroCall {
&self, &self,
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
krate: CrateId, krate: CrateId,
resolver: impl Fn(path::ModPath) -> Option<MacroDefId> + Copy, resolver: impl Fn(&path::ModPath) -> Option<MacroDefId> + Copy,
) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro>; ) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro>;
} }
@ -1386,7 +1389,7 @@ impl AsMacroCall for InFile<&ast::MacroCall> {
&self, &self,
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
krate: CrateId, krate: CrateId,
resolver: impl Fn(path::ModPath) -> Option<MacroDefId> + Copy, resolver: impl Fn(&path::ModPath) -> Option<MacroDefId> + Copy,
) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro> { ) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro> {
let expands_to = hir_expand::ExpandTo::from_call_site(self.value); let expands_to = hir_expand::ExpandTo::from_call_site(self.value);
let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(self.value)); let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(self.value));
@ -1406,7 +1409,8 @@ impl AsMacroCall for InFile<&ast::MacroCall> {
macro_call_as_call_id_with_eager( macro_call_as_call_id_with_eager(
db, db,
&AstIdWithPath::new(ast_id.file_id, ast_id.value, path), ast_id,
&path,
call_site.ctx, call_site.ctx,
expands_to, expands_to,
krate, krate,
@ -1420,11 +1424,15 @@ impl AsMacroCall for InFile<&ast::MacroCall> {
#[derive(Clone, Debug, Eq, PartialEq)] #[derive(Clone, Debug, Eq, PartialEq)]
struct AstIdWithPath<T: AstIdNode> { struct AstIdWithPath<T: AstIdNode> {
ast_id: AstId<T>, ast_id: AstId<T>,
path: path::ModPath, path: Interned<path::ModPath>,
} }
impl<T: AstIdNode> AstIdWithPath<T> { impl<T: AstIdNode> AstIdWithPath<T> {
fn new(file_id: HirFileId, ast_id: FileAstId<T>, path: path::ModPath) -> AstIdWithPath<T> { fn new(
file_id: HirFileId,
ast_id: FileAstId<T>,
path: Interned<path::ModPath>,
) -> AstIdWithPath<T> {
AstIdWithPath { ast_id: AstId::new(file_id, ast_id), path } AstIdWithPath { ast_id: AstId::new(file_id, ast_id), path }
} }
} }
@ -1435,30 +1443,39 @@ fn macro_call_as_call_id(
call_site: SyntaxContextId, call_site: SyntaxContextId,
expand_to: ExpandTo, expand_to: ExpandTo,
krate: CrateId, krate: CrateId,
resolver: impl Fn(path::ModPath) -> Option<MacroDefId> + Copy, resolver: impl Fn(&path::ModPath) -> Option<MacroDefId> + Copy,
) -> Result<Option<MacroCallId>, UnresolvedMacro> { ) -> Result<Option<MacroCallId>, UnresolvedMacro> {
macro_call_as_call_id_with_eager(db, call, call_site, expand_to, krate, resolver, resolver) macro_call_as_call_id_with_eager(
.map(|res| res.value) db,
call.ast_id,
&call.path,
call_site,
expand_to,
krate,
resolver,
resolver,
)
.map(|res| res.value)
} }
fn macro_call_as_call_id_with_eager( fn macro_call_as_call_id_with_eager(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
call: &AstIdWithPath<ast::MacroCall>, ast_id: AstId<ast::MacroCall>,
path: &path::ModPath,
call_site: SyntaxContextId, call_site: SyntaxContextId,
expand_to: ExpandTo, expand_to: ExpandTo,
krate: CrateId, krate: CrateId,
resolver: impl FnOnce(path::ModPath) -> Option<MacroDefId>, resolver: impl FnOnce(&path::ModPath) -> Option<MacroDefId>,
eager_resolver: impl Fn(path::ModPath) -> Option<MacroDefId>, eager_resolver: impl Fn(&path::ModPath) -> Option<MacroDefId>,
) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro> { ) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro> {
let def = let def = resolver(path).ok_or_else(|| UnresolvedMacro { path: path.clone() })?;
resolver(call.path.clone()).ok_or_else(|| UnresolvedMacro { path: call.path.clone() })?;
let res = match def.kind { let res = match def.kind {
MacroDefKind::BuiltInEager(..) => expand_eager_macro_input( MacroDefKind::BuiltInEager(..) => expand_eager_macro_input(
db, db,
krate, krate,
&call.ast_id.to_node(db), &ast_id.to_node(db),
call.ast_id, ast_id,
def, def,
call_site, call_site,
&|path| eager_resolver(path).filter(MacroDefId::is_fn_like), &|path| eager_resolver(path).filter(MacroDefId::is_fn_like),
@ -1467,12 +1484,12 @@ fn macro_call_as_call_id_with_eager(
value: Some(def.make_call( value: Some(def.make_call(
db, db,
krate, krate,
MacroCallKind::FnLike { ast_id: call.ast_id, expand_to, eager: None }, MacroCallKind::FnLike { ast_id, expand_to, eager: None },
call_site, call_site,
)), )),
err: None, err: None,
}, },
_ => return Err(UnresolvedMacro { path: call.path.clone() }), _ => return Err(UnresolvedMacro { path: path.clone() }),
}; };
Ok(res) Ok(res)
} }

View file

@ -1883,3 +1883,41 @@ fn test() {
"#]], "#]],
); );
} }
#[test]
fn test_pat_fragment_eof_17441() {
check(
r#"
macro_rules! matches {
($expression:expr, $pattern:pat $(if $guard:expr)? ) => {
match $expression {
$pattern $(if $guard)? => true,
_ => false
}
};
}
fn f() {
matches!(0, 10..);
matches!(0, 10.. if true);
}
"#,
expect![[r#"
macro_rules! matches {
($expression:expr, $pattern:pat $(if $guard:expr)? ) => {
match $expression {
$pattern $(if $guard)? => true,
_ => false
}
};
}
fn f() {
match 0 {
10.. =>true , _=>false
};
match 0 {
10..if true =>true , _=>false
};
}
"#]],
);
}

View file

@ -96,7 +96,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
let res = macro_call let res = macro_call
.as_call_id_with_errors(&db, krate, |path| { .as_call_id_with_errors(&db, krate, |path| {
resolver resolver
.resolve_path_as_macro(&db, &path, Some(MacroSubNs::Bang)) .resolve_path_as_macro(&db, path, Some(MacroSubNs::Bang))
.map(|(it, _)| db.macro_def(it)) .map(|(it, _)| db.macro_def(it))
}) })
.unwrap(); .unwrap();

View file

@ -16,8 +16,8 @@
//! //!
//! This happens in the `raw` module, which parses a single source file into a //! This happens in the `raw` module, which parses a single source file into a
//! set of top-level items. Nested imports are desugared to flat imports in this //! set of top-level items. Nested imports are desugared to flat imports in this
//! phase. Macro calls are represented as a triple of (Path, Option<Name>, //! phase. Macro calls are represented as a triple of `(Path, Option<Name>,
//! TokenTree). //! TokenTree)`.
//! //!
//! ## Collecting Modules //! ## Collecting Modules
//! //!
@ -333,7 +333,7 @@ impl DefMap {
let crate_graph = db.crate_graph(); let crate_graph = db.crate_graph();
let krate = &crate_graph[crate_id]; let krate = &crate_graph[crate_id];
let name = krate.display_name.as_deref().unwrap_or_default(); let name = krate.display_name.as_deref().unwrap_or_default();
let _p = tracing::span!(tracing::Level::INFO, "crate_def_map_query", ?name).entered(); let _p = tracing::info_span!("crate_def_map_query", ?name).entered();
let module_data = ModuleData::new( let module_data = ModuleData::new(
ModuleOrigin::CrateRoot { definition: krate.root_file_id }, ModuleOrigin::CrateRoot { definition: krate.root_file_id },

View file

@ -3,6 +3,7 @@
use base_db::CrateId; use base_db::CrateId;
use hir_expand::{ use hir_expand::{
attrs::{Attr, AttrId, AttrInput}, attrs::{Attr, AttrId, AttrInput},
inert_attr_macro::find_builtin_attr_idx,
MacroCallId, MacroCallKind, MacroDefId, MacroCallId, MacroCallKind, MacroDefId,
}; };
use span::SyntaxContextId; use span::SyntaxContextId;
@ -10,7 +11,6 @@ use syntax::{ast, SmolStr};
use triomphe::Arc; use triomphe::Arc;
use crate::{ use crate::{
attr::builtin::find_builtin_attr_idx,
db::DefDatabase, db::DefDatabase,
item_scope::BuiltinShadowMode, item_scope::BuiltinShadowMode,
nameres::path_resolution::ResolveMode, nameres::path_resolution::ResolveMode,
@ -59,7 +59,7 @@ impl DefMap {
return Ok(ResolvedAttr::Other); return Ok(ResolvedAttr::Other);
} }
} }
None => return Err(UnresolvedMacro { path: ast_id.path }), None => return Err(UnresolvedMacro { path: ast_id.path.as_ref().clone() }),
}; };
Ok(ResolvedAttr::Macro(attr_macro_as_call_id( Ok(ResolvedAttr::Macro(attr_macro_as_call_id(
@ -89,9 +89,12 @@ impl DefMap {
} }
if segments.len() == 1 { if segments.len() == 1 {
let mut registered = self.data.registered_attrs.iter().map(SmolStr::as_str); if find_builtin_attr_idx(&name).is_some() {
let is_inert = find_builtin_attr_idx(&name).is_some() || registered.any(pred); return true;
return is_inert; }
if self.data.registered_attrs.iter().map(SmolStr::as_str).any(pred) {
return true;
}
} }
} }
false false
@ -134,12 +137,12 @@ pub(super) fn derive_macro_as_call_id(
derive_pos: u32, derive_pos: u32,
call_site: SyntaxContextId, call_site: SyntaxContextId,
krate: CrateId, krate: CrateId,
resolver: impl Fn(path::ModPath) -> Option<(MacroId, MacroDefId)>, resolver: impl Fn(&path::ModPath) -> Option<(MacroId, MacroDefId)>,
derive_macro_id: MacroCallId, derive_macro_id: MacroCallId,
) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> { ) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> {
let (macro_id, def_id) = resolver(item_attr.path.clone()) let (macro_id, def_id) = resolver(&item_attr.path)
.filter(|(_, def_id)| def_id.is_derive()) .filter(|(_, def_id)| def_id.is_derive())
.ok_or_else(|| UnresolvedMacro { path: item_attr.path.clone() })?; .ok_or_else(|| UnresolvedMacro { path: item_attr.path.as_ref().clone() })?;
let call_id = def_id.make_call( let call_id = def_id.make_call(
db.upcast(), db.upcast(),
krate, krate,

View file

@ -10,18 +10,19 @@ use cfg::{CfgExpr, CfgOptions};
use either::Either; use either::Either;
use hir_expand::{ use hir_expand::{
attrs::{Attr, AttrId}, attrs::{Attr, AttrId},
builtin_attr_macro::{find_builtin_attr, BuiltinAttrExpander}, builtin_attr_macro::find_builtin_attr,
builtin_derive_macro::find_builtin_derive, builtin_derive_macro::find_builtin_derive,
builtin_fn_macro::find_builtin_macro, builtin_fn_macro::find_builtin_macro,
name::{name, AsName, Name}, name::{name, AsName, Name},
proc_macro::CustomProcMacroExpander, proc_macro::CustomProcMacroExpander,
ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroDefId, MacroDefKind, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroDefId, MacroDefKind,
}; };
use intern::Interned;
use itertools::{izip, Itertools}; use itertools::{izip, Itertools};
use la_arena::Idx; use la_arena::Idx;
use limit::Limit; use limit::Limit;
use rustc_hash::{FxHashMap, FxHashSet}; use rustc_hash::{FxHashMap, FxHashSet};
use span::{Edition, ErasedFileAstId, FileAstId, Span, SyntaxContextId}; use span::{Edition, ErasedFileAstId, FileAstId, SyntaxContextId};
use syntax::ast; use syntax::ast;
use triomphe::Arc; use triomphe::Arc;
@ -75,36 +76,23 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI
let proc_macros = if krate.is_proc_macro { let proc_macros = if krate.is_proc_macro {
match db.proc_macros().get(&def_map.krate) { match db.proc_macros().get(&def_map.krate) {
Some(Ok(proc_macros)) => { Some(Ok(proc_macros)) => Ok(proc_macros
Ok(proc_macros .iter()
.iter() .enumerate()
.enumerate() .map(|(idx, it)| {
.map(|(idx, it)| { let name = Name::new_text_dont_use(it.name.clone());
// FIXME: a hacky way to create a Name from string. (
let name = tt::Ident { name,
text: it.name.clone(), if it.disabled {
span: Span { CustomProcMacroExpander::disabled()
range: syntax::TextRange::empty(syntax::TextSize::new(0)), } else {
anchor: span::SpanAnchor { CustomProcMacroExpander::new(hir_expand::proc_macro::ProcMacroId::new(
file_id: FileId::BOGUS, idx as u32,
ast_id: span::ROOT_ERASED_FILE_AST_ID, ))
}, },
ctx: SyntaxContextId::ROOT, )
}, })
}; .collect()),
(
name.as_name(),
if it.disabled {
CustomProcMacroExpander::disabled()
} else {
CustomProcMacroExpander::new(
hir_expand::proc_macro::ProcMacroId::new(idx as u32),
)
},
)
})
.collect())
}
Some(Err(e)) => Err(e.clone().into_boxed_str()), Some(Err(e)) => Err(e.clone().into_boxed_str()),
None => Err("No proc-macros present for crate".to_owned().into_boxed_str()), None => Err("No proc-macros present for crate".to_owned().into_boxed_str()),
} }
@ -270,12 +258,13 @@ struct DefCollector<'a> {
/// ///
/// This also stores the attributes to skip when we resolve derive helpers and non-macro /// This also stores the attributes to skip when we resolve derive helpers and non-macro
/// non-builtin attributes in general. /// non-builtin attributes in general.
// FIXME: There has to be a better way to do this
skip_attrs: FxHashMap<InFile<ModItem>, AttrId>, skip_attrs: FxHashMap<InFile<ModItem>, AttrId>,
} }
impl DefCollector<'_> { impl DefCollector<'_> {
fn seed_with_top_level(&mut self) { fn seed_with_top_level(&mut self) {
let _p = tracing::span!(tracing::Level::INFO, "seed_with_top_level").entered(); let _p = tracing::info_span!("seed_with_top_level").entered();
let crate_graph = self.db.crate_graph(); let crate_graph = self.db.crate_graph();
let file_id = crate_graph[self.def_map.krate].root_file_id; let file_id = crate_graph[self.def_map.krate].root_file_id;
@ -410,17 +399,17 @@ impl DefCollector<'_> {
} }
fn resolution_loop(&mut self) { fn resolution_loop(&mut self) {
let _p = tracing::span!(tracing::Level::INFO, "DefCollector::resolution_loop").entered(); let _p = tracing::info_span!("DefCollector::resolution_loop").entered();
// main name resolution fixed-point loop. // main name resolution fixed-point loop.
let mut i = 0; let mut i = 0;
'resolve_attr: loop { 'resolve_attr: loop {
let _p = tracing::span!(tracing::Level::INFO, "resolve_macros loop").entered(); let _p = tracing::info_span!("resolve_macros loop").entered();
'resolve_macros: loop { 'resolve_macros: loop {
self.db.unwind_if_cancelled(); self.db.unwind_if_cancelled();
{ {
let _p = tracing::span!(tracing::Level::INFO, "resolve_imports loop").entered(); let _p = tracing::info_span!("resolve_imports loop").entered();
'resolve_imports: loop { 'resolve_imports: loop {
if self.resolve_imports() == ReachedFixedPoint::Yes { if self.resolve_imports() == ReachedFixedPoint::Yes {
@ -446,7 +435,7 @@ impl DefCollector<'_> {
} }
fn collect(&mut self) { fn collect(&mut self) {
let _p = tracing::span!(tracing::Level::INFO, "DefCollector::collect").entered(); let _p = tracing::info_span!("DefCollector::collect").entered();
self.resolution_loop(); self.resolution_loop();
@ -794,7 +783,7 @@ impl DefCollector<'_> {
} }
fn resolve_import(&self, module_id: LocalModuleId, import: &Import) -> PartialResolvedImport { fn resolve_import(&self, module_id: LocalModuleId, import: &Import) -> PartialResolvedImport {
let _p = tracing::span!(tracing::Level::INFO, "resolve_import", import_path = %import.path.display(self.db.upcast())) let _p = tracing::info_span!("resolve_import", import_path = %import.path.display(self.db.upcast()))
.entered(); .entered();
tracing::debug!("resolving import: {:?} ({:?})", import, self.def_map.data.edition); tracing::debug!("resolving import: {:?} ({:?})", import, self.def_map.data.edition);
match import.source { match import.source {
@ -856,7 +845,7 @@ impl DefCollector<'_> {
} }
fn record_resolved_import(&mut self, directive: &ImportDirective) { fn record_resolved_import(&mut self, directive: &ImportDirective) {
let _p = tracing::span!(tracing::Level::INFO, "record_resolved_import").entered(); let _p = tracing::info_span!("record_resolved_import").entered();
let module_id = directive.module_id; let module_id = directive.module_id;
let import = &directive.import; let import = &directive.import;
@ -1136,18 +1125,18 @@ impl DefCollector<'_> {
MacroSubNs::Attr MacroSubNs::Attr
} }
}; };
let resolver = |path| { let resolver = |path: &_| {
let resolved_res = self.def_map.resolve_path_fp_with_macro( let resolved_res = self.def_map.resolve_path_fp_with_macro(
self.db, self.db,
ResolveMode::Other, ResolveMode::Other,
directive.module_id, directive.module_id,
&path, path,
BuiltinShadowMode::Module, BuiltinShadowMode::Module,
Some(subns), Some(subns),
); );
resolved_res.resolved_def.take_macros().map(|it| (it, self.db.macro_def(it))) resolved_res.resolved_def.take_macros().map(|it| (it, self.db.macro_def(it)))
}; };
let resolver_def_id = |path| resolver(path).map(|(_, it)| it); let resolver_def_id = |path: &_| resolver(path).map(|(_, it)| it);
match &directive.kind { match &directive.kind {
MacroDirectiveKind::FnLike { ast_id, expand_to, ctxt: call_site } => { MacroDirectiveKind::FnLike { ast_id, expand_to, ctxt: call_site } => {
@ -1250,22 +1239,28 @@ impl DefCollector<'_> {
} }
} }
let def = match resolver_def_id(path.clone()) { let def = match resolver_def_id(path) {
Some(def) if def.is_attribute() => def, Some(def) if def.is_attribute() => def,
_ => return Resolved::No, _ => return Resolved::No,
}; };
let call_id = // Skip #[test]/#[bench] expansion, which would merely result in more memory usage
attr_macro_as_call_id(self.db, file_ast_id, attr, self.def_map.krate, def); // due to duplicating functions into macro expansions
if let MacroDefId { if matches!(
kind: def.kind,
MacroDefKind::BuiltInAttr( MacroDefKind::BuiltInAttr(_, expander)
BuiltinAttrExpander::Derive | BuiltinAttrExpander::DeriveConst, if expander.is_test() || expander.is_bench()
_, ) {
), return recollect_without(self);
.. }
} = def
{ let call_id = || {
attr_macro_as_call_id(self.db, file_ast_id, attr, self.def_map.krate, def)
};
if matches!(def,
MacroDefId { kind: MacroDefKind::BuiltInAttr(_, exp), .. }
if exp.is_derive()
) {
// Resolved to `#[derive]`, we don't actually expand this attribute like // Resolved to `#[derive]`, we don't actually expand this attribute like
// normal (as that would just be an identity expansion with extra output) // normal (as that would just be an identity expansion with extra output)
// Instead we treat derive attributes special and apply them separately. // Instead we treat derive attributes special and apply them separately.
@ -1290,9 +1285,14 @@ impl DefCollector<'_> {
match attr.parse_path_comma_token_tree(self.db.upcast()) { match attr.parse_path_comma_token_tree(self.db.upcast()) {
Some(derive_macros) => { Some(derive_macros) => {
let call_id = call_id();
let mut len = 0; let mut len = 0;
for (idx, (path, call_site)) in derive_macros.enumerate() { for (idx, (path, call_site)) in derive_macros.enumerate() {
let ast_id = AstIdWithPath::new(file_id, ast_id.value, path); let ast_id = AstIdWithPath::new(
file_id,
ast_id.value,
Interned::new(path),
);
self.unresolved_macros.push(MacroDirective { self.unresolved_macros.push(MacroDirective {
module_id: directive.module_id, module_id: directive.module_id,
depth: directive.depth + 1, depth: directive.depth + 1,
@ -1312,13 +1312,6 @@ impl DefCollector<'_> {
// This is just a trick to be able to resolve the input to derives // This is just a trick to be able to resolve the input to derives
// as proper paths in `Semantics`. // as proper paths in `Semantics`.
// Check the comment in [`builtin_attr_macro`]. // Check the comment in [`builtin_attr_macro`].
let call_id = attr_macro_as_call_id(
self.db,
file_ast_id,
attr,
self.def_map.krate,
def,
);
self.def_map.modules[directive.module_id] self.def_map.modules[directive.module_id]
.scope .scope
.init_derive_attribute(ast_id, attr.id, call_id, len + 1); .init_derive_attribute(ast_id, attr.id, call_id, len + 1);
@ -1336,17 +1329,8 @@ impl DefCollector<'_> {
return recollect_without(self); return recollect_without(self);
} }
// Skip #[test]/#[bench] expansion, which would merely result in more memory usage let call_id = call_id();
// due to duplicating functions into macro expansions if let MacroDefKind::ProcMacro(_, exp, _) = def.kind {
if matches!(
def.kind,
MacroDefKind::BuiltInAttr(expander, _)
if expander.is_test() || expander.is_bench()
) {
return recollect_without(self);
}
if let MacroDefKind::ProcMacro(exp, ..) = def.kind {
// If proc attribute macro expansion is disabled, skip expanding it here // If proc attribute macro expansion is disabled, skip expanding it here
if !self.db.expand_proc_attr_macros() { if !self.db.expand_proc_attr_macros() {
self.def_map.diagnostics.push(DefDiagnostic::unresolved_proc_macro( self.def_map.diagnostics.push(DefDiagnostic::unresolved_proc_macro(
@ -1430,7 +1414,7 @@ impl DefCollector<'_> {
fn finish(mut self) -> DefMap { fn finish(mut self) -> DefMap {
// Emit diagnostics for all remaining unexpanded macros. // Emit diagnostics for all remaining unexpanded macros.
let _p = tracing::span!(tracing::Level::INFO, "DefCollector::finish").entered(); let _p = tracing::info_span!("DefCollector::finish").entered();
for directive in &self.unresolved_macros { for directive in &self.unresolved_macros {
match &directive.kind { match &directive.kind {
@ -1447,7 +1431,7 @@ impl DefCollector<'_> {
self.db, self.db,
ResolveMode::Other, ResolveMode::Other,
directive.module_id, directive.module_id,
&path, path,
BuiltinShadowMode::Module, BuiltinShadowMode::Module,
Some(MacroSubNs::Bang), Some(MacroSubNs::Bang),
); );
@ -1481,7 +1465,7 @@ impl DefCollector<'_> {
derive_index: *derive_pos as u32, derive_index: *derive_pos as u32,
derive_macro_id: *derive_macro_id, derive_macro_id: *derive_macro_id,
}, },
ast_id.path.clone(), ast_id.path.as_ref().clone(),
)); ));
} }
// These are diagnosed by `reseed_with_unresolved_attribute`, as that function consumes them // These are diagnosed by `reseed_with_unresolved_attribute`, as that function consumes them
@ -2116,7 +2100,7 @@ impl ModCollector<'_, '_> {
let ast_id = AstIdWithPath::new( let ast_id = AstIdWithPath::new(
self.file_id(), self.file_id(),
mod_item.ast_id(self.item_tree), mod_item.ast_id(self.item_tree),
attr.path.as_ref().clone(), attr.path.clone(),
); );
self.def_collector.unresolved_macros.push(MacroDirective { self.def_collector.unresolved_macros.push(MacroDirective {
module_id: self.module_id, module_id: self.module_id,
@ -2162,19 +2146,7 @@ impl ModCollector<'_, '_> {
let name; let name;
let name = match attrs.by_key("rustc_builtin_macro").string_value() { let name = match attrs.by_key("rustc_builtin_macro").string_value() {
Some(it) => { Some(it) => {
// FIXME: a hacky way to create a Name from string. name = Name::new_text_dont_use(it.into());
name = tt::Ident {
text: it.into(),
span: Span {
range: syntax::TextRange::empty(syntax::TextSize::new(0)),
anchor: span::SpanAnchor {
file_id: FileId::BOGUS,
ast_id: span::ROOT_ERASED_FILE_AST_ID,
},
ctx: SyntaxContextId::ROOT,
},
}
.as_name();
&name &name
} }
None => { None => {
@ -2310,7 +2282,7 @@ impl ModCollector<'_, '_> {
&MacroCall { ref path, ast_id, expand_to, ctxt }: &MacroCall, &MacroCall { ref path, ast_id, expand_to, ctxt }: &MacroCall,
container: ItemContainerId, container: ItemContainerId,
) { ) {
let ast_id = AstIdWithPath::new(self.file_id(), ast_id, ModPath::clone(path)); let ast_id = AstIdWithPath::new(self.file_id(), ast_id, path.clone());
let db = self.def_collector.db; let db = self.def_collector.db;
// FIXME: Immediately expanding in "Case 1" is insufficient since "Case 2" may also define // FIXME: Immediately expanding in "Case 1" is insufficient since "Case 2" may also define
@ -2320,7 +2292,8 @@ impl ModCollector<'_, '_> {
// Case 1: try to resolve macro calls with single-segment name and expand macro_rules // Case 1: try to resolve macro calls with single-segment name and expand macro_rules
if let Ok(res) = macro_call_as_call_id_with_eager( if let Ok(res) = macro_call_as_call_id_with_eager(
db.upcast(), db.upcast(),
&ast_id, ast_id.ast_id,
&ast_id.path,
ctxt, ctxt,
expand_to, expand_to,
self.def_collector.def_map.krate, self.def_collector.def_map.krate,
@ -2347,7 +2320,7 @@ impl ModCollector<'_, '_> {
db, db,
ResolveMode::Other, ResolveMode::Other,
self.module_id, self.module_id,
&path, path,
BuiltinShadowMode::Module, BuiltinShadowMode::Module,
Some(MacroSubNs::Bang), Some(MacroSubNs::Bang),
); );

View file

@ -283,7 +283,7 @@ impl DefMap {
// If we have a different `DefMap` from `self` (the original `DefMap` we started // If we have a different `DefMap` from `self` (the original `DefMap` we started
// with), resolve the remaining path segments in that `DefMap`. // with), resolve the remaining path segments in that `DefMap`.
let path = let path =
ModPath::from_segments(PathKind::Super(0), path.segments().iter().cloned()); ModPath::from_segments(PathKind::SELF, path.segments().iter().cloned());
return def_map.resolve_path_fp_with_macro( return def_map.resolve_path_fp_with_macro(
db, db,
mode, mode,
@ -333,7 +333,7 @@ impl DefMap {
ModuleDefId::ModuleId(module) => { ModuleDefId::ModuleId(module) => {
if module.krate != self.krate { if module.krate != self.krate {
let path = ModPath::from_segments( let path = ModPath::from_segments(
PathKind::Super(0), PathKind::SELF,
path.segments()[i..].iter().cloned(), path.segments()[i..].iter().cloned(),
); );
tracing::debug!("resolving {:?} in other crate", path); tracing::debug!("resolving {:?} in other crate", path);
@ -493,7 +493,12 @@ impl DefMap {
) )
}) })
}; };
let prelude = || self.resolve_in_prelude(db, name); let prelude = || {
if self.block.is_some() && module == DefMap::ROOT {
return PerNs::none();
}
self.resolve_in_prelude(db, name)
};
from_legacy_macro from_legacy_macro
.or(from_scope_or_builtin) .or(from_scope_or_builtin)

View file

@ -122,7 +122,7 @@ pub(super) fn lower_path(ctx: &LowerCtx<'_>, mut path: ast::Path) -> Option<Path
// don't break out if `self` is the last segment of a path, this mean we got a // don't break out if `self` is the last segment of a path, this mean we got a
// use tree like `foo::{self}` which we want to resolve as `foo` // use tree like `foo::{self}` which we want to resolve as `foo`
if !segments.is_empty() { if !segments.is_empty() {
kind = PathKind::Super(0); kind = PathKind::SELF;
break; break;
} }
} }
@ -144,7 +144,7 @@ pub(super) fn lower_path(ctx: &LowerCtx<'_>, mut path: ast::Path) -> Option<Path
if segments.is_empty() && kind == PathKind::Plain && type_anchor.is_none() { if segments.is_empty() && kind == PathKind::Plain && type_anchor.is_none() {
// plain empty paths don't exist, this means we got a single `self` segment as our path // plain empty paths don't exist, this means we got a single `self` segment as our path
kind = PathKind::Super(0); kind = PathKind::SELF;
} }
// handle local_inner_macros : // handle local_inner_macros :

View file

@ -86,7 +86,7 @@ impl PerNs {
} }
pub fn filter_visibility(self, mut f: impl FnMut(Visibility) -> bool) -> PerNs { pub fn filter_visibility(self, mut f: impl FnMut(Visibility) -> bool) -> PerNs {
let _p = tracing::span!(tracing::Level::INFO, "PerNs::filter_visibility").entered(); let _p = tracing::info_span!("PerNs::filter_visibility").entered();
PerNs { PerNs {
types: self.types.filter(|&(_, v, _)| f(v)), types: self.types.filter(|&(_, v, _)| f(v)),
values: self.values.filter(|&(_, v, _)| f(v)), values: self.values.filter(|&(_, v, _)| f(v)),
@ -119,7 +119,7 @@ impl PerNs {
} }
pub fn iter_items(self) -> impl Iterator<Item = (ItemInNs, Option<ImportOrExternCrate>)> { pub fn iter_items(self) -> impl Iterator<Item = (ItemInNs, Option<ImportOrExternCrate>)> {
let _p = tracing::span!(tracing::Level::INFO, "PerNs::iter_items").entered(); let _p = tracing::info_span!("PerNs::iter_items").entered();
self.types self.types
.map(|it| (ItemInNs::Types(it.0), it.2)) .map(|it| (ItemInNs::Types(it.0), it.2))
.into_iter() .into_iter()

View file

@ -57,7 +57,7 @@ pub(crate) fn print_path(db: &dyn DefDatabase, path: &Path, buf: &mut dyn Write)
} }
None => match path.kind() { None => match path.kind() {
PathKind::Plain => {} PathKind::Plain => {}
PathKind::Super(0) => write!(buf, "self")?, &PathKind::SELF => write!(buf, "self")?,
PathKind::Super(n) => { PathKind::Super(n) => {
for i in 0..*n { for i in 0..*n {
if i == 0 { if i == 0 {

View file

@ -27,10 +27,7 @@ pub enum RawVisibility {
impl RawVisibility { impl RawVisibility {
pub(crate) const fn private() -> RawVisibility { pub(crate) const fn private() -> RawVisibility {
RawVisibility::Module( RawVisibility::Module(ModPath::from_kind(PathKind::SELF), VisibilityExplicitness::Implicit)
ModPath::from_kind(PathKind::Super(0)),
VisibilityExplicitness::Implicit,
)
} }
pub(crate) fn from_ast( pub(crate) fn from_ast(
@ -60,7 +57,7 @@ impl RawVisibility {
} }
ast::VisibilityKind::PubCrate => ModPath::from_kind(PathKind::Crate), ast::VisibilityKind::PubCrate => ModPath::from_kind(PathKind::Crate),
ast::VisibilityKind::PubSuper => ModPath::from_kind(PathKind::Super(1)), ast::VisibilityKind::PubSuper => ModPath::from_kind(PathKind::Super(1)),
ast::VisibilityKind::PubSelf => ModPath::from_kind(PathKind::Super(0)), ast::VisibilityKind::PubSelf => ModPath::from_kind(PathKind::SELF),
ast::VisibilityKind::Pub => return RawVisibility::Public, ast::VisibilityKind::Pub => return RawVisibility::Public,
}; };
RawVisibility::Module(path, VisibilityExplicitness::Explicit) RawVisibility::Module(path, VisibilityExplicitness::Explicit)

View file

@ -54,7 +54,7 @@ impl RawAttrs {
let span = span_map.span_for_range(comment.syntax().text_range()); let span = span_map.span_for_range(comment.syntax().text_range());
Attr { Attr {
id, id,
input: Some(Interned::new(AttrInput::Literal(tt::Literal { input: Some(Box::new(AttrInput::Literal(tt::Literal {
text: SmolStr::new(format_smolstr!("\"{}\"", Self::escape_chars(doc))), text: SmolStr::new(format_smolstr!("\"{}\"", Self::escape_chars(doc))),
span, span,
}))), }))),
@ -199,7 +199,7 @@ impl AttrId {
pub struct Attr { pub struct Attr {
pub id: AttrId, pub id: AttrId,
pub path: Interned<ModPath>, pub path: Interned<ModPath>,
pub input: Option<Interned<AttrInput>>, pub input: Option<Box<AttrInput>>,
pub ctxt: SyntaxContextId, pub ctxt: SyntaxContextId,
} }
@ -234,7 +234,7 @@ impl Attr {
})?); })?);
let span = span_map.span_for_range(range); let span = span_map.span_for_range(range);
let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() { let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() {
Some(Interned::new(AttrInput::Literal(tt::Literal { Some(Box::new(AttrInput::Literal(tt::Literal {
text: lit.token().text().into(), text: lit.token().text().into(),
span, span,
}))) })))
@ -245,7 +245,7 @@ impl Attr {
span, span,
DocCommentDesugarMode::ProcMacro, DocCommentDesugarMode::ProcMacro,
); );
Some(Interned::new(AttrInput::TokenTree(Box::new(tree)))) Some(Box::new(AttrInput::TokenTree(Box::new(tree))))
} else { } else {
None None
}; };
@ -281,12 +281,12 @@ impl Attr {
let input = match input.first() { let input = match input.first() {
Some(tt::TokenTree::Subtree(tree)) => { Some(tt::TokenTree::Subtree(tree)) => {
Some(Interned::new(AttrInput::TokenTree(Box::new(tree.clone())))) Some(Box::new(AttrInput::TokenTree(Box::new(tree.clone()))))
} }
Some(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '=', .. }))) => { Some(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '=', .. }))) => {
let input = match input.get(1) { let input = match input.get(1) {
Some(tt::TokenTree::Leaf(tt::Leaf::Literal(lit))) => { Some(tt::TokenTree::Leaf(tt::Leaf::Literal(lit))) => {
Some(Interned::new(AttrInput::Literal(lit.clone()))) Some(Box::new(AttrInput::Literal(lit.clone())))
} }
_ => None, _ => None,
}; };

View file

@ -52,8 +52,6 @@ impl BuiltinAttrExpander {
register_builtin! { register_builtin! {
(bench, Bench) => dummy_attr_expand, (bench, Bench) => dummy_attr_expand,
(cfg, Cfg) => dummy_attr_expand,
(cfg_attr, CfgAttr) => dummy_attr_expand,
(cfg_accessible, CfgAccessible) => dummy_attr_expand, (cfg_accessible, CfgAccessible) => dummy_attr_expand,
(cfg_eval, CfgEval) => dummy_attr_expand, (cfg_eval, CfgEval) => dummy_attr_expand,
(derive, Derive) => derive_expand, (derive, Derive) => derive_expand,

View file

@ -67,6 +67,10 @@ impl BuiltinFnLikeExpander {
let span = span_with_def_site_ctxt(db, span, id); let span = span_with_def_site_ctxt(db, span, id);
self.expander()(db, id, tt, span) self.expander()(db, id, tt, span)
} }
pub fn is_asm(&self) -> bool {
matches!(self, Self::Asm | Self::GlobalAsm)
}
} }
impl EagerExpander { impl EagerExpander {

View file

@ -189,8 +189,8 @@ pub(crate) fn process_cfg_attrs(
// FIXME: #[cfg_eval] is not implemented. But it is not stable yet // FIXME: #[cfg_eval] is not implemented. But it is not stable yet
let is_derive = match loc.def.kind { let is_derive = match loc.def.kind {
MacroDefKind::BuiltInDerive(..) MacroDefKind::BuiltInDerive(..)
| MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _) => true, | MacroDefKind::ProcMacro(_, _, ProcMacroKind::CustomDerive) => true,
MacroDefKind::BuiltInAttr(expander, _) => expander.is_derive(), MacroDefKind::BuiltInAttr(_, expander) => expander.is_derive(),
_ => false, _ => false,
}; };
if !is_derive { if !is_derive {

View file

@ -146,13 +146,11 @@ pub fn expand_speculative(
token_to_map: SyntaxToken, token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, SyntaxToken)> { ) -> Option<(SyntaxNode, SyntaxToken)> {
let loc = db.lookup_intern_macro_call(actual_macro_call); let loc = db.lookup_intern_macro_call(actual_macro_call);
// FIXME: This BOGUS here is dangerous once the proc-macro server can call back into the database!
let span_map = RealSpanMap::absolute(FileId::BOGUS);
let span_map = SpanMapRef::RealSpanMap(&span_map);
let (_, _, span) = db.macro_arg_considering_derives(actual_macro_call, &loc.kind); let (_, _, span) = db.macro_arg_considering_derives(actual_macro_call, &loc.kind);
let span_map = RealSpanMap::absolute(span.anchor.file_id);
let span_map = SpanMapRef::RealSpanMap(&span_map);
// Build the subtree and token mapping for the speculative args // Build the subtree and token mapping for the speculative args
let (mut tt, undo_info) = match loc.kind { let (mut tt, undo_info) = match loc.kind {
MacroCallKind::FnLike { .. } => ( MacroCallKind::FnLike { .. } => (
@ -252,7 +250,7 @@ pub fn expand_speculative(
// Otherwise the expand query will fetch the non speculative attribute args and pass those instead. // Otherwise the expand query will fetch the non speculative attribute args and pass those instead.
let mut speculative_expansion = let mut speculative_expansion =
match loc.def.kind { match loc.def.kind {
MacroDefKind::ProcMacro(expander, _, ast) => { MacroDefKind::ProcMacro(ast, expander, _) => {
let span = db.proc_macro_span(ast); let span = db.proc_macro_span(ast);
tt.delimiter = tt::Delimiter::invisible_spanned(span); tt.delimiter = tt::Delimiter::invisible_spanned(span);
expander.expand( expander.expand(
@ -266,22 +264,22 @@ pub fn expand_speculative(
span_with_mixed_site_ctxt(db, span, actual_macro_call), span_with_mixed_site_ctxt(db, span, actual_macro_call),
) )
} }
MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => { MacroDefKind::BuiltInAttr(_, it) if it.is_derive() => {
pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?, span) pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?, span)
} }
MacroDefKind::Declarative(it) => db MacroDefKind::Declarative(it) => db
.decl_macro_expander(loc.krate, it) .decl_macro_expander(loc.krate, it)
.expand_unhygienic(db, tt, loc.def.krate, span, loc.def.edition), .expand_unhygienic(db, tt, loc.def.krate, span, loc.def.edition),
MacroDefKind::BuiltIn(it, _) => { MacroDefKind::BuiltIn(_, it) => {
it.expand(db, actual_macro_call, &tt, span).map_err(Into::into) it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
} }
MacroDefKind::BuiltInDerive(it, ..) => { MacroDefKind::BuiltInDerive(_, it) => {
it.expand(db, actual_macro_call, &tt, span).map_err(Into::into) it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
} }
MacroDefKind::BuiltInEager(it, _) => { MacroDefKind::BuiltInEager(_, it) => {
it.expand(db, actual_macro_call, &tt, span).map_err(Into::into) it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
} }
MacroDefKind::BuiltInAttr(it, _) => it.expand(db, actual_macro_call, &tt, span), MacroDefKind::BuiltInAttr(_, it) => it.expand(db, actual_macro_call, &tt, span),
}; };
let expand_to = loc.expand_to(); let expand_to = loc.expand_to();
@ -334,7 +332,7 @@ fn parse_macro_expansion(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
macro_file: MacroFileId, macro_file: MacroFileId,
) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)> { ) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)> {
let _p = tracing::span!(tracing::Level::INFO, "parse_macro_expansion").entered(); let _p = tracing::info_span!("parse_macro_expansion").entered();
let loc = db.lookup_intern_macro_call(macro_file.macro_call_id); let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
let edition = loc.def.edition; let edition = loc.def.edition;
let expand_to = loc.expand_to(); let expand_to = loc.expand_to();
@ -493,7 +491,7 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult {
.map_or_else(|| node.syntax().text_range(), |it| it.syntax().text_range()), .map_or_else(|| node.syntax().text_range(), |it| it.syntax().text_range()),
); );
// If derive attribute we need to censor the derive input // If derive attribute we need to censor the derive input
if matches!(loc.def.kind, MacroDefKind::BuiltInAttr(expander, ..) if expander.is_derive()) if matches!(loc.def.kind, MacroDefKind::BuiltInAttr(_, expander) if expander.is_derive())
&& ast::Adt::can_cast(node.syntax().kind()) && ast::Adt::can_cast(node.syntax().kind())
{ {
let adt = ast::Adt::cast(node.syntax().clone()).unwrap(); let adt = ast::Adt::cast(node.syntax().clone()).unwrap();
@ -569,11 +567,11 @@ impl TokenExpander {
MacroDefKind::Declarative(ast_id) => { MacroDefKind::Declarative(ast_id) => {
TokenExpander::DeclarativeMacro(db.decl_macro_expander(id.krate, ast_id)) TokenExpander::DeclarativeMacro(db.decl_macro_expander(id.krate, ast_id))
} }
MacroDefKind::BuiltIn(expander, _) => TokenExpander::BuiltIn(expander), MacroDefKind::BuiltIn(_, expander) => TokenExpander::BuiltIn(expander),
MacroDefKind::BuiltInAttr(expander, _) => TokenExpander::BuiltInAttr(expander), MacroDefKind::BuiltInAttr(_, expander) => TokenExpander::BuiltInAttr(expander),
MacroDefKind::BuiltInDerive(expander, _) => TokenExpander::BuiltInDerive(expander), MacroDefKind::BuiltInDerive(_, expander) => TokenExpander::BuiltInDerive(expander),
MacroDefKind::BuiltInEager(expander, ..) => TokenExpander::BuiltInEager(expander), MacroDefKind::BuiltInEager(_, expander) => TokenExpander::BuiltInEager(expander),
MacroDefKind::ProcMacro(expander, ..) => TokenExpander::ProcMacro(expander), MacroDefKind::ProcMacro(_, expander, _) => TokenExpander::ProcMacro(expander),
} }
} }
} }
@ -588,7 +586,7 @@ fn macro_expand(
macro_call_id: MacroCallId, macro_call_id: MacroCallId,
loc: MacroCallLoc, loc: MacroCallLoc,
) -> ExpandResult<(CowArc<tt::Subtree>, MatchedArmIndex)> { ) -> ExpandResult<(CowArc<tt::Subtree>, MatchedArmIndex)> {
let _p = tracing::span!(tracing::Level::INFO, "macro_expand").entered(); let _p = tracing::info_span!("macro_expand").entered();
let (ExpandResult { value: (tt, matched_arm), err }, span) = match loc.def.kind { let (ExpandResult { value: (tt, matched_arm), err }, span) = match loc.def.kind {
MacroDefKind::ProcMacro(..) => { MacroDefKind::ProcMacro(..) => {
@ -604,13 +602,13 @@ fn macro_expand(
MacroDefKind::Declarative(id) => db MacroDefKind::Declarative(id) => db
.decl_macro_expander(loc.def.krate, id) .decl_macro_expander(loc.def.krate, id)
.expand(db, arg.clone(), macro_call_id, span), .expand(db, arg.clone(), macro_call_id, span),
MacroDefKind::BuiltIn(it, _) => { MacroDefKind::BuiltIn(_, it) => {
it.expand(db, macro_call_id, arg, span).map_err(Into::into).zip_val(None) it.expand(db, macro_call_id, arg, span).map_err(Into::into).zip_val(None)
} }
MacroDefKind::BuiltInDerive(it, _) => { MacroDefKind::BuiltInDerive(_, it) => {
it.expand(db, macro_call_id, arg, span).map_err(Into::into).zip_val(None) it.expand(db, macro_call_id, arg, span).map_err(Into::into).zip_val(None)
} }
MacroDefKind::BuiltInEager(it, _) => { MacroDefKind::BuiltInEager(_, it) => {
// This might look a bit odd, but we do not expand the inputs to eager macros here. // This might look a bit odd, but we do not expand the inputs to eager macros here.
// Eager macros inputs are expanded, well, eagerly when we collect the macro calls. // Eager macros inputs are expanded, well, eagerly when we collect the macro calls.
// That kind of expansion uses the ast id map of an eager macros input though which goes through // That kind of expansion uses the ast id map of an eager macros input though which goes through
@ -634,12 +632,12 @@ fn macro_expand(
} }
res.zip_val(None) res.zip_val(None)
} }
MacroDefKind::BuiltInAttr(it, _) => { MacroDefKind::BuiltInAttr(_, it) => {
let mut res = it.expand(db, macro_call_id, arg, span); let mut res = it.expand(db, macro_call_id, arg, span);
fixup::reverse_fixups(&mut res.value, &undo_info); fixup::reverse_fixups(&mut res.value, &undo_info);
res.zip_val(None) res.zip_val(None)
} }
_ => unreachable!(), MacroDefKind::ProcMacro(_, _, _) => unreachable!(),
}; };
(ExpandResult { value: res.value, err: res.err }, span) (ExpandResult { value: res.value, err: res.err }, span)
} }
@ -678,8 +676,8 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<A
let loc = db.lookup_intern_macro_call(id); let loc = db.lookup_intern_macro_call(id);
let (macro_arg, undo_info, span) = db.macro_arg_considering_derives(id, &loc.kind); let (macro_arg, undo_info, span) = db.macro_arg_considering_derives(id, &loc.kind);
let (expander, ast) = match loc.def.kind { let (ast, expander) = match loc.def.kind {
MacroDefKind::ProcMacro(expander, _, ast) => (expander, ast), MacroDefKind::ProcMacro(ast, expander, _) => (ast, expander),
_ => unreachable!(), _ => unreachable!(),
}; };

View file

@ -39,7 +39,7 @@ pub fn expand_eager_macro_input(
ast_id: AstId<ast::MacroCall>, ast_id: AstId<ast::MacroCall>,
def: MacroDefId, def: MacroDefId,
call_site: SyntaxContextId, call_site: SyntaxContextId,
resolver: &dyn Fn(ModPath) -> Option<MacroDefId>, resolver: &dyn Fn(&ModPath) -> Option<MacroDefId>,
) -> ExpandResult<Option<MacroCallId>> { ) -> ExpandResult<Option<MacroCallId>> {
let expand_to = ExpandTo::from_call_site(macro_call); let expand_to = ExpandTo::from_call_site(macro_call);
@ -138,7 +138,7 @@ fn eager_macro_recur(
curr: InFile<SyntaxNode>, curr: InFile<SyntaxNode>,
krate: CrateId, krate: CrateId,
call_site: SyntaxContextId, call_site: SyntaxContextId,
macro_resolver: &dyn Fn(ModPath) -> Option<MacroDefId>, macro_resolver: &dyn Fn(&ModPath) -> Option<MacroDefId>,
) -> ExpandResult<Option<(SyntaxNode, TextSize)>> { ) -> ExpandResult<Option<(SyntaxNode, TextSize)>> {
let original = curr.value.clone_for_update(); let original = curr.value.clone_for_update();
@ -172,7 +172,7 @@ fn eager_macro_recur(
let def = match call.path().and_then(|path| { let def = match call.path().and_then(|path| {
ModPath::from_src(db, path, &mut |range| span_map.span_at(range.start()).ctx) ModPath::from_src(db, path, &mut |range| span_map.span_at(range.start()).ctx)
}) { }) {
Some(path) => match macro_resolver(path.clone()) { Some(path) => match macro_resolver(&path) {
Some(def) => def, Some(def) => def,
None => { None => {
error = error =

View file

@ -1,6 +1,4 @@
//! Things to wrap other things in file ids. //! Things to wrap other things in file ids.
use std::iter;
use either::Either; use either::Either;
use span::{ use span::{
AstIdNode, ErasedFileAstId, FileAstId, FileId, FileRange, HirFileId, HirFileIdRepr, AstIdNode, ErasedFileAstId, FileAstId, FileId, FileRange, HirFileId, HirFileIdRepr,
@ -150,27 +148,16 @@ impl<FileId: Copy, N: AstNode> InFileWrapper<FileId, N> {
} }
} }
impl<FileId: Copy, N: AstNode> InFileWrapper<FileId, &N> {
// unfortunately `syntax` collides with the impl above, because `&_` is fundamental
pub fn syntax_ref(&self) -> InFileWrapper<FileId, &SyntaxNode> {
self.with_value(self.value.syntax())
}
}
// region:specific impls // region:specific impls
impl InFile<&SyntaxNode> { impl InFile<&SyntaxNode> {
/// Traverse up macro calls and skips the macro invocation node
pub fn ancestors_with_macros(
self,
db: &dyn db::ExpandDatabase,
) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
let succ = move |node: &InFile<SyntaxNode>| match node.value.parent() {
Some(parent) => Some(node.with_value(parent)),
None => db
.lookup_intern_macro_call(node.file_id.macro_file()?.macro_call_id)
.to_node_item(db)
.syntax()
.cloned()
.map(|node| node.parent())
.transpose(),
};
iter::successors(succ(&self.cloned()), succ)
}
/// Falls back to the macro call range if the node cannot be mapped up fully. /// Falls back to the macro call range if the node cannot be mapped up fully.
/// ///
/// For attributes and derives, this will point back to the attribute only. /// For attributes and derives, this will point back to the attribute only.

View file

@ -4,7 +4,10 @@
use mbe::DocCommentDesugarMode; use mbe::DocCommentDesugarMode;
use rustc_hash::{FxHashMap, FxHashSet}; use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::SmallVec; use smallvec::SmallVec;
use span::{ErasedFileAstId, Span, SpanAnchor, FIXUP_ERASED_FILE_AST_ID_MARKER}; use span::{
ErasedFileAstId, Span, SpanAnchor, SyntaxContextId, FIXUP_ERASED_FILE_AST_ID_MARKER,
ROOT_ERASED_FILE_AST_ID,
};
use stdx::never; use stdx::never;
use syntax::{ use syntax::{
ast::{self, AstNode, HasLoopBody}, ast::{self, AstNode, HasLoopBody},
@ -88,7 +91,6 @@ pub(crate) fn fixup_syntax(
preorder.skip_subtree(); preorder.skip_subtree();
continue; continue;
} }
// In some other situations, we can fix things by just appending some tokens. // In some other situations, we can fix things by just appending some tokens.
match_ast! { match_ast! {
match node { match node {
@ -273,6 +275,62 @@ pub(crate) fn fixup_syntax(
]); ]);
} }
}, },
ast::RecordExprField(it) => {
if let Some(colon) = it.colon_token() {
if it.name_ref().is_some() && it.expr().is_none() {
append.insert(colon.into(), vec![
Leaf::Ident(Ident {
text: "__ra_fixup".into(),
span: fake_span(node_range)
})
]);
}
}
},
ast::Path(it) => {
if let Some(colon) = it.coloncolon_token() {
if it.segment().is_none() {
append.insert(colon.into(), vec![
Leaf::Ident(Ident {
text: "__ra_fixup".into(),
span: fake_span(node_range)
})
]);
}
}
},
ast::ArgList(it) => {
if it.r_paren_token().is_none() {
append.insert(node.into(), vec![
Leaf::Punct(Punct {
span: fake_span(node_range),
char: ')',
spacing: Spacing::Alone
})
]);
}
},
ast::ArgList(it) => {
if it.r_paren_token().is_none() {
append.insert(node.into(), vec![
Leaf::Punct(Punct {
span: fake_span(node_range),
char: ')',
spacing: Spacing::Alone
})
]);
}
},
ast::ClosureExpr(it) => {
if it.body().is_none() {
append.insert(node.into(), vec![
Leaf::Ident(Ident {
text: "__ra_fixup".into(),
span: fake_span(node_range)
})
]);
}
},
_ => (), _ => (),
} }
} }
@ -307,8 +365,13 @@ pub(crate) fn reverse_fixups(tt: &mut Subtree, undo_info: &SyntaxFixupUndoInfo)
tt.delimiter.close.anchor.ast_id == FIXUP_DUMMY_AST_ID tt.delimiter.close.anchor.ast_id == FIXUP_DUMMY_AST_ID
|| tt.delimiter.open.anchor.ast_id == FIXUP_DUMMY_AST_ID || tt.delimiter.open.anchor.ast_id == FIXUP_DUMMY_AST_ID
) { ) {
tt.delimiter.close = Span::DUMMY; let span = |file_id| Span {
tt.delimiter.open = Span::DUMMY; range: TextRange::empty(TextSize::new(0)),
anchor: SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
ctx: SyntaxContextId::ROOT,
};
tt.delimiter.open = span(tt.delimiter.open.anchor.file_id);
tt.delimiter.close = span(tt.delimiter.close.anchor.file_id);
} }
reverse_fixups_(tt, undo_info); reverse_fixups_(tt, undo_info);
} }
@ -751,4 +814,84 @@ fn foo () {loop { }}
"#]], "#]],
) )
} }
#[test]
fn fixup_path() {
check(
r#"
fn foo() {
path::
}
"#,
expect![[r#"
fn foo () {path :: __ra_fixup}
"#]],
)
}
#[test]
fn fixup_record_ctor_field() {
check(
r#"
fn foo() {
R { f: }
}
"#,
expect![[r#"
fn foo () {R {f : __ra_fixup}}
"#]],
)
}
#[test]
fn no_fixup_record_ctor_field() {
check(
r#"
fn foo() {
R { f: a }
}
"#,
expect![[r#"
fn foo () {R {f : a}}
"#]],
)
}
#[test]
fn fixup_arg_list() {
check(
r#"
fn foo() {
foo(a
}
"#,
expect![[r#"
fn foo () { foo ( a ) }
"#]],
);
check(
r#"
fn foo() {
bar.foo(a
}
"#,
expect![[r#"
fn foo () { bar . foo ( a ) }
"#]],
);
}
#[test]
fn fixup_closure() {
check(
r#"
fn foo() {
||
}
"#,
expect![[r#"
fn foo () {|| __ra_fixup}
"#]],
);
}
} }

View file

@ -4,7 +4,7 @@
//! Expansion, and Definition Contexts,” *Journal of Functional Programming* 22, no. 2 //! Expansion, and Definition Contexts,” *Journal of Functional Programming* 22, no. 2
//! (March 1, 2012): 181216, <https://doi.org/10.1017/S0956796812000093>. //! (March 1, 2012): 181216, <https://doi.org/10.1017/S0956796812000093>.
//! //!
//! Also see https://rustc-dev-guide.rust-lang.org/macro-expansion.html#hygiene-and-hierarchies //! Also see <https://rustc-dev-guide.rust-lang.org/macro-expansion.html#hygiene-and-hierarchies>
//! //!
//! # The Expansion Order Hierarchy //! # The Expansion Order Hierarchy
//! //!

View file

@ -36,11 +36,6 @@ pub fn find_builtin_attr_idx(name: &str) -> Option<usize> {
.copied() .copied()
} }
// impl AttributeTemplate {
// const DEFAULT: AttributeTemplate =
// AttributeTemplate { word: false, list: None, name_value_str: None };
// }
/// A convenience macro for constructing attribute templates. /// A convenience macro for constructing attribute templates.
/// E.g., `template!(Word, List: "description")` means that the attribute /// E.g., `template!(Word, List: "description")` means that the attribute
/// supports forms `#[attr]` and `#[attr(description)]`. /// supports forms `#[attr]` and `#[attr(description)]`.
@ -628,6 +623,10 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
rustc_safe_intrinsic, Normal, template!(Word), WarnFollowing, rustc_safe_intrinsic, Normal, template!(Word), WarnFollowing,
"the `#[rustc_safe_intrinsic]` attribute is used internally to mark intrinsics as safe" "the `#[rustc_safe_intrinsic]` attribute is used internally to mark intrinsics as safe"
), ),
rustc_attr!(
rustc_deprecated_safe_2024, Normal, template!(Word), WarnFollowing,
"the `#[rustc_safe_intrinsic]` marks functions as unsafe in Rust 2024",
),
// ========================================================================== // ==========================================================================
// Internal attributes, Testing: // Internal attributes, Testing:

View file

@ -16,6 +16,7 @@ pub mod declarative;
pub mod eager; pub mod eager;
pub mod files; pub mod files;
pub mod hygiene; pub mod hygiene;
pub mod inert_attr_macro;
pub mod mod_path; pub mod mod_path;
pub mod name; pub mod name;
pub mod proc_macro; pub mod proc_macro;
@ -30,7 +31,7 @@ use triomphe::Arc;
use std::{fmt, hash::Hash}; use std::{fmt, hash::Hash};
use base_db::{salsa::impl_intern_value_trivial, CrateId, FileId}; use base_db::{salsa::InternValueTrivial, CrateId, FileId};
use either::Either; use either::Either;
use span::{ use span::{
Edition, ErasedFileAstId, FileAstId, FileRange, HirFileIdRepr, Span, SpanAnchor, Edition, ErasedFileAstId, FileAstId, FileRange, HirFileIdRepr, Span, SpanAnchor,
@ -46,7 +47,7 @@ use crate::{
builtin_attr_macro::BuiltinAttrExpander, builtin_attr_macro::BuiltinAttrExpander,
builtin_derive_macro::BuiltinDeriveExpander, builtin_derive_macro::BuiltinDeriveExpander,
builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander}, builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander},
db::{ExpandDatabase, TokenExpander}, db::ExpandDatabase,
mod_path::ModPath, mod_path::ModPath,
proc_macro::{CustomProcMacroExpander, ProcMacroKind}, proc_macro::{CustomProcMacroExpander, ProcMacroKind},
span_map::{ExpansionSpanMap, SpanMap}, span_map::{ExpansionSpanMap, SpanMap},
@ -172,7 +173,7 @@ pub struct MacroCallLoc {
pub kind: MacroCallKind, pub kind: MacroCallKind,
pub ctxt: SyntaxContextId, pub ctxt: SyntaxContextId,
} }
impl_intern_value_trivial!(MacroCallLoc); impl InternValueTrivial for MacroCallLoc {}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct MacroDefId { pub struct MacroDefId {
@ -186,11 +187,11 @@ pub struct MacroDefId {
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum MacroDefKind { pub enum MacroDefKind {
Declarative(AstId<ast::Macro>), Declarative(AstId<ast::Macro>),
BuiltIn(BuiltinFnLikeExpander, AstId<ast::Macro>), BuiltIn(AstId<ast::Macro>, BuiltinFnLikeExpander),
BuiltInAttr(BuiltinAttrExpander, AstId<ast::Macro>), BuiltInAttr(AstId<ast::Macro>, BuiltinAttrExpander),
BuiltInDerive(BuiltinDeriveExpander, AstId<ast::Macro>), BuiltInDerive(AstId<ast::Macro>, BuiltinDeriveExpander),
BuiltInEager(EagerExpander, AstId<ast::Macro>), BuiltInEager(AstId<ast::Macro>, EagerExpander),
ProcMacro(CustomProcMacroExpander, ProcMacroKind, AstId<ast::Fn>), ProcMacro(AstId<ast::Fn>, CustomProcMacroExpander, ProcMacroKind),
} }
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
@ -252,9 +253,6 @@ pub trait HirFileIdExt {
/// If this is a macro call, returns the syntax node of the very first macro call this file resides in. /// If this is a macro call, returns the syntax node of the very first macro call this file resides in.
fn original_call_node(self, db: &dyn ExpandDatabase) -> Option<InRealFile<SyntaxNode>>; fn original_call_node(self, db: &dyn ExpandDatabase) -> Option<InRealFile<SyntaxNode>>;
/// Return expansion information if it is a macro-expansion file
fn expansion_info(self, db: &dyn ExpandDatabase) -> Option<ExpansionInfo>;
fn as_builtin_derive_attr_node(&self, db: &dyn ExpandDatabase) -> Option<InFile<ast::Attr>>; fn as_builtin_derive_attr_node(&self, db: &dyn ExpandDatabase) -> Option<InFile<ast::Attr>>;
} }
@ -308,11 +306,6 @@ impl HirFileIdExt for HirFileId {
} }
} }
/// Return expansion information if it is a macro-expansion file
fn expansion_info(self, db: &dyn ExpandDatabase) -> Option<ExpansionInfo> {
Some(ExpansionInfo::new(db, self.macro_file()?))
}
fn as_builtin_derive_attr_node(&self, db: &dyn ExpandDatabase) -> Option<InFile<ast::Attr>> { fn as_builtin_derive_attr_node(&self, db: &dyn ExpandDatabase) -> Option<InFile<ast::Attr>> {
let macro_file = self.macro_file()?; let macro_file = self.macro_file()?;
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
@ -379,7 +372,7 @@ impl MacroFileIdExt for MacroFileId {
fn is_custom_derive(&self, db: &dyn ExpandDatabase) -> bool { fn is_custom_derive(&self, db: &dyn ExpandDatabase) -> bool {
matches!( matches!(
db.lookup_intern_macro_call(self.macro_call_id).def.kind, db.lookup_intern_macro_call(self.macro_call_id).def.kind,
MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _) MacroDefKind::ProcMacro(_, _, ProcMacroKind::CustomDerive)
) )
} }
@ -416,8 +409,10 @@ impl MacroFileIdExt for MacroFileId {
} }
fn is_attr_macro(&self, db: &dyn ExpandDatabase) -> bool { fn is_attr_macro(&self, db: &dyn ExpandDatabase) -> bool {
let loc = db.lookup_intern_macro_call(self.macro_call_id); matches!(
matches!(loc.kind, MacroCallKind::Attr { .. }) db.lookup_intern_macro_call(self.macro_call_id).def.kind,
MacroDefKind::BuiltInAttr(..) | MacroDefKind::ProcMacro(_, _, ProcMacroKind::Attr)
)
} }
fn is_derive_attr_pseudo_expansion(&self, db: &dyn ExpandDatabase) -> bool { fn is_derive_attr_pseudo_expansion(&self, db: &dyn ExpandDatabase) -> bool {
@ -440,13 +435,13 @@ impl MacroDefId {
pub fn definition_range(&self, db: &dyn ExpandDatabase) -> InFile<TextRange> { pub fn definition_range(&self, db: &dyn ExpandDatabase) -> InFile<TextRange> {
match self.kind { match self.kind {
MacroDefKind::Declarative(id) MacroDefKind::Declarative(id)
| MacroDefKind::BuiltIn(_, id) | MacroDefKind::BuiltIn(id, _)
| MacroDefKind::BuiltInAttr(_, id) | MacroDefKind::BuiltInAttr(id, _)
| MacroDefKind::BuiltInDerive(_, id) | MacroDefKind::BuiltInDerive(id, _)
| MacroDefKind::BuiltInEager(_, id) => { | MacroDefKind::BuiltInEager(id, _) => {
id.with_value(db.ast_id_map(id.file_id).get(id.value).text_range()) id.with_value(db.ast_id_map(id.file_id).get(id.value).text_range())
} }
MacroDefKind::ProcMacro(_, _, id) => { MacroDefKind::ProcMacro(id, _, _) => {
id.with_value(db.ast_id_map(id.file_id).get(id.value).text_range()) id.with_value(db.ast_id_map(id.file_id).get(id.value).text_range())
} }
} }
@ -454,12 +449,12 @@ impl MacroDefId {
pub fn ast_id(&self) -> Either<AstId<ast::Macro>, AstId<ast::Fn>> { pub fn ast_id(&self) -> Either<AstId<ast::Macro>, AstId<ast::Fn>> {
match self.kind { match self.kind {
MacroDefKind::ProcMacro(.., id) => Either::Right(id), MacroDefKind::ProcMacro(id, ..) => Either::Right(id),
MacroDefKind::Declarative(id) MacroDefKind::Declarative(id)
| MacroDefKind::BuiltIn(_, id) | MacroDefKind::BuiltIn(id, _)
| MacroDefKind::BuiltInAttr(_, id) | MacroDefKind::BuiltInAttr(id, _)
| MacroDefKind::BuiltInDerive(_, id) | MacroDefKind::BuiltInDerive(id, _)
| MacroDefKind::BuiltInEager(_, id) => Either::Left(id), | MacroDefKind::BuiltInEager(id, _) => Either::Left(id),
} }
} }
@ -470,7 +465,7 @@ impl MacroDefId {
pub fn is_attribute(&self) -> bool { pub fn is_attribute(&self) -> bool {
matches!( matches!(
self.kind, self.kind,
MacroDefKind::BuiltInAttr(..) | MacroDefKind::ProcMacro(_, ProcMacroKind::Attr, _) MacroDefKind::BuiltInAttr(..) | MacroDefKind::ProcMacro(_, _, ProcMacroKind::Attr)
) )
} }
@ -478,7 +473,7 @@ impl MacroDefId {
matches!( matches!(
self.kind, self.kind,
MacroDefKind::BuiltInDerive(..) MacroDefKind::BuiltInDerive(..)
| MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _) | MacroDefKind::ProcMacro(_, _, ProcMacroKind::CustomDerive)
) )
} }
@ -486,26 +481,26 @@ impl MacroDefId {
matches!( matches!(
self.kind, self.kind,
MacroDefKind::BuiltIn(..) MacroDefKind::BuiltIn(..)
| MacroDefKind::ProcMacro(_, ProcMacroKind::Bang, _) | MacroDefKind::ProcMacro(_, _, ProcMacroKind::Bang)
| MacroDefKind::BuiltInEager(..) | MacroDefKind::BuiltInEager(..)
| MacroDefKind::Declarative(..) | MacroDefKind::Declarative(..)
) )
} }
pub fn is_attribute_derive(&self) -> bool { pub fn is_attribute_derive(&self) -> bool {
matches!(self.kind, MacroDefKind::BuiltInAttr(expander, ..) if expander.is_derive()) matches!(self.kind, MacroDefKind::BuiltInAttr(_, expander) if expander.is_derive())
} }
pub fn is_include(&self) -> bool { pub fn is_include(&self) -> bool {
matches!(self.kind, MacroDefKind::BuiltInEager(expander, ..) if expander.is_include()) matches!(self.kind, MacroDefKind::BuiltInEager(_, expander) if expander.is_include())
} }
pub fn is_include_like(&self) -> bool { pub fn is_include_like(&self) -> bool {
matches!(self.kind, MacroDefKind::BuiltInEager(expander, ..) if expander.is_include_like()) matches!(self.kind, MacroDefKind::BuiltInEager(_, expander) if expander.is_include_like())
} }
pub fn is_env_or_option_env(&self) -> bool { pub fn is_env_or_option_env(&self) -> bool {
matches!(self.kind, MacroDefKind::BuiltInEager(expander, ..) if expander.is_env_or_option_env()) matches!(self.kind, MacroDefKind::BuiltInEager(_, expander) if expander.is_env_or_option_env())
} }
} }
@ -702,16 +697,12 @@ impl MacroCallKind {
// simpler function calls if the map is only used once // simpler function calls if the map is only used once
#[derive(Clone, Debug, PartialEq, Eq)] #[derive(Clone, Debug, PartialEq, Eq)]
pub struct ExpansionInfo { pub struct ExpansionInfo {
pub expanded: InMacroFile<SyntaxNode>, expanded: InMacroFile<SyntaxNode>,
/// The argument TokenTree or item for attributes /// The argument TokenTree or item for attributes
arg: InFile<Option<SyntaxNode>>, arg: InFile<Option<SyntaxNode>>,
/// The `macro_rules!` or attribute input. exp_map: Arc<ExpansionSpanMap>,
attr_input_or_mac_def: Option<InFile<ast::TokenTree>>,
macro_def: TokenExpander,
macro_arg: Arc<tt::Subtree>,
pub exp_map: Arc<ExpansionSpanMap>,
arg_map: SpanMap, arg_map: SpanMap,
loc: MacroCallLoc,
} }
impl ExpansionInfo { impl ExpansionInfo {
@ -719,14 +710,21 @@ impl ExpansionInfo {
self.expanded.clone() self.expanded.clone()
} }
pub fn call_node(&self) -> Option<InFile<SyntaxNode>> { pub fn arg(&self) -> InFile<Option<&SyntaxNode>> {
Some(self.arg.with_value(self.arg.value.as_ref()?.parent()?)) self.arg.as_ref().map(|it| it.as_ref())
} }
pub fn call_file(&self) -> HirFileId { pub fn call_file(&self) -> HirFileId {
self.arg.file_id self.arg.file_id
} }
pub fn is_attr(&self) -> bool {
matches!(
self.loc.def.kind,
MacroDefKind::BuiltInAttr(..) | MacroDefKind::ProcMacro(_, _, ProcMacroKind::Attr)
)
}
/// Maps the passed in file range down into a macro expansion if it is the input to a macro call. /// Maps the passed in file range down into a macro expansion if it is the input to a macro call.
/// ///
/// Note this does a linear search through the entire backing vector of the spanmap. /// Note this does a linear search through the entire backing vector of the spanmap.
@ -811,49 +809,16 @@ impl ExpansionInfo {
} }
pub fn new(db: &dyn ExpandDatabase, macro_file: MacroFileId) -> ExpansionInfo { pub fn new(db: &dyn ExpandDatabase, macro_file: MacroFileId) -> ExpansionInfo {
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); let _p = tracing::info_span!("ExpansionInfo::new").entered();
let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
let arg_tt = loc.kind.arg(db); let arg_tt = loc.kind.arg(db);
let arg_map = db.span_map(arg_tt.file_id); let arg_map = db.span_map(arg_tt.file_id);
let macro_def = db.macro_expander(loc.def);
let (parse, exp_map) = db.parse_macro_expansion(macro_file).value; let (parse, exp_map) = db.parse_macro_expansion(macro_file).value;
let expanded = InMacroFile { file_id: macro_file, value: parse.syntax_node() }; let expanded = InMacroFile { file_id: macro_file, value: parse.syntax_node() };
let (macro_arg, _, _) = ExpansionInfo { expanded, loc, arg: arg_tt, exp_map, arg_map }
db.macro_arg_considering_derives(macro_file.macro_call_id, &loc.kind);
let def = loc.def.ast_id().left().and_then(|id| {
let def_tt = match id.to_node(db) {
ast::Macro::MacroRules(mac) => mac.token_tree()?,
ast::Macro::MacroDef(_) if matches!(macro_def, TokenExpander::BuiltInAttr(_)) => {
return None
}
ast::Macro::MacroDef(mac) => mac.body()?,
};
Some(InFile::new(id.file_id, def_tt))
});
let attr_input_or_mac_def = def.or_else(|| match loc.kind {
MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
// FIXME: handle `cfg_attr`
let tt = collect_attrs(&ast_id.to_node(db))
.nth(invoc_attr_index.ast_index())
.and_then(|x| Either::left(x.1))?
.token_tree()?;
Some(InFile::new(ast_id.file_id, tt))
}
_ => None,
});
ExpansionInfo {
expanded,
arg: arg_tt,
attr_input_or_mac_def,
macro_arg,
macro_def,
exp_map,
arg_map,
}
} }
} }

View file

@ -44,6 +44,10 @@ pub enum PathKind {
DollarCrate(CrateId), DollarCrate(CrateId),
} }
impl PathKind {
pub const SELF: PathKind = PathKind::Super(0);
}
impl ModPath { impl ModPath {
pub fn from_src( pub fn from_src(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
@ -96,7 +100,7 @@ impl ModPath {
pub fn textual_len(&self) -> usize { pub fn textual_len(&self) -> usize {
let base = match self.kind { let base = match self.kind {
PathKind::Plain => 0, PathKind::Plain => 0,
PathKind::Super(0) => "self".len(), PathKind::SELF => "self".len(),
PathKind::Super(i) => "super".len() * i as usize, PathKind::Super(i) => "super".len() * i as usize,
PathKind::Crate => "crate".len(), PathKind::Crate => "crate".len(),
PathKind::Abs => 0, PathKind::Abs => 0,
@ -113,7 +117,7 @@ impl ModPath {
} }
pub fn is_self(&self) -> bool { pub fn is_self(&self) -> bool {
self.kind == PathKind::Super(0) && self.segments.is_empty() self.kind == PathKind::SELF && self.segments.is_empty()
} }
#[allow(non_snake_case)] #[allow(non_snake_case)]
@ -193,7 +197,7 @@ fn display_fmt_path(
}; };
match path.kind { match path.kind {
PathKind::Plain => {} PathKind::Plain => {}
PathKind::Super(0) => add_segment("self")?, PathKind::SELF => add_segment("self")?,
PathKind::Super(n) => { PathKind::Super(n) => {
for _ in 0..n { for _ in 0..n {
add_segment("super")?; add_segment("super")?;
@ -316,7 +320,7 @@ fn convert_path_tt(db: &dyn ExpandDatabase, tt: &[tt::TokenTree]) -> Option<ModP
tt::Leaf::Ident(tt::Ident { text, span }) if text == "$crate" => { tt::Leaf::Ident(tt::Ident { text, span }) if text == "$crate" => {
resolve_crate_root(db, span.ctx).map(PathKind::DollarCrate).unwrap_or(PathKind::Crate) resolve_crate_root(db, span.ctx).map(PathKind::DollarCrate).unwrap_or(PathKind::Crate)
} }
tt::Leaf::Ident(tt::Ident { text, .. }) if text == "self" => PathKind::Super(0), tt::Leaf::Ident(tt::Ident { text, .. }) if text == "self" => PathKind::SELF,
tt::Leaf::Ident(tt::Ident { text, .. }) if text == "super" => { tt::Leaf::Ident(tt::Ident { text, .. }) if text == "super" => {
let mut deg = 1; let mut deg = 1;
while let Some(tt::Leaf::Ident(tt::Ident { text, .. })) = leaves.next() { while let Some(tt::Leaf::Ident(tt::Ident { text, .. })) = leaves.next() {

View file

@ -231,7 +231,7 @@ mod tests {
const DUMMY: tt::Span = tt::Span { const DUMMY: tt::Span = tt::Span {
range: TextRange::empty(TextSize::new(0)), range: TextRange::empty(TextSize::new(0)),
anchor: SpanAnchor { file_id: FileId::BOGUS, ast_id: ROOT_ERASED_FILE_AST_ID }, anchor: SpanAnchor { file_id: FileId::from_raw(0xe4e4e), ast_id: ROOT_ERASED_FILE_AST_ID },
ctx: SyntaxContextId::ROOT, ctx: SyntaxContextId::ROOT,
}; };

View file

@ -143,7 +143,7 @@ pub(crate) fn deref_by_trait(
table @ &mut InferenceTable { db, .. }: &mut InferenceTable<'_>, table @ &mut InferenceTable { db, .. }: &mut InferenceTable<'_>,
ty: Ty, ty: Ty,
) -> Option<Ty> { ) -> Option<Ty> {
let _p = tracing::span!(tracing::Level::INFO, "deref_by_trait").entered(); let _p = tracing::info_span!("deref_by_trait").entered();
if table.resolve_ty_shallow(&ty).inference_var(Interner).is_some() { if table.resolve_ty_shallow(&ty).inference_var(Interner).is_some() {
// don't try to deref unknown variables // don't try to deref unknown variables
return None; return None;

View file

@ -14,10 +14,10 @@ use hir_def::{
use smallvec::SmallVec; use smallvec::SmallVec;
use crate::{ use crate::{
consteval::unknown_const_as_generic, db::HirDatabase, error_lifetime, consteval::unknown_const_as_generic, db::HirDatabase, error_lifetime, generics::generics,
infer::unify::InferenceTable, primitive, to_assoc_type_id, to_chalk_trait_id, utils::generics, infer::unify::InferenceTable, primitive, to_assoc_type_id, to_chalk_trait_id, Binders,
Binders, BoundVar, CallableSig, GenericArg, GenericArgData, Interner, ProjectionTy, BoundVar, CallableSig, GenericArg, GenericArgData, Interner, ProjectionTy, Substitution,
Substitution, TraitRef, Ty, TyDefId, TyExt, TyKind, TraitRef, Ty, TyDefId, TyExt, TyKind,
}; };
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
@ -246,6 +246,7 @@ impl TyBuilder<()> {
/// - yield type of coroutine ([`Coroutine::Yield`](std::ops::Coroutine::Yield)) /// - yield type of coroutine ([`Coroutine::Yield`](std::ops::Coroutine::Yield))
/// - return type of coroutine ([`Coroutine::Return`](std::ops::Coroutine::Return)) /// - return type of coroutine ([`Coroutine::Return`](std::ops::Coroutine::Return))
/// - generic parameters in scope on `parent` /// - generic parameters in scope on `parent`
///
/// in this order. /// in this order.
/// ///
/// This method prepopulates the builder with placeholder substitution of `parent`, so you /// This method prepopulates the builder with placeholder substitution of `parent`, so you

View file

@ -20,13 +20,14 @@ use hir_expand::name::name;
use crate::{ use crate::{
db::{HirDatabase, InternedCoroutine}, db::{HirDatabase, InternedCoroutine},
display::HirDisplay, display::HirDisplay,
from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, make_binders, from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id,
make_single_type_binders, generics::generics,
make_binders, make_single_type_binders,
mapping::{from_chalk, ToChalk, TypeAliasAsValue}, mapping::{from_chalk, ToChalk, TypeAliasAsValue},
method_resolution::{TraitImpls, TyFingerprint, ALL_FLOAT_FPS, ALL_INT_FPS}, method_resolution::{TraitImpls, TyFingerprint, ALL_FLOAT_FPS, ALL_INT_FPS},
to_assoc_type_id, to_chalk_trait_id, to_assoc_type_id, to_chalk_trait_id,
traits::ChalkContext, traits::ChalkContext,
utils::{generics, ClosureSubst}, utils::ClosureSubst,
wrap_empty_binders, AliasEq, AliasTy, BoundVar, CallableDefId, DebruijnIndex, FnDefId, wrap_empty_binders, AliasEq, AliasTy, BoundVar, CallableDefId, DebruijnIndex, FnDefId,
Interner, ProjectionTy, ProjectionTyExt, QuantifiedWhereClause, Substitution, TraitRef, Interner, ProjectionTy, ProjectionTyExt, QuantifiedWhereClause, Substitution, TraitRef,
TraitRefExt, Ty, TyBuilder, TyExt, TyKind, WhereClause, TraitRefExt, Ty, TyBuilder, TyExt, TyKind, WhereClause,
@ -603,7 +604,6 @@ pub(crate) fn associated_ty_data_query(
// Lower bounds -- we could/should maybe move this to a separate query in `lower` // Lower bounds -- we could/should maybe move this to a separate query in `lower`
let type_alias_data = db.type_alias_data(type_alias); let type_alias_data = db.type_alias_data(type_alias);
let generic_params = generics(db.upcast(), type_alias.into()); let generic_params = generics(db.upcast(), type_alias.into());
// let bound_vars = generic_params.bound_vars_subst(DebruijnIndex::INNERMOST);
let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db.upcast()); let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db.upcast());
let ctx = crate::TyLoweringContext::new(db, &resolver, type_alias.into()) let ctx = crate::TyLoweringContext::new(db, &resolver, type_alias.into())
.with_type_param_mode(crate::lower::ParamLoweringMode::Variable); .with_type_param_mode(crate::lower::ParamLoweringMode::Variable);
@ -806,7 +806,7 @@ pub(crate) fn impl_datum_query(
krate: CrateId, krate: CrateId,
impl_id: ImplId, impl_id: ImplId,
) -> Arc<ImplDatum> { ) -> Arc<ImplDatum> {
let _p = tracing::span!(tracing::Level::INFO, "impl_datum_query").entered(); let _p = tracing::info_span!("impl_datum_query").entered();
debug!("impl_datum {:?}", impl_id); debug!("impl_datum {:?}", impl_id);
let impl_: hir_def::ImplId = from_chalk(db, impl_id); let impl_: hir_def::ImplId = from_chalk(db, impl_id);
impl_def_datum(db, krate, impl_id, impl_) impl_def_datum(db, krate, impl_id, impl_)

View file

@ -12,12 +12,10 @@ use hir_def::{
}; };
use crate::{ use crate::{
db::HirDatabase, db::HirDatabase, from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id,
from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, from_placeholder_idx, from_placeholder_idx, generics::generics, to_chalk_trait_id, utils::ClosureSubst, AdtId,
to_chalk_trait_id, AliasEq, AliasTy, Binders, CallableDefId, CallableSig, Canonical, CanonicalVarKinds, ClosureId,
utils::{generics, ClosureSubst}, DynTy, FnPointer, ImplTraitId, InEnvironment, Interner, Lifetime, ProjectionTy,
AdtId, AliasEq, AliasTy, Binders, CallableDefId, CallableSig, Canonical, CanonicalVarKinds,
ClosureId, DynTy, FnPointer, ImplTraitId, InEnvironment, Interner, Lifetime, ProjectionTy,
QuantifiedWhereClause, Substitution, TraitRef, Ty, TyBuilder, TyKind, TypeFlags, WhereClause, QuantifiedWhereClause, Substitution, TraitRef, Ty, TyBuilder, TyKind, TypeFlags, WhereClause,
}; };

View file

@ -15,10 +15,9 @@ use stdx::never;
use triomphe::Arc; use triomphe::Arc;
use crate::{ use crate::{
db::HirDatabase, infer::InferenceContext, lower::ParamLoweringMode, db::HirDatabase, generics::Generics, infer::InferenceContext, lower::ParamLoweringMode,
mir::monomorphize_mir_body_bad, to_placeholder_idx, utils::Generics, Const, ConstData, mir::monomorphize_mir_body_bad, to_placeholder_idx, Const, ConstData, ConstScalar, ConstValue,
ConstScalar, ConstValue, GenericArg, Interner, MemoryMap, Substitution, TraitEnvironment, Ty, GenericArg, Interner, MemoryMap, Substitution, TraitEnvironment, Ty, TyBuilder,
TyBuilder,
}; };
use super::mir::{interpret_mir, lower_to_mir, pad16, MirEvalError, MirLowerError}; use super::mir::{interpret_mir, lower_to_mir, pad16, MirEvalError, MirLowerError};
@ -72,12 +71,12 @@ impl From<MirEvalError> for ConstEvalError {
} }
} }
pub(crate) fn path_to_const( pub(crate) fn path_to_const<'g>(
db: &dyn HirDatabase, db: &dyn HirDatabase,
resolver: &Resolver, resolver: &Resolver,
path: &Path, path: &Path,
mode: ParamLoweringMode, mode: ParamLoweringMode,
args: impl FnOnce() -> Option<Generics>, args: impl FnOnce() -> Option<&'g Generics>,
debruijn: DebruijnIndex, debruijn: DebruijnIndex,
expected_ty: Ty, expected_ty: Ty,
) -> Option<Const> { ) -> Option<Const> {
@ -90,7 +89,7 @@ pub(crate) fn path_to_const(
} }
ParamLoweringMode::Variable => { ParamLoweringMode::Variable => {
let args = args(); let args = args();
match args.as_ref().and_then(|args| args.type_or_const_param_idx(p.into())) { match args.and_then(|args| args.type_or_const_param_idx(p.into())) {
Some(it) => ConstValue::BoundVar(BoundVar::new(debruijn, it)), Some(it) => ConstValue::BoundVar(BoundVar::new(debruijn, it)),
None => { None => {
never!( never!(

View file

@ -73,7 +73,7 @@ fn check_answer(ra_fixture: &str, check: impl FnOnce(&[u8], &MemoryMap)) {
Ok(t) => t, Ok(t) => t,
Err(e) => { Err(e) => {
let err = pretty_print_err(e, db); let err = pretty_print_err(e, db);
panic!("Error in evaluating goal: {}", err); panic!("Error in evaluating goal: {err}");
} }
}; };
match &r.data(Interner).value { match &r.data(Interner).value {
@ -81,7 +81,7 @@ fn check_answer(ra_fixture: &str, check: impl FnOnce(&[u8], &MemoryMap)) {
ConstScalar::Bytes(b, mm) => { ConstScalar::Bytes(b, mm) => {
check(b, mm); check(b, mm);
} }
x => panic!("Expected number but found {:?}", x), x => panic!("Expected number but found {x:?}"),
}, },
_ => panic!("result of const eval wasn't a concrete const"), _ => panic!("result of const eval wasn't a concrete const"),
} }
@ -89,7 +89,7 @@ fn check_answer(ra_fixture: &str, check: impl FnOnce(&[u8], &MemoryMap)) {
fn pretty_print_err(e: ConstEvalError, db: TestDB) -> String { fn pretty_print_err(e: ConstEvalError, db: TestDB) -> String {
let mut err = String::new(); let mut err = String::new();
let span_formatter = |file, range| format!("{:?} {:?}", file, range); let span_formatter = |file, range| format!("{file:?} {range:?}");
match e { match e {
ConstEvalError::MirLowerError(e) => e.pretty_print(&mut err, &db, span_formatter), ConstEvalError::MirLowerError(e) => e.pretty_print(&mut err, &db, span_formatter),
ConstEvalError::MirEvalError(e) => e.pretty_print(&mut err, &db, span_formatter), ConstEvalError::MirEvalError(e) => e.pretty_print(&mut err, &db, span_formatter),

View file

@ -5,7 +5,7 @@ use std::sync;
use base_db::{ use base_db::{
impl_intern_key, impl_intern_key,
salsa::{self, impl_intern_value_trivial}, salsa::{self, InternValueTrivial},
CrateId, Upcast, CrateId, Upcast,
}; };
use hir_def::{ use hir_def::{
@ -21,11 +21,12 @@ use crate::{
chalk_db, chalk_db,
consteval::ConstEvalError, consteval::ConstEvalError,
layout::{Layout, LayoutError}, layout::{Layout, LayoutError},
lower::{GenericDefaults, GenericPredicates},
method_resolution::{InherentImpls, TraitImpls, TyFingerprint}, method_resolution::{InherentImpls, TraitImpls, TyFingerprint},
mir::{BorrowckResult, MirBody, MirLowerError}, mir::{BorrowckResult, MirBody, MirLowerError},
Binders, CallableDefId, ClosureId, Const, FnDefId, GenericArg, ImplTraitId, ImplTraits, Binders, CallableDefId, ClosureId, Const, FnDefId, ImplTraitId, ImplTraits, InferenceResult,
InferenceResult, Interner, PolyFnSig, QuantifiedWhereClause, Substitution, TraitEnvironment, Interner, PolyFnSig, QuantifiedWhereClause, Substitution, TraitEnvironment, TraitRef, Ty,
TraitRef, Ty, TyDefId, ValueTyDefId, TyDefId, ValueTyDefId,
}; };
use hir_expand::name::Name; use hir_expand::name::Name;
@ -147,7 +148,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
) -> Arc<[Binders<QuantifiedWhereClause>]>; ) -> Arc<[Binders<QuantifiedWhereClause>]>;
#[salsa::invoke(crate::lower::generic_predicates_query)] #[salsa::invoke(crate::lower::generic_predicates_query)]
fn generic_predicates(&self, def: GenericDefId) -> Arc<[Binders<QuantifiedWhereClause>]>; fn generic_predicates(&self, def: GenericDefId) -> GenericPredicates;
#[salsa::invoke(crate::lower::trait_environment_for_body_query)] #[salsa::invoke(crate::lower::trait_environment_for_body_query)]
#[salsa::transparent] #[salsa::transparent]
@ -158,7 +159,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
#[salsa::invoke(crate::lower::generic_defaults_query)] #[salsa::invoke(crate::lower::generic_defaults_query)]
#[salsa::cycle(crate::lower::generic_defaults_recover)] #[salsa::cycle(crate::lower::generic_defaults_recover)]
fn generic_defaults(&self, def: GenericDefId) -> Arc<[Binders<GenericArg>]>; fn generic_defaults(&self, def: GenericDefId) -> GenericDefaults;
#[salsa::invoke(InherentImpls::inherent_impls_in_crate_query)] #[salsa::invoke(InherentImpls::inherent_impls_in_crate_query)]
fn inherent_impls_in_crate(&self, krate: CrateId) -> Arc<InherentImpls>; fn inherent_impls_in_crate(&self, krate: CrateId) -> Arc<InherentImpls>;
@ -298,7 +299,8 @@ impl_intern_key!(InternedClosureId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct InternedClosure(pub DefWithBodyId, pub ExprId); pub struct InternedClosure(pub DefWithBodyId, pub ExprId);
impl_intern_value_trivial!(InternedClosure);
impl InternValueTrivial for InternedClosure {}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct InternedCoroutineId(salsa::InternId); pub struct InternedCoroutineId(salsa::InternId);
@ -306,7 +308,7 @@ impl_intern_key!(InternedCoroutineId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct InternedCoroutine(pub DefWithBodyId, pub ExprId); pub struct InternedCoroutine(pub DefWithBodyId, pub ExprId);
impl_intern_value_trivial!(InternedCoroutine); impl InternValueTrivial for InternedCoroutine {}
/// This exists just for Chalk, because Chalk just has a single `FnDefId` where /// This exists just for Chalk, because Chalk just has a single `FnDefId` where
/// we have different IDs for struct and enum variant constructors. /// we have different IDs for struct and enum variant constructors.

View file

@ -43,7 +43,7 @@ mod allow {
} }
pub fn incorrect_case(db: &dyn HirDatabase, owner: ModuleDefId) -> Vec<IncorrectCase> { pub fn incorrect_case(db: &dyn HirDatabase, owner: ModuleDefId) -> Vec<IncorrectCase> {
let _p = tracing::span!(tracing::Level::INFO, "incorrect_case").entered(); let _p = tracing::info_span!("incorrect_case").entered();
let mut validator = DeclValidator::new(db); let mut validator = DeclValidator::new(db);
validator.validate_item(owner); validator.validate_item(owner);
validator.sink validator.sink

View file

@ -65,8 +65,7 @@ impl BodyValidationDiagnostic {
owner: DefWithBodyId, owner: DefWithBodyId,
validate_lints: bool, validate_lints: bool,
) -> Vec<BodyValidationDiagnostic> { ) -> Vec<BodyValidationDiagnostic> {
let _p = let _p = tracing::info_span!("BodyValidationDiagnostic::collect").entered();
tracing::span!(tracing::Level::INFO, "BodyValidationDiagnostic::collect").entered();
let infer = db.infer(owner); let infer = db.infer(owner);
let body = db.body(owner); let body = db.body(owner);
let mut validator = let mut validator =

View file

@ -13,7 +13,7 @@ use crate::{
}; };
pub fn missing_unsafe(db: &dyn HirDatabase, def: DefWithBodyId) -> Vec<ExprId> { pub fn missing_unsafe(db: &dyn HirDatabase, def: DefWithBodyId) -> Vec<ExprId> {
let _p = tracing::span!(tracing::Level::INFO, "missing_unsafe").entered(); let _p = tracing::info_span!("missing_unsafe").entered();
let mut res = Vec::new(); let mut res = Vec::new();
let is_unsafe = match def { let is_unsafe = match def {

View file

@ -4,7 +4,7 @@
use std::{ use std::{
fmt::{self, Debug}, fmt::{self, Debug},
mem::size_of, mem::{self, size_of},
}; };
use base_db::CrateId; use base_db::CrateId;
@ -36,12 +36,13 @@ use crate::{
consteval::try_const_usize, consteval::try_const_usize,
db::{HirDatabase, InternedClosure}, db::{HirDatabase, InternedClosure},
from_assoc_type_id, from_foreign_def_id, from_placeholder_idx, from_assoc_type_id, from_foreign_def_id, from_placeholder_idx,
generics::generics,
layout::Layout, layout::Layout,
lt_from_placeholder_idx, lt_from_placeholder_idx,
mapping::from_chalk, mapping::from_chalk,
mir::pad16, mir::pad16,
primitive, to_assoc_type_id, primitive, to_assoc_type_id,
utils::{self, detect_variant_from_bytes, generics, ClosureSubst}, utils::{self, detect_variant_from_bytes, ClosureSubst},
AdtId, AliasEq, AliasTy, Binders, CallableDefId, CallableSig, ConcreteConst, Const, AdtId, AliasEq, AliasTy, Binders, CallableDefId, CallableSig, ConcreteConst, Const,
ConstScalar, ConstValue, DomainGoal, FnAbi, GenericArg, ImplTraitId, Interner, Lifetime, ConstScalar, ConstValue, DomainGoal, FnAbi, GenericArg, ImplTraitId, Interner, Lifetime,
LifetimeData, LifetimeOutlives, MemoryMap, Mutability, OpaqueTy, ProjectionTy, ProjectionTyExt, LifetimeData, LifetimeOutlives, MemoryMap, Mutability, OpaqueTy, ProjectionTy, ProjectionTyExt,
@ -74,6 +75,8 @@ pub struct HirFormatter<'a> {
/// When rendering something that has a concept of "children" (like fields in a struct), this limits /// When rendering something that has a concept of "children" (like fields in a struct), this limits
/// how many should be rendered. /// how many should be rendered.
pub entity_limit: Option<usize>, pub entity_limit: Option<usize>,
/// When rendering functions, whether to show the constraint from the container
show_container_bounds: bool,
omit_verbose_types: bool, omit_verbose_types: bool,
closure_style: ClosureStyle, closure_style: ClosureStyle,
display_target: DisplayTarget, display_target: DisplayTarget,
@ -101,6 +104,7 @@ pub trait HirDisplay {
omit_verbose_types: bool, omit_verbose_types: bool,
display_target: DisplayTarget, display_target: DisplayTarget,
closure_style: ClosureStyle, closure_style: ClosureStyle,
show_container_bounds: bool,
) -> HirDisplayWrapper<'a, Self> ) -> HirDisplayWrapper<'a, Self>
where where
Self: Sized, Self: Sized,
@ -117,6 +121,7 @@ pub trait HirDisplay {
omit_verbose_types, omit_verbose_types,
display_target, display_target,
closure_style, closure_style,
show_container_bounds,
} }
} }
@ -134,6 +139,7 @@ pub trait HirDisplay {
omit_verbose_types: false, omit_verbose_types: false,
closure_style: ClosureStyle::ImplFn, closure_style: ClosureStyle::ImplFn,
display_target: DisplayTarget::Diagnostics, display_target: DisplayTarget::Diagnostics,
show_container_bounds: false,
} }
} }
@ -155,6 +161,7 @@ pub trait HirDisplay {
omit_verbose_types: true, omit_verbose_types: true,
closure_style: ClosureStyle::ImplFn, closure_style: ClosureStyle::ImplFn,
display_target: DisplayTarget::Diagnostics, display_target: DisplayTarget::Diagnostics,
show_container_bounds: false,
} }
} }
@ -176,6 +183,7 @@ pub trait HirDisplay {
omit_verbose_types: true, omit_verbose_types: true,
closure_style: ClosureStyle::ImplFn, closure_style: ClosureStyle::ImplFn,
display_target: DisplayTarget::Diagnostics, display_target: DisplayTarget::Diagnostics,
show_container_bounds: false,
} }
} }
@ -198,6 +206,7 @@ pub trait HirDisplay {
omit_verbose_types: false, omit_verbose_types: false,
closure_style: ClosureStyle::ImplFn, closure_style: ClosureStyle::ImplFn,
display_target: DisplayTarget::SourceCode { module_id, allow_opaque }, display_target: DisplayTarget::SourceCode { module_id, allow_opaque },
show_container_bounds: false,
}) { }) {
Ok(()) => {} Ok(()) => {}
Err(HirDisplayError::FmtError) => panic!("Writing to String can't fail!"), Err(HirDisplayError::FmtError) => panic!("Writing to String can't fail!"),
@ -219,6 +228,29 @@ pub trait HirDisplay {
omit_verbose_types: false, omit_verbose_types: false,
closure_style: ClosureStyle::ImplFn, closure_style: ClosureStyle::ImplFn,
display_target: DisplayTarget::Test, display_target: DisplayTarget::Test,
show_container_bounds: false,
}
}
/// Returns a String representation of `self` that shows the constraint from
/// the container for functions
fn display_with_container_bounds<'a>(
&'a self,
db: &'a dyn HirDatabase,
show_container_bounds: bool,
) -> HirDisplayWrapper<'a, Self>
where
Self: Sized,
{
HirDisplayWrapper {
db,
t: self,
max_size: None,
limited_size: None,
omit_verbose_types: false,
closure_style: ClosureStyle::ImplFn,
display_target: DisplayTarget::Diagnostics,
show_container_bounds,
} }
} }
} }
@ -277,6 +309,10 @@ impl HirFormatter<'_> {
pub fn omit_verbose_types(&self) -> bool { pub fn omit_verbose_types(&self) -> bool {
self.omit_verbose_types self.omit_verbose_types
} }
pub fn show_container_bounds(&self) -> bool {
self.show_container_bounds
}
} }
#[derive(Clone, Copy)] #[derive(Clone, Copy)]
@ -336,6 +372,7 @@ pub struct HirDisplayWrapper<'a, T> {
omit_verbose_types: bool, omit_verbose_types: bool,
closure_style: ClosureStyle, closure_style: ClosureStyle,
display_target: DisplayTarget, display_target: DisplayTarget,
show_container_bounds: bool,
} }
#[derive(Debug, PartialEq, Eq, Clone, Copy)] #[derive(Debug, PartialEq, Eq, Clone, Copy)]
@ -365,6 +402,7 @@ impl<T: HirDisplay> HirDisplayWrapper<'_, T> {
omit_verbose_types: self.omit_verbose_types, omit_verbose_types: self.omit_verbose_types,
display_target: self.display_target, display_target: self.display_target,
closure_style: self.closure_style, closure_style: self.closure_style,
show_container_bounds: self.show_container_bounds,
}) })
} }
@ -423,7 +461,7 @@ impl HirDisplay for ProjectionTy {
let proj_params_count = let proj_params_count =
self.substitution.len(Interner) - trait_ref.substitution.len(Interner); self.substitution.len(Interner) - trait_ref.substitution.len(Interner);
let proj_params = &self.substitution.as_slice(Interner)[..proj_params_count]; let proj_params = &self.substitution.as_slice(Interner)[..proj_params_count];
hir_fmt_generics(f, proj_params, None) hir_fmt_generics(f, proj_params, None, None)
} }
} }
@ -456,7 +494,7 @@ impl HirDisplay for Const {
ConstValue::Placeholder(idx) => { ConstValue::Placeholder(idx) => {
let id = from_placeholder_idx(f.db, *idx); let id = from_placeholder_idx(f.db, *idx);
let generics = generics(f.db.upcast(), id.parent); let generics = generics(f.db.upcast(), id.parent);
let param_data = &generics.params[id.local_id]; let param_data = &generics[id.local_id];
write!(f, "{}", param_data.name().unwrap().display(f.db.upcast()))?; write!(f, "{}", param_data.name().unwrap().display(f.db.upcast()))?;
Ok(()) Ok(())
} }
@ -468,6 +506,7 @@ impl HirDisplay for Const {
f, f,
parameters.as_slice(Interner), parameters.as_slice(Interner),
c.generic_def(f.db.upcast()), c.generic_def(f.db.upcast()),
None,
)?; )?;
Ok(()) Ok(())
} }
@ -670,7 +709,7 @@ fn render_const_scalar(
TyKind::FnDef(..) => ty.hir_fmt(f), TyKind::FnDef(..) => ty.hir_fmt(f),
TyKind::Function(_) | TyKind::Raw(_, _) => { TyKind::Function(_) | TyKind::Raw(_, _) => {
let it = u128::from_le_bytes(pad16(b, false)); let it = u128::from_le_bytes(pad16(b, false));
write!(f, "{:#X} as ", it)?; write!(f, "{it:#X} as ")?;
ty.hir_fmt(f) ty.hir_fmt(f)
} }
TyKind::Array(ty, len) => { TyKind::Array(ty, len) => {
@ -950,7 +989,7 @@ impl HirDisplay for Ty {
if parameters.len(Interner) > 0 { if parameters.len(Interner) > 0 {
let generics = generics(db.upcast(), def.into()); let generics = generics(db.upcast(), def.into());
let (parent_len, self_, type_, const_, impl_, lifetime) = let (parent_len, self_param, type_, const_, impl_, lifetime) =
generics.provenance_split(); generics.provenance_split();
let parameters = parameters.as_slice(Interner); let parameters = parameters.as_slice(Interner);
// We print all params except implicit impl Trait params. Still a bit weird; should we leave out parent and self? // We print all params except implicit impl Trait params. Still a bit weird; should we leave out parent and self?
@ -958,7 +997,7 @@ impl HirDisplay for Ty {
// `parameters` are in the order of fn's params (including impl traits), fn's lifetimes // `parameters` are in the order of fn's params (including impl traits), fn's lifetimes
// parent's params (those from enclosing impl or trait, if any). // parent's params (those from enclosing impl or trait, if any).
let (fn_params, other) = let (fn_params, other) =
parameters.split_at(self_ + type_ + const_ + lifetime); parameters.split_at(self_param as usize + type_ + const_ + lifetime);
let (_impl, parent_params) = other.split_at(impl_); let (_impl, parent_params) = other.split_at(impl_);
debug_assert_eq!(parent_params.len(), parent_len); debug_assert_eq!(parent_params.len(), parent_len);
@ -967,11 +1006,11 @@ impl HirDisplay for Ty {
let fn_params = generic_args_sans_defaults(f, Some(def.into()), fn_params); let fn_params = generic_args_sans_defaults(f, Some(def.into()), fn_params);
write!(f, "<")?; write!(f, "<")?;
hir_fmt_generic_arguments(f, parent_params)?; hir_fmt_generic_arguments(f, parent_params, None)?;
if !parent_params.is_empty() && !fn_params.is_empty() { if !parent_params.is_empty() && !fn_params.is_empty() {
write!(f, ", ")?; write!(f, ", ")?;
} }
hir_fmt_generic_arguments(f, fn_params)?; hir_fmt_generic_arguments(f, fn_params, None)?;
write!(f, ">")?; write!(f, ">")?;
} }
} }
@ -1016,7 +1055,7 @@ impl HirDisplay for Ty {
let generic_def = self.as_generic_def(db); let generic_def = self.as_generic_def(db);
hir_fmt_generics(f, parameters.as_slice(Interner), generic_def)?; hir_fmt_generics(f, parameters.as_slice(Interner), generic_def, None)?;
} }
TyKind::AssociatedType(assoc_type_id, parameters) => { TyKind::AssociatedType(assoc_type_id, parameters) => {
let type_alias = from_assoc_type_id(*assoc_type_id); let type_alias = from_assoc_type_id(*assoc_type_id);
@ -1039,7 +1078,7 @@ impl HirDisplay for Ty {
f.end_location_link(); f.end_location_link();
// Note that the generic args for the associated type come before those for the // Note that the generic args for the associated type come before those for the
// trait (including the self type). // trait (including the self type).
hir_fmt_generics(f, parameters.as_slice(Interner), None) hir_fmt_generics(f, parameters.as_slice(Interner), None, None)
} else { } else {
let projection_ty = ProjectionTy { let projection_ty = ProjectionTy {
associated_ty_id: to_assoc_type_id(type_alias), associated_ty_id: to_assoc_type_id(type_alias),
@ -1141,7 +1180,7 @@ impl HirDisplay for Ty {
} }
ClosureStyle::ClosureWithSubst => { ClosureStyle::ClosureWithSubst => {
write!(f, "{{closure#{:?}}}", id.0.as_u32())?; write!(f, "{{closure#{:?}}}", id.0.as_u32())?;
return hir_fmt_generics(f, substs.as_slice(Interner), None); return hir_fmt_generics(f, substs.as_slice(Interner), None, None);
} }
_ => (), _ => (),
} }
@ -1177,7 +1216,7 @@ impl HirDisplay for Ty {
TyKind::Placeholder(idx) => { TyKind::Placeholder(idx) => {
let id = from_placeholder_idx(db, *idx); let id = from_placeholder_idx(db, *idx);
let generics = generics(db.upcast(), id.parent); let generics = generics(db.upcast(), id.parent);
let param_data = &generics.params[id.local_id]; let param_data = &generics[id.local_id];
match param_data { match param_data {
TypeOrConstParamData::TypeParamData(p) => match p.provenance { TypeOrConstParamData::TypeParamData(p) => match p.provenance {
TypeParamProvenance::TypeParamList | TypeParamProvenance::TraitSelf => { TypeParamProvenance::TypeParamList | TypeParamProvenance::TraitSelf => {
@ -1329,6 +1368,7 @@ fn hir_fmt_generics(
f: &mut HirFormatter<'_>, f: &mut HirFormatter<'_>,
parameters: &[GenericArg], parameters: &[GenericArg],
generic_def: Option<hir_def::GenericDefId>, generic_def: Option<hir_def::GenericDefId>,
self_: Option<&Ty>,
) -> Result<(), HirDisplayError> { ) -> Result<(), HirDisplayError> {
if parameters.is_empty() { if parameters.is_empty() {
return Ok(()); return Ok(());
@ -1348,7 +1388,7 @@ fn hir_fmt_generics(
}); });
if !parameters_to_write.is_empty() && !only_err_lifetimes { if !parameters_to_write.is_empty() && !only_err_lifetimes {
write!(f, "<")?; write!(f, "<")?;
hir_fmt_generic_arguments(f, parameters_to_write)?; hir_fmt_generic_arguments(f, parameters_to_write, self_)?;
write!(f, ">")?; write!(f, ">")?;
} }
@ -1411,6 +1451,7 @@ fn generic_args_sans_defaults<'ga>(
fn hir_fmt_generic_arguments( fn hir_fmt_generic_arguments(
f: &mut HirFormatter<'_>, f: &mut HirFormatter<'_>,
parameters: &[GenericArg], parameters: &[GenericArg],
self_: Option<&Ty>,
) -> Result<(), HirDisplayError> { ) -> Result<(), HirDisplayError> {
let mut first = true; let mut first = true;
let lifetime_offset = parameters.iter().position(|arg| arg.lifetime(Interner).is_some()); let lifetime_offset = parameters.iter().position(|arg| arg.lifetime(Interner).is_some());
@ -1432,11 +1473,13 @@ fn hir_fmt_generic_arguments(
continue; continue;
} }
if !first { if !mem::take(&mut first) {
write!(f, ", ")?; write!(f, ", ")?;
} }
first = false; match self_ {
generic_arg.hir_fmt(f)?; self_ @ Some(_) if generic_arg.ty(Interner) == self_ => write!(f, "Self")?,
_ => generic_arg.hir_fmt(f)?,
}
} }
Ok(()) Ok(())
} }
@ -1559,12 +1602,16 @@ fn write_bounds_like_dyn_trait(
write!(f, "{}", f.db.trait_data(trait_).name.display(f.db.upcast()))?; write!(f, "{}", f.db.trait_data(trait_).name.display(f.db.upcast()))?;
f.end_location_link(); f.end_location_link();
if is_fn_trait { if is_fn_trait {
if let [_self, params @ ..] = trait_ref.substitution.as_slice(Interner) { if let [self_, params @ ..] = trait_ref.substitution.as_slice(Interner) {
if let Some(args) = if let Some(args) =
params.first().and_then(|it| it.assert_ty_ref(Interner).as_tuple()) params.first().and_then(|it| it.assert_ty_ref(Interner).as_tuple())
{ {
write!(f, "(")?; write!(f, "(")?;
hir_fmt_generic_arguments(f, args.as_slice(Interner))?; hir_fmt_generic_arguments(
f,
args.as_slice(Interner),
self_.ty(Interner),
)?;
write!(f, ")")?; write!(f, ")")?;
} }
} }
@ -1574,10 +1621,10 @@ fn write_bounds_like_dyn_trait(
Some(trait_.into()), Some(trait_.into()),
trait_ref.substitution.as_slice(Interner), trait_ref.substitution.as_slice(Interner),
); );
if let [_self, params @ ..] = params { if let [self_, params @ ..] = params {
if !params.is_empty() { if !params.is_empty() {
write!(f, "<")?; write!(f, "<")?;
hir_fmt_generic_arguments(f, params)?; hir_fmt_generic_arguments(f, params, self_.ty(Interner))?;
// there might be assoc type bindings, so we leave the angle brackets open // there might be assoc type bindings, so we leave the angle brackets open
angle_open = true; angle_open = true;
} }
@ -1635,6 +1682,7 @@ fn write_bounds_like_dyn_trait(
hir_fmt_generic_arguments( hir_fmt_generic_arguments(
f, f,
&proj.substitution.as_slice(Interner)[..proj_arg_count], &proj.substitution.as_slice(Interner)[..proj_arg_count],
None,
)?; )?;
write!(f, ">")?; write!(f, ">")?;
} }
@ -1691,7 +1739,8 @@ fn fmt_trait_ref(
f.start_location_link(trait_.into()); f.start_location_link(trait_.into());
write!(f, "{}", f.db.trait_data(trait_).name.display(f.db.upcast()))?; write!(f, "{}", f.db.trait_data(trait_).name.display(f.db.upcast()))?;
f.end_location_link(); f.end_location_link();
hir_fmt_generics(f, &tr.substitution.as_slice(Interner)[1..], None) let substs = tr.substitution.as_slice(Interner);
hir_fmt_generics(f, &substs[1..], None, substs[0].ty(Interner))
} }
impl HirDisplay for TraitRef { impl HirDisplay for TraitRef {
@ -1749,7 +1798,7 @@ impl HirDisplay for LifetimeData {
LifetimeData::Placeholder(idx) => { LifetimeData::Placeholder(idx) => {
let id = lt_from_placeholder_idx(f.db, *idx); let id = lt_from_placeholder_idx(f.db, *idx);
let generics = generics(f.db.upcast(), id.parent); let generics = generics(f.db.upcast(), id.parent);
let param_data = &generics.params[id.local_id]; let param_data = &generics[id.local_id];
write!(f, "{}", param_data.name.display(f.db.upcast()))?; write!(f, "{}", param_data.name.display(f.db.upcast()))?;
Ok(()) Ok(())
} }
@ -1943,7 +1992,7 @@ impl HirDisplay for Path {
(_, PathKind::Plain) => {} (_, PathKind::Plain) => {}
(_, PathKind::Abs) => {} (_, PathKind::Abs) => {}
(_, PathKind::Crate) => write!(f, "crate")?, (_, PathKind::Crate) => write!(f, "crate")?,
(_, PathKind::Super(0)) => write!(f, "self")?, (_, &PathKind::SELF) => write!(f, "self")?,
(_, PathKind::Super(n)) => { (_, PathKind::Super(n)) => {
for i in 0..*n { for i in 0..*n {
if i > 0 { if i > 0 {

View file

@ -0,0 +1,263 @@
//! Utilities for working with generics.
//!
//! The layout for generics as expected by chalk is as follows:
//! - Optional Self parameter
//! - Type or Const parameters
//! - Lifetime parameters
//! - Parent parameters
//!
//! where parent follows the same scheme.
use std::ops;
use chalk_ir::{cast::Cast as _, BoundVar, DebruijnIndex};
use hir_def::{
db::DefDatabase,
generics::{
GenericParamDataRef, GenericParams, LifetimeParamData, TypeOrConstParamData,
TypeParamProvenance,
},
ConstParamId, GenericDefId, GenericParamId, ItemContainerId, LifetimeParamId,
LocalLifetimeParamId, LocalTypeOrConstParamId, Lookup, TypeOrConstParamId, TypeParamId,
};
use intern::Interned;
use crate::{db::HirDatabase, lt_to_placeholder_idx, to_placeholder_idx, Interner, Substitution};
/// Builds the [`Generics`] for `def`, recursively collecting the generics of
/// the chain of enclosing generic items (e.g. an impl or trait parent).
pub(crate) fn generics(db: &dyn DefDatabase, def: GenericDefId) -> Generics {
    let params = db.generic_params(def);
    let parent_generics =
        parent_generic_def(db, def).map(|parent| Box::new(generics(db, parent)));
    Generics { def, params, parent_generics }
}
/// The generic parameters in scope for some [`GenericDefId`], including those
/// inherited from the parent item (impl or trait), laid out in the order chalk
/// expects (see the module docs).
#[derive(Clone, Debug)]
pub(crate) struct Generics {
    // The definition these parameters belong to.
    def: GenericDefId,
    // The parameters declared directly on `def` (not including the parent's).
    params: Interned<GenericParams>,
    // Generics of the enclosing generic item, if any; boxed since the chain is
    // usually short and this keeps the struct small.
    parent_generics: Option<Box<Generics>>,
}
// Forward indexing (e.g. by `LocalTypeOrConstParamId` or `LocalLifetimeParamId`)
// to the directly-declared `params`. Note this only covers `self` params, not
// the parent's — callers must resolve the right `Generics` level first.
impl<T> ops::Index<T> for Generics
where
    GenericParams: ops::Index<T>,
{
    type Output = <GenericParams as ops::Index<T>>::Output;
    fn index(&self, index: T) -> &Self::Output {
        &self.params[index]
    }
}
impl Generics {
    /// The definition this set of generic parameters belongs to.
    pub(crate) fn def(&self) -> GenericDefId {
        self.def
    }

    /// Iterate over the ids of all params in scope: own params first, then the
    /// parent's (matching the layout described in the module docs).
    pub(crate) fn iter_id(&self) -> impl Iterator<Item = GenericParamId> + '_ {
        self.iter_self_id().chain(self.iter_parent_id())
    }

    /// Iterate over the ids of params declared directly on `self.def`.
    pub(crate) fn iter_self_id(&self) -> impl Iterator<Item = GenericParamId> + '_ {
        self.iter_self().map(|(id, _)| id)
    }

    /// Iterate over the ids of the parent's params (empty if there is no parent).
    fn iter_parent_id(&self) -> impl Iterator<Item = GenericParamId> + '_ {
        self.iter_parent().map(|(id, _)| id)
    }

    /// Iterate over the type and const params declared directly on `self.def`,
    /// keyed by their local (arena) id.
    pub(crate) fn iter_self_type_or_consts(
        &self,
    ) -> impl DoubleEndedIterator<Item = (LocalTypeOrConstParamId, &TypeOrConstParamData)> {
        self.params.iter_type_or_consts()
    }

    /// Iterate over the params followed by the parent params.
    pub(crate) fn iter(
        &self,
    ) -> impl DoubleEndedIterator<Item = (GenericParamId, GenericParamDataRef<'_>)> + '_ {
        self.iter_self().chain(self.iter_parent())
    }

    /// Iterate over the params without parent params.
    ///
    /// Order matters: type/const params first, then lifetimes — the index math
    /// in `find_type_or_const_param`/`find_lifetime` relies on this.
    pub(crate) fn iter_self(
        &self,
    ) -> impl DoubleEndedIterator<Item = (GenericParamId, GenericParamDataRef<'_>)> + '_ {
        self.params
            .iter_type_or_consts()
            .map(from_toc_id(self))
            .chain(self.params.iter_lt().map(from_lt_id(self)))
    }

    /// Iterate over the parent's params: its type/const params followed by its
    /// lifetimes (empty if there is no parent).
    fn iter_parent(
        &self,
    ) -> impl DoubleEndedIterator<Item = (GenericParamId, GenericParamDataRef<'_>)> + '_ {
        self.parent_generics().into_iter().flat_map(|it| {
            let lt_iter = it.params.iter_lt().map(from_lt_id(it));
            it.params.iter_type_or_consts().map(from_toc_id(it)).chain(lt_iter)
        })
    }

    /// Returns total number of generic parameters in scope, including those from parent.
    pub(crate) fn len(&self) -> usize {
        let parent = self.parent_generics().map_or(0, Generics::len);
        let child = self.params.len();
        parent + child
    }

    /// Returns the number of generic parameters excluding those from parent.
    pub(crate) fn len_self(&self) -> usize {
        self.params.len()
    }

    /// (parent total, self param, type params, const params, impl trait list, lifetimes)
    ///
    /// All counts except the first describe only the params declared directly
    /// on `self.def`; `parent total` is the parent's full transitive count.
    pub(crate) fn provenance_split(&self) -> (usize, bool, usize, usize, usize, usize) {
        let mut self_param = false;
        let mut type_params = 0;
        let mut impl_trait_params = 0;
        let mut const_params = 0;
        self.params.iter_type_or_consts().for_each(|(_, data)| match data {
            TypeOrConstParamData::TypeParamData(p) => match p.provenance {
                TypeParamProvenance::TypeParamList => type_params += 1,
                TypeParamProvenance::TraitSelf => self_param |= true,
                TypeParamProvenance::ArgumentImplTrait => impl_trait_params += 1,
            },
            TypeOrConstParamData::ConstParamData(_) => const_params += 1,
        });

        let lifetime_params = self.params.iter_lt().count();

        let parent_len = self.parent_generics().map_or(0, Generics::len);
        (parent_len, self_param, type_params, const_params, impl_trait_params, lifetime_params)
    }

    /// Position of `param` within the full parameter list (self then parent),
    /// or `None` if it is not found in this chain.
    pub(crate) fn type_or_const_param_idx(&self, param: TypeOrConstParamId) -> Option<usize> {
        self.find_type_or_const_param(param)
    }

    fn find_type_or_const_param(&self, param: TypeOrConstParamId) -> Option<usize> {
        if param.parent == self.def {
            // Type/const params occupy the leading positions of the self
            // params, so the raw arena index is already the answer.
            let idx = param.local_id.into_raw().into_u32() as usize;
            debug_assert!(idx <= self.params.type_or_consts.len());
            Some(idx)
        } else {
            debug_assert_eq!(self.parent_generics().map(|it| it.def), Some(param.parent));
            self.parent_generics()
                .and_then(|g| g.find_type_or_const_param(param))
                // Remember that parent parameters come after parameters for self.
                .map(|idx| self.len_self() + idx)
        }
    }

    /// Position of `lifetime` within the full parameter list (self then
    /// parent), or `None` if it is not found in this chain.
    pub(crate) fn lifetime_idx(&self, lifetime: LifetimeParamId) -> Option<usize> {
        self.find_lifetime(lifetime)
    }

    fn find_lifetime(&self, lifetime: LifetimeParamId) -> Option<usize> {
        if lifetime.parent == self.def {
            // Lifetimes come after all type/const params within the self
            // params (see `iter_self`), hence the offset.
            let idx = lifetime.local_id.into_raw().into_u32() as usize;
            debug_assert!(idx <= self.params.lifetimes.len());
            Some(self.params.type_or_consts.len() + idx)
        } else {
            debug_assert_eq!(self.parent_generics().map(|it| it.def), Some(lifetime.parent));
            self.parent_generics()
                .and_then(|g| g.find_lifetime(lifetime))
                // Parent parameters come after parameters for self.
                .map(|idx| self.len_self() + idx)
        }
    }

    /// Generics of the enclosing generic item, if any.
    pub(crate) fn parent_generics(&self) -> Option<&Generics> {
        self.parent_generics.as_deref()
    }

    /// The parent generics, falling back to `self` when there is no parent.
    pub(crate) fn parent_or_self(&self) -> &Generics {
        self.parent_generics.as_deref().unwrap_or(self)
    }

    /// Returns a Substitution that replaces each parameter by a bound variable.
    pub(crate) fn bound_vars_subst(
        &self,
        db: &dyn HirDatabase,
        debruijn: DebruijnIndex,
    ) -> Substitution {
        Substitution::from_iter(
            Interner,
            self.iter_id().enumerate().map(|(idx, id)| match id {
                GenericParamId::ConstParamId(id) => BoundVar::new(debruijn, idx)
                    .to_const(Interner, db.const_param_ty(id))
                    .cast(Interner),
                GenericParamId::TypeParamId(_) => {
                    BoundVar::new(debruijn, idx).to_ty(Interner).cast(Interner)
                }
                GenericParamId::LifetimeParamId(_) => {
                    BoundVar::new(debruijn, idx).to_lifetime(Interner).cast(Interner)
                }
            }),
        )
    }

    /// Returns a Substitution that replaces each parameter by itself (i.e. `Ty::Param`).
    pub(crate) fn placeholder_subst(&self, db: &dyn HirDatabase) -> Substitution {
        Substitution::from_iter(
            Interner,
            self.iter_id().map(|id| match id {
                GenericParamId::TypeParamId(id) => {
                    to_placeholder_idx(db, id.into()).to_ty(Interner).cast(Interner)
                }
                GenericParamId::ConstParamId(id) => to_placeholder_idx(db, id.into())
                    .to_const(Interner, db.const_param_ty(id))
                    .cast(Interner),
                GenericParamId::LifetimeParamId(id) => {
                    lt_to_placeholder_idx(db, id).to_lifetime(Interner).cast(Interner)
                }
            }),
        )
    }
}
/// Finds the enclosing generic item (impl or trait) whose parameters are also
/// in scope for `def`, if any.
fn parent_generic_def(db: &dyn DefDatabase, def: GenericDefId) -> Option<GenericDefId> {
    let container = match def {
        GenericDefId::FunctionId(id) => id.lookup(db).container,
        GenericDefId::TypeAliasId(id) => id.lookup(db).container,
        GenericDefId::ConstId(id) => id.lookup(db).container,
        // An enum variant's generics are exactly those of its enum.
        GenericDefId::EnumVariantId(id) => return Some(id.lookup(db).parent.into()),
        // These item kinds are never nested inside another generic item.
        GenericDefId::AdtId(_)
        | GenericDefId::TraitId(_)
        | GenericDefId::ImplId(_)
        | GenericDefId::TraitAliasId(_) => return None,
    };

    match container {
        ItemContainerId::ImplId(id) => Some(id.into()),
        ItemContainerId::TraitId(id) => Some(id.into()),
        // Module- or extern-block-level items inherit no generics.
        ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None,
    }
}
/// Returns a closure (for use in iterator `map`s) that qualifies a local
/// type/const param with `it.def` as parent and pairs it with a data ref.
fn from_toc_id<'a>(
    it: &'a Generics,
) -> impl Fn(
    (LocalTypeOrConstParamId, &'a TypeOrConstParamData),
) -> (GenericParamId, GenericParamDataRef<'a>) {
    move |(local_id, data)| {
        let id = TypeOrConstParamId { parent: it.def, local_id };
        match data {
            TypeOrConstParamData::TypeParamData(data) => (
                GenericParamId::TypeParamId(TypeParamId::from_unchecked(id)),
                GenericParamDataRef::TypeParamData(data),
            ),
            TypeOrConstParamData::ConstParamData(data) => (
                GenericParamId::ConstParamId(ConstParamId::from_unchecked(id)),
                GenericParamDataRef::ConstParamData(data),
            ),
        }
    }
}
/// Returns a closure (for use in iterator `map`s) that qualifies a local
/// lifetime param with `it.def` as parent and pairs it with a data ref.
fn from_lt_id<'a>(
    it: &'a Generics,
) -> impl Fn((LocalLifetimeParamId, &'a LifetimeParamData)) -> (GenericParamId, GenericParamDataRef<'a>)
{
    move |(local_id, data)| {
        let id = LifetimeParamId { parent: it.def, local_id };
        (GenericParamId::LifetimeParamId(id), GenericParamDataRef::LifetimeParamData(data))
    }
}

View file

@ -49,6 +49,7 @@ use hir_def::{
use hir_expand::name::{name, Name}; use hir_expand::name::{name, Name};
use indexmap::IndexSet; use indexmap::IndexSet;
use la_arena::{ArenaMap, Entry}; use la_arena::{ArenaMap, Entry};
use once_cell::unsync::OnceCell;
use rustc_hash::{FxHashMap, FxHashSet}; use rustc_hash::{FxHashMap, FxHashSet};
use stdx::{always, never}; use stdx::{always, never};
use triomphe::Arc; use triomphe::Arc;
@ -56,14 +57,15 @@ use triomphe::Arc;
use crate::{ use crate::{
db::HirDatabase, db::HirDatabase,
error_lifetime, fold_tys, error_lifetime, fold_tys,
generics::Generics,
infer::{coerce::CoerceMany, unify::InferenceTable}, infer::{coerce::CoerceMany, unify::InferenceTable},
lower::ImplTraitLoweringMode, lower::ImplTraitLoweringMode,
to_assoc_type_id, to_assoc_type_id,
traits::FnTrait, traits::FnTrait,
utils::{Generics, InTypeConstIdMetadata, UnevaluatedConstEvaluatorFolder}, utils::{InTypeConstIdMetadata, UnevaluatedConstEvaluatorFolder},
AliasEq, AliasTy, Binders, ClosureId, Const, DomainGoal, GenericArg, Goal, ImplTraitId, AliasEq, AliasTy, Binders, ClosureId, Const, DomainGoal, GenericArg, Goal, ImplTraitId,
ImplTraitIdx, InEnvironment, Interner, Lifetime, OpaqueTyId, ProjectionTy, Substitution, ImplTraitIdx, InEnvironment, Interner, Lifetime, OpaqueTyId, ParamLoweringMode, ProjectionTy,
TraitEnvironment, Ty, TyBuilder, TyExt, Substitution, TraitEnvironment, Ty, TyBuilder, TyExt,
}; };
// This lint has a false positive here. See the link below for details. // This lint has a false positive here. See the link below for details.
@ -79,7 +81,7 @@ pub(crate) use closure::{CaptureKind, CapturedItem, CapturedItemWithoutTy};
/// The entry point of type inference. /// The entry point of type inference.
pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> { pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {
let _p = tracing::span!(tracing::Level::INFO, "infer_query").entered(); let _p = tracing::info_span!("infer_query").entered();
let resolver = def.resolver(db.upcast()); let resolver = def.resolver(db.upcast());
let body = db.body(def); let body = db.body(def);
let mut ctx = InferenceContext::new(db, def, &body, resolver); let mut ctx = InferenceContext::new(db, def, &body, resolver);
@ -526,6 +528,7 @@ pub(crate) struct InferenceContext<'a> {
pub(crate) owner: DefWithBodyId, pub(crate) owner: DefWithBodyId,
pub(crate) body: &'a Body, pub(crate) body: &'a Body,
pub(crate) resolver: Resolver, pub(crate) resolver: Resolver,
generics: OnceCell<Option<Generics>>,
table: unify::InferenceTable<'a>, table: unify::InferenceTable<'a>,
/// The traits in scope, disregarding block modules. This is used for caching purposes. /// The traits in scope, disregarding block modules. This is used for caching purposes.
traits_in_scope: FxHashSet<TraitId>, traits_in_scope: FxHashSet<TraitId>,
@ -611,6 +614,7 @@ impl<'a> InferenceContext<'a> {
) -> Self { ) -> Self {
let trait_env = db.trait_environment_for_body(owner); let trait_env = db.trait_environment_for_body(owner);
InferenceContext { InferenceContext {
generics: OnceCell::new(),
result: InferenceResult::default(), result: InferenceResult::default(),
table: unify::InferenceTable::new(db, trait_env), table: unify::InferenceTable::new(db, trait_env),
tuple_field_accesses_rev: Default::default(), tuple_field_accesses_rev: Default::default(),
@ -632,8 +636,14 @@ impl<'a> InferenceContext<'a> {
} }
} }
pub(crate) fn generics(&self) -> Option<Generics> { pub(crate) fn generics(&self) -> Option<&Generics> {
Some(crate::utils::generics(self.db.upcast(), self.resolver.generic_def()?)) self.generics
.get_or_init(|| {
self.resolver
.generic_def()
.map(|def| crate::generics::generics(self.db.upcast(), def))
})
.as_ref()
} }
// FIXME: This function should be private in module. It is currently only used in the consteval, since we need // FIXME: This function should be private in module. It is currently only used in the consteval, since we need
@ -781,7 +791,8 @@ impl<'a> InferenceContext<'a> {
fn collect_fn(&mut self, func: FunctionId) { fn collect_fn(&mut self, func: FunctionId) {
let data = self.db.function_data(func); let data = self.db.function_data(func);
let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver, func.into()) let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver, self.owner.into())
.with_type_param_mode(ParamLoweringMode::Placeholder)
.with_impl_trait_mode(ImplTraitLoweringMode::Param); .with_impl_trait_mode(ImplTraitLoweringMode::Param);
let mut param_tys = let mut param_tys =
data.params.iter().map(|type_ref| ctx.lower_ty(type_ref)).collect::<Vec<_>>(); data.params.iter().map(|type_ref| ctx.lower_ty(type_ref)).collect::<Vec<_>>();
@ -816,6 +827,7 @@ impl<'a> InferenceContext<'a> {
let return_ty = &*data.ret_type; let return_ty = &*data.ret_type;
let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver, self.owner.into()) let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver, self.owner.into())
.with_type_param_mode(ParamLoweringMode::Placeholder)
.with_impl_trait_mode(ImplTraitLoweringMode::Opaque); .with_impl_trait_mode(ImplTraitLoweringMode::Opaque);
let return_ty = ctx.lower_ty(return_ty); let return_ty = ctx.lower_ty(return_ty);
let return_ty = self.insert_type_vars(return_ty); let return_ty = self.insert_type_vars(return_ty);
@ -1263,7 +1275,7 @@ impl<'a> InferenceContext<'a> {
forbid_unresolved_segments((ty, Some(var.into())), unresolved) forbid_unresolved_segments((ty, Some(var.into())), unresolved)
} }
TypeNs::SelfType(impl_id) => { TypeNs::SelfType(impl_id) => {
let generics = crate::utils::generics(self.db.upcast(), impl_id.into()); let generics = crate::generics::generics(self.db.upcast(), impl_id.into());
let substs = generics.placeholder_subst(self.db); let substs = generics.placeholder_subst(self.db);
let mut ty = self.db.impl_self_ty(impl_id).substitute(Interner, &substs); let mut ty = self.db.impl_self_ty(impl_id).substitute(Interner, &substs);

View file

@ -22,11 +22,13 @@ use stdx::never;
use crate::{ use crate::{
db::{HirDatabase, InternedClosure}, db::{HirDatabase, InternedClosure},
error_lifetime, from_chalk_trait_id, from_placeholder_idx, make_binders, error_lifetime, from_chalk_trait_id, from_placeholder_idx,
generics::Generics,
make_binders,
mir::{BorrowKind, MirSpan, MutBorrowKind, ProjectionElem}, mir::{BorrowKind, MirSpan, MutBorrowKind, ProjectionElem},
to_chalk_trait_id, to_chalk_trait_id,
traits::FnTrait, traits::FnTrait,
utils::{self, elaborate_clause_supertraits, Generics}, utils::{self, elaborate_clause_supertraits},
Adjust, Adjustment, AliasEq, AliasTy, Binders, BindingMode, ChalkTraitId, ClosureId, DynTy, Adjust, Adjustment, AliasEq, AliasTy, Binders, BindingMode, ChalkTraitId, ClosureId, DynTy,
DynTyExt, FnAbi, FnPointer, FnSig, Interner, OpaqueTy, ProjectionTyExt, Substitution, Ty, DynTyExt, FnAbi, FnPointer, FnSig, Interner, OpaqueTy, ProjectionTyExt, Substitution, Ty,
TyExt, WhereClause, TyExt, WhereClause,
@ -337,7 +339,7 @@ impl CapturedItemWithoutTy {
fn replace_placeholder_with_binder(ctx: &mut InferenceContext<'_>, ty: Ty) -> Binders<Ty> { fn replace_placeholder_with_binder(ctx: &mut InferenceContext<'_>, ty: Ty) -> Binders<Ty> {
struct Filler<'a> { struct Filler<'a> {
db: &'a dyn HirDatabase, db: &'a dyn HirDatabase,
generics: Generics, generics: &'a Generics,
} }
impl FallibleTypeFolder<Interner> for Filler<'_> { impl FallibleTypeFolder<Interner> for Filler<'_> {
type Error = (); type Error = ();
@ -380,7 +382,7 @@ impl CapturedItemWithoutTy {
}; };
let filler = &mut Filler { db: ctx.db, generics }; let filler = &mut Filler { db: ctx.db, generics };
let result = ty.clone().try_fold_with(filler, DebruijnIndex::INNERMOST).unwrap_or(ty); let result = ty.clone().try_fold_with(filler, DebruijnIndex::INNERMOST).unwrap_or(ty);
make_binders(ctx.db, &filler.generics, result) make_binders(ctx.db, filler.generics, result)
} }
} }
} }

View file

@ -24,6 +24,7 @@ use crate::{
consteval, consteval,
db::{InternedClosure, InternedCoroutine}, db::{InternedClosure, InternedCoroutine},
error_lifetime, error_lifetime,
generics::{generics, Generics},
infer::{ infer::{
coerce::{CoerceMany, CoercionCause}, coerce::{CoerceMany, CoercionCause},
find_continuable, find_continuable,
@ -39,7 +40,6 @@ use crate::{
primitive::{self, UintTy}, primitive::{self, UintTy},
static_lifetime, to_chalk_trait_id, static_lifetime, to_chalk_trait_id,
traits::FnTrait, traits::FnTrait,
utils::{generics, Generics},
Adjust, Adjustment, AdtId, AutoBorrow, Binders, CallableDefId, FnAbi, FnPointer, FnSig, Adjust, Adjustment, AdtId, AutoBorrow, Binders, CallableDefId, FnAbi, FnPointer, FnSig,
FnSubst, Interner, Rawness, Scalar, Substitution, TraitEnvironment, TraitRef, Ty, TyBuilder, FnSubst, Interner, Rawness, Scalar, Substitution, TraitEnvironment, TraitRef, Ty, TyBuilder,
TyExt, TyKind, TyExt, TyKind,
@ -1830,13 +1830,13 @@ impl InferenceContext<'_> {
) -> Substitution { ) -> Substitution {
let ( let (
parent_params, parent_params,
self_params, has_self_param,
type_params, type_params,
const_params, const_params,
impl_trait_params, impl_trait_params,
lifetime_params, lifetime_params,
) = def_generics.provenance_split(); ) = def_generics.provenance_split();
assert_eq!(self_params, 0); // method shouldn't have another Self param assert!(!has_self_param); // method shouldn't have another Self param
let total_len = let total_len =
parent_params + type_params + const_params + impl_trait_params + lifetime_params; parent_params + type_params + const_params + impl_trait_params + lifetime_params;
let mut substs = Vec::with_capacity(total_len); let mut substs = Vec::with_capacity(total_len);
@ -1844,13 +1844,11 @@ impl InferenceContext<'_> {
// handle provided arguments // handle provided arguments
if let Some(generic_args) = generic_args { if let Some(generic_args) = generic_args {
// if args are provided, it should be all of them, but we can't rely on that // if args are provided, it should be all of them, but we can't rely on that
for (arg, kind_id) in generic_args let self_params = type_params + const_params + lifetime_params;
.args for (arg, kind_id) in
.iter() generic_args.args.iter().zip(def_generics.iter_self_id()).take(self_params)
.take(type_params + const_params + lifetime_params)
.zip(def_generics.iter_id())
{ {
if let Some(g) = generic_arg_to_chalk( let arg = generic_arg_to_chalk(
self.db, self.db,
kind_id, kind_id,
arg, arg,
@ -1869,9 +1867,8 @@ impl InferenceContext<'_> {
) )
}, },
|this, lt_ref| this.make_lifetime(lt_ref), |this, lt_ref| this.make_lifetime(lt_ref),
) { );
substs.push(g); substs.push(arg);
}
} }
}; };

View file

@ -12,11 +12,10 @@ use stdx::never;
use crate::{ use crate::{
builder::ParamKind, builder::ParamKind,
consteval, error_lifetime, consteval, error_lifetime,
generics::generics,
method_resolution::{self, VisibleFromModule}, method_resolution::{self, VisibleFromModule},
to_chalk_trait_id, to_chalk_trait_id, InferenceDiagnostic, Interner, Substitution, TraitRef, TraitRefExt, Ty,
utils::generics, TyBuilder, TyExt, TyKind, ValueTyDefId,
InferenceDiagnostic, Interner, Substitution, TraitRef, TraitRefExt, Ty, TyBuilder, TyExt,
TyKind, ValueTyDefId,
}; };
use super::{ExprOrPatId, InferenceContext}; use super::{ExprOrPatId, InferenceContext};
@ -64,7 +63,7 @@ impl InferenceContext<'_> {
it.into() it.into()
} }
ValueNs::ImplSelf(impl_id) => { ValueNs::ImplSelf(impl_id) => {
let generics = crate::utils::generics(self.db.upcast(), impl_id.into()); let generics = crate::generics::generics(self.db.upcast(), impl_id.into());
let substs = generics.placeholder_subst(self.db); let substs = generics.placeholder_subst(self.db);
let ty = self.db.impl_self_ty(impl_id).substitute(Interner, &substs); let ty = self.db.impl_self_ty(impl_id).substitute(Interner, &substs);
if let Some((AdtId::StructId(struct_id), substs)) = ty.as_adt() { if let Some((AdtId::StructId(struct_id), substs)) = ty.as_adt() {

View file

@ -613,8 +613,7 @@ impl<'a> InferenceTable<'a> {
} }
pub(crate) fn resolve_obligations_as_possible(&mut self) { pub(crate) fn resolve_obligations_as_possible(&mut self) {
let _span = let _span = tracing::info_span!("resolve_obligations_as_possible").entered();
tracing::span!(tracing::Level::INFO, "resolve_obligations_as_possible").entered();
let mut changed = true; let mut changed = true;
let mut obligations = mem::take(&mut self.resolve_obligations_buffer); let mut obligations = mem::take(&mut self.resolve_obligations_buffer);
while mem::take(&mut changed) { while mem::take(&mut changed) {

View file

@ -15,7 +15,7 @@ use crate::{
// FIXME: Turn this into a query, it can be quite slow // FIXME: Turn this into a query, it can be quite slow
/// Checks whether a type is visibly uninhabited from a particular module. /// Checks whether a type is visibly uninhabited from a particular module.
pub(crate) fn is_ty_uninhabited_from(db: &dyn HirDatabase, ty: &Ty, target_mod: ModuleId) -> bool { pub(crate) fn is_ty_uninhabited_from(db: &dyn HirDatabase, ty: &Ty, target_mod: ModuleId) -> bool {
let _p = tracing::span!(tracing::Level::INFO, "is_ty_uninhabited_from", ?ty).entered(); let _p = tracing::info_span!("is_ty_uninhabited_from", ?ty).entered();
let mut uninhabited_from = let mut uninhabited_from =
UninhabitedFrom { target_mod, db, max_depth: 500, recursive_ty: FxHashSet::default() }; UninhabitedFrom { target_mod, db, max_depth: 500, recursive_ty: FxHashSet::default() };
let inhabitedness = ty.visit_with(&mut uninhabited_from, DebruijnIndex::INNERMOST); let inhabitedness = ty.visit_with(&mut uninhabited_from, DebruijnIndex::INNERMOST);
@ -30,7 +30,7 @@ pub(crate) fn is_enum_variant_uninhabited_from(
subst: &Substitution, subst: &Substitution,
target_mod: ModuleId, target_mod: ModuleId,
) -> bool { ) -> bool {
let _p = tracing::span!(tracing::Level::INFO, "is_enum_variant_uninhabited_from").entered(); let _p = tracing::info_span!("is_enum_variant_uninhabited_from").entered();
let mut uninhabited_from = let mut uninhabited_from =
UninhabitedFrom { target_mod, db, max_depth: 500, recursive_ty: FxHashSet::default() }; UninhabitedFrom { target_mod, db, max_depth: 500, recursive_ty: FxHashSet::default() };

View file

@ -22,6 +22,7 @@ extern crate ra_ap_rustc_pattern_analysis as rustc_pattern_analysis;
mod builder; mod builder;
mod chalk_db; mod chalk_db;
mod chalk_ext; mod chalk_ext;
mod generics;
mod infer; mod infer;
mod inhabitedness; mod inhabitedness;
mod interner; mod interner;
@ -52,7 +53,7 @@ use std::{
hash::{BuildHasherDefault, Hash}, hash::{BuildHasherDefault, Hash},
}; };
use base_db::salsa::impl_intern_value_trivial; use base_db::salsa::InternValueTrivial;
use chalk_ir::{ use chalk_ir::{
fold::{Shift, TypeFoldable}, fold::{Shift, TypeFoldable},
interner::HasInterner, interner::HasInterner,
@ -67,11 +68,10 @@ use rustc_hash::{FxHashMap, FxHashSet};
use syntax::ast::{make, ConstArg}; use syntax::ast::{make, ConstArg};
use traits::FnTrait; use traits::FnTrait;
use triomphe::Arc; use triomphe::Arc;
use utils::Generics;
use crate::{ use crate::{
consteval::unknown_const, db::HirDatabase, display::HirDisplay, infer::unify::InferenceTable, consteval::unknown_const, db::HirDatabase, display::HirDisplay, generics::Generics,
utils::generics, infer::unify::InferenceTable,
}; };
pub use autoderef::autoderef; pub use autoderef::autoderef;
@ -289,7 +289,7 @@ impl Hash for ConstScalar {
/// Return an index of a parameter in the generic type parameter list by it's id. /// Return an index of a parameter in the generic type parameter list by it's id.
pub fn param_idx(db: &dyn HirDatabase, id: TypeOrConstParamId) -> Option<usize> { pub fn param_idx(db: &dyn HirDatabase, id: TypeOrConstParamId) -> Option<usize> {
generics(db.upcast(), id.parent).type_or_const_param_idx(id) generics::generics(db.upcast(), id.parent).type_or_const_param_idx(id)
} }
pub(crate) fn wrap_empty_binders<T>(value: T) -> Binders<T> pub(crate) fn wrap_empty_binders<T>(value: T) -> Binders<T>
@ -330,18 +330,15 @@ pub(crate) fn make_single_type_binders<T: HasInterner<Interner = Interner>>(
) )
} }
pub(crate) fn make_binders_with_count<T: HasInterner<Interner = Interner>>( pub(crate) fn make_binders<T: HasInterner<Interner = Interner>>(
db: &dyn HirDatabase, db: &dyn HirDatabase,
count: usize,
generics: &Generics, generics: &Generics,
value: T, value: T,
) -> Binders<T> { ) -> Binders<T> {
let it = generics.iter_id().take(count);
Binders::new( Binders::new(
VariableKinds::from_iter( VariableKinds::from_iter(
Interner, Interner,
it.map(|x| match x { generics.iter_id().map(|x| match x {
hir_def::GenericParamId::ConstParamId(id) => { hir_def::GenericParamId::ConstParamId(id) => {
chalk_ir::VariableKind::Const(db.const_param_ty(id)) chalk_ir::VariableKind::Const(db.const_param_ty(id))
} }
@ -355,14 +352,6 @@ pub(crate) fn make_binders_with_count<T: HasInterner<Interner = Interner>>(
) )
} }
pub(crate) fn make_binders<T: HasInterner<Interner = Interner>>(
db: &dyn HirDatabase,
generics: &Generics,
value: T,
) -> Binders<T> {
make_binders_with_count(db, usize::MAX, generics, value)
}
// FIXME: get rid of this, just replace it by FnPointer // FIXME: get rid of this, just replace it by FnPointer
/// A function signature as seen by type inference: Several parameter types and /// A function signature as seen by type inference: Several parameter types and
/// one return type. /// one return type.
@ -524,14 +513,16 @@ pub type PolyFnSig = Binders<CallableSig>;
impl CallableSig { impl CallableSig {
pub fn from_params_and_return( pub fn from_params_and_return(
mut params: Vec<Ty>, params: impl ExactSizeIterator<Item = Ty>,
ret: Ty, ret: Ty,
is_varargs: bool, is_varargs: bool,
safety: Safety, safety: Safety,
abi: FnAbi, abi: FnAbi,
) -> CallableSig { ) -> CallableSig {
params.push(ret); let mut params_and_return = Vec::with_capacity(params.len() + 1);
CallableSig { params_and_return: params.into(), is_varargs, safety, abi } params_and_return.extend(params);
params_and_return.push(ret);
CallableSig { params_and_return: params_and_return.into(), is_varargs, safety, abi }
} }
pub fn from_def(db: &dyn HirDatabase, def: FnDefId, substs: &Substitution) -> CallableSig { pub fn from_def(db: &dyn HirDatabase, def: FnDefId, substs: &Substitution) -> CallableSig {
@ -606,7 +597,7 @@ pub enum ImplTraitId {
AssociatedTypeImplTrait(hir_def::TypeAliasId, ImplTraitIdx), AssociatedTypeImplTrait(hir_def::TypeAliasId, ImplTraitIdx),
AsyncBlockTypeImplTrait(hir_def::DefWithBodyId, ExprId), AsyncBlockTypeImplTrait(hir_def::DefWithBodyId, ExprId),
} }
impl_intern_value_trivial!(ImplTraitId); impl InternValueTrivial for ImplTraitId {}
#[derive(PartialEq, Eq, Debug, Hash)] #[derive(PartialEq, Eq, Debug, Hash)]
pub struct ImplTraits { pub struct ImplTraits {
@ -946,8 +937,7 @@ pub fn callable_sig_from_fn_trait(
.as_tuple()? .as_tuple()?
.iter(Interner) .iter(Interner)
.map(|it| it.assert_ty_ref(Interner)) .map(|it| it.assert_ty_ref(Interner))
.cloned() .cloned();
.collect();
return Some(( return Some((
fn_x, fn_x,

View file

@ -8,10 +8,11 @@
use std::{ use std::{
cell::{Cell, RefCell, RefMut}, cell::{Cell, RefCell, RefMut},
iter, iter,
ops::{self, Not as _},
}; };
use base_db::{ use base_db::{
salsa::{impl_intern_value_trivial, Cycle}, salsa::{Cycle, InternValueTrivial},
CrateId, CrateId,
}; };
use chalk_ir::{ use chalk_ir::{
@ -45,7 +46,9 @@ use hir_def::{
use hir_expand::{name::Name, ExpandResult}; use hir_expand::{name::Name, ExpandResult};
use intern::Interned; use intern::Interned;
use la_arena::{Arena, ArenaMap}; use la_arena::{Arena, ArenaMap};
use once_cell::unsync::OnceCell;
use rustc_hash::FxHashSet; use rustc_hash::FxHashSet;
use rustc_pattern_analysis::Captures;
use smallvec::SmallVec; use smallvec::SmallVec;
use stdx::{impl_from, never}; use stdx::{impl_from, never};
use syntax::ast; use syntax::ast;
@ -58,12 +61,13 @@ use crate::{
unknown_const_as_generic, unknown_const_as_generic,
}, },
db::HirDatabase, db::HirDatabase,
error_lifetime, make_binders, error_lifetime,
generics::{generics, Generics},
make_binders,
mapping::{from_chalk_trait_id, lt_to_placeholder_idx, ToChalk}, mapping::{from_chalk_trait_id, lt_to_placeholder_idx, ToChalk},
static_lifetime, to_assoc_type_id, to_chalk_trait_id, to_placeholder_idx, static_lifetime, to_assoc_type_id, to_chalk_trait_id, to_placeholder_idx,
utils::{ utils::{
self, all_super_trait_refs, associated_type_by_name_including_super_traits, generics, all_super_trait_refs, associated_type_by_name_including_super_traits, InTypeConstIdMetadata,
Generics, InTypeConstIdMetadata,
}, },
AliasEq, AliasTy, Binders, BoundVar, CallableSig, Const, ConstScalar, DebruijnIndex, DynTy, AliasEq, AliasTy, Binders, BoundVar, CallableSig, Const, ConstScalar, DebruijnIndex, DynTy,
FnAbi, FnPointer, FnSig, FnSubst, ImplTrait, ImplTraitId, ImplTraits, Interner, Lifetime, FnAbi, FnPointer, FnSig, FnSubst, ImplTrait, ImplTraitId, ImplTraits, Interner, Lifetime,
@ -121,6 +125,7 @@ impl ImplTraitLoweringState {
pub struct TyLoweringContext<'a> { pub struct TyLoweringContext<'a> {
pub db: &'a dyn HirDatabase, pub db: &'a dyn HirDatabase,
resolver: &'a Resolver, resolver: &'a Resolver,
generics: OnceCell<Option<Generics>>,
in_binders: DebruijnIndex, in_binders: DebruijnIndex,
// FIXME: Should not be an `Option` but `Resolver` currently does not return owners in all cases // FIXME: Should not be an `Option` but `Resolver` currently does not return owners in all cases
// where expected // where expected
@ -152,6 +157,7 @@ impl<'a> TyLoweringContext<'a> {
Self { Self {
db, db,
resolver, resolver,
generics: OnceCell::new(),
owner, owner,
in_binders, in_binders,
impl_trait_mode, impl_trait_mode,
@ -174,6 +180,7 @@ impl<'a> TyLoweringContext<'a> {
impl_trait_mode, impl_trait_mode,
expander: RefCell::new(expander), expander: RefCell::new(expander),
unsized_types: RefCell::new(unsized_types), unsized_types: RefCell::new(unsized_types),
generics: self.generics.clone(),
..*self ..*self
}; };
let result = f(&new_ctx); let result = f(&new_ctx);
@ -245,8 +252,10 @@ impl<'a> TyLoweringContext<'a> {
) )
} }
fn generics(&self) -> Option<Generics> { fn generics(&self) -> Option<&Generics> {
Some(generics(self.db.upcast(), self.resolver.generic_def()?)) self.generics
.get_or_init(|| self.resolver.generic_def().map(|def| generics(self.db.upcast(), def)))
.as_ref()
} }
pub fn lower_ty_ext(&self, type_ref: &TypeRef) -> (Ty, Option<TypeNs>) { pub fn lower_ty_ext(&self, type_ref: &TypeRef) -> (Ty, Option<TypeNs>) {
@ -374,7 +383,7 @@ impl<'a> TyLoweringContext<'a> {
counter.set(idx + count_impl_traits(type_ref) as u16); counter.set(idx + count_impl_traits(type_ref) as u16);
let ( let (
_parent_params, _parent_params,
self_params, self_param,
type_params, type_params,
const_params, const_params,
_impl_trait_params, _impl_trait_params,
@ -385,7 +394,7 @@ impl<'a> TyLoweringContext<'a> {
.provenance_split(); .provenance_split();
TyKind::BoundVar(BoundVar::new( TyKind::BoundVar(BoundVar::new(
self.in_binders, self.in_binders,
idx as usize + self_params + type_params + const_params, idx as usize + self_param as usize + type_params + const_params,
)) ))
.intern(Interner) .intern(Interner)
} }
@ -416,9 +425,9 @@ impl<'a> TyLoweringContext<'a> {
}; };
let ty = { let ty = {
let macro_call = macro_call.to_node(self.db.upcast()); let macro_call = macro_call.to_node(self.db.upcast());
let resolver = |path| { let resolver = |path: &_| {
self.resolver self.resolver
.resolve_path_as_macro(self.db.upcast(), &path, Some(MacroSubNs::Bang)) .resolve_path_as_macro(self.db.upcast(), path, Some(MacroSubNs::Bang))
.map(|(it, _)| it) .map(|(it, _)| it)
}; };
match expander.enter_expand::<ast::Type>(self.db.upcast(), macro_call, resolver) match expander.enter_expand::<ast::Type>(self.db.upcast(), macro_call, resolver)
@ -705,7 +714,8 @@ impl<'a> TyLoweringContext<'a> {
None, None,
); );
let len_self = utils::generics(self.db.upcast(), associated_ty.into()).len_self(); let len_self =
crate::generics::generics(self.db.upcast(), associated_ty.into()).len_self();
let substs = Substitution::from_iter( let substs = Substitution::from_iter(
Interner, Interner,
@ -815,14 +825,14 @@ impl<'a> TyLoweringContext<'a> {
let def_generics = generics(self.db.upcast(), def); let def_generics = generics(self.db.upcast(), def);
let ( let (
parent_params, parent_params,
self_params, self_param,
type_params, type_params,
const_params, const_params,
impl_trait_params, impl_trait_params,
lifetime_params, lifetime_params,
) = def_generics.provenance_split(); ) = def_generics.provenance_split();
let item_len = let item_len =
self_params + type_params + const_params + impl_trait_params + lifetime_params; self_param as usize + type_params + const_params + impl_trait_params + lifetime_params;
let total_len = parent_params + item_len; let total_len = parent_params + item_len;
let ty_error = TyKind::Error.intern(Interner).cast(Interner); let ty_error = TyKind::Error.intern(Interner).cast(Interner);
@ -830,18 +840,16 @@ impl<'a> TyLoweringContext<'a> {
let mut def_generic_iter = def_generics.iter_id(); let mut def_generic_iter = def_generics.iter_id();
let fill_self_params = || { let fill_self_params = || {
for x in explicit_self_ty if self_param {
.into_iter() let self_ty =
.map(|x| x.cast(Interner)) explicit_self_ty.map(|x| x.cast(Interner)).unwrap_or_else(|| ty_error.clone());
.chain(iter::repeat(ty_error.clone()))
.take(self_params)
{
if let Some(id) = def_generic_iter.next() { if let Some(id) = def_generic_iter.next() {
assert!(matches!( assert!(matches!(
id, id,
GenericParamId::TypeParamId(_) | GenericParamId::LifetimeParamId(_) GenericParamId::TypeParamId(_) | GenericParamId::LifetimeParamId(_)
)); ));
substs.push(x); substs.push(self_ty);
} }
} }
}; };
@ -852,11 +860,11 @@ impl<'a> TyLoweringContext<'a> {
fill_self_params(); fill_self_params();
} }
let expected_num = if generic_args.has_self_type { let expected_num = if generic_args.has_self_type {
self_params + type_params + const_params self_param as usize + type_params + const_params
} else { } else {
type_params + const_params type_params + const_params
}; };
let skip = if generic_args.has_self_type && self_params == 0 { 1 } else { 0 }; let skip = if generic_args.has_self_type && !self_param { 1 } else { 0 };
// if args are provided, it should be all of them, but we can't rely on that // if args are provided, it should be all of them, but we can't rely on that
for arg in generic_args for arg in generic_args
.args .args
@ -866,7 +874,7 @@ impl<'a> TyLoweringContext<'a> {
.take(expected_num) .take(expected_num)
{ {
if let Some(id) = def_generic_iter.next() { if let Some(id) = def_generic_iter.next() {
if let Some(x) = generic_arg_to_chalk( let arg = generic_arg_to_chalk(
self.db, self.db,
id, id,
arg, arg,
@ -874,13 +882,9 @@ impl<'a> TyLoweringContext<'a> {
|_, type_ref| self.lower_ty(type_ref), |_, type_ref| self.lower_ty(type_ref),
|_, const_ref, ty| self.lower_const(const_ref, ty), |_, const_ref, ty| self.lower_const(const_ref, ty),
|_, lifetime_ref| self.lower_lifetime(lifetime_ref), |_, lifetime_ref| self.lower_lifetime(lifetime_ref),
) { );
had_explicit_args = true; had_explicit_args = true;
substs.push(x); substs.push(arg);
} else {
// we just filtered them out
never!("Unexpected lifetime argument");
}
} }
} }
@ -893,7 +897,7 @@ impl<'a> TyLoweringContext<'a> {
// Taking into the fact that def_generic_iter will always have lifetimes at the end // Taking into the fact that def_generic_iter will always have lifetimes at the end
// Should have some test cases tho to test this behaviour more properly // Should have some test cases tho to test this behaviour more properly
if let Some(id) = def_generic_iter.next() { if let Some(id) = def_generic_iter.next() {
if let Some(x) = generic_arg_to_chalk( let arg = generic_arg_to_chalk(
self.db, self.db,
id, id,
arg, arg,
@ -901,13 +905,9 @@ impl<'a> TyLoweringContext<'a> {
|_, type_ref| self.lower_ty(type_ref), |_, type_ref| self.lower_ty(type_ref),
|_, const_ref, ty| self.lower_const(const_ref, ty), |_, const_ref, ty| self.lower_const(const_ref, ty),
|_, lifetime_ref| self.lower_lifetime(lifetime_ref), |_, lifetime_ref| self.lower_lifetime(lifetime_ref),
) { );
had_explicit_args = true; had_explicit_args = true;
substs.push(x); substs.push(arg);
} else {
// Never return a None explicitly
never!("Unexpected None by generic_arg_to_chalk");
}
} }
} }
} else { } else {
@ -1176,7 +1176,7 @@ impl<'a> TyLoweringContext<'a> {
let ty = if let Some(target_param_idx) = target_param_idx { let ty = if let Some(target_param_idx) = target_param_idx {
let mut counter = 0; let mut counter = 0;
let generics = self.generics().expect("generics in scope"); let generics = self.generics().expect("generics in scope");
for (idx, data) in generics.params.type_or_consts.iter() { for (idx, data) in generics.iter_self_type_or_consts() {
// Count the number of `impl Trait` things that appear before // Count the number of `impl Trait` things that appear before
// the target of our `bound`. // the target of our `bound`.
// Our counter within `impl_trait_mode` should be that number // Our counter within `impl_trait_mode` should be that number
@ -1478,7 +1478,7 @@ fn named_associated_type_shorthand_candidates<R>(
// Handle `Self::Type` referring to own associated type in trait definitions // Handle `Self::Type` referring to own associated type in trait definitions
if let GenericDefId::TraitId(trait_id) = param_id.parent() { if let GenericDefId::TraitId(trait_id) = param_id.parent() {
let trait_generics = generics(db.upcast(), trait_id.into()); let trait_generics = generics(db.upcast(), trait_id.into());
if trait_generics.params[param_id.local_id()].is_trait_self() { if trait_generics[param_id.local_id()].is_trait_self() {
let def_generics = generics(db.upcast(), def); let def_generics = generics(db.upcast(), def);
let starting_idx = match def { let starting_idx = match def {
GenericDefId::TraitId(_) => 0, GenericDefId::TraitId(_) => 0,
@ -1596,14 +1596,20 @@ pub(crate) fn generic_predicates_for_param_query(
.collect(); .collect();
let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST); let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
let explicitly_unsized_tys = ctx.unsized_types.into_inner(); if !subst.is_empty(Interner) {
if let Some(implicitly_sized_predicates) = let explicitly_unsized_tys = ctx.unsized_types.into_inner();
implicitly_sized_clauses(db, param_id.parent, &explicitly_unsized_tys, &subst, &resolver) if let Some(implicitly_sized_predicates) = implicitly_sized_clauses(
{ db,
predicates.extend( param_id.parent,
implicitly_sized_predicates &explicitly_unsized_tys,
.map(|p| make_binders(db, &generics, crate::wrap_empty_binders(p))), &subst,
); &resolver,
) {
predicates.extend(
implicitly_sized_predicates
.map(|p| make_binders(db, &generics, crate::wrap_empty_binders(p))),
);
};
} }
predicates.into() predicates.into()
} }
@ -1666,14 +1672,17 @@ pub(crate) fn trait_environment_query(
} }
let subst = generics(db.upcast(), def).placeholder_subst(db); let subst = generics(db.upcast(), def).placeholder_subst(db);
let explicitly_unsized_tys = ctx.unsized_types.into_inner(); if !subst.is_empty(Interner) {
if let Some(implicitly_sized_clauses) = let explicitly_unsized_tys = ctx.unsized_types.into_inner();
implicitly_sized_clauses(db, def, &explicitly_unsized_tys, &subst, &resolver) if let Some(implicitly_sized_clauses) =
{ implicitly_sized_clauses(db, def, &explicitly_unsized_tys, &subst, &resolver)
clauses.extend( {
implicitly_sized_clauses clauses.extend(
.map(|pred| pred.cast::<ProgramClause>(Interner).into_from_env_clause(Interner)), implicitly_sized_clauses.map(|pred| {
); pred.cast::<ProgramClause>(Interner).into_from_env_clause(Interner)
}),
);
};
} }
let env = chalk_ir::Environment::new(Interner).add_clauses(Interner, clauses); let env = chalk_ir::Environment::new(Interner).add_clauses(Interner, clauses);
@ -1681,20 +1690,32 @@ pub(crate) fn trait_environment_query(
TraitEnvironment::new(resolver.krate(), None, traits_in_scope.into_boxed_slice(), env) TraitEnvironment::new(resolver.krate(), None, traits_in_scope.into_boxed_slice(), env)
} }
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct GenericPredicates(Option<Arc<[Binders<QuantifiedWhereClause>]>>);
impl ops::Deref for GenericPredicates {
type Target = [Binders<crate::QuantifiedWhereClause>];
fn deref(&self) -> &Self::Target {
self.0.as_deref().unwrap_or(&[])
}
}
/// Resolve the where clause(s) of an item with generics. /// Resolve the where clause(s) of an item with generics.
pub(crate) fn generic_predicates_query( pub(crate) fn generic_predicates_query(
db: &dyn HirDatabase, db: &dyn HirDatabase,
def: GenericDefId, def: GenericDefId,
) -> Arc<[Binders<QuantifiedWhereClause>]> { ) -> GenericPredicates {
let resolver = def.resolver(db.upcast()); let resolver = def.resolver(db.upcast());
let ctx = if let GenericDefId::FunctionId(_) = def { let (impl_trait_lowering, param_lowering) = match def {
TyLoweringContext::new(db, &resolver, def.into()) GenericDefId::FunctionId(_) => {
.with_impl_trait_mode(ImplTraitLoweringMode::Variable) (ImplTraitLoweringMode::Variable, ParamLoweringMode::Variable)
.with_type_param_mode(ParamLoweringMode::Variable) }
} else { _ => (ImplTraitLoweringMode::Disallowed, ParamLoweringMode::Variable),
TyLoweringContext::new(db, &resolver, def.into())
.with_type_param_mode(ParamLoweringMode::Variable)
}; };
let ctx = TyLoweringContext::new(db, &resolver, def.into())
.with_impl_trait_mode(impl_trait_lowering)
.with_type_param_mode(param_lowering);
let generics = generics(db.upcast(), def); let generics = generics(db.upcast(), def);
let mut predicates = resolver let mut predicates = resolver
@ -1705,27 +1726,29 @@ pub(crate) fn generic_predicates_query(
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST); let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
let explicitly_unsized_tys = ctx.unsized_types.into_inner(); if !subst.is_empty(Interner) {
if let Some(implicitly_sized_predicates) = let explicitly_unsized_tys = ctx.unsized_types.into_inner();
implicitly_sized_clauses(db, def, &explicitly_unsized_tys, &subst, &resolver) if let Some(implicitly_sized_predicates) =
{ implicitly_sized_clauses(db, def, &explicitly_unsized_tys, &subst, &resolver)
predicates.extend( {
implicitly_sized_predicates predicates.extend(
.map(|p| make_binders(db, &generics, crate::wrap_empty_binders(p))), implicitly_sized_predicates
); .map(|p| make_binders(db, &generics, crate::wrap_empty_binders(p))),
);
};
} }
predicates.into() GenericPredicates(predicates.is_empty().not().then(|| predicates.into()))
} }
/// Generate implicit `: Sized` predicates for all generics that has no `?Sized` bound. /// Generate implicit `: Sized` predicates for all generics that has no `?Sized` bound.
/// Exception is Self of a trait def. /// Exception is Self of a trait def.
fn implicitly_sized_clauses<'a>( fn implicitly_sized_clauses<'a, 'subst: 'a>(
db: &dyn HirDatabase, db: &dyn HirDatabase,
def: GenericDefId, def: GenericDefId,
explicitly_unsized_tys: &'a FxHashSet<Ty>, explicitly_unsized_tys: &'a FxHashSet<Ty>,
substitution: &'a Substitution, substitution: &'subst Substitution,
resolver: &Resolver, resolver: &Resolver,
) -> Option<impl Iterator<Item = WhereClause> + 'a> { ) -> Option<impl Iterator<Item = WhereClause> + Captures<'a> + Captures<'subst>> {
let is_trait_def = matches!(def, GenericDefId::TraitId(..)); let is_trait_def = matches!(def, GenericDefId::TraitId(..));
let generic_args = &substitution.as_slice(Interner)[is_trait_def as usize..]; let generic_args = &substitution.as_slice(Interner)[is_trait_def as usize..];
let sized_trait = db let sized_trait = db
@ -1746,71 +1769,84 @@ fn implicitly_sized_clauses<'a>(
}) })
} }
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct GenericDefaults(Option<Arc<[Binders<crate::GenericArg>]>>);
impl ops::Deref for GenericDefaults {
type Target = [Binders<crate::GenericArg>];
fn deref(&self) -> &Self::Target {
self.0.as_deref().unwrap_or(&[])
}
}
/// Resolve the default type params from generics /// Resolve the default type params from generics
pub(crate) fn generic_defaults_query( pub(crate) fn generic_defaults_query(db: &dyn HirDatabase, def: GenericDefId) -> GenericDefaults {
db: &dyn HirDatabase,
def: GenericDefId,
) -> Arc<[Binders<crate::GenericArg>]> {
let resolver = def.resolver(db.upcast());
let ctx = TyLoweringContext::new(db, &resolver, def.into())
.with_type_param_mode(ParamLoweringMode::Variable);
let generic_params = generics(db.upcast(), def); let generic_params = generics(db.upcast(), def);
if generic_params.len() == 0 {
return GenericDefaults(None);
}
let resolver = def.resolver(db.upcast());
let parent_start_idx = generic_params.len_self(); let parent_start_idx = generic_params.len_self();
let defaults = Arc::from_iter(generic_params.iter().enumerate().map(|(idx, (id, p))| { let ctx = TyLoweringContext::new(db, &resolver, def.into())
match p { .with_impl_trait_mode(ImplTraitLoweringMode::Disallowed)
GenericParamDataRef::TypeParamData(p) => { .with_type_param_mode(ParamLoweringMode::Variable);
let mut ty = GenericDefaults(Some(Arc::from_iter(generic_params.iter().enumerate().map(
p.default.as_ref().map_or(TyKind::Error.intern(Interner), |t| ctx.lower_ty(t)); |(idx, (id, p))| {
// Each default can only refer to previous parameters. match p {
// Type variable default referring to parameter coming GenericParamDataRef::TypeParamData(p) => {
// after it is forbidden (FIXME: report diagnostic) let ty = p.default.as_ref().map_or(TyKind::Error.intern(Interner), |ty| {
ty = fallback_bound_vars(ty, idx, parent_start_idx); // Each default can only refer to previous parameters.
crate::make_binders(db, &generic_params, ty.cast(Interner)) // Type variable default referring to parameter coming
} // after it is forbidden (FIXME: report diagnostic)
GenericParamDataRef::ConstParamData(p) => { fallback_bound_vars(ctx.lower_ty(ty), idx, parent_start_idx)
let GenericParamId::ConstParamId(id) = id else { });
unreachable!("Unexpected lifetime or type argument") crate::make_binders(db, &generic_params, ty.cast(Interner))
}; }
GenericParamDataRef::ConstParamData(p) => {
let GenericParamId::ConstParamId(id) = id else {
unreachable!("Unexpected lifetime or type argument")
};
let mut val = p.default.as_ref().map_or_else( let mut val = p.default.as_ref().map_or_else(
|| unknown_const_as_generic(db.const_param_ty(id)), || unknown_const_as_generic(db.const_param_ty(id)),
|c| { |c| {
let c = ctx.lower_const(c, ctx.lower_ty(&p.ty)); let c = ctx.lower_const(c, ctx.lower_ty(&p.ty));
c.cast(Interner) c.cast(Interner)
}, },
); );
// Each default can only refer to previous parameters, see above. // Each default can only refer to previous parameters, see above.
val = fallback_bound_vars(val, idx, parent_start_idx); val = fallback_bound_vars(val, idx, parent_start_idx);
make_binders(db, &generic_params, val) make_binders(db, &generic_params, val)
}
GenericParamDataRef::LifetimeParamData(_) => {
make_binders(db, &generic_params, error_lifetime().cast(Interner))
}
} }
GenericParamDataRef::LifetimeParamData(_) => { },
make_binders(db, &generic_params, error_lifetime().cast(Interner)) ))))
}
}
}));
defaults
} }
pub(crate) fn generic_defaults_recover( pub(crate) fn generic_defaults_recover(
db: &dyn HirDatabase, db: &dyn HirDatabase,
_cycle: &Cycle, _cycle: &Cycle,
def: &GenericDefId, def: &GenericDefId,
) -> Arc<[Binders<crate::GenericArg>]> { ) -> GenericDefaults {
let generic_params = generics(db.upcast(), *def); let generic_params = generics(db.upcast(), *def);
if generic_params.len() == 0 {
return GenericDefaults(None);
}
// FIXME: this code is not covered in tests. // FIXME: this code is not covered in tests.
// we still need one default per parameter // we still need one default per parameter
let defaults = Arc::from_iter(generic_params.iter_id().map(|id| { GenericDefaults(Some(Arc::from_iter(generic_params.iter_id().map(|id| {
let val = match id { let val = match id {
GenericParamId::TypeParamId(_) => TyKind::Error.intern(Interner).cast(Interner), GenericParamId::TypeParamId(_) => TyKind::Error.intern(Interner).cast(Interner),
GenericParamId::ConstParamId(id) => unknown_const_as_generic(db.const_param_ty(id)), GenericParamId::ConstParamId(id) => unknown_const_as_generic(db.const_param_ty(id)),
GenericParamId::LifetimeParamId(_) => error_lifetime().cast(Interner), GenericParamId::LifetimeParamId(_) => error_lifetime().cast(Interner),
}; };
crate::make_binders(db, &generic_params, val) crate::make_binders(db, &generic_params, val)
})); }))))
defaults
} }
fn fn_sig_for_fn(db: &dyn HirDatabase, def: FunctionId) -> PolyFnSig { fn fn_sig_for_fn(db: &dyn HirDatabase, def: FunctionId) -> PolyFnSig {
@ -1819,7 +1855,7 @@ fn fn_sig_for_fn(db: &dyn HirDatabase, def: FunctionId) -> PolyFnSig {
let ctx_params = TyLoweringContext::new(db, &resolver, def.into()) let ctx_params = TyLoweringContext::new(db, &resolver, def.into())
.with_impl_trait_mode(ImplTraitLoweringMode::Variable) .with_impl_trait_mode(ImplTraitLoweringMode::Variable)
.with_type_param_mode(ParamLoweringMode::Variable); .with_type_param_mode(ParamLoweringMode::Variable);
let params = data.params.iter().map(|tr| ctx_params.lower_ty(tr)).collect::<Vec<_>>(); let params = data.params.iter().map(|tr| ctx_params.lower_ty(tr));
let ctx_ret = TyLoweringContext::new(db, &resolver, def.into()) let ctx_ret = TyLoweringContext::new(db, &resolver, def.into())
.with_impl_trait_mode(ImplTraitLoweringMode::Opaque) .with_impl_trait_mode(ImplTraitLoweringMode::Opaque)
.with_type_param_mode(ParamLoweringMode::Variable); .with_type_param_mode(ParamLoweringMode::Variable);
@ -1873,7 +1909,7 @@ fn fn_sig_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> PolyFnS
let resolver = def.resolver(db.upcast()); let resolver = def.resolver(db.upcast());
let ctx = TyLoweringContext::new(db, &resolver, AdtId::from(def).into()) let ctx = TyLoweringContext::new(db, &resolver, AdtId::from(def).into())
.with_type_param_mode(ParamLoweringMode::Variable); .with_type_param_mode(ParamLoweringMode::Variable);
let params = fields.iter().map(|(_, field)| ctx.lower_ty(&field.type_ref)).collect::<Vec<_>>(); let params = fields.iter().map(|(_, field)| ctx.lower_ty(&field.type_ref));
let (ret, binders) = type_for_adt(db, def.into()).into_value_and_skipped_binders(); let (ret, binders) = type_for_adt(db, def.into()).into_value_and_skipped_binders();
Binders::new( Binders::new(
binders, binders,
@ -1905,7 +1941,7 @@ fn fn_sig_for_enum_variant_constructor(db: &dyn HirDatabase, def: EnumVariantId)
let resolver = def.resolver(db.upcast()); let resolver = def.resolver(db.upcast());
let ctx = TyLoweringContext::new(db, &resolver, DefWithBodyId::VariantId(def).into()) let ctx = TyLoweringContext::new(db, &resolver, DefWithBodyId::VariantId(def).into())
.with_type_param_mode(ParamLoweringMode::Variable); .with_type_param_mode(ParamLoweringMode::Variable);
let params = fields.iter().map(|(_, field)| ctx.lower_ty(&field.type_ref)).collect::<Vec<_>>(); let params = fields.iter().map(|(_, field)| ctx.lower_ty(&field.type_ref));
let (ret, binders) = let (ret, binders) =
type_for_adt(db, def.lookup(db.upcast()).parent.into()).into_value_and_skipped_binders(); type_for_adt(db, def.lookup(db.upcast()).parent.into()).into_value_and_skipped_binders();
Binders::new( Binders::new(
@ -1965,7 +2001,9 @@ pub enum CallableDefId {
StructId(StructId), StructId(StructId),
EnumVariantId(EnumVariantId), EnumVariantId(EnumVariantId),
} }
impl_intern_value_trivial!(CallableDefId);
impl InternValueTrivial for CallableDefId {}
impl_from!(FunctionId, StructId, EnumVariantId for CallableDefId); impl_from!(FunctionId, StructId, EnumVariantId for CallableDefId);
impl From<CallableDefId> for ModuleDefId { impl From<CallableDefId> for ModuleDefId {
fn from(def: CallableDefId) -> ModuleDefId { fn from(def: CallableDefId) -> ModuleDefId {
@ -2166,7 +2204,6 @@ pub(crate) fn lower_to_chalk_mutability(m: hir_def::type_ref::Mutability) -> Mut
/// Checks if the provided generic arg matches its expected kind, then lower them via /// Checks if the provided generic arg matches its expected kind, then lower them via
/// provided closures. Use unknown if there was kind mismatch. /// provided closures. Use unknown if there was kind mismatch.
/// ///
/// Returns `Some` of the lowered generic arg. `None` if the provided arg is a lifetime.
pub(crate) fn generic_arg_to_chalk<'a, T>( pub(crate) fn generic_arg_to_chalk<'a, T>(
db: &dyn HirDatabase, db: &dyn HirDatabase,
kind_id: GenericParamId, kind_id: GenericParamId,
@ -2175,7 +2212,7 @@ pub(crate) fn generic_arg_to_chalk<'a, T>(
for_type: impl FnOnce(&mut T, &TypeRef) -> Ty + 'a, for_type: impl FnOnce(&mut T, &TypeRef) -> Ty + 'a,
for_const: impl FnOnce(&mut T, &ConstRef, Ty) -> Const + 'a, for_const: impl FnOnce(&mut T, &ConstRef, Ty) -> Const + 'a,
for_lifetime: impl FnOnce(&mut T, &LifetimeRef) -> Lifetime + 'a, for_lifetime: impl FnOnce(&mut T, &LifetimeRef) -> Lifetime + 'a,
) -> Option<crate::GenericArg> { ) -> crate::GenericArg {
let kind = match kind_id { let kind = match kind_id {
GenericParamId::TypeParamId(_) => ParamKind::Type, GenericParamId::TypeParamId(_) => ParamKind::Type,
GenericParamId::ConstParamId(id) => { GenericParamId::ConstParamId(id) => {
@ -2184,7 +2221,7 @@ pub(crate) fn generic_arg_to_chalk<'a, T>(
} }
GenericParamId::LifetimeParamId(_) => ParamKind::Lifetime, GenericParamId::LifetimeParamId(_) => ParamKind::Lifetime,
}; };
Some(match (arg, kind) { match (arg, kind) {
(GenericArg::Type(type_ref), ParamKind::Type) => for_type(this, type_ref).cast(Interner), (GenericArg::Type(type_ref), ParamKind::Type) => for_type(this, type_ref).cast(Interner),
(GenericArg::Const(c), ParamKind::Const(c_ty)) => for_const(this, c, c_ty).cast(Interner), (GenericArg::Const(c), ParamKind::Const(c_ty)) => for_const(this, c, c_ty).cast(Interner),
(GenericArg::Lifetime(lifetime_ref), ParamKind::Lifetime) => { (GenericArg::Lifetime(lifetime_ref), ParamKind::Lifetime) => {
@ -2197,11 +2234,12 @@ pub(crate) fn generic_arg_to_chalk<'a, T>(
// as types. Maybe here is not the best place to do it, but // as types. Maybe here is not the best place to do it, but
// it works. // it works.
if let TypeRef::Path(p) = t { if let TypeRef::Path(p) = t {
let p = p.mod_path()?; if let Some(p) = p.mod_path() {
if p.kind == PathKind::Plain { if p.kind == PathKind::Plain {
if let [n] = p.segments() { if let [n] = p.segments() {
let c = ConstRef::Path(n.clone()); let c = ConstRef::Path(n.clone());
return Some(for_const(this, &c, c_ty).cast(Interner)); return for_const(this, &c, c_ty).cast(Interner);
}
} }
} }
} }
@ -2210,17 +2248,17 @@ pub(crate) fn generic_arg_to_chalk<'a, T>(
(GenericArg::Lifetime(_), ParamKind::Const(c_ty)) => unknown_const_as_generic(c_ty), (GenericArg::Lifetime(_), ParamKind::Const(c_ty)) => unknown_const_as_generic(c_ty),
(GenericArg::Type(_), ParamKind::Lifetime) => error_lifetime().cast(Interner), (GenericArg::Type(_), ParamKind::Lifetime) => error_lifetime().cast(Interner),
(GenericArg::Const(_), ParamKind::Lifetime) => error_lifetime().cast(Interner), (GenericArg::Const(_), ParamKind::Lifetime) => error_lifetime().cast(Interner),
}) }
} }
pub(crate) fn const_or_path_to_chalk( pub(crate) fn const_or_path_to_chalk<'g>(
db: &dyn HirDatabase, db: &dyn HirDatabase,
resolver: &Resolver, resolver: &Resolver,
owner: TypeOwnerId, owner: TypeOwnerId,
expected_ty: Ty, expected_ty: Ty,
value: &ConstRef, value: &ConstRef,
mode: ParamLoweringMode, mode: ParamLoweringMode,
args: impl FnOnce() -> Option<Generics>, args: impl FnOnce() -> Option<&'g Generics>,
debruijn: DebruijnIndex, debruijn: DebruijnIndex,
) -> Const { ) -> Const {
match value { match value {

View file

@ -144,8 +144,7 @@ pub struct TraitImpls {
impl TraitImpls { impl TraitImpls {
pub(crate) fn trait_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> { pub(crate) fn trait_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
let _p = let _p = tracing::info_span!("trait_impls_in_crate_query", ?krate).entered();
tracing::span!(tracing::Level::INFO, "trait_impls_in_crate_query", ?krate).entered();
let mut impls = FxHashMap::default(); let mut impls = FxHashMap::default();
Self::collect_def_map(db, &mut impls, &db.crate_def_map(krate)); Self::collect_def_map(db, &mut impls, &db.crate_def_map(krate));
@ -157,7 +156,7 @@ impl TraitImpls {
db: &dyn HirDatabase, db: &dyn HirDatabase,
block: BlockId, block: BlockId,
) -> Option<Arc<Self>> { ) -> Option<Arc<Self>> {
let _p = tracing::span!(tracing::Level::INFO, "trait_impls_in_block_query").entered(); let _p = tracing::info_span!("trait_impls_in_block_query").entered();
let mut impls = FxHashMap::default(); let mut impls = FxHashMap::default();
Self::collect_def_map(db, &mut impls, &db.block_def_map(block)); Self::collect_def_map(db, &mut impls, &db.block_def_map(block));
@ -173,8 +172,7 @@ impl TraitImpls {
db: &dyn HirDatabase, db: &dyn HirDatabase,
krate: CrateId, krate: CrateId,
) -> Arc<[Arc<Self>]> { ) -> Arc<[Arc<Self>]> {
let _p = let _p = tracing::info_span!("trait_impls_in_deps_query", ?krate).entered();
tracing::span!(tracing::Level::INFO, "trait_impls_in_deps_query", ?krate).entered();
let crate_graph = db.crate_graph(); let crate_graph = db.crate_graph();
Arc::from_iter( Arc::from_iter(
@ -280,8 +278,7 @@ pub struct InherentImpls {
impl InherentImpls { impl InherentImpls {
pub(crate) fn inherent_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> { pub(crate) fn inherent_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
let _p = let _p = tracing::info_span!("inherent_impls_in_crate_query", ?krate).entered();
tracing::span!(tracing::Level::INFO, "inherent_impls_in_crate_query", ?krate).entered();
let mut impls = Self { map: FxHashMap::default(), invalid_impls: Vec::default() }; let mut impls = Self { map: FxHashMap::default(), invalid_impls: Vec::default() };
let crate_def_map = db.crate_def_map(krate); let crate_def_map = db.crate_def_map(krate);
@ -295,7 +292,7 @@ impl InherentImpls {
db: &dyn HirDatabase, db: &dyn HirDatabase,
block: BlockId, block: BlockId,
) -> Option<Arc<Self>> { ) -> Option<Arc<Self>> {
let _p = tracing::span!(tracing::Level::INFO, "inherent_impls_in_block_query").entered(); let _p = tracing::info_span!("inherent_impls_in_block_query").entered();
let mut impls = Self { map: FxHashMap::default(), invalid_impls: Vec::default() }; let mut impls = Self { map: FxHashMap::default(), invalid_impls: Vec::default() };
let block_def_map = db.block_def_map(block); let block_def_map = db.block_def_map(block);
@ -368,7 +365,7 @@ pub(crate) fn incoherent_inherent_impl_crates(
krate: CrateId, krate: CrateId,
fp: TyFingerprint, fp: TyFingerprint,
) -> SmallVec<[CrateId; 2]> { ) -> SmallVec<[CrateId; 2]> {
let _p = tracing::span!(tracing::Level::INFO, "incoherent_inherent_impl_crates").entered(); let _p = tracing::info_span!("incoherent_inherent_impl_crates").entered();
let mut res = SmallVec::new(); let mut res = SmallVec::new();
let crate_graph = db.crate_graph(); let crate_graph = db.crate_graph();
@ -937,8 +934,7 @@ pub fn iterate_method_candidates_dyn(
mode: LookupMode, mode: LookupMode,
callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>, callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>,
) -> ControlFlow<()> { ) -> ControlFlow<()> {
let _p = tracing::span!( let _p = tracing::info_span!(
tracing::Level::INFO,
"iterate_method_candidates_dyn", "iterate_method_candidates_dyn",
?mode, ?mode,
?name, ?name,
@ -1504,7 +1500,7 @@ fn is_valid_impl_fn_candidate(
} }
} }
table.run_in_snapshot(|table| { table.run_in_snapshot(|table| {
let _p = tracing::span!(tracing::Level::INFO, "subst_for_def").entered(); let _p = tracing::info_span!("subst_for_def").entered();
let impl_subst = let impl_subst =
TyBuilder::subst_for_def(db, impl_id, None).fill_with_inference_vars(table).build(); TyBuilder::subst_for_def(db, impl_id, None).fill_with_inference_vars(table).build();
let expect_self_ty = db.impl_self_ty(impl_id).substitute(Interner, &impl_subst); let expect_self_ty = db.impl_self_ty(impl_id).substitute(Interner, &impl_subst);
@ -1512,7 +1508,7 @@ fn is_valid_impl_fn_candidate(
check_that!(table.unify(&expect_self_ty, self_ty)); check_that!(table.unify(&expect_self_ty, self_ty));
if let Some(receiver_ty) = receiver_ty { if let Some(receiver_ty) = receiver_ty {
let _p = tracing::span!(tracing::Level::INFO, "check_receiver_ty").entered(); let _p = tracing::info_span!("check_receiver_ty").entered();
check_that!(data.has_self_param()); check_that!(data.has_self_param());
let fn_subst = TyBuilder::subst_for_def(db, fn_id, Some(impl_subst.clone())) let fn_subst = TyBuilder::subst_for_def(db, fn_id, Some(impl_subst.clone()))

View file

@ -898,20 +898,19 @@ pub enum Rvalue {
Cast(CastKind, Operand, Ty), Cast(CastKind, Operand, Ty),
// FIXME link to `pointer::offset` when it hits stable. // FIXME link to `pointer::offset` when it hits stable.
/// * `Offset` has the same semantics as `pointer::offset`, except that the second // /// * `Offset` has the same semantics as `pointer::offset`, except that the second
/// parameter may be a `usize` as well. // /// parameter may be a `usize` as well.
/// * The comparison operations accept `bool`s, `char`s, signed or unsigned integers, floats, // /// * The comparison operations accept `bool`s, `char`s, signed or unsigned integers, floats,
/// raw pointers, or function pointers and return a `bool`. The types of the operands must be // /// raw pointers, or function pointers and return a `bool`. The types of the operands must be
/// matching, up to the usual caveat of the lifetimes in function pointers. // /// matching, up to the usual caveat of the lifetimes in function pointers.
/// * Left and right shift operations accept signed or unsigned integers not necessarily of the // /// * Left and right shift operations accept signed or unsigned integers not necessarily of the
/// same type and return a value of the same type as their LHS. Like in Rust, the RHS is // /// same type and return a value of the same type as their LHS. Like in Rust, the RHS is
/// truncated as needed. // /// truncated as needed.
/// * The `Bit*` operations accept signed integers, unsigned integers, or bools with matching // /// * The `Bit*` operations accept signed integers, unsigned integers, or bools with matching
/// types and return a value of that type. // /// types and return a value of that type.
/// * The remaining operations accept signed integers, unsigned integers, or floats with // /// * The remaining operations accept signed integers, unsigned integers, or floats with
/// matching types and return a value of that type. // /// matching types and return a value of that type.
//BinaryOp(BinOp, Box<(Operand, Operand)>), //BinaryOp(BinOp, Box<(Operand, Operand)>),
/// Same as `BinaryOp`, but yields `(T, bool)` with a `bool` indicating an error condition. /// Same as `BinaryOp`, but yields `(T, bool)` with a `bool` indicating an error condition.
/// ///
/// When overflow checking is disabled and we are generating run-time code, the error condition /// When overflow checking is disabled and we are generating run-time code, the error condition

View file

@ -91,7 +91,7 @@ pub fn borrowck_query(
db: &dyn HirDatabase, db: &dyn HirDatabase,
def: DefWithBodyId, def: DefWithBodyId,
) -> Result<Arc<[BorrowckResult]>, MirLowerError> { ) -> Result<Arc<[BorrowckResult]>, MirLowerError> {
let _p = tracing::span!(tracing::Level::INFO, "borrowck_query").entered(); let _p = tracing::info_span!("borrowck_query").entered();
let mut res = vec![]; let mut res = vec![];
all_mir_bodies(db, def, |body| { all_mir_bodies(db, def, |body| {
res.push(BorrowckResult { res.push(BorrowckResult {

View file

@ -363,7 +363,7 @@ impl MirEvalError {
)?; )?;
} }
Either::Right(closure) => { Either::Right(closure) => {
writeln!(f, "In {:?}", closure)?; writeln!(f, "In {closure:?}")?;
} }
} }
let source_map = db.body_with_source_map(*def).1; let source_map = db.body_with_source_map(*def).1;
@ -424,7 +424,7 @@ impl MirEvalError {
| MirEvalError::StackOverflow | MirEvalError::StackOverflow
| MirEvalError::CoerceUnsizedError(_) | MirEvalError::CoerceUnsizedError(_)
| MirEvalError::InternalError(_) | MirEvalError::InternalError(_)
| MirEvalError::InvalidVTableId(_) => writeln!(f, "{:?}", err)?, | MirEvalError::InvalidVTableId(_) => writeln!(f, "{err:?}")?,
} }
Ok(()) Ok(())
} }

View file

@ -77,7 +77,7 @@ fn check_panic(ra_fixture: &str, expected_panic: &str) {
let (db, file_ids) = TestDB::with_many_files(ra_fixture); let (db, file_ids) = TestDB::with_many_files(ra_fixture);
let file_id = *file_ids.last().unwrap(); let file_id = *file_ids.last().unwrap();
let e = eval_main(&db, file_id).unwrap_err(); let e = eval_main(&db, file_id).unwrap_err();
assert_eq!(e.is_panic().unwrap_or_else(|| panic!("unexpected error: {:?}", e)), expected_panic); assert_eq!(e.is_panic().unwrap_or_else(|| panic!("unexpected error: {e:?}")), expected_panic);
} }
#[test] #[test]

View file

@ -28,6 +28,7 @@ use crate::{
db::{HirDatabase, InternedClosure}, db::{HirDatabase, InternedClosure},
display::HirDisplay, display::HirDisplay,
error_lifetime, error_lifetime,
generics::generics,
infer::{CaptureKind, CapturedItem, TypeMismatch}, infer::{CaptureKind, CapturedItem, TypeMismatch},
inhabitedness::is_ty_uninhabited_from, inhabitedness::is_ty_uninhabited_from,
layout::LayoutError, layout::LayoutError,
@ -42,7 +43,7 @@ use crate::{
}, },
static_lifetime, static_lifetime,
traits::FnTrait, traits::FnTrait,
utils::{generics, ClosureSubst}, utils::ClosureSubst,
Adjust, Adjustment, AutoBorrow, CallableDefId, TyBuilder, TyExt, Adjust, Adjustment, AutoBorrow, CallableDefId, TyBuilder, TyExt,
}; };
@ -213,7 +214,7 @@ impl MirLowerError {
| MirLowerError::LangItemNotFound(_) | MirLowerError::LangItemNotFound(_)
| MirLowerError::MutatingRvalue | MirLowerError::MutatingRvalue
| MirLowerError::UnresolvedLabel | MirLowerError::UnresolvedLabel
| MirLowerError::UnresolvedUpvar(_) => writeln!(f, "{:?}", self)?, | MirLowerError::UnresolvedUpvar(_) => writeln!(f, "{self:?}")?,
} }
Ok(()) Ok(())
} }
@ -2133,7 +2134,7 @@ pub fn mir_body_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Result<Arc<Mi
} }
DefWithBodyId::InTypeConstId(it) => format!("in type const {it:?}"), DefWithBodyId::InTypeConstId(it) => format!("in type const {it:?}"),
}; };
let _p = tracing::span!(tracing::Level::INFO, "mir_body_query", ?detail).entered(); let _p = tracing::info_span!("mir_body_query", ?detail).entered();
let body = db.body(def); let body = db.body(def);
let infer = db.infer(def); let infer = db.infer(def);
let mut result = lower_to_mir(db, def, &body, &infer, body.body_expr)?; let mut result = lower_to_mir(db, def, &body, &infer, body.body_expr)?;

View file

@ -21,8 +21,8 @@ use crate::{
consteval::{intern_const_scalar, unknown_const}, consteval::{intern_const_scalar, unknown_const},
db::{HirDatabase, InternedClosure}, db::{HirDatabase, InternedClosure},
from_placeholder_idx, from_placeholder_idx,
generics::{generics, Generics},
infer::normalize, infer::normalize,
utils::{generics, Generics},
ClosureId, Const, Interner, ProjectionTy, Substitution, TraitEnvironment, Ty, TyKind, ClosureId, Const, Interner, ProjectionTy, Substitution, TraitEnvironment, Ty, TyKind,
}; };

View file

@ -108,7 +108,7 @@ pub(crate) fn trait_solve_query(
GoalData::DomainGoal(DomainGoal::Holds(WhereClause::AliasEq(_))) => "alias_eq".to_owned(), GoalData::DomainGoal(DomainGoal::Holds(WhereClause::AliasEq(_))) => "alias_eq".to_owned(),
_ => "??".to_owned(), _ => "??".to_owned(),
}; };
let _p = tracing::span!(tracing::Level::INFO, "trait_solve_query", ?detail).entered(); let _p = tracing::info_span!("trait_solve_query", ?detail).entered();
tracing::info!("trait_solve_query({:?})", goal.value.goal); tracing::info!("trait_solve_query({:?})", goal.value.goal);
if let GoalData::DomainGoal(DomainGoal::Holds(WhereClause::AliasEq(AliasEq { if let GoalData::DomainGoal(DomainGoal::Holds(WhereClause::AliasEq(AliasEq {
@ -140,7 +140,7 @@ fn solve(
block: Option<BlockId>, block: Option<BlockId>,
goal: &chalk_ir::UCanonical<chalk_ir::InEnvironment<chalk_ir::Goal<Interner>>>, goal: &chalk_ir::UCanonical<chalk_ir::InEnvironment<chalk_ir::Goal<Interner>>>,
) -> Option<chalk_solve::Solution<Interner>> { ) -> Option<chalk_solve::Solution<Interner>> {
let _p = tracing::span!(tracing::Level::INFO, "solve", ?krate, ?block).entered(); let _p = tracing::info_span!("solve", ?krate, ?block).entered();
let context = ChalkContext { db, krate, block }; let context = ChalkContext { db, krate, block };
tracing::debug!("solve goal: {:?}", goal); tracing::debug!("solve goal: {:?}", goal);
let mut solver = create_chalk_solver(); let mut solver = create_chalk_solver();

View file

@ -5,25 +5,19 @@ use std::{hash::Hash, iter};
use base_db::CrateId; use base_db::CrateId;
use chalk_ir::{ use chalk_ir::{
cast::Cast,
fold::{FallibleTypeFolder, Shift}, fold::{FallibleTypeFolder, Shift},
BoundVar, DebruijnIndex, DebruijnIndex,
}; };
use hir_def::{ use hir_def::{
db::DefDatabase, db::DefDatabase,
generics::{ generics::{WherePredicate, WherePredicateTypeTarget},
GenericParamDataRef, GenericParams, LifetimeParamData, TypeOrConstParamData,
TypeParamProvenance, WherePredicate, WherePredicateTypeTarget,
},
lang_item::LangItem, lang_item::LangItem,
resolver::{HasResolver, TypeNs}, resolver::{HasResolver, TypeNs},
type_ref::{TraitBoundModifier, TypeRef}, type_ref::{TraitBoundModifier, TypeRef},
ConstParamId, EnumId, EnumVariantId, FunctionId, GenericDefId, GenericParamId, ItemContainerId, EnumId, EnumVariantId, FunctionId, Lookup, OpaqueInternableThing, TraitId, TypeAliasId,
LifetimeParamId, Lookup, OpaqueInternableThing, TraitId, TypeAliasId, TypeOrConstParamId, TypeOrConstParamId,
TypeParamId,
}; };
use hir_expand::name::Name; use hir_expand::name::Name;
use intern::Interned;
use rustc_abi::TargetDataLayout; use rustc_abi::TargetDataLayout;
use rustc_hash::FxHashSet; use rustc_hash::FxHashSet;
use smallvec::{smallvec, SmallVec}; use smallvec::{smallvec, SmallVec};
@ -161,7 +155,7 @@ impl Iterator for ClauseElaborator<'_> {
fn direct_super_traits(db: &dyn DefDatabase, trait_: TraitId, cb: impl FnMut(TraitId)) { fn direct_super_traits(db: &dyn DefDatabase, trait_: TraitId, cb: impl FnMut(TraitId)) {
let resolver = trait_.resolver(db); let resolver = trait_.resolver(db);
let generic_params = db.generic_params(trait_.into()); let generic_params = db.generic_params(trait_.into());
let trait_self = generic_params.find_trait_self_param(); let trait_self = generic_params.trait_self_param();
generic_params generic_params
.where_predicates .where_predicates
.iter() .iter()
@ -194,7 +188,7 @@ fn direct_super_traits(db: &dyn DefDatabase, trait_: TraitId, cb: impl FnMut(Tra
fn direct_super_trait_refs(db: &dyn HirDatabase, trait_ref: &TraitRef, cb: impl FnMut(TraitRef)) { fn direct_super_trait_refs(db: &dyn HirDatabase, trait_ref: &TraitRef, cb: impl FnMut(TraitRef)) {
let generic_params = db.generic_params(trait_ref.hir_trait_id().into()); let generic_params = db.generic_params(trait_ref.hir_trait_id().into());
let trait_self = match generic_params.find_trait_self_param() { let trait_self = match generic_params.trait_self_param() {
Some(p) => TypeOrConstParamId { parent: trait_ref.hir_trait_id().into(), local_id: p }, Some(p) => TypeOrConstParamId { parent: trait_ref.hir_trait_id().into(), local_id: p },
None => return, None => return,
}; };
@ -226,11 +220,6 @@ pub(super) fn associated_type_by_name_including_super_traits(
}) })
} }
pub(crate) fn generics(db: &dyn DefDatabase, def: GenericDefId) -> Generics {
let parent_generics = parent_generic_def(db, def).map(|def| Box::new(generics(db, def)));
Generics { def, params: db.generic_params(def), parent_generics }
}
/// It is a bit different from the rustc equivalent. Currently it stores: /// It is a bit different from the rustc equivalent. Currently it stores:
/// - 0: the function signature, encoded as a function pointer type /// - 0: the function signature, encoded as a function pointer type
/// - 1..n: generics of the parent /// - 1..n: generics of the parent
@ -262,278 +251,14 @@ impl<'a> ClosureSubst<'a> {
} }
} }
#[derive(Clone, Debug)]
pub(crate) struct Generics {
def: GenericDefId,
pub(crate) params: Interned<GenericParams>,
parent_generics: Option<Box<Generics>>,
}
impl Generics {
pub(crate) fn iter_id(&self) -> impl Iterator<Item = GenericParamId> + '_ {
self.iter().map(|(id, _)| id)
}
pub(crate) fn def(&self) -> GenericDefId {
self.def
}
/// Iterator over types and const params of self, then parent.
pub(crate) fn iter<'a>(
&'a self,
) -> impl DoubleEndedIterator<Item = (GenericParamId, GenericParamDataRef<'a>)> + 'a {
let from_toc_id = |it: &'a Generics| {
move |(local_id, p): (_, &'a TypeOrConstParamData)| {
let id = TypeOrConstParamId { parent: it.def, local_id };
match p {
TypeOrConstParamData::TypeParamData(p) => (
GenericParamId::TypeParamId(TypeParamId::from_unchecked(id)),
GenericParamDataRef::TypeParamData(p),
),
TypeOrConstParamData::ConstParamData(p) => (
GenericParamId::ConstParamId(ConstParamId::from_unchecked(id)),
GenericParamDataRef::ConstParamData(p),
),
}
}
};
let from_lt_id = |it: &'a Generics| {
move |(local_id, p): (_, &'a LifetimeParamData)| {
(
GenericParamId::LifetimeParamId(LifetimeParamId { parent: it.def, local_id }),
GenericParamDataRef::LifetimeParamData(p),
)
}
};
let lt_iter = self.params.iter_lt().map(from_lt_id(self));
self.params
.iter_type_or_consts()
.map(from_toc_id(self))
.chain(lt_iter)
.chain(self.iter_parent())
}
/// Iterate over types and const params without parent params.
pub(crate) fn iter_self<'a>(
&'a self,
) -> impl DoubleEndedIterator<Item = (GenericParamId, GenericParamDataRef<'a>)> + 'a {
let from_toc_id = |it: &'a Generics| {
move |(local_id, p): (_, &'a TypeOrConstParamData)| {
let id = TypeOrConstParamId { parent: it.def, local_id };
match p {
TypeOrConstParamData::TypeParamData(p) => (
GenericParamId::TypeParamId(TypeParamId::from_unchecked(id)),
GenericParamDataRef::TypeParamData(p),
),
TypeOrConstParamData::ConstParamData(p) => (
GenericParamId::ConstParamId(ConstParamId::from_unchecked(id)),
GenericParamDataRef::ConstParamData(p),
),
}
}
};
let from_lt_id = |it: &'a Generics| {
move |(local_id, p): (_, &'a LifetimeParamData)| {
(
GenericParamId::LifetimeParamId(LifetimeParamId { parent: it.def, local_id }),
GenericParamDataRef::LifetimeParamData(p),
)
}
};
self.params
.iter_type_or_consts()
.map(from_toc_id(self))
.chain(self.params.iter_lt().map(from_lt_id(self)))
}
/// Iterator over types and const params of parent.
pub(crate) fn iter_parent(
&self,
) -> impl DoubleEndedIterator<Item = (GenericParamId, GenericParamDataRef<'_>)> + '_ {
self.parent_generics().into_iter().flat_map(|it| {
let from_toc_id = move |(local_id, p)| {
let p: &_ = p;
let id = TypeOrConstParamId { parent: it.def, local_id };
match p {
TypeOrConstParamData::TypeParamData(p) => (
GenericParamId::TypeParamId(TypeParamId::from_unchecked(id)),
GenericParamDataRef::TypeParamData(p),
),
TypeOrConstParamData::ConstParamData(p) => (
GenericParamId::ConstParamId(ConstParamId::from_unchecked(id)),
GenericParamDataRef::ConstParamData(p),
),
}
};
let from_lt_id = move |(local_id, p): (_, _)| {
(
GenericParamId::LifetimeParamId(LifetimeParamId { parent: it.def, local_id }),
GenericParamDataRef::LifetimeParamData(p),
)
};
let lt_iter = it.params.iter_lt().map(from_lt_id);
it.params.iter_type_or_consts().map(from_toc_id).chain(lt_iter)
})
}
/// Returns total number of generic parameters in scope, including those from parent.
pub(crate) fn len(&self) -> usize {
let parent = self.parent_generics().map_or(0, Generics::len);
let child = self.params.len();
parent + child
}
/// Returns numbers of generic parameters and lifetimes excluding those from parent.
pub(crate) fn len_self(&self) -> usize {
self.params.len()
}
/// Returns number of generic parameter excluding those from parent
fn len_type_and_const_params(&self) -> usize {
self.params.type_or_consts.len()
}
/// (parent total, self param, type params, const params, impl trait list, lifetimes)
pub(crate) fn provenance_split(&self) -> (usize, usize, usize, usize, usize, usize) {
let mut self_params = 0;
let mut type_params = 0;
let mut impl_trait_params = 0;
let mut const_params = 0;
let mut lifetime_params = 0;
self.params.iter_type_or_consts().for_each(|(_, data)| match data {
TypeOrConstParamData::TypeParamData(p) => match p.provenance {
TypeParamProvenance::TypeParamList => type_params += 1,
TypeParamProvenance::TraitSelf => self_params += 1,
TypeParamProvenance::ArgumentImplTrait => impl_trait_params += 1,
},
TypeOrConstParamData::ConstParamData(_) => const_params += 1,
});
self.params.iter_lt().for_each(|(_, _)| lifetime_params += 1);
let parent_len = self.parent_generics().map_or(0, Generics::len);
(parent_len, self_params, type_params, const_params, impl_trait_params, lifetime_params)
}
pub(crate) fn type_or_const_param_idx(&self, param: TypeOrConstParamId) -> Option<usize> {
Some(self.find_type_or_const_param(param)?.0)
}
fn find_type_or_const_param(
&self,
param: TypeOrConstParamId,
) -> Option<(usize, &TypeOrConstParamData)> {
if param.parent == self.def {
let idx = param.local_id.into_raw().into_u32() as usize;
if idx >= self.params.type_or_consts.len() {
return None;
}
Some((idx, &self.params.type_or_consts[param.local_id]))
} else {
self.parent_generics()
.and_then(|g| g.find_type_or_const_param(param))
// Remember that parent parameters come after parameters for self.
.map(|(idx, data)| (self.len_self() + idx, data))
}
}
pub(crate) fn lifetime_idx(&self, lifetime: LifetimeParamId) -> Option<usize> {
Some(self.find_lifetime(lifetime)?.0)
}
fn find_lifetime(&self, lifetime: LifetimeParamId) -> Option<(usize, &LifetimeParamData)> {
if lifetime.parent == self.def {
let idx = lifetime.local_id.into_raw().into_u32() as usize;
if idx >= self.params.lifetimes.len() {
return None;
}
Some((
self.len_type_and_const_params() + idx,
&self.params.lifetimes[lifetime.local_id],
))
} else {
self.parent_generics()
.and_then(|g| g.find_lifetime(lifetime))
.map(|(idx, data)| (self.len_self() + idx, data))
}
}
pub(crate) fn parent_generics(&self) -> Option<&Generics> {
self.parent_generics.as_deref()
}
pub(crate) fn parent_or_self(&self) -> &Generics {
self.parent_generics.as_deref().unwrap_or(self)
}
/// Returns a Substitution that replaces each parameter by a bound variable.
pub(crate) fn bound_vars_subst(
&self,
db: &dyn HirDatabase,
debruijn: DebruijnIndex,
) -> Substitution {
Substitution::from_iter(
Interner,
self.iter_id().enumerate().map(|(idx, id)| match id {
GenericParamId::ConstParamId(id) => BoundVar::new(debruijn, idx)
.to_const(Interner, db.const_param_ty(id))
.cast(Interner),
GenericParamId::TypeParamId(_) => {
BoundVar::new(debruijn, idx).to_ty(Interner).cast(Interner)
}
GenericParamId::LifetimeParamId(_) => {
BoundVar::new(debruijn, idx).to_lifetime(Interner).cast(Interner)
}
}),
)
}
/// Returns a Substitution that replaces each parameter by itself (i.e. `Ty::Param`).
pub(crate) fn placeholder_subst(&self, db: &dyn HirDatabase) -> Substitution {
Substitution::from_iter(
Interner,
self.iter_id().map(|id| match id {
GenericParamId::TypeParamId(id) => {
crate::to_placeholder_idx(db, id.into()).to_ty(Interner).cast(Interner)
}
GenericParamId::ConstParamId(id) => crate::to_placeholder_idx(db, id.into())
.to_const(Interner, db.const_param_ty(id))
.cast(Interner),
GenericParamId::LifetimeParamId(id) => {
crate::lt_to_placeholder_idx(db, id).to_lifetime(Interner).cast(Interner)
}
}),
)
}
}
fn parent_generic_def(db: &dyn DefDatabase, def: GenericDefId) -> Option<GenericDefId> {
let container = match def {
GenericDefId::FunctionId(it) => it.lookup(db).container,
GenericDefId::TypeAliasId(it) => it.lookup(db).container,
GenericDefId::ConstId(it) => it.lookup(db).container,
GenericDefId::EnumVariantId(it) => return Some(it.lookup(db).parent.into()),
GenericDefId::AdtId(_)
| GenericDefId::TraitId(_)
| GenericDefId::ImplId(_)
| GenericDefId::TraitAliasId(_) => return None,
};
match container {
ItemContainerId::ImplId(it) => Some(it.into()),
ItemContainerId::TraitId(it) => Some(it.into()),
ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None,
}
}
pub fn is_fn_unsafe_to_call(db: &dyn HirDatabase, func: FunctionId) -> bool { pub fn is_fn_unsafe_to_call(db: &dyn HirDatabase, func: FunctionId) -> bool {
let data = db.function_data(func); let data = db.function_data(func);
if data.has_unsafe_kw() { if data.has_unsafe_kw() {
// Functions that are `#[rustc_deprecated_safe_2024]` are safe to call before 2024.
if db.attrs(func.into()).by_key("rustc_deprecated_safe_2024").exists() {
// FIXME: Properly check the caller span and mark it as unsafe after 2024.
return false;
}
return true; return true;
} }

View file

@ -27,6 +27,7 @@ cfg.workspace = true
hir-def.workspace = true hir-def.workspace = true
hir-expand.workspace = true hir-expand.workspace = true
hir-ty.workspace = true hir-ty.workspace = true
intern.workspace = true
stdx.workspace = true stdx.workspace = true
syntax.workspace = true syntax.workspace = true
tt.workspace = true tt.workspace = true

View file

@ -307,7 +307,7 @@ fn doc_modpath_from_str(link: &str) -> Option<ModPath> {
let kind = match parts.next()? { let kind = match parts.next()? {
"" => PathKind::Abs, "" => PathKind::Abs,
"crate" => PathKind::Crate, "crate" => PathKind::Crate,
"self" => PathKind::Super(0), "self" => PathKind::SELF,
"super" => { "super" => {
let mut deg = 1; let mut deg = 1;
for segment in parts.by_ref() { for segment in parts.by_ref() {

View file

@ -3,7 +3,8 @@ use either::Either;
use hir_def::{ use hir_def::{
data::adt::{StructKind, VariantData}, data::adt::{StructKind, VariantData},
generics::{ generics::{
TypeOrConstParamData, TypeParamProvenance, WherePredicate, WherePredicateTypeTarget, GenericParams, TypeOrConstParamData, TypeParamProvenance, WherePredicate,
WherePredicateTypeTarget,
}, },
lang_item::LangItem, lang_item::LangItem,
type_ref::{TypeBound, TypeRef}, type_ref::{TypeBound, TypeRef},
@ -16,10 +17,12 @@ use hir_ty::{
}, },
AliasEq, AliasTy, Interner, ProjectionTyExt, TraitRefExt, TyKind, WhereClause, AliasEq, AliasTy, Interner, ProjectionTyExt, TraitRefExt, TyKind, WhereClause,
}; };
use intern::Interned;
use itertools::Itertools;
use crate::{ use crate::{
Adt, AsAssocItem, AssocItem, AssocItemContainer, Const, ConstParam, Enum, ExternCrateDecl, Adt, AsAssocItem, AssocItem, AssocItemContainer, Const, ConstParam, Enum, ExternCrateDecl,
Field, Function, GenericParam, HasCrate, HasVisibility, LifetimeParam, Macro, Module, Field, Function, GenericParam, HasCrate, HasVisibility, Impl, LifetimeParam, Macro, Module,
SelfParam, Static, Struct, Trait, TraitAlias, TupleField, TyBuilder, Type, TypeAlias, SelfParam, Static, Struct, Trait, TraitAlias, TupleField, TyBuilder, Type, TypeAlias,
TypeOrConstParam, TypeParam, Union, Variant, TypeOrConstParam, TypeParam, Union, Variant,
}; };
@ -30,12 +33,42 @@ impl HirDisplay for Function {
let data = db.function_data(self.id); let data = db.function_data(self.id);
let container = self.as_assoc_item(db).map(|it| it.container(db)); let container = self.as_assoc_item(db).map(|it| it.container(db));
let mut module = self.module(db); let mut module = self.module(db);
// Write container (trait or impl)
let container_params = match container {
Some(AssocItemContainer::Trait(trait_)) => {
let params = f.db.generic_params(trait_.id.into());
if f.show_container_bounds() && !params.is_empty() {
write_trait_header(&trait_, f)?;
f.write_char('\n')?;
has_disaplayable_predicates(&params).then_some(params)
} else {
None
}
}
Some(AssocItemContainer::Impl(impl_)) => {
let params = f.db.generic_params(impl_.id.into());
if f.show_container_bounds() && !params.is_empty() {
write_impl_header(&impl_, f)?;
f.write_char('\n')?;
has_disaplayable_predicates(&params).then_some(params)
} else {
None
}
}
None => None,
};
// Write signature of the function
// Block-local impls are "hoisted" to the nearest (non-block) module.
if let Some(AssocItemContainer::Impl(_)) = container { if let Some(AssocItemContainer::Impl(_)) = container {
// Block-local impls are "hoisted" to the nearest (non-block) module.
module = module.nearest_non_block_module(db); module = module.nearest_non_block_module(db);
} }
let module_id = module.id; let module_id = module.id;
write_visibility(module_id, self.visibility(db), f)?; write_visibility(module_id, self.visibility(db), f)?;
if data.has_default_kw() { if data.has_default_kw() {
f.write_str("default ")?; f.write_str("default ")?;
} }
@ -116,12 +149,41 @@ impl HirDisplay for Function {
} }
} }
write_where_clause(GenericDefId::FunctionId(self.id), f)?; // Write where clauses
let has_written_where = write_where_clause(GenericDefId::FunctionId(self.id), f)?;
if let Some(container_params) = container_params {
if !has_written_where {
f.write_str("\nwhere")?;
}
let container_name = match container.unwrap() {
AssocItemContainer::Trait(_) => "trait",
AssocItemContainer::Impl(_) => "impl",
};
write!(f, "\n // Bounds from {container_name}:",)?;
write_where_predicates(&container_params, f)?;
}
Ok(()) Ok(())
} }
} }
fn write_impl_header(impl_: &Impl, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
let db = f.db;
f.write_str("impl")?;
let def_id = GenericDefId::ImplId(impl_.id);
write_generic_params(def_id, f)?;
if let Some(trait_) = impl_.trait_(db) {
let trait_data = db.trait_data(trait_.id);
write!(f, " {} for", trait_data.name.display(db.upcast()))?;
}
f.write_char(' ')?;
impl_.self_ty(db).hir_fmt(f)?;
Ok(())
}
impl HirDisplay for SelfParam { impl HirDisplay for SelfParam {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
let data = f.db.function_data(self.func); let data = f.db.function_data(self.func);
@ -188,7 +250,7 @@ impl HirDisplay for Struct {
StructKind::Record => { StructKind::Record => {
let has_where_clause = write_where_clause(def_id, f)?; let has_where_clause = write_where_clause(def_id, f)?;
if let Some(limit) = f.entity_limit { if let Some(limit) = f.entity_limit {
display_fields(&self.fields(f.db), has_where_clause, limit, false, f)?; write_fields(&self.fields(f.db), has_where_clause, limit, false, f)?;
} }
} }
StructKind::Unit => _ = write_where_clause(def_id, f)?, StructKind::Unit => _ = write_where_clause(def_id, f)?,
@ -208,7 +270,7 @@ impl HirDisplay for Enum {
let has_where_clause = write_where_clause(def_id, f)?; let has_where_clause = write_where_clause(def_id, f)?;
if let Some(limit) = f.entity_limit { if let Some(limit) = f.entity_limit {
display_variants(&self.variants(f.db), has_where_clause, limit, f)?; write_variants(&self.variants(f.db), has_where_clause, limit, f)?;
} }
Ok(()) Ok(())
@ -225,13 +287,13 @@ impl HirDisplay for Union {
let has_where_clause = write_where_clause(def_id, f)?; let has_where_clause = write_where_clause(def_id, f)?;
if let Some(limit) = f.entity_limit { if let Some(limit) = f.entity_limit {
display_fields(&self.fields(f.db), has_where_clause, limit, false, f)?; write_fields(&self.fields(f.db), has_where_clause, limit, false, f)?;
} }
Ok(()) Ok(())
} }
} }
fn display_fields( fn write_fields(
fields: &[Field], fields: &[Field],
has_where_clause: bool, has_where_clause: bool,
limit: usize, limit: usize,
@ -242,11 +304,7 @@ fn display_fields(
let (indent, separator) = if in_line { ("", ' ') } else { (" ", '\n') }; let (indent, separator) = if in_line { ("", ' ') } else { (" ", '\n') };
f.write_char(if !has_where_clause { ' ' } else { separator })?; f.write_char(if !has_where_clause { ' ' } else { separator })?;
if count == 0 { if count == 0 {
if fields.is_empty() { f.write_str(if fields.is_empty() { "{}" } else { "{ /* … */ }" })?;
f.write_str("{}")?;
} else {
f.write_str("{ /* … */ }")?;
}
} else { } else {
f.write_char('{')?; f.write_char('{')?;
@ -255,14 +313,11 @@ fn display_fields(
for field in &fields[..count] { for field in &fields[..count] {
f.write_str(indent)?; f.write_str(indent)?;
field.hir_fmt(f)?; field.hir_fmt(f)?;
f.write_char(',')?; write!(f, ",{separator}")?;
f.write_char(separator)?;
} }
if fields.len() > count { if fields.len() > count {
f.write_str(indent)?; write!(f, "{indent}/* … */{separator}")?;
f.write_str("/* … */")?;
f.write_char(separator)?;
} }
} }
@ -272,7 +327,7 @@ fn display_fields(
Ok(()) Ok(())
} }
fn display_variants( fn write_variants(
variants: &[Variant], variants: &[Variant],
has_where_clause: bool, has_where_clause: bool,
limit: usize, limit: usize,
@ -281,30 +336,22 @@ fn display_variants(
let count = variants.len().min(limit); let count = variants.len().min(limit);
f.write_char(if !has_where_clause { ' ' } else { '\n' })?; f.write_char(if !has_where_clause { ' ' } else { '\n' })?;
if count == 0 { if count == 0 {
if variants.is_empty() { let variants = if variants.is_empty() { "{}" } else { "{ /* … */ }" };
f.write_str("{}")?; f.write_str(variants)?;
} else {
f.write_str("{ /* … */ }")?;
}
} else { } else {
f.write_str("{\n")?; f.write_str("{\n")?;
for variant in &variants[..count] { for variant in &variants[..count] {
f.write_str(" ")?; write!(f, " {}", variant.name(f.db).display(f.db.upcast()))?;
write!(f, "{}", variant.name(f.db).display(f.db.upcast()))?;
match variant.kind(f.db) { match variant.kind(f.db) {
StructKind::Tuple => { StructKind::Tuple => {
if variant.fields(f.db).is_empty() { let fields_str =
f.write_str("()")?; if variant.fields(f.db).is_empty() { "()" } else { "( /* … */ )" };
} else { f.write_str(fields_str)?;
f.write_str("( /* … */ )")?;
}
} }
StructKind::Record => { StructKind::Record => {
if variant.fields(f.db).is_empty() { let fields_str =
f.write_str(" {}")?; if variant.fields(f.db).is_empty() { " {}" } else { " { /* … */ }" };
} else { f.write_str(fields_str)?;
f.write_str(" { /* … */ }")?;
}
} }
StructKind::Unit => {} StructKind::Unit => {}
} }
@ -357,7 +404,7 @@ impl HirDisplay for Variant {
} }
VariantData::Record(_) => { VariantData::Record(_) => {
if let Some(limit) = f.entity_limit { if let Some(limit) = f.entity_limit {
display_fields(&self.fields(f.db), false, limit, true, f)?; write_fields(&self.fields(f.db), false, limit, true, f)?;
} }
} }
} }
@ -554,104 +601,98 @@ fn write_where_clause(
f: &mut HirFormatter<'_>, f: &mut HirFormatter<'_>,
) -> Result<bool, HirDisplayError> { ) -> Result<bool, HirDisplayError> {
let params = f.db.generic_params(def); let params = f.db.generic_params(def);
if !has_disaplayable_predicates(&params) {
// unnamed type targets are displayed inline with the argument itself, e.g. `f: impl Y`.
let is_unnamed_type_target = |target: &WherePredicateTypeTarget| match target {
WherePredicateTypeTarget::TypeRef(_) => false,
WherePredicateTypeTarget::TypeOrConstParam(id) => {
params.type_or_consts[*id].name().is_none()
}
};
let has_displayable_predicate = params
.where_predicates
.iter()
.any(|pred| {
!matches!(pred, WherePredicate::TypeBound { target, .. } if is_unnamed_type_target(target))
});
if !has_displayable_predicate {
return Ok(false); return Ok(false);
} }
let write_target = |target: &WherePredicateTypeTarget, f: &mut HirFormatter<'_>| match target {
WherePredicateTypeTarget::TypeRef(ty) => ty.hir_fmt(f),
WherePredicateTypeTarget::TypeOrConstParam(id) => {
match &params.type_or_consts[*id].name() {
Some(name) => write!(f, "{}", name.display(f.db.upcast())),
None => f.write_str("{unnamed}"),
}
}
};
f.write_str("\nwhere")?; f.write_str("\nwhere")?;
write_where_predicates(&params, f)?;
for (pred_idx, pred) in params.where_predicates.iter().enumerate() {
let prev_pred =
if pred_idx == 0 { None } else { Some(&params.where_predicates[pred_idx - 1]) };
let new_predicate = |f: &mut HirFormatter<'_>| {
f.write_str(if pred_idx == 0 { "\n " } else { ",\n " })
};
match pred {
WherePredicate::TypeBound { target, .. } if is_unnamed_type_target(target) => {}
WherePredicate::TypeBound { target, bound } => {
if matches!(prev_pred, Some(WherePredicate::TypeBound { target: target_, .. }) if target_ == target)
{
f.write_str(" + ")?;
} else {
new_predicate(f)?;
write_target(target, f)?;
f.write_str(": ")?;
}
bound.hir_fmt(f)?;
}
WherePredicate::Lifetime { target, bound } => {
if matches!(prev_pred, Some(WherePredicate::Lifetime { target: target_, .. }) if target_ == target)
{
write!(f, " + {}", bound.name.display(f.db.upcast()))?;
} else {
new_predicate(f)?;
write!(
f,
"{}: {}",
target.name.display(f.db.upcast()),
bound.name.display(f.db.upcast())
)?;
}
}
WherePredicate::ForLifetime { lifetimes, target, bound } => {
if matches!(
prev_pred,
Some(WherePredicate::ForLifetime { lifetimes: lifetimes_, target: target_, .. })
if lifetimes_ == lifetimes && target_ == target,
) {
f.write_str(" + ")?;
} else {
new_predicate(f)?;
f.write_str("for<")?;
for (idx, lifetime) in lifetimes.iter().enumerate() {
if idx != 0 {
f.write_str(", ")?;
}
write!(f, "{}", lifetime.display(f.db.upcast()))?;
}
f.write_str("> ")?;
write_target(target, f)?;
f.write_str(": ")?;
}
bound.hir_fmt(f)?;
}
}
}
// End of final predicate. There must be at least one predicate here.
f.write_char(',')?;
Ok(true) Ok(true)
} }
fn has_disaplayable_predicates(params: &Interned<GenericParams>) -> bool {
params.where_predicates.iter().any(|pred| {
!matches!(
pred,
WherePredicate::TypeBound { target: WherePredicateTypeTarget::TypeOrConstParam(id), .. }
if params.type_or_consts[*id].name().is_none()
)
})
}
fn write_where_predicates(
params: &Interned<GenericParams>,
f: &mut HirFormatter<'_>,
) -> Result<(), HirDisplayError> {
use WherePredicate::*;
// unnamed type targets are displayed inline with the argument itself, e.g. `f: impl Y`.
let is_unnamed_type_target =
|params: &Interned<GenericParams>, target: &WherePredicateTypeTarget| {
matches!(target,
WherePredicateTypeTarget::TypeOrConstParam(id) if params.type_or_consts[*id].name().is_none()
)
};
let write_target = |target: &WherePredicateTypeTarget, f: &mut HirFormatter<'_>| match target {
WherePredicateTypeTarget::TypeRef(ty) => ty.hir_fmt(f),
WherePredicateTypeTarget::TypeOrConstParam(id) => match params.type_or_consts[*id].name() {
Some(name) => write!(f, "{}", name.display(f.db.upcast())),
None => f.write_str("{unnamed}"),
},
};
let check_same_target = |pred1: &WherePredicate, pred2: &WherePredicate| match (pred1, pred2) {
(TypeBound { target: t1, .. }, TypeBound { target: t2, .. }) => t1 == t2,
(Lifetime { target: t1, .. }, Lifetime { target: t2, .. }) => t1 == t2,
(
ForLifetime { lifetimes: l1, target: t1, .. },
ForLifetime { lifetimes: l2, target: t2, .. },
) => l1 == l2 && t1 == t2,
_ => false,
};
let mut iter = params.where_predicates.iter().peekable();
while let Some(pred) = iter.next() {
if matches!(pred, TypeBound { target, .. } if is_unnamed_type_target(params, target)) {
continue;
}
f.write_str("\n ")?;
match pred {
TypeBound { target, bound } => {
write_target(target, f)?;
f.write_str(": ")?;
bound.hir_fmt(f)?;
}
Lifetime { target, bound } => {
let target = target.name.display(f.db.upcast());
let bound = bound.name.display(f.db.upcast());
write!(f, "{target}: {bound}")?;
}
ForLifetime { lifetimes, target, bound } => {
let lifetimes = lifetimes.iter().map(|it| it.display(f.db.upcast())).join(", ");
write!(f, "for<{lifetimes}> ")?;
write_target(target, f)?;
f.write_str(": ")?;
bound.hir_fmt(f)?;
}
}
while let Some(nxt) = iter.next_if(|nxt| check_same_target(pred, nxt)) {
f.write_str(" + ")?;
match nxt {
TypeBound { bound, .. } | ForLifetime { bound, .. } => bound.hir_fmt(f)?,
Lifetime { bound, .. } => write!(f, "{}", bound.name.display(f.db.upcast()))?,
}
}
f.write_str(",")?;
}
Ok(())
}
impl HirDisplay for Const { impl HirDisplay for Const {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
let db = f.db; let db = f.db;
@ -689,17 +730,8 @@ impl HirDisplay for Static {
impl HirDisplay for Trait { impl HirDisplay for Trait {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
write_visibility(self.module(f.db).id, self.visibility(f.db), f)?; write_trait_header(self, f)?;
let data = f.db.trait_data(self.id);
if data.is_unsafe {
f.write_str("unsafe ")?;
}
if data.is_auto {
f.write_str("auto ")?;
}
write!(f, "trait {}", data.name.display(f.db.upcast()))?;
let def_id = GenericDefId::TraitId(self.id); let def_id = GenericDefId::TraitId(self.id);
write_generic_params(def_id, f)?;
let has_where_clause = write_where_clause(def_id, f)?; let has_where_clause = write_where_clause(def_id, f)?;
if let Some(limit) = f.entity_limit { if let Some(limit) = f.entity_limit {
@ -735,6 +767,20 @@ impl HirDisplay for Trait {
} }
} }
fn write_trait_header(trait_: &Trait, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
write_visibility(trait_.module(f.db).id, trait_.visibility(f.db), f)?;
let data = f.db.trait_data(trait_.id);
if data.is_unsafe {
f.write_str("unsafe ")?;
}
if data.is_auto {
f.write_str("auto ")?;
}
write!(f, "trait {}", data.name.display(f.db.upcast()))?;
write_generic_params(GenericDefId::TraitId(trait_.id), f)?;
Ok(())
}
impl HirDisplay for TraitAlias { impl HirDisplay for TraitAlias {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
write_visibility(self.module(f.db).id, self.visibility(f.db), f)?; write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;

View file

@ -8,13 +8,14 @@ use hir_def::{
Lookup, MacroId, VariantId, Lookup, MacroId, VariantId,
}; };
use hir_expand::{HirFileId, InFile}; use hir_expand::{HirFileId, InFile};
use hir_ty::{db::InternedClosure, CallableDefId};
use syntax::ast; use syntax::ast;
use tt::TextRange; use tt::TextRange;
use crate::{ use crate::{
db::HirDatabase, Adt, Const, Enum, ExternCrateDecl, Field, FieldSource, Function, Impl, db::HirDatabase, Adt, Callee, Const, Enum, ExternCrateDecl, Field, FieldSource, Function, Impl,
LifetimeParam, LocalSource, Macro, Module, Static, Struct, Trait, TraitAlias, TypeAlias, Label, LifetimeParam, LocalSource, Macro, Module, Param, SelfParam, Static, Struct, Trait,
TypeOrConstParam, Union, Variant, TraitAlias, TypeAlias, TypeOrConstParam, Union, Variant,
}; };
pub trait HasSource { pub trait HasSource {
@ -25,7 +26,7 @@ pub trait HasSource {
/// ///
/// The current some implementations can return `InFile` instead of `Option<InFile>`. /// The current some implementations can return `InFile` instead of `Option<InFile>`.
/// But we made this method `Option` to support rlib in the future /// But we made this method `Option` to support rlib in the future
/// by https://github.com/rust-lang/rust-analyzer/issues/6913 /// by <https://github.com/rust-lang/rust-analyzer/issues/6913>
fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>>; fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>>;
} }
@ -202,7 +203,7 @@ impl HasSource for TypeOrConstParam {
type Ast = Either<ast::TypeOrConstParam, ast::TraitOrAlias>; type Ast = Either<ast::TypeOrConstParam, ast::TraitOrAlias>;
fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> { fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
let child_source = self.id.parent.child_source(db.upcast()); let child_source = self.id.parent.child_source(db.upcast());
Some(child_source.map(|it| it[self.id.local_id].clone())) child_source.map(|it| it.get(self.id.local_id).cloned()).transpose()
} }
} }
@ -210,7 +211,7 @@ impl HasSource for LifetimeParam {
type Ast = ast::LifetimeParam; type Ast = ast::LifetimeParam;
fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> { fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
let child_source = self.id.parent.child_source(db.upcast()); let child_source = self.id.parent.child_source(db.upcast());
Some(child_source.map(|it| it[self.id.local_id].clone())) child_source.map(|it| it.get(self.id.local_id).cloned()).transpose()
} }
} }
@ -222,6 +223,68 @@ impl HasSource for LocalSource {
} }
} }
impl HasSource for Param {
type Ast = Either<ast::SelfParam, ast::Param>;
fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
match self.func {
Callee::Def(CallableDefId::FunctionId(func)) => {
let InFile { file_id, value } = Function { id: func }.source(db)?;
let params = value.param_list()?;
if let Some(self_param) = params.self_param() {
if let Some(idx) = self.idx.checked_sub(1) {
params.params().nth(idx).map(Either::Right)
} else {
Some(Either::Left(self_param))
}
} else {
params.params().nth(self.idx).map(Either::Right)
}
.map(|value| InFile { file_id, value })
}
Callee::Closure(closure, _) => {
let InternedClosure(owner, expr_id) = db.lookup_intern_closure(closure.into());
let (_, source_map) = db.body_with_source_map(owner);
let ast @ InFile { file_id, value } = source_map.expr_syntax(expr_id).ok()?;
let root = db.parse_or_expand(file_id);
match value.to_node(&root) {
ast::Expr::ClosureExpr(it) => it
.param_list()?
.params()
.nth(self.idx)
.map(Either::Right)
.map(|value| InFile { file_id: ast.file_id, value }),
_ => None,
}
}
_ => None,
}
}
}
impl HasSource for SelfParam {
type Ast = ast::SelfParam;
fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
let InFile { file_id, value } = Function::from(self.func).source(db)?;
value
.param_list()
.and_then(|params| params.self_param())
.map(|value| InFile { file_id, value })
}
}
impl HasSource for Label {
type Ast = ast::Label;
fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
let (_body, source_map) = db.body_with_source_map(self.parent);
let src = source_map.label_syntax(self.label_id);
let root = src.file_syntax(db.upcast());
Some(src.map(|ast| ast.to_node(&root)))
}
}
impl HasSource for ExternCrateDecl { impl HasSource for ExternCrateDecl {
type Ast = ast::ExternCrate; type Ast = ast::ExternCrate;

View file

@ -64,7 +64,6 @@ use hir_expand::{
use hir_ty::{ use hir_ty::{
all_super_traits, autoderef, check_orphan_rules, all_super_traits, autoderef, check_orphan_rules,
consteval::{try_const_usize, unknown_const_as_generic, ConstExt}, consteval::{try_const_usize, unknown_const_as_generic, ConstExt},
db::InternedClosure,
diagnostics::BodyValidationDiagnostic, diagnostics::BodyValidationDiagnostic,
error_lifetime, known_const_to_ast, error_lifetime, known_const_to_ast,
layout::{Layout as TyLayout, RustcEnumVariantIdx, RustcFieldIdx, TagEncoding}, layout::{Layout as TyLayout, RustcEnumVariantIdx, RustcFieldIdx, TagEncoding},
@ -113,7 +112,7 @@ pub use hir_ty::method_resolution::TyFingerprint;
pub use { pub use {
cfg::{CfgAtom, CfgExpr, CfgOptions}, cfg::{CfgAtom, CfgExpr, CfgOptions},
hir_def::{ hir_def::{
attr::{builtin::AttributeTemplate, AttrSourceMap, Attrs, AttrsWithOwner}, attr::{AttrSourceMap, Attrs, AttrsWithOwner},
data::adt::StructKind, data::adt::StructKind,
find_path::PrefixKind, find_path::PrefixKind,
import_map, import_map,
@ -132,6 +131,7 @@ pub use {
attrs::{Attr, AttrId}, attrs::{Attr, AttrId},
change::ChangeWithProcMacros, change::ChangeWithProcMacros,
hygiene::{marks_rev, SyntaxContextExt}, hygiene::{marks_rev, SyntaxContextExt},
inert_attr_macro::AttributeTemplate,
name::{known, Name}, name::{known, Name},
proc_macro::ProcMacros, proc_macro::ProcMacros,
tt, ExpandResult, HirFileId, HirFileIdExt, InFile, InMacroFile, InRealFile, MacroFileId, tt, ExpandResult, HirFileId, HirFileIdExt, InFile, InMacroFile, InRealFile, MacroFileId,
@ -242,7 +242,7 @@ impl Crate {
db: &dyn DefDatabase, db: &dyn DefDatabase,
query: import_map::Query, query: import_map::Query,
) -> impl Iterator<Item = Either<ModuleDef, Macro>> { ) -> impl Iterator<Item = Either<ModuleDef, Macro>> {
let _p = tracing::span!(tracing::Level::INFO, "query_external_importables").entered(); let _p = tracing::info_span!("query_external_importables").entered();
import_map::search_dependencies(db, self.into(), &query).into_iter().map(|item| { import_map::search_dependencies(db, self.into(), &query).into_iter().map(|item| {
match ItemInNs::from(item) { match ItemInNs::from(item) {
ItemInNs::Types(mod_id) | ItemInNs::Values(mod_id) => Either::Left(mod_id), ItemInNs::Types(mod_id) | ItemInNs::Values(mod_id) => Either::Left(mod_id),
@ -551,8 +551,7 @@ impl Module {
acc: &mut Vec<AnyDiagnostic>, acc: &mut Vec<AnyDiagnostic>,
style_lints: bool, style_lints: bool,
) { ) {
let _p = tracing::span!(tracing::Level::INFO, "Module::diagnostics", name = ?self.name(db)) let _p = tracing::info_span!("Module::diagnostics", name = ?self.name(db)).entered();
.entered();
let def_map = self.id.def_map(db.upcast()); let def_map = self.id.def_map(db.upcast());
for diag in def_map.diagnostics() { for diag in def_map.diagnostics() {
if diag.in_module != self.id.local_id { if diag.in_module != self.id.local_id {
@ -1099,6 +1098,35 @@ pub enum FieldSource {
Pos(ast::TupleField), Pos(ast::TupleField),
} }
impl AstNode for FieldSource {
fn can_cast(kind: syntax::SyntaxKind) -> bool
where
Self: Sized,
{
ast::RecordField::can_cast(kind) || ast::TupleField::can_cast(kind)
}
fn cast(syntax: SyntaxNode) -> Option<Self>
where
Self: Sized,
{
if ast::RecordField::can_cast(syntax.kind()) {
<ast::RecordField as AstNode>::cast(syntax).map(FieldSource::Named)
} else if ast::TupleField::can_cast(syntax.kind()) {
<ast::TupleField as AstNode>::cast(syntax).map(FieldSource::Pos)
} else {
None
}
}
fn syntax(&self) -> &SyntaxNode {
match self {
FieldSource::Named(it) => it.syntax(),
FieldSource::Pos(it) => it.syntax(),
}
}
}
impl Field { impl Field {
pub fn name(&self, db: &dyn HirDatabase) -> Name { pub fn name(&self, db: &dyn HirDatabase) -> Name {
self.parent.variant_data(db).fields()[self.id].name.clone() self.parent.variant_data(db).fields()[self.id].name.clone()
@ -1884,6 +1912,14 @@ impl Function {
Type::from_value_def(db, self.id) Type::from_value_def(db, self.id)
} }
pub fn fn_ptr_type(self, db: &dyn HirDatabase) -> Type {
let resolver = self.id.resolver(db.upcast());
let substs = TyBuilder::placeholder_subst(db, self.id);
let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
let ty = TyKind::Function(callable_sig.to_fn_ptr()).intern(Interner);
Type::new_with_resolver_inner(db, &resolver, ty)
}
/// Get this function's return type /// Get this function's return type
pub fn ret_type(self, db: &dyn HirDatabase) -> Type { pub fn ret_type(self, db: &dyn HirDatabase) -> Type {
let resolver = self.id.resolver(db.upcast()); let resolver = self.id.resolver(db.upcast());
@ -2208,47 +2244,9 @@ impl Param {
} }
} }
pub fn pattern_source(&self, db: &dyn HirDatabase) -> Option<ast::Pat> { pub fn pattern_source(self, db: &dyn HirDatabase) -> Option<ast::Pat> {
self.source(db).and_then(|p| p.value.right()?.pat()) self.source(db).and_then(|p| p.value.right()?.pat())
} }
pub fn source(
&self,
db: &dyn HirDatabase,
) -> Option<InFile<Either<ast::SelfParam, ast::Param>>> {
match self.func {
Callee::Def(CallableDefId::FunctionId(func)) => {
let InFile { file_id, value } = Function { id: func }.source(db)?;
let params = value.param_list()?;
if let Some(self_param) = params.self_param() {
if let Some(idx) = self.idx.checked_sub(1) {
params.params().nth(idx).map(Either::Right)
} else {
Some(Either::Left(self_param))
}
} else {
params.params().nth(self.idx).map(Either::Right)
}
.map(|value| InFile { file_id, value })
}
Callee::Closure(closure, _) => {
let InternedClosure(owner, expr_id) = db.lookup_intern_closure(closure.into());
let (_, source_map) = db.body_with_source_map(owner);
let ast @ InFile { file_id, value } = source_map.expr_syntax(expr_id).ok()?;
let root = db.parse_or_expand(file_id);
match value.to_node(&root) {
ast::Expr::ClosureExpr(it) => it
.param_list()?
.params()
.nth(self.idx)
.map(Either::Right)
.map(|value| InFile { file_id: ast.file_id, value }),
_ => None,
}
}
_ => None,
}
}
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@ -2272,14 +2270,6 @@ impl SelfParam {
.unwrap_or(Access::Owned) .unwrap_or(Access::Owned)
} }
pub fn source(&self, db: &dyn HirDatabase) -> Option<InFile<ast::SelfParam>> {
let InFile { file_id, value } = Function::from(self.func).source(db)?;
value
.param_list()
.and_then(|params| params.self_param())
.map(|value| InFile { file_id, value })
}
pub fn parent_fn(&self) -> Function { pub fn parent_fn(&self) -> Function {
Function::from(self.func) Function::from(self.func)
} }
@ -2414,9 +2404,9 @@ impl Const {
let value_signed = let value_signed =
i128::from_le_bytes(mir::pad16(b, matches!(s, Scalar::Int(_)))); i128::from_le_bytes(mir::pad16(b, matches!(s, Scalar::Int(_))));
if value >= 10 { if value >= 10 {
return Ok(format!("{} ({:#X})", value_signed, value)); return Ok(format!("{value_signed} ({value:#X})"));
} else { } else {
return Ok(format!("{}", value_signed)); return Ok(format!("{value_signed}"));
} }
} }
} }
@ -2746,6 +2736,12 @@ impl Macro {
} }
} }
pub fn is_asm_or_global_asm(&self, db: &dyn HirDatabase) -> bool {
matches!(self.id, MacroId::Macro2Id(it) if {
matches!(it.lookup(db.upcast()).expander, MacroExpander::BuiltIn(m) if m.is_asm())
})
}
pub fn is_attr(&self, db: &dyn HirDatabase) -> bool { pub fn is_attr(&self, db: &dyn HirDatabase) -> bool {
matches!(self.kind(db), MacroKind::Attr) matches!(self.kind(db), MacroKind::Attr)
} }
@ -2788,6 +2784,7 @@ impl From<ModuleDef> for ItemInNs {
ModuleDef::Static(_) | ModuleDef::Const(_) | ModuleDef::Function(_) => { ModuleDef::Static(_) | ModuleDef::Const(_) | ModuleDef::Function(_) => {
ItemInNs::Values(module_def) ItemInNs::Values(module_def)
} }
ModuleDef::Macro(it) => ItemInNs::Macros(it),
_ => ItemInNs::Types(module_def), _ => ItemInNs::Types(module_def),
} }
} }
@ -3381,7 +3378,7 @@ impl BuiltinAttr {
} }
fn builtin(name: &str) -> Option<Self> { fn builtin(name: &str) -> Option<Self> {
hir_def::attr::builtin::find_builtin_attr_idx(name) hir_expand::inert_attr_macro::find_builtin_attr_idx(name)
.map(|idx| BuiltinAttr { krate: None, idx: idx as u32 }) .map(|idx| BuiltinAttr { krate: None, idx: idx as u32 })
} }
@ -3389,14 +3386,18 @@ impl BuiltinAttr {
// FIXME: Return a `Name` here // FIXME: Return a `Name` here
match self.krate { match self.krate {
Some(krate) => db.crate_def_map(krate).registered_attrs()[self.idx as usize].clone(), Some(krate) => db.crate_def_map(krate).registered_attrs()[self.idx as usize].clone(),
None => SmolStr::new(hir_def::attr::builtin::INERT_ATTRIBUTES[self.idx as usize].name), None => {
SmolStr::new(hir_expand::inert_attr_macro::INERT_ATTRIBUTES[self.idx as usize].name)
}
} }
} }
pub fn template(&self, _: &dyn HirDatabase) -> Option<AttributeTemplate> { pub fn template(&self, _: &dyn HirDatabase) -> Option<AttributeTemplate> {
match self.krate { match self.krate {
Some(_) => None, Some(_) => None,
None => Some(hir_def::attr::builtin::INERT_ATTRIBUTES[self.idx as usize].template), None => {
Some(hir_expand::inert_attr_macro::INERT_ATTRIBUTES[self.idx as usize].template)
}
} }
} }
} }
@ -3440,13 +3441,6 @@ impl Label {
let body = db.body(self.parent); let body = db.body(self.parent);
body[self.label_id].name.clone() body[self.label_id].name.clone()
} }
pub fn source(self, db: &dyn HirDatabase) -> InFile<ast::Label> {
let (_body, source_map) = db.body_with_source_map(self.parent);
let src = source_map.label_syntax(self.label_id);
let root = src.file_syntax(db.upcast());
src.map(|ast| ast.to_node(&root))
}
} }
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
@ -4612,8 +4606,7 @@ impl Type {
name: Option<&Name>, name: Option<&Name>,
mut callback: impl FnMut(Function) -> Option<T>, mut callback: impl FnMut(Function) -> Option<T>,
) -> Option<T> { ) -> Option<T> {
let _p = let _p = tracing::info_span!("iterate_method_candidates_with_traits").entered();
tracing::span!(tracing::Level::INFO, "iterate_method_candidates_with_traits").entered();
let mut slot = None; let mut slot = None;
self.iterate_method_candidates_dyn( self.iterate_method_candidates_dyn(
@ -4662,8 +4655,7 @@ impl Type {
name: Option<&Name>, name: Option<&Name>,
callback: &mut dyn FnMut(AssocItemId) -> ControlFlow<()>, callback: &mut dyn FnMut(AssocItemId) -> ControlFlow<()>,
) { ) {
let _p = tracing::span!( let _p = tracing::info_span!(
tracing::Level::INFO,
"iterate_method_candidates_dyn", "iterate_method_candidates_dyn",
with_local_impls = traits_in_scope.len(), with_local_impls = traits_in_scope.len(),
traits_in_scope = traits_in_scope.len(), traits_in_scope = traits_in_scope.len(),
@ -4701,7 +4693,7 @@ impl Type {
name: Option<&Name>, name: Option<&Name>,
mut callback: impl FnMut(AssocItem) -> Option<T>, mut callback: impl FnMut(AssocItem) -> Option<T>,
) -> Option<T> { ) -> Option<T> {
let _p = tracing::span!(tracing::Level::INFO, "iterate_path_candidates").entered(); let _p = tracing::info_span!("iterate_path_candidates").entered();
let mut slot = None; let mut slot = None;
self.iterate_path_candidates_dyn( self.iterate_path_candidates_dyn(
db, db,
@ -4768,7 +4760,7 @@ impl Type {
&'a self, &'a self,
db: &'a dyn HirDatabase, db: &'a dyn HirDatabase,
) -> impl Iterator<Item = Trait> + 'a { ) -> impl Iterator<Item = Trait> + 'a {
let _p = tracing::span!(tracing::Level::INFO, "applicable_inherent_traits").entered(); let _p = tracing::info_span!("applicable_inherent_traits").entered();
self.autoderef_(db) self.autoderef_(db)
.filter_map(|ty| ty.dyn_trait()) .filter_map(|ty| ty.dyn_trait())
.flat_map(move |dyn_trait_id| hir_ty::all_super_traits(db.upcast(), dyn_trait_id)) .flat_map(move |dyn_trait_id| hir_ty::all_super_traits(db.upcast(), dyn_trait_id))
@ -4776,7 +4768,7 @@ impl Type {
} }
pub fn env_traits<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Trait> + 'a { pub fn env_traits<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Trait> + 'a {
let _p = tracing::span!(tracing::Level::INFO, "env_traits").entered(); let _p = tracing::info_span!("env_traits").entered();
self.autoderef_(db) self.autoderef_(db)
.filter(|ty| matches!(ty.kind(Interner), TyKind::Placeholder(_))) .filter(|ty| matches!(ty.kind(Interner), TyKind::Placeholder(_)))
.flat_map(|ty| { .flat_map(|ty| {

View file

@ -19,7 +19,11 @@ use hir_def::{
AsMacroCall, DefWithBodyId, FunctionId, MacroId, TraitId, VariantId, AsMacroCall, DefWithBodyId, FunctionId, MacroId, TraitId, VariantId,
}; };
use hir_expand::{ use hir_expand::{
attrs::collect_attrs, db::ExpandDatabase, files::InRealFile, name::AsName, ExpansionInfo, attrs::collect_attrs,
builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander},
db::ExpandDatabase,
files::InRealFile,
name::AsName,
InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt, InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt,
}; };
use itertools::Itertools; use itertools::Itertools;
@ -132,9 +136,6 @@ pub struct SemanticsImpl<'db> {
s2d_cache: RefCell<SourceToDefCache>, s2d_cache: RefCell<SourceToDefCache>,
/// Rootnode to HirFileId cache /// Rootnode to HirFileId cache
root_to_file_cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>, root_to_file_cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
// These 2 caches are mainly useful for semantic highlighting as nothing else descends a lot of tokens
// So we might wanna move them out into something specific for semantic highlighting
expansion_info_cache: RefCell<FxHashMap<MacroFileId, ExpansionInfo>>,
/// MacroCall to its expansion's MacroFileId cache /// MacroCall to its expansion's MacroFileId cache
macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, MacroFileId>>, macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, MacroFileId>>,
} }
@ -295,7 +296,6 @@ impl<'db> SemanticsImpl<'db> {
db, db,
s2d_cache: Default::default(), s2d_cache: Default::default(),
root_to_file_cache: Default::default(), root_to_file_cache: Default::default(),
expansion_info_cache: Default::default(),
macro_call_cache: Default::default(), macro_call_cache: Default::default(),
} }
} }
@ -314,7 +314,58 @@ impl<'db> SemanticsImpl<'db> {
pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> { pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
let sa = self.analyze_no_infer(macro_call.syntax())?; let sa = self.analyze_no_infer(macro_call.syntax())?;
let file_id = sa.expand(self.db, InFile::new(sa.file_id, macro_call))?;
let macro_call = InFile::new(sa.file_id, macro_call);
let file_id = if let Some(call) =
<ast::MacroCall as crate::semantics::ToDef>::to_def(self, macro_call)
{
call.as_macro_file()
} else {
sa.expand(self.db, macro_call)?
};
let node = self.parse_or_expand(file_id.into());
Some(node)
}
/// Expands the macro if it isn't one of the built-in ones that expand to custom syntax or dummy
/// expansions.
pub fn expand_allowed_builtins(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
let sa = self.analyze_no_infer(macro_call.syntax())?;
let macro_call = InFile::new(sa.file_id, macro_call);
let file_id = if let Some(call) =
<ast::MacroCall as crate::semantics::ToDef>::to_def(self, macro_call)
{
call.as_macro_file()
} else {
sa.expand(self.db, macro_call)?
};
let macro_call = self.db.lookup_intern_macro_call(file_id.macro_call_id);
let skip = matches!(
macro_call.def.kind,
hir_expand::MacroDefKind::BuiltIn(
_,
BuiltinFnLikeExpander::Column
| BuiltinFnLikeExpander::File
| BuiltinFnLikeExpander::ModulePath
| BuiltinFnLikeExpander::Asm
| BuiltinFnLikeExpander::LlvmAsm
| BuiltinFnLikeExpander::GlobalAsm
| BuiltinFnLikeExpander::LogSyntax
| BuiltinFnLikeExpander::TraceMacros
| BuiltinFnLikeExpander::FormatArgs
| BuiltinFnLikeExpander::FormatArgsNl
| BuiltinFnLikeExpander::ConstFormatArgs,
) | hir_expand::MacroDefKind::BuiltInEager(_, EagerExpander::CompileError)
);
if skip {
// these macros expand to custom builtin syntax and/or dummy things, no point in
// showing these to the user
return None;
}
let node = self.parse_or_expand(file_id.into()); let node = self.parse_or_expand(file_id.into());
Some(node) Some(node)
} }
@ -322,7 +373,7 @@ impl<'db> SemanticsImpl<'db> {
/// If `item` has an attribute macro attached to it, expands it. /// If `item` has an attribute macro attached to it, expands it.
pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<SyntaxNode> { pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<SyntaxNode> {
let src = self.wrap_node_infile(item.clone()); let src = self.wrap_node_infile(item.clone());
let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src))?; let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src.as_ref()))?;
Some(self.parse_or_expand(macro_call_id.as_file())) Some(self.parse_or_expand(macro_call_id.as_file()))
} }
@ -341,9 +392,7 @@ impl<'db> SemanticsImpl<'db> {
Some( Some(
calls calls
.into_iter() .into_iter()
.map(|call| { .map(|call| macro_call_to_macro_id(ctx, call?).map(|id| Macro { id }))
macro_call_to_macro_id(ctx, self.db.upcast(), call?).map(|id| Macro { id })
})
.collect(), .collect(),
) )
}) })
@ -403,7 +452,7 @@ impl<'db> SemanticsImpl<'db> {
pub fn is_attr_macro_call(&self, item: &ast::Item) -> bool { pub fn is_attr_macro_call(&self, item: &ast::Item) -> bool {
let file_id = self.find_file(item.syntax()).file_id; let file_id = self.find_file(item.syntax()).file_id;
let src = InFile::new(file_id, item.clone()); let src = InFile::new(file_id, item);
self.with_ctx(|ctx| ctx.item_to_macro_call(src).is_some()) self.with_ctx(|ctx| ctx.item_to_macro_call(src).is_some())
} }
@ -420,7 +469,7 @@ impl<'db> SemanticsImpl<'db> {
let macro_call = InFile::new(file_id, actual_macro_call); let macro_call = InFile::new(file_id, actual_macro_call);
let krate = resolver.krate(); let krate = resolver.krate();
let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| { let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| {
resolver.resolve_path_as_macro_def(self.db.upcast(), &path, Some(MacroSubNs::Bang)) resolver.resolve_path_as_macro_def(self.db.upcast(), path, Some(MacroSubNs::Bang))
})?; })?;
hir_expand::db::expand_speculative( hir_expand::db::expand_speculative(
self.db.upcast(), self.db.upcast(),
@ -453,7 +502,7 @@ impl<'db> SemanticsImpl<'db> {
token_to_map: SyntaxToken, token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, SyntaxToken)> { ) -> Option<(SyntaxNode, SyntaxToken)> {
let macro_call = self.wrap_node_infile(actual_macro_call.clone()); let macro_call = self.wrap_node_infile(actual_macro_call.clone());
let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(macro_call))?; let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(macro_call.as_ref()))?;
hir_expand::db::expand_speculative( hir_expand::db::expand_speculative(
self.db.upcast(), self.db.upcast(),
macro_call_id, macro_call_id,
@ -705,8 +754,6 @@ impl<'db> SemanticsImpl<'db> {
let parent = token.parent()?; let parent = token.parent()?;
let file_id = self.find_file(&parent).file_id.file_id()?; let file_id = self.find_file(&parent).file_id.file_id()?;
let mut cache = self.expansion_info_cache.borrow_mut();
// iterate related crates and find all include! invocations that include_file_id matches // iterate related crates and find all include! invocations that include_file_id matches
for (invoc, _) in self for (invoc, _) in self
.db .db
@ -716,18 +763,32 @@ impl<'db> SemanticsImpl<'db> {
.filter(|&(_, include_file_id)| include_file_id == file_id) .filter(|&(_, include_file_id)| include_file_id == file_id)
{ {
let macro_file = invoc.as_macro_file(); let macro_file = invoc.as_macro_file();
let expansion_info = cache.entry(macro_file).or_insert_with(|| { let expansion_info = {
let exp_info = macro_file.expansion_info(self.db.upcast()); self.with_ctx(|ctx| {
ctx.cache
.expansion_info_cache
.entry(macro_file)
.or_insert_with(|| {
let exp_info = macro_file.expansion_info(self.db.upcast());
let InMacroFile { file_id, value } = exp_info.expanded(); let InMacroFile { file_id, value } = exp_info.expanded();
self.cache(value, file_id.into()); if let InFile { file_id, value: Some(value) } = exp_info.arg() {
self.cache(value.ancestors().last().unwrap(), file_id);
}
self.cache(value, file_id.into());
exp_info exp_info
}); })
.clone()
})
};
// FIXME: uncached parse // FIXME: uncached parse
// Create the source analyzer for the macro call scope // Create the source analyzer for the macro call scope
let Some(sa) = self.analyze_no_infer(&self.parse_or_expand(expansion_info.call_file())) let Some(sa) = expansion_info
.arg()
.value
.and_then(|it| self.analyze_no_infer(&it.ancestors().last().unwrap()))
else { else {
continue; continue;
}; };
@ -758,7 +819,7 @@ impl<'db> SemanticsImpl<'db> {
mut token: SyntaxToken, mut token: SyntaxToken,
f: &mut dyn FnMut(InFile<SyntaxToken>) -> ControlFlow<()>, f: &mut dyn FnMut(InFile<SyntaxToken>) -> ControlFlow<()>,
) { ) {
let _p = tracing::span!(tracing::Level::INFO, "descend_into_macros_impl").entered(); let _p = tracing::info_span!("descend_into_macros_impl").entered();
let (sa, span, file_id) = let (sa, span, file_id) =
match token.parent().and_then(|parent| self.analyze_no_infer(&parent)) { match token.parent().and_then(|parent| self.analyze_no_infer(&parent)) {
Some(sa) => match sa.file_id.file_id() { Some(sa) => match sa.file_id.file_id() {
@ -785,23 +846,28 @@ impl<'db> SemanticsImpl<'db> {
} }
}; };
let mut cache = self.expansion_info_cache.borrow_mut(); let mut m_cache = self.macro_call_cache.borrow_mut();
let mut mcache = self.macro_call_cache.borrow_mut();
let def_map = sa.resolver.def_map(); let def_map = sa.resolver.def_map();
let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(file_id, smallvec![token])]; let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(file_id, smallvec![token])];
let mut process_expansion_for_token = |stack: &mut Vec<_>, macro_file| { let process_expansion_for_token = |stack: &mut Vec<_>, macro_file| {
let exp_info = cache.entry(macro_file).or_insert_with(|| { let InMacroFile { file_id, value: mapped_tokens } = self.with_ctx(|ctx| {
let exp_info = macro_file.expansion_info(self.db.upcast()); Some(
ctx.cache
.expansion_info_cache
.entry(macro_file)
.or_insert_with(|| {
let exp_info = macro_file.expansion_info(self.db.upcast());
let InMacroFile { file_id, value } = exp_info.expanded(); let InMacroFile { file_id, value } = exp_info.expanded();
self.cache(value, file_id.into()); self.cache(value, file_id.into());
exp_info exp_info
}); })
.map_range_down(span)?
let InMacroFile { file_id, value: mapped_tokens } = exp_info.map_range_down(span)?; .map(SmallVec::<[_; 2]>::from_iter),
let mapped_tokens: SmallVec<[_; 2]> = mapped_tokens.collect(); )
})?;
// we have found a mapping for the token if the vec is non-empty // we have found a mapping for the token if the vec is non-empty
let res = mapped_tokens.is_empty().not().then_some(()); let res = mapped_tokens.is_empty().not().then_some(());
@ -818,10 +884,7 @@ impl<'db> SemanticsImpl<'db> {
token.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| { token.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| {
// Don't force populate the dyn cache for items that don't have an attribute anyways // Don't force populate the dyn cache for items that don't have an attribute anyways
item.attrs().next()?; item.attrs().next()?;
Some(( Some((ctx.item_to_macro_call(InFile::new(file_id, &item))?, item))
ctx.item_to_macro_call(InFile::new(file_id, item.clone()))?,
item,
))
}) })
}); });
if let Some((call_id, item)) = containing_attribute_macro_call { if let Some((call_id, item)) = containing_attribute_macro_call {
@ -874,13 +937,20 @@ impl<'db> SemanticsImpl<'db> {
return None; return None;
} }
let macro_call = tt.syntax().parent().and_then(ast::MacroCall::cast)?; let macro_call = tt.syntax().parent().and_then(ast::MacroCall::cast)?;
let mcall: hir_expand::files::InFileWrapper<HirFileId, ast::MacroCall> = let mcall = InFile::new(file_id, macro_call);
InFile::new(file_id, macro_call); let file_id = match m_cache.get(&mcall) {
let file_id = match mcache.get(&mcall) {
Some(&it) => it, Some(&it) => it,
None => { None => {
let it = sa.expand(self.db, mcall.as_ref())?; let it = if let Some(call) =
mcache.insert(mcall, it); <ast::MacroCall as crate::semantics::ToDef>::to_def(
self,
mcall.as_ref(),
) {
call.as_macro_file()
} else {
sa.expand(self.db, mcall.as_ref())?
};
m_cache.insert(mcall, it);
it it
} }
}; };
@ -953,6 +1023,13 @@ impl<'db> SemanticsImpl<'db> {
let helpers = let helpers =
def_map.derive_helpers_in_scope(InFile::new(file_id, id))?; def_map.derive_helpers_in_scope(InFile::new(file_id, id))?;
if !helpers.is_empty() {
let text_range = attr.syntax().text_range();
// remove any other token in this macro input, all their mappings are the
// same as this
tokens.retain(|t| !text_range.contains_range(t.text_range()));
}
let mut res = None; let mut res = None;
for (.., derive) in for (.., derive) in
helpers.iter().filter(|(helper, ..)| *helper == attr_name) helpers.iter().filter(|(helper, ..)| *helper == attr_name)
@ -1056,16 +1133,20 @@ impl<'db> SemanticsImpl<'db> {
node: SyntaxNode, node: SyntaxNode,
) -> impl Iterator<Item = SyntaxNode> + Clone + '_ { ) -> impl Iterator<Item = SyntaxNode> + Clone + '_ {
let node = self.find_file(&node); let node = self.find_file(&node);
let db = self.db.upcast();
iter::successors(Some(node.cloned()), move |&InFile { file_id, ref value }| { iter::successors(Some(node.cloned()), move |&InFile { file_id, ref value }| {
match value.parent() { match value.parent() {
Some(parent) => Some(InFile::new(file_id, parent)), Some(parent) => Some(InFile::new(file_id, parent)),
None => { None => {
let call_node = file_id.macro_file()?.call_node(db); let macro_file = file_id.macro_file()?;
// cache the node
// FIXME: uncached parse self.with_ctx(|ctx| {
self.parse_or_expand(call_node.file_id); let expansion_info = ctx
Some(call_node) .cache
.expansion_info_cache
.entry(macro_file)
.or_insert_with(|| macro_file.expansion_info(self.db.upcast()));
expansion_info.arg().map(|node| node?.parent()).transpose()
})
} }
} }
}) })
@ -1090,7 +1171,7 @@ impl<'db> SemanticsImpl<'db> {
.find(|tp| tp.lifetime().as_ref().map(|lt| lt.text()).as_ref() == Some(&text)) .find(|tp| tp.lifetime().as_ref().map(|lt| lt.text()).as_ref() == Some(&text))
})?; })?;
let src = self.wrap_node_infile(lifetime_param); let src = self.wrap_node_infile(lifetime_param);
ToDef::to_def(self, src) ToDef::to_def(self, src.as_ref())
} }
pub fn resolve_label(&self, lifetime: &ast::Lifetime) -> Option<Label> { pub fn resolve_label(&self, lifetime: &ast::Lifetime) -> Option<Label> {
@ -1112,7 +1193,7 @@ impl<'db> SemanticsImpl<'db> {
}) })
})?; })?;
let src = self.wrap_node_infile(label); let src = self.wrap_node_infile(label);
ToDef::to_def(self, src) ToDef::to_def(self, src.as_ref())
} }
pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type> { pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type> {
@ -1275,9 +1356,15 @@ impl<'db> SemanticsImpl<'db> {
} }
pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<Macro> { pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<Macro> {
let sa = self.analyze(macro_call.syntax())?;
let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call); let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call);
sa.resolve_macro_call(self.db, macro_call) self.with_ctx(|ctx| {
ctx.macro_call_to_macro_call(macro_call)
.and_then(|call| macro_call_to_macro_id(ctx, call))
.map(Into::into)
})
.or_else(|| {
self.analyze(macro_call.value.syntax())?.resolve_macro_call(self.db, macro_call)
})
} }
pub fn is_proc_macro_call(&self, macro_call: &ast::MacroCall) -> bool { pub fn is_proc_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
@ -1297,19 +1384,24 @@ impl<'db> SemanticsImpl<'db> {
} }
pub fn is_unsafe_macro_call(&self, macro_call: &ast::MacroCall) -> bool { pub fn is_unsafe_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
let sa = match self.analyze(macro_call.syntax()) { let Some(mac) = self.resolve_macro_call(macro_call) else { return false };
Some(it) => it, if mac.is_asm_or_global_asm(self.db) {
None => return false, return true;
}; }
let Some(sa) = self.analyze(macro_call.syntax()) else { return false };
let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call); let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call);
sa.is_unsafe_macro_call(self.db, macro_call) match macro_call.map(|it| it.syntax().parent().and_then(ast::MacroExpr::cast)).transpose() {
Some(it) => sa.is_unsafe_macro_call_expr(self.db, it.as_ref()),
None => false,
}
} }
pub fn resolve_attr_macro_call(&self, item: &ast::Item) -> Option<Macro> { pub fn resolve_attr_macro_call(&self, item: &ast::Item) -> Option<Macro> {
let item_in_file = self.wrap_node_infile(item.clone()); let item_in_file = self.wrap_node_infile(item.clone());
let id = self.with_ctx(|ctx| { let id = self.with_ctx(|ctx| {
let macro_call_id = ctx.item_to_macro_call(item_in_file)?; let macro_call_id = ctx.item_to_macro_call(item_in_file.as_ref())?;
macro_call_to_macro_id(ctx, self.db.upcast(), macro_call_id) macro_call_to_macro_id(ctx, macro_call_id)
})?; })?;
Some(Macro { id }) Some(Macro { id })
} }
@ -1339,18 +1431,17 @@ impl<'db> SemanticsImpl<'db> {
} }
fn with_ctx<F: FnOnce(&mut SourceToDefCtx<'_, '_>) -> T, T>(&self, f: F) -> T { fn with_ctx<F: FnOnce(&mut SourceToDefCtx<'_, '_>) -> T, T>(&self, f: F) -> T {
let mut cache = self.s2d_cache.borrow_mut(); let mut ctx = SourceToDefCtx { db: self.db, cache: &mut self.s2d_cache.borrow_mut() };
let mut ctx = SourceToDefCtx { db: self.db, dynmap_cache: &mut cache };
f(&mut ctx) f(&mut ctx)
} }
pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> { pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> {
let src = self.find_file(src.syntax()).with_value(src).cloned(); let src = self.find_file(src.syntax()).with_value(src);
T::to_def(self, src) T::to_def(self, src)
} }
fn file_to_module_defs(&self, file: FileId) -> impl Iterator<Item = Module> { fn file_to_module_defs(&self, file: FileId) -> impl Iterator<Item = Module> {
self.with_ctx(|ctx| ctx.file_to_def(file)).into_iter().map(Module::from) self.with_ctx(|ctx| ctx.file_to_def(file).to_owned()).into_iter().map(Module::from)
} }
pub fn scope(&self, node: &SyntaxNode) -> Option<SemanticsScope<'db>> { pub fn scope(&self, node: &SyntaxNode) -> Option<SemanticsScope<'db>> {
@ -1380,6 +1471,7 @@ impl<'db> SemanticsImpl<'db> {
where where
Def::Ast: AstNode, Def::Ast: AstNode,
{ {
// FIXME: source call should go through the parse cache
let res = def.source(self.db)?; let res = def.source(self.db)?;
self.cache(find_root(res.value.syntax()), res.file_id); self.cache(find_root(res.value.syntax()), res.file_id);
Some(res) Some(res)
@ -1409,7 +1501,7 @@ impl<'db> SemanticsImpl<'db> {
offset: Option<TextSize>, offset: Option<TextSize>,
infer_body: bool, infer_body: bool,
) -> Option<SourceAnalyzer> { ) -> Option<SourceAnalyzer> {
let _p = tracing::span!(tracing::Level::INFO, "SemanticsImpl::analyze_impl").entered(); let _p = tracing::info_span!("SemanticsImpl::analyze_impl").entered();
let node = self.find_file(node); let node = self.find_file(node);
let container = self.with_ctx(|ctx| ctx.find_container(node))?; let container = self.with_ctx(|ctx| ctx.find_container(node))?;
@ -1438,7 +1530,7 @@ impl<'db> SemanticsImpl<'db> {
assert!(root_node.parent().is_none()); assert!(root_node.parent().is_none());
let mut cache = self.root_to_file_cache.borrow_mut(); let mut cache = self.root_to_file_cache.borrow_mut();
let prev = cache.insert(root_node, file_id); let prev = cache.insert(root_node, file_id);
assert!(prev.is_none() || prev == Some(file_id)) assert!(prev.is_none() || prev == Some(file_id));
} }
pub fn assert_contains_node(&self, node: &SyntaxNode) { pub fn assert_contains_node(&self, node: &SyntaxNode) {
@ -1613,35 +1705,59 @@ impl<'db> SemanticsImpl<'db> {
fn macro_call_to_macro_id( fn macro_call_to_macro_id(
ctx: &mut SourceToDefCtx<'_, '_>, ctx: &mut SourceToDefCtx<'_, '_>,
db: &dyn ExpandDatabase,
macro_call_id: MacroCallId, macro_call_id: MacroCallId,
) -> Option<MacroId> { ) -> Option<MacroId> {
use span::HirFileIdRepr;
let db: &dyn ExpandDatabase = ctx.db.upcast();
let loc = db.lookup_intern_macro_call(macro_call_id); let loc = db.lookup_intern_macro_call(macro_call_id);
match loc.def.kind {
hir_expand::MacroDefKind::Declarative(it) match loc.def.ast_id() {
| hir_expand::MacroDefKind::BuiltIn(_, it) Either::Left(it) => {
| hir_expand::MacroDefKind::BuiltInAttr(_, it) let node = match it.file_id.repr() {
| hir_expand::MacroDefKind::BuiltInDerive(_, it) HirFileIdRepr::FileId(file_id) => {
| hir_expand::MacroDefKind::BuiltInEager(_, it) => { it.to_ptr(db).to_node(&db.parse(file_id).syntax_node())
ctx.macro_to_def(InFile::new(it.file_id, it.to_node(db))) }
HirFileIdRepr::MacroFile(macro_file) => {
let expansion_info = ctx
.cache
.expansion_info_cache
.entry(macro_file)
.or_insert_with(|| macro_file.expansion_info(ctx.db.upcast()));
it.to_ptr(db).to_node(&expansion_info.expanded().value)
}
};
ctx.macro_to_def(InFile::new(it.file_id, &node))
} }
hir_expand::MacroDefKind::ProcMacro(_, _, it) => { Either::Right(it) => {
ctx.proc_macro_to_def(InFile::new(it.file_id, it.to_node(db))) let node = match it.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
it.to_ptr(db).to_node(&db.parse(file_id).syntax_node())
}
HirFileIdRepr::MacroFile(macro_file) => {
let expansion_info = ctx
.cache
.expansion_info_cache
.entry(macro_file)
.or_insert_with(|| macro_file.expansion_info(ctx.db.upcast()));
it.to_ptr(db).to_node(&expansion_info.expanded().value)
}
};
ctx.proc_macro_to_def(InFile::new(it.file_id, &node))
} }
} }
} }
pub trait ToDef: AstNode + Clone { pub trait ToDef: AstNode + Clone {
type Def; type Def;
fn to_def(sema: &SemanticsImpl<'_>, src: InFile<&Self>) -> Option<Self::Def>;
fn to_def(sema: &SemanticsImpl<'_>, src: InFile<Self>) -> Option<Self::Def>;
} }
macro_rules! to_def_impls { macro_rules! to_def_impls {
($(($def:path, $ast:path, $meth:ident)),* ,) => {$( ($(($def:path, $ast:path, $meth:ident)),* ,) => {$(
impl ToDef for $ast { impl ToDef for $ast {
type Def = $def; type Def = $def;
fn to_def(sema: &SemanticsImpl<'_>, src: InFile<Self>) -> Option<Self::Def> { fn to_def(sema: &SemanticsImpl<'_>, src: InFile<&Self>) -> Option<Self::Def> {
sema.with_ctx(|ctx| ctx.$meth(src)).map(<$def>::from) sema.with_ctx(|ctx| ctx.$meth(src)).map(<$def>::from)
} }
} }
@ -1674,6 +1790,7 @@ to_def_impls![
(crate::Label, ast::Label, label_to_def), (crate::Label, ast::Label, label_to_def),
(crate::Adt, ast::Adt, adt_to_def), (crate::Adt, ast::Adt, adt_to_def),
(crate::ExternCrateDecl, ast::ExternCrate, extern_crate_to_def), (crate::ExternCrateDecl, ast::ExternCrate, extern_crate_to_def),
(MacroCallId, ast::MacroCall, macro_call_to_macro_call),
]; ];
fn find_root(node: &SyntaxNode) -> SyntaxNode { fn find_root(node: &SyntaxNode) -> SyntaxNode {

View file

@ -26,19 +26,19 @@
//! //!
//! The actual algorithm to resolve syntax to def is curious in two aspects: //! The actual algorithm to resolve syntax to def is curious in two aspects:
//! //!
//! * It is recursive //! * It is recursive
//! * It uses the inverse algorithm (what is the syntax for this def?) //! * It uses the inverse algorithm (what is the syntax for this def?)
//! //!
//! Specifically, the algorithm goes like this: //! Specifically, the algorithm goes like this:
//! //!
//! 1. Find the syntactic container for the syntax. For example, field's //! 1. Find the syntactic container for the syntax. For example, field's
//! container is the struct, and structs container is a module. //! container is the struct, and structs container is a module.
//! 2. Recursively get the def corresponding to container. //! 2. Recursively get the def corresponding to container.
//! 3. Ask the container def for all child defs. These child defs contain //! 3. Ask the container def for all child defs. These child defs contain
//! the answer and answer's siblings. //! the answer and answer's siblings.
//! 4. For each child def, ask for it's source. //! 4. For each child def, ask for it's source.
//! 5. The child def whose source is the syntax node we've started with //! 5. The child def whose source is the syntax node we've started with
//! is the answer. //! is the answer.
//! //!
//! It's interesting that both Roslyn and Kotlin contain very similar code //! It's interesting that both Roslyn and Kotlin contain very similar code
//! shape. //! shape.
@ -98,56 +98,68 @@ use hir_def::{
FieldId, FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId, MacroId, ModuleId, FieldId, FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId, MacroId, ModuleId,
StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeParamId, UnionId, UseId, VariantId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeParamId, UnionId, UseId, VariantId,
}; };
use hir_expand::{attrs::AttrId, name::AsName, HirFileId, HirFileIdExt, MacroCallId}; use hir_expand::{
attrs::AttrId, name::AsName, ExpansionInfo, HirFileId, HirFileIdExt, MacroCallId,
};
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use smallvec::SmallVec; use smallvec::SmallVec;
use span::MacroFileId;
use stdx::impl_from; use stdx::impl_from;
use syntax::{ use syntax::{
ast::{self, HasName}, ast::{self, HasName},
AstNode, SyntaxNode, AstNode, AstPtr, SyntaxNode,
}; };
use crate::{db::HirDatabase, InFile}; use crate::{db::HirDatabase, InFile};
pub(super) type SourceToDefCache = FxHashMap<(ChildContainer, HirFileId), DynMap>; #[derive(Default)]
pub(super) struct SourceToDefCache {
pub(super) dynmap_cache: FxHashMap<(ChildContainer, HirFileId), DynMap>,
pub(super) expansion_info_cache: FxHashMap<MacroFileId, ExpansionInfo>,
pub(super) file_to_def_cache: FxHashMap<FileId, SmallVec<[ModuleId; 1]>>,
}
pub(super) struct SourceToDefCtx<'a, 'b> { pub(super) struct SourceToDefCtx<'db, 'cache> {
pub(super) db: &'b dyn HirDatabase, pub(super) db: &'db dyn HirDatabase,
pub(super) dynmap_cache: &'a mut SourceToDefCache, pub(super) cache: &'cache mut SourceToDefCache,
} }
impl SourceToDefCtx<'_, '_> { impl SourceToDefCtx<'_, '_> {
pub(super) fn file_to_def(&self, file: FileId) -> SmallVec<[ModuleId; 1]> { pub(super) fn file_to_def(&mut self, file: FileId) -> &SmallVec<[ModuleId; 1]> {
let _p = tracing::span!(tracing::Level::INFO, "SourceToDefCtx::file_to_def").entered(); let _p = tracing::info_span!("SourceToDefCtx::file_to_def").entered();
let mut mods = SmallVec::new(); self.cache.file_to_def_cache.entry(file).or_insert_with(|| {
for &crate_id in self.db.relevant_crates(file).iter() { let mut mods = SmallVec::new();
// Note: `mod` declarations in block modules cannot be supported here for &crate_id in self.db.relevant_crates(file).iter() {
let crate_def_map = self.db.crate_def_map(crate_id); // Note: `mod` declarations in block modules cannot be supported here
mods.extend( let crate_def_map = self.db.crate_def_map(crate_id);
crate_def_map mods.extend(
.modules_for_file(file) crate_def_map
.map(|local_id| crate_def_map.module_id(local_id)), .modules_for_file(file)
) .map(|local_id| crate_def_map.module_id(local_id)),
} )
if mods.is_empty() { }
// FIXME: detached file if mods.is_empty() {
} // FIXME: detached file
mods }
mods
})
} }
pub(super) fn module_to_def(&mut self, src: InFile<ast::Module>) -> Option<ModuleId> { pub(super) fn module_to_def(&mut self, src: InFile<&ast::Module>) -> Option<ModuleId> {
let _p = tracing::span!(tracing::Level::INFO, "module_to_def").entered(); let _p = tracing::info_span!("module_to_def").entered();
let parent_declaration = src let parent_declaration = self
.syntax() .ancestors_with_macros(src.syntax_ref(), |_, ancestor| {
.ancestors_with_macros(self.db.upcast()) ancestor.map(Either::<ast::Module, ast::BlockExpr>::cast).transpose()
.find_map(|it| it.map(Either::<ast::Module, ast::BlockExpr>::cast).transpose()) })
.map(|it| it.transpose()); .map(|it| it.transpose());
let parent_module = match parent_declaration { let parent_module = match parent_declaration {
Some(Either::Right(parent_block)) => self Some(Either::Right(parent_block)) => self
.block_to_def(parent_block) .block_to_def(parent_block.as_ref())
.map(|block| self.db.block_def_map(block).root_module_id()), .map(|block| self.db.block_def_map(block).root_module_id()),
Some(Either::Left(parent_declaration)) => self.module_to_def(parent_declaration), Some(Either::Left(parent_declaration)) => {
self.module_to_def(parent_declaration.as_ref())
}
None => { None => {
let file_id = src.file_id.original_file(self.db.upcast()); let file_id = src.file_id.original_file(self.db.upcast());
self.file_to_def(file_id).first().copied() self.file_to_def(file_id).first().copied()
@ -160,73 +172,79 @@ impl SourceToDefCtx<'_, '_> {
Some(def_map.module_id(child_id)) Some(def_map.module_id(child_id))
} }
pub(super) fn source_file_to_def(&self, src: InFile<ast::SourceFile>) -> Option<ModuleId> { pub(super) fn source_file_to_def(&mut self, src: InFile<&ast::SourceFile>) -> Option<ModuleId> {
let _p = tracing::span!(tracing::Level::INFO, "source_file_to_def").entered(); let _p = tracing::info_span!("source_file_to_def").entered();
let file_id = src.file_id.original_file(self.db.upcast()); let file_id = src.file_id.original_file(self.db.upcast());
self.file_to_def(file_id).first().copied() self.file_to_def(file_id).first().copied()
} }
pub(super) fn trait_to_def(&mut self, src: InFile<ast::Trait>) -> Option<TraitId> { pub(super) fn trait_to_def(&mut self, src: InFile<&ast::Trait>) -> Option<TraitId> {
self.to_def(src, keys::TRAIT) self.to_def(src, keys::TRAIT)
} }
pub(super) fn trait_alias_to_def( pub(super) fn trait_alias_to_def(
&mut self, &mut self,
src: InFile<ast::TraitAlias>, src: InFile<&ast::TraitAlias>,
) -> Option<TraitAliasId> { ) -> Option<TraitAliasId> {
self.to_def(src, keys::TRAIT_ALIAS) self.to_def(src, keys::TRAIT_ALIAS)
} }
pub(super) fn impl_to_def(&mut self, src: InFile<ast::Impl>) -> Option<ImplId> { pub(super) fn impl_to_def(&mut self, src: InFile<&ast::Impl>) -> Option<ImplId> {
self.to_def(src, keys::IMPL) self.to_def(src, keys::IMPL)
} }
pub(super) fn fn_to_def(&mut self, src: InFile<ast::Fn>) -> Option<FunctionId> { pub(super) fn fn_to_def(&mut self, src: InFile<&ast::Fn>) -> Option<FunctionId> {
self.to_def(src, keys::FUNCTION) self.to_def(src, keys::FUNCTION)
} }
pub(super) fn struct_to_def(&mut self, src: InFile<ast::Struct>) -> Option<StructId> { pub(super) fn struct_to_def(&mut self, src: InFile<&ast::Struct>) -> Option<StructId> {
self.to_def(src, keys::STRUCT) self.to_def(src, keys::STRUCT)
} }
pub(super) fn enum_to_def(&mut self, src: InFile<ast::Enum>) -> Option<EnumId> { pub(super) fn enum_to_def(&mut self, src: InFile<&ast::Enum>) -> Option<EnumId> {
self.to_def(src, keys::ENUM) self.to_def(src, keys::ENUM)
} }
pub(super) fn union_to_def(&mut self, src: InFile<ast::Union>) -> Option<UnionId> { pub(super) fn union_to_def(&mut self, src: InFile<&ast::Union>) -> Option<UnionId> {
self.to_def(src, keys::UNION) self.to_def(src, keys::UNION)
} }
pub(super) fn static_to_def(&mut self, src: InFile<ast::Static>) -> Option<StaticId> { pub(super) fn static_to_def(&mut self, src: InFile<&ast::Static>) -> Option<StaticId> {
self.to_def(src, keys::STATIC) self.to_def(src, keys::STATIC)
} }
pub(super) fn const_to_def(&mut self, src: InFile<ast::Const>) -> Option<ConstId> { pub(super) fn const_to_def(&mut self, src: InFile<&ast::Const>) -> Option<ConstId> {
self.to_def(src, keys::CONST) self.to_def(src, keys::CONST)
} }
pub(super) fn type_alias_to_def(&mut self, src: InFile<ast::TypeAlias>) -> Option<TypeAliasId> { pub(super) fn type_alias_to_def(
&mut self,
src: InFile<&ast::TypeAlias>,
) -> Option<TypeAliasId> {
self.to_def(src, keys::TYPE_ALIAS) self.to_def(src, keys::TYPE_ALIAS)
} }
pub(super) fn record_field_to_def(&mut self, src: InFile<ast::RecordField>) -> Option<FieldId> { pub(super) fn record_field_to_def(
&mut self,
src: InFile<&ast::RecordField>,
) -> Option<FieldId> {
self.to_def(src, keys::RECORD_FIELD) self.to_def(src, keys::RECORD_FIELD)
} }
pub(super) fn tuple_field_to_def(&mut self, src: InFile<ast::TupleField>) -> Option<FieldId> { pub(super) fn tuple_field_to_def(&mut self, src: InFile<&ast::TupleField>) -> Option<FieldId> {
self.to_def(src, keys::TUPLE_FIELD) self.to_def(src, keys::TUPLE_FIELD)
} }
pub(super) fn block_to_def(&mut self, src: InFile<ast::BlockExpr>) -> Option<BlockId> { pub(super) fn block_to_def(&mut self, src: InFile<&ast::BlockExpr>) -> Option<BlockId> {
self.to_def(src, keys::BLOCK) self.to_def(src, keys::BLOCK)
} }
pub(super) fn enum_variant_to_def( pub(super) fn enum_variant_to_def(
&mut self, &mut self,
src: InFile<ast::Variant>, src: InFile<&ast::Variant>,
) -> Option<EnumVariantId> { ) -> Option<EnumVariantId> {
self.to_def(src, keys::ENUM_VARIANT) self.to_def(src, keys::ENUM_VARIANT)
} }
pub(super) fn extern_crate_to_def( pub(super) fn extern_crate_to_def(
&mut self, &mut self,
src: InFile<ast::ExternCrate>, src: InFile<&ast::ExternCrate>,
) -> Option<ExternCrateId> { ) -> Option<ExternCrateId> {
self.to_def(src, keys::EXTERN_CRATE) self.to_def(src, keys::EXTERN_CRATE)
} }
#[allow(dead_code)] #[allow(dead_code)]
pub(super) fn use_to_def(&mut self, src: InFile<ast::Use>) -> Option<UseId> { pub(super) fn use_to_def(&mut self, src: InFile<&ast::Use>) -> Option<UseId> {
self.to_def(src, keys::USE) self.to_def(src, keys::USE)
} }
pub(super) fn adt_to_def( pub(super) fn adt_to_def(
&mut self, &mut self,
InFile { file_id, value }: InFile<ast::Adt>, InFile { file_id, value }: InFile<&ast::Adt>,
) -> Option<AdtId> { ) -> Option<AdtId> {
match value { match value {
ast::Adt::Enum(it) => self.enum_to_def(InFile::new(file_id, it)).map(AdtId::EnumId), ast::Adt::Enum(it) => self.enum_to_def(InFile::new(file_id, it)).map(AdtId::EnumId),
@ -238,11 +256,11 @@ impl SourceToDefCtx<'_, '_> {
} }
pub(super) fn bind_pat_to_def( pub(super) fn bind_pat_to_def(
&mut self, &mut self,
src: InFile<ast::IdentPat>, src: InFile<&ast::IdentPat>,
) -> Option<(DefWithBodyId, BindingId)> { ) -> Option<(DefWithBodyId, BindingId)> {
let container = self.find_pat_or_label_container(src.syntax())?; let container = self.find_pat_or_label_container(src.syntax_ref())?;
let (body, source_map) = self.db.body_with_source_map(container); let (body, source_map) = self.db.body_with_source_map(container);
let src = src.map(ast::Pat::from); let src = src.cloned().map(ast::Pat::from);
let pat_id = source_map.node_pat(src.as_ref())?; let pat_id = source_map.node_pat(src.as_ref())?;
// the pattern could resolve to a constant, verify that that is not the case // the pattern could resolve to a constant, verify that that is not the case
if let crate::Pat::Bind { id, .. } = body[pat_id] { if let crate::Pat::Bind { id, .. } = body[pat_id] {
@ -253,25 +271,33 @@ impl SourceToDefCtx<'_, '_> {
} }
pub(super) fn self_param_to_def( pub(super) fn self_param_to_def(
&mut self, &mut self,
src: InFile<ast::SelfParam>, src: InFile<&ast::SelfParam>,
) -> Option<(DefWithBodyId, BindingId)> { ) -> Option<(DefWithBodyId, BindingId)> {
let container = self.find_pat_or_label_container(src.syntax())?; let container = self.find_pat_or_label_container(src.syntax_ref())?;
let body = self.db.body(container); let body = self.db.body(container);
Some((container, body.self_param?)) Some((container, body.self_param?))
} }
pub(super) fn label_to_def( pub(super) fn label_to_def(
&mut self, &mut self,
src: InFile<ast::Label>, src: InFile<&ast::Label>,
) -> Option<(DefWithBodyId, LabelId)> { ) -> Option<(DefWithBodyId, LabelId)> {
let container = self.find_pat_or_label_container(src.syntax())?; let container = self.find_pat_or_label_container(src.syntax_ref())?;
let (_body, source_map) = self.db.body_with_source_map(container); let (_body, source_map) = self.db.body_with_source_map(container);
let label_id = source_map.node_label(src.as_ref())?; let label_id = source_map.node_label(src)?;
Some((container, label_id)) Some((container, label_id))
} }
pub(super) fn item_to_macro_call(&mut self, src: InFile<ast::Item>) -> Option<MacroCallId> { pub(super) fn item_to_macro_call(&mut self, src: InFile<&ast::Item>) -> Option<MacroCallId> {
let map = self.dyn_map(src.as_ref())?; let map = self.dyn_map(src)?;
map[keys::ATTR_MACRO_CALL].get(&src.value).copied() map[keys::ATTR_MACRO_CALL].get(&AstPtr::new(src.value)).copied()
}
pub(super) fn macro_call_to_macro_call(
&mut self,
src: InFile<&ast::MacroCall>,
) -> Option<MacroCallId> {
let map = self.dyn_map(src)?;
map[keys::MACRO_CALL].get(&AstPtr::new(src.value)).copied()
} }
/// (AttrId, derive attribute call id, derive call ids) /// (AttrId, derive attribute call id, derive call ids)
@ -282,7 +308,7 @@ impl SourceToDefCtx<'_, '_> {
) -> Option<(AttrId, MacroCallId, &[Option<MacroCallId>])> { ) -> Option<(AttrId, MacroCallId, &[Option<MacroCallId>])> {
let map = self.dyn_map(item)?; let map = self.dyn_map(item)?;
map[keys::DERIVE_MACRO_CALL] map[keys::DERIVE_MACRO_CALL]
.get(&src.value) .get(&AstPtr::new(&src.value))
.map(|&(attr_id, call_id, ref ids)| (attr_id, call_id, &**ids)) .map(|&(attr_id, call_id, ref ids)| (attr_id, call_id, &**ids))
} }
@ -292,10 +318,10 @@ impl SourceToDefCtx<'_, '_> {
fn to_def<Ast: AstNode + 'static, ID: Copy + 'static>( fn to_def<Ast: AstNode + 'static, ID: Copy + 'static>(
&mut self, &mut self,
src: InFile<Ast>, src: InFile<&Ast>,
key: Key<Ast, ID>, key: Key<Ast, ID>,
) -> Option<ID> { ) -> Option<ID> {
self.dyn_map(src.as_ref())?[key].get(&src.value).copied() self.dyn_map(src)?[key].get(&AstPtr::new(src.value)).copied()
} }
fn dyn_map<Ast: AstNode + 'static>(&mut self, src: InFile<&Ast>) -> Option<&DynMap> { fn dyn_map<Ast: AstNode + 'static>(&mut self, src: InFile<&Ast>) -> Option<&DynMap> {
@ -305,38 +331,48 @@ impl SourceToDefCtx<'_, '_> {
fn cache_for(&mut self, container: ChildContainer, file_id: HirFileId) -> &DynMap { fn cache_for(&mut self, container: ChildContainer, file_id: HirFileId) -> &DynMap {
let db = self.db; let db = self.db;
self.dynmap_cache self.cache
.dynmap_cache
.entry((container, file_id)) .entry((container, file_id))
.or_insert_with(|| container.child_by_source(db, file_id)) .or_insert_with(|| container.child_by_source(db, file_id))
} }
pub(super) fn type_param_to_def(&mut self, src: InFile<ast::TypeParam>) -> Option<TypeParamId> { pub(super) fn type_param_to_def(
let container: ChildContainer = self.find_generic_param_container(src.syntax())?.into(); &mut self,
src: InFile<&ast::TypeParam>,
) -> Option<TypeParamId> {
let container: ChildContainer = self.find_generic_param_container(src.syntax_ref())?.into();
let dyn_map = self.cache_for(container, src.file_id); let dyn_map = self.cache_for(container, src.file_id);
dyn_map[keys::TYPE_PARAM].get(&src.value).copied().map(TypeParamId::from_unchecked) dyn_map[keys::TYPE_PARAM]
.get(&AstPtr::new(src.value))
.copied()
.map(TypeParamId::from_unchecked)
} }
pub(super) fn lifetime_param_to_def( pub(super) fn lifetime_param_to_def(
&mut self, &mut self,
src: InFile<ast::LifetimeParam>, src: InFile<&ast::LifetimeParam>,
) -> Option<LifetimeParamId> { ) -> Option<LifetimeParamId> {
let container: ChildContainer = self.find_generic_param_container(src.syntax())?.into(); let container: ChildContainer = self.find_generic_param_container(src.syntax_ref())?.into();
let dyn_map = self.cache_for(container, src.file_id); let dyn_map = self.cache_for(container, src.file_id);
dyn_map[keys::LIFETIME_PARAM].get(&src.value).copied() dyn_map[keys::LIFETIME_PARAM].get(&AstPtr::new(src.value)).copied()
} }
pub(super) fn const_param_to_def( pub(super) fn const_param_to_def(
&mut self, &mut self,
src: InFile<ast::ConstParam>, src: InFile<&ast::ConstParam>,
) -> Option<ConstParamId> { ) -> Option<ConstParamId> {
let container: ChildContainer = self.find_generic_param_container(src.syntax())?.into(); let container: ChildContainer = self.find_generic_param_container(src.syntax_ref())?.into();
let dyn_map = self.cache_for(container, src.file_id); let dyn_map = self.cache_for(container, src.file_id);
dyn_map[keys::CONST_PARAM].get(&src.value).copied().map(ConstParamId::from_unchecked) dyn_map[keys::CONST_PARAM]
.get(&AstPtr::new(src.value))
.copied()
.map(ConstParamId::from_unchecked)
} }
pub(super) fn generic_param_to_def( pub(super) fn generic_param_to_def(
&mut self, &mut self,
InFile { file_id, value }: InFile<ast::GenericParam>, InFile { file_id, value }: InFile<&ast::GenericParam>,
) -> Option<GenericParamId> { ) -> Option<GenericParamId> {
match value { match value {
ast::GenericParam::ConstParam(it) => { ast::GenericParam::ConstParam(it) => {
@ -351,34 +387,113 @@ impl SourceToDefCtx<'_, '_> {
} }
} }
pub(super) fn macro_to_def(&mut self, src: InFile<ast::Macro>) -> Option<MacroId> { pub(super) fn macro_to_def(&mut self, src: InFile<&ast::Macro>) -> Option<MacroId> {
self.dyn_map(src.as_ref()).and_then(|it| match &src.value { self.dyn_map(src).and_then(|it| match src.value {
ast::Macro::MacroRules(value) => { ast::Macro::MacroRules(value) => {
it[keys::MACRO_RULES].get(value).copied().map(MacroId::from) it[keys::MACRO_RULES].get(&AstPtr::new(value)).copied().map(MacroId::from)
}
ast::Macro::MacroDef(value) => {
it[keys::MACRO2].get(&AstPtr::new(value)).copied().map(MacroId::from)
} }
ast::Macro::MacroDef(value) => it[keys::MACRO2].get(value).copied().map(MacroId::from),
}) })
} }
pub(super) fn proc_macro_to_def(&mut self, src: InFile<ast::Fn>) -> Option<MacroId> { pub(super) fn proc_macro_to_def(&mut self, src: InFile<&ast::Fn>) -> Option<MacroId> {
self.dyn_map(src.as_ref()) self.dyn_map(src).and_then(|it| {
.and_then(|it| it[keys::PROC_MACRO].get(&src.value).copied().map(MacroId::from)) it[keys::PROC_MACRO].get(&AstPtr::new(src.value)).copied().map(MacroId::from)
})
} }
pub(super) fn find_container(&mut self, src: InFile<&SyntaxNode>) -> Option<ChildContainer> { pub(super) fn find_container(&mut self, src: InFile<&SyntaxNode>) -> Option<ChildContainer> {
for container in src.ancestors_with_macros(self.db.upcast()) { let _p = tracing::info_span!("find_container").entered();
if let Some(res) = self.container_to_def(container) { let def =
return Some(res); self.ancestors_with_macros(src, |this, container| this.container_to_def(container));
} if let Some(def) = def {
return Some(def);
} }
let def = self.file_to_def(src.file_id.original_file(self.db.upcast())).first().copied()?; let def = self.file_to_def(src.file_id.original_file(self.db.upcast())).first().copied()?;
Some(def.into()) Some(def.into())
} }
/// Skips the attributed item that caused the macro invocation we are climbing up
fn ancestors_with_macros<T>(
&mut self,
node: InFile<&SyntaxNode>,
mut cb: impl FnMut(&mut Self, InFile<SyntaxNode>) -> Option<T>,
) -> Option<T> {
use hir_expand::MacroFileIdExt;
let parent = |this: &mut Self, node: InFile<&SyntaxNode>| match node.value.parent() {
Some(parent) => Some(node.with_value(parent)),
None => {
let macro_file = node.file_id.macro_file()?;
let expansion_info = this
.cache
.expansion_info_cache
.entry(macro_file)
.or_insert_with(|| macro_file.expansion_info(this.db.upcast()));
expansion_info.arg().map(|node| node?.parent()).transpose()
}
};
let mut node = node.cloned();
while let Some(parent) = parent(self, node.as_ref()) {
if let Some(res) = cb(self, parent.clone()) {
return Some(res);
}
node = parent;
}
None
}
fn find_generic_param_container(&mut self, src: InFile<&SyntaxNode>) -> Option<GenericDefId> {
self.ancestors_with_macros(src, |this, InFile { file_id, value }| {
let item = ast::Item::cast(value)?;
match &item {
ast::Item::Fn(it) => this.fn_to_def(InFile::new(file_id, it)).map(Into::into),
ast::Item::Struct(it) => {
this.struct_to_def(InFile::new(file_id, it)).map(Into::into)
}
ast::Item::Enum(it) => this.enum_to_def(InFile::new(file_id, it)).map(Into::into),
ast::Item::Trait(it) => this.trait_to_def(InFile::new(file_id, it)).map(Into::into),
ast::Item::TraitAlias(it) => {
this.trait_alias_to_def(InFile::new(file_id, it)).map(Into::into)
}
ast::Item::TypeAlias(it) => {
this.type_alias_to_def(InFile::new(file_id, it)).map(Into::into)
}
ast::Item::Impl(it) => this.impl_to_def(InFile::new(file_id, it)).map(Into::into),
_ => None,
}
})
}
fn find_pat_or_label_container(&mut self, src: InFile<&SyntaxNode>) -> Option<DefWithBodyId> {
self.ancestors_with_macros(src, |this, InFile { file_id, value }| {
let item = match ast::Item::cast(value.clone()) {
Some(it) => it,
None => {
let variant = ast::Variant::cast(value.clone())?;
return this
.enum_variant_to_def(InFile::new(file_id, &variant))
.map(Into::into);
}
};
match &item {
ast::Item::Fn(it) => this.fn_to_def(InFile::new(file_id, it)).map(Into::into),
ast::Item::Const(it) => this.const_to_def(InFile::new(file_id, it)).map(Into::into),
ast::Item::Static(it) => {
this.static_to_def(InFile::new(file_id, it)).map(Into::into)
}
_ => None,
}
})
}
fn container_to_def(&mut self, container: InFile<SyntaxNode>) -> Option<ChildContainer> { fn container_to_def(&mut self, container: InFile<SyntaxNode>) -> Option<ChildContainer> {
let cont = if let Some(item) = ast::Item::cast(container.value.clone()) { let cont = if let Some(item) = ast::Item::cast(container.value.clone()) {
match item { match &item {
ast::Item::Module(it) => self.module_to_def(container.with_value(it))?.into(), ast::Item::Module(it) => self.module_to_def(container.with_value(it))?.into(),
ast::Item::Trait(it) => self.trait_to_def(container.with_value(it))?.into(), ast::Item::Trait(it) => self.trait_to_def(container.with_value(it))?.into(),
ast::Item::TraitAlias(it) => { ast::Item::TraitAlias(it) => {
@ -413,63 +528,11 @@ impl SourceToDefCtx<'_, '_> {
} }
} else { } else {
let it = ast::Variant::cast(container.value)?; let it = ast::Variant::cast(container.value)?;
let def = self.enum_variant_to_def(InFile::new(container.file_id, it))?; let def = self.enum_variant_to_def(InFile::new(container.file_id, &it))?;
DefWithBodyId::from(def).into() DefWithBodyId::from(def).into()
}; };
Some(cont) Some(cont)
} }
fn find_generic_param_container(&mut self, src: InFile<&SyntaxNode>) -> Option<GenericDefId> {
let ancestors = src.ancestors_with_macros(self.db.upcast());
for InFile { file_id, value } in ancestors {
let item = match ast::Item::cast(value) {
Some(it) => it,
None => continue,
};
let res: GenericDefId = match item {
ast::Item::Fn(it) => self.fn_to_def(InFile::new(file_id, it))?.into(),
ast::Item::Struct(it) => self.struct_to_def(InFile::new(file_id, it))?.into(),
ast::Item::Union(it) => self.union_to_def(InFile::new(file_id, it))?.into(),
ast::Item::Enum(it) => self.enum_to_def(InFile::new(file_id, it))?.into(),
ast::Item::Trait(it) => self.trait_to_def(InFile::new(file_id, it))?.into(),
ast::Item::TraitAlias(it) => {
self.trait_alias_to_def(InFile::new(file_id, it))?.into()
}
ast::Item::TypeAlias(it) => {
self.type_alias_to_def(InFile::new(file_id, it))?.into()
}
ast::Item::Impl(it) => self.impl_to_def(InFile::new(file_id, it))?.into(),
_ => continue,
};
return Some(res);
}
None
}
fn find_pat_or_label_container(&mut self, src: InFile<&SyntaxNode>) -> Option<DefWithBodyId> {
let ancestors = src.ancestors_with_macros(self.db.upcast());
for InFile { file_id, value } in ancestors {
let item = match ast::Item::cast(value.clone()) {
Some(it) => it,
None => {
if let Some(variant) = ast::Variant::cast(value.clone()) {
return self
.enum_variant_to_def(InFile::new(file_id, variant))
.map(Into::into);
}
continue;
}
};
let res: DefWithBodyId = match item {
ast::Item::Const(it) => self.const_to_def(InFile::new(file_id, it))?.into(),
ast::Item::Static(it) => self.static_to_def(InFile::new(file_id, it))?.into(),
ast::Item::Fn(it) => self.fn_to_def(InFile::new(file_id, it))?.into(),
_ => continue,
};
return Some(res);
}
None
}
} }
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
@ -501,6 +564,7 @@ impl_from! {
impl ChildContainer { impl ChildContainer {
fn child_by_source(self, db: &dyn HirDatabase, file_id: HirFileId) -> DynMap { fn child_by_source(self, db: &dyn HirDatabase, file_id: HirFileId) -> DynMap {
let _p = tracing::info_span!("ChildContainer::child_by_source").entered();
let db = db.upcast(); let db = db.upcast();
match self { match self {
ChildContainer::DefWithBodyId(it) => it.child_by_source(db, file_id), ChildContainer::DefWithBodyId(it) => it.child_by_source(db, file_id),

View file

@ -24,11 +24,12 @@ use hir_def::{
LocalFieldId, Lookup, ModuleDefId, TraitId, VariantId, LocalFieldId, Lookup, ModuleDefId, TraitId, VariantId,
}; };
use hir_expand::{ use hir_expand::{
builtin_fn_macro::BuiltinFnLikeExpander,
mod_path::path, mod_path::path,
name,
name::{AsName, Name},
HirFileId, InFile, InMacroFile, MacroFileId, MacroFileIdExt, HirFileId, InFile, InMacroFile, MacroFileId, MacroFileIdExt,
{
name,
name::{AsName, Name},
},
}; };
use hir_ty::{ use hir_ty::{
diagnostics::{ diagnostics::{
@ -822,8 +823,10 @@ impl SourceAnalyzer {
macro_call: InFile<&ast::MacroCall>, macro_call: InFile<&ast::MacroCall>,
) -> Option<MacroFileId> { ) -> Option<MacroFileId> {
let krate = self.resolver.krate(); let krate = self.resolver.krate();
// FIXME: This causes us to parse, generally this is the wrong approach for resolving a
// macro call to a macro call id!
let macro_call_id = macro_call.as_call_id(db.upcast(), krate, |path| { let macro_call_id = macro_call.as_call_id(db.upcast(), krate, |path| {
self.resolver.resolve_path_as_macro_def(db.upcast(), &path, Some(MacroSubNs::Bang)) self.resolver.resolve_path_as_macro_def(db.upcast(), path, Some(MacroSubNs::Bang))
})?; })?;
// why the 64? // why the 64?
Some(macro_call_id.as_macro_file()).filter(|it| it.expansion_level(db.upcast()) < 64) Some(macro_call_id.as_macro_file()).filter(|it| it.expansion_level(db.upcast()) < 64)
@ -839,37 +842,13 @@ impl SourceAnalyzer {
infer.variant_resolution_for_expr(expr_id) infer.variant_resolution_for_expr(expr_id)
} }
pub(crate) fn is_unsafe_macro_call( pub(crate) fn is_unsafe_macro_call_expr(
&self, &self,
db: &dyn HirDatabase, db: &dyn HirDatabase,
macro_call: InFile<&ast::MacroCall>, macro_expr: InFile<&ast::MacroExpr>,
) -> bool { ) -> bool {
// check for asm/global_asm
if let Some(mac) = self.resolve_macro_call(db, macro_call) {
let ex = match mac.id {
hir_def::MacroId::Macro2Id(it) => it.lookup(db.upcast()).expander,
hir_def::MacroId::MacroRulesId(it) => it.lookup(db.upcast()).expander,
_ => hir_def::MacroExpander::Declarative,
};
match ex {
hir_def::MacroExpander::BuiltIn(e)
if e == BuiltinFnLikeExpander::Asm || e == BuiltinFnLikeExpander::GlobalAsm =>
{
return true
}
_ => (),
}
}
let macro_expr = match macro_call
.map(|it| it.syntax().parent().and_then(ast::MacroExpr::cast))
.transpose()
{
Some(it) => it,
None => return false,
};
if let (Some((def, body, sm)), Some(infer)) = (&self.def, &self.infer) { if let (Some((def, body, sm)), Some(infer)) = (&self.def, &self.infer) {
if let Some(expanded_expr) = sm.macro_expansion_expr(macro_expr.as_ref()) { if let Some(expanded_expr) = sm.macro_expansion_expr(macro_expr) {
let mut is_unsafe = false; let mut is_unsafe = false;
unsafe_expressions( unsafe_expressions(
db, db,

View file

@ -325,6 +325,7 @@ pub fn term_search<DB: HirDatabase>(ctx: &TermSearchCtx<'_, DB>) -> Vec<Expr> {
let mut solutions: Vec<Expr> = tactics::trivial(ctx, &defs, &mut lookup).collect(); let mut solutions: Vec<Expr> = tactics::trivial(ctx, &defs, &mut lookup).collect();
// Use well known types tactic before iterations as it does not depend on other tactics // Use well known types tactic before iterations as it does not depend on other tactics
solutions.extend(tactics::famous_types(ctx, &defs, &mut lookup)); solutions.extend(tactics::famous_types(ctx, &defs, &mut lookup));
solutions.extend(tactics::assoc_const(ctx, &defs, &mut lookup));
while should_continue() { while should_continue() {
lookup.new_round(); lookup.new_round();

View file

@ -9,8 +9,8 @@ use hir_ty::{
use itertools::Itertools; use itertools::Itertools;
use crate::{ use crate::{
Adt, AsAssocItem, Const, ConstParam, Field, Function, GenericDef, Local, ModuleDef, Adt, AsAssocItem, AssocItemContainer, Const, ConstParam, Field, Function, GenericDef, Local,
SemanticsScope, Static, Struct, StructKind, Trait, Type, Variant, ModuleDef, SemanticsScope, Static, Struct, StructKind, Trait, Type, Variant,
}; };
/// Helper function to get path to `ModuleDef` /// Helper function to get path to `ModuleDef`
@ -138,7 +138,17 @@ impl Expr {
let db = sema_scope.db; let db = sema_scope.db;
let mod_item_path_str = |s, def| mod_item_path_str(s, def, cfg); let mod_item_path_str = |s, def| mod_item_path_str(s, def, cfg);
match self { match self {
Expr::Const(it) => mod_item_path_str(sema_scope, &ModuleDef::Const(*it)), Expr::Const(it) => match it.as_assoc_item(db).map(|it| it.container(db)) {
Some(container) => {
let container_name = container_name(container, sema_scope, cfg)?;
let const_name = it
.name(db)
.map(|c| c.display(db.upcast()).to_string())
.unwrap_or(String::new());
Ok(format!("{container_name}::{const_name}"))
}
None => mod_item_path_str(sema_scope, &ModuleDef::Const(*it)),
},
Expr::Static(it) => mod_item_path_str(sema_scope, &ModuleDef::Static(*it)), Expr::Static(it) => mod_item_path_str(sema_scope, &ModuleDef::Static(*it)),
Expr::Local(it) => Ok(it.name(db).display(db.upcast()).to_string()), Expr::Local(it) => Ok(it.name(db).display(db.upcast()).to_string()),
Expr::ConstParam(it) => Ok(it.name(db).display(db.upcast()).to_string()), Expr::ConstParam(it) => Ok(it.name(db).display(db.upcast()).to_string()),
@ -153,22 +163,7 @@ impl Expr {
match func.as_assoc_item(db).map(|it| it.container(db)) { match func.as_assoc_item(db).map(|it| it.container(db)) {
Some(container) => { Some(container) => {
let container_name = match container { let container_name = container_name(container, sema_scope, cfg)?;
crate::AssocItemContainer::Trait(trait_) => {
mod_item_path_str(sema_scope, &ModuleDef::Trait(trait_))?
}
crate::AssocItemContainer::Impl(imp) => {
let self_ty = imp.self_ty(db);
// Should it be guaranteed that `mod_item_path` always exists?
match self_ty
.as_adt()
.and_then(|adt| mod_item_path(sema_scope, &adt.into(), cfg))
{
Some(path) => path.display(sema_scope.db.upcast()).to_string(),
None => self_ty.display(db).to_string(),
}
}
};
let fn_name = func.name(db).display(db.upcast()).to_string(); let fn_name = func.name(db).display(db.upcast()).to_string();
Ok(format!("{container_name}::{fn_name}({args})")) Ok(format!("{container_name}::{fn_name}({args})"))
} }
@ -414,3 +409,25 @@ impl Expr {
matches!(self, Expr::Many(_)) matches!(self, Expr::Many(_))
} }
} }
/// Helper function to find name of container
fn container_name(
container: AssocItemContainer,
sema_scope: &SemanticsScope<'_>,
cfg: ImportPathConfig,
) -> Result<String, DisplaySourceCodeError> {
let container_name = match container {
crate::AssocItemContainer::Trait(trait_) => {
mod_item_path_str(sema_scope, &ModuleDef::Trait(trait_), cfg)?
}
crate::AssocItemContainer::Impl(imp) => {
let self_ty = imp.self_ty(sema_scope.db);
// Should it be guaranteed that `mod_item_path` always exists?
match self_ty.as_adt().and_then(|adt| mod_item_path(sema_scope, &adt.into(), cfg)) {
Some(path) => path.display(sema_scope.db.upcast()).to_string(),
None => self_ty.display(sema_scope.db).to_string(),
}
}
};
Ok(container_name)
}

View file

@ -5,6 +5,7 @@
//! * `defs` - Set of items in scope at term search target location //! * `defs` - Set of items in scope at term search target location
//! * `lookup` - Lookup table for types //! * `lookup` - Lookup table for types
//! * `should_continue` - Function that indicates when to stop iterating //! * `should_continue` - Function that indicates when to stop iterating
//!
//! And they return iterator that yields type trees that unify with the `goal` type. //! And they return iterator that yields type trees that unify with the `goal` type.
use std::iter; use std::iter;
@ -79,7 +80,10 @@ pub(super) fn trivial<'a, DB: HirDatabase>(
lookup.insert(ty.clone(), std::iter::once(expr.clone())); lookup.insert(ty.clone(), std::iter::once(expr.clone()));
// Don't suggest local references as they are not valid for return // Don't suggest local references as they are not valid for return
if matches!(expr, Expr::Local(_)) && ty.contains_reference(db) { if matches!(expr, Expr::Local(_))
&& ty.contains_reference(db)
&& ctx.config.enable_borrowcheck
{
return None; return None;
} }
@ -87,6 +91,52 @@ pub(super) fn trivial<'a, DB: HirDatabase>(
}) })
} }
/// # Associated constant tactic
///
/// Attempts to fulfill the goal by trying constants defined as associated items.
/// Only considers them on types that are in scope.
///
/// # Arguments
/// * `ctx` - Context for the term search
/// * `defs` - Set of items in scope at term search target location
/// * `lookup` - Lookup table for types
///
/// Returns iterator that yields elements that unify with `goal`.
///
/// _Note that there is no use of calling this tactic in every iteration as the output does not
/// depend on the current state of `lookup`_
pub(super) fn assoc_const<'a, DB: HirDatabase>(
ctx: &'a TermSearchCtx<'a, DB>,
defs: &'a FxHashSet<ScopeDef>,
lookup: &'a mut LookupTable,
) -> impl Iterator<Item = Expr> + 'a {
let db = ctx.sema.db;
let module = ctx.scope.module();
defs.iter()
.filter_map(|def| match def {
ScopeDef::ModuleDef(ModuleDef::Adt(it)) => Some(it),
_ => None,
})
.flat_map(|it| Impl::all_for_type(db, it.ty(db)))
.filter(|it| !it.is_unsafe(db))
.flat_map(|it| it.items(db))
.filter(move |it| it.is_visible_from(db, module))
.filter_map(AssocItem::as_const)
.filter_map(|it| {
let expr = Expr::Const(it);
let ty = it.ty(db);
if ty.contains_unknown() {
return None;
}
lookup.insert(ty.clone(), std::iter::once(expr.clone()));
ty.could_unify_with_deeply(db, &ctx.goal).then_some(expr)
})
}
/// # Data constructor tactic /// # Data constructor tactic
/// ///
/// Attempts different data constructors for enums and structs in scope /// Attempts different data constructors for enums and structs in scope

View file

@ -105,7 +105,7 @@ fn add_missing_impl_members_inner(
assist_id: &'static str, assist_id: &'static str,
label: &'static str, label: &'static str,
) -> Option<()> { ) -> Option<()> {
let _p = tracing::span!(tracing::Level::INFO, "add_missing_impl_members_inner").entered(); let _p = tracing::info_span!("add_missing_impl_members_inner").entered();
let impl_def = ctx.find_node_at_offset::<ast::Impl>()?; let impl_def = ctx.find_node_at_offset::<ast::Impl>()?;
let impl_ = ctx.sema.to_def(&impl_def)?; let impl_ = ctx.sema.to_def(&impl_def)?;

View file

@ -140,7 +140,7 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
acc.add_group( acc.add_group(
&group_label, &group_label,
assist_id, assist_id,
format!("Import `{}`", import_name), format!("Import `{import_name}`"),
range, range,
|builder| { |builder| {
let scope = match scope.clone() { let scope = match scope.clone() {
@ -165,7 +165,7 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
acc.add_group( acc.add_group(
&group_label, &group_label,
assist_id, assist_id,
format!("Import `{} as _`", import_name), format!("Import `{import_name} as _`"),
range, range,
|builder| { |builder| {
let scope = match scope.clone() { let scope = match scope.clone() {
@ -272,8 +272,10 @@ fn module_distance_heuristic(db: &dyn HirDatabase, current: &Module, item: &Modu
// cost of importing from another crate // cost of importing from another crate
let crate_boundary_cost = if current.krate() == item.krate() { let crate_boundary_cost = if current.krate() == item.krate() {
0 0
} else if item.krate().is_builtin(db) { } else if item.krate().origin(db).is_local() {
2 2
} else if item.krate().is_builtin(db) {
3
} else { } else {
4 4
}; };
@ -365,6 +367,49 @@ pub struct HashMap;
) )
} }
#[test]
fn prefer_workspace() {
let before = r"
//- /main.rs crate:main deps:foo,bar
HashMap$0::new();
//- /lib.rs crate:foo
pub mod module {
pub struct HashMap;
}
//- /lib.rs crate:bar library
pub struct HashMap;
";
check_auto_import_order(before, &["Import `foo::module::HashMap`", "Import `bar::HashMap`"])
}
#[test]
fn prefer_non_local_over_long_path() {
let before = r"
//- /main.rs crate:main deps:foo,bar
HashMap$0::new();
//- /lib.rs crate:foo
pub mod deeply {
pub mod nested {
pub mod module {
pub struct HashMap;
}
}
}
//- /lib.rs crate:bar library
pub struct HashMap;
";
check_auto_import_order(
before,
&["Import `bar::HashMap`", "Import `foo::deeply::nested::module::HashMap`"],
)
}
#[test] #[test]
fn not_applicable_if_scope_inside_macro() { fn not_applicable_if_scope_inside_macro() {
check_assist_not_applicable( check_assist_not_applicable(

View file

@ -228,7 +228,7 @@ fn replace_usages(
edit.replace( edit.replace(
prefix_expr.syntax().text_range(), prefix_expr.syntax().text_range(),
format!("{} == Bool::False", inner_expr), format!("{inner_expr} == Bool::False"),
); );
} else if let Some((record_field, initializer)) = name } else if let Some((record_field, initializer)) = name
.as_name_ref() .as_name_ref()
@ -275,7 +275,7 @@ fn replace_usages(
} else if let Some(receiver) = find_method_call_expr_usage(&name) { } else if let Some(receiver) = find_method_call_expr_usage(&name) {
edit.replace( edit.replace(
receiver.syntax().text_range(), receiver.syntax().text_range(),
format!("({} == Bool::True)", receiver), format!("({receiver} == Bool::True)"),
); );
} else if name.syntax().ancestors().find_map(ast::UseTree::cast).is_none() { } else if name.syntax().ancestors().find_map(ast::UseTree::cast).is_none() {
// for any other usage in an expression, replace it with a check that it is the true variant // for any other usage in an expression, replace it with a check that it is the true variant

View file

@ -242,7 +242,7 @@ fn generate_field_names(ctx: &AssistContext<'_>, data: &StructEditData) -> Vec<(
.iter() .iter()
.enumerate() .enumerate()
.map(|(index, _)| { .map(|(index, _)| {
let new_name = new_field_name((format!("_{}", index)).into(), &data.names_in_scope); let new_name = new_field_name((format!("_{index}")).into(), &data.names_in_scope);
(index.to_string().into(), new_name) (index.to_string().into(), new_name)
}) })
.collect(), .collect(),

View file

@ -1,4 +1,4 @@
use crate::{AssistContext, Assists}; use crate::{utils, AssistContext, Assists};
use hir::DescendPreference; use hir::DescendPreference;
use ide_db::{ use ide_db::{
assists::{AssistId, AssistKind}, assists::{AssistId, AssistKind},
@ -8,8 +8,12 @@ use ide_db::{
}, },
}; };
use itertools::Itertools; use itertools::Itertools;
use stdx::format_to; use syntax::{
use syntax::{ast, AstNode, AstToken, NodeOrToken, SyntaxKind::COMMA, TextRange}; ast::{self, make},
ted, AstNode, AstToken, NodeOrToken,
SyntaxKind::WHITESPACE,
T,
};
// Assist: extract_expressions_from_format_string // Assist: extract_expressions_from_format_string
// //
@ -34,6 +38,7 @@ pub(crate) fn extract_expressions_from_format_string(
) -> Option<()> { ) -> Option<()> {
let fmt_string = ctx.find_token_at_offset::<ast::String>()?; let fmt_string = ctx.find_token_at_offset::<ast::String>()?;
let tt = fmt_string.syntax().parent().and_then(ast::TokenTree::cast)?; let tt = fmt_string.syntax().parent().and_then(ast::TokenTree::cast)?;
let tt_delimiter = tt.left_delimiter_token()?.kind();
let expanded_t = ast::String::cast( let expanded_t = ast::String::cast(
ctx.sema ctx.sema
@ -61,72 +66,63 @@ pub(crate) fn extract_expressions_from_format_string(
"Extract format expressions", "Extract format expressions",
tt.syntax().text_range(), tt.syntax().text_range(),
|edit| { |edit| {
let fmt_range = fmt_string.syntax().text_range(); let tt = edit.make_mut(tt);
// Replace old format string with new format string whose arguments have been extracted
edit.replace(fmt_range, new_fmt);
// Insert cursor at end of format string
edit.insert(fmt_range.end(), "$0");
// Extract existing arguments in macro // Extract existing arguments in macro
let tokens = let tokens = tt.token_trees_and_tokens().collect_vec();
tt.token_trees_and_tokens().collect_vec();
let mut existing_args: Vec<String> = vec![]; let existing_args = if let [_opening_bracket, NodeOrToken::Token(_format_string), _args_start_comma, tokens @ .., NodeOrToken::Token(_end_bracket)] =
let mut current_arg = String::new();
if let [_opening_bracket, NodeOrToken::Token(format_string), _args_start_comma, tokens @ .., NodeOrToken::Token(end_bracket)] =
tokens.as_slice() tokens.as_slice()
{ {
for t in tokens { let args = tokens.split(|it| matches!(it, NodeOrToken::Token(t) if t.kind() == T![,])).map(|arg| {
match t { // Strip off leading and trailing whitespace tokens
NodeOrToken::Node(n) => { let arg = match arg.split_first() {
format_to!(current_arg, "{n}"); Some((NodeOrToken::Token(t), rest)) if t.kind() == WHITESPACE => rest,
}, _ => arg,
NodeOrToken::Token(t) if t.kind() == COMMA => { };
existing_args.push(current_arg.trim().into()); let arg = match arg.split_last() {
current_arg.clear(); Some((NodeOrToken::Token(t), rest)) if t.kind() == WHITESPACE => rest,
}, _ => arg,
NodeOrToken::Token(t) => { };
current_arg.push_str(t.text()); arg
}, });
}
}
existing_args.push(current_arg.trim().into());
// delete everything after the format string till end bracket args.collect()
// we're going to insert the new arguments later } else {
edit.delete(TextRange::new( vec![]
format_string.text_range().end(), };
end_bracket.text_range().start(),
));
}
// Start building the new args // Start building the new args
let mut existing_args = existing_args.into_iter(); let mut existing_args = existing_args.into_iter();
let mut args = String::new(); let mut new_tt_bits = vec![NodeOrToken::Token(make::tokens::literal(&new_fmt))];
let mut placeholder_indexes = vec![];
let mut placeholder_idx = 1; for arg in extracted_args {
if matches!(arg, Arg::Expr(_) | Arg::Placeholder) {
// insert ", " before each arg
new_tt_bits.extend_from_slice(&[
NodeOrToken::Token(make::token(T![,])),
NodeOrToken::Token(make::tokens::single_space()),
]);
}
for extracted_args in extracted_args { match arg {
match extracted_args { Arg::Expr(s) => {
Arg::Expr(s)=> {
args.push_str(", ");
// insert arg // insert arg
args.push_str(&s); // FIXME: use the crate's edition for parsing
let expr = ast::Expr::parse(&s, syntax::Edition::CURRENT).syntax_node();
let mut expr_tt = utils::tt_from_syntax(expr);
new_tt_bits.append(&mut expr_tt);
} }
Arg::Placeholder => { Arg::Placeholder => {
args.push_str(", ");
// try matching with existing argument // try matching with existing argument
match existing_args.next() { match existing_args.next() {
Some(ea) => { Some(arg) => {
args.push_str(&ea); new_tt_bits.extend_from_slice(arg);
} }
None => { None => {
// insert placeholder placeholder_indexes.push(new_tt_bits.len());
args.push_str(&format!("${placeholder_idx}")); new_tt_bits.push(NodeOrToken::Token(make::token(T![_])));
placeholder_idx += 1;
} }
} }
} }
@ -134,8 +130,31 @@ pub(crate) fn extract_expressions_from_format_string(
} }
} }
// Insert new args // Insert new args
edit.insert(fmt_range.end(), args); let new_tt = make::token_tree(tt_delimiter, new_tt_bits).clone_for_update();
ted::replace(tt.syntax(), new_tt.syntax());
if let Some(cap) = ctx.config.snippet_cap {
// Add placeholder snippets over placeholder args
for pos in placeholder_indexes {
// Skip the opening delimiter
let Some(NodeOrToken::Token(placeholder)) =
new_tt.token_trees_and_tokens().skip(1).nth(pos)
else {
continue;
};
if stdx::always!(placeholder.kind() == T![_]) {
edit.add_placeholder_snippet_token(cap, placeholder);
}
}
// Add the final tabstop after the format literal
if let Some(NodeOrToken::Token(literal)) = new_tt.token_trees_and_tokens().nth(1) {
edit.add_tabstop_after_token(cap, literal);
}
}
}, },
); );
@ -145,7 +164,7 @@ pub(crate) fn extract_expressions_from_format_string(
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
use crate::tests::check_assist; use crate::tests::{check_assist, check_assist_no_snippet_cap};
#[test] #[test]
fn multiple_middle_arg() { fn multiple_middle_arg() {
@ -195,7 +214,7 @@ fn main() {
"#, "#,
r#" r#"
fn main() { fn main() {
print!("{} {:b} {} {}"$0, y + 2, x + 1, 2, $1); print!("{} {:b} {} {}"$0, y + 2, x + 1, 2, ${1:_});
} }
"#, "#,
); );
@ -292,4 +311,22 @@ fn main() {
"#, "#,
); );
} }
#[test]
fn without_snippets() {
check_assist_no_snippet_cap(
extract_expressions_from_format_string,
r#"
//- minicore: fmt
fn main() {
print!("{} {x + 1:b} {} {}$0", y + 2, 2);
}
"#,
r#"
fn main() {
print!("{} {:b} {} {}", y + 2, x + 1, 2, _);
}
"#,
);
}
} }

View file

@ -758,7 +758,7 @@ fn ty_assoc_item(item: syntax::ast::TypeAlias, qual_path_ty: Path) -> Option<Ass
} }
fn qualified_path(qual_path_ty: ast::Path, path_expr_seg: ast::Path) -> ast::Path { fn qualified_path(qual_path_ty: ast::Path, path_expr_seg: ast::Path) -> ast::Path {
make::path_from_text(&format!("{}::{}", qual_path_ty, path_expr_seg)) make::path_from_text(&format!("{qual_path_ty}::{path_expr_seg}"))
} }
#[cfg(test)] #[cfg(test)]

View file

@ -393,9 +393,9 @@ impl FunctionBuilder {
/// The rule for whether we focus a return type or not (and thus focus the function body), /// The rule for whether we focus a return type or not (and thus focus the function body),
/// is rather simple: /// is rather simple:
/// * If we could *not* infer what the return type should be, focus it (so the user can fill-in /// * If we could *not* infer what the return type should be, focus it (so the user can fill-in
/// the correct return type). /// the correct return type).
/// * If we could infer the return type, don't focus it (and thus focus the function body) so the /// * If we could infer the return type, don't focus it (and thus focus the function body) so the
/// user can change the `todo!` function body. /// user can change the `todo!` function body.
fn make_return_type( fn make_return_type(
ctx: &AssistContext<'_>, ctx: &AssistContext<'_>,
expr: &ast::Expr, expr: &ast::Expr,
@ -918,9 +918,9 @@ fn filter_generic_params(ctx: &AssistContext<'_>, node: SyntaxNode) -> Option<hi
/// Say we have a trait bound `Struct<T>: Trait<U>`. Given `necessary_params`, when is it relevant /// Say we have a trait bound `Struct<T>: Trait<U>`. Given `necessary_params`, when is it relevant
/// and when not? Some observations: /// and when not? Some observations:
/// - When `necessary_params` contains `T`, it's likely that we want this bound, but now we have /// - When `necessary_params` contains `T`, it's likely that we want this bound, but now we have
/// an extra param to consider: `U`. /// an extra param to consider: `U`.
/// - On the other hand, when `necessary_params` contains `U` (but not `T`), then it's unlikely /// - On the other hand, when `necessary_params` contains `U` (but not `T`), then it's unlikely
/// that we want this bound because it doesn't really constrain `U`. /// that we want this bound because it doesn't really constrain `U`.
/// ///
/// (FIXME?: The latter clause might be overstating. We may want to include the bound if the self /// (FIXME?: The latter clause might be overstating. We may want to include the bound if the self
/// type does *not* include generic params at all - like `Option<i32>: From<U>`) /// type does *not* include generic params at all - like `Option<i32>: From<U>`)
@ -928,7 +928,7 @@ fn filter_generic_params(ctx: &AssistContext<'_>, node: SyntaxNode) -> Option<hi
/// Can we make this a bit more formal? Let's define "dependency" between generic parameters and /// Can we make this a bit more formal? Let's define "dependency" between generic parameters and
/// trait bounds: /// trait bounds:
/// - A generic parameter `T` depends on a trait bound if `T` appears in the self type (i.e. left /// - A generic parameter `T` depends on a trait bound if `T` appears in the self type (i.e. left
/// part) of the bound. /// part) of the bound.
/// - A trait bound depends on a generic parameter `T` if `T` appears in the bound. /// - A trait bound depends on a generic parameter `T` if `T` appears in the bound.
/// ///
/// Using the notion, what we want is all the bounds that params in `necessary_params` /// Using the notion, what we want is all the bounds that params in `necessary_params`

View file

@ -47,7 +47,7 @@ pub(crate) fn generate_setter(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt
} }
// Prepend set_ to fn names. // Prepend set_ to fn names.
fn_names.iter_mut().for_each(|name| *name = format!("set_{}", name)); fn_names.iter_mut().for_each(|name| *name = format!("set_{name}"));
// Return early if we've found an existing fn // Return early if we've found an existing fn
let impl_def = find_struct_impl(ctx, &ast::Adt::Struct(strukt.clone()), &fn_names)?; let impl_def = find_struct_impl(ctx, &ast::Adt::Struct(strukt.clone()), &fn_names)?;

View file

@ -105,7 +105,7 @@ pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>
"Generate `IndexMut` impl from this `Index` trait", "Generate `IndexMut` impl from this `Index` trait",
target, target,
|edit| { |edit| {
edit.insert(target.start(), format!("$0{}\n\n", impl_def)); edit.insert(target.start(), format!("$0{impl_def}\n\n"));
}, },
) )
} }

View file

@ -368,7 +368,7 @@ fn inline(
_ => None, _ => None,
}) })
.for_each(|usage| { .for_each(|usage| {
ted::replace(usage, &this()); ted::replace(usage, this());
}); });
} }
} }
@ -483,7 +483,7 @@ fn inline(
cov_mark::hit!(inline_call_inline_direct_field); cov_mark::hit!(inline_call_inline_direct_field);
field.replace_expr(replacement.clone_for_update()); field.replace_expr(replacement.clone_for_update());
} else { } else {
ted::replace(usage.syntax(), &replacement.syntax().clone_for_update()); ted::replace(usage.syntax(), replacement.syntax().clone_for_update());
} }
}; };

View file

@ -67,9 +67,9 @@ pub(crate) fn into_to_qualified_from(acc: &mut Assists, ctx: &AssistContext<'_>)
edit.replace( edit.replace(
method_call.syntax().text_range(), method_call.syntax().text_range(),
if sc.chars().all(|c| c.is_alphanumeric() || c == ':') { if sc.chars().all(|c| c.is_alphanumeric() || c == ':') {
format!("{}::from({})", sc, receiver) format!("{sc}::from({receiver})")
} else { } else {
format!("<{}>::from({})", sc, receiver) format!("<{sc}>::from({receiver})")
}, },
); );
}, },

View file

@ -86,7 +86,7 @@ pub(crate) fn merge_nested_if(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt
nested_if_cond.syntax().text().to_string() nested_if_cond.syntax().text().to_string()
}; };
let replace_cond = format!("{} && {}", cond_text, nested_if_cond_text); let replace_cond = format!("{cond_text} && {nested_if_cond_text}");
edit.replace(cond_range, replace_cond); edit.replace(cond_range, replace_cond);
edit.replace(then_branch_range, nested_if_then_branch.syntax().text()); edit.replace(then_branch_range, nested_if_then_branch.syntax().text());

Some files were not shown because too many files have changed in this diff Show more