Lay the foundation for diagnostics in ty lowering, and implement a first diagnostic

The diagnostic implemented is a simple one (E0109). It serves as a test for the new foundation.
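
For illustration, this is the kind of code the new diagnostic flags (a made-up snippet; the real fixtures live in the new `generic_args_prohibited` tests below):

    mod m {
        pub struct S;
    }

    fn f() {
        // error[E0109]: generic arguments are not allowed on builtin types
        let _: bool<()>;
        // error[E0109]: generic arguments are not allowed on modules
        let _: m::<i32>::S;
    }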

This commit only implements diagnostics for types in bodies and body-carrying signatures; the next commit will add diagnostics for the remaining items.

Also fix a weird bug, found while implementing this, that caused `Fn::(A, B) -> C` (which is a valid, if bizarre, alternative syntax to `Fn(A, B) -> C`) to lower incorrectly.
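
A rough sketch of the two spellings involved (the new `fn_like_path_with_coloncolon` test checks the lowered path directly):

    // Per the commit message both spellings are valid; they should lower to the
    // same `Fn<(A, B), Output = C>` path, but the `::` form previously did not.
    type Plain      = dyn Fn(u32, u32) -> u32;
    type WithColons = dyn Fn::(u32, u32) -> u32;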

Also fix a maybe-bug where parentheses were needlessly sneaked into a code string; this went undetected until now because the make-AST family of APIs stripped the parentheses, but with a change in this commit they are now preserved, so fix that too.
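
Roughly, the pattern in question, as a sketch using the `make` helpers touched below (an illustration, not the exact code):

    use syntax::ast::{self, make};

    // Sketch only: build `Adt::method(arg)` from a plain path plus an explicit
    // argument list, instead of baking a literal "()" into the path text and
    // relying on the make-AST helpers to drop it.
    fn assoc_call(adt_name: &str, method_name: &str, arg: ast::Expr) -> String {
        let callee = make::expr_path(make::path_from_text(&format!("{adt_name}::{method_name}")));
        make::expr_call(callee, make::arg_list([arg])).to_string()
    }
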
Chayim Refael Friedman 2024-11-20 23:05:48 +02:00
parent 4e475a3245
commit 5f25ae3d1b
19 changed files with 811 additions and 80 deletions

Cargo.lock generated
View file

@@ -509,6 +509,7 @@ dependencies = [
  "base-db",
  "cfg",
  "either",
+ "expect-test",
  "hir-def",
  "hir-expand",
  "hir-ty",
@@ -519,6 +520,9 @@ dependencies = [
  "span",
  "stdx",
  "syntax",
+ "syntax-bridge",
+ "test-fixture",
+ "test-utils",
  "tracing",
  "triomphe",
  "tt",

View file

@@ -141,7 +141,7 @@ pub struct BodySourceMap {
     field_map_back: FxHashMap<ExprId, FieldSource>,
     pat_field_map_back: FxHashMap<PatId, PatFieldSource>,

-    types: TypesSourceMap,
+    pub types: TypesSourceMap,

     // FIXME: Make this a sane struct.
     template_map: Option<

View file

@@ -219,6 +219,8 @@ pub struct TypesSourceMap {
 }

 impl TypesSourceMap {
+    pub const EMPTY: Self = Self { types_map_back: ArenaMap::new() };
+
     pub fn type_syntax(&self, id: TypeRefId) -> Result<TypeSource, SyntheticSyntax> {
         self.types_map_back.get(id).cloned().ok_or(SyntheticSyntax)
     }

View file

@@ -1,5 +1,7 @@
 //! A desugared representation of paths like `crate::foo` or `<Type as Trait>::bar`.
 mod lower;
+#[cfg(test)]
+mod tests;

 use std::{
     fmt::{self, Display},
@@ -19,6 +21,8 @@ use syntax::ast;

 pub use hir_expand::mod_path::{path, ModPath, PathKind};

+pub use lower::hir_segment_to_ast_segment;
+
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub enum ImportAlias {
     /// Unnamed alias, as in `use Foo as _;`
@@ -230,7 +234,7 @@ impl Path {
     }
 }

-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct PathSegment<'a> {
     pub name: &'a Name,
     pub args_and_bindings: Option<&'a GenericArgs>,
@@ -274,6 +278,12 @@ impl<'a> PathSegments<'a> {
             generic_args: self.generic_args.map(|it| it.get(..len).unwrap_or(it)),
         }
     }
+    pub fn strip_last(&self) -> PathSegments<'a> {
+        PathSegments {
+            segments: self.segments.split_last().map_or(&[], |it| it.1),
+            generic_args: self.generic_args.map(|it| it.split_last().map_or(&[][..], |it| it.1)),
+        }
+    }
     pub fn iter(&self) -> impl Iterator<Item = PathSegment<'a>> {
         self.segments
             .iter()

View file

@ -17,13 +17,31 @@ use crate::{
type_ref::{LifetimeRef, TypeBound, TypeRef}, type_ref::{LifetimeRef, TypeBound, TypeRef},
}; };
#[cfg(test)]
thread_local! {
/// This is used to test `hir_segment_to_ast_segment()`. It's a hack, but it makes testing much easier.
pub(super) static SEGMENT_LOWERING_MAP: std::cell::RefCell<rustc_hash::FxHashMap<ast::PathSegment, usize>> = std::cell::RefCell::default();
}
/// Converts an `ast::Path` to `Path`. Works with use trees. /// Converts an `ast::Path` to `Path`. Works with use trees.
/// It correctly handles `$crate` based path from macro call. /// It correctly handles `$crate` based path from macro call.
// If you modify the logic of the lowering, make sure to check if `hir_segment_to_ast_segment()`
// also needs an update.
pub(super) fn lower_path(ctx: &mut LowerCtx<'_>, mut path: ast::Path) -> Option<Path> { pub(super) fn lower_path(ctx: &mut LowerCtx<'_>, mut path: ast::Path) -> Option<Path> {
let mut kind = PathKind::Plain; let mut kind = PathKind::Plain;
let mut type_anchor = None; let mut type_anchor = None;
let mut segments = Vec::new(); let mut segments = Vec::new();
let mut generic_args = Vec::new(); let mut generic_args = Vec::new();
#[cfg(test)]
let mut ast_segments = Vec::new();
#[cfg(test)]
let mut ast_segments_offset = 0;
#[allow(unused_mut)]
let mut push_segment = |_segment: &ast::PathSegment, segments: &mut Vec<Name>, name| {
#[cfg(test)]
ast_segments.push(_segment.clone());
segments.push(name);
};
loop { loop {
let segment = path.segment()?; let segment = path.segment()?;
@ -34,6 +52,10 @@ pub(super) fn lower_path(ctx: &mut LowerCtx<'_>, mut path: ast::Path) -> Option<
match segment.kind()? { match segment.kind()? {
ast::PathSegmentKind::Name(name_ref) => { ast::PathSegmentKind::Name(name_ref) => {
if name_ref.text() == "$crate" { if name_ref.text() == "$crate" {
if path.qualifier().is_some() {
// FIXME: Report an error.
return None;
}
break kind = resolve_crate_root( break kind = resolve_crate_root(
ctx.db.upcast(), ctx.db.upcast(),
ctx.span_map().span_for_range(name_ref.syntax().text_range()).ctx, ctx.span_map().span_for_range(name_ref.syntax().text_range()).ctx,
@@ -56,10 +78,10 @@ pub(super) fn lower_path(ctx: &mut LowerCtx<'_>, mut path: ast::Path) -> Option<
                    generic_args.resize(segments.len(), None);
                    generic_args.push(args);
                }
-                segments.push(name);
+                push_segment(&segment, &mut segments, name);
            }
            ast::PathSegmentKind::SelfTypeKw => {
-                segments.push(Name::new_symbol_root(sym::Self_.clone()));
+                push_segment(&segment, &mut segments, Name::new_symbol_root(sym::Self_.clone()));
            }
            ast::PathSegmentKind::Type { type_ref, trait_ref } => {
                assert!(path.qualifier().is_none()); // this can only occur at the first segment
@ -81,6 +103,10 @@ pub(super) fn lower_path(ctx: &mut LowerCtx<'_>, mut path: ast::Path) -> Option<
kind = mod_path.kind; kind = mod_path.kind;
segments.extend(mod_path.segments().iter().cloned().rev()); segments.extend(mod_path.segments().iter().cloned().rev());
#[cfg(test)]
{
ast_segments_offset = mod_path.segments().len();
}
if let Some(path_generic_args) = path_generic_args { if let Some(path_generic_args) = path_generic_args {
generic_args.resize(segments.len() - num_segments, None); generic_args.resize(segments.len() - num_segments, None);
generic_args.extend(Vec::from(path_generic_args).into_iter().rev()); generic_args.extend(Vec::from(path_generic_args).into_iter().rev());
@ -112,10 +138,18 @@ pub(super) fn lower_path(ctx: &mut LowerCtx<'_>, mut path: ast::Path) -> Option<
} }
} }
ast::PathSegmentKind::CrateKw => { ast::PathSegmentKind::CrateKw => {
if path.qualifier().is_some() {
// FIXME: Report an error.
return None;
}
kind = PathKind::Crate; kind = PathKind::Crate;
break; break;
} }
ast::PathSegmentKind::SelfKw => { ast::PathSegmentKind::SelfKw => {
if path.qualifier().is_some() {
// FIXME: Report an error.
return None;
}
// don't break out if `self` is the last segment of a path, this mean we got a // don't break out if `self` is the last segment of a path, this mean we got a
// use tree like `foo::{self}` which we want to resolve as `foo` // use tree like `foo::{self}` which we want to resolve as `foo`
if !segments.is_empty() { if !segments.is_empty() {
@ -162,6 +196,13 @@ pub(super) fn lower_path(ctx: &mut LowerCtx<'_>, mut path: ast::Path) -> Option<
} }
} }
#[cfg(test)]
{
ast_segments.reverse();
SEGMENT_LOWERING_MAP
.with_borrow_mut(|map| map.extend(ast_segments.into_iter().zip(ast_segments_offset..)));
}
let mod_path = Interned::new(ModPath::from_segments(kind, segments)); let mod_path = Interned::new(ModPath::from_segments(kind, segments));
if type_anchor.is_none() && generic_args.is_empty() { if type_anchor.is_none() && generic_args.is_empty() {
return Some(Path::BarePath(mod_path)); return Some(Path::BarePath(mod_path));
@ -181,6 +222,41 @@ pub(super) fn lower_path(ctx: &mut LowerCtx<'_>, mut path: ast::Path) -> Option<
} }
} }
/// This function finds the AST segment that corresponds to the HIR segment
/// with index `segment_idx` on the path that is lowered from `path`.
pub fn hir_segment_to_ast_segment(path: &ast::Path, segment_idx: u32) -> Option<ast::PathSegment> {
// Too tightly coupled to `lower_path()`, but unfortunately we cannot decouple them,
// as keeping source maps for all paths segments will have a severe impact on memory usage.
let mut segments = path.segments();
if let Some(ast::PathSegmentKind::Type { trait_ref: Some(trait_ref), .. }) =
segments.clone().next().and_then(|it| it.kind())
{
segments.next();
return find_segment(trait_ref.path()?.segments().chain(segments), segment_idx);
}
return find_segment(segments, segment_idx);
fn find_segment(
segments: impl Iterator<Item = ast::PathSegment>,
segment_idx: u32,
) -> Option<ast::PathSegment> {
segments
.filter(|segment| match segment.kind() {
Some(
ast::PathSegmentKind::CrateKw
| ast::PathSegmentKind::SelfKw
| ast::PathSegmentKind::SuperKw
| ast::PathSegmentKind::Type { .. },
)
| None => false,
Some(ast::PathSegmentKind::Name(name)) => name.text() != "$crate",
Some(ast::PathSegmentKind::SelfTypeKw) => true,
})
.nth(segment_idx as usize)
}
}
pub(super) fn lower_generic_args( pub(super) fn lower_generic_args(
lower_ctx: &mut LowerCtx<'_>, lower_ctx: &mut LowerCtx<'_>,
node: ast::GenericArgList, node: ast::GenericArgList,

View file

@ -0,0 +1,126 @@
use expect_test::{expect, Expect};
use span::Edition;
use syntax::ast::{self, make};
use test_fixture::WithFixture;
use crate::{
lower::LowerCtx,
path::{
lower::{hir_segment_to_ast_segment, SEGMENT_LOWERING_MAP},
Path,
},
pretty,
test_db::TestDB,
type_ref::{TypesMap, TypesSourceMap},
};
fn lower_path(path: ast::Path) -> (TestDB, TypesMap, Option<Path>) {
let (db, file_id) = TestDB::with_single_file("");
let mut types_map = TypesMap::default();
let mut types_source_map = TypesSourceMap::default();
let mut ctx = LowerCtx::new(&db, file_id.into(), &mut types_map, &mut types_source_map);
let lowered_path = ctx.lower_path(path);
(db, types_map, lowered_path)
}
#[track_caller]
fn check_hir_to_ast(path: &str, ignore_segments: &[&str]) {
let path = make::path_from_text(path);
SEGMENT_LOWERING_MAP.with_borrow_mut(|map| map.clear());
let _ = lower_path(path.clone()).2.expect("failed to lower path");
SEGMENT_LOWERING_MAP.with_borrow(|map| {
for (segment, segment_idx) in map {
if ignore_segments.contains(&&*segment.to_string()) {
continue;
}
let restored_segment = hir_segment_to_ast_segment(&path, *segment_idx as u32)
.unwrap_or_else(|| {
panic!(
"failed to map back segment `{segment}` \
numbered {segment_idx} in HIR from path `{path}`"
)
});
assert_eq!(
segment, &restored_segment,
"mapping back `{segment}` numbered {segment_idx} in HIR \
from path `{path}` produced incorrect segment `{restored_segment}`"
);
}
});
}
#[test]
fn hir_to_ast_trait_ref() {
check_hir_to_ast("<A as B::C::D>::E::F", &["A"]);
}
#[test]
fn hir_to_ast_plain_path() {
check_hir_to_ast("A::B::C::D::E::F", &[]);
}
#[test]
fn hir_to_ast_crate_path() {
check_hir_to_ast("crate::A::B::C", &[]);
check_hir_to_ast("crate::super::super::A::B::C", &[]);
}
#[test]
fn hir_to_ast_self_path() {
check_hir_to_ast("self::A::B::C", &[]);
check_hir_to_ast("self::super::super::A::B::C", &[]);
}
#[test]
fn hir_to_ast_super_path() {
check_hir_to_ast("super::A::B::C", &[]);
check_hir_to_ast("super::super::super::A::B::C", &[]);
}
#[test]
fn hir_to_ast_type_anchor_path() {
check_hir_to_ast("<A::B>::C::D", &["A", "B"]);
}
#[test]
fn hir_to_ast_path_super_in_middle() {
check_hir_to_ast("A::super::B::super::super::C::D", &[]);
}
#[track_caller]
fn check_fail_lowering(path: &str) {
let (_, _, lowered_path) = lower_path(make::path_from_text(path));
assert!(lowered_path.is_none(), "path `{path}` should fail lowering");
}
#[test]
fn keywords_in_middle_fail_lowering1() {
check_fail_lowering("self::A::self::B::super::C::crate::D");
}
#[test]
fn keywords_in_middle_fail_lowering2() {
check_fail_lowering("A::super::self::C::D");
}
#[test]
fn keywords_in_middle_fail_lowering3() {
check_fail_lowering("A::crate::B::C::D");
}
#[track_caller]
fn check_path_lowering(path: &str, expected: Expect) {
let (db, types_map, lowered_path) = lower_path(make::path_from_text(path));
let lowered_path = lowered_path.expect("failed to lower path");
let mut buf = String::new();
pretty::print_path(&db, &lowered_path, &types_map, &mut buf, Edition::CURRENT)
.expect("failed to pretty-print path");
expected.assert_eq(&buf);
}
#[test]
fn fn_like_path_with_coloncolon() {
check_path_lowering("Fn::(A, B) -> C", expect![[r#"Fn::<(A, B), Output = C>"#]]);
check_path_lowering("Fn::(A, B)", expect![[r#"Fn::<(A, B), Output = ()>"#]]);
}

View file

@@ -180,6 +180,13 @@ impl<FileId: FileIdToSyntax, T> InFileWrapper<FileId, T> {
     }
 }

+#[allow(private_bounds)]
+impl<FileId: FileIdToSyntax, N: AstNode> InFileWrapper<FileId, AstPtr<N>> {
+    pub fn to_node(&self, db: &dyn ExpandDatabase) -> N {
+        self.value.to_node(&self.file_syntax(db))
+    }
+}
+
 impl<FileId: Copy, N: AstNode> InFileWrapper<FileId, N> {
     pub fn syntax(&self) -> InFileWrapper<FileId, &SyntaxNode> {
         self.with_value(self.value.syntax())

View file

@@ -58,7 +58,7 @@ use crate::{
     fold_tys,
     generics::Generics,
     infer::{coerce::CoerceMany, expr::ExprIsRead, unify::InferenceTable},
-    lower::ImplTraitLoweringMode,
+    lower::{ImplTraitLoweringMode, TyLoweringDiagnostic},
     mir::MirSpan,
     to_assoc_type_id,
     traits::FnTrait,
@@ -191,6 +191,14 @@ impl<T> InferOk<T> {
     }
 }

+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+pub enum InferenceTyDiagnosticSource {
+    /// Diagnostics that come from types in the body.
+    Body,
+    /// Diagnostics that come from types in fn parameters/return type, or static & const types.
+    Signature,
+}
+
 #[derive(Debug)]
 pub(crate) struct TypeError;

 pub(crate) type InferResult<T> = Result<InferOk<T>, TypeError>;
@@ -264,6 +272,10 @@ pub enum InferenceDiagnostic {
         expr_ty: Ty,
         cast_ty: Ty,
     },
+    TyDiagnostic {
+        source: InferenceTyDiagnosticSource,
+        diag: TyLoweringDiagnostic,
+    },
 }

 /// A mismatch between an expected and an inferred type.
@@ -858,7 +870,8 @@ impl<'a> InferenceContext<'a> {
     }

     fn collect_const(&mut self, data: &ConstData) {
-        let return_ty = self.make_ty(data.type_ref, &data.types_map);
+        let return_ty =
+            self.make_ty(data.type_ref, &data.types_map, InferenceTyDiagnosticSource::Signature);

         // Constants might be defining usage sites of TAITs.
         self.make_tait_coercion_table(iter::once(&return_ty));
@@ -867,7 +880,8 @@ impl<'a> InferenceContext<'a> {
     }

     fn collect_static(&mut self, data: &StaticData) {
-        let return_ty = self.make_ty(data.type_ref, &data.types_map);
+        let return_ty =
+            self.make_ty(data.type_ref, &data.types_map, InferenceTyDiagnosticSource::Signature);

         // Statics might be defining usage sites of TAITs.
         self.make_tait_coercion_table(iter::once(&return_ty));
@@ -877,11 +891,12 @@ impl<'a> InferenceContext<'a> {
     fn collect_fn(&mut self, func: FunctionId) {
         let data = self.db.function_data(func);
-        let mut param_tys = self.with_ty_lowering(&data.types_map, |ctx| {
-            ctx.type_param_mode(ParamLoweringMode::Placeholder)
-                .impl_trait_mode(ImplTraitLoweringMode::Param);
-            data.params.iter().map(|&type_ref| ctx.lower_ty(type_ref)).collect::<Vec<_>>()
-        });
+        let mut param_tys =
+            self.with_ty_lowering(&data.types_map, InferenceTyDiagnosticSource::Signature, |ctx| {
+                ctx.type_param_mode(ParamLoweringMode::Placeholder)
+                    .impl_trait_mode(ImplTraitLoweringMode::Param);
+                data.params.iter().map(|&type_ref| ctx.lower_ty(type_ref)).collect::<Vec<_>>()
+            });

         // Check if function contains a va_list, if it does then we append it to the parameter types
         // that are collected from the function data
         if data.is_varargs() {
@@ -918,11 +933,12 @@ impl<'a> InferenceContext<'a> {
         }
         let return_ty = data.ret_type;

-        let return_ty = self.with_ty_lowering(&data.types_map, |ctx| {
-            ctx.type_param_mode(ParamLoweringMode::Placeholder)
-                .impl_trait_mode(ImplTraitLoweringMode::Opaque)
-                .lower_ty(return_ty)
-        });
+        let return_ty =
+            self.with_ty_lowering(&data.types_map, InferenceTyDiagnosticSource::Signature, |ctx| {
+                ctx.type_param_mode(ParamLoweringMode::Placeholder)
+                    .impl_trait_mode(ImplTraitLoweringMode::Opaque)
+                    .lower_ty(return_ty)
+            });

         let return_ty = self.insert_type_vars(return_ty);
         let return_ty = if let Some(rpits) = self.db.return_type_impl_traits(func) {
@@ -1226,9 +1242,20 @@ impl<'a> InferenceContext<'a> {
         self.result.diagnostics.push(diagnostic);
     }

+    fn push_ty_diagnostics(
+        &mut self,
+        source: InferenceTyDiagnosticSource,
+        diagnostics: Vec<TyLoweringDiagnostic>,
+    ) {
+        self.result.diagnostics.extend(
+            diagnostics.into_iter().map(|diag| InferenceDiagnostic::TyDiagnostic { source, diag }),
+        );
+    }
+
     fn with_ty_lowering<R>(
-        &self,
+        &mut self,
         types_map: &TypesMap,
+        types_source: InferenceTyDiagnosticSource,
         f: impl FnOnce(&mut crate::lower::TyLoweringContext<'_>) -> R,
     ) -> R {
         let mut ctx = crate::lower::TyLoweringContext::new(
@@ -1237,32 +1264,41 @@ impl<'a> InferenceContext<'a> {
             types_map,
             self.owner.into(),
         );
-        f(&mut ctx)
+        let result = f(&mut ctx);
+        self.push_ty_diagnostics(types_source, ctx.diagnostics);
+        result
     }

     fn with_body_ty_lowering<R>(
-        &self,
+        &mut self,
         f: impl FnOnce(&mut crate::lower::TyLoweringContext<'_>) -> R,
     ) -> R {
-        self.with_ty_lowering(&self.body.types, f)
+        self.with_ty_lowering(&self.body.types, InferenceTyDiagnosticSource::Body, f)
     }

-    fn make_ty(&mut self, type_ref: TypeRefId, types_map: &TypesMap) -> Ty {
-        let ty = self.with_ty_lowering(types_map, |ctx| ctx.lower_ty(type_ref));
+    fn make_ty(
+        &mut self,
+        type_ref: TypeRefId,
+        types_map: &TypesMap,
+        type_source: InferenceTyDiagnosticSource,
+    ) -> Ty {
+        let ty = self.with_ty_lowering(types_map, type_source, |ctx| ctx.lower_ty(type_ref));
         let ty = self.insert_type_vars(ty);
         self.normalize_associated_types_in(ty)
     }

     fn make_body_ty(&mut self, type_ref: TypeRefId) -> Ty {
-        self.make_ty(type_ref, &self.body.types)
+        self.make_ty(type_ref, &self.body.types, InferenceTyDiagnosticSource::Body)
     }

     fn err_ty(&self) -> Ty {
         self.result.standard_types.unknown.clone()
     }

-    fn make_lifetime(&mut self, lifetime_ref: &LifetimeRef) -> Lifetime {
-        let lt = self.with_ty_lowering(TypesMap::EMPTY, |ctx| ctx.lower_lifetime(lifetime_ref));
+    fn make_body_lifetime(&mut self, lifetime_ref: &LifetimeRef) -> Lifetime {
+        let lt = self.with_ty_lowering(TypesMap::EMPTY, InferenceTyDiagnosticSource::Body, |ctx| {
+            ctx.lower_lifetime(lifetime_ref)
+        });
         self.insert_type_vars(lt)
     }
@@ -1431,12 +1467,20 @@ impl<'a> InferenceContext<'a> {
             Some(ResolveValueResult::ValueNs(value, _)) => match value {
                 ValueNs::EnumVariantId(var) => {
                     let substs = ctx.substs_from_path(path, var.into(), true);
+                    self.push_ty_diagnostics(
+                        InferenceTyDiagnosticSource::Body,
+                        ctx.diagnostics,
+                    );
                     let ty = self.db.ty(var.lookup(self.db.upcast()).parent.into());
                     let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
                     return (ty, Some(var.into()));
                 }
                 ValueNs::StructId(strukt) => {
                     let substs = ctx.substs_from_path(path, strukt.into(), true);
+                    self.push_ty_diagnostics(
+                        InferenceTyDiagnosticSource::Body,
+                        ctx.diagnostics,
+                    );
                     let ty = self.db.ty(strukt.into());
                     let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
                     return (ty, Some(strukt.into()));
@@ -1462,18 +1506,21 @@ impl<'a> InferenceContext<'a> {
         return match resolution {
             TypeNs::AdtId(AdtId::StructId(strukt)) => {
                 let substs = ctx.substs_from_path(path, strukt.into(), true);
+                self.push_ty_diagnostics(InferenceTyDiagnosticSource::Body, ctx.diagnostics);
                 let ty = self.db.ty(strukt.into());
                 let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
                 forbid_unresolved_segments((ty, Some(strukt.into())), unresolved)
             }
             TypeNs::AdtId(AdtId::UnionId(u)) => {
                 let substs = ctx.substs_from_path(path, u.into(), true);
+                self.push_ty_diagnostics(InferenceTyDiagnosticSource::Body, ctx.diagnostics);
                 let ty = self.db.ty(u.into());
                 let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
                 forbid_unresolved_segments((ty, Some(u.into())), unresolved)
             }
             TypeNs::EnumVariantId(var) => {
                 let substs = ctx.substs_from_path(path, var.into(), true);
+                self.push_ty_diagnostics(InferenceTyDiagnosticSource::Body, ctx.diagnostics);
                 let ty = self.db.ty(var.lookup(self.db.upcast()).parent.into());
                 let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
                 forbid_unresolved_segments((ty, Some(var.into())), unresolved)
@@ -1519,6 +1566,9 @@ impl<'a> InferenceContext<'a> {
                         resolved_segment,
                         current_segment,
                         false,
+                        &mut |_, _reason| {
+                            // FIXME: Report an error.
+                        },
                     );

                     ty = self.table.insert_type_vars(ty);
@@ -1532,6 +1582,7 @@ impl<'a> InferenceContext<'a> {
                     remaining_idx += 1;
                     remaining_segments = remaining_segments.skip(1);
                 }
+                self.push_ty_diagnostics(InferenceTyDiagnosticSource::Body, ctx.diagnostics);

                 let variant = ty.as_adt().and_then(|(id, _)| match id {
                     AdtId::StructId(s) => Some(VariantId::StructId(s)),
@@ -1550,6 +1601,7 @@ impl<'a> InferenceContext<'a> {
                 };
                 let substs =
                     ctx.substs_from_path_segment(resolved_seg, Some(it.into()), true, None);
+                self.push_ty_diagnostics(InferenceTyDiagnosticSource::Body, ctx.diagnostics);
                 let ty = self.db.ty(it.into());
                 let ty = self.insert_type_vars(ty.substitute(Interner, &substs));

View file

@@ -2155,7 +2155,7 @@ impl InferenceContext<'_> {
                         DebruijnIndex::INNERMOST,
                     )
                 },
-                |this, lt_ref| this.make_lifetime(lt_ref),
+                |this, lt_ref| this.make_body_lifetime(lt_ref),
             ),
         };

View file

@@ -19,7 +19,7 @@ use crate::{
     TyBuilder, TyExt, TyKind, ValueTyDefId,
 };

-use super::{ExprOrPatId, InferenceContext};
+use super::{ExprOrPatId, InferenceContext, InferenceTyDiagnosticSource};

 impl InferenceContext<'_> {
     pub(super) fn infer_path(&mut self, path: &Path, id: ExprOrPatId) -> Option<Ty> {
@@ -163,6 +163,7 @@ impl InferenceContext<'_> {
                 let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1);
                 let (ty, _) = ctx.lower_ty_relative_path(ty, orig_ns, remaining_segments_for_ty);
+                self.push_ty_diagnostics(InferenceTyDiagnosticSource::Body, ctx.diagnostics);
                 let ty = self.table.insert_type_vars(ty);
                 let ty = self.table.normalize_associated_types_in(ty);
                 self.resolve_ty_assoc_item(ty, last.name, id).map(|(it, substs)| (it, Some(substs)))?
@@ -265,6 +266,9 @@ impl InferenceContext<'_> {
                     resolved_segment,
                     remaining_segments_for_ty,
                     true,
+                    &mut |_, _reason| {
+                        // FIXME: Report an error.
+                    },
                 )
             });
         if ty.is_unknown() {

View file

@@ -84,12 +84,13 @@ pub use infer::{
     cast::CastError,
     closure::{CaptureKind, CapturedItem},
     could_coerce, could_unify, could_unify_deeply, Adjust, Adjustment, AutoBorrow, BindingMode,
-    InferenceDiagnostic, InferenceResult, OverloadedDeref, PointerCast,
+    InferenceDiagnostic, InferenceResult, InferenceTyDiagnosticSource, OverloadedDeref,
+    PointerCast,
 };
 pub use interner::Interner;
 pub use lower::{
-    associated_type_shorthand_candidates, ImplTraitLoweringMode, ParamLoweringMode, TyDefId,
-    TyLoweringContext, ValueTyDefId,
+    associated_type_shorthand_candidates, GenericArgsProhibitedReason, ImplTraitLoweringMode,
+    ParamLoweringMode, TyDefId, TyLoweringContext, TyLoweringDiagnosticKind, ValueTyDefId,
 };
 pub use mapping::{
     from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, from_placeholder_idx,

View file

@ -102,6 +102,31 @@ impl ImplTraitLoweringState {
} }
} }
type TypeSource = Either<TypeRefId, hir_def::type_ref::TypeSource>;
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct TyLoweringDiagnostic {
pub source: TypeSource,
pub kind: TyLoweringDiagnosticKind,
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum TyLoweringDiagnosticKind {
GenericArgsProhibited { segment: u32, reason: GenericArgsProhibitedReason },
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum GenericArgsProhibitedReason {
Module,
TyParam,
SelfTy,
PrimitiveTy,
/// When there is a generic enum, within the expression `Enum::Variant`,
/// either `Enum` or `Variant` are allowed to have generic arguments, but not both.
// FIXME: This is not used now but it should be.
EnumVariant,
}
#[derive(Debug)] #[derive(Debug)]
pub struct TyLoweringContext<'a> { pub struct TyLoweringContext<'a> {
pub db: &'a dyn HirDatabase, pub db: &'a dyn HirDatabase,
@ -125,6 +150,7 @@ pub struct TyLoweringContext<'a> {
expander: Option<Expander>, expander: Option<Expander>,
/// Tracks types with explicit `?Sized` bounds. /// Tracks types with explicit `?Sized` bounds.
pub(crate) unsized_types: FxHashSet<Ty>, pub(crate) unsized_types: FxHashSet<Ty>,
pub(crate) diagnostics: Vec<TyLoweringDiagnostic>,
} }
impl<'a> TyLoweringContext<'a> { impl<'a> TyLoweringContext<'a> {
@ -159,6 +185,7 @@ impl<'a> TyLoweringContext<'a> {
type_param_mode, type_param_mode,
expander: None, expander: None,
unsized_types: FxHashSet::default(), unsized_types: FxHashSet::default(),
diagnostics: Vec::new(),
} }
} }
@ -198,6 +225,20 @@ impl<'a> TyLoweringContext<'a> {
self.type_param_mode = type_param_mode; self.type_param_mode = type_param_mode;
self self
} }
pub fn push_diagnostic(&mut self, type_ref: TypeRefId, kind: TyLoweringDiagnosticKind) {
let source = match self.types_source_map {
Some(source_map) => {
let Ok(source) = source_map.type_syntax(type_ref) else {
stdx::never!("error in synthetic type");
return;
};
Either::Right(source)
}
None => Either::Left(type_ref),
};
self.diagnostics.push(TyLoweringDiagnostic { source, kind });
}
} }
#[derive(Copy, Clone, Debug, PartialEq, Eq, Default)] #[derive(Copy, Clone, Debug, PartialEq, Eq, Default)]
@ -464,6 +505,7 @@ impl<'a> TyLoweringContext<'a> {
impl_trait_mode: mem::take(&mut self.impl_trait_mode), impl_trait_mode: mem::take(&mut self.impl_trait_mode),
expander: self.expander.take(), expander: self.expander.take(),
unsized_types: mem::take(&mut self.unsized_types), unsized_types: mem::take(&mut self.unsized_types),
diagnostics: mem::take(&mut self.diagnostics),
}; };
let ty = inner_ctx.lower_ty(type_ref); let ty = inner_ctx.lower_ty(type_ref);
@ -471,6 +513,7 @@ impl<'a> TyLoweringContext<'a> {
self.impl_trait_mode = inner_ctx.impl_trait_mode; self.impl_trait_mode = inner_ctx.impl_trait_mode;
self.expander = inner_ctx.expander; self.expander = inner_ctx.expander;
self.unsized_types = inner_ctx.unsized_types; self.unsized_types = inner_ctx.unsized_types;
self.diagnostics = inner_ctx.diagnostics;
self.expander.as_mut().unwrap().exit(mark); self.expander.as_mut().unwrap().exit(mark);
Some(ty) Some(ty)
@ -542,6 +585,10 @@ impl<'a> TyLoweringContext<'a> {
resolved_segment: PathSegment<'_>, resolved_segment: PathSegment<'_>,
remaining_segments: PathSegments<'_>, remaining_segments: PathSegments<'_>,
infer_args: bool, infer_args: bool,
on_prohibited_generics_for_resolved_segment: &mut dyn FnMut(
&mut Self,
GenericArgsProhibitedReason,
),
) -> (Ty, Option<TypeNs>) { ) -> (Ty, Option<TypeNs>) {
let ty = match resolution { let ty = match resolution {
TypeNs::TraitId(trait_) => { TypeNs::TraitId(trait_) => {
@ -608,28 +655,44 @@ impl<'a> TyLoweringContext<'a> {
// FIXME(trait_alias): Implement trait alias. // FIXME(trait_alias): Implement trait alias.
return (TyKind::Error.intern(Interner), None); return (TyKind::Error.intern(Interner), None);
} }
TypeNs::GenericParam(param_id) => match self.type_param_mode { TypeNs::GenericParam(param_id) => {
ParamLoweringMode::Placeholder => { if resolved_segment.args_and_bindings.is_some() {
TyKind::Placeholder(to_placeholder_idx(self.db, param_id.into())) on_prohibited_generics_for_resolved_segment(
self,
GenericArgsProhibitedReason::TyParam,
);
} }
ParamLoweringMode::Variable => {
let idx = match self
.generics()
.expect("generics in scope")
.type_or_const_param_idx(param_id.into())
{
None => {
never!("no matching generics");
return (TyKind::Error.intern(Interner), None);
}
Some(idx) => idx,
};
TyKind::BoundVar(BoundVar::new(self.in_binders, idx)) match self.type_param_mode {
ParamLoweringMode::Placeholder => {
TyKind::Placeholder(to_placeholder_idx(self.db, param_id.into()))
}
ParamLoweringMode::Variable => {
let idx = match self
.generics()
.expect("generics in scope")
.type_or_const_param_idx(param_id.into())
{
None => {
never!("no matching generics");
return (TyKind::Error.intern(Interner), None);
}
Some(idx) => idx,
};
TyKind::BoundVar(BoundVar::new(self.in_binders, idx))
}
} }
.intern(Interner)
} }
.intern(Interner),
TypeNs::SelfType(impl_id) => { TypeNs::SelfType(impl_id) => {
if resolved_segment.args_and_bindings.is_some() {
on_prohibited_generics_for_resolved_segment(
self,
GenericArgsProhibitedReason::SelfTy,
);
}
let generics = self.generics().expect("impl should have generic param scope"); let generics = self.generics().expect("impl should have generic param scope");
match self.type_param_mode { match self.type_param_mode {
@ -655,6 +718,13 @@ impl<'a> TyLoweringContext<'a> {
} }
} }
TypeNs::AdtSelfType(adt) => { TypeNs::AdtSelfType(adt) => {
if resolved_segment.args_and_bindings.is_some() {
on_prohibited_generics_for_resolved_segment(
self,
GenericArgsProhibitedReason::SelfTy,
);
}
let generics = generics(self.db.upcast(), adt.into()); let generics = generics(self.db.upcast(), adt.into());
let substs = match self.type_param_mode { let substs = match self.type_param_mode {
ParamLoweringMode::Placeholder => generics.placeholder_subst(self.db), ParamLoweringMode::Placeholder => generics.placeholder_subst(self.db),
@ -667,6 +737,12 @@ impl<'a> TyLoweringContext<'a> {
TypeNs::AdtId(it) => self.lower_path_inner(resolved_segment, it.into(), infer_args), TypeNs::AdtId(it) => self.lower_path_inner(resolved_segment, it.into(), infer_args),
TypeNs::BuiltinType(it) => { TypeNs::BuiltinType(it) => {
if resolved_segment.args_and_bindings.is_some() {
on_prohibited_generics_for_resolved_segment(
self,
GenericArgsProhibitedReason::PrimitiveTy,
);
}
self.lower_path_inner(resolved_segment, it.into(), infer_args) self.lower_path_inner(resolved_segment, it.into(), infer_args)
} }
TypeNs::TypeAliasId(it) => { TypeNs::TypeAliasId(it) => {
@ -698,14 +774,39 @@ impl<'a> TyLoweringContext<'a> {
return (ty, None); return (ty, None);
} }
let (resolved_segment, remaining_segments) = match remaining_index { let (module_segments, resolved_segment_idx, resolved_segment, remaining_segments) =
None => ( match remaining_index {
path.segments().last().expect("resolved path has at least one element"), None => (
PathSegments::EMPTY, path.segments().strip_last(),
), path.segments().len() - 1,
Some(i) => (path.segments().get(i - 1).unwrap(), path.segments().skip(i)), path.segments().last().expect("resolved path has at least one element"),
}; PathSegments::EMPTY,
self.lower_partly_resolved_path(resolution, resolved_segment, remaining_segments, false) ),
Some(i) => (
path.segments().take(i - 1),
i - 1,
path.segments().get(i - 1).unwrap(),
path.segments().skip(i),
),
};
self.prohibit_generics(path_id, 0, module_segments, GenericArgsProhibitedReason::Module);
self.lower_partly_resolved_path(
resolution,
resolved_segment,
remaining_segments,
false,
&mut |this, reason| {
this.push_diagnostic(
path_id.type_ref(),
TyLoweringDiagnosticKind::GenericArgsProhibited {
segment: resolved_segment_idx as u32,
reason,
},
)
},
)
} }
fn select_associated_type(&mut self, res: Option<TypeNs>, segment: PathSegment<'_>) -> Ty { fn select_associated_type(&mut self, res: Option<TypeNs>, segment: PathSegment<'_>) -> Ty {
@@ -742,12 +843,8 @@ impl<'a> TyLoweringContext<'a> {
                 // generic params. It's inefficient to splice the `Substitution`s, so we may want
                 // that method to optionally take parent `Substitution` as we already know them at
                 // this point (`t.substitution`).
-                let substs = self.substs_from_path_segment(
-                    segment.clone(),
-                    Some(associated_ty.into()),
-                    false,
-                    None,
-                );
+                let substs =
+                    self.substs_from_path_segment(segment, Some(associated_ty.into()), false, None);

                 let len_self =
                     crate::generics::generics(self.db.upcast(), associated_ty.into()).len_self();
@ -999,6 +1096,23 @@ impl<'a> TyLoweringContext<'a> {
TraitRef { trait_id: to_chalk_trait_id(resolved), substitution: substs } TraitRef { trait_id: to_chalk_trait_id(resolved), substitution: substs }
} }
fn prohibit_generics(
&mut self,
path_id: PathId,
idx: u32,
segments: PathSegments<'_>,
reason: GenericArgsProhibitedReason,
) {
segments.iter().zip(idx..).for_each(|(segment, idx)| {
if segment.args_and_bindings.is_some() {
self.push_diagnostic(
path_id.type_ref(),
TyLoweringDiagnosticKind::GenericArgsProhibited { segment: idx, reason },
);
}
});
}
fn lower_trait_ref_from_path( fn lower_trait_ref_from_path(
&mut self, &mut self,
path_id: PathId, path_id: PathId,
@ -1010,6 +1124,13 @@ impl<'a> TyLoweringContext<'a> {
TypeNs::TraitId(tr) => tr, TypeNs::TraitId(tr) => tr,
_ => return None, _ => return None,
}; };
// Do this after we verify it's indeed a trait to not confuse the user if they're not modules.
self.prohibit_generics(
path_id,
0,
path.segments().strip_last(),
GenericArgsProhibitedReason::Module,
);
let segment = path.segments().last().expect("path should have at least one segment"); let segment = path.segments().last().expect("path should have at least one segment");
Some(self.lower_trait_ref_from_resolved_path(resolved, segment, explicit_self_ty)) Some(self.lower_trait_ref_from_resolved_path(resolved, segment, explicit_self_ty))
} }
@@ -1233,7 +1354,9 @@ impl<'a> TyLoweringContext<'a> {
                 }
                 _ => unreachable!(),
             }
-            ext.lower_ty(type_ref)
+            let ty = ext.lower_ty(type_ref);
+            self.diagnostics.extend(ext.diagnostics);
+            ty
         } else {
             self.lower_ty(type_ref)
         };

View file

@@ -33,6 +33,14 @@ syntax.workspace = true
 tt.workspace = true
 span.workspace = true

+[dev-dependencies]
+expect-test.workspace = true
+
+# local deps
+test-utils.workspace = true
+test-fixture.workspace = true
+syntax-bridge.workspace = true
+
 [features]
 in-rust-tree = ["hir-expand/in-rust-tree"]

View file

@@ -3,23 +3,34 @@
 //!
 //! This probably isn't the best way to do this -- ideally, diagnostics should
 //! be expressed in terms of hir types themselves.
-pub use hir_ty::diagnostics::{CaseType, IncorrectCase};
+use cfg::{CfgExpr, CfgOptions};
+use either::Either;
+use hir_def::{
+    hir::ExprOrPatId,
+    path::{hir_segment_to_ast_segment, ModPath},
+    type_ref::TypesSourceMap,
+    AssocItemId, DefWithBodyId, SyntheticSyntax,
+};
+use hir_expand::{name::Name, HirFileId, InFile};
 use hir_ty::{
     db::HirDatabase,
     diagnostics::{BodyValidationDiagnostic, UnsafetyReason},
-    CastError, InferenceDiagnostic,
+    CastError, InferenceDiagnostic, InferenceTyDiagnosticSource, TyLoweringDiagnosticKind,
+};
+use syntax::{
+    ast::{self, HasGenericArgs},
+    AstPtr, SyntaxError, SyntaxNodePtr, TextRange,
 };
-
-use cfg::{CfgExpr, CfgOptions};
-use either::Either;
-pub use hir_def::VariantId;
-use hir_def::{hir::ExprOrPatId, path::ModPath, AssocItemId, DefWithBodyId, SyntheticSyntax};
-use hir_expand::{name::Name, HirFileId, InFile};
-use syntax::{ast, AstPtr, SyntaxError, SyntaxNodePtr, TextRange};
 use triomphe::Arc;

 use crate::{AssocItem, Field, Local, Trait, Type};

+pub use hir_def::VariantId;
+pub use hir_ty::{
+    diagnostics::{CaseType, IncorrectCase},
+    GenericArgsProhibitedReason,
+};
+
 macro_rules! diagnostics {
     ($($diag:ident,)*) => {
         #[derive(Debug)]
@ -98,6 +109,7 @@ diagnostics![
UnresolvedIdent, UnresolvedIdent,
UnusedMut, UnusedMut,
UnusedVariable, UnusedVariable,
GenericArgsProhibited,
]; ];
#[derive(Debug)] #[derive(Debug)]
@ -388,6 +400,12 @@ pub struct InvalidCast {
pub cast_ty: Type, pub cast_ty: Type,
} }
#[derive(Debug)]
pub struct GenericArgsProhibited {
pub args: InFile<AstPtr<Either<ast::GenericArgList, ast::ParamList>>>,
pub reason: GenericArgsProhibitedReason,
}
impl AnyDiagnostic { impl AnyDiagnostic {
pub(crate) fn body_validation_diagnostic( pub(crate) fn body_validation_diagnostic(
db: &dyn HirDatabase, db: &dyn HirDatabase,
@ -527,6 +545,7 @@ impl AnyDiagnostic {
db: &dyn HirDatabase, db: &dyn HirDatabase,
def: DefWithBodyId, def: DefWithBodyId,
d: &InferenceDiagnostic, d: &InferenceDiagnostic,
outer_types_source_map: &TypesSourceMap,
source_map: &hir_def::body::BodySourceMap, source_map: &hir_def::body::BodySourceMap,
) -> Option<AnyDiagnostic> { ) -> Option<AnyDiagnostic> {
let expr_syntax = |expr| { let expr_syntax = |expr| {
@ -640,6 +659,36 @@ impl AnyDiagnostic {
let cast_ty = Type::new(db, def, cast_ty.clone()); let cast_ty = Type::new(db, def, cast_ty.clone());
InvalidCast { expr, error: *error, expr_ty, cast_ty }.into() InvalidCast { expr, error: *error, expr_ty, cast_ty }.into()
} }
InferenceDiagnostic::TyDiagnostic { source, diag } => {
let source_map = match source {
InferenceTyDiagnosticSource::Body => &source_map.types,
InferenceTyDiagnosticSource::Signature => outer_types_source_map,
};
let source = match diag.source {
Either::Left(type_ref_id) => {
let Ok(source) = source_map.type_syntax(type_ref_id) else {
stdx::never!("error on synthetic type syntax");
return None;
};
source
}
Either::Right(source) => source,
};
let syntax = || source.value.to_node(&db.parse_or_expand(source.file_id));
match diag.kind {
TyLoweringDiagnosticKind::GenericArgsProhibited { segment, reason } => {
let ast::Type::PathType(syntax) = syntax() else { return None };
let segment = hir_segment_to_ast_segment(&syntax.path()?, segment)?;
let args = if let Some(generics) = segment.generic_arg_list() {
AstPtr::new(&generics).wrap_left()
} else {
AstPtr::new(&segment.param_list()?).wrap_right()
};
let args = source.with_value(args);
GenericArgsProhibited { args, reason }.into()
}
}
}
}) })
} }
} }

View file

@@ -20,12 +20,11 @@
 #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
 #![recursion_limit = "512"]

-mod semantics;
-mod source_analyzer;
-
 mod attrs;
 mod from_id;
 mod has_source;
+mod semantics;
+mod source_analyzer;

 pub mod db;
 pub mod diagnostics;
@ -54,6 +53,7 @@ use hir_def::{
path::ImportAlias, path::ImportAlias,
per_ns::PerNs, per_ns::PerNs,
resolver::{HasResolver, Resolver}, resolver::{HasResolver, Resolver},
type_ref::TypesSourceMap,
AssocItemId, AssocItemLoc, AttrDefId, CallableDefId, ConstId, ConstParamId, CrateRootModuleId, AssocItemId, AssocItemLoc, AttrDefId, CallableDefId, ConstId, ConstParamId, CrateRootModuleId,
DefWithBodyId, EnumId, EnumVariantId, ExternCrateId, FunctionId, GenericDefId, GenericParamId, DefWithBodyId, EnumId, EnumVariantId, ExternCrateId, FunctionId, GenericDefId, GenericParamId,
HasModule, ImplId, InTypeConstId, ItemContainerId, LifetimeParamId, LocalFieldId, Lookup, HasModule, ImplId, InTypeConstId, ItemContainerId, LifetimeParamId, LocalFieldId, Lookup,
@ -1802,6 +1802,25 @@ impl DefWithBody {
let krate = self.module(db).id.krate(); let krate = self.module(db).id.krate();
let (body, source_map) = db.body_with_source_map(self.into()); let (body, source_map) = db.body_with_source_map(self.into());
let item_tree_source_maps;
let outer_types_source_map = match self {
DefWithBody::Function(function) => {
let function = function.id.lookup(db.upcast()).id;
item_tree_source_maps = function.item_tree_with_source_map(db.upcast()).1;
item_tree_source_maps.function(function.value).item()
}
DefWithBody::Static(statik) => {
let statik = statik.id.lookup(db.upcast()).id;
item_tree_source_maps = statik.item_tree_with_source_map(db.upcast()).1;
item_tree_source_maps.statik(statik.value)
}
DefWithBody::Const(konst) => {
let konst = konst.id.lookup(db.upcast()).id;
item_tree_source_maps = konst.item_tree_with_source_map(db.upcast()).1;
item_tree_source_maps.konst(konst.value)
}
DefWithBody::Variant(_) | DefWithBody::InTypeConst(_) => &TypesSourceMap::EMPTY,
};
for (_, def_map) in body.blocks(db.upcast()) { for (_, def_map) in body.blocks(db.upcast()) {
Module { id: def_map.module_id(DefMap::ROOT) }.diagnostics(db, acc, style_lints); Module { id: def_map.module_id(DefMap::ROOT) }.diagnostics(db, acc, style_lints);
@ -1861,7 +1880,13 @@ impl DefWithBody {
let infer = db.infer(self.into()); let infer = db.infer(self.into());
for d in &infer.diagnostics { for d in &infer.diagnostics {
acc.extend(AnyDiagnostic::inference_diagnostic(db, self.into(), d, &source_map)); acc.extend(AnyDiagnostic::inference_diagnostic(
db,
self.into(),
d,
outer_types_source_map,
&source_map,
));
} }
for (pat_or_expr, mismatch) in infer.type_mismatches() { for (pat_or_expr, mismatch) in infer.type_mismatches() {

View file

@ -0,0 +1,242 @@
use either::Either;
use hir::GenericArgsProhibitedReason;
use ide_db::assists::Assist;
use ide_db::source_change::SourceChange;
use ide_db::text_edit::TextEdit;
use syntax::{ast, AstNode, TextRange};
use crate::{fix, Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: generic-args-prohibited
//
// This diagnostic is shown when generic arguments are provided for a type that does not accept
// generic arguments.
pub(crate) fn generic_args_prohibited(
ctx: &DiagnosticsContext<'_>,
d: &hir::GenericArgsProhibited,
) -> Diagnostic {
Diagnostic::new_with_syntax_node_ptr(
ctx,
DiagnosticCode::RustcHardError("E0109"),
describe_reason(d.reason),
d.args.map(Into::into),
)
.with_fixes(fixes(ctx, d))
}
fn describe_reason(reason: GenericArgsProhibitedReason) -> String {
let kind = match reason {
GenericArgsProhibitedReason::Module => "modules",
GenericArgsProhibitedReason::TyParam => "type parameters",
GenericArgsProhibitedReason::SelfTy => "`Self`",
GenericArgsProhibitedReason::PrimitiveTy => "builtin types",
GenericArgsProhibitedReason::EnumVariant => {
return "you can specify generic arguments on either the enum or the variant, but not both"
.to_owned();
}
};
format!("generic arguments are not allowed on {kind}")
}
fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::GenericArgsProhibited) -> Option<Vec<Assist>> {
let file_id = d.args.file_id.file_id()?;
let syntax = d.args.to_node(ctx.sema.db);
let range = match &syntax {
Either::Left(_) => syntax.syntax().text_range(),
Either::Right(param_list) => {
let path_segment = ast::PathSegment::cast(param_list.syntax().parent()?)?;
let start = if let Some(coloncolon) = path_segment.coloncolon_token() {
coloncolon.text_range().start()
} else {
param_list.syntax().text_range().start()
};
let end = if let Some(ret_type) = path_segment.ret_type() {
ret_type.syntax().text_range().end()
} else {
param_list.syntax().text_range().end()
};
TextRange::new(start, end)
}
};
Some(vec![fix(
"remove_generic_args",
"Remove these generics",
SourceChange::from_text_edit(file_id, TextEdit::delete(range)),
syntax.syntax().text_range(),
)])
}
#[cfg(test)]
mod tests {
use crate::tests::{check_diagnostics, check_fix};
#[test]
fn primitives() {
check_diagnostics(
r#"
//- /core.rs crate:core library
#![rustc_coherence_is_core]
impl str {
pub fn trim() {}
}
//- /lib.rs crate:foo deps:core
fn bar<T>() {}
fn foo() {
let _: (bool<()>, ());
// ^^^^ 💡 error: generic arguments are not allowed on builtin types
let _ = <str<'_>>::trim;
// ^^^^ 💡 error: generic arguments are not allowed on builtin types
bar::<u32<{ const { 1 + 1 } }>>();
// ^^^^^^^^^^^^^^^^^^^^^ 💡 error: generic arguments are not allowed on builtin types
}
"#,
);
}
#[test]
fn modules() {
check_diagnostics(
r#"
pub mod foo {
pub mod bar {
pub struct Baz;
impl Baz {
pub fn qux() {}
}
}
}
fn foo() {
let _: foo::<'_>::bar::Baz;
// ^^^^^^ 💡 error: generic arguments are not allowed on modules
let _ = <foo::bar<()>::Baz>::qux;
// ^^^^ 💡 error: generic arguments are not allowed on modules
}
"#,
);
}
#[test]
fn type_parameters() {
check_diagnostics(
r#"
fn foo<T, U>() {
let _: T<'a>;
// ^^^^ 💡 error: generic arguments are not allowed on type parameters
let _: U::<{ 1 + 2 }>;
// ^^^^^^^^^^^^^ 💡 error: generic arguments are not allowed on type parameters
}
"#,
);
}
#[test]
fn fn_like_generic_args() {
check_diagnostics(
r#"
fn foo() {
let _: bool(bool, i32) -> ();
// ^^^^^^^^^^^ 💡 error: generic arguments are not allowed on builtin types
}
"#,
);
}
#[test]
fn fn_signature() {
check_diagnostics(
r#"
fn foo(
_a: bool<'_>,
// ^^^^ 💡 error: generic arguments are not allowed on builtin types
_b: i32::<i64>,
// ^^^^^^^ 💡 error: generic arguments are not allowed on builtin types
_c: &(&str<1>)
// ^^^ 💡 error: generic arguments are not allowed on builtin types
) -> ((), i32<bool>) {
// ^^^^^^ 💡 error: generic arguments are not allowed on builtin types
((), 0)
}
"#,
);
}
#[test]
fn const_static_type() {
check_diagnostics(
r#"
const A: i32<bool> = 0;
// ^^^^^^ 💡 error: generic arguments are not allowed on builtin types
static A: i32::<{ 1 + 3 }> = 0;
// ^^^^^^^^^^^^^ 💡 error: generic arguments are not allowed on builtin types
"#,
);
}
#[test]
fn fix() {
check_fix(
r#"
fn foo() {
let _: bool<'_, (), { 1 + 1 }>$0;
}"#,
r#"
fn foo() {
let _: bool;
}"#,
);
check_fix(
r#"
fn foo() {
let _: bool::$0<'_, (), { 1 + 1 }>;
}"#,
r#"
fn foo() {
let _: bool;
}"#,
);
check_fix(
r#"
fn foo() {
let _: bool(i$032);
}"#,
r#"
fn foo() {
let _: bool;
}"#,
);
check_fix(
r#"
fn foo() {
let _: bool$0(i32) -> i64;
}"#,
r#"
fn foo() {
let _: bool;
}"#,
);
check_fix(
r#"
fn foo() {
let _: bool::(i$032) -> i64;
}"#,
r#"
fn foo() {
let _: bool;
}"#,
);
check_fix(
r#"
fn foo() {
let _: bool::(i32)$0;
}"#,
r#"
fn foo() {
let _: bool;
}"#,
);
}
}

View file

@@ -167,9 +167,9 @@ fn assoc_func_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) -
     }

     let method_name = call.name_ref()?;
-    let assoc_func_call = format!("{receiver_type_adt_name}::{method_name}()");
-    let assoc_func_call = make::expr_path(make::path_from_text(&assoc_func_call));
+    let assoc_func_path = format!("{receiver_type_adt_name}::{method_name}");
+    let assoc_func_path = make::expr_path(make::path_from_text(&assoc_func_path));

     let args: Vec<_> = if need_to_take_receiver_as_first_arg {
         std::iter::once(receiver).chain(call.arg_list()?.args()).collect()
@@ -178,7 +178,7 @@ fn assoc_func_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) -
     };
     let args = make::arg_list(args);

-    let assoc_func_call_expr_string = make::expr_call(assoc_func_call, args).to_string();
+    let assoc_func_call_expr_string = make::expr_call(assoc_func_path, args).to_string();

     let file_id = ctx.sema.original_range_opt(call.receiver()?.syntax())?.file_id;

View file

@@ -27,6 +27,7 @@ mod handlers {
     pub(crate) mod await_outside_of_async;
     pub(crate) mod break_outside_of_loop;
     pub(crate) mod expected_function;
+    pub(crate) mod generic_args_prohibited;
     pub(crate) mod inactive_code;
     pub(crate) mod incoherent_impl;
     pub(crate) mod incorrect_case;
@@ -468,6 +469,7 @@ pub fn semantic_diagnostics(
                 Some(it) => it,
                 None => continue,
             },
+            AnyDiagnostic::GenericArgsProhibited(d) => handlers::generic_args_prohibited::generic_args_prohibited(&ctx, &d)
         };
         res.push(d)
     }

View file

@@ -402,7 +402,7 @@ pub fn join_paths(paths: impl IntoIterator<Item = ast::Path>) -> ast::Path {

 // FIXME: should not be pub
 pub fn path_from_text(text: &str) -> ast::Path {
-    ast_from_text(&format!("fn main() {{ let test = {text}; }}"))
+    ast_from_text(&format!("fn main() {{ let test: {text}; }}"))
 }

 pub fn use_tree_glob() -> ast::UseTree {
pub fn use_tree_glob() -> ast::UseTree { pub fn use_tree_glob() -> ast::UseTree {