mirror of
https://github.com/rust-lang/rust-analyzer
synced 2024-12-26 04:53:34 +00:00
Lay the foundation for diagnostics in ty lowering, and implement a first diagnostic
The diagnostic implemented is a simple one (E0109). It serves as a test for the new foundation. This commit only implements diagnostics for types in bodies and body-carrying signatures; the next commit will extend diagnostics to the rest of the items. Also fix a weird bug, detected while implementing this, that caused `Fn::(A, B) -> C` (a valid, if bizarre, alternative syntax to `Fn(A, B) -> C`) to lower incorrectly. Also fix a possible bug where parentheses were needlessly sneaked into a code string; this went undetected until now because the parentheses were removed by the make-AST family of APIs, but with a change in this commit they are now inserted, so that is fixed too.
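For illustration only (not part of the commit), here is a minimal sketch of the kind of code the new E0109 diagnostic flags; the module, type, and function names are made up, and the wording matches the handler added below:

// Hypothetical user code: "generic arguments are not allowed on ..." (E0109)
// fires wherever generic arguments are attached to something that cannot take them.
mod m {
    pub struct S;
}

fn demo<T>() {
    let _a: m::<i32>::S;   // error: generic arguments are not allowed on modules
    let _b: u32<'static>;  // error: generic arguments are not allowed on builtin types
    let _c: T<u8>;         // error: generic arguments are not allowed on type parameters
}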
This commit is contained in:
parent 4e475a3245
commit 5f25ae3d1b
19 changed files with 811 additions and 80 deletions
Cargo.lock (generated): 4 changes

@@ -509,6 +509,7 @@ dependencies = [
"base-db",
"cfg",
"either",
"expect-test",
"hir-def",
"hir-expand",
"hir-ty",

@@ -519,6 +520,9 @@ dependencies = [
"span",
"stdx",
"syntax",
"syntax-bridge",
"test-fixture",
"test-utils",
"tracing",
"triomphe",
"tt",
@@ -141,7 +141,7 @@ pub struct BodySourceMap {
field_map_back: FxHashMap<ExprId, FieldSource>,
pat_field_map_back: FxHashMap<PatId, PatFieldSource>,

types: TypesSourceMap,
pub types: TypesSourceMap,

// FIXME: Make this a sane struct.
template_map: Option<

@@ -219,6 +219,8 @@ pub struct TypesSourceMap {
}

impl TypesSourceMap {
pub const EMPTY: Self = Self { types_map_back: ArenaMap::new() };

pub fn type_syntax(&self, id: TypeRefId) -> Result<TypeSource, SyntheticSyntax> {
self.types_map_back.get(id).cloned().ok_or(SyntheticSyntax)
}
@@ -1,5 +1,7 @@
//! A desugared representation of paths like `crate::foo` or `<Type as Trait>::bar`.
mod lower;
#[cfg(test)]
mod tests;

use std::{
fmt::{self, Display},

@@ -19,6 +21,8 @@ use syntax::ast;

pub use hir_expand::mod_path::{path, ModPath, PathKind};

pub use lower::hir_segment_to_ast_segment;

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ImportAlias {
/// Unnamed alias, as in `use Foo as _;`

@@ -230,7 +234,7 @@ impl Path {
}
}

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct PathSegment<'a> {
pub name: &'a Name,
pub args_and_bindings: Option<&'a GenericArgs>,

@@ -274,6 +278,12 @@ impl<'a> PathSegments<'a> {
generic_args: self.generic_args.map(|it| it.get(..len).unwrap_or(it)),
}
}
pub fn strip_last(&self) -> PathSegments<'a> {
PathSegments {
segments: self.segments.split_last().map_or(&[], |it| it.1),
generic_args: self.generic_args.map(|it| it.split_last().map_or(&[][..], |it| it.1)),
}
}
pub fn iter(&self) -> impl Iterator<Item = PathSegment<'a>> {
self.segments
.iter()
@@ -17,13 +17,31 @@ use crate::{
type_ref::{LifetimeRef, TypeBound, TypeRef},
};

#[cfg(test)]
thread_local! {
/// This is used to test `hir_segment_to_ast_segment()`. It's a hack, but it makes testing much easier.
pub(super) static SEGMENT_LOWERING_MAP: std::cell::RefCell<rustc_hash::FxHashMap<ast::PathSegment, usize>> = std::cell::RefCell::default();
}

/// Converts an `ast::Path` to `Path`. Works with use trees.
/// It correctly handles `$crate` based path from macro call.
// If you modify the logic of the lowering, make sure to check if `hir_segment_to_ast_segment()`
// also needs an update.
pub(super) fn lower_path(ctx: &mut LowerCtx<'_>, mut path: ast::Path) -> Option<Path> {
let mut kind = PathKind::Plain;
let mut type_anchor = None;
let mut segments = Vec::new();
let mut generic_args = Vec::new();
#[cfg(test)]
let mut ast_segments = Vec::new();
#[cfg(test)]
let mut ast_segments_offset = 0;
#[allow(unused_mut)]
let mut push_segment = |_segment: &ast::PathSegment, segments: &mut Vec<Name>, name| {
#[cfg(test)]
ast_segments.push(_segment.clone());
segments.push(name);
};
loop {
let segment = path.segment()?;

@@ -34,6 +52,10 @@ pub(super) fn lower_path(ctx: &mut LowerCtx<'_>, mut path: ast::Path) -> Option<
match segment.kind()? {
ast::PathSegmentKind::Name(name_ref) => {
if name_ref.text() == "$crate" {
if path.qualifier().is_some() {
// FIXME: Report an error.
return None;
}
break kind = resolve_crate_root(
ctx.db.upcast(),
ctx.span_map().span_for_range(name_ref.syntax().text_range()).ctx,

@@ -56,10 +78,10 @@ pub(super) fn lower_path(ctx: &mut LowerCtx<'_>, mut path: ast::Path) -> Option<
generic_args.resize(segments.len(), None);
generic_args.push(args);
}
segments.push(name);
push_segment(&segment, &mut segments, name);
}
ast::PathSegmentKind::SelfTypeKw => {
segments.push(Name::new_symbol_root(sym::Self_.clone()));
push_segment(&segment, &mut segments, Name::new_symbol_root(sym::Self_.clone()));
}
ast::PathSegmentKind::Type { type_ref, trait_ref } => {
assert!(path.qualifier().is_none()); // this can only occur at the first segment

@@ -81,6 +103,10 @@ pub(super) fn lower_path(ctx: &mut LowerCtx<'_>, mut path: ast::Path) -> Option<
kind = mod_path.kind;

segments.extend(mod_path.segments().iter().cloned().rev());
#[cfg(test)]
{
ast_segments_offset = mod_path.segments().len();
}
if let Some(path_generic_args) = path_generic_args {
generic_args.resize(segments.len() - num_segments, None);
generic_args.extend(Vec::from(path_generic_args).into_iter().rev());

@@ -112,10 +138,18 @@ pub(super) fn lower_path(ctx: &mut LowerCtx<'_>, mut path: ast::Path) -> Option<
}
}
ast::PathSegmentKind::CrateKw => {
if path.qualifier().is_some() {
// FIXME: Report an error.
return None;
}
kind = PathKind::Crate;
break;
}
ast::PathSegmentKind::SelfKw => {
if path.qualifier().is_some() {
// FIXME: Report an error.
return None;
}
// don't break out if `self` is the last segment of a path, this mean we got a
// use tree like `foo::{self}` which we want to resolve as `foo`
if !segments.is_empty() {

@@ -162,6 +196,13 @@ pub(super) fn lower_path(ctx: &mut LowerCtx<'_>, mut path: ast::Path) -> Option<
}
}

#[cfg(test)]
{
ast_segments.reverse();
SEGMENT_LOWERING_MAP
.with_borrow_mut(|map| map.extend(ast_segments.into_iter().zip(ast_segments_offset..)));
}

let mod_path = Interned::new(ModPath::from_segments(kind, segments));
if type_anchor.is_none() && generic_args.is_empty() {
return Some(Path::BarePath(mod_path));

@@ -181,6 +222,41 @@ pub(super) fn lower_path(ctx: &mut LowerCtx<'_>, mut path: ast::Path) -> Option<
}
}

/// This function finds the AST segment that corresponds to the HIR segment
/// with index `segment_idx` on the path that is lowered from `path`.
pub fn hir_segment_to_ast_segment(path: &ast::Path, segment_idx: u32) -> Option<ast::PathSegment> {
// Too tightly coupled to `lower_path()`, but unfortunately we cannot decouple them,
// as keeping source maps for all paths segments will have a severe impact on memory usage.

let mut segments = path.segments();
if let Some(ast::PathSegmentKind::Type { trait_ref: Some(trait_ref), .. }) =
segments.clone().next().and_then(|it| it.kind())
{
segments.next();
return find_segment(trait_ref.path()?.segments().chain(segments), segment_idx);
}
return find_segment(segments, segment_idx);

fn find_segment(
segments: impl Iterator<Item = ast::PathSegment>,
segment_idx: u32,
) -> Option<ast::PathSegment> {
segments
.filter(|segment| match segment.kind() {
Some(
ast::PathSegmentKind::CrateKw
| ast::PathSegmentKind::SelfKw
| ast::PathSegmentKind::SuperKw
| ast::PathSegmentKind::Type { .. },
)
| None => false,
Some(ast::PathSegmentKind::Name(name)) => name.text() != "$crate",
Some(ast::PathSegmentKind::SelfTypeKw) => true,
})
.nth(segment_idx as usize)
}
}

pub(super) fn lower_generic_args(
lower_ctx: &mut LowerCtx<'_>,
node: ast::GenericArgList,
crates/hir-def/src/path/tests.rs (new file): 126 lines

@@ -0,0 +1,126 @@
use expect_test::{expect, Expect};
use span::Edition;
use syntax::ast::{self, make};
use test_fixture::WithFixture;

use crate::{
lower::LowerCtx,
path::{
lower::{hir_segment_to_ast_segment, SEGMENT_LOWERING_MAP},
Path,
},
pretty,
test_db::TestDB,
type_ref::{TypesMap, TypesSourceMap},
};

fn lower_path(path: ast::Path) -> (TestDB, TypesMap, Option<Path>) {
let (db, file_id) = TestDB::with_single_file("");
let mut types_map = TypesMap::default();
let mut types_source_map = TypesSourceMap::default();
let mut ctx = LowerCtx::new(&db, file_id.into(), &mut types_map, &mut types_source_map);
let lowered_path = ctx.lower_path(path);
(db, types_map, lowered_path)
}

#[track_caller]
fn check_hir_to_ast(path: &str, ignore_segments: &[&str]) {
let path = make::path_from_text(path);
SEGMENT_LOWERING_MAP.with_borrow_mut(|map| map.clear());
let _ = lower_path(path.clone()).2.expect("failed to lower path");
SEGMENT_LOWERING_MAP.with_borrow(|map| {
for (segment, segment_idx) in map {
if ignore_segments.contains(&&*segment.to_string()) {
continue;
}

let restored_segment = hir_segment_to_ast_segment(&path, *segment_idx as u32)
.unwrap_or_else(|| {
panic!(
"failed to map back segment `{segment}` \
numbered {segment_idx} in HIR from path `{path}`"
)
});
assert_eq!(
segment, &restored_segment,
"mapping back `{segment}` numbered {segment_idx} in HIR \
from path `{path}` produced incorrect segment `{restored_segment}`"
);
}
});
}

#[test]
fn hir_to_ast_trait_ref() {
check_hir_to_ast("<A as B::C::D>::E::F", &["A"]);
}

#[test]
fn hir_to_ast_plain_path() {
check_hir_to_ast("A::B::C::D::E::F", &[]);
}

#[test]
fn hir_to_ast_crate_path() {
check_hir_to_ast("crate::A::B::C", &[]);
check_hir_to_ast("crate::super::super::A::B::C", &[]);
}

#[test]
fn hir_to_ast_self_path() {
check_hir_to_ast("self::A::B::C", &[]);
check_hir_to_ast("self::super::super::A::B::C", &[]);
}

#[test]
fn hir_to_ast_super_path() {
check_hir_to_ast("super::A::B::C", &[]);
check_hir_to_ast("super::super::super::A::B::C", &[]);
}

#[test]
fn hir_to_ast_type_anchor_path() {
check_hir_to_ast("<A::B>::C::D", &["A", "B"]);
}

#[test]
fn hir_to_ast_path_super_in_middle() {
check_hir_to_ast("A::super::B::super::super::C::D", &[]);
}

#[track_caller]
fn check_fail_lowering(path: &str) {
let (_, _, lowered_path) = lower_path(make::path_from_text(path));
assert!(lowered_path.is_none(), "path `{path}` should fail lowering");
}

#[test]
fn keywords_in_middle_fail_lowering1() {
check_fail_lowering("self::A::self::B::super::C::crate::D");
}

#[test]
fn keywords_in_middle_fail_lowering2() {
check_fail_lowering("A::super::self::C::D");
}

#[test]
fn keywords_in_middle_fail_lowering3() {
check_fail_lowering("A::crate::B::C::D");
}

#[track_caller]
fn check_path_lowering(path: &str, expected: Expect) {
let (db, types_map, lowered_path) = lower_path(make::path_from_text(path));
let lowered_path = lowered_path.expect("failed to lower path");
let mut buf = String::new();
pretty::print_path(&db, &lowered_path, &types_map, &mut buf, Edition::CURRENT)
.expect("failed to pretty-print path");
expected.assert_eq(&buf);
}

#[test]
fn fn_like_path_with_coloncolon() {
check_path_lowering("Fn::(A, B) -> C", expect![[r#"Fn::<(A, B), Output = C>"#]]);
check_path_lowering("Fn::(A, B)", expect![[r#"Fn::<(A, B), Output = ()>"#]]);
}
@@ -180,6 +180,13 @@ impl<FileId: FileIdToSyntax, T> InFileWrapper<FileId, T> {
}
}

#[allow(private_bounds)]
impl<FileId: FileIdToSyntax, N: AstNode> InFileWrapper<FileId, AstPtr<N>> {
pub fn to_node(&self, db: &dyn ExpandDatabase) -> N {
self.value.to_node(&self.file_syntax(db))
}
}

impl<FileId: Copy, N: AstNode> InFileWrapper<FileId, N> {
pub fn syntax(&self) -> InFileWrapper<FileId, &SyntaxNode> {
self.with_value(self.value.syntax())
@@ -58,7 +58,7 @@ use crate::{
fold_tys,
generics::Generics,
infer::{coerce::CoerceMany, expr::ExprIsRead, unify::InferenceTable},
lower::ImplTraitLoweringMode,
lower::{ImplTraitLoweringMode, TyLoweringDiagnostic},
mir::MirSpan,
to_assoc_type_id,
traits::FnTrait,

@@ -191,6 +191,14 @@ impl<T> InferOk<T> {
}
}

#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum InferenceTyDiagnosticSource {
/// Diagnostics that come from types in the body.
Body,
/// Diagnostics that come from types in fn parameters/return type, or static & const types.
Signature,
}

#[derive(Debug)]
pub(crate) struct TypeError;
pub(crate) type InferResult<T> = Result<InferOk<T>, TypeError>;

@@ -264,6 +272,10 @@ pub enum InferenceDiagnostic {
expr_ty: Ty,
cast_ty: Ty,
},
TyDiagnostic {
source: InferenceTyDiagnosticSource,
diag: TyLoweringDiagnostic,
},
}

/// A mismatch between an expected and an inferred type.

@@ -858,7 +870,8 @@ impl<'a> InferenceContext<'a> {
}

fn collect_const(&mut self, data: &ConstData) {
let return_ty = self.make_ty(data.type_ref, &data.types_map);
let return_ty =
self.make_ty(data.type_ref, &data.types_map, InferenceTyDiagnosticSource::Signature);

// Constants might be defining usage sites of TAITs.
self.make_tait_coercion_table(iter::once(&return_ty));

@@ -867,7 +880,8 @@ impl<'a> InferenceContext<'a> {
}

fn collect_static(&mut self, data: &StaticData) {
let return_ty = self.make_ty(data.type_ref, &data.types_map);
let return_ty =
self.make_ty(data.type_ref, &data.types_map, InferenceTyDiagnosticSource::Signature);

// Statics might be defining usage sites of TAITs.
self.make_tait_coercion_table(iter::once(&return_ty));

@@ -877,7 +891,8 @@ impl<'a> InferenceContext<'a> {

fn collect_fn(&mut self, func: FunctionId) {
let data = self.db.function_data(func);
let mut param_tys = self.with_ty_lowering(&data.types_map, |ctx| {
let mut param_tys =
self.with_ty_lowering(&data.types_map, InferenceTyDiagnosticSource::Signature, |ctx| {
ctx.type_param_mode(ParamLoweringMode::Placeholder)
.impl_trait_mode(ImplTraitLoweringMode::Param);
data.params.iter().map(|&type_ref| ctx.lower_ty(type_ref)).collect::<Vec<_>>()

@@ -918,7 +933,8 @@ impl<'a> InferenceContext<'a> {
}
let return_ty = data.ret_type;

let return_ty = self.with_ty_lowering(&data.types_map, |ctx| {
let return_ty =
self.with_ty_lowering(&data.types_map, InferenceTyDiagnosticSource::Signature, |ctx| {
ctx.type_param_mode(ParamLoweringMode::Placeholder)
.impl_trait_mode(ImplTraitLoweringMode::Opaque)
.lower_ty(return_ty)

@@ -1226,9 +1242,20 @@ impl<'a> InferenceContext<'a> {
self.result.diagnostics.push(diagnostic);
}

fn push_ty_diagnostics(
&mut self,
source: InferenceTyDiagnosticSource,
diagnostics: Vec<TyLoweringDiagnostic>,
) {
self.result.diagnostics.extend(
diagnostics.into_iter().map(|diag| InferenceDiagnostic::TyDiagnostic { source, diag }),
);
}

fn with_ty_lowering<R>(
&self,
&mut self,
types_map: &TypesMap,
types_source: InferenceTyDiagnosticSource,
f: impl FnOnce(&mut crate::lower::TyLoweringContext<'_>) -> R,
) -> R {
let mut ctx = crate::lower::TyLoweringContext::new(

@@ -1237,32 +1264,41 @@ impl<'a> InferenceContext<'a> {
types_map,
self.owner.into(),
);
f(&mut ctx)
let result = f(&mut ctx);
self.push_ty_diagnostics(types_source, ctx.diagnostics);
result
}

fn with_body_ty_lowering<R>(
&self,
&mut self,
f: impl FnOnce(&mut crate::lower::TyLoweringContext<'_>) -> R,
) -> R {
self.with_ty_lowering(&self.body.types, f)
self.with_ty_lowering(&self.body.types, InferenceTyDiagnosticSource::Body, f)
}

fn make_ty(&mut self, type_ref: TypeRefId, types_map: &TypesMap) -> Ty {
let ty = self.with_ty_lowering(types_map, |ctx| ctx.lower_ty(type_ref));
fn make_ty(
&mut self,
type_ref: TypeRefId,
types_map: &TypesMap,
type_source: InferenceTyDiagnosticSource,
) -> Ty {
let ty = self.with_ty_lowering(types_map, type_source, |ctx| ctx.lower_ty(type_ref));
let ty = self.insert_type_vars(ty);
self.normalize_associated_types_in(ty)
}

fn make_body_ty(&mut self, type_ref: TypeRefId) -> Ty {
self.make_ty(type_ref, &self.body.types)
self.make_ty(type_ref, &self.body.types, InferenceTyDiagnosticSource::Body)
}

fn err_ty(&self) -> Ty {
self.result.standard_types.unknown.clone()
}

fn make_lifetime(&mut self, lifetime_ref: &LifetimeRef) -> Lifetime {
let lt = self.with_ty_lowering(TypesMap::EMPTY, |ctx| ctx.lower_lifetime(lifetime_ref));
fn make_body_lifetime(&mut self, lifetime_ref: &LifetimeRef) -> Lifetime {
let lt = self.with_ty_lowering(TypesMap::EMPTY, InferenceTyDiagnosticSource::Body, |ctx| {
ctx.lower_lifetime(lifetime_ref)
});
self.insert_type_vars(lt)
}

@@ -1431,12 +1467,20 @@ impl<'a> InferenceContext<'a> {
Some(ResolveValueResult::ValueNs(value, _)) => match value {
ValueNs::EnumVariantId(var) => {
let substs = ctx.substs_from_path(path, var.into(), true);
self.push_ty_diagnostics(
InferenceTyDiagnosticSource::Body,
ctx.diagnostics,
);
let ty = self.db.ty(var.lookup(self.db.upcast()).parent.into());
let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
return (ty, Some(var.into()));
}
ValueNs::StructId(strukt) => {
let substs = ctx.substs_from_path(path, strukt.into(), true);
self.push_ty_diagnostics(
InferenceTyDiagnosticSource::Body,
ctx.diagnostics,
);
let ty = self.db.ty(strukt.into());
let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
return (ty, Some(strukt.into()));

@@ -1462,18 +1506,21 @@ impl<'a> InferenceContext<'a> {
return match resolution {
TypeNs::AdtId(AdtId::StructId(strukt)) => {
let substs = ctx.substs_from_path(path, strukt.into(), true);
self.push_ty_diagnostics(InferenceTyDiagnosticSource::Body, ctx.diagnostics);
let ty = self.db.ty(strukt.into());
let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
forbid_unresolved_segments((ty, Some(strukt.into())), unresolved)
}
TypeNs::AdtId(AdtId::UnionId(u)) => {
let substs = ctx.substs_from_path(path, u.into(), true);
self.push_ty_diagnostics(InferenceTyDiagnosticSource::Body, ctx.diagnostics);
let ty = self.db.ty(u.into());
let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
forbid_unresolved_segments((ty, Some(u.into())), unresolved)
}
TypeNs::EnumVariantId(var) => {
let substs = ctx.substs_from_path(path, var.into(), true);
self.push_ty_diagnostics(InferenceTyDiagnosticSource::Body, ctx.diagnostics);
let ty = self.db.ty(var.lookup(self.db.upcast()).parent.into());
let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
forbid_unresolved_segments((ty, Some(var.into())), unresolved)

@@ -1519,6 +1566,9 @@ impl<'a> InferenceContext<'a> {
resolved_segment,
current_segment,
false,
&mut |_, _reason| {
// FIXME: Report an error.
},
);

ty = self.table.insert_type_vars(ty);

@@ -1532,6 +1582,7 @@ impl<'a> InferenceContext<'a> {
remaining_idx += 1;
remaining_segments = remaining_segments.skip(1);
}
self.push_ty_diagnostics(InferenceTyDiagnosticSource::Body, ctx.diagnostics);

let variant = ty.as_adt().and_then(|(id, _)| match id {
AdtId::StructId(s) => Some(VariantId::StructId(s)),

@@ -1550,6 +1601,7 @@ impl<'a> InferenceContext<'a> {
};
let substs =
ctx.substs_from_path_segment(resolved_seg, Some(it.into()), true, None);
self.push_ty_diagnostics(InferenceTyDiagnosticSource::Body, ctx.diagnostics);
let ty = self.db.ty(it.into());
let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
@@ -2155,7 +2155,7 @@ impl InferenceContext<'_> {
DebruijnIndex::INNERMOST,
)
},
|this, lt_ref| this.make_lifetime(lt_ref),
|this, lt_ref| this.make_body_lifetime(lt_ref),
),
};
@@ -19,7 +19,7 @@ use crate::{
TyBuilder, TyExt, TyKind, ValueTyDefId,
};

use super::{ExprOrPatId, InferenceContext};
use super::{ExprOrPatId, InferenceContext, InferenceTyDiagnosticSource};

impl InferenceContext<'_> {
pub(super) fn infer_path(&mut self, path: &Path, id: ExprOrPatId) -> Option<Ty> {

@@ -163,6 +163,7 @@ impl InferenceContext<'_> {

let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1);
let (ty, _) = ctx.lower_ty_relative_path(ty, orig_ns, remaining_segments_for_ty);
self.push_ty_diagnostics(InferenceTyDiagnosticSource::Body, ctx.diagnostics);
let ty = self.table.insert_type_vars(ty);
let ty = self.table.normalize_associated_types_in(ty);
self.resolve_ty_assoc_item(ty, last.name, id).map(|(it, substs)| (it, Some(substs)))?

@@ -265,6 +266,9 @@ impl InferenceContext<'_> {
resolved_segment,
remaining_segments_for_ty,
true,
&mut |_, _reason| {
// FIXME: Report an error.
},
)
});
if ty.is_unknown() {
@@ -84,12 +84,13 @@ pub use infer::{
cast::CastError,
closure::{CaptureKind, CapturedItem},
could_coerce, could_unify, could_unify_deeply, Adjust, Adjustment, AutoBorrow, BindingMode,
InferenceDiagnostic, InferenceResult, OverloadedDeref, PointerCast,
InferenceDiagnostic, InferenceResult, InferenceTyDiagnosticSource, OverloadedDeref,
PointerCast,
};
pub use interner::Interner;
pub use lower::{
associated_type_shorthand_candidates, ImplTraitLoweringMode, ParamLoweringMode, TyDefId,
TyLoweringContext, ValueTyDefId,
associated_type_shorthand_candidates, GenericArgsProhibitedReason, ImplTraitLoweringMode,
ParamLoweringMode, TyDefId, TyLoweringContext, TyLoweringDiagnosticKind, ValueTyDefId,
};
pub use mapping::{
from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, from_placeholder_idx,
@@ -102,6 +102,31 @@ impl ImplTraitLoweringState {
}
}

type TypeSource = Either<TypeRefId, hir_def::type_ref::TypeSource>;

#[derive(Debug, PartialEq, Eq, Clone)]
pub struct TyLoweringDiagnostic {
pub source: TypeSource,
pub kind: TyLoweringDiagnosticKind,
}

#[derive(Debug, PartialEq, Eq, Clone)]
pub enum TyLoweringDiagnosticKind {
GenericArgsProhibited { segment: u32, reason: GenericArgsProhibitedReason },
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum GenericArgsProhibitedReason {
Module,
TyParam,
SelfTy,
PrimitiveTy,
/// When there is a generic enum, within the expression `Enum::Variant`,
/// either `Enum` or `Variant` are allowed to have generic arguments, but not both.
// FIXME: This is not used now but it should be.
EnumVariant,
}

#[derive(Debug)]
pub struct TyLoweringContext<'a> {
pub db: &'a dyn HirDatabase,

@@ -125,6 +150,7 @@ pub struct TyLoweringContext<'a> {
expander: Option<Expander>,
/// Tracks types with explicit `?Sized` bounds.
pub(crate) unsized_types: FxHashSet<Ty>,
pub(crate) diagnostics: Vec<TyLoweringDiagnostic>,
}

impl<'a> TyLoweringContext<'a> {

@@ -159,6 +185,7 @@ impl<'a> TyLoweringContext<'a> {
type_param_mode,
expander: None,
unsized_types: FxHashSet::default(),
diagnostics: Vec::new(),
}
}

@@ -198,6 +225,20 @@ impl<'a> TyLoweringContext<'a> {
self.type_param_mode = type_param_mode;
self
}

pub fn push_diagnostic(&mut self, type_ref: TypeRefId, kind: TyLoweringDiagnosticKind) {
let source = match self.types_source_map {
Some(source_map) => {
let Ok(source) = source_map.type_syntax(type_ref) else {
stdx::never!("error in synthetic type");
return;
};
Either::Right(source)
}
None => Either::Left(type_ref),
};
self.diagnostics.push(TyLoweringDiagnostic { source, kind });
}
}

#[derive(Copy, Clone, Debug, PartialEq, Eq, Default)]

@@ -464,6 +505,7 @@ impl<'a> TyLoweringContext<'a> {
impl_trait_mode: mem::take(&mut self.impl_trait_mode),
expander: self.expander.take(),
unsized_types: mem::take(&mut self.unsized_types),
diagnostics: mem::take(&mut self.diagnostics),
};

let ty = inner_ctx.lower_ty(type_ref);

@@ -471,6 +513,7 @@ impl<'a> TyLoweringContext<'a> {
self.impl_trait_mode = inner_ctx.impl_trait_mode;
self.expander = inner_ctx.expander;
self.unsized_types = inner_ctx.unsized_types;
self.diagnostics = inner_ctx.diagnostics;

self.expander.as_mut().unwrap().exit(mark);
Some(ty)

@@ -542,6 +585,10 @@ impl<'a> TyLoweringContext<'a> {
resolved_segment: PathSegment<'_>,
remaining_segments: PathSegments<'_>,
infer_args: bool,
on_prohibited_generics_for_resolved_segment: &mut dyn FnMut(
&mut Self,
GenericArgsProhibitedReason,
),
) -> (Ty, Option<TypeNs>) {
let ty = match resolution {
TypeNs::TraitId(trait_) => {

@@ -608,7 +655,15 @@ impl<'a> TyLoweringContext<'a> {
// FIXME(trait_alias): Implement trait alias.
return (TyKind::Error.intern(Interner), None);
}
TypeNs::GenericParam(param_id) => match self.type_param_mode {
TypeNs::GenericParam(param_id) => {
if resolved_segment.args_and_bindings.is_some() {
on_prohibited_generics_for_resolved_segment(
self,
GenericArgsProhibitedReason::TyParam,
);
}

match self.type_param_mode {
ParamLoweringMode::Placeholder => {
TyKind::Placeholder(to_placeholder_idx(self.db, param_id.into()))
}

@@ -628,8 +683,16 @@ impl<'a> TyLoweringContext<'a> {
TyKind::BoundVar(BoundVar::new(self.in_binders, idx))
}
}
.intern(Interner),
.intern(Interner)
}
TypeNs::SelfType(impl_id) => {
if resolved_segment.args_and_bindings.is_some() {
on_prohibited_generics_for_resolved_segment(
self,
GenericArgsProhibitedReason::SelfTy,
);
}

let generics = self.generics().expect("impl should have generic param scope");

match self.type_param_mode {

@@ -655,6 +718,13 @@ impl<'a> TyLoweringContext<'a> {
}
}
TypeNs::AdtSelfType(adt) => {
if resolved_segment.args_and_bindings.is_some() {
on_prohibited_generics_for_resolved_segment(
self,
GenericArgsProhibitedReason::SelfTy,
);
}

let generics = generics(self.db.upcast(), adt.into());
let substs = match self.type_param_mode {
ParamLoweringMode::Placeholder => generics.placeholder_subst(self.db),

@@ -667,6 +737,12 @@ impl<'a> TyLoweringContext<'a> {

TypeNs::AdtId(it) => self.lower_path_inner(resolved_segment, it.into(), infer_args),
TypeNs::BuiltinType(it) => {
if resolved_segment.args_and_bindings.is_some() {
on_prohibited_generics_for_resolved_segment(
self,
GenericArgsProhibitedReason::PrimitiveTy,
);
}
self.lower_path_inner(resolved_segment, it.into(), infer_args)
}
TypeNs::TypeAliasId(it) => {

@@ -698,14 +774,39 @@ impl<'a> TyLoweringContext<'a> {
return (ty, None);
}

let (resolved_segment, remaining_segments) = match remaining_index {
let (module_segments, resolved_segment_idx, resolved_segment, remaining_segments) =
match remaining_index {
None => (
path.segments().strip_last(),
path.segments().len() - 1,
path.segments().last().expect("resolved path has at least one element"),
PathSegments::EMPTY,
),
Some(i) => (path.segments().get(i - 1).unwrap(), path.segments().skip(i)),
Some(i) => (
path.segments().take(i - 1),
i - 1,
path.segments().get(i - 1).unwrap(),
path.segments().skip(i),
),
};
self.lower_partly_resolved_path(resolution, resolved_segment, remaining_segments, false)

self.prohibit_generics(path_id, 0, module_segments, GenericArgsProhibitedReason::Module);

self.lower_partly_resolved_path(
resolution,
resolved_segment,
remaining_segments,
false,
&mut |this, reason| {
this.push_diagnostic(
path_id.type_ref(),
TyLoweringDiagnosticKind::GenericArgsProhibited {
segment: resolved_segment_idx as u32,
reason,
},
)
},
)
}

fn select_associated_type(&mut self, res: Option<TypeNs>, segment: PathSegment<'_>) -> Ty {

@@ -742,12 +843,8 @@ impl<'a> TyLoweringContext<'a> {
// generic params. It's inefficient to splice the `Substitution`s, so we may want
// that method to optionally take parent `Substitution` as we already know them at
// this point (`t.substitution`).
let substs = self.substs_from_path_segment(
segment.clone(),
Some(associated_ty.into()),
false,
None,
);
let substs =
self.substs_from_path_segment(segment, Some(associated_ty.into()), false, None);

let len_self =
crate::generics::generics(self.db.upcast(), associated_ty.into()).len_self();

@@ -999,6 +1096,23 @@ impl<'a> TyLoweringContext<'a> {
TraitRef { trait_id: to_chalk_trait_id(resolved), substitution: substs }
}

fn prohibit_generics(
&mut self,
path_id: PathId,
idx: u32,
segments: PathSegments<'_>,
reason: GenericArgsProhibitedReason,
) {
segments.iter().zip(idx..).for_each(|(segment, idx)| {
if segment.args_and_bindings.is_some() {
self.push_diagnostic(
path_id.type_ref(),
TyLoweringDiagnosticKind::GenericArgsProhibited { segment: idx, reason },
);
}
});
}

fn lower_trait_ref_from_path(
&mut self,
path_id: PathId,

@@ -1010,6 +1124,13 @@ impl<'a> TyLoweringContext<'a> {
TypeNs::TraitId(tr) => tr,
_ => return None,
};
// Do this after we verify it's indeed a trait to not confuse the user if they're not modules.
self.prohibit_generics(
path_id,
0,
path.segments().strip_last(),
GenericArgsProhibitedReason::Module,
);
let segment = path.segments().last().expect("path should have at least one segment");
Some(self.lower_trait_ref_from_resolved_path(resolved, segment, explicit_self_ty))
}

@@ -1233,7 +1354,9 @@ impl<'a> TyLoweringContext<'a> {
}
_ => unreachable!(),
}
ext.lower_ty(type_ref)
let ty = ext.lower_ty(type_ref);
self.diagnostics.extend(ext.diagnostics);
ty
} else {
self.lower_ty(type_ref)
};
@@ -33,6 +33,14 @@ syntax.workspace = true
tt.workspace = true
span.workspace = true

[dev-dependencies]
expect-test.workspace = true

# local deps
test-utils.workspace = true
test-fixture.workspace = true
syntax-bridge.workspace = true

[features]
in-rust-tree = ["hir-expand/in-rust-tree"]
@@ -3,23 +3,34 @@
//!
//! This probably isn't the best way to do this -- ideally, diagnostics should
//! be expressed in terms of hir types themselves.
pub use hir_ty::diagnostics::{CaseType, IncorrectCase};
use cfg::{CfgExpr, CfgOptions};
use either::Either;
use hir_def::{
hir::ExprOrPatId,
path::{hir_segment_to_ast_segment, ModPath},
type_ref::TypesSourceMap,
AssocItemId, DefWithBodyId, SyntheticSyntax,
};
use hir_expand::{name::Name, HirFileId, InFile};
use hir_ty::{
db::HirDatabase,
diagnostics::{BodyValidationDiagnostic, UnsafetyReason},
CastError, InferenceDiagnostic,
CastError, InferenceDiagnostic, InferenceTyDiagnosticSource, TyLoweringDiagnosticKind,
};
use syntax::{
ast::{self, HasGenericArgs},
AstPtr, SyntaxError, SyntaxNodePtr, TextRange,
};

use cfg::{CfgExpr, CfgOptions};
use either::Either;
pub use hir_def::VariantId;
use hir_def::{hir::ExprOrPatId, path::ModPath, AssocItemId, DefWithBodyId, SyntheticSyntax};
use hir_expand::{name::Name, HirFileId, InFile};
use syntax::{ast, AstPtr, SyntaxError, SyntaxNodePtr, TextRange};
use triomphe::Arc;

use crate::{AssocItem, Field, Local, Trait, Type};

pub use hir_def::VariantId;
pub use hir_ty::{
diagnostics::{CaseType, IncorrectCase},
GenericArgsProhibitedReason,
};

macro_rules! diagnostics {
($($diag:ident,)*) => {
#[derive(Debug)]

@@ -98,6 +109,7 @@ diagnostics![
UnresolvedIdent,
UnusedMut,
UnusedVariable,
GenericArgsProhibited,
];

#[derive(Debug)]

@@ -388,6 +400,12 @@ pub struct InvalidCast {
pub cast_ty: Type,
}

#[derive(Debug)]
pub struct GenericArgsProhibited {
pub args: InFile<AstPtr<Either<ast::GenericArgList, ast::ParamList>>>,
pub reason: GenericArgsProhibitedReason,
}

impl AnyDiagnostic {
pub(crate) fn body_validation_diagnostic(
db: &dyn HirDatabase,

@@ -527,6 +545,7 @@ impl AnyDiagnostic {
db: &dyn HirDatabase,
def: DefWithBodyId,
d: &InferenceDiagnostic,
outer_types_source_map: &TypesSourceMap,
source_map: &hir_def::body::BodySourceMap,
) -> Option<AnyDiagnostic> {
let expr_syntax = |expr| {

@@ -640,6 +659,36 @@ impl AnyDiagnostic {
let cast_ty = Type::new(db, def, cast_ty.clone());
InvalidCast { expr, error: *error, expr_ty, cast_ty }.into()
}
InferenceDiagnostic::TyDiagnostic { source, diag } => {
let source_map = match source {
InferenceTyDiagnosticSource::Body => &source_map.types,
InferenceTyDiagnosticSource::Signature => outer_types_source_map,
};
let source = match diag.source {
Either::Left(type_ref_id) => {
let Ok(source) = source_map.type_syntax(type_ref_id) else {
stdx::never!("error on synthetic type syntax");
return None;
};
source
}
Either::Right(source) => source,
};
let syntax = || source.value.to_node(&db.parse_or_expand(source.file_id));
match diag.kind {
TyLoweringDiagnosticKind::GenericArgsProhibited { segment, reason } => {
let ast::Type::PathType(syntax) = syntax() else { return None };
let segment = hir_segment_to_ast_segment(&syntax.path()?, segment)?;
let args = if let Some(generics) = segment.generic_arg_list() {
AstPtr::new(&generics).wrap_left()
} else {
AstPtr::new(&segment.param_list()?).wrap_right()
};
let args = source.with_value(args);
GenericArgsProhibited { args, reason }.into()
}
}
}
})
}
}
@@ -20,12 +20,11 @@
#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
#![recursion_limit = "512"]

mod semantics;
mod source_analyzer;

mod attrs;
mod from_id;
mod has_source;
mod semantics;
mod source_analyzer;

pub mod db;
pub mod diagnostics;

@@ -54,6 +53,7 @@ use hir_def::{
path::ImportAlias,
per_ns::PerNs,
resolver::{HasResolver, Resolver},
type_ref::TypesSourceMap,
AssocItemId, AssocItemLoc, AttrDefId, CallableDefId, ConstId, ConstParamId, CrateRootModuleId,
DefWithBodyId, EnumId, EnumVariantId, ExternCrateId, FunctionId, GenericDefId, GenericParamId,
HasModule, ImplId, InTypeConstId, ItemContainerId, LifetimeParamId, LocalFieldId, Lookup,

@@ -1802,6 +1802,25 @@ impl DefWithBody {
let krate = self.module(db).id.krate();

let (body, source_map) = db.body_with_source_map(self.into());
let item_tree_source_maps;
let outer_types_source_map = match self {
DefWithBody::Function(function) => {
let function = function.id.lookup(db.upcast()).id;
item_tree_source_maps = function.item_tree_with_source_map(db.upcast()).1;
item_tree_source_maps.function(function.value).item()
}
DefWithBody::Static(statik) => {
let statik = statik.id.lookup(db.upcast()).id;
item_tree_source_maps = statik.item_tree_with_source_map(db.upcast()).1;
item_tree_source_maps.statik(statik.value)
}
DefWithBody::Const(konst) => {
let konst = konst.id.lookup(db.upcast()).id;
item_tree_source_maps = konst.item_tree_with_source_map(db.upcast()).1;
item_tree_source_maps.konst(konst.value)
}
DefWithBody::Variant(_) | DefWithBody::InTypeConst(_) => &TypesSourceMap::EMPTY,
};

for (_, def_map) in body.blocks(db.upcast()) {
Module { id: def_map.module_id(DefMap::ROOT) }.diagnostics(db, acc, style_lints);

@@ -1861,7 +1880,13 @@ impl DefWithBody {

let infer = db.infer(self.into());
for d in &infer.diagnostics {
acc.extend(AnyDiagnostic::inference_diagnostic(db, self.into(), d, &source_map));
acc.extend(AnyDiagnostic::inference_diagnostic(
db,
self.into(),
d,
outer_types_source_map,
&source_map,
));
}

for (pat_or_expr, mismatch) in infer.type_mismatches() {
crates/ide-diagnostics/src/handlers/generic_args_prohibited.rs (new file): 242 lines

@@ -0,0 +1,242 @@
use either::Either;
use hir::GenericArgsProhibitedReason;
use ide_db::assists::Assist;
use ide_db::source_change::SourceChange;
use ide_db::text_edit::TextEdit;
use syntax::{ast, AstNode, TextRange};

use crate::{fix, Diagnostic, DiagnosticCode, DiagnosticsContext};

// Diagnostic: generic-args-prohibited
//
// This diagnostic is shown when generic arguments are provided for a type that does not accept
// generic arguments.
pub(crate) fn generic_args_prohibited(
ctx: &DiagnosticsContext<'_>,
d: &hir::GenericArgsProhibited,
) -> Diagnostic {
Diagnostic::new_with_syntax_node_ptr(
ctx,
DiagnosticCode::RustcHardError("E0109"),
describe_reason(d.reason),
d.args.map(Into::into),
)
.with_fixes(fixes(ctx, d))
}

fn describe_reason(reason: GenericArgsProhibitedReason) -> String {
let kind = match reason {
GenericArgsProhibitedReason::Module => "modules",
GenericArgsProhibitedReason::TyParam => "type parameters",
GenericArgsProhibitedReason::SelfTy => "`Self`",
GenericArgsProhibitedReason::PrimitiveTy => "builtin types",
GenericArgsProhibitedReason::EnumVariant => {
return "you can specify generic arguments on either the enum or the variant, but not both"
.to_owned();
}
};
format!("generic arguments are not allowed on {kind}")
}

fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::GenericArgsProhibited) -> Option<Vec<Assist>> {
let file_id = d.args.file_id.file_id()?;
let syntax = d.args.to_node(ctx.sema.db);
let range = match &syntax {
Either::Left(_) => syntax.syntax().text_range(),
Either::Right(param_list) => {
let path_segment = ast::PathSegment::cast(param_list.syntax().parent()?)?;
let start = if let Some(coloncolon) = path_segment.coloncolon_token() {
coloncolon.text_range().start()
} else {
param_list.syntax().text_range().start()
};
let end = if let Some(ret_type) = path_segment.ret_type() {
ret_type.syntax().text_range().end()
} else {
param_list.syntax().text_range().end()
};
TextRange::new(start, end)
}
};
Some(vec![fix(
"remove_generic_args",
"Remove these generics",
SourceChange::from_text_edit(file_id, TextEdit::delete(range)),
syntax.syntax().text_range(),
)])
}

#[cfg(test)]
mod tests {
use crate::tests::{check_diagnostics, check_fix};

#[test]
fn primitives() {
check_diagnostics(
r#"
//- /core.rs crate:core library
#![rustc_coherence_is_core]
impl str {
pub fn trim() {}
}

//- /lib.rs crate:foo deps:core
fn bar<T>() {}

fn foo() {
let _: (bool<()>, ());
// ^^^^ 💡 error: generic arguments are not allowed on builtin types
let _ = <str<'_>>::trim;
// ^^^^ 💡 error: generic arguments are not allowed on builtin types
bar::<u32<{ const { 1 + 1 } }>>();
// ^^^^^^^^^^^^^^^^^^^^^ 💡 error: generic arguments are not allowed on builtin types
}
"#,
);
}

#[test]
fn modules() {
check_diagnostics(
r#"
pub mod foo {
pub mod bar {
pub struct Baz;

impl Baz {
pub fn qux() {}
}
}
}

fn foo() {
let _: foo::<'_>::bar::Baz;
// ^^^^^^ 💡 error: generic arguments are not allowed on modules
let _ = <foo::bar<()>::Baz>::qux;
// ^^^^ 💡 error: generic arguments are not allowed on modules
}
"#,
);
}

#[test]
fn type_parameters() {
check_diagnostics(
r#"
fn foo<T, U>() {
let _: T<'a>;
// ^^^^ 💡 error: generic arguments are not allowed on type parameters
let _: U::<{ 1 + 2 }>;
// ^^^^^^^^^^^^^ 💡 error: generic arguments are not allowed on type parameters
}
"#,
);
}

#[test]
fn fn_like_generic_args() {
check_diagnostics(
r#"
fn foo() {
let _: bool(bool, i32) -> ();
// ^^^^^^^^^^^ 💡 error: generic arguments are not allowed on builtin types
}
"#,
);
}

#[test]
fn fn_signature() {
check_diagnostics(
r#"
fn foo(
_a: bool<'_>,
// ^^^^ 💡 error: generic arguments are not allowed on builtin types
_b: i32::<i64>,
// ^^^^^^^ 💡 error: generic arguments are not allowed on builtin types
_c: &(&str<1>)
// ^^^ 💡 error: generic arguments are not allowed on builtin types
) -> ((), i32<bool>) {
// ^^^^^^ 💡 error: generic arguments are not allowed on builtin types
((), 0)
}
"#,
);
}

#[test]
fn const_static_type() {
check_diagnostics(
r#"
const A: i32<bool> = 0;
// ^^^^^^ 💡 error: generic arguments are not allowed on builtin types
static A: i32::<{ 1 + 3 }> = 0;
// ^^^^^^^^^^^^^ 💡 error: generic arguments are not allowed on builtin types
"#,
);
}

#[test]
fn fix() {
check_fix(
r#"
fn foo() {
let _: bool<'_, (), { 1 + 1 }>$0;
}"#,
r#"
fn foo() {
let _: bool;
}"#,
);
check_fix(
r#"
fn foo() {
let _: bool::$0<'_, (), { 1 + 1 }>;
}"#,
r#"
fn foo() {
let _: bool;
}"#,
);
check_fix(
r#"
fn foo() {
let _: bool(i$032);
}"#,
r#"
fn foo() {
let _: bool;
}"#,
);
check_fix(
r#"
fn foo() {
let _: bool$0(i32) -> i64;
}"#,
r#"
fn foo() {
let _: bool;
}"#,
);
check_fix(
r#"
fn foo() {
let _: bool::(i$032) -> i64;
}"#,
r#"
fn foo() {
let _: bool;
}"#,
);
check_fix(
r#"
fn foo() {
let _: bool::(i32)$0;
}"#,
r#"
fn foo() {
let _: bool;
}"#,
);
}
}
@@ -167,9 +167,9 @@ fn assoc_func_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) -
}

let method_name = call.name_ref()?;
let assoc_func_call = format!("{receiver_type_adt_name}::{method_name}()");
let assoc_func_path = format!("{receiver_type_adt_name}::{method_name}");

let assoc_func_call = make::expr_path(make::path_from_text(&assoc_func_call));
let assoc_func_path = make::expr_path(make::path_from_text(&assoc_func_path));

let args: Vec<_> = if need_to_take_receiver_as_first_arg {
std::iter::once(receiver).chain(call.arg_list()?.args()).collect()

@@ -178,7 +178,7 @@ fn assoc_func_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) -
};
let args = make::arg_list(args);

let assoc_func_call_expr_string = make::expr_call(assoc_func_call, args).to_string();
let assoc_func_call_expr_string = make::expr_call(assoc_func_path, args).to_string();

let file_id = ctx.sema.original_range_opt(call.receiver()?.syntax())?.file_id;
@@ -27,6 +27,7 @@ mod handlers {
pub(crate) mod await_outside_of_async;
pub(crate) mod break_outside_of_loop;
pub(crate) mod expected_function;
pub(crate) mod generic_args_prohibited;
pub(crate) mod inactive_code;
pub(crate) mod incoherent_impl;
pub(crate) mod incorrect_case;

@@ -468,6 +469,7 @@ pub fn semantic_diagnostics(
Some(it) => it,
None => continue,
},
AnyDiagnostic::GenericArgsProhibited(d) => handlers::generic_args_prohibited::generic_args_prohibited(&ctx, &d)
};
res.push(d)
}
@@ -402,7 +402,7 @@ pub fn join_paths(paths: impl IntoIterator<Item = ast::Path>) -> ast::Path {

// FIXME: should not be pub
pub fn path_from_text(text: &str) -> ast::Path {
ast_from_text(&format!("fn main() {{ let test = {text}; }}"))
ast_from_text(&format!("fn main() {{ let test: {text}; }}"))
}

pub fn use_tree_glob() -> ast::UseTree {