8813: Get some more array lengths! r=lf- a=lf-

This is built on #8799 and thus contains its changes. I'll rebase it onto master when that one gets merged. It adds support for r-a understanding the length of:

* `let a: [u8; 2] = ...`
* `let a = b"aaa"`
* `let a = [0u8; 4]`

I have added support for getting the values of byte strings, which was not previously there. I am least confident in the correctness of this part, and it probably needs more tests: currently only one test exercises that code path (!).

Fixes #2922.

Co-authored-by: Jade <software@lfcode.ca>
This commit is contained in:
bors[bot] 2021-05-16 01:53:12 +00:00 committed by GitHub
commit a57bd59f35
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
21 changed files with 427 additions and 136 deletions

View file

@ -52,7 +52,9 @@ use hir_def::{
}; };
use hir_expand::{diagnostics::DiagnosticSink, name::name, MacroDefKind}; use hir_expand::{diagnostics::DiagnosticSink, name::name, MacroDefKind};
use hir_ty::{ use hir_ty::{
autoderef, could_unify, autoderef,
consteval::ConstExt,
could_unify,
method_resolution::{self, def_crates, TyFingerprint}, method_resolution::{self, def_crates, TyFingerprint},
primitive::UintTy, primitive::UintTy,
subst_prefix, subst_prefix,
@ -1914,6 +1916,7 @@ impl Type {
substs.iter(&Interner).filter_map(|a| a.ty(&Interner)).any(go) substs.iter(&Interner).filter_map(|a| a.ty(&Interner)).any(go)
} }
TyKind::Array(_ty, len) if len.is_unknown() => true,
TyKind::Array(ty, _) TyKind::Array(ty, _)
| TyKind::Slice(ty) | TyKind::Slice(ty)
| TyKind::Raw(_, ty) | TyKind::Raw(_, ty)

View file

@ -1006,23 +1006,27 @@ impl From<ast::BinOp> for BinaryOp {
impl From<ast::LiteralKind> for Literal { impl From<ast::LiteralKind> for Literal {
fn from(ast_lit_kind: ast::LiteralKind) -> Self { fn from(ast_lit_kind: ast::LiteralKind) -> Self {
match ast_lit_kind { match ast_lit_kind {
// FIXME: these should have actual values filled in, but unsure on perf impact
LiteralKind::IntNumber(lit) => { LiteralKind::IntNumber(lit) => {
if let builtin @ Some(_) = lit.suffix().and_then(BuiltinFloat::from_suffix) { if let builtin @ Some(_) = lit.suffix().and_then(BuiltinFloat::from_suffix) {
return Literal::Float(Default::default(), builtin); return Literal::Float(Default::default(), builtin);
} else if let builtin @ Some(_) = } else if let builtin @ Some(_) =
lit.suffix().and_then(|it| BuiltinInt::from_suffix(&it)) lit.suffix().and_then(|it| BuiltinInt::from_suffix(&it))
{ {
Literal::Int(Default::default(), builtin) Literal::Int(lit.value().unwrap_or(0) as i128, builtin)
} else { } else {
let builtin = lit.suffix().and_then(|it| BuiltinUint::from_suffix(&it)); let builtin = lit.suffix().and_then(|it| BuiltinUint::from_suffix(&it));
Literal::Uint(Default::default(), builtin) Literal::Uint(lit.value().unwrap_or(0), builtin)
} }
} }
LiteralKind::FloatNumber(lit) => { LiteralKind::FloatNumber(lit) => {
let ty = lit.suffix().and_then(|it| BuiltinFloat::from_suffix(&it)); let ty = lit.suffix().and_then(|it| BuiltinFloat::from_suffix(&it));
Literal::Float(Default::default(), ty) Literal::Float(Default::default(), ty)
} }
LiteralKind::ByteString(_) => Literal::ByteString(Default::default()), LiteralKind::ByteString(bs) => {
let text = bs.value().map(Vec::from).unwrap_or_else(Default::default);
Literal::ByteString(text)
}
LiteralKind::String(_) => Literal::String(Default::default()), LiteralKind::String(_) => Literal::String(Default::default()),
LiteralKind::Byte => Literal::Uint(Default::default(), Some(BuiltinUint::U8)), LiteralKind::Byte => Literal::Uint(Default::default(), Some(BuiltinUint::U8)),
LiteralKind::Bool(val) => Literal::Bool(val), LiteralKind::Bool(val) => Literal::Bool(val),

View file

@ -43,8 +43,8 @@ pub enum Literal {
ByteString(Vec<u8>), ByteString(Vec<u8>),
Char(char), Char(char),
Bool(bool), Bool(bool),
Int(u64, Option<BuiltinInt>), Int(i128, Option<BuiltinInt>),
Uint(u64, Option<BuiltinUint>), Uint(u128, Option<BuiltinUint>),
Float(u64, Option<BuiltinFloat>), // FIXME: f64 is not Eq Float(u64, Option<BuiltinFloat>), // FIXME: f64 is not Eq
} }

View file

@ -2,6 +2,7 @@
//! be directly created from an ast::TypeRef, without further queries. //! be directly created from an ast::TypeRef, without further queries.
use hir_expand::{name::Name, AstId, InFile}; use hir_expand::{name::Name, AstId, InFile};
use std::convert::TryInto;
use syntax::ast; use syntax::ast;
use crate::{body::LowerCtx, path::Path}; use crate::{body::LowerCtx, path::Path};
@ -79,7 +80,9 @@ pub enum TypeRef {
Path(Path), Path(Path),
RawPtr(Box<TypeRef>, Mutability), RawPtr(Box<TypeRef>, Mutability),
Reference(Box<TypeRef>, Option<LifetimeRef>, Mutability), Reference(Box<TypeRef>, Option<LifetimeRef>, Mutability),
Array(Box<TypeRef> /*, Expr*/), // FIXME: for full const generics, the latter element (length) here is going to have to be an
// expression that is further lowered later in hir_ty.
Array(Box<TypeRef>, ConstScalar),
Slice(Box<TypeRef>), Slice(Box<TypeRef>),
/// A fn pointer. Last element of the vector is the return type. /// A fn pointer. Last element of the vector is the return type.
Fn(Vec<TypeRef>, bool /*varargs*/), Fn(Vec<TypeRef>, bool /*varargs*/),
@ -140,7 +143,16 @@ impl TypeRef {
TypeRef::RawPtr(Box::new(inner_ty), mutability) TypeRef::RawPtr(Box::new(inner_ty), mutability)
} }
ast::Type::ArrayType(inner) => { ast::Type::ArrayType(inner) => {
TypeRef::Array(Box::new(TypeRef::from_ast_opt(&ctx, inner.ty()))) // FIXME: This is a hack. We should probably reuse the machinery of
// `hir_def::body::lower` to lower this into an `Expr` and then evaluate it at the
// `hir_ty` level, which would allow knowing the type of:
// let v: [u8; 2 + 2] = [0u8; 4];
let len = inner
.expr()
.map(ConstScalar::usize_from_literal_expr)
.unwrap_or(ConstScalar::Unknown);
TypeRef::Array(Box::new(TypeRef::from_ast_opt(&ctx, inner.ty())), len)
} }
ast::Type::SliceType(inner) => { ast::Type::SliceType(inner) => {
TypeRef::Slice(Box::new(TypeRef::from_ast_opt(&ctx, inner.ty()))) TypeRef::Slice(Box::new(TypeRef::from_ast_opt(&ctx, inner.ty())))
@ -212,7 +224,7 @@ impl TypeRef {
} }
TypeRef::RawPtr(type_ref, _) TypeRef::RawPtr(type_ref, _)
| TypeRef::Reference(type_ref, ..) | TypeRef::Reference(type_ref, ..)
| TypeRef::Array(type_ref) | TypeRef::Array(type_ref, _)
| TypeRef::Slice(type_ref) => go(&type_ref, f), | TypeRef::Slice(type_ref) => go(&type_ref, f),
TypeRef::ImplTrait(bounds) | TypeRef::DynTrait(bounds) => { TypeRef::ImplTrait(bounds) | TypeRef::DynTrait(bounds) => {
for bound in bounds { for bound in bounds {
@ -298,3 +310,58 @@ impl TypeBound {
} }
} }
} }
/// A concrete constant value
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum ConstScalar {
// for now, we only support the trivial case of constant evaluating the length of an array
// Note that this is u64 because the target usize may be bigger than our usize
/// A known value of the target's `usize` type
Usize(u64),
/// Case of an unknown value that rustc might know but we don't
// FIXME: this is a hack to get around chalk not being able to represent unevaluatable
// constants
// https://github.com/rust-analyzer/rust-analyzer/pull/8813#issuecomment-840679177
// https://rust-lang.zulipchat.com/#narrow/stream/144729-wg-traits/topic/Handling.20non.20evaluatable.20constants'.20equality/near/238386348
Unknown,
}
impl std::fmt::Display for ConstScalar {
    /// Renders a known length as its decimal value and an unknown one as `_`.
    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
        match self {
            ConstScalar::Usize(value) => write!(fmt, "{}", value),
            ConstScalar::Unknown => fmt.write_str("_"),
        }
    }
}
impl ConstScalar {
    /// Gets a target usize out of the ConstScalar, or `None` if it is unknown.
    pub fn as_usize(&self) -> Option<u64> {
        if let ConstScalar::Usize(value) = *self {
            Some(value)
        } else {
            None
        }
    }

    // FIXME: as per the comments on `TypeRef::Array`, this evaluation should not happen at this
    // parse stage.
    /// Lowers a literal expression to a `ConstScalar`, yielding `Unknown` for
    /// anything other than an unsuffixed or `usize`-suffixed integer literal.
    fn usize_from_literal_expr(expr: ast::Expr) -> ConstScalar {
        let raw_value = if let ast::Expr::Literal(lit) = expr {
            match lit.kind() {
                ast::LiteralKind::IntNumber(num)
                    if num.suffix().is_none() || num.suffix() == Some("usize") =>
                {
                    num.value()
                }
                _ => None,
            }
        } else {
            None
        };
        raw_value
            .and_then(|v| v.try_into().ok())
            .map(ConstScalar::Usize)
            .unwrap_or(ConstScalar::Unknown)
    }
}

View file

@ -0,0 +1,56 @@
//! Constant evaluation details
use std::convert::TryInto;
use hir_def::{
builtin_type::BuiltinUint,
expr::{Expr, Literal},
type_ref::ConstScalar,
};
use crate::{Const, ConstData, ConstValue, Interner, TyKind};
/// Extension trait for [`Const`]
pub trait ConstExt {
/// Is a [`Const`] unknown?
///
/// A constant is "unknown" when its interned value is `ConstScalar::Unknown`,
/// i.e. a value rustc might be able to evaluate but we currently cannot.
fn is_unknown(&self) -> bool;
}
impl ConstExt for Const {
    fn is_unknown(&self) -> bool {
        // Only concrete constants can be inspected here; anything else
        // reaching this point is unexpected, so it is logged and
        // conservatively treated as unknown.
        if let chalk_ir::ConstValue::Concrete(concrete) = &self.data(&Interner).value {
            matches!(concrete.interned, ConstScalar::Unknown)
        } else {
            log::error!("is_unknown was called on a non-concrete constant value! {:?}", self);
            true
        }
    }
}
// FIXME: support more than just evaluating literals
/// Evaluates `expr` as a target `usize`, handling only plain integer literals
/// that are unsuffixed or suffixed with `usize`.
pub fn eval_usize(expr: &Expr) -> Option<u64> {
    if let Expr::Literal(Literal::Uint(value, suffix)) = expr {
        if matches!(*suffix, None | Some(BuiltinUint::Usize)) {
            return (*value).try_into().ok();
        }
    }
    None
}
/// Interns a possibly-unknown target usize
///
/// `Some(v)` is interned as a concrete `ConstScalar::Usize(v)`; `None` is
/// interned as `ConstScalar::Unknown`. Either way the constant's type is
/// `usize`.
pub fn usize_const(value: Option<u64>) -> Const {
    let interned = match value {
        Some(v) => ConstScalar::Usize(v),
        None => ConstScalar::Unknown,
    };
    ConstData {
        ty: TyKind::Scalar(chalk_ir::Scalar::Uint(chalk_ir::UintTy::Usize)).intern(&Interner),
        value: ConstValue::Concrete(chalk_ir::ConcreteConst { interned }),
    }
    .intern(&Interner)
}

View file

@ -1,21 +0,0 @@
//! Handling of concrete const values
/// A concrete constant value
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum ConstScalar {
// for now, we only support the trivial case of constant evaluating the length of an array
// Note that this is u64 because the target usize may be bigger than our usize
Usize(u64),
/// Case of an unknown value that rustc might know but we don't
Unknown,
}
impl std::fmt::Display for ConstScalar {
fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
match self {
ConstScalar::Usize(us) => write!(fmt, "{}", us),
ConstScalar::Unknown => write!(fmt, "_"),
}
}
}

View file

@ -962,11 +962,10 @@ impl HirDisplay for TypeRef {
write!(f, "{}", mutability)?; write!(f, "{}", mutability)?;
inner.hir_fmt(f)?; inner.hir_fmt(f)?;
} }
TypeRef::Array(inner) => { TypeRef::Array(inner, len) => {
write!(f, "[")?; write!(f, "[")?;
inner.hir_fmt(f)?; inner.hir_fmt(f)?;
// FIXME: Array length? write!(f, "; {}]", len)?;
write!(f, "; _]")?;
} }
TypeRef::Slice(inner) => { TypeRef::Slice(inner) => {
write!(f, "[")?; write!(f, "[")?;

View file

@ -3,7 +3,7 @@
use std::iter::{repeat, repeat_with}; use std::iter::{repeat, repeat_with};
use std::{mem, sync::Arc}; use std::{mem, sync::Arc};
use chalk_ir::{cast::Cast, fold::Shift, ConstData, Mutability, TyVariableKind}; use chalk_ir::{cast::Cast, fold::Shift, Mutability, TyVariableKind};
use hir_def::{ use hir_def::{
expr::{Array, BinaryOp, Expr, ExprId, Literal, Statement, UnaryOp}, expr::{Array, BinaryOp, Expr, ExprId, Literal, Statement, UnaryOp},
path::{GenericArg, GenericArgs}, path::{GenericArg, GenericArgs},
@ -15,9 +15,7 @@ use stdx::always;
use syntax::ast::RangeOp; use syntax::ast::RangeOp;
use crate::{ use crate::{
autoderef, autoderef, consteval,
consts::ConstScalar,
dummy_usize_const,
lower::lower_to_chalk_mutability, lower::lower_to_chalk_mutability,
mapping::from_chalk, mapping::from_chalk,
method_resolution, op, method_resolution, op,
@ -25,7 +23,7 @@ use crate::{
static_lifetime, to_chalk_trait_id, static_lifetime, to_chalk_trait_id,
traits::FnTrait, traits::FnTrait,
utils::{generics, Generics}, utils::{generics, Generics},
AdtId, Binders, CallableDefId, ConstValue, FnPointer, FnSig, FnSubst, InEnvironment, Interner, AdtId, Binders, CallableDefId, FnPointer, FnSig, FnSubst, InEnvironment, Interner,
ProjectionTyExt, Rawness, Scalar, Substitution, TraitRef, Ty, TyBuilder, TyExt, TyKind, ProjectionTyExt, Rawness, Scalar, Substitution, TraitRef, Ty, TyBuilder, TyExt, TyKind,
}; };
@ -724,7 +722,7 @@ impl<'a> InferenceContext<'a> {
for expr in items.iter() { for expr in items.iter() {
self.infer_expr_coerce(*expr, &Expectation::has_type(elem_ty.clone())); self.infer_expr_coerce(*expr, &Expectation::has_type(elem_ty.clone()));
} }
Some(items.len()) Some(items.len() as u64)
} }
Array::Repeat { initializer, repeat } => { Array::Repeat { initializer, repeat } => {
self.infer_expr_coerce( self.infer_expr_coerce(
@ -737,20 +735,13 @@ impl<'a> InferenceContext<'a> {
TyKind::Scalar(Scalar::Uint(UintTy::Usize)).intern(&Interner), TyKind::Scalar(Scalar::Uint(UintTy::Usize)).intern(&Interner),
), ),
); );
// FIXME: support length for Repeat array expressions
None let repeat_expr = &self.body.exprs[*repeat];
consteval::eval_usize(repeat_expr)
} }
}; };
let cd = ConstData { TyKind::Array(elem_ty, consteval::usize_const(len)).intern(&Interner)
ty: TyKind::Scalar(Scalar::Uint(UintTy::Usize)).intern(&Interner),
value: ConstValue::Concrete(chalk_ir::ConcreteConst {
interned: len
.map(|len| ConstScalar::Usize(len as u64))
.unwrap_or(ConstScalar::Unknown),
}),
};
TyKind::Array(elem_ty, cd.intern(&Interner)).intern(&Interner)
} }
Expr::Literal(lit) => match lit { Expr::Literal(lit) => match lit {
Literal::Bool(..) => TyKind::Scalar(Scalar::Bool).intern(&Interner), Literal::Bool(..) => TyKind::Scalar(Scalar::Bool).intern(&Interner),
@ -758,11 +749,12 @@ impl<'a> InferenceContext<'a> {
TyKind::Ref(Mutability::Not, static_lifetime(), TyKind::Str.intern(&Interner)) TyKind::Ref(Mutability::Not, static_lifetime(), TyKind::Str.intern(&Interner))
.intern(&Interner) .intern(&Interner)
} }
Literal::ByteString(..) => { Literal::ByteString(bs) => {
let byte_type = TyKind::Scalar(Scalar::Uint(UintTy::U8)).intern(&Interner); let byte_type = TyKind::Scalar(Scalar::Uint(UintTy::U8)).intern(&Interner);
let array_type = let len = consteval::usize_const(Some(bs.len() as u64));
TyKind::Array(byte_type, dummy_usize_const()).intern(&Interner);
let array_type = TyKind::Array(byte_type, len).intern(&Interner);
TyKind::Ref(Mutability::Not, static_lifetime(), array_type).intern(&Interner) TyKind::Ref(Mutability::Not, static_lifetime(), array_type).intern(&Interner)
} }
Literal::Char(..) => TyKind::Scalar(Scalar::Char).intern(&Interner), Literal::Char(..) => TyKind::Scalar(Scalar::Char).intern(&Interner),

View file

@ -1,11 +1,12 @@
//! Implementation of the Chalk `Interner` trait, which allows customizing the //! Implementation of the Chalk `Interner` trait, which allows customizing the
//! representation of the various objects Chalk deals with (types, goals etc.). //! representation of the various objects Chalk deals with (types, goals etc.).
use crate::{chalk_db, consts::ConstScalar, tls, GenericArg}; use crate::{chalk_db, tls, GenericArg};
use base_db::salsa::InternId; use base_db::salsa::InternId;
use chalk_ir::{Goal, GoalData}; use chalk_ir::{Goal, GoalData};
use hir_def::{ use hir_def::{
intern::{impl_internable, InternStorage, Internable, Interned}, intern::{impl_internable, InternStorage, Internable, Interned},
type_ref::ConstScalar,
TypeAliasId, TypeAliasId,
}; };
use smallvec::SmallVec; use smallvec::SmallVec;

View file

@ -10,9 +10,9 @@ mod autoderef;
mod builder; mod builder;
mod chalk_db; mod chalk_db;
mod chalk_ext; mod chalk_ext;
pub mod consteval;
mod infer; mod infer;
mod interner; mod interner;
mod consts;
mod lower; mod lower;
mod mapping; mod mapping;
mod op; mod op;
@ -38,9 +38,13 @@ use chalk_ir::{
interner::HasInterner, interner::HasInterner,
UintTy, UintTy,
}; };
use hir_def::{expr::ExprId, type_ref::Rawness, TypeParamId}; use hir_def::{
expr::ExprId,
type_ref::{ConstScalar, Rawness},
TypeParamId,
};
use crate::{consts::ConstScalar, db::HirDatabase, display::HirDisplay, utils::generics}; use crate::{db::HirDatabase, display::HirDisplay, utils::generics};
pub use autoderef::autoderef; pub use autoderef::autoderef;
pub use builder::TyBuilder; pub use builder::TyBuilder;

View file

@ -29,8 +29,8 @@ use stdx::impl_from;
use syntax::ast; use syntax::ast;
use crate::{ use crate::{
consteval,
db::HirDatabase, db::HirDatabase,
dummy_usize_const,
mapping::ToChalk, mapping::ToChalk,
static_lifetime, to_assoc_type_id, to_chalk_trait_id, to_placeholder_idx, static_lifetime, to_assoc_type_id, to_chalk_trait_id, to_placeholder_idx,
utils::{ utils::{
@ -172,11 +172,12 @@ impl<'a> TyLoweringContext<'a> {
let inner_ty = self.lower_ty(inner); let inner_ty = self.lower_ty(inner);
TyKind::Raw(lower_to_chalk_mutability(*mutability), inner_ty).intern(&Interner) TyKind::Raw(lower_to_chalk_mutability(*mutability), inner_ty).intern(&Interner)
} }
TypeRef::Array(inner) => { TypeRef::Array(inner, len) => {
let inner_ty = self.lower_ty(inner); let inner_ty = self.lower_ty(inner);
// FIXME: we don't have length info here because we don't store an expression for
// the length let const_len = consteval::usize_const(len.as_usize());
TyKind::Array(inner_ty, dummy_usize_const()).intern(&Interner)
TyKind::Array(inner_ty, const_len).intern(&Interner)
} }
TypeRef::Slice(inner) => { TypeRef::Slice(inner) => {
let inner_ty = self.lower_ty(inner); let inner_ty = self.lower_ty(inner);

View file

@ -64,42 +64,42 @@ fn coerce_places() {
81..92 '{ loop {} }': T 81..92 '{ loop {} }': T
83..90 'loop {}': ! 83..90 'loop {}': !
88..90 '{}': () 88..90 '{}': ()
121..132 '{ loop {} }': *mut [T; _] 121..132 '{ loop {} }': *mut [T; 2]
123..130 'loop {}': ! 123..130 'loop {}': !
128..130 '{}': () 128..130 '{}': ()
159..172 '{ gen() }': *mut [U] 159..172 '{ gen() }': *mut [U]
165..168 'gen': fn gen<U>() -> *mut [U; _] 165..168 'gen': fn gen<U>() -> *mut [U; 2]
165..170 'gen()': *mut [U; _] 165..170 'gen()': *mut [U; 2]
185..419 '{ ...rr); }': () 185..419 '{ ...rr); }': ()
195..198 'arr': &[u8; _] 195..198 'arr': &[u8; 1]
211..215 '&[1]': &[u8; 1] 211..215 '&[1]': &[u8; 1]
212..215 '[1]': [u8; 1] 212..215 '[1]': [u8; 1]
213..214 '1': u8 213..214 '1': u8
226..227 'a': &[u8] 226..227 'a': &[u8]
236..239 'arr': &[u8; _] 236..239 'arr': &[u8; 1]
249..250 'b': u8 249..250 'b': u8
253..254 'f': fn f<u8>(&[u8]) -> u8 253..254 'f': fn f<u8>(&[u8]) -> u8
253..259 'f(arr)': u8 253..259 'f(arr)': u8
255..258 'arr': &[u8; _] 255..258 'arr': &[u8; 1]
269..270 'c': &[u8] 269..270 'c': &[u8]
279..286 '{ arr }': &[u8] 279..286 '{ arr }': &[u8]
281..284 'arr': &[u8; _] 281..284 'arr': &[u8; 1]
296..297 'd': u8 296..297 'd': u8
300..301 'g': fn g<u8>(S<&[u8]>) -> u8 300..301 'g': fn g<u8>(S<&[u8]>) -> u8
300..315 'g(S { a: arr })': u8 300..315 'g(S { a: arr })': u8
302..314 'S { a: arr }': S<&[u8]> 302..314 'S { a: arr }': S<&[u8]>
309..312 'arr': &[u8; _] 309..312 'arr': &[u8; 1]
325..326 'e': [&[u8]; _] 325..326 'e': [&[u8]; 1]
340..345 '[arr]': [&[u8]; 1] 340..345 '[arr]': [&[u8]; 1]
341..344 'arr': &[u8; _] 341..344 'arr': &[u8; 1]
355..356 'f': [&[u8]; _] 355..356 'f': [&[u8]; 2]
370..378 '[arr; 2]': [&[u8]; _] 370..378 '[arr; 2]': [&[u8]; 2]
371..374 'arr': &[u8; _] 371..374 'arr': &[u8; 1]
376..377 '2': usize 376..377 '2': usize
388..389 'g': (&[u8], &[u8]) 388..389 'g': (&[u8], &[u8])
406..416 '(arr, arr)': (&[u8], &[u8]) 406..416 '(arr, arr)': (&[u8], &[u8])
407..410 'arr': &[u8; _] 407..410 'arr': &[u8; 1]
412..415 'arr': &[u8; _] 412..415 'arr': &[u8; 1]
"#]], "#]],
); );
} }
@ -159,7 +159,7 @@ fn infer_custom_coerce_unsized() {
impl<'a, 'b: 'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a U> for &'b T {} impl<'a, 'b: 'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a U> for &'b T {}
impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*mut U> for *mut T {} impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*mut U> for *mut T {}
"#, "#,
expect![[r" expect![[r#"
257..258 'x': A<[T]> 257..258 'x': A<[T]>
278..283 '{ x }': A<[T]> 278..283 '{ x }': A<[T]>
280..281 'x': A<[T]> 280..281 'x': A<[T]>
@ -169,23 +169,23 @@ fn infer_custom_coerce_unsized() {
333..334 'x': C<[T]> 333..334 'x': C<[T]>
354..359 '{ x }': C<[T]> 354..359 '{ x }': C<[T]>
356..357 'x': C<[T]> 356..357 'x': C<[T]>
369..370 'a': A<[u8; _]> 369..370 'a': A<[u8; 2]>
384..385 'b': B<[u8; _]> 384..385 'b': B<[u8; 2]>
399..400 'c': C<[u8; _]> 399..400 'c': C<[u8; 2]>
414..480 '{ ...(c); }': () 414..480 '{ ...(c); }': ()
424..425 'd': A<[{unknown}]> 424..425 'd': A<[{unknown}]>
428..432 'foo1': fn foo1<{unknown}>(A<[{unknown}]>) -> A<[{unknown}]> 428..432 'foo1': fn foo1<{unknown}>(A<[{unknown}]>) -> A<[{unknown}]>
428..435 'foo1(a)': A<[{unknown}]> 428..435 'foo1(a)': A<[{unknown}]>
433..434 'a': A<[u8; _]> 433..434 'a': A<[u8; 2]>
445..446 'e': B<[u8]> 445..446 'e': B<[u8]>
449..453 'foo2': fn foo2<u8>(B<[u8]>) -> B<[u8]> 449..453 'foo2': fn foo2<u8>(B<[u8]>) -> B<[u8]>
449..456 'foo2(b)': B<[u8]> 449..456 'foo2(b)': B<[u8]>
454..455 'b': B<[u8; _]> 454..455 'b': B<[u8; 2]>
466..467 'f': C<[u8]> 466..467 'f': C<[u8]>
470..474 'foo3': fn foo3<u8>(C<[u8]>) -> C<[u8]> 470..474 'foo3': fn foo3<u8>(C<[u8]>) -> C<[u8]>
470..477 'foo3(c)': C<[u8]> 470..477 'foo3(c)': C<[u8]>
475..476 'c': C<[u8; _]> 475..476 'c': C<[u8; 2]>
"]], "#]],
); );
} }

View file

@ -345,19 +345,19 @@ fn infer_pattern_match_arr() {
"#, "#,
expect![[r#" expect![[r#"
10..179 '{ ... } }': () 10..179 '{ ... } }': ()
20..23 'arr': [f64; _] 20..23 'arr': [f64; 2]
36..46 '[0.0, 1.0]': [f64; 2] 36..46 '[0.0, 1.0]': [f64; 2]
37..40 '0.0': f64 37..40 '0.0': f64
42..45 '1.0': f64 42..45 '1.0': f64
52..177 'match ... }': () 52..177 'match ... }': ()
58..61 'arr': [f64; _] 58..61 'arr': [f64; 2]
72..80 '[1.0, a]': [f64; _] 72..80 '[1.0, a]': [f64; 2]
73..76 '1.0': f64 73..76 '1.0': f64
73..76 '1.0': f64 73..76 '1.0': f64
78..79 'a': f64 78..79 'a': f64
84..110 '{ ... }': () 84..110 '{ ... }': ()
98..99 'a': f64 98..99 'a': f64
120..126 '[b, c]': [f64; _] 120..126 '[b, c]': [f64; 2]
121..122 'b': f64 121..122 'b': f64
124..125 'c': f64 124..125 'c': f64
130..171 '{ ... }': () 130..171 '{ ... }': ()

View file

@ -488,23 +488,34 @@ fn infer_literals() {
mod foo {} mod foo {}
"#; "#;
br#"yolo"#; br#"yolo"#;
let a = b"a\x20b\
c";
let b = br"g\
h";
let c = br#"x"\"yb"#;
} }
"##, "##,
expect![[r##" expect![[r##"
10..216 '{ ...o"#; }': () 18..478 '{ ... }': ()
16..20 '5i32': i32 32..36 '5i32': i32
26..30 '5f32': f32 50..54 '5f32': f32
36..40 '5f64': f64 68..72 '5f64': f64
46..53 '"hello"': &str 86..93 '"hello"': &str
59..67 'b"bytes"': &[u8; _] 107..115 'b"bytes"': &[u8; 5]
73..76 ''c'': char 129..132 ''c'': char
82..86 'b'b'': u8 146..150 'b'b'': u8
92..96 '3.14': f64 164..168 '3.14': f64
102..106 '5000': i32 182..186 '5000': i32
112..117 'false': bool 200..205 'false': bool
123..127 'true': bool 219..223 'true': bool
133..197 'r#" ... "#': &str 237..333 'r#" ... "#': &str
203..213 'br#"yolo"#': &[u8; _] 347..357 'br#"yolo"#': &[u8; 4]
375..376 'a': &[u8; 4]
379..403 'b"a\x2... c"': &[u8; 4]
421..422 'b': &[u8; 4]
425..433 'br"g\ h"': &[u8; 4]
451..452 'c': &[u8; 6]
455..467 'br#"x"\"yb"#': &[u8; 6]
"##]], "##]],
); );
} }
@ -1260,12 +1271,14 @@ fn infer_array() {
let b = [a, ["b"]]; let b = [a, ["b"]];
let x: [u8; 0] = []; let x: [u8; 0] = [];
// FIXME: requires const evaluation/taking type from rhs somehow
let y: [u8; 2+2] = [1,2,3,4];
} }
"#, "#,
expect![[r#" expect![[r#"
8..9 'x': &str 8..9 'x': &str
17..18 'y': isize 17..18 'y': isize
27..292 '{ ... []; }': () 27..395 '{ ...,4]; }': ()
37..38 'a': [&str; 1] 37..38 'a': [&str; 1]
41..44 '[x]': [&str; 1] 41..44 '[x]': [&str; 1]
42..43 'x': &str 42..43 'x': &str
@ -1313,8 +1326,14 @@ fn infer_array() {
255..256 'a': [&str; 1] 255..256 'a': [&str; 1]
258..263 '["b"]': [&str; 1] 258..263 '["b"]': [&str; 1]
259..262 '"b"': &str 259..262 '"b"': &str
274..275 'x': [u8; _] 274..275 'x': [u8; 0]
287..289 '[]': [u8; 0] 287..289 '[]': [u8; 0]
368..369 'y': [u8; _]
383..392 '[1,2,3,4]': [u8; 4]
384..385 '1': u8
386..387 '2': u8
388..389 '3': u8
390..391 '4': u8
"#]], "#]],
); );
} }
@ -2409,38 +2428,38 @@ fn infer_operator_overload() {
320..422 '{ ... }': V2 320..422 '{ ... }': V2
334..335 'x': f32 334..335 'x': f32
338..342 'self': V2 338..342 'self': V2
338..344 'self.0': [f32; _] 338..344 'self.0': [f32; 2]
338..347 'self.0[0]': {unknown} 338..347 'self.0[0]': {unknown}
338..358 'self.0...s.0[0]': f32 338..358 'self.0...s.0[0]': f32
345..346 '0': i32 345..346 '0': i32
350..353 'rhs': V2 350..353 'rhs': V2
350..355 'rhs.0': [f32; _] 350..355 'rhs.0': [f32; 2]
350..358 'rhs.0[0]': {unknown} 350..358 'rhs.0[0]': {unknown}
356..357 '0': i32 356..357 '0': i32
372..373 'y': f32 372..373 'y': f32
376..380 'self': V2 376..380 'self': V2
376..382 'self.0': [f32; _] 376..382 'self.0': [f32; 2]
376..385 'self.0[1]': {unknown} 376..385 'self.0[1]': {unknown}
376..396 'self.0...s.0[1]': f32 376..396 'self.0...s.0[1]': f32
383..384 '1': i32 383..384 '1': i32
388..391 'rhs': V2 388..391 'rhs': V2
388..393 'rhs.0': [f32; _] 388..393 'rhs.0': [f32; 2]
388..396 'rhs.0[1]': {unknown} 388..396 'rhs.0[1]': {unknown}
394..395 '1': i32 394..395 '1': i32
406..408 'V2': V2([f32; _]) -> V2 406..408 'V2': V2([f32; 2]) -> V2
406..416 'V2([x, y])': V2 406..416 'V2([x, y])': V2
409..415 '[x, y]': [f32; 2] 409..415 '[x, y]': [f32; 2]
410..411 'x': f32 410..411 'x': f32
413..414 'y': f32 413..414 'y': f32
436..519 '{ ... vb; }': () 436..519 '{ ... vb; }': ()
446..448 'va': V2 446..448 'va': V2
451..453 'V2': V2([f32; _]) -> V2 451..453 'V2': V2([f32; 2]) -> V2
451..465 'V2([0.0, 1.0])': V2 451..465 'V2([0.0, 1.0])': V2
454..464 '[0.0, 1.0]': [f32; 2] 454..464 '[0.0, 1.0]': [f32; 2]
455..458 '0.0': f32 455..458 '0.0': f32
460..463 '1.0': f32 460..463 '1.0': f32
475..477 'vb': V2 475..477 'vb': V2
480..482 'V2': V2([f32; _]) -> V2 480..482 'V2': V2([f32; 2]) -> V2
480..494 'V2([0.0, 1.0])': V2 480..494 'V2([0.0, 1.0])': V2
483..493 '[0.0, 1.0]': [f32; 2] 483..493 '[0.0, 1.0]': [f32; 2]
484..487 '0.0': f32 484..487 '0.0': f32

View file

@ -3474,3 +3474,100 @@ fn main(){
"#]], "#]],
) )
} }
#[test]
fn array_length() {
// Arrays of different, known lengths must pick different trait impls:
// `[u8; 2]` and `[u8; 4]` each have their own `impl T` with a distinct
// `Output` type, so `do_thing` must resolve against the inferred length.
check_infer(
r#"
trait T {
type Output;
fn do_thing(&self) -> Self::Output;
}
impl T for [u8; 4] {
type Output = usize;
fn do_thing(&self) -> Self::Output {
2
}
}
impl T for [u8; 2] {
type Output = u8;
fn do_thing(&self) -> Self::Output {
2
}
}
fn main() {
let v = [0u8; 2];
let v2 = v.do_thing();
let v3 = [0u8; 4];
let v4 = v3.do_thing();
}
"#,
expect![[r#"
44..48 'self': &Self
133..137 'self': &[u8; 4]
155..172 '{ ... }': usize
165..166 '2': usize
236..240 'self': &[u8; 2]
258..275 '{ ... }': u8
268..269 '2': u8
289..392 '{ ...g(); }': ()
299..300 'v': [u8; 2]
303..311 '[0u8; 2]': [u8; 2]
304..307 '0u8': u8
309..310 '2': usize
321..323 'v2': u8
326..327 'v': [u8; 2]
326..338 'v.do_thing()': u8
348..350 'v3': [u8; 4]
353..361 '[0u8; 4]': [u8; 4]
354..357 '0u8': u8
359..360 '4': usize
371..373 'v4': usize
376..378 'v3': [u8; 4]
376..389 'v3.do_thing()': usize
"#]],
)
}
// FIXME: We should infer the length of the returned array :)
#[test]
fn const_generics() {
// Const generics are not evaluated yet: the length parameter `L` stays
// unknown, so the expectations below show `[u8; _]` wherever `L` would
// appear — only the directly-written `[0u8; 2]` has a concrete length.
check_infer(
r#"
trait T {
type Output;
fn do_thing(&self) -> Self::Output;
}
impl<const L: usize> T for [u8; L] {
type Output = [u8; L];
fn do_thing(&self) -> Self::Output {
*self
}
}
fn main() {
let v = [0u8; 2];
let v2 = v.do_thing();
}
"#,
expect![[r#"
44..48 'self': &Self
151..155 'self': &[u8; _]
173..194 '{ ... }': [u8; _]
183..188 '*self': [u8; _]
184..188 'self': &[u8; _]
208..260 '{ ...g(); }': ()
218..219 'v': [u8; 2]
222..230 '[0u8; 2]': [u8; 2]
223..226 '0u8': u8
228..229 '2': usize
240..242 'v2': [u8; _]
245..246 'v': [u8; 2]
245..257 'v.do_thing()': [u8; _]
"#]],
)
}

View file

@ -4,7 +4,7 @@ use ide_assists::utils::extract_trivial_expression;
use itertools::Itertools; use itertools::Itertools;
use syntax::{ use syntax::{
algo::non_trivia_sibling, algo::non_trivia_sibling,
ast::{self, AstNode, AstToken}, ast::{self, AstNode, AstToken, IsString},
Direction, NodeOrToken, SourceFile, Direction, NodeOrToken, SourceFile,
SyntaxKind::{self, USE_TREE, WHITESPACE}, SyntaxKind::{self, USE_TREE, WHITESPACE},
SyntaxNode, SyntaxToken, TextRange, TextSize, T, SyntaxNode, SyntaxToken, TextRange, TextSize, T,

View file

@ -6,7 +6,7 @@ use either::Either;
use hir::{InFile, Semantics}; use hir::{InFile, Semantics};
use ide_db::{call_info::ActiveParameter, helpers::rust_doc::is_rust_fence, SymbolKind}; use ide_db::{call_info::ActiveParameter, helpers::rust_doc::is_rust_fence, SymbolKind};
use syntax::{ use syntax::{
ast::{self, AstNode}, ast::{self, AstNode, IsString},
AstToken, NodeOrToken, SyntaxNode, SyntaxToken, TextRange, TextSize, AstToken, NodeOrToken, SyntaxNode, SyntaxToken, TextRange, TextSize,
}; };

View file

@ -198,6 +198,34 @@ fn main() {
) )
} }
/// https://github.com/rust-analyzer/rust-analyzer/issues/2922
#[test]
fn regression_issue_2922() {
// `add_explicit_type` should produce a concrete array length for a repeat
// expression with a literal length, and stay unavailable when the length
// would require const evaluation we do not support (`2+2`).
check_assist(
add_explicit_type,
r#"
fn main() {
let $0v = [0.0; 2];
}
"#,
r#"
fn main() {
let v: [f64; 2] = [0.0; 2];
}
"#,
);
// note: this may break later if we add more consteval. it just needs to be something that our
// consteval engine doesn't understand
check_assist_not_applicable(
add_explicit_type,
r#"
fn main() {
let $0l = [0.0; 2+2];
}
"#,
);
}
#[test] #[test]
fn default_generics_should_not_be_added() { fn default_generics_should_not_be_added() {
check_assist( check_assist(

View file

@ -1,6 +1,6 @@
use std::borrow::Cow; use std::borrow::Cow;
use syntax::{ast, AstToken, TextRange, TextSize}; use syntax::{ast, ast::IsString, AstToken, TextRange, TextSize};
use crate::{AssistContext, AssistId, AssistKind, Assists}; use crate::{AssistContext, AssistId, AssistKind, Assists};

View file

@ -1,4 +1,4 @@
use syntax::{ast, AstToken, SyntaxKind::STRING}; use syntax::{ast, ast::IsString, AstToken, SyntaxKind::STRING};
use crate::{AssistContext, AssistId, AssistKind, Assists}; use crate::{AssistContext, AssistId, AssistKind, Assists};

View file

@ -143,6 +143,30 @@ impl QuoteOffsets {
} }
} }
pub trait IsString: AstToken {
    /// Computes the quote and content offsets of this string-like literal,
    /// rebased from token-local positions to absolute file positions.
    fn quote_offsets(&self) -> Option<QuoteOffsets> {
        let relative = QuoteOffsets::new(self.text())?;
        let base = self.syntax().text_range().start();
        Some(QuoteOffsets {
            quotes: (relative.quotes.0 + base, relative.quotes.1 + base),
            contents: relative.contents + base,
        })
    }
    /// Range covering the literal's contents, excluding the quotes.
    fn text_range_between_quotes(&self) -> Option<TextRange> {
        self.quote_offsets().map(|offsets| offsets.contents)
    }
    /// Range covering the opening quote.
    fn open_quote_text_range(&self) -> Option<TextRange> {
        self.quote_offsets().map(|offsets| offsets.quotes.0)
    }
    /// Range covering the closing quote.
    fn close_quote_text_range(&self) -> Option<TextRange> {
        self.quote_offsets().map(|offsets| offsets.quotes.1)
    }
}
impl IsString for ast::String {}
impl ast::String { impl ast::String {
pub fn is_raw(&self) -> bool { pub fn is_raw(&self) -> bool {
self.text().starts_with('r') self.text().starts_with('r')
@ -187,32 +211,49 @@ impl ast::String {
(false, false) => Some(Cow::Owned(buf)), (false, false) => Some(Cow::Owned(buf)),
} }
} }
pub fn quote_offsets(&self) -> Option<QuoteOffsets> {
let text = self.text();
let offsets = QuoteOffsets::new(text)?;
let o = self.syntax().text_range().start();
let offsets = QuoteOffsets {
quotes: (offsets.quotes.0 + o, offsets.quotes.1 + o),
contents: offsets.contents + o,
};
Some(offsets)
}
pub fn text_range_between_quotes(&self) -> Option<TextRange> {
self.quote_offsets().map(|it| it.contents)
}
pub fn open_quote_text_range(&self) -> Option<TextRange> {
self.quote_offsets().map(|it| it.quotes.0)
}
pub fn close_quote_text_range(&self) -> Option<TextRange> {
self.quote_offsets().map(|it| it.quotes.1)
}
} }
impl IsString for ast::ByteString {}
impl ast::ByteString { impl ast::ByteString {
pub fn is_raw(&self) -> bool { pub fn is_raw(&self) -> bool {
self.text().starts_with("br") self.text().starts_with("br")
} }
/// Computes the unescaped value of this byte-string literal.
///
/// Returns `None` if the literal contains an invalid escape sequence, a
/// borrowed slice of the token text when no escape needed rewriting, and an
/// owned buffer otherwise.
pub fn value(&self) -> Option<Cow<'_, [u8]>> {
// Raw byte strings (recognized by `is_raw`) contain no escapes: slice the
// bytes straight out of the token text between the quotes.
if self.is_raw() {
let text = self.text();
let text =
&text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
return Some(Cow::Borrowed(text.as_bytes()));
}
let text = self.text();
let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
let mut buf: Vec<u8> = Vec::new();
let mut text_iter = text.chars();
let mut has_error = false;
// `buf.capacity() == 0` doubles as a "no escape expanded yet" flag: while
// each unescaped char equals the next raw char nothing is copied, and only
// on the first divergence is the already-matched prefix copied into `buf`.
// NOTE: the second guard deliberately advances `text_iter` as a side effect.
unescape_literal(text, Mode::ByteStr, &mut |char_range, unescaped_char| match (
unescaped_char,
buf.capacity() == 0,
) {
(Ok(c), false) => buf.push(c as u8),
(Ok(c), true) if char_range.len() == 1 && Some(c) == text_iter.next() => (),
(Ok(c), true) => {
buf.reserve_exact(text.len());
buf.extend_from_slice(&text[..char_range.start].as_bytes());
buf.push(c as u8);
}
(Err(_), _) => has_error = true,
});
// Zero capacity here means the callback never allocated, i.e. the unescaped
// value is byte-identical to the raw contents and can be borrowed.
match (has_error, buf.capacity() == 0) {
(true, _) => None,
(false, true) => Some(Cow::Borrowed(text.as_bytes())),
(false, false) => Some(Cow::Owned(buf)),
}
}
} }
#[derive(Debug)] #[derive(Debug)]