mirror of https://github.com/rust-lang/rust-analyzer (synced 2025-01-13 21:54:42 +00:00)

commit 3a1054fc1c "Replace x with it"
parent 4125fea0a7
42 changed files with 590 additions and 582 deletions
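Every hunk below applies the same mechanical rename of throwaway closure and binding variables from `x` to `it`. A representative pair of changed lines, copied from one of the hunks further down (shown here only to illustrate the pattern):

-let size = layout.map(|x| x.size.bytes_usize()).unwrap_or(16);
+let size = layout.map(|it| it.size.bytes_usize()).unwrap_or(16);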
@@ -224,7 +224,7 @@ impl ChangeFixture {
 false,
 CrateOrigin::Local { repo: None, name: None },
 default_target_data_layout
-.map(|x| x.into())
+.map(|it| it.into())
 .ok_or_else(|| "target_data_layout unset".into()),
 Some(toolchain),
 );
@@ -69,7 +69,7 @@ impl CfgOptions {
 }

 pub fn get_cfg_keys(&self) -> impl Iterator<Item = &SmolStr> {
-self.enabled.iter().map(|x| match x {
+self.enabled.iter().map(|it| match it {
 CfgAtom::Flag(key) => key,
 CfgAtom::KeyValue { key, .. } => key,
 })
@@ -79,7 +79,7 @@ impl CfgOptions {
 &'a self,
 cfg_key: &'a str,
 ) -> impl Iterator<Item = &'a SmolStr> + 'a {
-self.enabled.iter().filter_map(move |x| match x {
+self.enabled.iter().filter_map(move |it| match it {
 CfgAtom::KeyValue { key, value } if cfg_key == key => Some(value),
 _ => None,
 })
@@ -276,13 +276,13 @@ impl Attrs {
 }

 pub fn is_test(&self) -> bool {
-self.iter().any(|x| {
-x.path()
+self.iter().any(|it| {
+it.path()
 .segments()
 .iter()
 .rev()
 .zip(["core", "prelude", "v1", "test"].iter().rev())
-.all(|x| x.0.as_str() == Some(x.1))
+.all(|it| it.0.as_str() == Some(it.1))
 })
 }

@@ -304,7 +304,7 @@ use std::slice::Iter as SliceIter;
 pub enum DocAtom {
 /// eg. `#[doc(hidden)]`
 Flag(SmolStr),
-/// eg. `#[doc(alias = "x")]`
+/// eg. `#[doc(alias = "it")]`
 ///
 /// Note that a key can have multiple values that are all considered "active" at the same time.
 /// For example, `#[doc(alias = "x")]` and `#[doc(alias = "y")]`.
@@ -273,10 +273,10 @@ impl Body {

 pub fn is_binding_upvar(&self, binding: BindingId, relative_to: ExprId) -> bool {
 match self.binding_owners.get(&binding) {
-Some(x) => {
+Some(it) => {
 // We assign expression ids in a way that outer closures will receive
 // a lower id
-x.into_raw() < relative_to.into_raw()
+it.into_raw() < relative_to.into_raw()
 }
 None => true,
 }
@@ -297,11 +297,11 @@ impl ExprCollector<'_> {
 let (result_expr_id, prev_binding_owner) =
 this.initialize_binding_owner(syntax_ptr);
 let inner_expr = this.collect_block(e);
-let x = this.db.intern_anonymous_const(ConstBlockLoc {
+let it = this.db.intern_anonymous_const(ConstBlockLoc {
 parent: this.owner,
 root: inner_expr,
 });
-this.body.exprs[result_expr_id] = Expr::Const(x);
+this.body.exprs[result_expr_id] = Expr::Const(it);
 this.current_binding_owner = prev_binding_owner;
 result_expr_id
 })
@@ -324,10 +324,10 @@ impl ExprCollector<'_> {
 ast::Expr::CallExpr(e) => {
 let is_rustc_box = {
 let attrs = e.attrs();
-attrs.filter_map(|x| x.as_simple_atom()).any(|x| x == "rustc_box")
+attrs.filter_map(|it| it.as_simple_atom()).any(|it| it == "rustc_box")
 };
 if is_rustc_box {
-let expr = self.collect_expr_opt(e.arg_list().and_then(|x| x.args().next()));
+let expr = self.collect_expr_opt(e.arg_list().and_then(|it| it.args().next()));
 self.alloc_expr(Expr::Box { expr }, syntax_ptr)
 } else {
 let callee = self.collect_expr_opt(e.expr());
@@ -781,7 +781,7 @@ impl ExprCollector<'_> {
 pat: self.alloc_pat_desugared(some_pat),
 guard: None,
 expr: self.with_opt_labeled_rib(label, |this| {
-this.collect_expr_opt(e.loop_body().map(|x| x.into()))
+this.collect_expr_opt(e.loop_body().map(|it| it.into()))
 }),
 };
 let iter_name = Name::generate_new_name();
@@ -874,10 +874,10 @@ impl ExprCollector<'_> {
 }),
 guard: None,
 expr: {
-let x = self.alloc_expr(Expr::Path(Path::from(break_name)), syntax_ptr.clone());
+let it = self.alloc_expr(Expr::Path(Path::from(break_name)), syntax_ptr.clone());
 let callee = self.alloc_expr(Expr::Path(try_from_residual), syntax_ptr.clone());
 let result = self.alloc_expr(
-Expr::Call { callee, args: Box::new([x]), is_assignee_expr: false },
+Expr::Call { callee, args: Box::new([it]), is_assignee_expr: false },
 syntax_ptr.clone(),
 );
 self.alloc_expr(
@@ -1240,12 +1240,12 @@ impl ExprCollector<'_> {
 pats.push(self.collect_pat(first, binding_list));
 binding_list.reject_new = true;
 for rest in it {
-for (_, x) in binding_list.is_used.iter_mut() {
-*x = false;
+for (_, it) in binding_list.is_used.iter_mut() {
+*it = false;
 }
 pats.push(self.collect_pat(rest, binding_list));
-for (&id, &x) in binding_list.is_used.iter() {
-if !x {
+for (&id, &is_used) in binding_list.is_used.iter() {
+if !is_used {
 self.body.bindings[id].problems =
 Some(BindingProblems::NotBoundAcrossAll);
 }
@@ -1352,9 +1352,9 @@ impl ExprCollector<'_> {
 // FIXME: implement in a way that also builds source map and calculates assoc resolutions in type inference.
 ast::Pat::RangePat(p) => {
 let mut range_part_lower = |p: Option<ast::Pat>| {
-p.and_then(|x| match &x {
-ast::Pat::LiteralPat(x) => {
-Some(Box::new(LiteralOrConst::Literal(pat_literal_to_hir(x)?.0)))
+p.and_then(|it| match &it {
+ast::Pat::LiteralPat(it) => {
+Some(Box::new(LiteralOrConst::Literal(pat_literal_to_hir(it)?.0)))
 }
 ast::Pat::IdentPat(p) => {
 let name =
@@ -67,21 +67,21 @@ pub enum TypeOrConstParamData {
 impl TypeOrConstParamData {
 pub fn name(&self) -> Option<&Name> {
 match self {
-TypeOrConstParamData::TypeParamData(x) => x.name.as_ref(),
-TypeOrConstParamData::ConstParamData(x) => Some(&x.name),
+TypeOrConstParamData::TypeParamData(it) => it.name.as_ref(),
+TypeOrConstParamData::ConstParamData(it) => Some(&it.name),
 }
 }

 pub fn has_default(&self) -> bool {
 match self {
-TypeOrConstParamData::TypeParamData(x) => x.default.is_some(),
-TypeOrConstParamData::ConstParamData(x) => x.has_default,
+TypeOrConstParamData::TypeParamData(it) => it.default.is_some(),
+TypeOrConstParamData::ConstParamData(it) => it.has_default,
 }
 }

 pub fn type_param(&self) -> Option<&TypeParamData> {
 match self {
-TypeOrConstParamData::TypeParamData(x) => Some(x),
+TypeOrConstParamData::TypeParamData(it) => Some(it),
 TypeOrConstParamData::ConstParamData(_) => None,
 }
 }
@@ -89,14 +89,14 @@ impl TypeOrConstParamData {
 pub fn const_param(&self) -> Option<&ConstParamData> {
 match self {
 TypeOrConstParamData::TypeParamData(_) => None,
-TypeOrConstParamData::ConstParamData(x) => Some(x),
+TypeOrConstParamData::ConstParamData(it) => Some(it),
 }
 }

 pub fn is_trait_self(&self) -> bool {
 match self {
-TypeOrConstParamData::TypeParamData(x) => {
-x.provenance == TypeParamProvenance::TraitSelf
+TypeOrConstParamData::TypeParamData(it) => {
+it.provenance == TypeParamProvenance::TraitSelf
 }
 TypeOrConstParamData::ConstParamData(_) => false,
 }
@@ -425,8 +425,8 @@ impl ConstRef {
 }
 match expr {
 ast::Expr::PathExpr(p) if is_path_ident(&p) => {
-match p.path().and_then(|x| x.segment()).and_then(|x| x.name_ref()) {
-Some(x) => Self::Path(x.as_name()),
+match p.path().and_then(|it| it.segment()).and_then(|it| it.name_ref()) {
+Some(it) => Self::Path(it.as_name()),
 None => Self::Scalar(LiteralConstRef::Unknown),
 }
 }
@@ -406,14 +406,14 @@ impl TypeParamId {

 impl TypeParamId {
 /// Caller should check if this toc id really belongs to a type
-pub fn from_unchecked(x: TypeOrConstParamId) -> Self {
-Self(x)
+pub fn from_unchecked(it: TypeOrConstParamId) -> Self {
+Self(it)
 }
 }

 impl From<TypeParamId> for TypeOrConstParamId {
-fn from(x: TypeParamId) -> Self {
-x.0
+fn from(it: TypeParamId) -> Self {
+it.0
 }
 }

@@ -432,14 +432,14 @@ impl ConstParamId {

 impl ConstParamId {
 /// Caller should check if this toc id really belongs to a const
-pub fn from_unchecked(x: TypeOrConstParamId) -> Self {
-Self(x)
+pub fn from_unchecked(it: TypeOrConstParamId) -> Self {
+Self(it)
 }
 }

 impl From<ConstParamId> for TypeOrConstParamId {
-fn from(x: ConstParamId) -> Self {
-x.0
+fn from(it: ConstParamId) -> Self {
+it.0
 }
 }

@@ -562,14 +562,14 @@ pub enum TypeOwnerId {
 impl TypeOwnerId {
 fn as_generic_def_id(self) -> Option<GenericDefId> {
 Some(match self {
-TypeOwnerId::FunctionId(x) => GenericDefId::FunctionId(x),
-TypeOwnerId::ConstId(x) => GenericDefId::ConstId(x),
-TypeOwnerId::AdtId(x) => GenericDefId::AdtId(x),
-TypeOwnerId::TraitId(x) => GenericDefId::TraitId(x),
-TypeOwnerId::TraitAliasId(x) => GenericDefId::TraitAliasId(x),
-TypeOwnerId::TypeAliasId(x) => GenericDefId::TypeAliasId(x),
-TypeOwnerId::ImplId(x) => GenericDefId::ImplId(x),
-TypeOwnerId::EnumVariantId(x) => GenericDefId::EnumVariantId(x),
+TypeOwnerId::FunctionId(it) => GenericDefId::FunctionId(it),
+TypeOwnerId::ConstId(it) => GenericDefId::ConstId(it),
+TypeOwnerId::AdtId(it) => GenericDefId::AdtId(it),
+TypeOwnerId::TraitId(it) => GenericDefId::TraitId(it),
+TypeOwnerId::TraitAliasId(it) => GenericDefId::TraitAliasId(it),
+TypeOwnerId::TypeAliasId(it) => GenericDefId::TypeAliasId(it),
+TypeOwnerId::ImplId(it) => GenericDefId::ImplId(it),
+TypeOwnerId::EnumVariantId(it) => GenericDefId::EnumVariantId(it),
 TypeOwnerId::InTypeConstId(_) | TypeOwnerId::ModuleId(_) | TypeOwnerId::StaticId(_) => {
 return None
 }
@@ -592,15 +592,15 @@ impl_from!(
 for TypeOwnerId
 );

-// Every `DefWithBodyId` is a type owner, since bodies can contain type (e.g. `{ let x: Type = _; }`)
+// Every `DefWithBodyId` is a type owner, since bodies can contain type (e.g. `{ let it: Type = _; }`)
 impl From<DefWithBodyId> for TypeOwnerId {
 fn from(value: DefWithBodyId) -> Self {
 match value {
-DefWithBodyId::FunctionId(x) => x.into(),
-DefWithBodyId::StaticId(x) => x.into(),
-DefWithBodyId::ConstId(x) => x.into(),
-DefWithBodyId::InTypeConstId(x) => x.into(),
-DefWithBodyId::VariantId(x) => x.into(),
+DefWithBodyId::FunctionId(it) => it.into(),
+DefWithBodyId::StaticId(it) => it.into(),
+DefWithBodyId::ConstId(it) => it.into(),
+DefWithBodyId::InTypeConstId(it) => it.into(),
+DefWithBodyId::VariantId(it) => it.into(),
 }
 }
 }
@@ -608,14 +608,14 @@ impl From<DefWithBodyId> for TypeOwnerId {
 impl From<GenericDefId> for TypeOwnerId {
 fn from(value: GenericDefId) -> Self {
 match value {
-GenericDefId::FunctionId(x) => x.into(),
-GenericDefId::AdtId(x) => x.into(),
-GenericDefId::TraitId(x) => x.into(),
-GenericDefId::TraitAliasId(x) => x.into(),
-GenericDefId::TypeAliasId(x) => x.into(),
-GenericDefId::ImplId(x) => x.into(),
-GenericDefId::EnumVariantId(x) => x.into(),
-GenericDefId::ConstId(x) => x.into(),
+GenericDefId::FunctionId(it) => it.into(),
+GenericDefId::AdtId(it) => it.into(),
+GenericDefId::TraitId(it) => it.into(),
+GenericDefId::TraitAliasId(it) => it.into(),
+GenericDefId::TypeAliasId(it) => it.into(),
+GenericDefId::ImplId(it) => it.into(),
+GenericDefId::EnumVariantId(it) => it.into(),
+GenericDefId::ConstId(it) => it.into(),
 }
 }
 }
@@ -730,7 +730,7 @@ impl GeneralConstId {
 .const_data(const_id)
 .name
 .as_ref()
-.and_then(|x| x.as_str())
+.and_then(|it| it.as_str())
 .unwrap_or("_")
 .to_owned(),
 GeneralConstId::ConstBlockId(id) => format!("{{anonymous const {id:?}}}"),
@@ -972,17 +972,17 @@ impl HasModule for MacroId {
 impl HasModule for TypeOwnerId {
 fn module(&self, db: &dyn db::DefDatabase) -> ModuleId {
 match self {
-TypeOwnerId::FunctionId(x) => x.lookup(db).module(db),
-TypeOwnerId::StaticId(x) => x.lookup(db).module(db),
-TypeOwnerId::ConstId(x) => x.lookup(db).module(db),
-TypeOwnerId::InTypeConstId(x) => x.lookup(db).owner.module(db),
-TypeOwnerId::AdtId(x) => x.module(db),
-TypeOwnerId::TraitId(x) => x.lookup(db).container,
-TypeOwnerId::TraitAliasId(x) => x.lookup(db).container,
-TypeOwnerId::TypeAliasId(x) => x.lookup(db).module(db),
-TypeOwnerId::ImplId(x) => x.lookup(db).container,
-TypeOwnerId::EnumVariantId(x) => x.parent.lookup(db).container,
-TypeOwnerId::ModuleId(x) => *x,
+TypeOwnerId::FunctionId(it) => it.lookup(db).module(db),
+TypeOwnerId::StaticId(it) => it.lookup(db).module(db),
+TypeOwnerId::ConstId(it) => it.lookup(db).module(db),
+TypeOwnerId::InTypeConstId(it) => it.lookup(db).owner.module(db),
+TypeOwnerId::AdtId(it) => it.module(db),
+TypeOwnerId::TraitId(it) => it.lookup(db).container,
+TypeOwnerId::TraitAliasId(it) => it.lookup(db).container,
+TypeOwnerId::TypeAliasId(it) => it.lookup(db).module(db),
+TypeOwnerId::ImplId(it) => it.lookup(db).container,
+TypeOwnerId::EnumVariantId(it) => it.parent.lookup(db).container,
+TypeOwnerId::ModuleId(it) => *it,
 }
 }
 }
@@ -45,7 +45,7 @@ pub enum Path {
 /// Invariant: the same len as `self.mod_path.segments` or `None` if all segments are `None`.
 generic_args: Option<Box<[Option<Interned<GenericArgs>>]>>,
 },
-/// A link to a lang item. It is used in desugaring of things like `x?`. We can show these
+/// A link to a lang item. It is used in desugaring of things like `it?`. We can show these
 /// links via a normal path since they might be private and not accessible in the usage place.
 LangItem(LangItemTarget),
 }
@@ -12,8 +12,8 @@ use crate::{
 };

 pub(crate) fn print_path(db: &dyn ExpandDatabase, path: &Path, buf: &mut dyn Write) -> fmt::Result {
-if let Path::LangItem(x) = path {
-return write!(buf, "$lang_item::{x:?}");
+if let Path::LangItem(it) = path {
+return write!(buf, "$lang_item::{it:?}");
 }
 match path.type_anchor() {
 Some(anchor) => {
@@ -186,12 +186,12 @@ impl Resolver {
 Path::LangItem(l) => {
 return Some((
 match *l {
-LangItemTarget::Union(x) => TypeNs::AdtId(x.into()),
-LangItemTarget::TypeAlias(x) => TypeNs::TypeAliasId(x),
-LangItemTarget::Struct(x) => TypeNs::AdtId(x.into()),
-LangItemTarget::EnumVariant(x) => TypeNs::EnumVariantId(x),
-LangItemTarget::EnumId(x) => TypeNs::AdtId(x.into()),
-LangItemTarget::Trait(x) => TypeNs::TraitId(x),
+LangItemTarget::Union(it) => TypeNs::AdtId(it.into()),
+LangItemTarget::TypeAlias(it) => TypeNs::TypeAliasId(it),
+LangItemTarget::Struct(it) => TypeNs::AdtId(it.into()),
+LangItemTarget::EnumVariant(it) => TypeNs::EnumVariantId(it),
+LangItemTarget::EnumId(it) => TypeNs::AdtId(it.into()),
+LangItemTarget::Trait(it) => TypeNs::TraitId(it),
 LangItemTarget::Function(_)
 | LangItemTarget::ImplDef(_)
 | LangItemTarget::Static(_) => return None,
@@ -273,10 +273,10 @@ impl Resolver {
 Path::Normal { mod_path, .. } => mod_path,
 Path::LangItem(l) => {
 return Some(ResolveValueResult::ValueNs(match *l {
-LangItemTarget::Function(x) => ValueNs::FunctionId(x),
-LangItemTarget::Static(x) => ValueNs::StaticId(x),
-LangItemTarget::Struct(x) => ValueNs::StructId(x),
-LangItemTarget::EnumVariant(x) => ValueNs::EnumVariantId(x),
+LangItemTarget::Function(it) => ValueNs::FunctionId(it),
+LangItemTarget::Static(it) => ValueNs::StaticId(it),
+LangItemTarget::Struct(it) => ValueNs::StructId(it),
+LangItemTarget::EnumVariant(it) => ValueNs::EnumVariantId(it),
 LangItemTarget::Union(_)
 | LangItemTarget::ImplDef(_)
 | LangItemTarget::TypeAlias(_)
@@ -425,14 +425,14 @@ impl Resolver {
 /// The shadowing is accounted for: in
 ///
 /// ```
-/// let x = 92;
+/// let it = 92;
 /// {
-/// let x = 92;
+/// let it = 92;
 /// $0
 /// }
 /// ```
 ///
-/// there will be only one entry for `x` in the result.
+/// there will be only one entry for `it` in the result.
 ///
 /// The result is ordered *roughly* from the innermost scope to the
 /// outermost: when the name is introduced in two namespaces in two scopes,
@@ -1027,17 +1027,17 @@ impl HasResolver for ExternCrateId {
 impl HasResolver for TypeOwnerId {
 fn resolver(self, db: &dyn DefDatabase) -> Resolver {
 match self {
-TypeOwnerId::FunctionId(x) => x.resolver(db),
-TypeOwnerId::StaticId(x) => x.resolver(db),
-TypeOwnerId::ConstId(x) => x.resolver(db),
-TypeOwnerId::InTypeConstId(x) => x.lookup(db).owner.resolver(db),
-TypeOwnerId::AdtId(x) => x.resolver(db),
-TypeOwnerId::TraitId(x) => x.resolver(db),
-TypeOwnerId::TraitAliasId(x) => x.resolver(db),
-TypeOwnerId::TypeAliasId(x) => x.resolver(db),
-TypeOwnerId::ImplId(x) => x.resolver(db),
-TypeOwnerId::EnumVariantId(x) => x.resolver(db),
-TypeOwnerId::ModuleId(x) => x.resolver(db),
+TypeOwnerId::FunctionId(it) => it.resolver(db),
+TypeOwnerId::StaticId(it) => it.resolver(db),
+TypeOwnerId::ConstId(it) => it.resolver(db),
+TypeOwnerId::InTypeConstId(it) => it.lookup(db).owner.resolver(db),
+TypeOwnerId::AdtId(it) => it.resolver(db),
+TypeOwnerId::TraitId(it) => it.resolver(db),
+TypeOwnerId::TraitAliasId(it) => it.resolver(db),
+TypeOwnerId::TypeAliasId(it) => it.resolver(db),
+TypeOwnerId::ImplId(it) => it.resolver(db),
+TypeOwnerId::EnumVariantId(it) => it.resolver(db),
+TypeOwnerId::ModuleId(it) => it.resolver(db),
 }
 }
 }
@@ -72,12 +72,12 @@ enum VariantShape {
 }

 fn tuple_field_iterator(n: usize) -> impl Iterator<Item = tt::Ident> {
-(0..n).map(|x| Ident::new(format!("f{x}"), tt::TokenId::unspecified()))
+(0..n).map(|it| Ident::new(format!("f{it}"), tt::TokenId::unspecified()))
 }

 impl VariantShape {
 fn as_pattern(&self, path: tt::Subtree) -> tt::Subtree {
-self.as_pattern_map(path, |x| quote!(#x))
+self.as_pattern_map(path, |it| quote!(#it))
 }

 fn field_names(&self) -> Vec<tt::Ident> {
@@ -95,17 +95,17 @@ impl VariantShape {
 ) -> tt::Subtree {
 match self {
 VariantShape::Struct(fields) => {
-let fields = fields.iter().map(|x| {
-let mapped = field_map(x);
-quote! { #x : #mapped , }
+let fields = fields.iter().map(|it| {
+let mapped = field_map(it);
+quote! { #it : #mapped , }
 });
 quote! {
 #path { ##fields }
 }
 }
 &VariantShape::Tuple(n) => {
-let fields = tuple_field_iterator(n).map(|x| {
-let mapped = field_map(&x);
+let fields = tuple_field_iterator(n).map(|it| {
+let mapped = field_map(&it);
 quote! {
 #mapped ,
 }
@@ -121,13 +121,13 @@ impl VariantShape {
 fn from(value: Option<FieldList>, token_map: &TokenMap) -> Result<Self, ExpandError> {
 let r = match value {
 None => VariantShape::Unit,
-Some(FieldList::RecordFieldList(x)) => VariantShape::Struct(
-x.fields()
-.map(|x| x.name())
-.map(|x| name_to_token(token_map, x))
+Some(FieldList::RecordFieldList(it)) => VariantShape::Struct(
+it.fields()
+.map(|it| it.name())
+.map(|it| name_to_token(token_map, it))
 .collect::<Result<_, _>>()?,
 ),
-Some(FieldList::TupleFieldList(x)) => VariantShape::Tuple(x.fields().count()),
+Some(FieldList::TupleFieldList(it)) => VariantShape::Tuple(it.fields().count()),
 };
 Ok(r)
 }
@@ -141,7 +141,7 @@ enum AdtShape {

 impl AdtShape {
 fn as_pattern(&self, name: &tt::Ident) -> Vec<tt::Subtree> {
-self.as_pattern_map(name, |x| quote!(#x))
+self.as_pattern_map(name, |it| quote!(#it))
 }

 fn field_names(&self) -> Vec<Vec<tt::Ident>> {
@@ -214,8 +214,8 @@ fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, ExpandError> {
 let default_variant = it
 .variant_list()
 .into_iter()
-.flat_map(|x| x.variants())
-.position(|x| x.attrs().any(|x| x.simple_name() == Some("default".into())));
+.flat_map(|it| it.variants())
+.position(|it| it.attrs().any(|it| it.simple_name() == Some("default".into())));
 (
 it.name(),
 it.generic_param_list(),
@@ -224,11 +224,11 @@ fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, ExpandError> {
 variants: it
 .variant_list()
 .into_iter()
-.flat_map(|x| x.variants())
-.map(|x| {
+.flat_map(|it| it.variants())
+.map(|it| {
 Ok((
-name_to_token(&token_map, x.name())?,
-VariantShape::from(x.field_list(), &token_map)?,
+name_to_token(&token_map, it.name())?,
+VariantShape::from(it.field_list(), &token_map)?,
 ))
 })
 .collect::<Result<_, ExpandError>>()?,
@@ -246,16 +246,16 @@ fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, ExpandError> {
 let name = {
 let this = param.name();
 match this {
-Some(x) => {
-param_type_set.insert(x.as_name());
-mbe::syntax_node_to_token_tree(x.syntax()).0
+Some(it) => {
+param_type_set.insert(it.as_name());
+mbe::syntax_node_to_token_tree(it.syntax()).0
 }
 None => tt::Subtree::empty(),
 }
 };
 let bounds = match &param {
-ast::TypeOrConstParam::Type(x) => {
-x.type_bound_list().map(|x| mbe::syntax_node_to_token_tree(x.syntax()).0)
+ast::TypeOrConstParam::Type(it) => {
+it.type_bound_list().map(|it| mbe::syntax_node_to_token_tree(it.syntax()).0)
 }
 ast::TypeOrConstParam::Const(_) => None,
 };
@@ -296,7 +296,7 @@ fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, ExpandError> {
 let name = p.path()?.qualifier()?.as_single_name_ref()?.as_name();
 param_type_set.contains(&name).then_some(p)
 })
-.map(|x| mbe::syntax_node_to_token_tree(x.syntax()).0)
+.map(|it| mbe::syntax_node_to_token_tree(it.syntax()).0)
 .collect();
 let name_token = name_to_token(&token_map, name)?;
 Ok(BasicAdtInfo { name: name_token, shape, param_types, associated_types })
@@ -373,10 +373,10 @@ fn expand_simple_derive(
 })
 .unzip();

-where_block.extend(info.associated_types.iter().map(|x| {
-let x = x.clone();
+where_block.extend(info.associated_types.iter().map(|it| {
+let it = it.clone();
 let bound = trait_path.clone();
-quote! { #x : #bound , }
+quote! { #it : #bound , }
 }));

 let name = info.name;
@@ -444,7 +444,7 @@ fn clone_expand(
 }
 let name = &adt.name;
 let patterns = adt.shape.as_pattern(name);
-let exprs = adt.shape.as_pattern_map(name, |x| quote! { #x .clone() });
+let exprs = adt.shape.as_pattern_map(name, |it| quote! { #it .clone() });
 let arms = patterns.into_iter().zip(exprs.into_iter()).map(|(pat, expr)| {
 let fat_arrow = fat_arrow();
 quote! {
@@ -524,10 +524,10 @@ fn debug_expand(
 expand_simple_derive(tt, quote! { #krate::fmt::Debug }, |adt| {
 let for_variant = |name: String, v: &VariantShape| match v {
 VariantShape::Struct(fields) => {
-let for_fields = fields.iter().map(|x| {
-let x_string = x.to_string();
+let for_fields = fields.iter().map(|it| {
+let x_string = it.to_string();
 quote! {
-.field(#x_string, & #x)
+.field(#x_string, & #it)
 }
 });
 quote! {
@@ -535,9 +535,9 @@ fn debug_expand(
 }
 }
 VariantShape::Tuple(n) => {
-let for_fields = tuple_field_iterator(*n).map(|x| {
+let for_fields = tuple_field_iterator(*n).map(|it| {
 quote! {
-.field( & #x)
+.field( & #it)
 }
 });
 quote! {
@@ -621,7 +621,7 @@ fn hash_expand(
 let arms = adt.shape.as_pattern(&adt.name).into_iter().zip(adt.shape.field_names()).map(
 |(pat, names)| {
 let expr = {
-let it = names.iter().map(|x| quote! { #x . hash(ra_expand_state); });
+let it = names.iter().map(|it| quote! { #it . hash(ra_expand_state); });
 quote! { {
 ##it
 } }
@@ -674,9 +674,9 @@ fn partial_eq_expand(
 quote!(true)
 }
 [first, rest @ ..] => {
-let rest = rest.iter().map(|x| {
-let t1 = Ident::new(format!("{}_self", x.text), x.span);
-let t2 = Ident::new(format!("{}_other", x.text), x.span);
+let rest = rest.iter().map(|it| {
+let t1 = Ident::new(format!("{}_self", it.text), it.span);
+let t2 = Ident::new(format!("{}_other", it.text), it.span);
 let and_and = and_and();
 quote!(#and_and #t1 .eq( #t2 ))
 });
@@ -708,12 +708,12 @@ fn self_and_other_patterns(
 adt: &BasicAdtInfo,
 name: &tt::Ident,
 ) -> (Vec<tt::Subtree>, Vec<tt::Subtree>) {
-let self_patterns = adt.shape.as_pattern_map(name, |x| {
-let t = Ident::new(format!("{}_self", x.text), x.span);
+let self_patterns = adt.shape.as_pattern_map(name, |it| {
+let t = Ident::new(format!("{}_self", it.text), it.span);
 quote!(#t)
 });
-let other_patterns = adt.shape.as_pattern_map(name, |x| {
-let t = Ident::new(format!("{}_other", x.text), x.span);
+let other_patterns = adt.shape.as_pattern_map(name, |it| {
+let t = Ident::new(format!("{}_other", it.text), it.span);
 quote!(#t)
 });
 (self_patterns, other_patterns)
@@ -339,7 +339,7 @@ fn format_args_expand_general(
 parts.push(mem::take(&mut last_part));
 let arg_tree = if argument.is_empty() {
 match args.next() {
-Some(x) => x,
+Some(it) => it,
 None => {
 err = Some(mbe::ExpandError::NoMatchingRule.into());
 tt::Subtree::empty()
@@ -378,11 +378,11 @@ fn format_args_expand_general(
 if !last_part.is_empty() {
 parts.push(last_part);
 }
-let part_tts = parts.into_iter().map(|x| {
+let part_tts = parts.into_iter().map(|it| {
 let text = if let Some(raw) = &raw_sharps {
-format!("r{raw}\"{}\"{raw}", x).into()
+format!("r{raw}\"{}\"{raw}", it).into()
 } else {
-format!("\"{}\"", x).into()
+format!("\"{}\"", it).into()
 };
 let l = tt::Literal { span: tt::TokenId::unspecified(), text };
 quote!(#l ,)
@@ -574,7 +574,7 @@ fn concat_bytes_expand(
 syntax::SyntaxKind::BYTE => bytes.push(token.text().to_string()),
 syntax::SyntaxKind::BYTE_STRING => {
 let components = unquote_byte_string(lit).unwrap_or_default();
-components.into_iter().for_each(|x| bytes.push(x.to_string()));
+components.into_iter().for_each(|it| bytes.push(it.to_string()));
 }
 _ => {
 err.get_or_insert(mbe::ExpandError::UnexpectedToken.into());
@@ -472,13 +472,13 @@ fn foo () {match __ra_fixup {}}
 check(
 r#"
 fn foo() {
-match x {
+match it {

 }
 }
 "#,
 expect![[r#"
-fn foo () {match x {}}
+fn foo () {match it {}}
 "#]],
 )
 }
@@ -547,11 +547,11 @@ fn foo () {a . __ra_fixup ; bar () ;}
 check(
 r#"
 fn foo() {
-let x = a
+let it = a
 }
 "#,
 expect![[r#"
-fn foo () {let x = a ;}
+fn foo () {let it = a ;}
 "#]],
 )
 }
@@ -561,11 +561,11 @@ fn foo () {let x = a ;}
 check(
 r#"
 fn foo() {
-let x = a.
+let it = a.
 }
 "#,
 expect![[r#"
-fn foo () {let x = a . __ra_fixup ;}
+fn foo () {let it = a . __ra_fixup ;}
 "#]],
 )
 }
@@ -343,7 +343,7 @@ impl TyExt for Ty {

 fn is_copy(self, db: &dyn HirDatabase, owner: DefWithBodyId) -> bool {
 let crate_id = owner.module(db.upcast()).krate();
-let Some(copy_trait) = db.lang_item(crate_id, LangItem::Copy).and_then(|x| x.as_trait())
+let Some(copy_trait) = db.lang_item(crate_id, LangItem::Copy).and_then(|it| it.as_trait())
 else {
 return false;
 };
@@ -88,7 +88,7 @@ pub(crate) fn path_to_const(
 ConstValue::Placeholder(to_placeholder_idx(db, p.into()))
 }
 ParamLoweringMode::Variable => match args.param_idx(p.into()) {
-Some(x) => ConstValue::BoundVar(BoundVar::new(debruijn, x)),
+Some(it) => ConstValue::BoundVar(BoundVar::new(debruijn, it)),
 None => {
 never!(
 "Generic list doesn't contain this param: {:?}, {:?}, {:?}",
@@ -139,11 +139,11 @@ pub fn intern_const_ref(
 let bytes = match value {
 LiteralConstRef::Int(i) => {
 // FIXME: We should handle failure of layout better.
-let size = layout.map(|x| x.size.bytes_usize()).unwrap_or(16);
+let size = layout.map(|it| it.size.bytes_usize()).unwrap_or(16);
 ConstScalar::Bytes(i.to_le_bytes()[0..size].to_vec(), MemoryMap::default())
 }
 LiteralConstRef::UInt(i) => {
-let size = layout.map(|x| x.size.bytes_usize()).unwrap_or(16);
+let size = layout.map(|it| it.size.bytes_usize()).unwrap_or(16);
 ConstScalar::Bytes(i.to_le_bytes()[0..size].to_vec(), MemoryMap::default())
 }
 LiteralConstRef::Bool(b) => ConstScalar::Bytes(vec![*b as u8], MemoryMap::default()),
@@ -171,7 +171,7 @@ pub fn try_const_usize(db: &dyn HirDatabase, c: &Const) -> Option<u128> {
 chalk_ir::ConstValue::InferenceVar(_) => None,
 chalk_ir::ConstValue::Placeholder(_) => None,
 chalk_ir::ConstValue::Concrete(c) => match &c.interned {
-ConstScalar::Bytes(x, _) => Some(u128::from_le_bytes(pad16(&x, false))),
+ConstScalar::Bytes(it, _) => Some(u128::from_le_bytes(pad16(&it, false))),
 ConstScalar::UnevaluatedConst(c, subst) => {
 let ec = db.const_eval(*c, subst.clone()).ok()?;
 try_const_usize(db, &ec)
@@ -37,8 +37,8 @@ fn size_of_val() {
 }

 const GOAL: usize = {
-let x: &[i32] = &[1, 2, 3];
-size_of_val(x)
+let it: &[i32] = &[1, 2, 3];
+size_of_val(it)
 };
 "#,
 12,
@@ -481,28 +481,28 @@ fn render_const_scalar(
 TyKind::Scalar(s) => match s {
 Scalar::Bool => write!(f, "{}", if b[0] == 0 { false } else { true }),
 Scalar::Char => {
-let x = u128::from_le_bytes(pad16(b, false)) as u32;
-let Ok(c) = char::try_from(x) else {
+let it = u128::from_le_bytes(pad16(b, false)) as u32;
+let Ok(c) = char::try_from(it) else {
 return f.write_str("<unicode-error>");
 };
 write!(f, "{c:?}")
 }
 Scalar::Int(_) => {
-let x = i128::from_le_bytes(pad16(b, true));
-write!(f, "{x}")
+let it = i128::from_le_bytes(pad16(b, true));
+write!(f, "{it}")
 }
 Scalar::Uint(_) => {
-let x = u128::from_le_bytes(pad16(b, false));
-write!(f, "{x}")
+let it = u128::from_le_bytes(pad16(b, false));
+write!(f, "{it}")
 }
 Scalar::Float(fl) => match fl {
 chalk_ir::FloatTy::F32 => {
-let x = f32::from_le_bytes(b.try_into().unwrap());
-write!(f, "{x:?}")
+let it = f32::from_le_bytes(b.try_into().unwrap());
+write!(f, "{it:?}")
 }
 chalk_ir::FloatTy::F64 => {
-let x = f64::from_le_bytes(b.try_into().unwrap());
-write!(f, "{x:?}")
+let it = f64::from_le_bytes(b.try_into().unwrap());
+write!(f, "{it:?}")
 }
 },
 },
@@ -659,8 +659,8 @@ fn render_const_scalar(
 }
 TyKind::FnDef(..) => ty.hir_fmt(f),
 TyKind::Function(_) | TyKind::Raw(_, _) => {
-let x = u128::from_le_bytes(pad16(b, false));
-write!(f, "{:#X} as ", x)?;
+let it = u128::from_le_bytes(pad16(b, false));
+write!(f, "{:#X} as ", it)?;
 ty.hir_fmt(f)
 }
 TyKind::Array(ty, len) => {
@@ -736,7 +736,7 @@ fn render_variant_after_name(
 }
 write!(f, " }}")?;
 } else {
-let mut it = it.map(|x| x.0);
+let mut it = it.map(|it| it.0);
 write!(f, "(")?;
 if let Some(id) = it.next() {
 render_field(f, id)?;
@@ -1278,19 +1278,20 @@ fn hir_fmt_generics(
 i: usize,
 parameters: &Substitution,
 ) -> bool {
-if parameter.ty(Interner).map(|x| x.kind(Interner)) == Some(&TyKind::Error)
+if parameter.ty(Interner).map(|it| it.kind(Interner))
+== Some(&TyKind::Error)
 {
 return true;
 }
 if let Some(ConstValue::Concrete(c)) =
-parameter.constant(Interner).map(|x| &x.data(Interner).value)
+parameter.constant(Interner).map(|it| &it.data(Interner).value)
 {
 if c.interned == ConstScalar::Unknown {
 return true;
 }
 }
 let default_parameter = match default_parameters.get(i) {
-Some(x) => x,
+Some(it) => it,
 None => return true,
 };
 let actual_default =
@@ -290,7 +290,7 @@ impl Default for InternedStandardTypes {
 /// ```
 ///
 /// Note that for a struct, the 'deep' unsizing of the struct is not recorded.
-/// E.g., `struct Foo<T> { x: T }` we can coerce &Foo<[i32; 4]> to &Foo<[i32]>
+/// E.g., `struct Foo<T> { it: T }` we can coerce &Foo<[i32; 4]> to &Foo<[i32]>
 /// The autoderef and -ref are the same as in the above example, but the type
 /// stored in `unsize` is `Foo<[i32]>`, we don't store any further detail about
 /// the underlying conversions from `[i32; 4]` to `[i32]`.
@@ -1172,7 +1172,7 @@ impl<'a> InferenceContext<'a> {
 unresolved: Option<usize>,
 path: &ModPath,
 ) -> (Ty, Option<VariantId>) {
-let remaining = unresolved.map(|x| path.segments()[x..].len()).filter(|x| x > &0);
+let remaining = unresolved.map(|it| path.segments()[it..].len()).filter(|it| it > &0);
 match remaining {
 None => {
 let variant = ty.as_adt().and_then(|(adt_id, _)| match adt_id {
@@ -1324,7 +1324,7 @@ impl Expectation {
 /// The primary use case is where the expected type is a fat pointer,
 /// like `&[isize]`. For example, consider the following statement:
 ///
-/// let x: &[isize] = &[1, 2, 3];
+/// let it: &[isize] = &[1, 2, 3];
 ///
 /// In this case, the expected type for the `&[1, 2, 3]` expression is
 /// `&[isize]`. If however we were to say that `[1, 2, 3]` has the
@@ -139,7 +139,7 @@ impl HirPlace {
 ) -> CaptureKind {
 match current_capture {
 CaptureKind::ByRef(BorrowKind::Mut { .. }) => {
-if self.projections[len..].iter().any(|x| *x == ProjectionElem::Deref) {
+if self.projections[len..].iter().any(|it| *it == ProjectionElem::Deref) {
 current_capture = CaptureKind::ByRef(BorrowKind::Unique);
 }
 }
@@ -199,7 +199,7 @@ impl CapturedItem {
 .to_string(),
 VariantData::Tuple(fields) => fields
 .iter()
-.position(|x| x.0 == f.local_id)
+.position(|it| it.0 == f.local_id)
 .unwrap_or_default()
 .to_string(),
 VariantData::Unit => "[missing field]".to_string(),
@@ -439,10 +439,10 @@ impl InferenceContext<'_> {
 }

 fn walk_expr(&mut self, tgt_expr: ExprId) {
-if let Some(x) = self.result.expr_adjustments.get_mut(&tgt_expr) {
+if let Some(it) = self.result.expr_adjustments.get_mut(&tgt_expr) {
 // FIXME: this take is completely unneeded, and just is here to make borrow checker
 // happy. Remove it if you can.
-let x_taken = mem::take(x);
+let x_taken = mem::take(it);
 self.walk_expr_with_adjust(tgt_expr, &x_taken);
 *self.result.expr_adjustments.get_mut(&tgt_expr).unwrap() = x_taken;
 } else {
@@ -536,7 +536,7 @@ impl InferenceContext<'_> {
 if let &Some(expr) = spread {
 self.consume_expr(expr);
 }
-self.consume_exprs(fields.iter().map(|x| x.expr));
+self.consume_exprs(fields.iter().map(|it| it.expr));
 }
 Expr::Field { expr, name: _ } => self.select_from_expr(*expr),
 Expr::UnaryOp { expr, op: UnaryOp::Deref } => {
@@ -548,7 +548,7 @@ impl InferenceContext<'_> {
 } else if let Some((f, _)) = self.result.method_resolution(tgt_expr) {
 let mutability = 'b: {
 if let Some(deref_trait) =
-self.resolve_lang_item(LangItem::DerefMut).and_then(|x| x.as_trait())
+self.resolve_lang_item(LangItem::DerefMut).and_then(|it| it.as_trait())
 {
 if let Some(deref_fn) =
 self.db.trait_data(deref_trait).method_by_name(&name![deref_mut])
@@ -615,8 +615,8 @@ impl InferenceContext<'_> {
 "We sort closures, so we should always have data for inner closures",
 );
 let mut cc = mem::take(&mut self.current_captures);
-cc.extend(captures.iter().filter(|x| self.is_upvar(&x.place)).map(|x| {
-CapturedItemWithoutTy { place: x.place.clone(), kind: x.kind, span: x.span }
+cc.extend(captures.iter().filter(|it| self.is_upvar(&it.place)).map(|it| {
+CapturedItemWithoutTy { place: it.place.clone(), kind: it.kind, span: it.span }
 }));
 self.current_captures = cc;
 }
@@ -694,7 +694,7 @@ impl InferenceContext<'_> {
 },
 },
 }
-if self.result.pat_adjustments.get(&p).map_or(false, |x| !x.is_empty()) {
+if self.result.pat_adjustments.get(&p).map_or(false, |it| !it.is_empty()) {
 for_mut = BorrowKind::Unique;
 }
 self.body.walk_pats_shallow(p, |p| self.walk_pat_inner(p, update_result, for_mut));
@@ -706,9 +706,9 @@ impl InferenceContext<'_> {

 fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty {
 let mut ty = None;
-if let Some(x) = self.result.expr_adjustments.get(&e) {
-if let Some(x) = x.last() {
-ty = Some(x.target.clone());
+if let Some(it) = self.result.expr_adjustments.get(&e) {
+if let Some(it) = it.last() {
+ty = Some(it.target.clone());
 }
 }
 ty.unwrap_or_else(|| self.expr_ty(e))
@@ -727,7 +727,7 @@ impl InferenceContext<'_> {
 // FIXME: We handle closure as a special case, since chalk consider every closure as copy. We
 // should probably let chalk know which closures are copy, but I don't know how doing it
 // without creating query cycles.
-return self.result.closure_info.get(id).map(|x| x.1 == FnTrait::Fn).unwrap_or(true);
+return self.result.closure_info.get(id).map(|it| it.1 == FnTrait::Fn).unwrap_or(true);
 }
 self.table.resolve_completely(ty).is_copy(self.db, self.owner)
 }
@@ -748,7 +748,7 @@ impl InferenceContext<'_> {
 }

 fn minimize_captures(&mut self) {
-self.current_captures.sort_by_key(|x| x.place.projections.len());
+self.current_captures.sort_by_key(|it| it.place.projections.len());
 let mut hash_map = HashMap::<HirPlace, usize>::new();
 let result = mem::take(&mut self.current_captures);
 for item in result {
@@ -759,7 +759,7 @@ impl InferenceContext<'_> {
 break Some(*k);
 }
 match it.next() {
-Some(x) => lookup_place.projections.push(x.clone()),
+Some(it) => lookup_place.projections.push(it.clone()),
 None => break None,
 }
 };
@@ -780,7 +780,7 @@ impl InferenceContext<'_> {
 }

 fn consume_with_pat(&mut self, mut place: HirPlace, pat: PatId) {
-let cnt = self.result.pat_adjustments.get(&pat).map(|x| x.len()).unwrap_or_default();
+let cnt = self.result.pat_adjustments.get(&pat).map(|it| it.len()).unwrap_or_default();
 place.projections = place
 .projections
 .iter()
@@ -894,10 +894,10 @@ impl InferenceContext<'_> {

 fn closure_kind(&self) -> FnTrait {
 let mut r = FnTrait::Fn;
-for x in &self.current_captures {
+for it in &self.current_captures {
 r = cmp::min(
 r,
-match &x.kind {
+match &it.kind {
 CaptureKind::ByRef(BorrowKind::Unique | BorrowKind::Mut { .. }) => {
 FnTrait::FnMut
 }
@@ -933,7 +933,7 @@ impl InferenceContext<'_> {
 }
 self.minimize_captures();
 let result = mem::take(&mut self.current_captures);
-let captures = result.into_iter().map(|x| x.with_ty(self)).collect::<Vec<_>>();
+let captures = result.into_iter().map(|it| it.with_ty(self)).collect::<Vec<_>>();
 self.result.closure_info.insert(closure, (captures, closure_kind));
 closure_kind
 }
@@ -973,20 +973,20 @@ impl InferenceContext<'_> {
 fn sort_closures(&mut self) -> Vec<(ClosureId, Vec<(Ty, Ty, Vec<Ty>, ExprId)>)> {
 let mut deferred_closures = mem::take(&mut self.deferred_closures);
 let mut dependents_count: FxHashMap<ClosureId, usize> =
-deferred_closures.keys().map(|x| (*x, 0)).collect();
+deferred_closures.keys().map(|it| (*it, 0)).collect();
 for (_, deps) in &self.closure_dependencies {
 for dep in deps {
 *dependents_count.entry(*dep).or_default() += 1;
 }
 }
 let mut queue: Vec<_> =
-deferred_closures.keys().copied().filter(|x| dependents_count[x] == 0).collect();
+deferred_closures.keys().copied().filter(|it| dependents_count[it] == 0).collect();
 let mut result = vec![];
-while let Some(x) = queue.pop() {
-if let Some(d) = deferred_closures.remove(&x) {
-result.push((x, d));
+while let Some(it) = queue.pop() {
+if let Some(d) = deferred_closures.remove(&it) {
+result.push((it, d));
 }
-for dep in self.closure_dependencies.get(&x).into_iter().flat_map(|x| x.iter()) {
+for dep in self.closure_dependencies.get(&it).into_iter().flat_map(|it| it.iter()) {
 let cnt = dependents_count.get_mut(dep).unwrap();
 *cnt -= 1;
 if *cnt == 0 {
@@ -928,7 +928,7 @@ impl InferenceContext<'_> {
 if let TyKind::Ref(Mutability::Mut, _, inner) = derefed_callee.kind(Interner) {
 if adjustments
 .last()
-.map(|x| matches!(x.kind, Adjust::Borrow(_)))
+.map(|it| matches!(it.kind, Adjust::Borrow(_)))
 .unwrap_or(true)
 {
 // prefer reborrow to move
@@ -73,12 +73,12 @@ impl InferenceContext<'_> {
 self.infer_mut_expr(c, Mutability::Not);
 self.infer_mut_expr(body, Mutability::Not);
 }
-Expr::MethodCall { receiver: x, method_name: _, args, generic_args: _ }
-| Expr::Call { callee: x, args, is_assignee_expr: _ } => {
-self.infer_mut_not_expr_iter(args.iter().copied().chain(Some(*x)));
+Expr::MethodCall { receiver: it, method_name: _, args, generic_args: _ }
+| Expr::Call { callee: it, args, is_assignee_expr: _ } => {
+self.infer_mut_not_expr_iter(args.iter().copied().chain(Some(*it)));
 }
 Expr::Match { expr, arms } => {
-let m = self.pat_iter_bound_mutability(arms.iter().map(|x| x.pat));
+let m = self.pat_iter_bound_mutability(arms.iter().map(|it| it.pat));
 self.infer_mut_expr(*expr, m);
 for arm in arms.iter() {
 self.infer_mut_expr(arm.expr, Mutability::Not);
@@ -96,7 +96,7 @@ impl InferenceContext<'_> {
 }
 }
 Expr::RecordLit { path: _, fields, spread, ellipsis: _, is_assignee_expr: _ } => {
-self.infer_mut_not_expr_iter(fields.iter().map(|x| x.expr).chain(*spread))
+self.infer_mut_not_expr_iter(fields.iter().map(|it| it.expr).chain(*spread))
 }
 &Expr::Index { base, index } => {
 if mutability == Mutability::Mut {
@@ -204,8 +204,8 @@ impl InferenceContext<'_> {
 }

 /// Checks if the pat contains a `ref mut` binding. Such paths makes the context of bounded expressions
-/// mutable. For example in `let (ref mut x0, ref x1) = *x;` we need to use `DerefMut` for `*x` but in
-/// `let (ref x0, ref x1) = *x;` we should use `Deref`.
+/// mutable. For example in `let (ref mut x0, ref x1) = *it;` we need to use `DerefMut` for `*it` but in
+/// `let (ref x0, ref x1) = *it;` we should use `Deref`.
 fn pat_bound_mutability(&self, pat: PatId) -> Mutability {
 let mut r = Mutability::Not;
 self.body.walk_bindings_in_pat(pat, |b| {
@@ -306,7 +306,7 @@ impl InferenceContext<'_> {
 self.result
 .pat_adjustments
 .get(&pat)
-.and_then(|x| x.first())
+.and_then(|it| it.first())
 .unwrap_or(&self.result.type_of_pat[pat])
 .clone()
 }
@@ -91,7 +91,7 @@ pub(crate) fn unify(
 let mut table = InferenceTable::new(db, env);
 let vars = Substitution::from_iter(
 Interner,
-tys.binders.iter(Interner).map(|x| match &x.kind {
+tys.binders.iter(Interner).map(|it| match &it.kind {
 chalk_ir::VariableKind::Ty(_) => {
 GenericArgData::Ty(table.new_type_var()).intern(Interner)
 }
@@ -686,8 +686,8 @@ impl<'a> InferenceTable<'a> {

 let mut arg_tys = vec![];
 let arg_ty = TyBuilder::tuple(num_args)
-.fill(|x| {
-let arg = match x {
+.fill(|it| {
+let arg = match it {
 ParamKind::Type => self.new_type_var(),
 ParamKind::Const(ty) => {
 never!("Tuple with const parameter");
@@ -753,7 +753,7 @@ impl<'a> InferenceTable<'a> {
 {
 fold_tys_and_consts(
 ty,
-|x, _| match x {
+|it, _| match it {
 Either::Left(ty) => Either::Left(self.insert_type_vars_shallow(ty)),
 Either::Right(c) => Either::Right(self.insert_const_vars_shallow(c)),
 },
@@ -24,8 +24,8 @@ pub use self::{
 };

 macro_rules! user_error {
-($x: expr) => {
-return Err(LayoutError::UserError(format!($x)))
+($it: expr) => {
+return Err(LayoutError::UserError(format!($it)))
 };
 }

@@ -90,13 +90,13 @@ fn layout_of_simd_ty(
 // Supported SIMD vectors are homogeneous ADTs with at least one field:
 //
 // * #[repr(simd)] struct S(T, T, T, T);
-// * #[repr(simd)] struct S { x: T, y: T, z: T, w: T }
+// * #[repr(simd)] struct S { it: T, y: T, z: T, w: T }
 // * #[repr(simd)] struct S([T; 4])
 //
 // where T is a primitive scalar (integer/float/pointer).

 let f0_ty = match fields.iter().next() {
-Some(x) => x.1.clone().substitute(Interner, subst),
+Some(it) => it.1.clone().substitute(Interner, subst),
 None => {
 user_error!("simd type with zero fields");
 }
@@ -230,7 +230,7 @@ pub fn layout_of_ty_query(
 .iter(Interner)
 .map(|k| db.layout_of_ty(k.assert_ty_ref(Interner).clone(), krate))
 .collect::<Result<Vec<_>, _>>()?;
-let fields = fields.iter().map(|x| &**x).collect::<Vec<_>>();
+let fields = fields.iter().map(|it| &**it).collect::<Vec<_>>();
 let fields = fields.iter().collect::<Vec<_>>();
 cx.univariant(dl, &fields, &ReprOptions::default(), kind).ok_or(LayoutError::Unknown)?
 }
@@ -348,14 +348,14 @@ pub fn layout_of_ty_query(
 let (captures, _) = infer.closure_info(c);
 let fields = captures
 .iter()
-.map(|x| {
+.map(|it| {
 db.layout_of_ty(
-x.ty.clone().substitute(Interner, ClosureSubst(subst).parent_subst()),
+it.ty.clone().substitute(Interner, ClosureSubst(subst).parent_subst()),
 krate,
 )
 })
 .collect::<Result<Vec<_>, _>>()?;
-let fields = fields.iter().map(|x| &**x).collect::<Vec<_>>();
+let fields = fields.iter().map(|it| &**it).collect::<Vec<_>>();
 let fields = fields.iter().collect::<Vec<_>>();
 cx.univariant(dl, &fields, &ReprOptions::default(), StructKind::AlwaysSized)
 .ok_or(LayoutError::Unknown)?
@@ -72,9 +72,9 @@ pub fn layout_of_adt_query(
 };
 let variants = variants
 .iter()
-.map(|x| x.iter().map(|x| &**x).collect::<Vec<_>>())
+.map(|it| it.iter().map(|it| &**it).collect::<Vec<_>>())
 .collect::<SmallVec<[_; 1]>>();
-let variants = variants.iter().map(|x| x.iter().collect()).collect();
+let variants = variants.iter().map(|it| it.iter().collect()).collect();
 let result = if matches!(def, AdtId::UnionId(..)) {
 cx.layout_of_union(&repr, &variants).ok_or(LayoutError::Unknown)?
 } else {
@@ -105,7 +105,7 @@ pub fn layout_of_adt_query(
 && variants
 .iter()
 .next()
-.and_then(|x| x.last().map(|x| !x.is_unsized()))
+.and_then(|it| it.last().map(|it| !it.is_unsized()))
 .unwrap_or(true),
 )
 .ok_or(LayoutError::SizeOverflow)?
@@ -118,9 +118,9 @@ fn layout_scalar_valid_range(db: &dyn HirDatabase, def: AdtId) -> (Bound<u128>,
 let get = |name| {
 let attr = attrs.by_key(name).tt_values();
 for tree in attr {
-if let Some(x) = tree.token_trees.first() {
-if let Ok(x) = x.to_string().parse() {
-return Bound::Included(x);
+if let Some(it) = tree.token_trees.first() {
+if let Ok(it) = it.to_string().parse() {
+return Bound::Included(it);
 }
 }
 }
@@ -559,10 +559,10 @@ impl ReceiverAdjustments {
 adjust.push(a);
 }
 if self.unsize_array {
-ty = 'x: {
+ty = 'it: {
 if let TyKind::Ref(m, l, inner) = ty.kind(Interner) {
 if let TyKind::Array(inner, _) = inner.kind(Interner) {
-break 'x TyKind::Ref(
+break 'it TyKind::Ref(
 m.clone(),
 l.clone(),
 TyKind::Slice(inner.clone()).intern(Interner),
@@ -666,7 +666,7 @@ pub fn is_dyn_method(
 let self_ty = trait_ref.self_type_parameter(Interner);
 if let TyKind::Dyn(d) = self_ty.kind(Interner) {
 let is_my_trait_in_bounds =
-d.bounds.skip_binders().as_slice(Interner).iter().any(|x| match x.skip_binders() {
+d.bounds.skip_binders().as_slice(Interner).iter().any(|it| match it.skip_binders() {
 // rustc doesn't accept `impl Foo<2> for dyn Foo<5>`, so if the trait id is equal, no matter
 // what the generics are, we are sure that the method is come from the vtable.
 WhereClause::Implemented(tr) => tr.trait_id == trait_ref.trait_id,
@@ -731,7 +731,7 @@ fn lookup_impl_assoc_item_for_trait_ref(
 let impls = db.trait_impls_in_deps(env.krate);
 let self_impls = match self_ty.kind(Interner) {
 TyKind::Adt(id, _) => {
-id.0.module(db.upcast()).containing_block().map(|x| db.trait_impls_in_block(x))
+id.0.module(db.upcast()).containing_block().map(|it| db.trait_impls_in_block(it))
 }
 _ => None,
 };
@@ -895,8 +895,8 @@ pub fn iterate_method_candidates_dyn(
 // (just as rustc does an autoderef and then autoref again).

 // We have to be careful about the order we're looking at candidates
-// in here. Consider the case where we're resolving `x.clone()`
-// where `x: &Vec<_>`. This resolves to the clone method with self
+// in here. Consider the case where we're resolving `it.clone()`
+// where `it: &Vec<_>`. This resolves to the clone method with self
 // type `Vec<_>`, *not* `&_`. I.e. we need to consider methods where
 // the receiver type exactly matches before cases where we have to
 // do autoref. But in the autoderef steps, the `&_` self type comes
@@ -1480,8 +1480,8 @@ fn generic_implements_goal(
 .push(self_ty.value.clone())
 .fill_with_bound_vars(DebruijnIndex::INNERMOST, kinds.len())
 .build();
-kinds.extend(trait_ref.substitution.iter(Interner).skip(1).map(|x| {
-let vk = match x.data(Interner) {
+kinds.extend(trait_ref.substitution.iter(Interner).skip(1).map(|it| {
+let vk = match it.data(Interner) {
 chalk_ir::GenericArgData::Ty(_) => {
 chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General)
 }
@@ -52,7 +52,7 @@ fn all_mir_bodies(
 let closures = body.closures.clone();
 Box::new(
 iter::once(Ok(body))
-.chain(closures.into_iter().flat_map(|x| for_closure(db, x))),
+.chain(closures.into_iter().flat_map(|it| for_closure(db, it))),
 )
 }
 Err(e) => Box::new(iter::once(Err(e))),
@@ -62,7 +62,7 @@ fn all_mir_bodies(
 Ok(body) => {
 let closures = body.closures.clone();
 Box::new(
-iter::once(Ok(body)).chain(closures.into_iter().flat_map(|x| for_closure(db, x))),
+iter::once(Ok(body)).chain(closures.into_iter().flat_map(|it| for_closure(db, it))),
 )
 }
 Err(e) => Box::new(iter::once(Err(e))),
@@ -171,7 +171,7 @@ fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec<MovedOutOfRef>
 }
 TerminatorKind::Call { func, args, .. } => {
 for_operand(func, terminator.span);
-args.iter().for_each(|x| for_operand(x, terminator.span));
+args.iter().for_each(|it| for_operand(it, terminator.span));
 }
 TerminatorKind::Assert { cond, .. } => {
 for_operand(cond, terminator.span);
@@ -245,7 +245,7 @@ fn ever_initialized_map(
 body: &MirBody,
 ) -> ArenaMap<BasicBlockId, ArenaMap<LocalId, bool>> {
 let mut result: ArenaMap<BasicBlockId, ArenaMap<LocalId, bool>> =
-body.basic_blocks.iter().map(|x| (x.0, ArenaMap::default())).collect();
+body.basic_blocks.iter().map(|it| (it.0, ArenaMap::default())).collect();
 fn dfs(
 db: &dyn HirDatabase,
 body: &MirBody,
@@ -314,7 +314,7 @@ fn ever_initialized_map(
 result[body.start_block].insert(l, true);
 dfs(db, body, body.start_block, l, &mut result);
 }
-for l in body.locals.iter().map(|x| x.0) {
+for l in body.locals.iter().map(|it| it.0) {
 if !result[body.start_block].contains_idx(l) {
 result[body.start_block].insert(l, false);
 dfs(db, body, body.start_block, l, &mut result);
@@ -328,10 +328,10 @@ fn mutability_of_locals(
 body: &MirBody,
 ) -> ArenaMap<LocalId, MutabilityReason> {
 let mut result: ArenaMap<LocalId, MutabilityReason> =
-body.locals.iter().map(|x| (x.0, MutabilityReason::Not)).collect();
+body.locals.iter().map(|it| (it.0, MutabilityReason::Not)).collect();
 let mut push_mut_span = |local, span| match &mut result[local] {
 MutabilityReason::Mut { spans } => spans.push(span),
-x @ MutabilityReason::Not => *x = MutabilityReason::Mut { spans: vec![span] },
+it @ MutabilityReason::Not => *it = MutabilityReason::Mut { spans: vec![span] },
 };
 let ever_init_maps = ever_initialized_map(db, body);
 for (block_id, mut ever_init_map) in ever_init_maps.into_iter() {
@@ -48,15 +48,15 @@ mod tests;
macro_rules! from_bytes {
($ty:tt, $value:expr) => {
($ty::from_le_bytes(match ($value).try_into() {
Ok(x) => x,
Ok(it) => it,
Err(_) => return Err(MirEvalError::TypeError(stringify!(mismatched size in constructing $ty))),
}))
};
}

macro_rules! not_supported {
($x: expr) => {
return Err(MirEvalError::NotSupported(format!($x)))
($it: expr) => {
return Err(MirEvalError::NotSupported(format!($it)))
};
}

@@ -68,8 +68,8 @@ pub struct VTableMap {

impl VTableMap {
fn id(&mut self, ty: Ty) -> usize {
if let Some(x) = self.ty_to_id.get(&ty) {
return *x;
if let Some(it) = self.ty_to_id.get(&ty) {
return *it;
}
let id = self.id_to_ty.len();
self.id_to_ty.push(ty.clone());

@@ -237,17 +237,17 @@ const STACK_OFFSET: usize = 1 << 30;
const HEAP_OFFSET: usize = 1 << 29;

impl Address {
fn from_bytes(x: &[u8]) -> Result<Self> {
Ok(Address::from_usize(from_bytes!(usize, x)))
fn from_bytes(it: &[u8]) -> Result<Self> {
Ok(Address::from_usize(from_bytes!(usize, it)))
}

fn from_usize(x: usize) -> Self {
if x > STACK_OFFSET {
Stack(x - STACK_OFFSET)
} else if x > HEAP_OFFSET {
Heap(x - HEAP_OFFSET)
fn from_usize(it: usize) -> Self {
if it > STACK_OFFSET {
Stack(it - STACK_OFFSET)
} else if it > HEAP_OFFSET {
Heap(it - HEAP_OFFSET)
} else {
Invalid(x)
Invalid(it)
}
}
@@ -257,23 +257,23 @@ impl Address {

fn to_usize(&self) -> usize {
let as_num = match self {
Stack(x) => *x + STACK_OFFSET,
Heap(x) => *x + HEAP_OFFSET,
Invalid(x) => *x,
Stack(it) => *it + STACK_OFFSET,
Heap(it) => *it + HEAP_OFFSET,
Invalid(it) => *it,
};
as_num
}

fn map(&self, f: impl FnOnce(usize) -> usize) -> Address {
match self {
Stack(x) => Stack(f(*x)),
Heap(x) => Heap(f(*x)),
Invalid(x) => Invalid(f(*x)),
Stack(it) => Stack(f(*it)),
Heap(it) => Heap(f(*it)),
Invalid(it) => Invalid(f(*it)),
}
}

fn offset(&self, offset: usize) -> Address {
self.map(|x| x + offset)
self.map(|it| it + offset)
}
}
@@ -329,11 +329,11 @@ impl MirEvalError {
let source_map = db.body_with_source_map(*def).1;
let span: InFile<SyntaxNodePtr> = match span {
MirSpan::ExprId(e) => match source_map.expr_syntax(*e) {
Ok(s) => s.map(|x| x.into()),
Ok(s) => s.map(|it| it.into()),
Err(_) => continue,
},
MirSpan::PatId(p) => match source_map.pat_syntax(*p) {
Ok(s) => s.map(|x| match x {
Ok(s) => s.map(|it| match it {
Either::Left(e) => e.into(),
Either::Right(e) => e.into(),
}),
@@ -445,10 +445,10 @@ struct DropFlags {

impl DropFlags {
fn add_place(&mut self, p: Place) {
if p.iterate_over_parents().any(|x| self.need_drop.contains(&x)) {
if p.iterate_over_parents().any(|it| self.need_drop.contains(&it)) {
return;
}
self.need_drop.retain(|x| !p.is_parent(x));
self.need_drop.retain(|it| !p.is_parent(it));
self.need_drop.insert(p);
}
@@ -477,7 +477,7 @@ pub fn interpret_mir(
) -> (Result<Const>, String, String) {
let ty = body.locals[return_slot()].ty.clone();
let mut evaluator = Evaluator::new(db, body, assert_placeholder_ty_is_unused);
let x: Result<Const> = (|| {
let it: Result<Const> = (|| {
if evaluator.ptr_size() != std::mem::size_of::<usize>() {
not_supported!("targets with different pointer size from host");
}

@@ -491,7 +491,7 @@ pub fn interpret_mir(
return Ok(intern_const_scalar(ConstScalar::Bytes(bytes, memory_map), ty));
})();
(
x,
it,
String::from_utf8_lossy(&evaluator.stdout).into_owned(),
String::from_utf8_lossy(&evaluator.stderr).into_owned(),
)

@@ -540,7 +540,7 @@ impl Evaluator<'_> {

fn ptr_size(&self) -> usize {
match self.db.target_data_layout(self.crate_id) {
Some(x) => x.pointer_size.bytes_usize(),
Some(it) => it.pointer_size.bytes_usize(),
None => 8,
}
}
@@ -582,8 +582,8 @@ impl Evaluator<'_> {
} else {
None
};
let x = from_bytes!(usize, self.read_memory(addr, self.ptr_size())?);
addr = Address::from_usize(x);
let it = from_bytes!(usize, self.read_memory(addr, self.ptr_size())?);
addr = Address::from_usize(it);
}
ProjectionElem::Index(op) => {
let offset = from_bytes!(

@@ -599,13 +599,13 @@ impl Evaluator<'_> {
let offset = if from_end {
let len = match prev_ty.kind(Interner) {
TyKind::Array(_, c) => match try_const_usize(self.db, c) {
Some(x) => x as u64,
Some(it) => it as u64,
None => {
not_supported!("indexing array with unknown const from end")
}
},
TyKind::Slice(_) => match metadata {
Some(x) => from_bytes!(u64, x.get(self)?),
Some(it) => from_bytes!(u64, it.get(self)?),
None => not_supported!("slice place without metadata"),
},
_ => not_supported!("bad type for const index"),
@ -625,8 +625,8 @@ impl Evaluator<'_> {
|
|||
_ => TyKind::Error.intern(Interner),
|
||||
};
|
||||
metadata = match metadata {
|
||||
Some(x) => {
|
||||
let prev_len = from_bytes!(u64, x.get(self)?);
|
||||
Some(it) => {
|
||||
let prev_len = from_bytes!(u64, it.get(self)?);
|
||||
Some(IntervalOrOwned::Owned(
|
||||
(prev_len - from - to).to_le_bytes().to_vec(),
|
||||
))
|
||||
|
@ -649,8 +649,8 @@ impl Evaluator<'_> {
|
|||
Variants::Single { .. } => &layout,
|
||||
Variants::Multiple { variants, .. } => {
|
||||
&variants[match f.parent {
|
||||
hir_def::VariantId::EnumVariantId(x) => {
|
||||
RustcEnumVariantIdx(x.local_id)
|
||||
hir_def::VariantId::EnumVariantId(it) => {
|
||||
RustcEnumVariantIdx(it.local_id)
|
||||
}
|
||||
_ => {
|
||||
return Err(MirEvalError::TypeError(
|
||||
|
@ -717,8 +717,8 @@ impl Evaluator<'_> {
|
|||
body: &MirBody,
|
||||
args: impl Iterator<Item = Vec<u8>>,
|
||||
) -> Result<Vec<u8>> {
|
||||
if let Some(x) = self.stack_depth_limit.checked_sub(1) {
|
||||
self.stack_depth_limit = x;
|
||||
if let Some(it) = self.stack_depth_limit.checked_sub(1) {
|
||||
self.stack_depth_limit = it;
|
||||
} else {
|
||||
return Err(MirEvalError::StackOverflow);
|
||||
}
|
||||
|
@ -730,9 +730,9 @@ impl Evaluator<'_> {
|
|||
let addr = body
|
||||
.locals
|
||||
.iter()
|
||||
.map(|(id, x)| {
|
||||
.map(|(id, it)| {
|
||||
let (size, align) = self.size_align_of_sized(
|
||||
&x.ty,
|
||||
&it.ty,
|
||||
&locals,
|
||||
"no unsized local in extending stack",
|
||||
)?;
|
||||
|
@ -763,8 +763,8 @@ impl Evaluator<'_> {
|
|||
}
|
||||
loop {
|
||||
let current_block = &body.basic_blocks[current_block_idx];
|
||||
if let Some(x) = self.execution_limit.checked_sub(1) {
|
||||
self.execution_limit = x;
|
||||
if let Some(it) = self.execution_limit.checked_sub(1) {
|
||||
self.execution_limit = it;
|
||||
} else {
|
||||
return Err(MirEvalError::ExecutionLimitExceeded);
|
||||
}
|
||||
|
@ -801,7 +801,7 @@ impl Evaluator<'_> {
|
|||
let fn_ty = self.operand_ty(func, &locals)?;
|
||||
let args = args
|
||||
.iter()
|
||||
.map(|x| self.operand_ty_and_eval(x, &mut locals))
|
||||
.map(|it| self.operand_ty_and_eval(it, &mut locals))
|
||||
.collect::<Result<Vec<_>>>()?;
|
||||
match &fn_ty.data(Interner).kind {
|
||||
TyKind::Function(_) => {
|
||||
|
@ -824,7 +824,7 @@ impl Evaluator<'_> {
|
|||
terminator.span,
|
||||
)?;
|
||||
}
|
||||
x => not_supported!("unknown function type {x:?}"),
|
||||
it => not_supported!("unknown function type {it:?}"),
|
||||
}
|
||||
locals.drop_flags.add_place(destination.clone());
|
||||
current_block_idx = target.expect("broken mir, function without target");
|
||||
|
@ -855,7 +855,7 @@ impl Evaluator<'_> {
|
|||
fn eval_rvalue(&mut self, r: &Rvalue, locals: &mut Locals<'_>) -> Result<IntervalOrOwned> {
|
||||
use IntervalOrOwned::*;
|
||||
Ok(match r {
|
||||
Rvalue::Use(x) => Borrowed(self.eval_operand(x, locals)?),
|
||||
Rvalue::Use(it) => Borrowed(self.eval_operand(it, locals)?),
|
||||
Rvalue::Ref(_, p) => {
|
||||
let (addr, _, metadata) = self.place_addr_and_ty_and_metadata(p, locals)?;
|
||||
let mut r = addr.to_bytes();
|
||||
|
@ -900,9 +900,9 @@ impl Evaluator<'_> {
|
|||
c[0] = 1 - c[0];
|
||||
} else {
|
||||
match op {
|
||||
UnOp::Not => c.iter_mut().for_each(|x| *x = !*x),
|
||||
UnOp::Not => c.iter_mut().for_each(|it| *it = !*it),
|
||||
UnOp::Neg => {
|
||||
c.iter_mut().for_each(|x| *x = !*x);
|
||||
c.iter_mut().for_each(|it| *it = !*it);
|
||||
for k in c.iter_mut() {
|
||||
let o;
|
||||
(*k, o) = k.overflowing_add(1);
|
||||
|
@ -967,8 +967,8 @@ impl Evaluator<'_> {
|
|||
};
|
||||
Owned(r.to_le_bytes().into())
|
||||
}
|
||||
x => not_supported!(
|
||||
"invalid binop {x:?} on floating point operators"
|
||||
it => not_supported!(
|
||||
"invalid binop {it:?} on floating point operators"
|
||||
),
|
||||
}
|
||||
}
|
||||
|
@ -995,8 +995,8 @@ impl Evaluator<'_> {
|
|||
};
|
||||
Owned(r.to_le_bytes().into())
|
||||
}
|
||||
x => not_supported!(
|
||||
"invalid binop {x:?} on floating point operators"
|
||||
it => not_supported!(
|
||||
"invalid binop {it:?} on floating point operators"
|
||||
),
|
||||
}
|
||||
}
|
||||
|
@ -1076,12 +1076,12 @@ impl Evaluator<'_> {
|
|||
let result = self.compute_discriminant(ty, bytes)?;
|
||||
Owned(result.to_le_bytes().to_vec())
|
||||
}
|
||||
Rvalue::Repeat(x, len) => {
|
||||
Rvalue::Repeat(it, len) => {
|
||||
let len = match try_const_usize(self.db, &len) {
|
||||
Some(x) => x as usize,
|
||||
Some(it) => it as usize,
|
||||
None => not_supported!("non evaluatable array len in repeat Rvalue"),
|
||||
};
|
||||
let val = self.eval_operand(x, locals)?.get(self)?;
|
||||
let val = self.eval_operand(it, locals)?.get(self)?;
|
||||
let size = len * val.len();
|
||||
Owned(val.iter().copied().cycle().take(size).collect())
|
||||
}
|
||||
|
@ -1097,13 +1097,13 @@ impl Evaluator<'_> {
|
|||
Rvalue::Aggregate(kind, values) => {
|
||||
let values = values
|
||||
.iter()
|
||||
.map(|x| self.eval_operand(x, locals))
|
||||
.map(|it| self.eval_operand(it, locals))
|
||||
.collect::<Result<Vec<_>>>()?;
|
||||
match kind {
|
||||
AggregateKind::Array(_) => {
|
||||
let mut r = vec![];
|
||||
for x in values {
|
||||
let value = x.get(&self)?;
|
||||
for it in values {
|
||||
let value = it.get(&self)?;
|
||||
r.extend(value);
|
||||
}
|
||||
Owned(r)
|
||||
|
@ -1114,11 +1114,12 @@ impl Evaluator<'_> {
|
|||
layout.size.bytes_usize(),
|
||||
&layout,
|
||||
None,
|
||||
values.iter().map(|&x| x.into()),
|
||||
values.iter().map(|&it| it.into()),
|
||||
)?)
|
||||
}
|
||||
AggregateKind::Union(x, f) => {
|
||||
let layout = self.layout_adt((*x).into(), Substitution::empty(Interner))?;
|
||||
AggregateKind::Union(it, f) => {
|
||||
let layout =
|
||||
self.layout_adt((*it).into(), Substitution::empty(Interner))?;
|
||||
let offset = layout
|
||||
.fields
|
||||
.offset(u32::from(f.local_id.into_raw()) as usize)
|
||||
|
@ -1128,14 +1129,14 @@ impl Evaluator<'_> {
|
|||
result[offset..offset + op.len()].copy_from_slice(op);
|
||||
Owned(result)
|
||||
}
|
||||
AggregateKind::Adt(x, subst) => {
|
||||
AggregateKind::Adt(it, subst) => {
|
||||
let (size, variant_layout, tag) =
|
||||
self.layout_of_variant(*x, subst.clone(), locals)?;
|
||||
self.layout_of_variant(*it, subst.clone(), locals)?;
|
||||
Owned(self.make_by_layout(
|
||||
size,
|
||||
&variant_layout,
|
||||
tag,
|
||||
values.iter().map(|&x| x.into()),
|
||||
values.iter().map(|&it| it.into()),
|
||||
)?)
|
||||
}
|
||||
AggregateKind::Closure(ty) => {
|
||||
|
@ -1144,7 +1145,7 @@ impl Evaluator<'_> {
|
|||
layout.size.bytes_usize(),
|
||||
&layout,
|
||||
None,
|
||||
values.iter().map(|&x| x.into()),
|
||||
values.iter().map(|&it| it.into()),
|
||||
)?)
|
||||
}
|
||||
}
|
||||
|
@ -1242,8 +1243,8 @@ impl Evaluator<'_> {
|
|||
as usize;
|
||||
let variant = variants
|
||||
.iter_enumerated()
|
||||
.map(|(x, _)| x)
|
||||
.filter(|x| x != untagged_variant)
|
||||
.map(|(it, _)| it)
|
||||
.filter(|it| it != untagged_variant)
|
||||
.nth(candidate_tag)
|
||||
.unwrap_or(*untagged_variant)
|
||||
.0;
|
||||
|
@ -1264,15 +1265,15 @@ impl Evaluator<'_> {
|
|||
goal: impl Fn(&TyKind) -> Option<T>,
|
||||
) -> Result<T> {
|
||||
let kind = ty.kind(Interner);
|
||||
if let Some(x) = goal(kind) {
|
||||
return Ok(x);
|
||||
if let Some(it) = goal(kind) {
|
||||
return Ok(it);
|
||||
}
|
||||
if let TyKind::Adt(id, subst) = kind {
|
||||
if let AdtId::StructId(struct_id) = id.0 {
|
||||
let field_types = self.db.field_types(struct_id.into());
|
||||
let mut field_types = field_types.iter();
|
||||
if let Some(ty) =
|
||||
field_types.next().map(|x| x.1.clone().substitute(Interner, subst))
|
||||
field_types.next().map(|it| it.1.clone().substitute(Interner, subst))
|
||||
{
|
||||
return self.coerce_unsized_look_through_fields(&ty, goal);
|
||||
}
|
||||
|
@ -1287,8 +1288,8 @@ impl Evaluator<'_> {
|
|||
current_ty: &Ty,
|
||||
target_ty: &Ty,
|
||||
) -> Result<IntervalOrOwned> {
|
||||
fn for_ptr(x: &TyKind) -> Option<Ty> {
|
||||
match x {
|
||||
fn for_ptr(it: &TyKind) -> Option<Ty> {
|
||||
match it {
|
||||
TyKind::Raw(_, ty) | TyKind::Ref(_, _, ty) => Some(ty.clone()),
|
||||
_ => None,
|
||||
}
|
||||
|
@ -1314,7 +1315,7 @@ impl Evaluator<'_> {
|
|||
None => {
|
||||
not_supported!("unevaluatble len of array in coerce unsized")
|
||||
}
|
||||
Some(x) => x as usize,
|
||||
Some(it) => it as usize,
|
||||
};
|
||||
let mut r = Vec::with_capacity(16);
|
||||
let addr = addr.get(self)?;
|
||||
|
@ -1369,17 +1370,17 @@ impl Evaluator<'_> {
|
|||
|
||||
fn layout_of_variant(
|
||||
&mut self,
|
||||
x: VariantId,
|
||||
it: VariantId,
|
||||
subst: Substitution,
|
||||
locals: &Locals<'_>,
|
||||
) -> Result<(usize, Arc<Layout>, Option<(usize, usize, i128)>)> {
|
||||
let adt = x.adt_id();
|
||||
let adt = it.adt_id();
|
||||
if let DefWithBodyId::VariantId(f) = locals.body.owner {
|
||||
if let VariantId::EnumVariantId(x) = x {
|
||||
if let VariantId::EnumVariantId(it) = it {
|
||||
if AdtId::from(f.parent) == adt {
|
||||
// Computing the exact size of enums require resolving the enum discriminants. In order to prevent loops (and
|
||||
// infinite sized type errors) we use a dummy layout
|
||||
let i = self.const_eval_discriminant(x)?;
|
||||
let i = self.const_eval_discriminant(it)?;
|
||||
return Ok((16, self.layout(&TyBuilder::unit())?, Some((0, 16, i))));
|
||||
}
|
||||
}
|
||||
|
@ -1392,8 +1393,8 @@ impl Evaluator<'_> {
|
|||
.db
|
||||
.target_data_layout(self.crate_id)
|
||||
.ok_or(MirEvalError::TargetDataLayoutNotAvailable)?;
|
||||
let enum_variant_id = match x {
|
||||
VariantId::EnumVariantId(x) => x,
|
||||
let enum_variant_id = match it {
|
||||
VariantId::EnumVariantId(it) => it,
|
||||
_ => not_supported!("multi variant layout for non-enums"),
|
||||
};
|
||||
let rustc_enum_variant_idx = RustcEnumVariantIdx(enum_variant_id.local_id);
|
||||
|
@ -1407,8 +1408,8 @@ impl Evaluator<'_> {
|
|||
} else {
|
||||
discriminant = (variants
|
||||
.iter_enumerated()
|
||||
.filter(|(x, _)| x != untagged_variant)
|
||||
.position(|(x, _)| x == rustc_enum_variant_idx)
|
||||
.filter(|(it, _)| it != untagged_variant)
|
||||
.position(|(it, _)| it == rustc_enum_variant_idx)
|
||||
.unwrap() as i128)
|
||||
.wrapping_add(*niche_start as i128);
|
||||
true
|
||||
|
@ -1451,8 +1452,8 @@ impl Evaluator<'_> {
|
|||
Ok(result)
|
||||
}
|
||||
|
||||
fn eval_operand(&mut self, x: &Operand, locals: &mut Locals<'_>) -> Result<Interval> {
|
||||
Ok(match x {
|
||||
fn eval_operand(&mut self, it: &Operand, locals: &mut Locals<'_>) -> Result<Interval> {
|
||||
Ok(match it {
|
||||
Operand::Copy(p) | Operand::Move(p) => {
|
||||
locals.drop_flags.remove_place(p);
|
||||
self.eval_place(p, locals)?
|
||||
|
@ -1528,11 +1529,11 @@ impl Evaluator<'_> {
|
|||
return Ok(&[]);
|
||||
}
|
||||
let (mem, pos) = match addr {
|
||||
Stack(x) => (&self.stack, x),
|
||||
Heap(x) => (&self.heap, x),
|
||||
Invalid(x) => {
|
||||
Stack(it) => (&self.stack, it),
|
||||
Heap(it) => (&self.heap, it),
|
||||
Invalid(it) => {
|
||||
return Err(MirEvalError::UndefinedBehavior(format!(
|
||||
"read invalid memory address {x} with size {size}"
|
||||
"read invalid memory address {it} with size {size}"
|
||||
)));
|
||||
}
|
||||
};
|
||||
|
@ -1545,11 +1546,11 @@ impl Evaluator<'_> {
|
|||
return Ok(());
|
||||
}
|
||||
let (mem, pos) = match addr {
|
||||
Stack(x) => (&mut self.stack, x),
|
||||
Heap(x) => (&mut self.heap, x),
|
||||
Invalid(x) => {
|
||||
Stack(it) => (&mut self.stack, it),
|
||||
Heap(it) => (&mut self.heap, it),
|
||||
Invalid(it) => {
|
||||
return Err(MirEvalError::UndefinedBehavior(format!(
|
||||
"write invalid memory address {x} with content {r:?}"
|
||||
"write invalid memory address {it} with content {r:?}"
|
||||
)));
|
||||
}
|
||||
};
|
||||
|
@ -1587,7 +1588,7 @@ impl Evaluator<'_> {
|
|||
/// be something that complete this: `error: type {ty} was unsized. {what} should be sized`
|
||||
fn size_of_sized(&self, ty: &Ty, locals: &Locals<'_>, what: &'static str) -> Result<usize> {
|
||||
match self.size_align_of(ty, locals)? {
|
||||
Some(x) => Ok(x.0),
|
||||
Some(it) => Ok(it.0),
|
||||
None => Err(MirEvalError::TypeIsUnsized(ty.clone(), what)),
|
||||
}
|
||||
}
|
||||
|
@ -1601,7 +1602,7 @@ impl Evaluator<'_> {
|
|||
what: &'static str,
|
||||
) -> Result<(usize, usize)> {
|
||||
match self.size_align_of(ty, locals)? {
|
||||
Some(x) => Ok(x),
|
||||
Some(it) => Ok(it),
|
||||
None => Err(MirEvalError::TypeIsUnsized(ty.clone(), what)),
|
||||
}
|
||||
}
|
||||
|
@ -1751,14 +1752,14 @@ impl Evaluator<'_> {
|
|||
match size {
|
||||
Some(_) => {
|
||||
let current = from_bytes!(usize, self.read_memory(addr, my_size)?);
|
||||
if let Some(x) = patch_map.get(¤t) {
|
||||
self.write_memory(addr, &x.to_le_bytes())?;
|
||||
if let Some(it) = patch_map.get(¤t) {
|
||||
self.write_memory(addr, &it.to_le_bytes())?;
|
||||
}
|
||||
}
|
||||
None => {
|
||||
let current = from_bytes!(usize, self.read_memory(addr, my_size / 2)?);
|
||||
if let Some(x) = patch_map.get(¤t) {
|
||||
self.write_memory(addr, &x.to_le_bytes())?;
|
||||
if let Some(it) = patch_map.get(¤t) {
|
||||
self.write_memory(addr, &it.to_le_bytes())?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1848,7 +1849,7 @@ impl Evaluator<'_> {
|
|||
generic_args.clone(),
|
||||
self.trait_env.clone(),
|
||||
)
|
||||
.map_err(|x| MirEvalError::MirLowerErrorForClosure(closure, x))?;
|
||||
.map_err(|it| MirEvalError::MirLowerErrorForClosure(closure, it))?;
|
||||
let closure_data = if mir_body.locals[mir_body.param_locals[0]].ty.as_reference().is_some()
|
||||
{
|
||||
closure_data.addr.to_bytes()
|
||||
|
@ -1856,7 +1857,7 @@ impl Evaluator<'_> {
|
|||
closure_data.get(self)?.to_owned()
|
||||
};
|
||||
let arg_bytes = iter::once(Ok(closure_data))
|
||||
.chain(args.iter().map(|x| Ok(x.get(&self)?.to_owned())))
|
||||
.chain(args.iter().map(|it| Ok(it.get(&self)?.to_owned())))
|
||||
.collect::<Result<Vec<_>>>()?;
|
||||
let bytes = self.interpret_mir(&mir_body, arg_bytes.into_iter()).map_err(|e| {
|
||||
MirEvalError::InFunction(Either::Right(closure), Box::new(e), span, locals.body.owner)
|
||||
|
@ -1890,7 +1891,7 @@ impl Evaluator<'_> {
|
|||
size,
|
||||
&variant_layout,
|
||||
tag,
|
||||
args.iter().map(|x| x.interval.into()),
|
||||
args.iter().map(|it| it.interval.into()),
|
||||
)?;
|
||||
destination.write_from_bytes(self, &result)?;
|
||||
}
|
||||
|
@ -1901,7 +1902,7 @@ impl Evaluator<'_> {
|
|||
size,
|
||||
&variant_layout,
|
||||
tag,
|
||||
args.iter().map(|x| x.interval.into()),
|
||||
args.iter().map(|it| it.interval.into()),
|
||||
)?;
|
||||
destination.write_from_bytes(self, &result)?;
|
||||
}
|
||||
|
@ -1929,7 +1930,7 @@ impl Evaluator<'_> {
|
|||
return Ok(());
|
||||
}
|
||||
let arg_bytes =
|
||||
args.iter().map(|x| Ok(x.get(&self)?.to_owned())).collect::<Result<Vec<_>>>()?;
|
||||
args.iter().map(|it| Ok(it.get(&self)?.to_owned())).collect::<Result<Vec<_>>>()?;
|
||||
if let Some(self_ty_idx) =
|
||||
is_dyn_method(self.db, self.trait_env.clone(), def, generic_args.clone())
|
||||
{
|
||||
|
@ -1945,17 +1946,16 @@ impl Evaluator<'_> {
|
|||
ty: ty.clone(),
|
||||
};
|
||||
let ty = GenericArgData::Ty(ty.clone()).intern(Interner);
|
||||
let generics_for_target =
|
||||
Substitution::from_iter(
|
||||
Interner,
|
||||
generic_args.iter(Interner).enumerate().map(|(i, x)| {
|
||||
if i == self_ty_idx {
|
||||
&ty
|
||||
} else {
|
||||
x
|
||||
}
|
||||
}),
|
||||
);
|
||||
let generics_for_target = Substitution::from_iter(
|
||||
Interner,
|
||||
generic_args.iter(Interner).enumerate().map(|(i, it)| {
|
||||
if i == self_ty_idx {
|
||||
&ty
|
||||
} else {
|
||||
it
|
||||
}
|
||||
}),
|
||||
);
|
||||
return self.exec_fn_with_args(
|
||||
def,
|
||||
&args_for_target,
|
||||
|
@ -2046,7 +2046,7 @@ impl Evaluator<'_> {
|
|||
let arg1 = {
|
||||
let ty = TyKind::Tuple(
|
||||
args.len(),
|
||||
Substitution::from_iter(Interner, args.iter().map(|x| x.ty.clone())),
|
||||
Substitution::from_iter(Interner, args.iter().map(|it| it.ty.clone())),
|
||||
)
|
||||
.intern(Interner);
|
||||
let layout = self.layout(&ty)?;
|
||||
|
@ -2054,7 +2054,7 @@ impl Evaluator<'_> {
|
|||
layout.size.bytes_usize(),
|
||||
&layout,
|
||||
None,
|
||||
args.iter().map(|x| IntervalOrOwned::Borrowed(x.interval)),
|
||||
args.iter().map(|it| IntervalOrOwned::Borrowed(it.interval)),
|
||||
)?;
|
||||
// FIXME: there is some leak here
|
||||
let size = layout.size.bytes_usize();
|
||||
|
@ -2129,7 +2129,7 @@ impl Evaluator<'_> {
|
|||
return Ok(());
|
||||
}
|
||||
let metadata = match metadata {
|
||||
Some(x) => x.get(self)?.to_vec(),
|
||||
Some(it) => it.get(self)?.to_vec(),
|
||||
None => vec![],
|
||||
};
|
||||
self.run_drop_glue_deep(ty, locals, addr, &metadata, span)
|
||||
|
@ -2233,10 +2233,10 @@ impl Evaluator<'_> {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn pad16(x: &[u8], is_signed: bool) -> [u8; 16] {
|
||||
let is_negative = is_signed && x.last().unwrap_or(&0) > &127;
|
||||
pub fn pad16(it: &[u8], is_signed: bool) -> [u8; 16] {
|
||||
let is_negative = is_signed && it.last().unwrap_or(&0) > &127;
|
||||
let fill_with = if is_negative { 255 } else { 0 };
|
||||
x.iter()
|
||||
it.iter()
|
||||
.copied()
|
||||
.chain(iter::repeat(fill_with))
|
||||
.take(16)
|
||||
|
|
|
@ -14,15 +14,15 @@ mod simd;
|
|||
macro_rules! from_bytes {
|
||||
($ty:tt, $value:expr) => {
|
||||
($ty::from_le_bytes(match ($value).try_into() {
|
||||
Ok(x) => x,
|
||||
Ok(it) => it,
|
||||
Err(_) => return Err(MirEvalError::TypeError("mismatched size")),
|
||||
}))
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! not_supported {
|
||||
($x: expr) => {
|
||||
return Err(MirEvalError::NotSupported(format!($x)))
|
||||
($it: expr) => {
|
||||
return Err(MirEvalError::NotSupported(format!($it)))
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -102,25 +102,25 @@ impl Evaluator<'_> {
|
|||
let alloc_fn = function_data
|
||||
.attrs
|
||||
.iter()
|
||||
.filter_map(|x| x.path().as_ident())
|
||||
.filter_map(|x| x.as_str())
|
||||
.find(|x| {
|
||||
.filter_map(|it| it.path().as_ident())
|
||||
.filter_map(|it| it.as_str())
|
||||
.find(|it| {
|
||||
[
|
||||
"rustc_allocator",
|
||||
"rustc_deallocator",
|
||||
"rustc_reallocator",
|
||||
"rustc_allocator_zeroed",
|
||||
]
|
||||
.contains(x)
|
||||
.contains(it)
|
||||
});
|
||||
if let Some(alloc_fn) = alloc_fn {
|
||||
self.exec_alloc_fn(alloc_fn, args, destination)?;
|
||||
return Ok(true);
|
||||
}
|
||||
if let Some(x) = self.detect_lang_function(def) {
|
||||
if let Some(it) = self.detect_lang_function(def) {
|
||||
let arg_bytes =
|
||||
args.iter().map(|x| Ok(x.get(&self)?.to_owned())).collect::<Result<Vec<_>>>()?;
|
||||
let result = self.exec_lang_item(x, generic_args, &arg_bytes, locals, span)?;
|
||||
args.iter().map(|it| Ok(it.get(&self)?.to_owned())).collect::<Result<Vec<_>>>()?;
|
||||
let result = self.exec_lang_item(it, generic_args, &arg_bytes, locals, span)?;
|
||||
destination.write_from_bytes(self, &result)?;
|
||||
return Ok(true);
|
||||
}
|
||||
|
@ -178,7 +178,7 @@ impl Evaluator<'_> {
|
|||
|
||||
fn exec_lang_item(
|
||||
&mut self,
|
||||
x: LangItem,
|
||||
it: LangItem,
|
||||
generic_args: &Substitution,
|
||||
args: &[Vec<u8>],
|
||||
locals: &Locals<'_>,
|
||||
|
@@ -186,12 +186,11 @@ impl Evaluator<'_> {
) -> Result<Vec<u8>> {
use LangItem::*;
let mut args = args.iter();
match x {
match it {
BeginPanic => Err(MirEvalError::Panic("<unknown-panic-payload>".to_string())),
PanicFmt => {
let message = (|| {
let x = self.db.crate_def_map(self.crate_id).crate_root();
let resolver = x.resolver(self.db.upcast());
let resolver = self.db.crate_def_map(self.crate_id).crate_root().resolver(self.db.upcast());
let Some(format_fn) = resolver.resolve_path_in_value_ns_fully(
self.db.upcast(),
&hir_def::path::Path::from_known_path_with_no_generic(ModPath::from_segments(
@ -219,7 +218,7 @@ impl Evaluator<'_> {
|
|||
}
|
||||
DropInPlace => {
|
||||
let ty =
|
||||
generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)).ok_or(
|
||||
generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)).ok_or(
|
||||
MirEvalError::TypeError(
|
||||
"generic argument of drop_in_place is not provided",
|
||||
),
|
||||
|
@ -236,7 +235,7 @@ impl Evaluator<'_> {
|
|||
)?;
|
||||
Ok(vec![])
|
||||
}
|
||||
x => not_supported!("Executing lang item {x:?}"),
|
||||
it => not_supported!("Executing lang item {it:?}"),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -521,7 +520,7 @@ impl Evaluator<'_> {
|
|||
}
|
||||
match name {
|
||||
"size_of" => {
|
||||
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner))
|
||||
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
|
||||
else {
|
||||
return Err(MirEvalError::TypeError("size_of generic arg is not provided"));
|
||||
};
|
||||
|
@ -529,14 +528,14 @@ impl Evaluator<'_> {
|
|||
destination.write_from_bytes(self, &size.to_le_bytes()[0..destination.size])
|
||||
}
|
||||
"min_align_of" | "pref_align_of" => {
|
||||
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else {
|
||||
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) else {
|
||||
return Err(MirEvalError::TypeError("align_of generic arg is not provided"));
|
||||
};
|
||||
let align = self.layout(ty)?.align.abi.bytes();
|
||||
destination.write_from_bytes(self, &align.to_le_bytes()[0..destination.size])
|
||||
}
|
||||
"size_of_val" => {
|
||||
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner))
|
||||
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
|
||||
else {
|
||||
return Err(MirEvalError::TypeError("size_of_val generic arg is not provided"));
|
||||
};
|
||||
|
@ -552,7 +551,7 @@ impl Evaluator<'_> {
|
|||
}
|
||||
}
|
||||
"min_align_of_val" => {
|
||||
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else {
|
||||
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) else {
|
||||
return Err(MirEvalError::TypeError("min_align_of_val generic arg is not provided"));
|
||||
};
|
||||
let [arg] = args else {
|
||||
|
@ -567,7 +566,7 @@ impl Evaluator<'_> {
|
|||
}
|
||||
}
|
||||
"needs_drop" => {
|
||||
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner))
|
||||
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
|
||||
else {
|
||||
return Err(MirEvalError::TypeError("size_of generic arg is not provided"));
|
||||
};
|
||||
|
@ -694,7 +693,7 @@ impl Evaluator<'_> {
|
|||
_ => unreachable!(),
|
||||
};
|
||||
let is_overflow = u128overflow
|
||||
|| ans.to_le_bytes()[op_size..].iter().any(|&x| x != 0 && x != 255);
|
||||
|| ans.to_le_bytes()[op_size..].iter().any(|&it| it != 0 && it != 255);
|
||||
let is_overflow = vec![u8::from(is_overflow)];
|
||||
let layout = self.layout(&result_ty)?;
|
||||
let result = self.make_by_layout(
|
||||
|
@ -713,7 +712,7 @@ impl Evaluator<'_> {
|
|||
"copy_nonoverlapping args are not provided",
|
||||
));
|
||||
};
|
||||
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner))
|
||||
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
|
||||
else {
|
||||
return Err(MirEvalError::TypeError(
|
||||
"copy_nonoverlapping generic arg is not provided",
|
||||
|
@ -732,7 +731,7 @@ impl Evaluator<'_> {
|
|||
let [ptr, offset] = args else {
|
||||
return Err(MirEvalError::TypeError("offset args are not provided"));
|
||||
};
|
||||
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner))
|
||||
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
|
||||
else {
|
||||
return Err(MirEvalError::TypeError("offset generic arg is not provided"));
|
||||
};
|
||||
|
@ -842,7 +841,7 @@ impl Evaluator<'_> {
|
|||
let [arg] = args else {
|
||||
return Err(MirEvalError::TypeError("discriminant_value arg is not provided"));
|
||||
};
|
||||
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner))
|
||||
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
|
||||
else {
|
||||
return Err(MirEvalError::TypeError(
|
||||
"discriminant_value generic arg is not provided",
|
||||
|
@ -872,7 +871,7 @@ impl Evaluator<'_> {
|
|||
if let Some(target) = self.db.lang_item(self.crate_id, LangItem::FnOnce) {
|
||||
if let Some(def) = target
|
||||
.as_trait()
|
||||
.and_then(|x| self.db.trait_data(x).method_by_name(&name![call_once]))
|
||||
.and_then(|it| self.db.trait_data(it).method_by_name(&name![call_once]))
|
||||
{
|
||||
return self.exec_fn_trait(
|
||||
def,
|
||||
|
@ -954,7 +953,7 @@ impl Evaluator<'_> {
|
|||
) -> Result<()> {
|
||||
// We are a single threaded runtime with no UB checking and no optimization, so
|
||||
// we can implement these as normal functions.
|
||||
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else {
|
||||
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) else {
|
||||
return Err(MirEvalError::TypeError("atomic intrinsic generic arg is not provided"));
|
||||
};
|
||||
let Some(arg0) = args.get(0) else {
|
||||
|
|
|
@ -9,15 +9,15 @@ use super::*;
|
|||
macro_rules! from_bytes {
|
||||
($ty:tt, $value:expr) => {
|
||||
($ty::from_le_bytes(match ($value).try_into() {
|
||||
Ok(x) => x,
|
||||
Ok(it) => it,
|
||||
Err(_) => return Err(MirEvalError::TypeError("mismatched size")),
|
||||
}))
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! not_supported {
|
||||
($x: expr) => {
|
||||
return Err(MirEvalError::NotSupported(format!($x)))
|
||||
($it: expr) => {
|
||||
return Err(MirEvalError::NotSupported(format!($it)))
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -25,7 +25,8 @@ impl Evaluator<'_> {
|
|||
fn detect_simd_ty(&self, ty: &Ty) -> Result<usize> {
|
||||
match ty.kind(Interner) {
|
||||
TyKind::Adt(id, subst) => {
|
||||
let len = match subst.as_slice(Interner).get(1).and_then(|x| x.constant(Interner)) {
|
||||
let len = match subst.as_slice(Interner).get(1).and_then(|it| it.constant(Interner))
|
||||
{
|
||||
Some(len) => len,
|
||||
_ => {
|
||||
if let AdtId::StructId(id) = id.0 {
|
||||
|
@ -35,7 +36,7 @@ impl Evaluator<'_> {
|
|||
}
|
||||
};
|
||||
match try_const_usize(self.db, len) {
|
||||
Some(x) => Ok(x as usize),
|
||||
Some(it) => Ok(it as usize),
|
||||
None => Err(MirEvalError::TypeError("simd type with unevaluatable len param")),
|
||||
}
|
||||
}
|
||||
|
@ -61,10 +62,10 @@ impl Evaluator<'_> {
|
|||
.get(self)?
|
||||
.iter()
|
||||
.zip(right.get(self)?)
|
||||
.map(|(&x, &y)| match name {
|
||||
"and" => x & y,
|
||||
"or" => x | y,
|
||||
"xor" => x ^ y,
|
||||
.map(|(&it, &y)| match name {
|
||||
"and" => it & y,
|
||||
"or" => it | y,
|
||||
"xor" => it ^ y,
|
||||
_ => unreachable!(),
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
@ -82,9 +83,9 @@ impl Evaluator<'_> {
|
|||
for (l, r) in vector {
|
||||
let mut result = Ordering::Equal;
|
||||
for (l, r) in l.iter().zip(r).rev() {
|
||||
let x = l.cmp(r);
|
||||
if x != Ordering::Equal {
|
||||
result = x;
|
||||
let it = l.cmp(r);
|
||||
if it != Ordering::Equal {
|
||||
result = it;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
@ -107,7 +108,7 @@ impl Evaluator<'_> {
|
|||
let op_count = op.interval.size / op_len;
|
||||
let mut result: u64 = 0;
|
||||
for (i, val) in op.get(self)?.chunks(op_count).enumerate() {
|
||||
if !val.iter().all(|&x| x == 0) {
|
||||
if !val.iter().all(|&it| it == 0) {
|
||||
result |= 1 << i;
|
||||
}
|
||||
}
|
||||
|
@ -123,7 +124,7 @@ impl Evaluator<'_> {
|
|||
));
|
||||
};
|
||||
let index_len = match try_const_usize(self.db, index_len) {
|
||||
Some(x) => x as usize,
|
||||
Some(it) => it as usize,
|
||||
None => {
|
||||
return Err(MirEvalError::TypeError(
|
||||
"simd type with unevaluatable len param",
|
||||
|
@ -138,7 +139,7 @@ impl Evaluator<'_> {
|
|||
for index in index.get(self)?.chunks(index.interval.size / index_len) {
|
||||
let index = from_bytes!(u32, index) as usize;
|
||||
let val = match vector.clone().nth(index) {
|
||||
Some(x) => x,
|
||||
Some(it) => it,
|
||||
None => {
|
||||
return Err(MirEvalError::TypeError(
|
||||
"out of bound access in simd shuffle",
|
||||
|
|
|
@ -146,12 +146,12 @@ impl MirLowerError {
|
|||
ConstEvalError::MirEvalError(e) => e.pretty_print(f, db, span_formatter)?,
|
||||
}
|
||||
}
|
||||
MirLowerError::MissingFunctionDefinition(owner, x) => {
|
||||
MirLowerError::MissingFunctionDefinition(owner, it) => {
|
||||
let body = db.body(*owner);
|
||||
writeln!(
|
||||
f,
|
||||
"Missing function definition for {}",
|
||||
body.pretty_print_expr(db.upcast(), *owner, *x)
|
||||
body.pretty_print_expr(db.upcast(), *owner, *it)
|
||||
)?;
|
||||
}
|
||||
MirLowerError::TypeMismatch(e) => {
|
||||
|
@ -202,15 +202,15 @@ impl MirLowerError {
|
|||
}
|
||||
|
||||
macro_rules! not_supported {
|
||||
($x: expr) => {
|
||||
return Err(MirLowerError::NotSupported(format!($x)))
|
||||
($it: expr) => {
|
||||
return Err(MirLowerError::NotSupported(format!($it)))
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! implementation_error {
|
||||
($x: expr) => {{
|
||||
::stdx::never!("MIR lower implementation bug: {}", format!($x));
|
||||
return Err(MirLowerError::ImplementationError(format!($x)));
|
||||
($it: expr) => {{
|
||||
::stdx::never!("MIR lower implementation bug: {}", format!($it));
|
||||
return Err(MirLowerError::ImplementationError(format!($it)));
|
||||
}};
|
||||
}
|
||||
|
||||
|
@ -490,8 +490,8 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
|||
// It's probably a unit struct or a zero sized function, so no action is needed.
|
||||
Ok(Some(current))
|
||||
}
|
||||
x => {
|
||||
not_supported!("unknown name {x:?} in value name space");
|
||||
it => {
|
||||
not_supported!("unknown name {it:?} in value name space");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -799,8 +799,8 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
|||
Expr::Yield { .. } => not_supported!("yield"),
|
||||
Expr::RecordLit { fields, path, spread, ellipsis: _, is_assignee_expr: _ } => {
|
||||
let spread_place = match spread {
|
||||
&Some(x) => {
|
||||
let Some((p, c)) = self.lower_expr_as_place(current, x, true)? else {
|
||||
&Some(it) => {
|
||||
let Some((p, c)) = self.lower_expr_as_place(current, it, true)? else {
|
||||
return Ok(None);
|
||||
};
|
||||
current = c;
|
||||
|
@ -840,8 +840,8 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
|||
Some(sp) => operands
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.map(|(i, x)| match x {
|
||||
Some(x) => x,
|
||||
.map(|(i, it)| match it {
|
||||
Some(it) => it,
|
||||
None => {
|
||||
let p =
|
||||
sp.project(ProjectionElem::Field(FieldId {
|
||||
|
@ -893,7 +893,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
|||
Ok(Some(current))
|
||||
}
|
||||
Expr::Cast { expr, type_ref: _ } => {
|
||||
let Some((x, current)) = self.lower_expr_to_some_operand(*expr, current)? else {
|
||||
let Some((it, current)) = self.lower_expr_to_some_operand(*expr, current)? else {
|
||||
return Ok(None);
|
||||
};
|
||||
let source_ty = self.infer[*expr].clone();
|
||||
|
@ -901,7 +901,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
|||
self.push_assignment(
|
||||
current,
|
||||
place,
|
||||
Rvalue::Cast(cast_kind(&source_ty, &target_ty)?, x, target_ty),
|
||||
Rvalue::Cast(cast_kind(&source_ty, &target_ty)?, it, target_ty),
|
||||
expr_id.into(),
|
||||
);
|
||||
Ok(Some(current))
|
||||
|
@ -1003,8 +1003,8 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
|||
.infer
|
||||
.expr_adjustments
|
||||
.get(lhs)
|
||||
.and_then(|x| x.split_last())
|
||||
.map(|x| x.1)
|
||||
.and_then(|it| it.split_last())
|
||||
.map(|it| it.1)
|
||||
.ok_or(MirLowerError::TypeError(
|
||||
"adjustment of binary op was missing",
|
||||
))?;
|
||||
|
@ -1107,15 +1107,15 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
|||
};
|
||||
let mut lp = None;
|
||||
let mut rp = None;
|
||||
if let Some(x) = lhs {
|
||||
let Some((o, c)) = self.lower_expr_to_some_operand(x, current)? else {
|
||||
if let Some(it) = lhs {
|
||||
let Some((o, c)) = self.lower_expr_to_some_operand(it, current)? else {
|
||||
return Ok(None);
|
||||
};
|
||||
lp = Some(o);
|
||||
current = c;
|
||||
}
|
||||
if let Some(x) = rhs {
|
||||
let Some((o, c)) = self.lower_expr_to_some_operand(x, current)? else {
|
||||
if let Some(it) = rhs {
|
||||
let Some((o, c)) = self.lower_expr_to_some_operand(it, current)? else {
|
||||
return Ok(None);
|
||||
};
|
||||
rp = Some(o);
|
||||
|
@ -1131,8 +1131,8 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
|||
.variant_data
|
||||
.fields()
|
||||
.iter()
|
||||
.map(|x| {
|
||||
let o = match x.1.name.as_str() {
|
||||
.map(|it| {
|
||||
let o = match it.1.name.as_str() {
|
||||
Some("start") => lp.take(),
|
||||
Some("end") => rp.take(),
|
||||
Some("exhausted") => {
|
||||
|
@ -1164,11 +1164,11 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
|||
.projections
|
||||
.clone()
|
||||
.into_iter()
|
||||
.map(|x| match x {
|
||||
.map(|it| match it {
|
||||
ProjectionElem::Deref => ProjectionElem::Deref,
|
||||
ProjectionElem::Field(x) => ProjectionElem::Field(x),
|
||||
ProjectionElem::TupleOrClosureField(x) => {
|
||||
ProjectionElem::TupleOrClosureField(x)
|
||||
ProjectionElem::Field(it) => ProjectionElem::Field(it),
|
||||
ProjectionElem::TupleOrClosureField(it) => {
|
||||
ProjectionElem::TupleOrClosureField(it)
|
||||
}
|
||||
ProjectionElem::ConstantIndex { offset, from_end } => {
|
||||
ProjectionElem::ConstantIndex { offset, from_end }
|
||||
|
@ -1176,8 +1176,8 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
|||
ProjectionElem::Subslice { from, to } => {
|
||||
ProjectionElem::Subslice { from, to }
|
||||
}
|
||||
ProjectionElem::OpaqueCast(x) => ProjectionElem::OpaqueCast(x),
|
||||
ProjectionElem::Index(x) => match x {},
|
||||
ProjectionElem::OpaqueCast(it) => ProjectionElem::OpaqueCast(it),
|
||||
ProjectionElem::Index(it) => match it {},
|
||||
})
|
||||
.collect(),
|
||||
};
|
||||
|
@ -1209,8 +1209,8 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
|||
Expr::Tuple { exprs, is_assignee_expr: _ } => {
|
||||
let Some(values) = exprs
|
||||
.iter()
|
||||
.map(|x| {
|
||||
let Some((o, c)) = self.lower_expr_to_some_operand(*x, current)? else {
|
||||
.map(|it| {
|
||||
let Some((o, c)) = self.lower_expr_to_some_operand(*it, current)? else {
|
||||
return Ok(None);
|
||||
};
|
||||
current = c;
|
||||
|
@ -1239,8 +1239,8 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
|||
};
|
||||
let Some(values) = elements
|
||||
.iter()
|
||||
.map(|x| {
|
||||
let Some((o, c)) = self.lower_expr_to_some_operand(*x, current)? else {
|
||||
.map(|it| {
|
||||
let Some((o, c)) = self.lower_expr_to_some_operand(*it, current)? else {
|
||||
return Ok(None);
|
||||
};
|
||||
current = c;
|
||||
|
@ -1285,7 +1285,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
|||
|
||||
fn placeholder_subst(&mut self) -> Substitution {
|
||||
let placeholder_subst = match self.owner.as_generic_def_id() {
|
||||
Some(x) => TyBuilder::placeholder_subst(self.db, x),
|
||||
Some(it) => TyBuilder::placeholder_subst(self.db, it),
|
||||
None => Substitution::empty(Interner),
|
||||
};
|
||||
placeholder_subst
|
||||
|
@ -1375,8 +1375,8 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
|||
}
|
||||
hir_def::hir::Literal::Char(c) => u32::from(*c).to_le_bytes().into(),
|
||||
hir_def::hir::Literal::Bool(b) => vec![*b as u8],
|
||||
hir_def::hir::Literal::Int(x, _) => x.to_le_bytes()[0..size].into(),
|
||||
hir_def::hir::Literal::Uint(x, _) => x.to_le_bytes()[0..size].into(),
|
||||
hir_def::hir::Literal::Int(it, _) => it.to_le_bytes()[0..size].into(),
|
||||
hir_def::hir::Literal::Uint(it, _) => it.to_le_bytes()[0..size].into(),
|
||||
hir_def::hir::Literal::Float(f, _) => match size {
|
||||
8 => f.into_f64().to_le_bytes().into(),
|
||||
4 => f.into_f32().to_le_bytes().into(),
|
||||
|
@ -1526,9 +1526,9 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
|||
|
||||
fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty {
|
||||
let mut ty = None;
|
||||
if let Some(x) = self.infer.expr_adjustments.get(&e) {
|
||||
if let Some(x) = x.last() {
|
||||
ty = Some(x.target.clone());
|
||||
if let Some(it) = self.infer.expr_adjustments.get(&e) {
|
||||
if let Some(it) = it.last() {
|
||||
ty = Some(it.target.clone());
|
||||
}
|
||||
}
|
||||
ty.unwrap_or_else(|| self.expr_ty_without_adjust(e))
|
||||
|
@ -1550,7 +1550,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
|||
|
||||
fn discr_temp_place(&mut self, current: BasicBlockId) -> Place {
|
||||
match &self.discr_temp {
|
||||
Some(x) => x.clone(),
|
||||
Some(it) => it.clone(),
|
||||
None => {
|
||||
let tmp: Place = self
|
||||
.temp(TyBuilder::discr_ty(), current, MirSpan::Unknown)
|
||||
|
@ -1597,7 +1597,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
|||
}
|
||||
|
||||
fn has_adjustments(&self, expr_id: ExprId) -> bool {
|
||||
!self.infer.expr_adjustments.get(&expr_id).map(|x| x.is_empty()).unwrap_or(true)
|
||||
!self.infer.expr_adjustments.get(&expr_id).map(|it| it.is_empty()).unwrap_or(true)
|
||||
}
|
||||
|
||||
fn merge_blocks(
|
||||
|
@ -1627,7 +1627,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
|||
))?
|
||||
.end
|
||||
{
|
||||
Some(x) => x,
|
||||
Some(it) => it,
|
||||
None => {
|
||||
let s = self.new_basic_block();
|
||||
self.current_loop_blocks
|
||||
|
@ -1751,10 +1751,10 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
|||
pick_binding: impl Fn(BindingId) -> bool,
|
||||
) -> Result<BasicBlockId> {
|
||||
let base_param_count = self.result.param_locals.len();
|
||||
self.result.param_locals.extend(params.clone().map(|(x, ty)| {
|
||||
self.result.param_locals.extend(params.clone().map(|(it, ty)| {
|
||||
let local_id = self.result.locals.alloc(Local { ty });
|
||||
self.drop_scopes.last_mut().unwrap().locals.push(local_id);
|
||||
if let Pat::Bind { id, subpat: None } = self.body[x] {
|
||||
if let Pat::Bind { id, subpat: None } = self.body[it] {
|
||||
if matches!(
|
||||
self.body.bindings[id].mode,
|
||||
BindingAnnotation::Unannotated | BindingAnnotation::Mutable
|
||||
|
@ -1795,7 +1795,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
|||
|
||||
fn binding_local(&self, b: BindingId) -> Result<LocalId> {
|
||||
match self.result.binding_locals.get(b) {
|
||||
Some(x) => Ok(*x),
|
||||
Some(it) => Ok(*it),
|
||||
None => {
|
||||
// FIXME: It should never happens, but currently it will happen in `const_dependent_on_local` test, which
|
||||
// is a hir lowering problem IMO.
|
||||
|
@ -1945,7 +1945,7 @@ pub fn mir_body_for_closure_query(
|
|||
implementation_error!("closure has not callable sig");
|
||||
};
|
||||
let current = ctx.lower_params_and_bindings(
|
||||
args.iter().zip(sig.params().iter()).map(|(x, y)| (*x, y.clone())),
|
||||
args.iter().zip(sig.params().iter()).map(|(it, y)| (*it, y.clone())),
|
||||
|_| true,
|
||||
)?;
|
||||
if let Some(current) = ctx.lower_expr_to_place(*root, return_slot().into(), current)? {
|
||||
|
@ -1964,34 +1964,35 @@ pub fn mir_body_for_closure_query(
|
|||
FnTrait::FnMut | FnTrait::Fn => vec![ProjectionElem::Deref],
|
||||
};
|
||||
ctx.result.walk_places(|p| {
|
||||
if let Some(x) = upvar_map.get(&p.local) {
|
||||
let r = x.iter().find(|x| {
|
||||
if p.projection.len() < x.0.place.projections.len() {
|
||||
if let Some(it) = upvar_map.get(&p.local) {
|
||||
let r = it.iter().find(|it| {
|
||||
if p.projection.len() < it.0.place.projections.len() {
|
||||
return false;
|
||||
}
|
||||
for (x, y) in p.projection.iter().zip(x.0.place.projections.iter()) {
|
||||
match (x, y) {
|
||||
for (it, y) in p.projection.iter().zip(it.0.place.projections.iter()) {
|
||||
match (it, y) {
|
||||
(ProjectionElem::Deref, ProjectionElem::Deref) => (),
|
||||
(ProjectionElem::Field(x), ProjectionElem::Field(y)) if x == y => (),
|
||||
(ProjectionElem::Field(it), ProjectionElem::Field(y)) if it == y => (),
|
||||
(
|
||||
ProjectionElem::TupleOrClosureField(x),
|
||||
ProjectionElem::TupleOrClosureField(it),
|
||||
ProjectionElem::TupleOrClosureField(y),
|
||||
) if x == y => (),
|
||||
) if it == y => (),
|
||||
_ => return false,
|
||||
}
|
||||
}
|
||||
true
|
||||
});
|
||||
match r {
|
||||
Some(x) => {
|
||||
Some(it) => {
|
||||
p.local = closure_local;
|
||||
let mut next_projs = closure_projection.clone();
|
||||
next_projs.push(PlaceElem::TupleOrClosureField(x.1));
|
||||
next_projs.push(PlaceElem::TupleOrClosureField(it.1));
|
||||
let prev_projs = mem::take(&mut p.projection);
|
||||
if x.0.kind != CaptureKind::ByValue {
|
||||
if it.0.kind != CaptureKind::ByValue {
|
||||
next_projs.push(ProjectionElem::Deref);
|
||||
}
|
||||
next_projs.extend(prev_projs.iter().cloned().skip(x.0.place.projections.len()));
|
||||
next_projs
|
||||
.extend(prev_projs.iter().cloned().skip(it.0.place.projections.len()));
|
||||
p.projection = next_projs.into();
|
||||
}
|
||||
None => err = Some(p.clone()),
|
||||
|
@ -2051,8 +2052,8 @@ pub fn lower_to_mir(
|
|||
// need to take this input explicitly.
|
||||
root_expr: ExprId,
|
||||
) -> Result<MirBody> {
|
||||
if let Some((_, x)) = infer.type_mismatches().next() {
|
||||
return Err(MirLowerError::TypeMismatch(x.clone()));
|
||||
if let Some((_, it)) = infer.type_mismatches().next() {
|
||||
return Err(MirLowerError::TypeMismatch(it.clone()));
|
||||
}
|
||||
let mut ctx = MirLowerCtx::new(db, owner, body, infer);
|
||||
// 0 is return local
|
||||
|
@ -2078,7 +2079,7 @@ pub fn lower_to_mir(
|
|||
body.params
|
||||
.iter()
|
||||
.zip(callable_sig.params().iter())
|
||||
.map(|(x, y)| (*x, y.clone())),
|
||||
.map(|(it, y)| (*it, y.clone())),
|
||||
binding_picker,
|
||||
)?;
|
||||
}
|
||||
|
|
|
@ -5,8 +5,8 @@ use hir_def::{lang_item::lang_attr, FunctionId};
|
|||
use hir_expand::name;
|
||||
|
||||
macro_rules! not_supported {
|
||||
($x: expr) => {
|
||||
return Err(MirLowerError::NotSupported(format!($x)))
|
||||
($it: expr) => {
|
||||
return Err(MirLowerError::NotSupported(format!($it)))
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -34,7 +34,7 @@ impl MirLowerCtx<'_> {
|
|||
) -> Result<Option<(Place, BasicBlockId)>> {
|
||||
let ty = adjustments
|
||||
.last()
|
||||
.map(|x| x.target.clone())
|
||||
.map(|it| it.target.clone())
|
||||
.unwrap_or_else(|| self.expr_ty_without_adjust(expr_id));
|
||||
let place = self.temp(ty, prev_block, expr_id.into())?;
|
||||
let Some(current) =
|
||||
|
@ -61,7 +61,7 @@ impl MirLowerCtx<'_> {
|
|||
if let Some((last, rest)) = adjustments.split_last() {
|
||||
match last.kind {
|
||||
Adjust::Deref(None) => {
|
||||
let Some(mut x) = self.lower_expr_as_place_with_adjust(
|
||||
let Some(mut it) = self.lower_expr_as_place_with_adjust(
|
||||
current,
|
||||
expr_id,
|
||||
upgrade_rvalue,
|
||||
|
@ -70,8 +70,8 @@ impl MirLowerCtx<'_> {
|
|||
else {
|
||||
return Ok(None);
|
||||
};
|
||||
x.0 = x.0.project(ProjectionElem::Deref);
|
||||
Ok(Some(x))
|
||||
it.0 = it.0.project(ProjectionElem::Deref);
|
||||
Ok(Some(it))
|
||||
}
|
||||
Adjust::Deref(Some(od)) => {
|
||||
let Some((r, current)) = self.lower_expr_as_place_with_adjust(
|
||||
|
@ -87,7 +87,7 @@ impl MirLowerCtx<'_> {
|
|||
current,
|
||||
r,
|
||||
rest.last()
|
||||
.map(|x| x.target.clone())
|
||||
.map(|it| it.target.clone())
|
||||
.unwrap_or_else(|| self.expr_ty_without_adjust(expr_id)),
|
||||
last.target.clone(),
|
||||
expr_id.into(),
|
||||
|
@ -253,8 +253,8 @@ impl MirLowerCtx<'_> {
|
|||
.infer
|
||||
.expr_adjustments
|
||||
.get(base)
|
||||
.and_then(|x| x.split_last())
|
||||
.map(|x| x.1)
|
||||
.and_then(|it| it.split_last())
|
||||
.map(|it| it.1)
|
||||
.unwrap_or(&[]);
|
||||
let Some((mut p_base, current)) =
|
||||
self.lower_expr_as_place_with_adjust(current, *base, true, adjusts)?
|
||||
|
|
|
@ -29,8 +29,8 @@ use crate::{
|
|||
use super::{MirBody, MirLowerError, Operand, Rvalue, StatementKind, TerminatorKind};
|
||||
|
||||
macro_rules! not_supported {
|
||||
($x: expr) => {
|
||||
return Err(MirLowerError::NotSupported(format!($x)))
|
||||
($it: expr) => {
|
||||
return Err(MirLowerError::NotSupported(format!($it)))
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -97,16 +97,16 @@ impl FallibleTypeFolder<Interner> for Filler<'_> {
|
|||
idx: chalk_ir::PlaceholderIndex,
|
||||
_outer_binder: DebruijnIndex,
|
||||
) -> std::result::Result<chalk_ir::Const<Interner>, Self::Error> {
|
||||
let x = from_placeholder_idx(self.db, idx);
|
||||
let Some(idx) = self.generics.as_ref().and_then(|g| g.param_idx(x)) else {
|
||||
let it = from_placeholder_idx(self.db, idx);
|
||||
let Some(idx) = self.generics.as_ref().and_then(|g| g.param_idx(it)) else {
|
||||
not_supported!("missing idx in generics");
|
||||
};
|
||||
Ok(self
|
||||
.subst
|
||||
.as_slice(Interner)
|
||||
.get(idx)
|
||||
.and_then(|x| x.constant(Interner))
|
||||
.ok_or_else(|| MirLowerError::GenericArgNotProvided(x, self.subst.clone()))?
|
||||
.and_then(|it| it.constant(Interner))
|
||||
.ok_or_else(|| MirLowerError::GenericArgNotProvided(it, self.subst.clone()))?
|
||||
.clone())
|
||||
}
|
||||
|
||||
|
@ -115,16 +115,16 @@ impl FallibleTypeFolder<Interner> for Filler<'_> {
|
|||
idx: chalk_ir::PlaceholderIndex,
|
||||
_outer_binder: DebruijnIndex,
|
||||
) -> std::result::Result<Ty, Self::Error> {
|
||||
let x = from_placeholder_idx(self.db, idx);
|
||||
let Some(idx) = self.generics.as_ref().and_then(|g| g.param_idx(x)) else {
|
||||
let it = from_placeholder_idx(self.db, idx);
|
||||
let Some(idx) = self.generics.as_ref().and_then(|g| g.param_idx(it)) else {
|
||||
not_supported!("missing idx in generics");
|
||||
};
|
||||
Ok(self
|
||||
.subst
|
||||
.as_slice(Interner)
|
||||
.get(idx)
|
||||
.and_then(|x| x.ty(Interner))
|
||||
.ok_or_else(|| MirLowerError::GenericArgNotProvided(x, self.subst.clone()))?
|
||||
.and_then(|it| it.ty(Interner))
|
||||
.ok_or_else(|| MirLowerError::GenericArgNotProvided(it, self.subst.clone()))?
|
||||
.clone())
|
||||
}
|
||||
|
||||
|
@ -180,7 +180,7 @@ impl Filler<'_> {
|
|||
MirLowerError::GenericArgNotProvided(
|
||||
self.generics
|
||||
.as_ref()
|
||||
.and_then(|x| x.iter().nth(b.index))
|
||||
.and_then(|it| it.iter().nth(b.index))
|
||||
.unwrap()
|
||||
.0,
|
||||
self.subst.clone(),
|
||||
|
|
|
@ -135,7 +135,7 @@ impl<'a> MirPrettyCtx<'a> {
|
|||
|
||||
fn for_closure(&mut self, closure: ClosureId) {
|
||||
let body = match self.db.mir_body_for_closure(closure) {
|
||||
Ok(x) => x,
|
||||
Ok(it) => it,
|
||||
Err(e) => {
|
||||
wln!(self, "// error in {closure:?}: {e:?}");
|
||||
return;
|
||||
|
@ -145,7 +145,7 @@ impl<'a> MirPrettyCtx<'a> {
|
|||
let indent = mem::take(&mut self.indent);
|
||||
let mut ctx = MirPrettyCtx {
|
||||
body: &body,
|
||||
local_to_binding: body.binding_locals.iter().map(|(x, y)| (*y, x)).collect(),
|
||||
local_to_binding: body.binding_locals.iter().map(|(it, y)| (*y, it)).collect(),
|
||||
result,
|
||||
indent,
|
||||
..*self
|
||||
|
@ -167,7 +167,7 @@ impl<'a> MirPrettyCtx<'a> {
|
|||
}
|
||||
|
||||
fn new(body: &'a MirBody, hir_body: &'a Body, db: &'a dyn HirDatabase) -> Self {
|
||||
let local_to_binding = body.binding_locals.iter().map(|(x, y)| (*y, x)).collect();
|
||||
let local_to_binding = body.binding_locals.iter().map(|(it, y)| (*y, it)).collect();
|
||||
MirPrettyCtx {
|
||||
body,
|
||||
db,
|
||||
|
@ -315,17 +315,17 @@ impl<'a> MirPrettyCtx<'a> {
|
|||
}
|
||||
}
|
||||
}
|
||||
ProjectionElem::TupleOrClosureField(x) => {
|
||||
ProjectionElem::TupleOrClosureField(it) => {
|
||||
f(this, local, head);
|
||||
w!(this, ".{}", x);
|
||||
w!(this, ".{}", it);
|
||||
}
|
||||
ProjectionElem::Index(l) => {
|
||||
f(this, local, head);
|
||||
w!(this, "[{}]", this.local_name(*l).display(this.db));
|
||||
}
|
||||
x => {
|
||||
it => {
|
||||
f(this, local, head);
|
||||
w!(this, ".{:?}", x);
|
||||
w!(this, ".{:?}", it);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -356,14 +356,14 @@ impl<'a> MirPrettyCtx<'a> {
|
|||
}
|
||||
self.place(p);
|
||||
}
|
||||
Rvalue::Aggregate(AggregateKind::Tuple(_), x) => {
|
||||
Rvalue::Aggregate(AggregateKind::Tuple(_), it) => {
|
||||
w!(self, "(");
|
||||
self.operand_list(x);
|
||||
self.operand_list(it);
|
||||
w!(self, ")");
|
||||
}
|
||||
Rvalue::Aggregate(AggregateKind::Array(_), x) => {
|
||||
Rvalue::Aggregate(AggregateKind::Array(_), it) => {
|
||||
w!(self, "[");
|
||||
self.operand_list(x);
|
||||
self.operand_list(it);
|
||||
w!(self, "]");
|
||||
}
|
||||
Rvalue::Repeat(op, len) => {
|
||||
|
@ -371,19 +371,19 @@ impl<'a> MirPrettyCtx<'a> {
|
|||
self.operand(op);
|
||||
w!(self, "; {}]", len.display(self.db));
|
||||
}
|
||||
Rvalue::Aggregate(AggregateKind::Adt(_, _), x) => {
|
||||
Rvalue::Aggregate(AggregateKind::Adt(_, _), it) => {
|
||||
w!(self, "Adt(");
|
||||
self.operand_list(x);
|
||||
self.operand_list(it);
|
||||
w!(self, ")");
|
||||
}
|
||||
Rvalue::Aggregate(AggregateKind::Closure(_), x) => {
|
||||
Rvalue::Aggregate(AggregateKind::Closure(_), it) => {
|
||||
w!(self, "Closure(");
|
||||
self.operand_list(x);
|
||||
self.operand_list(it);
|
||||
w!(self, ")");
|
||||
}
|
||||
Rvalue::Aggregate(AggregateKind::Union(_, _), x) => {
|
||||
Rvalue::Aggregate(AggregateKind::Union(_, _), it) => {
|
||||
w!(self, "Union(");
|
||||
self.operand_list(x);
|
||||
self.operand_list(it);
|
||||
w!(self, ")");
|
||||
}
|
||||
Rvalue::Len(p) => {
|
||||
|
@ -428,8 +428,8 @@ impl<'a> MirPrettyCtx<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
fn operand_list(&mut self, x: &[Operand]) {
|
||||
let mut it = x.iter();
|
||||
fn operand_list(&mut self, it: &[Operand]) {
|
||||
let mut it = it.iter();
|
||||
if let Some(first) = it.next() {
|
||||
self.operand(first);
|
||||
for op in it {
|
||||
|
|
|
@ -251,8 +251,8 @@ impl HirDisplay for GenericParam {
|
|||
impl HirDisplay for TypeOrConstParam {
|
||||
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
||||
match self.split(f.db) {
|
||||
either::Either::Left(x) => x.hir_fmt(f),
|
||||
either::Either::Right(x) => x.hir_fmt(f),
|
||||
either::Either::Left(it) => it.hir_fmt(f),
|
||||
either::Either::Right(it) => it.hir_fmt(f),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -303,11 +303,11 @@ fn write_generic_params(
|
|||
) -> Result<(), HirDisplayError> {
|
||||
let params = f.db.generic_params(def);
|
||||
if params.lifetimes.is_empty()
|
||||
&& params.type_or_consts.iter().all(|x| x.1.const_param().is_none())
|
||||
&& params.type_or_consts.iter().all(|it| it.1.const_param().is_none())
|
||||
&& params
|
||||
.type_or_consts
|
||||
.iter()
|
||||
.filter_map(|x| x.1.type_param())
|
||||
.filter_map(|it| it.1.type_param())
|
||||
.all(|param| !matches!(param.provenance, TypeParamProvenance::TypeParamList))
|
||||
{
|
||||
return Ok(());
|
||||
|
|
|
@ -1235,7 +1235,7 @@ impl Adt {
|
|||
pub fn has_non_default_type_params(self, db: &dyn HirDatabase) -> bool {
|
||||
let subst = db.generic_defaults(self.into());
|
||||
subst.iter().any(|ty| match ty.skip_binders().data(Interner) {
|
||||
GenericArgData::Ty(x) => x.is_unknown(),
|
||||
GenericArgData::Ty(it) => it.is_unknown(),
|
||||
_ => false,
|
||||
})
|
||||
}
|
||||
|
@ -1636,11 +1636,11 @@ impl DefWithBody {
|
|||
for moof in &borrowck_result.moved_out_of_ref {
|
||||
let span: InFile<SyntaxNodePtr> = match moof.span {
|
||||
mir::MirSpan::ExprId(e) => match source_map.expr_syntax(e) {
|
||||
Ok(s) => s.map(|x| x.into()),
|
||||
Ok(s) => s.map(|it| it.into()),
|
||||
Err(_) => continue,
|
||||
},
|
||||
mir::MirSpan::PatId(p) => match source_map.pat_syntax(p) {
|
||||
Ok(s) => s.map(|x| match x {
|
||||
Ok(s) => s.map(|it| match it {
|
||||
Either::Left(e) => e.into(),
|
||||
Either::Right(e) => e.into(),
|
||||
}),
|
||||
|
@@ -1679,11 +1679,11 @@ impl DefWithBody {
         for span in spans {
             let span: InFile<SyntaxNodePtr> = match span {
                 mir::MirSpan::ExprId(e) => match source_map.expr_syntax(*e) {
-                    Ok(s) => s.map(|x| x.into()),
+                    Ok(s) => s.map(|it| it.into()),
                     Err(_) => continue,
                 },
                 mir::MirSpan::PatId(p) => match source_map.pat_syntax(*p) {
-                    Ok(s) => s.map(|x| match x {
+                    Ok(s) => s.map(|it| match it {
                         Either::Left(e) => e.into(),
                         Either::Right(e) => e.into(),
                     }),

@@ -1696,7 +1696,7 @@ impl DefWithBody {
                 }
                 (mir::MutabilityReason::Not, true) => {
                     if !infer.mutated_bindings_in_closure.contains(&binding_id) {
-                        let should_ignore = matches!(body[binding_id].name.as_str(), Some(x) if x.starts_with("_"));
+                        let should_ignore = matches!(body[binding_id].name.as_str(), Some(it) if it.starts_with("_"));
                         if !should_ignore {
                             acc.push(UnusedMut { local }.into())
                         }

@@ -2294,7 +2294,7 @@ impl TypeAlias {
     pub fn has_non_default_type_params(self, db: &dyn HirDatabase) -> bool {
         let subst = db.generic_defaults(self.id.into());
         subst.iter().any(|ty| match ty.skip_binders().data(Interner) {
-            GenericArgData::Ty(x) => x.is_unknown(),
+            GenericArgData::Ty(it) => it.is_unknown(),
             _ => false,
         })
     }

@@ -2684,8 +2684,8 @@ impl GenericDef {
         let ty_params = generics.type_or_consts.iter().map(|(local_id, _)| {
             let toc = TypeOrConstParam { id: TypeOrConstParamId { parent: self.into(), local_id } };
             match toc.split(db) {
-                Either::Left(x) => GenericParam::ConstParam(x),
-                Either::Right(x) => GenericParam::TypeParam(x),
+                Either::Left(it) => GenericParam::ConstParam(it),
+                Either::Right(it) => GenericParam::TypeParam(it),
             }
         });
         self.lifetime_params(db)

@@ -2733,14 +2733,14 @@ pub struct LocalSource {
 impl LocalSource {
     pub fn as_ident_pat(&self) -> Option<&ast::IdentPat> {
         match &self.source.value {
-            Either::Left(x) => Some(x),
+            Either::Left(it) => Some(it),
             Either::Right(_) => None,
         }
     }

     pub fn into_ident_pat(self) -> Option<ast::IdentPat> {
         match self.source.value {
-            Either::Left(x) => Some(x),
+            Either::Left(it) => Some(it),
             Either::Right(_) => None,
         }
     }

@@ -2762,7 +2762,7 @@ impl LocalSource {
     }

     pub fn syntax_ptr(self) -> InFile<SyntaxNodePtr> {
-        self.source.map(|x| SyntaxNodePtr::new(x.syntax()))
+        self.source.map(|it| SyntaxNodePtr::new(it.syntax()))
     }
 }

@@ -2821,13 +2821,13 @@ impl Local {
         Type::new(db, def, ty)
     }

-    /// All definitions for this local. Example: `let (a$0, _) | (_, a$0) = x;`
+    /// All definitions for this local. Example: `let (a$0, _) | (_, a$0) = it;`
     pub fn sources(self, db: &dyn HirDatabase) -> Vec<LocalSource> {
         let (body, source_map) = db.body_with_source_map(self.parent);
         self.sources_(db, &body, &source_map).collect()
     }

-    /// The leftmost definition for this local. Example: `let (a$0, _) | (_, a) = x;`
+    /// The leftmost definition for this local. Example: `let (a$0, _) | (_, a) = it;`
     pub fn primary_source(self, db: &dyn HirDatabase) -> LocalSource {
         let (body, source_map) = db.body_with_source_map(self.parent);
         let src = self.sources_(db, &body, &source_map).next().unwrap();

@@ -3081,7 +3081,9 @@ impl TypeParam {
         let subst = TyBuilder::placeholder_subst(db, self.id.parent());
         let ty = ty.substitute(Interner, &subst);
         match ty.data(Interner) {
-            GenericArgData::Ty(x) => Some(Type::new_with_resolver_inner(db, &resolver, x.clone())),
+            GenericArgData::Ty(it) => {
+                Some(Type::new_with_resolver_inner(db, &resolver, it.clone()))
+            }
             _ => None,
         }
     }

@@ -3120,7 +3122,7 @@ impl ConstParam {
     pub fn name(self, db: &dyn HirDatabase) -> Name {
         let params = db.generic_params(self.id.parent());
         match params.type_or_consts[self.id.local_id()].name() {
-            Some(x) => x.clone(),
+            Some(it) => it.clone(),
             None => {
                 never!();
                 Name::missing()

@@ -3177,8 +3179,8 @@ impl TypeOrConstParam {

     pub fn ty(self, db: &dyn HirDatabase) -> Type {
         match self.split(db) {
-            Either::Left(x) => x.ty(db),
-            Either::Right(x) => x.ty(db),
+            Either::Left(it) => it.ty(db),
+            Either::Right(it) => it.ty(db),
         }
     }
 }

@@ -3676,9 +3678,9 @@ impl Type {
         };
         let parent_subst = TyBuilder::subst_for_def(db, trait_id, None)
             .push(self.ty.clone())
-            .fill(|x| {
+            .fill(|it| {
                 // FIXME: this code is not covered in tests.
-                match x {
+                match it {
                     ParamKind::Type => {
                         GenericArgData::Ty(args.next().unwrap().ty.clone()).intern(Interner)
                     }

@@ -3845,7 +3847,7 @@ impl Type {

     pub fn as_array(&self, db: &dyn HirDatabase) -> Option<(Type, usize)> {
         if let TyKind::Array(ty, len) = &self.ty.kind(Interner) {
-            try_const_usize(db, len).map(|x| (self.derived(ty.clone()), x as usize))
+            try_const_usize(db, len).map(|it| (self.derived(ty.clone()), it as usize))
         } else {
             None
         }

@@ -298,7 +298,7 @@ impl SourceToDefCtx<'_, '_> {
     pub(super) fn type_param_to_def(&mut self, src: InFile<ast::TypeParam>) -> Option<TypeParamId> {
         let container: ChildContainer = self.find_generic_param_container(src.syntax())?.into();
         let dyn_map = self.cache_for(container, src.file_id);
-        dyn_map[keys::TYPE_PARAM].get(&src.value).copied().map(|x| TypeParamId::from_unchecked(x))
+        dyn_map[keys::TYPE_PARAM].get(&src.value).copied().map(|it| TypeParamId::from_unchecked(it))
     }

     pub(super) fn lifetime_param_to_def(

@@ -316,7 +316,10 @@ impl SourceToDefCtx<'_, '_> {
     ) -> Option<ConstParamId> {
         let container: ChildContainer = self.find_generic_param_container(src.syntax())?.into();
         let dyn_map = self.cache_for(container, src.file_id);
-        dyn_map[keys::CONST_PARAM].get(&src.value).copied().map(|x| ConstParamId::from_unchecked(x))
+        dyn_map[keys::CONST_PARAM]
+            .get(&src.value)
+            .copied()
+            .map(|it| ConstParamId::from_unchecked(it))
     }

     pub(super) fn generic_param_to_def(

@@ -733,7 +733,7 @@ fn classify_name_ref(
                 return None;
             }
             let parent = match ast::Fn::cast(parent.parent()?) {
-                Some(x) => x.param_list(),
+                Some(it) => it.param_list(),
                 None => ast::ClosureExpr::cast(parent.parent()?)?.param_list(),
             };

@@ -422,10 +422,10 @@ pub(super) fn definition(
             |&it| {
                 if !it.parent_enum(db).is_data_carrying(db) {
                     match it.eval(db) {
-                        Ok(x) => {
-                            Some(if x >= 10 { format!("{x} ({x:#X})") } else { format!("{x}") })
+                        Ok(it) => {
+                            Some(if it >= 10 { format!("{it} ({it:#X})") } else { format!("{it}") })
                         }
-                        Err(_) => it.value(db).map(|x| format!("{x:?}")),
+                        Err(_) => it.value(db).map(|it| format!("{it:?}")),
                     }
                 } else {
                     None

@@ -437,7 +437,7 @@ pub(super) fn definition(
         Definition::Const(it) => label_value_and_docs(db, it, |it| {
             let body = it.render_eval(db);
             match body {
-                Ok(x) => Some(x),
+                Ok(it) => Some(it),
                 Err(_) => {
                     let source = it.source(db)?;
                     let mut body = source.value.body()?.syntax().clone();

@@ -73,7 +73,7 @@ impl TokenStore {
     }

     pub fn iter(self) -> impl Iterator<Item = (TokenId, TokenStaticData)> {
-        self.0.into_iter().enumerate().map(|(i, x)| (TokenId(i), x))
+        self.0.into_iter().enumerate().map(|(id, data)| (TokenId(id), data))
     }
 }

@@ -132,9 +132,9 @@ impl StaticIndex<'_> {
         // hovers
         let sema = hir::Semantics::new(self.db);
         let tokens_or_nodes = sema.parse(file_id).syntax().clone();
-        let tokens = tokens_or_nodes.descendants_with_tokens().filter_map(|x| match x {
+        let tokens = tokens_or_nodes.descendants_with_tokens().filter_map(|it| match it {
            syntax::NodeOrToken::Node(_) => None,
-           syntax::NodeOrToken::Token(x) => Some(x),
+           syntax::NodeOrToken::Token(it) => Some(it),
        });
         let hover_config = HoverConfig {
             links_in_hover: true,

@@ -154,28 +154,29 @@ impl StaticIndex<'_> {
             let range = token.text_range();
             let node = token.parent().unwrap();
             let def = match get_definition(&sema, token.clone()) {
-                Some(x) => x,
+                Some(it) => it,
                 None => continue,
             };
-            let id = if let Some(x) = self.def_map.get(&def) {
-                *x
+            let id = if let Some(it) = self.def_map.get(&def) {
+                *it
             } else {
-                let x = self.tokens.insert(TokenStaticData {
+                let it = self.tokens.insert(TokenStaticData {
                     hover: hover_for_definition(&sema, file_id, def, &node, &hover_config),
-                    definition: def
-                        .try_to_nav(self.db)
-                        .map(|x| FileRange { file_id: x.file_id, range: x.focus_or_full_range() }),
+                    definition: def.try_to_nav(self.db).map(|it| FileRange {
+                        file_id: it.file_id,
+                        range: it.focus_or_full_range(),
+                    }),
                     references: vec![],
                     moniker: current_crate.and_then(|cc| def_to_moniker(self.db, def, cc)),
                 });
-                self.def_map.insert(def, x);
-                x
+                self.def_map.insert(def, it);
+                it
             };
             let token = self.tokens.get_mut(id).unwrap();
             token.references.push(ReferenceData {
                 range: FileRange { range, file_id },
                 is_definition: match def.try_to_nav(self.db) {
-                    Some(x) => x.file_id == file_id && x.focus_or_full_range() == range,
+                    Some(it) => it.file_id == file_id && it.focus_or_full_range() == range,
                     None => false,
                 },
             });

@@ -216,8 +217,8 @@ impl StaticIndex<'_> {
 fn get_definition(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> Option<Definition> {
     for token in sema.descend_into_macros(token) {
         let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops);
-        if let Some(&[x]) = def.as_deref() {
-            return Some(x);
+        if let Some(&[it]) = def.as_deref() {
+            return Some(it);
         }
     }
     None

@@ -233,14 +234,14 @@ mod tests {
     fn check_all_ranges(ra_fixture: &str) {
         let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture);
         let s = StaticIndex::compute(&analysis);
-        let mut range_set: HashSet<_> = ranges.iter().map(|x| x.0).collect();
+        let mut range_set: HashSet<_> = ranges.iter().map(|it| it.0).collect();
         for f in s.files {
             for (range, _) in f.tokens {
-                let x = FileRange { file_id: f.file_id, range };
-                if !range_set.contains(&x) {
-                    panic!("additional range {x:?}");
+                let it = FileRange { file_id: f.file_id, range };
+                if !range_set.contains(&it) {
+                    panic!("additional range {it:?}");
                 }
-                range_set.remove(&x);
+                range_set.remove(&it);
             }
         }
         if !range_set.is_empty() {

@@ -251,17 +252,17 @@ mod tests {
     fn check_definitions(ra_fixture: &str) {
         let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture);
         let s = StaticIndex::compute(&analysis);
-        let mut range_set: HashSet<_> = ranges.iter().map(|x| x.0).collect();
+        let mut range_set: HashSet<_> = ranges.iter().map(|it| it.0).collect();
         for (_, t) in s.tokens.iter() {
-            if let Some(x) = t.definition {
-                if x.range.start() == TextSize::from(0) {
+            if let Some(t) = t.definition {
+                if t.range.start() == TextSize::from(0) {
                     // ignore definitions that are whole of file
                     continue;
                 }
-                if !range_set.contains(&x) {
-                    panic!("additional definition {x:?}");
+                if !range_set.contains(&t) {
+                    panic!("additional definition {t:?}");
                 }
-                range_set.remove(&x);
+                range_set.remove(&t);
             }
         }
         if !range_set.is_empty() {

@@ -48,8 +48,8 @@ struct LsifManager<'a> {
 struct Id(i32);

 impl From<Id> for lsp_types::NumberOrString {
-    fn from(Id(x): Id) -> Self {
-        lsp_types::NumberOrString::Number(x)
+    fn from(Id(it): Id) -> Self {
+        lsp_types::NumberOrString::Number(it)
     }
 }

@@ -88,8 +88,8 @@ impl LsifManager<'_> {
     }

     fn get_token_id(&mut self, id: TokenId) -> Id {
-        if let Some(x) = self.token_map.get(&id) {
-            return *x;
+        if let Some(it) = self.token_map.get(&id) {
+            return *it;
         }
         let result_set_id = self.add_vertex(lsif::Vertex::ResultSet(lsif::ResultSet { key: None }));
         self.token_map.insert(id, result_set_id);

@@ -97,8 +97,8 @@ impl LsifManager<'_> {
     }

     fn get_package_id(&mut self, package_information: PackageInformation) -> Id {
-        if let Some(x) = self.package_map.get(&package_information) {
-            return *x;
+        if let Some(it) = self.package_map.get(&package_information) {
+            return *it;
         }
         let pi = package_information.clone();
         let result_set_id =

@@ -119,8 +119,8 @@ impl LsifManager<'_> {
     }

     fn get_range_id(&mut self, id: FileRange) -> Id {
-        if let Some(x) = self.range_map.get(&id) {
-            return *x;
+        if let Some(it) = self.range_map.get(&id) {
+            return *it;
         }
         let file_id = id.file_id;
         let doc_id = self.get_file_id(file_id);

@@ -142,8 +142,8 @@ impl LsifManager<'_> {
     }

     fn get_file_id(&mut self, id: FileId) -> Id {
-        if let Some(x) = self.file_map.get(&id) {
-            return *x;
+        if let Some(it) = self.file_map.get(&id) {
+            return *it;
         }
         let path = self.vfs.file_path(id);
         let path = path.as_path().unwrap();

@@ -216,18 +216,18 @@ impl LsifManager<'_> {
         }));
         let mut edges = token.references.iter().fold(
             HashMap::<_, Vec<lsp_types::NumberOrString>>::new(),
-            |mut edges, x| {
+            |mut edges, it| {
                 let entry =
-                    edges.entry((x.range.file_id, x.is_definition)).or_insert_with(Vec::new);
-                entry.push((*self.range_map.get(&x.range).unwrap()).into());
+                    edges.entry((it.range.file_id, it.is_definition)).or_insert_with(Vec::new);
+                entry.push((*self.range_map.get(&it.range).unwrap()).into());
                 edges
             },
         );
-        for x in token.references {
-            if let Some(vertices) = edges.remove(&(x.range.file_id, x.is_definition)) {
+        for it in token.references {
+            if let Some(vertices) = edges.remove(&(it.range.file_id, it.is_definition)) {
                 self.add_edge(lsif::Edge::Item(lsif::Item {
-                    document: (*self.file_map.get(&x.range.file_id).unwrap()).into(),
-                    property: Some(if x.is_definition {
+                    document: (*self.file_map.get(&it.range.file_id).unwrap()).into(),
+                    property: Some(if it.is_definition {
                         lsif::ItemKind::Definitions
                     } else {
                         lsif::ItemKind::References