Merge commit '4a8d0f7f565b6df45da5522dd7366a4df3460cd7' into sync-from-ra

Laurențiu Nicola 2024-02-25 09:45:26 +02:00
parent e057365301
commit 9e4ecc60a5
96 changed files with 1830 additions and 705 deletions


@@ -226,6 +226,11 @@ jobs:
       - name: download typos
         run: curl -LsSf https://github.com/crate-ci/typos/releases/download/$TYPOS_VERSION/typos-$TYPOS_VERSION-x86_64-unknown-linux-musl.tar.gz | tar zxf - -C ${CARGO_HOME:-~/.cargo}/bin
 
+      - name: Checkout repository
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ github.event.pull_request.head.sha }}
+
       - name: check for typos
         run: typos


@@ -67,7 +67,7 @@ jobs:
   other_metrics:
     strategy:
       matrix:
-        names: [self, rustc_tests, ripgrep-13.0.0, webrender-2022, diesel-1.4.8, hyper-0.14.18]
+        names: [self, ripgrep-13.0.0, webrender-2022, diesel-1.4.8, hyper-0.14.18]
     runs-on: ubuntu-latest
     needs: [setup_cargo, build_metrics]
@@ -118,11 +118,6 @@ jobs:
         with:
           name: self-${{ github.sha }}
-      - name: Download rustc_tests metrics
-        uses: actions/download-artifact@v3
-        with:
-          name: rustc_tests-${{ github.sha }}
       - name: Download ripgrep-13.0.0 metrics
         uses: actions/download-artifact@v3
         with:
@@ -151,7 +146,7 @@ jobs:
           chmod 700 ~/.ssh
           git clone --depth 1 git@github.com:rust-analyzer/metrics.git
-          jq -s ".[0] * .[1] * .[2] * .[3] * .[4] * .[5] * .[6]" build.json self.json rustc_tests.json ripgrep-13.0.0.json webrender-2022.json diesel-1.4.8.json hyper-0.14.18.json -c >> metrics/metrics.json
+          jq -s ".[0] * .[1] * .[2] * .[3] * .[4] * .[5]" build.json self.json ripgrep-13.0.0.json webrender-2022.json diesel-1.4.8.json hyper-0.14.18.json -c >> metrics/metrics.json
           cd metrics
           git add .
           git -c user.name=Bot -c user.email=dummy@example.com commit --message 📈


@@ -59,7 +59,7 @@ jobs:
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v3
         with:
           fetch-depth: ${{ env.FETCH_DEPTH }}
@@ -78,9 +78,9 @@ jobs:
           rustup component add rust-src
       - name: Install Node.js
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@v3
         with:
-          node-version: 18
+          node-version: 16
       - name: Update apt repositories
         if: matrix.target == 'aarch64-unknown-linux-gnu' || matrix.target == 'arm-unknown-linux-gnueabihf'

Cargo.lock

@@ -1709,6 +1709,7 @@ dependencies = [
  "dissimilar",
  "expect-test",
  "indexmap",
+ "itertools",
  "linked-hash-map",
  "lock_api",
  "oorandom",


@@ -6,8 +6,8 @@ use itertools::Itertools;
 use crate::{
     hir::{
-        Array, BindingAnnotation, CaptureBy, ClosureKind, Literal, LiteralOrConst,
-        Movability, Statement,
+        Array, BindingAnnotation, CaptureBy, ClosureKind, Literal, LiteralOrConst, Movability,
+        Statement,
     },
     pretty::{print_generic_args, print_path, print_type_ref},
     type_ref::TypeRef,


@@ -40,7 +40,7 @@ pub struct StructData {
 }
 
 bitflags! {
-    #[derive(Debug, Clone, PartialEq, Eq)]
+    #[derive(Debug, Copy, Clone, PartialEq, Eq)]
     pub struct StructFlags: u8 {
         const NO_FLAGS = 0;
         /// Indicates whether the struct is `PhantomData`.
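
Since the hunk above only changes a derive, here is a minimal standalone sketch (assuming the `bitflags` crate, which the hunk itself uses) of what deriving `Copy` on a flags type buys: flag values can be passed around by value without clones or moves. The flag names below mirror `StructFlags` but the snippet is illustrative, not rust-analyzer's code.

```rust
use bitflags::bitflags;

bitflags! {
    // Deriving `Copy` lets flag sets be handed around by value without `.clone()`.
    #[derive(Debug, Copy, Clone, PartialEq, Eq)]
    struct StructFlags: u8 {
        const NO_FLAGS = 0;
        const IS_PHANTOM_DATA = 1 << 0;
        const IS_FUNDAMENTAL = 1 << 1;
    }
}

fn main() {
    let flags = StructFlags::IS_FUNDAMENTAL | StructFlags::IS_PHANTOM_DATA;
    let copied = flags; // a plain copy, not a move, thanks to `Copy`
    assert!(copied.contains(StructFlags::IS_FUNDAMENTAL));
    assert!(flags.contains(StructFlags::IS_PHANTOM_DATA)); // `flags` is still usable
}
```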


@@ -477,7 +477,7 @@ mod tests {
     use expect_test::{expect, Expect};
     use test_fixture::WithFixture;
 
-    use crate::{db::DefDatabase, test_db::TestDB, ItemContainerId, Lookup};
+    use crate::{test_db::TestDB, ItemContainerId, Lookup};
 
     use super::*;


@@ -44,13 +44,13 @@ use std::{
     ops::{Index, Range},
 };
 
-use ast::{AstNode, HasName, StructKind};
+use ast::{AstNode, StructKind};
 use base_db::CrateId;
 use either::Either;
 use hir_expand::{
     ast_id_map::{AstIdNode, FileAstId},
     attrs::RawAttrs,
-    name::{name, AsName, Name},
+    name::Name,
     ExpandTo, HirFileId, InFile,
 };
 use intern::Interned;
@@ -67,7 +67,7 @@ use crate::{
     attr::Attrs,
     db::DefDatabase,
     generics::{GenericParams, LifetimeParamData, TypeOrConstParamData},
-    path::{path, AssociatedTypeBinding, GenericArgs, ImportAlias, ModPath, Path, PathKind},
+    path::{GenericArgs, ImportAlias, ModPath, Path, PathKind},
     type_ref::{Mutability, TraitRef, TypeBound, TypeRef},
     visibility::{RawVisibility, VisibilityExplicitness},
     BlockId, Lookup,


@@ -2,17 +2,33 @@
 use std::collections::hash_map::Entry;
 
-use hir_expand::{ast_id_map::AstIdMap, span_map::SpanMapRef};
-use syntax::ast::{HasModuleItem, HasTypeBounds, IsString};
+use hir_expand::{
+    ast_id_map::AstIdMap, mod_path::path, name, name::AsName, span_map::SpanMapRef, HirFileId,
+};
+use la_arena::Arena;
+use syntax::{
+    ast::{self, HasModuleItem, HasName, HasTypeBounds, IsString},
+    AstNode,
+};
+use triomphe::Arc;
 
 use crate::{
-    generics::{GenericParamsCollector, TypeParamData, TypeParamProvenance},
-    type_ref::{LifetimeRef, TraitBoundModifier},
+    db::DefDatabase,
+    generics::{GenericParams, GenericParamsCollector, TypeParamData, TypeParamProvenance},
+    item_tree::{
+        AssocItem, AttrOwner, Const, Either, Enum, ExternBlock, ExternCrate, Field, FieldAstId,
+        Fields, FileItemTreeId, FnFlags, Function, GenericArgs, Idx, IdxRange, Impl, ImportAlias,
+        Interned, ItemTree, ItemTreeData, ItemTreeNode, Macro2, MacroCall, MacroRules, Mod,
+        ModItem, ModKind, ModPath, Mutability, Name, Param, ParamAstId, Path, Range, RawAttrs,
+        RawIdx, RawVisibilityId, Static, Struct, StructKind, Trait, TraitAlias, TypeAlias, Union,
+        Use, UseTree, UseTreeKind, Variant,
+    },
+    path::AssociatedTypeBinding,
+    type_ref::{LifetimeRef, TraitBoundModifier, TraitRef, TypeBound, TypeRef},
+    visibility::RawVisibility,
     LocalLifetimeParamId, LocalTypeOrConstParamId,
 };
 
-use super::*;
-
 fn id<N: ItemTreeNode>(index: Idx<N>) -> FileItemTreeId<N> {
     FileItemTreeId(index)
 }


@@ -1,16 +1,22 @@
 //! `ItemTree` debug printer.
 
-use std::fmt::Write;
+use std::fmt::{self, Write};
 
 use span::ErasedFileAstId;
 
 use crate::{
-    generics::{WherePredicate, WherePredicateTypeTarget},
+    generics::{TypeOrConstParamData, WherePredicate, WherePredicateTypeTarget},
+    item_tree::{
+        AttrOwner, Const, DefDatabase, Enum, ExternBlock, ExternCrate, Field, FieldAstId, Fields,
+        FileItemTreeId, FnFlags, Function, GenericParams, Impl, Interned, ItemTree, Macro2,
+        MacroCall, MacroRules, Mod, ModItem, ModKind, Param, ParamAstId, Path, RawAttrs,
+        RawVisibilityId, Static, Struct, Trait, TraitAlias, TypeAlias, TypeBound, TypeRef, Union,
+        Use, UseTree, UseTreeKind, Variant,
+    },
     pretty::{print_path, print_type_bounds, print_type_ref},
+    visibility::RawVisibility,
 };
 
-use super::*;
-
 pub(super) fn print_item_tree(db: &dyn DefDatabase, tree: &ItemTree) -> String {
     let mut p = Printer { db, tree, buf: String::new(), indent_level: 0, needs_indent: true };


@@ -2446,7 +2446,7 @@ mod tests {
     use base_db::SourceDatabase;
     use test_fixture::WithFixture;
 
-    use crate::{db::DefDatabase, test_db::TestDB};
+    use crate::test_db::TestDB;
 
     use super::*;


@@ -1,10 +1,7 @@
 use expect_test::expect;
-use test_fixture::WithFixture;
 use itertools::Itertools;
 
-use crate::nameres::tests::check;
-
 use super::*;
 
 #[test]


@@ -1,7 +1,7 @@
 //! The implementation of `RustIrDatabase` for Chalk, which provides information
 //! about the code that Chalk needs.
 use core::ops;
-use std::{iter, sync::Arc};
+use std::{iter, ops::ControlFlow, sync::Arc};
 
 use tracing::debug;
@@ -10,9 +10,10 @@ use chalk_solve::rust_ir::{self, OpaqueTyDatumBound, WellKnownTrait};
 
 use base_db::CrateId;
 use hir_def::{
+    data::adt::StructFlags,
     hir::Movability,
     lang_item::{LangItem, LangItemTarget},
-    AssocItemId, BlockId, GenericDefId, HasModule, ItemContainerId, Lookup, TypeAliasId,
+    AssocItemId, BlockId, GenericDefId, HasModule, ItemContainerId, Lookup, TypeAliasId, VariantId,
 };
 use hir_expand::name::name;
@@ -33,7 +34,7 @@ use crate::{
 
 pub(crate) type AssociatedTyDatum = chalk_solve::rust_ir::AssociatedTyDatum<Interner>;
 pub(crate) type TraitDatum = chalk_solve::rust_ir::TraitDatum<Interner>;
-pub(crate) type StructDatum = chalk_solve::rust_ir::AdtDatum<Interner>;
+pub(crate) type AdtDatum = chalk_solve::rust_ir::AdtDatum<Interner>;
 pub(crate) type ImplDatum = chalk_solve::rust_ir::ImplDatum<Interner>;
 pub(crate) type OpaqueTyDatum = chalk_solve::rust_ir::OpaqueTyDatum<Interner>;
@@ -53,8 +54,8 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
     fn trait_datum(&self, trait_id: TraitId) -> Arc<TraitDatum> {
         self.db.trait_datum(self.krate, trait_id)
     }
-    fn adt_datum(&self, struct_id: AdtId) -> Arc<StructDatum> {
-        self.db.struct_datum(self.krate, struct_id)
+    fn adt_datum(&self, struct_id: AdtId) -> Arc<AdtDatum> {
+        self.db.adt_datum(self.krate, struct_id)
     }
     fn adt_repr(&self, _struct_id: AdtId) -> Arc<rust_ir::AdtRepr<Interner>> {
         // FIXME: keep track of these
@@ -136,81 +137,92 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
             _ => self_ty_fp.as_ref().map(std::slice::from_ref).unwrap_or(&[]),
         };
 
-        let trait_module = trait_.module(self.db.upcast());
-        let type_module = match self_ty_fp {
-            Some(TyFingerprint::Adt(adt_id)) => Some(adt_id.module(self.db.upcast())),
-            Some(TyFingerprint::ForeignType(type_id)) => {
-                Some(from_foreign_def_id(type_id).module(self.db.upcast()))
-            }
-            Some(TyFingerprint::Dyn(trait_id)) => Some(trait_id.module(self.db.upcast())),
-            _ => None,
-        };
-
-        let mut def_blocks =
-            [trait_module.containing_block(), type_module.and_then(|it| it.containing_block())];
-
-        // Note: Since we're using impls_for_trait, only impls where the trait
-        // can be resolved should ever reach Chalk. impl_datum relies on that
-        // and will panic if the trait can't be resolved.
-        let in_deps = self.db.trait_impls_in_deps(self.krate);
-        let in_self = self.db.trait_impls_in_crate(self.krate);
-
-        let block_impls = iter::successors(self.block, |&block_id| {
-            cov_mark::hit!(block_local_impls);
-            self.db.block_def_map(block_id).parent().and_then(|module| module.containing_block())
-        })
-        .inspect(|&block_id| {
-            // make sure we don't search the same block twice
-            def_blocks.iter_mut().for_each(|block| {
-                if *block == Some(block_id) {
-                    *block = None;
-                }
-            });
-        })
-        .filter_map(|block_id| self.db.trait_impls_in_block(block_id));
-
         let id_to_chalk = |id: hir_def::ImplId| id.to_chalk(self.db);
+
         let mut result = vec![];
-        match fps {
-            [] => {
-                debug!("Unrestricted search for {:?} impls...", trait_);
-                let mut f = |impls: &TraitImpls| {
-                    result.extend(impls.for_trait(trait_).map(id_to_chalk));
-                };
-                f(&in_self);
-                in_deps.iter().map(ops::Deref::deref).for_each(&mut f);
-                block_impls.for_each(|it| f(&it));
-                def_blocks
-                    .into_iter()
-                    .flatten()
-                    .filter_map(|it| self.db.trait_impls_in_block(it))
-                    .for_each(|it| f(&it));
-            }
-            fps => {
-                let mut f =
-                    |impls: &TraitImpls| {
-                        result.extend(fps.iter().flat_map(|fp| {
-                            impls.for_trait_and_self_ty(trait_, *fp).map(id_to_chalk)
-                        }));
-                    };
-                f(&in_self);
-                in_deps.iter().map(ops::Deref::deref).for_each(&mut f);
-                block_impls.for_each(|it| f(&it));
-                def_blocks
-                    .into_iter()
-                    .flatten()
-                    .filter_map(|it| self.db.trait_impls_in_block(it))
-                    .for_each(|it| f(&it));
-            }
-        }
+        if fps.is_empty() {
+            debug!("Unrestricted search for {:?} impls...", trait_);
+            self.for_trait_impls(trait_, self_ty_fp, |impls| {
+                result.extend(impls.for_trait(trait_).map(id_to_chalk));
+                ControlFlow::Continue(())
+            })
+        } else {
+            self.for_trait_impls(trait_, self_ty_fp, |impls| {
+                result.extend(
+                    fps.iter().flat_map(move |fp| {
+                        impls.for_trait_and_self_ty(trait_, *fp).map(id_to_chalk)
+                    }),
+                );
+                ControlFlow::Continue(())
+            })
+        };
 
         debug!("impls_for_trait returned {} impls", result.len());
         result
     }
     fn impl_provided_for(&self, auto_trait_id: TraitId, kind: &chalk_ir::TyKind<Interner>) -> bool {
         debug!("impl_provided_for {:?}, {:?}", auto_trait_id, kind);
-        false // FIXME
+
+        let trait_id = from_chalk_trait_id(auto_trait_id);
+        let self_ty = kind.clone().intern(Interner);
+        // We cannot filter impls by `TyFingerprint` for the following types:
+        let self_ty_fp = match kind {
+            // because we need to find any impl whose Self type is a ref with the same mutability
+            // (we don't care about the inner type).
+            TyKind::Ref(..) => None,
+            // because we need to find any impl whose Self type is a tuple with the same arity.
+            TyKind::Tuple(..) => None,
+            _ => TyFingerprint::for_trait_impl(&self_ty),
+        };
+
+        let check_kind = |impl_id| {
+            let impl_self_ty = self.db.impl_self_ty(impl_id);
+            // NOTE(skip_binders): it's safe to skip binders here as we don't check substitutions.
+            let impl_self_kind = impl_self_ty.skip_binders().kind(Interner);
+
+            match (kind, impl_self_kind) {
+                (TyKind::Adt(id_a, _), TyKind::Adt(id_b, _)) => id_a == id_b,
+                (TyKind::AssociatedType(id_a, _), TyKind::AssociatedType(id_b, _)) => id_a == id_b,
+                (TyKind::Scalar(scalar_a), TyKind::Scalar(scalar_b)) => scalar_a == scalar_b,
+                (TyKind::Error, TyKind::Error)
+                | (TyKind::Str, TyKind::Str)
+                | (TyKind::Slice(_), TyKind::Slice(_))
+                | (TyKind::Never, TyKind::Never)
+                | (TyKind::Array(_, _), TyKind::Array(_, _)) => true,
+                (TyKind::Tuple(arity_a, _), TyKind::Tuple(arity_b, _)) => arity_a == arity_b,
+                (TyKind::OpaqueType(id_a, _), TyKind::OpaqueType(id_b, _)) => id_a == id_b,
+                (TyKind::FnDef(id_a, _), TyKind::FnDef(id_b, _)) => id_a == id_b,
+                (TyKind::Ref(id_a, _, _), TyKind::Ref(id_b, _, _))
+                | (TyKind::Raw(id_a, _), TyKind::Raw(id_b, _)) => id_a == id_b,
+                (TyKind::Closure(id_a, _), TyKind::Closure(id_b, _)) => id_a == id_b,
+                (TyKind::Coroutine(id_a, _), TyKind::Coroutine(id_b, _))
+                | (TyKind::CoroutineWitness(id_a, _), TyKind::CoroutineWitness(id_b, _)) => {
+                    id_a == id_b
+                }
+                (TyKind::Foreign(id_a), TyKind::Foreign(id_b)) => id_a == id_b,
+                (_, _) => false,
+            }
+        };
+
+        if let Some(fp) = self_ty_fp {
+            self.for_trait_impls(trait_id, self_ty_fp, |impls| {
+                match impls.for_trait_and_self_ty(trait_id, fp).any(check_kind) {
+                    true => ControlFlow::Break(()),
+                    false => ControlFlow::Continue(()),
+                }
+            })
+        } else {
+            self.for_trait_impls(trait_id, self_ty_fp, |impls| {
+                match impls.for_trait(trait_id).any(check_kind) {
+                    true => ControlFlow::Break(()),
+                    false => ControlFlow::Continue(()),
+                }
+            })
+        }
+        .is_break()
     }
 
     fn associated_ty_value(&self, id: AssociatedTyValueId) -> Arc<AssociatedTyValue> {
         self.db.associated_ty_value(self.krate, id)
     }
@@ -489,6 +501,59 @@
         }
     }
 }
 
+impl<'a> ChalkContext<'a> {
+    fn for_trait_impls(
+        &self,
+        trait_id: hir_def::TraitId,
+        self_ty_fp: Option<TyFingerprint>,
+        mut f: impl FnMut(&TraitImpls) -> ControlFlow<()>,
+    ) -> ControlFlow<()> {
+        // Note: Since we're using `impls_for_trait` and `impl_provided_for`,
+        // only impls where the trait can be resolved should ever reach Chalk.
+        // `impl_datum` relies on that and will panic if the trait can't be resolved.
+        let in_deps = self.db.trait_impls_in_deps(self.krate);
+        let in_self = self.db.trait_impls_in_crate(self.krate);
+        let trait_module = trait_id.module(self.db.upcast());
+        let type_module = match self_ty_fp {
+            Some(TyFingerprint::Adt(adt_id)) => Some(adt_id.module(self.db.upcast())),
+            Some(TyFingerprint::ForeignType(type_id)) => {
+                Some(from_foreign_def_id(type_id).module(self.db.upcast()))
+            }
+            Some(TyFingerprint::Dyn(trait_id)) => Some(trait_id.module(self.db.upcast())),
+            _ => None,
+        };
+        let mut def_blocks =
+            [trait_module.containing_block(), type_module.and_then(|it| it.containing_block())];
+        let block_impls = iter::successors(self.block, |&block_id| {
+            cov_mark::hit!(block_local_impls);
+            self.db.block_def_map(block_id).parent().and_then(|module| module.containing_block())
+        })
+        .inspect(|&block_id| {
+            // make sure we don't search the same block twice
+            def_blocks.iter_mut().for_each(|block| {
+                if *block == Some(block_id) {
+                    *block = None;
+                }
+            });
+        })
+        .filter_map(|block_id| self.db.trait_impls_in_block(block_id));
+        f(&in_self)?;
+        for it in in_deps.iter().map(ops::Deref::deref) {
+            f(it)?;
+        }
+        for it in block_impls {
+            f(&it)?;
+        }
+        for it in def_blocks.into_iter().flatten().filter_map(|it| self.db.trait_impls_in_block(it))
+        {
+            f(&it)?;
+        }
+        ControlFlow::Continue(())
+    }
+}
+
 impl chalk_ir::UnificationDatabase<Interner> for &dyn HirDatabase {
     fn fn_def_variance(
         &self,
@@ -590,7 +655,7 @@ pub(crate) fn trait_datum_query(
         coinductive: false, // only relevant for Chalk testing
         // FIXME: set these flags correctly
         marker: false,
-        fundamental: false,
+        fundamental: trait_data.fundamental,
     };
     let where_clauses = convert_where_clauses(db, trait_.into(), &bound_vars);
     let associated_ty_ids = trait_data.associated_types().map(to_assoc_type_id).collect();
@@ -649,35 +714,75 @@ fn lang_item_from_well_known_trait(trait_: WellKnownTrait) -> LangItem {
     }
 }
 
-pub(crate) fn struct_datum_query(
+pub(crate) fn adt_datum_query(
     db: &dyn HirDatabase,
     krate: CrateId,
-    struct_id: AdtId,
-) -> Arc<StructDatum> {
-    debug!("struct_datum {:?}", struct_id);
-    let chalk_ir::AdtId(adt_id) = struct_id;
+    chalk_ir::AdtId(adt_id): AdtId,
+) -> Arc<AdtDatum> {
+    debug!("adt_datum {:?}", adt_id);
     let generic_params = generics(db.upcast(), adt_id.into());
-    let upstream = adt_id.module(db.upcast()).krate() != krate;
-    let where_clauses = {
-        let generic_params = generics(db.upcast(), adt_id.into());
-        let bound_vars = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST);
-        convert_where_clauses(db, adt_id.into(), &bound_vars)
+    let bound_vars_subst = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST);
+    let where_clauses = convert_where_clauses(db, adt_id.into(), &bound_vars_subst);
+
+    let (fundamental, phantom_data) = match adt_id {
+        hir_def::AdtId::StructId(s) => {
+            let flags = db.struct_data(s).flags;
+            (
+                flags.contains(StructFlags::IS_FUNDAMENTAL),
+                flags.contains(StructFlags::IS_PHANTOM_DATA),
+            )
+        }
+        // FIXME set fundamental flags correctly
+        hir_def::AdtId::UnionId(_) => (false, false),
+        hir_def::AdtId::EnumId(_) => (false, false),
     };
     let flags = rust_ir::AdtFlags {
-        upstream,
-        // FIXME set fundamental and phantom_data flags correctly
-        fundamental: false,
-        phantom_data: false,
+        upstream: adt_id.module(db.upcast()).krate() != krate,
+        fundamental,
+        phantom_data,
     };
-    // FIXME provide enum variants properly (for auto traits)
-    let variant = rust_ir::AdtVariantDatum {
-        fields: Vec::new(), // FIXME add fields (only relevant for auto traits),
+
+    #[cfg(FALSE)]
+    // this slows down rust-analyzer by quite a bit unfortunately, so enabling this is currently not worth it
+    let variant_id_to_fields = |id: VariantId| {
+        let variant_data = &id.variant_data(db.upcast());
+        let fields = if variant_data.fields().is_empty() {
+            vec![]
+        } else {
+            let field_types = db.field_types(id);
+            variant_data
+                .fields()
+                .iter()
+                .map(|(idx, _)| field_types[idx].clone().substitute(Interner, &bound_vars_subst))
+                .filter(|it| !it.contains_unknown())
+                .collect()
+        };
+        rust_ir::AdtVariantDatum { fields }
+    };
+    let variant_id_to_fields = |_: VariantId| rust_ir::AdtVariantDatum { fields: vec![] };
+
+    let (kind, variants) = match adt_id {
+        hir_def::AdtId::StructId(id) => {
+            (rust_ir::AdtKind::Struct, vec![variant_id_to_fields(id.into())])
+        }
+        hir_def::AdtId::EnumId(id) => {
+            let variants = db
+                .enum_data(id)
+                .variants
+                .iter()
+                .map(|&(variant_id, _)| variant_id_to_fields(variant_id.into()))
+                .collect();
+            (rust_ir::AdtKind::Enum, variants)
+        }
+        hir_def::AdtId::UnionId(id) => {
+            (rust_ir::AdtKind::Union, vec![variant_id_to_fields(id.into())])
+        }
     };
-    let struct_datum_bound = rust_ir::AdtDatumBound { variants: vec![variant], where_clauses };
-    let struct_datum = StructDatum {
-        // FIXME set ADT kind
-        kind: rust_ir::AdtKind::Struct,
-        id: struct_id,
+
+    let struct_datum_bound = rust_ir::AdtDatumBound { variants, where_clauses };
+    let struct_datum = AdtDatum {
+        kind,
+        id: chalk_ir::AdtId(adt_id),
         binders: make_binders(db, &generic_params, struct_datum_bound),
         flags,
     };
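
A note on the refactor above: both rewritten methods now share `for_trait_impls`, which drives a callback over every relevant `TraitImpls` set and threads `std::ops::ControlFlow` through it, so `impls_for_trait` can keep collecting (`Continue`) while `impl_provided_for` can stop at the first hit (`Break`, checked via `.is_break()`). A minimal standalone sketch of that pattern, not rust-analyzer code:

```rust
use std::ops::ControlFlow;

// Visit each set in turn; the callback decides whether to keep going.
fn visit_all<T>(
    sets: &[Vec<T>],
    mut f: impl FnMut(&[T]) -> ControlFlow<()>,
) -> ControlFlow<()> {
    for set in sets {
        f(set)?; // `?` on ControlFlow propagates Break, short-circuiting the loop
    }
    ControlFlow::Continue(())
}

fn main() {
    let sets = vec![vec![1, 2], vec![3, 4], vec![5]];
    let mut seen = Vec::new();
    let result = visit_all(&sets, |set| {
        seen.extend_from_slice(set);
        if set.contains(&3) { ControlFlow::Break(()) } else { ControlFlow::Continue(()) }
    });
    assert!(result.is_break()); // the search stopped early
    assert_eq!(seen, [1, 2, 3, 4]); // the last set was never visited
}
```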


@@ -90,7 +90,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
     #[salsa::cycle(crate::lower::ty_recover)]
     fn ty(&self, def: TyDefId) -> Binders<Ty>;
 
-    /// Returns the type of the value of the given constant, or `None` if the the `ValueTyDefId` is
+    /// Returns the type of the value of the given constant, or `None` if the `ValueTyDefId` is
     /// a `StructId` or `EnumVariantId` with a record constructor.
     #[salsa::invoke(crate::lower::value_ty_query)]
     fn value_ty(&self, def: ValueTyDefId) -> Option<Binders<Ty>>;
@@ -220,12 +220,12 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
         trait_id: chalk_db::TraitId,
     ) -> sync::Arc<chalk_db::TraitDatum>;
 
-    #[salsa::invoke(chalk_db::struct_datum_query)]
-    fn struct_datum(
+    #[salsa::invoke(chalk_db::adt_datum_query)]
+    fn adt_datum(
         &self,
         krate: CrateId,
         struct_id: chalk_db::AdtId,
-    ) -> sync::Arc<chalk_db::StructDatum>;
+    ) -> sync::Arc<chalk_db::AdtDatum>;
 
     #[salsa::invoke(chalk_db::impl_datum_query)]
     fn impl_datum(


@@ -12,6 +12,8 @@ use hir_expand::name;
 use itertools::Itertools;
 use rustc_hash::FxHashSet;
 use rustc_pattern_analysis::usefulness::{compute_match_usefulness, ValidityConstraint};
+use syntax::{ast, AstNode};
+use tracing::debug;
 use triomphe::Arc;
 use typed_arena::Arena;
@@ -44,6 +46,10 @@ pub enum BodyValidationDiagnostic {
         match_expr: ExprId,
         uncovered_patterns: String,
     },
+    NonExhaustiveLet {
+        pat: PatId,
+        uncovered_patterns: String,
+    },
     RemoveTrailingReturn {
         return_expr: ExprId,
     },
@@ -57,7 +63,8 @@ impl BodyValidationDiagnostic {
         let _p =
             tracing::span!(tracing::Level::INFO, "BodyValidationDiagnostic::collect").entered();
         let infer = db.infer(owner);
-        let mut validator = ExprValidator::new(owner, infer);
+        let body = db.body(owner);
+        let mut validator = ExprValidator { owner, body, infer, diagnostics: Vec::new() };
         validator.validate_body(db);
         validator.diagnostics
     }
@@ -65,18 +72,16 @@ impl BodyValidationDiagnostic {
 
 struct ExprValidator {
     owner: DefWithBodyId,
+    body: Arc<Body>,
     infer: Arc<InferenceResult>,
-    pub(super) diagnostics: Vec<BodyValidationDiagnostic>,
+    diagnostics: Vec<BodyValidationDiagnostic>,
 }
 
 impl ExprValidator {
-    fn new(owner: DefWithBodyId, infer: Arc<InferenceResult>) -> ExprValidator {
-        ExprValidator { owner, infer, diagnostics: Vec::new() }
-    }
-
     fn validate_body(&mut self, db: &dyn HirDatabase) {
-        let body = db.body(self.owner);
         let mut filter_map_next_checker = None;
+        // we'll pass &mut self while iterating over body.exprs, so they need to be disjoint
+        let body = Arc::clone(&self.body);
 
         if matches!(self.owner, DefWithBodyId::FunctionId(_)) {
             self.check_for_trailing_return(body.body_expr, &body);
@@ -104,7 +109,10 @@ impl ExprValidator {
                     self.check_for_trailing_return(*body_expr, &body);
                 }
                 Expr::If { .. } => {
-                    self.check_for_unnecessary_else(id, expr, &body);
+                    self.check_for_unnecessary_else(id, expr, db);
+                }
+                Expr::Block { .. } => {
+                    self.validate_block(db, expr);
                 }
                 _ => {}
             }
@@ -162,8 +170,6 @@ impl ExprValidator {
         arms: &[MatchArm],
         db: &dyn HirDatabase,
     ) {
-        let body = db.body(self.owner);
-
         let scrut_ty = &self.infer[scrutinee_expr];
         if scrut_ty.is_unknown() {
             return;
@@ -191,12 +197,12 @@ impl ExprValidator {
                     .as_reference()
                     .map(|(match_expr_ty, ..)| match_expr_ty == pat_ty)
                     .unwrap_or(false))
-                && types_of_subpatterns_do_match(arm.pat, &body, &self.infer)
+                && types_of_subpatterns_do_match(arm.pat, &self.body, &self.infer)
             {
                 // If we had a NotUsefulMatchArm diagnostic, we could
                 // check the usefulness of each pattern as we added it
                 // to the matrix here.
-                let pat = self.lower_pattern(&cx, arm.pat, db, &body, &mut has_lowering_errors);
+                let pat = self.lower_pattern(&cx, arm.pat, db, &mut has_lowering_errors);
                 let m_arm = pat_analysis::MatchArm {
                     pat: pattern_arena.alloc(pat),
                     has_guard: arm.guard.is_some(),
@@ -234,20 +240,63 @@ impl ExprValidator {
         if !witnesses.is_empty() {
             self.diagnostics.push(BodyValidationDiagnostic::MissingMatchArms {
                 match_expr,
-                uncovered_patterns: missing_match_arms(&cx, scrut_ty, witnesses, arms),
+                uncovered_patterns: missing_match_arms(&cx, scrut_ty, witnesses, m_arms.is_empty()),
             });
         }
     }
 
+    fn validate_block(&mut self, db: &dyn HirDatabase, expr: &Expr) {
+        let Expr::Block { statements, .. } = expr else { return };
+        let pattern_arena = Arena::new();
+        let cx = MatchCheckCtx::new(self.owner.module(db.upcast()), self.owner, db);
+        for stmt in &**statements {
+            let &Statement::Let { pat, initializer, else_branch: None, .. } = stmt else {
+                continue;
+            };
+            let Some(initializer) = initializer else { continue };
+            let ty = &self.infer[initializer];
+
+            let mut have_errors = false;
+            let deconstructed_pat = self.lower_pattern(&cx, pat, db, &mut have_errors);
+            let match_arm = rustc_pattern_analysis::MatchArm {
+                pat: pattern_arena.alloc(deconstructed_pat),
+                has_guard: false,
+                arm_data: (),
+            };
+            if have_errors {
+                continue;
+            }
+
+            let report = match compute_match_usefulness(
+                &cx,
+                &[match_arm],
+                ty.clone(),
+                ValidityConstraint::ValidOnly,
+            ) {
+                Ok(v) => v,
+                Err(e) => {
+                    debug!(?e, "match usefulness error");
+                    continue;
+                }
+            };
+            let witnesses = report.non_exhaustiveness_witnesses;
+            if !witnesses.is_empty() {
+                self.diagnostics.push(BodyValidationDiagnostic::NonExhaustiveLet {
+                    pat,
+                    uncovered_patterns: missing_match_arms(&cx, ty, witnesses, false),
+                });
+            }
+        }
+    }
+
     fn lower_pattern<'p>(
         &self,
         cx: &MatchCheckCtx<'p>,
         pat: PatId,
         db: &dyn HirDatabase,
-        body: &Body,
         have_errors: &mut bool,
     ) -> DeconstructedPat<'p> {
-        let mut patcx = match_check::PatCtxt::new(db, &self.infer, body);
+        let mut patcx = match_check::PatCtxt::new(db, &self.infer, &self.body);
         let pattern = patcx.lower_pattern(pat);
         let pattern = cx.lower_pat(&pattern);
         if !patcx.errors.is_empty() {
@@ -288,12 +337,12 @@ impl ExprValidator {
         }
     }
 
-    fn check_for_unnecessary_else(&mut self, id: ExprId, expr: &Expr, body: &Body) {
+    fn check_for_unnecessary_else(&mut self, id: ExprId, expr: &Expr, db: &dyn HirDatabase) {
         if let Expr::If { condition: _, then_branch, else_branch } = expr {
             if else_branch.is_none() {
                 return;
             }
-            if let Expr::Block { statements, tail, .. } = &body.exprs[*then_branch] {
+            if let Expr::Block { statements, tail, .. } = &self.body.exprs[*then_branch] {
                 let last_then_expr = tail.or_else(|| match statements.last()? {
                     Statement::Expr { expr, .. } => Some(*expr),
                     _ => None,
@@ -301,6 +350,36 @@ impl ExprValidator {
                 if let Some(last_then_expr) = last_then_expr {
                     let last_then_expr_ty = &self.infer[last_then_expr];
                     if last_then_expr_ty.is_never() {
+                        // Only look at sources if the then branch diverges and we have an else branch.
+                        let (_, source_map) = db.body_with_source_map(self.owner);
+                        let Ok(source_ptr) = source_map.expr_syntax(id) else {
+                            return;
+                        };
+                        let root = source_ptr.file_syntax(db.upcast());
+                        let ast::Expr::IfExpr(if_expr) = source_ptr.value.to_node(&root) else {
+                            return;
+                        };
+                        let mut top_if_expr = if_expr;
+                        loop {
+                            let parent = top_if_expr.syntax().parent();
+                            let has_parent_expr_stmt_or_stmt_list =
+                                parent.as_ref().map_or(false, |node| {
+                                    ast::ExprStmt::can_cast(node.kind())
+                                        | ast::StmtList::can_cast(node.kind())
+                                });
+                            if has_parent_expr_stmt_or_stmt_list {
+                                // Only emit diagnostic if parent or direct ancestor is either
+                                // an expr stmt or a stmt list.
+                                break;
+                            }
+                            let Some(parent_if_expr) = parent.and_then(ast::IfExpr::cast) else {
+                                // Bail if parent is neither an if expr, an expr stmt nor a stmt list.
+                                return;
+                            };
+                            // Check parent if expr.
+                            top_if_expr = parent_if_expr;
+                        }
+
                         self.diagnostics
                             .push(BodyValidationDiagnostic::RemoveUnnecessaryElse { if_expr: id })
                     }
@@ -448,7 +527,7 @@ fn missing_match_arms<'p>(
     cx: &MatchCheckCtx<'p>,
     scrut_ty: &Ty,
     witnesses: Vec<WitnessPat<'p>>,
-    arms: &[MatchArm],
+    arms_is_empty: bool,
 ) -> String {
     struct DisplayWitness<'a, 'p>(&'a WitnessPat<'p>, &'a MatchCheckCtx<'p>);
     impl fmt::Display for DisplayWitness<'_, '_> {
@@ -463,7 +542,7 @@ fn missing_match_arms<'p>(
         Some((AdtId::EnumId(e), _)) => !cx.db.enum_data(e).variants.is_empty(),
         _ => false,
     };
-    if arms.is_empty() && !non_empty_enum {
+    if arms_is_empty && !non_empty_enum {
         format!("type `{}` is non-empty", scrut_ty.display(cx.db))
     } else {
         let pat_display = |witness| DisplayWitness(witness, cx);
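
The loop added to `check_for_unnecessary_else` climbs parent `if` expressions so the diagnostic only fires when the outermost `if` sits in statement position. A plain-Rust sketch of the two situations the check distinguishes (illustrative only, not part of the diff):

```rust
fn f(b: bool) -> i32 {
    // Statement position: the then-branch diverges, so the `else` below is
    // the kind the diagnostic flags as unnecessary.
    if b {
        return 1;
    } else {
        // this body could be flattened into code after the `if`
    }

    // Value position: both branches produce the value, so no diagnostic applies.
    let x = if b { 2 } else { 3 };
    x
}

fn main() {
    println!("{}", f(false));
}
```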


@@ -221,6 +221,9 @@ pub enum InferenceDiagnostic {
     UnresolvedAssocItem {
         id: ExprOrPatId,
     },
+    UnresolvedIdent {
+        expr: ExprId,
+    },
     // FIXME: This should be emitted in body lowering
     BreakOutsideOfLoop {
         expr: ExprId,


@@ -13,7 +13,7 @@ use hir_def::{
         ArithOp, Array, BinaryOp, ClosureKind, Expr, ExprId, LabelId, Literal, Statement, UnaryOp,
     },
     lang_item::{LangItem, LangItemTarget},
-    path::{GenericArg, GenericArgs},
+    path::{GenericArg, GenericArgs, Path},
     BlockId, ConstParamId, FieldId, ItemContainerId, Lookup, TupleFieldId, TupleId,
 };
 use hir_expand::name::{name, Name};
@@ -439,7 +439,17 @@ impl InferenceContext<'_> {
             }
             Expr::Path(p) => {
                 let g = self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, tgt_expr);
-                let ty = self.infer_path(p, tgt_expr.into()).unwrap_or_else(|| self.err_ty());
+                let ty = match self.infer_path(p, tgt_expr.into()) {
+                    Some(ty) => ty,
+                    None => {
+                        if matches!(p, Path::Normal { mod_path, .. } if mod_path.is_ident()) {
+                            self.push_diagnostic(InferenceDiagnostic::UnresolvedIdent {
+                                expr: tgt_expr,
+                            });
+                        }
+                        self.err_ty()
+                    }
+                };
                 self.resolver.reset_to_guard(g);
                 ty
             }
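
The guard on `mod_path.is_ident()` above limits the new diagnostic to bare single-segment paths. A standalone sketch of that dispatch shape (all names hypothetical, not rust-analyzer's API):

```rust
#[derive(Debug)]
enum Diagnostic {
    UnresolvedIdent(String),
}

// Stand-in for name resolution; assume it failed.
fn resolve(_path: &[&str]) -> Option<u32> {
    None
}

fn infer_path(path: &[&str], diagnostics: &mut Vec<Diagnostic>) -> u32 {
    match resolve(path) {
        Some(ty) => ty,
        None => {
            // Mirrors the `mod_path.is_ident()` guard: only a bare,
            // single-segment identifier becomes an UnresolvedIdent diagnostic.
            if let [ident] = path {
                diagnostics.push(Diagnostic::UnresolvedIdent(ident.to_string()));
            }
            0 // placeholder for the error type
        }
    }
}

fn main() {
    let mut diags = Vec::new();
    infer_path(&["x"], &mut diags); // reported
    infer_path(&["std", "mem"], &mut diags); // left to other diagnostics
    println!("{diags:?}");
}
```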


@@ -1,12 +1,20 @@
 //! Interpret intrinsics, lang items and `extern "C"` wellknown functions which their implementation
 //! is not available.
+//!
 use std::cmp;
 
 use chalk_ir::TyKind;
-use hir_def::builtin_type::{BuiltinInt, BuiltinUint};
+use hir_def::{
+    builtin_type::{BuiltinInt, BuiltinUint},
+    resolver::HasResolver,
+};
 
-use super::*;
+use crate::mir::eval::{
+    name, pad16, static_lifetime, Address, AdtId, Arc, BuiltinType, Evaluator, FunctionId,
+    HasModule, HirDisplay, Interned, InternedClosure, Interner, Interval, IntervalAndTy,
+    IntervalOrOwned, ItemContainerId, LangItem, Layout, Locals, Lookup, MirEvalError, MirSpan,
+    ModPath, Mutability, Result, Substitution, Ty, TyBuilder, TyExt,
+};
 
 mod simd;


@@ -2,6 +2,7 @@
 use std::cmp::Ordering;
 
+use crate::consteval::try_const_usize;
 use crate::TyKind;
 
 use super::*;


@@ -1,6 +1,6 @@
 //! This module generates a polymorphic MIR from a hir body
 
-use std::{fmt::Write, mem};
+use std::{fmt::Write, iter, mem};
 
 use base_db::{salsa::Cycle, FileId};
 use chalk_ir::{BoundVar, ConstData, DebruijnIndex, TyKind};
@@ -14,27 +14,37 @@ use hir_def::{
     lang_item::{LangItem, LangItemTarget},
     path::Path,
     resolver::{resolver_for_expr, HasResolver, ResolveValueResult, ValueNs},
-    AdtId, EnumVariantId, GeneralConstId, HasModule, ItemContainerId, LocalFieldId,
+    AdtId, DefWithBodyId, EnumVariantId, GeneralConstId, HasModule, ItemContainerId, LocalFieldId,
     Lookup, TraitId, TupleId, TypeOrConstParamId,
 };
 use hir_expand::name::Name;
+use la_arena::ArenaMap;
+use rustc_hash::FxHashMap;
 use syntax::TextRange;
 use triomphe::Arc;
 
 use crate::{
     consteval::ConstEvalError,
-    db::InternedClosure,
+    db::{HirDatabase, InternedClosure},
+    display::HirDisplay,
     infer::{CaptureKind, CapturedItem, TypeMismatch},
     inhabitedness::is_ty_uninhabited_from,
     layout::LayoutError,
+    mapping::ToChalk,
+    mir::{
+        intern_const_scalar, return_slot, AggregateKind, Arena, BasicBlock, BasicBlockId, BinOp,
+        BorrowKind, CastKind, ClosureId, ConstScalar, Either, Expr, FieldId, Idx, InferenceResult,
+        Interner, Local, LocalId, MemoryMap, MirBody, MirSpan, Mutability, Operand, Place,
+        PlaceElem, PointerCast, ProjectionElem, ProjectionStore, RawIdx, Rvalue, Statement,
+        StatementKind, Substitution, SwitchTargets, Terminator, TerminatorKind, TupleFieldId, Ty,
+        UnOp, VariantId,
+    },
     static_lifetime,
     traits::FnTrait,
     utils::{generics, ClosureSubst},
     Adjust, Adjustment, AutoBorrow, CallableDefId, TyBuilder, TyExt,
 };
 
-use super::*;
-
 mod as_place;
 mod pattern_matching;


@@ -2,9 +2,16 @@
 use hir_def::AssocItemId;
 
-use crate::BindingMode;
-
-use super::*;
+use crate::{
+    mir::lower::{
+        BasicBlockId, BinOp, BindingId, BorrowKind, Either, Expr, FieldId, Idx, Interner,
+        MemoryMap, MirLowerCtx, MirLowerError, MirSpan, Mutability, Operand, Pat, PatId, Place,
+        PlaceElem, ProjectionElem, RecordFieldPat, ResolveValueResult, Result, Rvalue,
+        Substitution, SwitchTargets, TerminatorKind, TupleFieldId, TupleId, TyBuilder, TyKind,
+        ValueNs, VariantData, VariantId,
+    },
+    BindingMode,
+};
 
 macro_rules! not_supported {
     ($x: expr) => {


@@ -4553,3 +4553,58 @@ fn foo() {
 "#,
     );
 }
+
+#[test]
+fn auto_trait_bound() {
+    check_types(
+        r#"
+//- minicore: sized
+auto trait Send {}
+impl<T> !Send for *const T {}
+
+struct Yes;
+trait IsSend { const IS_SEND: Yes; }
+impl<T: Send> IsSend for T { const IS_SEND: Yes = Yes; }
+
+struct Struct<T>(T);
+enum Enum<T> { A, B(T) }
+union Union<T> { t: T }
+
+#[lang = "phantom_data"]
+struct PhantomData<T: ?Sized>;
+
+fn f<T: Send, U>() {
+    T::IS_SEND;
+  //^^^^^^^^^^Yes
+    U::IS_SEND;
+  //^^^^^^^^^^{unknown}
+    <*const T>::IS_SEND;
+  //^^^^^^^^^^^^^^^^^^^{unknown}
+    Struct::<T>::IS_SEND;
+  //^^^^^^^^^^^^^^^^^^^^Yes
+    Struct::<U>::IS_SEND;
+  //^^^^^^^^^^^^^^^^^^^^Yes
+    Struct::<*const T>::IS_SEND;
+  //^^^^^^^^^^^^^^^^^^^^^^^^^^^Yes
+    Enum::<T>::IS_SEND;
+  //^^^^^^^^^^^^^^^^^^Yes
+    Enum::<U>::IS_SEND;
+  //^^^^^^^^^^^^^^^^^^Yes
+    Enum::<*const T>::IS_SEND;
+  //^^^^^^^^^^^^^^^^^^^^^^^^^Yes
+    Union::<T>::IS_SEND;
+  //^^^^^^^^^^^^^^^^^^^Yes
+    Union::<U>::IS_SEND;
+  //^^^^^^^^^^^^^^^^^^^Yes
+    Union::<*const T>::IS_SEND;
+  //^^^^^^^^^^^^^^^^^^^^^^^^^^Yes
+    PhantomData::<T>::IS_SEND;
+  //^^^^^^^^^^^^^^^^^^^^^^^^^Yes
+    PhantomData::<U>::IS_SEND;
+  //^^^^^^^^^^^^^^^^^^^^^^^^^{unknown}
+    PhantomData::<*const T>::IS_SEND;
+  //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^{unknown}
+}
+"#,
+    );
+}


@@ -64,6 +64,7 @@ diagnostics![
     MissingUnsafe,
     MovedOutOfRef,
     NeedMut,
+    NonExhaustiveLet,
     NoSuchField,
     PrivateAssocItem,
     PrivateField,
@@ -86,6 +87,7 @@ diagnostics![
     UnresolvedMacroCall,
     UnresolvedMethodCall,
     UnresolvedModule,
+    UnresolvedIdent,
     UnresolvedProcMacro,
     UnusedMut,
     UnusedVariable,
@@ -241,6 +243,11 @@ pub struct UnresolvedAssocItem {
     pub expr_or_pat: InFile<AstPtr<Either<ast::Expr, Either<ast::Pat, ast::SelfParam>>>>,
 }
 
+#[derive(Debug)]
+pub struct UnresolvedIdent {
+    pub expr: InFile<AstPtr<ast::Expr>>,
+}
+
 #[derive(Debug)]
 pub struct PrivateField {
     pub expr: InFile<AstPtr<ast::Expr>>,
@@ -280,6 +287,12 @@ pub struct MissingMatchArms {
     pub uncovered_patterns: String,
 }
 
+#[derive(Debug)]
+pub struct NonExhaustiveLet {
+    pub pat: InFile<AstPtr<ast::Pat>>,
+    pub uncovered_patterns: String,
+}
+
 #[derive(Debug)]
 pub struct TypeMismatch {
     pub expr_or_pat: InFile<AstPtr<Either<ast::Expr, ast::Pat>>>,
@@ -456,6 +469,22 @@ impl AnyDiagnostic {
                         Err(SyntheticSyntax) => (),
                     }
                 }
+                BodyValidationDiagnostic::NonExhaustiveLet { pat, uncovered_patterns } => {
+                    match source_map.pat_syntax(pat) {
+                        Ok(source_ptr) => {
+                            if let Some(ast_pat) = source_ptr.value.cast::<ast::Pat>() {
+                                return Some(
+                                    NonExhaustiveLet {
+                                        pat: InFile::new(source_ptr.file_id, ast_pat),
+                                        uncovered_patterns,
+                                    }
+                                    .into(),
+                                );
+                            }
+                        }
+                        Err(SyntheticSyntax) => {}
+                    }
+                }
                 BodyValidationDiagnostic::RemoveTrailingReturn { return_expr } => {
                     if let Ok(source_ptr) = source_map.expr_syntax(return_expr) {
                         // Filters out desugared return expressions (e.g. desugared try operators).
@@ -565,6 +594,10 @@ impl AnyDiagnostic {
                 };
                 UnresolvedAssocItem { expr_or_pat }.into()
             }
+            &InferenceDiagnostic::UnresolvedIdent { expr } => {
+                let expr = expr_syntax(expr);
+                UnresolvedIdent { expr }.into()
+            }
             &InferenceDiagnostic::BreakOutsideOfLoop { expr, is_break, bad_value_break } => {
                 let expr = expr_syntax(expr);
                 BreakOutsideOfLoop { expr, is_break, bad_value_break }.into()


@@ -2653,6 +2653,37 @@ impl ItemInNs {
     }
 }
 
+/// Invariant: `inner.as_extern_assoc_item(db).is_some()`
+/// We do not actively enforce this invariant.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub enum ExternAssocItem {
+    Function(Function),
+    Static(Static),
+    TypeAlias(TypeAlias),
+}
+
+pub trait AsExternAssocItem {
+    fn as_extern_assoc_item(self, db: &dyn HirDatabase) -> Option<ExternAssocItem>;
+}
+
+impl AsExternAssocItem for Function {
+    fn as_extern_assoc_item(self, db: &dyn HirDatabase) -> Option<ExternAssocItem> {
+        as_extern_assoc_item(db, ExternAssocItem::Function, self.id)
+    }
+}
+
+impl AsExternAssocItem for Static {
+    fn as_extern_assoc_item(self, db: &dyn HirDatabase) -> Option<ExternAssocItem> {
+        as_extern_assoc_item(db, ExternAssocItem::Static, self.id)
+    }
+}
+
+impl AsExternAssocItem for TypeAlias {
+    fn as_extern_assoc_item(self, db: &dyn HirDatabase) -> Option<ExternAssocItem> {
+        as_extern_assoc_item(db, ExternAssocItem::TypeAlias, self.id)
+    }
+}
+
 /// Invariant: `inner.as_assoc_item(db).is_some()`
 /// We do not actively enforce this invariant.
 #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
@@ -2727,6 +2758,63 @@ where
     }
 }
 
+fn as_extern_assoc_item<'db, ID, DEF, LOC>(
+    db: &(dyn HirDatabase + 'db),
+    ctor: impl FnOnce(DEF) -> ExternAssocItem,
+    id: ID,
+) -> Option<ExternAssocItem>
+where
+    ID: Lookup<Database<'db> = dyn DefDatabase + 'db, Data = AssocItemLoc<LOC>>,
+    DEF: From<ID>,
+    LOC: ItemTreeNode,
+{
+    match id.lookup(db.upcast()).container {
+        ItemContainerId::ExternBlockId(_) => Some(ctor(DEF::from(id))),
+        ItemContainerId::TraitId(_) | ItemContainerId::ImplId(_) | ItemContainerId::ModuleId(_) => {
+            None
+        }
+    }
+}
+
+impl ExternAssocItem {
+    pub fn name(self, db: &dyn HirDatabase) -> Name {
+        match self {
+            Self::Function(it) => it.name(db),
+            Self::Static(it) => it.name(db),
+            Self::TypeAlias(it) => it.name(db),
+        }
+    }
+
+    pub fn module(self, db: &dyn HirDatabase) -> Module {
+        match self {
+            Self::Function(f) => f.module(db),
+            Self::Static(c) => c.module(db),
+            Self::TypeAlias(t) => t.module(db),
+        }
+    }
+
+    pub fn as_function(self) -> Option<Function> {
+        match self {
+            Self::Function(v) => Some(v),
+            _ => None,
+        }
+    }
+
+    pub fn as_static(self) -> Option<Static> {
+        match self {
+            Self::Static(v) => Some(v),
+            _ => None,
+        }
+    }
+
+    pub fn as_type_alias(self) -> Option<TypeAlias> {
+        match self {
+            Self::TypeAlias(v) => Some(v),
+            _ => None,
+        }
+    }
+}
+
 impl AssocItem {
     pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
         match self {
@@ -3798,9 +3886,9 @@ impl Type {
                 // For non-phantom_data adts we check variants/fields as well as generic parameters
                 TyKind::Adt(adt_id, substitution)
-                    if !db.struct_datum(krate, *adt_id).flags.phantom_data =>
+                    if !db.adt_datum(krate, *adt_id).flags.phantom_data =>
                 {
-                    let adt_datum = &db.struct_datum(krate, *adt_id);
+                    let adt_datum = &db.adt_datum(krate, *adt_id);
                     let adt_datum_bound =
                         adt_datum.binders.clone().substitute(Interner, substitution);
                     adt_datum_bound
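
The helper `as_extern_assoc_item` added above applies the constructor only when the item's container is an extern block. A standalone sketch of that container-dispatch shape (simplified stand-in types, not the real `Lookup` machinery):

```rust
#[derive(Debug, PartialEq)]
enum Container {
    ExternBlock,
    Trait,
    Impl,
    Module,
}

#[derive(Debug, PartialEq)]
enum ExternItem {
    Function(u32),
    Static(u32),
}

// Apply `ctor` only for items that live directly in an `extern` block.
fn as_extern_item(
    container: Container,
    ctor: impl FnOnce(u32) -> ExternItem,
    id: u32,
) -> Option<ExternItem> {
    match container {
        Container::ExternBlock => Some(ctor(id)),
        Container::Trait | Container::Impl | Container::Module => None,
    }
}

fn main() {
    assert_eq!(
        as_extern_item(Container::ExternBlock, ExternItem::Function, 7),
        Some(ExternItem::Function(7))
    );
    assert_eq!(as_extern_item(Container::Impl, ExternItem::Static, 3), None);
}
```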


@@ -281,14 +281,14 @@ pub(super) fn type_constructor<'a, DB: HirDatabase>(
                 if ctx.config.enable_borrowcheck && struct_ty.contains_reference(db) {
                     return None;
                 }
-                let fileds = it.fields(db);
+                let fields = it.fields(db);
                 // Check if all fields are visible, otherwise we cannot fill them
-                if fileds.iter().any(|it| !it.is_visible_from(db, module)) {
+                if fields.iter().any(|it| !it.is_visible_from(db, module)) {
                     return None;
                 }
                 // Early exit if some param cannot be filled from lookup
-                let param_exprs: Vec<Vec<Expr>> = fileds
+                let param_exprs: Vec<Vec<Expr>> = fields
                     .into_iter()
                     .map(|field| lookup.find(db, &field.ty(db)))
                     .collect::<Option<_>>()?;


@@ -308,7 +308,7 @@ impl CompletionRelevance {
         // When a fn is bumped due to return type:
         // Bump Constructor or Builder methods with no arguments,
-        // over them tha with self arguments
+        // over them than with self arguments
         if fn_score > 0 {
             if !asf.has_params {
                 // bump associated functions


@@ -17,7 +17,7 @@ impl RootDatabase {
     pub fn request_cancellation(&mut self) {
         let _p =
             tracing::span!(tracing::Level::INFO, "RootDatabase::request_cancellation").entered();
-        self.salsa_runtime_mut().synthetic_write(Durability::LOW);
+        self.synthetic_write(Durability::LOW);
     }
 
     pub fn apply_change(&mut self, change: Change) {
@@ -124,7 +124,7 @@ impl RootDatabase {
             hir::db::InternCoroutineQuery
             hir::db::AssociatedTyDataQuery
             hir::db::TraitDatumQuery
-            hir::db::StructDatumQuery
+            hir::db::AdtDatumQuery
             hir::db::ImplDatumQuery
             hir::db::FnDefDatumQuery
             hir::db::FnDefVarianceQuery


@@ -8,11 +8,11 @@
 use arrayvec::ArrayVec;
 use either::Either;
 use hir::{
-    Adt, AsAssocItem, AssocItem, AttributeTemplate, BuiltinAttr, BuiltinType, Const, Crate,
-    DefWithBody, DeriveHelper, DocLinkDef, ExternCrateDecl, Field, Function, GenericParam,
-    HasVisibility, HirDisplay, Impl, Label, Local, Macro, Module, ModuleDef, Name, PathResolution,
-    Semantics, Static, ToolModule, Trait, TraitAlias, TupleField, TypeAlias, Variant, VariantDef,
-    Visibility,
+    Adt, AsAssocItem, AsExternAssocItem, AssocItem, AttributeTemplate, BuiltinAttr, BuiltinType,
+    Const, Crate, DefWithBody, DeriveHelper, DocLinkDef, ExternAssocItem, ExternCrateDecl, Field,
+    Function, GenericParam, HasVisibility, HirDisplay, Impl, Label, Local, Macro, Module,
+    ModuleDef, Name, PathResolution, Semantics, Static, ToolModule, Trait, TraitAlias, TupleField,
+    TypeAlias, Variant, VariantDef, Visibility,
 };
 use stdx::{format_to, impl_from};
 use syntax::{
@@ -213,8 +213,8 @@ impl Definition {
         })
     }
 
-    pub fn label(&self, db: &RootDatabase) -> Option<String> {
-        let label = match *self {
+    pub fn label(&self, db: &RootDatabase) -> String {
+        match *self {
             Definition::Macro(it) => it.display(db).to_string(),
             Definition::Field(it) => it.display(db).to_string(),
             Definition::TupleField(it) => it.display(db).to_string(),
@@ -241,7 +241,11 @@ impl Definition {
                 }
             }
             Definition::SelfType(impl_def) => {
-                impl_def.self_ty(db).as_adt().and_then(|adt| Definition::Adt(adt).label(db))?
+                let self_ty = &impl_def.self_ty(db);
+                match self_ty.as_adt() {
+                    Some(it) => it.display(db).to_string(),
+                    None => self_ty.display(db).to_string(),
+                }
             }
             Definition::GenericParam(it) => it.display(db).to_string(),
             Definition::Label(it) => it.name(db).display(db).to_string(),
@@ -249,8 +253,7 @@ impl Definition {
             Definition::BuiltinAttr(it) => format!("#[{}]", it.name(db)),
             Definition::ToolModule(it) => it.name(db).to_string(),
             Definition::DeriveHelper(it) => format!("derive_helper {}", it.name(db).display(db)),
-        };
-        Some(label)
+        }
     }
 }
 
@@ -739,6 +742,17 @@ impl AsAssocItem for Definition {
     }
 }
 
+impl AsExternAssocItem for Definition {
+    fn as_extern_assoc_item(self, db: &dyn hir::db::HirDatabase) -> Option<ExternAssocItem> {
+        match self {
+            Definition::Function(it) => it.as_extern_assoc_item(db),
+            Definition::Static(it) => it.as_extern_assoc_item(db),
+            Definition::TypeAlias(it) => it.as_extern_assoc_item(db),
+            _ => None,
+        }
+    }
+}
+
 impl From<AssocItem> for Definition {
     fn from(assoc_item: AssocItem) -> Self {
         match assoc_item {


@@ -1,4 +1,3 @@
-use hir::PrefixKind;
 use stdx::trim_indent;
 use test_fixture::WithFixture;
 use test_utils::{assert_eq_text, CURSOR_MARKER};


@@ -280,7 +280,7 @@ impl RootDatabase {
             // hir_db::InternCoroutineQuery
             hir_db::AssociatedTyDataQuery
             hir_db::TraitDatumQuery
-            hir_db::StructDatumQuery
+            hir_db::AdtDatumQuery
             hir_db::ImplDatumQuery
             hir_db::FnDefDatumQuery
             hir_db::FnDefVarianceQuery


@@ -394,7 +394,6 @@ impl Query {
 mod tests {
     use expect_test::expect_file;
-    use hir::symbols::SymbolCollector;
     use test_fixture::WithFixture;
 
     use super::*;


@@ -60,6 +60,7 @@ fn f() {
     #[cfg(a)] let x = 0; // let statement
   //^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled
 
+    fn abc() {}
     abc(#[cfg(a)] 0);
   //^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled
     let x = Struct {


@@ -512,7 +512,7 @@ impl BAD_TRAIT for () {
     fn BadFunction() {}
 }
 "#,
-        std::iter::once("unused_variables".to_owned()),
+        &["unused_variables"],
     );
 }


@@ -634,7 +634,8 @@ struct TestStruct { one: i32, two: i64 }
 fn test_fn() {
     let one = 1;
-    let s = TestStruct{ ..a };
+    let a = TestStruct{ one, two: 2 };
+    let _ = TestStruct{ ..a };
 }
 "#,
     );


@@ -18,7 +18,9 @@ pub(crate) fn missing_match_arms(
 #[cfg(test)]
 mod tests {
     use crate::{
-        tests::{check_diagnostics, check_diagnostics_with_config},
+        tests::{
+            check_diagnostics, check_diagnostics_with_config, check_diagnostics_with_disabled,
+        },
         DiagnosticsConfig,
     };
 
@@ -282,7 +284,7 @@ fn main() {
         cov_mark::check_count!(validate_match_bailed_out, 4);
         // Match statements with arms that don't match the
         // expression pattern do not fire this diagnostic.
-        check_diagnostics(
+        check_diagnostics_with_disabled(
             r#"
 enum Either { A, B }
 enum Either2 { C, D }
@@ -307,6 +309,7 @@ fn main() {
     match Unresolved::Bar { Unresolved::Baz => () }
 }
 "#,
+            &["E0425"],
         );
     }
 
@@ -397,11 +400,11 @@ fn main() {
     match loop {} {
         Either::A => (),
     }
-    match loop { break Foo::A } {
-        //^^^^^^^^^^^^^^^^^^^^^ error: missing match arm: `B` not covered
+    match loop { break Either::A } {
+        //^^^^^^^^^^^^^^^^^^^^^^^^ error: missing match arm: `B` not covered
         Either::A => (),
     }
-    match loop { break Foo::A } {
+    match loop { break Either::A } {
         Either::A => (),
         Either::B => (),
     }
@@ -977,7 +980,7 @@ fn f(ty: Enum) {
 #[test]
 fn unexpected_ty_fndef() {
     cov_mark::check!(validate_match_bailed_out);
-    check_diagnostics(
+    check_diagnostics_with_disabled(
         r"
 enum Exp {
     Tuple(()),
@@ -987,6 +990,7 @@ fn f() {
         Exp::Tuple => {}
     }
 }",
+        &["E0425"],
     );
 }


@@ -448,7 +448,7 @@ fn main(b: bool) {
     &mut x;
 }
 "#,
-            std::iter::once("remove-unnecessary-else".to_owned()),
+            &["remove-unnecessary-else"],
         );
         check_diagnostics_with_disabled(
             r#"
@@ -463,7 +463,7 @@ fn main(b: bool) {
     &mut x;
 }
 "#,
-            std::iter::once("remove-unnecessary-else".to_owned()),
+            &["remove-unnecessary-else"],
         );
     }
@@ -817,7 +817,7 @@ fn f() {
 //- minicore: option
 fn f(_: i32) {}
 fn main() {
-    let ((Some(mut x), None) | (_, Some(mut x))) = (None, Some(7));
+    let ((Some(mut x), None) | (_, Some(mut x))) = (None, Some(7)) else { return };
       //^^^^^ 💡 warn: variable does not need to be mutable
     f(x);
 }

View file

@ -0,0 +1,47 @@
use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: non-exhaustive-let
//
// This diagnostic is triggered if a `let` statement without an `else` branch has a non-exhaustive
// pattern.
pub(crate) fn non_exhaustive_let(
ctx: &DiagnosticsContext<'_>,
d: &hir::NonExhaustiveLet,
) -> Diagnostic {
Diagnostic::new_with_syntax_node_ptr(
ctx,
DiagnosticCode::RustcHardError("E0005"),
format!("non-exhaustive pattern: {}", d.uncovered_patterns),
d.pat.map(Into::into),
)
}
#[cfg(test)]
mod tests {
use crate::tests::check_diagnostics;
#[test]
fn option_nonexhaustive() {
check_diagnostics(
r#"
//- minicore: option
fn main() {
let None = Some(5);
//^^^^ error: non-exhaustive pattern: `Some(_)` not covered
}
"#,
);
}
#[test]
fn option_exhaustive() {
check_diagnostics(
r#"
//- minicore: option
fn main() {
let Some(_) | None = Some(5);
}
"#,
);
}
}

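For readers who don't have E0005 memorized, a standalone sketch (not part of the diff) of the pattern the new diagnostic polices and the `let ... else` form that satisfies it:

```rust
fn main() {
    let opt = Some(5);
    // `let None = opt;` would trigger E0005: `Some(_)` is not covered.
    let Some(v) = opt else { return };
    println!("{v}");
}
```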
View file

@ -140,7 +140,7 @@ fn foo(x: usize) -> u8 {
} //^^^^^^^^^ 💡 weak: replace return <expr>; with <expr> } //^^^^^^^^^ 💡 weak: replace return <expr>; with <expr>
} }
"#, "#,
std::iter::once("remove-unnecessary-else".to_owned()), &["remove-unnecessary-else"],
); );
} }

View file

@ -2,7 +2,10 @@ use hir::{db::ExpandDatabase, diagnostics::RemoveUnnecessaryElse, HirFileIdExt};
use ide_db::{assists::Assist, source_change::SourceChange}; use ide_db::{assists::Assist, source_change::SourceChange};
use itertools::Itertools; use itertools::Itertools;
use syntax::{ use syntax::{
ast::{self, edit::IndentLevel}, ast::{
self,
edit::{AstNodeEdit, IndentLevel},
},
AstNode, SyntaxToken, TextRange, AstNode, SyntaxToken, TextRange,
}; };
use text_edit::TextEdit; use text_edit::TextEdit;
@ -41,10 +44,15 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &RemoveUnnecessaryElse) -> Option<Vec<
indent = indent + 1; indent = indent + 1;
} }
        let else_replacement = match if_expr.else_branch()? {
-           ast::ElseBranch::Block(ref block) => {
-               block.statements().map(|stmt| format!("\n{indent}{stmt}")).join("")
-           }
-           ast::ElseBranch::IfExpr(ref nested_if_expr) => {
+           ast::ElseBranch::Block(block) => block
+               .statements()
+               .map(|stmt| format!("\n{indent}{stmt}"))
+               .chain(block.tail_expr().map(|tail| format!("\n{indent}{tail}")))
+               .join(""),
+           ast::ElseBranch::IfExpr(mut nested_if_expr) => {
+               if has_parent_if_expr {
+                   nested_if_expr = nested_if_expr.indent(IndentLevel(1))
+               }
                format!("\n{indent}{nested_if_expr}")
            }
        };
@ -87,15 +95,11 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &RemoveUnnecessaryElse) -> Option<Vec<
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use crate::tests::{check_diagnostics, check_diagnostics_with_disabled, check_fix}; use crate::tests::{check_diagnostics_with_disabled, check_fix};
fn check_diagnostics_with_needless_return_disabled(ra_fixture: &str) {
check_diagnostics_with_disabled(ra_fixture, std::iter::once("needless_return".to_owned()));
}
#[test] #[test]
fn remove_unnecessary_else_for_return() { fn remove_unnecessary_else_for_return() {
check_diagnostics_with_needless_return_disabled( check_diagnostics_with_disabled(
r#" r#"
fn test() { fn test() {
if foo { if foo {
@ -106,6 +110,7 @@ fn test() {
} }
} }
"#, "#,
&["needless_return", "E0425"],
); );
check_fix( check_fix(
r#" r#"
@ -130,7 +135,7 @@ fn test() {
#[test] #[test]
fn remove_unnecessary_else_for_return2() { fn remove_unnecessary_else_for_return2() {
check_diagnostics_with_needless_return_disabled( check_diagnostics_with_disabled(
r#" r#"
fn test() { fn test() {
if foo { if foo {
@ -143,6 +148,7 @@ fn test() {
} }
} }
"#, "#,
&["needless_return", "E0425"],
); );
check_fix( check_fix(
r#" r#"
@ -171,9 +177,45 @@ fn test() {
); );
} }
#[test]
fn remove_unnecessary_else_for_return3() {
check_diagnostics_with_disabled(
r#"
fn test(a: bool) -> i32 {
if a {
return 1;
} else {
//^^^^ 💡 weak: remove unnecessary else block
0
}
}
"#,
&["needless_return", "E0425"],
);
check_fix(
r#"
fn test(a: bool) -> i32 {
if a {
return 1;
} else$0 {
0
}
}
"#,
r#"
fn test(a: bool) -> i32 {
if a {
return 1;
}
0
}
"#,
);
}
#[test] #[test]
fn remove_unnecessary_else_for_return_in_child_if_expr() { fn remove_unnecessary_else_for_return_in_child_if_expr() {
check_diagnostics_with_needless_return_disabled( check_diagnostics_with_disabled(
r#" r#"
fn test() { fn test() {
if foo { if foo {
@ -186,6 +228,7 @@ fn test() {
} }
} }
"#, "#,
&["needless_return", "E0425"],
); );
check_fix( check_fix(
r#" r#"
@ -214,9 +257,44 @@ fn test() {
); );
} }
#[test]
fn remove_unnecessary_else_for_return_in_child_if_expr2() {
check_fix(
r#"
fn test() {
if foo {
do_something();
} else if qux {
return bar;
} else$0 if quux {
do_something_else();
} else {
do_something_else2();
}
}
"#,
r#"
fn test() {
if foo {
do_something();
} else {
if qux {
return bar;
}
if quux {
do_something_else();
} else {
do_something_else2();
}
}
}
"#,
);
}
#[test] #[test]
fn remove_unnecessary_else_for_break() { fn remove_unnecessary_else_for_break() {
check_diagnostics( check_diagnostics_with_disabled(
r#" r#"
fn test() { fn test() {
loop { loop {
@ -229,6 +307,7 @@ fn test() {
} }
} }
"#, "#,
&["E0425"],
); );
check_fix( check_fix(
r#" r#"
@ -257,7 +336,7 @@ fn test() {
#[test] #[test]
fn remove_unnecessary_else_for_continue() { fn remove_unnecessary_else_for_continue() {
check_diagnostics( check_diagnostics_with_disabled(
r#" r#"
fn test() { fn test() {
loop { loop {
@ -270,6 +349,7 @@ fn test() {
} }
} }
"#, "#,
&["E0425"],
); );
check_fix( check_fix(
r#" r#"
@ -298,7 +378,7 @@ fn test() {
#[test] #[test]
fn remove_unnecessary_else_for_never() { fn remove_unnecessary_else_for_never() {
check_diagnostics( check_diagnostics_with_disabled(
r#" r#"
fn test() { fn test() {
if foo { if foo {
@ -313,6 +393,7 @@ fn never() -> ! {
loop {} loop {}
} }
"#, "#,
&["E0425"],
); );
check_fix( check_fix(
r#" r#"
@ -345,7 +426,7 @@ fn never() -> ! {
#[test] #[test]
fn no_diagnostic_if_no_else_branch() { fn no_diagnostic_if_no_else_branch() {
check_diagnostics( check_diagnostics_with_disabled(
r#" r#"
fn test() { fn test() {
if foo { if foo {
@ -355,12 +436,13 @@ fn test() {
do_something_else(); do_something_else();
} }
"#, "#,
&["E0425"],
); );
} }
#[test] #[test]
fn no_diagnostic_if_no_divergence() { fn no_diagnostic_if_no_divergence() {
check_diagnostics( check_diagnostics_with_disabled(
r#" r#"
fn test() { fn test() {
if foo { if foo {
@ -370,12 +452,13 @@ fn test() {
} }
} }
"#, "#,
&["E0425"],
); );
} }
#[test] #[test]
fn no_diagnostic_if_no_divergence_in_else_branch() { fn no_diagnostic_if_no_divergence_in_else_branch() {
check_diagnostics_with_needless_return_disabled( check_diagnostics_with_disabled(
r#" r#"
fn test() { fn test() {
if foo { if foo {
@ -385,6 +468,43 @@ fn test() {
} }
} }
"#, "#,
&["needless_return", "E0425"],
);
}
#[test]
fn no_diagnostic_if_not_expr_stmt() {
check_diagnostics_with_disabled(
r#"
fn test1() {
let _x = if a {
return;
} else {
1
};
}
fn test2() {
let _x = if a {
return;
} else if b {
return;
} else if c {
1
} else {
return;
};
}
"#,
&["needless_return", "E0425"],
);
check_diagnostics_with_disabled(
r#"
fn test3() -> u8 {
foo(if a { return 1 } else { 0 })
}
"#,
&["E0425"],
); );
} }
} }

View file

@ -730,7 +730,7 @@ fn f() -> i32 {
} }
fn g() { return; } fn g() { return; }
"#, "#,
std::iter::once("needless_return".to_owned()), &["needless_return"],
); );
} }

View file

@ -38,10 +38,12 @@ fn foo() {
fn while_let_loop_with_label_in_condition() { fn while_let_loop_with_label_in_condition() {
check_diagnostics( check_diagnostics(
r#" r#"
//- minicore: option
fn foo() { fn foo() {
let mut optional = Some(0); let mut optional = Some(0);
'my_label: while let Some(a) = match optional { 'my_label: while let Some(_) = match optional {
None => break 'my_label, None => break 'my_label,
Some(val) => Some(val), Some(val) => Some(val),
} { } {
@ -59,8 +61,8 @@ fn foo() {
r#" r#"
//- minicore: iterator //- minicore: iterator
fn foo() { fn foo() {
'xxx: for _ in unknown { 'xxx: for _ in [] {
'yyy: for _ in unknown { 'yyy: for _ in [] {
break 'xxx; break 'xxx;
continue 'yyy; continue 'yyy;
break 'zzz; break 'zzz;

View file

@ -78,7 +78,9 @@ fn method_fix(
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use crate::{ use crate::{
tests::{check_diagnostics, check_diagnostics_with_config}, tests::{
check_diagnostics, check_diagnostics_with_config, check_diagnostics_with_disabled,
},
DiagnosticsConfig, DiagnosticsConfig,
}; };
@ -148,7 +150,7 @@ fn foo() {
#[test] #[test]
fn no_diagnostic_on_unknown() { fn no_diagnostic_on_unknown() {
check_diagnostics( check_diagnostics_with_disabled(
r#" r#"
fn foo() { fn foo() {
x.foo; x.foo;
@ -156,6 +158,7 @@ fn foo() {
(&((x,),),).foo; (&((x,),),).foo;
} }
"#, "#,
&["E0425"],
); );
} }

View file

@ -0,0 +1,46 @@
use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: unresolved-ident
//
// This diagnostic is triggered if an identifier in expression position cannot be resolved.
pub(crate) fn unresolved_ident(
ctx: &DiagnosticsContext<'_>,
d: &hir::UnresolvedIdent,
) -> Diagnostic {
Diagnostic::new_with_syntax_node_ptr(
ctx,
DiagnosticCode::RustcHardError("E0425"),
"no such value in this scope",
d.expr.map(Into::into),
)
.experimental()
}
#[cfg(test)]
mod tests {
use crate::tests::check_diagnostics;
#[test]
fn missing() {
check_diagnostics(
r#"
fn main() {
let _ = x;
//^ error: no such value in this scope
}
"#,
);
}
#[test]
fn present() {
check_diagnostics(
r#"
fn main() {
let x = 5;
let _ = x;
}
"#,
);
}
}

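Note that the diagnostic is built with `.experimental()`: this new E0425 check lands gated as experimental (see the `experimental: bool` field in the `lib.rs` hunk below, and the `"E0425"` entries added to `check_diagnostics_with_disabled` calls throughout this diff), so it can be filtered out while false positives are shaken out.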
View file

@ -335,7 +335,7 @@ fn main() {
r#" r#"
struct Foo { bar: i32 } struct Foo { bar: i32 }
fn foo() { fn foo() {
Foo { bar: i32 }.bar(); Foo { bar: 0 }.bar();
// ^^^ error: no method `bar` on type `Foo`, but a field with a similar name exists // ^^^ error: no method `bar` on type `Foo`, but a field with a similar name exists
} }
"#, "#,

View file

@ -4,7 +4,7 @@ use ide_db::{
source_change::SourceChange, source_change::SourceChange,
}; };
use itertools::Itertools; use itertools::Itertools;
use syntax::{ast, AstNode, SyntaxNode}; use syntax::{ast, AstNode, SyntaxNode, SyntaxNodePtr};
use text_edit::TextEdit; use text_edit::TextEdit;
use crate::{fix, Diagnostic, DiagnosticCode}; use crate::{fix, Diagnostic, DiagnosticCode};
@ -43,7 +43,7 @@ pub(crate) fn useless_braces(
"Unnecessary braces in use statement".to_owned(), "Unnecessary braces in use statement".to_owned(),
FileRange { file_id, range: use_range }, FileRange { file_id, range: use_range },
) )
.with_main_node(InFile::new(file_id.into(), node.clone())) .with_main_node(InFile::new(file_id.into(), SyntaxNodePtr::new(node)))
.with_fixes(Some(vec![fix( .with_fixes(Some(vec![fix(
"remove_braces", "remove_braces",
"Remove unnecessary braces", "Remove unnecessary braces",

View file

@ -41,6 +41,7 @@ mod handlers {
pub(crate) mod moved_out_of_ref; pub(crate) mod moved_out_of_ref;
pub(crate) mod mutability_errors; pub(crate) mod mutability_errors;
pub(crate) mod no_such_field; pub(crate) mod no_such_field;
pub(crate) mod non_exhaustive_let;
pub(crate) mod private_assoc_item; pub(crate) mod private_assoc_item;
pub(crate) mod private_field; pub(crate) mod private_field;
pub(crate) mod remove_trailing_return; pub(crate) mod remove_trailing_return;
@ -58,6 +59,7 @@ mod handlers {
pub(crate) mod unresolved_assoc_item; pub(crate) mod unresolved_assoc_item;
pub(crate) mod unresolved_extern_crate; pub(crate) mod unresolved_extern_crate;
pub(crate) mod unresolved_field; pub(crate) mod unresolved_field;
pub(crate) mod unresolved_ident;
pub(crate) mod unresolved_import; pub(crate) mod unresolved_import;
pub(crate) mod unresolved_macro_call; pub(crate) mod unresolved_macro_call;
pub(crate) mod unresolved_method; pub(crate) mod unresolved_method;
@ -140,7 +142,7 @@ pub struct Diagnostic {
pub experimental: bool, pub experimental: bool,
pub fixes: Option<Vec<Assist>>, pub fixes: Option<Vec<Assist>>,
// The node that will be affected by `#[allow]` and similar attributes. // The node that will be affected by `#[allow]` and similar attributes.
pub main_node: Option<InFile<SyntaxNode>>, pub main_node: Option<InFile<SyntaxNodePtr>>,
} }
impl Diagnostic { impl Diagnostic {
@ -172,9 +174,8 @@ impl Diagnostic {
message: impl Into<String>, message: impl Into<String>,
node: InFile<SyntaxNodePtr>, node: InFile<SyntaxNodePtr>,
) -> Diagnostic { ) -> Diagnostic {
-       let file_id = node.file_id;
        Diagnostic::new(code, message, ctx.sema.diagnostics_display_range(node))
-           .with_main_node(node.map(|x| x.to_node(&ctx.sema.parse_or_expand(file_id))))
+           .with_main_node(node)
} }
fn experimental(mut self) -> Diagnostic { fn experimental(mut self) -> Diagnostic {
@ -182,7 +183,7 @@ impl Diagnostic {
self self
} }
fn with_main_node(mut self, main_node: InFile<SyntaxNode>) -> Diagnostic { fn with_main_node(mut self, main_node: InFile<SyntaxNodePtr>) -> Diagnostic {
self.main_node = Some(main_node); self.main_node = Some(main_node);
self self
} }
@ -359,6 +360,7 @@ pub fn diagnostics(
AnyDiagnostic::MissingUnsafe(d) => handlers::missing_unsafe::missing_unsafe(&ctx, &d), AnyDiagnostic::MissingUnsafe(d) => handlers::missing_unsafe::missing_unsafe(&ctx, &d),
AnyDiagnostic::MovedOutOfRef(d) => handlers::moved_out_of_ref::moved_out_of_ref(&ctx, &d), AnyDiagnostic::MovedOutOfRef(d) => handlers::moved_out_of_ref::moved_out_of_ref(&ctx, &d),
AnyDiagnostic::NeedMut(d) => handlers::mutability_errors::need_mut(&ctx, &d), AnyDiagnostic::NeedMut(d) => handlers::mutability_errors::need_mut(&ctx, &d),
AnyDiagnostic::NonExhaustiveLet(d) => handlers::non_exhaustive_let::non_exhaustive_let(&ctx, &d),
AnyDiagnostic::NoSuchField(d) => handlers::no_such_field::no_such_field(&ctx, &d), AnyDiagnostic::NoSuchField(d) => handlers::no_such_field::no_such_field(&ctx, &d),
AnyDiagnostic::PrivateAssocItem(d) => handlers::private_assoc_item::private_assoc_item(&ctx, &d), AnyDiagnostic::PrivateAssocItem(d) => handlers::private_assoc_item::private_assoc_item(&ctx, &d),
AnyDiagnostic::PrivateField(d) => handlers::private_field::private_field(&ctx, &d), AnyDiagnostic::PrivateField(d) => handlers::private_field::private_field(&ctx, &d),
@ -375,6 +377,7 @@ pub fn diagnostics(
AnyDiagnostic::UnresolvedAssocItem(d) => handlers::unresolved_assoc_item::unresolved_assoc_item(&ctx, &d), AnyDiagnostic::UnresolvedAssocItem(d) => handlers::unresolved_assoc_item::unresolved_assoc_item(&ctx, &d),
AnyDiagnostic::UnresolvedExternCrate(d) => handlers::unresolved_extern_crate::unresolved_extern_crate(&ctx, &d), AnyDiagnostic::UnresolvedExternCrate(d) => handlers::unresolved_extern_crate::unresolved_extern_crate(&ctx, &d),
AnyDiagnostic::UnresolvedField(d) => handlers::unresolved_field::unresolved_field(&ctx, &d), AnyDiagnostic::UnresolvedField(d) => handlers::unresolved_field::unresolved_field(&ctx, &d),
AnyDiagnostic::UnresolvedIdent(d) => handlers::unresolved_ident::unresolved_ident(&ctx, &d),
AnyDiagnostic::UnresolvedImport(d) => handlers::unresolved_import::unresolved_import(&ctx, &d), AnyDiagnostic::UnresolvedImport(d) => handlers::unresolved_import::unresolved_import(&ctx, &d),
AnyDiagnostic::UnresolvedMacroCall(d) => handlers::unresolved_macro_call::unresolved_macro_call(&ctx, &d), AnyDiagnostic::UnresolvedMacroCall(d) => handlers::unresolved_macro_call::unresolved_macro_call(&ctx, &d),
AnyDiagnostic::UnresolvedMethodCall(d) => handlers::unresolved_method::unresolved_method(&ctx, &d), AnyDiagnostic::UnresolvedMethodCall(d) => handlers::unresolved_method::unresolved_method(&ctx, &d),
@ -390,8 +393,17 @@ pub fn diagnostics(
res.push(d) res.push(d)
} }
-   let mut diagnostics_of_range =
-       res.iter_mut().filter_map(|x| Some((x.main_node.clone()?, x))).collect::<FxHashMap<_, _>>();
+   let mut diagnostics_of_range = res
+       .iter_mut()
+       .filter_map(|it| {
+           Some((
+               it.main_node
+                   .map(|ptr| ptr.map(|node| node.to_node(&ctx.sema.parse_or_expand(ptr.file_id))))
+                   .clone()?,
+               it,
+           ))
+       })
+       .collect::<FxHashMap<_, _>>();
let mut rustc_stack: FxHashMap<String, Vec<Severity>> = FxHashMap::default(); let mut rustc_stack: FxHashMap<String, Vec<Severity>> = FxHashMap::default();
let mut clippy_stack: FxHashMap<String, Vec<Severity>> = FxHashMap::default(); let mut clippy_stack: FxHashMap<String, Vec<Severity>> = FxHashMap::default();

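Two hunks above switch `main_node` from `InFile<SyntaxNode>` to `InFile<SyntaxNodePtr>`: a diagnostic no longer keeps a syntax subtree alive and instead re-resolves the node on demand, as the reworked `diagnostics_of_range` shows. A minimal sketch of the pointer round-trip (signatures approximate the `syntax`/`rowan` API used in this diff):

```rust
use syntax::{AstNode, SourceFile, SyntaxNodePtr};

fn roundtrip() {
    let parse = SourceFile::parse("fn f() {}");
    let root = parse.tree().syntax().clone();
    let node = root.first_child().unwrap(); // the `fn f() {}` item
    let ptr = SyntaxNodePtr::new(&node);    // cheap: just a kind + text range
    let resolved = ptr.to_node(&root);      // re-materialized from the tree
    assert_eq!(node, resolved);
}
```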
View file

@ -198,12 +198,9 @@ pub(crate) fn check_diagnostics(ra_fixture: &str) {
} }
#[track_caller] #[track_caller]
-pub(crate) fn check_diagnostics_with_disabled(
-    ra_fixture: &str,
-    disabled: impl Iterator<Item = String>,
-) {
+pub(crate) fn check_diagnostics_with_disabled(ra_fixture: &str, disabled: &[&str]) {
     let mut config = DiagnosticsConfig::test_sample();
-    config.disabled.extend(disabled);
+    config.disabled.extend(disabled.iter().map(|&s| s.to_owned()));
     check_diagnostics_with_config(config, ra_fixture)
} }

View file

@ -147,7 +147,7 @@ fn hover_simple(
if let Some(doc_comment) = token_as_doc_comment(&original_token) { if let Some(doc_comment) = token_as_doc_comment(&original_token) {
cov_mark::hit!(no_highlight_on_comment_hover); cov_mark::hit!(no_highlight_on_comment_hover);
return doc_comment.get_definition_with_descend_at(sema, offset, |def, node, range| { return doc_comment.get_definition_with_descend_at(sema, offset, |def, node, range| {
let res = hover_for_definition(sema, file_id, def, &node, config)?; let res = hover_for_definition(sema, file_id, def, &node, config);
Some(RangeInfo::new(range, res)) Some(RangeInfo::new(range, res))
}); });
} }
@ -161,7 +161,7 @@ fn hover_simple(
Definition::from(resolution?), Definition::from(resolution?),
&original_token.parent()?, &original_token.parent()?,
config, config,
)?; );
return Some(RangeInfo::new(range, res)); return Some(RangeInfo::new(range, res));
} }
@ -215,7 +215,7 @@ fn hover_simple(
}) })
.flatten() .flatten()
.unique_by(|&(def, _)| def) .unique_by(|&(def, _)| def)
.filter_map(|(def, node)| hover_for_definition(sema, file_id, def, &node, config)) .map(|(def, node)| hover_for_definition(sema, file_id, def, &node, config))
.reduce(|mut acc: HoverResult, HoverResult { markup, actions }| { .reduce(|mut acc: HoverResult, HoverResult { markup, actions }| {
acc.actions.extend(actions); acc.actions.extend(actions);
acc.markup = Markup::from(format!("{}\n---\n{markup}", acc.markup)); acc.markup = Markup::from(format!("{}\n---\n{markup}", acc.markup));
@ -373,9 +373,9 @@ pub(crate) fn hover_for_definition(
def: Definition, def: Definition,
scope_node: &SyntaxNode, scope_node: &SyntaxNode,
config: &HoverConfig, config: &HoverConfig,
) -> Option<HoverResult> { ) -> HoverResult {
let famous_defs = match &def { let famous_defs = match &def {
Definition::BuiltinType(_) => Some(FamousDefs(sema, sema.scope(scope_node)?.krate())), Definition::BuiltinType(_) => sema.scope(scope_node).map(|it| FamousDefs(sema, it.krate())),
_ => None, _ => None,
}; };
@ -396,7 +396,7 @@ pub(crate) fn hover_for_definition(
}; };
let notable_traits = def_ty.map(|ty| notable_traits(db, &ty)).unwrap_or_default(); let notable_traits = def_ty.map(|ty| notable_traits(db, &ty)).unwrap_or_default();
render::definition(sema.db, def, famous_defs.as_ref(), &notable_traits, config).map(|markup| { let markup = render::definition(sema.db, def, famous_defs.as_ref(), &notable_traits, config);
HoverResult { HoverResult {
markup: render::process_markup(sema.db, def, &markup, config), markup: render::process_markup(sema.db, def, &markup, config),
actions: [ actions: [
@ -409,7 +409,6 @@ pub(crate) fn hover_for_definition(
.flatten() .flatten()
.collect(), .collect(),
} }
})
} }
fn notable_traits( fn notable_traits(

View file

@ -3,8 +3,8 @@ use std::{mem, ops::Not};
use either::Either; use either::Either;
use hir::{ use hir::{
Adt, AsAssocItem, CaptureKind, HasCrate, HasSource, HirDisplay, Layout, LayoutError, Name, Adt, AsAssocItem, AsExternAssocItem, CaptureKind, HasCrate, HasSource, HirDisplay, Layout,
Semantics, Trait, Type, TypeInfo, LayoutError, Name, Semantics, Trait, Type, TypeInfo,
}; };
use ide_db::{ use ide_db::{
base_db::SourceDatabase, base_db::SourceDatabase,
@ -264,7 +264,7 @@ pub(super) fn keyword(
let markup = process_markup( let markup = process_markup(
sema.db, sema.db,
Definition::Module(doc_owner), Definition::Module(doc_owner),
&markup(Some(docs.into()), description, None)?, &markup(Some(docs.into()), description, None),
config, config,
); );
Some(HoverResult { markup, actions }) Some(HoverResult { markup, actions })
@ -369,12 +369,20 @@ fn definition_owner_name(db: &RootDatabase, def: &Definition) -> Option<String>
     match def {
         Definition::Field(f) => Some(f.parent_def(db).name(db)),
         Definition::Local(l) => l.parent(db).name(db),
-        Definition::Function(f) => match f.as_assoc_item(db)?.container(db) {
-            hir::AssocItemContainer::Trait(t) => Some(t.name(db)),
-            hir::AssocItemContainer::Impl(i) => i.self_ty(db).as_adt().map(|adt| adt.name(db)),
-        },
         Definition::Variant(e) => Some(e.parent_enum(db).name(db)),
-        _ => None,
+        d => {
+            if let Some(assoc_item) = d.as_assoc_item(db) {
+                match assoc_item.container(db) {
+                    hir::AssocItemContainer::Trait(t) => Some(t.name(db)),
+                    hir::AssocItemContainer::Impl(i) => {
+                        i.self_ty(db).as_adt().map(|adt| adt.name(db))
+                    }
+                }
+            } else {
+                return d.as_extern_assoc_item(db).map(|_| "<extern>".to_owned());
+            }
+        }
     }
.map(|name| name.display(db).to_string()) .map(|name| name.display(db).to_string())
} }
@ -396,11 +404,11 @@ pub(super) fn definition(
famous_defs: Option<&FamousDefs<'_, '_>>, famous_defs: Option<&FamousDefs<'_, '_>>,
notable_traits: &[(Trait, Vec<(Option<Type>, Name)>)], notable_traits: &[(Trait, Vec<(Option<Type>, Name)>)],
config: &HoverConfig, config: &HoverConfig,
) -> Option<Markup> { ) -> Markup {
let mod_path = definition_mod_path(db, &def); let mod_path = definition_mod_path(db, &def);
let label = def.label(db)?; let label = def.label(db);
let docs = def.docs(db, famous_defs); let docs = def.docs(db, famous_defs);
let value = match def { let value = (|| match def {
Definition::Variant(it) => { Definition::Variant(it) => {
if !it.parent_enum(db).is_data_carrying(db) { if !it.parent_enum(db).is_data_carrying(db) {
match it.eval(db) { match it.eval(db) {
@ -436,7 +444,7 @@ pub(super) fn definition(
Some(body.to_string()) Some(body.to_string())
} }
_ => None, _ => None,
}; })();
let layout_info = match def { let layout_info = match def {
Definition::Field(it) => render_memory_layout( Definition::Field(it) => render_memory_layout(
@ -683,7 +691,7 @@ fn definition_mod_path(db: &RootDatabase, def: &Definition) -> Option<String> {
def.module(db).map(|module| path(db, module, definition_owner_name(db, def))) def.module(db).map(|module| path(db, module, definition_owner_name(db, def)))
} }
fn markup(docs: Option<String>, desc: String, mod_path: Option<String>) -> Option<Markup> { fn markup(docs: Option<String>, desc: String, mod_path: Option<String>) -> Markup {
let mut buf = String::new(); let mut buf = String::new();
if let Some(mod_path) = mod_path { if let Some(mod_path) = mod_path {
@ -696,7 +704,7 @@ fn markup(docs: Option<String>, desc: String, mod_path: Option<String>) -> Optio
if let Some(doc) = docs { if let Some(doc) = docs {
format_to!(buf, "\n___\n\n{}", doc); format_to!(buf, "\n___\n\n{}", doc);
} }
Some(buf.into()) buf.into()
} }
fn find_std_module(famous_defs: &FamousDefs<'_, '_>, name: &str) -> Option<hir::Module> { fn find_std_module(famous_defs: &FamousDefs<'_, '_>, name: &str) -> Option<hir::Module> {

View file

@ -1202,7 +1202,7 @@ fn main() {
*C* *C*
```rust ```rust
test test::X
``` ```
```rust ```rust
@ -1298,6 +1298,42 @@ impl Thing {
``` ```
"#]], "#]],
); );
check(
r#"
impl usize {
pub fn thing(a: Self$0) {}
}
"#,
expect![[r#"
*Self*
```rust
test
```
```rust
usize
```
"#]],
);
check(
r#"
impl fn() -> usize {
pub fn thing(a: Self$0) {}
}
"#,
expect![[r#"
*Self*
```rust
test
```
```rust
fn() -> usize
```
"#]],
);
} }
#[test] #[test]
@ -2241,7 +2277,7 @@ fn main() { let foo_test = unsafe { fo$0o(1, 2, 3); } }
*foo* *foo*
```rust ```rust
test test::<extern>
``` ```
```rust ```rust
@ -4230,7 +4266,7 @@ fn main() {
*B* *B*
```rust ```rust
test test::T
``` ```
```rust ```rust
@ -4259,7 +4295,7 @@ fn main() {
*B* *B*
```rust ```rust
test test::T
``` ```
```rust ```rust
@ -4291,7 +4327,7 @@ fn main() {
*B* *B*
```rust ```rust
test test::T
``` ```
```rust ```rust
@ -4883,7 +4919,7 @@ fn test() {
*FOO* *FOO*
```rust ```rust
test test::S
``` ```
```rust ```rust
@ -5248,7 +5284,7 @@ impl T1 for Foo {
*Bar* *Bar*
```rust ```rust
test::t2 test::t2::T2
``` ```
```rust ```rust
@ -5270,7 +5306,7 @@ trait A {
*Assoc* *Assoc*
```rust ```rust
test test::A
``` ```
```rust ```rust
@ -5291,7 +5327,7 @@ trait A {
*Assoc* *Assoc*
```rust ```rust
test test::A
``` ```
```rust ```rust
@ -5310,7 +5346,7 @@ trait A where
*Assoc* *Assoc*
```rust ```rust
test test::A
``` ```
```rust ```rust
@ -6596,7 +6632,7 @@ fn test() {
*A* *A*
```rust ```rust
test test::S
``` ```
```rust ```rust
@ -6625,7 +6661,7 @@ fn test() {
*A* *A*
```rust ```rust
test test::S
``` ```
```rust ```rust
@ -6655,7 +6691,7 @@ mod m {
*A* *A*
```rust ```rust
test test::S
``` ```
```rust ```rust
@ -7201,6 +7237,65 @@ impl Iterator for S {
); );
} }
#[test]
fn extern_items() {
check(
r#"
extern "C" {
static STATIC$0: ();
}
"#,
expect![[r#"
*STATIC*
```rust
test::<extern>
```
```rust
static STATIC: ()
```
"#]],
);
check(
r#"
extern "C" {
fn fun$0();
}
"#,
expect![[r#"
*fun*
```rust
test::<extern>
```
```rust
unsafe fn fun()
```
"#]],
);
check(
r#"
extern "C" {
type Ty$0;
}
"#,
expect![[r#"
*Ty*
```rust
test::<extern>
```
```rust
// size = 0, align = 1
type Ty
```
"#]],
);
}
#[test] #[test]
fn notable_ranged() { fn notable_ranged() {
check_hover_range( check_hover_range(

View file

@ -303,7 +303,6 @@ fn compute_ws(left: SyntaxKind, right: SyntaxKind) -> &'static str {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use syntax::SourceFile;
use test_utils::{add_cursor, assert_eq_text, extract_offset, extract_range}; use test_utils::{add_cursor, assert_eq_text, extract_offset, extract_range};
use super::*; use super::*;

View file

@ -9,6 +9,7 @@ use ide_db::{
base_db::{FileId, FileRange}, base_db::{FileId, FileRange},
defs::{Definition, NameClass, NameRefClass}, defs::{Definition, NameClass, NameRefClass},
rename::{bail, format_err, source_edit_from_references, IdentifierKind}, rename::{bail, format_err, source_edit_from_references, IdentifierKind},
source_change::SourceChangeBuilder,
RootDatabase, RootDatabase,
}; };
use itertools::Itertools; use itertools::Itertools;
@ -90,8 +91,42 @@ pub(crate) fn rename(
let syntax = source_file.syntax(); let syntax = source_file.syntax();
let defs = find_definitions(&sema, syntax, position)?; let defs = find_definitions(&sema, syntax, position)?;
let alias_fallback = alias_fallback(syntax, position, new_name);
-    let ops: RenameResult<Vec<SourceChange>> = defs
+    let ops: RenameResult<Vec<SourceChange>> = match alias_fallback {
Some(_) => defs
// FIXME: This can use the `ide_db::rename_reference` (or def.rename) method once we can
// properly find "direct" usages/references.
.map(|(.., def)| {
match IdentifierKind::classify(new_name)? {
IdentifierKind::Ident => (),
IdentifierKind::Lifetime => {
bail!("Cannot alias reference to a lifetime identifier")
}
IdentifierKind::Underscore => bail!("Cannot alias reference to `_`"),
};
let mut usages = def.usages(&sema).all();
// FIXME: hack - removes the usage that triggered this rename operation.
match usages.references.get_mut(&position.file_id).and_then(|refs| {
refs.iter()
.position(|ref_| ref_.range.contains_inclusive(position.offset))
.map(|idx| refs.remove(idx))
}) {
Some(_) => (),
None => never!(),
};
let mut source_change = SourceChange::default();
source_change.extend(usages.iter().map(|(&file_id, refs)| {
(file_id, source_edit_from_references(refs, def, new_name))
}));
Ok(source_change)
})
.collect(),
None => defs
.map(|(.., def)| { .map(|(.., def)| {
if let Definition::Local(local) = def { if let Definition::Local(local) = def {
if let Some(self_param) = local.as_self_param(sema.db) { if let Some(self_param) = local.as_self_param(sema.db) {
@ -105,9 +140,11 @@ pub(crate) fn rename(
} }
def.rename(&sema, new_name) def.rename(&sema, new_name)
}) })
-        .collect();
+            .collect(),
+    };
ops?.into_iter() ops?.into_iter()
.chain(alias_fallback)
.reduce(|acc, elem| acc.merge(elem)) .reduce(|acc, elem| acc.merge(elem))
.ok_or_else(|| format_err!("No references found at position")) .ok_or_else(|| format_err!("No references found at position"))
} }
@ -130,6 +167,38 @@ pub(crate) fn will_rename_file(
Some(change) Some(change)
} }
// FIXME: Should support `extern crate`.
fn alias_fallback(
syntax: &SyntaxNode,
FilePosition { file_id, offset }: FilePosition,
new_name: &str,
) -> Option<SourceChange> {
let use_tree = syntax
.token_at_offset(offset)
.flat_map(|syntax| syntax.parent_ancestors())
.find_map(ast::UseTree::cast)?;
let last_path_segment = use_tree.path()?.segments().last()?.name_ref()?;
if !last_path_segment.syntax().text_range().contains_inclusive(offset) {
return None;
};
let mut builder = SourceChangeBuilder::new(file_id);
match use_tree.rename() {
Some(rename) => {
let offset = rename.syntax().text_range();
builder.replace(offset, format!("as {new_name}"));
}
None => {
let offset = use_tree.syntax().text_range().end();
builder.insert(offset, format!(" as {new_name}"));
}
}
Some(builder.finish())
}
fn find_definitions( fn find_definitions(
sema: &Semantics<'_, RootDatabase>, sema: &Semantics<'_, RootDatabase>,
syntax: &SyntaxNode, syntax: &SyntaxNode,
@ -2626,7 +2695,8 @@ use qux as frob;
//- /lib.rs crate:lib new_source_root:library //- /lib.rs crate:lib new_source_root:library
pub struct S; pub struct S;
//- /main.rs crate:main deps:lib new_source_root:local //- /main.rs crate:main deps:lib new_source_root:local
use lib::S$0; use lib::S;
fn main() { let _: S$0; }
"#, "#,
"error: Cannot rename a non-local definition", "error: Cannot rename a non-local definition",
); );
@ -2686,4 +2756,27 @@ fn test() {
"#, "#,
); );
} }
#[test]
fn rename_path_inside_use_tree() {
check(
"Baz",
r#"
mod foo { pub struct Foo; }
mod bar { use super::Foo; }
use foo::Foo$0;
fn main() { let _: Foo; }
"#,
r#"
mod foo { pub struct Foo; }
mod bar { use super::Baz; }
use foo::Foo as Baz;
fn main() { let _: Baz; }
"#,
)
}
} }

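A note on the two edit shapes `alias_fallback` produces: when the use tree has no rename yet, ` as <new_name>` is inserted after it (`use foo::Foo;` becomes `use foo::Foo as Baz;`, as the test above shows); when a rename already exists, the whole `as ...` node is replaced, so `use foo::Foo as Bar;` also ends up as `use foo::Foo as Baz;`.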
View file

@ -186,7 +186,7 @@ impl StaticIndex<'_> {
} else { } else {
let it = self.tokens.insert(TokenStaticData { let it = self.tokens.insert(TokenStaticData {
documentation: documentation_for_definition(&sema, def, &node), documentation: documentation_for_definition(&sema, def, &node),
hover: hover_for_definition(&sema, file_id, def, &node, &hover_config), hover: Some(hover_for_definition(&sema, file_id, def, &node, &hover_config)),
definition: def.try_to_nav(self.db).map(UpmappingResult::call_site).map(|it| { definition: def.try_to_nav(self.db).map(UpmappingResult::call_site).map(|it| {
FileRange { file_id: it.file_id, range: it.focus_or_full_range() } FileRange { file_id: it.file_id, range: it.focus_or_full_range() }
}), }),
@ -196,7 +196,7 @@ impl StaticIndex<'_> {
enclosing_moniker: current_crate enclosing_moniker: current_crate
.zip(def.enclosing_definition(self.db)) .zip(def.enclosing_definition(self.db))
.and_then(|(cc, enclosing_def)| def_to_moniker(self.db, enclosing_def, cc)), .and_then(|(cc, enclosing_def)| def_to_moniker(self.db, enclosing_def, cc)),
signature: def.label(self.db), signature: Some(def.label(self.db)),
kind: def_to_kind(self.db, def), kind: def_to_kind(self.db, def),
}); });
self.def_map.insert(def, it); self.def_map.insert(def, it);

View file

@ -309,6 +309,10 @@ fn load_crate_graph(
vfs: &mut vfs::Vfs, vfs: &mut vfs::Vfs,
receiver: &Receiver<vfs::loader::Message>, receiver: &Receiver<vfs::loader::Message>,
) -> AnalysisHost { ) -> AnalysisHost {
let (ProjectWorkspace::Cargo { toolchain, target_layout, .. }
| ProjectWorkspace::Json { toolchain, target_layout, .. }
| ProjectWorkspace::DetachedFiles { toolchain, target_layout, .. }) = ws;
let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<usize>().ok()); let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<usize>().ok());
let mut host = AnalysisHost::new(lru_cap); let mut host = AnalysisHost::new(lru_cap);
let mut analysis_change = Change::new(); let mut analysis_change = Change::new();
@ -344,14 +348,9 @@ fn load_crate_graph(
let num_crates = crate_graph.len(); let num_crates = crate_graph.len();
analysis_change.set_crate_graph(crate_graph); analysis_change.set_crate_graph(crate_graph);
analysis_change.set_proc_macros(proc_macros); analysis_change.set_proc_macros(proc_macros);
-    if let ProjectWorkspace::Cargo { toolchain, target_layout, .. }
-    | ProjectWorkspace::Json { toolchain, target_layout, .. } = ws
-    {
-        analysis_change.set_target_data_layouts(
-            iter::repeat(target_layout.clone()).take(num_crates).collect(),
-        );
-        analysis_change.set_toolchains(iter::repeat(toolchain.clone()).take(num_crates).collect());
-    }
+    analysis_change
+        .set_target_data_layouts(iter::repeat(target_layout.clone()).take(num_crates).collect());
+    analysis_change.set_toolchains(iter::repeat(toolchain.clone()).take(num_crates).collect());
host.apply_change(analysis_change); host.apply_change(analysis_change);
host host

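The rewrite above leans on an irrefutable or-pattern: since every `ProjectWorkspace` variant now carries `toolchain` and `target_layout` (see the destructuring `let` added at the top of `load_crate_graph`), a single `let` can pull the fields out of whichever variant is present. A toy standalone version of the idiom:

```rust
enum Ws {
    Cargo { toolchain: Option<u32> },
    Json { toolchain: Option<u32> },
    Detached { toolchain: Option<u32> },
}

fn toolchain(ws: &Ws) -> Option<u32> {
    // Irrefutable because the alternatives cover every variant and
    // bind the same name at the same type.
    let (Ws::Cargo { toolchain } | Ws::Json { toolchain } | Ws::Detached { toolchain }) = ws;
    *toolchain
}

fn main() {
    assert_eq!(toolchain(&Ws::Json { toolchain: Some(1) }), Some(1));
}
```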
View file

@ -45,7 +45,7 @@ impl ProcMacroProcessSrv {
}) })
}; };
let mut srv = create_srv(true)?; let mut srv = create_srv(true)?;
tracing::info!("sending version check"); tracing::info!("sending proc-macro server version check");
match srv.version_check() { match srv.version_check() {
Ok(v) if v > CURRENT_API_VERSION => Err(io::Error::new( Ok(v) if v > CURRENT_API_VERSION => Err(io::Error::new(
io::ErrorKind::Other, io::ErrorKind::Other,
@ -55,14 +55,15 @@ impl ProcMacroProcessSrv {
), ),
)), )),
Ok(v) => { Ok(v) => {
tracing::info!("got version {v}"); tracing::info!("Proc-macro server version: {v}");
srv = create_srv(false)?; srv = create_srv(false)?;
srv.version = v; srv.version = v;
if srv.version > RUST_ANALYZER_SPAN_SUPPORT { if srv.version >= RUST_ANALYZER_SPAN_SUPPORT {
if let Ok(mode) = srv.enable_rust_analyzer_spans() { if let Ok(mode) = srv.enable_rust_analyzer_spans() {
srv.mode = mode; srv.mode = mode;
} }
} }
tracing::info!("Proc-macro server span mode: {:?}", srv.mode);
Ok(srv) Ok(srv)
} }
Err(e) => { Err(e) => {

View file

@ -64,7 +64,7 @@ impl ProcMacros {
&bridge::server::SameThread, &bridge::server::SameThread,
S::make_server(call_site, def_site, mixed_site), S::make_server(call_site, def_site, mixed_site),
parsed_body, parsed_body,
false, cfg!(debug_assertions),
); );
return res return res
.map(|it| it.into_subtree(call_site)) .map(|it| it.into_subtree(call_site))
@ -75,7 +75,7 @@ impl ProcMacros {
&bridge::server::SameThread, &bridge::server::SameThread,
S::make_server(call_site, def_site, mixed_site), S::make_server(call_site, def_site, mixed_site),
parsed_body, parsed_body,
false, cfg!(debug_assertions),
); );
return res return res
.map(|it| it.into_subtree(call_site)) .map(|it| it.into_subtree(call_site))
@ -87,7 +87,7 @@ impl ProcMacros {
S::make_server(call_site, def_site, mixed_site), S::make_server(call_site, def_site, mixed_site),
parsed_attributes, parsed_attributes,
parsed_body, parsed_body,
false, cfg!(debug_assertions),
); );
return res return res
.map(|it| it.into_subtree(call_site)) .map(|it| it.into_subtree(call_site))

View file

@ -93,7 +93,14 @@ impl<S> LiteralFormatter<S> {
let hashes = get_hashes_str(n); let hashes = get_hashes_str(n);
f(&["br", hashes, "\"", symbol, "\"", hashes, suffix]) f(&["br", hashes, "\"", symbol, "\"", hashes, suffix])
} }
_ => f(&[symbol, suffix]), bridge::LitKind::CStr => f(&["c\"", symbol, "\"", suffix]),
bridge::LitKind::CStrRaw(n) => {
let hashes = get_hashes_str(n);
f(&["cr", hashes, "\"", symbol, "\"", hashes, suffix])
}
bridge::LitKind::Integer | bridge::LitKind::Float | bridge::LitKind::ErrWithGuar => {
f(&[symbol, suffix])
}
}) })
} }

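The new `CStr`/`CStrRaw` arms format C string literals. These were still on their way to stable Rust when this sync landed (they stabilized around 1.77), so treat the snippet below as requiring a recent toolchain; it only shows what the source forms being reassembled look like:

```rust
fn main() {
    // `c"..."` produces a `&core::ffi::CStr`, NUL-terminated for you.
    let plain: &core::ffi::CStr = c"null";
    // Raw form: `cr#"..."#`, with the usual raw-string hash rules.
    let raw: &core::ffi::CStr = cr#"no \escapes here"#;
    println!("{plain:?} {raw:?}");
}
```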
View file

@ -10,16 +10,16 @@ use std::{
ops::{Bound, Range}, ops::{Bound, Range},
}; };
use ::tt::{TextRange, TextSize};
use proc_macro::bridge::{self, server}; use proc_macro::bridge::{self, server};
use span::{Span, FIXUP_ERASED_FILE_AST_ID_MARKER}; use span::{Span, FIXUP_ERASED_FILE_AST_ID_MARKER};
use tt::{TextRange, TextSize};
use crate::server::{ use crate::server::{
delim_to_external, delim_to_internal, token_stream::TokenStreamBuilder, LiteralFormatter, delim_to_external, delim_to_internal, token_stream::TokenStreamBuilder, LiteralFormatter,
Symbol, SymbolInternerRef, SYMBOL_INTERNER, Symbol, SymbolInternerRef, SYMBOL_INTERNER,
}; };
mod tt { mod tt {
pub use ::tt::*; pub use tt::*;
pub type Subtree = ::tt::Subtree<super::Span>; pub type Subtree = ::tt::Subtree<super::Span>;
pub type TokenTree = ::tt::TokenTree<super::Span>; pub type TokenTree = ::tt::TokenTree<super::Span>;
@ -97,22 +97,33 @@ impl server::FreeFunctions for RaSpanServer {
} }
let TokenKind::Literal { kind, suffix_start } = lit.kind else { return Err(()) }; let TokenKind::Literal { kind, suffix_start } = lit.kind else { return Err(()) };
-        let kind = match kind {
-            LiteralKind::Int { .. } => LitKind::Integer,
-            LiteralKind::Float { .. } => LitKind::Float,
-            LiteralKind::Char { .. } => LitKind::Char,
-            LiteralKind::Byte { .. } => LitKind::Byte,
-            LiteralKind::Str { .. } => LitKind::Str,
-            LiteralKind::ByteStr { .. } => LitKind::ByteStr,
-            LiteralKind::CStr { .. } => LitKind::CStr,
-            LiteralKind::RawStr { n_hashes } => LitKind::StrRaw(n_hashes.unwrap_or_default()),
-            LiteralKind::RawByteStr { n_hashes } => {
-                LitKind::ByteStrRaw(n_hashes.unwrap_or_default())
-            }
-            LiteralKind::RawCStr { n_hashes } => LitKind::CStrRaw(n_hashes.unwrap_or_default()),
+        let (kind, start_offset, end_offset) = match kind {
+            LiteralKind::Int { .. } => (LitKind::Integer, 0, 0),
+            LiteralKind::Float { .. } => (LitKind::Float, 0, 0),
+            LiteralKind::Char { terminated } => (LitKind::Char, 1, terminated as usize),
+            LiteralKind::Byte { terminated } => (LitKind::Byte, 2, terminated as usize),
+            LiteralKind::Str { terminated } => (LitKind::Str, 1, terminated as usize),
+            LiteralKind::ByteStr { terminated } => (LitKind::ByteStr, 2, terminated as usize),
+            LiteralKind::CStr { terminated } => (LitKind::CStr, 2, terminated as usize),
+            LiteralKind::RawStr { n_hashes } => (
+                LitKind::StrRaw(n_hashes.unwrap_or_default()),
+                2 + n_hashes.unwrap_or_default() as usize,
+                1 + n_hashes.unwrap_or_default() as usize,
+            ),
+            LiteralKind::RawByteStr { n_hashes } => (
+                LitKind::ByteStrRaw(n_hashes.unwrap_or_default()),
+                3 + n_hashes.unwrap_or_default() as usize,
+                1 + n_hashes.unwrap_or_default() as usize,
+            ),
+            LiteralKind::RawCStr { n_hashes } => (
+                LitKind::CStrRaw(n_hashes.unwrap_or_default()),
+                3 + n_hashes.unwrap_or_default() as usize,
+                1 + n_hashes.unwrap_or_default() as usize,
+            ),
         };
         let (lit, suffix) = s.split_at(suffix_start as usize);
+        let lit = &lit[start_offset..lit.len() - end_offset];
let suffix = match suffix { let suffix = match suffix {
"" | "_" => None, "" | "_" => None,
suffix => Some(Symbol::intern(self.interner, suffix)), suffix => Some(Symbol::intern(self.interner, suffix)),
@ -248,12 +259,8 @@ impl server::TokenStream for RaSpanServer {
} }
tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => { tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => {
                     bridge::TokenTree::Literal(bridge::Literal {
-                        // FIXME: handle literal kinds
-                        kind: bridge::LitKind::Integer, // dummy
-                        symbol: Symbol::intern(self.interner, &lit.text),
-                        // FIXME: handle suffixes
-                        suffix: None,
                         span: lit.span,
+                        ..server::FreeFunctions::literal_from_str(self, &lit.text).unwrap()
                     })
} }
tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => { tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => {

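The `start_offset`/`end_offset` pair strips the delimiters so only the literal's body is interned as the symbol (the same arithmetic appears again in the token-id server below). A standalone check of the raw-string case, where `n_hashes = 2` (illustrative, not server code):

```rust
fn main() {
    let s = r####"r##"raw"##"####; // source text of the literal: r##"raw"##
    let n_hashes = 2usize;
    let start_offset = 2 + n_hashes; // strips `r`, `##`, and the opening `"`
    let end_offset = 1 + n_hashes;   // strips the closing `"` and `##`
    assert_eq!(&s[start_offset..s.len() - end_offset], "raw");
}
```

For the non-raw kinds, `terminated as usize` makes removal of the trailing delimiter conditional, so an unterminated literal at end of input doesn't strip a quote it never had.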
View file

@ -14,7 +14,7 @@ use crate::server::{
mod tt { mod tt {
pub use proc_macro_api::msg::TokenId; pub use proc_macro_api::msg::TokenId;
pub use ::tt::*; pub use tt::*;
pub type Subtree = ::tt::Subtree<TokenId>; pub type Subtree = ::tt::Subtree<TokenId>;
pub type TokenTree = ::tt::TokenTree<TokenId>; pub type TokenTree = ::tt::TokenTree<TokenId>;
@ -89,22 +89,34 @@ impl server::FreeFunctions for TokenIdServer {
} }
let TokenKind::Literal { kind, suffix_start } = lit.kind else { return Err(()) }; let TokenKind::Literal { kind, suffix_start } = lit.kind else { return Err(()) };
-        let kind = match kind {
-            LiteralKind::Int { .. } => LitKind::Integer,
-            LiteralKind::Float { .. } => LitKind::Float,
-            LiteralKind::Char { .. } => LitKind::Char,
-            LiteralKind::Byte { .. } => LitKind::Byte,
-            LiteralKind::Str { .. } => LitKind::Str,
-            LiteralKind::ByteStr { .. } => LitKind::ByteStr,
-            LiteralKind::CStr { .. } => LitKind::CStr,
-            LiteralKind::RawStr { n_hashes } => LitKind::StrRaw(n_hashes.unwrap_or_default()),
-            LiteralKind::RawByteStr { n_hashes } => {
-                LitKind::ByteStrRaw(n_hashes.unwrap_or_default())
-            }
-            LiteralKind::RawCStr { n_hashes } => LitKind::CStrRaw(n_hashes.unwrap_or_default()),
+        let (kind, start_offset, end_offset) = match kind {
+            LiteralKind::Int { .. } => (LitKind::Integer, 0, 0),
+            LiteralKind::Float { .. } => (LitKind::Float, 0, 0),
+            LiteralKind::Char { terminated } => (LitKind::Char, 1, terminated as usize),
+            LiteralKind::Byte { terminated } => (LitKind::Byte, 2, terminated as usize),
+            LiteralKind::Str { terminated } => (LitKind::Str, 1, terminated as usize),
+            LiteralKind::ByteStr { terminated } => (LitKind::ByteStr, 2, terminated as usize),
+            LiteralKind::CStr { terminated } => (LitKind::CStr, 2, terminated as usize),
+            LiteralKind::RawStr { n_hashes } => (
+                LitKind::StrRaw(n_hashes.unwrap_or_default()),
+                2 + n_hashes.unwrap_or_default() as usize,
+                1 + n_hashes.unwrap_or_default() as usize,
+            ),
+            LiteralKind::RawByteStr { n_hashes } => (
+                LitKind::ByteStrRaw(n_hashes.unwrap_or_default()),
+                3 + n_hashes.unwrap_or_default() as usize,
+                1 + n_hashes.unwrap_or_default() as usize,
+            ),
+            LiteralKind::RawCStr { n_hashes } => (
+                LitKind::CStrRaw(n_hashes.unwrap_or_default()),
+                3 + n_hashes.unwrap_or_default() as usize,
+                1 + n_hashes.unwrap_or_default() as usize,
+            ),
         };
         let (lit, suffix) = s.split_at(suffix_start as usize);
+        let lit = &lit[start_offset..lit.len() - end_offset];
let suffix = match suffix { let suffix = match suffix {
"" | "_" => None, "" | "_" => None,
suffix => Some(Symbol::intern(self.interner, suffix)), suffix => Some(Symbol::intern(self.interner, suffix)),
@ -233,12 +245,9 @@ impl server::TokenStream for TokenIdServer {
} }
tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => { tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => {
                     bridge::TokenTree::Literal(bridge::Literal {
-                        // FIXME: handle literal kinds
-                        kind: bridge::LitKind::Integer, // dummy
-                        symbol: Symbol::intern(self.interner, &lit.text),
-                        // FIXME: handle suffixes
-                        suffix: None,
                         span: lit.span,
+                        ..server::FreeFunctions::literal_from_str(self, &lit.text)
+                            .unwrap_or_else(|_| panic!("`{}`", lit.text))
                     })
} }
tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => { tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => {

View file

@ -115,8 +115,6 @@ pub(super) mod token_stream {
} }
} }
type LexError = String;
/// Attempts to break the string into tokens and parse those tokens into a token stream. /// Attempts to break the string into tokens and parse those tokens into a token stream.
/// May fail for a number of reasons, for example, if the string contains unbalanced delimiters /// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
/// or characters not existing in the language. /// or characters not existing in the language.
@ -124,13 +122,10 @@ pub(super) mod token_stream {
/// ///
/// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
/// change these errors into `LexError`s later. /// change these errors into `LexError`s later.
-    #[rustfmt::skip]
-    impl<S: tt::Span> /*FromStr for*/ TokenStream<S> {
-        // type Err = LexError;
-
-        pub(crate) fn from_str(src: &str, call_site: S) -> Result<TokenStream<S>, LexError> {
+    impl<S: tt::Span> TokenStream<S> {
+        pub(crate) fn from_str(src: &str, call_site: S) -> Result<TokenStream<S>, String> {
             let subtree =
-                mbe::parse_to_token_tree_static_span(call_site, src).ok_or("Failed to parse from mbe")?;
+                mbe::parse_to_token_tree_static_span(call_site, src).ok_or("lexing error")?;
             Ok(TokenStream::with_subtree(subtree))
} }

View file

@ -169,7 +169,7 @@ fn test_fn_like_mk_idents() {
fn test_fn_like_macro_clone_literals() { fn test_fn_like_macro_clone_literals() {
assert_expand( assert_expand(
"fn_like_clone_tokens", "fn_like_clone_tokens",
r###"1u16, 2_u32, -4i64, 3.14f32, "hello bridge", "suffixed"suffix, r##"raw"##"###, r###"1u16, 2_u32, -4i64, 3.14f32, "hello bridge", "suffixed"suffix, r##"raw"##, 'a', b'b', c"null""###,
expect![[r###" expect![[r###"
SUBTREE $$ 1 1 SUBTREE $$ 1 1
LITERAL 1u16 1 LITERAL 1u16 1
@ -181,11 +181,17 @@ fn test_fn_like_macro_clone_literals() {
PUNCH , [alone] 1 PUNCH , [alone] 1
LITERAL 3.14f32 1 LITERAL 3.14f32 1
PUNCH , [alone] 1 PUNCH , [alone] 1
LITERAL ""hello bridge"" 1 LITERAL "hello bridge" 1
PUNCH , [alone] 1 PUNCH , [alone] 1
LITERAL ""suffixed""suffix 1 LITERAL "suffixed"suffix 1
PUNCH , [alone] 1 PUNCH , [alone] 1
LITERAL r##"r##"raw"##"## 1"###]], LITERAL r##"raw"## 1
PUNCH , [alone] 1
LITERAL 'a' 1
PUNCH , [alone] 1
LITERAL b'b' 1
PUNCH , [alone] 1
LITERAL c"null" 1"###]],
expect![[r###" expect![[r###"
SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
LITERAL 1u16 SpanData { range: 0..4, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } LITERAL 1u16 SpanData { range: 0..4, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
@ -197,11 +203,17 @@ fn test_fn_like_macro_clone_literals() {
PUNCH , [alone] SpanData { range: 18..19, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } PUNCH , [alone] SpanData { range: 18..19, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
LITERAL 3.14f32 SpanData { range: 20..27, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } LITERAL 3.14f32 SpanData { range: 20..27, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
PUNCH , [alone] SpanData { range: 27..28, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } PUNCH , [alone] SpanData { range: 27..28, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
LITERAL ""hello bridge"" SpanData { range: 29..43, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } LITERAL "hello bridge" SpanData { range: 29..43, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
PUNCH , [alone] SpanData { range: 43..44, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } PUNCH , [alone] SpanData { range: 43..44, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
LITERAL ""suffixed""suffix SpanData { range: 45..61, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } LITERAL "suffixed"suffix SpanData { range: 45..61, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
PUNCH , [alone] SpanData { range: 61..62, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } PUNCH , [alone] SpanData { range: 61..62, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
LITERAL r##"r##"raw"##"## SpanData { range: 63..73, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"###]], LITERAL r##"raw"## SpanData { range: 63..73, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
PUNCH , [alone] SpanData { range: 73..74, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
LITERAL 'a' SpanData { range: 75..78, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
PUNCH , [alone] SpanData { range: 78..79, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
LITERAL b'b' SpanData { range: 80..84, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
PUNCH , [alone] SpanData { range: 84..85, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
LITERAL c"null" SpanData { range: 86..93, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"###]],
); );
} }

View file

@ -138,7 +138,7 @@ impl WorkspaceBuildScripts {
toolchain: &Option<Version>, toolchain: &Option<Version>,
sysroot: Option<&Sysroot>, sysroot: Option<&Sysroot>,
) -> io::Result<WorkspaceBuildScripts> { ) -> io::Result<WorkspaceBuildScripts> {
const RUST_1_62: Version = Version::new(1, 62, 0); const RUST_1_75: Version = Version::new(1, 75, 0);
let current_dir = match &config.invocation_location { let current_dir = match &config.invocation_location {
InvocationLocation::Root(root) if config.run_build_script_command.is_some() => { InvocationLocation::Root(root) if config.run_build_script_command.is_some() => {
@ -162,7 +162,7 @@ impl WorkspaceBuildScripts {
progress, progress,
) { ) {
Ok(WorkspaceBuildScripts { error: Some(error), .. }) Ok(WorkspaceBuildScripts { error: Some(error), .. })
if toolchain.as_ref().map_or(false, |it| *it >= RUST_1_62) => if toolchain.as_ref().map_or(false, |it| *it >= RUST_1_75) =>
{ {
// building build scripts failed, attempt to build with --keep-going so // building build scripts failed, attempt to build with --keep-going so
// that we potentially get more build data // that we potentially get more build data
@ -172,7 +172,8 @@ impl WorkspaceBuildScripts {
&workspace.workspace_root().to_path_buf(), &workspace.workspace_root().to_path_buf(),
sysroot, sysroot,
)?; )?;
cmd.args(["-Z", "unstable-options", "--keep-going"]).env("RUSTC_BOOTSTRAP", "1");
cmd.args(["--keep-going"]);
let mut res = Self::run_per_ws(cmd, workspace, current_dir, progress)?; let mut res = Self::run_per_ws(cmd, workspace, current_dir, progress)?;
res.error = Some(error); res.error = Some(error);
Ok(res) Ok(res)

View file

@ -32,7 +32,16 @@ pub fn get(
Sysroot::set_rustup_toolchain_env(&mut cmd, sysroot); Sysroot::set_rustup_toolchain_env(&mut cmd, sysroot);
cmd.envs(extra_env); cmd.envs(extra_env);
cmd.current_dir(cargo_toml.parent()) cmd.current_dir(cargo_toml.parent())
.args(["rustc", "--", "-Z", "unstable-options", "--print", "target-spec-json"]) .args([
"rustc",
"-Z",
"unstable-options",
"--print",
"target-spec-json",
"--",
"-Z",
"unstable-options",
])
.env("RUSTC_BOOTSTRAP", "1"); .env("RUSTC_BOOTSTRAP", "1");
if let Some(target) = target { if let Some(target) = target {
cmd.args(["--target", target]); cmd.args(["--target", target]);

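For readers who want to poke at this outside rust-analyzer, a sketch of the invocation being built (the motivation for moving `--print` before the `--` is not stated in the diff; note that both cargo and rustc gate the flag behind `-Z unstable-options`):

```rust
use std::process::Command;

fn main() -> std::io::Result<()> {
    // cargo rustc -Z unstable-options --print target-spec-json -- -Z unstable-options
    // RUSTC_BOOTSTRAP=1 permits the -Z flags on a stable toolchain.
    let out = Command::new("cargo")
        .args([
            "rustc",
            "-Z",
            "unstable-options",
            "--print",
            "target-spec-json",
            "--",
            "-Z",
            "unstable-options",
        ])
        .env("RUSTC_BOOTSTRAP", "1")
        .output()?;
    print!("{}", String::from_utf8_lossy(&out.stdout));
    Ok(())
}
```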
View file

@ -100,6 +100,8 @@ pub enum ProjectWorkspace {
/// Holds cfg flags for the current target. We get those by running /// Holds cfg flags for the current target. We get those by running
/// `rustc --print cfg`. /// `rustc --print cfg`.
rustc_cfg: Vec<CfgFlag>, rustc_cfg: Vec<CfgFlag>,
toolchain: Option<Version>,
target_layout: TargetLayoutLoadResult,
}, },
} }
@ -145,16 +147,24 @@ impl fmt::Debug for ProjectWorkspace {
debug_struct.field("n_sysroot_crates", &sysroot.num_packages()); debug_struct.field("n_sysroot_crates", &sysroot.num_packages());
} }
             debug_struct
+                .field("toolchain", &toolchain)
                 .field("n_rustc_cfg", &rustc_cfg.len())
-                .field("toolchain", &toolchain)
                 .field("data_layout", &data_layout);
             debug_struct.finish()
         }
-        ProjectWorkspace::DetachedFiles { files, sysroot, rustc_cfg } => f
+        ProjectWorkspace::DetachedFiles {
+            files,
+            sysroot,
+            rustc_cfg,
+            toolchain,
+            target_layout,
+        } => f
             .debug_struct("DetachedFiles")
             .field("n_files", &files.len())
             .field("sysroot", &sysroot.is_ok())
             .field("n_rustc_cfg", &rustc_cfg.len())
+            .field("toolchain", &toolchain)
+            .field("data_layout", &target_layout)
             .finish(),
} }
} }
@ -403,32 +413,54 @@ impl ProjectWorkspace {
detached_files: Vec<AbsPathBuf>, detached_files: Vec<AbsPathBuf>,
config: &CargoConfig, config: &CargoConfig,
) -> anyhow::Result<ProjectWorkspace> { ) -> anyhow::Result<ProjectWorkspace> {
+        let dir = detached_files
+            .first()
+            .and_then(|it| it.parent())
+            .ok_or_else(|| format_err!("No detached files to load"))?;
         let sysroot = match &config.sysroot {
             Some(RustLibSource::Path(path)) => {
                 Sysroot::with_sysroot_dir(path.clone(), config.sysroot_query_metadata)
                     .map_err(|e| Some(format!("Failed to find sysroot at {path}:{e}")))
             }
-            Some(RustLibSource::Discover) => {
-                let dir = &detached_files
-                    .first()
-                    .and_then(|it| it.parent())
-                    .ok_or_else(|| format_err!("No detached files to load"))?;
-                Sysroot::discover(dir, &config.extra_env, config.sysroot_query_metadata).map_err(
-                    |e| {
-                        Some(format!(
-                            "Failed to find sysroot for {dir}. Is rust-src installed? {e}"
-                        ))
-                    },
-                )
-            }
+            Some(RustLibSource::Discover) => Sysroot::discover(
+                dir,
+                &config.extra_env,
+                config.sysroot_query_metadata,
+            )
+            .map_err(|e| {
+                Some(format!("Failed to find sysroot for {dir}. Is rust-src installed? {e}"))
+            }),
             None => Err(None),
         };
-        let rustc_cfg = rustc_cfg::get(
-            None,
-            &FxHashMap::default(),
-            RustcCfgConfig::Rustc(sysroot.as_ref().ok()),
-        );
-        Ok(ProjectWorkspace::DetachedFiles { files: detached_files, sysroot, rustc_cfg })
+        let sysroot_ref = sysroot.as_ref().ok();
+        let toolchain = match get_toolchain_version(
+            dir,
+            sysroot_ref,
+            toolchain::Tool::Rustc,
+            &config.extra_env,
+            "rustc ",
+        ) {
+            Ok(it) => it,
+            Err(e) => {
+                tracing::error!("{e}");
+                None
+            }
+        };
+        let rustc_cfg = rustc_cfg::get(None, &config.extra_env, RustcCfgConfig::Rustc(sysroot_ref));
+        let data_layout = target_data_layout::get(
+            RustcDataLayoutConfig::Rustc(sysroot_ref),
+            None,
+            &config.extra_env,
+        );
+        Ok(ProjectWorkspace::DetachedFiles {
+            files: detached_files,
+            sysroot,
+            rustc_cfg,
+            toolchain,
+            target_layout: data_layout.map(Arc::from).map_err(|it| Arc::from(it.to_string())),
+        })
} }
/// Runs the build scripts for this [`ProjectWorkspace`]. /// Runs the build scripts for this [`ProjectWorkspace`].
@ -724,7 +756,13 @@ impl ProjectWorkspace {
cfg_overrides, cfg_overrides,
build_scripts, build_scripts,
), ),
-            ProjectWorkspace::DetachedFiles { files, sysroot, rustc_cfg } => {
+            ProjectWorkspace::DetachedFiles {
+                files,
+                sysroot,
+                rustc_cfg,
+                toolchain: _,
+                target_layout: _,
+            } => {
detached_files_to_crate_graph(rustc_cfg.clone(), load, files, sysroot.as_ref().ok()) detached_files_to_crate_graph(rustc_cfg.clone(), load, files, sysroot.as_ref().ok())
} }
}; };
@ -786,9 +824,21 @@ impl ProjectWorkspace {
&& toolchain == o_toolchain && toolchain == o_toolchain
} }
             (
-                Self::DetachedFiles { files, sysroot, rustc_cfg },
-                Self::DetachedFiles { files: o_files, sysroot: o_sysroot, rustc_cfg: o_rustc_cfg },
-            ) => files == o_files && sysroot == o_sysroot && rustc_cfg == o_rustc_cfg,
+                Self::DetachedFiles { files, sysroot, rustc_cfg, toolchain, target_layout },
+                Self::DetachedFiles {
+                    files: o_files,
+                    sysroot: o_sysroot,
+                    rustc_cfg: o_rustc_cfg,
+                    toolchain: o_toolchain,
+                    target_layout: o_target_layout,
+                },
+            ) => {
+                files == o_files
+                    && sysroot == o_sysroot
+                    && rustc_cfg == o_rustc_cfg
+                    && toolchain == o_toolchain
+                    && target_layout == o_target_layout
+            }
             _ => false,
} }
} }

View file

@ -208,7 +208,6 @@ fn required_features(cfg_expr: &CfgExpr, features: &mut Vec<String>) {
mod tests { mod tests {
use super::*; use super::*;
use cfg::CfgExpr;
use mbe::{syntax_node_to_token_tree, DummyTestSpanMap, DUMMY}; use mbe::{syntax_node_to_token_tree, DummyTestSpanMap, DUMMY};
use syntax::{ use syntax::{
ast::{self, AstNode}, ast::{self, AstNode},

View file

@ -30,7 +30,7 @@ xflags::xflags! {
default cmd lsp-server { default cmd lsp-server {
/// Print version. /// Print version.
optional --version optional -V, --version
/// Dump a LSP config JSON schema. /// Dump a LSP config JSON schema.
optional --print-config-schema optional --print-config-schema

View file

@ -1,11 +1,16 @@
//! Run all tests in a project, similar to `cargo test`, but using the mir interpreter. //! Run all tests in a project, similar to `cargo test`, but using the mir interpreter.
use std::convert::identity;
use std::thread::Builder;
use std::time::{Duration, Instant};
use std::{cell::RefCell, fs::read_to_string, panic::AssertUnwindSafe, path::PathBuf}; use std::{cell::RefCell, fs::read_to_string, panic::AssertUnwindSafe, path::PathBuf};
use hir::{Change, Crate}; use hir::{Change, Crate};
use ide::{AnalysisHost, DiagnosticCode, DiagnosticsConfig}; use ide::{AnalysisHost, DiagnosticCode, DiagnosticsConfig};
use itertools::Either;
use profile::StopWatch; use profile::StopWatch;
use project_model::{CargoConfig, ProjectWorkspace, RustLibSource, Sysroot}; use project_model::target_data_layout::RustcDataLayoutConfig;
use project_model::{target_data_layout, CargoConfig, ProjectWorkspace, RustLibSource, Sysroot};
use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice}; use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice};
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
@ -60,15 +65,22 @@ impl Tester {
std::fs::write(&tmp_file, "")?; std::fs::write(&tmp_file, "")?;
let cargo_config = let cargo_config =
CargoConfig { sysroot: Some(RustLibSource::Discover), ..Default::default() }; CargoConfig { sysroot: Some(RustLibSource::Discover), ..Default::default() };
let sysroot =
Ok(Sysroot::discover(tmp_file.parent().unwrap(), &cargo_config.extra_env, false)
.unwrap());
let data_layout = target_data_layout::get(
RustcDataLayoutConfig::Rustc(sysroot.as_ref().ok()),
None,
&cargo_config.extra_env,
);
let workspace = ProjectWorkspace::DetachedFiles { let workspace = ProjectWorkspace::DetachedFiles {
files: vec![tmp_file.clone()], files: vec![tmp_file.clone()],
sysroot: Ok(Sysroot::discover( sysroot,
tmp_file.parent().unwrap(),
&cargo_config.extra_env,
false,
)
.unwrap()),
rustc_cfg: vec![], rustc_cfg: vec![],
toolchain: None,
target_layout: data_layout.map(Arc::from).map_err(|it| Arc::from(it.to_string())),
}; };
let load_cargo_config = LoadCargoConfig { let load_cargo_config = LoadCargoConfig {
load_out_dirs_from_check: false, load_out_dirs_from_check: false,
@ -92,6 +104,7 @@ impl Tester {
} }
fn test(&mut self, p: PathBuf) { fn test(&mut self, p: PathBuf) {
println!("{}", p.display());
if p.parent().unwrap().file_name().unwrap() == "auxiliary" { if p.parent().unwrap().file_name().unwrap() == "auxiliary" {
// These are not tests // These are not tests
return; return;
@ -124,15 +137,44 @@ impl Tester {
self.host.apply_change(change); self.host.apply_change(change);
let diagnostic_config = DiagnosticsConfig::test_sample(); let diagnostic_config = DiagnosticsConfig::test_sample();
let res = std::thread::scope(|s| {
let worker = Builder::new()
.stack_size(40 * 1024 * 1024)
.spawn_scoped(s, {
let diagnostic_config = &diagnostic_config;
let main = std::thread::current();
let analysis = self.host.analysis();
let root_file = self.root_file;
move || {
let res = std::panic::catch_unwind(move || {
analysis.diagnostics(
diagnostic_config,
ide::AssistResolveStrategy::None,
root_file,
)
});
main.unpark();
res
}
})
.unwrap();
let timeout = Duration::from_secs(5);
let now = Instant::now();
while now.elapsed() <= timeout && !worker.is_finished() {
std::thread::park_timeout(timeout - now.elapsed());
}
if !worker.is_finished() {
// attempt to cancel the worker, won't work for chalk hangs unfortunately
self.host.request_cancellation();
}
worker.join().and_then(identity)
});
let mut actual = FxHashMap::default(); let mut actual = FxHashMap::default();
let panicked = match std::panic::catch_unwind(|| { let panicked = match res {
self.host Err(e) => Some(Either::Left(e)),
.analysis() Ok(Ok(diags)) => {
.diagnostics(&diagnostic_config, ide::AssistResolveStrategy::None, self.root_file)
.unwrap()
}) {
Err(e) => Some(e),
Ok(diags) => {
for diag in diags { for diag in diags {
if !matches!(diag.code, DiagnosticCode::RustcHardError(_)) { if !matches!(diag.code, DiagnosticCode::RustcHardError(_)) {
continue; continue;
@ -144,6 +186,7 @@ impl Tester {
} }
None None
} }
Ok(Err(e)) => Some(Either::Right(e)),
}; };
// Ignore tests with diagnostics that we don't emit. // Ignore tests with diagnostics that we don't emit.
ignore_test |= expected.keys().any(|k| !SUPPORTED_DIAGNOSTICS.contains(k)); ignore_test |= expected.keys().any(|k| !SUPPORTED_DIAGNOSTICS.contains(k));
@ -151,6 +194,8 @@ impl Tester {
println!("{p:?} IGNORE"); println!("{p:?} IGNORE");
self.ignore_count += 1; self.ignore_count += 1;
} else if let Some(panic) = panicked { } else if let Some(panic) = panicked {
match panic {
Either::Left(panic) => {
if let Some(msg) = panic if let Some(msg) = panic
.downcast_ref::<String>() .downcast_ref::<String>()
.map(String::as_str) .map(String::as_str)
@ -158,7 +203,10 @@ impl Tester {
{ {
println!("{msg:?} ") println!("{msg:?} ")
} }
println!("PANIC"); println!("{p:?} PANIC");
}
Either::Right(_) => println!("{p:?} CANCELLED"),
}
self.fail_count += 1; self.fail_count += 1;
} else if actual == expected { } else if actual == expected {
println!("{p:?} PASS"); println!("{p:?} PASS");
@ -228,6 +276,7 @@ impl flags::RustcTests {
pub fn run(self) -> Result<()> { pub fn run(self) -> Result<()> {
let mut tester = Tester::new()?; let mut tester = Tester::new()?;
let walk_dir = WalkDir::new(self.rustc_repo.join("tests/ui")); let walk_dir = WalkDir::new(self.rustc_repo.join("tests/ui"));
eprintln!("Running tests for tests/ui");
for i in walk_dir { for i in walk_dir {
let i = i?; let i = i?;
let p = i.into_path(); let p = i.into_path();
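
The rewritten `test` method above wraps the diagnostics run in a watchdog: the work runs on a scoped worker thread with a 40 MiB stack, the main thread parks with a deadline, and after five seconds cancellation is requested. A self-contained sketch of the same pattern (the analysis function is a stand-in, and `saturating_sub` is used where the hunk subtracts directly):

```rust
use std::panic::AssertUnwindSafe;
use std::thread::Builder;
use std::time::{Duration, Instant};

// stand-in for the diagnostics call; deep recursion motivates the stack size
fn expensive_analysis() -> usize {
    (0..1_000_000u64).sum::<u64>() as usize
}

fn main() {
    let timeout = Duration::from_secs(5);
    let res = std::thread::scope(|s| {
        let main = std::thread::current();
        let worker = Builder::new()
            .stack_size(40 * 1024 * 1024)
            .spawn_scoped(s, move || {
                let r = std::panic::catch_unwind(AssertUnwindSafe(expensive_analysis));
                main.unpark(); // wake the watcher early instead of sleeping out the timeout
                r
            })
            .unwrap();
        let start = Instant::now();
        while start.elapsed() <= timeout && !worker.is_finished() {
            std::thread::park_timeout(timeout.saturating_sub(start.elapsed()));
        }
        // the real code requests cancellation here if the worker is still busy;
        // the scope still joins the thread before returning
        worker.join()
    });
    match res {
        Ok(Ok(n)) => println!("finished: {n}"),
        Ok(Err(_)) => println!("worker panicked"),
        // join only fails if the thread panicked, and catch_unwind caught that
        Err(_) => unreachable!(),
    }
}
```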

View file

@ -324,7 +324,7 @@ fn moniker_to_symbol(moniker: &MonikerResult) -> scip_types::Symbol {
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use super::*; use super::*;
use ide::{AnalysisHost, FilePosition, StaticIndex, TextSize}; use ide::{AnalysisHost, FilePosition, TextSize};
use scip::symbol::format_symbol; use scip::symbol::format_symbol;
use test_fixture::ChangeFixture; use test_fixture::ChangeFixture;

View file

@ -301,19 +301,12 @@ impl GlobalState {
if let Some(path) = vfs_path.as_path() { if let Some(path) = vfs_path.as_path() {
let path = path.to_path_buf(); let path = path.to_path_buf();
if reload::should_refresh_for_change(&path, file.kind()) { if reload::should_refresh_for_change(&path, file.kind()) {
workspace_structure_change = Some(( workspace_structure_change = Some((path.clone(), false));
path.clone(),
false,
AsRef::<std::path::Path>::as_ref(&path).ends_with("build.rs"),
));
} }
if file.is_created_or_deleted() { if file.is_created_or_deleted() {
has_structure_changes = true; has_structure_changes = true;
workspace_structure_change = Some(( workspace_structure_change =
path, Some((path, self.crate_graph_file_dependencies.contains(vfs_path)));
self.crate_graph_file_dependencies.contains(vfs_path),
false,
));
} else if path.extension() == Some("rs".as_ref()) { } else if path.extension() == Some("rs".as_ref()) {
modified_rust_files.push(file.file_id); modified_rust_files.push(file.file_id);
} }
@ -365,16 +358,11 @@ impl GlobalState {
// FIXME: ideally we should only trigger a workspace fetch for non-library changes // FIXME: ideally we should only trigger a workspace fetch for non-library changes
// but something's going wrong with the source root business when we add a new local // but something's going wrong with the source root business when we add a new local
// crate see https://github.com/rust-lang/rust-analyzer/issues/13029 // crate see https://github.com/rust-lang/rust-analyzer/issues/13029
if let Some((path, force_crate_graph_reload, build_scripts_touched)) = if let Some((path, force_crate_graph_reload)) = workspace_structure_change {
workspace_structure_change
{
self.fetch_workspaces_queue.request_op( self.fetch_workspaces_queue.request_op(
format!("workspace vfs file change: {path}"), format!("workspace vfs file change: {path}"),
force_crate_graph_reload, force_crate_graph_reload,
); );
if build_scripts_touched {
self.fetch_build_data_queue.request_op(format!("build.rs changed: {path}"), ());
}
} }
} }

View file

@ -16,6 +16,7 @@ use ide::{
ReferenceCategory, Runnable, RunnableKind, SingleResolve, SourceChange, TextEdit, ReferenceCategory, Runnable, RunnableKind, SingleResolve, SourceChange, TextEdit,
}; };
use ide_db::SymbolKind; use ide_db::SymbolKind;
use itertools::Itertools;
use lsp_server::ErrorCode; use lsp_server::ErrorCode;
use lsp_types::{ use lsp_types::{
CallHierarchyIncomingCall, CallHierarchyIncomingCallsParams, CallHierarchyItem, CallHierarchyIncomingCall, CallHierarchyIncomingCallsParams, CallHierarchyItem,
@ -1055,9 +1056,8 @@ pub(crate) fn handle_references(
let exclude_imports = snap.config.find_all_refs_exclude_imports(); let exclude_imports = snap.config.find_all_refs_exclude_imports();
let exclude_tests = snap.config.find_all_refs_exclude_tests(); let exclude_tests = snap.config.find_all_refs_exclude_tests();
let refs = match snap.analysis.find_all_refs(position, None)? { let Some(refs) = snap.analysis.find_all_refs(position, None)? else {
None => return Ok(None), return Ok(None);
Some(refs) => refs,
}; };
let include_declaration = params.context.include_declaration; let include_declaration = params.context.include_declaration;
@ -1084,6 +1084,7 @@ pub(crate) fn handle_references(
}) })
.chain(decl) .chain(decl)
}) })
.unique()
.filter_map(|frange| to_proto::location(&snap, frange).ok()) .filter_map(|frange| to_proto::location(&snap, frange).ok())
.collect(); .collect();
@ -1802,10 +1803,10 @@ fn show_ref_command_link(
.into_iter() .into_iter()
.flat_map(|res| res.references) .flat_map(|res| res.references)
.flat_map(|(file_id, ranges)| { .flat_map(|(file_id, ranges)| {
ranges.into_iter().filter_map(move |(range, _)| { ranges.into_iter().map(move |(range, _)| FileRange { file_id, range })
to_proto::location(snap, FileRange { file_id, range }).ok()
})
}) })
.unique()
.filter_map(|range| to_proto::location(snap, range).ok())
.collect(); .collect();
let title = to_proto::reference_title(locations.len()); let title = to_proto::reference_title(locations.len());
let command = to_proto::command::show_references(title, &uri, position, locations); let command = to_proto::command::show_references(title, &uri, position, locations);
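
Both reference handlers above gain a `.unique()` call from itertools, deduplicating ranges before the comparatively expensive `to_proto::location` conversion. `unique` keeps the first occurrence of each element and requires `Eq + Hash`; a toy demonstration (this `FileRange` is a stand-in, not `ide_db`'s type):

```rust
use itertools::Itertools;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct FileRange {
    file_id: u32,
    start: u32,
    end: u32,
}

fn main() {
    let ranges = [
        FileRange { file_id: 1, start: 0, end: 4 },
        FileRange { file_id: 1, start: 0, end: 4 }, // same location reached twice
        FileRange { file_id: 2, start: 8, end: 12 },
    ];
    // `unique` keeps the first occurrence of each element
    let deduped: Vec<_> = ranges.into_iter().unique().collect();
    assert_eq!(deduped.len(), 2);
}
```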

View file

@ -904,15 +904,16 @@ pub(crate) fn goto_definition_response(
if snap.config.location_link() { if snap.config.location_link() {
let links = targets let links = targets
.into_iter() .into_iter()
.unique_by(|nav| (nav.file_id, nav.full_range, nav.focus_range))
.map(|nav| location_link(snap, src, nav)) .map(|nav| location_link(snap, src, nav))
.collect::<Cancellable<Vec<_>>>()?; .collect::<Cancellable<Vec<_>>>()?;
Ok(links.into()) Ok(links.into())
} else { } else {
let locations = targets let locations = targets
.into_iter() .into_iter()
.map(|nav| { .map(|nav| FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() })
location(snap, FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() }) .unique()
}) .map(|range| location(snap, range))
.collect::<Cancellable<Vec<_>>>()?; .collect::<Cancellable<Vec<_>>>()?;
Ok(locations.into()) Ok(locations.into())
} }
@ -1001,10 +1002,8 @@ fn merge_text_and_snippet_edits(
let mut new_text = current_indel.insert; let mut new_text = current_indel.insert;
// find which snippet bits need to be escaped // find which snippet bits need to be escaped
let escape_places = new_text let escape_places =
.rmatch_indices(['\\', '$', '{', '}']) new_text.rmatch_indices(['\\', '$', '}']).map(|(insert, _)| insert).collect_vec();
.map(|(insert, _)| insert)
.collect_vec();
let mut escape_places = escape_places.into_iter().peekable(); let mut escape_places = escape_places.into_iter().peekable();
let mut escape_prior_bits = |new_text: &mut String, up_to: usize| { let mut escape_prior_bits = |new_text: &mut String, up_to: usize| {
for before in escape_places.peeking_take_while(|insert| *insert >= up_to) { for before in escape_places.peeking_take_while(|insert| *insert >= up_to) {
@ -2175,7 +2174,7 @@ fn bar(_: usize) {}
character: 0, character: 0,
}, },
}, },
new_text: "\\$${1:ab\\{\\}\\$c\\\\d}ef", new_text: "\\$${1:ab{\\}\\$c\\\\d}ef",
insert_text_format: Some( insert_text_format: Some(
Snippet, Snippet,
), ),
@ -2271,7 +2270,7 @@ struct ProcMacro {
character: 5, character: 5,
}, },
}, },
new_text: "$0disabled = false;\n ProcMacro \\{\n disabled,\n \\}", new_text: "$0disabled = false;\n ProcMacro {\n disabled,\n \\}",
insert_text_format: Some( insert_text_format: Some(
Snippet, Snippet,
), ),
@ -2335,7 +2334,7 @@ struct P {
character: 5, character: 5,
}, },
}, },
new_text: "$0disabled = false;\n ProcMacro \\{\n disabled,\n \\}", new_text: "$0disabled = false;\n ProcMacro {\n disabled,\n \\}",
insert_text_format: Some( insert_text_format: Some(
Snippet, Snippet,
), ),
@ -2400,7 +2399,7 @@ struct ProcMacro {
character: 5, character: 5,
}, },
}, },
new_text: "${0:disabled} = false;\n ProcMacro \\{\n disabled,\n \\}", new_text: "${0:disabled} = false;\n ProcMacro {\n disabled,\n \\}",
insert_text_format: Some( insert_text_format: Some(
Snippet, Snippet,
), ),
@ -2465,7 +2464,7 @@ struct P {
character: 5, character: 5,
}, },
}, },
new_text: "${0:disabled} = false;\n ProcMacro \\{\n disabled,\n \\}", new_text: "${0:disabled} = false;\n ProcMacro {\n disabled,\n \\}",
insert_text_format: Some( insert_text_format: Some(
Snippet, Snippet,
), ),
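
The escape-set change above (dropping `{` from the `rmatch_indices` call) matches the updated test expectations: in the LSP snippet grammar a bare `{` is literal, so only `\`, `$` and `}` need a backslash. A sketch of that rule:

```rust
fn escape_snippet_bits(text: &str) -> String {
    let mut out = String::with_capacity(text.len());
    for c in text.chars() {
        // `{` is intentionally not escaped; it only has meaning after `$`
        if matches!(c, '\\' | '$' | '}') {
            out.push('\\');
        }
        out.push(c);
    }
    out
}

fn main() {
    assert_eq!(escape_snippet_bits("ab{}$c\\d"), "ab{\\}\\$c\\\\d");
}
```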

View file

@ -134,6 +134,7 @@ impl GlobalState {
let token = lsp_types::ProgressToken::String( let token = lsp_types::ProgressToken::String(
cancel_token.unwrap_or_else(|| format!("rustAnalyzer/{title}")), cancel_token.unwrap_or_else(|| format!("rustAnalyzer/{title}")),
); );
tracing::debug!(?token, ?state, "report_progress {message:?}");
let work_done_progress = match state { let work_done_progress = match state {
Progress::Begin => { Progress::Begin => {
self.send_request::<lsp_types::request::WorkDoneProgressCreate>( self.send_request::<lsp_types::request::WorkDoneProgressCreate>(

View file

@ -411,10 +411,7 @@ impl GlobalState {
if *force_reload_crate_graph { if *force_reload_crate_graph {
self.recreate_crate_graph(cause); self.recreate_crate_graph(cause);
} }
if self.build_deps_changed && self.config.run_build_scripts() {
self.build_deps_changed = false;
self.fetch_build_data_queue.request_op("build_deps_changed".to_owned(), ());
}
// Current build scripts do not match the version of the active // Current build scripts do not match the version of the active
// workspace, so there's nothing for us to update. // workspace, so there's nothing for us to update.
return; return;
@ -424,7 +421,7 @@ impl GlobalState {
// Here, we completely changed the workspace (Cargo.toml edit), so // Here, we completely changed the workspace (Cargo.toml edit), so
// we don't care about build-script results, they are stale. // we don't care about build-script results, they are stale.
// FIXME: can we abort the build scripts here? // FIXME: can we abort the build scripts here if they are already running?
self.workspaces = Arc::new(workspaces); self.workspaces = Arc::new(workspaces);
if self.config.run_build_scripts() { if self.config.run_build_scripts() {
@ -525,14 +522,15 @@ impl GlobalState {
} }
fn recreate_crate_graph(&mut self, cause: String) { fn recreate_crate_graph(&mut self, cause: String) {
{
// Create crate graph from all the workspaces
let vfs = &mut self.vfs.write().0;
let loader = &mut self.loader;
// crate graph construction relies on these paths, record them so when one of them gets // crate graph construction relies on these paths, record them so when one of them gets
// deleted or created we trigger a reconstruction of the crate graph // deleted or created we trigger a reconstruction of the crate graph
let mut crate_graph_file_dependencies = FxHashSet::default(); let mut crate_graph_file_dependencies = FxHashSet::default();
let (crate_graph, proc_macro_paths, layouts, toolchains) = {
// Create crate graph from all the workspaces
let vfs = &mut self.vfs.write().0;
let loader = &mut self.loader;
let load = |path: &AbsPath| { let load = |path: &AbsPath| {
let _p = tracing::span!(tracing::Level::DEBUG, "switch_workspaces::load").entered(); let _p = tracing::span!(tracing::Level::DEBUG, "switch_workspaces::load").entered();
let vfs_path = vfs::VfsPath::from(path.to_path_buf()); let vfs_path = vfs::VfsPath::from(path.to_path_buf());
@ -548,9 +546,8 @@ impl GlobalState {
} }
}; };
let (crate_graph, proc_macro_paths, layouts, toolchains) = ws_to_crate_graph(&self.workspaces, self.config.extra_env(), load)
ws_to_crate_graph(&self.workspaces, self.config.extra_env(), load); };
let mut change = Change::new(); let mut change = Change::new();
if self.config.expand_proc_macros() { if self.config.expand_proc_macros() {
change.set_proc_macros( change.set_proc_macros(
@ -566,7 +563,7 @@ impl GlobalState {
change.set_toolchains(toolchains); change.set_toolchains(toolchains);
self.analysis_host.apply_change(change); self.analysis_host.apply_change(change);
self.crate_graph_file_dependencies = crate_graph_file_dependencies; self.crate_graph_file_dependencies = crate_graph_file_dependencies;
}
self.process_changes(); self.process_changes();
self.reload_flycheck(); self.reload_flycheck();
} }
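
The `recreate_crate_graph` hunk above moves the `vfs.write()` guard into an inner block so it is dropped before `process_changes` and `reload_flycheck` run. The general shape of that lock-scoping pattern, as a minimal sketch:

```rust
use std::sync::RwLock;

fn main() {
    let vfs = RwLock::new(vec![1, 2, 3]);
    let len = {
        // the guard lives only inside this block ...
        let write = vfs.write().unwrap();
        write.len()
    }; // ... and is released here
    // later phases can take the lock again without deadlocking
    assert_eq!(len, vfs.read().unwrap().len());
}
```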

View file

@ -243,7 +243,7 @@ impl Server {
to_string_pretty(actual_part).unwrap(), to_string_pretty(actual_part).unwrap(),
); );
} else { } else {
tracing::debug!("sucessfully matched notification"); tracing::debug!("successfully matched notification");
return; return;
} }
} else { } else {

View file

@ -21,6 +21,7 @@ rustc-hash = "1.0"
smallvec = "1.0.0" smallvec = "1.0.0"
oorandom = "11" oorandom = "11"
triomphe = "0.1.11" triomphe = "0.1.11"
itertools.workspace = true
salsa-macros = { version = "0.0.0", path = "salsa-macros" } salsa-macros = { version = "0.0.0", path = "salsa-macros" }

View file

@ -154,8 +154,8 @@ pub(crate) fn database(args: TokenStream, input: TokenStream) -> TokenStream {
self.#db_storage_field.salsa_runtime() self.#db_storage_field.salsa_runtime()
} }
fn ops_salsa_runtime_mut(&mut self) -> &mut salsa::Runtime { fn synthetic_write(&mut self, durability: salsa::Durability) {
self.#db_storage_field.salsa_runtime_mut() self.#db_storage_field.salsa_runtime_mut().synthetic_write(durability)
} }
fn fmt_index( fn fmt_index(

View file

@ -526,7 +526,7 @@ pub(crate) fn query_group(args: TokenStream, input: TokenStream) -> TokenStream
fmt_ops.extend(quote! { fmt_ops.extend(quote! {
#query_index => { #query_index => {
salsa::plumbing::QueryStorageOps::fmt_index( salsa::plumbing::QueryStorageOps::fmt_index(
&*self.#fn_name, db, input, fmt, &*self.#fn_name, db, input.key_index(), fmt,
) )
} }
}); });
@ -537,7 +537,7 @@ pub(crate) fn query_group(args: TokenStream, input: TokenStream) -> TokenStream
maybe_changed_ops.extend(quote! { maybe_changed_ops.extend(quote! {
#query_index => { #query_index => {
salsa::plumbing::QueryStorageOps::maybe_changed_after( salsa::plumbing::QueryStorageOps::maybe_changed_after(
&*self.#fn_name, db, input, revision &*self.#fn_name, db, input.key_index(), revision
) )
} }
}); });

View file

@ -102,13 +102,13 @@ where
let mut write = self.slot_map.write(); let mut write = self.slot_map.write();
let entry = write.entry(key.clone()); let entry = write.entry(key.clone());
let key_index = u32::try_from(entry.index()).unwrap(); let key_index = entry.index() as u32;
let database_key_index = DatabaseKeyIndex { let database_key_index = DatabaseKeyIndex {
group_index: self.group_index, group_index: self.group_index,
query_index: Q::QUERY_INDEX, query_index: Q::QUERY_INDEX,
key_index, key_index,
}; };
entry.or_insert_with(|| Arc::new(Slot::new(key.clone(), database_key_index))).clone() entry.or_insert_with(|| Arc::new(Slot::new(database_key_index))).clone()
} }
} }
@ -131,34 +131,36 @@ where
fn fmt_index( fn fmt_index(
&self, &self,
_db: &<Q as QueryDb<'_>>::DynDb, _db: &<Q as QueryDb<'_>>::DynDb,
index: DatabaseKeyIndex, index: u32,
fmt: &mut std::fmt::Formatter<'_>, fmt: &mut std::fmt::Formatter<'_>,
) -> std::fmt::Result { ) -> std::fmt::Result {
assert_eq!(index.group_index, self.group_index);
assert_eq!(index.query_index, Q::QUERY_INDEX);
let slot_map = self.slot_map.read(); let slot_map = self.slot_map.read();
let key = slot_map.get_index(index.key_index as usize).unwrap().0; let key = slot_map.get_index(index as usize).unwrap().0;
write!(fmt, "{}({:?})", Q::QUERY_NAME, key) write!(fmt, "{}({:?})", Q::QUERY_NAME, key)
} }
fn maybe_changed_after( fn maybe_changed_after(
&self, &self,
db: &<Q as QueryDb<'_>>::DynDb, db: &<Q as QueryDb<'_>>::DynDb,
input: DatabaseKeyIndex, index: u32,
revision: Revision, revision: Revision,
) -> bool { ) -> bool {
assert_eq!(input.group_index, self.group_index);
assert_eq!(input.query_index, Q::QUERY_INDEX);
debug_assert!(revision < db.salsa_runtime().current_revision()); debug_assert!(revision < db.salsa_runtime().current_revision());
let slot = self.slot_map.read().get_index(input.key_index as usize).unwrap().1.clone(); let read = self.slot_map.read();
slot.maybe_changed_after(db, revision) let Some((key, slot)) = read.get_index(index as usize) else {
return false;
};
let (key, slot) = (key.clone(), slot.clone());
// note: this drop is load-bearing. removing it would cause deadlocks.
drop(read);
slot.maybe_changed_after(db, revision, &key)
} }
fn fetch(&self, db: &<Q as QueryDb<'_>>::DynDb, key: &Q::Key) -> Q::Value { fn fetch(&self, db: &<Q as QueryDb<'_>>::DynDb, key: &Q::Key) -> Q::Value {
db.unwind_if_cancelled(); db.unwind_if_cancelled();
let slot = self.slot(key); let slot = self.slot(key);
let StampedValue { value, durability, changed_at } = slot.read(db); let StampedValue { value, durability, changed_at } = slot.read(db, key);
if let Some(evicted) = self.lru_list.record_use(&slot) { if let Some(evicted) = self.lru_list.record_use(&slot) {
evicted.evict(); evicted.evict();
@ -182,7 +184,7 @@ where
C: std::iter::FromIterator<TableEntry<Q::Key, Q::Value>>, C: std::iter::FromIterator<TableEntry<Q::Key, Q::Value>>,
{ {
let slot_map = self.slot_map.read(); let slot_map = self.slot_map.read();
slot_map.values().filter_map(|slot| slot.as_table_entry()).collect() slot_map.iter().filter_map(|(key, slot)| slot.as_table_entry(key)).collect()
} }
} }
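
Note the load-bearing `drop` above: the read guard on the slot map must be released before calling into `slot.maybe_changed_after`, which may re-enter the map. A minimal sketch of the same discipline (illustrative types, not salsa's):

```rust
use std::collections::HashMap;
use std::sync::{Arc, RwLock};

fn probe(map: &RwLock<HashMap<u32, Arc<String>>>, key: u32) -> Option<Arc<String>> {
    let read = map.read().unwrap();
    let slot = Arc::clone(read.get(&key)?);
    // release the guard before any downstream call that might take the lock
    // again on this thread; holding it across such a call can deadlock
    drop(read);
    Some(slot)
}

fn main() {
    let map = RwLock::new(HashMap::from([(1u32, Arc::new("one".to_owned()))]));
    assert_eq!(probe(&map, 1).unwrap().as_str(), "one");
}
```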

View file

@ -26,8 +26,8 @@ where
Q: QueryFunction, Q: QueryFunction,
MP: MemoizationPolicy<Q>, MP: MemoizationPolicy<Q>,
{ {
key: Q::Key, key_index: u32,
database_key_index: DatabaseKeyIndex, group_index: u16,
state: RwLock<QueryState<Q>>, state: RwLock<QueryState<Q>>,
policy: PhantomData<MP>, policy: PhantomData<MP>,
lru_index: LruIndex, lru_index: LruIndex,
@ -110,10 +110,10 @@ where
Q: QueryFunction, Q: QueryFunction,
MP: MemoizationPolicy<Q>, MP: MemoizationPolicy<Q>,
{ {
pub(super) fn new(key: Q::Key, database_key_index: DatabaseKeyIndex) -> Self { pub(super) fn new(database_key_index: DatabaseKeyIndex) -> Self {
Self { Self {
key, key_index: database_key_index.key_index,
database_key_index, group_index: database_key_index.group_index,
state: RwLock::new(QueryState::NotComputed), state: RwLock::new(QueryState::NotComputed),
lru_index: LruIndex::default(), lru_index: LruIndex::default(),
policy: PhantomData, policy: PhantomData,
@ -121,10 +121,18 @@ where
} }
pub(super) fn database_key_index(&self) -> DatabaseKeyIndex { pub(super) fn database_key_index(&self) -> DatabaseKeyIndex {
self.database_key_index DatabaseKeyIndex {
group_index: self.group_index,
query_index: Q::QUERY_INDEX,
key_index: self.key_index,
}
} }
pub(super) fn read(&self, db: &<Q as QueryDb<'_>>::DynDb) -> StampedValue<Q::Value> { pub(super) fn read(
&self,
db: &<Q as QueryDb<'_>>::DynDb,
key: &Q::Key,
) -> StampedValue<Q::Value> {
let runtime = db.salsa_runtime(); let runtime = db.salsa_runtime();
// NB: We don't need to worry about people modifying the // NB: We don't need to worry about people modifying the
@ -147,7 +155,7 @@ where
} }
} }
self.read_upgrade(db, revision_now) self.read_upgrade(db, key, revision_now)
} }
/// Second phase of a read operation: acquires an upgradable-read /// Second phase of a read operation: acquires an upgradable-read
@ -157,6 +165,7 @@ where
fn read_upgrade( fn read_upgrade(
&self, &self,
db: &<Q as QueryDb<'_>>::DynDb, db: &<Q as QueryDb<'_>>::DynDb,
key: &Q::Key,
revision_now: Revision, revision_now: Revision,
) -> StampedValue<Q::Value> { ) -> StampedValue<Q::Value> {
let runtime = db.salsa_runtime(); let runtime = db.salsa_runtime();
@ -186,8 +195,8 @@ where
} }
}; };
let panic_guard = PanicGuard::new(self.database_key_index, self, runtime); let panic_guard = PanicGuard::new(self, runtime);
let active_query = runtime.push_query(self.database_key_index); let active_query = runtime.push_query(self.database_key_index());
// If we have an old-value, it *may* now be stale, since there // If we have an old-value, it *may* now be stale, since there
// has been a new revision since the last time we checked. So, // has been a new revision since the last time we checked. So,
@ -200,7 +209,7 @@ where
db.salsa_event(Event { db.salsa_event(Event {
runtime_id: runtime.id(), runtime_id: runtime.id(),
kind: EventKind::DidValidateMemoizedValue { kind: EventKind::DidValidateMemoizedValue {
database_key: self.database_key_index, database_key: self.database_key_index(),
}, },
}); });
@ -210,7 +219,7 @@ where
} }
} }
self.execute(db, runtime, revision_now, active_query, panic_guard, old_memo) self.execute(db, runtime, revision_now, active_query, panic_guard, old_memo, key)
} }
fn execute( fn execute(
@ -221,22 +230,23 @@ where
active_query: ActiveQueryGuard<'_>, active_query: ActiveQueryGuard<'_>,
panic_guard: PanicGuard<'_, Q, MP>, panic_guard: PanicGuard<'_, Q, MP>,
old_memo: Option<Memo<Q::Value>>, old_memo: Option<Memo<Q::Value>>,
key: &Q::Key,
) -> StampedValue<Q::Value> { ) -> StampedValue<Q::Value> {
tracing::info!("{:?}: executing query", self.database_key_index.debug(db)); tracing::info!("{:?}: executing query", self.database_key_index().debug(db));
db.salsa_event(Event { db.salsa_event(Event {
runtime_id: db.salsa_runtime().id(), runtime_id: db.salsa_runtime().id(),
kind: EventKind::WillExecute { database_key: self.database_key_index }, kind: EventKind::WillExecute { database_key: self.database_key_index() },
}); });
// Query was not previously executed, or value is potentially // Query was not previously executed, or value is potentially
// stale, or value is absent. Let's execute! // stale, or value is absent. Let's execute!
let value = match Cycle::catch(|| Q::execute(db, self.key.clone())) { let value = match Cycle::catch(|| Q::execute(db, key.clone())) {
Ok(v) => v, Ok(v) => v,
Err(cycle) => { Err(cycle) => {
tracing::debug!( tracing::debug!(
"{:?}: caught cycle {:?}, have strategy {:?}", "{:?}: caught cycle {:?}, have strategy {:?}",
self.database_key_index.debug(db), self.database_key_index().debug(db),
cycle, cycle,
Q::CYCLE_STRATEGY, Q::CYCLE_STRATEGY,
); );
@ -248,12 +258,12 @@ where
crate::plumbing::CycleRecoveryStrategy::Fallback => { crate::plumbing::CycleRecoveryStrategy::Fallback => {
if let Some(c) = active_query.take_cycle() { if let Some(c) = active_query.take_cycle() {
assert!(c.is(&cycle)); assert!(c.is(&cycle));
Q::cycle_fallback(db, &cycle, &self.key) Q::cycle_fallback(db, &cycle, key)
} else { } else {
// we are not a participant in this cycle // we are not a participant in this cycle
debug_assert!(!cycle debug_assert!(!cycle
.participant_keys() .participant_keys()
.any(|k| k == self.database_key_index)); .any(|k| k == self.database_key_index()));
cycle.throw() cycle.throw()
} }
} }
@ -303,7 +313,7 @@ where
}; };
let memo_value = let memo_value =
if self.should_memoize_value(&self.key) { Some(new_value.value.clone()) } else { None }; if self.should_memoize_value(key) { Some(new_value.value.clone()) } else { None };
debug!("read_upgrade({:?}): result.revisions = {:#?}", self, revisions,); debug!("read_upgrade({:?}): result.revisions = {:#?}", self, revisions,);
@ -395,13 +405,11 @@ where
} }
} }
pub(super) fn as_table_entry(&self) -> Option<TableEntry<Q::Key, Q::Value>> { pub(super) fn as_table_entry(&self, key: &Q::Key) -> Option<TableEntry<Q::Key, Q::Value>> {
match &*self.state.read() { match &*self.state.read() {
QueryState::NotComputed => None, QueryState::NotComputed => None,
QueryState::InProgress { .. } => Some(TableEntry::new(self.key.clone(), None)), QueryState::InProgress { .. } => Some(TableEntry::new(key.clone(), None)),
QueryState::Memoized(memo) => { QueryState::Memoized(memo) => Some(TableEntry::new(key.clone(), memo.value.clone())),
Some(TableEntry::new(self.key.clone(), memo.value.clone()))
}
} }
} }
@ -436,6 +444,7 @@ where
&self, &self,
db: &<Q as QueryDb<'_>>::DynDb, db: &<Q as QueryDb<'_>>::DynDb,
revision: Revision, revision: Revision,
key: &Q::Key,
) -> bool { ) -> bool {
let runtime = db.salsa_runtime(); let runtime = db.salsa_runtime();
let revision_now = runtime.current_revision(); let revision_now = runtime.current_revision();
@ -458,7 +467,7 @@ where
MaybeChangedSinceProbeState::ChangedAt(changed_at) => return changed_at > revision, MaybeChangedSinceProbeState::ChangedAt(changed_at) => return changed_at > revision,
MaybeChangedSinceProbeState::Stale(state) => { MaybeChangedSinceProbeState::Stale(state) => {
drop(state); drop(state);
return self.maybe_changed_after_upgrade(db, revision); return self.maybe_changed_after_upgrade(db, revision, key);
} }
} }
} }
@ -495,6 +504,7 @@ where
&self, &self,
db: &<Q as QueryDb<'_>>::DynDb, db: &<Q as QueryDb<'_>>::DynDb,
revision: Revision, revision: Revision,
key: &Q::Key,
) -> bool { ) -> bool {
let runtime = db.salsa_runtime(); let runtime = db.salsa_runtime();
let revision_now = runtime.current_revision(); let revision_now = runtime.current_revision();
@ -513,7 +523,9 @@ where
// If another thread was active, then the cache line is going to be // If another thread was active, then the cache line is going to be
// either verified or cleared out. Just recurse to figure out which. // either verified or cleared out. Just recurse to figure out which.
// Note that we don't need an upgradable read. // Note that we don't need an upgradable read.
MaybeChangedSinceProbeState::Retry => return self.maybe_changed_after(db, revision), MaybeChangedSinceProbeState::Retry => {
return self.maybe_changed_after(db, revision, key)
}
MaybeChangedSinceProbeState::Stale(state) => { MaybeChangedSinceProbeState::Stale(state) => {
type RwLockUpgradableReadGuard<'a, T> = type RwLockUpgradableReadGuard<'a, T> =
@ -527,8 +539,8 @@ where
} }
}; };
let panic_guard = PanicGuard::new(self.database_key_index, self, runtime); let panic_guard = PanicGuard::new(self, runtime);
let active_query = runtime.push_query(self.database_key_index); let active_query = runtime.push_query(self.database_key_index());
if old_memo.verify_revisions(db.ops_database(), revision_now, &active_query) { if old_memo.verify_revisions(db.ops_database(), revision_now, &active_query) {
let maybe_changed = old_memo.revisions.changed_at > revision; let maybe_changed = old_memo.revisions.changed_at > revision;
@ -538,8 +550,15 @@ where
// We found that this memoized value may have changed // We found that this memoized value may have changed
// but we have an old value. We can re-run the code and // but we have an old value. We can re-run the code and
// actually *check* if it has changed. // actually *check* if it has changed.
let StampedValue { changed_at, .. } = let StampedValue { changed_at, .. } = self.execute(
self.execute(db, runtime, revision_now, active_query, panic_guard, Some(old_memo)); db,
runtime,
revision_now,
active_query,
panic_guard,
Some(old_memo),
key,
);
changed_at > revision changed_at > revision
} else { } else {
// We found that inputs to this memoized value may have changed // We found that inputs to this memoized value may have changed
@ -560,7 +579,7 @@ where
) { ) {
runtime.block_on_or_unwind( runtime.block_on_or_unwind(
db.ops_database(), db.ops_database(),
self.database_key_index, self.database_key_index(),
other_id, other_id,
mutex_guard, mutex_guard,
) )
@ -585,7 +604,6 @@ where
Q: QueryFunction, Q: QueryFunction,
MP: MemoizationPolicy<Q>, MP: MemoizationPolicy<Q>,
{ {
database_key_index: DatabaseKeyIndex,
slot: &'me Slot<Q, MP>, slot: &'me Slot<Q, MP>,
runtime: &'me Runtime, runtime: &'me Runtime,
} }
@ -595,12 +613,8 @@ where
Q: QueryFunction, Q: QueryFunction,
MP: MemoizationPolicy<Q>, MP: MemoizationPolicy<Q>,
{ {
fn new( fn new(slot: &'me Slot<Q, MP>, runtime: &'me Runtime) -> Self {
database_key_index: DatabaseKeyIndex, Self { slot, runtime }
slot: &'me Slot<Q, MP>,
runtime: &'me Runtime,
) -> Self {
Self { database_key_index, slot, runtime }
} }
/// Indicates that we have concluded normally (without panicking). /// Indicates that we have concluded normally (without panicking).
@ -616,9 +630,9 @@ where
/// inserted; if others were blocked, waiting for us to finish, /// inserted; if others were blocked, waiting for us to finish,
/// then notify them. /// then notify them.
fn overwrite_placeholder(&mut self, wait_result: WaitResult, opt_memo: Option<Memo<Q::Value>>) { fn overwrite_placeholder(&mut self, wait_result: WaitResult, opt_memo: Option<Memo<Q::Value>>) {
let old_value = {
let mut write = self.slot.state.write(); let mut write = self.slot.state.write();
match opt_memo {
let old_value = match opt_memo {
// Replace the `InProgress` marker that we installed with the new // Replace the `InProgress` marker that we installed with the new
// memo, thus releasing our unique access to this key. // memo, thus releasing our unique access to this key.
Some(memo) => std::mem::replace(&mut *write, QueryState::Memoized(memo)), Some(memo) => std::mem::replace(&mut *write, QueryState::Memoized(memo)),
@ -627,6 +641,7 @@ where
// it could be removed. At this point, we therefore "own" unique // it could be removed. At this point, we therefore "own" unique
// access to our slot, so we can just remove the key. // access to our slot, so we can just remove the key.
None => std::mem::replace(&mut *write, QueryState::NotComputed), None => std::mem::replace(&mut *write, QueryState::NotComputed),
}
}; };
match old_value { match old_value {
@ -638,7 +653,8 @@ where
// acquire a mutex; the mutex will guarantee that all writes // acquire a mutex; the mutex will guarantee that all writes
// we are interested in are visible. // we are interested in are visible.
if anyone_waiting.load(Ordering::Relaxed) { if anyone_waiting.load(Ordering::Relaxed) {
self.runtime.unblock_queries_blocked_on(self.database_key_index, wait_result); self.runtime
.unblock_queries_blocked_on(self.slot.database_key_index(), wait_result);
} }
} }
_ => panic!( _ => panic!(
@ -692,10 +708,10 @@ where
return None; return None;
} }
if self.verify_revisions(db, revision_now, active_query) { if self.verify_revisions(db, revision_now, active_query) {
Some(StampedValue { self.value.clone().map(|value| StampedValue {
durability: self.revisions.durability, durability: self.revisions.durability,
changed_at: self.revisions.changed_at, changed_at: self.revisions.changed_at,
value: self.value.as_ref().unwrap().clone(), value,
}) })
} else { } else {
None None
@ -748,7 +764,7 @@ where
// input changed *again*. // input changed *again*.
QueryInputs::Tracked { inputs } => { QueryInputs::Tracked { inputs } => {
let changed_input = let changed_input =
inputs.iter().find(|&&input| db.maybe_changed_after(input, verified_at)); inputs.slice.iter().find(|&&input| db.maybe_changed_after(input, verified_at));
if let Some(input) = changed_input { if let Some(input) = changed_input {
debug!("validate_memoized_value: `{:?}` may have changed", input); debug!("validate_memoized_value: `{:?}` may have changed", input);
@ -788,7 +804,7 @@ where
MP: MemoizationPolicy<Q>, MP: MemoizationPolicy<Q>,
{ {
fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(fmt, "{:?}({:?})", Q::default(), self.key) write!(fmt, "{:?}", Q::default())
} }
} }
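
The slot above shrinks from storing the key plus a full `DatabaseKeyIndex` to just `key_index: u32` and `group_index: u16`, rebuilding the index on demand (the query index is a per-query constant). A compact sketch of that trade, with an illustrative constant:

```rust
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
struct DatabaseKeyIndex {
    group_index: u16,
    query_index: u16,
    key_index: u32,
}

struct Slot {
    key_index: u32,
    group_index: u16,
}

impl Slot {
    // in the macro-generated code this is the per-query `Q::QUERY_INDEX`
    const QUERY_INDEX: u16 = 7;

    fn database_key_index(&self) -> DatabaseKeyIndex {
        DatabaseKeyIndex {
            group_index: self.group_index,
            query_index: Self::QUERY_INDEX,
            key_index: self.key_index,
        }
    }
}

fn main() {
    let slot = Slot { key_index: 3, group_index: 1 };
    let idx = slot.database_key_index();
    assert_eq!(idx, DatabaseKeyIndex { group_index: 1, query_index: 7, key_index: 3 });
}
```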

View file

@ -42,9 +42,9 @@ impl Durability {
pub(crate) const MAX: Durability = Self::HIGH; pub(crate) const MAX: Durability = Self::HIGH;
/// Number of durability levels. /// Number of durability levels.
pub(crate) const LEN: usize = 3; pub(crate) const LEN: usize = Self::MAX.index() + 1;
pub(crate) fn index(self) -> usize { pub(crate) const fn index(self) -> usize {
self.0 as usize self.0 as usize
} }
} }
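
Deriving `LEN` from `MAX` (now possible because `index` is a `const fn`) keeps the two from drifting apart when a durability level is added. A sketch:

```rust
#[derive(Copy, Clone)]
struct Durability(u8);

impl Durability {
    const HIGH: Durability = Durability(2);
    const MAX: Durability = Self::HIGH;
    // adding a level only requires bumping MAX; LEN follows automatically
    const LEN: usize = Self::MAX.index() + 1;

    const fn index(self) -> usize {
        self.0 as usize
    }
}

fn main() {
    // LEN is usable wherever a constant is required, e.g. array lengths
    let revisions = [0u64; Durability::LEN];
    assert_eq!(revisions.len(), 3);
}
```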

View file

@ -29,7 +29,7 @@ where
} }
struct Slot<V> { struct Slot<V> {
database_key_index: DatabaseKeyIndex, key_index: u32,
stamped_value: RwLock<StampedValue<V>>, stamped_value: RwLock<StampedValue<V>>,
} }
@ -54,27 +54,25 @@ where
fn fmt_index( fn fmt_index(
&self, &self,
_db: &<Q as QueryDb<'_>>::DynDb, _db: &<Q as QueryDb<'_>>::DynDb,
index: DatabaseKeyIndex, index: u32,
fmt: &mut std::fmt::Formatter<'_>, fmt: &mut std::fmt::Formatter<'_>,
) -> std::fmt::Result { ) -> std::fmt::Result {
assert_eq!(index.group_index, self.group_index);
assert_eq!(index.query_index, Q::QUERY_INDEX);
let slot_map = self.slots.read(); let slot_map = self.slots.read();
let key = slot_map.get_index(index.key_index as usize).unwrap().0; let key = slot_map.get_index(index as usize).unwrap().0;
write!(fmt, "{}({:?})", Q::QUERY_NAME, key) write!(fmt, "{}({:?})", Q::QUERY_NAME, key)
} }
fn maybe_changed_after( fn maybe_changed_after(
&self, &self,
db: &<Q as QueryDb<'_>>::DynDb, db: &<Q as QueryDb<'_>>::DynDb,
input: DatabaseKeyIndex, index: u32,
revision: Revision, revision: Revision,
) -> bool { ) -> bool {
assert_eq!(input.group_index, self.group_index);
assert_eq!(input.query_index, Q::QUERY_INDEX);
debug_assert!(revision < db.salsa_runtime().current_revision()); debug_assert!(revision < db.salsa_runtime().current_revision());
let slots = &self.slots.read(); let slots = &self.slots.read();
let slot = slots.get_index(input.key_index as usize).unwrap().1; let Some((_, slot)) = slots.get_index(index as usize) else {
return true;
};
debug!("maybe_changed_after(slot={:?}, revision={:?})", Q::default(), revision,); debug!("maybe_changed_after(slot={:?}, revision={:?})", Q::default(), revision,);
@ -96,7 +94,11 @@ where
let StampedValue { value, durability, changed_at } = slot.stamped_value.read().clone(); let StampedValue { value, durability, changed_at } = slot.stamped_value.read().clone();
db.salsa_runtime().report_query_read_and_unwind_if_cycle_resulted( db.salsa_runtime().report_query_read_and_unwind_if_cycle_resulted(
slot.database_key_index, DatabaseKeyIndex {
group_index: self.group_index,
query_index: Q::QUERY_INDEX,
key_index: slot.key_index,
},
durability, durability,
changed_at, changed_at,
); );
@ -174,16 +176,8 @@ where
} }
Entry::Vacant(entry) => { Entry::Vacant(entry) => {
let key_index = u32::try_from(entry.index()).unwrap(); let key_index = entry.index() as u32;
let database_key_index = DatabaseKeyIndex { entry.insert(Slot { key_index, stamped_value: RwLock::new(stamped_value) });
group_index: self.group_index,
query_index: Q::QUERY_INDEX,
key_index,
};
entry.insert(Slot {
database_key_index,
stamped_value: RwLock::new(stamped_value),
});
None None
} }
} }
@ -196,7 +190,6 @@ pub struct UnitInputStorage<Q>
where where
Q: Query<Key = ()>, Q: Query<Key = ()>,
{ {
group_index: u16,
slot: UnitSlot<Q::Value>, slot: UnitSlot<Q::Value>,
} }
@ -222,36 +215,32 @@ where
fn new(group_index: u16) -> Self { fn new(group_index: u16) -> Self {
let database_key_index = let database_key_index =
DatabaseKeyIndex { group_index, query_index: Q::QUERY_INDEX, key_index: 0 }; DatabaseKeyIndex { group_index, query_index: Q::QUERY_INDEX, key_index: 0 };
UnitInputStorage { UnitInputStorage { slot: UnitSlot { database_key_index, stamped_value: RwLock::new(None) } }
group_index,
slot: UnitSlot { database_key_index, stamped_value: RwLock::new(None) },
}
} }
fn fmt_index( fn fmt_index(
&self, &self,
_db: &<Q as QueryDb<'_>>::DynDb, _db: &<Q as QueryDb<'_>>::DynDb,
index: DatabaseKeyIndex, _index: u32,
fmt: &mut std::fmt::Formatter<'_>, fmt: &mut std::fmt::Formatter<'_>,
) -> std::fmt::Result { ) -> std::fmt::Result {
assert_eq!(index.group_index, self.group_index);
assert_eq!(index.query_index, Q::QUERY_INDEX);
write!(fmt, "{}", Q::QUERY_NAME) write!(fmt, "{}", Q::QUERY_NAME)
} }
fn maybe_changed_after( fn maybe_changed_after(
&self, &self,
db: &<Q as QueryDb<'_>>::DynDb, db: &<Q as QueryDb<'_>>::DynDb,
input: DatabaseKeyIndex, _index: u32,
revision: Revision, revision: Revision,
) -> bool { ) -> bool {
assert_eq!(input.group_index, self.group_index);
assert_eq!(input.query_index, Q::QUERY_INDEX);
debug_assert!(revision < db.salsa_runtime().current_revision()); debug_assert!(revision < db.salsa_runtime().current_revision());
debug!("maybe_changed_after(slot={:?}, revision={:?})", Q::default(), revision,); debug!("maybe_changed_after(slot={:?}, revision={:?})", Q::default(), revision,);
let changed_at = self.slot.stamped_value.read().as_ref().unwrap().changed_at; let Some(value) = &*self.slot.stamped_value.read() else {
return true;
};
let changed_at = value.changed_at;
debug!("maybe_changed_after: changed_at = {:?}", changed_at); debug!("maybe_changed_after: changed_at = {:?}", changed_at);

View file

@ -265,12 +265,10 @@ where
fn fmt_index( fn fmt_index(
&self, &self,
_db: &<Q as QueryDb<'_>>::DynDb, _db: &<Q as QueryDb<'_>>::DynDb,
index: DatabaseKeyIndex, index: u32,
fmt: &mut std::fmt::Formatter<'_>, fmt: &mut std::fmt::Formatter<'_>,
) -> std::fmt::Result { ) -> std::fmt::Result {
assert_eq!(index.group_index, self.group_index); let intern_id = InternId::from(index);
assert_eq!(index.query_index, Q::QUERY_INDEX);
let intern_id = InternId::from(index.key_index);
let slot = self.lookup_value(intern_id); let slot = self.lookup_value(intern_id);
write!(fmt, "{}({:?})", Q::QUERY_NAME, slot.value) write!(fmt, "{}({:?})", Q::QUERY_NAME, slot.value)
} }
@ -278,13 +276,11 @@ where
fn maybe_changed_after( fn maybe_changed_after(
&self, &self,
db: &<Q as QueryDb<'_>>::DynDb, db: &<Q as QueryDb<'_>>::DynDb,
input: DatabaseKeyIndex, input: u32,
revision: Revision, revision: Revision,
) -> bool { ) -> bool {
assert_eq!(input.group_index, self.group_index);
assert_eq!(input.query_index, Q::QUERY_INDEX);
debug_assert!(revision < db.salsa_runtime().current_revision()); debug_assert!(revision < db.salsa_runtime().current_revision());
let intern_id = InternId::from(input.key_index); let intern_id = InternId::from(input);
let slot = self.lookup_value(intern_id); let slot = self.lookup_value(intern_id);
slot.maybe_changed_after(revision) slot.maybe_changed_after(revision)
} }
@ -388,7 +384,7 @@ where
fn fmt_index( fn fmt_index(
&self, &self,
db: &<Q as QueryDb<'_>>::DynDb, db: &<Q as QueryDb<'_>>::DynDb,
index: DatabaseKeyIndex, index: u32,
fmt: &mut std::fmt::Formatter<'_>, fmt: &mut std::fmt::Formatter<'_>,
) -> std::fmt::Result { ) -> std::fmt::Result {
let group_storage = let group_storage =
@ -400,7 +396,7 @@ where
fn maybe_changed_after( fn maybe_changed_after(
&self, &self,
db: &<Q as QueryDb<'_>>::DynDb, db: &<Q as QueryDb<'_>>::DynDb,
input: DatabaseKeyIndex, input: u32,
revision: Revision, revision: Revision,
) -> bool { ) -> bool {
let group_storage = let group_storage =

View file

@ -54,7 +54,7 @@ pub trait Database: plumbing::DatabaseOps {
/// runtime. It permits the database to be customized and to /// runtime. It permits the database to be customized and to
/// inject logging or other custom behavior. /// inject logging or other custom behavior.
fn salsa_event(&self, event_fn: Event) { fn salsa_event(&self, event_fn: Event) {
#![allow(unused_variables)] _ = event_fn;
} }
/// Starts unwinding the stack if the current revision is cancelled. /// Starts unwinding the stack if the current revision is cancelled.
@ -96,11 +96,16 @@ pub trait Database: plumbing::DatabaseOps {
self.ops_salsa_runtime() self.ops_salsa_runtime()
} }
/// Gives access to the underlying salsa runtime. /// A "synthetic write" causes the system to act *as though* some
/// input of durability `durability` has changed. This is mostly
/// useful for profiling scenarios.
/// ///
/// This method should not be overridden by `Database` implementors. /// **WARNING:** Just like an ordinary write, this method triggers
fn salsa_runtime_mut(&mut self) -> &mut Runtime { /// cancellation. If you invoke it while a snapshot exists, it
self.ops_salsa_runtime_mut() /// will block until that snapshot is dropped -- if that snapshot
/// is owned by the current thread, this could trigger deadlock.
fn synthetic_write(&mut self, durability: Durability) {
plumbing::DatabaseOps::synthetic_write(self, durability)
} }
} }
@ -456,12 +461,12 @@ pub trait Query: Debug + Default + Sized + for<'d> QueryDb<'d> {
/// Name of the query method (e.g., `foo`) /// Name of the query method (e.g., `foo`)
const QUERY_NAME: &'static str; const QUERY_NAME: &'static str;
/// Extact storage for this query from the storage for its group. /// Extract storage for this query from the storage for its group.
fn query_storage<'a>( fn query_storage<'a>(
group_storage: &'a <Self as QueryDb<'_>>::GroupStorage, group_storage: &'a <Self as QueryDb<'_>>::GroupStorage,
) -> &'a std::sync::Arc<Self::Storage>; ) -> &'a std::sync::Arc<Self::Storage>;
/// Extact storage for this query from the storage for its group. /// Extract storage for this query from the storage for its group.
fn query_storage_mut<'a>( fn query_storage_mut<'a>(
group_storage: &'a <Self as QueryDb<'_>>::GroupStorage, group_storage: &'a <Self as QueryDb<'_>>::GroupStorage,
) -> &'a std::sync::Arc<Self::Storage>; ) -> &'a std::sync::Arc<Self::Storage>;
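
`synthetic_write` moves from `Runtime` onto the `Database` trait itself, matching its doc comment about profiling. A hypothetical harness using the vendored salsa's macro API (the `HelloDb`/`text`/`length` names are invented for this sketch):

```rust
use salsa::Durability;

#[salsa::query_group(HelloStorage)]
trait HelloDb: salsa::Database {
    #[salsa::input]
    fn text(&self) -> String;
    fn length(&self) -> usize;
}

fn length(db: &dyn HelloDb) -> usize {
    db.text().len()
}

#[salsa::database(HelloStorage)]
#[derive(Default)]
struct MyDatabase {
    storage: salsa::Storage<MyDatabase>,
}

impl salsa::Database for MyDatabase {}

fn main() {
    let mut db = MyDatabase::default();
    db.set_text("hello".to_owned());
    assert_eq!(db.length(), 5);
    // act as though some LOW-durability input changed: the next read
    // revalidates `length` even though no real input was edited
    db.synthetic_write(Durability::LOW);
    assert_eq!(db.length(), 5);
}
```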

View file

@ -40,7 +40,7 @@ pub(crate) trait LruNode: Sized + Debug {
#[derive(Debug)] #[derive(Debug)]
pub(crate) struct LruIndex { pub(crate) struct LruIndex {
/// Index in the approprate LRU list, or std::usize::MAX if not a /// Index in the appropriate LRU list, or std::usize::MAX if not a
/// member. /// member.
index: AtomicUsize, index: AtomicUsize,
} }

View file

@ -38,8 +38,15 @@ pub trait DatabaseOps {
/// Gives access to the underlying salsa runtime. /// Gives access to the underlying salsa runtime.
fn ops_salsa_runtime(&self) -> &Runtime; fn ops_salsa_runtime(&self) -> &Runtime;
/// Gives access to the underlying salsa runtime. /// A "synthetic write" causes the system to act *as though* some
fn ops_salsa_runtime_mut(&mut self) -> &mut Runtime; /// input of durability `durability` has changed. This is mostly
/// useful for profiling scenarios.
///
/// **WARNING:** Just like an ordinary write, this method triggers
/// cancellation. If you invoke it while a snapshot exists, it
/// will block until that snapshot is dropped -- if that snapshot
/// is owned by the current thread, this could trigger deadlock.
fn synthetic_write(&mut self, durability: Durability);
/// Formats a database key index in a human readable fashion. /// Formats a database key index in a human readable fashion.
fn fmt_index( fn fmt_index(
@ -166,7 +173,7 @@ where
fn fmt_index( fn fmt_index(
&self, &self,
db: &<Q as QueryDb<'_>>::DynDb, db: &<Q as QueryDb<'_>>::DynDb,
index: DatabaseKeyIndex, index: u32,
fmt: &mut std::fmt::Formatter<'_>, fmt: &mut std::fmt::Formatter<'_>,
) -> std::fmt::Result; ) -> std::fmt::Result;
@ -179,7 +186,7 @@ where
fn maybe_changed_after( fn maybe_changed_after(
&self, &self,
db: &<Q as QueryDb<'_>>::DynDb, db: &<Q as QueryDb<'_>>::DynDb,
input: DatabaseKeyIndex, index: u32,
revision: Revision, revision: Revision,
) -> bool; ) -> bool;
// ANCHOR_END:maybe_changed_after // ANCHOR_END:maybe_changed_after

View file

@ -46,7 +46,7 @@ pub(crate) struct AtomicRevision {
} }
impl AtomicRevision { impl AtomicRevision {
pub(crate) fn start() -> Self { pub(crate) const fn start() -> Self {
Self { data: AtomicU32::new(START) } Self { data: AtomicU32::new(START) }
} }
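
Making `start` a `const fn` lets a `const` item seed arrays of atomics, which is how the runtime below builds `[AtomicRevision; Durability::LEN]`. Each array element copies the const initializer and is therefore an independent atomic; a sketch:

```rust
use std::sync::atomic::{AtomicU32, Ordering};

// the clippy lint about interior mutability in consts is a false alarm for
// this pattern (hence the allow in the runtime hunk below)
#[allow(clippy::declare_interior_mutable_const)]
const START: AtomicU32 = AtomicU32::new(1);

fn main() {
    let revisions: [AtomicU32; 3] = [START; 3];
    revisions[0].store(7, Ordering::Relaxed);
    assert_eq!(revisions[0].load(Ordering::Relaxed), 7);
    assert_eq!(revisions[1].load(Ordering::Relaxed), 1); // untouched
}
```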

View file

@ -4,13 +4,14 @@ use crate::hash::FxIndexSet;
use crate::plumbing::CycleRecoveryStrategy; use crate::plumbing::CycleRecoveryStrategy;
use crate::revision::{AtomicRevision, Revision}; use crate::revision::{AtomicRevision, Revision};
use crate::{Cancelled, Cycle, Database, DatabaseKeyIndex, Event, EventKind}; use crate::{Cancelled, Cycle, Database, DatabaseKeyIndex, Event, EventKind};
use itertools::Itertools;
use parking_lot::lock_api::{RawRwLock, RawRwLockRecursive}; use parking_lot::lock_api::{RawRwLock, RawRwLockRecursive};
use parking_lot::{Mutex, RwLock}; use parking_lot::{Mutex, RwLock};
use std::hash::Hash; use std::hash::Hash;
use std::panic::panic_any; use std::panic::panic_any;
use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::atomic::{AtomicU32, Ordering};
use tracing::debug; use tracing::debug;
use triomphe::Arc; use triomphe::{Arc, ThinArc};
mod dependency_graph; mod dependency_graph;
use dependency_graph::DependencyGraph; use dependency_graph::DependencyGraph;
@ -297,8 +298,7 @@ impl Runtime {
// (at least for this execution, not necessarily across executions), // (at least for this execution, not necessarily across executions),
// no matter where it started on the stack. Find the minimum // no matter where it started on the stack. Find the minimum
// key and rotate it to the front. // key and rotate it to the front.
let min = v.iter().min().unwrap(); let index = v.iter().position_min().unwrap_or_default();
let index = v.iter().position(|p| p == min).unwrap();
v.rotate_left(index); v.rotate_left(index);
// No need to store extra memory. // No need to store extra memory.
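
`position_min` collapses the previous two passes (find the minimum, then find its position) into one. The surrounding trick rotates a cycle's participant list so the smallest key comes first, giving equal cycles a canonical form; sketched:

```rust
use itertools::Itertools;

fn canonicalize(v: &mut [u32]) {
    // one pass instead of `min` followed by `position`
    let index = v.iter().position_min().unwrap_or_default();
    v.rotate_left(index);
}

fn main() {
    let mut a = vec![3, 1, 2];
    let mut b = vec![2, 3, 1];
    canonicalize(&mut a);
    canonicalize(&mut b);
    assert_eq!(a, b); // both rotate to [1, 2, 3]
}
```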
@ -440,7 +440,7 @@ impl Runtime {
/// State that will be common to all threads (when we support multiple threads) /// State that will be common to all threads (when we support multiple threads)
struct SharedState { struct SharedState {
/// Stores the next id to use for a snapshotted runtime (starts at 1). /// Stores the next id to use for a snapshotted runtime (starts at 1).
next_id: AtomicUsize, next_id: AtomicU32,
/// Whenever derived queries are executing, they acquire this lock /// Whenever derived queries are executing, they acquire this lock
/// in read mode. Mutating inputs (and thus creating a new /// in read mode. Mutating inputs (and thus creating a new
@ -457,50 +457,46 @@ struct SharedState {
/// revision is cancelled). /// revision is cancelled).
pending_revision: AtomicRevision, pending_revision: AtomicRevision,
/// Stores the "last change" revision for values of each duration. /// Stores the "last change" revision for values of each Durability.
/// This vector is always of length at least 1 (for Durability 0) /// This vector is always of length at least 1 (for Durability 0)
/// but its total length depends on the number of durations. The /// but its total length depends on the number of Durabilities. The
/// element at index 0 is special as it represents the "current /// element at index 0 is special as it represents the "current
/// revision". In general, we have the invariant that revisions /// revision". In general, we have the invariant that revisions
/// in here are *declining* -- that is, `revisions[i] >= /// in here are *declining* -- that is, `revisions[i] >=
/// revisions[i + 1]`, for all `i`. This is because when you /// revisions[i + 1]`, for all `i`. This is because when you
/// modify a value with durability D, that implies that values /// modify a value with durability D, that implies that values
/// with durability less than D may have changed too. /// with durability less than D may have changed too.
revisions: Vec<AtomicRevision>, revisions: [AtomicRevision; Durability::LEN],
/// The dependency graph tracks which runtimes are blocked on one /// The dependency graph tracks which runtimes are blocked on one
/// another, waiting for queries to terminate. /// another, waiting for queries to terminate.
dependency_graph: Mutex<DependencyGraph>, dependency_graph: Mutex<DependencyGraph>,
} }
impl SharedState { impl std::panic::RefUnwindSafe for SharedState {}
fn with_durabilities(durabilities: usize) -> Self {
impl Default for SharedState {
fn default() -> Self {
#[allow(clippy::declare_interior_mutable_const)]
const START: AtomicRevision = AtomicRevision::start();
SharedState { SharedState {
next_id: AtomicUsize::new(1), next_id: AtomicU32::new(1),
query_lock: Default::default(), query_lock: Default::default(),
revisions: (0..durabilities).map(|_| AtomicRevision::start()).collect(), revisions: [START; Durability::LEN],
pending_revision: AtomicRevision::start(), pending_revision: START,
dependency_graph: Default::default(), dependency_graph: Default::default(),
} }
} }
} }
impl std::panic::RefUnwindSafe for SharedState {}
impl Default for SharedState {
fn default() -> Self {
Self::with_durabilities(Durability::LEN)
}
}
impl std::fmt::Debug for SharedState { impl std::fmt::Debug for SharedState {
fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let query_lock = if self.query_lock.try_write().is_some() { let query_lock = if self.query_lock.is_locked_exclusive() {
"<unlocked>" "<wlocked>"
} else if self.query_lock.try_read().is_some() { } else if self.query_lock.is_locked() {
"<rlocked>" "<rlocked>"
} else { } else {
"<wlocked>" "<unlocked>"
}; };
fmt.debug_struct("SharedState") fmt.debug_struct("SharedState")
.field("query_lock", &query_lock) .field("query_lock", &query_lock)
@ -570,7 +566,9 @@ impl ActiveQuery {
if dependencies.is_empty() { if dependencies.is_empty() {
QueryInputs::NoInputs QueryInputs::NoInputs
} else { } else {
QueryInputs::Tracked { inputs: dependencies.iter().copied().collect() } QueryInputs::Tracked {
inputs: ThinArc::from_header_and_iter((), dependencies.iter().copied()),
}
} }
} }
}; };
@ -616,7 +614,7 @@ impl ActiveQuery {
/// complete, its `RuntimeId` may potentially be re-used. /// complete, its `RuntimeId` may potentially be re-used.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)] #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct RuntimeId { pub struct RuntimeId {
counter: usize, counter: u32,
} }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
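
`triomphe::ThinArc` stores the header and the slice in a single allocation behind a pointer-sized handle, which is why `QueryInputs::Tracked` shrinks here. A small sketch with the same crate:

```rust
use triomphe::ThinArc;

fn main() {
    let deps: Vec<u32> = vec![1, 2, 3];
    // header `()` carries no data; the payload is the dependency slice
    let inputs: ThinArc<(), u32> = ThinArc::from_header_and_iter((), deps.into_iter());
    assert_eq!(inputs.slice.len(), 3);
    // the whole handle is one pointer wide, unlike Arc<[T]>'s fat pointer
    assert_eq!(
        std::mem::size_of::<ThinArc<(), u32>>(),
        std::mem::size_of::<usize>()
    );
}
```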

View file

@ -12,7 +12,7 @@ type QueryStack = Vec<ActiveQuery>;
#[derive(Debug, Default)] #[derive(Debug, Default)]
pub(super) struct DependencyGraph { pub(super) struct DependencyGraph {
/// A `(K -> V)` pair in this map indicates that the the runtime /// A `(K -> V)` pair in this map indicates that the runtime
/// `K` is blocked on some query executing in the runtime `V`. /// `K` is blocked on some query executing in the runtime `V`.
/// This encodes a graph that must be acyclic (or else deadlock /// This encodes a graph that must be acyclic (or else deadlock
/// will result). /// will result).

View file

@ -1,5 +1,6 @@
//! //!
use tracing::debug; use tracing::debug;
use triomphe::ThinArc;
use crate::durability::Durability; use crate::durability::Durability;
use crate::runtime::ActiveQuery; use crate::runtime::ActiveQuery;
@ -7,7 +8,6 @@ use crate::runtime::Revision;
use crate::Cycle; use crate::Cycle;
use crate::DatabaseKeyIndex; use crate::DatabaseKeyIndex;
use std::cell::RefCell; use std::cell::RefCell;
use triomphe::Arc;
/// State that is specific to a single execution thread. /// State that is specific to a single execution thread.
/// ///
@ -43,7 +43,7 @@ pub(crate) struct QueryRevisions {
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub(crate) enum QueryInputs { pub(crate) enum QueryInputs {
/// Non-empty set of inputs, fully known /// Non-empty set of inputs, fully known
Tracked { inputs: Arc<[DatabaseKeyIndex]> }, Tracked { inputs: ThinArc<(), DatabaseKeyIndex> },
/// Empty set of inputs, fully known. /// Empty set of inputs, fully known.
NoInputs, NoInputs,
@ -145,8 +145,7 @@ impl LocalState {
/// the current thread is blocking. The stack must be restored /// the current thread is blocking. The stack must be restored
/// with [`Self::restore_query_stack`] when the thread unblocks. /// with [`Self::restore_query_stack`] when the thread unblocks.
pub(super) fn take_query_stack(&self) -> Vec<ActiveQuery> { pub(super) fn take_query_stack(&self) -> Vec<ActiveQuery> {
assert!(self.query_stack.borrow().is_some(), "query stack already taken"); self.query_stack.take().expect("query stack already taken")
self.query_stack.take().unwrap()
} }
/// Restores a query stack taken with [`Self::take_query_stack`] once /// Restores a query stack taken with [`Self::take_query_stack`] once

View file

@@ -58,7 +58,7 @@ fn revalidate() {
     // Second generation: volatile will change (to 1) but memoized1
     // will not (still 0, as 1/2 = 0)
-    query.salsa_runtime_mut().synthetic_write(Durability::LOW);
+    query.synthetic_write(Durability::LOW);
     query.memoized2();
     query.assert_log(&["Volatile invoked", "Memoized1 invoked"]);
     query.memoized2();
@@ -67,7 +67,7 @@ fn revalidate() {
     // Third generation: volatile will change (to 2) and memoized1
     // will too (to 1). Therefore, after validating that Memoized1
     // changed, we now invoke Memoized2.
-    query.salsa_runtime_mut().synthetic_write(Durability::LOW);
+    query.synthetic_write(Durability::LOW);
     query.memoized2();
     query.assert_log(&["Volatile invoked", "Memoized1 invoked", "Memoized2 invoked"]);


@@ -111,7 +111,7 @@ fn on_demand_input_durability() {
     }
     "#]].assert_debug_eq(&events);

-    db.salsa_runtime_mut().synthetic_write(Durability::LOW);
+    db.synthetic_write(Durability::LOW);
     events.replace(vec![]);
     assert_eq!(db.c(1), 10);
     assert_eq!(db.c(2), 20);
@@ -128,7 +128,7 @@ fn on_demand_input_durability() {
     }
     "#]].assert_debug_eq(&events);

-    db.salsa_runtime_mut().synthetic_write(Durability::HIGH);
+    db.synthetic_write(Durability::HIGH);
     events.replace(vec![]);
     assert_eq!(db.c(1), 10);
     assert_eq!(db.c(2), 20);


@@ -20,7 +20,7 @@ fn volatile_twice() {
     let v2 = db.volatile(); // volatiles are cached, so 2nd read returns the same
     assert_eq!(v1, v2);

-    db.salsa_runtime_mut().synthetic_write(Durability::LOW); // clears volatile caches
+    db.synthetic_write(Durability::LOW); // clears volatile caches

     let v3 = db.volatile(); // will re-increment the counter
     let v4 = db.volatile(); // second call will be cached
@@ -40,7 +40,7 @@ fn intermingled() {
     assert_eq!(v1, v3);
     assert_eq!(v2, v4);

-    db.salsa_runtime_mut().synthetic_write(Durability::LOW); // clears volatile caches
+    db.synthetic_write(Durability::LOW); // clears volatile caches

     let v5 = db.memoized(); // re-executes volatile, caches new result
     let v6 = db.memoized(); // re-use cached result


@@ -302,6 +302,22 @@ pub fn slice_tails<T>(this: &[T]) -> impl Iterator<Item = &[T]> {
     (0..this.len()).map(|i| &this[i..])
 }

+pub trait IsNoneOr {
+    type Type;
+    #[allow(clippy::wrong_self_convention)]
+    fn is_none_or(self, s: impl FnOnce(Self::Type) -> bool) -> bool;
+}
+#[allow(unstable_name_collisions)]
+impl<T> IsNoneOr for Option<T> {
+    type Type = T;
+    fn is_none_or(self, f: impl FnOnce(T) -> bool) -> bool {
+        match self {
+            Some(v) => f(v),
+            None => true,
+        }
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
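
A usage sketch of the polyfill above, with the `IsNoneOr` trait in scope; the `unstable_name_collisions` allow is there because std's own `Option::is_none_or` (unstable at the time) collides with this name:

    fn main() {
        // Mirrors `is_some_and`, but is vacuously true for `None`.
        assert!(None::<i32>.is_none_or(|v| v > 0));
        assert!(Some(3).is_none_or(|v| v > 0));
        assert!(!Some(-3).is_none_or(|v| v > 0));
    }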


@@ -4,7 +4,11 @@ import * as ra from "./lsp_ext";
 import * as path from "path";
 import type { Ctx, Cmd, CtxInit } from "./ctx";
-import { applySnippetWorkspaceEdit, applySnippetTextEdits } from "./snippets";
+import {
+    applySnippetWorkspaceEdit,
+    applySnippetTextEdits,
+    type SnippetTextDocumentEdit,
+} from "./snippets";
 import { spawnSync } from "child_process";
 import { type RunnableQuickPick, selectRunnable, createTask, createArgs } from "./run";
 import { AstInspector } from "./ast_inspector";
@@ -1006,7 +1010,6 @@ export function resolveCodeAction(ctx: CtxInit): Cmd {
             return;
         }
         const itemEdit = item.edit;
-        const edit = await client.protocol2CodeConverter.asWorkspaceEdit(itemEdit);
         // filter out all text edits and recreate the WorkspaceEdit without them so we can apply
         // snippet edits on our own
         const lcFileSystemEdit = {
@@ -1017,16 +1020,71 @@ export function resolveCodeAction(ctx: CtxInit): Cmd {
             lcFileSystemEdit,
         );
         await vscode.workspace.applyEdit(fileSystemEdit);
-        await applySnippetWorkspaceEdit(edit);
+        // replace all text edits so that we can convert snippet text edits into `vscode.SnippetTextEdit`s
+        // FIXME: this is a workaround until vscode-languageclient supports doing the SnippetTextEdit conversion itself
+        // also need to carry the snippetTextDocumentEdits separately, since we can't retrieve them again using WorkspaceEdit.entries
+        const [workspaceTextEdit, snippetTextDocumentEdits] = asWorkspaceSnippetEdit(ctx, itemEdit);
+        await applySnippetWorkspaceEdit(workspaceTextEdit, snippetTextDocumentEdits);

         if (item.command != null) {
             await vscode.commands.executeCommand(item.command.command, item.command.arguments);
         }
     };
 }
+
+function asWorkspaceSnippetEdit(
+    ctx: CtxInit,
+    item: lc.WorkspaceEdit,
+): [vscode.WorkspaceEdit, SnippetTextDocumentEdit[]] {
+    const client = ctx.client;
+    // partially borrowed from https://github.com/microsoft/vscode-languageserver-node/blob/295aaa393fda8ecce110c38880a00466b9320e63/client/src/common/protocolConverter.ts#L1060-L1101
+    const result = new vscode.WorkspaceEdit();
+    if (item.documentChanges) {
+        const snippetTextDocumentEdits: SnippetTextDocumentEdit[] = [];
+        for (const change of item.documentChanges) {
+            if (lc.TextDocumentEdit.is(change)) {
+                const uri = client.protocol2CodeConverter.asUri(change.textDocument.uri);
+                const snippetTextEdits: (vscode.TextEdit | vscode.SnippetTextEdit)[] = [];
+                for (const edit of change.edits) {
+                    if (
+                        "insertTextFormat" in edit &&
+                        edit.insertTextFormat === lc.InsertTextFormat.Snippet
+                    ) {
+                        // is a snippet text edit
+                        snippetTextEdits.push(
+                            new vscode.SnippetTextEdit(
+                                client.protocol2CodeConverter.asRange(edit.range),
+                                new vscode.SnippetString(edit.newText),
+                            ),
+                        );
+                    } else {
+                        // always as a text document edit
+                        snippetTextEdits.push(
+                            vscode.TextEdit.replace(
+                                client.protocol2CodeConverter.asRange(edit.range),
+                                edit.newText,
+                            ),
+                        );
+                    }
+                }
+                snippetTextDocumentEdits.push([uri, snippetTextEdits]);
+            }
+        }
+        return [result, snippetTextDocumentEdits];
+    } else {
+        // we don't handle WorkspaceEdit.changes since it's not relevant for code actions
+        return [result, []];
+    }
+}

 export function applySnippetWorkspaceEditCommand(_ctx: CtxInit): Cmd {
     return async (edit: vscode.WorkspaceEdit) => {
-        await applySnippetWorkspaceEdit(edit);
+        await applySnippetWorkspaceEdit(edit, edit.entries());
     };
 }


@@ -3,20 +3,28 @@ import * as vscode from "vscode";
 import { assert } from "./util";
 import { unwrapUndefinable } from "./undefinable";

-export async function applySnippetWorkspaceEdit(edit: vscode.WorkspaceEdit) {
-    if (edit.entries().length === 1) {
-        const [uri, edits] = unwrapUndefinable(edit.entries()[0]);
+export type SnippetTextDocumentEdit = [vscode.Uri, (vscode.TextEdit | vscode.SnippetTextEdit)[]];
+
+export async function applySnippetWorkspaceEdit(
+    edit: vscode.WorkspaceEdit,
+    editEntries: SnippetTextDocumentEdit[],
+) {
+    if (editEntries.length === 1) {
+        const [uri, edits] = unwrapUndefinable(editEntries[0]);
         const editor = await editorFromUri(uri);
-        if (editor) await applySnippetTextEdits(editor, edits);
+        if (editor) {
+            edit.set(uri, removeLeadingWhitespace(editor, edits));
+            await vscode.workspace.applyEdit(edit);
+        }
         return;
     }
-    for (const [uri, edits] of edit.entries()) {
+    for (const [uri, edits] of editEntries) {
         const editor = await editorFromUri(uri);
         if (editor) {
             await editor.edit((builder) => {
                 for (const indel of edits) {
                     assert(
-                        !parseSnippet(indel.newText),
+                        !(indel instanceof vscode.SnippetTextEdit),
                         `bad ws edit: snippet received with multiple edits: ${JSON.stringify(
                             edit,
                         )}`,
@@ -39,53 +47,97 @@ async function editorFromUri(uri: vscode.Uri): Promise<vscode.TextEditor | undef
 }

 export async function applySnippetTextEdits(editor: vscode.TextEditor, edits: vscode.TextEdit[]) {
-    const selections: vscode.Selection[] = [];
-    let lineDelta = 0;
-    await editor.edit((builder) => {
-        for (const indel of edits) {
-            const parsed = parseSnippet(indel.newText);
-            if (parsed) {
-                const [newText, [placeholderStart, placeholderLength]] = parsed;
-                const prefix = newText.substr(0, placeholderStart);
-                const lastNewline = prefix.lastIndexOf("\n");
-                const startLine = indel.range.start.line + lineDelta + countLines(prefix);
-                const startColumn =
-                    lastNewline === -1
-                        ? indel.range.start.character + placeholderStart
-                        : prefix.length - lastNewline - 1;
-                const endColumn = startColumn + placeholderLength;
-                selections.push(
-                    new vscode.Selection(
-                        new vscode.Position(startLine, startColumn),
-                        new vscode.Position(startLine, endColumn),
-                    ),
-                );
-                builder.replace(indel.range, newText);
-            } else {
-                builder.replace(indel.range, indel.newText);
-            }
-            lineDelta +=
-                countLines(indel.newText) - (indel.range.end.line - indel.range.start.line);
-        }
-    });
-    if (selections.length > 0) editor.selections = selections;
-    if (selections.length === 1) {
-        const selection = unwrapUndefinable(selections[0]);
-        editor.revealRange(selection, vscode.TextEditorRevealType.InCenterIfOutsideViewport);
-    }
+    const edit = new vscode.WorkspaceEdit();
+    const snippetEdits = toSnippetTextEdits(edits);
+    edit.set(editor.document.uri, removeLeadingWhitespace(editor, snippetEdits));
+    await vscode.workspace.applyEdit(edit);
 }

-function parseSnippet(snip: string): [string, [number, number]] | undefined {
-    const m = snip.match(/\$(0|\{0:([^}]*)\})/);
-    if (!m) return undefined;
-    const placeholder = m[2] ?? "";
-    if (m.index == null) return undefined;
-    const range: [number, number] = [m.index, placeholder.length];
-    const insert = snip.replace(m[0], placeholder);
-    return [insert, range];
+function hasSnippet(snip: string): boolean {
+    const m = snip.match(/\$\d+|\{\d+:[^}]*\}/);
+    return m != null;
 }

-function countLines(text: string): number {
-    return (text.match(/\n/g) || []).length;
+function toSnippetTextEdits(
+    edits: vscode.TextEdit[],
+): (vscode.TextEdit | vscode.SnippetTextEdit)[] {
+    return edits.map((textEdit) => {
+        // Note: text edits without any snippets are returned as-is instead of
+        // being wrapped in a SnippetTextEdit, as otherwise it would be
+        // treated as if it had a tab stop at the end.
+        if (hasSnippet(textEdit.newText)) {
+            return new vscode.SnippetTextEdit(
+                textEdit.range,
+                new vscode.SnippetString(textEdit.newText),
+            );
+        } else {
+            return textEdit;
+        }
+    });
+}
+
+/**
+ * Removes the leading whitespace from snippet edits, so as to not double up
+ * on indentation.
+ *
+ * Snippet edits by default adjust any multi-line snippets to match the
+ * indentation of the line to insert at. Unfortunately, we (the server) also
+ * include the required indentation to match what we line insert at, so we end
+ * up doubling up the indentation. Since there isn't any way to tell vscode to
+ * not fixup indentation for us, we instead opt to remove the indentation and
+ * then let vscode add it back in.
+ *
+ * This assumes that the source snippet text edits have the required
+ * indentation, but that's okay as even without this workaround and the problem
+ * to workaround, those snippet edits would already be inserting at the wrong
+ * indentation.
+ */
+function removeLeadingWhitespace(
+    editor: vscode.TextEditor,
+    edits: (vscode.TextEdit | vscode.SnippetTextEdit)[],
+) {
+    return edits.map((edit) => {
+        if (edit instanceof vscode.SnippetTextEdit) {
+            const snippetEdit: vscode.SnippetTextEdit = edit;
+            const firstLineEnd = snippetEdit.snippet.value.indexOf("\n");
+            if (firstLineEnd !== -1) {
+                // Is a multi-line snippet, remove the indentation which
+                // would be added back in by vscode.
+                const startLine = editor.document.lineAt(snippetEdit.range.start.line);
+                const leadingWhitespace = getLeadingWhitespace(
+                    startLine.text,
+                    0,
+                    startLine.firstNonWhitespaceCharacterIndex,
+                );
+                const [firstLine, rest] = splitAt(snippetEdit.snippet.value, firstLineEnd + 1);
+                const unindentedLines = rest
+                    .split("\n")
+                    .map((line) => line.replace(leadingWhitespace, ""))
+                    .join("\n");
+                snippetEdit.snippet.value = firstLine + unindentedLines;
+            }
+            return snippetEdit;
+        } else {
+            return edit;
+        }
+    });
+}
+
+// based on https://github.com/microsoft/vscode/blob/main/src/vs/base/common/strings.ts#L284
+function getLeadingWhitespace(str: string, start: number = 0, end: number = str.length): string {
+    for (let i = start; i < end; i++) {
+        const chCode = str.charCodeAt(i);
+        if (chCode !== " ".charCodeAt(0) && chCode !== "\t".charCodeAt(0)) {
+            return str.substring(start, i);
+        }
+    }
+    return str.substring(start, end);
 }
+
+function splitAt(str: string, index: number): [string, string] {
+    return [str.substring(0, index), str.substring(index)];
+}
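
A hand-worked illustration of the doubling that `removeLeadingWhitespace` avoids (the snippet text is made up): suppose the server sends this two-line snippet, targeting a line indented by four spaces, with the continuation line already carrying that indentation:

    foo($0);
        bar();

VS Code re-indents every line after the first to match the insertion line, so `bar();` would end up with eight leading spaces. Stripping the leading four spaces before applying the edit lets VS Code's own re-indentation produce the intended four.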


@@ -86,7 +86,11 @@ impl Metrics {
     fn measure_rustc_tests(&mut self, sh: &Shell) -> anyhow::Result<()> {
         eprintln!("\nMeasuring rustc tests");

-        cmd!(sh, "git clone --depth=1 https://github.com/rust-lang/rust").run()?;
+        cmd!(
+            sh,
+            "git clone --depth=1 --branch 1.76.0 https://github.com/rust-lang/rust.git --single-branch"
+        )
+        .run()?;

         let output = cmd!(sh, "./target/release/rust-analyzer rustc-tests ./rust").read()?;
         for (metric, value, unit) in parse_metrics(&output) {