8866: Update salsa r=matklad a=jonas-schievink

This updates salsa to include https://github.com/salsa-rs/salsa/pull/265 and removes all cancellation-related code from rust-analyzer, since salsa itself now handles cancellation.

Co-authored-by: Jonas Schievink <jonasschievink@gmail.com>
bors[bot] 2021-05-31 12:42:32 +00:00 committed by GitHub
commit e9a797748d
16 changed files with 98 additions and 189 deletions

Cargo.lock (generated)

@ -1375,9 +1375,9 @@ checksum = "71d301d4193d031abdd79ff7e3dd721168a9572ef3fe51a1517aba235bd8f86e"
[[package]]
name = "salsa"
version = "0.16.1"
version = "0.17.0-pre.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4b84d9f96071f3f3be0dc818eae3327625d8ebc95b58da37d6850724f31d3403"
checksum = "58038261ea8cd5a7730c4d8c97a22063d7c7eb1c2809e55c3c15f0a5903e5582"
dependencies = [
"crossbeam-utils",
"indexmap",
@ -1392,9 +1392,9 @@ dependencies = [
[[package]]
name = "salsa-macros"
version = "0.16.0"
version = "0.17.0-pre.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cd3904a4ba0a9d0211816177fd34b04c7095443f8cdacd11175064fe541c8fe2"
checksum = "2e2fc060627fa5d44bffac98f6089b9497779e2deccc26687f60adc2638e32fb"
dependencies = [
"heck",
"proc-macro2",


@ -10,7 +10,7 @@ edition = "2018"
doctest = false
[dependencies]
salsa = "0.16.0"
salsa = "0.17.0-pre.1"
rustc-hash = "1.1.0"
syntax = { path = "../syntax", version = "0.0.0" }


@ -1,48 +0,0 @@
//! Utility types to support cancellation.
//!
//! In a typical IDE use-case, requests and modification happen concurrently, as
//! in the following scenario:
//!
//! * user types a character,
//! * a syntax highlighting process is started
//! * user types next character, while syntax highlighting *is still in
//! progress*.
//!
//! In this situation, we want to react to modification as quickly as possible.
//! At the same time, in-progress results are not very interesting, because they
//! are invalidated by the edit anyway. So, we first cancel all in-flight
//! requests, and then apply modification knowing that it won't interfere with
//! any background processing (this bit is handled by salsa, see the
//! `BaseDatabase::check_canceled` method).
/// An "error" signifying that the operation was canceled.
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct Canceled {
_private: (),
}
impl Canceled {
pub(crate) fn new() -> Canceled {
Canceled { _private: () }
}
pub fn throw() -> ! {
// We use resume and not panic here to avoid running the panic
// hook (that is, to avoid collecting and printing backtrace).
std::panic::resume_unwind(Box::new(Canceled::new()))
}
}
impl std::fmt::Display for Canceled {
fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
fmt.write_str("canceled")
}
}
impl std::fmt::Debug for Canceled {
fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(fmt, "Canceled")
}
}
impl std::error::Error for Canceled {}

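The module deleted above implements cancellation as an unwind carrying a private marker value, using resume_unwind so the panic hook (and its backtrace) is skipped, with the payload downcast back at the API boundary. A dependency-free sketch of that pattern, with illustrative names rather than the rust-analyzer ones:

use std::panic;

// Marker payload meaning "this work is obsolete"; deliberately not a normal error.
#[derive(Debug)]
struct CancelMarker;

fn cancel() -> ! {
    // resume_unwind bypasses the panic hook, so no backtrace is collected
    // for this expected control-flow event.
    panic::resume_unwind(Box::new(CancelMarker))
}

fn long_query(obsolete: bool) -> usize {
    if obsolete {
        cancel();
    }
    42
}

fn main() {
    // Catch the unwind at the boundary and turn it back into data.
    match panic::catch_unwind(|| long_query(true)) {
        Ok(value) => println!("finished: {}", value),
        Err(payload) => match payload.downcast::<CancelMarker>() {
            Ok(_) => println!("query was cancelled"),
            // Anything else is a genuine panic: keep unwinding.
            Err(other) => panic::resume_unwind(other),
        },
    }
}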

@ -1,5 +1,4 @@
//! base_db defines basic database traits. The concrete DB is defined by ide.
mod cancellation;
mod input;
mod change;
pub mod fixture;
@ -10,14 +9,13 @@ use rustc_hash::FxHashSet;
use syntax::{ast, Parse, SourceFile, TextRange, TextSize};
pub use crate::{
cancellation::Canceled,
change::Change,
input::{
CrateData, CrateDisplayName, CrateGraph, CrateId, CrateName, Dependency, Edition, Env,
ProcMacro, ProcMacroExpander, ProcMacroId, ProcMacroKind, SourceRoot, SourceRootId,
},
};
pub use salsa;
pub use salsa::{self, Cancelled};
pub use vfs::{file_set::FileSet, AnchoredPath, AnchoredPathBuf, FileId, VfsPath};
#[macro_export]
@ -38,45 +36,6 @@ pub trait Upcast<T: ?Sized> {
fn upcast(&self) -> &T;
}
pub trait CheckCanceled {
/// Aborts current query if there are pending changes.
///
/// rust-analyzer needs to be able to answer semantic questions about the
/// code while the code is being modified. A common problem is that a
/// long-running query is being calculated when a new change arrives.
///
/// We can't just apply the change immediately: this will cause the pending
/// query to see inconsistent state (it will observe an absence of
/// repeatable read). So what we do is we **cancel** all pending queries
/// before applying the change.
///
/// We implement cancellation by panicking with a special value and catching
/// it on the API boundary. Salsa explicitly supports this use-case.
fn check_canceled(&self);
fn catch_canceled<F, T>(&self, f: F) -> Result<T, Canceled>
where
Self: Sized + panic::RefUnwindSafe,
F: FnOnce(&Self) -> T + panic::UnwindSafe,
{
// Uncomment to debug missing cancellations.
// let _span = profile::heartbeat_span();
panic::catch_unwind(|| f(self)).map_err(|err| match err.downcast::<Canceled>() {
Ok(canceled) => *canceled,
Err(payload) => panic::resume_unwind(payload),
})
}
}
impl<T: salsa::Database> CheckCanceled for T {
fn check_canceled(&self) {
// profile::heartbeat();
if self.salsa_runtime().is_current_revision_canceled() {
Canceled::throw()
}
}
}
#[derive(Clone, Copy, Debug)]
pub struct FilePosition {
pub file_id: FileId,
@ -101,7 +60,7 @@ pub trait FileLoader {
/// Database which stores all significant input facts: source code and project
/// model. Everything else in rust-analyzer is derived from these queries.
#[salsa::query_group(SourceDatabaseStorage)]
pub trait SourceDatabase: CheckCanceled + FileLoader + std::fmt::Debug {
pub trait SourceDatabase: FileLoader + std::fmt::Debug {
// Parses the file into the syntax tree.
#[salsa::invoke(parse_query)]
fn parse(&self, file_id: FileId) -> Parse<ast::SourceFile>;

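The removed CheckCanceled documentation describes the protocol this PR delegates to salsa: before a change is applied, every in-flight query is cancelled so that none of them observes a half-applied edit. Below is a self-contained sketch of that behaviour against salsa 0.17.0-pre.1 alone; the database, query names, and timings are illustrative, while unwind_if_cancelled, Cancelled::catch, and the ParallelDatabase/Snapshot machinery are the salsa APIs this PR switches to:

use std::{panic::AssertUnwindSafe, sync::mpsc, thread, time::Duration};

use salsa::{Cancelled, Database, ParallelDatabase, Snapshot};

#[salsa::query_group(DemoStorage)]
trait DemoDatabase {
    #[salsa::input]
    fn source_text(&self) -> String;
    // Derived query; salsa resolves it to the free function below.
    fn word_count(&self) -> usize;
}

fn word_count(db: &dyn DemoDatabase) -> usize {
    // A deliberately slow query that cooperates with cancellation.
    for _ in 0..1_000 {
        db.unwind_if_cancelled();
        thread::sleep(Duration::from_millis(1));
    }
    db.source_text().split_whitespace().count()
}

#[salsa::database(DemoStorage)]
#[derive(Default)]
struct DemoDb {
    storage: salsa::Storage<DemoDb>,
}

impl Database for DemoDb {}

impl ParallelDatabase for DemoDb {
    fn snapshot(&self) -> Snapshot<DemoDb> {
        Snapshot::new(DemoDb { storage: self.storage.snapshot() })
    }
}

fn main() {
    let mut db = DemoDb::default();
    db.set_source_text("one two three".to_string());

    let (started_tx, started_rx) = mpsc::channel();
    let reader = {
        let snap = db.snapshot();
        thread::spawn(move || {
            started_tx.send(()).unwrap();
            // AssertUnwindSafe: we promise not to observe broken invariants
            // across the cancellation unwind (salsa upholds this).
            let snap = AssertUnwindSafe(snap);
            // Convert the unwind back into a Result at the boundary,
            // as Analysis::with_db now does with Cancelled::catch.
            Cancelled::catch(|| snap.word_count())
        })
    };

    // Let the reader get into the query, then apply a change: the pending
    // write flags cancellation, and unwind_if_cancelled above unwinds.
    started_rx.recv().unwrap();
    thread::sleep(Duration::from_millis(50));
    db.set_source_text("four five".to_string());

    // With these timings the reader reports Err(Cancelled) rather than a count.
    println!("{:?}", reader.join().unwrap());
}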

@ -361,7 +361,7 @@ impl<'db> SemanticsImpl<'db> {
let sa = self.analyze(&parent);
let token = successors(Some(InFile::new(sa.file_id, token)), |token| {
self.db.check_canceled();
self.db.unwind_if_cancelled();
let macro_call = token.value.ancestors().find_map(ast::MacroCall::cast)?;
let tt = macro_call.token_tree()?;
if !tt.syntax().text_range().contains_range(token.value.text_range()) {


@ -351,7 +351,7 @@ impl DefCollector<'_> {
let mut i = 0;
'outer: loop {
loop {
self.db.check_canceled();
self.db.unwind_if_cancelled();
loop {
if self.resolve_imports() == ReachedFixedPoint::Yes {
break;
@ -836,7 +836,7 @@ impl DefCollector<'_> {
vis: Visibility,
import_type: ImportType,
) {
self.db.check_canceled();
self.db.unwind_if_cancelled();
self.update_recursive(module_id, resolutions, vis, import_type, 0)
}


@ -119,7 +119,7 @@ impl<'a> InferenceContext<'a> {
}
fn infer_expr_inner(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty {
self.db.check_canceled();
self.db.unwind_if_cancelled();
let body = Arc::clone(&self.body); // avoid borrow checker problem
let ty = match &body[tgt_expr] {


@ -112,7 +112,7 @@ fn solve(
let fuel = std::cell::Cell::new(CHALK_SOLVER_FUEL);
let should_continue = || {
context.db.check_canceled();
db.unwind_if_cancelled();
let remaining = fuel.get();
fuel.set(remaining - 1);
if remaining == 0 {

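The should_continue closure above combines two independent brakes on the Chalk solver: salsa cancellation and a fixed fuel budget. A dependency-free sketch of the fuel half (the helper name and the saturating decrement are illustrative; in the real closure the first statement is db.unwind_if_cancelled()):

use std::cell::Cell;

// Build a closure that allows `fuel_limit` more steps and then says stop.
fn make_should_continue(fuel_limit: u32) -> impl Fn() -> bool {
    let fuel = Cell::new(fuel_limit);
    move || {
        // The real closure also checks cancellation here, so a pending
        // change stops the solver even before the fuel runs out.
        let remaining = fuel.get();
        fuel.set(remaining.saturating_sub(1));
        remaining > 0
    }
}

fn main() {
    let should_continue = make_should_continue(3);
    let mut steps = 0;
    while should_continue() {
        steps += 1;
    }
    assert_eq!(steps, 3);
}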

@ -58,7 +58,7 @@ use cfg::CfgOptions;
use ide_db::base_db::{
salsa::{self, ParallelDatabase},
CheckCanceled, Env, FileLoader, FileSet, SourceDatabase, VfsPath,
Env, FileLoader, FileSet, SourceDatabase, VfsPath,
};
use ide_db::{
symbol_index::{self, FileSymbol},
@ -98,7 +98,7 @@ pub use ide_completion::{
};
pub use ide_db::{
base_db::{
Canceled, Change, CrateGraph, CrateId, Edition, FileId, FilePosition, FileRange,
Cancelled, Change, CrateGraph, CrateId, Edition, FileId, FilePosition, FileRange,
SourceRoot, SourceRootId,
},
call_info::CallInfo,
@ -113,7 +113,7 @@ pub use ide_ssr::SsrError;
pub use syntax::{TextRange, TextSize};
pub use text_edit::{Indel, TextEdit};
pub type Cancelable<T> = Result<T, Canceled>;
pub type Cancellable<T> = Result<T, Cancelled>;
/// Info associated with a text range.
#[derive(Debug)]
@ -227,11 +227,11 @@ impl Analysis {
}
/// Debug info about the current state of the analysis.
pub fn status(&self, file_id: Option<FileId>) -> Cancelable<String> {
pub fn status(&self, file_id: Option<FileId>) -> Cancellable<String> {
self.with_db(|db| status::status(&*db, file_id))
}
pub fn prime_caches<F>(&self, cb: F) -> Cancelable<()>
pub fn prime_caches<F>(&self, cb: F) -> Cancellable<()>
where
F: Fn(PrimeCachesProgress) + Sync + std::panic::UnwindSafe,
{
@ -239,35 +239,35 @@ impl Analysis {
}
/// Gets the text of the source file.
pub fn file_text(&self, file_id: FileId) -> Cancelable<Arc<String>> {
pub fn file_text(&self, file_id: FileId) -> Cancellable<Arc<String>> {
self.with_db(|db| db.file_text(file_id))
}
/// Gets the syntax tree of the file.
pub fn parse(&self, file_id: FileId) -> Cancelable<SourceFile> {
pub fn parse(&self, file_id: FileId) -> Cancellable<SourceFile> {
self.with_db(|db| db.parse(file_id).tree())
}
/// Returns true if this file belongs to an immutable library.
pub fn is_library_file(&self, file_id: FileId) -> Cancelable<bool> {
pub fn is_library_file(&self, file_id: FileId) -> Cancellable<bool> {
use ide_db::base_db::SourceDatabaseExt;
self.with_db(|db| db.source_root(db.file_source_root(file_id)).is_library)
}
/// Gets the file's `LineIndex`: data structure to convert between absolute
/// offsets and line/column representation.
pub fn file_line_index(&self, file_id: FileId) -> Cancelable<Arc<LineIndex>> {
pub fn file_line_index(&self, file_id: FileId) -> Cancellable<Arc<LineIndex>> {
self.with_db(|db| db.line_index(file_id))
}
/// Selects the next syntactic nodes encompassing the range.
pub fn extend_selection(&self, frange: FileRange) -> Cancelable<TextRange> {
pub fn extend_selection(&self, frange: FileRange) -> Cancellable<TextRange> {
self.with_db(|db| extend_selection::extend_selection(db, frange))
}
/// Returns position of the matching brace (all types of braces are
/// supported).
pub fn matching_brace(&self, position: FilePosition) -> Cancelable<Option<TextSize>> {
pub fn matching_brace(&self, position: FilePosition) -> Cancellable<Option<TextSize>> {
self.with_db(|db| {
let parse = db.parse(position.file_id);
let file = parse.tree();
@ -281,30 +281,30 @@ impl Analysis {
&self,
file_id: FileId,
text_range: Option<TextRange>,
) -> Cancelable<String> {
) -> Cancellable<String> {
self.with_db(|db| syntax_tree::syntax_tree(&db, file_id, text_range))
}
pub fn view_hir(&self, position: FilePosition) -> Cancelable<String> {
pub fn view_hir(&self, position: FilePosition) -> Cancellable<String> {
self.with_db(|db| view_hir::view_hir(&db, position))
}
pub fn view_item_tree(&self, file_id: FileId) -> Cancelable<String> {
pub fn view_item_tree(&self, file_id: FileId) -> Cancellable<String> {
self.with_db(|db| view_item_tree::view_item_tree(&db, file_id))
}
/// Renders the crate graph to GraphViz "dot" syntax.
pub fn view_crate_graph(&self) -> Cancelable<Result<String, String>> {
pub fn view_crate_graph(&self) -> Cancellable<Result<String, String>> {
self.with_db(|db| view_crate_graph::view_crate_graph(&db))
}
pub fn expand_macro(&self, position: FilePosition) -> Cancelable<Option<ExpandedMacro>> {
pub fn expand_macro(&self, position: FilePosition) -> Cancellable<Option<ExpandedMacro>> {
self.with_db(|db| expand_macro::expand_macro(db, position))
}
/// Returns an edit to remove all newlines in the range, cleaning up minor
/// stuff like trailing commas.
pub fn join_lines(&self, frange: FileRange) -> Cancelable<TextEdit> {
pub fn join_lines(&self, frange: FileRange) -> Cancellable<TextEdit> {
self.with_db(|db| {
let parse = db.parse(frange.file_id);
join_lines::join_lines(&parse.tree(), frange.range)
@ -314,7 +314,7 @@ impl Analysis {
/// Returns an edit which should be applied when opening a new line, fixing
/// up minor stuff like continuing the comment.
/// The edit will be a snippet (with `$0`).
pub fn on_enter(&self, position: FilePosition) -> Cancelable<Option<TextEdit>> {
pub fn on_enter(&self, position: FilePosition) -> Cancellable<Option<TextEdit>> {
self.with_db(|db| typing::on_enter(&db, position))
}
@ -326,7 +326,7 @@ impl Analysis {
&self,
position: FilePosition,
char_typed: char,
) -> Cancelable<Option<SourceChange>> {
) -> Cancellable<Option<SourceChange>> {
// Fast path to not even parse the file.
if !typing::TRIGGER_CHARS.contains(char_typed) {
return Ok(None);
@ -336,7 +336,7 @@ impl Analysis {
/// Returns a tree representation of symbols in the file. Useful to draw a
/// file outline.
pub fn file_structure(&self, file_id: FileId) -> Cancelable<Vec<StructureNode>> {
pub fn file_structure(&self, file_id: FileId) -> Cancellable<Vec<StructureNode>> {
self.with_db(|db| file_structure::file_structure(&db.parse(file_id).tree()))
}
@ -345,17 +345,17 @@ impl Analysis {
&self,
file_id: FileId,
config: &InlayHintsConfig,
) -> Cancelable<Vec<InlayHint>> {
) -> Cancellable<Vec<InlayHint>> {
self.with_db(|db| inlay_hints::inlay_hints(db, file_id, config))
}
/// Returns the set of folding ranges.
pub fn folding_ranges(&self, file_id: FileId) -> Cancelable<Vec<Fold>> {
pub fn folding_ranges(&self, file_id: FileId) -> Cancellable<Vec<Fold>> {
self.with_db(|db| folding_ranges::folding_ranges(&db.parse(file_id).tree()))
}
/// Fuzzy searches for a symbol.
pub fn symbol_search(&self, query: Query) -> Cancelable<Vec<NavigationTarget>> {
pub fn symbol_search(&self, query: Query) -> Cancellable<Vec<NavigationTarget>> {
self.with_db(|db| {
symbol_index::world_symbols(db, query)
.into_iter()
@ -368,7 +368,7 @@ impl Analysis {
pub fn goto_definition(
&self,
position: FilePosition,
) -> Cancelable<Option<RangeInfo<Vec<NavigationTarget>>>> {
) -> Cancellable<Option<RangeInfo<Vec<NavigationTarget>>>> {
self.with_db(|db| goto_definition::goto_definition(db, position))
}
@ -376,7 +376,7 @@ impl Analysis {
pub fn goto_implementation(
&self,
position: FilePosition,
) -> Cancelable<Option<RangeInfo<Vec<NavigationTarget>>>> {
) -> Cancellable<Option<RangeInfo<Vec<NavigationTarget>>>> {
self.with_db(|db| goto_implementation::goto_implementation(db, position))
}
@ -384,7 +384,7 @@ impl Analysis {
pub fn goto_type_definition(
&self,
position: FilePosition,
) -> Cancelable<Option<RangeInfo<Vec<NavigationTarget>>>> {
) -> Cancellable<Option<RangeInfo<Vec<NavigationTarget>>>> {
self.with_db(|db| goto_type_definition::goto_type_definition(db, position))
}
@ -393,12 +393,12 @@ impl Analysis {
&self,
position: FilePosition,
search_scope: Option<SearchScope>,
) -> Cancelable<Option<ReferenceSearchResult>> {
) -> Cancellable<Option<ReferenceSearchResult>> {
self.with_db(|db| references::find_all_refs(&Semantics::new(db), position, search_scope))
}
/// Finds all methods and free functions for the file. Does not return tests!
pub fn find_all_methods(&self, file_id: FileId) -> Cancelable<Vec<FileRange>> {
pub fn find_all_methods(&self, file_id: FileId) -> Cancellable<Vec<FileRange>> {
self.with_db(|db| fn_references::find_all_methods(db, file_id))
}
@ -408,7 +408,7 @@ impl Analysis {
position: FilePosition,
links_in_hover: bool,
markdown: bool,
) -> Cancelable<Option<RangeInfo<HoverResult>>> {
) -> Cancellable<Option<RangeInfo<HoverResult>>> {
self.with_db(|db| hover::hover(db, position, links_in_hover, markdown))
}
@ -416,12 +416,12 @@ impl Analysis {
pub fn external_docs(
&self,
position: FilePosition,
) -> Cancelable<Option<doc_links::DocumentationLink>> {
) -> Cancellable<Option<doc_links::DocumentationLink>> {
self.with_db(|db| doc_links::external_docs(db, &position))
}
/// Computes parameter information for the given call expression.
pub fn call_info(&self, position: FilePosition) -> Cancelable<Option<CallInfo>> {
pub fn call_info(&self, position: FilePosition) -> Cancellable<Option<CallInfo>> {
self.with_db(|db| ide_db::call_info::call_info(db, position))
}
@ -429,42 +429,42 @@ impl Analysis {
pub fn call_hierarchy(
&self,
position: FilePosition,
) -> Cancelable<Option<RangeInfo<Vec<NavigationTarget>>>> {
) -> Cancellable<Option<RangeInfo<Vec<NavigationTarget>>>> {
self.with_db(|db| call_hierarchy::call_hierarchy(db, position))
}
/// Computes incoming calls for the given file position.
pub fn incoming_calls(&self, position: FilePosition) -> Cancelable<Option<Vec<CallItem>>> {
pub fn incoming_calls(&self, position: FilePosition) -> Cancellable<Option<Vec<CallItem>>> {
self.with_db(|db| call_hierarchy::incoming_calls(db, position))
}
/// Computes incoming calls for the given file position.
pub fn outgoing_calls(&self, position: FilePosition) -> Cancelable<Option<Vec<CallItem>>> {
pub fn outgoing_calls(&self, position: FilePosition) -> Cancellable<Option<Vec<CallItem>>> {
self.with_db(|db| call_hierarchy::outgoing_calls(db, position))
}
/// Returns a `mod name;` declaration which created the current module.
pub fn parent_module(&self, position: FilePosition) -> Cancelable<Vec<NavigationTarget>> {
pub fn parent_module(&self, position: FilePosition) -> Cancellable<Vec<NavigationTarget>> {
self.with_db(|db| parent_module::parent_module(db, position))
}
/// Returns crates this file belongs too.
pub fn crate_for(&self, file_id: FileId) -> Cancelable<Vec<CrateId>> {
pub fn crate_for(&self, file_id: FileId) -> Cancellable<Vec<CrateId>> {
self.with_db(|db| parent_module::crate_for(db, file_id))
}
/// Returns the edition of the given crate.
pub fn crate_edition(&self, crate_id: CrateId) -> Cancelable<Edition> {
pub fn crate_edition(&self, crate_id: CrateId) -> Cancellable<Edition> {
self.with_db(|db| db.crate_graph()[crate_id].edition)
}
/// Returns the root file of the given crate.
pub fn crate_root(&self, crate_id: CrateId) -> Cancelable<FileId> {
pub fn crate_root(&self, crate_id: CrateId) -> Cancellable<FileId> {
self.with_db(|db| db.crate_graph()[crate_id].root_file_id)
}
/// Returns the set of possible targets to run for the current file.
pub fn runnables(&self, file_id: FileId) -> Cancelable<Vec<Runnable>> {
pub fn runnables(&self, file_id: FileId) -> Cancellable<Vec<Runnable>> {
self.with_db(|db| runnables::runnables(db, file_id))
}
@ -473,24 +473,24 @@ impl Analysis {
&self,
position: FilePosition,
search_scope: Option<SearchScope>,
) -> Cancelable<Vec<Runnable>> {
) -> Cancellable<Vec<Runnable>> {
self.with_db(|db| runnables::related_tests(db, position, search_scope))
}
/// Computes syntax highlighting for the given file
pub fn highlight(&self, file_id: FileId) -> Cancelable<Vec<HlRange>> {
pub fn highlight(&self, file_id: FileId) -> Cancellable<Vec<HlRange>> {
self.with_db(|db| syntax_highlighting::highlight(db, file_id, None, false))
}
/// Computes syntax highlighting for the given file range.
pub fn highlight_range(&self, frange: FileRange) -> Cancelable<Vec<HlRange>> {
pub fn highlight_range(&self, frange: FileRange) -> Cancellable<Vec<HlRange>> {
self.with_db(|db| {
syntax_highlighting::highlight(db, frange.file_id, Some(frange.range), false)
})
}
/// Computes syntax highlighting for the given file.
pub fn highlight_as_html(&self, file_id: FileId, rainbow: bool) -> Cancelable<String> {
pub fn highlight_as_html(&self, file_id: FileId, rainbow: bool) -> Cancellable<String> {
self.with_db(|db| syntax_highlighting::highlight_as_html(db, file_id, rainbow))
}
@ -499,7 +499,7 @@ impl Analysis {
&self,
config: &CompletionConfig,
position: FilePosition,
) -> Cancelable<Option<Vec<CompletionItem>>> {
) -> Cancellable<Option<Vec<CompletionItem>>> {
self.with_db(|db| ide_completion::completions(db, config, position).map(Into::into))
}
@ -510,7 +510,7 @@ impl Analysis {
position: FilePosition,
full_import_path: &str,
imported_name: String,
) -> Cancelable<Vec<TextEdit>> {
) -> Cancellable<Vec<TextEdit>> {
Ok(self
.with_db(|db| {
ide_completion::resolve_completion_edits(
@ -533,7 +533,7 @@ impl Analysis {
config: &AssistConfig,
resolve: AssistResolveStrategy,
frange: FileRange,
) -> Cancelable<Vec<Assist>> {
) -> Cancellable<Vec<Assist>> {
self.with_db(|db| {
let ssr_assists = ssr::ssr_assists(db, &resolve, frange);
let mut acc = Assist::get(db, config, resolve, frange);
@ -548,7 +548,7 @@ impl Analysis {
config: &DiagnosticsConfig,
resolve: AssistResolveStrategy,
file_id: FileId,
) -> Cancelable<Vec<Diagnostic>> {
) -> Cancellable<Vec<Diagnostic>> {
self.with_db(|db| diagnostics::diagnostics(db, config, &resolve, file_id))
}
@ -559,7 +559,7 @@ impl Analysis {
diagnostics_config: &DiagnosticsConfig,
resolve: AssistResolveStrategy,
frange: FileRange,
) -> Cancelable<Vec<Assist>> {
) -> Cancellable<Vec<Assist>> {
let include_fixes = match &assist_config.allowed {
Some(it) => it.iter().any(|&it| it == AssistKind::None || it == AssistKind::QuickFix),
None => true,
@ -591,14 +591,14 @@ impl Analysis {
&self,
position: FilePosition,
new_name: &str,
) -> Cancelable<Result<SourceChange, RenameError>> {
) -> Cancellable<Result<SourceChange, RenameError>> {
self.with_db(|db| references::rename::rename(db, position, new_name))
}
pub fn prepare_rename(
&self,
position: FilePosition,
) -> Cancelable<Result<RangeInfo<()>, RenameError>> {
) -> Cancellable<Result<RangeInfo<()>, RenameError>> {
self.with_db(|db| references::rename::prepare_rename(db, position))
}
@ -606,7 +606,7 @@ impl Analysis {
&self,
file_id: FileId,
new_name_stem: &str,
) -> Cancelable<Option<SourceChange>> {
) -> Cancellable<Option<SourceChange>> {
self.with_db(|db| references::rename::will_rename_file(db, file_id, new_name_stem))
}
@ -616,7 +616,7 @@ impl Analysis {
parse_only: bool,
resolve_context: FilePosition,
selections: Vec<FileRange>,
) -> Cancelable<Result<SourceChange, SsrError>> {
) -> Cancellable<Result<SourceChange, SsrError>> {
self.with_db(|db| {
let rule: ide_ssr::SsrRule = query.parse()?;
let mut match_finder =
@ -631,11 +631,11 @@ impl Analysis {
&self,
file_id: FileId,
config: AnnotationConfig,
) -> Cancelable<Vec<Annotation>> {
) -> Cancellable<Vec<Annotation>> {
self.with_db(|db| annotations::annotations(db, file_id, config))
}
pub fn resolve_annotation(&self, annotation: Annotation) -> Cancelable<Annotation> {
pub fn resolve_annotation(&self, annotation: Annotation) -> Cancellable<Annotation> {
self.with_db(|db| annotations::resolve_annotation(db, annotation))
}
@ -643,16 +643,28 @@ impl Analysis {
&self,
range: FileRange,
direction: Direction,
) -> Cancelable<Option<TextEdit>> {
) -> Cancellable<Option<TextEdit>> {
self.with_db(|db| move_item::move_item(db, range, direction))
}
/// Performs an operation on that may be Canceled.
fn with_db<F, T>(&self, f: F) -> Cancelable<T>
/// Performs an operation on the database that may be canceled.
///
/// rust-analyzer needs to be able to answer semantic questions about the
/// code while the code is being modified. A common problem is that a
/// long-running query is being calculated when a new change arrives.
///
/// We can't just apply the change immediately: this will cause the pending
/// query to see inconsistent state (it will observe an absence of
/// repeatable read). So what we do is we **cancel** all pending queries
/// before applying the change.
///
/// Salsa implements cancelation by unwinding with a special value and
/// catching it on the API boundary.
fn with_db<F, T>(&self, f: F) -> Cancellable<T>
where
F: FnOnce(&RootDatabase) -> T + std::panic::UnwindSafe,
{
self.db.catch_canceled(f)
Cancelled::catch(|| f(&self.db))
}
}

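With with_db now built on Cancelled::catch, the public Analysis surface keeps the same Result shape, only under the Cancellable alias. A short sketch of composing the renamed API with the ? operator; the helper here is made up, and Analysis::from_single_file is assumed to be the existing single-file test constructor:

use ide::{Analysis, Cancellable, FileId};

// Helpers built on Analysis stay Result-based: `?` forwards a cancellation
// to the caller (ultimately the LSP dispatcher, which replies ContentModified).
fn outline_size(analysis: &Analysis, file_id: FileId) -> Cancellable<usize> {
    let structure = analysis.file_structure(file_id)?;
    let folds = analysis.folding_ranges(file_id)?;
    Ok(structure.len() + folds.len())
}

fn main() {
    let (analysis, file_id) = Analysis::from_single_file("fn main() {}".to_string());
    // Nothing can cancel these queries on a single thread, so this prints Ok(..).
    println!("{:?}", outline_size(&analysis, file_id));
}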

@ -19,8 +19,7 @@ use std::{fmt, sync::Arc};
use base_db::{
salsa::{self, Durability},
AnchoredPath, Canceled, CheckCanceled, CrateId, FileId, FileLoader, FileLoaderDelegate,
SourceDatabase, Upcast,
AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast,
};
use hir::db::{AstDatabase, DefDatabase, HirDatabase};
use rustc_hash::FxHashSet;
@ -80,20 +79,7 @@ impl FileLoader for RootDatabase {
}
}
impl salsa::Database for RootDatabase {
fn on_propagated_panic(&self) -> ! {
Canceled::throw()
}
fn salsa_event(&self, event: salsa::Event) {
match event.kind {
salsa::EventKind::DidValidateMemoizedValue { .. }
| salsa::EventKind::WillExecute { .. } => {
self.check_canceled();
}
_ => (),
}
}
}
impl salsa::Database for RootDatabase {}
impl Default for RootDatabase {
fn default() -> RootDatabase {
@ -126,7 +112,7 @@ impl salsa::ParallelDatabase for RootDatabase {
}
#[salsa::query_group(LineIndexDatabaseStorage)]
pub trait LineIndexDatabase: base_db::SourceDatabase + CheckCanceled {
pub trait LineIndexDatabase: base_db::SourceDatabase {
fn line_index(&self, file_id: FileId) -> Arc<LineIndex>;
}


@ -127,7 +127,7 @@ fn library_symbols(db: &dyn SymbolsDatabase) -> Arc<FxHashMap<SourceRootId, Symb
}
fn file_symbols(db: &dyn SymbolsDatabase, file_id: FileId) -> Arc<SymbolIndex> {
db.check_canceled();
db.unwind_if_cancelled();
let parse = db.parse(file_id);
let symbols = source_file_to_file_symbols(&parse.tree(), file_id);


@ -5,7 +5,7 @@ use serde::{de::DeserializeOwned, Serialize};
use crate::{
global_state::{GlobalState, GlobalStateSnapshot},
lsp_utils::is_canceled,
lsp_utils::is_cancelled,
main_loop::Task,
LspError, Result,
};
@ -132,7 +132,7 @@ where
Err(e) => match e.downcast::<LspError>() {
Ok(lsp_error) => lsp_server::Response::new_err(id, lsp_error.code, lsp_error.message),
Err(e) => {
if is_canceled(&*e) {
if is_cancelled(&*e) {
lsp_server::Response::new_err(
id,
lsp_server::ErrorCode::ContentModified as i32,


@ -7,7 +7,7 @@ use std::{sync::Arc, time::Instant};
use crossbeam_channel::{unbounded, Receiver, Sender};
use flycheck::FlycheckHandle;
use ide::{Analysis, AnalysisHost, Cancelable, Change, FileId};
use ide::{Analysis, AnalysisHost, Cancellable, Change, FileId};
use ide_db::base_db::{CrateId, VfsPath};
use lsp_types::{SemanticTokens, Url};
use parking_lot::{Mutex, RwLock};
@ -280,7 +280,7 @@ impl GlobalStateSnapshot {
file_id_to_url(&self.vfs.read().0, id)
}
pub(crate) fn file_line_index(&self, file_id: FileId) -> Cancelable<LineIndex> {
pub(crate) fn file_line_index(&self, file_id: FileId) -> Cancellable<LineIndex> {
let endings = self.vfs.read().1[&file_id];
let index = self.analysis.file_line_index(file_id)?;
let res = LineIndex { index, endings, encoding: self.config.offset_encoding() };


@ -1,7 +1,7 @@
//! Utilities for LSP-related boilerplate code.
use std::{error::Error, ops::Range, sync::Arc};
use ide_db::base_db::Canceled;
use ide_db::base_db::Cancelled;
use lsp_server::Notification;
use crate::{
@ -10,8 +10,8 @@ use crate::{
line_index::{LineEndings, LineIndex, OffsetEncoding},
};
pub(crate) fn is_canceled(e: &(dyn Error + 'static)) -> bool {
e.downcast_ref::<Canceled>().is_some()
pub(crate) fn is_cancelled(e: &(dyn Error + 'static)) -> bool {
e.downcast_ref::<Cancelled>().is_some()
}
pub(crate) fn notification_is<N: lsp_types::notification::Notification>(

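is_cancelled works because salsa::Cancelled implements std::error::Error, just as the old Canceled did, so a boxed error can be probed with Error::downcast_ref. A dependency-free sketch of the same check, using a local stand-in instead of the salsa type:

use std::error::Error;
use std::fmt;

// Stand-in for salsa::Cancelled so the sketch runs without salsa.
#[derive(Debug)]
struct Cancelled;

impl fmt::Display for Cancelled {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("cancelled")
    }
}

impl Error for Cancelled {}

// Same shape as the helper above: inspect the concrete type behind `dyn Error`.
fn is_cancelled(e: &(dyn Error + 'static)) -> bool {
    e.downcast_ref::<Cancelled>().is_some()
}

fn main() {
    let err: Box<dyn Error> = Box::new(Cancelled);
    assert!(is_cancelled(&*err));
}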

@ -22,7 +22,7 @@ use crate::{
from_proto,
global_state::{file_id_to_url, url_to_file_id, GlobalState},
handlers, lsp_ext,
lsp_utils::{apply_document_changes, is_canceled, notification_is, Progress},
lsp_utils::{apply_document_changes, is_cancelled, notification_is, Progress},
reload::{BuildDataProgress, ProjectWorkspaceProgress},
Result,
};
@ -752,7 +752,7 @@ impl GlobalState {
.filter_map(|file_id| {
handlers::publish_diagnostics(&snapshot, file_id)
.map_err(|err| {
if !is_canceled(&*err) {
if !is_cancelled(&*err) {
log::error!("failed to compute diagnostics: {:?}", err);
}
()


@ -6,7 +6,7 @@ use std::{
};
use ide::{
Annotation, AnnotationKind, Assist, AssistKind, CallInfo, Cancelable, CompletionItem,
Annotation, AnnotationKind, Assist, AssistKind, CallInfo, Cancellable, CompletionItem,
CompletionItemKind, CompletionRelevance, Documentation, FileId, FileRange, FileSystemEdit,
Fold, FoldKind, Highlight, HlMod, HlOperator, HlPunct, HlRange, HlTag, Indel, InlayHint,
InlayKind, InsertTextFormat, Markup, NavigationTarget, ReferenceAccess, RenameError, Runnable,
@ -727,7 +727,7 @@ pub(crate) fn snippet_text_document_edit(
pub(crate) fn snippet_text_document_ops(
snap: &GlobalStateSnapshot,
file_system_edit: FileSystemEdit,
) -> Cancelable<Vec<lsp_ext::SnippetDocumentChangeOperation>> {
) -> Cancellable<Vec<lsp_ext::SnippetDocumentChangeOperation>> {
let mut ops = Vec::new();
match file_system_edit {
FileSystemEdit::CreateFile { dst, initial_contents } => {
@ -757,7 +757,7 @@ pub(crate) fn snippet_text_document_ops(
let new_uri = snap.anchored_path(&dst);
let mut rename_file =
lsp_types::RenameFile { old_uri, new_uri, options: None, annotation_id: None };
if snap.analysis.is_library_file(src) == Ok(true)
if snap.analysis.is_library_file(src).ok() == Some(true)
&& snap.config.change_annotation_support()
{
rename_file.annotation_id = Some(outside_workspace_annotation_id())