147: Cancelation r=matklad a=matklad

This series of commits switches the cancellation strategy from `JobToken` (a cancellation token explicitly controlled by the caller) to salsa's built-in auto cancellation. "Auto" means that, as soon as we advance the revision, all pending queries are cancelled automatically — and this looks like the semantics we actually want. 

"Client-side" cancellation is a rare event, and it's OK to just punt on it. Automatic cancellation after the user types something, on the other hand, happens all the time. 

Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
This commit is contained in:
bors[bot] 2018-10-20 20:04:06 +00:00
commit fd336d1134
16 changed files with 229 additions and 279 deletions

1
Cargo.lock generated
View file

@ -609,7 +609,6 @@ dependencies = [
name = "ra_analysis"
version = "0.1.0"
dependencies = [
"crossbeam-channel 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
"fst 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
"im 12.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",

View file

@ -7,7 +7,6 @@ authors = ["Aleksey Kladov <aleksey.kladov@gmail.com>"]
[dependencies]
relative-path = "0.3.7"
log = "0.4.2"
crossbeam-channel = "0.2.4"
parking_lot = "0.6.3"
once_cell = "0.1.5"
rayon = "1.0.2"

View file

@ -1,17 +1,20 @@
use crate::{
module_map::{ModuleDescriptorQuery, ModuleTreeQuery, ModulesDatabase},
symbol_index::SymbolIndex,
FileId, FileResolverImp,
use std::{
fmt,
hash::{Hash, Hasher},
sync::Arc,
};
use ra_editor::LineIndex;
use ra_syntax::File;
use rustc_hash::FxHashSet;
use salsa;
use std::{
fmt,
hash::{Hash, Hasher},
sync::Arc,
use crate::{
db,
Cancelable, Canceled,
module_map::{ModuleDescriptorQuery, ModuleTreeQuery, ModulesDatabase},
symbol_index::SymbolIndex,
FileId, FileResolverImp,
};
#[derive(Default)]
@ -31,6 +34,14 @@ impl salsa::Database for RootDatabase {
}
}
pub(crate) fn check_canceled(db: &impl salsa::Database) -> Cancelable<()> {
if db.salsa_runtime().is_current_revision_canceled() {
Err(Canceled)
} else {
Ok(())
}
}
impl salsa::ParallelDatabase for RootDatabase {
fn fork(&self) -> Self {
RootDatabase {
@ -98,7 +109,7 @@ salsa::query_group! {
fn file_lines(file_id: FileId) -> Arc<LineIndex> {
type FileLinesQuery;
}
fn file_symbols(file_id: FileId) -> Arc<SymbolIndex> {
fn file_symbols(file_id: FileId) -> Cancelable<Arc<SymbolIndex>> {
type FileSymbolsQuery;
}
}
@ -112,7 +123,8 @@ fn file_lines(db: &impl SyntaxDatabase, file_id: FileId) -> Arc<LineIndex> {
let text = db.file_text(file_id);
Arc::new(LineIndex::new(&*text))
}
fn file_symbols(db: &impl SyntaxDatabase, file_id: FileId) -> Arc<SymbolIndex> {
fn file_symbols(db: &impl SyntaxDatabase, file_id: FileId) -> Cancelable<Arc<SymbolIndex>> {
db::check_canceled(db)?;
let syntax = db.file_syntax(file_id);
Arc::new(SymbolIndex::for_file(file_id, syntax))
Ok(Arc::new(SymbolIndex::for_file(file_id, syntax)))
}

View file

@ -1,4 +1,5 @@
use crate::{imp::FileResolverImp, FileId};
use std::collections::BTreeMap;
use ra_syntax::{
ast::{self, AstNode, NameOwner},
text_utils::is_subrange,
@ -6,7 +7,7 @@ use ra_syntax::{
};
use relative_path::RelativePathBuf;
use std::collections::BTreeMap;
use crate::{imp::FileResolverImp, FileId};
#[derive(Debug, PartialEq, Eq, Hash)]
pub struct ModuleDescriptor {

View file

@ -19,8 +19,8 @@ use rustc_hash::FxHashSet;
use crate::{
descriptors::{FnDescriptor, ModuleTreeDescriptor, Problem},
roots::{ReadonlySourceRoot, SourceRoot, WritableSourceRoot},
CrateGraph, CrateId, Diagnostic, FileId, FileResolver, FileSystemEdit, JobToken, Position,
Query, SourceChange, SourceFileEdit,
CrateGraph, CrateId, Diagnostic, FileId, FileResolver, FileSystemEdit, Position,
Query, SourceChange, SourceFileEdit, Cancelable,
};
#[derive(Clone, Debug)]
@ -148,19 +148,21 @@ impl AnalysisImpl {
pub fn file_line_index(&self, file_id: FileId) -> Arc<LineIndex> {
self.root(file_id).lines(file_id)
}
pub fn world_symbols(&self, query: Query, token: &JobToken) -> Vec<(FileId, FileSymbol)> {
pub fn world_symbols(&self, query: Query) -> Cancelable<Vec<(FileId, FileSymbol)>> {
let mut buf = Vec::new();
if query.libs {
self.data.libs.iter().for_each(|it| it.symbols(&mut buf));
for lib in self.data.libs.iter() {
lib.symbols(&mut buf)?;
}
} else {
self.data.root.symbols(&mut buf);
self.data.root.symbols(&mut buf)?;
}
query.search(&buf, token)
Ok(query.search(&buf))
}
pub fn parent_module(&self, file_id: FileId) -> Vec<(FileId, FileSymbol)> {
pub fn parent_module(&self, file_id: FileId) -> Cancelable<Vec<(FileId, FileSymbol)>> {
let root = self.root(file_id);
let module_tree = root.module_tree();
module_tree
let module_tree = root.module_tree()?;
let res = module_tree
.parent_modules(file_id)
.iter()
.map(|link| {
@ -174,10 +176,11 @@ impl AnalysisImpl {
};
(file_id, sym)
})
.collect()
.collect();
Ok(res)
}
pub fn crate_for(&self, file_id: FileId) -> Vec<CrateId> {
let module_tree = self.root(file_id).module_tree();
pub fn crate_for(&self, file_id: FileId) -> Cancelable<Vec<CrateId>> {
let module_tree = self.root(file_id).module_tree()?;
let crate_graph = &self.data.crate_graph;
let mut res = Vec::new();
let mut work = VecDeque::new();
@ -195,7 +198,7 @@ impl AnalysisImpl {
.filter(|&id| visited.insert(id));
work.extend(parents);
}
res
Ok(res)
}
pub fn crate_root(&self, crate_id: CrateId) -> FileId {
self.data.crate_graph.crate_roots[&crate_id]
@ -204,15 +207,14 @@ impl AnalysisImpl {
&self,
file_id: FileId,
offset: TextUnit,
token: &JobToken,
) -> Vec<(FileId, FileSymbol)> {
) -> Cancelable<Vec<(FileId, FileSymbol)>> {
let root = self.root(file_id);
let module_tree = root.module_tree();
let module_tree = root.module_tree()?;
let file = root.syntax(file_id);
let syntax = file.syntax();
if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(syntax, offset) {
// First try to resolve the symbol locally
if let Some((name, range)) = resolve_local_name(&file, offset, name_ref) {
return if let Some((name, range)) = resolve_local_name(&file, offset, name_ref) {
let mut vec = vec![];
vec.push((
file_id,
@ -222,12 +224,11 @@ impl AnalysisImpl {
kind: NAME,
},
));
return vec;
Ok(vec)
} else {
// If that fails try the index based approach.
return self.index_resolve(name_ref, token);
}
self.index_resolve(name_ref)
};
}
if let Some(name) = find_node_at_offset::<ast::Name>(syntax, offset) {
if let Some(module) = name.syntax().parent().and_then(ast::Module::cast) {
@ -250,14 +251,14 @@ impl AnalysisImpl {
})
.collect();
return res;
return Ok(res);
}
}
}
vec![]
Ok(vec![])
}
pub fn find_all_refs(&self, file_id: FileId, offset: TextUnit, _token: &JobToken) -> Vec<(FileId, TextRange)> {
pub fn find_all_refs(&self, file_id: FileId, offset: TextUnit) -> Vec<(FileId, TextRange)> {
let root = self.root(file_id);
let file = root.syntax(file_id);
let syntax = file.syntax();
@ -289,9 +290,9 @@ impl AnalysisImpl {
ret
}
pub fn diagnostics(&self, file_id: FileId) -> Vec<Diagnostic> {
pub fn diagnostics(&self, file_id: FileId) -> Cancelable<Vec<Diagnostic>> {
let root = self.root(file_id);
let module_tree = root.module_tree();
let module_tree = root.module_tree()?;
let syntax = root.syntax(file_id);
let mut res = ra_editor::diagnostics(&syntax)
@ -346,7 +347,7 @@ impl AnalysisImpl {
};
res.push(diag)
}
res
Ok(res)
}
pub fn assists(&self, file_id: FileId, range: TextRange) -> Vec<SourceChange> {
@ -379,18 +380,23 @@ impl AnalysisImpl {
&self,
file_id: FileId,
offset: TextUnit,
token: &JobToken,
) -> Option<(FnDescriptor, Option<usize>)> {
) -> Cancelable<Option<(FnDescriptor, Option<usize>)>> {
let root = self.root(file_id);
let file = root.syntax(file_id);
let syntax = file.syntax();
// Find the calling expression and it's NameRef
let calling_node = FnCallNode::with_node(syntax, offset)?;
let name_ref = calling_node.name_ref()?;
let calling_node = match FnCallNode::with_node(syntax, offset) {
Some(node) => node,
None => return Ok(None),
};
let name_ref = match calling_node.name_ref() {
Some(name) => name,
None => return Ok(None),
};
// Resolve the function's NameRef (NOTE: this isn't entirely accurate).
let file_symbols = self.index_resolve(name_ref, token);
let file_symbols = self.index_resolve(name_ref)?;
for (_, fs) in file_symbols {
if fs.kind == FN_DEF {
if let Some(fn_def) = find_node_at_offset(syntax, fs.node_range.start()) {
@ -432,21 +438,21 @@ impl AnalysisImpl {
}
}
return Some((descriptor, current_parameter));
return Ok(Some((descriptor, current_parameter)));
}
}
}
}
None
Ok(None)
}
fn index_resolve(&self, name_ref: ast::NameRef, token: &JobToken) -> Vec<(FileId, FileSymbol)> {
fn index_resolve(&self, name_ref: ast::NameRef) -> Cancelable<Vec<(FileId, FileSymbol)>> {
let name = name_ref.text();
let mut query = Query::new(name.to_string());
query.exact();
query.limit(4);
self.world_symbols(query, token)
self.world_symbols(query)
}
fn resolve_module(

View file

@ -1,53 +0,0 @@
use crossbeam_channel::{bounded, Receiver, Sender};
pub struct JobHandle {
job_alive: Receiver<Never>,
_job_canceled: Sender<Never>,
}
pub struct JobToken {
_job_alive: Sender<Never>,
job_canceled: Receiver<Never>,
}
impl JobHandle {
pub fn new() -> (JobHandle, JobToken) {
let (sender_alive, receiver_alive) = bounded(0);
let (sender_canceled, receiver_canceled) = bounded(0);
let token = JobToken {
_job_alive: sender_alive,
job_canceled: receiver_canceled,
};
let handle = JobHandle {
job_alive: receiver_alive,
_job_canceled: sender_canceled,
};
(handle, token)
}
pub fn has_completed(&self) -> bool {
is_closed(&self.job_alive)
}
pub fn cancel(self) {}
}
impl JobToken {
pub fn is_canceled(&self) -> bool {
is_closed(&self.job_canceled)
}
}
// We don't actually send messages through the channels,
// and instead just check if the channel is closed,
// so we use uninhabited enum as a message type
enum Never {}
/// Nonblocking
fn is_closed(chan: &Receiver<Never>) -> bool {
select! {
recv(chan, msg) => match msg {
None => true,
Some(never) => match never {}
}
default => false,
}
}

View file

@ -7,8 +7,6 @@ extern crate ra_editor;
extern crate ra_syntax;
extern crate rayon;
extern crate relative_path;
#[macro_use]
extern crate crossbeam_channel;
extern crate im;
extern crate rustc_hash;
extern crate salsa;
@ -16,27 +14,40 @@ extern crate salsa;
mod db;
mod descriptors;
mod imp;
mod job;
mod module_map;
mod roots;
mod symbol_index;
use std::{fmt::Debug, sync::Arc};
use crate::imp::{AnalysisHostImpl, AnalysisImpl, FileResolverImp};
use ra_syntax::{AtomEdit, File, TextRange, TextUnit};
use relative_path::{RelativePath, RelativePathBuf};
use rustc_hash::FxHashMap;
use crate::imp::{AnalysisHostImpl, AnalysisImpl, FileResolverImp};
pub use crate::{
descriptors::FnDescriptor,
job::{JobHandle, JobToken},
};
pub use ra_editor::{
CompletionItem, FileSymbol, Fold, FoldKind, HighlightedRange, LineIndex, Runnable,
RunnableKind, StructureNode,
};
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct Canceled;
pub type Cancelable<T> = Result<T, Canceled>;
impl std::fmt::Display for Canceled {
fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
fmt.write_str("Canceled")
}
}
impl std::error::Error for Canceled {
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct FileId(pub u32);
@ -205,60 +216,57 @@ impl Analysis {
let file = self.imp.file_syntax(file_id);
ra_editor::file_structure(&file)
}
pub fn symbol_search(&self, query: Query, token: &JobToken) -> Vec<(FileId, FileSymbol)> {
self.imp.world_symbols(query, token)
}
pub fn approximately_resolve_symbol(
&self,
file_id: FileId,
offset: TextUnit,
token: &JobToken,
) -> Vec<(FileId, FileSymbol)> {
self.imp
.approximately_resolve_symbol(file_id, offset, token)
}
pub fn find_all_refs(&self, file_id: FileId, offset: TextUnit, token: &JobToken) -> Vec<(FileId, TextRange)> {
self.imp.find_all_refs(file_id, offset, token)
}
pub fn parent_module(&self, file_id: FileId) -> Vec<(FileId, FileSymbol)> {
self.imp.parent_module(file_id)
}
pub fn crate_for(&self, file_id: FileId) -> Vec<CrateId> {
self.imp.crate_for(file_id)
}
pub fn crate_root(&self, crate_id: CrateId) -> FileId {
self.imp.crate_root(crate_id)
}
pub fn runnables(&self, file_id: FileId) -> Vec<Runnable> {
let file = self.imp.file_syntax(file_id);
ra_editor::runnables(&file)
}
pub fn highlight(&self, file_id: FileId) -> Vec<HighlightedRange> {
let file = self.imp.file_syntax(file_id);
ra_editor::highlight(&file)
}
pub fn completions(&self, file_id: FileId, offset: TextUnit) -> Option<Vec<CompletionItem>> {
let file = self.imp.file_syntax(file_id);
ra_editor::scope_completion(&file, offset)
}
pub fn assists(&self, file_id: FileId, range: TextRange) -> Vec<SourceChange> {
self.imp.assists(file_id, range)
}
pub fn diagnostics(&self, file_id: FileId) -> Vec<Diagnostic> {
self.imp.diagnostics(file_id)
}
pub fn folding_ranges(&self, file_id: FileId) -> Vec<Fold> {
let file = self.imp.file_syntax(file_id);
ra_editor::folding_ranges(&file)
}
pub fn symbol_search(&self, query: Query) -> Cancelable<Vec<(FileId, FileSymbol)>> {
self.imp.world_symbols(query)
}
pub fn approximately_resolve_symbol(
&self,
file_id: FileId,
offset: TextUnit
) -> Cancelable<Vec<(FileId, FileSymbol)>> {
self.imp
.approximately_resolve_symbol(file_id, offset)
}
pub fn find_all_refs(&self, file_id: FileId, offset: TextUnit, ) -> Cancelable<Vec<(FileId, TextRange)>> {
Ok(self.imp.find_all_refs(file_id, offset))
}
pub fn parent_module(&self, file_id: FileId) -> Cancelable<Vec<(FileId, FileSymbol)>> {
self.imp.parent_module(file_id)
}
pub fn crate_for(&self, file_id: FileId) -> Cancelable<Vec<CrateId>> {
self.imp.crate_for(file_id)
}
pub fn crate_root(&self, crate_id: CrateId) -> Cancelable<FileId> {
Ok(self.imp.crate_root(crate_id))
}
pub fn runnables(&self, file_id: FileId) -> Cancelable<Vec<Runnable>> {
let file = self.imp.file_syntax(file_id);
Ok(ra_editor::runnables(&file))
}
pub fn highlight(&self, file_id: FileId) -> Cancelable<Vec<HighlightedRange>> {
let file = self.imp.file_syntax(file_id);
Ok(ra_editor::highlight(&file))
}
pub fn completions(&self, file_id: FileId, offset: TextUnit) -> Cancelable<Option<Vec<CompletionItem>>> {
let file = self.imp.file_syntax(file_id);
Ok(ra_editor::scope_completion(&file, offset))
}
pub fn assists(&self, file_id: FileId, range: TextRange) -> Cancelable<Vec<SourceChange>> {
Ok(self.imp.assists(file_id, range))
}
pub fn diagnostics(&self, file_id: FileId) -> Cancelable<Vec<Diagnostic>> {
self.imp.diagnostics(file_id)
}
pub fn resolve_callable(
&self,
file_id: FileId,
offset: TextUnit,
token: &JobToken,
) -> Option<(FnDescriptor, Option<usize>)> {
self.imp.resolve_callable(file_id, offset, token)
) -> Cancelable<Option<(FnDescriptor, Option<usize>)>> {
self.imp.resolve_callable(file_id, offset)
}
}

View file

@ -1,37 +1,41 @@
use std::sync::Arc;
use crate::{
db,
Cancelable,
db::SyntaxDatabase,
descriptors::{ModuleDescriptor, ModuleTreeDescriptor},
FileId,
};
use std::sync::Arc;
salsa::query_group! {
pub(crate) trait ModulesDatabase: SyntaxDatabase {
fn module_tree() -> Arc<ModuleTreeDescriptor> {
fn module_tree() -> Cancelable<Arc<ModuleTreeDescriptor>> {
type ModuleTreeQuery;
}
fn module_descriptor(file_id: FileId) -> Arc<ModuleDescriptor> {
fn module_descriptor(file_id: FileId) -> Cancelable<Arc<ModuleDescriptor>> {
type ModuleDescriptorQuery;
}
}
}
fn module_descriptor(db: &impl ModulesDatabase, file_id: FileId) -> Arc<ModuleDescriptor> {
fn module_descriptor(db: &impl ModulesDatabase, file_id: FileId) -> Cancelable<Arc<ModuleDescriptor>> {
db::check_canceled(db)?;
let file = db.file_syntax(file_id);
Arc::new(ModuleDescriptor::new(file.ast()))
Ok(Arc::new(ModuleDescriptor::new(file.ast())))
}
fn module_tree(db: &impl ModulesDatabase) -> Arc<ModuleTreeDescriptor> {
fn module_tree(db: &impl ModulesDatabase) -> Cancelable<Arc<ModuleTreeDescriptor>> {
db::check_canceled(db)?;
let file_set = db.file_set();
let mut files = Vec::new();
for &file_id in file_set.files.iter() {
let module_descr = db.module_descriptor(file_id);
let module_descr = db.module_descriptor(file_id)?;
files.push((file_id, module_descr));
}
let res = ModuleTreeDescriptor::new(
files.iter().map(|(file_id, descr)| (*file_id, &**descr)),
&file_set.resolver,
);
Arc::new(res)
Ok(Arc::new(res))
}

View file

@ -8,6 +8,7 @@ use rustc_hash::{FxHashMap, FxHashSet};
use salsa::Database;
use crate::{
Cancelable,
db::{self, FilesDatabase, SyntaxDatabase},
descriptors::{ModuleDescriptor, ModuleTreeDescriptor},
imp::FileResolverImp,
@ -18,10 +19,10 @@ use crate::{
pub(crate) trait SourceRoot {
fn contains(&self, file_id: FileId) -> bool;
fn module_tree(&self) -> Arc<ModuleTreeDescriptor>;
fn module_tree(&self) -> Cancelable<Arc<ModuleTreeDescriptor>>;
fn lines(&self, file_id: FileId) -> Arc<LineIndex>;
fn syntax(&self, file_id: FileId) -> File;
fn symbols(&self, acc: &mut Vec<Arc<SymbolIndex>>);
fn symbols(&self, acc: &mut Vec<Arc<SymbolIndex>>) -> Cancelable<()>;
}
#[derive(Default, Debug, Clone)]
@ -64,7 +65,7 @@ impl WritableSourceRoot {
}
impl SourceRoot for WritableSourceRoot {
fn module_tree(&self) -> Arc<ModuleTreeDescriptor> {
fn module_tree(&self) -> Cancelable<Arc<ModuleTreeDescriptor>> {
self.db.module_tree()
}
fn contains(&self, file_id: FileId) -> bool {
@ -76,14 +77,12 @@ impl SourceRoot for WritableSourceRoot {
fn syntax(&self, file_id: FileId) -> File {
self.db.file_syntax(file_id)
}
fn symbols<'a>(&'a self, acc: &mut Vec<Arc<SymbolIndex>>) {
let db = &self.db;
let symbols = db.file_set();
let symbols = symbols
.files
.iter()
.map(|&file_id| db.file_symbols(file_id));
acc.extend(symbols);
fn symbols<'a>(&'a self, acc: &mut Vec<Arc<SymbolIndex>>) -> Cancelable<()> {
for &file_id in self.db.file_set().files.iter() {
let symbols = self.db.file_symbols(file_id)?;
acc.push(symbols)
}
Ok(())
}
}
@ -167,8 +166,8 @@ impl ReadonlySourceRoot {
}
impl SourceRoot for ReadonlySourceRoot {
fn module_tree(&self) -> Arc<ModuleTreeDescriptor> {
Arc::clone(&self.module_tree)
fn module_tree(&self) -> Cancelable<Arc<ModuleTreeDescriptor>> {
Ok(Arc::clone(&self.module_tree))
}
fn contains(&self, file_id: FileId) -> bool {
self.file_map.contains_key(&file_id)
@ -179,7 +178,8 @@ impl SourceRoot for ReadonlySourceRoot {
fn syntax(&self, file_id: FileId) -> File {
self.data(file_id).syntax().clone()
}
fn symbols(&self, acc: &mut Vec<Arc<SymbolIndex>>) {
acc.push(Arc::clone(&self.symbol_index))
fn symbols(&self, acc: &mut Vec<Arc<SymbolIndex>>) -> Cancelable<()> {
acc.push(Arc::clone(&self.symbol_index));
Ok(())
}
}

View file

@ -1,4 +1,8 @@
use crate::{FileId, JobToken, Query};
use std::{
hash::{Hash, Hasher},
sync::Arc,
};
use fst::{self, Streamer};
use ra_editor::{file_symbols, FileSymbol};
use ra_syntax::{
@ -7,10 +11,7 @@ use ra_syntax::{
};
use rayon::prelude::*;
use std::{
hash::{Hash, Hasher},
sync::Arc,
};
use crate::{FileId, Query};
#[derive(Debug)]
pub(crate) struct SymbolIndex {
@ -59,7 +60,6 @@ impl Query {
pub(crate) fn search(
self,
indices: &[Arc<SymbolIndex>],
token: &JobToken,
) -> Vec<(FileId, FileSymbol)> {
let mut op = fst::map::OpBuilder::new();
for file_symbols in indices.iter() {
@ -69,7 +69,7 @@ impl Query {
let mut stream = op.union();
let mut res = Vec::new();
while let Some((_, indexed_values)) = stream.next() {
if res.len() >= self.limit || token.is_canceled() {
if res.len() >= self.limit {
break;
}
for indexed_value in indexed_values {

View file

@ -7,15 +7,15 @@ extern crate test_utils;
use std::sync::Arc;
use ra_analysis::{
Analysis, AnalysisHost, CrateGraph, CrateId, FileId, FileResolver, FnDescriptor, JobHandle,
};
use ra_syntax::TextRange;
use relative_path::{RelativePath, RelativePathBuf};
use rustc_hash::FxHashMap;
use test_utils::{assert_eq_dbg, extract_offset};
use ra_analysis::{
Analysis, AnalysisHost, CrateGraph, CrateId, FileId, FileResolver, FnDescriptor,
};
#[derive(Debug)]
struct FileMap(Vec<(FileId, RelativePathBuf)>);
@ -64,24 +64,22 @@ fn get_signature(text: &str) -> (FnDescriptor, Option<usize>) {
let (offset, code) = extract_offset(text);
let code = code.as_str();
let (_handle, token) = JobHandle::new();
let snap = analysis(&[("/lib.rs", code)]);
snap.resolve_callable(FileId(1), offset, &token).unwrap()
snap.resolve_callable(FileId(1), offset).unwrap().unwrap()
}
#[test]
fn test_resolve_module() {
let snap = analysis(&[("/lib.rs", "mod foo;"), ("/foo.rs", "")]);
let (_handle, token) = JobHandle::new();
let symbols = snap.approximately_resolve_symbol(FileId(1), 4.into(), &token);
let symbols = snap.approximately_resolve_symbol(FileId(1), 4.into()).unwrap();
assert_eq_dbg(
r#"[(FileId(2), FileSymbol { name: "foo", node_range: [0; 0), kind: MODULE })]"#,
&symbols,
);
let snap = analysis(&[("/lib.rs", "mod foo;"), ("/foo/mod.rs", "")]);
let symbols = snap.approximately_resolve_symbol(FileId(1), 4.into(), &token);
let symbols = snap.approximately_resolve_symbol(FileId(1), 4.into()).unwrap();
assert_eq_dbg(
r#"[(FileId(2), FileSymbol { name: "foo", node_range: [0; 0), kind: MODULE })]"#,
&symbols,
@ -91,7 +89,7 @@ fn test_resolve_module() {
#[test]
fn test_unresolved_module_diagnostic() {
let snap = analysis(&[("/lib.rs", "mod foo;")]);
let diagnostics = snap.diagnostics(FileId(1));
let diagnostics = snap.diagnostics(FileId(1)).unwrap();
assert_eq_dbg(
r#"[Diagnostic {
message: "unresolved module",
@ -108,14 +106,14 @@ fn test_unresolved_module_diagnostic() {
#[test]
fn test_unresolved_module_diagnostic_no_diag_for_inline_mode() {
let snap = analysis(&[("/lib.rs", "mod foo {}")]);
let diagnostics = snap.diagnostics(FileId(1));
let diagnostics = snap.diagnostics(FileId(1)).unwrap();
assert_eq_dbg(r#"[]"#, &diagnostics);
}
#[test]
fn test_resolve_parent_module() {
let snap = analysis(&[("/lib.rs", "mod foo;"), ("/foo.rs", "")]);
let symbols = snap.parent_module(FileId(2));
let symbols = snap.parent_module(FileId(2)).unwrap();
assert_eq_dbg(
r#"[(FileId(1), FileSymbol { name: "foo", node_range: [0; 8), kind: MODULE })]"#,
&symbols,
@ -126,7 +124,7 @@ fn test_resolve_parent_module() {
fn test_resolve_crate_root() {
let mut host = analysis_host(&[("/lib.rs", "mod foo;"), ("/foo.rs", "")]);
let snap = host.analysis();
assert!(snap.crate_for(FileId(2)).is_empty());
assert!(snap.crate_for(FileId(2)).unwrap().is_empty());
let crate_graph = CrateGraph {
crate_roots: {
@ -138,7 +136,7 @@ fn test_resolve_crate_root() {
host.set_crate_graph(crate_graph);
let snap = host.analysis();
assert_eq!(snap.crate_for(FileId(2)), vec![CrateId(1)],);
assert_eq!(snap.crate_for(FileId(2)).unwrap(), vec![CrateId(1)],);
}
#[test]
@ -232,10 +230,9 @@ fn get_all_refs(text: &str) -> Vec<(FileId, TextRange)> {
let (offset, code) = extract_offset(text);
let code = code.as_str();
let (_handle, token) = JobHandle::new();
let snap = analysis(&[("/lib.rs", code)]);
snap.find_all_refs(FileId(1), offset, &token)
snap.find_all_refs(FileId(1), offset).unwrap()
}
#[test]

View file

@ -1,12 +1,13 @@
use rustc_hash::FxHashMap;
use std::collections::HashMap;
use rustc_hash::FxHashMap;
use languageserver_types::{
CodeActionResponse, Command, CompletionItem, CompletionItemKind, Diagnostic,
DiagnosticSeverity, DocumentSymbol, FoldingRange, FoldingRangeKind, FoldingRangeParams,
InsertTextFormat, Location, Position, SymbolInformation, TextDocumentIdentifier, TextEdit,
RenameParams, WorkspaceEdit, PrepareRenameResponse
};
use ra_analysis::{FileId, FoldKind, JobToken, Query, RunnableKind};
use ra_analysis::{FileId, FoldKind, Query, RunnableKind};
use ra_syntax::text_utils::contains_offset_nonstrict;
use serde_json::to_value;
@ -18,12 +19,9 @@ use crate::{
Result,
};
use std::collections::HashMap;
pub fn handle_syntax_tree(
world: ServerWorld,
params: req::SyntaxTreeParams,
_token: JobToken,
) -> Result<String> {
let id = params.text_document.try_conv_with(&world)?;
let res = world.analysis().syntax_tree(id);
@ -33,7 +31,6 @@ pub fn handle_syntax_tree(
pub fn handle_extend_selection(
world: ServerWorld,
params: req::ExtendSelectionParams,
_token: JobToken,
) -> Result<req::ExtendSelectionResult> {
let file_id = params.text_document.try_conv_with(&world)?;
let file = world.analysis().file_syntax(file_id);
@ -51,7 +48,6 @@ pub fn handle_extend_selection(
pub fn handle_find_matching_brace(
world: ServerWorld,
params: req::FindMatchingBraceParams,
_token: JobToken,
) -> Result<Vec<Position>> {
let file_id = params.text_document.try_conv_with(&world)?;
let file = world.analysis().file_syntax(file_id);
@ -74,7 +70,6 @@ pub fn handle_find_matching_brace(
pub fn handle_join_lines(
world: ServerWorld,
params: req::JoinLinesParams,
_token: JobToken,
) -> Result<req::SourceChange> {
let file_id = params.text_document.try_conv_with(&world)?;
let line_index = world.analysis().file_line_index(file_id);
@ -88,7 +83,6 @@ pub fn handle_join_lines(
pub fn handle_on_enter(
world: ServerWorld,
params: req::TextDocumentPositionParams,
_token: JobToken,
) -> Result<Option<req::SourceChange>> {
let file_id = params.text_document.try_conv_with(&world)?;
let line_index = world.analysis().file_line_index(file_id);
@ -102,7 +96,6 @@ pub fn handle_on_enter(
pub fn handle_on_type_formatting(
world: ServerWorld,
params: req::DocumentOnTypeFormattingParams,
_token: JobToken,
) -> Result<Option<Vec<TextEdit>>> {
if params.ch != "=" {
return Ok(None);
@ -122,7 +115,6 @@ pub fn handle_on_type_formatting(
pub fn handle_document_symbol(
world: ServerWorld,
params: req::DocumentSymbolParams,
_token: JobToken,
) -> Result<Option<req::DocumentSymbolResponse>> {
let file_id = params.text_document.try_conv_with(&world)?;
let line_index = world.analysis().file_line_index(file_id);
@ -161,7 +153,6 @@ pub fn handle_document_symbol(
pub fn handle_workspace_symbol(
world: ServerWorld,
params: req::WorkspaceSymbolParams,
token: JobToken,
) -> Result<Option<Vec<SymbolInformation>>> {
let all_symbols = params.query.contains("#");
let libs = params.query.contains("*");
@ -181,11 +172,11 @@ pub fn handle_workspace_symbol(
q.limit(128);
q
};
let mut res = exec_query(&world, query, &token)?;
let mut res = exec_query(&world, query)?;
if res.is_empty() && !all_symbols {
let mut query = Query::new(params.query);
query.limit(128);
res = exec_query(&world, query, &token)?;
res = exec_query(&world, query)?;
}
return Ok(Some(res));
@ -193,10 +184,9 @@ pub fn handle_workspace_symbol(
fn exec_query(
world: &ServerWorld,
query: Query,
token: &JobToken,
) -> Result<Vec<SymbolInformation>> {
let mut res = Vec::new();
for (file_id, symbol) in world.analysis().symbol_search(query, token) {
for (file_id, symbol) in world.analysis().symbol_search(query)? {
let line_index = world.analysis().file_line_index(file_id);
let info = SymbolInformation {
name: symbol.name.to_string(),
@ -214,7 +204,6 @@ pub fn handle_workspace_symbol(
pub fn handle_goto_definition(
world: ServerWorld,
params: req::TextDocumentPositionParams,
token: JobToken,
) -> Result<Option<req::GotoDefinitionResponse>> {
let file_id = params.text_document.try_conv_with(&world)?;
let line_index = world.analysis().file_line_index(file_id);
@ -222,7 +211,7 @@ pub fn handle_goto_definition(
let mut res = Vec::new();
for (file_id, symbol) in world
.analysis()
.approximately_resolve_symbol(file_id, offset, &token)
.approximately_resolve_symbol(file_id, offset)?
{
let line_index = world.analysis().file_line_index(file_id);
let location = to_location(file_id, symbol.node_range, &world, &line_index)?;
@ -234,11 +223,10 @@ pub fn handle_goto_definition(
pub fn handle_parent_module(
world: ServerWorld,
params: TextDocumentIdentifier,
_token: JobToken,
) -> Result<Vec<Location>> {
let file_id = params.try_conv_with(&world)?;
let mut res = Vec::new();
for (file_id, symbol) in world.analysis().parent_module(file_id) {
for (file_id, symbol) in world.analysis().parent_module(file_id)? {
let line_index = world.analysis().file_line_index(file_id);
let location = to_location(file_id, symbol.node_range, &world, &line_index)?;
res.push(location);
@ -249,20 +237,19 @@ pub fn handle_parent_module(
pub fn handle_runnables(
world: ServerWorld,
params: req::RunnablesParams,
_token: JobToken,
) -> Result<Vec<req::Runnable>> {
let file_id = params.text_document.try_conv_with(&world)?;
let line_index = world.analysis().file_line_index(file_id);
let offset = params.position.map(|it| it.conv_with(&line_index));
let mut res = Vec::new();
for runnable in world.analysis().runnables(file_id) {
for runnable in world.analysis().runnables(file_id)? {
if let Some(offset) = offset {
if !contains_offset_nonstrict(runnable.range, offset) {
continue;
}
}
let args = runnable_args(&world, file_id, &runnable.kind);
let args = runnable_args(&world, file_id, &runnable.kind)?;
let r = req::Runnable {
range: runnable.range.conv_with(&line_index),
@ -282,9 +269,9 @@ pub fn handle_runnables(
}
return Ok(res);
fn runnable_args(world: &ServerWorld, file_id: FileId, kind: &RunnableKind) -> Vec<String> {
let spec = if let Some(&crate_id) = world.analysis().crate_for(file_id).first() {
let file_id = world.analysis().crate_root(crate_id);
fn runnable_args(world: &ServerWorld, file_id: FileId, kind: &RunnableKind) -> Result<Vec<String>> {
let spec = if let Some(&crate_id) = world.analysis().crate_for(file_id)?.first() {
let file_id = world.analysis().crate_root(crate_id)?;
let path = world.path_map.get_path(file_id);
world
.workspaces
@ -319,7 +306,7 @@ pub fn handle_runnables(
}
}
}
res
Ok(res)
}
fn spec_args(pkg_name: &str, tgt_name: &str, tgt_kind: TargetKind, buf: &mut Vec<String>) {
@ -353,21 +340,19 @@ pub fn handle_runnables(
pub fn handle_decorations(
world: ServerWorld,
params: TextDocumentIdentifier,
_token: JobToken,
) -> Result<Vec<Decoration>> {
let file_id = params.try_conv_with(&world)?;
Ok(highlight(&world, file_id))
highlight(&world, file_id)
}
pub fn handle_completion(
world: ServerWorld,
params: req::CompletionParams,
_token: JobToken,
) -> Result<Option<req::CompletionResponse>> {
let file_id = params.text_document.try_conv_with(&world)?;
let line_index = world.analysis().file_line_index(file_id);
let offset = params.position.conv_with(&line_index);
let items = match world.analysis().completions(file_id, offset) {
let items = match world.analysis().completions(file_id, offset)? {
None => return Ok(None),
Some(items) => items,
};
@ -394,7 +379,6 @@ pub fn handle_completion(
pub fn handle_folding_range(
world: ServerWorld,
params: FoldingRangeParams,
_token: JobToken,
) -> Result<Option<Vec<FoldingRange>>> {
let file_id = params.text_document.try_conv_with(&world)?;
let line_index = world.analysis().file_line_index(file_id);
@ -427,7 +411,6 @@ pub fn handle_folding_range(
pub fn handle_signature_help(
world: ServerWorld,
params: req::TextDocumentPositionParams,
token: JobToken,
) -> Result<Option<req::SignatureHelp>> {
use languageserver_types::{ParameterInformation, SignatureInformation};
@ -436,7 +419,7 @@ pub fn handle_signature_help(
let offset = params.position.conv_with(&line_index);
if let Some((descriptor, active_param)) =
world.analysis().resolve_callable(file_id, offset, &token)
world.analysis().resolve_callable(file_id, offset)?
{
let parameters: Vec<ParameterInformation> = descriptor
.params
@ -466,7 +449,6 @@ pub fn handle_signature_help(
pub fn handle_prepare_rename(
world: ServerWorld,
params: req::TextDocumentPositionParams,
token: JobToken,
) -> Result<Option<PrepareRenameResponse>> {
let file_id = params.text_document.try_conv_with(&world)?;
let line_index = world.analysis().file_line_index(file_id);
@ -474,7 +456,7 @@ pub fn handle_prepare_rename(
// We support renaming references like handle_rename does.
// In the future we may want to reject the renaming of things like keywords here too.
let refs = world.analysis().find_all_refs(file_id, offset, &token);
let refs = world.analysis().find_all_refs(file_id, offset)?;
if refs.is_empty() {
return Ok(None);
}
@ -488,7 +470,6 @@ pub fn handle_prepare_rename(
pub fn handle_rename(
world: ServerWorld,
params: RenameParams,
token: JobToken,
) -> Result<Option<WorkspaceEdit>> {
let file_id = params.text_document.try_conv_with(&world)?;
let line_index = world.analysis().file_line_index(file_id);
@ -498,7 +479,7 @@ pub fn handle_rename(
return Ok(None);
}
let refs = world.analysis().find_all_refs(file_id, offset, &token);
let refs = world.analysis().find_all_refs(file_id, offset)?;
if refs.is_empty() {
return Ok(None);
}
@ -525,13 +506,12 @@ pub fn handle_rename(
pub fn handle_references(
world: ServerWorld,
params: req::ReferenceParams,
token: JobToken,
) -> Result<Option<Vec<Location>>> {
let file_id = params.text_document.try_conv_with(&world)?;
let line_index = world.analysis().file_line_index(file_id);
let offset = params.position.conv_with(&line_index);
let refs = world.analysis().find_all_refs(file_id, offset, &token);
let refs = world.analysis().find_all_refs(file_id, offset)?;
Ok(Some(refs.into_iter()
.filter_map(|r| to_location(r.0, r.1, &world, &line_index).ok())
@ -541,16 +521,15 @@ pub fn handle_references(
pub fn handle_code_action(
world: ServerWorld,
params: req::CodeActionParams,
_token: JobToken,
) -> Result<Option<CodeActionResponse>> {
let file_id = params.text_document.try_conv_with(&world)?;
let line_index = world.analysis().file_line_index(file_id);
let range = params.range.conv_with(&line_index);
let assists = world.analysis().assists(file_id, range).into_iter();
let assists = world.analysis().assists(file_id, range)?.into_iter();
let fixes = world
.analysis()
.diagnostics(file_id)
.diagnostics(file_id)?
.into_iter()
.filter_map(|d| Some((d.range, d.fix?)))
.filter(|(range, _fix)| contains_offset_nonstrict(*range, range.start()))
@ -579,7 +558,7 @@ pub fn publish_diagnostics(
let line_index = world.analysis().file_line_index(file_id);
let diagnostics = world
.analysis()
.diagnostics(file_id)
.diagnostics(file_id)?
.into_iter()
.map(|d| Diagnostic {
range: d.range.conv_with(&line_index),
@ -600,19 +579,20 @@ pub fn publish_decorations(
let uri = world.file_id_to_uri(file_id)?;
Ok(req::PublishDecorationsParams {
uri,
decorations: highlight(&world, file_id),
decorations: highlight(&world, file_id)?,
})
}
fn highlight(world: &ServerWorld, file_id: FileId) -> Vec<Decoration> {
fn highlight(world: &ServerWorld, file_id: FileId) -> Result<Vec<Decoration>> {
let line_index = world.analysis().file_line_index(file_id);
world
let res = world
.analysis()
.highlight(file_id)
.highlight(file_id)?
.into_iter()
.map(|h| Decoration {
range: h.range.conv_with(&line_index),
tag: h.tag,
})
.collect()
.collect();
Ok(res)
}

View file

@ -8,9 +8,9 @@ use gen_lsp_server::{
handle_shutdown, ErrorCode, RawMessage, RawNotification, RawRequest, RawResponse,
};
use languageserver_types::NumberOrString;
use ra_analysis::{FileId, JobHandle, JobToken, LibraryData};
use ra_analysis::{FileId, LibraryData};
use rayon::{self, ThreadPool};
use rustc_hash::FxHashMap;
use rustc_hash::FxHashSet;
use serde::{de::DeserializeOwned, Serialize};
use crate::{
@ -47,7 +47,7 @@ pub fn main_loop(
info!("server initialized, serving requests");
let mut state = ServerWorldState::new();
let mut pending_requests = FxHashMap::default();
let mut pending_requests = FxHashSet::default();
let mut subs = Subscriptions::new();
let main_res = main_loop_inner(
internal_mode,
@ -92,7 +92,7 @@ fn main_loop_inner(
fs_worker: Worker<PathBuf, (PathBuf, Vec<FileEvent>)>,
ws_worker: Worker<PathBuf, Result<CargoWorkspace>>,
state: &mut ServerWorldState,
pending_requests: &mut FxHashMap<u64, JobHandle>,
pending_requests: &mut FxHashSet<u64>,
subs: &mut Subscriptions,
) -> Result<()> {
let (libdata_sender, libdata_receiver) = unbounded();
@ -204,22 +204,21 @@ fn main_loop_inner(
fn on_task(
task: Task,
msg_sender: &Sender<RawMessage>,
pending_requests: &mut FxHashMap<u64, JobHandle>,
pending_requests: &mut FxHashSet<u64>,
) {
match task {
Task::Respond(response) => {
if let Some(handle) = pending_requests.remove(&response.id) {
assert!(handle.has_completed());
}
if pending_requests.remove(&response.id) {
msg_sender.send(RawMessage::Response(response))
}
}
Task::Notify(n) => msg_sender.send(RawMessage::Notification(n)),
}
}
fn on_request(
world: &mut ServerWorldState,
pending_requests: &mut FxHashMap<u64, JobHandle>,
pending_requests: &mut FxHashSet<u64>,
pool: &ThreadPool,
sender: &Sender<Task>,
req: RawRequest,
@ -253,8 +252,8 @@ fn on_request(
.on::<req::References>(handlers::handle_references)?
.finish();
match req {
Ok((id, handle)) => {
let inserted = pending_requests.insert(id, handle).is_none();
Ok(id) => {
let inserted = pending_requests.insert(id);
assert!(inserted, "duplicate request: {}", id);
Ok(None)
}
@ -265,7 +264,7 @@ fn on_request(
fn on_notification(
msg_sender: &Sender<RawMessage>,
state: &mut ServerWorldState,
pending_requests: &mut FxHashMap<u64, JobHandle>,
pending_requests: &mut FxHashSet<u64>,
subs: &mut Subscriptions,
not: RawNotification,
) -> Result<()> {
@ -277,9 +276,7 @@ fn on_notification(
panic!("string id's not supported: {:?}", id);
}
};
if let Some(handle) = pending_requests.remove(&id) {
handle.cancel();
}
pending_requests.remove(&id);
return Ok(());
}
Err(not) => not,
@ -336,7 +333,7 @@ fn on_notification(
struct PoolDispatcher<'a> {
req: Option<RawRequest>,
res: Option<(u64, JobHandle)>,
res: Option<u64>,
pool: &'a ThreadPool,
world: &'a ServerWorldState,
sender: &'a Sender<Task>,
@ -345,7 +342,7 @@ struct PoolDispatcher<'a> {
impl<'a> PoolDispatcher<'a> {
fn on<'b, R>(
&'b mut self,
f: fn(ServerWorld, R::Params, JobToken) -> Result<R::Result>,
f: fn(ServerWorld, R::Params) -> Result<R::Result>,
) -> Result<&'b mut Self>
where
R: req::Request,
@ -358,11 +355,10 @@ impl<'a> PoolDispatcher<'a> {
};
match req.cast::<R>() {
Ok((id, params)) => {
let (handle, token) = JobHandle::new();
let world = self.world.snapshot();
let sender = self.sender.clone();
self.pool.spawn(move || {
let resp = match f(world, params, token) {
let resp = match f(world, params) {
Ok(resp) => RawResponse::ok::<R>(id, &resp),
Err(e) => {
RawResponse::err(id, ErrorCode::InternalError as i32, e.to_string())
@ -371,14 +367,14 @@ impl<'a> PoolDispatcher<'a> {
let task = Task::Respond(resp);
sender.send(task);
});
self.res = Some((id, handle));
self.res = Some(id);
}
Err(req) => self.req = Some(req),
}
Ok(self)
}
fn finish(&mut self) -> ::std::result::Result<(u64, JobHandle), RawRequest> {
fn finish(&mut self) -> ::std::result::Result<u64, RawRequest> {
match (self.res.take(), self.req.take()) {
(Some(res), None) => Ok(res),
(None, Some(req)) => Err(req),

View file

@ -1,9 +1,9 @@
use std::path::{Component, Path, PathBuf};
use im;
use ra_analysis::{FileId, FileResolver};
use relative_path::RelativePath;
use std::path::{Component, Path, PathBuf};
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Root {
Workspace,

View file

@ -1,9 +1,9 @@
use std::path::{Path, PathBuf};
use cargo_metadata::{metadata_run, CargoOpt};
use ra_syntax::SmolStr;
use rustc_hash::{FxHashMap, FxHashSet};
use std::path::{Path, PathBuf};
use crate::{
thread_watcher::{ThreadWatcher, Worker},
Result,

View file

@ -1,8 +1,9 @@
use crate::Result;
use std::thread;
use crossbeam_channel::{bounded, unbounded, Receiver, Sender};
use drop_bomb::DropBomb;
use std::thread;
use crate::Result;
pub struct Worker<I, O> {
pub inp: Sender<I>,