fix usages after rename

Aleksey Kladov 2019-01-08 22:33:36 +03:00
parent 6bca91af53
commit 5b573deb20
30 changed files with 4280 additions and 31 deletions

Cargo.lock (generated)

@@ -636,27 +636,6 @@ dependencies = [
"proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "ra_analysis"
version = "0.1.0"
dependencies = [
"fst 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
"ra_db 0.1.0",
"ra_hir 0.1.0",
"ra_ide_api_light 0.1.0",
"ra_syntax 0.1.0",
"ra_text_edit 0.1.0",
"rayon 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
"relative-path 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"salsa 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)",
"test_utils 0.1.0",
"unicase 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "ra_arena"
version = "0.1.0"
@@ -704,6 +683,27 @@ dependencies = [
"test_utils 0.1.0",
]
[[package]]
name = "ra_ide_api"
version = "0.1.0"
dependencies = [
"fst 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
"ra_db 0.1.0",
"ra_hir 0.1.0",
"ra_ide_api_light 0.1.0",
"ra_syntax 0.1.0",
"ra_text_edit 0.1.0",
"rayon 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
"relative-path 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"salsa 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)",
"test_utils 0.1.0",
"unicase 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "ra_ide_api_light"
version = "0.1.0"
@@ -733,7 +733,7 @@ dependencies = [
"languageserver-types 0.53.1 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
"ra_analysis 0.1.0",
"ra_ide_api 0.1.0",
"ra_syntax 0.1.0",
"ra_text_edit 0.1.0",
"ra_vfs 0.1.0",

crates/ra_ide_api/Cargo.toml

@@ -0,0 +1,23 @@
[package]
edition = "2018"
name = "ra_ide_api"
version = "0.1.0"
authors = ["Aleksey Kladov <aleksey.kladov@gmail.com>"]
[dependencies]
itertools = "0.8.0"
log = "0.4.5"
relative-path = "0.4.0"
rayon = "1.0.2"
fst = "0.3.1"
salsa = "0.9.1"
rustc-hash = "1.0"
parking_lot = "0.7.0"
unicase = "2.2.0"
ra_syntax = { path = "../ra_syntax" }
ra_ide_api_light = { path = "../ra_ide_api_light" }
ra_text_edit = { path = "../ra_text_edit" }
ra_db = { path = "../ra_db" }
hir = { path = "../ra_hir", package = "ra_hir" }
test_utils = { path = "../test_utils" }
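Note the renamed dependency above: `package = "ra_hir"` imports the crate under the shorter local name `hir`, which is why the new sources in this diff write `use hir::...` rather than `use ra_hir::...`.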

crates/ra_ide_api/src/call_info.rs

@@ -0,0 +1,451 @@
use std::cmp::{max, min};
use ra_db::{SyntaxDatabase, Cancelable};
use ra_syntax::{
AstNode, SyntaxNode, TextUnit, TextRange,
SyntaxKind::FN_DEF,
ast::{self, ArgListOwner, DocCommentsOwner},
algo::find_node_at_offset,
};
use crate::{FilePosition, CallInfo, db::RootDatabase};
/// Computes parameter information for the given call expression.
pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Cancelable<Option<CallInfo>> {
let file = db.source_file(position.file_id);
let syntax = file.syntax();
// Find the calling expression and its NameRef
let calling_node = ctry!(FnCallNode::with_node(syntax, position.offset));
let name_ref = ctry!(calling_node.name_ref());
// Resolve the function's NameRef (NOTE: this isn't entirely accurate).
let file_symbols = db.index_resolve(name_ref)?;
let symbol = ctry!(file_symbols.into_iter().find(|it| it.ptr.kind() == FN_DEF));
let fn_file = db.source_file(symbol.file_id);
let fn_def = symbol.ptr.resolve(&fn_file);
let fn_def = ast::FnDef::cast(&fn_def).unwrap();
let mut call_info = ctry!(CallInfo::new(fn_def));
// If we have a calling expression, let's find which argument we are on
let num_params = call_info.parameters.len();
let has_self = fn_def.param_list().and_then(|l| l.self_param()).is_some();
if num_params == 1 {
if !has_self {
call_info.active_parameter = Some(0);
}
} else if num_params > 1 {
// Count how many parameters into the call we are.
// TODO: This is best effort for now and should be fixed at some point.
// It may be better to see where we are in the arg_list and then check
// where offset is in that list (or beyond).
// Revisit this after we get documentation comments in.
if let Some(ref arg_list) = calling_node.arg_list() {
let start = arg_list.syntax().range().start();
let range_search = TextRange::from_to(start, position.offset);
let mut commas: usize = arg_list
.syntax()
.text()
.slice(range_search)
.to_string()
.matches(',')
.count();
// If we have a method call, eat the first param since it's just self.
if has_self {
commas += 1;
}
call_info.active_parameter = Some(commas);
}
}
Ok(Some(call_info))
}
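An aside on the heuristic above: the active parameter is determined purely by counting commas between the start of the argument list and the caret. A minimal standalone sketch of the same idea (hypothetical helper, not part of this commit):

fn active_parameter(text: &str, args_start: usize, cursor: usize) -> usize {
    // Every comma before the caret moves the active argument one slot to the right.
    text[args_start..cursor].matches(',').count()
}

// active_parameter("foo(1, 2, )", 3, 10) == 2: the caret sits on the third argument.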
enum FnCallNode<'a> {
CallExpr(&'a ast::CallExpr),
MethodCallExpr(&'a ast::MethodCallExpr),
}
impl<'a> FnCallNode<'a> {
pub fn with_node(syntax: &'a SyntaxNode, offset: TextUnit) -> Option<FnCallNode<'a>> {
if let Some(expr) = find_node_at_offset::<ast::CallExpr>(syntax, offset) {
return Some(FnCallNode::CallExpr(expr));
}
if let Some(expr) = find_node_at_offset::<ast::MethodCallExpr>(syntax, offset) {
return Some(FnCallNode::MethodCallExpr(expr));
}
None
}
pub fn name_ref(&self) -> Option<&'a ast::NameRef> {
match *self {
FnCallNode::CallExpr(call_expr) => Some(match call_expr.expr()?.kind() {
ast::ExprKind::PathExpr(path_expr) => path_expr.path()?.segment()?.name_ref()?,
_ => return None,
}),
FnCallNode::MethodCallExpr(call_expr) => call_expr
.syntax()
.children()
.filter_map(ast::NameRef::cast)
.nth(0),
}
}
pub fn arg_list(&self) -> Option<&'a ast::ArgList> {
match *self {
FnCallNode::CallExpr(expr) => expr.arg_list(),
FnCallNode::MethodCallExpr(expr) => expr.arg_list(),
}
}
}
impl CallInfo {
fn new(node: &ast::FnDef) -> Option<Self> {
let mut doc = None;
// Strip the body out for the label.
let mut label: String = if let Some(body) = node.body() {
let body_range = body.syntax().range();
let label: String = node
.syntax()
.children()
.filter(|child| !child.range().is_subrange(&body_range))
.map(|node| node.text().to_string())
.collect();
label
} else {
node.syntax().text().to_string()
};
if let Some((comment_range, docs)) = extract_doc_comments(node) {
let comment_range = comment_range
.checked_sub(node.syntax().range().start())
.unwrap();
let start = comment_range.start().to_usize();
let end = comment_range.end().to_usize();
// Remove the comment from the label
label.replace_range(start..end, "");
// Massage markdown
let mut processed_lines = Vec::new();
let mut in_code_block = false;
for line in docs.lines() {
if line.starts_with("```") {
in_code_block = !in_code_block;
}
let line = if in_code_block && line.starts_with("```") && !line.contains("rust") {
"```rust".into()
} else {
line.to_string()
};
processed_lines.push(line);
}
if !processed_lines.is_empty() {
doc = Some(processed_lines.join("\n"));
}
}
Some(CallInfo {
parameters: param_list(node),
label: label.trim().to_owned(),
doc,
active_parameter: None,
})
}
}
fn extract_doc_comments(node: &ast::FnDef) -> Option<(TextRange, String)> {
if node.doc_comments().count() == 0 {
return None;
}
let comment_text = node.doc_comment_text();
let (begin, end) = node
.doc_comments()
.map(|comment| comment.syntax().range())
.map(|range| (range.start().to_usize(), range.end().to_usize()))
.fold((std::usize::MAX, std::usize::MIN), |acc, range| {
(min(acc.0, range.0), max(acc.1, range.1))
});
let range = TextRange::from_to(TextUnit::from_usize(begin), TextUnit::from_usize(end));
Some((range, comment_text))
}
fn param_list(node: &ast::FnDef) -> Vec<String> {
let mut res = vec![];
if let Some(param_list) = node.param_list() {
if let Some(self_param) = param_list.self_param() {
res.push(self_param.syntax().text().to_string())
}
// Maybe use param.pat here? See if we can just extract the name?
//res.extend(param_list.params().map(|p| p.syntax().text().to_string()));
res.extend(
param_list
.params()
.filter_map(|p| p.pat())
.map(|pat| pat.syntax().text().to_string()),
);
}
res
}
#[cfg(test)]
mod tests {
use super::*;
use crate::mock_analysis::single_file_with_position;
fn call_info(text: &str) -> CallInfo {
let (analysis, position) = single_file_with_position(text);
analysis.call_info(position).unwrap().unwrap()
}
#[test]
fn test_fn_signature_two_args_first() {
let info = call_info(
r#"fn foo(x: u32, y: u32) -> u32 {x + y}
fn bar() { foo(<|>3, ); }"#,
);
assert_eq!(info.parameters, vec!("x".to_string(), "y".to_string()));
assert_eq!(info.active_parameter, Some(0));
}
#[test]
fn test_fn_signature_two_args_second() {
let info = call_info(
r#"fn foo(x: u32, y: u32) -> u32 {x + y}
fn bar() { foo(3, <|>); }"#,
);
assert_eq!(info.parameters, vec!("x".to_string(), "y".to_string()));
assert_eq!(info.active_parameter, Some(1));
}
#[test]
fn test_fn_signature_for_impl() {
let info = call_info(
r#"struct F; impl F { pub fn new() { F{}} }
fn bar() {let _ : F = F::new(<|>);}"#,
);
assert_eq!(info.parameters, Vec::<String>::new());
assert_eq!(info.active_parameter, None);
}
#[test]
fn test_fn_signature_for_method_self() {
let info = call_info(
r#"struct F;
impl F {
pub fn new() -> F{
F{}
}
pub fn do_it(&self) {}
}
fn bar() {
let f : F = F::new();
f.do_it(<|>);
}"#,
);
assert_eq!(info.parameters, vec!["&self".to_string()]);
assert_eq!(info.active_parameter, None);
}
#[test]
fn test_fn_signature_for_method_with_arg() {
let info = call_info(
r#"struct F;
impl F {
pub fn new() -> F{
F{}
}
pub fn do_it(&self, x: i32) {}
}
fn bar() {
let f : F = F::new();
f.do_it(<|>);
}"#,
);
assert_eq!(info.parameters, vec!["&self".to_string(), "x".to_string()]);
assert_eq!(info.active_parameter, Some(1));
}
#[test]
fn test_fn_signature_with_docs_simple() {
let info = call_info(
r#"
/// test
// non-doc-comment
fn foo(j: u32) -> u32 {
j
}
fn bar() {
let _ = foo(<|>);
}
"#,
);
assert_eq!(info.parameters, vec!["j".to_string()]);
assert_eq!(info.active_parameter, Some(0));
assert_eq!(info.label, "fn foo(j: u32) -> u32".to_string());
assert_eq!(info.doc, Some("test".into()));
}
#[test]
fn test_fn_signature_with_docs() {
let info = call_info(
r#"
/// Adds one to the number given.
///
/// # Examples
///
/// ```
/// let five = 5;
///
/// assert_eq!(6, my_crate::add_one(5));
/// ```
pub fn add_one(x: i32) -> i32 {
x + 1
}
pub fn do() {
add_one(<|>
}"#,
);
assert_eq!(info.parameters, vec!["x".to_string()]);
assert_eq!(info.active_parameter, Some(0));
assert_eq!(info.label, "pub fn add_one(x: i32) -> i32".to_string());
assert_eq!(
info.doc,
Some(
r#"Adds one to the number given.
# Examples
```rust
let five = 5;
assert_eq!(6, my_crate::add_one(5));
```"#
.into()
)
);
}
#[test]
fn test_fn_signature_with_docs_impl() {
let info = call_info(
r#"
struct addr;
impl addr {
/// Adds one to the number given.
///
/// # Examples
///
/// ```
/// let five = 5;
///
/// assert_eq!(6, my_crate::add_one(5));
/// ```
pub fn add_one(x: i32) -> i32 {
x + 1
}
}
pub fn do_it() {
addr {};
addr::add_one(<|>);
}"#,
);
assert_eq!(info.parameters, vec!["x".to_string()]);
assert_eq!(info.active_parameter, Some(0));
assert_eq!(info.label, "pub fn add_one(x: i32) -> i32".to_string());
assert_eq!(
info.doc,
Some(
r#"Adds one to the number given.
# Examples
```rust
let five = 5;
assert_eq!(6, my_crate::add_one(5));
```"#
.into()
)
);
}
#[test]
fn test_fn_signature_with_docs_from_actix() {
let info = call_info(
r#"
pub trait WriteHandler<E>
where
Self: Actor,
Self::Context: ActorContext,
{
/// Method is called when writer emits error.
///
/// If this method returns `ErrorAction::Continue` writer processing
/// continues otherwise stream processing stops.
fn error(&mut self, err: E, ctx: &mut Self::Context) -> Running {
Running::Stop
}
/// Method is called when writer finishes.
///
/// By default this method stops actor's `Context`.
fn finished(&mut self, ctx: &mut Self::Context) {
ctx.stop()
}
}
pub fn foo() {
WriteHandler r;
r.finished(<|>);
}
"#,
);
assert_eq!(
info.parameters,
vec!["&mut self".to_string(), "ctx".to_string()]
);
assert_eq!(info.active_parameter, Some(1));
assert_eq!(
info.doc,
Some(
r#"Method is called when writer finishes.
By default this method stops actor's `Context`."#
.into()
)
);
}
}

crates/ra_ide_api/src/completion.rs

@@ -0,0 +1,77 @@
mod completion_item;
mod completion_context;
mod complete_dot;
mod complete_fn_param;
mod complete_keyword;
mod complete_snippet;
mod complete_path;
mod complete_scope;
use ra_db::SyntaxDatabase;
use crate::{
db,
Cancelable, FilePosition,
completion::{
completion_item::{Completions, CompletionKind},
completion_context::CompletionContext,
},
};
pub use crate::completion::completion_item::{CompletionItem, InsertText, CompletionItemKind};
/// Main entry point for completion. We run completion as a two-phase process.
///
/// First, we look at the position and collect a so-called `CompletionContext`.
/// This is a somewhat messy process, because, during completion, the syntax
/// tree is incomplete and can look really weird.
///
/// Once the context is collected, we run a series of completion routines which
/// look at the context and produce completion items. One subtlety about this
/// phase is that the completion engine should not filter by the substring
/// which is already present; it should give all possible variants for the
/// identifier at the caret. In other words, for
///
/// ```no-run
/// fn f() {
/// let foo = 92;
/// let _ = bar<|>
/// }
/// ```
///
/// `foo` *should* be present among the completion variants. Filtering by
/// identifier prefix/fuzzy match should be done higher in the stack, together
/// with ordering of completions (currently this is done by the client).
pub(crate) fn completions(
db: &db::RootDatabase,
position: FilePosition,
) -> Cancelable<Option<Completions>> {
let original_file = db.source_file(position.file_id);
let ctx = ctry!(CompletionContext::new(db, &original_file, position)?);
let mut acc = Completions::default();
complete_fn_param::complete_fn_param(&mut acc, &ctx);
complete_keyword::complete_expr_keyword(&mut acc, &ctx);
complete_keyword::complete_use_tree_keyword(&mut acc, &ctx);
complete_snippet::complete_expr_snippet(&mut acc, &ctx);
complete_snippet::complete_item_snippet(&mut acc, &ctx);
complete_path::complete_path(&mut acc, &ctx)?;
complete_scope::complete_scope(&mut acc, &ctx)?;
complete_dot::complete_dot(&mut acc, &ctx)?;
Ok(Some(acc))
}
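Since the engine deliberately refuses to filter, prefix matching and ranking live above this layer (currently in the client). A hedged sketch of what that higher layer might do with the returned items, using only the `CompletionItem` API defined later in this diff (`filter_by_prefix` is a hypothetical name):

fn filter_by_prefix(items: Vec<CompletionItem>, prefix: &str) -> Vec<CompletionItem> {
    items
        .into_iter()
        .filter(|item| item.lookup().starts_with(prefix))
        .collect()
}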
#[cfg(test)]
fn check_completion(code: &str, expected_completions: &str, kind: CompletionKind) {
use crate::mock_analysis::{single_file_with_position, analysis_and_position};
let (analysis, position) = if code.contains("//-") {
analysis_and_position(code)
} else {
single_file_with_position(code)
};
let completions = completions(&analysis.db, position).unwrap().unwrap();
completions.assert_match(expected_completions, kind);
}

crates/ra_ide_api/src/completion/complete_dot.rs

@@ -0,0 +1,121 @@
use hir::{Ty, Def};
use crate::Cancelable;
use crate::completion::{CompletionContext, Completions, CompletionKind, CompletionItem, CompletionItemKind};
/// Complete dot accesses, i.e. fields or methods (currently only fields).
pub(super) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) -> Cancelable<()> {
let (function, receiver) = match (&ctx.function, ctx.dot_receiver) {
(Some(function), Some(receiver)) => (function, receiver),
_ => return Ok(()),
};
let infer_result = function.infer(ctx.db)?;
let syntax_mapping = function.body_syntax_mapping(ctx.db)?;
let expr = match syntax_mapping.node_expr(receiver) {
Some(expr) => expr,
None => return Ok(()),
};
let receiver_ty = infer_result[expr].clone();
if !ctx.is_method_call {
complete_fields(acc, ctx, receiver_ty)?;
}
Ok(())
}
fn complete_fields(acc: &mut Completions, ctx: &CompletionContext, receiver: Ty) -> Cancelable<()> {
for receiver in receiver.autoderef(ctx.db) {
match receiver {
Ty::Adt { def_id, .. } => {
match def_id.resolve(ctx.db)? {
Def::Struct(s) => {
let variant_data = s.variant_data(ctx.db)?;
for field in variant_data.fields() {
CompletionItem::new(
CompletionKind::Reference,
field.name().to_string(),
)
.kind(CompletionItemKind::Field)
.add_to(acc);
}
}
// TODO unions
_ => {}
}
}
Ty::Tuple(fields) => {
for (i, _ty) in fields.iter().enumerate() {
CompletionItem::new(CompletionKind::Reference, i.to_string())
.kind(CompletionItemKind::Field)
.add_to(acc);
}
}
_ => {}
};
}
Ok(())
}
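Struct fields are what the tests below exercise; the `Ty::Tuple` branch additionally offers positional fields as indices. For example (illustrative, not one of this commit's tests):

fn foo(t: (i32, &str)) {
    t.<|>   // completes `0` and `1`
}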
#[cfg(test)]
mod tests {
use crate::completion::*;
fn check_ref_completion(code: &str, expected_completions: &str) {
check_completion(code, expected_completions, CompletionKind::Reference);
}
#[test]
fn test_struct_field_completion() {
check_ref_completion(
r"
struct A { the_field: u32 }
fn foo(a: A) {
a.<|>
}
",
r#"the_field"#,
);
}
#[test]
fn test_struct_field_completion_self() {
check_ref_completion(
r"
struct A { the_field: u32 }
impl A {
fn foo(self) {
self.<|>
}
}
",
r#"the_field"#,
);
}
#[test]
fn test_struct_field_completion_autoderef() {
check_ref_completion(
r"
struct A { the_field: u32 }
impl A {
fn foo(&self) {
self.<|>
}
}
",
r#"the_field"#,
);
}
#[test]
fn test_no_struct_field_completion_for_method_call() {
check_ref_completion(
r"
struct A { the_field: u32 }
fn foo(a: A) {
a.<|>()
}
",
r#""#,
);
}
}

crates/ra_ide_api/src/completion/complete_fn_param.rs

@@ -0,0 +1,102 @@
use ra_syntax::{
algo::visit::{visitor_ctx, VisitorCtx},
ast,
AstNode,
};
use rustc_hash::FxHashMap;
use crate::completion::{CompletionContext, Completions, CompletionKind, CompletionItem};
/// Complete repeated parameters, both name and type. For example, if all
/// functions in a file have a `spam: &mut Spam` parameter, a completion with
/// `spam: &mut Spam` insert text/label and `spam` lookup string will be
/// suggested.
pub(super) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext) {
if !ctx.is_param {
return;
}
let mut params = FxHashMap::default();
for node in ctx.leaf.ancestors() {
let _ = visitor_ctx(&mut params)
.visit::<ast::SourceFile, _>(process)
.visit::<ast::ItemList, _>(process)
.accept(node);
}
params
.into_iter()
.filter_map(|(label, (count, param))| {
let lookup = param.pat()?.syntax().text().to_string();
if count < 2 {
None
} else {
Some((label, lookup))
}
})
.for_each(|(label, lookup)| {
CompletionItem::new(CompletionKind::Magic, label)
.lookup_by(lookup)
.add_to(acc)
});
fn process<'a, N: ast::FnDefOwner>(
node: &'a N,
params: &mut FxHashMap<String, (u32, &'a ast::Param)>,
) {
node.functions()
.filter_map(|it| it.param_list())
.flat_map(|it| it.params())
.for_each(|param| {
let text = param.syntax().text().to_string();
params.entry(text).or_insert((0, param)).0 += 1;
})
}
}
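The heart of the routine is the frequency map built by `process`: a parameter is suggested only when the exact same `name: type` text occurs at least twice among the visited functions. The same counting step in isolation, as a sketch over plain strings rather than the real AST types:

use std::collections::HashMap;

fn repeated_params(param_texts: &[&str]) -> Vec<String> {
    let mut counts: HashMap<&str, u32> = HashMap::new();
    for &text in param_texts {
        *counts.entry(text).or_insert(0) += 1;
    }
    counts
        .into_iter()
        .filter(|&(_, count)| count >= 2)
        .map(|(text, _)| text.to_string())
        .collect()
}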
#[cfg(test)]
mod tests {
use crate::completion::*;
fn check_magic_completion(code: &str, expected_completions: &str) {
check_completion(code, expected_completions, CompletionKind::Magic);
}
#[test]
fn test_param_completion_last_param() {
check_magic_completion(
r"
fn foo(file_id: FileId) {}
fn bar(file_id: FileId) {}
fn baz(file<|>) {}
",
r#"file_id "file_id: FileId""#,
);
}
#[test]
fn test_param_completion_nth_param() {
check_magic_completion(
r"
fn foo(file_id: FileId) {}
fn bar(file_id: FileId) {}
fn baz(file<|>, x: i32) {}
",
r#"file_id "file_id: FileId""#,
);
}
#[test]
fn test_param_completion_trait_param() {
check_magic_completion(
r"
pub(crate) trait SourceRoot {
pub fn contains(&self, file_id: FileId) -> bool;
pub fn module_map(&self) -> &ModuleMap;
pub fn lines(&self, file_id: FileId) -> &LineIndex;
pub fn syntax(&self, file<|>)
}
",
r#"file_id "file_id: FileId""#,
);
}
}

crates/ra_ide_api/src/completion/complete_keyword.rs

@@ -0,0 +1,339 @@
use ra_syntax::{
algo::visit::{visitor, Visitor},
AstNode,
ast::{self, LoopBodyOwner},
SyntaxKind::*, SyntaxNode,
};
use crate::completion::{CompletionContext, CompletionItem, Completions, CompletionKind, CompletionItemKind};
pub(super) fn complete_use_tree_keyword(acc: &mut Completions, ctx: &CompletionContext) {
// complete keyword "crate" in use stmt
match (ctx.use_item_syntax.as_ref(), ctx.path_prefix.as_ref()) {
(Some(_), None) => {
CompletionItem::new(CompletionKind::Keyword, "crate")
.kind(CompletionItemKind::Keyword)
.lookup_by("crate")
.snippet("crate::")
.add_to(acc);
CompletionItem::new(CompletionKind::Keyword, "self")
.kind(CompletionItemKind::Keyword)
.lookup_by("self")
.add_to(acc);
CompletionItem::new(CompletionKind::Keyword, "super")
.kind(CompletionItemKind::Keyword)
.lookup_by("super")
.add_to(acc);
}
(Some(_), Some(_)) => {
CompletionItem::new(CompletionKind::Keyword, "self")
.kind(CompletionItemKind::Keyword)
.lookup_by("self")
.add_to(acc);
CompletionItem::new(CompletionKind::Keyword, "super")
.kind(CompletionItemKind::Keyword)
.lookup_by("super")
.add_to(acc);
}
_ => {}
}
}
fn keyword(kw: &str, snippet: &str) -> CompletionItem {
CompletionItem::new(CompletionKind::Keyword, kw)
.kind(CompletionItemKind::Keyword)
.snippet(snippet)
.build()
}
pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionContext) {
if !ctx.is_trivial_path {
return;
}
let fn_def = match ctx.function_syntax {
Some(it) => it,
None => return,
};
acc.add(keyword("if", "if $0 {}"));
acc.add(keyword("match", "match $0 {}"));
acc.add(keyword("while", "while $0 {}"));
acc.add(keyword("loop", "loop {$0}"));
if ctx.after_if {
acc.add(keyword("else", "else {$0}"));
acc.add(keyword("else if", "else if $0 {}"));
}
if is_in_loop_body(ctx.leaf) {
if ctx.can_be_stmt {
acc.add(keyword("continue", "continue;"));
acc.add(keyword("break", "break;"));
} else {
acc.add(keyword("continue", "continue"));
acc.add(keyword("break", "break"));
}
}
acc.add_all(complete_return(fn_def, ctx.can_be_stmt));
}
fn is_in_loop_body(leaf: &SyntaxNode) -> bool {
for node in leaf.ancestors() {
if node.kind() == FN_DEF || node.kind() == LAMBDA_EXPR {
break;
}
let loop_body = visitor()
.visit::<ast::ForExpr, _>(LoopBodyOwner::loop_body)
.visit::<ast::WhileExpr, _>(LoopBodyOwner::loop_body)
.visit::<ast::LoopExpr, _>(LoopBodyOwner::loop_body)
.accept(node);
if let Some(Some(body)) = loop_body {
if leaf.range().is_subrange(&body.syntax().range()) {
return true;
}
}
}
false
}
fn complete_return(fn_def: &ast::FnDef, can_be_stmt: bool) -> Option<CompletionItem> {
let snip = match (can_be_stmt, fn_def.ret_type().is_some()) {
(true, true) => "return $0;",
(true, false) => "return;",
(false, true) => "return $0",
(false, false) => "return",
};
Some(keyword("return", snip))
}
#[cfg(test)]
mod tests {
use crate::completion::{CompletionKind, check_completion};
fn check_keyword_completion(code: &str, expected_completions: &str) {
check_completion(code, expected_completions, CompletionKind::Keyword);
}
#[test]
fn completes_keywords_in_use_stmt() {
check_keyword_completion(
r"
use <|>
",
r#"
crate "crate" "crate::"
self "self"
super "super"
"#,
);
check_keyword_completion(
r"
use a::<|>
",
r#"
self "self"
super "super"
"#,
);
check_keyword_completion(
r"
use a::{b, <|>}
",
r#"
self "self"
super "super"
"#,
);
}
#[test]
fn completes_various_keywords_in_function() {
check_keyword_completion(
r"
fn quux() {
<|>
}
",
r#"
if "if $0 {}"
match "match $0 {}"
while "while $0 {}"
loop "loop {$0}"
return "return;"
"#,
);
}
#[test]
fn completes_else_after_if() {
check_keyword_completion(
r"
fn quux() {
if true {
()
} <|>
}
",
r#"
if "if $0 {}"
match "match $0 {}"
while "while $0 {}"
loop "loop {$0}"
else "else {$0}"
else if "else if $0 {}"
return "return;"
"#,
);
}
#[test]
fn test_completion_return_value() {
check_keyword_completion(
r"
fn quux() -> i32 {
<|>
92
}
",
r#"
if "if $0 {}"
match "match $0 {}"
while "while $0 {}"
loop "loop {$0}"
return "return $0;"
"#,
);
check_keyword_completion(
r"
fn quux() {
<|>
92
}
",
r#"
if "if $0 {}"
match "match $0 {}"
while "while $0 {}"
loop "loop {$0}"
return "return;"
"#,
);
}
#[test]
fn dont_add_semi_after_return_if_not_a_statement() {
check_keyword_completion(
r"
fn quux() -> i32 {
match () {
() => <|>
}
}
",
r#"
if "if $0 {}"
match "match $0 {}"
while "while $0 {}"
loop "loop {$0}"
return "return $0"
"#,
);
}
#[test]
fn last_return_in_block_has_semi() {
check_keyword_completion(
r"
fn quux() -> i32 {
if condition {
<|>
}
}
",
r#"
if "if $0 {}"
match "match $0 {}"
while "while $0 {}"
loop "loop {$0}"
return "return $0;"
"#,
);
check_keyword_completion(
r"
fn quux() -> i32 {
if condition {
<|>
}
let x = 92;
x
}
",
r#"
if "if $0 {}"
match "match $0 {}"
while "while $0 {}"
loop "loop {$0}"
return "return $0;"
"#,
);
}
#[test]
fn completes_break_and_continue_in_loops() {
check_keyword_completion(
r"
fn quux() -> i32 {
loop { <|> }
}
",
r#"
if "if $0 {}"
match "match $0 {}"
while "while $0 {}"
loop "loop {$0}"
continue "continue;"
break "break;"
return "return $0;"
"#,
);
// No completion: lambda isolates control flow
check_keyword_completion(
r"
fn quux() -> i32 {
loop { || { <|> } }
}
",
r#"
if "if $0 {}"
match "match $0 {}"
while "while $0 {}"
loop "loop {$0}"
return "return $0;"
"#,
);
}
#[test]
fn no_semi_after_break_continue_in_expr() {
check_keyword_completion(
r"
fn f() {
loop {
match () {
() => br<|>
}
}
}
",
r#"
if "if $0 {}"
match "match $0 {}"
while "while $0 {}"
loop "loop {$0}"
continue "continue"
break "break"
return "return"
"#,
)
}
}

crates/ra_ide_api/src/completion/complete_path.rs

@@ -0,0 +1,128 @@
use crate::{
Cancelable,
completion::{CompletionItem, CompletionItemKind, Completions, CompletionKind, CompletionContext},
};
pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) -> Cancelable<()> {
let (path, module) = match (&ctx.path_prefix, &ctx.module) {
(Some(path), Some(module)) => (path.clone(), module),
_ => return Ok(()),
};
let def_id = match module.resolve_path(ctx.db, &path)?.take_types() {
Some(it) => it,
None => return Ok(()),
};
match def_id.resolve(ctx.db)? {
hir::Def::Module(module) => {
let module_scope = module.scope(ctx.db)?;
module_scope.entries().for_each(|(name, res)| {
CompletionItem::new(CompletionKind::Reference, name.to_string())
.from_resolution(ctx, res)
.add_to(acc)
});
}
hir::Def::Enum(e) => e
.variants(ctx.db)?
.into_iter()
.for_each(|(name, _variant)| {
CompletionItem::new(CompletionKind::Reference, name.to_string())
.kind(CompletionItemKind::EnumVariant)
.add_to(acc)
}),
_ => return Ok(()),
};
Ok(())
}
#[cfg(test)]
mod tests {
use crate::completion::{CompletionKind, check_completion};
fn check_reference_completion(code: &str, expected_completions: &str) {
check_completion(code, expected_completions, CompletionKind::Reference);
}
#[test]
fn completes_use_item_starting_with_self() {
check_reference_completion(
r"
use self::m::<|>;
mod m {
struct Bar;
}
",
"Bar",
);
}
#[test]
fn completes_use_item_starting_with_crate() {
check_reference_completion(
"
//- /lib.rs
mod foo;
struct Spam;
//- /foo.rs
use crate::Sp<|>
",
"Spam;foo",
);
}
#[test]
fn completes_nested_use_tree() {
check_reference_completion(
"
//- /lib.rs
mod foo;
struct Spam;
//- /foo.rs
use crate::{Sp<|>};
",
"Spam;foo",
);
}
#[test]
fn completes_deeply_nested_use_tree() {
check_reference_completion(
"
//- /lib.rs
mod foo;
pub mod bar {
pub mod baz {
pub struct Spam;
}
}
//- /foo.rs
use crate::{bar::{baz::Sp<|>}};
",
"Spam",
);
}
#[test]
fn completes_enum_variant() {
check_reference_completion(
"
//- /lib.rs
enum E { Foo, Bar(i32) }
fn foo() { let _ = E::<|> }
",
"Foo;Bar",
);
}
#[test]
fn dont_render_function_parens_in_use_item() {
check_reference_completion(
"
//- /lib.rs
mod m { pub fn foo() {} }
use crate::m::f<|>;
",
"foo",
)
}
}

crates/ra_ide_api/src/completion/complete_scope.rs

@@ -0,0 +1,192 @@
use rustc_hash::FxHashSet;
use ra_syntax::TextUnit;
use crate::{
Cancelable,
completion::{CompletionItem, CompletionItemKind, Completions, CompletionKind, CompletionContext},
};
pub(super) fn complete_scope(acc: &mut Completions, ctx: &CompletionContext) -> Cancelable<()> {
if !ctx.is_trivial_path {
return Ok(());
}
let module = match &ctx.module {
Some(it) => it,
None => return Ok(()),
};
if let Some(function) = &ctx.function {
let scopes = function.scopes(ctx.db)?;
complete_fn(acc, &scopes, ctx.offset);
}
let module_scope = module.scope(ctx.db)?;
let (file_id, _) = module.defenition_source(ctx.db)?;
module_scope
.entries()
.filter(|(_name, res)| {
// Don't expose this item
// FIXME: this penetrates through all kinds of abstractions,
// we need to figure out a less ugly way to do it.
match res.import {
None => true,
Some(import) => {
let range = import.range(ctx.db, file_id);
!range.is_subrange(&ctx.leaf.range())
}
}
})
.for_each(|(name, res)| {
CompletionItem::new(CompletionKind::Reference, name.to_string())
.from_resolution(ctx, res)
.add_to(acc)
});
Ok(())
}
fn complete_fn(acc: &mut Completions, scopes: &hir::ScopesWithSyntaxMapping, offset: TextUnit) {
let mut shadowed = FxHashSet::default();
scopes
.scope_chain_for_offset(offset)
.flat_map(|scope| scopes.scopes.entries(scope).iter())
.filter(|entry| shadowed.insert(entry.name()))
.for_each(|entry| {
CompletionItem::new(CompletionKind::Reference, entry.name().to_string())
.kind(CompletionItemKind::Binding)
.add_to(acc)
});
}
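The shadowing filter works because `HashSet::insert` returns `false` for a name that was already seen, and the scope chain is walked from the innermost scope outward, so the closest binding wins. The same idiom in isolation:

use std::collections::HashSet;

fn visible_names<'a>(names_innermost_first: &[&'a str]) -> Vec<&'a str> {
    let mut seen = HashSet::new();
    names_innermost_first
        .iter()
        .copied()
        // insert() returns false for duplicates, dropping shadowed bindings.
        .filter(|name| seen.insert(*name))
        .collect()
}

// visible_names(&["bar", "x", "bar"]) == ["bar", "x"]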
#[cfg(test)]
mod tests {
use crate::completion::{CompletionKind, check_completion};
fn check_reference_completion(code: &str, expected_completions: &str) {
check_completion(code, expected_completions, CompletionKind::Reference);
}
#[test]
fn completes_bindings_from_let() {
check_reference_completion(
r"
fn quux(x: i32) {
let y = 92;
1 + <|>;
let z = ();
}
",
r#"y;x;quux "quux($0)""#,
);
}
#[test]
fn completes_bindings_from_if_let() {
check_reference_completion(
r"
fn quux() {
if let Some(x) = foo() {
let y = 92;
};
if let Some(a) = bar() {
let b = 62;
1 + <|>
}
}
",
r#"b;a;quux "quux()$0""#,
);
}
#[test]
fn completes_bindings_from_for() {
check_reference_completion(
r"
fn quux() {
for x in &[1, 2, 3] {
<|>
}
}
",
r#"x;quux "quux()$0""#,
);
}
#[test]
fn completes_module_items() {
check_reference_completion(
r"
struct Foo;
enum Baz {}
fn quux() {
<|>
}
",
r#"quux "quux()$0";Foo;Baz"#,
);
}
#[test]
fn completes_module_items_in_nested_modules() {
check_reference_completion(
r"
struct Foo;
mod m {
struct Bar;
fn quux() { <|> }
}
",
r#"quux "quux()$0";Bar"#,
);
}
#[test]
fn completes_return_type() {
check_reference_completion(
r"
struct Foo;
fn x() -> <|>
",
r#"Foo;x "x()$0""#,
)
}
#[test]
fn dont_show_both_completions_for_shadowing() {
check_reference_completion(
r"
fn foo() -> {
let bar = 92;
{
let bar = 62;
<|>
}
}
",
r#"bar;foo "foo()$0""#,
)
}
#[test]
fn completes_self_in_methods() {
check_reference_completion(r"impl S { fn foo(&self) { <|> } }", "self")
}
#[test]
fn inserts_parens_for_function_calls() {
check_reference_completion(
r"
fn no_args() {}
fn main() { no_<|> }
",
r#"no_args "no_args()$0"
main "main()$0""#,
);
check_reference_completion(
r"
fn with_args(x: i32, y: String) {}
fn main() { with_<|> }
",
r#"main "main()$0"
with_args "with_args($0)""#,
);
}
}

crates/ra_ide_api/src/completion/complete_snippet.rs

@@ -0,0 +1,73 @@
use crate::completion::{CompletionItem, Completions, CompletionKind, CompletionItemKind, CompletionContext, completion_item::Builder};
fn snippet(label: &str, snippet: &str) -> Builder {
CompletionItem::new(CompletionKind::Snippet, label)
.snippet(snippet)
.kind(CompletionItemKind::Snippet)
}
pub(super) fn complete_expr_snippet(acc: &mut Completions, ctx: &CompletionContext) {
if !(ctx.is_trivial_path && ctx.function_syntax.is_some()) {
return;
}
snippet("pd", "eprintln!(\"$0 = {:?}\", $0);").add_to(acc);
snippet("ppd", "eprintln!(\"$0 = {:#?}\", $0);").add_to(acc);
}
pub(super) fn complete_item_snippet(acc: &mut Completions, ctx: &CompletionContext) {
if !ctx.is_new_item {
return;
}
snippet(
"Test function",
"\
#[test]
fn ${1:feature}() {
$0
}",
)
.lookup_by("tfn")
.add_to(acc);
snippet("pub(crate)", "pub(crate) $0").add_to(acc);
}
#[cfg(test)]
mod tests {
use crate::completion::{CompletionKind, check_completion};
fn check_snippet_completion(code: &str, expected_completions: &str) {
check_completion(code, expected_completions, CompletionKind::Snippet);
}
#[test]
fn completes_snippets_in_expressions() {
check_snippet_completion(
r"fn foo(x: i32) { <|> }",
r##"
pd "eprintln!(\"$0 = {:?}\", $0);"
ppd "eprintln!(\"$0 = {:#?}\", $0);"
"##,
);
}
#[test]
fn completes_snippets_in_items() {
// check_snippet_completion(r"
// <|>
// ",
// r##"[CompletionItem { label: "Test function", lookup: None, snippet: Some("#[test]\nfn test_${1:feature}() {\n$0\n}"##,
// );
check_snippet_completion(
r"
#[cfg(test)]
mod tests {
<|>
}
",
r##"
tfn "Test function" "#[test]\nfn ${1:feature}() {\n $0\n}"
pub(crate) "pub(crate) $0"
"##,
);
}
}

crates/ra_ide_api/src/completion/completion_context.rs

@@ -0,0 +1,205 @@
use ra_text_edit::AtomTextEdit;
use ra_syntax::{
AstNode, SyntaxNode, SourceFile, TextUnit, TextRange,
ast,
algo::{find_leaf_at_offset, find_covering_node, find_node_at_offset},
SyntaxKind::*,
};
use hir::source_binder;
use crate::{db, FilePosition, Cancelable};
/// `CompletionContext` is created early during completion to figure out where
/// exactly the cursor is, syntax-wise.
#[derive(Debug)]
pub(super) struct CompletionContext<'a> {
pub(super) db: &'a db::RootDatabase,
pub(super) offset: TextUnit,
pub(super) leaf: &'a SyntaxNode,
pub(super) module: Option<hir::Module>,
pub(super) function: Option<hir::Function>,
pub(super) function_syntax: Option<&'a ast::FnDef>,
pub(super) use_item_syntax: Option<&'a ast::UseItem>,
pub(super) is_param: bool,
/// A single-ident path, like `foo`.
pub(super) is_trivial_path: bool,
/// If not a trivial path, the prefix (qualifier).
pub(super) path_prefix: Option<hir::Path>,
pub(super) after_if: bool,
/// `true` if we are a statement or a last expr in the block.
pub(super) can_be_stmt: bool,
/// Something is typed at the "top" level, in module or impl/trait.
pub(super) is_new_item: bool,
/// The receiver if this is a field or method access, i.e. writing something.<|>
pub(super) dot_receiver: Option<&'a ast::Expr>,
/// If this is a method call in particular, i.e. the () are already there.
pub(super) is_method_call: bool,
}
impl<'a> CompletionContext<'a> {
pub(super) fn new(
db: &'a db::RootDatabase,
original_file: &'a SourceFile,
position: FilePosition,
) -> Cancelable<Option<CompletionContext<'a>>> {
let module = source_binder::module_from_position(db, position)?;
let leaf =
ctry!(find_leaf_at_offset(original_file.syntax(), position.offset).left_biased());
let mut ctx = CompletionContext {
db,
leaf,
offset: position.offset,
module,
function: None,
function_syntax: None,
use_item_syntax: None,
is_param: false,
is_trivial_path: false,
path_prefix: None,
after_if: false,
can_be_stmt: false,
is_new_item: false,
dot_receiver: None,
is_method_call: false,
};
ctx.fill(original_file, position.offset);
Ok(Some(ctx))
}
fn fill(&mut self, original_file: &'a SourceFile, offset: TextUnit) {
// Insert a fake ident to get a valid parse tree. We will use this file
// to determine context, though the original_file will be used for
// actual completion.
let file = {
let edit = AtomTextEdit::insert(offset, "intellijRulezz".to_string());
original_file.reparse(&edit)
};
// First, let's try to complete a reference to some declaration.
if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(file.syntax(), offset) {
// Special case, `trait T { fn foo(i_am_a_name_ref) {} }`.
// See RFC#1685.
if is_node::<ast::Param>(name_ref.syntax()) {
self.is_param = true;
return;
}
self.classify_name_ref(original_file, name_ref);
}
// Otherwise, see if this is a declaration. We can use heuristics to
// suggest declaration names, see `CompletionKind::Magic`.
if let Some(name) = find_node_at_offset::<ast::Name>(file.syntax(), offset) {
if is_node::<ast::Param>(name.syntax()) {
self.is_param = true;
return;
}
}
}
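To make the fake-ident trick concrete: completing right after `foo.` leaves the parser with a broken tree, but splicing in an identifier repairs it, so classification can run on well-formed syntax. Roughly (illustration, not code from this commit):

// original text, caret at <|>:           fn f() { foo.<|> }
// reparsed copy used for classification: fn f() { foo.intellijRulezz }
// original_file, unmodified, is still what completions are computed against.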
fn classify_name_ref(&mut self, original_file: &'a SourceFile, name_ref: &ast::NameRef) {
let name_range = name_ref.syntax().range();
let top_node = name_ref
.syntax()
.ancestors()
.take_while(|it| it.range() == name_range)
.last()
.unwrap();
match top_node.parent().map(|it| it.kind()) {
Some(SOURCE_FILE) | Some(ITEM_LIST) => {
self.is_new_item = true;
return;
}
_ => (),
}
self.use_item_syntax = self.leaf.ancestors().find_map(ast::UseItem::cast);
self.function_syntax = self
.leaf
.ancestors()
.take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE)
.find_map(ast::FnDef::cast);
match (&self.module, self.function_syntax) {
(Some(module), Some(fn_def)) => {
let function = source_binder::function_from_module(self.db, module, fn_def);
self.function = Some(function);
}
_ => (),
}
let parent = match name_ref.syntax().parent() {
Some(it) => it,
None => return,
};
if let Some(segment) = ast::PathSegment::cast(parent) {
let path = segment.parent_path();
if let Some(mut path) = hir::Path::from_ast(path) {
if !path.is_ident() {
path.segments.pop().unwrap();
self.path_prefix = Some(path);
return;
}
}
if path.qualifier().is_none() {
self.is_trivial_path = true;
// Find either enclosing expr statement (thing with `;`) or a
// block. If block, check that we are the last expr.
self.can_be_stmt = name_ref
.syntax()
.ancestors()
.find_map(|node| {
if let Some(stmt) = ast::ExprStmt::cast(node) {
return Some(stmt.syntax().range() == name_ref.syntax().range());
}
if let Some(block) = ast::Block::cast(node) {
return Some(
block.expr().map(|e| e.syntax().range())
== Some(name_ref.syntax().range()),
);
}
None
})
.unwrap_or(false);
if let Some(off) = name_ref.syntax().range().start().checked_sub(2.into()) {
if let Some(if_expr) =
find_node_at_offset::<ast::IfExpr>(original_file.syntax(), off)
{
if if_expr.syntax().range().end() < name_ref.syntax().range().start() {
self.after_if = true;
}
}
}
}
}
if let Some(field_expr) = ast::FieldExpr::cast(parent) {
// The receiver comes before the point of insertion of the fake
// ident, so it should have the same range in the non-modified file
self.dot_receiver = field_expr
.expr()
.map(|e| e.syntax().range())
.and_then(|r| find_node_with_range(original_file.syntax(), r));
}
if let Some(method_call_expr) = ast::MethodCallExpr::cast(parent) {
// As above
self.dot_receiver = method_call_expr
.expr()
.map(|e| e.syntax().range())
.and_then(|r| find_node_with_range(original_file.syntax(), r));
self.is_method_call = true;
}
}
}
fn find_node_with_range<N: AstNode>(syntax: &SyntaxNode, range: TextRange) -> Option<&N> {
let node = find_covering_node(syntax, range);
node.ancestors().find_map(N::cast)
}
fn is_node<N: AstNode>(node: &SyntaxNode) -> bool {
match node.ancestors().filter_map(N::cast).next() {
None => false,
Some(n) => n.syntax().range() == node.range(),
}
}

crates/ra_ide_api/src/completion/completion_item.rs

@@ -0,0 +1,244 @@
use hir::PerNs;
use crate::completion::CompletionContext;
/// `CompletionItem` describes a single completion variant in the editor pop-up.
/// It is basically a POD with various properties. To construct a
/// `CompletionItem`, use the `new` method and the `Builder` struct.
#[derive(Debug)]
pub struct CompletionItem {
/// Used only internally in tests, to check only a specific kind of
/// completion.
completion_kind: CompletionKind,
label: String,
lookup: Option<String>,
snippet: Option<String>,
kind: Option<CompletionItemKind>,
}
pub enum InsertText {
PlainText { text: String },
Snippet { text: String },
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum CompletionItemKind {
Snippet,
Keyword,
Module,
Function,
Struct,
Enum,
EnumVariant,
Binding,
Field,
}
#[derive(Debug, PartialEq, Eq)]
pub(crate) enum CompletionKind {
/// Parser-based keyword completion.
Keyword,
/// Your usual "complete all valid identifiers".
Reference,
/// "Secret sauce" completions.
Magic,
Snippet,
}
impl CompletionItem {
pub(crate) fn new(completion_kind: CompletionKind, label: impl Into<String>) -> Builder {
let label = label.into();
Builder {
completion_kind,
label,
lookup: None,
snippet: None,
kind: None,
}
}
/// What the user sees in the pop-up in the UI.
pub fn label(&self) -> &str {
&self.label
}
/// What string is used for filtering.
pub fn lookup(&self) -> &str {
self.lookup
.as_ref()
.map(|it| it.as_str())
.unwrap_or(self.label())
}
/// What is inserted.
pub fn insert_text(&self) -> InsertText {
match &self.snippet {
None => InsertText::PlainText {
text: self.label.clone(),
},
Some(it) => InsertText::Snippet { text: it.clone() },
}
}
pub fn kind(&self) -> Option<CompletionItemKind> {
self.kind
}
}
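Putting the accessors together with the builder below, a typical construction as used by the completion routines earlier in this diff: start from `new`, chain the optional properties, then `build` (or `add_to` an accumulator):

let item = CompletionItem::new(CompletionKind::Keyword, "loop")
    .kind(CompletionItemKind::Keyword)
    .snippet("loop {$0}")
    .build();
assert_eq!(item.label(), "loop");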
/// A helper to make `CompletionItem`s.
#[must_use]
pub(crate) struct Builder {
completion_kind: CompletionKind,
label: String,
lookup: Option<String>,
snippet: Option<String>,
kind: Option<CompletionItemKind>,
}
impl Builder {
pub(crate) fn add_to(self, acc: &mut Completions) {
acc.add(self.build())
}
pub(crate) fn build(self) -> CompletionItem {
CompletionItem {
label: self.label,
lookup: self.lookup,
snippet: self.snippet,
kind: self.kind,
completion_kind: self.completion_kind,
}
}
pub(crate) fn lookup_by(mut self, lookup: impl Into<String>) -> Builder {
self.lookup = Some(lookup.into());
self
}
pub(crate) fn snippet(mut self, snippet: impl Into<String>) -> Builder {
self.snippet = Some(snippet.into());
self
}
pub(crate) fn kind(mut self, kind: CompletionItemKind) -> Builder {
self.kind = Some(kind);
self
}
pub(super) fn from_resolution(
mut self,
ctx: &CompletionContext,
resolution: &hir::Resolution,
) -> Builder {
let resolved = resolution.def_id.and_then(|d| d.resolve(ctx.db).ok());
let kind = match resolved {
PerNs {
types: Some(hir::Def::Module(..)),
..
} => CompletionItemKind::Module,
PerNs {
types: Some(hir::Def::Struct(..)),
..
} => CompletionItemKind::Struct,
PerNs {
types: Some(hir::Def::Enum(..)),
..
} => CompletionItemKind::Enum,
PerNs {
values: Some(hir::Def::Function(function)),
..
} => return self.from_function(ctx, function),
_ => return self,
};
self.kind = Some(kind);
self
}
fn from_function(mut self, ctx: &CompletionContext, function: hir::Function) -> Builder {
// If not an import, add parentheses automatically.
if ctx.use_item_syntax.is_none() {
if function.signature(ctx.db).args().is_empty() {
self.snippet = Some(format!("{}()$0", self.label));
} else {
self.snippet = Some(format!("{}($0)", self.label));
}
}
self.kind = Some(CompletionItemKind::Function);
self
}
}
impl Into<CompletionItem> for Builder {
fn into(self) -> CompletionItem {
self.build()
}
}
/// Represents an in-progress set of completions being built.
#[derive(Debug, Default)]
pub(crate) struct Completions {
buf: Vec<CompletionItem>,
}
impl Completions {
pub(crate) fn add(&mut self, item: impl Into<CompletionItem>) {
self.buf.push(item.into())
}
pub(crate) fn add_all<I>(&mut self, items: I)
where
I: IntoIterator,
I::Item: Into<CompletionItem>,
{
items.into_iter().for_each(|item| self.add(item.into()))
}
#[cfg(test)]
pub(crate) fn assert_match(&self, expected: &str, kind: CompletionKind) {
let expected = normalize(expected);
let actual = self.debug_render(kind);
test_utils::assert_eq_text!(expected.as_str(), actual.as_str(),);
/// Normalize the textual representation of `Completions`:
/// replace `;` with newlines, normalize whitespace
fn normalize(expected: &str) -> String {
use ra_syntax::{tokenize, TextUnit, TextRange, SyntaxKind::SEMI};
let mut res = String::new();
for line in expected.trim().lines() {
let line = line.trim();
let mut start_offset: TextUnit = 0.into();
// Yep, we use rust tokenize in completion tests :-)
for token in tokenize(line) {
let range = TextRange::offset_len(start_offset, token.len);
start_offset += token.len;
if token.kind == SEMI {
res.push('\n');
} else {
res.push_str(&line[range]);
}
}
res.push('\n');
}
res
}
}
#[cfg(test)]
fn debug_render(&self, kind: CompletionKind) -> String {
let mut res = String::new();
for c in self.buf.iter() {
if c.completion_kind == kind {
if let Some(lookup) = &c.lookup {
res.push_str(lookup);
res.push_str(&format!(" {:?}", c.label));
} else {
res.push_str(&c.label);
}
if let Some(snippet) = &c.snippet {
res.push_str(&format!(" {:?}", snippet));
}
res.push('\n');
}
}
res
}
}
impl Into<Vec<CompletionItem>> for Completions {
fn into(self) -> Vec<CompletionItem> {
self.buf
}
}

crates/ra_ide_api/src/db.rs

@@ -0,0 +1,128 @@
use std::{fmt, sync::Arc};
use salsa::{self, Database};
use ra_db::{LocationIntener, BaseDatabase, FileId};
use crate::{symbol_index, LineIndex};
#[derive(Debug)]
pub(crate) struct RootDatabase {
runtime: salsa::Runtime<RootDatabase>,
id_maps: Arc<IdMaps>,
}
#[derive(Default)]
struct IdMaps {
defs: LocationIntener<hir::DefLoc, hir::DefId>,
macros: LocationIntener<hir::MacroCallLoc, hir::MacroCallId>,
}
impl fmt::Debug for IdMaps {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("IdMaps")
.field("n_defs", &self.defs.len())
.finish()
}
}
impl salsa::Database for RootDatabase {
fn salsa_runtime(&self) -> &salsa::Runtime<RootDatabase> {
&self.runtime
}
}
impl Default for RootDatabase {
fn default() -> RootDatabase {
let mut db = RootDatabase {
runtime: salsa::Runtime::default(),
id_maps: Default::default(),
};
db.query_mut(ra_db::CrateGraphQuery)
.set((), Default::default());
db.query_mut(ra_db::LocalRootsQuery)
.set((), Default::default());
db.query_mut(ra_db::LibraryRootsQuery)
.set((), Default::default());
db
}
}
impl salsa::ParallelDatabase for RootDatabase {
fn snapshot(&self) -> salsa::Snapshot<RootDatabase> {
salsa::Snapshot::new(RootDatabase {
runtime: self.runtime.snapshot(self),
id_maps: self.id_maps.clone(),
})
}
}
impl BaseDatabase for RootDatabase {}
impl AsRef<LocationIntener<hir::DefLoc, hir::DefId>> for RootDatabase {
fn as_ref(&self) -> &LocationIntener<hir::DefLoc, hir::DefId> {
&self.id_maps.defs
}
}
impl AsRef<LocationIntener<hir::MacroCallLoc, hir::MacroCallId>> for RootDatabase {
fn as_ref(&self) -> &LocationIntener<hir::MacroCallLoc, hir::MacroCallId> {
&self.id_maps.macros
}
}
salsa::query_group! {
pub(crate) trait LineIndexDatabase: ra_db::FilesDatabase + BaseDatabase {
fn line_index(file_id: FileId) -> Arc<LineIndex> {
type LineIndexQuery;
}
}
}
fn line_index(db: &impl ra_db::FilesDatabase, file_id: FileId) -> Arc<LineIndex> {
let text = db.file_text(file_id);
Arc::new(LineIndex::new(&*text))
}
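The `salsa::query_group!` block above turns `line_index` into a memoized query: callers invoke it as a method on any database implementing `LineIndexDatabase`, and salsa recomputes it only when the underlying `file_text` input changes. A hedged usage sketch (`cached_line_index` is a hypothetical caller):

fn cached_line_index(db: &impl LineIndexDatabase, file_id: FileId) -> Arc<LineIndex> {
    // Memoized: cheap after the first call for a given file.
    db.line_index(file_id)
}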
salsa::database_storage! {
pub(crate) struct RootDatabaseStorage for RootDatabase {
impl ra_db::FilesDatabase {
fn file_text() for ra_db::FileTextQuery;
fn file_relative_path() for ra_db::FileRelativePathQuery;
fn file_source_root() for ra_db::FileSourceRootQuery;
fn source_root() for ra_db::SourceRootQuery;
fn local_roots() for ra_db::LocalRootsQuery;
fn library_roots() for ra_db::LibraryRootsQuery;
fn crate_graph() for ra_db::CrateGraphQuery;
}
impl ra_db::SyntaxDatabase {
fn source_file() for ra_db::SourceFileQuery;
}
impl LineIndexDatabase {
fn line_index() for LineIndexQuery;
}
impl symbol_index::SymbolsDatabase {
fn file_symbols() for symbol_index::FileSymbolsQuery;
fn library_symbols() for symbol_index::LibrarySymbolsQuery;
}
impl hir::db::HirDatabase {
fn hir_source_file() for hir::db::HirSourceFileQuery;
fn expand_macro_invocation() for hir::db::ExpandMacroCallQuery;
fn module_tree() for hir::db::ModuleTreeQuery;
fn fn_scopes() for hir::db::FnScopesQuery;
fn file_items() for hir::db::SourceFileItemsQuery;
fn file_item() for hir::db::FileItemQuery;
fn input_module_items() for hir::db::InputModuleItemsQuery;
fn item_map() for hir::db::ItemMapQuery;
fn submodules() for hir::db::SubmodulesQuery;
fn infer() for hir::db::InferQuery;
fn type_for_def() for hir::db::TypeForDefQuery;
fn type_for_field() for hir::db::TypeForFieldQuery;
fn struct_data() for hir::db::StructDataQuery;
fn enum_data() for hir::db::EnumDataQuery;
fn impls_in_module() for hir::db::ImplsInModuleQuery;
fn body_hir() for hir::db::BodyHirQuery;
fn body_syntax_mapping() for hir::db::BodySyntaxMappingQuery;
fn fn_signature() for hir::db::FnSignatureQuery;
}
}
}

crates/ra_ide_api/src/extend_selection.rs

@@ -0,0 +1,56 @@
use ra_db::SyntaxDatabase;
use ra_syntax::{
SyntaxNode, AstNode, SourceFile,
ast, algo::find_covering_node,
};
use crate::{
TextRange, FileRange,
db::RootDatabase,
};
pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange {
let source_file = db.source_file(frange.file_id);
if let Some(range) = extend_selection_in_macro(db, &source_file, frange) {
return range;
}
ra_ide_api_light::extend_selection(source_file.syntax(), frange.range).unwrap_or(frange.range)
}
fn extend_selection_in_macro(
_db: &RootDatabase,
source_file: &SourceFile,
frange: FileRange,
) -> Option<TextRange> {
let macro_call = find_macro_call(source_file.syntax(), frange.range)?;
let (off, exp) = hir::MacroDef::ast_expand(macro_call)?;
let dst_range = exp.map_range_forward(frange.range - off)?;
let dst_range = ra_ide_api_light::extend_selection(&exp.syntax(), dst_range)?;
let src_range = exp.map_range_back(dst_range)? + off;
Some(src_range)
}
fn find_macro_call(node: &SyntaxNode, range: TextRange) -> Option<&ast::MacroCall> {
find_covering_node(node, range)
.ancestors()
.find_map(ast::MacroCall::cast)
}
#[cfg(test)]
mod tests {
use crate::mock_analysis::single_file_with_range;
use test_utils::assert_eq_dbg;
#[test]
fn extend_selection_inside_macros() {
let (analysis, frange) = single_file_with_range(
"
fn main() {
ctry!(foo(|x| <|>x<|>));
}
",
);
let r = analysis.extend_selection(frange);
assert_eq_dbg("[51; 56)", &r);
}
}

crates/ra_ide_api/src/goto_defenition.rs

@@ -0,0 +1,139 @@
use ra_db::{FileId, Cancelable, SyntaxDatabase};
use ra_syntax::{
TextRange, AstNode, ast, SyntaxKind::{NAME, MODULE},
algo::find_node_at_offset,
};
use crate::{FilePosition, NavigationTarget, db::RootDatabase};
pub(crate) fn goto_defenition(
db: &RootDatabase,
position: FilePosition,
) -> Cancelable<Option<Vec<NavigationTarget>>> {
let file = db.source_file(position.file_id);
let syntax = file.syntax();
if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(syntax, position.offset) {
return Ok(Some(reference_defenition(db, position.file_id, name_ref)?));
}
if let Some(name) = find_node_at_offset::<ast::Name>(syntax, position.offset) {
return name_defenition(db, position.file_id, name);
}
Ok(None)
}
pub(crate) fn reference_defenition(
db: &RootDatabase,
file_id: FileId,
name_ref: &ast::NameRef,
) -> Cancelable<Vec<NavigationTarget>> {
if let Some(fn_descr) =
hir::source_binder::function_from_child_node(db, file_id, name_ref.syntax())?
{
let scope = fn_descr.scopes(db)?;
// First try to resolve the symbol locally
if let Some(entry) = scope.resolve_local_name(name_ref) {
let nav = NavigationTarget {
file_id,
name: entry.name().to_string().into(),
range: entry.ptr().range(),
kind: NAME,
ptr: None,
};
return Ok(vec![nav]);
};
}
// If that fails, try the index-based approach.
let navs = db
.index_resolve(name_ref)?
.into_iter()
.map(NavigationTarget::from_symbol)
.collect();
Ok(navs)
}
fn name_defenition(
db: &RootDatabase,
file_id: FileId,
name: &ast::Name,
) -> Cancelable<Option<Vec<NavigationTarget>>> {
if let Some(module) = name.syntax().parent().and_then(ast::Module::cast) {
if module.has_semi() {
if let Some(child_module) =
hir::source_binder::module_from_declaration(db, file_id, module)?
{
let (file_id, _) = child_module.defenition_source(db)?;
let name = match child_module.name(db)? {
Some(name) => name.to_string().into(),
None => "".into(),
};
let nav = NavigationTarget {
file_id,
name,
range: TextRange::offset_len(0.into(), 0.into()),
kind: MODULE,
ptr: None,
};
return Ok(Some(vec![nav]));
}
}
}
Ok(None)
}
#[cfg(test)]
mod tests {
use test_utils::assert_eq_dbg;
use crate::mock_analysis::analysis_and_position;
#[test]
fn goto_defenition_works_in_items() {
let (analysis, pos) = analysis_and_position(
"
//- /lib.rs
struct Foo;
enum E { X(Foo<|>) }
",
);
let symbols = analysis.goto_defenition(pos).unwrap().unwrap();
assert_eq_dbg(
r#"[NavigationTarget { file_id: FileId(1), name: "Foo",
kind: STRUCT_DEF, range: [0; 11),
ptr: Some(LocalSyntaxPtr { range: [0; 11), kind: STRUCT_DEF }) }]"#,
&symbols,
);
}
#[test]
fn goto_defenition_works_for_module_declaration() {
let (analysis, pos) = analysis_and_position(
"
//- /lib.rs
mod <|>foo;
//- /foo.rs
// empty
",
);
let symbols = analysis.goto_defenition(pos).unwrap().unwrap();
assert_eq_dbg(
r#"[NavigationTarget { file_id: FileId(2), name: "foo", kind: MODULE, range: [0; 0), ptr: None }]"#,
&symbols,
);
let (analysis, pos) = analysis_and_position(
"
//- /lib.rs
mod <|>foo;
//- /foo/mod.rs
// empty
",
);
let symbols = analysis.goto_defenition(pos).unwrap().unwrap();
assert_eq_dbg(
r#"[NavigationTarget { file_id: FileId(2), name: "foo", kind: MODULE, range: [0; 0), ptr: None }]"#,
&symbols,
);
}
}

crates/ra_ide_api/src/hover.rs

@@ -0,0 +1,257 @@
use ra_db::{Cancelable, SyntaxDatabase};
use ra_syntax::{
AstNode, SyntaxNode, TreePtr,
ast::{self, NameOwner},
algo::{find_covering_node, find_node_at_offset, find_leaf_at_offset, visit::{visitor, Visitor}},
};
use crate::{db::RootDatabase, RangeInfo, FilePosition, FileRange, NavigationTarget};
pub(crate) fn hover(
db: &RootDatabase,
position: FilePosition,
) -> Cancelable<Option<RangeInfo<String>>> {
let file = db.source_file(position.file_id);
let mut res = Vec::new();
let mut range = None;
if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(file.syntax(), position.offset) {
let navs = crate::goto_defenition::reference_defenition(db, position.file_id, name_ref)?;
for nav in navs {
res.extend(doc_text_for(db, nav)?)
}
if !res.is_empty() {
range = Some(name_ref.syntax().range())
}
}
if range.is_none() {
let node = find_leaf_at_offset(file.syntax(), position.offset).find_map(|leaf| {
leaf.ancestors()
.find(|n| ast::Expr::cast(*n).is_some() || ast::Pat::cast(*n).is_some())
});
let node = ctry!(node);
let frange = FileRange {
file_id: position.file_id,
range: node.range(),
};
res.extend(type_of(db, frange)?);
range = Some(node.range());
};
let range = ctry!(range);
if res.is_empty() {
return Ok(None);
}
let res = RangeInfo::new(range, res.join("\n\n---\n"));
Ok(Some(res))
}
pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Cancelable<Option<String>> {
let file = db.source_file(frange.file_id);
let syntax = file.syntax();
let leaf_node = find_covering_node(syntax, frange.range);
// if we picked an identifier, expand to pattern/expression
let node = leaf_node
.ancestors()
.take_while(|it| it.range() == leaf_node.range())
.find(|&it| ast::Expr::cast(it).is_some() || ast::Pat::cast(it).is_some())
.unwrap_or(leaf_node);
let parent_fn = ctry!(node.ancestors().find_map(ast::FnDef::cast));
let function = ctry!(hir::source_binder::function_from_source(
db,
frange.file_id,
parent_fn
)?);
let infer = function.infer(db)?;
let syntax_mapping = function.body_syntax_mapping(db)?;
if let Some(expr) = ast::Expr::cast(node).and_then(|e| syntax_mapping.node_expr(e)) {
Ok(Some(infer[expr].to_string()))
} else if let Some(pat) = ast::Pat::cast(node).and_then(|p| syntax_mapping.node_pat(p)) {
Ok(Some(infer[pat].to_string()))
} else {
Ok(None)
}
}
// FIXME: this should not really use navigation target. Rather, an
// approximately resolved symbol should return a `DefId`.
fn doc_text_for(db: &RootDatabase, nav: NavigationTarget) -> Cancelable<Option<String>> {
let result = match (nav.description(db), nav.docs(db)) {
(Some(desc), Some(docs)) => Some("```rust\n".to_string() + &*desc + "\n```\n\n" + &*docs),
(Some(desc), None) => Some("```rust\n".to_string() + &*desc + "\n```"),
(None, Some(docs)) => Some(docs),
_ => None,
};
Ok(result)
}
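For a documented item this yields a fenced signature followed by the doc text. For example, assuming a function `foo` whose docs are just "Does things.", the assembled hover contents would be:

```rust
fn foo
```

Does things.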
impl NavigationTarget {
fn node(&self, db: &RootDatabase) -> Option<TreePtr<SyntaxNode>> {
let source_file = db.source_file(self.file_id);
let source_file = source_file.syntax();
let node = source_file
.descendants()
.find(|node| node.kind() == self.kind && node.range() == self.range)?
.to_owned();
Some(node)
}
fn docs(&self, db: &RootDatabase) -> Option<String> {
let node = self.node(db)?;
fn doc_comments<N: ast::DocCommentsOwner>(node: &N) -> Option<String> {
let comments = node.doc_comment_text();
if comments.is_empty() {
None
} else {
Some(comments)
}
}
visitor()
.visit(doc_comments::<ast::FnDef>)
.visit(doc_comments::<ast::StructDef>)
.visit(doc_comments::<ast::EnumDef>)
.visit(doc_comments::<ast::TraitDef>)
.visit(doc_comments::<ast::Module>)
.visit(doc_comments::<ast::TypeDef>)
.visit(doc_comments::<ast::ConstDef>)
.visit(doc_comments::<ast::StaticDef>)
.accept(&node)?
}
/// Get a description of this node.
///
/// e.g. `struct Name`, `enum Name`, `fn Name`
fn description(&self, db: &RootDatabase) -> Option<String> {
// TODO: After type inference is done, add type information to improve the output
let node = self.node(db)?;
// TODO: Refactor to have less repetition
visitor()
.visit(|node: &ast::FnDef| {
let mut string = "fn ".to_string();
node.name()?.syntax().text().push_to(&mut string);
Some(string)
})
.visit(|node: &ast::StructDef| {
let mut string = "struct ".to_string();
node.name()?.syntax().text().push_to(&mut string);
Some(string)
})
.visit(|node: &ast::EnumDef| {
let mut string = "enum ".to_string();
node.name()?.syntax().text().push_to(&mut string);
Some(string)
})
.visit(|node: &ast::TraitDef| {
let mut string = "trait ".to_string();
node.name()?.syntax().text().push_to(&mut string);
Some(string)
})
.visit(|node: &ast::Module| {
let mut string = "mod ".to_string();
node.name()?.syntax().text().push_to(&mut string);
Some(string)
})
.visit(|node: &ast::TypeDef| {
let mut string = "type ".to_string();
node.name()?.syntax().text().push_to(&mut string);
Some(string)
})
.visit(|node: &ast::ConstDef| {
let mut string = "const ".to_string();
node.name()?.syntax().text().push_to(&mut string);
Some(string)
})
.visit(|node: &ast::StaticDef| {
let mut string = "static ".to_string();
node.name()?.syntax().text().push_to(&mut string);
Some(string)
})
.accept(&node)?
}
}
#[cfg(test)]
mod tests {
use ra_syntax::TextRange;
use crate::mock_analysis::{single_file_with_position, single_file_with_range};
#[test]
fn hover_shows_type_of_an_expression() {
let (analysis, position) = single_file_with_position(
"
pub fn foo() -> u32 { 1 }
fn main() {
let foo_test = foo()<|>;
}
",
);
let hover = analysis.hover(position).unwrap().unwrap();
assert_eq!(hover.range, TextRange::from_to(95.into(), 100.into()));
assert_eq!(hover.info, "u32");
}
#[test]
fn hover_for_local_variable() {
let (analysis, position) = single_file_with_position("fn func(foo: i32) { fo<|>o; }");
let hover = analysis.hover(position).unwrap().unwrap();
assert_eq!(hover.info, "i32");
}
#[test]
fn hover_for_local_variable_pat() {
let (analysis, position) = single_file_with_position("fn func(fo<|>o: i32) {}");
let hover = analysis.hover(position).unwrap().unwrap();
assert_eq!(hover.info, "i32");
}
#[test]
fn test_type_of_for_function() {
let (analysis, range) = single_file_with_range(
"
pub fn foo() -> u32 { 1 };
fn main() {
let foo_test = <|>foo()<|>;
}
",
);
let type_name = analysis.type_of(range).unwrap().unwrap();
assert_eq!("u32", &type_name);
}
// FIXME: improve type_of to make this work
#[test]
fn test_type_of_for_expr_1() {
let (analysis, range) = single_file_with_range(
"
fn main() {
let foo = <|>1 + foo_test<|>;
}
",
);
let type_name = analysis.type_of(range).unwrap().unwrap();
assert_eq!("[unknown]", &type_name);
}
// FIXME: improve type_of to make this work
#[test]
fn test_type_of_for_expr_2() {
let (analysis, range) = single_file_with_range(
"
fn main() {
let foo: usize = 1;
let bar = <|>1 + foo_test<|>;
}
",
);
let type_name = analysis.type_of(range).unwrap().unwrap();
assert_eq!("[unknown]", &type_name);
}
}

View file

@@ -0,0 +1,309 @@
use std::sync::Arc;
use salsa::Database;
use hir::{
self, Problem, source_binder,
};
use ra_db::{FilesDatabase, SourceRoot, SourceRootId, SyntaxDatabase};
use ra_ide_api_light::{self, assists, LocalEdit, Severity};
use ra_syntax::{
TextRange, AstNode, SourceFile,
ast::{self, NameOwner},
algo::find_node_at_offset,
SyntaxKind::*,
};
use crate::{
AnalysisChange,
Cancelable, NavigationTarget,
CrateId, db, Diagnostic, FileId, FilePosition, FileRange, FileSystemEdit,
Query, RootChange, SourceChange, SourceFileEdit,
symbol_index::{LibrarySymbolsQuery, FileSymbol},
};
impl db::RootDatabase {
pub(crate) fn apply_change(&mut self, change: AnalysisChange) {
log::info!("apply_change {:?}", change);
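// Every mutation below goes through a salsa input (`query_mut(..).set(..)`),
// which starts a new revision; outstanding snapshots observe the pending
// write and are canceled, per the `AnalysisHost::apply_change` contract.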
// self.gc_syntax_trees();
if !change.new_roots.is_empty() {
let mut local_roots = Vec::clone(&self.local_roots());
for (root_id, is_local) in change.new_roots {
self.query_mut(ra_db::SourceRootQuery)
.set(root_id, Default::default());
if is_local {
local_roots.push(root_id);
}
}
self.query_mut(ra_db::LocalRootsQuery)
.set((), Arc::new(local_roots));
}
for (root_id, root_change) in change.roots_changed {
self.apply_root_change(root_id, root_change);
}
for (file_id, text) in change.files_changed {
self.query_mut(ra_db::FileTextQuery).set(file_id, text)
}
if !change.libraries_added.is_empty() {
let mut libraries = Vec::clone(&self.library_roots());
for library in change.libraries_added {
libraries.push(library.root_id);
self.query_mut(ra_db::SourceRootQuery)
.set(library.root_id, Default::default());
self.query_mut(LibrarySymbolsQuery)
.set_constant(library.root_id, Arc::new(library.symbol_index));
self.apply_root_change(library.root_id, library.root_change);
}
self.query_mut(ra_db::LibraryRootsQuery)
.set((), Arc::new(libraries));
}
if let Some(crate_graph) = change.crate_graph {
self.query_mut(ra_db::CrateGraphQuery)
.set((), Arc::new(crate_graph))
}
}
fn apply_root_change(&mut self, root_id: SourceRootId, root_change: RootChange) {
let mut source_root = SourceRoot::clone(&self.source_root(root_id));
for add_file in root_change.added {
self.query_mut(ra_db::FileTextQuery)
.set(add_file.file_id, add_file.text);
self.query_mut(ra_db::FileRelativePathQuery)
.set(add_file.file_id, add_file.path.clone());
self.query_mut(ra_db::FileSourceRootQuery)
.set(add_file.file_id, root_id);
source_root.files.insert(add_file.path, add_file.file_id);
}
for remove_file in root_change.removed {
self.query_mut(ra_db::FileTextQuery)
.set(remove_file.file_id, Default::default());
source_root.files.remove(&remove_file.path);
}
self.query_mut(ra_db::SourceRootQuery)
.set(root_id, Arc::new(source_root));
}
#[allow(unused)]
/// Ideally, we should call this function from time to time to collect heavy
/// syntax trees. However, if we actually do that, everything is recomputed
/// for some reason. Needs investigation.
fn gc_syntax_trees(&mut self) {
self.query(ra_db::SourceFileQuery)
.sweep(salsa::SweepStrategy::default().discard_values());
self.query(hir::db::SourceFileItemsQuery)
.sweep(salsa::SweepStrategy::default().discard_values());
self.query(hir::db::FileItemQuery)
.sweep(salsa::SweepStrategy::default().discard_values());
}
}
impl db::RootDatabase {
/// This returns `Vec` because a module may be included from several places. We
/// don't handle this case yet though, so the Vec has length at most one.
pub(crate) fn parent_module(
&self,
position: FilePosition,
) -> Cancelable<Vec<NavigationTarget>> {
let module = match source_binder::module_from_position(self, position)? {
None => return Ok(Vec::new()),
Some(it) => it,
};
let (file_id, ast_module) = match module.declaration_source(self)? {
None => return Ok(Vec::new()),
Some(it) => it,
};
let name = ast_module.name().unwrap();
Ok(vec![NavigationTarget {
file_id,
name: name.text().clone(),
range: name.syntax().range(),
kind: MODULE,
ptr: None,
}])
}
/// Returns `Vec` for the same reason as `parent_module`
pub(crate) fn crate_for(&self, file_id: FileId) -> Cancelable<Vec<CrateId>> {
let module = match source_binder::module_from_file_id(self, file_id)? {
Some(it) => it,
None => return Ok(Vec::new()),
};
let krate = match module.krate(self)? {
Some(it) => it,
None => return Ok(Vec::new()),
};
Ok(vec![krate.crate_id()])
}
pub(crate) fn find_all_refs(
&self,
position: FilePosition,
) -> Cancelable<Vec<(FileId, TextRange)>> {
let file = self.source_file(position.file_id);
// Find the binding associated with the offset
let (binding, descr) = match find_binding(self, &file, position)? {
None => return Ok(Vec::new()),
Some(it) => it,
};
let mut ret = binding
.name()
.into_iter()
.map(|name| (position.file_id, name.syntax().range()))
.collect::<Vec<_>>();
ret.extend(
descr
.scopes(self)?
.find_all_refs(binding)
.into_iter()
.map(|ref_desc| (position.file_id, ref_desc.range)),
);
return Ok(ret);
fn find_binding<'a>(
db: &db::RootDatabase,
source_file: &'a SourceFile,
position: FilePosition,
) -> Cancelable<Option<(&'a ast::BindPat, hir::Function)>> {
let syntax = source_file.syntax();
if let Some(binding) = find_node_at_offset::<ast::BindPat>(syntax, position.offset) {
let descr = ctry!(source_binder::function_from_child_node(
db,
position.file_id,
binding.syntax(),
)?);
return Ok(Some((binding, descr)));
};
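// Otherwise, the cursor is on a reference: resolve it to the local it
// names, then recover the defining `BindPat` from the resolved pointer.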
let name_ref = ctry!(find_node_at_offset::<ast::NameRef>(syntax, position.offset));
let descr = ctry!(source_binder::function_from_child_node(
db,
position.file_id,
name_ref.syntax(),
)?);
let scope = descr.scopes(db)?;
let resolved = ctry!(scope.resolve_local_name(name_ref));
let resolved = resolved.ptr().resolve(source_file);
let binding = ctry!(find_node_at_offset::<ast::BindPat>(
syntax,
resolved.range().end()
));
Ok(Some((binding, descr)))
}
}
pub(crate) fn diagnostics(&self, file_id: FileId) -> Cancelable<Vec<Diagnostic>> {
let syntax = self.source_file(file_id);
let mut res = ra_ide_api_light::diagnostics(&syntax)
.into_iter()
.map(|d| Diagnostic {
range: d.range,
message: d.msg,
severity: d.severity,
fix: d.fix.map(|fix| SourceChange::from_local_edit(file_id, fix)),
})
.collect::<Vec<_>>();
if let Some(m) = source_binder::module_from_file_id(self, file_id)? {
for (name_node, problem) in m.problems(self)? {
let source_root = self.file_source_root(file_id);
let diag = match problem {
Problem::UnresolvedModule { candidate } => {
let create_file = FileSystemEdit::CreateFile {
source_root,
path: candidate.clone(),
};
let fix = SourceChange {
label: "create module".to_string(),
source_file_edits: Vec::new(),
file_system_edits: vec![create_file],
cursor_position: None,
};
Diagnostic {
range: name_node.range(),
message: "unresolved module".to_string(),
severity: Severity::Error,
fix: Some(fix),
}
}
Problem::NotDirOwner { move_to, candidate } => {
let move_file = FileSystemEdit::MoveFile {
src: file_id,
dst_source_root: source_root,
dst_path: move_to.clone(),
};
let create_file = FileSystemEdit::CreateFile {
source_root,
path: move_to.join(candidate),
};
let fix = SourceChange {
label: "move file and create module".to_string(),
source_file_edits: Vec::new(),
file_system_edits: vec![move_file, create_file],
cursor_position: None,
};
Diagnostic {
range: name_node.range(),
message: "can't declare module at this location".to_string(),
severity: Severity::Error,
fix: Some(fix),
}
}
};
res.push(diag)
}
};
Ok(res)
}
pub(crate) fn assists(&self, frange: FileRange) -> Vec<SourceChange> {
let file = self.source_file(frange.file_id);
assists::assists(&file, frange.range)
.into_iter()
.map(|local_edit| SourceChange::from_local_edit(frange.file_id, local_edit))
.collect()
}
pub(crate) fn rename(
&self,
position: FilePosition,
new_name: &str,
) -> Cancelable<Vec<SourceFileEdit>> {
let res = self
.find_all_refs(position)?
.iter()
.map(|(file_id, text_range)| SourceFileEdit {
file_id: *file_id,
edit: {
let mut builder = ra_text_edit::TextEditBuilder::default();
builder.replace(*text_range, new_name.into());
builder.finish()
},
})
.collect::<Vec<_>>();
Ok(res)
}
pub(crate) fn index_resolve(&self, name_ref: &ast::NameRef) -> Cancelable<Vec<FileSymbol>> {
let name = name_ref.text();
let mut query = Query::new(name.to_string());
query.exact();
query.limit(4);
crate::symbol_index::world_symbols(self, query)
}
}
impl SourceChange {
pub(crate) fn from_local_edit(file_id: FileId, edit: LocalEdit) -> SourceChange {
let file_edit = SourceFileEdit {
file_id,
edit: edit.edit,
};
SourceChange {
label: edit.label,
source_file_edits: vec![file_edit],
file_system_edits: vec![],
cursor_position: edit
.cursor_position
.map(|offset| FilePosition { offset, file_id }),
}
}
}

View file

@@ -0,0 +1,509 @@
//! The ra_ide_api crate provides "ide-centric" APIs for rust-analyzer. What
//! powers this API are the `RootDatabase` struct, which defines a `salsa`
//! database, and the `ra_hir` crate, where the majority of the analysis
//! happens. However, IDE-specific bits of the analysis (most notably
//! completion) happen in this crate.
macro_rules! ctry {
($expr:expr) => {
match $expr {
None => return Ok(None),
Some(it) => it,
}
};
}
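// A minimal sketch of how `ctry!` reads at a use site: it is to
// `Cancelable<Option<T>>` what `?` is to `Option<T>`. The function and values
// here are made up for illustration.
#[cfg(test)]
mod ctry_sketch {
use crate::Cancelable;
fn first_char(s: &str) -> Cancelable<Option<char>> {
let c = ctry!(s.chars().next()); // `None` short-circuits to `Ok(None)`
Ok(Some(c))
}
#[test]
fn short_circuits_on_none() {
assert_eq!(first_char("ra").unwrap(), Some('r'));
assert_eq!(first_char("").unwrap(), None);
}
}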
mod completion;
mod db;
mod goto_defenition;
mod imp;
pub mod mock_analysis;
mod runnables;
mod symbol_index;
mod extend_selection;
mod hover;
mod call_info;
mod syntax_highlighting;
use std::{fmt, sync::Arc};
use ra_syntax::{SmolStr, SourceFile, TreePtr, SyntaxKind, TextRange, TextUnit};
use ra_text_edit::TextEdit;
use ra_db::{SyntaxDatabase, FilesDatabase, LocalSyntaxPtr};
use rayon::prelude::*;
use relative_path::RelativePathBuf;
use rustc_hash::FxHashMap;
use salsa::ParallelDatabase;
use crate::{
symbol_index::{FileSymbol, SymbolIndex},
db::LineIndexDatabase,
};
pub use crate::{
completion::{CompletionItem, CompletionItemKind, InsertText},
runnables::{Runnable, RunnableKind},
};
pub use ra_ide_api_light::{
Fold, FoldKind, HighlightedRange, Severity, StructureNode,
LineIndex, LineCol, translate_offset_with_edit,
};
pub use ra_db::{
Cancelable, Canceled, CrateGraph, CrateId, FileId, FilePosition, FileRange, SourceRootId
};
#[derive(Default)]
pub struct AnalysisChange {
new_roots: Vec<(SourceRootId, bool)>,
roots_changed: FxHashMap<SourceRootId, RootChange>,
files_changed: Vec<(FileId, Arc<String>)>,
libraries_added: Vec<LibraryData>,
crate_graph: Option<CrateGraph>,
}
#[derive(Default)]
struct RootChange {
added: Vec<AddFile>,
removed: Vec<RemoveFile>,
}
#[derive(Debug)]
struct AddFile {
file_id: FileId,
path: RelativePathBuf,
text: Arc<String>,
}
#[derive(Debug)]
struct RemoveFile {
file_id: FileId,
path: RelativePathBuf,
}
impl fmt::Debug for AnalysisChange {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
let mut d = fmt.debug_struct("AnalysisChange");
if !self.new_roots.is_empty() {
d.field("new_roots", &self.new_roots);
}
if !self.roots_changed.is_empty() {
d.field("roots_changed", &self.roots_changed);
}
if !self.files_changed.is_empty() {
d.field("files_changed", &self.files_changed.len());
}
if !self.libraries_added.is_empty() {
d.field("libraries_added", &self.libraries_added.len());
}
if self.crate_graph.is_some() {
d.field("crate_graph", &self.crate_graph);
}
d.finish()
}
}
impl fmt::Debug for RootChange {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
fmt.debug_struct("AnalysisChange")
.field("added", &self.added.len())
.field("removed", &self.removed.len())
.finish()
}
}
impl AnalysisChange {
pub fn new() -> AnalysisChange {
AnalysisChange::default()
}
pub fn add_root(&mut self, root_id: SourceRootId, is_local: bool) {
self.new_roots.push((root_id, is_local));
}
pub fn add_file(
&mut self,
root_id: SourceRootId,
file_id: FileId,
path: RelativePathBuf,
text: Arc<String>,
) {
let file = AddFile {
file_id,
path,
text,
};
self.roots_changed
.entry(root_id)
.or_default()
.added
.push(file);
}
pub fn change_file(&mut self, file_id: FileId, new_text: Arc<String>) {
self.files_changed.push((file_id, new_text))
}
pub fn remove_file(&mut self, root_id: SourceRootId, file_id: FileId, path: RelativePathBuf) {
let file = RemoveFile { file_id, path };
self.roots_changed
.entry(root_id)
.or_default()
.removed
.push(file);
}
pub fn add_library(&mut self, data: LibraryData) {
self.libraries_added.push(data)
}
pub fn set_crate_graph(&mut self, graph: CrateGraph) {
self.crate_graph = Some(graph);
}
}
#[derive(Debug)]
pub struct SourceChange {
pub label: String,
pub source_file_edits: Vec<SourceFileEdit>,
pub file_system_edits: Vec<FileSystemEdit>,
pub cursor_position: Option<FilePosition>,
}
#[derive(Debug)]
pub struct SourceFileEdit {
pub file_id: FileId,
pub edit: TextEdit,
}
#[derive(Debug)]
pub enum FileSystemEdit {
CreateFile {
source_root: SourceRootId,
path: RelativePathBuf,
},
MoveFile {
src: FileId,
dst_source_root: SourceRootId,
dst_path: RelativePathBuf,
},
}
#[derive(Debug)]
pub struct Diagnostic {
pub message: String,
pub range: TextRange,
pub fix: Option<SourceChange>,
pub severity: Severity,
}
#[derive(Debug)]
pub struct Query {
query: String,
lowercased: String,
only_types: bool,
libs: bool,
exact: bool,
limit: usize,
}
impl Query {
pub fn new(query: String) -> Query {
let lowercased = query.to_lowercase();
Query {
query,
lowercased,
only_types: false,
libs: false,
exact: false,
limit: usize::max_value(),
}
}
pub fn only_types(&mut self) {
self.only_types = true;
}
pub fn libs(&mut self) {
self.libs = true;
}
pub fn exact(&mut self) {
self.exact = true;
}
pub fn limit(&mut self, limit: usize) {
self.limit = limit
}
}
/// `NavigationTarget` represents an element in the editor's UI which you can
/// click on to navigate to a particular piece of code.
///
/// Typically, a `NavigationTarget` corresponds to some element in the source
/// code, like a function or a struct, but this is not strictly required.
#[derive(Debug, Clone)]
pub struct NavigationTarget {
file_id: FileId,
name: SmolStr,
kind: SyntaxKind,
range: TextRange,
// Should be DefId ideally
ptr: Option<LocalSyntaxPtr>,
}
impl NavigationTarget {
fn from_symbol(symbol: FileSymbol) -> NavigationTarget {
NavigationTarget {
file_id: symbol.file_id,
name: symbol.name.clone(),
kind: symbol.ptr.kind(),
range: symbol.ptr.range(),
ptr: Some(symbol.ptr.clone()),
}
}
pub fn name(&self) -> &SmolStr {
&self.name
}
pub fn kind(&self) -> SyntaxKind {
self.kind
}
pub fn file_id(&self) -> FileId {
self.file_id
}
pub fn range(&self) -> TextRange {
self.range
}
}
#[derive(Debug)]
pub struct RangeInfo<T> {
pub range: TextRange,
pub info: T,
}
impl<T> RangeInfo<T> {
fn new(range: TextRange, info: T) -> RangeInfo<T> {
RangeInfo { range, info }
}
}
#[derive(Debug)]
pub struct CallInfo {
pub label: String,
pub doc: Option<String>,
pub parameters: Vec<String>,
pub active_parameter: Option<usize>,
}
/// `AnalysisHost` stores the current state of the world.
#[derive(Debug, Default)]
pub struct AnalysisHost {
db: db::RootDatabase,
}
impl AnalysisHost {
/// Returns a snapshot of the current state, which you can query for
/// semantic information.
pub fn analysis(&self) -> Analysis {
Analysis {
db: self.db.snapshot(),
}
}
/// Applies changes to the current state of the world. If there are
/// outstanding snapshots, they will be canceled.
pub fn apply_change(&mut self, change: AnalysisChange) {
self.db.apply_change(change)
}
}
/// Analysis is a snapshot of a world state at a moment in time. It is the main
/// entry point for asking semantic information about the world. When the world
/// state is advanced using the `AnalysisHost::apply_change` method, all
/// existing `Analysis` snapshots are canceled (most methods return
/// `Err(Canceled)`).
#[derive(Debug)]
pub struct Analysis {
db: salsa::Snapshot<db::RootDatabase>,
}
impl Analysis {
/// Gets the text of the source file.
pub fn file_text(&self, file_id: FileId) -> Arc<String> {
self.db.file_text(file_id)
}
/// Gets the syntax tree of the file.
pub fn file_syntax(&self, file_id: FileId) -> TreePtr<SourceFile> {
self.db.source_file(file_id).clone()
}
/// Gets the file's `LineIndex`: data structure to convert between absolute
/// offsets and line/column representation.
pub fn file_line_index(&self, file_id: FileId) -> Arc<LineIndex> {
self.db.line_index(file_id)
}
/// Selects the next syntactic node encompassing the range.
pub fn extend_selection(&self, frange: FileRange) -> TextRange {
extend_selection::extend_selection(&self.db, frange)
}
/// Returns the position of the matching brace (all types of braces are
/// supported).
pub fn matching_brace(&self, file: &SourceFile, offset: TextUnit) -> Option<TextUnit> {
ra_ide_api_light::matching_brace(file, offset)
}
/// Returns a syntax tree represented as `String`, for debug purposes.
// FIXME: use a better name here.
pub fn syntax_tree(&self, file_id: FileId) -> String {
let file = self.db.source_file(file_id);
ra_ide_api_light::syntax_tree(&file)
}
/// Returns an edit to remove all newlines in the range, cleaning up minor
/// stuff like trailing commas.
pub fn join_lines(&self, frange: FileRange) -> SourceChange {
let file = self.db.source_file(frange.file_id);
SourceChange::from_local_edit(
frange.file_id,
ra_ide_api_light::join_lines(&file, frange.range),
)
}
/// Returns an edit which should be applied when opening a new line, fixing
/// up minor stuff like continuing the comment.
pub fn on_enter(&self, position: FilePosition) -> Option<SourceChange> {
let file = self.db.source_file(position.file_id);
let edit = ra_ide_api_light::on_enter(&file, position.offset)?;
Some(SourceChange::from_local_edit(position.file_id, edit))
}
/// Returns an edit which should be applied after `=` was typed. Primarily,
/// this works when adding `let =`.
// FIXME: use a snippet completion instead of this hack here.
pub fn on_eq_typed(&self, position: FilePosition) -> Option<SourceChange> {
let file = self.db.source_file(position.file_id);
let edit = ra_ide_api_light::on_eq_typed(&file, position.offset)?;
Some(SourceChange::from_local_edit(position.file_id, edit))
}
/// Returns an edit which should be applied when a dot ('.') is typed on a blank line, indenting the line appropriately.
pub fn on_dot_typed(&self, position: FilePosition) -> Option<SourceChange> {
let file = self.db.source_file(position.file_id);
let edit = ra_ide_api_light::on_dot_typed(&file, position.offset)?;
Some(SourceChange::from_local_edit(position.file_id, edit))
}
/// Returns a tree representation of symbols in the file. Useful to draw a
/// file outline.
pub fn file_structure(&self, file_id: FileId) -> Vec<StructureNode> {
let file = self.db.source_file(file_id);
ra_ide_api_light::file_structure(&file)
}
/// Returns the set of folding ranges.
pub fn folding_ranges(&self, file_id: FileId) -> Vec<Fold> {
let file = self.db.source_file(file_id);
ra_ide_api_light::folding_ranges(&file)
}
/// Fuzzy searches for a symbol.
pub fn symbol_search(&self, query: Query) -> Cancelable<Vec<NavigationTarget>> {
let res = symbol_index::world_symbols(&*self.db, query)?
.into_iter()
.map(NavigationTarget::from_symbol)
.collect();
Ok(res)
}
pub fn goto_defenition(
&self,
position: FilePosition,
) -> Cancelable<Option<Vec<NavigationTarget>>> {
goto_defenition::goto_defenition(&*self.db, position)
}
/// Finds all usages of the reference at point.
pub fn find_all_refs(&self, position: FilePosition) -> Cancelable<Vec<(FileId, TextRange)>> {
self.db.find_all_refs(position)
}
/// Returns a short text describing the element at the position.
pub fn hover(&self, position: FilePosition) -> Cancelable<Option<RangeInfo<String>>> {
hover::hover(&*self.db, position)
}
/// Computes parameter information for the given call expression.
pub fn call_info(&self, position: FilePosition) -> Cancelable<Option<CallInfo>> {
call_info::call_info(&*self.db, position)
}
/// Returns a `mod name;` declaration which created the current module.
pub fn parent_module(&self, position: FilePosition) -> Cancelable<Vec<NavigationTarget>> {
self.db.parent_module(position)
}
/// Returns the crates this file belongs to.
pub fn crate_for(&self, file_id: FileId) -> Cancelable<Vec<CrateId>> {
self.db.crate_for(file_id)
}
/// Returns the root file of the given crate.
pub fn crate_root(&self, crate_id: CrateId) -> Cancelable<FileId> {
Ok(self.db.crate_graph().crate_root(crate_id))
}
/// Returns the set of possible targets to run for the current file.
pub fn runnables(&self, file_id: FileId) -> Cancelable<Vec<Runnable>> {
runnables::runnables(&*self.db, file_id)
}
/// Computes syntax highlighting for the given file.
pub fn highlight(&self, file_id: FileId) -> Cancelable<Vec<HighlightedRange>> {
syntax_highlighting::highlight(&*self.db, file_id)
}
/// Computes completions at the given position.
pub fn completions(&self, position: FilePosition) -> Cancelable<Option<Vec<CompletionItem>>> {
let completions = completion::completions(&self.db, position)?;
Ok(completions.map(|it| it.into()))
}
/// Computes assists (aka code actions, aka intentions) for the given
/// position.
pub fn assists(&self, frange: FileRange) -> Cancelable<Vec<SourceChange>> {
Ok(self.db.assists(frange))
}
/// Computes the set of diagnostics for the given file.
pub fn diagnostics(&self, file_id: FileId) -> Cancelable<Vec<Diagnostic>> {
self.db.diagnostics(file_id)
}
/// Computes the type of the expression at the given position.
pub fn type_of(&self, frange: FileRange) -> Cancelable<Option<String>> {
hover::type_of(&*self.db, frange)
}
/// Returns the edit required to rename reference at the position to the new
/// name.
pub fn rename(
&self,
position: FilePosition,
new_name: &str,
) -> Cancelable<Vec<SourceFileEdit>> {
self.db.rename(position, new_name)
}
}
pub struct LibraryData {
root_id: SourceRootId,
root_change: RootChange,
symbol_index: SymbolIndex,
}
impl fmt::Debug for LibraryData {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("LibraryData")
.field("root_id", &self.root_id)
.field("root_change", &self.root_change)
.field("n_symbols", &self.symbol_index.len())
.finish()
}
}
impl LibraryData {
pub fn prepare(
root_id: SourceRootId,
files: Vec<(FileId, RelativePathBuf, Arc<String>)>,
) -> LibraryData {
let symbol_index = SymbolIndex::for_files(files.par_iter().map(|(file_id, _, text)| {
let file = SourceFile::parse(text);
(*file_id, file)
}));
let mut root_change = RootChange::default();
root_change.added = files
.into_iter()
.map(|(file_id, path, text)| AddFile {
file_id,
path,
text,
})
.collect();
LibraryData {
root_id,
root_change,
symbol_index,
}
}
}
#[test]
fn analysis_is_send() {
fn is_send<T: Send>() {}
is_send::<Analysis>();
}
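// An end-to-end sketch of the API above, leaning on the `mock_analysis`
// helpers (this mirrors the crate's own tests; the snippet text is made up).
#[cfg(test)]
mod api_sketch {
use crate::mock_analysis::single_file_with_position;
#[test]
fn hover_through_the_public_api() {
let (analysis, position) =
single_file_with_position("fn bar() -> u32 { 92 } fn main() { let x = bar()<|>; }");
let hover = analysis.hover(position).unwrap().unwrap();
assert_eq!(hover.info, "u32");
}
}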

View file

@@ -0,0 +1,135 @@
use std::sync::Arc;
use relative_path::RelativePathBuf;
use test_utils::{extract_offset, extract_range, parse_fixture, CURSOR_MARKER};
use ra_db::mock::FileMap;
use crate::{Analysis, AnalysisChange, AnalysisHost, CrateGraph, FileId, FilePosition, FileRange, SourceRootId};
/// Mock analysis is used in tests to bootstrap an AnalysisHost/Analysis
/// from a set of in-memory files.
#[derive(Debug, Default)]
pub struct MockAnalysis {
files: Vec<(String, String)>,
}
impl MockAnalysis {
pub fn new() -> MockAnalysis {
MockAnalysis::default()
}
/// Creates `MockAnalysis` using fixture data in the following format:
///
/// ```notrust
/// //- /main.rs
/// mod foo;
/// fn main() {}
///
/// //- /foo.rs
/// struct Baz;
/// ```
pub fn with_files(fixture: &str) -> MockAnalysis {
let mut res = MockAnalysis::new();
for entry in parse_fixture(fixture) {
res.add_file(&entry.meta, &entry.text);
}
res
}
/// Same as `with_files`, but requires that a single file contains a `<|>` marker,
/// whose position is also returned.
pub fn with_files_and_position(fixture: &str) -> (MockAnalysis, FilePosition) {
let mut position = None;
let mut res = MockAnalysis::new();
for entry in parse_fixture(fixture) {
if entry.text.contains(CURSOR_MARKER) {
assert!(
position.is_none(),
"only one marker (<|>) per fixture is allowed"
);
position = Some(res.add_file_with_position(&entry.meta, &entry.text));
} else {
res.add_file(&entry.meta, &entry.text);
}
}
let position = position.expect("expected a marker (<|>)");
(res, position)
}
pub fn add_file(&mut self, path: &str, text: &str) -> FileId {
let file_id = FileId((self.files.len() + 1) as u32);
self.files.push((path.to_string(), text.to_string()));
file_id
}
pub fn add_file_with_position(&mut self, path: &str, text: &str) -> FilePosition {
let (offset, text) = extract_offset(text);
let file_id = FileId((self.files.len() + 1) as u32);
self.files.push((path.to_string(), text.to_string()));
FilePosition { file_id, offset }
}
pub fn add_file_with_range(&mut self, path: &str, text: &str) -> FileRange {
let (range, text) = extract_range(text);
let file_id = FileId((self.files.len() + 1) as u32);
self.files.push((path.to_string(), text.to_string()));
FileRange { file_id, range }
}
pub fn id_of(&self, path: &str) -> FileId {
let (idx, _) = self
.files
.iter()
.enumerate()
.find(|(_, (p, _text))| path == p)
.expect("no file in this mock");
FileId(idx as u32 + 1)
}
pub fn analysis_host(self) -> AnalysisHost {
let mut host = AnalysisHost::default();
let mut file_map = FileMap::default();
let source_root = SourceRootId(0);
let mut change = AnalysisChange::new();
change.add_root(source_root, true);
let mut crate_graph = CrateGraph::default();
for (path, contents) in self.files.into_iter() {
assert!(path.starts_with('/'));
let path = RelativePathBuf::from_path(&path[1..]).unwrap();
let file_id = file_map.add(path.clone());
if path == "/lib.rs" || path == "/main.rs" {
crate_graph.add_crate_root(file_id);
}
change.add_file(source_root, file_id, path, Arc::new(contents));
}
change.set_crate_graph(crate_graph);
// change.set_file_resolver(Arc::new(file_map));
host.apply_change(change);
host
}
pub fn analysis(self) -> Analysis {
self.analysis_host().analysis()
}
}
/// Creates analysis from a multi-file fixture, returns the position marked with <|>.
pub fn analysis_and_position(fixture: &str) -> (Analysis, FilePosition) {
let (mock, position) = MockAnalysis::with_files_and_position(fixture);
(mock.analysis(), position)
}
/// Creates analysis for a single file.
pub fn single_file(code: &str) -> (Analysis, FileId) {
let mut mock = MockAnalysis::new();
let file_id = mock.add_file("/main.rs", code);
(mock.analysis(), file_id)
}
/// Creates analysis for a single file, returns position marked with <|>.
pub fn single_file_with_position(code: &str) -> (Analysis, FilePosition) {
let mut mock = MockAnalysis::new();
let pos = mock.add_file_with_position("/main.rs", code);
(mock.analysis(), pos)
}
/// Creates analysis for a single file, returns range marked with a pair of <|>.
pub fn single_file_with_range(code: &str) -> (Analysis, FileRange) {
let mut mock = MockAnalysis::new();
let pos = mock.add_file_with_range("/main.rs", code);
(mock.analysis(), pos)
}

View file

@@ -0,0 +1,89 @@
use itertools::Itertools;
use ra_syntax::{
TextRange, SyntaxNode,
ast::{self, AstNode, NameOwner, ModuleItemOwner},
};
use ra_db::{Cancelable, SyntaxDatabase};
use crate::{db::RootDatabase, FileId};
#[derive(Debug)]
pub struct Runnable {
pub range: TextRange,
pub kind: RunnableKind,
}
#[derive(Debug)]
pub enum RunnableKind {
Test { name: String },
TestMod { path: String },
Bin,
}
pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Cancelable<Vec<Runnable>> {
let source_file = db.source_file(file_id);
let res = source_file
.syntax()
.descendants()
.filter_map(|i| runnable(db, file_id, i))
.collect();
Ok(res)
}
fn runnable(db: &RootDatabase, file_id: FileId, item: &SyntaxNode) -> Option<Runnable> {
if let Some(fn_def) = ast::FnDef::cast(item) {
runnable_fn(fn_def)
} else if let Some(m) = ast::Module::cast(item) {
runnable_mod(db, file_id, m)
} else {
None
}
}
fn runnable_fn(fn_def: &ast::FnDef) -> Option<Runnable> {
let name = fn_def.name()?.text();
let kind = if name == "main" {
RunnableKind::Bin
} else if fn_def.has_atom_attr("test") {
RunnableKind::Test {
name: name.to_string(),
}
} else {
return None;
};
Some(Runnable {
range: fn_def.syntax().range(),
kind,
})
}
fn runnable_mod(db: &RootDatabase, file_id: FileId, module: &ast::Module) -> Option<Runnable> {
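// A module is runnable only if it directly contains at least one `#[test]`
// function; nested modules are handled by their own `runnable` calls during
// the descendants walk in `runnables`.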
let has_test_function = module
.item_list()?
.items()
.filter_map(|it| match it.kind() {
ast::ModuleItemKind::FnDef(it) => Some(it),
_ => None,
})
.any(|f| f.has_atom_attr("test"));
if !has_test_function {
return None;
}
let range = module.syntax().range();
let module =
hir::source_binder::module_from_child_node(db, file_id, module.syntax()).ok()??;
// FIXME: thread cancellation instead of `.ok`ing
let path = module
.path_to_root(db)
.ok()?
.into_iter()
.rev()
.filter_map(|it| it.name(db).ok())
.filter_map(|it| it)
.join("::");
Some(Runnable {
range,
kind: RunnableKind::TestMod { path },
})
}

View file

@@ -0,0 +1,222 @@
//! This module handles fuzzy-searching of functions, structs and other symbols
//! by name across the whole workspace and dependencies.
//!
//! It works by building an incrementally-updated text-search index of all
//! symbols. The backbone of the index is the **awesome** `fst` crate by
//! @BurntSushi.
//!
//! In a nutshell, you give a set of strings to the `fst`, and it builds a
//! finite state machine describing this set of strings. The strings which
//! could fuzzy-match a pattern can also be described by a finite state machine.
//! What is freakingly cool is that you can now traverse both state machines in
//! lock-step to enumerate the strings which are both in the input set and
//! fuzz-match the query. Or, more formally, given two langauges described by
//! fsts, one can build an product fst which describes the intersection of the
//! languages.
//!
//! `fst` does not support cheap updating of the index, but it supports unioning
//! of state machines. So, to account for changing source code, we build an fst
//! for each library (which is assumed to never change) and an fst for each rust
//! file in the current workspace, and run a query against the union of all
//! those fsts.
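// A self-contained sketch of that lock-step traversal, using the same
// `fst` 0.3 API as `Query::search` below (made-up keys; note that
// `Map::from_iter` requires keys in sorted order).
#[cfg(test)]
mod fst_sketch {
use fst::{IntoStreamer, Streamer, automaton::Subsequence};
#[test]
fn subsequence_automaton_intersects_the_set() {
let map = fst::Map::from_iter(vec![("filesymbol", 0u64), ("symbolindex", 1u64)]).unwrap();
// "fsy" is a subsequence of "filesymbol" but not of "symbolindex".
let mut stream = map.search(Subsequence::new("fsy")).into_stream();
let mut hits = Vec::new();
while let Some((key, value)) = stream.next() {
hits.push((String::from_utf8(key.to_vec()).unwrap(), value));
}
assert_eq!(hits, vec![("filesymbol".to_string(), 0)]);
}
}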
use std::{
cmp::Ordering,
hash::{Hash, Hasher},
sync::Arc,
};
use fst::{self, Streamer};
use ra_syntax::{
SyntaxNode, SourceFile, SmolStr, TreePtr, AstNode,
algo::{visit::{visitor, Visitor}, find_covering_node},
SyntaxKind::{self, *},
ast::{self, NameOwner},
};
use ra_db::{SourceRootId, FilesDatabase, LocalSyntaxPtr};
use salsa::ParallelDatabase;
use rayon::prelude::*;
use crate::{
Cancelable, FileId, Query,
db::RootDatabase,
};
salsa::query_group! {
pub(crate) trait SymbolsDatabase: hir::db::HirDatabase {
fn file_symbols(file_id: FileId) -> Cancelable<Arc<SymbolIndex>> {
type FileSymbolsQuery;
}
fn library_symbols(id: SourceRootId) -> Arc<SymbolIndex> {
type LibrarySymbolsQuery;
storage input;
}
}
}
fn file_symbols(db: &impl SymbolsDatabase, file_id: FileId) -> Cancelable<Arc<SymbolIndex>> {
db.check_canceled()?;
let source_file = db.source_file(file_id);
let mut symbols = source_file
.syntax()
.descendants()
.filter_map(to_symbol)
.map(move |(name, ptr)| FileSymbol { name, ptr, file_id })
.collect::<Vec<_>>();
for (name, text_range) in hir::source_binder::macro_symbols(db, file_id)? {
let node = find_covering_node(source_file.syntax(), text_range);
let ptr = LocalSyntaxPtr::new(node);
symbols.push(FileSymbol { file_id, name, ptr })
}
Ok(Arc::new(SymbolIndex::new(symbols)))
}
pub(crate) fn world_symbols(db: &RootDatabase, query: Query) -> Cancelable<Vec<FileSymbol>> {
/// Need to wrap Snapshot to provide `Clone` impl for `map_with`
struct Snap(salsa::Snapshot<RootDatabase>);
impl Clone for Snap {
fn clone(&self) -> Snap {
Snap(self.0.snapshot())
}
}
let buf: Vec<Arc<SymbolIndex>> = if query.libs {
let snap = Snap(db.snapshot());
db.library_roots()
.par_iter()
.map_with(snap, |db, &lib_id| db.0.library_symbols(lib_id))
.collect()
} else {
let mut files = Vec::new();
for &root in db.local_roots().iter() {
let sr = db.source_root(root);
files.extend(sr.files.values().map(|&it| it))
}
let snap = Snap(db.snapshot());
files
.par_iter()
.map_with(snap, |db, &file_id| db.0.file_symbols(file_id))
.filter_map(|it| it.ok())
.collect()
};
Ok(query.search(&buf))
}
#[derive(Default, Debug)]
pub(crate) struct SymbolIndex {
symbols: Vec<FileSymbol>,
map: fst::Map,
}
impl PartialEq for SymbolIndex {
fn eq(&self, other: &SymbolIndex) -> bool {
self.symbols == other.symbols
}
}
impl Eq for SymbolIndex {}
impl Hash for SymbolIndex {
fn hash<H: Hasher>(&self, hasher: &mut H) {
self.symbols.hash(hasher)
}
}
impl SymbolIndex {
fn new(mut symbols: Vec<FileSymbol>) -> SymbolIndex {
fn cmp(s1: &FileSymbol, s2: &FileSymbol) -> Ordering {
unicase::Ascii::new(s1.name.as_str()).cmp(&unicase::Ascii::new(s2.name.as_str()))
}
symbols.par_sort_by(cmp);
symbols.dedup_by(|s1, s2| cmp(s1, s2) == Ordering::Equal);
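// Lowercasing preserves the case-insensitive order established above, so the
// keys reach `fst::Map::from_iter` sorted and deduplicated, as it requires
// (this holds for the ASCII identifiers indexed here).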
let names = symbols.iter().map(|it| it.name.as_str().to_lowercase());
let map = fst::Map::from_iter(names.into_iter().zip(0u64..)).unwrap();
SymbolIndex { symbols, map }
}
pub(crate) fn len(&self) -> usize {
self.symbols.len()
}
pub(crate) fn for_files(
files: impl ParallelIterator<Item = (FileId, TreePtr<SourceFile>)>,
) -> SymbolIndex {
let symbols = files
.flat_map(|(file_id, file)| {
file.syntax()
.descendants()
.filter_map(to_symbol)
.map(move |(name, ptr)| FileSymbol { name, ptr, file_id })
.collect::<Vec<_>>()
})
.collect::<Vec<_>>();
SymbolIndex::new(symbols)
}
}
impl Query {
pub(crate) fn search(self, indices: &[Arc<SymbolIndex>]) -> Vec<FileSymbol> {
let mut op = fst::map::OpBuilder::new();
for file_symbols in indices.iter() {
let automaton = fst::automaton::Subsequence::new(&self.lowercased);
op = op.add(file_symbols.map.search(automaton))
}
let mut stream = op.union();
let mut res = Vec::new();
while let Some((_, indexed_values)) = stream.next() {
if res.len() >= self.limit {
break;
}
for indexed_value in indexed_values {
let file_symbols = &indices[indexed_value.index];
let idx = indexed_value.value as usize;
let symbol = &file_symbols.symbols[idx];
if self.only_types && !is_type(symbol.ptr.kind()) {
continue;
}
if self.exact && symbol.name != self.query {
continue;
}
res.push(symbol.clone());
}
}
res
}
}
fn is_type(kind: SyntaxKind) -> bool {
match kind {
STRUCT_DEF | ENUM_DEF | TRAIT_DEF | TYPE_DEF => true,
_ => false,
}
}
/// The actual data that is stored in the index. It should be as compact as
/// possible.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub(crate) struct FileSymbol {
pub(crate) file_id: FileId,
pub(crate) name: SmolStr,
pub(crate) ptr: LocalSyntaxPtr,
}
fn to_symbol(node: &SyntaxNode) -> Option<(SmolStr, LocalSyntaxPtr)> {
fn decl<N: NameOwner>(node: &N) -> Option<(SmolStr, LocalSyntaxPtr)> {
let name = node.name()?.text().clone();
let ptr = LocalSyntaxPtr::new(node.syntax());
Some((name, ptr))
}
visitor()
.visit(decl::<ast::FnDef>)
.visit(decl::<ast::StructDef>)
.visit(decl::<ast::EnumDef>)
.visit(decl::<ast::TraitDef>)
.visit(decl::<ast::Module>)
.visit(decl::<ast::TypeDef>)
.visit(decl::<ast::ConstDef>)
.visit(decl::<ast::StaticDef>)
.accept(node)?
}

View file

@@ -0,0 +1,92 @@
use ra_syntax::{ast, AstNode,};
use ra_db::SyntaxDatabase;
use crate::{
FileId, Cancelable, HighlightedRange,
db::RootDatabase,
};
pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Cancelable<Vec<HighlightedRange>> {
let source_file = db.source_file(file_id);
let mut res = ra_ide_api_light::highlight(source_file.syntax());
for macro_call in source_file
.syntax()
.descendants()
.filter_map(ast::MacroCall::cast)
{
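// Highlight the macro expansion, then map each highlighted range back
// through the expansion and shift it by the call-site offset, so the
// decorations land on the original source.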
if let Some((off, exp)) = hir::MacroDef::ast_expand(macro_call) {
let mapped_ranges = ra_ide_api_light::highlight(&exp.syntax())
.into_iter()
.filter_map(|r| {
let mapped_range = exp.map_range_back(r.range)?;
let res = HighlightedRange {
range: mapped_range + off,
tag: r.tag,
};
Some(res)
});
res.extend(mapped_ranges);
}
}
Ok(res)
}
#[cfg(test)]
mod tests {
use crate::mock_analysis::single_file;
use test_utils::assert_eq_dbg;
#[test]
fn highlights_code_inside_macros() {
let (analysis, file_id) = single_file(
"
fn main() {
ctry!({ let x = 92; x});
vec![{ let x = 92; x}];
}
",
);
let highlights = analysis.highlight(file_id).unwrap();
assert_eq_dbg(
r#"[HighlightedRange { range: [13; 15), tag: "keyword" },
HighlightedRange { range: [16; 20), tag: "function" },
HighlightedRange { range: [41; 46), tag: "macro" },
HighlightedRange { range: [49; 52), tag: "keyword" },
HighlightedRange { range: [57; 59), tag: "literal" },
HighlightedRange { range: [82; 86), tag: "macro" },
HighlightedRange { range: [89; 92), tag: "keyword" },
HighlightedRange { range: [97; 99), tag: "literal" },
HighlightedRange { range: [49; 52), tag: "keyword" },
HighlightedRange { range: [53; 54), tag: "function" },
HighlightedRange { range: [57; 59), tag: "literal" },
HighlightedRange { range: [61; 62), tag: "text" },
HighlightedRange { range: [89; 92), tag: "keyword" },
HighlightedRange { range: [93; 94), tag: "function" },
HighlightedRange { range: [97; 99), tag: "literal" },
HighlightedRange { range: [101; 102), tag: "text" }]"#,
&highlights,
)
}
// FIXME: this test is not really necessary: artifact of the initial hacky
// macros implementation.
#[test]
fn highlight_query_group_macro() {
let (analysis, file_id) = single_file(
"
salsa::query_group! {
pub trait HirDatabase: SyntaxDatabase {}
}
",
);
let highlights = analysis.highlight(file_id).unwrap();
assert_eq_dbg(
r#"[HighlightedRange { range: [20; 32), tag: "macro" },
HighlightedRange { range: [13; 18), tag: "text" },
HighlightedRange { range: [51; 54), tag: "keyword" },
HighlightedRange { range: [55; 60), tag: "keyword" },
HighlightedRange { range: [61; 72), tag: "function" }]"#,
&highlights,
)
}
}

View file

@@ -0,0 +1,249 @@
mod runnables;
use ra_syntax::TextRange;
use test_utils::{assert_eq_dbg, assert_eq_text};
use ra_ide_api::{
mock_analysis::{analysis_and_position, single_file, single_file_with_position, MockAnalysis},
AnalysisChange, CrateGraph, FileId, Query
};
#[test]
fn test_unresolved_module_diagnostic() {
let (analysis, file_id) = single_file("mod foo;");
let diagnostics = analysis.diagnostics(file_id).unwrap();
assert_eq_dbg(
r#"[Diagnostic {
message: "unresolved module",
range: [4; 7),
fix: Some(SourceChange {
label: "create module",
source_file_edits: [],
file_system_edits: [CreateFile { source_root: SourceRootId(0), path: "foo.rs" }],
cursor_position: None }),
severity: Error }]"#,
&diagnostics,
);
}
// FIXME: move this test to hir
#[test]
fn test_unresolved_module_diagnostic_no_diag_for_inline_mode() {
let (analysis, file_id) = single_file("mod foo {}");
let diagnostics = analysis.diagnostics(file_id).unwrap();
assert_eq_dbg(r#"[]"#, &diagnostics);
}
#[test]
fn test_resolve_parent_module() {
let (analysis, pos) = analysis_and_position(
"
//- /lib.rs
mod foo;
//- /foo.rs
<|>// empty
",
);
let symbols = analysis.parent_module(pos).unwrap();
assert_eq_dbg(
r#"[NavigationTarget { file_id: FileId(1), name: "foo", kind: MODULE, range: [4; 7), ptr: None }]"#,
&symbols,
);
}
#[test]
fn test_resolve_parent_module_for_inline() {
let (analysis, pos) = analysis_and_position(
"
//- /lib.rs
mod foo {
mod bar {
mod baz { <|> }
}
}
",
);
let symbols = analysis.parent_module(pos).unwrap();
assert_eq_dbg(
r#"[NavigationTarget { file_id: FileId(1), name: "baz", kind: MODULE, range: [36; 39), ptr: None }]"#,
&symbols,
);
}
#[test]
fn test_resolve_crate_root() {
let mock = MockAnalysis::with_files(
"
//- /bar.rs
mod foo;
//- /bar/foo.rs
// empty <|>
",
);
let root_file = mock.id_of("/bar.rs");
let mod_file = mock.id_of("/bar/foo.rs");
let mut host = mock.analysis_host();
assert!(host.analysis().crate_for(mod_file).unwrap().is_empty());
let mut crate_graph = CrateGraph::default();
let crate_id = crate_graph.add_crate_root(root_file);
let mut change = AnalysisChange::new();
change.set_crate_graph(crate_graph);
host.apply_change(change);
assert_eq!(host.analysis().crate_for(mod_file).unwrap(), vec![crate_id]);
}
fn get_all_refs(text: &str) -> Vec<(FileId, TextRange)> {
let (analysis, position) = single_file_with_position(text);
analysis.find_all_refs(position).unwrap()
}
#[test]
fn test_find_all_refs_for_local() {
let code = r#"
fn main() {
let mut i = 1;
let j = 1;
i = i<|> + j;
{
i = 0;
}
i = 5;
}"#;
let refs = get_all_refs(code);
assert_eq!(refs.len(), 5);
}
#[test]
fn test_find_all_refs_for_param_inside() {
let code = r#"
fn foo(i : u32) -> u32 {
i<|>
}"#;
let refs = get_all_refs(code);
assert_eq!(refs.len(), 2);
}
#[test]
fn test_find_all_refs_for_fn_param() {
let code = r#"
fn foo(i<|> : u32) -> u32 {
i
}"#;
let refs = get_all_refs(code);
assert_eq!(refs.len(), 2);
}
#[test]
fn test_rename_for_local() {
test_rename(
r#"
fn main() {
let mut i = 1;
let j = 1;
i = i<|> + j;
{
i = 0;
}
i = 5;
}"#,
"k",
r#"
fn main() {
let mut k = 1;
let j = 1;
k = k + j;
{
k = 0;
}
k = 5;
}"#,
);
}
#[test]
fn test_rename_for_param_inside() {
test_rename(
r#"
fn foo(i : u32) -> u32 {
i<|>
}"#,
"j",
r#"
fn foo(j : u32) -> u32 {
j
}"#,
);
}
#[test]
fn test_rename_refs_for_fn_param() {
test_rename(
r#"
fn foo(i<|> : u32) -> u32 {
i
}"#,
"new_name",
r#"
fn foo(new_name : u32) -> u32 {
new_name
}"#,
);
}
#[test]
fn test_rename_for_mut_param() {
test_rename(
r#"
fn foo(mut i<|> : u32) -> u32 {
i
}"#,
"new_name",
r#"
fn foo(mut new_name : u32) -> u32 {
new_name
}"#,
);
}
fn test_rename(text: &str, new_name: &str, expected: &str) {
let (analysis, position) = single_file_with_position(text);
let edits = analysis.rename(position, new_name).unwrap();
let mut text_edit_builder = ra_text_edit::TextEditBuilder::default();
let mut file_id: Option<FileId> = None;
for edit in edits {
file_id = Some(edit.file_id);
for atom in edit.edit.as_atoms() {
text_edit_builder.replace(atom.delete, atom.insert.clone());
}
}
let result = text_edit_builder
.finish()
.apply(&*analysis.file_text(file_id.unwrap()));
assert_eq_text!(expected, &*result);
}
#[test]
fn world_symbols_include_stuff_from_macros() {
let (analysis, _) = single_file(
"
salsa::query_group! {
pub trait HirDatabase: SyntaxDatabase {}
}
",
);
let mut symbols = analysis.symbol_search(Query::new("Hir".into())).unwrap();
let s = symbols.pop().unwrap();
assert_eq!(s.name(), "HirDatabase");
assert_eq!(s.range(), TextRange::from_to(33.into(), 44.into()));
}

View file

@@ -0,0 +1,109 @@
use test_utils::assert_eq_dbg;
use ra_ide_api::mock_analysis::analysis_and_position;
#[test]
fn test_runnables() {
let (analysis, pos) = analysis_and_position(
r#"
//- /lib.rs
<|> //empty
fn main() {}
#[test]
fn test_foo() {}
#[test]
#[ignore]
fn test_foo() {}
"#,
);
let runnables = analysis.runnables(pos.file_id).unwrap();
assert_eq_dbg(
r#"[Runnable { range: [1; 21), kind: Bin },
Runnable { range: [22; 46), kind: Test { name: "test_foo" } },
Runnable { range: [47; 81), kind: Test { name: "test_foo" } }]"#,
&runnables,
)
}
#[test]
fn test_runnables_module() {
let (analysis, pos) = analysis_and_position(
r#"
//- /lib.rs
<|> //empty
mod test_mod {
#[test]
fn test_foo1() {}
}
"#,
);
let runnables = analysis.runnables(pos.file_id).unwrap();
assert_eq_dbg(
r#"[Runnable { range: [1; 59), kind: TestMod { path: "test_mod" } },
Runnable { range: [28; 57), kind: Test { name: "test_foo1" } }]"#,
&runnables,
)
}
#[test]
fn test_runnables_one_depth_layer_module() {
let (analysis, pos) = analysis_and_position(
r#"
//- /lib.rs
<|> //empty
mod foo {
mod test_mod {
#[test]
fn test_foo1() {}
}
}
"#,
);
let runnables = analysis.runnables(pos.file_id).unwrap();
assert_eq_dbg(
r#"[Runnable { range: [23; 85), kind: TestMod { path: "foo::test_mod" } },
Runnable { range: [46; 79), kind: Test { name: "test_foo1" } }]"#,
&runnables,
)
}
#[test]
fn test_runnables_multiple_depth_module() {
let (analysis, pos) = analysis_and_position(
r#"
//- /lib.rs
<|> //empty
mod foo {
mod bar {
mod test_mod {
#[test]
fn test_foo1() {}
}
}
}
"#,
);
let runnables = analysis.runnables(pos.file_id).unwrap();
assert_eq_dbg(
r#"[Runnable { range: [41; 115), kind: TestMod { path: "foo::bar::test_mod" } },
Runnable { range: [68; 105), kind: Test { name: "test_foo1" } }]"#,
&runnables,
)
}
#[test]
fn test_runnables_no_test_function_in_module() {
let (analysis, pos) = analysis_and_position(
r#"
//- /lib.rs
<|> //empty
mod test_mod {
fn foo1() {}
}
"#,
);
let runnables = analysis.runnables(pos.file_id).unwrap();
assert_eq_dbg(r#"[]"#, &runnables)
}

View file

@@ -29,7 +29,7 @@ parking_lot = "0.7.0"
thread_worker = { path = "../thread_worker" }
ra_syntax = { path = "../ra_syntax" }
ra_text_edit = { path = "../ra_text_edit" }
ra_analysis = { path = "../ra_analysis" }
ra_ide_api = { path = "../ra_ide_api" }
gen_lsp_server = { path = "../gen_lsp_server" }
ra_vfs = { path = "../ra_vfs" }

View file

@@ -4,7 +4,7 @@ use languageserver_types::{
TextDocumentItem, TextDocumentPositionParams, Url, VersionedTextDocumentIdentifier,
WorkspaceEdit,
};
use ra_analysis::{
use ra_ide_api::{
CompletionItem, CompletionItemKind, FileId, FilePosition, FileRange, FileSystemEdit,
InsertText, NavigationTarget, SourceChange, SourceFileEdit,
LineCol, LineIndex, translate_offset_with_edit

View file

@@ -10,7 +10,7 @@ use gen_lsp_server::{
handle_shutdown, ErrorCode, RawMessage, RawNotification, RawRequest, RawResponse,
};
use languageserver_types::NumberOrString;
use ra_analysis::{Canceled, FileId, LibraryData};
use ra_ide_api::{Canceled, FileId, LibraryData};
use ra_vfs::VfsTask;
use rayon;
use rustc_hash::FxHashSet;

View file

@@ -8,7 +8,7 @@ use languageserver_types::{
ParameterInformation, ParameterLabel, Position, PrepareRenameResponse, Range, RenameParams,
SignatureInformation, SymbolInformation, TextDocumentIdentifier, TextEdit, WorkspaceEdit,
};
use ra_analysis::{
use ra_ide_api::{
FileId, FilePosition, FileRange, FoldKind, Query, RunnableKind, Severity, SourceChange,
};
use ra_syntax::{TextUnit, AstNode};
@@ -736,7 +736,7 @@ fn highlight(world: &ServerWorld, file_id: FileId) -> Result<Vec<Decoration>> {
}
fn to_diagnostic_severity(severity: Severity) -> DiagnosticSeverity {
use ra_analysis::Severity::*;
use ra_ide_api::Severity::*;
match severity {
Error => DiagnosticSeverity::Error,

View file

@@ -1,4 +1,4 @@
use ra_analysis::FileId;
use ra_ide_api::FileId;
use rustc_hash::FxHashSet;
pub struct Subscriptions {

View file

@@ -1,10 +1,10 @@
use std::{
path::{PathBuf},
path::PathBuf,
sync::Arc,
};
use languageserver_types::Url;
use ra_analysis::{
use ra_ide_api::{
Analysis, AnalysisChange, AnalysisHost, CrateGraph, FileId, LibraryData,
SourceRootId
};
@@ -12,7 +12,7 @@ use ra_vfs::{Vfs, VfsChange, VfsFile, VfsRoot};
use rustc_hash::FxHashMap;
use relative_path::RelativePathBuf;
use parking_lot::RwLock;
use failure::{format_err};
use failure::format_err;
use crate::{
project_model::{CargoWorkspace, TargetKind},