Merge branch 'master' of https://github.com/rust-analyzer/rust-analyzer into feature/themes

Seivan Heidari 2019-11-15 16:30:21 +01:00
commit cb26df9506
40 changed files with 2630 additions and 2146 deletions

.gitattributes (vendored)

@ -1,2 +1,2 @@
* text=auto eol=lf
crates/ra_syntax/test_data/** -text eof=LF crates/ra_syntax/test_data/** -text eof=LF
crates/ra_ide_api/src/snapshots/** -text eof=LF

Cargo.lock (generated)

@ -1135,7 +1135,7 @@ dependencies = [
"ra_syntax 0.1.0", "ra_syntax 0.1.0",
"ra_tt 0.1.0", "ra_tt 0.1.0",
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"smallvec 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)", "smallvec 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"test_utils 0.1.0", "test_utils 0.1.0",
] ]
@ -1603,6 +1603,11 @@ dependencies = [
"maybe-uninit 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "maybe-uninit 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]]
name = "smallvec"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "smol_str" name = "smol_str"
version = "0.1.15" version = "0.1.15"
@ -2007,6 +2012,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum serde_yaml 0.8.11 (registry+https://github.com/rust-lang/crates.io-index)" = "691b17f19fc1ec9d94ec0b5864859290dff279dbd7b03f017afda54eb36c3c35" "checksum serde_yaml 0.8.11 (registry+https://github.com/rust-lang/crates.io-index)" = "691b17f19fc1ec9d94ec0b5864859290dff279dbd7b03f017afda54eb36c3c35"
"checksum slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8" "checksum slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8"
"checksum smallvec 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)" = "f7b0758c52e15a8b5e3691eae6cc559f08eee9406e548a4477ba4e67770a82b6" "checksum smallvec 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)" = "f7b0758c52e15a8b5e3691eae6cc559f08eee9406e548a4477ba4e67770a82b6"
"checksum smallvec 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4ecf3b85f68e8abaa7555aa5abdb1153079387e60b718283d732f03897fcfc86"
"checksum smol_str 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "34836c9a295c62c2ce3514471117c5cb269891e8421b2aafdd910050576c4d8b" "checksum smol_str 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "34836c9a295c62c2ce3514471117c5cb269891e8421b2aafdd910050576c4d8b"
"checksum stacker 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "d96fc4f13a0ac088e9a3cd9af1cc8c5cc1ab5deb2145cef661267dfc9c542f8a" "checksum stacker 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "d96fc4f13a0ac088e9a3cd9af1cc8c5cc1ab5deb2145cef661267dfc9c542f8a"
"checksum superslice 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ab16ced94dbd8a46c82fd81e3ed9a8727dac2977ea869d217bcc4ea1f122e81f" "checksum superslice 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ab16ced94dbd8a46c82fd81e3ed9a8727dac2977ea869d217bcc4ea1f122e81f"


@ -0,0 +1,379 @@
use format_buf::format;
use hir::{db::HirDatabase, FromSource};
use join_to_string::join;
use ra_syntax::{
ast::{
self, AstNode, NameOwner, StructKind, TypeAscriptionOwner, TypeParamsOwner, VisibilityOwner,
},
TextUnit, T,
};
use std::fmt::Write;
use crate::{Assist, AssistCtx, AssistId};
// Assist: add_new
//
// Adds a new inherent impl for a type.
//
// ```
// struct Ctx<T: Clone> {
// data: T,<|>
// }
// ```
// ->
// ```
// struct Ctx<T: Clone> {
// data: T,
// }
//
// impl<T: Clone> Ctx<T> {
// fn new(data: T) -> Self { Self { data } }
// }
//
// ```
pub(crate) fn add_new(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
let strukt = ctx.find_node_at_offset::<ast::StructDef>()?;
// We want to only apply this to non-union structs with named fields
let field_list = match (strukt.kind(), strukt.is_union()) {
(StructKind::Named(named), false) => named,
_ => return None,
};
// Return early if we've found an existing new fn
let impl_block = find_struct_impl(&ctx, &strukt)?;
ctx.add_assist(AssistId("add_new"), "add new fn", |edit| {
edit.target(strukt.syntax().text_range());
let mut buf = String::with_capacity(512);
if impl_block.is_some() {
buf.push('\n');
}
let vis = strukt.visibility().map(|v| format!("{} ", v.syntax()));
let vis = vis.as_ref().map(String::as_str).unwrap_or("");
write!(&mut buf, " {}fn new(", vis).unwrap();
join(field_list.fields().map(|f| {
format!(
"{}: {}",
f.name().unwrap().syntax().text(),
f.ascribed_type().unwrap().syntax().text()
)
}))
.separator(", ")
.to_buf(&mut buf);
buf.push_str(") -> Self { Self {");
join(field_list.fields().map(|f| f.name().unwrap().syntax().text()))
.separator(", ")
.surround_with(" ", " ")
.to_buf(&mut buf);
buf.push_str("} }");
let (start_offset, end_offset) = if let Some(impl_block) = impl_block {
buf.push('\n');
let start = impl_block
.syntax()
.descendants_with_tokens()
.find(|t| t.kind() == T!['{'])
.unwrap()
.text_range()
.end();
(start, TextUnit::from_usize(1))
} else {
buf = generate_impl_text(&strukt, &buf);
let start = strukt.syntax().text_range().end();
(start, TextUnit::from_usize(3))
};
edit.set_cursor(start_offset + TextUnit::of_str(&buf) - end_offset);
edit.insert(start_offset, buf);
})
}
// Generates the surrounding `impl Type { <code> }` including type and lifetime
// parameters
fn generate_impl_text(strukt: &ast::StructDef, code: &str) -> String {
let type_params = strukt.type_param_list();
let mut buf = String::with_capacity(code.len());
buf.push_str("\n\nimpl");
if let Some(type_params) = &type_params {
format!(buf, "{}", type_params.syntax());
}
buf.push_str(" ");
buf.push_str(strukt.name().unwrap().text().as_str());
if let Some(type_params) = type_params {
let lifetime_params = type_params
.lifetime_params()
.filter_map(|it| it.lifetime_token())
.map(|it| it.text().clone());
let type_params =
type_params.type_params().filter_map(|it| it.name()).map(|it| it.text().clone());
join(lifetime_params.chain(type_params)).surround_with("<", ">").to_buf(&mut buf);
}
format!(&mut buf, " {{\n{}\n}}\n", code);
buf
}
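As an illustration of what `generate_impl_text` assembles, here is a dependency-free sketch with plain string inputs standing in for the syntax nodes (assumed names, not part of this change):

// Sketch: the string assembly performed by `generate_impl_text`, with &str inputs.
fn generate_impl_text_sketch(type_params: &str, name: &str, generic_args: &str, code: &str) -> String {
    let mut buf = String::new();
    buf.push_str("\n\nimpl");
    buf.push_str(type_params);   // e.g. "<T: Clone>", taken from the struct's param list
    buf.push(' ');
    buf.push_str(name);          // e.g. "Ctx"
    buf.push_str(generic_args);  // e.g. "<T>": lifetime and type param names joined inside "<", ">"
    buf.push_str(&format!(" {{\n{}\n}}\n", code));
    buf
}

// generate_impl_text_sketch("<T: Clone>", "Ctx", "<T>", "    fn new(data: T) -> Self { Self { data } }")
// yields the `impl<T: Clone> Ctx<T> { ... }` block shown in the assist's doc example above.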
// Uses a syntax-driven approach to find any impl blocks for the struct that
// exist within the module/file
//
// Returns `None` if we've found an existing `new` fn
//
// FIXME: change the new fn checking to a more semantic approach when that's more
// viable (e.g. we process proc macros, etc)
fn find_struct_impl(
ctx: &AssistCtx<impl HirDatabase>,
strukt: &ast::StructDef,
) -> Option<Option<ast::ImplBlock>> {
let db = ctx.db;
let module = strukt.syntax().ancestors().find(|node| {
ast::Module::can_cast(node.kind()) || ast::SourceFile::can_cast(node.kind())
})?;
let struct_ty = {
let src = hir::Source { file_id: ctx.frange.file_id.into(), ast: strukt.clone() };
hir::Struct::from_source(db, src).unwrap().ty(db)
};
let mut found_new_fn = false;
let block = module.descendants().filter_map(ast::ImplBlock::cast).find(|impl_blk| {
if found_new_fn {
return false;
}
let src = hir::Source { file_id: ctx.frange.file_id.into(), ast: impl_blk.clone() };
let blk = hir::ImplBlock::from_source(db, src).unwrap();
let same_ty = blk.target_ty(db) == struct_ty;
let not_trait_impl = blk.target_trait(db).is_none();
found_new_fn = has_new_fn(impl_blk);
same_ty && not_trait_impl
});
if found_new_fn {
None
} else {
Some(block)
}
}
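The nested `Option` returned here encodes three distinct outcomes for the caller in `add_new`; a small stand-alone sketch of how they are interpreted (stand-in type, illustrative only):

// `ImplBlockStandIn` replaces `ast::ImplBlock` purely for this illustration.
struct ImplBlockStandIn;

fn interpret(found: Option<Option<ImplBlockStandIn>>) -> &'static str {
    match found {
        // Some inherent impl already defines `new`: the assist is not applicable.
        None => "bail out",
        // No inherent impl exists yet: wrap the generated fn in a brand-new `impl` block.
        Some(None) => "generate a surrounding impl block",
        // An inherent impl without `new` exists: insert the generated fn into it.
        Some(Some(_)) => "reuse the existing impl block",
    }
}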
fn has_new_fn(imp: &ast::ImplBlock) -> bool {
if let Some(il) = imp.item_list() {
for item in il.impl_items() {
if let ast::ImplItem::FnDef(f) = item {
if f.name().unwrap().text().eq_ignore_ascii_case("new") {
return true;
}
}
}
}
false
}
#[cfg(test)]
mod tests {
use super::*;
use crate::helpers::{check_assist, check_assist_not_applicable, check_assist_target};
#[test]
#[rustfmt::skip]
fn test_add_new() {
// Check output of generation
check_assist(
add_new,
"struct Foo {<|>}",
"struct Foo {}
impl Foo {
fn new() -> Self { Self { } }<|>
}
",
);
check_assist(
add_new,
"struct Foo<T: Clone> {<|>}",
"struct Foo<T: Clone> {}
impl<T: Clone> Foo<T> {
fn new() -> Self { Self { } }<|>
}
",
);
check_assist(
add_new,
"struct Foo<'a, T: Foo<'a>> {<|>}",
"struct Foo<'a, T: Foo<'a>> {}
impl<'a, T: Foo<'a>> Foo<'a, T> {
fn new() -> Self { Self { } }<|>
}
",
);
check_assist(
add_new,
"struct Foo { baz: String <|>}",
"struct Foo { baz: String }
impl Foo {
fn new(baz: String) -> Self { Self { baz } }<|>
}
",
);
check_assist(
add_new,
"struct Foo { baz: String, qux: Vec<i32> <|>}",
"struct Foo { baz: String, qux: Vec<i32> }
impl Foo {
fn new(baz: String, qux: Vec<i32>) -> Self { Self { baz, qux } }<|>
}
",
);
// Check that visibility modifiers don't get brought in for fields
check_assist(
add_new,
"struct Foo { pub baz: String, pub qux: Vec<i32> <|>}",
"struct Foo { pub baz: String, pub qux: Vec<i32> }
impl Foo {
fn new(baz: String, qux: Vec<i32>) -> Self { Self { baz, qux } }<|>
}
",
);
// Check that it reuses existing impls
check_assist(
add_new,
"struct Foo {<|>}
impl Foo {}
",
"struct Foo {}
impl Foo {
fn new() -> Self { Self { } }<|>
}
",
);
check_assist(
add_new,
"struct Foo {<|>}
impl Foo {
fn qux(&self) {}
}
",
"struct Foo {}
impl Foo {
fn new() -> Self { Self { } }<|>
fn qux(&self) {}
}
",
);
check_assist(
add_new,
"struct Foo {<|>}
impl Foo {
fn qux(&self) {}
fn baz() -> i32 {
5
}
}
",
"struct Foo {}
impl Foo {
fn new() -> Self { Self { } }<|>
fn qux(&self) {}
fn baz() -> i32 {
5
}
}
",
);
// Check visibility of new fn based on struct
check_assist(
add_new,
"pub struct Foo {<|>}",
"pub struct Foo {}
impl Foo {
pub fn new() -> Self { Self { } }<|>
}
",
);
check_assist(
add_new,
"pub(crate) struct Foo {<|>}",
"pub(crate) struct Foo {}
impl Foo {
pub(crate) fn new() -> Self { Self { } }<|>
}
",
);
}
#[test]
fn add_new_not_applicable_if_fn_exists() {
check_assist_not_applicable(
add_new,
"
struct Foo {<|>}
impl Foo {
fn new() -> Self {
Self
}
}",
);
check_assist_not_applicable(
add_new,
"
struct Foo {<|>}
impl Foo {
fn New() -> Self {
Self
}
}",
);
}
#[test]
fn add_new_target() {
check_assist_target(
add_new,
"
struct SomeThingIrrelevant;
/// Has a lifetime parameter
struct Foo<'a, T: Foo<'a>> {<|>}
struct EvenMoreIrrelevant;
",
"/// Has a lifetime parameter
struct Foo<'a, T: Foo<'a>> {}",
);
}
}


@ -156,6 +156,28 @@ fn process(map: HashMap<String, String>) {}
) )
} }
#[test]
fn doctest_add_new() {
check(
"add_new",
r#####"
struct Ctx<T: Clone> {
data: T,<|>
}
"#####,
r#####"
struct Ctx<T: Clone> {
data: T,
}
impl<T: Clone> Ctx<T> {
fn new(data: T) -> Self { Self { data } }
}
"#####,
)
}
#[test] #[test]
fn doctest_apply_demorgan() { fn doctest_apply_demorgan() {
check( check(


@ -95,6 +95,7 @@ mod assists {
mod add_derive; mod add_derive;
mod add_explicit_type; mod add_explicit_type;
mod add_impl; mod add_impl;
mod add_new;
mod apply_demorgan; mod apply_demorgan;
mod flip_comma; mod flip_comma;
mod flip_binexpr; mod flip_binexpr;
@ -119,6 +120,7 @@ mod assists {
add_derive::add_derive, add_derive::add_derive,
add_explicit_type::add_explicit_type, add_explicit_type::add_explicit_type,
add_impl::add_impl, add_impl::add_impl,
add_new::add_new,
apply_demorgan::apply_demorgan, apply_demorgan::apply_demorgan,
change_visibility::change_visibility, change_visibility::change_visibility,
fill_match_arms::fill_match_arms, fill_match_arms::fill_match_arms,


@ -8,8 +8,8 @@ use rustc_hash::FxHashMap;
use test_utils::{extract_offset, parse_fixture, CURSOR_MARKER}; use test_utils::{extract_offset, parse_fixture, CURSOR_MARKER};
use crate::{ use crate::{
CrateGraph, Edition, FileId, FilePosition, RelativePathBuf, SourceDatabaseExt, SourceRoot, CrateGraph, CrateId, Edition, FileId, FilePosition, RelativePathBuf, SourceDatabaseExt,
SourceRootId, SourceRoot, SourceRootId,
}; };
pub const WORKSPACE: SourceRootId = SourceRootId(0); pub const WORKSPACE: SourceRootId = SourceRootId(0);
@ -33,6 +33,14 @@ pub trait WithFixture: Default + SourceDatabaseExt + 'static {
let pos = with_files(&mut db, fixture); let pos = with_files(&mut db, fixture);
(db, pos.unwrap()) (db, pos.unwrap())
} }
fn test_crate(&self) -> CrateId {
let crate_graph = self.crate_graph();
let mut it = crate_graph.iter();
let res = it.next().unwrap();
assert!(it.next().is_none());
res
}
} }
impl<DB: SourceDatabaseExt + Default + 'static> WithFixture for DB {} impl<DB: SourceDatabaseExt + Default + 'static> WithFixture for DB {}


@ -23,7 +23,7 @@ use ra_syntax::ast::{self, NameOwner, TypeAscriptionOwner};
use crate::{ use crate::{
adt::VariantDef, adt::VariantDef,
db::{AstDatabase, DefDatabase, HirDatabase}, db::{AstDatabase, DefDatabase, HirDatabase},
expr::{validation::ExprValidator, BindingAnnotation, Body, BodySourceMap, Pat, PatId}, expr::{BindingAnnotation, Body, BodySourceMap, ExprValidator, Pat, PatId},
generics::{GenericDef, HasGenericParams}, generics::{GenericDef, HasGenericParams},
ids::{ ids::{
AstItemDef, ConstId, EnumId, FunctionId, MacroDefId, StaticId, StructId, TraitId, AstItemDef, ConstId, EnumId, FunctionId, MacroDefId, StaticId, StructId, TraitId,
@ -157,7 +157,7 @@ impl Module {
} }
/// Finds a child module with the specified name. /// Finds a child module with the specified name.
pub fn child(self, db: &impl HirDatabase, name: &Name) -> Option<Module> { pub fn child(self, db: &impl DefDatabase, name: &Name) -> Option<Module> {
let def_map = db.crate_def_map(self.id.krate); let def_map = db.crate_def_map(self.id.krate);
let child_id = def_map[self.id.module_id].children.get(name)?; let child_id = def_map[self.id.module_id].children.get(name)?;
Some(self.with_module_id(*child_id)) Some(self.with_module_id(*child_id))


@ -1,12 +1,19 @@
//! FIXME: write short doc here //! FIXME: write short doc here
pub(crate) mod validation;
use std::sync::Arc; use std::sync::Arc;
use hir_def::path::known;
use hir_expand::diagnostics::DiagnosticSink;
use ra_syntax::ast;
use ra_syntax::AstPtr; use ra_syntax::AstPtr;
use rustc_hash::FxHashSet;
use crate::{db::HirDatabase, DefWithBody, HasBody, Resolver}; use crate::{
db::HirDatabase,
diagnostics::{MissingFields, MissingOkInTailExpr},
ty::{ApplicationTy, InferenceResult, Ty, TypeCtor},
Adt, DefWithBody, Function, HasBody, Name, Path, Resolver,
};
pub use hir_def::{ pub use hir_def::{
body::{ body::{
@ -38,196 +45,126 @@ pub(crate) fn resolver_for_scope(
let scopes = owner.expr_scopes(db); let scopes = owner.expr_scopes(db);
let scope_chain = scopes.scope_chain(scope_id).collect::<Vec<_>>(); let scope_chain = scopes.scope_chain(scope_id).collect::<Vec<_>>();
for scope in scope_chain.into_iter().rev() { for scope in scope_chain.into_iter().rev() {
r = r.push_expr_scope(Arc::clone(&scopes), scope); r = r.push_expr_scope(owner, Arc::clone(&scopes), scope);
} }
r r
} }
#[cfg(test)] pub(crate) struct ExprValidator<'a, 'b: 'a> {
mod tests { func: Function,
use hir_expand::Source; infer: Arc<InferenceResult>,
use ra_db::{fixture::WithFixture, SourceDatabase}; sink: &'a mut DiagnosticSink<'b>,
use ra_syntax::{algo::find_node_at_offset, ast, AstNode}; }
use test_utils::{assert_eq_text, extract_offset};
use crate::{source_binder::SourceAnalyzer, test_db::TestDB}; impl<'a, 'b> ExprValidator<'a, 'b> {
pub(crate) fn new(
func: Function,
infer: Arc<InferenceResult>,
sink: &'a mut DiagnosticSink<'b>,
) -> ExprValidator<'a, 'b> {
ExprValidator { func, infer, sink }
}
fn do_check(code: &str, expected: &[&str]) { pub(crate) fn validate_body(&mut self, db: &impl HirDatabase) {
let (off, code) = extract_offset(code); let body = self.func.body(db);
let code = {
let mut buf = String::new(); for e in body.exprs() {
let off = u32::from(off) as usize; if let (id, Expr::RecordLit { path, fields, spread }) = e {
buf.push_str(&code[..off]); self.validate_record_literal(id, path, fields, *spread, db);
buf.push_str("marker"); }
buf.push_str(&code[off..]); }
buf
let body_expr = &body[body.body_expr()];
if let Expr::Block { statements: _, tail: Some(t) } = body_expr {
self.validate_results_in_tail_expr(body.body_expr(), *t, db);
}
}
fn validate_record_literal(
&mut self,
id: ExprId,
_path: &Option<Path>,
fields: &[RecordLitField],
spread: Option<ExprId>,
db: &impl HirDatabase,
) {
if spread.is_some() {
return;
}
let struct_def = match self.infer[id].as_adt() {
Some((Adt::Struct(s), _)) => s,
_ => return,
}; };
let (db, file_id) = TestDB::with_single_file(&code); let lit_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
let missed_fields: Vec<Name> = struct_def
.fields(db)
.iter()
.filter_map(|f| {
let name = f.name(db);
if lit_fields.contains(&name) {
None
} else {
Some(name)
}
})
.collect();
if missed_fields.is_empty() {
return;
}
let source_map = self.func.body_source_map(db);
let file = db.parse(file_id).ok().unwrap(); if let Some(source_ptr) = source_map.expr_syntax(id) {
let marker: ast::PathExpr = find_node_at_offset(file.syntax(), off).unwrap(); if let Some(expr) = source_ptr.ast.a() {
let analyzer = SourceAnalyzer::new(&db, file_id, marker.syntax(), None); let root = source_ptr.file_syntax(db);
if let ast::Expr::RecordLit(record_lit) = expr.to_node(&root) {
let scopes = analyzer.scopes(); if let Some(field_list) = record_lit.record_field_list() {
let expr_id = analyzer self.sink.push(MissingFields {
.body_source_map() file: source_ptr.file_id,
.node_expr(Source { file_id: file_id.into(), ast: &marker.into() }) field_list: AstPtr::new(&field_list),
.unwrap(); missed_fields,
let scope = scopes.scope_for(expr_id); })
}
let actual = scopes }
.scope_chain(scope) }
.flat_map(|scope| scopes.entries(scope)) }
.map(|it| it.name().to_string())
.collect::<Vec<_>>()
.join("\n");
let expected = expected.join("\n");
assert_eq_text!(&expected, &actual);
} }
#[test] fn validate_results_in_tail_expr(
fn test_lambda_scope() { &mut self,
do_check( body_id: ExprId,
r" id: ExprId,
fn quux(foo: i32) { db: &impl HirDatabase,
let f = |bar, baz: i32| { ) {
<|> // the mismatch will be on the whole block currently
let mismatch = match self.infer.type_mismatch_for_expr(body_id) {
Some(m) => m,
None => return,
}; };
}",
&["bar", "baz", "foo"],
);
}
#[test] let std_result_path = known::std_result_result();
fn test_call_scope() {
do_check(
r"
fn quux() {
f(|x| <|> );
}",
&["x"],
);
}
#[test] let resolver = self.func.resolver(db);
fn test_method_call_scope() { let std_result_enum = match resolver.resolve_known_enum(db, &std_result_path) {
do_check( Some(it) => it,
r" _ => return,
fn quux() {
z.f(|x| <|> );
}",
&["x"],
);
}
#[test]
fn test_loop_scope() {
do_check(
r"
fn quux() {
loop {
let x = ();
<|>
}; };
}",
&["x"],
);
}
#[test] let std_result_ctor = TypeCtor::Adt(Adt::Enum(std_result_enum));
fn test_match() { let params = match &mismatch.expected {
do_check( Ty::Apply(ApplicationTy { ctor, parameters }) if ctor == &std_result_ctor => parameters,
r" _ => return,
fn quux() {
match () {
Some(x) => {
<|>
}
}; };
}",
&["x"],
);
}
#[test] if params.len() == 2 && &params[0] == &mismatch.actual {
fn test_shadow_variable() { let source_map = self.func.body_source_map(db);
do_check(
r"
fn foo(x: String) {
let x : &str = &x<|>;
}",
&["x"],
);
}
fn do_check_local_name(code: &str, expected_offset: u32) { if let Some(source_ptr) = source_map.expr_syntax(id) {
let (off, code) = extract_offset(code); if let Some(expr) = source_ptr.ast.a() {
self.sink.push(MissingOkInTailExpr { file: source_ptr.file_id, expr });
let (db, file_id) = TestDB::with_single_file(&code); }
let file = db.parse(file_id).ok().unwrap();
let expected_name = find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into())
.expect("failed to find a name at the target offset");
let name_ref: ast::NameRef = find_node_at_offset(file.syntax(), off).unwrap();
let analyzer = SourceAnalyzer::new(&db, file_id, name_ref.syntax(), None);
let local_name_entry = analyzer.resolve_local_name(&name_ref).unwrap();
let local_name =
local_name_entry.ptr().either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr());
assert_eq!(local_name.range(), expected_name.syntax().text_range());
}
#[test]
fn test_resolve_local_name() {
do_check_local_name(
r#"
fn foo(x: i32, y: u32) {
{
let z = x * 2;
}
{
let t = x<|> * 3;
}
}"#,
21,
);
}
#[test]
fn test_resolve_local_name_declaration() {
do_check_local_name(
r#"
fn foo(x: String) {
let x : &str = &x<|>;
}"#,
21,
);
}
#[test]
fn test_resolve_local_name_shadow() {
do_check_local_name(
r"
fn foo(x: String) {
let x : &str = &x;
x<|>
}
",
53,
);
}
#[test]
fn ref_patterns_contribute_bindings() {
do_check_local_name(
r"
fn foo() {
if let Some(&from) = bar() {
from<|>;
} }
} }
",
53,
);
} }
} }
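For context, the relocated `validate_results_in_tail_expr` check fires when a function's declared return type is `Result<T, E>` but its tail expression already has type `T`. A minimal example of code it targets (illustrative, deliberately ill-typed, not taken from this diff):

fn halve(n: i32) -> Result<i32, ()> {
    // Tail expression has type `i32` while the signature promises `Result<i32, ()>`;
    // the validator reports `MissingOkInTailExpr`, so the expression can be wrapped in `Ok(...)`.
    n / 2
}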


@ -1,137 +0,0 @@
//! FIXME: write short doc here
use std::sync::Arc;
use hir_def::path::known;
use hir_expand::diagnostics::DiagnosticSink;
use ra_syntax::ast;
use rustc_hash::FxHashSet;
use crate::{
db::HirDatabase,
diagnostics::{MissingFields, MissingOkInTailExpr},
expr::AstPtr,
ty::{ApplicationTy, InferenceResult, Ty, TypeCtor},
Adt, Function, Name, Path,
};
use super::{Expr, ExprId, RecordLitField};
pub(crate) struct ExprValidator<'a, 'b: 'a> {
func: Function,
infer: Arc<InferenceResult>,
sink: &'a mut DiagnosticSink<'b>,
}
impl<'a, 'b> ExprValidator<'a, 'b> {
pub(crate) fn new(
func: Function,
infer: Arc<InferenceResult>,
sink: &'a mut DiagnosticSink<'b>,
) -> ExprValidator<'a, 'b> {
ExprValidator { func, infer, sink }
}
pub(crate) fn validate_body(&mut self, db: &impl HirDatabase) {
let body = self.func.body(db);
for e in body.exprs() {
if let (id, Expr::RecordLit { path, fields, spread }) = e {
self.validate_record_literal(id, path, fields, *spread, db);
}
}
let body_expr = &body[body.body_expr()];
if let Expr::Block { statements: _, tail: Some(t) } = body_expr {
self.validate_results_in_tail_expr(body.body_expr(), *t, db);
}
}
fn validate_record_literal(
&mut self,
id: ExprId,
_path: &Option<Path>,
fields: &[RecordLitField],
spread: Option<ExprId>,
db: &impl HirDatabase,
) {
if spread.is_some() {
return;
}
let struct_def = match self.infer[id].as_adt() {
Some((Adt::Struct(s), _)) => s,
_ => return,
};
let lit_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
let missed_fields: Vec<Name> = struct_def
.fields(db)
.iter()
.filter_map(|f| {
let name = f.name(db);
if lit_fields.contains(&name) {
None
} else {
Some(name)
}
})
.collect();
if missed_fields.is_empty() {
return;
}
let source_map = self.func.body_source_map(db);
if let Some(source_ptr) = source_map.expr_syntax(id) {
if let Some(expr) = source_ptr.ast.a() {
let root = source_ptr.file_syntax(db);
if let ast::Expr::RecordLit(record_lit) = expr.to_node(&root) {
if let Some(field_list) = record_lit.record_field_list() {
self.sink.push(MissingFields {
file: source_ptr.file_id,
field_list: AstPtr::new(&field_list),
missed_fields,
})
}
}
}
}
}
fn validate_results_in_tail_expr(
&mut self,
body_id: ExprId,
id: ExprId,
db: &impl HirDatabase,
) {
// the mismatch will be on the whole block currently
let mismatch = match self.infer.type_mismatch_for_expr(body_id) {
Some(m) => m,
None => return,
};
let std_result_path = known::std_result_result();
let resolver = self.func.resolver(db);
let std_result_enum = match resolver.resolve_known_enum(db, &std_result_path) {
Some(it) => it,
_ => return,
};
let std_result_ctor = TypeCtor::Adt(Adt::Enum(std_result_enum));
let params = match &mismatch.expected {
Ty::Apply(ApplicationTy { ctor, parameters }) if ctor == &std_result_ctor => parameters,
_ => return,
};
if params.len() == 2 && &params[0] == &mismatch.actual {
let source_map = self.func.body_source_map(db);
if let Some(source_ptr) = source_map.expr_syntax(id) {
if let Some(expr) = source_ptr.ast.a() {
self.sink.push(MissingOkInTailExpr { file: source_ptr.file_id, expr });
}
}
}
}
}


@ -1,6 +1,6 @@
//! FIXME: write short doc here //! FIXME: write short doc here
use hir_def::{StructId, StructOrUnionId, UnionId}; use hir_def::{ModuleId, StructId, StructOrUnionId, UnionId};
use hir_expand::name::AsName; use hir_expand::name::AsName;
use ra_syntax::{ use ra_syntax::{
ast::{self, AstNode, NameOwner}, ast::{self, AstNode, NameOwner},
@ -10,9 +10,9 @@ use ra_syntax::{
use crate::{ use crate::{
db::{AstDatabase, DefDatabase, HirDatabase}, db::{AstDatabase, DefDatabase, HirDatabase},
ids::{AstItemDef, LocationCtx}, ids::{AstItemDef, LocationCtx},
AstId, Const, Crate, DefWithBody, Enum, EnumVariant, FieldSource, Function, HasBody, HasSource, Const, DefWithBody, Enum, EnumVariant, FieldSource, Function, HasBody, HasSource, ImplBlock,
ImplBlock, Local, Module, ModuleSource, Source, Static, Struct, StructField, Trait, TypeAlias, Local, Module, ModuleSource, Source, Static, Struct, StructField, Trait, TypeAlias, Union,
Union, VariantDef, VariantDef,
}; };
pub trait FromSource: Sized { pub trait FromSource: Sized {
@ -152,44 +152,48 @@ impl Local {
} }
impl Module { impl Module {
pub fn from_declaration(db: &impl HirDatabase, src: Source<ast::Module>) -> Option<Self> { pub fn from_declaration(db: &impl DefDatabase, src: Source<ast::Module>) -> Option<Self> {
let parent_declaration = src.ast.syntax().ancestors().skip(1).find_map(ast::Module::cast);
let parent_module = match parent_declaration {
Some(parent_declaration) => {
let src_parent = Source { file_id: src.file_id, ast: parent_declaration };
Module::from_declaration(db, src_parent)
}
_ => {
let src_parent = Source { let src_parent = Source {
file_id: src.file_id, file_id: src.file_id,
ast: ModuleSource::new(db, Some(src.file_id.original_file(db)), None), ast: ModuleSource::new(db, Some(src.file_id.original_file(db)), None),
}; };
let parent_module = Module::from_definition(db, src_parent)?; Module::from_definition(db, src_parent)
}
}?;
let child_name = src.ast.name()?; let child_name = src.ast.name()?;
parent_module.child(db, &child_name.as_name()) parent_module.child(db, &child_name.as_name())
} }
pub fn from_definition( pub fn from_definition(db: &impl DefDatabase, src: Source<ModuleSource>) -> Option<Self> {
db: &(impl DefDatabase + AstDatabase), match src.ast {
src: Source<ModuleSource>,
) -> Option<Self> {
let decl_id = match src.ast {
ModuleSource::Module(ref module) => { ModuleSource::Module(ref module) => {
assert!(!module.has_semi()); assert!(!module.has_semi());
let ast_id_map = db.ast_id_map(src.file_id); return Module::from_declaration(
let item_id = AstId::new(src.file_id, ast_id_map.ast_id(module)); db,
Some(item_id) Source { file_id: src.file_id, ast: module.clone() },
);
} }
ModuleSource::SourceFile(_) => None, ModuleSource::SourceFile(_) => (),
}; };
db.relevant_crates(src.file_id.original_file(db)).iter().find_map(|&crate_id| { let original_file = src.file_id.original_file(db);
let def_map = db.crate_def_map(crate_id);
let (module_id, _module_data) = let (krate, module_id) =
def_map.modules.iter().find(|(_module_id, module_data)| { db.relevant_crates(original_file).iter().find_map(|&crate_id| {
if decl_id.is_some() { let crate_def_map = db.crate_def_map(crate_id);
module_data.declaration == decl_id let local_module_id = crate_def_map.modules_for_file(original_file).next()?;
} else { Some((crate_id, local_module_id))
module_data.definition.map(|it| it.into()) == Some(src.file_id)
}
})?; })?;
Some(Module { id: ModuleId { krate, module_id } })
Some(Module::new(Crate { crate_id }, module_id))
})
} }
} }


@ -16,8 +16,8 @@ use crate::{
expr::{ExprScopes, PatId, ScopeId}, expr::{ExprScopes, PatId, ScopeId},
generics::GenericParams, generics::GenericParams,
impl_block::ImplBlock, impl_block::ImplBlock,
Adt, Const, Enum, EnumVariant, Function, MacroDef, ModuleDef, PerNs, Static, Struct, Trait, Adt, Const, DefWithBody, Enum, EnumVariant, Function, Local, MacroDef, ModuleDef, PerNs,
TypeAlias, Static, Struct, Trait, TypeAlias,
}; };
#[derive(Debug, Clone, Default)] #[derive(Debug, Clone, Default)]
@ -34,6 +34,7 @@ pub(crate) struct ModuleItemMap {
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub(crate) struct ExprScope { pub(crate) struct ExprScope {
owner: DefWithBody,
expr_scopes: Arc<ExprScopes>, expr_scopes: Arc<ExprScopes>,
scope_id: ScopeId, scope_id: ScopeId,
} }
@ -53,7 +54,7 @@ pub(crate) enum Scope {
} }
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum TypeNs { pub(crate) enum TypeNs {
SelfType(ImplBlock), SelfType(ImplBlock),
GenericParam(u32), GenericParam(u32),
Adt(Adt), Adt(Adt),
@ -68,13 +69,13 @@ pub enum TypeNs {
} }
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum ResolveValueResult { pub(crate) enum ResolveValueResult {
ValueNs(ValueNs), ValueNs(ValueNs),
Partial(TypeNs, usize), Partial(TypeNs, usize),
} }
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum ValueNs { pub(crate) enum ValueNs {
LocalBinding(PatId), LocalBinding(PatId),
Function(Function), Function(Function),
Const(Const), Const(Const),
@ -399,10 +400,11 @@ impl Resolver {
pub(crate) fn push_expr_scope( pub(crate) fn push_expr_scope(
self, self,
owner: DefWithBody,
expr_scopes: Arc<ExprScopes>, expr_scopes: Arc<ExprScopes>,
scope_id: ScopeId, scope_id: ScopeId,
) -> Resolver { ) -> Resolver {
self.push_scope(Scope::ExprScope(ExprScope { expr_scopes, scope_id })) self.push_scope(Scope::ExprScope(ExprScope { owner, expr_scopes, scope_id }))
} }
} }
@ -413,7 +415,7 @@ pub enum ScopeDef {
GenericParam(u32), GenericParam(u32),
ImplSelfType(ImplBlock), ImplSelfType(ImplBlock),
AdtSelfType(Adt), AdtSelfType(Adt),
LocalBinding(PatId), Local(Local),
Unknown, Unknown,
} }
@ -467,9 +469,10 @@ impl Scope {
Scope::AdtScope(i) => { Scope::AdtScope(i) => {
f(name::SELF_TYPE, ScopeDef::AdtSelfType(*i)); f(name::SELF_TYPE, ScopeDef::AdtSelfType(*i));
} }
Scope::ExprScope(e) => { Scope::ExprScope(scope) => {
e.expr_scopes.entries(e.scope_id).iter().for_each(|e| { scope.expr_scopes.entries(scope.scope_id).iter().for_each(|e| {
f(e.name().clone(), ScopeDef::LocalBinding(e.pat())); let local = Local { parent: scope.owner, pat_id: e.pat() };
f(e.name().clone(), ScopeDef::Local(local));
}); });
} }
} }


@ -19,7 +19,6 @@ use ra_syntax::{
SyntaxKind::*, SyntaxKind::*,
SyntaxNode, SyntaxNodePtr, TextRange, TextUnit, SyntaxNode, SyntaxNodePtr, TextRange, TextUnit,
}; };
use rustc_hash::FxHashSet;
use crate::{ use crate::{
db::HirDatabase, db::HirDatabase,
@ -195,14 +194,6 @@ impl SourceAnalyzer {
Some(self.infer.as_ref()?[pat_id].clone()) Some(self.infer.as_ref()?[pat_id].clone())
} }
pub fn type_of_pat_by_id(
&self,
_db: &impl HirDatabase,
pat_id: expr::PatId,
) -> Option<crate::Ty> {
Some(self.infer.as_ref()?[pat_id].clone())
}
pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> { pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
let expr_id = self.expr_id(&call.clone().into())?; let expr_id = self.expr_id(&call.clone().into())?;
self.infer.as_ref()?.method_resolution(expr_id) self.infer.as_ref()?.method_resolution(expr_id)
@ -293,33 +284,25 @@ impl SourceAnalyzer {
self.resolve_hir_path(db, &hir_path) self.resolve_hir_path(db, &hir_path)
} }
pub fn resolve_local_name(&self, name_ref: &ast::NameRef) -> Option<ScopeEntryWithSyntax> { fn resolve_local_name(&self, name_ref: &ast::NameRef) -> Option<ScopeEntryWithSyntax> {
let mut shadowed = FxHashSet::default();
let name = name_ref.as_name(); let name = name_ref.as_name();
let source_map = self.body_source_map.as_ref()?; let source_map = self.body_source_map.as_ref()?;
let scopes = self.scopes.as_ref()?; let scopes = self.scopes.as_ref()?;
let scope = scope_for(scopes, source_map, self.file_id.into(), name_ref.syntax()); let scope = scope_for(scopes, source_map, self.file_id.into(), name_ref.syntax())?;
let ret = scopes let entry = scopes.resolve_name_in_scope(scope, &name)?;
.scope_chain(scope)
.flat_map(|scope| scopes.entries(scope).iter())
.filter(|entry| shadowed.insert(entry.name()))
.filter(|entry| entry.name() == &name)
.nth(0);
ret.and_then(|entry| {
Some(ScopeEntryWithSyntax { Some(ScopeEntryWithSyntax {
name: entry.name().clone(), name: entry.name().clone(),
ptr: source_map.pat_syntax(entry.pat())?.ast, ptr: source_map.pat_syntax(entry.pat())?.ast,
}) })
})
} }
pub fn process_all_names(&self, db: &impl HirDatabase, f: &mut dyn FnMut(Name, ScopeDef)) { pub fn process_all_names(&self, db: &impl HirDatabase, f: &mut dyn FnMut(Name, ScopeDef)) {
self.resolver.process_all_names(db, f) self.resolver.process_all_names(db, f)
} }
// FIXME: we only use this in `inline_local_variable` assist, ideally, we
// should switch to general reference search infra there.
pub fn find_all_refs(&self, pat: &ast::BindPat) -> Vec<ReferenceDescriptor> { pub fn find_all_refs(&self, pat: &ast::BindPat) -> Vec<ReferenceDescriptor> {
// FIXME: at least, this should work with any DefWithBody, but ideally
// this should be hir-based altogether
let fn_def = pat.syntax().ancestors().find_map(ast::FnDef::cast).unwrap(); let fn_def = pat.syntax().ancestors().find_map(ast::FnDef::cast).unwrap();
let ptr = Either::A(AstPtr::new(&ast::Pat::from(pat.clone()))); let ptr = Either::A(AstPtr::new(&ast::Pat::from(pat.clone())));
fn_def fn_def
@ -421,11 +404,6 @@ impl SourceAnalyzer {
pub(crate) fn inference_result(&self) -> Arc<crate::ty::InferenceResult> { pub(crate) fn inference_result(&self) -> Arc<crate::ty::InferenceResult> {
self.infer.clone().unwrap() self.infer.clone().unwrap()
} }
#[cfg(test)]
pub(crate) fn scopes(&self) -> Arc<ExprScopes> {
self.scopes.clone().unwrap()
}
} }
fn scope_for( fn scope_for(


@ -81,7 +81,7 @@ impl TestDB {
let crate_graph = self.crate_graph(); let crate_graph = self.crate_graph();
for krate in crate_graph.iter().next() { for krate in crate_graph.iter().next() {
let crate_def_map = self.crate_def_map(krate); let crate_def_map = self.crate_def_map(krate);
for (module_id, _) in crate_def_map.modules.iter() { for module_id in crate_def_map.modules() {
let module_id = ModuleId { krate, module_id }; let module_id = ModuleId { krate, module_id };
let module = crate::Module::from(module_id); let module = crate::Module::from(module_id);
module.diagnostics( module.diagnostics(


@ -254,7 +254,6 @@ fn test(a: u32, b: isize, c: !, d: &str) {
1.0f32; 1.0f32;
}"#), }"#),
@r###" @r###"
[9; 10) 'a': u32 [9; 10) 'a': u32
[17; 18) 'b': isize [17; 18) 'b': isize
[27; 28) 'c': ! [27; 28) 'c': !
@ -317,7 +316,6 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[15; 20) '{ 1 }': u32 [15; 20) '{ 1 }': u32
[17; 18) '1': u32 [17; 18) '1': u32
[48; 53) '{ 1 }': u32 [48; 53) '{ 1 }': u32
@ -409,7 +407,6 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[72; 154) '{ ...a.c; }': () [72; 154) '{ ...a.c; }': ()
[82; 83) 'c': C [82; 83) 'c': C
[86; 87) 'C': C(usize) -> C [86; 87) 'C': C(usize) -> C
@ -443,7 +440,6 @@ fn test() {
E::V2; E::V2;
}"#), }"#),
@r###" @r###"
[48; 82) '{ E:...:V2; }': () [48; 82) '{ E:...:V2; }': ()
[52; 70) 'E::V1 ...d: 1 }': E [52; 70) 'E::V1 ...d: 1 }': E
[67; 68) '1': u32 [67; 68) '1': u32
@ -471,7 +467,6 @@ fn test(a: &u32, b: &mut u32, c: *const u32, d: *mut u32) {
} }
"#), "#),
@r###" @r###"
[9; 10) 'a': &u32 [9; 10) 'a': &u32
[18; 19) 'b': &mut u32 [18; 19) 'b': &mut u32
[31; 32) 'c': *const u32 [31; 32) 'c': *const u32
@ -524,7 +519,6 @@ fn test() {
} }
"##), "##),
@r###" @r###"
[11; 221) '{ ...o"#; }': () [11; 221) '{ ...o"#; }': ()
[17; 21) '5i32': i32 [17; 21) '5i32': i32
[27; 31) '5f32': f32 [27; 31) '5f32': f32
@ -568,7 +562,6 @@ fn test(x: SomeType) {
} }
"#), "#),
@r###" @r###"
[27; 28) 'x': SomeType [27; 28) 'x': SomeType
[40; 272) '{ ...lo"; }': () [40; 272) '{ ...lo"; }': ()
[50; 51) 'b': bool [50; 51) 'b': bool
@ -632,7 +625,6 @@ fn test() -> &mut &f64 {
} }
"#), "#),
@r###" @r###"
[14; 15) 'x': u32 [14; 15) 'x': u32
[22; 24) '{}': () [22; 24) '{}': ()
[78; 231) '{ ...t &c }': &mut &f64 [78; 231) '{ ...t &c }': &mut &f64
@ -679,7 +671,6 @@ impl S {
} }
"#), "#),
@r###" @r###"
[34; 38) 'self': &S [34; 38) 'self': &S
[40; 61) '{ ... }': () [40; 61) '{ ... }': ()
[50; 54) 'self': &S [50; 54) 'self': &S
@ -719,7 +710,6 @@ fn test() -> bool {
} }
"#), "#),
@r###" @r###"
[6; 7) 'x': bool [6; 7) 'x': bool
[22; 34) '{ 0i32 }': i32 [22; 34) '{ 0i32 }': i32
[28; 32) '0i32': i32 [28; 32) '0i32': i32
@ -802,7 +792,6 @@ fn test2(a1: *const A, a2: *mut A) {
} }
"#), "#),
@r###" @r###"
[44; 45) 'a': A [44; 45) 'a': A
[50; 213) '{ ...5.b; }': () [50; 213) '{ ...5.b; }': ()
[60; 62) 'a1': A [60; 62) 'a1': A
@ -983,7 +972,6 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[11; 37) '{ l... {}; }': () [11; 37) '{ l... {}; }': ()
[20; 21) 'x': () [20; 21) 'x': ()
[24; 34) 'if true {}': () [24; 34) 'if true {}': ()
@ -1105,7 +1093,6 @@ fn test(a: A) {
} }
"#), "#),
@r###" @r###"
[32; 36) 'self': A [32; 36) 'self': A
[38; 39) 'x': u32 [38; 39) 'x': u32
[53; 55) '{}': () [53; 55) '{}': ()
@ -1142,7 +1129,6 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[40; 44) 'self': &str [40; 44) 'self': &str
[53; 55) '{}': () [53; 55) '{}': ()
[69; 89) '{ ...o(); }': () [69; 89) '{ ...o(); }': ()
@ -1166,7 +1152,6 @@ fn test(x: &str, y: isize) {
} }
"#), "#),
@r###" @r###"
[9; 10) 'x': &str [9; 10) 'x': &str
[18; 19) 'y': isize [18; 19) 'y': isize
[28; 170) '{ ...d"); }': () [28; 170) '{ ...d"); }': ()
@ -1367,7 +1352,6 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[28; 79) '{ ...(1); }': () [28; 79) '{ ...(1); }': ()
[38; 42) 'A(n)': A<i32> [38; 42) 'A(n)': A<i32>
[40; 41) 'n': &i32 [40; 41) 'n': &i32
@ -1396,7 +1380,6 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[11; 57) '{ ...= v; }': () [11; 57) '{ ...= v; }': ()
[21; 22) 'v': &(i32, &i32) [21; 22) 'v': &(i32, &i32)
[25; 33) '&(1, &2)': &(i32, &i32) [25; 33) '&(1, &2)': &(i32, &i32)
@ -1441,7 +1424,6 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[68; 289) '{ ... d; }': () [68; 289) '{ ... d; }': ()
[78; 79) 'e': E [78; 79) 'e': E
[82; 95) 'E::A { x: 3 }': E [82; 95) 'E::A { x: 3 }': E
@ -1488,7 +1470,6 @@ fn test(a1: A<u32>, i: i32) {
} }
"#), "#),
@r###" @r###"
[36; 38) 'a1': A<u32> [36; 38) 'a1': A<u32>
[48; 49) 'i': i32 [48; 49) 'i': i32
[56; 147) '{ ...3.x; }': () [56; 147) '{ ...3.x; }': ()
@ -1569,7 +1550,6 @@ fn test(a1: A<u32>, o: Option<u64>) {
} }
"#), "#),
@r###" @r###"
[79; 81) 'a1': A<u32> [79; 81) 'a1': A<u32>
[91; 92) 'o': Option<u64> [91; 92) 'o': Option<u64>
[107; 244) '{ ... }; }': () [107; 244) '{ ... }; }': ()
@ -1604,7 +1584,6 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[10; 11) 't': T [10; 11) 't': T
[21; 26) '{ t }': T [21; 26) '{ t }': T
[23; 24) 't': T [23; 24) 't': T
@ -1652,7 +1631,6 @@ fn test() -> i128 {
} }
"#), "#),
@r###" @r###"
[74; 78) 'self': A<X, Y> [74; 78) 'self': A<X, Y>
[85; 107) '{ ... }': X [85; 107) '{ ... }': X
[95; 99) 'self': A<X, Y> [95; 99) 'self': A<X, Y>
@ -1706,7 +1684,6 @@ fn test(o: Option<u32>) {
} }
"#), "#),
@r###" @r###"
[78; 82) 'self': &Option<T> [78; 82) 'self': &Option<T>
[98; 100) '{}': () [98; 100) '{}': ()
[111; 112) 'o': Option<u32> [111; 112) 'o': Option<u32>
@ -1744,7 +1721,6 @@ fn test() -> i128 {
} }
"#), "#),
@r###" @r###"
[53; 57) 'self': A<T2> [53; 57) 'self': A<T2>
[65; 87) '{ ... }': T2 [65; 87) '{ ... }': T2
[75; 79) 'self': A<T2> [75; 79) 'self': A<T2>
@ -1921,7 +1897,6 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[56; 64) '{ A {} }': A [56; 64) '{ A {} }': A
[58; 62) 'A {}': A [58; 62) 'A {}': A
[126; 132) '{ 99 }': u32 [126; 132) '{ 99 }': u32
@ -1961,7 +1936,6 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[64; 67) 'val': T [64; 67) 'val': T
[82; 109) '{ ... }': Gen<T> [82; 109) '{ ... }': Gen<T>
[92; 103) 'Gen { val }': Gen<T> [92; 103) 'Gen { val }': Gen<T>
@ -2129,7 +2103,6 @@ fn test(x: X) {
} }
"#), "#),
@r###" @r###"
[20; 21) 'x': X [20; 21) 'x': X
[26; 47) '{ ...eld; }': () [26; 47) '{ ...eld; }': ()
[32; 33) 'x': X [32; 33) 'x': X
@ -2151,7 +2124,6 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[11; 89) '{ ... } }': () [11; 89) '{ ... } }': ()
[17; 21) 'X {}': {unknown} [17; 21) 'X {}': {unknown}
[27; 87) 'match ... }': () [27; 87) 'match ... }': ()
@ -2174,7 +2146,6 @@ fn quux() {
} }
"#), "#),
@r###" @r###"
[11; 41) '{ ...+ y; }': () [11; 41) '{ ...+ y; }': ()
[21; 22) 'y': i32 [21; 22) 'y': i32
[25; 27) '92': i32 [25; 27) '92': i32
@ -2300,7 +2271,6 @@ fn write() {
} }
"#), "#),
@r###" @r###"
[54; 139) '{ ... } }': () [54; 139) '{ ... } }': ()
[60; 137) 'match ... }': () [60; 137) 'match ... }': ()
[66; 83) 'someth...nknown': Maybe<{unknown}> [66; 83) 'someth...nknown': Maybe<{unknown}>
@ -2322,7 +2292,6 @@ fn test_line_buffer() {
} }
"#), "#),
@r###" @r###"
[23; 53) '{ ...n']; }': () [23; 53) '{ ...n']; }': ()
[29; 50) '&[0, b...b'\n']': &[u8;_] [29; 50) '&[0, b...b'\n']': &[u8;_]
[30; 50) '[0, b'...b'\n']': [u8;_] [30; 50) '[0, b'...b'\n']': [u8;_]
@ -2446,7 +2415,6 @@ fn test<R>(query_response: Canonical<QueryResponse<R>>) {
} }
"#), "#),
@r###" @r###"
[92; 106) 'query_response': Canonical<QueryResponse<R>> [92; 106) 'query_response': Canonical<QueryResponse<R>>
[137; 167) '{ ...lue; }': () [137; 167) '{ ...lue; }': ()
[143; 164) '&query....value': &QueryResponse<R> [143; 164) '&query....value': &QueryResponse<R>
@ -2472,7 +2440,6 @@ pub fn main_loop() {
} }
"#), "#),
@r###" @r###"
[144; 146) '{}': () [144; 146) '{}': ()
[169; 198) '{ ...t(); }': () [169; 198) '{ ...t(); }': ()
[175; 193) 'FxHash...efault': fn default<{unknown}, FxHasher>() -> HashSet<T, H> [175; 193) 'FxHash...efault': fn default<{unknown}, FxHasher>() -> HashSet<T, H>
@ -2518,7 +2485,6 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[49; 50) '0': u32 [49; 50) '0': u32
[80; 83) '101': u32 [80; 83) '101': u32
[95; 213) '{ ...NST; }': () [95; 213) '{ ...NST; }': ()
@ -2549,7 +2515,6 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[29; 32) '101': u32 [29; 32) '101': u32
[70; 73) '101': u32 [70; 73) '101': u32
[85; 280) '{ ...MUT; }': () [85; 280) '{ ...MUT; }': ()
@ -2588,7 +2553,6 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[31; 35) 'self': &Self [31; 35) 'self': &Self
[110; 114) 'self': &Self [110; 114) 'self': &Self
[170; 228) '{ ...i128 }': () [170; 228) '{ ...i128 }': ()
@ -2636,7 +2600,6 @@ mod bar_test {
} }
"#), "#),
@r###" @r###"
[63; 67) 'self': &Self [63; 67) 'self': &Self
[169; 173) 'self': &Self [169; 173) 'self': &Self
[300; 337) '{ ... }': () [300; 337) '{ ... }': ()
@ -2664,7 +2627,6 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[33; 37) 'self': &Self [33; 37) 'self': &Self
[92; 111) '{ ...d(); }': () [92; 111) '{ ...d(); }': ()
[98; 99) 'S': S [98; 99) 'S': S
@ -2694,7 +2656,6 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[43; 47) 'self': &Self [43; 47) 'self': &Self
[82; 86) 'self': &Self [82; 86) 'self': &Self
[210; 361) '{ ..., i8 }': () [210; 361) '{ ..., i8 }': ()
@ -2725,7 +2686,6 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[33; 37) 'self': &Self [33; 37) 'self': &Self
[102; 127) '{ ...d(); }': () [102; 127) '{ ...d(); }': ()
[108; 109) 'S': S<u32>(T) -> S<T> [108; 109) 'S': S<u32>(T) -> S<T>
@ -3130,7 +3090,6 @@ fn test<T: Iterable<Item=u32>>() {
} }
"#), "#),
@r###" @r###"
[67; 100) '{ ...own; }': () [67; 100) '{ ...own; }': ()
[77; 78) 'y': {unknown} [77; 78) 'y': {unknown}
[90; 97) 'unknown': {unknown} [90; 97) 'unknown': {unknown}
@ -3146,7 +3105,6 @@ const A: u32 = 1 + 1;
static B: u64 = { let x = 1; x }; static B: u64 = { let x = 1; x };
"#), "#),
@r###" @r###"
[16; 17) '1': u32 [16; 17) '1': u32
[16; 21) '1 + 1': u32 [16; 21) '1 + 1': u32
[20; 21) '1': u32 [20; 21) '1': u32
@ -3170,7 +3128,6 @@ fn test() -> u64 {
} }
"#), "#),
@r###" @r###"
[38; 87) '{ ... a.1 }': u64 [38; 87) '{ ... a.1 }': u64
[48; 49) 'a': S [48; 49) 'a': S
[52; 53) 'S': S(i32, u64) -> S [52; 53) 'S': S(i32, u64) -> S
@ -3225,7 +3182,6 @@ fn indexing_arrays() {
assert_snapshot!( assert_snapshot!(
infer("fn main() { &mut [9][2]; }"), infer("fn main() { &mut [9][2]; }"),
@r###" @r###"
[10; 26) '{ &mut...[2]; }': () [10; 26) '{ &mut...[2]; }': ()
[12; 23) '&mut [9][2]': &mut {unknown} [12; 23) '&mut [9][2]': &mut {unknown}
[17; 20) '[9]': [i32;_] [17; 20) '[9]': [i32;_]


@ -67,6 +67,11 @@ impl ExprScopes {
std::iter::successors(scope, move |&scope| self.scopes[scope].parent) std::iter::successors(scope, move |&scope| self.scopes[scope].parent)
} }
pub fn resolve_name_in_scope(&self, scope: ScopeId, name: &Name) -> Option<&ScopeEntry> {
self.scope_chain(Some(scope))
.find_map(|scope| self.entries(scope).iter().find(|it| it.name == *name))
}
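The new `resolve_name_in_scope` helper replaces the manual shadow tracking previously done in `SourceAnalyzer::resolve_local_name`: walking the scope chain from the innermost scope outwards and taking the first matching entry means the shadowing binding wins automatically. A dependency-free sketch of that behaviour (plain strings and ids stand in for `Name` and `PatId`):

// Each inner Vec holds one scope's (name, pat id) entries; scopes are ordered
// innermost-first, mirroring `scope_chain`. The first match wins.
fn resolve_in_chain(scopes: &[Vec<(&str, u32)>], name: &str) -> Option<u32> {
    scopes
        .iter()
        .find_map(|entries| entries.iter().find(|(n, _)| *n == name).map(|&(_, pat)| pat))
}

// For `fn foo(x: String) { let x: &str = &x; }` the chain is roughly
// [[("x", 1)], [("x", 0)]], so "x" resolves to the inner, shadowing binding:
// resolve_in_chain(&[vec![("x", 1)], vec![("x", 0)]], "x") == Some(1)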
pub fn scope_for(&self, expr: ExprId) -> Option<ScopeId> { pub fn scope_for(&self, expr: ExprId) -> Option<ScopeId> {
self.scope_by_expr.get(&expr).copied() self.scope_by_expr.get(&expr).copied()
} }
@ -163,3 +168,217 @@ fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope
e => e.walk_child_exprs(|e| compute_expr_scopes(e, body, scopes, scope)), e => e.walk_child_exprs(|e| compute_expr_scopes(e, body, scopes, scope)),
}; };
} }
#[cfg(test)]
mod tests {
use hir_expand::{name::AsName, Source};
use ra_db::{fixture::WithFixture, FileId, SourceDatabase};
use ra_syntax::{algo::find_node_at_offset, ast, AstNode};
use test_utils::{assert_eq_text, extract_offset};
use crate::{db::DefDatabase2, test_db::TestDB, FunctionId, ModuleDefId};
fn find_function(db: &TestDB, file_id: FileId) -> FunctionId {
let krate = db.test_crate();
let crate_def_map = db.crate_def_map(krate);
let module = crate_def_map.modules_for_file(file_id).next().unwrap();
let (_, res) = crate_def_map[module].scope.entries().next().unwrap();
match res.def.take_values().unwrap() {
ModuleDefId::FunctionId(it) => it,
_ => panic!(),
}
}
fn do_check(code: &str, expected: &[&str]) {
let (off, code) = extract_offset(code);
let code = {
let mut buf = String::new();
let off = u32::from(off) as usize;
buf.push_str(&code[..off]);
buf.push_str("marker");
buf.push_str(&code[off..]);
buf
};
let (db, file_id) = TestDB::with_single_file(&code);
let file_syntax = db.parse(file_id).syntax_node();
let marker: ast::PathExpr = find_node_at_offset(&file_syntax, off).unwrap();
let function = find_function(&db, file_id);
let scopes = db.expr_scopes(function.into());
let (_body, source_map) = db.body_with_source_map(function.into());
let expr_id =
source_map.node_expr(Source { file_id: file_id.into(), ast: &marker.into() }).unwrap();
let scope = scopes.scope_for(expr_id);
let actual = scopes
.scope_chain(scope)
.flat_map(|scope| scopes.entries(scope))
.map(|it| it.name().to_string())
.collect::<Vec<_>>()
.join("\n");
let expected = expected.join("\n");
assert_eq_text!(&expected, &actual);
}
#[test]
fn test_lambda_scope() {
do_check(
r"
fn quux(foo: i32) {
let f = |bar, baz: i32| {
<|>
};
}",
&["bar", "baz", "foo"],
);
}
#[test]
fn test_call_scope() {
do_check(
r"
fn quux() {
f(|x| <|> );
}",
&["x"],
);
}
#[test]
fn test_method_call_scope() {
do_check(
r"
fn quux() {
z.f(|x| <|> );
}",
&["x"],
);
}
#[test]
fn test_loop_scope() {
do_check(
r"
fn quux() {
loop {
let x = ();
<|>
};
}",
&["x"],
);
}
#[test]
fn test_match() {
do_check(
r"
fn quux() {
match () {
Some(x) => {
<|>
}
};
}",
&["x"],
);
}
#[test]
fn test_shadow_variable() {
do_check(
r"
fn foo(x: String) {
let x : &str = &x<|>;
}",
&["x"],
);
}
fn do_check_local_name(code: &str, expected_offset: u32) {
let (off, code) = extract_offset(code);
let (db, file_id) = TestDB::with_single_file(&code);
let file = db.parse(file_id).ok().unwrap();
let expected_name = find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into())
.expect("failed to find a name at the target offset");
let name_ref: ast::NameRef = find_node_at_offset(file.syntax(), off).unwrap();
let function = find_function(&db, file_id);
let scopes = db.expr_scopes(function.into());
let (_body, source_map) = db.body_with_source_map(function.into());
let expr_scope = {
let expr_ast = name_ref.syntax().ancestors().find_map(ast::Expr::cast).unwrap();
let expr_id =
source_map.node_expr(Source { file_id: file_id.into(), ast: &expr_ast }).unwrap();
scopes.scope_for(expr_id).unwrap()
};
let resolved = scopes.resolve_name_in_scope(expr_scope, &name_ref.as_name()).unwrap();
let pat_src = source_map.pat_syntax(resolved.pat()).unwrap();
let local_name = pat_src.ast.either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr());
assert_eq!(local_name.range(), expected_name.syntax().text_range());
}
#[test]
fn test_resolve_local_name() {
do_check_local_name(
r#"
fn foo(x: i32, y: u32) {
{
let z = x * 2;
}
{
let t = x<|> * 3;
}
}"#,
21,
);
}
#[test]
fn test_resolve_local_name_declaration() {
do_check_local_name(
r#"
fn foo(x: String) {
let x : &str = &x<|>;
}"#,
21,
);
}
#[test]
fn test_resolve_local_name_shadow() {
do_check_local_name(
r"
fn foo(x: String) {
let x : &str = &x;
x<|>
}
",
53,
);
}
#[test]
fn ref_patterns_contribute_bindings() {
do_check_local_name(
r"
fn foo() {
if let Some(&from) = bar() {
from<|>;
}
}
",
53,
);
}
}


@ -58,7 +58,7 @@ mod tests;
use std::sync::Arc; use std::sync::Arc;
use hir_expand::{diagnostics::DiagnosticSink, name::Name, MacroDefId}; use hir_expand::{ast_id_map::FileAstId, diagnostics::DiagnosticSink, name::Name, MacroDefId};
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use ra_arena::Arena; use ra_arena::Arena;
use ra_db::{CrateId, Edition, FileId}; use ra_db::{CrateId, Edition, FileId};
@ -73,7 +73,7 @@ use crate::{
diagnostics::DefDiagnostic, path_resolution::ResolveMode, per_ns::PerNs, raw::ImportId, diagnostics::DefDiagnostic, path_resolution::ResolveMode, per_ns::PerNs, raw::ImportId,
}, },
path::Path, path::Path,
AstId, CrateModuleId, ModuleDefId, ModuleId, TraitId, AstId, CrateModuleId, FunctionId, ModuleDefId, ModuleId, TraitId,
}; };
/// Contains all top-level defs from a macro-expanded crate /// Contains all top-level defs from a macro-expanded crate
@ -87,7 +87,7 @@ pub struct CrateDefMap {
prelude: Option<ModuleId>, prelude: Option<ModuleId>,
extern_prelude: FxHashMap<Name, ModuleDefId>, extern_prelude: FxHashMap<Name, ModuleDefId>,
root: CrateModuleId, root: CrateModuleId,
pub modules: Arena<CrateModuleId, ModuleData>, modules: Arena<CrateModuleId, ModuleData>,
/// Some macros are not well-behavior, which leads to infinite loop /// Some macros are not well-behavior, which leads to infinite loop
/// e.g. macro_rules! foo { ($ty:ty) => { foo!($ty); } } /// e.g. macro_rules! foo { ($ty:ty) => { foo!($ty); } }
@ -124,6 +124,11 @@ pub struct ModuleData {
pub definition: Option<FileId>, pub definition: Option<FileId>,
} }
#[derive(Default, Debug, PartialEq, Eq, Clone)]
pub(crate) struct Declarations {
fns: FxHashMap<FileAstId<ast::FnDef>, FunctionId>,
}
#[derive(Debug, Default, PartialEq, Eq, Clone)] #[derive(Debug, Default, PartialEq, Eq, Clone)]
pub struct ModuleScope { pub struct ModuleScope {
pub items: FxHashMap<Name, Resolution>, pub items: FxHashMap<Name, Resolution>,
@ -258,6 +263,17 @@ impl CrateDefMap {
let res = self.resolve_path_fp_with_macro(db, ResolveMode::Other, original_module, path); let res = self.resolve_path_fp_with_macro(db, ResolveMode::Other, original_module, path);
(res.resolved_def, res.segment_index) (res.resolved_def, res.segment_index)
} }
pub fn modules(&self) -> impl Iterator<Item = CrateModuleId> + '_ {
self.modules.iter().map(|(id, _data)| id)
}
pub fn modules_for_file(&self, file_id: FileId) -> impl Iterator<Item = CrateModuleId> + '_ {
self.modules
.iter()
.filter(move |(_id, data)| data.definition == Some(file_id))
.map(|(id, _data)| id)
}
} }
mod diagnostics { mod diagnostics {


@ -664,7 +664,8 @@ where
let name = def.name.clone(); let name = def.name.clone();
let def: PerNs = match def.kind { let def: PerNs = match def.kind {
raw::DefKind::Function(ast_id) => { raw::DefKind::Function(ast_id) => {
PerNs::values(FunctionId::from_ast_id(ctx, ast_id).into()) let f = FunctionId::from_ast_id(ctx, ast_id);
PerNs::values(f.into())
} }
raw::DefKind::Struct(ast_id) => { raw::DefKind::Struct(ast_id) => {
let id = StructOrUnionId::from_ast_id(ctx, ast_id).into(); let id = StructOrUnionId::from_ast_id(ctx, ast_id).into();
@ -798,7 +799,7 @@ mod tests {
fn do_limited_resolve(code: &str, limit: u32, poison_limit: u32) -> CrateDefMap { fn do_limited_resolve(code: &str, limit: u32, poison_limit: u32) -> CrateDefMap {
let (db, _file_id) = TestDB::with_single_file(&code); let (db, _file_id) = TestDB::with_single_file(&code);
let krate = db.crate_graph().iter().next().unwrap(); let krate = db.test_crate();
let def_map = { let def_map = {
let edition = db.crate_graph().edition(krate); let edition = db.crate_graph().edition(krate);


@ -1,12 +1,12 @@
use std::sync::Arc; use std::sync::Arc;
use ra_db::{SourceDatabase, SourceDatabaseExt}; use ra_db::SourceDatabaseExt;
use super::*; use super::*;
fn check_def_map_is_not_recomputed(initial: &str, file_change: &str) { fn check_def_map_is_not_recomputed(initial: &str, file_change: &str) {
let (mut db, pos) = TestDB::with_position(initial); let (mut db, pos) = TestDB::with_position(initial);
let krate = db.crate_graph().iter().next().unwrap(); let krate = db.test_crate();
{ {
let events = db.log_executed(|| { let events = db.log_executed(|| {
db.crate_def_map(krate); db.crate_def_map(krate);
@ -111,7 +111,7 @@ fn typing_inside_a_macro_should_not_invalidate_def_map() {
m!(X); m!(X);
", ",
); );
let krate = db.crate_graph().iter().next().unwrap(); let krate = db.test_crate();
{ {
let events = db.log_executed(|| { let events = db.log_executed(|| {
let crate_def_map = db.crate_def_map(krate); let crate_def_map = db.crate_def_map(krate);


@ -656,7 +656,7 @@ fn unresolved_module_diagnostics() {
//- /foo.rs //- /foo.rs
", ",
); );
let krate = db.crate_graph().iter().next().unwrap(); let krate = db.test_crate();
let crate_def_map = db.crate_def_map(krate); let crate_def_map = db.crate_def_map(krate);


@ -241,10 +241,8 @@ mod tests {
// } // }
let struct_name = mk_ident("Foo"); let struct_name = mk_ident("Foo");
let fields = [mk_ident("name"), mk_ident("id")]; let fields = [mk_ident("name"), mk_ident("id")];
let fields = fields let fields =
.into_iter() fields.iter().map(|it| quote!(#it: self.#it.clone(), ).token_trees.clone()).flatten();
.map(|it| quote!(#it: self.#it.clone(), ).token_trees.clone())
.flatten();
let list = tt::Subtree { delimiter: tt::Delimiter::Brace, token_trees: fields.collect() }; let list = tt::Subtree { delimiter: tt::Delimiter::Brace, token_trees: fields.collect() };


@ -88,16 +88,16 @@ mod tests {
", ",
), ),
@r###" @r###"
[ [
CompletionItem { CompletionItem {
label: "the_field", label: "the_field",
source_range: [94; 94), source_range: [94; 94),
delete: [94; 94), delete: [94; 94),
insert: "the_field", insert: "the_field",
kind: Field, kind: Field,
detail: "u32", detail: "u32",
}, },
] ]
"### "###
); );
} }
@ -349,24 +349,24 @@ mod tests {
", ",
), ),
@r###" @r###"
[ [
CompletionItem { CompletionItem {
label: "0", label: "0",
source_range: [75; 75), source_range: [75; 75),
delete: [75; 75), delete: [75; 75),
insert: "0", insert: "0",
kind: Field, kind: Field,
detail: "i32", detail: "i32",
}, },
CompletionItem { CompletionItem {
label: "1", label: "1",
source_range: [75; 75), source_range: [75; 75),
delete: [75; 75), delete: [75; 75),
insert: "1", insert: "1",
kind: Field, kind: Field,
detail: "f64", detail: "f64",
}, },
] ]
"### "###
); );
} }
@ -419,16 +419,16 @@ mod tests {
", ",
), ),
@r###" @r###"
[ [
CompletionItem { CompletionItem {
label: "the_field", label: "the_field",
source_range: [106; 106), source_range: [106; 106),
delete: [106; 106), delete: [106; 106),
insert: "the_field", insert: "the_field",
kind: Field, kind: Field,
detail: "u32", detail: "u32",
}, },
] ]
"### "###
); );
} }
@ -452,15 +452,15 @@ mod tests {
} }
"###, CompletionKind::Keyword), "###, CompletionKind::Keyword),
@r###" @r###"
[ [
CompletionItem { CompletionItem {
label: "await", label: "await",
source_range: [74; 74), source_range: [74; 74),
delete: [74; 74), delete: [74; 74),
insert: "await", insert: "await",
detail: "expr.await", detail: "expr.await",
}, },
] ]
"### "###
) )
} }


@ -70,15 +70,15 @@ mod tests {
", ",
), ),
@r###" @r###"
[ [
CompletionItem { CompletionItem {
label: "file_id: FileId", label: "file_id: FileId",
source_range: [110; 114), source_range: [110; 114),
delete: [110; 114), delete: [110; 114),
insert: "file_id: FileId", insert: "file_id: FileId",
lookup: "file_id", lookup: "file_id",
}, },
] ]
"### "###
); );
} }
@ -94,15 +94,15 @@ mod tests {
", ",
), ),
@r###" @r###"
[ [
CompletionItem { CompletionItem {
label: "file_id: FileId", label: "file_id: FileId",
source_range: [110; 114), source_range: [110; 114),
delete: [110; 114), delete: [110; 114),
insert: "file_id: FileId", insert: "file_id: FileId",
lookup: "file_id", lookup: "file_id",
}, },
] ]
"### "###
); );
} }
@ -121,15 +121,15 @@ mod tests {
", ",
), ),
@r###" @r###"
[ [
CompletionItem { CompletionItem {
label: "file_id: FileId", label: "file_id: FileId",
source_range: [289; 293), source_range: [289; 293),
delete: [289; 293), delete: [289; 293),
insert: "file_id: FileId", insert: "file_id: FileId",
lookup: "file_id", lookup: "file_id",
}, },
] ]
"### "###
); );
} }


@ -131,7 +131,8 @@ mod tests {
use <|> use <|>
", ",
), ),
@r###"[ @r###"
[
CompletionItem { CompletionItem {
label: "crate", label: "crate",
source_range: [21; 21), source_range: [21; 21),
@ -153,7 +154,8 @@ mod tests {
insert: "super::", insert: "super::",
kind: Keyword, kind: Keyword,
}, },
]"### ]
"###
); );
assert_debug_snapshot!( assert_debug_snapshot!(
@ -162,7 +164,8 @@ mod tests {
use a::<|> use a::<|>
", ",
), ),
@r###"[ @r###"
[
CompletionItem { CompletionItem {
label: "self", label: "self",
source_range: [24; 24), source_range: [24; 24),
@ -177,7 +180,8 @@ mod tests {
insert: "super::", insert: "super::",
kind: Keyword, kind: Keyword,
}, },
]"### ]
"###
); );
assert_debug_snapshot!( assert_debug_snapshot!(
@ -186,7 +190,8 @@ mod tests {
use a::{b, <|>} use a::{b, <|>}
", ",
), ),
@r###"[ @r###"
[
CompletionItem { CompletionItem {
label: "self", label: "self",
source_range: [28; 28), source_range: [28; 28),
@ -201,7 +206,8 @@ mod tests {
insert: "super::", insert: "super::",
kind: Keyword, kind: Keyword,
}, },
]"### ]
"###
); );
} }
@ -215,7 +221,8 @@ mod tests {
} }
", ",
), ),
@r###"[ @r###"
[
CompletionItem { CompletionItem {
label: "if", label: "if",
source_range: [49; 49), source_range: [49; 49),
@ -251,7 +258,8 @@ mod tests {
insert: "while $0 {}", insert: "while $0 {}",
kind: Keyword, kind: Keyword,
}, },
]"### ]
"###
); );
} }
@ -267,7 +275,8 @@ mod tests {
} }
", ",
), ),
@r###"[ @r###"
[
CompletionItem { CompletionItem {
label: "else", label: "else",
source_range: [108; 108), source_range: [108; 108),
@ -317,7 +326,8 @@ mod tests {
insert: "while $0 {}", insert: "while $0 {}",
kind: Keyword, kind: Keyword,
}, },
]"### ]
"###
); );
} }
@ -332,7 +342,8 @@ mod tests {
} }
", ",
), ),
@r###"[ @r###"
[
CompletionItem { CompletionItem {
label: "if", label: "if",
source_range: [56; 56), source_range: [56; 56),
@ -368,7 +379,8 @@ mod tests {
insert: "while $0 {}", insert: "while $0 {}",
kind: Keyword, kind: Keyword,
}, },
]"### ]
"###
); );
assert_debug_snapshot!( assert_debug_snapshot!(
do_keyword_completion( do_keyword_completion(
@ -379,7 +391,8 @@ mod tests {
} }
", ",
), ),
@r###"[ @r###"
[
CompletionItem { CompletionItem {
label: "if", label: "if",
source_range: [49; 49), source_range: [49; 49),
@ -415,7 +428,8 @@ mod tests {
insert: "while $0 {}", insert: "while $0 {}",
kind: Keyword, kind: Keyword,
}, },
]"### ]
"###
); );
} }
@ -431,7 +445,8 @@ mod tests {
} }
", ",
), ),
@r###"[ @r###"
[
CompletionItem { CompletionItem {
label: "if", label: "if",
source_range: [97; 97), source_range: [97; 97),
@ -467,7 +482,8 @@ mod tests {
insert: "while $0 {}", insert: "while $0 {}",
kind: Keyword, kind: Keyword,
}, },
]"### ]
"###
); );
} }
@ -483,7 +499,8 @@ mod tests {
} }
", ",
), ),
@r###"[ @r###"
[
CompletionItem { CompletionItem {
label: "if", label: "if",
source_range: [95; 95), source_range: [95; 95),
@ -519,7 +536,8 @@ mod tests {
insert: "while $0 {}", insert: "while $0 {}",
kind: Keyword, kind: Keyword,
}, },
]"### ]
"###
); );
assert_debug_snapshot!( assert_debug_snapshot!(
do_keyword_completion( do_keyword_completion(
@ -533,7 +551,8 @@ mod tests {
} }
", ",
), ),
@r###"[ @r###"
[
CompletionItem { CompletionItem {
label: "if", label: "if",
source_range: [95; 95), source_range: [95; 95),
@ -569,7 +588,8 @@ mod tests {
insert: "while $0 {}", insert: "while $0 {}",
kind: Keyword, kind: Keyword,
}, },
]"### ]
"###
); );
} }
@ -583,7 +603,8 @@ mod tests {
} }
", ",
), ),
@r###"[ @r###"
[
CompletionItem { CompletionItem {
label: "break", label: "break",
source_range: [63; 63), source_range: [63; 63),
@ -633,7 +654,8 @@ mod tests {
insert: "while $0 {}", insert: "while $0 {}",
kind: Keyword, kind: Keyword,
}, },
]"### ]
"###
); );
// No completion: lambda isolates control flow // No completion: lambda isolates control flow
@ -645,7 +667,8 @@ mod tests {
} }
", ",
), ),
@r###"[ @r###"
[
CompletionItem { CompletionItem {
label: "if", label: "if",
source_range: [68; 68), source_range: [68; 68),
@ -681,7 +704,8 @@ mod tests {
insert: "while $0 {}", insert: "while $0 {}",
kind: Keyword, kind: Keyword,
}, },
]"### ]
"###
); );
} }
@ -699,7 +723,8 @@ mod tests {
} }
", ",
), ),
@r###"[ @r###"
[
CompletionItem { CompletionItem {
label: "break", label: "break",
source_range: [122; 124), source_range: [122; 124),
@ -749,7 +774,8 @@ mod tests {
insert: "while $0 {}", insert: "while $0 {}",
kind: Keyword, kind: Keyword,
}, },
]"### ]
"###
) )
} }
} }


@ -37,7 +37,8 @@ mod tests {
<|> <|>
" "
), ),
@r##"[ @r###"
[
CompletionItem { CompletionItem {
label: "foo!", label: "foo!",
source_range: [46; 46), source_range: [46; 46),
@ -46,7 +47,8 @@ mod tests {
kind: Macro, kind: Macro,
detail: "macro_rules! foo", detail: "macro_rules! foo",
}, },
]"## ]
"###
); );
} }
@ -75,7 +77,8 @@ mod tests {
<|> <|>
" "
), ),
@r##"[ @r###"
[
CompletionItem { CompletionItem {
label: "vec!", label: "vec!",
source_range: [280; 280), source_range: [280; 280),
@ -87,7 +90,8 @@ mod tests {
"Creates a [`Vec`] containing the arguments.\n\n- Create a [`Vec`] containing a given list of elements:\n\n```\nlet v = vec![1, 2, 3];\nassert_eq!(v[0], 1);\nassert_eq!(v[1], 2);\nassert_eq!(v[2], 3);\n```", "Creates a [`Vec`] containing the arguments.\n\n- Create a [`Vec`] containing a given list of elements:\n\n```\nlet v = vec![1, 2, 3];\nassert_eq!(v[0], 1);\nassert_eq!(v[1], 2);\nassert_eq!(v[2], 3);\n```",
), ),
}, },
]"## ]
"###
); );
} }
@ -110,7 +114,8 @@ mod tests {
} }
" "
), ),
@r###"[ @r###"
[
CompletionItem { CompletionItem {
label: "foo!", label: "foo!",
source_range: [163; 163), source_range: [163; 163),


@ -152,7 +152,8 @@ mod tests {
} }
" "
), ),
@r###"[ @r###"
[
CompletionItem { CompletionItem {
label: "my", label: "my",
source_range: [27; 29), source_range: [27; 29),
@ -163,7 +164,8 @@ mod tests {
"Some simple\ndocs describing `mod my`.", "Some simple\ndocs describing `mod my`.",
), ),
}, },
]"### ]
"###
); );
} }
@ -179,7 +181,8 @@ mod tests {
} }
" "
), ),
@r###"[ @r###"
[
CompletionItem { CompletionItem {
label: "Bar", label: "Bar",
source_range: [30; 30), source_range: [30; 30),
@ -187,7 +190,8 @@ mod tests {
insert: "Bar", insert: "Bar",
kind: Struct, kind: Struct,
}, },
]"### ]
"###
); );
} }
@ -203,7 +207,8 @@ mod tests {
use crate::Sp<|> use crate::Sp<|>
" "
), ),
@r###"[ @r###"
[
CompletionItem { CompletionItem {
label: "Spam", label: "Spam",
source_range: [11; 13), source_range: [11; 13),
@ -218,7 +223,8 @@ mod tests {
insert: "foo", insert: "foo",
kind: Module, kind: Module,
}, },
]"### ]
"###
); );
} }
@ -234,7 +240,8 @@ mod tests {
use crate::{Sp<|>}; use crate::{Sp<|>};
" "
), ),
@r###"[ @r###"
[
CompletionItem { CompletionItem {
label: "Spam", label: "Spam",
source_range: [12; 14), source_range: [12; 14),
@ -249,7 +256,8 @@ mod tests {
insert: "foo", insert: "foo",
kind: Module, kind: Module,
}, },
]"### ]
"###
); );
} }
@ -269,7 +277,8 @@ mod tests {
use crate::{bar::{baz::Sp<|>}}; use crate::{bar::{baz::Sp<|>}};
" "
), ),
@r###"[ @r###"
[
CompletionItem { CompletionItem {
label: "Spam", label: "Spam",
source_range: [23; 25), source_range: [23; 25),
@ -277,7 +286,8 @@ mod tests {
insert: "Spam", insert: "Spam",
kind: Struct, kind: Struct,
}, },
]"### ]
"###
); );
} }
@ -297,7 +307,8 @@ mod tests {
fn foo() { let _ = E::<|> } fn foo() { let _ = E::<|> }
" "
), ),
@r###"[ @r###"
[
CompletionItem { CompletionItem {
label: "Bar", label: "Bar",
source_range: [116; 116), source_range: [116; 116),
@ -320,7 +331,8 @@ mod tests {
"Foo Variant", "Foo Variant",
), ),
}, },
]"### ]
"###
); );
} }
@ -343,7 +355,8 @@ mod tests {
fn foo() { let _ = E::<|> } fn foo() { let _ = E::<|> }
" "
), ),
@r###"[ @r###"
[
CompletionItem { CompletionItem {
label: "Bar", label: "Bar",
source_range: [180; 180), source_range: [180; 180),
@ -377,7 +390,8 @@ mod tests {
"", "",
), ),
}, },
]"### ]
"###
); );
} }
@ -434,7 +448,8 @@ mod tests {
fn foo() { let _ = S::<|> } fn foo() { let _ = S::<|> }
" "
), ),
@r###"[ @r###"
[
CompletionItem { CompletionItem {
label: "C", label: "C",
source_range: [107; 107), source_range: [107; 107),
@ -446,7 +461,8 @@ mod tests {
"An associated const", "An associated const",
), ),
}, },
]"### ]
"###
); );
} }
@ -467,7 +483,8 @@ mod tests {
fn foo() { let _ = S::<|> } fn foo() { let _ = S::<|> }
" "
), ),
@r###"[ @r###"
[
CompletionItem { CompletionItem {
label: "T", label: "T",
source_range: [101; 101), source_range: [101; 101),
@ -479,7 +496,8 @@ mod tests {
"An associated type", "An associated type",
), ),
}, },
]"### ]
"###
); );
} }
@ -569,7 +587,8 @@ mod tests {
} }
" "
), ),
@r###"[ @r###"
[
CompletionItem { CompletionItem {
label: "bar", label: "bar",
source_range: [9; 9), source_range: [9; 9),
@ -577,7 +596,8 @@ mod tests {
insert: "bar", insert: "bar",
kind: Module, kind: Module,
}, },
]"### ]
"###
); );
} }


@ -54,36 +54,36 @@ mod tests {
", ",
); );
assert_debug_snapshot!(completions, @r###" assert_debug_snapshot!(completions, @r###"
[ [
CompletionItem { CompletionItem {
label: "E", label: "E",
source_range: [246; 246), source_range: [246; 246),
delete: [246; 246), delete: [246; 246),
insert: "E", insert: "E",
kind: Enum, kind: Enum,
}, },
CompletionItem { CompletionItem {
label: "X", label: "X",
source_range: [246; 246), source_range: [246; 246),
delete: [246; 246), delete: [246; 246),
insert: "X", insert: "X",
kind: EnumVariant, kind: EnumVariant,
}, },
CompletionItem { CompletionItem {
label: "Z", label: "Z",
source_range: [246; 246), source_range: [246; 246),
delete: [246; 246), delete: [246; 246),
insert: "Z", insert: "Z",
kind: Const, kind: Const,
}, },
CompletionItem { CompletionItem {
label: "m", label: "m",
source_range: [246; 246), source_range: [246; 246),
delete: [246; 246), delete: [246; 246),
insert: "m", insert: "m",
kind: Module, kind: Module,
}, },
] ]
"###); "###);
} }
} }


@ -104,7 +104,8 @@ mod tests {
} }
"#, "#,
), ),
@r###"[ @r###"
[
CompletionItem { CompletionItem {
label: "box", label: "box",
source_range: [89; 89), source_range: [89; 89),
@ -161,7 +162,8 @@ mod tests {
insert: "while bar {\n$0\n}", insert: "while bar {\n$0\n}",
detail: "while expr {}", detail: "while expr {}",
}, },
]"### ]
"###
); );
} }
@ -176,7 +178,8 @@ mod tests {
} }
"#, "#,
), ),
@r###"[ @r###"
[
CompletionItem { CompletionItem {
label: "box", label: "box",
source_range: [91; 91), source_range: [91; 91),
@ -219,7 +222,8 @@ mod tests {
insert: "&mut bar", insert: "&mut bar",
detail: "&mut expr", detail: "&mut expr",
}, },
]"### ]
"###
); );
} }
@ -233,7 +237,8 @@ mod tests {
} }
"#, "#,
), ),
@r###"[ @r###"
[
CompletionItem { CompletionItem {
label: "box", label: "box",
source_range: [52; 52), source_range: [52; 52),
@ -276,7 +281,8 @@ mod tests {
insert: "&mut 42", insert: "&mut 42",
detail: "&mut expr", detail: "&mut expr",
}, },
]"### ]
"###
); );
} }
} }


@ -45,17 +45,17 @@ mod tests {
", ",
); );
assert_debug_snapshot!(completions, @r###" assert_debug_snapshot!(completions, @r###"
[ [
CompletionItem { CompletionItem {
label: "the_field", label: "the_field",
source_range: [142; 145), source_range: [142; 145),
delete: [142; 145), delete: [142; 145),
insert: "the_field", insert: "the_field",
kind: Field, kind: Field,
detail: "u32", detail: "u32",
deprecated: true, deprecated: true,
}, },
] ]
"###); "###);
} }
@ -70,16 +70,16 @@ mod tests {
", ",
); );
assert_debug_snapshot!(completions, @r###" assert_debug_snapshot!(completions, @r###"
[ [
CompletionItem { CompletionItem {
label: "the_field", label: "the_field",
source_range: [83; 86), source_range: [83; 86),
delete: [83; 86), delete: [83; 86),
insert: "the_field", insert: "the_field",
kind: Field, kind: Field,
detail: "u32", detail: "u32",
}, },
] ]
"###); "###);
} }
@ -96,16 +96,16 @@ mod tests {
", ",
); );
assert_debug_snapshot!(completions, @r###" assert_debug_snapshot!(completions, @r###"
[ [
CompletionItem { CompletionItem {
label: "a", label: "a",
source_range: [119; 119), source_range: [119; 119),
delete: [119; 119), delete: [119; 119),
insert: "a", insert: "a",
kind: Field, kind: Field,
detail: "u32", detail: "u32",
}, },
] ]
"###); "###);
} }
@ -122,16 +122,16 @@ mod tests {
", ",
); );
assert_debug_snapshot!(completions, @r###" assert_debug_snapshot!(completions, @r###"
[ [
CompletionItem { CompletionItem {
label: "b", label: "b",
source_range: [119; 119), source_range: [119; 119),
delete: [119; 119), delete: [119; 119),
insert: "b", insert: "b",
kind: Field, kind: Field,
detail: "u32", detail: "u32",
}, },
] ]
"###); "###);
} }
@ -147,16 +147,16 @@ mod tests {
", ",
); );
assert_debug_snapshot!(completions, @r###" assert_debug_snapshot!(completions, @r###"
[ [
CompletionItem { CompletionItem {
label: "a", label: "a",
source_range: [93; 93), source_range: [93; 93),
delete: [93; 93), delete: [93; 93),
insert: "a", insert: "a",
kind: Field, kind: Field,
detail: "u32", detail: "u32",
}, },
] ]
"###); "###);
} }
} }


@ -44,16 +44,16 @@ mod tests {
", ",
); );
assert_debug_snapshot!(completions, @r###" assert_debug_snapshot!(completions, @r###"
[ [
CompletionItem { CompletionItem {
label: "foo", label: "foo",
source_range: [117; 118), source_range: [117; 118),
delete: [117; 118), delete: [117; 118),
insert: "foo", insert: "foo",
kind: Field, kind: Field,
detail: "u32", detail: "u32",
}, },
] ]
"###); "###);
} }
@ -73,24 +73,24 @@ mod tests {
", ",
); );
assert_debug_snapshot!(completions, @r###" assert_debug_snapshot!(completions, @r###"
[ [
CompletionItem { CompletionItem {
label: "bar", label: "bar",
source_range: [161; 161), source_range: [161; 161),
delete: [161; 161), delete: [161; 161),
insert: "bar", insert: "bar",
kind: Field, kind: Field,
detail: "()", detail: "()",
}, },
CompletionItem { CompletionItem {
label: "foo", label: "foo",
source_range: [161; 161), source_range: [161; 161),
delete: [161; 161), delete: [161; 161),
insert: "foo", insert: "foo",
kind: Field, kind: Field,
detail: "u32", detail: "u32",
}, },
] ]
"###); "###);
} }
} }


@ -409,7 +409,8 @@ mod tests {
// nothing here // nothing here
" "
), ),
@r#"[ @r###"
[
CompletionItem { CompletionItem {
label: "other_crate", label: "other_crate",
source_range: [4; 4), source_range: [4; 4),
@ -417,7 +418,8 @@ mod tests {
insert: "other_crate", insert: "other_crate",
kind: Module, kind: Module,
}, },
]"# ]
"###
); );
} }
@ -530,7 +532,8 @@ mod tests {
fn completes_self_in_methods() { fn completes_self_in_methods() {
assert_debug_snapshot!( assert_debug_snapshot!(
do_reference_completion(r"impl S { fn foo(&self) { <|> } }"), do_reference_completion(r"impl S { fn foo(&self) { <|> } }"),
@r#"[ @r###"
[
CompletionItem { CompletionItem {
label: "Self", label: "Self",
source_range: [25; 25), source_range: [25; 25),
@ -546,7 +549,8 @@ mod tests {
kind: Binding, kind: Binding,
detail: "&{unknown}", detail: "&{unknown}",
}, },
]"# ]
"###
); );
} }


@ -52,7 +52,8 @@ mod tests {
fn completes_snippets_in_expressions() { fn completes_snippets_in_expressions() {
assert_debug_snapshot!( assert_debug_snapshot!(
do_snippet_completion(r"fn foo(x: i32) { <|> }"), do_snippet_completion(r"fn foo(x: i32) { <|> }"),
@r#"[ @r###"
[
CompletionItem { CompletionItem {
label: "pd", label: "pd",
source_range: [17; 17), source_range: [17; 17),
@ -67,7 +68,8 @@ mod tests {
insert: "eprintln!(\"$0 = {:#?}\", $0);", insert: "eprintln!(\"$0 = {:#?}\", $0);",
kind: Snippet, kind: Snippet,
}, },
]"# ]
"###
); );
} }
@ -75,11 +77,11 @@ mod tests {
fn should_not_complete_snippets_in_path() { fn should_not_complete_snippets_in_path() {
assert_debug_snapshot!( assert_debug_snapshot!(
do_snippet_completion(r"fn foo(x: i32) { ::foo<|> }"), do_snippet_completion(r"fn foo(x: i32) { ::foo<|> }"),
@r#"[]"# @"[]"
); );
assert_debug_snapshot!( assert_debug_snapshot!(
do_snippet_completion(r"fn foo(x: i32) { ::<|> }"), do_snippet_completion(r"fn foo(x: i32) { ::<|> }"),
@r#"[]"# @"[]"
); );
} }
@ -94,7 +96,8 @@ mod tests {
} }
" "
), ),
@r###"[ @r###"
[
CompletionItem { CompletionItem {
label: "Test function", label: "Test function",
source_range: [78; 78), source_range: [78; 78),
@ -110,7 +113,8 @@ mod tests {
insert: "pub(crate) $0", insert: "pub(crate) $0",
kind: Snippet, kind: Snippet,
}, },
]"### ]
"###
); );
} }
} }


@ -68,7 +68,7 @@ impl Completions {
ScopeDef::ModuleDef(TypeAlias(..)) => CompletionItemKind::TypeAlias, ScopeDef::ModuleDef(TypeAlias(..)) => CompletionItemKind::TypeAlias,
ScopeDef::ModuleDef(BuiltinType(..)) => CompletionItemKind::BuiltinType, ScopeDef::ModuleDef(BuiltinType(..)) => CompletionItemKind::BuiltinType,
ScopeDef::GenericParam(..) => CompletionItemKind::TypeParam, ScopeDef::GenericParam(..) => CompletionItemKind::TypeParam,
ScopeDef::LocalBinding(..) => CompletionItemKind::Binding, ScopeDef::Local(..) => CompletionItemKind::Binding,
// (does this need its own kind?) // (does this need its own kind?)
ScopeDef::AdtSelfType(..) | ScopeDef::ImplSelfType(..) => CompletionItemKind::TypeParam, ScopeDef::AdtSelfType(..) | ScopeDef::ImplSelfType(..) => CompletionItemKind::TypeParam,
ScopeDef::MacroDef(mac) => { ScopeDef::MacroDef(mac) => {
@ -96,13 +96,11 @@ impl Completions {
let mut completion_item = let mut completion_item =
CompletionItem::new(completion_kind, ctx.source_range(), local_name.clone()); CompletionItem::new(completion_kind, ctx.source_range(), local_name.clone());
if let ScopeDef::LocalBinding(pat_id) = resolution { if let ScopeDef::Local(local) = resolution {
let ty = ctx let ty = local.ty(ctx.db);
.analyzer if ty != Ty::Unknown {
.type_of_pat_by_id(ctx.db, pat_id.clone()) completion_item = completion_item.detail(ty.display(ctx.db).to_string());
.filter(|t| t != &Ty::Unknown) }
.map(|t| t.display(ctx.db).to_string());
completion_item = completion_item.set_detail(ty);
}; };
// If not an import, add parenthesis automatically. // If not an import, add parenthesis automatically.
@ -461,7 +459,8 @@ mod tests {
use crate::m::f<|>; use crate::m::f<|>;
" "
), ),
@r#"[ @r###"
[
CompletionItem { CompletionItem {
label: "foo", label: "foo",
source_range: [40; 41), source_range: [40; 41),
@ -470,7 +469,8 @@ mod tests {
kind: Function, kind: Function,
detail: "pub fn foo()", detail: "pub fn foo()",
}, },
]"# ]
"###
); );
} }
@ -486,7 +486,8 @@ mod tests {
} }
" "
), ),
@r#"[ @r###"
[
CompletionItem { CompletionItem {
label: "frobnicate", label: "frobnicate",
source_range: [35; 39), source_range: [35; 39),
@ -503,7 +504,8 @@ mod tests {
kind: Function, kind: Function,
detail: "fn main()", detail: "fn main()",
}, },
]"# ]
"###
); );
assert_debug_snapshot!( assert_debug_snapshot!(
do_reference_completion( do_reference_completion(
@ -516,7 +518,8 @@ mod tests {
} }
" "
), ),
@r#"[ @r###"
[
CompletionItem { CompletionItem {
label: "new", label: "new",
source_range: [67; 69), source_range: [67; 69),
@ -525,7 +528,8 @@ mod tests {
kind: Function, kind: Function,
detail: "fn new() -> Foo", detail: "fn new() -> Foo",
}, },
]"# ]
"###
); );
} }


@ -526,28 +526,28 @@ mod tests {
let (analysis, file_id) = single_file("mod foo;"); let (analysis, file_id) = single_file("mod foo;");
let diagnostics = analysis.diagnostics(file_id).unwrap(); let diagnostics = analysis.diagnostics(file_id).unwrap();
assert_debug_snapshot!(diagnostics, @r###" assert_debug_snapshot!(diagnostics, @r###"
[ [
Diagnostic { Diagnostic {
message: "unresolved module", message: "unresolved module",
range: [0; 8), range: [0; 8),
fix: Some( fix: Some(
SourceChange { SourceChange {
label: "create module", label: "create module",
source_file_edits: [], source_file_edits: [],
file_system_edits: [ file_system_edits: [
CreateFile { CreateFile {
source_root: SourceRootId( source_root: SourceRootId(
0, 0,
), ),
path: "foo.rs", path: "foo.rs",
}, },
], ],
cursor_position: None, cursor_position: None,
}, },
), ),
severity: Error, severity: Error,
}, },
] ]
"###); "###);
} }


@ -209,7 +209,8 @@ fn very_obsolete() {}
.unwrap(); .unwrap();
let structure = file_structure(&file); let structure = file_structure(&file);
assert_debug_snapshot!(structure, assert_debug_snapshot!(structure,
@r#"[ @r###"
[
StructureNode { StructureNode {
parent: None, parent: None,
label: "Foo", label: "Foo",
@ -393,7 +394,8 @@ fn very_obsolete() {}
), ),
deprecated: true, deprecated: true,
}, },
]"# ]
"###
); );
} }
} }


@ -5,7 +5,7 @@ use ra_db::SourceDatabase;
use ra_syntax::{ use ra_syntax::{
algo::{ancestors_at_offset, find_covering_element, find_node_at_offset}, algo::{ancestors_at_offset, find_covering_element, find_node_at_offset},
ast::{self, DocCommentsOwner}, ast::{self, DocCommentsOwner},
match_ast, AstNode, AstNode,
}; };
use crate::{ use crate::{
@ -14,7 +14,7 @@ use crate::{
description_from_symbol, docs_from_symbol, macro_label, rust_code_markup, description_from_symbol, docs_from_symbol, macro_label, rust_code_markup,
rust_code_markup_with_doc, ShortLabel, rust_code_markup_with_doc, ShortLabel,
}, },
references::{classify_name_ref, NameKind::*}, references::{classify_name, classify_name_ref, NameKind, NameKind::*},
FilePosition, FileRange, RangeInfo, FilePosition, FileRange, RangeInfo,
}; };
@ -92,65 +92,88 @@ fn hover_text(docs: Option<String>, desc: Option<String>) -> Option<String> {
} }
} }
pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeInfo<HoverResult>> { fn hover_text_from_name_kind(
let parse = db.parse(position.file_id); db: &RootDatabase,
let file = parse.tree(); name_kind: NameKind,
let mut res = HoverResult::new(); no_fallback: &mut bool,
) -> Option<String> {
let mut range = None; return match name_kind {
if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(file.syntax(), position.offset) { Macro(it) => {
let mut no_fallback = false;
let name_kind = classify_name_ref(db, position.file_id, &name_ref).map(|d| d.kind);
match name_kind {
Some(Macro(it)) => {
let src = it.source(db); let src = it.source(db);
res.extend(hover_text(src.ast.doc_comment_text(), Some(macro_label(&src.ast)))); hover_text(src.ast.doc_comment_text(), Some(macro_label(&src.ast)))
} }
Some(Field(it)) => { Field(it) => {
let src = it.source(db); let src = it.source(db);
if let hir::FieldSource::Named(it) = src.ast { match src.ast {
res.extend(hover_text(it.doc_comment_text(), it.short_label())); hir::FieldSource::Named(it) => hover_text(it.doc_comment_text(), it.short_label()),
_ => None,
} }
} }
Some(AssocItem(it)) => res.extend(match it { AssocItem(it) => match it {
hir::AssocItem::Function(it) => from_def_source(db, it), hir::AssocItem::Function(it) => from_def_source(db, it),
hir::AssocItem::Const(it) => from_def_source(db, it), hir::AssocItem::Const(it) => from_def_source(db, it),
hir::AssocItem::TypeAlias(it) => from_def_source(db, it), hir::AssocItem::TypeAlias(it) => from_def_source(db, it),
}),
Some(Def(it)) => match it {
hir::ModuleDef::Module(it) => {
if let hir::ModuleSource::Module(it) = it.definition_source(db).ast {
res.extend(hover_text(it.doc_comment_text(), it.short_label()))
}
}
hir::ModuleDef::Function(it) => res.extend(from_def_source(db, it)),
hir::ModuleDef::Adt(Adt::Struct(it)) => res.extend(from_def_source(db, it)),
hir::ModuleDef::Adt(Adt::Union(it)) => res.extend(from_def_source(db, it)),
hir::ModuleDef::Adt(Adt::Enum(it)) => res.extend(from_def_source(db, it)),
hir::ModuleDef::EnumVariant(it) => res.extend(from_def_source(db, it)),
hir::ModuleDef::Const(it) => res.extend(from_def_source(db, it)),
hir::ModuleDef::Static(it) => res.extend(from_def_source(db, it)),
hir::ModuleDef::Trait(it) => res.extend(from_def_source(db, it)),
hir::ModuleDef::TypeAlias(it) => res.extend(from_def_source(db, it)),
hir::ModuleDef::BuiltinType(it) => res.extend(Some(it.to_string())),
}, },
Some(SelfType(ty)) => { Def(it) => match it {
if let Some((adt_def, _)) = ty.as_adt() { hir::ModuleDef::Module(it) => match it.definition_source(db).ast {
res.extend(match adt_def { hir::ModuleSource::Module(it) => {
hover_text(it.doc_comment_text(), it.short_label())
}
_ => None,
},
hir::ModuleDef::Function(it) => from_def_source(db, it),
hir::ModuleDef::Adt(Adt::Struct(it)) => from_def_source(db, it),
hir::ModuleDef::Adt(Adt::Union(it)) => from_def_source(db, it),
hir::ModuleDef::Adt(Adt::Enum(it)) => from_def_source(db, it),
hir::ModuleDef::EnumVariant(it) => from_def_source(db, it),
hir::ModuleDef::Const(it) => from_def_source(db, it),
hir::ModuleDef::Static(it) => from_def_source(db, it),
hir::ModuleDef::Trait(it) => from_def_source(db, it),
hir::ModuleDef::TypeAlias(it) => from_def_source(db, it),
hir::ModuleDef::BuiltinType(it) => Some(it.to_string()),
},
SelfType(ty) => match ty.as_adt() {
Some((adt_def, _)) => match adt_def {
hir::Adt::Struct(it) => from_def_source(db, it), hir::Adt::Struct(it) => from_def_source(db, it),
hir::Adt::Union(it) => from_def_source(db, it), hir::Adt::Union(it) => from_def_source(db, it),
hir::Adt::Enum(it) => from_def_source(db, it), hir::Adt::Enum(it) => from_def_source(db, it),
}) },
} _ => None,
} },
Some(Local(_)) => { Local(_) => {
// Hover for these shows type names // Hover for these shows type names
no_fallback = true; *no_fallback = true;
None
} }
Some(GenericParam(_)) => { GenericParam(_) => {
// FIXME: Hover for generic param // FIXME: Hover for generic param
None
} }
None => {} };
fn from_def_source<A, D>(db: &RootDatabase, def: D) -> Option<String>
where
D: HasSource<Ast = A>,
A: ast::DocCommentsOwner + ast::NameOwner + ShortLabel,
{
let src = def.source(db);
hover_text(src.ast.doc_comment_text(), src.ast.short_label())
}
}
pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeInfo<HoverResult>> {
let parse = db.parse(position.file_id);
let file = parse.tree();
let mut res = HoverResult::new();
let mut range = if let Some(name_ref) =
find_node_at_offset::<ast::NameRef>(file.syntax(), position.offset)
{
let mut no_fallback = false;
if let Some(name_kind) = classify_name_ref(db, position.file_id, &name_ref).map(|d| d.kind)
{
res.extend(hover_text_from_name_kind(db, name_kind, &mut no_fallback))
} }
if res.is_empty() && !no_fallback { if res.is_empty() && !no_fallback {
@ -164,55 +187,24 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
} }
if !res.is_empty() { if !res.is_empty() {
range = Some(name_ref.syntax().text_range()) Some(name_ref.syntax().text_range())
} else {
None
} }
} else if let Some(name) = find_node_at_offset::<ast::Name>(file.syntax(), position.offset) { } else if let Some(name) = find_node_at_offset::<ast::Name>(file.syntax(), position.offset) {
if let Some(parent) = name.syntax().parent() { if let Some(name_kind) = classify_name(db, position.file_id, &name).map(|d| d.kind) {
let text = match_ast! { let mut _b: bool = true;
match parent { res.extend(hover_text_from_name_kind(db, name_kind, &mut _b));
ast::StructDef(it) => {
hover_text(it.doc_comment_text(), it.short_label())
},
ast::EnumDef(it) => {
hover_text(it.doc_comment_text(), it.short_label())
},
ast::EnumVariant(it) => {
hover_text(it.doc_comment_text(), it.short_label())
},
ast::FnDef(it) => {
hover_text(it.doc_comment_text(), it.short_label())
},
ast::TypeAliasDef(it) => {
hover_text(it.doc_comment_text(), it.short_label())
},
ast::ConstDef(it) => {
hover_text(it.doc_comment_text(), it.short_label())
},
ast::StaticDef(it) => {
hover_text(it.doc_comment_text(), it.short_label())
},
ast::TraitDef(it) => {
hover_text(it.doc_comment_text(), it.short_label())
},
ast::RecordFieldDef(it) => {
hover_text(it.doc_comment_text(), it.short_label())
},
ast::Module(it) => {
hover_text(it.doc_comment_text(), it.short_label())
},
ast::MacroCall(it) => {
hover_text(it.doc_comment_text(), None)
},
_ => None,
}
};
res.extend(text);
} }
if !res.is_empty() && range.is_none() { if !res.is_empty() {
range = Some(name.syntax().text_range()); Some(name.syntax().text_range())
} } else {
None
} }
} else {
None
};
if range.is_none() { if range.is_none() {
let node = ancestors_at_offset(file.syntax(), position.offset).find(|n| { let node = ancestors_at_offset(file.syntax(), position.offset).find(|n| {
@ -221,23 +213,13 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
let frange = FileRange { file_id: position.file_id, range: node.text_range() }; let frange = FileRange { file_id: position.file_id, range: node.text_range() };
res.extend(type_of(db, frange).map(rust_code_markup)); res.extend(type_of(db, frange).map(rust_code_markup));
range = Some(node.text_range()); range = Some(node.text_range());
} };
let range = range?; let range = range?;
if res.is_empty() { if res.is_empty() {
return None; return None;
} }
let res = RangeInfo::new(range, res); Some(RangeInfo::new(range, res))
return Some(res);
fn from_def_source<A, D>(db: &RootDatabase, def: D) -> Option<String>
where
D: HasSource<Ast = A>,
A: ast::DocCommentsOwner + ast::NameOwner + ShortLabel,
{
let src = def.source(db);
hover_text(src.ast.doc_comment_text(), src.ast.short_label())
}
} }
pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option<String> { pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option<String> {


@ -214,7 +214,8 @@ fn main() {
}"#, }"#,
); );
assert_debug_snapshot!(analysis.inlay_hints(file_id).unwrap(), @r#"[ assert_debug_snapshot!(analysis.inlay_hints(file_id).unwrap(), @r###"
[
InlayHint { InlayHint {
range: [193; 197), range: [193; 197),
kind: TypeHint, kind: TypeHint,
@ -265,7 +266,8 @@ fn main() {
kind: TypeHint, kind: TypeHint,
label: "f64", label: "f64",
}, },
]"# ]
"###
); );
} }
@ -281,7 +283,8 @@ fn main() {
}"#, }"#,
); );
assert_debug_snapshot!(analysis.inlay_hints(file_id).unwrap(), @r#"[ assert_debug_snapshot!(analysis.inlay_hints(file_id).unwrap(), @r###"
[
InlayHint { InlayHint {
range: [21; 30), range: [21; 30),
kind: TypeHint, kind: TypeHint,
@ -292,7 +295,8 @@ fn main() {
kind: TypeHint, kind: TypeHint,
label: "i32", label: "i32",
}, },
]"# ]
"###
); );
} }
@ -308,7 +312,8 @@ fn main() {
}"#, }"#,
); );
assert_debug_snapshot!(analysis.inlay_hints(file_id).unwrap(), @r#"[ assert_debug_snapshot!(analysis.inlay_hints(file_id).unwrap(), @r###"
[
InlayHint { InlayHint {
range: [21; 30), range: [21; 30),
kind: TypeHint, kind: TypeHint,
@ -319,7 +324,8 @@ fn main() {
kind: TypeHint, kind: TypeHint,
label: "i32", label: "i32",
}, },
]"# ]
"###
); );
} }
@ -354,7 +360,8 @@ fn main() {
}"#, }"#,
); );
assert_debug_snapshot!(analysis.inlay_hints(file_id).unwrap(), @r#"[ assert_debug_snapshot!(analysis.inlay_hints(file_id).unwrap(), @r###"
[
InlayHint { InlayHint {
range: [166; 170), range: [166; 170),
kind: TypeHint, kind: TypeHint,
@ -380,7 +387,8 @@ fn main() {
kind: TypeHint, kind: TypeHint,
label: "&u32", label: "&u32",
}, },
]"# ]
"###
); );
} }
@ -416,33 +424,33 @@ fn main() {
); );
assert_debug_snapshot!(analysis.inlay_hints(file_id).unwrap(), @r###" assert_debug_snapshot!(analysis.inlay_hints(file_id).unwrap(), @r###"
[ [
InlayHint { InlayHint {
range: [166; 170), range: [166; 170),
kind: TypeHint, kind: TypeHint,
label: "CustomOption<Test>", label: "CustomOption<Test>",
}, },
InlayHint { InlayHint {
range: [343; 347), range: [343; 347),
kind: TypeHint, kind: TypeHint,
label: "&Test", label: "&Test",
}, },
InlayHint { InlayHint {
range: [401; 402), range: [401; 402),
kind: TypeHint, kind: TypeHint,
label: "&CustomOption<u32>", label: "&CustomOption<u32>",
}, },
InlayHint { InlayHint {
range: [404; 405), range: [404; 405),
kind: TypeHint, kind: TypeHint,
label: "&u8", label: "&u8",
}, },
InlayHint { InlayHint {
range: [549; 550), range: [549; 550),
kind: TypeHint, kind: TypeHint,
label: "&u32", label: "&u32",
}, },
] ]
"### "###
); );
} }
@ -478,7 +486,8 @@ fn main() {
}"#, }"#,
); );
assert_debug_snapshot!(analysis.inlay_hints(file_id).unwrap(), @r#"[ assert_debug_snapshot!(analysis.inlay_hints(file_id).unwrap(), @r###"
[
InlayHint { InlayHint {
range: [311; 315), range: [311; 315),
kind: TypeHint, kind: TypeHint,
@ -499,7 +508,8 @@ fn main() {
kind: TypeHint, kind: TypeHint,
label: "u32", label: "u32",
}, },
]"# ]
"###
); );
} }
} }


@ -97,7 +97,8 @@ mod tests {
); );
let runnables = analysis.runnables(pos.file_id).unwrap(); let runnables = analysis.runnables(pos.file_id).unwrap();
assert_debug_snapshot!(&runnables, assert_debug_snapshot!(&runnables,
@r#"[ @r###"
[
Runnable { Runnable {
range: [1; 21), range: [1; 21),
kind: Bin, kind: Bin,
@ -114,7 +115,8 @@ mod tests {
name: "test_foo", name: "test_foo",
}, },
}, },
]"# ]
"###
); );
} }
@ -132,7 +134,8 @@ mod tests {
); );
let runnables = analysis.runnables(pos.file_id).unwrap(); let runnables = analysis.runnables(pos.file_id).unwrap();
assert_debug_snapshot!(&runnables, assert_debug_snapshot!(&runnables,
@r#"[ @r###"
[
Runnable { Runnable {
range: [1; 59), range: [1; 59),
kind: TestMod { kind: TestMod {
@ -145,7 +148,8 @@ mod tests {
name: "test_foo1", name: "test_foo1",
}, },
}, },
]"# ]
"###
); );
} }
@ -165,7 +169,8 @@ mod tests {
); );
let runnables = analysis.runnables(pos.file_id).unwrap(); let runnables = analysis.runnables(pos.file_id).unwrap();
assert_debug_snapshot!(&runnables, assert_debug_snapshot!(&runnables,
@r#"[ @r###"
[
Runnable { Runnable {
range: [23; 85), range: [23; 85),
kind: TestMod { kind: TestMod {
@ -178,7 +183,8 @@ mod tests {
name: "test_foo1", name: "test_foo1",
}, },
}, },
]"# ]
"###
); );
} }
@ -200,7 +206,8 @@ mod tests {
); );
let runnables = analysis.runnables(pos.file_id).unwrap(); let runnables = analysis.runnables(pos.file_id).unwrap();
assert_debug_snapshot!(&runnables, assert_debug_snapshot!(&runnables,
@r#"[ @r###"
[
Runnable { Runnable {
range: [41; 115), range: [41; 115),
kind: TestMod { kind: TestMod {
@ -213,7 +220,8 @@ mod tests {
name: "test_foo1", name: "test_foo1",
}, },
}, },
]"# ]
"###
); );
} }


@ -9,7 +9,7 @@ ra_syntax = { path = "../ra_syntax" }
ra_parser = { path = "../ra_parser" } ra_parser = { path = "../ra_parser" }
tt = { path = "../ra_tt", package = "ra_tt" } tt = { path = "../ra_tt", package = "ra_tt" }
rustc-hash = "1.0.0" rustc-hash = "1.0.0"
smallvec = "0.6.9" smallvec = "1.0.0"
log = "0.4.5" log = "0.4.5"
[dev-dependencies] [dev-dependencies]


@ -150,6 +150,27 @@ use std::collections::HashMap;
fn process(map: HashMap<String, String>) {} fn process(map: HashMap<String, String>) {}
``` ```
## `add_new`
Adds a new inherent impl for a type.
```rust
// BEFORE
struct Ctx<T: Clone> {
data: T,┃
}
// AFTER
struct Ctx<T: Clone> {
data: T,
}
impl<T: Clone> Ctx<T> {
fn new(data: T) -> Self { Self { data } }
}
```
## `apply_demorgan` ## `apply_demorgan`
Apply [De Morgan's law](https://en.wikipedia.org/wiki/De_Morgan%27s_laws). Apply [De Morgan's law](https://en.wikipedia.org/wiki/De_Morgan%27s_laws).
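
For illustration, a minimal sketch of the kind of rewrite De Morgan's law implies (the function and variable names here are hypothetical, not taken from the assist's own documentation):

```rust
// BEFORE (hypothetical input; ┃ marks the cursor)
fn check(a: bool, b: bool) -> bool {
    !(a &&┃ b)
}

// AFTER (equivalent by De Morgan's law: !(a && b) == !a || !b)
fn check(a: bool, b: bool) -> bool {
    !a || !b
}
```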