Mirror of https://github.com/rust-lang/rust-analyzer
Synced 2024-12-26 13:03:31 +00:00

Merge #2837

2837: Accidentally quadratic r=matklad a=matklad

Our syntax highlighting is accidentally quadratic. The current state of the PR fixes it in a fairly crude way; it looks like the proper fix will require redoing how source-analyzer works.

**NB:** don't be scared by the diff stats, that's mostly a test-data file.

Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>

Commit c78d269b66
12 changed files with 4309 additions and 177 deletions
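The core of the fix, stripped of rust-analyzer specifics, is memoization: instead of redoing the container lookup from scratch for every syntax node (which is what made highlighting quadratic), the new `SourceBinder` keeps a per-container cache and reuses it across queries. Below is a minimal, self-contained Rust sketch of that idea; the names `Binder`, `Container`, `DefMap`, and `defs_of` are illustrative stand-ins, not the real rust-analyzer API.

use std::collections::HashMap;

// Stand-in for ChildContainer (a module, impl, trait, ... that owns items).
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
struct Container(u32);

// Stand-in for the source-to-ID map that child_by_source builds per container.
struct DefMap;

struct Binder {
    cache: HashMap<Container, DefMap>,
}

impl Binder {
    fn new() -> Binder {
        Binder { cache: HashMap::new() }
    }

    // The expensive per-container work happens at most once per container,
    // instead of once per syntax node as in the old SourceAnalyzer::new path.
    fn defs_of(&mut self, container: Container) -> &DefMap {
        self.cache.entry(container).or_insert_with(|| {
            // ... build the map from source syntax to hir-level IDs ...
            DefMap
        })
    }
}

fn main() {
    let mut binder = Binder::new();
    for container_of_node in [Container(0), Container(0), Container(1)] {
        // The second lookup of Container(0) hits the cache.
        let _defs = binder.defs_of(container_of_node);
    }
}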
Cargo.lock (generated): 12 changes
@@ -739,7 +739,7 @@ dependencies = [
 
 [[package]]
 name = "once_cell"
-version = "1.2.0"
+version = "1.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
@@ -981,7 +981,7 @@ dependencies = [
  "either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "insta 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
- "once_cell 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "once_cell 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "ra_arena 0.1.0",
  "ra_cfg 0.1.0",
  "ra_db 0.1.0",
@@ -1043,7 +1043,7 @@ dependencies = [
  "itertools 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "join_to_string 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
- "once_cell 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "once_cell 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "proptest 0.9.4 (registry+https://github.com/rust-lang/crates.io-index)",
  "ra_assists 0.1.0",
  "ra_cfg 0.1.0",
@@ -1118,7 +1118,7 @@ dependencies = [
  "itertools 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "jemalloc-ctl 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "jemallocator 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "once_cell 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "once_cell 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -1141,7 +1141,7 @@ version = "0.1.0"
 dependencies = [
  "arrayvec 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "itertools 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "once_cell 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "once_cell 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "ra_parser 0.1.0",
  "ra_text_edit 0.1.0",
  "rowan 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1864,7 +1864,7 @@ dependencies = [
 "checksum notify 4.0.15 (registry+https://github.com/rust-lang/crates.io-index)" = "80ae4a7688d1fab81c5bf19c64fc8db920be8d519ce6336ed4e7efe024724dbd"
 "checksum num-traits 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "c62be47e61d1842b9170f0fdeec8eba98e60e90e5446449a0545e5152acd7096"
 "checksum num_cpus 1.11.1 (registry+https://github.com/rust-lang/crates.io-index)" = "76dac5ed2a876980778b8b85f75a71b6cbf0db0b1232ee12f826bccb00d09d72"
-"checksum once_cell 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "891f486f630e5c5a4916c7e16c4b24a53e78c860b646e9f8e005e4f16847bfed"
+"checksum once_cell 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f5941ec2d5ee5916c709580d71553b81a633df245bcc73c04dcbd62152ceefc4"
 "checksum ordermap 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "a86ed3f5f244b372d6b1a00b72ef7f8876d0bc6a78a4c9985c53614041512063"
 "checksum parking_lot 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "92e98c49ab0b7ce5b222f2cc9193fc4efe11c6d0bd4f648e374684a6857b1cfc"
 "checksum parking_lot_core 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7582838484df45743c8434fbff785e8edf260c28748353d44bc0da32e0ceabf1"

@@ -8,7 +8,7 @@
 #![recursion_limit = "512"]
 
 macro_rules! impl_froms {
-    ($e:ident: $($v:ident $(($($sv:ident),*))?),*) => {
+    ($e:ident: $($v:ident $(($($sv:ident),*))?),*$(,)?) => {
         $(
             impl From<$v> for $e {
                 fn from(it: $v) -> $e {
@@ -28,6 +28,7 @@ macro_rules! impl_froms {
 
 pub mod db;
 pub mod source_analyzer;
+pub mod source_binder;
 
 pub mod diagnostics;
 
@@ -47,6 +48,7 @@ pub use crate::{
     from_source::FromSource,
     has_source::HasSource,
     source_analyzer::{PathResolution, ScopeEntryWithSyntax, SourceAnalyzer},
+    source_binder::SourceBinder,
 };
 
 pub use hir_def::{
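The small change to `impl_froms!` above only makes the macro accept an optional trailing comma, which the new `ChildContainer` invocation in source_binder.rs relies on. A standalone sketch of the `$(,)?` pattern follows; the `Either`, `A`, and `B` types are made up for illustration and the macro is simplified (no tuple-variant sub-matcher).

// The $(,)? matcher makes a trailing comma in the invocation optional.
macro_rules! impl_froms {
    ($e:ident: $($v:ident),* $(,)?) => {
        $(
            impl From<$v> for $e {
                fn from(it: $v) -> $e {
                    $e::$v(it)
                }
            }
        )*
    };
}

struct A;
struct B;

enum Either {
    A(A),
    B(B),
}

// This invocation ends with a trailing comma; without $(,)? it would not parse.
impl_froms!(Either: A, B,);

fn main() {
    let _left: Either = A.into();
    let _right: Either = B.into();
}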
@@ -14,26 +14,22 @@ use hir_def::{
         BodySourceMap,
     },
     expr::{ExprId, PatId},
-    nameres::ModuleSource,
-    resolver::{self, resolver_for_scope, HasResolver, Resolver, TypeNs, ValueNs},
+    resolver::{self, resolver_for_scope, Resolver, TypeNs, ValueNs},
     DefWithBodyId, TraitId,
 };
 use hir_expand::{
     hygiene::Hygiene, name::AsName, AstId, HirFileId, InFile, MacroCallId, MacroCallKind,
 };
 use hir_ty::{InEnvironment, InferenceResult, TraitEnvironment};
-use ra_prof::profile;
 use ra_syntax::{
     ast::{self, AstNode},
-    match_ast, AstPtr,
-    SyntaxKind::*,
-    SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, TextUnit,
+    AstPtr, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, TextUnit,
 };
 use rustc_hash::FxHashSet;
 
 use crate::{
-    db::HirDatabase, Adt, Const, DefWithBody, Enum, EnumVariant, FromSource, Function, ImplBlock,
-    Local, MacroDef, Name, Path, ScopeDef, Static, Struct, Trait, Type, TypeAlias, TypeParam,
+    db::HirDatabase, Adt, Const, DefWithBody, EnumVariant, Function, Local, MacroDef, Name, Path,
+    ScopeDef, Static, Struct, Trait, Type, TypeAlias, TypeParam,
 };
 
 /// `SourceAnalyzer` is a convenience wrapper which exposes HIR API in terms of
@@ -109,37 +105,43 @@ impl SourceAnalyzer {
         node: InFile<&SyntaxNode>,
         offset: Option<TextUnit>,
     ) -> SourceAnalyzer {
-        let _p = profile("SourceAnalyzer::new");
-        let def_with_body = def_with_body_from_child_node(db, node);
-        if let Some(def) = def_with_body {
-            let (_body, source_map) = db.body_with_source_map(def.into());
-            let scopes = db.expr_scopes(def.into());
-            let scope = match offset {
-                None => scope_for(&scopes, &source_map, node),
-                Some(offset) => scope_for_offset(&scopes, &source_map, node.with_value(offset)),
-            };
-            let resolver = resolver_for_scope(db, def.into(), scope);
-            SourceAnalyzer {
-                resolver,
-                body_owner: Some(def),
-                body_source_map: Some(source_map),
-                infer: Some(db.infer(def.into())),
-                scopes: Some(scopes),
-                file_id: node.file_id,
-            }
-        } else {
-            SourceAnalyzer {
-                resolver: node
-                    .value
-                    .ancestors()
-                    .find_map(|it| try_get_resolver_for_node(db, node.with_value(&it)))
-                    .unwrap_or_default(),
-                body_owner: None,
-                body_source_map: None,
-                infer: None,
-                scopes: None,
-                file_id: node.file_id,
-            }
+        crate::source_binder::SourceBinder::new(db).analyze(node, offset)
+    }
+
+    pub(crate) fn new_for_body(
+        db: &impl HirDatabase,
+        def: DefWithBodyId,
+        node: InFile<&SyntaxNode>,
+        offset: Option<TextUnit>,
+    ) -> SourceAnalyzer {
+        let (_body, source_map) = db.body_with_source_map(def);
+        let scopes = db.expr_scopes(def);
+        let scope = match offset {
+            None => scope_for(&scopes, &source_map, node),
+            Some(offset) => scope_for_offset(&scopes, &source_map, node.with_value(offset)),
+        };
+        let resolver = resolver_for_scope(db, def, scope);
+        SourceAnalyzer {
+            resolver,
+            body_owner: Some(def.into()),
+            body_source_map: Some(source_map),
+            infer: Some(db.infer(def)),
+            scopes: Some(scopes),
+            file_id: node.file_id,
+        }
+    }
+
+    pub(crate) fn new_for_resolver(
+        resolver: Resolver,
+        node: InFile<&SyntaxNode>,
+    ) -> SourceAnalyzer {
+        SourceAnalyzer {
+            resolver,
+            body_owner: None,
+            body_source_map: None,
+            infer: None,
+            scopes: None,
+            file_id: node.file_id,
         }
     }
 
@@ -366,64 +368,6 @@ impl SourceAnalyzer {
     }
 }
 
-fn try_get_resolver_for_node(db: &impl HirDatabase, node: InFile<&SyntaxNode>) -> Option<Resolver> {
-    match_ast! {
-        match (node.value) {
-            ast::Module(it) => {
-                let src = node.with_value(it);
-                Some(crate::Module::from_declaration(db, src)?.id.resolver(db))
-            },
-            ast::SourceFile(it) => {
-                let src = node.with_value(ModuleSource::SourceFile(it));
-                Some(crate::Module::from_definition(db, src)?.id.resolver(db))
-            },
-            ast::StructDef(it) => {
-                let src = node.with_value(it);
-                Some(Struct::from_source(db, src)?.id.resolver(db))
-            },
-            ast::EnumDef(it) => {
-                let src = node.with_value(it);
-                Some(Enum::from_source(db, src)?.id.resolver(db))
-            },
-            ast::ImplBlock(it) => {
-                let src = node.with_value(it);
-                Some(ImplBlock::from_source(db, src)?.id.resolver(db))
-            },
-            ast::TraitDef(it) => {
-                let src = node.with_value(it);
-                Some(Trait::from_source(db, src)?.id.resolver(db))
-            },
-            _ => match node.value.kind() {
-                FN_DEF | CONST_DEF | STATIC_DEF => {
-                    let def = def_with_body_from_child_node(db, node)?;
-                    let def = DefWithBodyId::from(def);
-                    Some(def.resolver(db))
-                }
-                // FIXME add missing cases
-                _ => None
-            }
-        }
-    }
-}
-
-fn def_with_body_from_child_node(
-    db: &impl HirDatabase,
-    child: InFile<&SyntaxNode>,
-) -> Option<DefWithBody> {
-    let _p = profile("def_with_body_from_child_node");
-    child.cloned().ancestors_with_macros(db).find_map(|node| {
-        let n = &node.value;
-        match_ast! {
-            match n {
-                ast::FnDef(def) => { return Function::from_source(db, node.with_value(def)).map(DefWithBody::from); },
-                ast::ConstDef(def) => { return Const::from_source(db, node.with_value(def)).map(DefWithBody::from); },
-                ast::StaticDef(def) => { return Static::from_source(db, node.with_value(def)).map(DefWithBody::from); },
-                _ => { None },
-            }
-        }
-    })
-}
-
 fn scope_for(
     scopes: &ExprScopes,
     source_map: &BodySourceMap,

crates/ra_hir/src/source_binder.rs (new file): 173 additions
@@ -0,0 +1,173 @@
+//! `SourceBinder` should be the main entry point for getting info about source code.
+//! It's main task is to map source syntax trees to hir-level IDs.
+//!
+//! It is intended to subsume `FromSource` and `SourceAnalyzer`.
+
+use hir_def::{
+    child_by_source::ChildBySource,
+    dyn_map::DynMap,
+    keys::{self, Key},
+    resolver::{HasResolver, Resolver},
+    ConstId, DefWithBodyId, EnumId, EnumVariantId, FunctionId, ImplId, ModuleId, StaticId,
+    StructFieldId, StructId, TraitId, TypeAliasId, UnionId, VariantId,
+};
+use hir_expand::InFile;
+use ra_prof::profile;
+use ra_syntax::{ast, match_ast, AstNode, SyntaxNode, TextUnit};
+use rustc_hash::FxHashMap;
+
+use crate::{db::HirDatabase, ModuleSource, SourceAnalyzer};
+
+pub struct SourceBinder<'a, DB> {
+    pub db: &'a DB,
+    child_by_source_cache: FxHashMap<ChildContainer, DynMap>,
+}
+
+impl<DB: HirDatabase> SourceBinder<'_, DB> {
+    pub fn new(db: &DB) -> SourceBinder<DB> {
+        SourceBinder { db, child_by_source_cache: FxHashMap::default() }
+    }
+
+    pub fn analyze(
+        &mut self,
+        src: InFile<&SyntaxNode>,
+        offset: Option<TextUnit>,
+    ) -> SourceAnalyzer {
+        let _p = profile("SourceBinder::analyzer");
+        let container = match self.find_container(src) {
+            Some(it) => it,
+            None => return SourceAnalyzer::new_for_resolver(Resolver::default(), src),
+        };
+
+        let resolver = match container {
+            ChildContainer::DefWithBodyId(def) => {
+                return SourceAnalyzer::new_for_body(self.db, def, src, offset)
+            }
+            ChildContainer::TraitId(it) => it.resolver(self.db),
+            ChildContainer::ImplId(it) => it.resolver(self.db),
+            ChildContainer::ModuleId(it) => it.resolver(self.db),
+            ChildContainer::EnumId(it) => it.resolver(self.db),
+            ChildContainer::VariantId(it) => it.resolver(self.db),
+        };
+        SourceAnalyzer::new_for_resolver(resolver, src)
+    }
+
+    pub fn to_def<D, T>(&mut self, src: InFile<T>) -> Option<D>
+    where
+        D: From<T::ID>,
+        T: ToId,
+    {
+        let id: T::ID = self.to_id(src)?;
+        Some(id.into())
+    }
+
+    fn to_id<T: ToId>(&mut self, src: InFile<T>) -> Option<T::ID> {
+        let container = self.find_container(src.as_ref().map(|it| it.syntax()))?;
+        let db = self.db;
+        let dyn_map =
+            &*self.child_by_source_cache.entry(container).or_insert_with(|| match container {
+                ChildContainer::DefWithBodyId(it) => it.child_by_source(db),
+                ChildContainer::ModuleId(it) => it.child_by_source(db),
+                ChildContainer::TraitId(it) => it.child_by_source(db),
+                ChildContainer::ImplId(it) => it.child_by_source(db),
+                ChildContainer::EnumId(it) => it.child_by_source(db),
+                ChildContainer::VariantId(it) => it.child_by_source(db),
+            });
+        dyn_map[T::KEY].get(&src).copied()
+    }
+
+    fn find_container(&mut self, src: InFile<&SyntaxNode>) -> Option<ChildContainer> {
+        for container in src.cloned().ancestors_with_macros(self.db).skip(1) {
+            let res: ChildContainer = match_ast! {
+                match (container.value) {
+                    ast::TraitDef(it) => {
+                        let def: TraitId = self.to_id(container.with_value(it))?;
+                        def.into()
+                    },
+                    ast::ImplBlock(it) => {
+                        let def: ImplId = self.to_id(container.with_value(it))?;
+                        def.into()
+                    },
+                    ast::FnDef(it) => {
+                        let def: FunctionId = self.to_id(container.with_value(it))?;
+                        DefWithBodyId::from(def).into()
+                    },
+                    ast::StaticDef(it) => {
+                        let def: StaticId = self.to_id(container.with_value(it))?;
+                        DefWithBodyId::from(def).into()
+                    },
+                    ast::ConstDef(it) => {
+                        let def: ConstId = self.to_id(container.with_value(it))?;
+                        DefWithBodyId::from(def).into()
+                    },
+                    ast::EnumDef(it) => {
+                        let def: EnumId = self.to_id(container.with_value(it))?;
+                        def.into()
+                    },
+                    ast::StructDef(it) => {
+                        let def: StructId = self.to_id(container.with_value(it))?;
+                        VariantId::from(def).into()
+                    },
+                    ast::UnionDef(it) => {
+                        let def: UnionId = self.to_id(container.with_value(it))?;
+                        VariantId::from(def).into()
+                    },
+                    // FIXME: handle out-of-line modules here
+                    _ => { continue },
+                }
+            };
+            return Some(res);
+        }
+
+        let module_source = ModuleSource::from_child_node(self.db, src);
+        let c = crate::Module::from_definition(self.db, src.with_value(module_source))?;
+        Some(c.id.into())
+    }
+}
+
+#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
+enum ChildContainer {
+    DefWithBodyId(DefWithBodyId),
+    ModuleId(ModuleId),
+    TraitId(TraitId),
+    ImplId(ImplId),
+    EnumId(EnumId),
+    VariantId(VariantId),
+}
+impl_froms! {
+    ChildContainer:
+    DefWithBodyId,
+    ModuleId,
+    TraitId,
+    ImplId,
+    EnumId,
+    VariantId,
+}
+
+pub trait ToId: Sized + AstNode + 'static {
+    type ID: Sized + Copy + 'static;
+    const KEY: Key<Self, Self::ID>;
+}
+
+macro_rules! to_id_impls {
+    ($(($id:ident, $ast:path, $key:path)),* ,) => {$(
+        impl ToId for $ast {
+            type ID = $id;
+            const KEY: Key<Self, Self::ID> = $key;
+        }
+    )*}
+}
+
+to_id_impls![
+    (StructId, ast::StructDef, keys::STRUCT),
+    (UnionId, ast::UnionDef, keys::UNION),
+    (EnumId, ast::EnumDef, keys::ENUM),
+    (TraitId, ast::TraitDef, keys::TRAIT),
+    (FunctionId, ast::FnDef, keys::FUNCTION),
+    (StaticId, ast::StaticDef, keys::STATIC),
+    (ConstId, ast::ConstDef, keys::CONST),
+    (TypeAliasId, ast::TypeAliasDef, keys::TYPE_ALIAS),
+    (ImplId, ast::ImplBlock, keys::IMPL),
+    (StructFieldId, ast::RecordFieldDef, keys::RECORD_FIELD),
+    (EnumVariantId, ast::EnumVariant, keys::ENUM_VARIANT),
+];

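The `ToId` trait at the end of the new file ties each AST node type to the ID type a lookup should produce, so `to_def` can stay generic while the cache stores type-erased maps. Here is a simplified, self-contained sketch of that typed-key idea; the `AstFn`, `FnId`, and `Registry` names are made up and the registry is far simpler than the real `DynMap`.

use std::any::{Any, TypeId};
use std::collections::HashMap;

trait ToId: 'static {
    type Id: Copy + 'static;
}

struct AstFn; // stands in for ast::FnDef
#[derive(Clone, Copy, Debug, PartialEq)]
struct FnId(u32); // stands in for hir_def::FunctionId

impl ToId for AstFn {
    type Id = FnId;
}

// A tiny "DynMap": values are type-erased; the ToId impl says how to get them back.
struct Registry {
    map: HashMap<TypeId, Box<dyn Any>>,
}

impl Registry {
    fn new() -> Registry {
        Registry { map: HashMap::new() }
    }
    fn insert<T: ToId>(&mut self, id: T::Id) {
        self.map.insert(TypeId::of::<T>(), Box::new(id));
    }
    fn get<T: ToId>(&self) -> Option<T::Id> {
        self.map.get(&TypeId::of::<T>())?.downcast_ref::<T::Id>().copied()
    }
}

fn main() {
    let mut registry = Registry::new();
    registry.insert::<AstFn>(FnId(7));
    assert_eq!(registry.get::<AstFn>(), Some(FnId(7)));
}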
@@ -332,7 +332,7 @@ pub enum VariantId {
     StructId(StructId),
     UnionId(UnionId),
 }
-impl_froms!(VariantId: EnumVariantId, StructId);
+impl_froms!(VariantId: EnumVariantId, StructId, UnionId);
 
 trait Intern {
     type ID;

@@ -1,6 +1,6 @@
 //! FIXME: write short doc here
 
-use hir::{db::AstDatabase, InFile};
+use hir::{db::AstDatabase, InFile, SourceBinder};
 use ra_syntax::{
     ast::{self, DocCommentsOwner},
     match_ast, AstNode,
@@ -72,7 +72,8 @@ pub(crate) fn reference_definition(
 ) -> ReferenceResult {
     use self::ReferenceResult::*;
 
-    let name_kind = classify_name_ref(db, name_ref).map(|d| d.kind);
+    let mut sb = SourceBinder::new(db);
+    let name_kind = classify_name_ref(&mut sb, name_ref).map(|d| d.kind);
     match name_kind {
         Some(Macro(it)) => return Exact(it.to_nav(db)),
         Some(Field(it)) => return Exact(it.to_nav(db)),

@@ -1,6 +1,6 @@
 //! FIXME: write short doc here
 
-use hir::{db::AstDatabase, Adt, HasSource, HirDisplay};
+use hir::{db::AstDatabase, Adt, HasSource, HirDisplay, SourceBinder};
 use ra_db::SourceDatabase;
 use ra_syntax::{
     algo::find_covering_element,
@@ -152,13 +152,14 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
 
     let mut res = HoverResult::new();
 
+    let mut sb = SourceBinder::new(db);
     if let Some((range, name_kind)) = match_ast! {
         match (token.value.parent()) {
             ast::NameRef(name_ref) => {
-                classify_name_ref(db, token.with_value(&name_ref)).map(|d| (name_ref.syntax().text_range(), d.kind))
+                classify_name_ref(&mut sb, token.with_value(&name_ref)).map(|d| (name_ref.syntax().text_range(), d.kind))
             },
             ast::Name(name) => {
-                classify_name(db, token.with_value(&name)).map(|d| (name.syntax().text_range(), d.kind))
+                classify_name(&mut sb, token.with_value(&name)).map(|d| (name.syntax().text_range(), d.kind))
             },
             _ => None,
         }
@@ -742,7 +743,7 @@ fn func(foo: i32) { if true { <|>foo; }; }
 }
 fn foo(bar:u32) {
     let a = id!(ba<|>r);
 }
 ",
             &["u32"],
         );

@@ -1,6 +1,6 @@
 //! FIXME: write short doc here
 
-use hir::{HirDisplay, SourceAnalyzer};
+use hir::{HirDisplay, SourceAnalyzer, SourceBinder};
 use once_cell::unsync::Lazy;
 use ra_prof::profile;
 use ra_syntax::{
@@ -29,22 +29,23 @@ pub(crate) fn inlay_hints(
     file: &SourceFile,
     max_inlay_hint_length: Option<usize>,
 ) -> Vec<InlayHint> {
+    let mut sb = SourceBinder::new(db);
     file.syntax()
         .descendants()
-        .flat_map(|node| get_inlay_hints(db, file_id, &node, max_inlay_hint_length))
+        .flat_map(|node| get_inlay_hints(&mut sb, file_id, &node, max_inlay_hint_length))
         .flatten()
         .collect()
 }
 
 fn get_inlay_hints(
-    db: &RootDatabase,
+    sb: &mut SourceBinder<RootDatabase>,
     file_id: FileId,
     node: &SyntaxNode,
     max_inlay_hint_length: Option<usize>,
 ) -> Option<Vec<InlayHint>> {
     let _p = profile("get_inlay_hints");
-    let analyzer =
-        Lazy::new(|| SourceAnalyzer::new(db, hir::InFile::new(file_id.into(), node), None));
+    let db = sb.db;
+    let analyzer = Lazy::new(move || sb.analyze(hir::InFile::new(file_id.into(), node), None));
     match_ast! {
         match node {
             ast::LetStmt(it) => {

@@ -14,7 +14,7 @@ mod name_definition;
 mod rename;
 mod search_scope;
 
-use hir::InFile;
+use hir::{InFile, SourceBinder};
 use once_cell::unsync::Lazy;
 use ra_db::{SourceDatabase, SourceDatabaseExt};
 use ra_prof::profile;
@@ -171,13 +171,14 @@ fn find_name(
     syntax: &SyntaxNode,
     position: FilePosition,
 ) -> Option<RangeInfo<(String, NameDefinition)>> {
+    let mut sb = SourceBinder::new(db);
     if let Some(name) = find_node_at_offset::<ast::Name>(&syntax, position.offset) {
-        let def = classify_name(db, InFile::new(position.file_id.into(), &name))?;
+        let def = classify_name(&mut sb, InFile::new(position.file_id.into(), &name))?;
         let range = name.syntax().text_range();
        return Some(RangeInfo::new(range, (name.text().to_string(), def)));
     }
     let name_ref = find_node_at_offset::<ast::NameRef>(&syntax, position.offset)?;
-    let def = classify_name_ref(db, InFile::new(position.file_id.into(), &name_ref))?;
+    let def = classify_name_ref(&mut sb, InFile::new(position.file_id.into(), &name_ref))?;
     let range = name_ref.syntax().text_range();
     Some(RangeInfo::new(range, (name_ref.text().to_string(), def)))
 }
@@ -195,7 +196,9 @@ fn process_definition(
 
     for (file_id, search_range) in scope {
         let text = db.file_text(file_id);
 
         let parse = Lazy::new(|| SourceFile::parse(&text));
+        let mut sb = Lazy::new(|| SourceBinder::new(db));
+
         for (idx, _) in text.match_indices(pat) {
             let offset = TextUnit::from_usize(idx);
@@ -209,7 +212,11 @@ fn process_definition(
                     continue;
                 }
             }
-            if let Some(d) = classify_name_ref(db, InFile::new(file_id.into(), &name_ref)) {
+            // FIXME: reuse sb
+            // See https://github.com/rust-lang/rust/pull/68198#issuecomment-574269098
+
+            if let Some(d) = classify_name_ref(&mut sb, InFile::new(file_id.into(), &name_ref))
+            {
                 if d == def {
                     let kind = if name_ref
                         .syntax()
@@ -309,7 +316,7 @@ mod tests {
         }
         impl Foo {
             fn f() -> i32 { 42 }
         }
         fn main() {
             let f: Foo;
             f = Foo {a: Foo::f()};
@@ -319,7 +326,7 @@ mod tests {
         check_result(
             refs,
             "Foo STRUCT_DEF FileId(1) [5; 39) [12; 15) Other",
-            &["FileId(1) [142; 145) StructLiteral"],
+            &["FileId(1) [138; 141) StructLiteral"],
         );
     }
 

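In `process_definition` above, both the re-parse and the new `SourceBinder` are wrapped in `once_cell::unsync::Lazy`, so files whose text contains no candidate match never pay for either. A standalone sketch of that pattern follows; the `expensive_setup` function and the `--use-data` flag are made up, and the example assumes the `once_cell` crate, which this PR already depends on.

use once_cell::unsync::Lazy;

fn expensive_setup() -> Vec<u32> {
    println!("setup runs once");
    (0..1000).collect()
}

fn main() {
    // The closure does not run here; it runs on first dereference.
    let data = Lazy::new(expensive_setup);
    let needs_data = std::env::args().any(|arg| arg == "--use-data");
    if needs_data {
        // First use forces the Lazy and runs expensive_setup exactly once.
        println!("{} items", data.len());
    }
}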
@@ -1,6 +1,6 @@
 //! Functions that are used to classify an element from its definition or reference.
 
-use hir::{FromSource, InFile, Module, ModuleSource, PathResolution, SourceAnalyzer};
+use hir::{FromSource, InFile, Module, ModuleSource, PathResolution, SourceBinder};
 use ra_prof::profile;
 use ra_syntax::{ast, match_ast, AstNode};
 use test_utils::tested_by;
@@ -11,7 +11,10 @@ use super::{
 };
 use crate::db::RootDatabase;
 
-pub(crate) fn classify_name(db: &RootDatabase, name: InFile<&ast::Name>) -> Option<NameDefinition> {
+pub(crate) fn classify_name(
+    sb: &mut SourceBinder<RootDatabase>,
+    name: InFile<&ast::Name>,
+) -> Option<NameDefinition> {
     let _p = profile("classify_name");
     let parent = name.value.syntax().parent()?;
 
@@ -19,90 +22,89 @@ pub(crate) fn classify_name(db: &RootDatabase, name: InFile<&ast::Name>) -> Opti
         match parent {
             ast::BindPat(it) => {
                 let src = name.with_value(it);
-                let local = hir::Local::from_source(db, src)?;
+                let local = hir::Local::from_source(sb.db, src)?;
                 Some(NameDefinition {
                     visibility: None,
-                    container: local.module(db),
+                    container: local.module(sb.db),
                     kind: NameKind::Local(local),
                 })
             },
             ast::RecordFieldDef(it) => {
-                let ast = hir::FieldSource::Named(it);
-                let src = name.with_value(ast);
-                let field = hir::StructField::from_source(db, src)?;
-                Some(from_struct_field(db, field))
+                let src = name.with_value(it);
+                let field: hir::StructField = sb.to_def(src)?;
+                Some(from_struct_field(sb.db, field))
             },
             ast::Module(it) => {
                 let def = {
                     if !it.has_semi() {
                         let ast = hir::ModuleSource::Module(it);
                         let src = name.with_value(ast);
-                        hir::Module::from_definition(db, src)
+                        hir::Module::from_definition(sb.db, src)
                     } else {
                         let src = name.with_value(it);
-                        hir::Module::from_declaration(db, src)
+                        hir::Module::from_declaration(sb.db, src)
                     }
                 }?;
-                Some(from_module_def(db, def.into(), None))
+                Some(from_module_def(sb.db, def.into(), None))
             },
             ast::StructDef(it) => {
                 let src = name.with_value(it);
-                let def = hir::Struct::from_source(db, src)?;
-                Some(from_module_def(db, def.into(), None))
+                let def: hir::Struct = sb.to_def(src)?;
+                Some(from_module_def(sb.db, def.into(), None))
             },
             ast::EnumDef(it) => {
                 let src = name.with_value(it);
-                let def = hir::Enum::from_source(db, src)?;
-                Some(from_module_def(db, def.into(), None))
+                let def: hir::Enum = sb.to_def(src)?;
+                Some(from_module_def(sb.db, def.into(), None))
             },
             ast::TraitDef(it) => {
                 let src = name.with_value(it);
-                let def = hir::Trait::from_source(db, src)?;
-                Some(from_module_def(db, def.into(), None))
+                let def: hir::Trait = sb.to_def(src)?;
+                Some(from_module_def(sb.db, def.into(), None))
             },
             ast::StaticDef(it) => {
                 let src = name.with_value(it);
-                let def = hir::Static::from_source(db, src)?;
-                Some(from_module_def(db, def.into(), None))
+                let def: hir::Static = sb.to_def(src)?;
+                Some(from_module_def(sb.db, def.into(), None))
             },
             ast::EnumVariant(it) => {
                 let src = name.with_value(it);
-                let def = hir::EnumVariant::from_source(db, src)?;
-                Some(from_module_def(db, def.into(), None))
+                let def: hir::EnumVariant = sb.to_def(src)?;
+                Some(from_module_def(sb.db, def.into(), None))
             },
             ast::FnDef(it) => {
                 let src = name.with_value(it);
-                let def = hir::Function::from_source(db, src)?;
+                let def: hir::Function = sb.to_def(src)?;
                 if parent.parent().and_then(ast::ItemList::cast).is_some() {
-                    Some(from_assoc_item(db, def.into()))
+                    Some(from_assoc_item(sb.db, def.into()))
                 } else {
-                    Some(from_module_def(db, def.into(), None))
+                    Some(from_module_def(sb.db, def.into(), None))
                 }
             },
             ast::ConstDef(it) => {
                 let src = name.with_value(it);
-                let def = hir::Const::from_source(db, src)?;
+                let def: hir::Const = sb.to_def(src)?;
                 if parent.parent().and_then(ast::ItemList::cast).is_some() {
-                    Some(from_assoc_item(db, def.into()))
+                    Some(from_assoc_item(sb.db, def.into()))
                 } else {
-                    Some(from_module_def(db, def.into(), None))
+                    Some(from_module_def(sb.db, def.into(), None))
                 }
             },
             ast::TypeAliasDef(it) => {
                 let src = name.with_value(it);
-                let def = hir::TypeAlias::from_source(db, src)?;
+                let def: hir::TypeAlias = sb.to_def(src)?;
                 if parent.parent().and_then(ast::ItemList::cast).is_some() {
-                    Some(from_assoc_item(db, def.into()))
+                    Some(from_assoc_item(sb.db, def.into()))
                 } else {
-                    Some(from_module_def(db, def.into(), None))
+                    Some(from_module_def(sb.db, def.into(), None))
                 }
             },
             ast::MacroCall(it) => {
                 let src = name.with_value(it);
-                let def = hir::MacroDef::from_source(db, src.clone())?;
+                let def = hir::MacroDef::from_source(sb.db, src.clone())?;
 
-                let module_src = ModuleSource::from_child_node(db, src.as_ref().map(|it| it.syntax()));
-                let module = Module::from_definition(db, src.with_value(module_src))?;
+                let module_src = ModuleSource::from_child_node(sb.db, src.as_ref().map(|it| it.syntax()));
+                let module = Module::from_definition(sb.db, src.with_value(module_src))?;
 
                 Some(NameDefinition {
                     visibility: None,
@@ -112,10 +114,10 @@ pub(crate) fn classify_name(db: &RootDatabase, name: InFile<&ast::Name>) -> Opti
             },
             ast::TypeParam(it) => {
                 let src = name.with_value(it);
-                let def = hir::TypeParam::from_source(db, src)?;
+                let def = hir::TypeParam::from_source(sb.db, src)?;
                 Some(NameDefinition {
                     visibility: None,
-                    container: def.module(db),
+                    container: def.module(sb.db),
                     kind: NameKind::TypeParam(def),
                 })
             },
@@ -125,25 +127,25 @@ pub(crate) fn classify_name(db: &RootDatabase, name: InFile<&ast::Name>) -> Opti
 }
 
 pub(crate) fn classify_name_ref(
-    db: &RootDatabase,
+    sb: &mut SourceBinder<RootDatabase>,
     name_ref: InFile<&ast::NameRef>,
 ) -> Option<NameDefinition> {
     let _p = profile("classify_name_ref");
 
     let parent = name_ref.value.syntax().parent()?;
-    let analyzer = SourceAnalyzer::new(db, name_ref.map(|it| it.syntax()), None);
+    let analyzer = sb.analyze(name_ref.map(|it| it.syntax()), None);
 
     if let Some(method_call) = ast::MethodCallExpr::cast(parent.clone()) {
         tested_by!(goto_def_for_methods);
         if let Some(func) = analyzer.resolve_method_call(&method_call) {
-            return Some(from_assoc_item(db, func.into()));
+            return Some(from_assoc_item(sb.db, func.into()));
         }
     }
 
     if let Some(field_expr) = ast::FieldExpr::cast(parent.clone()) {
         tested_by!(goto_def_for_fields);
         if let Some(field) = analyzer.resolve_field(&field_expr) {
-            return Some(from_struct_field(db, field));
+            return Some(from_struct_field(sb.db, field));
         }
     }
 
@@ -151,30 +153,32 @@ pub(crate) fn classify_name_ref(
         tested_by!(goto_def_for_record_fields);
         tested_by!(goto_def_for_field_init_shorthand);
         if let Some(field_def) = analyzer.resolve_record_field(&record_field) {
-            return Some(from_struct_field(db, field_def));
+            return Some(from_struct_field(sb.db, field_def));
         }
     }
 
-    let ast = ModuleSource::from_child_node(db, name_ref.with_value(&parent));
+    let ast = ModuleSource::from_child_node(sb.db, name_ref.with_value(&parent));
     // FIXME: find correct container and visibility for each case
-    let container = Module::from_definition(db, name_ref.with_value(ast))?;
+    let container = Module::from_definition(sb.db, name_ref.with_value(ast))?;
     let visibility = None;
 
     if let Some(macro_call) = parent.ancestors().find_map(ast::MacroCall::cast) {
         tested_by!(goto_def_for_macros);
-        if let Some(macro_def) = analyzer.resolve_macro_call(db, name_ref.with_value(&macro_call)) {
+        if let Some(macro_def) =
+            analyzer.resolve_macro_call(sb.db, name_ref.with_value(&macro_call))
+        {
             let kind = NameKind::Macro(macro_def);
             return Some(NameDefinition { kind, container, visibility });
         }
     }
 
     let path = name_ref.value.syntax().ancestors().find_map(ast::Path::cast)?;
-    let resolved = analyzer.resolve_path(db, &path)?;
+    let resolved = analyzer.resolve_path(sb.db, &path)?;
     match resolved {
-        PathResolution::Def(def) => Some(from_module_def(db, def, Some(container))),
-        PathResolution::AssocItem(item) => Some(from_assoc_item(db, item)),
+        PathResolution::Def(def) => Some(from_module_def(sb.db, def, Some(container))),
+        PathResolution::AssocItem(item) => Some(from_assoc_item(sb.db, item)),
         PathResolution::Local(local) => {
-            let container = local.module(db);
+            let container = local.module(sb.db);
             let kind = NameKind::Local(local);
             Some(NameDefinition { kind, container, visibility: None })
         }
@@ -188,7 +192,7 @@ pub(crate) fn classify_name_ref(
         }
         PathResolution::SelfType(impl_block) => {
             let kind = NameKind::SelfType(impl_block);
-            let container = impl_block.module(db);
+            let container = impl_block.module(sb.db);
             Some(NameDefinition { kind, container, visibility })
         }
     }

@@ -2,7 +2,7 @@
 
 use rustc_hash::{FxHashMap, FxHashSet};
 
-use hir::{InFile, Name};
+use hir::{InFile, Name, SourceBinder};
 use ra_db::SourceDatabase;
 use ra_prof::profile;
 use ra_syntax::{ast, AstNode, Direction, SyntaxElement, SyntaxKind, SyntaxKind::*, TextRange, T};
@@ -84,6 +84,8 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa
         hash((file_id, name, shadow_count))
     }
 
+    let mut sb = SourceBinder::new(db);
+
     // Visited nodes to handle highlighting priorities
     // FIXME: retain only ranges here
     let mut highlighted: FxHashSet<SyntaxElement> = FxHashSet::default();
@@ -108,8 +110,8 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa
             NAME_REF if node.ancestors().any(|it| it.kind() == ATTR) => continue,
             NAME_REF => {
                 let name_ref = node.as_node().cloned().and_then(ast::NameRef::cast).unwrap();
-                let name_kind =
-                    classify_name_ref(db, InFile::new(file_id.into(), &name_ref)).map(|d| d.kind);
+                let name_kind = classify_name_ref(&mut sb, InFile::new(file_id.into(), &name_ref))
+                    .map(|d| d.kind);
                 match name_kind {
                     Some(name_kind) => {
                         if let Local(local) = &name_kind {
@@ -129,7 +131,7 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa
             NAME => {
                 let name = node.as_node().cloned().and_then(ast::Name::cast).unwrap();
                 let name_kind =
-                    classify_name(db, InFile::new(file_id.into(), &name)).map(|d| d.kind);
+                    classify_name(&mut sb, InFile::new(file_id.into(), &name)).map(|d| d.kind);
 
                 if let Some(Local(local)) = &name_kind {
                     if let Some(name) = local.name(db) {
@@ -308,9 +310,12 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
 
 #[cfg(test)]
 mod tests {
-    use crate::mock_analysis::single_file;
+    use std::fs;
 
     use test_utils::{assert_eq_text, project_dir, read_text};
 
+    use crate::mock_analysis::{single_file, MockAnalysis};
+
     #[test]
     fn test_highlighting() {
         let (analysis, file_id) = single_file(
@@ -357,7 +362,7 @@ impl<X> E<X> {
         let dst_file = project_dir().join("crates/ra_ide/src/snapshots/highlighting.html");
         let actual_html = &analysis.highlight_as_html(file_id, false).unwrap();
         let expected_html = &read_text(&dst_file);
-        std::fs::write(dst_file, &actual_html).unwrap();
+        fs::write(dst_file, &actual_html).unwrap();
         assert_eq_text!(expected_html, actual_html);
     }
 
@@ -383,7 +388,21 @@ fn bar() {
         let dst_file = project_dir().join("crates/ra_ide/src/snapshots/rainbow_highlighting.html");
         let actual_html = &analysis.highlight_as_html(file_id, true).unwrap();
         let expected_html = &read_text(&dst_file);
-        std::fs::write(dst_file, &actual_html).unwrap();
+        fs::write(dst_file, &actual_html).unwrap();
         assert_eq_text!(expected_html, actual_html);
     }
 
+    #[test]
+    fn accidentally_quadratic() {
+        let file = project_dir().join("crates/ra_syntax/test_data/accidentally_quadratic");
+        let src = fs::read_to_string(file).unwrap();
+
+        let mut mock = MockAnalysis::new();
+        let file_id = mock.add_file("/main.rs", &src);
+        let host = mock.analysis_host();
+
+        // let t = std::time::Instant::now();
+        let _ = host.analysis().highlight(file_id).unwrap();
+        // eprintln!("elapsed: {:?}", t.elapsed());
+    }
 }

crates/ra_syntax/test_data/accidentally_quadratic (new file): 3980 additions. File diff suppressed because it is too large.