introduce hir crate

Author: Aleksey Kladov, 2018-11-28 03:42:26 +03:00
Parent: f66e5b6e6b
Commit: 0e4b710af8
14 changed files with 2270 additions and 17 deletions

Cargo.lock (generated)

@@ -656,6 +656,22 @@ dependencies = [
"test_utils 0.1.0",
]
[[package]]
name = "ra_hir"
version = "0.1.0"
dependencies = [
"id-arena 1.0.2 (git+https://github.com/fitzgen/id-arena/?rev=43ecd67)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)",
"ra_db 0.1.0",
"ra_editor 0.1.0",
"ra_syntax 0.1.0",
"relative-path 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"salsa 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
"test_utils 0.1.0",
]
[[package]]
name = "ra_lsp_server"
version = "0.1.0"


@@ -9,15 +9,6 @@ extern crate relative_path;
extern crate rustc_hash;
extern crate salsa;
macro_rules! ctry {
($expr:expr) => {
match $expr {
None => return Ok(None),
Some(it) => it,
}
};
}
mod arena;
mod db;
mod imp;
@@ -47,7 +38,7 @@ pub use ra_editor::{
};
pub use ra_db::{
Canceled, Cancelable,
Canceled, Cancelable, FilePosition,
CrateGraph, CrateId, FileId, FileResolver
};
@@ -119,12 +110,6 @@ impl AnalysisHost {
}
}
#[derive(Clone, Copy, Debug)]
pub struct FilePosition {
pub file_id: FileId,
pub offset: TextUnit,
}
#[derive(Debug)]
pub struct SourceChange {
pub label: String,


@@ -13,7 +13,7 @@ mod loc2id;
use std::sync::Arc;
use ra_editor::LineIndex;
use ra_syntax::SourceFileNode;
use ra_syntax::{TextUnit, SourceFileNode};
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct Canceled;
@@ -81,3 +81,9 @@ fn file_lines(db: &impl SyntaxDatabase, file_id: FileId) -> Arc<LineIndex> {
let text = db.file_text(file_id);
Arc::new(LineIndex::new(&*text))
}
#[derive(Clone, Copy, Debug)]
pub struct FilePosition {
pub file_id: FileId,
pub offset: TextUnit,
}

crates/ra_hir/Cargo.toml (new file)

@@ -0,0 +1,17 @@
[package]
edition = "2018"
name = "ra_hir"
version = "0.1.0"
authors = ["Aleksey Kladov <aleksey.kladov@gmail.com>"]
[dependencies]
log = "0.4.5"
relative-path = "0.4.0"
salsa = "0.8.0"
rustc-hash = "1.0"
parking_lot = "0.6.4"
id-arena = { git = "https://github.com/fitzgen/id-arena/", rev = "43ecd67" }
ra_syntax = { path = "../ra_syntax" }
ra_editor = { path = "../ra_editor" }
ra_db = { path = "../ra_db" }
test_utils = { path = "../test_utils" }


@@ -0,0 +1,66 @@
//! A simple id-based arena, similar to https://github.com/fitzgen/id-arena.
//! We use our own version for more compact ids and to allow inherent impls
//! on Ids.
use std::{
fmt,
hash::{Hash, Hasher},
marker::PhantomData,
};
pub(crate) struct Id<T> {
idx: u32,
_ty: PhantomData<fn() -> T>,
}
impl<T> fmt::Debug for Id<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_tuple("Id").field(&self.idx).finish()
}
}
impl<T> Copy for Id<T> {}
impl<T> Clone for Id<T> {
fn clone(&self) -> Id<T> {
*self
}
}
impl<T> PartialEq for Id<T> {
fn eq(&self, other: &Id<T>) -> bool {
self.idx == other.idx
}
}
impl<T> Eq for Id<T> {}
impl<T> Hash for Id<T> {
fn hash<H: Hasher>(&self, h: &mut H) {
self.idx.hash(h);
}
}
#[derive(Debug, PartialEq, Eq)]
pub(crate) struct ArenaBehavior<T> {
_ty: PhantomData<T>,
}
impl<T> id_arena::ArenaBehavior for ArenaBehavior<T> {
type Id = Id<T>;
fn new_arena_id() -> usize {
0
}
fn new_id(_arena_id: usize, index: usize) -> Id<T> {
Id {
idx: index as u32,
_ty: PhantomData,
}
}
fn index(id: Id<T>) -> usize {
id.idx as usize
}
fn arena_id(_id: Id<T>) -> usize {
0
}
}
pub(crate) type Arena<T> = id_arena::Arena<T, ArenaBehavior<T>>;
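
Aside: a minimal standalone sketch of the pattern this arena sets up (a toy illustration, not the crate's implementation): a Vec-backed arena hands out small, Copy, type-tagged ids that can later be used to look the values up again.

use std::marker::PhantomData;

// Toy illustration only: a Vec-backed arena with typed, Copy ids.
struct Id<T> {
    idx: u32,
    _ty: PhantomData<fn() -> T>,
}
impl<T> Clone for Id<T> {
    fn clone(&self) -> Id<T> {
        *self
    }
}
impl<T> Copy for Id<T> {}

struct Arena<T> {
    data: Vec<T>,
}
impl<T> Arena<T> {
    fn alloc(&mut self, value: T) -> Id<T> {
        let idx = self.data.len() as u32;
        self.data.push(value);
        Id { idx, _ty: PhantomData }
    }
    fn get(&self, id: Id<T>) -> &T {
        &self.data[id.idx as usize]
    }
}

fn main() {
    let mut names = Arena { data: Vec::new() };
    let hello = names.alloc("hello".to_string());
    let world = names.alloc("world".to_string());
    // Ids are just indices wrapped in a type tag, so lookups are O(1).
    assert_eq!(names.get(hello), "hello");
    assert_eq!(names.get(world), "world");
}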

crates/ra_hir/src/db.rs (new file)

@@ -0,0 +1,66 @@
use std::sync::Arc;
use ra_syntax::{
SyntaxNode,
ast::FnDefNode,
};
use ra_db::{SourceRootId, LocationIntener, SyntaxDatabase, FileId, Cancelable};
use crate::{
DefLoc, DefId, FnId,
SourceFileItems, SourceItemId,
query_definitions,
function::{FnScopes},
module::{ModuleId, ModuleTree, ModuleSource,
nameres::{ItemMap, InputModuleItems}},
};
salsa::query_group! {
pub(crate) trait HirDatabase: SyntaxDatabase
+ AsRef<LocationIntener<DefLoc, DefId>>
+ AsRef<LocationIntener<SourceItemId, FnId>>
{
fn fn_scopes(fn_id: FnId) -> Arc<FnScopes> {
type FnScopesQuery;
use fn query_definitions::fn_scopes;
}
fn fn_syntax(fn_id: FnId) -> FnDefNode {
type FnSyntaxQuery;
// Don't retain syntax trees in memory
storage dependencies;
use fn query_definitions::fn_syntax;
}
fn file_items(file_id: FileId) -> Arc<SourceFileItems> {
type SourceFileItemsQuery;
storage dependencies;
use fn query_definitions::file_items;
}
fn file_item(source_item_id: SourceItemId) -> SyntaxNode {
type FileItemQuery;
storage dependencies;
use fn query_definitions::file_item;
}
fn submodules(source: ModuleSource) -> Cancelable<Arc<Vec<crate::module::imp::Submodule>>> {
type SubmodulesQuery;
use fn query_definitions::submodules;
}
fn input_module_items(source_root_id: SourceRootId, module_id: ModuleId) -> Cancelable<Arc<InputModuleItems>> {
type InputModuleItemsQuery;
use fn query_definitions::input_module_items;
}
fn item_map(source_root_id: SourceRootId) -> Cancelable<Arc<ItemMap>> {
type ItemMapQuery;
use fn query_definitions::item_map;
}
fn module_tree(source_root_id: SourceRootId) -> Cancelable<Arc<ModuleTree>> {
type ModuleTreeQuery;
use fn crate::module::imp::module_tree;
}
}
}
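
Aside: a hedged sketch of how other code in this commit consumes the generated trait (the helper itself is illustrative and not part of the commit):

use std::sync::Arc;

use crate::{db::HirDatabase, FnId, FnScopes};

// Illustrative only. The first call computes and memoizes fn_scopes for this
// FnId; later calls are answered from salsa's cache until an input the query
// depends on changes. fn_syntax is declared with `storage dependencies`, so
// the syntax tree itself is not retained, only the information needed to
// recompute it.
fn scopes_of(db: &impl HirDatabase, fn_id: FnId) -> Arc<FnScopes> {
    let _fn_def = db.fn_syntax(fn_id); // recomputed on demand, not kept in memory
    db.fn_scopes(fn_id)
}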


@@ -0,0 +1,190 @@
mod scope;
use std::{
cmp::{max, min},
sync::Arc,
};
use ra_syntax::{
TextRange, TextUnit, SyntaxNodeRef,
ast::{self, AstNode, DocCommentsOwner, NameOwner},
};
use ra_db::FileId;
use crate::{
FnId, HirDatabase, SourceItemId,
};
pub(crate) use self::scope::FnScopes;
impl FnId {
pub(crate) fn get(db: &impl HirDatabase, file_id: FileId, fn_def: ast::FnDef) -> FnId {
let file_items = db.file_items(file_id);
let item_id = file_items.id_of(fn_def.syntax());
let item_id = SourceItemId { file_id, item_id };
FnId::from_loc(db, &item_id)
}
}
pub(crate) struct Function {
fn_id: FnId,
}
impl Function {
pub(crate) fn guess_from_source(
db: &impl HirDatabase,
file_id: FileId,
fn_def: ast::FnDef,
) -> Function {
let fn_id = FnId::get(db, file_id, fn_def);
Function { fn_id }
}
pub(crate) fn guess_for_name_ref(
db: &impl HirDatabase,
file_id: FileId,
name_ref: ast::NameRef,
) -> Option<Function> {
Function::guess_for_node(db, file_id, name_ref.syntax())
}
pub(crate) fn guess_for_bind_pat(
db: &impl HirDatabase,
file_id: FileId,
bind_pat: ast::BindPat,
) -> Option<Function> {
Function::guess_for_node(db, file_id, bind_pat.syntax())
}
fn guess_for_node(
db: &impl HirDatabase,
file_id: FileId,
node: SyntaxNodeRef,
) -> Option<Function> {
let fn_def = node.ancestors().find_map(ast::FnDef::cast)?;
let res = Function::guess_from_source(db, file_id, fn_def);
Some(res)
}
pub(crate) fn scope(&self, db: &impl HirDatabase) -> Arc<FnScopes> {
db.fn_scopes(self.fn_id)
}
pub(crate) fn signature_info(&self, db: &impl HirDatabase) -> Option<FnSignatureInfo> {
let syntax = db.fn_syntax(self.fn_id);
FnSignatureInfo::new(syntax.borrowed())
}
}
#[derive(Debug, Clone)]
pub struct FnSignatureInfo {
pub name: String,
pub label: String,
pub ret_type: Option<String>,
pub params: Vec<String>,
pub doc: Option<String>,
}
impl FnSignatureInfo {
fn new(node: ast::FnDef) -> Option<Self> {
let name = node.name()?.text().to_string();
let mut doc = None;
// Strip the body out for the label.
let mut label: String = if let Some(body) = node.body() {
let body_range = body.syntax().range();
let label: String = node
.syntax()
.children()
.filter(|child| !child.range().is_subrange(&body_range))
.map(|node| node.text().to_string())
.collect();
label
} else {
node.syntax().text().to_string()
};
if let Some((comment_range, docs)) = FnSignatureInfo::extract_doc_comments(node) {
let comment_range = comment_range
.checked_sub(node.syntax().range().start())
.unwrap();
let start = comment_range.start().to_usize();
let end = comment_range.end().to_usize();
// Remove the comment from the label
label.replace_range(start..end, "");
// Massage markdown
let mut processed_lines = Vec::new();
let mut in_code_block = false;
for line in docs.lines() {
if line.starts_with("```") {
in_code_block = !in_code_block;
}
let line = if in_code_block && line.starts_with("```") && !line.contains("rust") {
"```rust".into()
} else {
line.to_string()
};
processed_lines.push(line);
}
if !processed_lines.is_empty() {
doc = Some(processed_lines.join("\n"));
}
}
let params = FnSignatureInfo::param_list(node);
let ret_type = node.ret_type().map(|r| r.syntax().text().to_string());
Some(FnSignatureInfo {
name,
ret_type,
params,
label: label.trim().to_owned(),
doc,
})
}
fn extract_doc_comments(node: ast::FnDef) -> Option<(TextRange, String)> {
if node.doc_comments().count() == 0 {
return None;
}
let comment_text = node.doc_comment_text();
let (begin, end) = node
.doc_comments()
.map(|comment| comment.syntax().range())
.map(|range| (range.start().to_usize(), range.end().to_usize()))
.fold((std::usize::MAX, std::usize::MIN), |acc, range| {
(min(acc.0, range.0), max(acc.1, range.1))
});
let range = TextRange::from_to(TextUnit::from_usize(begin), TextUnit::from_usize(end));
Some((range, comment_text))
}
fn param_list(node: ast::FnDef) -> Vec<String> {
let mut res = vec![];
if let Some(param_list) = node.param_list() {
if let Some(self_param) = param_list.self_param() {
res.push(self_param.syntax().text().to_string())
}
// Maybe use param.pat here? See if we can just extract the name?
//res.extend(param_list.params().map(|p| p.syntax().text().to_string()));
res.extend(
param_list
.params()
.filter_map(|p| p.pat())
.map(|pat| pat.syntax().text().to_string()),
);
}
res
}
}
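
Aside: the doc-comment handling above rewrites bare ``` fences to ```rust so that extracted doc snippets are highlighted as Rust. A standalone sketch of just that massaging step (illustrative; it mirrors the loop above):

// Standalone illustration: opening fences with no language annotation become
// ```rust; annotated fences and closing fences pass through unchanged.
fn normalize_doc_fences(docs: &str) -> String {
    let mut processed_lines = Vec::new();
    let mut in_code_block = false;
    for line in docs.lines() {
        if line.starts_with("```") {
            in_code_block = !in_code_block;
        }
        let line = if in_code_block && line.starts_with("```") && !line.contains("rust") {
            "```rust".to_string()
        } else {
            line.to_string()
        };
        processed_lines.push(line);
    }
    processed_lines.join("\n")
}

fn main() {
    let docs = "Adds two numbers.\n```\nassert_eq!(add(1, 2), 3);\n```";
    assert_eq!(
        normalize_doc_fences(docs),
        "Adds two numbers.\n```rust\nassert_eq!(add(1, 2), 3);\n```"
    );
}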


@@ -0,0 +1,450 @@
use rustc_hash::{FxHashMap, FxHashSet};
use ra_syntax::{
AstNode, SmolStr, SyntaxNodeRef, TextRange,
algo::generate,
ast::{self, ArgListOwner, LoopBodyOwner, NameOwner},
};
use ra_db::LocalSyntaxPtr;
use crate::{
arena::{Arena, Id},
};
pub(crate) type ScopeId = Id<ScopeData>;
#[derive(Debug, PartialEq, Eq)]
pub struct FnScopes {
pub(crate) self_param: Option<LocalSyntaxPtr>,
scopes: Arena<ScopeData>,
scope_for: FxHashMap<LocalSyntaxPtr, ScopeId>,
}
#[derive(Debug, PartialEq, Eq)]
pub struct ScopeEntry {
name: SmolStr,
ptr: LocalSyntaxPtr,
}
#[derive(Debug, PartialEq, Eq)]
pub(crate) struct ScopeData {
parent: Option<ScopeId>,
entries: Vec<ScopeEntry>,
}
impl FnScopes {
pub(crate) fn new(fn_def: ast::FnDef) -> FnScopes {
let mut scopes = FnScopes {
self_param: fn_def
.param_list()
.and_then(|it| it.self_param())
.map(|it| LocalSyntaxPtr::new(it.syntax())),
scopes: Arena::default(),
scope_for: FxHashMap::default(),
};
let root = scopes.root_scope();
scopes.add_params_bindings(root, fn_def.param_list());
if let Some(body) = fn_def.body() {
compute_block_scopes(body, &mut scopes, root)
}
scopes
}
pub(crate) fn entries(&self, scope: ScopeId) -> &[ScopeEntry] {
&self.scopes[scope].entries
}
pub fn scope_chain<'a>(&'a self, node: SyntaxNodeRef) -> impl Iterator<Item = ScopeId> + 'a {
generate(self.scope_for(node), move |&scope| {
self.scopes[scope].parent
})
}
pub(crate) fn resolve_local_name<'a>(
&'a self,
name_ref: ast::NameRef,
) -> Option<&'a ScopeEntry> {
let mut shadowed = FxHashSet::default();
let ret = self
.scope_chain(name_ref.syntax())
.flat_map(|scope| self.entries(scope).iter())
.filter(|entry| shadowed.insert(entry.name()))
.filter(|entry| entry.name() == &name_ref.text())
.nth(0);
ret
}
pub fn find_all_refs(&self, pat: ast::BindPat) -> Vec<ReferenceDescriptor> {
let fn_def = pat.syntax().ancestors().find_map(ast::FnDef::cast).unwrap();
let name_ptr = LocalSyntaxPtr::new(pat.syntax());
let refs: Vec<_> = fn_def
.syntax()
.descendants()
.filter_map(ast::NameRef::cast)
.filter(|name_ref| match self.resolve_local_name(*name_ref) {
None => false,
Some(entry) => entry.ptr() == name_ptr,
})
.map(|name_ref| ReferenceDescriptor {
name: name_ref.syntax().text().to_string(),
range: name_ref.syntax().range(),
})
.collect();
refs
}
fn root_scope(&mut self) -> ScopeId {
self.scopes.alloc(ScopeData {
parent: None,
entries: vec![],
})
}
fn new_scope(&mut self, parent: ScopeId) -> ScopeId {
self.scopes.alloc(ScopeData {
parent: Some(parent),
entries: vec![],
})
}
fn add_bindings(&mut self, scope: ScopeId, pat: ast::Pat) {
let entries = pat
.syntax()
.descendants()
.filter_map(ast::BindPat::cast)
.filter_map(ScopeEntry::new);
self.scopes[scope].entries.extend(entries);
}
fn add_params_bindings(&mut self, scope: ScopeId, params: Option<ast::ParamList>) {
params
.into_iter()
.flat_map(|it| it.params())
.filter_map(|it| it.pat())
.for_each(|it| self.add_bindings(scope, it));
}
fn set_scope(&mut self, node: SyntaxNodeRef, scope: ScopeId) {
self.scope_for.insert(LocalSyntaxPtr::new(node), scope);
}
fn scope_for(&self, node: SyntaxNodeRef) -> Option<ScopeId> {
node.ancestors()
.map(LocalSyntaxPtr::new)
.filter_map(|it| self.scope_for.get(&it).map(|&scope| scope))
.next()
}
}
impl ScopeEntry {
fn new(pat: ast::BindPat) -> Option<ScopeEntry> {
let name = pat.name()?;
let res = ScopeEntry {
name: name.text(),
ptr: LocalSyntaxPtr::new(pat.syntax()),
};
Some(res)
}
pub(crate) fn name(&self) -> &SmolStr {
&self.name
}
pub(crate) fn ptr(&self) -> LocalSyntaxPtr {
self.ptr
}
}
fn compute_block_scopes(block: ast::Block, scopes: &mut FnScopes, mut scope: ScopeId) {
for stmt in block.statements() {
match stmt {
ast::Stmt::LetStmt(stmt) => {
if let Some(expr) = stmt.initializer() {
scopes.set_scope(expr.syntax(), scope);
compute_expr_scopes(expr, scopes, scope);
}
scope = scopes.new_scope(scope);
if let Some(pat) = stmt.pat() {
scopes.add_bindings(scope, pat);
}
}
ast::Stmt::ExprStmt(expr_stmt) => {
if let Some(expr) = expr_stmt.expr() {
scopes.set_scope(expr.syntax(), scope);
compute_expr_scopes(expr, scopes, scope);
}
}
}
}
if let Some(expr) = block.expr() {
scopes.set_scope(expr.syntax(), scope);
compute_expr_scopes(expr, scopes, scope);
}
}
fn compute_expr_scopes(expr: ast::Expr, scopes: &mut FnScopes, scope: ScopeId) {
match expr {
ast::Expr::IfExpr(e) => {
let cond_scope = e
.condition()
.and_then(|cond| compute_cond_scopes(cond, scopes, scope));
if let Some(block) = e.then_branch() {
compute_block_scopes(block, scopes, cond_scope.unwrap_or(scope));
}
if let Some(block) = e.else_branch() {
compute_block_scopes(block, scopes, scope);
}
}
ast::Expr::BlockExpr(e) => {
if let Some(block) = e.block() {
compute_block_scopes(block, scopes, scope);
}
}
ast::Expr::LoopExpr(e) => {
if let Some(block) = e.loop_body() {
compute_block_scopes(block, scopes, scope);
}
}
ast::Expr::WhileExpr(e) => {
let cond_scope = e
.condition()
.and_then(|cond| compute_cond_scopes(cond, scopes, scope));
if let Some(block) = e.loop_body() {
compute_block_scopes(block, scopes, cond_scope.unwrap_or(scope));
}
}
ast::Expr::ForExpr(e) => {
if let Some(expr) = e.iterable() {
compute_expr_scopes(expr, scopes, scope);
}
let mut scope = scope;
if let Some(pat) = e.pat() {
scope = scopes.new_scope(scope);
scopes.add_bindings(scope, pat);
}
if let Some(block) = e.loop_body() {
compute_block_scopes(block, scopes, scope);
}
}
ast::Expr::LambdaExpr(e) => {
let scope = scopes.new_scope(scope);
scopes.add_params_bindings(scope, e.param_list());
if let Some(body) = e.body() {
scopes.set_scope(body.syntax(), scope);
compute_expr_scopes(body, scopes, scope);
}
}
ast::Expr::CallExpr(e) => {
compute_call_scopes(e.expr(), e.arg_list(), scopes, scope);
}
ast::Expr::MethodCallExpr(e) => {
compute_call_scopes(e.expr(), e.arg_list(), scopes, scope);
}
ast::Expr::MatchExpr(e) => {
if let Some(expr) = e.expr() {
compute_expr_scopes(expr, scopes, scope);
}
for arm in e.match_arm_list().into_iter().flat_map(|it| it.arms()) {
let scope = scopes.new_scope(scope);
for pat in arm.pats() {
scopes.add_bindings(scope, pat);
}
if let Some(expr) = arm.expr() {
compute_expr_scopes(expr, scopes, scope);
}
}
}
_ => expr
.syntax()
.children()
.filter_map(ast::Expr::cast)
.for_each(|expr| compute_expr_scopes(expr, scopes, scope)),
};
fn compute_call_scopes(
receiver: Option<ast::Expr>,
arg_list: Option<ast::ArgList>,
scopes: &mut FnScopes,
scope: ScopeId,
) {
arg_list
.into_iter()
.flat_map(|it| it.args())
.chain(receiver)
.for_each(|expr| compute_expr_scopes(expr, scopes, scope));
}
fn compute_cond_scopes(
cond: ast::Condition,
scopes: &mut FnScopes,
scope: ScopeId,
) -> Option<ScopeId> {
if let Some(expr) = cond.expr() {
compute_expr_scopes(expr, scopes, scope);
}
if let Some(pat) = cond.pat() {
let s = scopes.new_scope(scope);
scopes.add_bindings(s, pat);
Some(s)
} else {
None
}
}
}
#[derive(Debug)]
pub struct ReferenceDescriptor {
pub range: TextRange,
pub name: String,
}
#[cfg(test)]
mod tests {
use ra_editor::find_node_at_offset;
use ra_syntax::SourceFileNode;
use test_utils::extract_offset;
use super::*;
fn do_check(code: &str, expected: &[&str]) {
let (off, code) = extract_offset(code);
let code = {
let mut buf = String::new();
let off = u32::from(off) as usize;
buf.push_str(&code[..off]);
buf.push_str("marker");
buf.push_str(&code[off..]);
buf
};
let file = SourceFileNode::parse(&code);
let marker: ast::PathExpr = find_node_at_offset(file.syntax(), off).unwrap();
let fn_def: ast::FnDef = find_node_at_offset(file.syntax(), off).unwrap();
let scopes = FnScopes::new(fn_def);
let actual = scopes
.scope_chain(marker.syntax())
.flat_map(|scope| scopes.entries(scope))
.map(|it| it.name())
.collect::<Vec<_>>();
assert_eq!(actual.as_slice(), expected);
}
#[test]
fn test_lambda_scope() {
do_check(
r"
fn quux(foo: i32) {
let f = |bar, baz: i32| {
<|>
};
}",
&["bar", "baz", "foo"],
);
}
#[test]
fn test_call_scope() {
do_check(
r"
fn quux() {
f(|x| <|> );
}",
&["x"],
);
}
#[test]
fn test_metod_call_scope() {
do_check(
r"
fn quux() {
z.f(|x| <|> );
}",
&["x"],
);
}
#[test]
fn test_loop_scope() {
do_check(
r"
fn quux() {
loop {
let x = ();
<|>
};
}",
&["x"],
);
}
#[test]
fn test_match() {
do_check(
r"
fn quux() {
match () {
Some(x) => {
<|>
}
};
}",
&["x"],
);
}
#[test]
fn test_shadow_variable() {
do_check(
r"
fn foo(x: String) {
let x : &str = &x<|>;
}",
&["x"],
);
}
fn do_check_local_name(code: &str, expected_offset: u32) {
let (off, code) = extract_offset(code);
let file = SourceFileNode::parse(&code);
let fn_def: ast::FnDef = find_node_at_offset(file.syntax(), off).unwrap();
let name_ref: ast::NameRef = find_node_at_offset(file.syntax(), off).unwrap();
let scopes = FnScopes::new(fn_def);
let local_name_entry = scopes.resolve_local_name(name_ref).unwrap();
let local_name = local_name_entry.ptr().resolve(&file);
let expected_name =
find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into()).unwrap();
assert_eq!(local_name.range(), expected_name.syntax().range());
}
#[test]
fn test_resolve_local_name() {
do_check_local_name(
r#"
fn foo(x: i32, y: u32) {
{
let z = x * 2;
}
{
let t = x<|> * 3;
}
}"#,
21,
);
}
#[test]
fn test_resolve_local_name_declaration() {
do_check_local_name(
r#"
fn foo(x: String) {
let x : &str = &x<|>;
}"#,
21,
);
}
#[test]
fn test_resolve_local_name_shadow() {
do_check_local_name(
r"
fn foo(x: String) {
let x : &str = &x;
x<|>
}",
46,
);
}
}

crates/ra_hir/src/lib.rs (new file)

@@ -0,0 +1,139 @@
//! HIR (previously known as descriptors) provides a high-level OO access to Rust
//! code.
//!
//! The principal difference between HIR and syntax trees is that HIR is bound
//! to a particular crate instance. That is, it has cfg flags and features
//! applied. So, the relation between syntax and HIR is many-to-one.
macro_rules! ctry {
($expr:expr) => {
match $expr {
None => return Ok(None),
Some(it) => it,
}
};
}
pub(crate) mod db;
mod query_definitions;
mod function;
mod module;
mod path;
mod arena;
use std::ops::Index;
use ra_syntax::{SyntaxNodeRef, SyntaxNode};
use ra_db::{LocationIntener, SourceRootId, FileId, Cancelable};
use crate::{
db::HirDatabase,
arena::{Arena, Id},
};
pub(crate) use self::{
path::{Path, PathKind},
module::{Module, ModuleId, Problem},
function::{Function, FnScopes},
};
pub use self::function::FnSignatureInfo;
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub(crate) struct FnId(u32);
ra_db::impl_numeric_id!(FnId);
impl FnId {
pub(crate) fn from_loc(
db: &impl AsRef<LocationIntener<SourceItemId, FnId>>,
loc: &SourceItemId,
) -> FnId {
db.as_ref().loc2id(loc)
}
pub(crate) fn loc(self, db: &impl AsRef<LocationIntener<SourceItemId, FnId>>) -> SourceItemId {
db.as_ref().id2loc(self)
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub(crate) struct DefId(u32);
ra_db::impl_numeric_id!(DefId);
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub(crate) enum DefLoc {
Module {
id: ModuleId,
source_root: SourceRootId,
},
Item {
source_item_id: SourceItemId,
},
}
impl DefId {
pub(crate) fn loc(self, db: &impl AsRef<LocationIntener<DefLoc, DefId>>) -> DefLoc {
db.as_ref().id2loc(self)
}
}
impl DefLoc {
pub(crate) fn id(&self, db: &impl AsRef<LocationIntener<DefLoc, DefId>>) -> DefId {
db.as_ref().loc2id(&self)
}
}
pub(crate) enum Def {
Module(Module),
Item,
}
impl DefId {
pub(crate) fn resolve(self, db: &impl HirDatabase) -> Cancelable<Def> {
let loc = self.loc(db);
let res = match loc {
DefLoc::Module { id, source_root } => {
let descr = Module::new(db, source_root, id)?;
Def::Module(descr)
}
DefLoc::Item { .. } => Def::Item,
};
Ok(res)
}
}
/// Identifier of an item within a specific file. This is stable over reparses, so
/// it's OK to use it as a salsa key/value.
pub(crate) type SourceFileItemId = Id<SyntaxNode>;
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub(crate) struct SourceItemId {
file_id: FileId,
item_id: SourceFileItemId,
}
/// Maps item's `SyntaxNode`s to `SourceFileItemId` and back.
#[derive(Debug, PartialEq, Eq, Default)]
pub(crate) struct SourceFileItems {
arena: Arena<SyntaxNode>,
}
impl SourceFileItems {
fn alloc(&mut self, item: SyntaxNode) -> SourceFileItemId {
self.arena.alloc(item)
}
fn id_of(&self, item: SyntaxNodeRef) -> SourceFileItemId {
let (id, _item) = self
.arena
.iter()
.find(|(_id, i)| i.borrowed() == item)
.unwrap();
id
}
}
impl Index<SourceFileItemId> for SourceFileItems {
type Output = SyntaxNode;
fn index(&self, idx: SourceFileItemId) -> &SyntaxNode {
&self.arena[idx]
}
}
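
Aside: a hedged sketch of the interning round trip these ids implement (the helper is illustrative, not in the commit): a location is interned into a small Copy id, and the id resolves back to the same location.

use ra_db::LocationIntener;

use crate::{FnId, SourceItemId};

// Illustrative only: because FnId is a plain u32 newtype it is cheap to use
// as a salsa key or value, while the interner keeps the id <-> location map.
fn fn_id_roundtrip(
    db: &impl AsRef<LocationIntener<SourceItemId, FnId>>,
    loc: SourceItemId,
) -> (FnId, SourceItemId) {
    let fn_id = FnId::from_loc(db, &loc);
    let loc_back = fn_id.loc(db); // the same location that was interned above
    (fn_id, loc_back)
}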


@@ -0,0 +1,194 @@
use std::sync::Arc;
use ra_syntax::{
ast::{self, NameOwner},
SmolStr,
};
use relative_path::RelativePathBuf;
use rustc_hash::{FxHashMap, FxHashSet};
use ra_db::{SourceRoot, SourceRootId, FileResolverImp, Cancelable, FileId,};
use crate::{
HirDatabase,
};
use super::{
LinkData, LinkId, ModuleData, ModuleId, ModuleSource,
ModuleTree, Problem,
};
#[derive(Clone, Hash, PartialEq, Eq, Debug)]
pub(crate) enum Submodule {
Declaration(SmolStr),
Definition(SmolStr, ModuleSource),
}
impl Submodule {
fn name(&self) -> &SmolStr {
match self {
Submodule::Declaration(name) => name,
Submodule::Definition(name, _) => name,
}
}
}
pub(crate) fn modules<'a>(
root: impl ast::ModuleItemOwner<'a>,
) -> impl Iterator<Item = (SmolStr, ast::Module<'a>)> {
root.items()
.filter_map(|item| match item {
ast::ModuleItem::Module(m) => Some(m),
_ => None,
})
.filter_map(|module| {
let name = module.name()?.text();
Some((name, module))
})
}
pub(crate) fn module_tree(
db: &impl HirDatabase,
source_root: SourceRootId,
) -> Cancelable<Arc<ModuleTree>> {
db.check_canceled()?;
let res = create_module_tree(db, source_root)?;
Ok(Arc::new(res))
}
fn create_module_tree<'a>(
db: &impl HirDatabase,
source_root: SourceRootId,
) -> Cancelable<ModuleTree> {
let mut tree = ModuleTree::default();
let mut roots = FxHashMap::default();
let mut visited = FxHashSet::default();
let source_root = db.source_root(source_root);
for &file_id in source_root.files.iter() {
let source = ModuleSource::SourceFile(file_id);
if visited.contains(&source) {
continue; // TODO: use explicit crate_roots here
}
assert!(!roots.contains_key(&file_id));
let module_id = build_subtree(
db,
&source_root,
&mut tree,
&mut visited,
&mut roots,
None,
source,
)?;
roots.insert(file_id, module_id);
}
Ok(tree)
}
fn build_subtree(
db: &impl HirDatabase,
source_root: &SourceRoot,
tree: &mut ModuleTree,
visited: &mut FxHashSet<ModuleSource>,
roots: &mut FxHashMap<FileId, ModuleId>,
parent: Option<LinkId>,
source: ModuleSource,
) -> Cancelable<ModuleId> {
visited.insert(source);
let id = tree.push_mod(ModuleData {
source,
parent,
children: Vec::new(),
});
for sub in db.submodules(source)?.iter() {
let link = tree.push_link(LinkData {
name: sub.name().clone(),
owner: id,
points_to: Vec::new(),
problem: None,
});
let (points_to, problem) = match sub {
Submodule::Declaration(name) => {
let (points_to, problem) =
resolve_submodule(source, &name, &source_root.file_resolver);
let points_to = points_to
.into_iter()
.map(|file_id| match roots.remove(&file_id) {
Some(module_id) => {
tree.mods[module_id].parent = Some(link);
Ok(module_id)
}
None => build_subtree(
db,
source_root,
tree,
visited,
roots,
Some(link),
ModuleSource::SourceFile(file_id),
),
})
.collect::<Cancelable<Vec<_>>>()?;
(points_to, problem)
}
Submodule::Definition(_name, submodule_source) => {
let points_to = build_subtree(
db,
source_root,
tree,
visited,
roots,
Some(link),
*submodule_source,
)?;
(vec![points_to], None)
}
};
tree.links[link].points_to = points_to;
tree.links[link].problem = problem;
}
Ok(id)
}
fn resolve_submodule(
source: ModuleSource,
name: &SmolStr,
file_resolver: &FileResolverImp,
) -> (Vec<FileId>, Option<Problem>) {
let file_id = match source {
ModuleSource::SourceFile(it) => it,
ModuleSource::Module(..) => {
// TODO
return (Vec::new(), None);
}
};
let mod_name = file_resolver.file_stem(file_id);
let is_dir_owner = mod_name == "mod" || mod_name == "lib" || mod_name == "main";
let file_mod = RelativePathBuf::from(format!("../{}.rs", name));
let dir_mod = RelativePathBuf::from(format!("../{}/mod.rs", name));
let points_to: Vec<FileId>;
let problem: Option<Problem>;
if is_dir_owner {
points_to = [&file_mod, &dir_mod]
.iter()
.filter_map(|path| file_resolver.resolve(file_id, path))
.collect();
problem = if points_to.is_empty() {
Some(Problem::UnresolvedModule {
candidate: file_mod,
})
} else {
None
}
} else {
points_to = Vec::new();
problem = Some(Problem::NotDirOwner {
move_to: RelativePathBuf::from(format!("../{}/mod.rs", mod_name)),
candidate: file_mod,
});
}
(points_to, problem)
}


@@ -0,0 +1,378 @@
pub(super) mod imp;
pub(super) mod nameres;
use std::sync::Arc;
use ra_editor::find_node_at_offset;
use ra_syntax::{
algo::generate,
ast::{self, AstNode, NameOwner},
SmolStr, SyntaxNode,
};
use ra_db::{SourceRootId, FileId, FilePosition, Cancelable};
use relative_path::RelativePathBuf;
use crate::{
DefLoc, DefId, Path, PathKind, HirDatabase, SourceItemId,
arena::{Arena, Id},
};
pub(crate) use self::nameres::ModuleScope;
/// `Module` is the API entry point to get all the information
/// about a particular module.
#[derive(Debug, Clone)]
pub(crate) struct Module {
tree: Arc<ModuleTree>,
source_root_id: SourceRootId,
module_id: ModuleId,
}
impl Module {
/// Lookup `Module` by `FileId`. Note that this is an inherently
/// lossy transformation: in general, a single source might correspond to
/// several modules.
pub fn guess_from_file_id(
db: &impl HirDatabase,
file_id: FileId,
) -> Cancelable<Option<Module>> {
Module::guess_from_source(db, file_id, ModuleSource::SourceFile(file_id))
}
/// Lookup `Module` by position in the source code. Note that this
/// is an inherently lossy transformation: in general, a single source might
/// correspond to several modules.
pub fn guess_from_position(
db: &impl HirDatabase,
position: FilePosition,
) -> Cancelable<Option<Module>> {
let file = db.source_file(position.file_id);
let module_source = match find_node_at_offset::<ast::Module>(file.syntax(), position.offset)
{
Some(m) if !m.has_semi() => ModuleSource::new_inline(db, position.file_id, m),
_ => ModuleSource::SourceFile(position.file_id),
};
Module::guess_from_source(db, position.file_id, module_source)
}
fn guess_from_source(
db: &impl HirDatabase,
file_id: FileId,
module_source: ModuleSource,
) -> Cancelable<Option<Module>> {
let source_root_id = db.file_source_root(file_id);
let module_tree = db.module_tree(source_root_id)?;
let res = match module_tree.any_module_for_source(module_source) {
None => None,
Some(module_id) => Some(Module {
tree: module_tree,
source_root_id,
module_id,
}),
};
Ok(res)
}
pub(super) fn new(
db: &impl HirDatabase,
source_root_id: SourceRootId,
module_id: ModuleId,
) -> Cancelable<Module> {
let module_tree = db.module_tree(source_root_id)?;
let res = Module {
tree: module_tree,
source_root_id,
module_id,
};
Ok(res)
}
/// Returns the `mod foo;` or `mod foo {}` node which declared this module.
/// Returns `None` for the root module
pub fn parent_link_source(&self, db: &impl HirDatabase) -> Option<(FileId, ast::ModuleNode)> {
let link = self.module_id.parent_link(&self.tree)?;
let file_id = link.owner(&self.tree).source(&self.tree).file_id();
let src = link.bind_source(&self.tree, db);
Some((file_id, src))
}
pub fn source(&self) -> ModuleSource {
self.module_id.source(&self.tree)
}
/// Parent module. Returns `None` if this is a root module.
pub fn parent(&self) -> Option<Module> {
let parent_id = self.module_id.parent(&self.tree)?;
Some(Module {
module_id: parent_id,
..self.clone()
})
}
/// The root of the tree this module is part of
pub fn crate_root(&self) -> Module {
let root_id = self.module_id.crate_root(&self.tree);
Module {
module_id: root_id,
..self.clone()
}
}
/// `name` is `None` for the crate's root module
#[allow(unused)]
pub fn name(&self) -> Option<SmolStr> {
let link = self.module_id.parent_link(&self.tree)?;
Some(link.name(&self.tree))
}
pub fn def_id(&self, db: &impl HirDatabase) -> DefId {
let def_loc = DefLoc::Module {
id: self.module_id,
source_root: self.source_root_id,
};
def_loc.id(db)
}
/// Finds a child module with the specified name.
pub fn child(&self, name: &str) -> Option<Module> {
let child_id = self.module_id.child(&self.tree, name)?;
Some(Module {
module_id: child_id,
..self.clone()
})
}
/// Returns a `ModuleScope`: the set of items visible in this module.
pub(crate) fn scope(&self, db: &impl HirDatabase) -> Cancelable<ModuleScope> {
let item_map = db.item_map(self.source_root_id)?;
let res = item_map.per_module[&self.module_id].clone();
Ok(res)
}
pub(crate) fn resolve_path(
&self,
db: &impl HirDatabase,
path: Path,
) -> Cancelable<Option<DefId>> {
let mut curr = match path.kind {
PathKind::Crate => self.crate_root(),
PathKind::Self_ | PathKind::Plain => self.clone(),
PathKind::Super => ctry!(self.parent()),
}
.def_id(db);
let segments = path.segments;
for name in segments.iter() {
let module = match curr.loc(db) {
DefLoc::Module { id, source_root } => Module::new(db, source_root, id)?,
_ => return Ok(None),
};
let scope = module.scope(db)?;
curr = ctry!(ctry!(scope.get(&name)).def_id);
}
Ok(Some(curr))
}
pub fn problems(&self, db: &impl HirDatabase) -> Vec<(SyntaxNode, Problem)> {
self.module_id.problems(&self.tree, db)
}
}
/// Physically, Rust source is organized as a set of files, but logically it is
/// organized as a tree of modules. Usually, a single file corresponds to a
/// single module, but it is not necessarily the case.
///
/// Module encapsulates the logic of transitioning from the fuzzy world of files
/// (which can have multiple parents) to the precise world of modules (which
/// always have one parent).
#[derive(Default, Debug, PartialEq, Eq)]
pub(crate) struct ModuleTree {
mods: Arena<ModuleData>,
links: Arena<LinkData>,
}
impl ModuleTree {
pub(crate) fn modules<'a>(&'a self) -> impl Iterator<Item = ModuleId> + 'a {
self.mods.iter().map(|(id, _)| id)
}
fn modules_for_source(&self, source: ModuleSource) -> Vec<ModuleId> {
self.mods
.iter()
.filter(|(_idx, it)| it.source == source)
.map(|(idx, _)| idx)
.collect()
}
fn any_module_for_source(&self, source: ModuleSource) -> Option<ModuleId> {
self.modules_for_source(source).pop()
}
}
/// `ModuleSource` is the syntax tree element that produced this module:
/// either a file, or an inline module.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub(crate) enum ModuleSource {
SourceFile(FileId),
Module(SourceItemId),
}
/// An owned syntax node for a module. Unlike `ModuleSource`,
/// this holds onto the AST for the whole file.
pub(crate) enum ModuleSourceNode {
SourceFile(ast::SourceFileNode),
Module(ast::ModuleNode),
}
pub(crate) type ModuleId = Id<ModuleData>;
type LinkId = Id<LinkData>;
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub enum Problem {
UnresolvedModule {
candidate: RelativePathBuf,
},
NotDirOwner {
move_to: RelativePathBuf,
candidate: RelativePathBuf,
},
}
impl ModuleId {
pub(crate) fn source(self, tree: &ModuleTree) -> ModuleSource {
tree.mods[self].source
}
fn parent_link(self, tree: &ModuleTree) -> Option<LinkId> {
tree.mods[self].parent
}
fn parent(self, tree: &ModuleTree) -> Option<ModuleId> {
let link = self.parent_link(tree)?;
Some(tree.links[link].owner)
}
fn crate_root(self, tree: &ModuleTree) -> ModuleId {
generate(Some(self), move |it| it.parent(tree))
.last()
.unwrap()
}
fn child(self, tree: &ModuleTree, name: &str) -> Option<ModuleId> {
let link = tree.mods[self]
.children
.iter()
.map(|&it| &tree.links[it])
.find(|it| it.name == name)?;
Some(*link.points_to.first()?)
}
fn children<'a>(self, tree: &'a ModuleTree) -> impl Iterator<Item = (SmolStr, ModuleId)> + 'a {
tree.mods[self].children.iter().filter_map(move |&it| {
let link = &tree.links[it];
let module = *link.points_to.first()?;
Some((link.name.clone(), module))
})
}
fn problems(self, tree: &ModuleTree, db: &impl HirDatabase) -> Vec<(SyntaxNode, Problem)> {
tree.mods[self]
.children
.iter()
.filter_map(|&it| {
let p = tree.links[it].problem.clone()?;
let s = it.bind_source(tree, db);
let s = s.borrowed().name().unwrap().syntax().owned();
Some((s, p))
})
.collect()
}
}
impl LinkId {
fn owner(self, tree: &ModuleTree) -> ModuleId {
tree.links[self].owner
}
fn name(self, tree: &ModuleTree) -> SmolStr {
tree.links[self].name.clone()
}
fn bind_source<'a>(self, tree: &ModuleTree, db: &impl HirDatabase) -> ast::ModuleNode {
let owner = self.owner(tree);
match owner.source(tree).resolve(db) {
ModuleSourceNode::SourceFile(root) => {
let ast = imp::modules(root.borrowed())
.find(|(name, _)| name == &tree.links[self].name)
.unwrap()
.1;
ast.owned()
}
ModuleSourceNode::Module(it) => it,
}
}
}
#[derive(Debug, PartialEq, Eq, Hash)]
pub(crate) struct ModuleData {
source: ModuleSource,
parent: Option<LinkId>,
children: Vec<LinkId>,
}
impl ModuleSource {
pub(crate) fn new_inline(
db: &impl HirDatabase,
file_id: FileId,
module: ast::Module,
) -> ModuleSource {
assert!(!module.has_semi());
let items = db.file_items(file_id);
let item_id = items.id_of(module.syntax());
let id = SourceItemId { file_id, item_id };
ModuleSource::Module(id)
}
pub(crate) fn as_file(self) -> Option<FileId> {
match self {
ModuleSource::SourceFile(f) => Some(f),
ModuleSource::Module(..) => None,
}
}
pub(crate) fn file_id(self) -> FileId {
match self {
ModuleSource::SourceFile(f) => f,
ModuleSource::Module(source_item_id) => source_item_id.file_id,
}
}
pub(crate) fn resolve(self, db: &impl HirDatabase) -> ModuleSourceNode {
match self {
ModuleSource::SourceFile(file_id) => {
let syntax = db.source_file(file_id);
ModuleSourceNode::SourceFile(syntax.ast().owned())
}
ModuleSource::Module(item_id) => {
let syntax = db.file_item(item_id);
let syntax = syntax.borrowed();
let module = ast::Module::cast(syntax).unwrap();
ModuleSourceNode::Module(module.owned())
}
}
}
}
#[derive(Hash, Debug, PartialEq, Eq)]
struct LinkData {
owner: ModuleId,
name: SmolStr,
points_to: Vec<ModuleId>,
problem: Option<Problem>,
}
impl ModuleTree {
fn push_mod(&mut self, data: ModuleData) -> ModuleId {
self.mods.alloc(data)
}
fn push_link(&mut self, data: LinkData) -> LinkId {
let owner = data.owner;
let id = self.links.alloc(data);
self.mods[owner].children.push(id);
id
}
}
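
Aside: a hedged usage sketch for the `Module` API above (the helper is illustrative; the names come from this file): start from a cursor position, find the containing module, and resolve a `self::foo` path relative to it.

use ra_db::{Cancelable, FilePosition};
use ra_syntax::SmolStr;

use crate::{DefId, HirDatabase, Module, Path, PathKind};

// Illustrative only.
fn def_of_self_foo(
    db: &impl HirDatabase,
    position: FilePosition,
) -> Cancelable<Option<DefId>> {
    let module = match Module::guess_from_position(db, position)? {
        Some(it) => it,
        None => return Ok(None),
    };
    let path = Path {
        kind: PathKind::Self_,
        segments: vec![SmolStr::from("foo")],
    };
    // Walks the item map module by module; Ok(None) if any segment fails.
    module.resolve_path(db, path)
}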


@@ -0,0 +1,444 @@
//! Name resolution algorithm. The end result of the algorithm is `ItemMap`: a
//! map which maps each module to its scope: the set of items visible in the
//! module. That is, we only resolve imports here; name resolution of item
//! bodies will be done in a separate step.
//!
//! Like rustc, we use an iterative per-crate algorithm: we start with scopes
//! containing only directly defined items, and then iteratively resolve
//! imports.
//!
//! To make this work nicely in IDE scenarios, we place `InputModuleItems`
//! in between raw syntax and name resolution. `InputModuleItems` are computed
//! using only the module's syntax, and consist of all directly defined items plus
//! imports. The plan is to make `InputModuleItems` independent of local
//! modifications (that is, typing inside a function should not change IMIs),
//! such that the results of name resolution can be preserved unless the module
//! structure itself is modified.
use std::{
sync::Arc,
};
use rustc_hash::FxHashMap;
use ra_syntax::{
TextRange,
SmolStr, SyntaxKind::{self, *},
ast::{self, AstNode}
};
use ra_db::SourceRootId;
use crate::{
Cancelable, FileId,
DefId, DefLoc,
SourceItemId, SourceFileItemId, SourceFileItems,
Path, PathKind,
HirDatabase,
module::{ModuleId, ModuleTree},
};
/// The item map is the result of name resolution. It contains, for each
/// module, the set of visible items.
#[derive(Default, Debug, PartialEq, Eq)]
pub(crate) struct ItemMap {
pub(crate) per_module: FxHashMap<ModuleId, ModuleScope>,
}
#[derive(Debug, Default, PartialEq, Eq, Clone)]
pub(crate) struct ModuleScope {
items: FxHashMap<SmolStr, Resolution>,
}
impl ModuleScope {
pub(crate) fn entries<'a>(&'a self) -> impl Iterator<Item = (&'a SmolStr, &Resolution)> + 'a {
self.items.iter()
}
pub(crate) fn get(&self, name: &SmolStr) -> Option<&Resolution> {
self.items.get(name)
}
}
/// A set of items and imports declared inside a module, without relation to
/// other modules.
///
/// This stands in between raw syntax and name resolution and allows us to avoid
/// recomputing name res: if `InputModuleItems` are the same, we can avoid
/// running name resolution.
#[derive(Debug, Default, PartialEq, Eq)]
pub(crate) struct InputModuleItems {
items: Vec<ModuleItem>,
imports: Vec<Import>,
}
#[derive(Debug, PartialEq, Eq)]
struct ModuleItem {
id: SourceFileItemId,
name: SmolStr,
kind: SyntaxKind,
vis: Vis,
}
#[derive(Debug, PartialEq, Eq)]
enum Vis {
// Priv,
Other,
}
#[derive(Debug, Clone, PartialEq, Eq)]
struct Import {
path: Path,
kind: ImportKind,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) struct NamedImport {
file_item_id: SourceFileItemId,
relative_range: TextRange,
}
impl NamedImport {
pub(crate) fn range(&self, db: &impl HirDatabase, file_id: FileId) -> TextRange {
let source_item_id = SourceItemId {
file_id,
item_id: self.file_item_id,
};
let syntax = db.file_item(source_item_id);
let offset = syntax.borrowed().range().start();
self.relative_range + offset
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
enum ImportKind {
Glob,
Named(NamedImport),
}
/// Resolution is basically `DefId` atm, but it should account for stuff like
/// multiple namespaces, ambiguity and errors.
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct Resolution {
/// None for unresolved
pub(crate) def_id: Option<DefId>,
/// ident by which this is imported into the local scope.
pub(crate) import: Option<NamedImport>,
}
// #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
// enum Namespace {
// Types,
// Values,
// }
// #[derive(Debug)]
// struct PerNs<T> {
// types: Option<T>,
// values: Option<T>,
// }
impl InputModuleItems {
pub(crate) fn new<'a>(
file_items: &SourceFileItems,
items: impl Iterator<Item = ast::ModuleItem<'a>>,
) -> InputModuleItems {
let mut res = InputModuleItems::default();
for item in items {
res.add_item(file_items, item);
}
res
}
fn add_item(&mut self, file_items: &SourceFileItems, item: ast::ModuleItem) -> Option<()> {
match item {
ast::ModuleItem::StructDef(it) => self.items.push(ModuleItem::new(file_items, it)?),
ast::ModuleItem::EnumDef(it) => self.items.push(ModuleItem::new(file_items, it)?),
ast::ModuleItem::FnDef(it) => self.items.push(ModuleItem::new(file_items, it)?),
ast::ModuleItem::TraitDef(it) => self.items.push(ModuleItem::new(file_items, it)?),
ast::ModuleItem::TypeDef(it) => self.items.push(ModuleItem::new(file_items, it)?),
ast::ModuleItem::ImplItem(_) => {
// impls don't define items
}
ast::ModuleItem::UseItem(it) => self.add_use_item(file_items, it),
ast::ModuleItem::ExternCrateItem(_) => {
// TODO
}
ast::ModuleItem::ConstDef(it) => self.items.push(ModuleItem::new(file_items, it)?),
ast::ModuleItem::StaticDef(it) => self.items.push(ModuleItem::new(file_items, it)?),
ast::ModuleItem::Module(it) => self.items.push(ModuleItem::new(file_items, it)?),
}
Some(())
}
fn add_use_item(&mut self, file_items: &SourceFileItems, item: ast::UseItem) {
let file_item_id = file_items.id_of(item.syntax());
let start_offset = item.syntax().range().start();
Path::expand_use_item(item, |path, range| {
let kind = match range {
None => ImportKind::Glob,
Some(range) => ImportKind::Named(NamedImport {
file_item_id,
relative_range: range - start_offset,
}),
};
self.imports.push(Import { kind, path })
})
}
}
impl ModuleItem {
fn new<'a>(file_items: &SourceFileItems, item: impl ast::NameOwner<'a>) -> Option<ModuleItem> {
let name = item.name()?.text();
let kind = item.syntax().kind();
let vis = Vis::Other;
let id = file_items.id_of(item.syntax());
let res = ModuleItem {
id,
name,
kind,
vis,
};
Some(res)
}
}
pub(crate) struct Resolver<'a, DB> {
pub db: &'a DB,
pub input: &'a FxHashMap<ModuleId, Arc<InputModuleItems>>,
pub source_root: SourceRootId,
pub module_tree: Arc<ModuleTree>,
pub result: ItemMap,
}
impl<'a, DB> Resolver<'a, DB>
where
DB: HirDatabase,
{
pub(crate) fn resolve(mut self) -> Cancelable<ItemMap> {
for (&module_id, items) in self.input.iter() {
self.populate_module(module_id, items)
}
for &module_id in self.input.keys() {
self.db.check_canceled()?;
self.resolve_imports(module_id);
}
Ok(self.result)
}
fn populate_module(&mut self, module_id: ModuleId, input: &InputModuleItems) {
let file_id = module_id.source(&self.module_tree).file_id();
let mut module_items = ModuleScope::default();
for import in input.imports.iter() {
if let Some(name) = import.path.segments.iter().last() {
if let ImportKind::Named(import) = import.kind {
module_items.items.insert(
name.clone(),
Resolution {
def_id: None,
import: Some(import),
},
);
}
}
}
for item in input.items.iter() {
if item.kind == MODULE {
// handle submodules separately
continue;
}
let def_loc = DefLoc::Item {
source_item_id: SourceItemId {
file_id,
item_id: item.id,
},
};
let def_id = def_loc.id(self.db);
let resolution = Resolution {
def_id: Some(def_id),
import: None,
};
module_items.items.insert(item.name.clone(), resolution);
}
for (name, mod_id) in module_id.children(&self.module_tree) {
let def_loc = DefLoc::Module {
id: mod_id,
source_root: self.source_root,
};
let def_id = def_loc.id(self.db);
let resolution = Resolution {
def_id: Some(def_id),
import: None,
};
module_items.items.insert(name, resolution);
}
self.result.per_module.insert(module_id, module_items);
}
fn resolve_imports(&mut self, module_id: ModuleId) {
for import in self.input[&module_id].imports.iter() {
self.resolve_import(module_id, import);
}
}
fn resolve_import(&mut self, module_id: ModuleId, import: &Import) {
let ptr = match import.kind {
ImportKind::Glob => return,
ImportKind::Named(ptr) => ptr,
};
let mut curr = match import.path.kind {
// TODO: handle extern crates
PathKind::Plain => return,
PathKind::Self_ => module_id,
PathKind::Super => {
match module_id.parent(&self.module_tree) {
Some(it) => it,
// TODO: error
None => return,
}
}
PathKind::Crate => module_id.crate_root(&self.module_tree),
};
for (i, name) in import.path.segments.iter().enumerate() {
let is_last = i == import.path.segments.len() - 1;
let def_id = match self.result.per_module[&curr].items.get(name) {
None => return,
Some(res) => match res.def_id {
Some(it) => it,
None => return,
},
};
if !is_last {
curr = match def_id.loc(self.db) {
DefLoc::Module { id, .. } => id,
_ => return,
}
} else {
self.update(module_id, |items| {
let res = Resolution {
def_id: Some(def_id),
import: Some(ptr),
};
items.items.insert(name.clone(), res);
})
}
}
}
fn update(&mut self, module_id: ModuleId, f: impl FnOnce(&mut ModuleScope)) {
let module_items = self.result.per_module.get_mut(&module_id).unwrap();
f(module_items)
}
}
#[cfg(test)]
mod tests {
use ra_db::FilesDatabase;
use crate::{
AnalysisChange,
mock_analysis::{MockAnalysis, analysis_and_position},
hir::{self, HirDatabase},
};
use super::*;
fn item_map(fixture: &str) -> (Arc<ItemMap>, ModuleId) {
let (analysis, pos) = analysis_and_position(fixture);
let db = analysis.imp.db;
let source_root = db.file_source_root(pos.file_id);
let descr = hir::Module::guess_from_position(&*db, pos)
.unwrap()
.unwrap();
let module_id = descr.module_id;
(db.item_map(source_root).unwrap(), module_id)
}
#[test]
fn test_item_map() {
let (item_map, module_id) = item_map(
"
//- /lib.rs
mod foo;
use crate::foo::bar::Baz;
<|>
//- /foo/mod.rs
pub mod bar;
//- /foo/bar.rs
pub struct Baz;
",
);
let name = SmolStr::from("Baz");
let resolution = &item_map.per_module[&module_id].items[&name];
assert!(resolution.def_id.is_some());
}
#[test]
fn typing_inside_a_function_should_not_invalidate_item_map() {
let mock_analysis = MockAnalysis::with_files(
"
//- /lib.rs
mod foo;
use crate::foo::bar::Baz;
fn foo() -> i32 {
1 + 1
}
//- /foo/mod.rs
pub mod bar;
//- /foo/bar.rs
pub struct Baz;
",
);
let file_id = mock_analysis.id_of("/lib.rs");
let mut host = mock_analysis.analysis_host();
let source_root = host.analysis().imp.db.file_source_root(file_id);
{
let db = host.analysis().imp.db;
let events = db.log_executed(|| {
db.item_map(source_root).unwrap();
});
assert!(format!("{:?}", events).contains("item_map"))
}
let mut change = AnalysisChange::new();
change.change_file(
file_id,
"
mod foo;
use crate::foo::bar::Baz;
fn foo() -> i32 { 92 }
"
.to_string(),
);
host.apply_change(change);
{
let db = host.analysis().imp.db;
let events = db.log_executed(|| {
db.item_map(source_root).unwrap();
});
assert!(
!format!("{:?}", events).contains("_item_map"),
"{:#?}",
events
)
}
}
}
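
Aside: a hedged consumer-side sketch (illustrative, using the APIs from this commit): the `ItemMap` is not read directly; `Module::scope` returns the per-module `ModuleScope`, and a name lookup yields its `Resolution`.

use ra_db::Cancelable;
use ra_syntax::SmolStr;

use crate::{db::HirDatabase, DefId, Module};

// Illustrative only: def_id is Some(..) once the declaring item or the
// import it came through has been resolved.
fn visible_def(
    db: &impl HirDatabase,
    module: &Module,
    name: &SmolStr,
) -> Cancelable<Option<DefId>> {
    let scope = module.scope(db)?; // backed by the item_map query
    Ok(scope.get(name).and_then(|resolution| resolution.def_id))
}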

crates/ra_hir/src/path.rs (new file)

@@ -0,0 +1,148 @@
use ra_syntax::{SmolStr, ast, AstNode, TextRange};
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct Path {
pub(crate) kind: PathKind,
pub(crate) segments: Vec<SmolStr>,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum PathKind {
Plain,
Self_,
Super,
Crate,
}
impl Path {
/// Calls `cb` with all paths represented by this use item.
pub(crate) fn expand_use_item(item: ast::UseItem, mut cb: impl FnMut(Path, Option<TextRange>)) {
if let Some(tree) = item.use_tree() {
expand_use_tree(None, tree, &mut cb);
}
}
/// Converts an `ast::Path` to `Path`. Works with use trees.
pub(crate) fn from_ast(mut path: ast::Path) -> Option<Path> {
let mut kind = PathKind::Plain;
let mut segments = Vec::new();
loop {
let segment = path.segment()?;
match segment.kind()? {
ast::PathSegmentKind::Name(name) => segments.push(name.text()),
ast::PathSegmentKind::CrateKw => {
kind = PathKind::Crate;
break;
}
ast::PathSegmentKind::SelfKw => {
kind = PathKind::Self_;
break;
}
ast::PathSegmentKind::SuperKw => {
kind = PathKind::Super;
break;
}
}
path = match qualifier(path) {
Some(it) => it,
None => break,
};
}
segments.reverse();
return Some(Path { kind, segments });
fn qualifier(path: ast::Path) -> Option<ast::Path> {
if let Some(q) = path.qualifier() {
return Some(q);
}
// TODO: this bottom-up traversal is not too precise.
// Should we do a top-down analysis instead, recording results?
let use_tree_list = path.syntax().ancestors().find_map(ast::UseTreeList::cast)?;
let use_tree = use_tree_list.parent_use_tree();
use_tree.path()
}
}
/// `true` if this path is a single identifier, like `foo`
pub(crate) fn is_ident(&self) -> bool {
self.kind == PathKind::Plain && self.segments.len() == 1
}
}
fn expand_use_tree(
prefix: Option<Path>,
tree: ast::UseTree,
cb: &mut impl FnMut(Path, Option<TextRange>),
) {
if let Some(use_tree_list) = tree.use_tree_list() {
let prefix = match tree.path() {
None => prefix,
Some(path) => match convert_path(prefix, path) {
Some(it) => Some(it),
None => return, // TODO: report errors somewhere
},
};
for tree in use_tree_list.use_trees() {
expand_use_tree(prefix.clone(), tree, cb);
}
} else {
if let Some(ast_path) = tree.path() {
if let Some(path) = convert_path(prefix, ast_path) {
let range = if tree.has_star() {
None
} else {
let range = ast_path.segment().unwrap().syntax().range();
Some(range)
};
cb(path, range)
}
}
}
}
fn convert_path(prefix: Option<Path>, path: ast::Path) -> Option<Path> {
let prefix = if let Some(qual) = path.qualifier() {
Some(convert_path(prefix, qual)?)
} else {
None
};
let segment = path.segment()?;
let res = match segment.kind()? {
ast::PathSegmentKind::Name(name) => {
let mut res = prefix.unwrap_or_else(|| Path {
kind: PathKind::Plain,
segments: Vec::with_capacity(1),
});
res.segments.push(name.text());
res
}
ast::PathSegmentKind::CrateKw => {
if prefix.is_some() {
return None;
}
Path {
kind: PathKind::Crate,
segments: Vec::new(),
}
}
ast::PathSegmentKind::SelfKw => {
if prefix.is_some() {
return None;
}
Path {
kind: PathKind::Self_,
segments: Vec::new(),
}
}
ast::PathSegmentKind::SuperKw => {
if prefix.is_some() {
return None;
}
Path {
kind: PathKind::Super,
segments: Vec::new(),
}
}
};
Some(res)
}
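
Aside: a hedged sketch of driving `expand_use_item` (the helper is illustrative): parse a file, walk its `use` items, and collect the paths reported through the callback. For `use crate::foo::bar::Baz;` this should yield one path with kind `PathKind::Crate` and segments `foo`, `bar`, `Baz`.

use ra_syntax::{ast, AstNode, SourceFileNode};

use crate::Path;

// Illustrative only: collect every Path produced for the use items in `code`.
// The second callback argument is the text range of the trailing segment
// (None for glob imports); it is ignored here.
fn collect_use_paths(code: &str) -> Vec<Path> {
    let file = SourceFileNode::parse(code);
    let mut res = Vec::new();
    for use_item in file.syntax().descendants().filter_map(ast::UseItem::cast) {
        Path::expand_use_item(use_item, |path, _range| res.push(path));
    }
    res
}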


@@ -0,0 +1,154 @@
use std::{
sync::Arc,
time::Instant,
};
use rustc_hash::FxHashMap;
use ra_syntax::{
AstNode, SyntaxNode, SmolStr,
ast::{self, FnDef, FnDefNode, NameOwner, ModuleItemOwner}
};
use ra_db::{SourceRootId, FileId, Cancelable,};
use crate::{
FnId,
SourceFileItems, SourceItemId,
db::HirDatabase,
function::FnScopes,
module::{
ModuleSource, ModuleSourceNode, ModuleId,
imp::Submodule,
nameres::{InputModuleItems, ItemMap, Resolver},
},
};
/// Resolve `FnId` to the corresponding `SyntaxNode`
pub(super) fn fn_syntax(db: &impl HirDatabase, fn_id: FnId) -> FnDefNode {
let item_id = fn_id.loc(db);
let syntax = db.file_item(item_id);
FnDef::cast(syntax.borrowed()).unwrap().owned()
}
pub(super) fn fn_scopes(db: &impl HirDatabase, fn_id: FnId) -> Arc<FnScopes> {
let syntax = db.fn_syntax(fn_id);
let res = FnScopes::new(syntax.borrowed());
Arc::new(res)
}
pub(super) fn file_items(db: &impl HirDatabase, file_id: FileId) -> Arc<SourceFileItems> {
let source_file = db.source_file(file_id);
let source_file = source_file.borrowed();
let mut res = SourceFileItems::default();
source_file
.syntax()
.descendants()
.filter_map(ast::ModuleItem::cast)
.map(|it| it.syntax().owned())
.for_each(|it| {
res.alloc(it);
});
Arc::new(res)
}
pub(super) fn file_item(db: &impl HirDatabase, source_item_id: SourceItemId) -> SyntaxNode {
db.file_items(source_item_id.file_id)[source_item_id.item_id].clone()
}
pub(crate) fn submodules(
db: &impl HirDatabase,
source: ModuleSource,
) -> Cancelable<Arc<Vec<Submodule>>> {
db.check_canceled()?;
let file_id = source.file_id();
let submodules = match source.resolve(db) {
ModuleSourceNode::SourceFile(it) => collect_submodules(db, file_id, it.borrowed()),
ModuleSourceNode::Module(it) => it
.borrowed()
.item_list()
.map(|it| collect_submodules(db, file_id, it))
.unwrap_or_else(Vec::new),
};
return Ok(Arc::new(submodules));
fn collect_submodules<'a>(
db: &impl HirDatabase,
file_id: FileId,
root: impl ast::ModuleItemOwner<'a>,
) -> Vec<Submodule> {
modules(root)
.map(|(name, m)| {
if m.has_semi() {
Submodule::Declaration(name)
} else {
let src = ModuleSource::new_inline(db, file_id, m);
Submodule::Definition(name, src)
}
})
.collect()
}
}
pub(crate) fn modules<'a>(
root: impl ast::ModuleItemOwner<'a>,
) -> impl Iterator<Item = (SmolStr, ast::Module<'a>)> {
root.items()
.filter_map(|item| match item {
ast::ModuleItem::Module(m) => Some(m),
_ => None,
})
.filter_map(|module| {
let name = module.name()?.text();
Some((name, module))
})
}
pub(super) fn input_module_items(
db: &impl HirDatabase,
source_root: SourceRootId,
module_id: ModuleId,
) -> Cancelable<Arc<InputModuleItems>> {
let module_tree = db.module_tree(source_root)?;
let source = module_id.source(&module_tree);
let file_items = db.file_items(source.file_id());
let res = match source.resolve(db) {
ModuleSourceNode::SourceFile(it) => {
let items = it.borrowed().items();
InputModuleItems::new(&file_items, items)
}
ModuleSourceNode::Module(it) => {
let items = it
.borrowed()
.item_list()
.into_iter()
.flat_map(|it| it.items());
InputModuleItems::new(&file_items, items)
}
};
Ok(Arc::new(res))
}
pub(super) fn item_map(
db: &impl HirDatabase,
source_root: SourceRootId,
) -> Cancelable<Arc<ItemMap>> {
let start = Instant::now();
let module_tree = db.module_tree(source_root)?;
let input = module_tree
.modules()
.map(|id| {
let items = db.input_module_items(source_root, id)?;
Ok((id, items))
})
.collect::<Cancelable<FxHashMap<_, _>>>()?;
let resolver = Resolver {
db: db,
input: &input,
source_root,
module_tree,
result: ItemMap::default(),
};
let res = resolver.resolve()?;
let elapsed = start.elapsed();
log::info!("item_map: {:?}", elapsed);
Ok(Arc::new(res))
}