start ra_hir_def crate

Aleksey Kladov 2019-10-29 11:15:51 +03:00
parent 120000609a
commit 77f90caf2d
12 changed files with 181 additions and 131 deletions

Cargo.lock generated

@@ -991,6 +991,7 @@ dependencies = [
"ra_arena 0.1.0",
"ra_cfg 0.1.0",
"ra_db 0.1.0",
"ra_hir_def 0.1.0",
"ra_mbe 0.1.0",
"ra_prof 0.1.0",
"ra_syntax 0.1.0",
@@ -1000,6 +1001,15 @@ dependencies = [
"test_utils 0.1.0",
]
[[package]]
name = "ra_hir_def"
version = "0.1.0"
dependencies = [
"ra_arena 0.1.0",
"ra_db 0.1.0",
"ra_syntax 0.1.0",
]
[[package]]
name = "ra_ide_api"
version = "0.1.0"


@@ -19,6 +19,7 @@ ra_cfg = { path = "../ra_cfg" }
ra_db = { path = "../ra_db" }
mbe = { path = "../ra_mbe", package = "ra_mbe" }
tt = { path = "../ra_tt", package = "ra_tt" }
hir_def = { path = "../ra_hir_def", package = "ra_hir_def" }
test_utils = { path = "../test_utils" }
ra_prof = { path = "../ra_prof" }


@@ -59,11 +59,11 @@ pub trait InternDatabase: SourceDatabase {
/// incremental.
#[salsa::query_group(AstDatabaseStorage)]
pub trait AstDatabase: InternDatabase {
#[salsa::invoke(crate::source_id::AstIdMap::ast_id_map_query)]
#[salsa::invoke(crate::source_id::ast_id_map_query)]
fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;
#[salsa::transparent]
#[salsa::invoke(crate::source_id::AstIdMap::file_item_query)]
#[salsa::invoke(crate::source_id::file_item_query)]
fn ast_id_to_node(&self, file_id: HirFileId, ast_id: ErasedFileAstId) -> SyntaxNode;
#[salsa::transparent]


@@ -16,7 +16,7 @@ use crate::{
path::GenericArgs,
ty::primitive::{FloatTy, IntTy, UncertainFloatTy, UncertainIntTy},
type_ref::TypeRef,
DefWithBody, Either, HirFileId, MacroCallLoc, MacroFileKind, Mutability, Path, Resolver,
AstId, DefWithBody, Either, HirFileId, MacroCallLoc, MacroFileKind, Mutability, Path, Resolver,
Source,
};
@@ -458,11 +458,10 @@ where
ast::Expr::Label(_e) => self.alloc_expr(Expr::Missing, syntax_ptr),
ast::Expr::RangeExpr(_e) => self.alloc_expr(Expr::Missing, syntax_ptr),
ast::Expr::MacroCall(e) => {
let ast_id = self
.db
.ast_id_map(self.current_file_id)
.ast_id(&e)
.with_file_id(self.current_file_id);
let ast_id = AstId::new(
self.current_file_id,
self.db.ast_id_map(self.current_file_id).ast_id(&e),
);
if let Some(path) = e.path().and_then(|path| self.parse_path(path)) {
if let Some(def) = self.resolver.resolve_path_as_macro(self.db, &path) {


@@ -11,7 +11,7 @@ use crate::{
db::{AstDatabase, DefDatabase, HirDatabase},
ids::{AstItemDef, LocationCtx},
name::AsName,
Const, Crate, Enum, EnumVariant, FieldSource, Function, HasSource, ImplBlock, Module,
AstId, Const, Crate, Enum, EnumVariant, FieldSource, Function, HasSource, ImplBlock, Module,
ModuleSource, Source, Static, Struct, StructField, Trait, TypeAlias, Union, VariantDef,
};
@@ -183,7 +183,7 @@ impl Module {
ModuleSource::Module(ref module) => {
assert!(!module.has_semi());
let ast_id_map = db.ast_id_map(src.file_id);
let item_id = ast_id_map.ast_id(module).with_file_id(src.file_id);
let item_id = AstId::new(src.file_id, ast_id_map.ast_id(module));
Some(item_id)
}
ModuleSource::SourceFile(_) => None,


@@ -264,7 +264,7 @@ pub(crate) trait AstItemDef<N: AstNode>: salsa::InternKey + Clone {
Self::from_ast_id(ctx, item_id)
}
fn from_ast_id(ctx: LocationCtx<&impl InternDatabase>, ast_id: FileAstId<N>) -> Self {
let loc = ItemLoc { module: ctx.module, ast_id: ast_id.with_file_id(ctx.file_id) };
let loc = ItemLoc { module: ctx.module, ast_id: AstId::new(ctx.file_id, ast_id) };
Self::intern(ctx.db, loc)
}
fn source(self, db: &impl AstDatabase) -> Source<N> {


@@ -20,7 +20,7 @@ use crate::{
resolve::Resolver,
ty::Ty,
type_ref::TypeRef,
AssocItem, Const, Function, HasSource, HirFileId, MacroFileKind, Path, Source, TraitRef,
AssocItem, AstId, Const, Function, HasSource, HirFileId, MacroFileKind, Path, Source, TraitRef,
TypeAlias,
};
@@ -256,7 +256,7 @@ impl ModuleImplBlocks {
}
//FIXME: we should really cut down on the boilerplate required to process a macro
let ast_id = db.ast_id_map(file_id).ast_id(&macro_call).with_file_id(file_id);
let ast_id = AstId::new(file_id, db.ast_id_map(file_id).ast_id(&macro_call));
if let Some(path) = macro_call
.path()
.and_then(|path| Path::from_src(Source { ast: path, file_id }, db))


@@ -567,7 +567,7 @@ where
// inline module, just recurse
raw::ModuleData::Definition { name, items, ast_id } => {
let module_id =
self.push_child_module(name.clone(), ast_id.with_file_id(self.file_id), None);
self.push_child_module(name.clone(), AstId::new(self.file_id, *ast_id), None);
ModCollector {
def_collector: &mut *self.def_collector,
@@ -583,7 +583,7 @@
}
// out of line module, resolve, parse and recurse
raw::ModuleData::Declaration { name, ast_id } => {
let ast_id = ast_id.with_file_id(self.file_id);
let ast_id = AstId::new(self.file_id, *ast_id);
match self.mod_dir.resolve_declaration(
self.def_collector.db,
self.file_id,
@@ -671,21 +671,18 @@
}
fn collect_macro(&mut self, mac: &raw::MacroData) {
let ast_id = AstId::new(self.file_id, mac.ast_id);
// Case 1: macro rules, define a macro in crate-global mutable scope
if is_macro_rules(&mac.path) {
if let Some(name) = &mac.name {
let macro_id = MacroDefId {
ast_id: mac.ast_id.with_file_id(self.file_id),
krate: self.def_collector.def_map.krate,
};
let macro_id = MacroDefId { ast_id, krate: self.def_collector.def_map.krate };
let macro_ = MacroDef { id: macro_id };
self.def_collector.define_macro(self.module_id, name.clone(), macro_, mac.export);
}
return;
}
let ast_id = mac.ast_id.with_file_id(self.file_id);
// Case 2: try to resolve in legacy scope and expand macro_rules, triggering
// recursive item collection.
if let Some(macro_def) = mac.path.as_ident().and_then(|name| {


@@ -2,18 +2,18 @@
use std::{
hash::{Hash, Hasher},
marker::PhantomData,
sync::Arc,
};
use ra_arena::{impl_arena_id, Arena, RawId};
use ra_syntax::{ast, AstNode, SyntaxNode, SyntaxNodePtr};
pub use hir_def::ast_id_map::{AstIdMap, ErasedFileAstId, FileAstId};
use ra_syntax::{AstNode, SyntaxNode};
use crate::{db::AstDatabase, HirFileId};
/// `AstId` points to an AST node in any file.
///
/// It is stable across reparses, and can be used as salsa key/value.
// FIXME: isn't this just a `Source<FileAstId<N>>` ?
#[derive(Debug)]
pub(crate) struct AstId<N: AstNode> {
file_id: HirFileId,
@@ -40,122 +40,34 @@ impl<N: AstNode> Hash for AstId<N> {
}
impl<N: AstNode> AstId<N> {
pub fn new(file_id: HirFileId, file_ast_id: FileAstId<N>) -> AstId<N> {
AstId { file_id, file_ast_id }
}
pub(crate) fn file_id(&self) -> HirFileId {
self.file_id
}
pub(crate) fn to_node(&self, db: &impl AstDatabase) -> N {
let syntax_node = db.ast_id_to_node(self.file_id, self.file_ast_id.raw);
let syntax_node = db.ast_id_to_node(self.file_id, self.file_ast_id.into());
N::cast(syntax_node).unwrap()
}
}
/// `AstId` points to an AST node in a specific file.
#[derive(Debug)]
pub(crate) struct FileAstId<N: AstNode> {
raw: ErasedFileAstId,
_ty: PhantomData<fn() -> N>,
pub(crate) fn ast_id_map_query(db: &impl AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
let map = if let Some(node) = db.parse_or_expand(file_id) {
AstIdMap::from_source(&node)
} else {
AstIdMap::default()
};
Arc::new(map)
}
impl<N: AstNode> Clone for FileAstId<N> {
fn clone(&self) -> FileAstId<N> {
*self
}
}
impl<N: AstNode> Copy for FileAstId<N> {}
impl<N: AstNode> PartialEq for FileAstId<N> {
fn eq(&self, other: &Self) -> bool {
self.raw == other.raw
}
}
impl<N: AstNode> Eq for FileAstId<N> {}
impl<N: AstNode> Hash for FileAstId<N> {
fn hash<H: Hasher>(&self, hasher: &mut H) {
self.raw.hash(hasher);
}
}
impl<N: AstNode> FileAstId<N> {
pub(crate) fn with_file_id(self, file_id: HirFileId) -> AstId<N> {
AstId { file_id, file_ast_id: self }
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct ErasedFileAstId(RawId);
impl_arena_id!(ErasedFileAstId);
/// Maps items' `SyntaxNode`s to `ErasedFileAstId`s and back.
#[derive(Debug, PartialEq, Eq, Default)]
pub struct AstIdMap {
arena: Arena<ErasedFileAstId, SyntaxNodePtr>,
}
impl AstIdMap {
pub(crate) fn ast_id_map_query(db: &impl AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
let map = if let Some(node) = db.parse_or_expand(file_id) {
AstIdMap::from_source(&node)
} else {
AstIdMap::default()
};
Arc::new(map)
}
pub(crate) fn file_item_query(
db: &impl AstDatabase,
file_id: HirFileId,
ast_id: ErasedFileAstId,
) -> SyntaxNode {
let node = db.parse_or_expand(file_id).unwrap();
db.ast_id_map(file_id).arena[ast_id].to_node(&node)
}
pub(crate) fn ast_id<N: AstNode>(&self, item: &N) -> FileAstId<N> {
let ptr = SyntaxNodePtr::new(item.syntax());
let raw = match self.arena.iter().find(|(_id, i)| **i == ptr) {
Some((it, _)) => it,
None => panic!(
"Can't find {:?} in AstIdMap:\n{:?}",
item.syntax(),
self.arena.iter().map(|(_id, i)| i).collect::<Vec<_>>(),
),
};
FileAstId { raw, _ty: PhantomData }
}
fn from_source(node: &SyntaxNode) -> AstIdMap {
assert!(node.parent().is_none());
let mut res = AstIdMap { arena: Arena::default() };
// By walking the tree in breadth-first order we make sure that parents
// get lower ids than children. That is, adding a new child does not
// change the parent's id. This means that, say, adding a new function to a
// trait does not change ids of top-level items, which helps caching.
bfs(node, |it| {
if let Some(module_item) = ast::ModuleItem::cast(it.clone()) {
res.alloc(module_item.syntax());
} else if let Some(macro_call) = ast::MacroCall::cast(it) {
res.alloc(macro_call.syntax());
}
});
res
}
fn alloc(&mut self, item: &SyntaxNode) -> ErasedFileAstId {
self.arena.alloc(SyntaxNodePtr::new(item))
}
}
/// Walks the subtree in bfs order, calling `f` for each node.
fn bfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode)) {
let mut curr_layer = vec![node.clone()];
let mut next_layer = vec![];
while !curr_layer.is_empty() {
curr_layer.drain(..).for_each(|node| {
next_layer.extend(node.children());
f(node);
});
std::mem::swap(&mut curr_layer, &mut next_layer);
}
pub(crate) fn file_item_query(
db: &impl AstDatabase,
file_id: HirFileId,
ast_id: ErasedFileAstId,
) -> SyntaxNode {
let node = db.parse_or_expand(file_id).unwrap();
db.ast_id_map(file_id)[ast_id].to_node(&node)
}


@@ -0,0 +1,10 @@
[package]
edition = "2018"
name = "ra_hir_def"
version = "0.1.0"
authors = ["rust-analyzer developers"]
[dependencies]
ra_arena = { path = "../ra_arena" }
ra_db = { path = "../ra_db" }
ra_syntax = { path = "../ra_syntax" }


@@ -0,0 +1,114 @@
//! `AstIdMap` allows creating stable IDs for "large" syntax nodes like items
//! and macro calls.
//!
//! Specifically, it enumerates all items in a file and uses the position of an
//! item as its ID. That way, IDs don't change unless the set of items itself
//! changes.
use std::{
hash::{Hash, Hasher},
marker::PhantomData,
ops,
};
use ra_arena::{impl_arena_id, Arena, RawId};
use ra_syntax::{ast, AstNode, SyntaxNode, SyntaxNodePtr};
/// `FileAstId` points to an AST node in a specific file.
#[derive(Debug)]
pub struct FileAstId<N: AstNode> {
raw: ErasedFileAstId,
_ty: PhantomData<fn() -> N>,
}
impl<N: AstNode> Clone for FileAstId<N> {
fn clone(&self) -> FileAstId<N> {
*self
}
}
impl<N: AstNode> Copy for FileAstId<N> {}
impl<N: AstNode> PartialEq for FileAstId<N> {
fn eq(&self, other: &Self) -> bool {
self.raw == other.raw
}
}
impl<N: AstNode> Eq for FileAstId<N> {}
impl<N: AstNode> Hash for FileAstId<N> {
fn hash<H: Hasher>(&self, hasher: &mut H) {
self.raw.hash(hasher);
}
}
impl<N: AstNode> From<FileAstId<N>> for ErasedFileAstId {
fn from(id: FileAstId<N>) -> Self {
id.raw
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct ErasedFileAstId(RawId);
impl_arena_id!(ErasedFileAstId);
/// Maps items' `SyntaxNode`s to `ErasedFileAstId`s and back.
#[derive(Debug, PartialEq, Eq, Default)]
pub struct AstIdMap {
arena: Arena<ErasedFileAstId, SyntaxNodePtr>,
}
impl AstIdMap {
pub fn from_source(node: &SyntaxNode) -> AstIdMap {
assert!(node.parent().is_none());
let mut res = AstIdMap { arena: Arena::default() };
// By walking the tree in breadth-first order we make sure that parents
// get lower ids than children. That is, adding a new child does not
// change the parent's id. This means that, say, adding a new function to a
// trait does not change ids of top-level items, which helps caching.
bfs(node, |it| {
if let Some(module_item) = ast::ModuleItem::cast(it.clone()) {
res.alloc(module_item.syntax());
} else if let Some(macro_call) = ast::MacroCall::cast(it) {
res.alloc(macro_call.syntax());
}
});
res
}
pub fn ast_id<N: AstNode>(&self, item: &N) -> FileAstId<N> {
let ptr = SyntaxNodePtr::new(item.syntax());
let raw = match self.arena.iter().find(|(_id, i)| **i == ptr) {
Some((it, _)) => it,
None => panic!(
"Can't find {:?} in AstIdMap:\n{:?}",
item.syntax(),
self.arena.iter().map(|(_id, i)| i).collect::<Vec<_>>(),
),
};
FileAstId { raw, _ty: PhantomData }
}
fn alloc(&mut self, item: &SyntaxNode) -> ErasedFileAstId {
self.arena.alloc(SyntaxNodePtr::new(item))
}
}
impl ops::Index<ErasedFileAstId> for AstIdMap {
type Output = SyntaxNodePtr;
fn index(&self, index: ErasedFileAstId) -> &SyntaxNodePtr {
&self.arena[index]
}
}
/// Walks the subtree in bfs order, calling `f` for each node.
fn bfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode)) {
let mut curr_layer = vec![node.clone()];
let mut next_layer = vec![];
while !curr_layer.is_empty() {
curr_layer.drain(..).for_each(|node| {
next_layer.extend(node.children());
f(node);
});
std::mem::swap(&mut curr_layer, &mut next_layer);
}
}
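
The module docs and the comment in `from_source` claim that breadth-first numbering keeps existing IDs stable when a child item (say, a new method inside a trait) is added. Below is a self-contained toy illustration of that property; the `Item` tree and `bfs_ids` function are hypothetical stand-ins, not the real arena-based implementation:

    use std::collections::VecDeque;

    // Toy "item tree": each item has a name and nested child items.
    #[derive(Clone)]
    struct Item {
        name: &'static str,
        children: Vec<Item>,
    }

    // Number items in breadth-first order, in the spirit of AstIdMap::from_source:
    // every top-level item gets its id before any nested item does.
    fn bfs_ids(top_level: &[Item]) -> Vec<(&'static str, u32)> {
        let mut ids = Vec::new();
        let mut queue: VecDeque<&Item> = top_level.iter().collect();
        let mut next_id = 0;
        while let Some(item) = queue.pop_front() {
            ids.push((item.name, next_id));
            next_id += 1;
            queue.extend(item.children.iter());
        }
        ids
    }

    fn main() {
        let file = vec![
            Item {
                name: "trait Foo",
                children: vec![Item { name: "fn a", children: vec![] }],
            },
            Item { name: "fn main", children: vec![] },
        ];
        let before = bfs_ids(&file);

        // Add a new method to the trait: only ids in deeper layers can shift,
        // so top-level items (and anything cached under their ids) are unaffected.
        let mut edited = file.clone();
        edited[0].children.push(Item { name: "fn b", children: vec![] });
        let after = bfs_ids(&edited);

        assert_eq!(before[0], after[0]); // "trait Foo" keeps id 0
        assert_eq!(before[1], after[1]); // "fn main" keeps id 1
    }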


@@ -0,0 +1,7 @@
//! `ra_hir_def` contains initial "phases" of the compiler. Roughly, everything
//! before types.
//!
//! Note that we are in the process of moving parts of `ra_hir` into
//! `ra_hir_def`, so this crate doesn't contain a lot at the moment.
pub mod ast_id_map;