Merge branch 'master' of https://github.com/rust-analyzer/rust-analyzer into feature/themes

This commit is contained in:
Seivan Heidari 2019-11-25 01:54:54 +01:00
commit 15ea338ac9
94 changed files with 2474 additions and 1910 deletions

View file

@ -37,7 +37,7 @@ impl fmt::Display for RawId {
} }
#[derive(Clone, PartialEq, Eq)] #[derive(Clone, PartialEq, Eq)]
pub struct Arena<ID: ArenaId, T> { pub struct Arena<ID, T> {
data: Vec<T>, data: Vec<T>,
_ty: PhantomData<ID>, _ty: PhantomData<ID>,
} }
@ -67,6 +67,12 @@ pub trait ArenaId {
fn into_raw(self) -> RawId; fn into_raw(self) -> RawId;
} }
impl<ID, T> Arena<ID, T> {
pub const fn new() -> Arena<ID, T> {
Arena { data: Vec::new(), _ty: PhantomData }
}
}
impl<ID: ArenaId, T> Arena<ID, T> { impl<ID: ArenaId, T> Arena<ID, T> {
pub fn len(&self) -> usize { pub fn len(&self) -> usize {
self.data.len() self.data.len()
@ -79,7 +85,7 @@ impl<ID: ArenaId, T> Arena<ID, T> {
self.data.push(value); self.data.push(value);
ID::from_raw(id) ID::from_raw(id)
} }
pub fn iter(&self) -> impl Iterator<Item = (ID, &T)> + ExactSizeIterator { pub fn iter(&self) -> impl Iterator<Item = (ID, &T)> + ExactSizeIterator + DoubleEndedIterator {
self.data.iter().enumerate().map(|(idx, value)| (ID::from_raw(RawId(idx as u32)), value)) self.data.iter().enumerate().map(|(idx, value)| (ID::from_raw(RawId(idx as u32)), value))
} }
} }

View file

@ -36,7 +36,7 @@ pub(crate) fn add_new(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
// We want to only apply this to non-union structs with named fields // We want to only apply this to non-union structs with named fields
let field_list = match (strukt.kind(), strukt.is_union()) { let field_list = match (strukt.kind(), strukt.is_union()) {
(StructKind::Named(named), false) => named, (StructKind::Record(named), false) => named,
_ => return None, _ => return None,
}; };

View file

@ -1,6 +1,6 @@
use super::invert_if::invert_boolean_expression;
use hir::db::HirDatabase; use hir::db::HirDatabase;
use ra_syntax::ast::{self, AstNode}; use ra_syntax::ast::{self, AstNode};
use ra_syntax::SyntaxNode;
use crate::{Assist, AssistCtx, AssistId}; use crate::{Assist, AssistCtx, AssistId};
@ -32,18 +32,18 @@ pub(crate) fn apply_demorgan(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist>
if !cursor_in_range { if !cursor_in_range {
return None; return None;
} }
let lhs = expr.lhs()?.syntax().clone(); let lhs = expr.lhs()?;
let lhs_range = lhs.text_range(); let lhs_range = lhs.syntax().text_range();
let rhs = expr.rhs()?.syntax().clone(); let rhs = expr.rhs()?;
let rhs_range = rhs.text_range(); let rhs_range = rhs.syntax().text_range();
let not_lhs = undo_negation(lhs)?; let not_lhs = invert_boolean_expression(&lhs)?;
let not_rhs = undo_negation(rhs)?; let not_rhs = invert_boolean_expression(&rhs)?;
ctx.add_assist(AssistId("apply_demorgan"), "apply demorgan's law", |edit| { ctx.add_assist(AssistId("apply_demorgan"), "apply demorgan's law", |edit| {
edit.target(op_range); edit.target(op_range);
edit.replace(op_range, opposite_op); edit.replace(op_range, opposite_op);
edit.replace(lhs_range, format!("!({}", not_lhs)); edit.replace(lhs_range, format!("!({}", not_lhs.syntax().text()));
edit.replace(rhs_range, format!("{})", not_rhs)); edit.replace(rhs_range, format!("{})", not_rhs.syntax().text()));
}) })
} }
@ -56,28 +56,6 @@ fn opposite_logic_op(kind: ast::BinOp) -> Option<&'static str> {
} }
} }
// This function tries to undo unary negation, or inequality
fn undo_negation(node: SyntaxNode) -> Option<String> {
match ast::Expr::cast(node)? {
ast::Expr::BinExpr(bin) => match bin.op_kind()? {
ast::BinOp::NegatedEqualityTest => {
let lhs = bin.lhs()?.syntax().text();
let rhs = bin.rhs()?.syntax().text();
Some(format!("{} == {}", lhs, rhs))
}
_ => None,
},
ast::Expr::PrefixExpr(pe) => match pe.op_kind()? {
ast::PrefixOp::Not => {
let child = pe.expr()?.syntax().text();
Some(String::from(child))
}
_ => None,
},
_ => None,
}
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;

View file

@ -101,7 +101,7 @@ fn build_pat(var: ast::EnumVariant) -> Option<ast::Pat> {
iter::repeat(make::placeholder_pat().into()).take(field_list.fields().count()); iter::repeat(make::placeholder_pat().into()).take(field_list.fields().count());
make::tuple_struct_pat(path, pats).into() make::tuple_struct_pat(path, pats).into()
} }
ast::StructKind::Named(field_list) => { ast::StructKind::Record(field_list) => {
let pats = field_list.fields().map(|f| make::bind_pat(f.name().unwrap()).into()); let pats = field_list.fields().map(|f| make::bind_pat(f.name().unwrap()).into());
make::record_pat(path, pats).into() make::record_pat(path, pats).into()
} }

View file

@ -0,0 +1,102 @@
use hir::db::HirDatabase;
use ra_syntax::ast::{self, AstNode};
use ra_syntax::T;
use crate::{Assist, AssistCtx, AssistId};
// Assist: invert_if
//
// Apply invert_if
// This transforms if expressions of the form `if !x {A} else {B}` into `if x {B} else {A}`
// This also works with `!=`. This assist can only be applied with the cursor
// on `if`.
//
// ```
// fn main() {
// if<|> !y { A } else { B }
// }
// ```
// ->
// ```
// fn main() {
// if y { B } else { A }
// }
// ```
pub(crate) fn invert_if(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
let if_keyword = ctx.find_token_at_offset(T![if])?;
let expr = ast::IfExpr::cast(if_keyword.parent())?;
let if_range = if_keyword.text_range();
let cursor_in_range = ctx.frange.range.is_subrange(&if_range);
if !cursor_in_range {
return None;
}
let cond = expr.condition()?.expr()?;
let then_node = expr.then_branch()?.syntax().clone();
if let ast::ElseBranch::Block(else_block) = expr.else_branch()? {
let flip_cond = invert_boolean_expression(&cond)?;
let cond_range = cond.syntax().text_range();
let else_node = else_block.syntax();
let else_range = else_node.text_range();
let then_range = then_node.text_range();
return ctx.add_assist(AssistId("invert_if"), "invert if branches", |edit| {
edit.target(if_range);
edit.replace(cond_range, flip_cond.syntax().text());
edit.replace(else_range, then_node.text());
edit.replace(then_range, else_node.text());
});
}
None
}
pub(crate) fn invert_boolean_expression(expr: &ast::Expr) -> Option<ast::Expr> {
match expr {
ast::Expr::BinExpr(bin) => match bin.op_kind()? {
ast::BinOp::NegatedEqualityTest => bin.replace_op(T![==]).map(|it| it.into()),
_ => None,
},
ast::Expr::PrefixExpr(pe) => match pe.op_kind()? {
ast::PrefixOp::Not => pe.expr(),
_ => None,
},
_ => None,
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::helpers::{check_assist, check_assist_not_applicable};
#[test]
fn invert_if_remove_inequality() {
check_assist(
invert_if,
"fn f() { i<|>f x != 3 { 1 } else { 3 + 2 } }",
"fn f() { i<|>f x == 3 { 3 + 2 } else { 1 } }",
)
}
#[test]
fn invert_if_remove_not() {
check_assist(
invert_if,
"fn f() { <|>if !cond { 3 * 2 } else { 1 } }",
"fn f() { <|>if cond { 1 } else { 3 * 2 } }",
)
}
#[test]
fn invert_if_doesnt_apply_with_cursor_not_on_if() {
check_assist_not_applicable(invert_if, "fn f() { if !<|>cond { 3 * 2 } else { 1 } }")
}
#[test]
fn invert_if_doesnt_apply_without_negated() {
check_assist_not_applicable(invert_if, "fn f() { i<|>f cond { 3 * 2 } else { 1 } }")
}
}

View file

@ -341,6 +341,23 @@ fn main() {
) )
} }
#[test]
fn doctest_invert_if() {
check(
"invert_if",
r#####"
fn main() {
if<|> !y { A } else { B }
}
"#####,
r#####"
fn main() {
if y { B } else { A }
}
"#####,
)
}
#[test] #[test]
fn doctest_make_raw_string() { fn doctest_make_raw_string() {
check( check(

View file

@ -97,6 +97,7 @@ mod assists {
mod add_impl; mod add_impl;
mod add_new; mod add_new;
mod apply_demorgan; mod apply_demorgan;
mod invert_if;
mod flip_comma; mod flip_comma;
mod flip_binexpr; mod flip_binexpr;
mod flip_trait_bound; mod flip_trait_bound;
@ -122,6 +123,7 @@ mod assists {
add_impl::add_impl, add_impl::add_impl,
add_new::add_new, add_new::add_new,
apply_demorgan::apply_demorgan, apply_demorgan::apply_demorgan,
invert_if::invert_if,
change_visibility::change_visibility, change_visibility::change_visibility,
fill_match_arms::fill_match_arms, fill_match_arms::fill_match_arms,
merge_match_arms::merge_match_arms, merge_match_arms::merge_match_arms,

View file

@ -10,7 +10,6 @@ use ra_db::{salsa, CrateId, FileId, FileLoader, FileLoaderDelegate, RelativePath
hir::db::InternDatabaseStorage, hir::db::InternDatabaseStorage,
hir::db::AstDatabaseStorage, hir::db::AstDatabaseStorage,
hir::db::DefDatabaseStorage, hir::db::DefDatabaseStorage,
hir::db::DefDatabase2Storage,
hir::db::HirDatabaseStorage hir::db::HirDatabaseStorage
)] )]
#[derive(Debug, Default)] #[derive(Debug, Default)]

View file

@ -117,9 +117,12 @@ pub fn load(
done = true; done = true;
} }
} }
VfsChange::AddFile { .. } VfsChange::AddFile { root, file, path, text } => {
| VfsChange::RemoveFile { .. } let source_root_id = vfs_root_to_id(root);
| VfsChange::ChangeFile { .. } => { let file_id = vfs_file_to_id(file);
analysis_change.add_file(source_root_id, file_id, path, text);
}
VfsChange::RemoveFile { .. } | VfsChange::ChangeFile { .. } => {
// We just need the first scan, so just ignore these // We just need the first scan, so just ignore these
} }
} }

View file

@ -3,7 +3,7 @@
use std::{collections::HashSet, fmt::Write, path::Path, time::Instant}; use std::{collections::HashSet, fmt::Write, path::Path, time::Instant};
use ra_db::SourceDatabaseExt; use ra_db::SourceDatabaseExt;
use ra_hir::{AssocItem, Crate, HasBodySource, HasSource, HirDisplay, ModuleDef, Ty, TypeWalk}; use ra_hir::{AssocItem, Crate, HasSource, HirDisplay, ModuleDef, Ty, TypeWalk};
use ra_syntax::AstNode; use ra_syntax::AstNode;
use crate::{Result, Verbosity}; use crate::{Result, Verbosity};
@ -109,7 +109,7 @@ pub fn run(
} }
let body = f.body(db); let body = f.body(db);
let inference_result = f.infer(db); let inference_result = f.infer(db);
for (expr_id, _) in body.exprs() { for (expr_id, _) in body.exprs.iter() {
let ty = &inference_result[expr_id]; let ty = &inference_result[expr_id];
num_exprs += 1; num_exprs += 1;
if let Ty::Unknown = ty { if let Ty::Unknown = ty {
@ -128,15 +128,16 @@ pub fn run(
if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr_id) { if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr_id) {
num_type_mismatches += 1; num_type_mismatches += 1;
if verbosity.is_verbose() { if verbosity.is_verbose() {
let src = f.expr_source(db, expr_id); let src = f.body_source_map(db).expr_syntax(expr_id);
if let Some(src) = src { if let Some(src) = src {
// FIXME: it might be nice to have a function (on Analysis?) that goes from Source<T> -> (LineCol, LineCol) directly // FIXME: it might be nice to have a function (on Analysis?) that goes from Source<T> -> (LineCol, LineCol) directly
let original_file = src.file_id.original_file(db); let original_file = src.file_id.original_file(db);
let path = db.file_relative_path(original_file); let path = db.file_relative_path(original_file);
let line_index = host.analysis().file_line_index(original_file).unwrap(); let line_index = host.analysis().file_line_index(original_file).unwrap();
let text_range = src let text_range = src.value.either(
.value |it| it.syntax_node_ptr().range(),
.either(|it| it.syntax().text_range(), |it| it.syntax().text_range()); |it| it.syntax_node_ptr().range(),
);
let (start, end) = ( let (start, end) = (
line_index.line_col(text_range.start()), line_index.line_col(text_range.start()),
line_index.line_col(text_range.end()), line_index.line_col(text_range.end()),

View file

@ -8,7 +8,7 @@ use rustc_hash::FxHashMap;
use test_utils::{extract_offset, parse_fixture, CURSOR_MARKER}; use test_utils::{extract_offset, parse_fixture, CURSOR_MARKER};
use crate::{ use crate::{
CrateGraph, CrateId, Edition, FileId, FilePosition, RelativePathBuf, SourceDatabaseExt, CrateGraph, CrateId, Edition, Env, FileId, FilePosition, RelativePathBuf, SourceDatabaseExt,
SourceRoot, SourceRootId, SourceRoot, SourceRootId,
}; };
@ -53,7 +53,12 @@ fn with_single_file(db: &mut dyn SourceDatabaseExt, text: &str) -> FileId {
source_root.insert_file(rel_path.clone(), file_id); source_root.insert_file(rel_path.clone(), file_id);
let mut crate_graph = CrateGraph::default(); let mut crate_graph = CrateGraph::default();
crate_graph.add_crate_root(file_id, Edition::Edition2018, CfgOptions::default()); crate_graph.add_crate_root(
file_id,
Edition::Edition2018,
CfgOptions::default(),
Env::default(),
);
db.set_file_text(file_id, Arc::new(text.to_string())); db.set_file_text(file_id, Arc::new(text.to_string()));
db.set_file_relative_path(file_id, rel_path); db.set_file_relative_path(file_id, rel_path);
@ -93,7 +98,8 @@ fn with_files(db: &mut dyn SourceDatabaseExt, fixture: &str) -> Option<FilePosit
assert!(meta.path.starts_with(&source_root_prefix)); assert!(meta.path.starts_with(&source_root_prefix));
if let Some(krate) = meta.krate { if let Some(krate) = meta.krate {
let crate_id = crate_graph.add_crate_root(file_id, meta.edition, meta.cfg); let crate_id =
crate_graph.add_crate_root(file_id, meta.edition, meta.cfg, Env::default());
let prev = crates.insert(krate.clone(), crate_id); let prev = crates.insert(krate.clone(), crate_id);
assert!(prev.is_none()); assert!(prev.is_none());
for dep in meta.deps { for dep in meta.deps {
@ -123,7 +129,12 @@ fn with_files(db: &mut dyn SourceDatabaseExt, fixture: &str) -> Option<FilePosit
if crates.is_empty() { if crates.is_empty() {
let crate_root = default_crate_root.unwrap(); let crate_root = default_crate_root.unwrap();
crate_graph.add_crate_root(crate_root, Edition::Edition2018, CfgOptions::default()); crate_graph.add_crate_root(
crate_root,
Edition::Edition2018,
CfgOptions::default(),
Env::default(),
);
} else { } else {
for (from, to) in crate_deps { for (from, to) in crate_deps {
let from_id = crates[&from]; let from_id = crates[&from];

View file

@ -6,14 +6,14 @@
//! actual IO. See `vfs` and `project_model` in the `ra_lsp_server` crate for how //! actual IO. See `vfs` and `project_model` in the `ra_lsp_server` crate for how
//! actual IO is done and lowered to input. //! actual IO is done and lowered to input.
use rustc_hash::FxHashMap; use std::{fmt, str::FromStr};
use ra_cfg::CfgOptions; use ra_cfg::CfgOptions;
use ra_syntax::SmolStr; use ra_syntax::SmolStr;
use rustc_hash::FxHashMap;
use rustc_hash::FxHashSet; use rustc_hash::FxHashSet;
use crate::{RelativePath, RelativePathBuf}; use crate::{RelativePath, RelativePathBuf};
use std::str::FromStr;
/// `FileId` is an integer which uniquely identifies a file. File paths are /// `FileId` is an integer which uniquely identifies a file. File paths are
/// messy and system-dependent, so most of the code should work directly with /// messy and system-dependent, so most of the code should work directly with
@ -80,16 +80,16 @@ pub struct CrateGraph {
arena: FxHashMap<CrateId, CrateData>, arena: FxHashMap<CrateId, CrateData>,
} }
#[derive(Debug)]
pub struct CyclicDependencies;
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct CrateId(pub u32); pub struct CrateId(pub u32);
impl CrateId { #[derive(Debug, Clone, PartialEq, Eq)]
pub fn shift(self, amount: u32) -> CrateId { struct CrateData {
CrateId(self.0 + amount) file_id: FileId,
} edition: Edition,
cfg_options: CfgOptions,
env: Env,
dependencies: Vec<Dependency>,
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@ -98,38 +98,9 @@ pub enum Edition {
Edition2015, Edition2015,
} }
#[derive(Debug)] #[derive(Default, Debug, Clone, PartialEq, Eq)]
pub struct ParseEditionError { pub struct Env {
pub msg: String, entries: FxHashMap<String, String>,
}
impl FromStr for Edition {
type Err = ParseEditionError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"2015" => Ok(Edition::Edition2015),
"2018" => Ok(Edition::Edition2018),
_ => Err(ParseEditionError { msg: format!("unknown edition: {}", s) }),
}
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
struct CrateData {
file_id: FileId,
edition: Edition,
dependencies: Vec<Dependency>,
cfg_options: CfgOptions,
}
impl CrateData {
fn new(file_id: FileId, edition: Edition, cfg_options: CfgOptions) -> CrateData {
CrateData { file_id, edition, dependencies: Vec::new(), cfg_options }
}
fn add_dep(&mut self, name: SmolStr, crate_id: CrateId) {
self.dependencies.push(Dependency { name, crate_id })
}
} }
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
@ -138,21 +109,17 @@ pub struct Dependency {
pub name: SmolStr, pub name: SmolStr,
} }
impl Dependency {
pub fn crate_id(&self) -> CrateId {
self.crate_id
}
}
impl CrateGraph { impl CrateGraph {
pub fn add_crate_root( pub fn add_crate_root(
&mut self, &mut self,
file_id: FileId, file_id: FileId,
edition: Edition, edition: Edition,
cfg_options: CfgOptions, cfg_options: CfgOptions,
env: Env,
) -> CrateId { ) -> CrateId {
let data = CrateData::new(file_id, edition, cfg_options, env);
let crate_id = CrateId(self.arena.len() as u32); let crate_id = CrateId(self.arena.len() as u32);
let prev = self.arena.insert(crate_id, CrateData::new(file_id, edition, cfg_options)); let prev = self.arena.insert(crate_id, data);
assert!(prev.is_none()); assert!(prev.is_none());
crate_id crate_id
} }
@ -166,9 +133,9 @@ impl CrateGraph {
from: CrateId, from: CrateId,
name: SmolStr, name: SmolStr,
to: CrateId, to: CrateId,
) -> Result<(), CyclicDependencies> { ) -> Result<(), CyclicDependenciesError> {
if self.dfs_find(from, to, &mut FxHashSet::default()) { if self.dfs_find(from, to, &mut FxHashSet::default()) {
return Err(CyclicDependencies); return Err(CyclicDependenciesError);
} }
self.arena.get_mut(&from).unwrap().add_dep(name, to); self.arena.get_mut(&from).unwrap().add_dep(name, to);
Ok(()) Ok(())
@ -239,16 +206,70 @@ impl CrateGraph {
} }
} }
impl CrateId {
pub fn shift(self, amount: u32) -> CrateId {
CrateId(self.0 + amount)
}
}
impl CrateData {
fn new(file_id: FileId, edition: Edition, cfg_options: CfgOptions, env: Env) -> CrateData {
CrateData { file_id, edition, dependencies: Vec::new(), cfg_options, env }
}
fn add_dep(&mut self, name: SmolStr, crate_id: CrateId) {
self.dependencies.push(Dependency { name, crate_id })
}
}
impl FromStr for Edition {
type Err = ParseEditionError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let res = match s {
"2015" => Edition::Edition2015,
"2018" => Edition::Edition2018,
_ => Err(ParseEditionError { invalid_input: s.to_string() })?,
};
Ok(res)
}
}
impl Dependency {
pub fn crate_id(&self) -> CrateId {
self.crate_id
}
}
#[derive(Debug)]
pub struct ParseEditionError {
invalid_input: String,
}
impl fmt::Display for ParseEditionError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "invalid edition: {:?}", self.invalid_input)
}
}
impl std::error::Error for ParseEditionError {}
#[derive(Debug)]
pub struct CyclicDependenciesError;
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::{CfgOptions, CrateGraph, Edition::Edition2018, FileId, SmolStr}; use super::{CfgOptions, CrateGraph, Edition::Edition2018, Env, FileId, SmolStr};
#[test] #[test]
fn it_should_panic_because_of_cycle_dependencies() { fn it_should_panic_because_of_cycle_dependencies() {
let mut graph = CrateGraph::default(); let mut graph = CrateGraph::default();
let crate1 = graph.add_crate_root(FileId(1u32), Edition2018, CfgOptions::default()); let crate1 =
let crate2 = graph.add_crate_root(FileId(2u32), Edition2018, CfgOptions::default()); graph.add_crate_root(FileId(1u32), Edition2018, CfgOptions::default(), Env::default());
let crate3 = graph.add_crate_root(FileId(3u32), Edition2018, CfgOptions::default()); let crate2 =
graph.add_crate_root(FileId(2u32), Edition2018, CfgOptions::default(), Env::default());
let crate3 =
graph.add_crate_root(FileId(3u32), Edition2018, CfgOptions::default(), Env::default());
assert!(graph.add_dep(crate1, SmolStr::new("crate2"), crate2).is_ok()); assert!(graph.add_dep(crate1, SmolStr::new("crate2"), crate2).is_ok());
assert!(graph.add_dep(crate2, SmolStr::new("crate3"), crate3).is_ok()); assert!(graph.add_dep(crate2, SmolStr::new("crate3"), crate3).is_ok());
assert!(graph.add_dep(crate3, SmolStr::new("crate1"), crate1).is_err()); assert!(graph.add_dep(crate3, SmolStr::new("crate1"), crate1).is_err());
@ -257,9 +278,12 @@ mod tests {
#[test] #[test]
fn it_works() { fn it_works() {
let mut graph = CrateGraph::default(); let mut graph = CrateGraph::default();
let crate1 = graph.add_crate_root(FileId(1u32), Edition2018, CfgOptions::default()); let crate1 =
let crate2 = graph.add_crate_root(FileId(2u32), Edition2018, CfgOptions::default()); graph.add_crate_root(FileId(1u32), Edition2018, CfgOptions::default(), Env::default());
let crate3 = graph.add_crate_root(FileId(3u32), Edition2018, CfgOptions::default()); let crate2 =
graph.add_crate_root(FileId(2u32), Edition2018, CfgOptions::default(), Env::default());
let crate3 =
graph.add_crate_root(FileId(3u32), Edition2018, CfgOptions::default(), Env::default());
assert!(graph.add_dep(crate1, SmolStr::new("crate2"), crate2).is_ok()); assert!(graph.add_dep(crate1, SmolStr::new("crate2"), crate2).is_ok());
assert!(graph.add_dep(crate2, SmolStr::new("crate3"), crate3).is_ok()); assert!(graph.add_dep(crate2, SmolStr::new("crate3"), crate3).is_ok());
} }

View file

@ -10,11 +10,25 @@ use ra_syntax::{ast, Parse, SourceFile, TextRange, TextUnit};
pub use crate::{ pub use crate::{
cancellation::Canceled, cancellation::Canceled,
input::{CrateGraph, CrateId, Dependency, Edition, FileId, SourceRoot, SourceRootId}, input::{CrateGraph, CrateId, Dependency, Edition, Env, FileId, SourceRoot, SourceRootId},
}; };
pub use relative_path::{RelativePath, RelativePathBuf}; pub use relative_path::{RelativePath, RelativePathBuf};
pub use salsa; pub use salsa;
#[macro_export]
macro_rules! impl_intern_key {
($name:ident) => {
impl $crate::salsa::InternKey for $name {
fn from_intern_id(v: $crate::salsa::InternId) -> Self {
$name(v)
}
fn as_intern_id(&self) -> $crate::salsa::InternId {
self.0
}
}
};
}
pub trait CheckCanceled { pub trait CheckCanceled {
/// Aborts current query if there are pending changes. /// Aborts current query if there are pending changes.
/// ///

View file

@ -1,38 +1,33 @@
//! FIXME: write short doc here //! FIXME: write short doc here
pub(crate) mod src; pub(crate) mod src;
pub(crate) mod docs;
pub(crate) mod attrs;
use std::sync::Arc; use std::sync::Arc;
use hir_def::{ use hir_def::{
adt::VariantData, adt::VariantData,
body::scope::ExprScopes,
builtin_type::BuiltinType, builtin_type::BuiltinType,
nameres::per_ns::PerNs, docs::Documentation,
per_ns::PerNs,
resolver::{HasResolver, TypeNs}, resolver::{HasResolver, TypeNs},
traits::TraitData, type_ref::TypeRef,
type_ref::{Mutability, TypeRef}, AstItemDef, ConstId, ContainerId, EnumId, FunctionId, HasModule, ImplId, LocalEnumVariantId,
ContainerId, CrateModuleId, HasModule, ImplId, LocalEnumVariantId, LocalStructFieldId, Lookup, LocalImportId, LocalModuleId, LocalStructFieldId, Lookup, ModuleId, StaticId, StructId,
ModuleId, UnionId, TraitId, TypeAliasId, UnionId,
}; };
use hir_expand::{ use hir_expand::{
diagnostics::DiagnosticSink, diagnostics::DiagnosticSink,
name::{self, AsName}, name::{self, AsName},
AstId, MacroDefId,
}; };
use ra_db::{CrateId, Edition}; use ra_db::{CrateId, Edition, FileId, FilePosition};
use ra_syntax::ast::{self, NameOwner, TypeAscriptionOwner}; use ra_syntax::{ast, AstNode, SyntaxNode};
use crate::{ use crate::{
db::{AstDatabase, DefDatabase, HirDatabase}, db::{DefDatabase, HirDatabase},
expr::{BindingAnnotation, Body, BodySourceMap, ExprValidator, Pat, PatId}, expr::{BindingAnnotation, Body, BodySourceMap, ExprValidator, Pat, PatId},
ids::{
AstItemDef, ConstId, EnumId, FunctionId, MacroDefId, StaticId, StructId, TraitId,
TypeAliasId,
},
ty::{InferenceResult, Namespace, TraitRef}, ty::{InferenceResult, Namespace, TraitRef},
Either, HasSource, ImportId, Name, Source, Ty, Either, Name, Source, Ty,
}; };
/// hir::Crate describes a single crate. It's the main interface with which /// hir::Crate describes a single crate. It's the main interface with which
@ -66,7 +61,7 @@ impl Crate {
} }
pub fn root_module(self, db: &impl DefDatabase) -> Option<Module> { pub fn root_module(self, db: &impl DefDatabase) -> Option<Module> {
let module_id = db.crate_def_map(self.crate_id).root(); let module_id = db.crate_def_map(self.crate_id).root;
Some(Module::new(self, module_id)) Some(Module::new(self, module_id))
} }
@ -80,6 +75,64 @@ impl Crate {
} }
} }
pub enum ModuleSource {
SourceFile(ast::SourceFile),
Module(ast::Module),
}
impl ModuleSource {
pub fn new(
db: &impl DefDatabase,
file_id: Option<FileId>,
decl_id: Option<AstId<ast::Module>>,
) -> ModuleSource {
match (file_id, decl_id) {
(Some(file_id), _) => {
let source_file = db.parse(file_id).tree();
ModuleSource::SourceFile(source_file)
}
(None, Some(item_id)) => {
let module = item_id.to_node(db);
assert!(module.item_list().is_some(), "expected inline module");
ModuleSource::Module(module)
}
(None, None) => panic!(),
}
}
// FIXME: this methods do not belong here
pub fn from_position(db: &impl DefDatabase, position: FilePosition) -> ModuleSource {
let parse = db.parse(position.file_id);
match &ra_syntax::algo::find_node_at_offset::<ast::Module>(
parse.tree().syntax(),
position.offset,
) {
Some(m) if !m.has_semi() => ModuleSource::Module(m.clone()),
_ => {
let source_file = parse.tree();
ModuleSource::SourceFile(source_file)
}
}
}
pub fn from_child_node(db: &impl DefDatabase, child: Source<&SyntaxNode>) -> ModuleSource {
if let Some(m) =
child.value.ancestors().filter_map(ast::Module::cast).find(|it| !it.has_semi())
{
ModuleSource::Module(m)
} else {
let file_id = child.file_id.original_file(db);
let source_file = db.parse(file_id).tree();
ModuleSource::SourceFile(source_file)
}
}
pub fn from_file_id(db: &impl DefDatabase, file_id: FileId) -> ModuleSource {
let source_file = db.parse(file_id).tree();
ModuleSource::SourceFile(source_file)
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Module { pub struct Module {
pub(crate) id: ModuleId, pub(crate) id: ModuleId,
@ -111,10 +164,10 @@ impl_froms!(
BuiltinType BuiltinType
); );
pub use hir_def::ModuleSource; pub use hir_def::attr::Attrs;
impl Module { impl Module {
pub(crate) fn new(krate: Crate, crate_module_id: CrateModuleId) -> Module { pub(crate) fn new(krate: Crate, crate_module_id: LocalModuleId) -> Module {
Module { id: ModuleId { krate: krate.crate_id, module_id: crate_module_id } } Module { id: ModuleId { krate: krate.crate_id, module_id: crate_module_id } }
} }
@ -131,17 +184,6 @@ impl Module {
}) })
} }
/// Returns the syntax of the last path segment corresponding to this import
pub fn import_source(
self,
db: &impl HirDatabase,
import: ImportId,
) -> Either<ast::UseTree, ast::ExternCrateItem> {
let src = self.definition_source(db);
let (_, source_map) = db.raw_items_with_source_map(src.file_id);
source_map.get(&src.value, import)
}
/// Returns the crate this module is part of. /// Returns the crate this module is part of.
pub fn krate(self) -> Crate { pub fn krate(self) -> Crate {
Crate { crate_id: self.id.krate } Crate { crate_id: self.id.krate }
@ -152,7 +194,7 @@ impl Module {
/// in the module tree of any target in `Cargo.toml`. /// in the module tree of any target in `Cargo.toml`.
pub fn crate_root(self, db: &impl DefDatabase) -> Module { pub fn crate_root(self, db: &impl DefDatabase) -> Module {
let def_map = db.crate_def_map(self.id.krate); let def_map = db.crate_def_map(self.id.krate);
self.with_module_id(def_map.root()) self.with_module_id(def_map.root)
} }
/// Finds a child module with the specified name. /// Finds a child module with the specified name.
@ -191,11 +233,13 @@ impl Module {
} }
/// Returns a `ModuleScope`: a set of items, visible in this module. /// Returns a `ModuleScope`: a set of items, visible in this module.
pub fn scope(self, db: &impl HirDatabase) -> Vec<(Name, ScopeDef, Option<ImportId>)> { pub fn scope(self, db: &impl HirDatabase) -> Vec<(Name, ScopeDef, Option<Import>)> {
db.crate_def_map(self.id.krate)[self.id.module_id] db.crate_def_map(self.id.krate)[self.id.module_id]
.scope .scope
.entries() .entries()
.map(|(name, res)| (name.clone(), res.def.into(), res.import)) .map(|(name, res)| {
(name.clone(), res.def.into(), res.import.map(|id| Import { parent: self, id }))
})
.collect() .collect()
} }
@ -233,11 +277,16 @@ impl Module {
def_map[self.id.module_id].impls.iter().copied().map(ImplBlock::from).collect() def_map[self.id.module_id].impls.iter().copied().map(ImplBlock::from).collect()
} }
fn with_module_id(self, module_id: CrateModuleId) -> Module { fn with_module_id(self, module_id: LocalModuleId) -> Module {
Module::new(self.krate(), module_id) Module::new(self.krate(), module_id)
} }
} }
pub struct Import {
pub(crate) parent: Module,
pub(crate) id: LocalImportId,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct StructField { pub struct StructField {
pub(crate) parent: VariantDef, pub(crate) parent: VariantDef,
@ -252,11 +301,11 @@ pub enum FieldSource {
impl StructField { impl StructField {
pub fn name(&self, db: &impl HirDatabase) -> Name { pub fn name(&self, db: &impl HirDatabase) -> Name {
self.parent.variant_data(db).fields().unwrap()[self.id].name.clone() self.parent.variant_data(db).fields()[self.id].name.clone()
} }
pub fn ty(&self, db: &impl HirDatabase) -> Ty { pub fn ty(&self, db: &impl HirDatabase) -> Ty {
db.type_for_field(*self) db.field_types(self.parent.into())[self.id].clone()
} }
pub fn parent_def(&self, _db: &impl HirDatabase) -> VariantDef { pub fn parent_def(&self, _db: &impl HirDatabase) -> VariantDef {
@ -286,8 +335,7 @@ impl Struct {
db.struct_data(self.id.into()) db.struct_data(self.id.into())
.variant_data .variant_data
.fields() .fields()
.into_iter() .iter()
.flat_map(|it| it.iter())
.map(|(id, _)| StructField { parent: self.into(), id }) .map(|(id, _)| StructField { parent: self.into(), id })
.collect() .collect()
} }
@ -296,8 +344,7 @@ impl Struct {
db.struct_data(self.id.into()) db.struct_data(self.id.into())
.variant_data .variant_data
.fields() .fields()
.into_iter() .iter()
.flat_map(|it| it.iter())
.find(|(_id, data)| data.name == *name) .find(|(_id, data)| data.name == *name)
.map(|(id, _)| StructField { parent: self.into(), id }) .map(|(id, _)| StructField { parent: self.into(), id })
} }
@ -394,8 +441,7 @@ impl EnumVariant {
pub fn fields(self, db: &impl HirDatabase) -> Vec<StructField> { pub fn fields(self, db: &impl HirDatabase) -> Vec<StructField> {
self.variant_data(db) self.variant_data(db)
.fields() .fields()
.into_iter() .iter()
.flat_map(|it| it.iter())
.map(|(id, _)| StructField { parent: self.into(), id }) .map(|(id, _)| StructField { parent: self.into(), id })
.collect() .collect()
} }
@ -403,8 +449,7 @@ impl EnumVariant {
pub fn field(self, db: &impl HirDatabase, name: &Name) -> Option<StructField> { pub fn field(self, db: &impl HirDatabase, name: &Name) -> Option<StructField> {
self.variant_data(db) self.variant_data(db)
.fields() .fields()
.into_iter() .iter()
.flat_map(|it| it.iter())
.find(|(_id, data)| data.name == *name) .find(|(_id, data)| data.name == *name)
.map(|(id, _)| StructField { parent: self.into(), id }) .map(|(id, _)| StructField { parent: self.into(), id })
} }
@ -460,7 +505,7 @@ impl VariantDef {
} }
} }
pub fn field(self, db: &impl HirDatabase, name: &Name) -> Option<StructField> { pub(crate) fn field(self, db: &impl HirDatabase, name: &Name) -> Option<StructField> {
match self { match self {
VariantDef::Struct(it) => it.field(db, name), VariantDef::Struct(it) => it.field(db, name),
VariantDef::EnumVariant(it) => it.field(db, name), VariantDef::EnumVariant(it) => it.field(db, name),
@ -510,128 +555,11 @@ impl DefWithBody {
} }
} }
pub trait HasBody: Copy {
fn infer(self, db: &impl HirDatabase) -> Arc<InferenceResult>;
fn body(self, db: &impl HirDatabase) -> Arc<Body>;
fn body_source_map(self, db: &impl HirDatabase) -> Arc<BodySourceMap>;
fn expr_scopes(self, db: &impl HirDatabase) -> Arc<ExprScopes>;
}
impl<T> HasBody for T
where
T: Into<DefWithBody> + Copy + HasSource,
{
fn infer(self, db: &impl HirDatabase) -> Arc<InferenceResult> {
db.infer(self.into())
}
fn body(self, db: &impl HirDatabase) -> Arc<Body> {
self.into().body(db)
}
fn body_source_map(self, db: &impl HirDatabase) -> Arc<BodySourceMap> {
self.into().body_source_map(db)
}
fn expr_scopes(self, db: &impl HirDatabase) -> Arc<ExprScopes> {
self.into().expr_scopes(db)
}
}
impl HasBody for DefWithBody {
fn infer(self, db: &impl HirDatabase) -> Arc<InferenceResult> {
db.infer(self)
}
fn body(self, db: &impl HirDatabase) -> Arc<Body> {
db.body(self.into())
}
fn body_source_map(self, db: &impl HirDatabase) -> Arc<BodySourceMap> {
db.body_with_source_map(self.into()).1
}
fn expr_scopes(self, db: &impl HirDatabase) -> Arc<ExprScopes> {
db.expr_scopes(self.into())
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Function { pub struct Function {
pub(crate) id: FunctionId, pub(crate) id: FunctionId,
} }
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct FnData {
pub(crate) name: Name,
pub(crate) params: Vec<TypeRef>,
pub(crate) ret_type: TypeRef,
/// True if the first param is `self`. This is relevant to decide whether this
/// can be called as a method.
pub(crate) has_self_param: bool,
}
impl FnData {
pub(crate) fn fn_data_query(
db: &(impl DefDatabase + AstDatabase),
func: Function,
) -> Arc<FnData> {
let src = func.source(db);
let name = src.value.name().map(|n| n.as_name()).unwrap_or_else(Name::missing);
let mut params = Vec::new();
let mut has_self_param = false;
if let Some(param_list) = src.value.param_list() {
if let Some(self_param) = param_list.self_param() {
let self_type = if let Some(type_ref) = self_param.ascribed_type() {
TypeRef::from_ast(type_ref)
} else {
let self_type = TypeRef::Path(name::SELF_TYPE.into());
match self_param.kind() {
ast::SelfParamKind::Owned => self_type,
ast::SelfParamKind::Ref => {
TypeRef::Reference(Box::new(self_type), Mutability::Shared)
}
ast::SelfParamKind::MutRef => {
TypeRef::Reference(Box::new(self_type), Mutability::Mut)
}
}
};
params.push(self_type);
has_self_param = true;
}
for param in param_list.params() {
let type_ref = TypeRef::from_ast_opt(param.ascribed_type());
params.push(type_ref);
}
}
let ret_type = if let Some(type_ref) = src.value.ret_type().and_then(|rt| rt.type_ref()) {
TypeRef::from_ast(type_ref)
} else {
TypeRef::unit()
};
let sig = FnData { name, params, ret_type, has_self_param };
Arc::new(sig)
}
pub fn name(&self) -> &Name {
&self.name
}
pub fn params(&self) -> &[TypeRef] {
&self.params
}
pub fn ret_type(&self) -> &TypeRef {
&self.ret_type
}
/// True if the first arg is `self`. This is relevant to decide whether this
/// can be called as a method.
pub fn has_self_param(&self) -> bool {
self.has_self_param
}
}
impl Function { impl Function {
pub fn module(self, db: &impl DefDatabase) -> Module { pub fn module(self, db: &impl DefDatabase) -> Module {
self.id.lookup(db).module(db).into() self.id.lookup(db).module(db).into()
@ -642,10 +570,18 @@ impl Function {
} }
pub fn name(self, db: &impl HirDatabase) -> Name { pub fn name(self, db: &impl HirDatabase) -> Name {
self.data(db).name.clone() db.function_data(self.id).name.clone()
} }
pub(crate) fn body_source_map(self, db: &impl HirDatabase) -> Arc<BodySourceMap> { pub fn has_self_param(self, db: &impl HirDatabase) -> bool {
db.function_data(self.id).has_self_param
}
pub fn params(self, db: &impl HirDatabase) -> Vec<TypeRef> {
db.function_data(self.id).params.clone()
}
pub fn body_source_map(self, db: &impl HirDatabase) -> Arc<BodySourceMap> {
db.body_with_source_map(self.id.into()).1 db.body_with_source_map(self.id.into()).1
} }
@ -657,10 +593,6 @@ impl Function {
db.type_for_def(self.into(), Namespace::Values) db.type_for_def(self.into(), Namespace::Values)
} }
pub fn data(self, db: &impl HirDatabase) -> Arc<FnData> {
db.fn_data(self)
}
pub fn infer(self, db: &impl HirDatabase) -> Arc<InferenceResult> { pub fn infer(self, db: &impl HirDatabase) -> Arc<InferenceResult> {
db.infer(self.into()) db.infer(self.into())
} }
@ -711,12 +643,8 @@ impl Const {
Some(self.module(db).krate()) Some(self.module(db).krate())
} }
pub fn data(self, db: &impl HirDatabase) -> Arc<ConstData> {
db.const_data(self)
}
pub fn name(self, db: &impl HirDatabase) -> Option<Name> { pub fn name(self, db: &impl HirDatabase) -> Option<Name> {
self.data(db).name().cloned() db.const_data(self.id).name.clone()
} }
pub fn infer(self, db: &impl HirDatabase) -> Arc<InferenceResult> { pub fn infer(self, db: &impl HirDatabase) -> Arc<InferenceResult> {
@ -748,45 +676,6 @@ impl Const {
} }
} }
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ConstData {
pub(crate) name: Option<Name>,
pub(crate) type_ref: TypeRef,
}
impl ConstData {
pub fn name(&self) -> Option<&Name> {
self.name.as_ref()
}
pub fn type_ref(&self) -> &TypeRef {
&self.type_ref
}
pub(crate) fn const_data_query(
db: &(impl DefDatabase + AstDatabase),
konst: Const,
) -> Arc<ConstData> {
let node = konst.source(db).value;
const_data_for(&node)
}
pub(crate) fn static_data_query(
db: &(impl DefDatabase + AstDatabase),
konst: Static,
) -> Arc<ConstData> {
let node = konst.source(db).value;
const_data_for(&node)
}
}
fn const_data_for<N: NameOwner + TypeAscriptionOwner>(node: &N) -> Arc<ConstData> {
let name = node.name().map(|n| n.as_name());
let type_ref = TypeRef::from_ast_opt(node.ascribed_type());
let sig = ConstData { name, type_ref };
Arc::new(sig)
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Static { pub struct Static {
pub(crate) id: StaticId, pub(crate) id: StaticId,
@ -794,17 +683,13 @@ pub struct Static {
impl Static { impl Static {
pub fn module(self, db: &impl DefDatabase) -> Module { pub fn module(self, db: &impl DefDatabase) -> Module {
Module { id: self.id.module(db) } Module { id: self.id.lookup(db).module(db) }
} }
pub fn krate(self, db: &impl DefDatabase) -> Option<Crate> { pub fn krate(self, db: &impl DefDatabase) -> Option<Crate> {
Some(self.module(db).krate()) Some(self.module(db).krate())
} }
pub fn data(self, db: &impl HirDatabase) -> Arc<ConstData> {
db.static_data(self)
}
pub fn infer(self, db: &impl HirDatabase) -> Arc<InferenceResult> { pub fn infer(self, db: &impl HirDatabase) -> Arc<InferenceResult> {
db.infer(self.into()) db.infer(self.into())
} }
@ -821,11 +706,11 @@ impl Trait {
} }
pub fn name(self, db: &impl DefDatabase) -> Option<Name> { pub fn name(self, db: &impl DefDatabase) -> Option<Name> {
self.trait_data(db).name.clone() db.trait_data(self.id).name.clone()
} }
pub fn items(self, db: &impl DefDatabase) -> Vec<AssocItem> { pub fn items(self, db: &impl DefDatabase) -> Vec<AssocItem> {
self.trait_data(db).items.iter().map(|it| (*it).into()).collect() db.trait_data(self.id).items.iter().map(|it| (*it).into()).collect()
} }
fn direct_super_traits(self, db: &impl HirDatabase) -> Vec<Trait> { fn direct_super_traits(self, db: &impl HirDatabase) -> Vec<Trait> {
@ -871,7 +756,7 @@ impl Trait {
} }
pub fn associated_type_by_name(self, db: &impl DefDatabase, name: &Name) -> Option<TypeAlias> { pub fn associated_type_by_name(self, db: &impl DefDatabase, name: &Name) -> Option<TypeAlias> {
let trait_data = self.trait_data(db); let trait_data = db.trait_data(self.id);
let res = let res =
trait_data.associated_types().map(TypeAlias::from).find(|t| &t.name(db) == name)?; trait_data.associated_types().map(TypeAlias::from).find(|t| &t.name(db) == name)?;
Some(res) Some(res)
@ -885,16 +770,12 @@ impl Trait {
self.all_super_traits(db).into_iter().find_map(|t| t.associated_type_by_name(db, name)) self.all_super_traits(db).into_iter().find_map(|t| t.associated_type_by_name(db, name))
} }
pub(crate) fn trait_data(self, db: &impl DefDatabase) -> Arc<TraitData> {
db.trait_data(self.id)
}
pub fn trait_ref(self, db: &impl HirDatabase) -> TraitRef { pub fn trait_ref(self, db: &impl HirDatabase) -> TraitRef {
TraitRef::for_trait(db, self) TraitRef::for_trait(db, self)
} }
pub fn is_auto(self, db: &impl DefDatabase) -> bool { pub fn is_auto(self, db: &impl DefDatabase) -> bool {
self.trait_data(db).auto db.trait_data(self.id).auto
} }
} }
@ -937,7 +818,7 @@ impl TypeAlias {
} }
pub fn type_ref(self, db: &impl DefDatabase) -> Option<TypeRef> { pub fn type_ref(self, db: &impl DefDatabase) -> Option<TypeRef> {
db.type_alias_data(self).type_ref.clone() db.type_alias_data(self.id).type_ref.clone()
} }
pub fn ty(self, db: &impl HirDatabase) -> Ty { pub fn ty(self, db: &impl HirDatabase) -> Ty {
@ -945,7 +826,7 @@ impl TypeAlias {
} }
pub fn name(self, db: &impl DefDatabase) -> Name { pub fn name(self, db: &impl DefDatabase) -> Name {
db.type_alias_data(self).name.clone() db.type_alias_data(self.id).name.clone()
} }
} }
@ -1034,7 +915,7 @@ pub struct Local {
impl Local { impl Local {
pub fn name(self, db: &impl HirDatabase) -> Option<Name> { pub fn name(self, db: &impl HirDatabase) -> Option<Name> {
let body = self.parent.body(db); let body = db.body(self.parent.into());
match &body[self.pat_id] { match &body[self.pat_id] {
Pat::Bind { name, .. } => Some(name.clone()), Pat::Bind { name, .. } => Some(name.clone()),
_ => None, _ => None,
@ -1046,7 +927,7 @@ impl Local {
} }
pub fn is_mut(self, db: &impl HirDatabase) -> bool { pub fn is_mut(self, db: &impl HirDatabase) -> bool {
let body = self.parent.body(db); let body = db.body(self.parent.into());
match &body[self.pat_id] { match &body[self.pat_id] {
Pat::Bind { mode, .. } => match mode { Pat::Bind { mode, .. } => match mode {
BindingAnnotation::Mutable | BindingAnnotation::RefMut => true, BindingAnnotation::Mutable | BindingAnnotation::RefMut => true,
@ -1070,7 +951,7 @@ impl Local {
} }
pub fn source(self, db: &impl HirDatabase) -> Source<Either<ast::BindPat, ast::SelfParam>> { pub fn source(self, db: &impl HirDatabase) -> Source<Either<ast::BindPat, ast::SelfParam>> {
let source_map = self.parent.body_source_map(db); let (_body, source_map) = db.body_with_source_map(self.parent.into());
let src = source_map.pat_syntax(self.pat_id).unwrap(); // Hmm... let src = source_map.pat_syntax(self.pat_id).unwrap(); // Hmm...
let root = src.file_syntax(db); let root = src.file_syntax(db);
src.map(|ast| ast.map(|it| it.cast().unwrap().to_node(&root), |it| it.to_node(&root))) src.map(|ast| ast.map(|it| it.cast().unwrap().to_node(&root), |it| it.to_node(&root)))
@ -1088,6 +969,41 @@ pub struct ImplBlock {
pub(crate) id: ImplId, pub(crate) id: ImplId,
} }
impl ImplBlock {
pub fn target_trait(&self, db: &impl DefDatabase) -> Option<TypeRef> {
db.impl_data(self.id).target_trait.clone()
}
pub fn target_type(&self, db: &impl DefDatabase) -> TypeRef {
db.impl_data(self.id).target_type.clone()
}
pub fn target_ty(&self, db: &impl HirDatabase) -> Ty {
Ty::from_hir(db, &self.id.resolver(db), &self.target_type(db))
}
pub fn target_trait_ref(&self, db: &impl HirDatabase) -> Option<TraitRef> {
let target_ty = self.target_ty(db);
TraitRef::from_hir(db, &self.id.resolver(db), &self.target_trait(db)?, Some(target_ty))
}
pub fn items(&self, db: &impl DefDatabase) -> Vec<AssocItem> {
db.impl_data(self.id).items.iter().map(|it| (*it).into()).collect()
}
pub fn is_negative(&self, db: &impl DefDatabase) -> bool {
db.impl_data(self.id).is_negative
}
pub fn module(&self, db: &impl DefDatabase) -> Module {
self.id.module(db).into()
}
pub fn krate(&self, db: &impl DefDatabase) -> Crate {
Crate { crate_id: self.module(db).id.krate }
}
}
/// For IDE only /// For IDE only
pub enum ScopeDef { pub enum ScopeDef {
ModuleDef(ModuleDef), ModuleDef(ModuleDef),
@ -1105,8 +1021,56 @@ impl From<PerNs> for ScopeDef {
.or_else(|| def.take_values()) .or_else(|| def.take_values())
.map(|module_def_id| ScopeDef::ModuleDef(module_def_id.into())) .map(|module_def_id| ScopeDef::ModuleDef(module_def_id.into()))
.or_else(|| { .or_else(|| {
def.get_macros().map(|macro_def_id| ScopeDef::MacroDef(macro_def_id.into())) def.take_macros().map(|macro_def_id| ScopeDef::MacroDef(macro_def_id.into()))
}) })
.unwrap_or(ScopeDef::Unknown) .unwrap_or(ScopeDef::Unknown)
} }
} }
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum AttrDef {
Module(Module),
StructField(StructField),
Adt(Adt),
Function(Function),
EnumVariant(EnumVariant),
Static(Static),
Const(Const),
Trait(Trait),
TypeAlias(TypeAlias),
MacroDef(MacroDef),
}
impl_froms!(
AttrDef: Module,
StructField,
Adt(Struct, Enum, Union),
EnumVariant,
Static,
Const,
Function,
Trait,
TypeAlias,
MacroDef
);
pub trait HasAttrs {
fn attrs(self, db: &impl DefDatabase) -> Attrs;
}
impl<T: Into<AttrDef>> HasAttrs for T {
fn attrs(self, db: &impl DefDatabase) -> Attrs {
let def: AttrDef = self.into();
db.attrs(def.into())
}
}
pub trait Docs {
fn docs(&self, db: &impl HirDatabase) -> Option<Documentation>;
}
impl<T: Into<AttrDef> + Copy> Docs for T {
fn docs(&self, db: &impl HirDatabase) -> Option<Documentation> {
let def: AttrDef = (*self).into();
db.documentation(def.into())
}
}

View file

@ -1,92 +0,0 @@
//! FIXME: write short doc here
use crate::{
db::{AstDatabase, DefDatabase, HirDatabase},
Adt, Const, Enum, EnumVariant, FieldSource, Function, HasSource, MacroDef, Module, Static,
Struct, StructField, Trait, TypeAlias, Union,
};
use hir_def::attr::Attr;
use hir_expand::hygiene::Hygiene;
use ra_syntax::ast;
use std::sync::Arc;
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum AttrDef {
Module(Module),
StructField(StructField),
Adt(Adt),
Function(Function),
EnumVariant(EnumVariant),
Static(Static),
Const(Const),
Trait(Trait),
TypeAlias(TypeAlias),
MacroDef(MacroDef),
}
impl_froms!(
AttrDef: Module,
StructField,
Adt(Struct, Enum, Union),
EnumVariant,
Static,
Const,
Function,
Trait,
TypeAlias,
MacroDef
);
pub trait Attrs {
fn attrs(&self, db: &impl HirDatabase) -> Option<Arc<[Attr]>>;
}
pub(crate) fn attributes_query(
db: &(impl DefDatabase + AstDatabase),
def: AttrDef,
) -> Option<Arc<[Attr]>> {
match def {
AttrDef::Module(it) => {
let src = it.declaration_source(db)?;
let hygiene = Hygiene::new(db, src.file_id);
Attr::from_attrs_owner(&src.value, &hygiene)
}
AttrDef::StructField(it) => match it.source(db).value {
FieldSource::Named(named) => {
let src = it.source(db);
let hygiene = Hygiene::new(db, src.file_id);
Attr::from_attrs_owner(&named, &hygiene)
}
FieldSource::Pos(..) => None,
},
AttrDef::Adt(it) => match it {
Adt::Struct(it) => attrs_from_ast(it, db),
Adt::Enum(it) => attrs_from_ast(it, db),
Adt::Union(it) => attrs_from_ast(it, db),
},
AttrDef::EnumVariant(it) => attrs_from_ast(it, db),
AttrDef::Static(it) => attrs_from_ast(it, db),
AttrDef::Const(it) => attrs_from_ast(it, db),
AttrDef::Function(it) => attrs_from_ast(it, db),
AttrDef::Trait(it) => attrs_from_ast(it, db),
AttrDef::TypeAlias(it) => attrs_from_ast(it, db),
AttrDef::MacroDef(it) => attrs_from_ast(it, db),
}
}
fn attrs_from_ast<T, D>(node: T, db: &D) -> Option<Arc<[Attr]>>
where
T: HasSource,
T::Ast: ast::AttrsOwner,
D: DefDatabase + AstDatabase,
{
let src = node.source(db);
let hygiene = Hygiene::new(db, src.file_id);
Attr::from_attrs_owner(&src.value, &hygiene)
}
impl<T: Into<AttrDef> + Copy> Attrs for T {
fn attrs(&self, db: &impl HirDatabase) -> Option<Arc<[Attr]>> {
db.attrs((*self).into())
}
}

View file

@ -1,97 +0,0 @@
//! FIXME: write short doc here
use std::sync::Arc;
use ra_syntax::ast;
use crate::{
db::{AstDatabase, DefDatabase, HirDatabase},
Adt, Const, Enum, EnumVariant, FieldSource, Function, HasSource, MacroDef, Module, Static,
Struct, StructField, Trait, TypeAlias, Union,
};
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum DocDef {
Module(Module),
StructField(StructField),
Adt(Adt),
EnumVariant(EnumVariant),
Static(Static),
Const(Const),
Function(Function),
Trait(Trait),
TypeAlias(TypeAlias),
MacroDef(MacroDef),
}
impl_froms!(
DocDef: Module,
StructField,
Adt(Struct, Enum, Union),
EnumVariant,
Static,
Const,
Function,
Trait,
TypeAlias,
MacroDef
);
/// Holds documentation
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Documentation(Arc<str>);
impl Documentation {
fn new(s: &str) -> Documentation {
Documentation(s.into())
}
pub fn as_str(&self) -> &str {
&*self.0
}
}
impl Into<String> for Documentation {
fn into(self) -> String {
self.as_str().to_owned()
}
}
pub trait Docs {
fn docs(&self, db: &impl HirDatabase) -> Option<Documentation>;
}
pub(crate) fn docs_from_ast(node: &impl ast::DocCommentsOwner) -> Option<Documentation> {
node.doc_comment_text().map(|it| Documentation::new(&it))
}
pub(crate) fn documentation_query(
db: &(impl DefDatabase + AstDatabase),
def: DocDef,
) -> Option<Documentation> {
match def {
DocDef::Module(it) => docs_from_ast(&it.declaration_source(db)?.value),
DocDef::StructField(it) => match it.source(db).value {
FieldSource::Named(named) => docs_from_ast(&named),
FieldSource::Pos(..) => None,
},
DocDef::Adt(it) => match it {
Adt::Struct(it) => docs_from_ast(&it.source(db).value),
Adt::Enum(it) => docs_from_ast(&it.source(db).value),
Adt::Union(it) => docs_from_ast(&it.source(db).value),
},
DocDef::EnumVariant(it) => docs_from_ast(&it.source(db).value),
DocDef::Static(it) => docs_from_ast(&it.source(db).value),
DocDef::Const(it) => docs_from_ast(&it.source(db).value),
DocDef::Function(it) => docs_from_ast(&it.source(db).value),
DocDef::Trait(it) => docs_from_ast(&it.source(db).value),
DocDef::TypeAlias(it) => docs_from_ast(&it.source(db).value),
DocDef::MacroDef(it) => docs_from_ast(&it.source(db).value),
}
}
impl<T: Into<DocDef> + Copy> Docs for T {
fn docs(&self, db: &impl HirDatabase) -> Option<Documentation> {
db.documentation((*self).into())
}
}

View file

@ -1,172 +1,128 @@
//! FIXME: write short doc here //! FIXME: write short doc here
use hir_def::{HasSource as _, Lookup}; use hir_def::{AstItemDef, HasChildSource, HasSource as _, Lookup, VariantId};
use ra_syntax::ast::{self, AstNode}; use hir_expand::either::Either;
use ra_syntax::ast;
use crate::{ use crate::{
db::{AstDatabase, DefDatabase, HirDatabase}, db::DefDatabase, Const, Enum, EnumVariant, FieldSource, Function, ImplBlock, Import, MacroDef,
ids::AstItemDef, Module, ModuleSource, Static, Struct, StructField, Trait, TypeAlias, Union,
Const, Either, Enum, EnumVariant, FieldSource, Function, HasBody, HirFileId, MacroDef, Module,
ModuleSource, Static, Struct, StructField, Trait, TypeAlias, Union, VariantDef,
}; };
pub use hir_expand::Source; pub use hir_expand::Source;
pub trait HasSource { pub trait HasSource {
type Ast; type Ast;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<Self::Ast>; fn source(self, db: &impl DefDatabase) -> Source<Self::Ast>;
} }
/// NB: Module is !HasSource, because it has two source nodes at the same time: /// NB: Module is !HasSource, because it has two source nodes at the same time:
/// definition and declaration. /// definition and declaration.
impl Module { impl Module {
/// Returns a node which defines this module. That is, a file or a `mod foo {}` with items. /// Returns a node which defines this module. That is, a file or a `mod foo {}` with items.
pub fn definition_source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ModuleSource> { pub fn definition_source(self, db: &impl DefDatabase) -> Source<ModuleSource> {
let def_map = db.crate_def_map(self.id.krate); let def_map = db.crate_def_map(self.id.krate);
let decl_id = def_map[self.id.module_id].declaration; let src = def_map[self.id.module_id].definition_source(db);
let file_id = def_map[self.id.module_id].definition; src.map(|it| match it {
let value = ModuleSource::new(db, file_id, decl_id); Either::A(it) => ModuleSource::SourceFile(it),
let file_id = file_id.map(HirFileId::from).unwrap_or_else(|| decl_id.unwrap().file_id()); Either::B(it) => ModuleSource::Module(it),
Source { file_id, value } })
} }
/// Returns a node which declares this module, either a `mod foo;` or a `mod foo {}`. /// Returns a node which declares this module, either a `mod foo;` or a `mod foo {}`.
/// `None` for the crate root. /// `None` for the crate root.
pub fn declaration_source( pub fn declaration_source(self, db: &impl DefDatabase) -> Option<Source<ast::Module>> {
self,
db: &(impl DefDatabase + AstDatabase),
) -> Option<Source<ast::Module>> {
let def_map = db.crate_def_map(self.id.krate); let def_map = db.crate_def_map(self.id.krate);
let decl = def_map[self.id.module_id].declaration?; def_map[self.id.module_id].declaration_source(db)
let value = decl.to_node(db);
Some(Source { file_id: decl.file_id(), value })
} }
} }
impl HasSource for StructField { impl HasSource for StructField {
type Ast = FieldSource; type Ast = FieldSource;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<FieldSource> { fn source(self, db: &impl DefDatabase) -> Source<FieldSource> {
let var_data = self.parent.variant_data(db); let var = VariantId::from(self.parent);
let fields = var_data.fields().unwrap(); let src = var.child_source(db);
let ss; src.map(|it| match it[self.id].clone() {
let es; Either::A(it) => FieldSource::Pos(it),
let (file_id, struct_kind) = match self.parent { Either::B(it) => FieldSource::Named(it),
VariantDef::Struct(s) => { })
ss = s.source(db);
(ss.file_id, ss.value.kind())
}
VariantDef::EnumVariant(e) => {
es = e.source(db);
(es.file_id, es.value.kind())
}
};
let field_sources = match struct_kind {
ast::StructKind::Tuple(fl) => fl.fields().map(|it| FieldSource::Pos(it)).collect(),
ast::StructKind::Named(fl) => fl.fields().map(|it| FieldSource::Named(it)).collect(),
ast::StructKind::Unit => Vec::new(),
};
let value = field_sources
.into_iter()
.zip(fields.iter())
.find(|(_syntax, (id, _))| *id == self.id)
.unwrap()
.0;
Source { file_id, value }
} }
} }
impl HasSource for Struct { impl HasSource for Struct {
type Ast = ast::StructDef; type Ast = ast::StructDef;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::StructDef> { fn source(self, db: &impl DefDatabase) -> Source<ast::StructDef> {
self.id.0.source(db) self.id.0.source(db)
} }
} }
impl HasSource for Union { impl HasSource for Union {
type Ast = ast::StructDef; type Ast = ast::StructDef;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::StructDef> { fn source(self, db: &impl DefDatabase) -> Source<ast::StructDef> {
self.id.0.source(db) self.id.0.source(db)
} }
} }
impl HasSource for Enum { impl HasSource for Enum {
type Ast = ast::EnumDef; type Ast = ast::EnumDef;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::EnumDef> { fn source(self, db: &impl DefDatabase) -> Source<ast::EnumDef> {
self.id.source(db) self.id.source(db)
} }
} }
impl HasSource for EnumVariant { impl HasSource for EnumVariant {
type Ast = ast::EnumVariant; type Ast = ast::EnumVariant;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::EnumVariant> { fn source(self, db: &impl DefDatabase) -> Source<ast::EnumVariant> {
let enum_data = db.enum_data(self.parent.id); self.parent.id.child_source(db).map(|map| map[self.id].clone())
let src = self.parent.id.source(db);
let value = src
.value
.variant_list()
.into_iter()
.flat_map(|it| it.variants())
.zip(enum_data.variants.iter())
.find(|(_syntax, (id, _))| *id == self.id)
.unwrap()
.0;
Source { file_id: src.file_id, value }
} }
} }
impl HasSource for Function { impl HasSource for Function {
type Ast = ast::FnDef; type Ast = ast::FnDef;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::FnDef> { fn source(self, db: &impl DefDatabase) -> Source<ast::FnDef> {
self.id.lookup(db).source(db) self.id.lookup(db).source(db)
} }
} }
impl HasSource for Const { impl HasSource for Const {
type Ast = ast::ConstDef; type Ast = ast::ConstDef;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::ConstDef> { fn source(self, db: &impl DefDatabase) -> Source<ast::ConstDef> {
self.id.lookup(db).source(db) self.id.lookup(db).source(db)
} }
} }
impl HasSource for Static { impl HasSource for Static {
type Ast = ast::StaticDef; type Ast = ast::StaticDef;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::StaticDef> { fn source(self, db: &impl DefDatabase) -> Source<ast::StaticDef> {
self.id.source(db) self.id.lookup(db).source(db)
} }
} }
impl HasSource for Trait { impl HasSource for Trait {
type Ast = ast::TraitDef; type Ast = ast::TraitDef;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::TraitDef> { fn source(self, db: &impl DefDatabase) -> Source<ast::TraitDef> {
self.id.source(db) self.id.source(db)
} }
} }
impl HasSource for TypeAlias { impl HasSource for TypeAlias {
type Ast = ast::TypeAliasDef; type Ast = ast::TypeAliasDef;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::TypeAliasDef> { fn source(self, db: &impl DefDatabase) -> Source<ast::TypeAliasDef> {
self.id.lookup(db).source(db) self.id.lookup(db).source(db)
} }
} }
impl HasSource for MacroDef { impl HasSource for MacroDef {
type Ast = ast::MacroCall; type Ast = ast::MacroCall;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::MacroCall> { fn source(self, db: &impl DefDatabase) -> Source<ast::MacroCall> {
Source { file_id: self.id.ast_id.file_id(), value: self.id.ast_id.to_node(db) } Source { file_id: self.id.ast_id.file_id(), value: self.id.ast_id.to_node(db) }
} }
} }
impl HasSource for ImplBlock {
pub trait HasBodySource: HasBody + HasSource type Ast = ast::ImplBlock;
where fn source(self, db: &impl DefDatabase) -> Source<ast::ImplBlock> {
Self::Ast: AstNode, self.id.source(db)
{
fn expr_source(
self,
db: &impl HirDatabase,
expr_id: crate::expr::ExprId,
) -> Option<Source<Either<ast::Expr, ast::RecordField>>> {
let source_map = self.body_source_map(db);
let source_ptr = source_map.expr_syntax(expr_id)?;
let root = source_ptr.file_syntax(db);
let source = source_ptr.map(|ast| ast.map(|it| it.to_node(&root), |it| it.to_node(&root)));
Some(source)
} }
} }
impl HasSource for Import {
type Ast = Either<ast::UseTree, ast::ExternCrateItem>;
impl<T> HasBodySource for T /// Returns the syntax of the last path segment corresponding to this import
where fn source(self, db: &impl DefDatabase) -> Source<Self::Ast> {
T: HasBody + HasSource, let src = self.parent.definition_source(db);
T::Ast: AstNode, let (_, source_map) = db.raw_items_with_source_map(src.file_id);
{ let root = db.parse_or_expand(src.file_id).unwrap();
let ptr = source_map.get(self.id);
src.with_value(ptr.map(|it| it.to_node(&root), |it| it.to_node(&root)))
}
} }

View file

@ -2,79 +2,46 @@
use std::sync::Arc; use std::sync::Arc;
use hir_def::attr::Attr; use ra_arena::map::ArenaMap;
use ra_db::salsa; use ra_db::salsa;
use ra_syntax::SmolStr;
use crate::{ use crate::{
debug::HirDebugDatabase,
ids,
lang_item::{LangItemTarget, LangItems},
ty::{ ty::{
method_resolution::CrateImplBlocks, method_resolution::CrateImplBlocks,
traits::{AssocTyValue, Impl}, traits::{AssocTyValue, Impl},
CallableDef, FnSig, GenericPredicate, InferenceResult, Namespace, Substs, Ty, TypableDef, CallableDef, FnSig, GenericPredicate, InferenceResult, Namespace, Substs, Ty, TypableDef,
TypeCtor, TypeCtor,
}, },
type_alias::TypeAliasData, Crate, DefWithBody, GenericDef, ImplBlock, Trait,
Const, ConstData, Crate, DefWithBody, FnData, Function, GenericDef, ImplBlock, Module, Static,
StructField, Trait, TypeAlias,
}; };
pub use hir_def::db::{ pub use hir_def::{
BodyQuery, BodyWithSourceMapQuery, CrateDefMapQuery, DefDatabase2, DefDatabase2Storage, db::{
EnumDataQuery, ExprScopesQuery, GenericParamsQuery, ImplDataQuery, InternDatabase, BodyQuery, BodyWithSourceMapQuery, ConstDataQuery, CrateDefMapQuery, CrateLangItemsQuery,
InternDatabaseStorage, RawItemsQuery, RawItemsWithSourceMapQuery, StructDataQuery, DefDatabase, DefDatabaseStorage, DocumentationQuery, EnumDataQuery, ExprScopesQuery,
TraitDataQuery, FunctionDataQuery, GenericParamsQuery, ImplDataQuery, InternDatabase,
InternDatabaseStorage, LangItemQuery, ModuleLangItemsQuery, RawItemsQuery,
RawItemsWithSourceMapQuery, StaticDataQuery, StructDataQuery, TraitDataQuery,
TypeAliasDataQuery,
},
LocalStructFieldId, VariantId,
}; };
pub use hir_expand::db::{ pub use hir_expand::db::{
AstDatabase, AstDatabaseStorage, AstIdMapQuery, MacroArgQuery, MacroDefQuery, MacroExpandQuery, AstDatabase, AstDatabaseStorage, AstIdMapQuery, MacroArgQuery, MacroDefQuery, MacroExpandQuery,
ParseMacroQuery, ParseMacroQuery,
}; };
// This database uses `AstDatabase` internally,
#[salsa::query_group(DefDatabaseStorage)]
#[salsa::requires(AstDatabase)]
pub trait DefDatabase: HirDebugDatabase + DefDatabase2 {
#[salsa::invoke(FnData::fn_data_query)]
fn fn_data(&self, func: Function) -> Arc<FnData>;
#[salsa::invoke(TypeAliasData::type_alias_data_query)]
fn type_alias_data(&self, typ: TypeAlias) -> Arc<TypeAliasData>;
#[salsa::invoke(ConstData::const_data_query)]
fn const_data(&self, konst: Const) -> Arc<ConstData>;
#[salsa::invoke(ConstData::static_data_query)]
fn static_data(&self, konst: Static) -> Arc<ConstData>;
#[salsa::invoke(LangItems::module_lang_items_query)]
fn module_lang_items(&self, module: Module) -> Option<Arc<LangItems>>;
#[salsa::invoke(LangItems::crate_lang_items_query)]
fn crate_lang_items(&self, krate: Crate) -> Arc<LangItems>;
#[salsa::invoke(LangItems::lang_item_query)]
fn lang_item(&self, start_crate: Crate, item: SmolStr) -> Option<LangItemTarget>;
#[salsa::invoke(crate::code_model::docs::documentation_query)]
fn documentation(&self, def: crate::DocDef) -> Option<crate::Documentation>;
#[salsa::invoke(crate::code_model::attrs::attributes_query)]
fn attrs(&self, def: crate::AttrDef) -> Option<Arc<[Attr]>>;
}
#[salsa::query_group(HirDatabaseStorage)] #[salsa::query_group(HirDatabaseStorage)]
#[salsa::requires(salsa::Database)] #[salsa::requires(salsa::Database)]
pub trait HirDatabase: DefDatabase + AstDatabase { pub trait HirDatabase: DefDatabase {
#[salsa::invoke(crate::ty::infer_query)] #[salsa::invoke(crate::ty::infer_query)]
fn infer(&self, def: DefWithBody) -> Arc<InferenceResult>; fn infer(&self, def: DefWithBody) -> Arc<InferenceResult>;
#[salsa::invoke(crate::ty::type_for_def)] #[salsa::invoke(crate::ty::type_for_def)]
fn type_for_def(&self, def: TypableDef, ns: Namespace) -> Ty; fn type_for_def(&self, def: TypableDef, ns: Namespace) -> Ty;
#[salsa::invoke(crate::ty::type_for_field)] #[salsa::invoke(crate::ty::field_types_query)]
fn type_for_field(&self, field: StructField) -> Ty; fn field_types(&self, var: VariantId) -> Arc<ArenaMap<LocalStructFieldId, Ty>>;
#[salsa::invoke(crate::ty::callable_item_sig)] #[salsa::invoke(crate::ty::callable_item_sig)]
fn callable_item_signature(&self, def: CallableDef) -> FnSig; fn callable_item_signature(&self, def: CallableDef) -> FnSig;
@ -108,11 +75,14 @@ pub trait HirDatabase: DefDatabase + AstDatabase {
// Interned IDs for Chalk integration // Interned IDs for Chalk integration
#[salsa::interned] #[salsa::interned]
fn intern_type_ctor(&self, type_ctor: TypeCtor) -> ids::TypeCtorId; fn intern_type_ctor(&self, type_ctor: TypeCtor) -> crate::ty::TypeCtorId;
#[salsa::interned] #[salsa::interned]
fn intern_chalk_impl(&self, impl_: Impl) -> ids::GlobalImplId; fn intern_chalk_impl(&self, impl_: Impl) -> crate::ty::traits::GlobalImplId;
#[salsa::interned] #[salsa::interned]
fn intern_assoc_ty_value(&self, assoc_ty_value: AssocTyValue) -> ids::AssocTyValueId; fn intern_assoc_ty_value(
&self,
assoc_ty_value: AssocTyValue,
) -> crate::ty::traits::AssocTyValueId;
#[salsa::invoke(crate::ty::traits::chalk::associated_ty_data_query)] #[salsa::invoke(crate::ty::traits::chalk::associated_ty_data_query)]
fn associated_ty_data( fn associated_ty_data(

View file

@ -1,3 +1,5 @@
//! XXX: This does not work at the moment.
//!
//! printf debugging infrastructure for rust-analyzer. //! printf debugging infrastructure for rust-analyzer.
//! //!
//! When you print a hir type, like a module, using `eprintln!("{:?}", module)`, //! When you print a hir type, like a module, using `eprintln!("{:?}", module)`,
@ -20,9 +22,10 @@
use std::fmt; use std::fmt;
use hir_expand::HirFileId;
use ra_db::{CrateId, FileId}; use ra_db::{CrateId, FileId};
use crate::{db::HirDatabase, Crate, HirFileId, Module, Name}; use crate::{db::HirDatabase, Crate, Module, Name};
impl Crate { impl Crate {
pub fn debug(self, db: &impl HirDebugDatabase) -> impl fmt::Debug + '_ { pub fn debug(self, db: &impl HirDebugDatabase) -> impl fmt::Debug + '_ {

View file

@ -2,9 +2,10 @@
use std::any::Any; use std::any::Any;
use hir_expand::HirFileId;
use ra_syntax::{ast, AstNode, AstPtr, SyntaxNodePtr}; use ra_syntax::{ast, AstNode, AstPtr, SyntaxNodePtr};
use crate::{db::AstDatabase, HirFileId, Name, Source}; use crate::{db::AstDatabase, Name, Source};
pub use hir_def::diagnostics::UnresolvedModule; pub use hir_def::diagnostics::UnresolvedModule;
pub use hir_expand::diagnostics::{AstDiagnostic, Diagnostic, DiagnosticSink}; pub use hir_expand::diagnostics::{AstDiagnostic, Diagnostic, DiagnosticSink};

View file

@ -44,15 +44,15 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
pub(crate) fn validate_body(&mut self, db: &impl HirDatabase) { pub(crate) fn validate_body(&mut self, db: &impl HirDatabase) {
let body = self.func.body(db); let body = self.func.body(db);
for e in body.exprs() { for e in body.exprs.iter() {
if let (id, Expr::RecordLit { path, fields, spread }) = e { if let (id, Expr::RecordLit { path, fields, spread }) = e {
self.validate_record_literal(id, path, fields, *spread, db); self.validate_record_literal(id, path, fields, *spread, db);
} }
} }
let body_expr = &body[body.body_expr()]; let body_expr = &body[body.body_expr];
if let Expr::Block { statements: _, tail: Some(t) } = body_expr { if let Expr::Block { statements: _, tail: Some(t) } = body_expr {
self.validate_results_in_tail_expr(body.body_expr(), *t, db); self.validate_results_in_tail_expr(body.body_expr, *t, db);
} }
} }

View file

@ -4,14 +4,14 @@
//! are splitting the hir. //! are splitting the hir.
use hir_def::{ use hir_def::{
AdtId, AssocItemId, ConstId, DefWithBodyId, EnumId, EnumVariantId, FunctionId, GenericDefId, AdtId, AssocItemId, AttrDefId, ConstId, DefWithBodyId, EnumId, EnumVariantId, FunctionId,
ModuleDefId, StaticId, StructId, TypeAliasId, UnionId, GenericDefId, ModuleDefId, StaticId, StructFieldId, StructId, TypeAliasId, UnionId, VariantId,
}; };
use crate::{ use crate::{
ty::{CallableDef, TypableDef}, ty::{CallableDef, TypableDef},
Adt, AssocItem, Const, Crate, DefWithBody, EnumVariant, Function, GenericDef, ModuleDef, Adt, AssocItem, AttrDef, Const, Crate, DefWithBody, EnumVariant, Function, GenericDef,
Static, TypeAlias, ModuleDef, Static, StructField, TypeAlias, VariantDef,
}; };
impl From<ra_db::CrateId> for Crate { impl From<ra_db::CrateId> for Crate {
@ -70,6 +70,12 @@ impl From<EnumVariantId> for EnumVariant {
} }
} }
impl From<EnumVariant> for EnumVariantId {
fn from(def: EnumVariant) -> Self {
EnumVariantId { parent: def.parent.id, local_id: def.id }
}
}
impl From<ModuleDefId> for ModuleDef { impl From<ModuleDefId> for ModuleDef {
fn from(id: ModuleDefId) -> Self { fn from(id: ModuleDefId) -> Self {
match id { match id {
@ -219,3 +225,35 @@ impl From<CallableDef> for GenericDefId {
} }
} }
} }
impl From<VariantDef> for VariantId {
fn from(def: VariantDef) -> Self {
match def {
VariantDef::Struct(it) => VariantId::StructId(it.id),
VariantDef::EnumVariant(it) => VariantId::EnumVariantId(it.into()),
}
}
}
impl From<StructField> for StructFieldId {
fn from(def: StructField) -> Self {
StructFieldId { parent: def.parent.into(), local_id: def.id }
}
}
impl From<AttrDef> for AttrDefId {
fn from(def: AttrDef) -> Self {
match def {
AttrDef::Module(it) => AttrDefId::ModuleId(it.id),
AttrDef::StructField(it) => AttrDefId::StructFieldId(it.into()),
AttrDef::Adt(it) => AttrDefId::AdtId(it.into()),
AttrDef::Function(it) => AttrDefId::FunctionId(it.id),
AttrDef::EnumVariant(it) => AttrDefId::EnumVariantId(it.into()),
AttrDef::Static(it) => AttrDefId::StaticId(it.id),
AttrDef::Const(it) => AttrDefId::ConstId(it.id),
AttrDef::Trait(it) => AttrDefId::TraitId(it.id),
AttrDef::TypeAlias(it) => AttrDefId::TypeAliasId(it.id),
AttrDef::MacroDef(it) => AttrDefId::MacroDefId(it.id),
}
}
}

View file

@ -1,6 +1,6 @@
//! FIXME: write short doc here //! FIXME: write short doc here
use hir_def::{ModuleId, StructId, StructOrUnionId, UnionId}; use hir_def::{AstItemDef, LocationCtx, ModuleId, StructId, StructOrUnionId, UnionId};
use hir_expand::{name::AsName, AstId, MacroDefId, MacroDefKind}; use hir_expand::{name::AsName, AstId, MacroDefId, MacroDefKind};
use ra_syntax::{ use ra_syntax::{
ast::{self, AstNode, NameOwner}, ast::{self, AstNode, NameOwner},
@ -9,10 +9,9 @@ use ra_syntax::{
use crate::{ use crate::{
db::{AstDatabase, DefDatabase, HirDatabase}, db::{AstDatabase, DefDatabase, HirDatabase},
ids::{AstItemDef, LocationCtx}, AssocItem, Const, DefWithBody, Enum, EnumVariant, FieldSource, Function, HasSource, ImplBlock,
AssocItem, Const, DefWithBody, Enum, EnumVariant, FieldSource, Function, HasBody, HasSource, Local, MacroDef, Module, ModuleDef, ModuleSource, Source, Static, Struct, StructField, Trait,
ImplBlock, Local, MacroDef, Module, ModuleDef, ModuleSource, Source, Static, Struct, TypeAlias, Union, VariantDef,
StructField, Trait, TypeAlias, Union, VariantDef,
}; };
pub trait FromSource: Sized { pub trait FromSource: Sized {
@ -105,10 +104,21 @@ impl FromSource for Const {
impl FromSource for Static { impl FromSource for Static {
type Ast = ast::StaticDef; type Ast = ast::StaticDef;
fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self> { fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self> {
let id = from_source(db, src)?; let module = match Container::find(db, src.as_ref().map(|it| it.syntax()))? {
Some(Static { id }) Container::Module(it) => it,
Container::Trait(_) | Container::ImplBlock(_) => return None,
};
module
.declarations(db)
.into_iter()
.filter_map(|it| match it {
ModuleDef::Static(it) => Some(it),
_ => None,
})
.find(|it| same_source(&it.source(db), &src))
} }
} }
impl FromSource for TypeAlias { impl FromSource for TypeAlias {
type Ast = ast::TypeAliasDef; type Ast = ast::TypeAliasDef;
fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self> { fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self> {
@ -190,8 +200,7 @@ impl FromSource for StructField {
variant_def variant_def
.variant_data(db) .variant_data(db)
.fields() .fields()
.into_iter() .iter()
.flat_map(|it| it.iter())
.map(|(id, _)| StructField { parent: variant_def, id }) .map(|(id, _)| StructField { parent: variant_def, id })
.find(|f| f.source(db) == src) .find(|f| f.source(db) == src)
} }
@ -211,7 +220,7 @@ impl Local {
}; };
Some(res) Some(res)
})?; })?;
let source_map = parent.body_source_map(db); let (_body, source_map) = db.body_with_source_map(parent.into());
let src = src.map(ast::Pat::from); let src = src.map(ast::Pat::from);
let pat_id = source_map.node_pat(src.as_ref())?; let pat_id = source_map.node_pat(src.as_ref())?;
Some(Local { parent, pat_id }) Some(Local { parent, pat_id })
@ -272,7 +281,9 @@ where
let module_src = ModuleSource::from_child_node(db, src.as_ref().map(|it| it.syntax())); let module_src = ModuleSource::from_child_node(db, src.as_ref().map(|it| it.syntax()));
let module = Module::from_definition(db, Source::new(src.file_id, module_src))?; let module = Module::from_definition(db, Source::new(src.file_id, module_src))?;
let ctx = LocationCtx::new(db, module.id, src.file_id); let ctx = LocationCtx::new(db, module.id, src.file_id);
Some(DEF::from_ast(ctx, &src.value)) let items = db.ast_id_map(src.file_id);
let item_id = items.ast_id(&src.value);
Some(DEF::from_ast_id(ctx, item_id))
} }
enum Container { enum Container {

View file

@ -1,45 +0,0 @@
//! hir makes heavy use of ids: integer (u32) handlers to various things. You
//! can think of id as a pointer (but without a lifetime) or a file descriptor
//! (but for hir objects).
//!
//! This module defines a bunch of ids we are using. The most important ones are
//! probably `HirFileId` and `DefId`.
use ra_db::salsa;
pub use hir_def::{
AstItemDef, ConstId, EnumId, FunctionId, ItemLoc, LocationCtx, StaticId, StructId, TraitId,
TypeAliasId,
};
pub use hir_expand::{HirFileId, MacroCallId, MacroCallLoc, MacroDefId, MacroFile, MacroFileKind};
macro_rules! impl_intern_key {
($name:ident) => {
impl salsa::InternKey for $name {
fn from_intern_id(v: salsa::InternId) -> Self {
$name(v)
}
fn as_intern_id(&self) -> salsa::InternId {
self.0
}
}
};
}
/// This exists just for Chalk, because Chalk just has a single `StructId` where
/// we have different kinds of ADTs, primitive types and special type
/// constructors like tuples and function pointers.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct TypeCtorId(salsa::InternId);
impl_intern_key!(TypeCtorId);
/// This exists just for Chalk, because our ImplIds are only unique per module.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct GlobalImplId(salsa::InternId);
impl_intern_key!(GlobalImplId);
/// This exists just for Chalk, because it needs a unique ID for each associated
/// type value in an impl (even synthetic ones).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct AssocTyValueId(salsa::InternId);
impl_intern_key!(AssocTyValueId);

View file

@ -1,52 +0,0 @@
//! FIXME: write short doc here
use hir_def::{resolver::HasResolver, type_ref::TypeRef, AstItemDef};
use ra_syntax::ast;
use crate::{
db::{AstDatabase, DefDatabase, HirDatabase},
ty::Ty,
AssocItem, Crate, HasSource, ImplBlock, Module, Source, TraitRef,
};
impl HasSource for ImplBlock {
type Ast = ast::ImplBlock;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::ImplBlock> {
self.id.source(db)
}
}
impl ImplBlock {
pub fn target_trait(&self, db: &impl DefDatabase) -> Option<TypeRef> {
db.impl_data(self.id).target_trait().cloned()
}
pub fn target_type(&self, db: &impl DefDatabase) -> TypeRef {
db.impl_data(self.id).target_type().clone()
}
pub fn target_ty(&self, db: &impl HirDatabase) -> Ty {
Ty::from_hir(db, &self.id.resolver(db), &self.target_type(db))
}
pub fn target_trait_ref(&self, db: &impl HirDatabase) -> Option<TraitRef> {
let target_ty = self.target_ty(db);
TraitRef::from_hir(db, &self.id.resolver(db), &self.target_trait(db)?, Some(target_ty))
}
pub fn items(&self, db: &impl DefDatabase) -> Vec<AssocItem> {
db.impl_data(self.id).items().iter().map(|it| (*it).into()).collect()
}
pub fn is_negative(&self, db: &impl DefDatabase) -> bool {
db.impl_data(self.id).is_negative()
}
pub fn module(&self, db: &impl DefDatabase) -> Module {
self.id.module(db).into()
}
pub fn krate(&self, db: &impl DefDatabase) -> Crate {
Crate { crate_id: self.module(db).id.krate }
}
}

View file

@ -1,160 +0,0 @@
//! FIXME: write short doc here
use rustc_hash::FxHashMap;
use std::sync::Arc;
use ra_syntax::{ast::AttrsOwner, SmolStr};
use crate::{
db::{AstDatabase, DefDatabase, HirDatabase},
Adt, Crate, Enum, Function, HasSource, ImplBlock, Module, ModuleDef, Static, Struct, Trait,
};
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum LangItemTarget {
Enum(Enum),
Function(Function),
ImplBlock(ImplBlock),
Static(Static),
Struct(Struct),
Trait(Trait),
}
impl LangItemTarget {
pub(crate) fn krate(&self, db: &impl HirDatabase) -> Option<Crate> {
Some(match self {
LangItemTarget::Enum(e) => e.module(db).krate(),
LangItemTarget::Function(f) => f.module(db).krate(),
LangItemTarget::ImplBlock(i) => i.krate(db),
LangItemTarget::Static(s) => s.module(db).krate(),
LangItemTarget::Struct(s) => s.module(db).krate(),
LangItemTarget::Trait(t) => t.module(db).krate(),
})
}
}
#[derive(Default, Debug, Clone, PartialEq, Eq)]
pub struct LangItems {
items: FxHashMap<SmolStr, LangItemTarget>,
}
impl LangItems {
pub fn target<'a>(&'a self, item: &str) -> Option<&'a LangItemTarget> {
self.items.get(item)
}
/// Salsa query. This will look for lang items in a specific crate.
pub(crate) fn crate_lang_items_query(
db: &(impl DefDatabase + AstDatabase),
krate: Crate,
) -> Arc<LangItems> {
let mut lang_items = LangItems::default();
if let Some(module) = krate.root_module(db) {
lang_items.collect_lang_items_recursive(db, module);
}
Arc::new(lang_items)
}
pub(crate) fn module_lang_items_query(
db: &(impl DefDatabase + AstDatabase),
module: Module,
) -> Option<Arc<LangItems>> {
let mut lang_items = LangItems::default();
lang_items.collect_lang_items(db, module);
if lang_items.items.is_empty() {
None
} else {
Some(Arc::new(lang_items))
}
}
/// Salsa query. Look for a lang item, starting from the specified crate and recursively
/// traversing its dependencies.
pub(crate) fn lang_item_query(
db: &impl DefDatabase,
start_crate: Crate,
item: SmolStr,
) -> Option<LangItemTarget> {
let lang_items = db.crate_lang_items(start_crate);
let start_crate_target = lang_items.items.get(&item);
if let Some(target) = start_crate_target {
Some(*target)
} else {
for dep in start_crate.dependencies(db) {
let dep_crate = dep.krate;
let dep_target = db.lang_item(dep_crate, item.clone());
if dep_target.is_some() {
return dep_target;
}
}
None
}
}
fn collect_lang_items(&mut self, db: &(impl DefDatabase + AstDatabase), module: Module) {
// Look for impl targets
for impl_block in module.impl_blocks(db) {
let src = impl_block.source(db);
if let Some(lang_item_name) = lang_item_name(&src.value) {
self.items
.entry(lang_item_name)
.or_insert_with(|| LangItemTarget::ImplBlock(impl_block));
}
}
for def in module.declarations(db) {
match def {
ModuleDef::Trait(trait_) => {
self.collect_lang_item(db, trait_, LangItemTarget::Trait)
}
ModuleDef::Adt(Adt::Enum(e)) => self.collect_lang_item(db, e, LangItemTarget::Enum),
ModuleDef::Adt(Adt::Struct(s)) => {
self.collect_lang_item(db, s, LangItemTarget::Struct)
}
ModuleDef::Function(f) => self.collect_lang_item(db, f, LangItemTarget::Function),
ModuleDef::Static(s) => self.collect_lang_item(db, s, LangItemTarget::Static),
_ => {}
}
}
}
fn collect_lang_items_recursive(
&mut self,
db: &(impl DefDatabase + AstDatabase),
module: Module,
) {
if let Some(module_lang_items) = db.module_lang_items(module) {
self.items.extend(module_lang_items.items.iter().map(|(k, v)| (k.clone(), *v)))
}
// Look for lang items in the children
for child in module.children(db) {
self.collect_lang_items_recursive(db, child);
}
}
fn collect_lang_item<T, N>(
&mut self,
db: &(impl DefDatabase + AstDatabase),
item: T,
constructor: fn(T) -> LangItemTarget,
) where
T: Copy + HasSource<Ast = N>,
N: AttrsOwner,
{
let node = item.source(db).value;
if let Some(lang_item_name) = lang_item_name(&node) {
self.items.entry(lang_item_name).or_insert_with(|| constructor(item));
}
}
}
fn lang_item_name<T: AttrsOwner>(node: &T) -> Option<SmolStr> {
node.attrs()
.filter_map(|a| a.as_simple_key_value())
.filter(|(key, _)| key == "lang")
.map(|(_, val)| val)
.nth(0)
}

View file

@ -31,12 +31,8 @@ pub mod debug;
pub mod db; pub mod db;
pub mod source_binder; pub mod source_binder;
mod ids;
mod type_alias;
mod ty; mod ty;
mod impl_block;
mod expr; mod expr;
mod lang_item;
pub mod diagnostics; pub mod diagnostics;
mod util; mod util;
@ -52,17 +48,13 @@ mod marks;
pub use crate::{ pub use crate::{
code_model::{ code_model::{
attrs::{AttrDef, Attrs}, src::HasSource, Adt, AssocItem, AttrDef, Const, Container, Crate, CrateDependency,
docs::{DocDef, Docs, Documentation}, DefWithBody, Docs, Enum, EnumVariant, FieldSource, Function, GenericDef, GenericParam,
src::{HasBodySource, HasSource}, HasAttrs, ImplBlock, Import, Local, MacroDef, Module, ModuleDef, ModuleSource, ScopeDef,
Adt, AssocItem, Const, ConstData, Container, Crate, CrateDependency, DefWithBody, Enum, Static, Struct, StructField, Trait, TypeAlias, Union, VariantDef,
EnumVariant, FieldSource, FnData, Function, GenericDef, GenericParam, HasBody, ImplBlock,
Local, MacroDef, Module, ModuleDef, ModuleSource, ScopeDef, Static, Struct, StructField,
Trait, TypeAlias, Union, VariantDef,
}, },
expr::ExprScopes, expr::ExprScopes,
from_source::FromSource, from_source::FromSource,
ids::{HirFileId, MacroCallId, MacroCallLoc, MacroDefId, MacroFile},
source_binder::{PathResolution, ScopeEntryWithSyntax, SourceAnalyzer}, source_binder::{PathResolution, ScopeEntryWithSyntax, SourceAnalyzer},
ty::{ ty::{
display::HirDisplay, display::HirDisplay,
@ -73,8 +65,10 @@ pub use crate::{
pub use hir_def::{ pub use hir_def::{
builtin_type::BuiltinType, builtin_type::BuiltinType,
nameres::{per_ns::PerNs, raw::ImportId}, docs::Documentation,
path::{Path, PathKind}, path::{Path, PathKind},
type_ref::Mutability, type_ref::Mutability,
}; };
pub use hir_expand::{either::Either, name::Name, Source}; pub use hir_expand::{
either::Either, name::Name, HirFileId, MacroCallId, MacroCallLoc, MacroDefId, MacroFile, Source,
};

View file

@ -13,7 +13,9 @@ use hir_def::{
resolver::{self, resolver_for_scope, HasResolver, Resolver, TypeNs, ValueNs}, resolver::{self, resolver_for_scope, HasResolver, Resolver, TypeNs, ValueNs},
DefWithBodyId, DefWithBodyId,
}; };
use hir_expand::{name::AsName, AstId, MacroCallId, MacroCallLoc, MacroFileKind, Source}; use hir_expand::{
name::AsName, AstId, HirFileId, MacroCallId, MacroCallLoc, MacroFileKind, Source,
};
use ra_syntax::{ use ra_syntax::{
ast::{self, AstNode}, ast::{self, AstNode},
match_ast, AstPtr, match_ast, AstPtr,
@ -24,11 +26,9 @@ use ra_syntax::{
use crate::{ use crate::{
db::HirDatabase, db::HirDatabase,
expr::{BodySourceMap, ExprScopes, ScopeId}, expr::{BodySourceMap, ExprScopes, ScopeId},
ids::LocationCtx,
ty::method_resolution::{self, implements_trait}, ty::method_resolution::{self, implements_trait},
Adt, AssocItem, Const, DefWithBody, Either, Enum, EnumVariant, FromSource, Function, Adt, AssocItem, Const, DefWithBody, Either, Enum, EnumVariant, FromSource, Function,
GenericParam, HasBody, HirFileId, Local, MacroDef, Module, Name, Path, ScopeDef, Static, GenericParam, Local, MacroDef, Name, Path, ScopeDef, Static, Struct, Trait, Ty, TypeAlias,
Struct, Trait, Ty, TypeAlias,
}; };
fn try_get_resolver_for_node(db: &impl HirDatabase, node: Source<&SyntaxNode>) -> Option<Resolver> { fn try_get_resolver_for_node(db: &impl HirDatabase, node: Source<&SyntaxNode>) -> Option<Resolver> {
@ -67,16 +67,12 @@ fn def_with_body_from_child_node(
db: &impl HirDatabase, db: &impl HirDatabase,
child: Source<&SyntaxNode>, child: Source<&SyntaxNode>,
) -> Option<DefWithBody> { ) -> Option<DefWithBody> {
let module_source = crate::ModuleSource::from_child_node(db, child);
let module = Module::from_definition(db, Source::new(child.file_id, module_source))?;
let ctx = LocationCtx::new(db, module.id, child.file_id);
child.value.ancestors().find_map(|node| { child.value.ancestors().find_map(|node| {
match_ast! { match_ast! {
match node { match node {
ast::FnDef(def) => { return Function::from_source(db, child.with_value(def)).map(DefWithBody::from); }, ast::FnDef(def) => { return Function::from_source(db, child.with_value(def)).map(DefWithBody::from); },
ast::ConstDef(def) => { return Const::from_source(db, child.with_value(def)).map(DefWithBody::from); }, ast::ConstDef(def) => { return Const::from_source(db, child.with_value(def)).map(DefWithBody::from); },
ast::StaticDef(def) => { Some(Static { id: ctx.to_def(&def) }.into()) }, ast::StaticDef(def) => { return Static::from_source(db, child.with_value(def)).map(DefWithBody::from); },
_ => { None }, _ => { None },
} }
} }
@ -158,8 +154,8 @@ impl SourceAnalyzer {
) -> SourceAnalyzer { ) -> SourceAnalyzer {
let def_with_body = def_with_body_from_child_node(db, node); let def_with_body = def_with_body_from_child_node(db, node);
if let Some(def) = def_with_body { if let Some(def) = def_with_body {
let source_map = def.body_source_map(db); let (_body, source_map) = db.body_with_source_map(def.into());
let scopes = def.expr_scopes(db); let scopes = db.expr_scopes(def.into());
let scope = match offset { let scope = match offset {
None => scope_for(&scopes, &source_map, node), None => scope_for(&scopes, &source_map, node),
Some(offset) => scope_for_offset(&scopes, &source_map, node.with_value(offset)), Some(offset) => scope_for_offset(&scopes, &source_map, node.with_value(offset)),
@ -169,7 +165,7 @@ impl SourceAnalyzer {
resolver, resolver,
body_owner: Some(def), body_owner: Some(def),
body_source_map: Some(source_map), body_source_map: Some(source_map),
infer: Some(def.infer(db)), infer: Some(db.infer(def)),
scopes: Some(scopes), scopes: Some(scopes),
file_id: node.file_id, file_id: node.file_id,
} }
@ -219,6 +215,11 @@ impl SourceAnalyzer {
self.infer.as_ref()?.field_resolution(expr_id) self.infer.as_ref()?.field_resolution(expr_id)
} }
pub fn resolve_record_field(&self, field: &ast::RecordField) -> Option<crate::StructField> {
let expr_id = self.expr_id(&field.expr()?)?;
self.infer.as_ref()?.record_field_resolution(expr_id)
}
pub fn resolve_record_literal(&self, record_lit: &ast::RecordLit) -> Option<crate::VariantDef> { pub fn resolve_record_literal(&self, record_lit: &ast::RecordLit) -> Option<crate::VariantDef> {
let expr_id = self.expr_id(&record_lit.clone().into())?; let expr_id = self.expr_id(&record_lit.clone().into())?;
self.infer.as_ref()?.variant_resolution_for_expr(expr_id) self.infer.as_ref()?.variant_resolution_for_expr(expr_id)
@ -544,7 +545,7 @@ fn adjust(
} }
/// Given a `ast::MacroCall`, return what `MacroKindFile` it belongs to. /// Given a `ast::MacroCall`, return what `MacroKindFile` it belongs to.
/// FIXME: Not completed /// FIXME: Not completed
fn to_macro_file_kind(macro_call: &ast::MacroCall) -> MacroFileKind { fn to_macro_file_kind(macro_call: &ast::MacroCall) -> MacroFileKind {
let syn = macro_call.syntax(); let syn = macro_call.syntax();
let parent = match syn.parent() { let parent = match syn.parent() {

View file

@ -2,7 +2,7 @@
use std::{panic, sync::Arc}; use std::{panic, sync::Arc};
use hir_def::{db::DefDatabase2, ModuleId}; use hir_def::{db::DefDatabase, ModuleId};
use hir_expand::diagnostics::DiagnosticSink; use hir_expand::diagnostics::DiagnosticSink;
use parking_lot::Mutex; use parking_lot::Mutex;
use ra_db::{salsa, CrateId, FileId, FileLoader, FileLoaderDelegate, RelativePath, SourceDatabase}; use ra_db::{salsa, CrateId, FileId, FileLoader, FileLoaderDelegate, RelativePath, SourceDatabase};
@ -15,7 +15,6 @@ use crate::{db, debug::HirDebugHelper};
db::InternDatabaseStorage, db::InternDatabaseStorage,
db::AstDatabaseStorage, db::AstDatabaseStorage,
db::DefDatabaseStorage, db::DefDatabaseStorage,
db::DefDatabase2Storage,
db::HirDatabaseStorage db::HirDatabaseStorage
)] )]
#[derive(Debug, Default)] #[derive(Debug, Default)]
@ -81,7 +80,7 @@ impl TestDB {
let crate_graph = self.crate_graph(); let crate_graph = self.crate_graph();
for krate in crate_graph.iter().next() { for krate in crate_graph.iter().next() {
let crate_def_map = self.crate_def_map(krate); let crate_def_map = self.crate_def_map(krate);
for module_id in crate_def_map.modules() { for (module_id, _) in crate_def_map.modules.iter() {
let module_id = ModuleId { krate, module_id }; let module_id = ModuleId { krate, module_id };
let module = crate::Module::from(module_id); let module = crate::Module::from(module_id);
module.diagnostics( module.diagnostics(

View file

@ -18,6 +18,7 @@ use std::sync::Arc;
use std::{fmt, iter, mem}; use std::{fmt, iter, mem};
use hir_def::{generics::GenericParams, AdtId}; use hir_def::{generics::GenericParams, AdtId};
use ra_db::{impl_intern_key, salsa};
use crate::{ use crate::{
db::HirDatabase, expr::ExprId, util::make_mut_slice, Adt, Crate, DefWithBody, FloatTy, db::HirDatabase, expr::ExprId, util::make_mut_slice, Adt, Crate, DefWithBody, FloatTy,
@ -29,8 +30,9 @@ pub(crate) use autoderef::autoderef;
pub(crate) use infer::{infer_query, InferTy, InferenceResult}; pub(crate) use infer::{infer_query, InferTy, InferenceResult};
pub use lower::CallableDef; pub use lower::CallableDef;
pub(crate) use lower::{ pub(crate) use lower::{
callable_item_sig, generic_defaults_query, generic_predicates_for_param_query, callable_item_sig, field_types_query, generic_defaults_query,
generic_predicates_query, type_for_def, type_for_field, Namespace, TypableDef, generic_predicates_for_param_query, generic_predicates_query, type_for_def, Namespace,
TypableDef,
}; };
pub(crate) use traits::{InEnvironment, Obligation, ProjectionPredicate, TraitEnvironment}; pub(crate) use traits::{InEnvironment, Obligation, ProjectionPredicate, TraitEnvironment};
@ -114,6 +116,13 @@ pub enum TypeCtor {
Closure { def: DefWithBody, expr: ExprId }, Closure { def: DefWithBody, expr: ExprId },
} }
/// This exists just for Chalk, because Chalk just has a single `StructId` where
/// we have different kinds of ADTs, primitive types and special type
/// constructors like tuples and function pointers.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct TypeCtorId(salsa::InternId);
impl_intern_key!(TypeCtorId);
impl TypeCtor { impl TypeCtor {
pub fn num_ty_params(self, db: &impl HirDatabase) -> usize { pub fn num_ty_params(self, db: &impl HirDatabase) -> usize {
match self { match self {

View file

@ -5,12 +5,13 @@
use std::iter::successors; use std::iter::successors;
use hir_def::resolver::Resolver; use hir_def::{lang_item::LangItemTarget, resolver::Resolver};
use hir_expand::name; use hir_expand::name;
use log::{info, warn}; use log::{info, warn};
use crate::{db::HirDatabase, Trait};
use super::{traits::Solution, Canonical, Substs, Ty, TypeWalk}; use super::{traits::Solution, Canonical, Substs, Ty, TypeWalk};
use crate::db::HirDatabase;
const AUTODEREF_RECURSION_LIMIT: usize = 10; const AUTODEREF_RECURSION_LIMIT: usize = 10;
@ -41,7 +42,7 @@ fn deref_by_trait(
) -> Option<Canonical<Ty>> { ) -> Option<Canonical<Ty>> {
let krate = resolver.krate()?; let krate = resolver.krate()?;
let deref_trait = match db.lang_item(krate.into(), "deref".into())? { let deref_trait = match db.lang_item(krate.into(), "deref".into())? {
crate::lang_item::LangItemTarget::Trait(t) => t, LangItemTarget::TraitId(t) => Trait::from(t),
_ => return None, _ => return None,
}; };
let target = deref_trait.associated_type_by_name(db, &name::TARGET_TYPE)?; let target = deref_trait.associated_type_by_name(db, &name::TARGET_TYPE)?;

View file

@ -22,6 +22,7 @@ use ena::unify::{InPlaceUnificationTable, NoError, UnifyKey, UnifyValue};
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use hir_def::{ use hir_def::{
data::{ConstData, FunctionData},
path::known, path::known,
resolver::{HasResolver, Resolver, TypeNs}, resolver::{HasResolver, Resolver, TypeNs},
type_ref::{Mutability, TypeRef}, type_ref::{Mutability, TypeRef},
@ -43,8 +44,7 @@ use crate::{
db::HirDatabase, db::HirDatabase,
expr::{BindingAnnotation, Body, ExprId, PatId}, expr::{BindingAnnotation, Body, ExprId, PatId},
ty::infer::diagnostics::InferenceDiagnostic, ty::infer::diagnostics::InferenceDiagnostic,
Adt, AssocItem, ConstData, DefWithBody, FloatTy, FnData, Function, HasBody, IntTy, Path, Adt, AssocItem, DefWithBody, FloatTy, Function, IntTy, Path, StructField, Trait, VariantDef,
StructField, Trait, VariantDef,
}; };
macro_rules! ty_app { macro_rules! ty_app {
@ -68,10 +68,10 @@ pub fn infer_query(db: &impl HirDatabase, def: DefWithBody) -> Arc<InferenceResu
let resolver = DefWithBodyId::from(def).resolver(db); let resolver = DefWithBodyId::from(def).resolver(db);
let mut ctx = InferenceContext::new(db, def, resolver); let mut ctx = InferenceContext::new(db, def, resolver);
match def { match &def {
DefWithBody::Const(ref c) => ctx.collect_const(&c.data(db)), DefWithBody::Const(c) => ctx.collect_const(&db.const_data(c.id)),
DefWithBody::Function(ref f) => ctx.collect_fn(&f.data(db)), DefWithBody::Function(f) => ctx.collect_fn(&db.function_data(f.id)),
DefWithBody::Static(ref s) => ctx.collect_const(&s.data(db)), DefWithBody::Static(s) => ctx.collect_const(&db.static_data(s.id)),
} }
ctx.infer_body(); ctx.infer_body();
@ -125,6 +125,8 @@ pub struct InferenceResult {
method_resolutions: FxHashMap<ExprId, Function>, method_resolutions: FxHashMap<ExprId, Function>,
/// For each field access expr, records the field it resolves to. /// For each field access expr, records the field it resolves to.
field_resolutions: FxHashMap<ExprId, StructField>, field_resolutions: FxHashMap<ExprId, StructField>,
/// For each field in record literal, records the field it resolves to.
record_field_resolutions: FxHashMap<ExprId, StructField>,
/// For each struct literal, records the variant it resolves to. /// For each struct literal, records the variant it resolves to.
variant_resolutions: FxHashMap<ExprOrPatId, VariantDef>, variant_resolutions: FxHashMap<ExprOrPatId, VariantDef>,
/// For each associated item record what it resolves to /// For each associated item record what it resolves to
@ -142,6 +144,9 @@ impl InferenceResult {
pub fn field_resolution(&self, expr: ExprId) -> Option<StructField> { pub fn field_resolution(&self, expr: ExprId) -> Option<StructField> {
self.field_resolutions.get(&expr).copied() self.field_resolutions.get(&expr).copied()
} }
pub fn record_field_resolution(&self, expr: ExprId) -> Option<StructField> {
self.record_field_resolutions.get(&expr).copied()
}
pub fn variant_resolution_for_expr(&self, id: ExprId) -> Option<VariantDef> { pub fn variant_resolution_for_expr(&self, id: ExprId) -> Option<VariantDef> {
self.variant_resolutions.get(&id.into()).copied() self.variant_resolutions.get(&id.into()).copied()
} }
@ -215,7 +220,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
coerce_unsized_map: Self::init_coerce_unsized_map(db, &resolver), coerce_unsized_map: Self::init_coerce_unsized_map(db, &resolver),
db, db,
owner, owner,
body: owner.body(db), body: db.body(owner.into()),
resolver, resolver,
} }
} }
@ -559,21 +564,21 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
} }
fn collect_const(&mut self, data: &ConstData) { fn collect_const(&mut self, data: &ConstData) {
self.return_ty = self.make_ty(data.type_ref()); self.return_ty = self.make_ty(&data.type_ref);
} }
fn collect_fn(&mut self, data: &FnData) { fn collect_fn(&mut self, data: &FunctionData) {
let body = Arc::clone(&self.body); // avoid borrow checker problem let body = Arc::clone(&self.body); // avoid borrow checker problem
for (type_ref, pat) in data.params().iter().zip(body.params()) { for (type_ref, pat) in data.params.iter().zip(body.params.iter()) {
let ty = self.make_ty(type_ref); let ty = self.make_ty(type_ref);
self.infer_pat(*pat, &ty, BindingMode::default()); self.infer_pat(*pat, &ty, BindingMode::default());
} }
self.return_ty = self.make_ty(data.ret_type()); self.return_ty = self.make_ty(&data.ret_type);
} }
fn infer_body(&mut self) { fn infer_body(&mut self) {
self.infer_expr(self.body.body_expr(), &Expectation::has_type(self.return_ty.clone())); self.infer_expr(self.body.body_expr, &Expectation::has_type(self.return_ty.clone()));
} }
fn resolve_into_iter_item(&self) -> Option<TypeAlias> { fn resolve_into_iter_item(&self) -> Option<TypeAlias> {

View file

@ -4,13 +4,12 @@
//! //!
//! See: https://doc.rust-lang.org/nomicon/coercions.html //! See: https://doc.rust-lang.org/nomicon/coercions.html
use hir_def::resolver::Resolver; use hir_def::{lang_item::LangItemTarget, resolver::Resolver};
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use test_utils::tested_by; use test_utils::tested_by;
use crate::{ use crate::{
db::HirDatabase, db::HirDatabase,
lang_item::LangItemTarget,
ty::{autoderef, Substs, Ty, TypeCtor, TypeWalk}, ty::{autoderef, Substs, Ty, TypeCtor, TypeWalk},
Adt, Mutability, Adt, Mutability,
}; };
@ -50,7 +49,9 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
) -> FxHashMap<(TypeCtor, TypeCtor), usize> { ) -> FxHashMap<(TypeCtor, TypeCtor), usize> {
let krate = resolver.krate().unwrap(); let krate = resolver.krate().unwrap();
let impls = match db.lang_item(krate.into(), "coerce_unsized".into()) { let impls = match db.lang_item(krate.into(), "coerce_unsized".into()) {
Some(LangItemTarget::Trait(trait_)) => db.impls_for_trait(krate.into(), trait_), Some(LangItemTarget::TraitId(trait_)) => {
db.impls_for_trait(krate.into(), trait_.into())
}
_ => return FxHashMap::default(), _ => return FxHashMap::default(),
}; };
@ -244,14 +245,17 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
ty_app!(TypeCtor::Adt(Adt::Struct(struct1)), st1), ty_app!(TypeCtor::Adt(Adt::Struct(struct1)), st1),
ty_app!(TypeCtor::Adt(Adt::Struct(struct2)), st2), ty_app!(TypeCtor::Adt(Adt::Struct(struct2)), st2),
) if struct1 == struct2 => { ) if struct1 == struct2 => {
let fields = struct1.fields(self.db); let field_tys = self.db.field_types(struct1.id.into());
let (last_field, prev_fields) = fields.split_last()?; let struct_data = self.db.struct_data(struct1.id.0);
let mut fields = struct_data.variant_data.fields().iter();
let (last_field_id, _data) = fields.next_back()?;
// Get the generic parameter involved in the last field. // Get the generic parameter involved in the last field.
let unsize_generic_index = { let unsize_generic_index = {
let mut index = None; let mut index = None;
let mut multiple_param = false; let mut multiple_param = false;
last_field.ty(self.db).walk(&mut |ty| match ty { field_tys[last_field_id].walk(&mut |ty| match ty {
&Ty::Param { idx, .. } => { &Ty::Param { idx, .. } => {
if index.is_none() { if index.is_none() {
index = Some(idx); index = Some(idx);
@ -270,8 +274,8 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
// Check other fields do not involve it. // Check other fields do not involve it.
let mut multiple_used = false; let mut multiple_used = false;
prev_fields.iter().for_each(|field| { fields.for_each(|(field_id, _data)| {
field.ty(self.db).walk(&mut |ty| match ty { field_tys[field_id].walk(&mut |ty| match ty {
&Ty::Param { idx, .. } if idx == unsize_generic_index => { &Ty::Param { idx, .. } if idx == unsize_generic_index => {
multiple_used = true multiple_used = true
} }

View file

@ -214,19 +214,24 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
self.unify(&ty, &expected.ty); self.unify(&ty, &expected.ty);
let substs = ty.substs().unwrap_or_else(Substs::empty); let substs = ty.substs().unwrap_or_else(Substs::empty);
let field_types =
def_id.map(|it| self.db.field_types(it.into())).unwrap_or_default();
for (field_idx, field) in fields.iter().enumerate() { for (field_idx, field) in fields.iter().enumerate() {
let field_ty = def_id let field_def = def_id.and_then(|it| match it.field(self.db, &field.name) {
.and_then(|it| match it.field(self.db, &field.name) { Some(field) => Some(field),
Some(field) => Some(field), None => {
None => { self.push_diagnostic(InferenceDiagnostic::NoSuchField {
self.push_diagnostic(InferenceDiagnostic::NoSuchField { expr: tgt_expr,
expr: tgt_expr, field: field_idx,
field: field_idx, });
}); None
None }
} });
}) if let Some(field_def) = field_def {
.map_or(Ty::Unknown, |field| field.ty(self.db)) self.result.record_field_resolutions.insert(field.expr, field_def);
}
let field_ty = field_def
.map_or(Ty::Unknown, |it| field_types[it.id].clone())
.subst(&substs); .subst(&substs);
self.infer_expr_coerce(field.expr, &Expectation::has_type(field_ty)); self.infer_expr_coerce(field.expr, &Expectation::has_type(field_ty));
} }
@ -250,7 +255,9 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
.and_then(|idx| a_ty.parameters.0.get(idx).cloned()), .and_then(|idx| a_ty.parameters.0.get(idx).cloned()),
TypeCtor::Adt(Adt::Struct(s)) => s.field(self.db, name).map(|field| { TypeCtor::Adt(Adt::Struct(s)) => s.field(self.db, name).map(|field| {
self.write_field_resolution(tgt_expr, field); self.write_field_resolution(tgt_expr, field);
field.ty(self.db).subst(&a_ty.parameters) self.db.field_types(s.id.into())[field.id]
.clone()
.subst(&a_ty.parameters)
}), }),
_ => None, _ => None,
}, },

View file

@ -27,10 +27,11 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
let substs = ty.substs().unwrap_or_else(Substs::empty); let substs = ty.substs().unwrap_or_else(Substs::empty);
let field_tys = def.map(|it| self.db.field_types(it.into())).unwrap_or_default();
for (i, &subpat) in subpats.iter().enumerate() { for (i, &subpat) in subpats.iter().enumerate() {
let expected_ty = def let expected_ty = def
.and_then(|d| d.field(self.db, &Name::new_tuple_field(i))) .and_then(|d| d.field(self.db, &Name::new_tuple_field(i)))
.map_or(Ty::Unknown, |field| field.ty(self.db)) .map_or(Ty::Unknown, |field| field_tys[field.id].clone())
.subst(&substs); .subst(&substs);
let expected_ty = self.normalize_associated_types_in(expected_ty); let expected_ty = self.normalize_associated_types_in(expected_ty);
self.infer_pat(subpat, &expected_ty, default_bm); self.infer_pat(subpat, &expected_ty, default_bm);
@ -56,10 +57,12 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
let substs = ty.substs().unwrap_or_else(Substs::empty); let substs = ty.substs().unwrap_or_else(Substs::empty);
let field_tys = def.map(|it| self.db.field_types(it.into())).unwrap_or_default();
for subpat in subpats { for subpat in subpats {
let matching_field = def.and_then(|it| it.field(self.db, &subpat.name)); let matching_field = def.and_then(|it| it.field(self.db, &subpat.name));
let expected_ty = let expected_ty = matching_field
matching_field.map_or(Ty::Unknown, |field| field.ty(self.db)).subst(&substs); .map_or(Ty::Unknown, |field| field_tys[field.id].clone())
.subst(&substs);
let expected_ty = self.normalize_associated_types_in(expected_ty); let expected_ty = self.normalize_associated_types_in(expected_ty);
self.infer_pat(subpat.pat, &expected_ty, default_bm); self.infer_pat(subpat.pat, &expected_ty, default_bm);
} }

View file

@ -14,8 +14,9 @@ use hir_def::{
path::{GenericArg, PathSegment}, path::{GenericArg, PathSegment},
resolver::{HasResolver, Resolver, TypeNs}, resolver::{HasResolver, Resolver, TypeNs},
type_ref::{TypeBound, TypeRef}, type_ref::{TypeBound, TypeRef},
AdtId, GenericDefId, AdtId, GenericDefId, LocalStructFieldId, VariantId,
}; };
use ra_arena::map::ArenaMap;
use super::{ use super::{
FnSig, GenericPredicate, ProjectionPredicate, ProjectionTy, Substs, TraitRef, Ty, TypeCtor, FnSig, GenericPredicate, ProjectionPredicate, ProjectionTy, Substs, TraitRef, Ty, TypeCtor,
@ -29,7 +30,7 @@ use crate::{
}, },
util::make_mut_slice, util::make_mut_slice,
Const, Enum, EnumVariant, Function, GenericDef, ImplBlock, ModuleDef, Path, Static, Struct, Const, Enum, EnumVariant, Function, GenericDef, ImplBlock, ModuleDef, Path, Static, Struct,
StructField, Trait, TypeAlias, Union, VariantDef, Trait, TypeAlias, Union,
}; };
// FIXME: this is only really used in `type_for_def`, which contains a bunch of // FIXME: this is only really used in `type_for_def`, which contains a bunch of
@ -549,16 +550,23 @@ pub(crate) fn callable_item_sig(db: &impl HirDatabase, def: CallableDef) -> FnSi
} }
} }
/// Build the type of a specific field of a struct or enum variant. /// Build the type of all specific fields of a struct or enum variant.
pub(crate) fn type_for_field(db: &impl HirDatabase, field: StructField) -> Ty { pub(crate) fn field_types_query(
let parent_def = field.parent_def(db); db: &impl HirDatabase,
let resolver = match parent_def { variant_id: VariantId,
VariantDef::Struct(it) => it.id.resolver(db), ) -> Arc<ArenaMap<LocalStructFieldId, Ty>> {
VariantDef::EnumVariant(it) => it.parent.id.resolver(db), let (resolver, var_data) = match variant_id {
VariantId::StructId(it) => (it.resolver(db), db.struct_data(it.0).variant_data.clone()),
VariantId::EnumVariantId(it) => (
it.parent.resolver(db),
db.enum_data(it.parent).variants[it.local_id].variant_data.clone(),
),
}; };
let var_data = parent_def.variant_data(db); let mut res = ArenaMap::default();
let type_ref = &var_data.fields().unwrap()[field.id].type_ref; for (field_id, field_data) in var_data.fields().iter() {
Ty::from_hir(db, &resolver, type_ref) res.insert(field_id, Ty::from_hir(db, &resolver, &field_data.type_ref))
}
Arc::new(res)
} }
/// This query exists only to be used when resolving short-hand associated types /// This query exists only to be used when resolving short-hand associated types
@ -622,10 +630,10 @@ pub(crate) fn generic_defaults_query(db: &impl HirDatabase, def: GenericDef) ->
} }
fn fn_sig_for_fn(db: &impl HirDatabase, def: Function) -> FnSig { fn fn_sig_for_fn(db: &impl HirDatabase, def: Function) -> FnSig {
let data = def.data(db); let data = db.function_data(def.id);
let resolver = def.id.resolver(db); let resolver = def.id.resolver(db);
let params = data.params().iter().map(|tr| Ty::from_hir(db, &resolver, tr)).collect::<Vec<_>>(); let params = data.params.iter().map(|tr| Ty::from_hir(db, &resolver, tr)).collect::<Vec<_>>();
let ret = Ty::from_hir(db, &resolver, data.ret_type()); let ret = Ty::from_hir(db, &resolver, &data.ret_type);
FnSig::from_params_and_return(params, ret) FnSig::from_params_and_return(params, ret)
} }
@ -639,18 +647,18 @@ fn type_for_fn(db: &impl HirDatabase, def: Function) -> Ty {
/// Build the declared type of a const. /// Build the declared type of a const.
fn type_for_const(db: &impl HirDatabase, def: Const) -> Ty { fn type_for_const(db: &impl HirDatabase, def: Const) -> Ty {
let data = def.data(db); let data = db.const_data(def.id);
let resolver = def.id.resolver(db); let resolver = def.id.resolver(db);
Ty::from_hir(db, &resolver, data.type_ref()) Ty::from_hir(db, &resolver, &data.type_ref)
} }
/// Build the declared type of a static. /// Build the declared type of a static.
fn type_for_static(db: &impl HirDatabase, def: Static) -> Ty { fn type_for_static(db: &impl HirDatabase, def: Static) -> Ty {
let data = def.data(db); let data = db.static_data(def.id);
let resolver = def.id.resolver(db); let resolver = def.id.resolver(db);
Ty::from_hir(db, &resolver, data.type_ref()) Ty::from_hir(db, &resolver, &data.type_ref)
} }
/// Build the declared type of a static. /// Build the declared type of a static.
@ -696,10 +704,7 @@ impl From<Option<BuiltinFloat>> for Uncertain<FloatTy> {
fn fn_sig_for_struct_constructor(db: &impl HirDatabase, def: Struct) -> FnSig { fn fn_sig_for_struct_constructor(db: &impl HirDatabase, def: Struct) -> FnSig {
let struct_data = db.struct_data(def.id.into()); let struct_data = db.struct_data(def.id.into());
let fields = match struct_data.variant_data.fields() { let fields = struct_data.variant_data.fields();
Some(fields) => fields,
None => panic!("fn_sig_for_struct_constructor called on unit struct"),
};
let resolver = def.id.resolver(db); let resolver = def.id.resolver(db);
let params = fields let params = fields
.iter() .iter()
@ -712,7 +717,7 @@ fn fn_sig_for_struct_constructor(db: &impl HirDatabase, def: Struct) -> FnSig {
/// Build the type of a tuple struct constructor. /// Build the type of a tuple struct constructor.
fn type_for_struct_constructor(db: &impl HirDatabase, def: Struct) -> Ty { fn type_for_struct_constructor(db: &impl HirDatabase, def: Struct) -> Ty {
let struct_data = db.struct_data(def.id.into()); let struct_data = db.struct_data(def.id.into());
if struct_data.variant_data.fields().is_none() { if struct_data.variant_data.is_unit() {
return type_for_adt(db, def); // Unit struct return type_for_adt(db, def); // Unit struct
} }
let generics = db.generic_params(def.id.into()); let generics = db.generic_params(def.id.into());
@ -722,10 +727,7 @@ fn type_for_struct_constructor(db: &impl HirDatabase, def: Struct) -> Ty {
fn fn_sig_for_enum_variant_constructor(db: &impl HirDatabase, def: EnumVariant) -> FnSig { fn fn_sig_for_enum_variant_constructor(db: &impl HirDatabase, def: EnumVariant) -> FnSig {
let var_data = def.variant_data(db); let var_data = def.variant_data(db);
let fields = match var_data.fields() { let fields = var_data.fields();
Some(fields) => fields,
None => panic!("fn_sig_for_enum_variant_constructor called for unit variant"),
};
let resolver = def.parent.id.resolver(db); let resolver = def.parent.id.resolver(db);
let params = fields let params = fields
.iter() .iter()
@ -740,7 +742,7 @@ fn fn_sig_for_enum_variant_constructor(db: &impl HirDatabase, def: EnumVariant)
/// Build the type of a tuple enum variant constructor. /// Build the type of a tuple enum variant constructor.
fn type_for_enum_variant_constructor(db: &impl HirDatabase, def: EnumVariant) -> Ty { fn type_for_enum_variant_constructor(db: &impl HirDatabase, def: EnumVariant) -> Ty {
let var_data = def.variant_data(db); let var_data = def.variant_data(db);
if var_data.fields().is_none() { if var_data.is_unit() {
return type_for_adt(db, def.parent_enum(db)); // Unit variant return type_for_adt(db, def.parent_enum(db)); // Unit variant
} }
let generics = db.generic_params(def.parent_enum(db).id.into()); let generics = db.generic_params(def.parent_enum(db).id.into());

View file

@ -5,7 +5,7 @@
use std::sync::Arc; use std::sync::Arc;
use arrayvec::ArrayVec; use arrayvec::ArrayVec;
use hir_def::resolver::Resolver; use hir_def::{lang_item::LangItemTarget, resolver::Resolver, AstItemDef};
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use crate::{ use crate::{
@ -91,34 +91,43 @@ fn def_crates(db: &impl HirDatabase, cur_crate: Crate, ty: &Ty) -> Option<ArrayV
// Types like slice can have inherent impls in several crates, (core and alloc). // Types like slice can have inherent impls in several crates, (core and alloc).
// The corresponding impls are marked with lang items, so we can use them to find the required crates. // The corresponding impls are marked with lang items, so we can use them to find the required crates.
macro_rules! lang_item_crate { macro_rules! lang_item_crate {
($db:expr, $cur_crate:expr, $($name:expr),+ $(,)?) => {{ ($($name:expr),+ $(,)?) => {{
let mut v = ArrayVec::<[Crate; 2]>::new(); let mut v = ArrayVec::<[LangItemTarget; 2]>::new();
$( $(
v.extend($db.lang_item($cur_crate, $name.into()).and_then(|item| item.krate($db))); v.extend(db.lang_item(cur_crate.crate_id, $name.into()));
)+ )+
Some(v) v
}}; }};
} }
match ty { let lang_item_targets = match ty {
Ty::Apply(a_ty) => match a_ty.ctor { Ty::Apply(a_ty) => match a_ty.ctor {
TypeCtor::Adt(def_id) => Some(std::iter::once(def_id.krate(db)?).collect()), TypeCtor::Adt(def_id) => return Some(std::iter::once(def_id.krate(db)?).collect()),
TypeCtor::Bool => lang_item_crate!(db, cur_crate, "bool"), TypeCtor::Bool => lang_item_crate!("bool"),
TypeCtor::Char => lang_item_crate!(db, cur_crate, "char"), TypeCtor::Char => lang_item_crate!("char"),
TypeCtor::Float(Uncertain::Known(f)) => match f.bitness { TypeCtor::Float(Uncertain::Known(f)) => match f.bitness {
// There are two lang items: one in libcore (fXX) and one in libstd (fXX_runtime) // There are two lang items: one in libcore (fXX) and one in libstd (fXX_runtime)
FloatBitness::X32 => lang_item_crate!(db, cur_crate, "f32", "f32_runtime"), FloatBitness::X32 => lang_item_crate!("f32", "f32_runtime"),
FloatBitness::X64 => lang_item_crate!(db, cur_crate, "f64", "f64_runtime"), FloatBitness::X64 => lang_item_crate!("f64", "f64_runtime"),
}, },
TypeCtor::Int(Uncertain::Known(i)) => lang_item_crate!(db, cur_crate, i.ty_to_string()), TypeCtor::Int(Uncertain::Known(i)) => lang_item_crate!(i.ty_to_string()),
TypeCtor::Str => lang_item_crate!(db, cur_crate, "str_alloc", "str"), TypeCtor::Str => lang_item_crate!("str_alloc", "str"),
TypeCtor::Slice => lang_item_crate!(db, cur_crate, "slice_alloc", "slice"), TypeCtor::Slice => lang_item_crate!("slice_alloc", "slice"),
TypeCtor::RawPtr(Mutability::Shared) => lang_item_crate!(db, cur_crate, "const_ptr"), TypeCtor::RawPtr(Mutability::Shared) => lang_item_crate!("const_ptr"),
TypeCtor::RawPtr(Mutability::Mut) => lang_item_crate!(db, cur_crate, "mut_ptr"), TypeCtor::RawPtr(Mutability::Mut) => lang_item_crate!("mut_ptr"),
_ => None, _ => return None,
}, },
_ => None, _ => return None,
} };
let res = lang_item_targets
.into_iter()
.filter_map(|it| match it {
LangItemTarget::ImplBlockId(it) => Some(it),
_ => None,
})
.map(|it| it.module(db).krate.into())
.collect();
Some(res)
} }
/// Look up the method with the given name, returning the actual autoderefed /// Look up the method with the given name, returning the actual autoderefed
@ -233,7 +242,7 @@ fn iterate_trait_method_candidates<T>(
.chain(traits_from_env) .chain(traits_from_env)
.chain(resolver.traits_in_scope(db).into_iter().map(Trait::from)); .chain(resolver.traits_in_scope(db).into_iter().map(Trait::from));
'traits: for t in traits { 'traits: for t in traits {
let data = t.trait_data(db); let data = db.trait_data(t.id);
// we'll be lazy about checking whether the type implements the // we'll be lazy about checking whether the type implements the
// trait, but if we find out it doesn't, we'll skip the rest of the // trait, but if we find out it doesn't, we'll skip the rest of the
@ -291,9 +300,9 @@ fn is_valid_candidate(
) -> bool { ) -> bool {
match item { match item {
AssocItem::Function(m) => { AssocItem::Function(m) => {
let data = m.data(db); let data = db.function_data(m.id);
name.map_or(true, |name| data.name() == name) name.map_or(true, |name| data.name == *name)
&& (data.has_self_param() || mode == LookupMode::Path) && (data.has_self_param || mode == LookupMode::Path)
} }
AssocItem::Const(c) => { AssocItem::Const(c) => {
name.map_or(true, |name| Some(name) == c.name(db).as_ref()) name.map_or(true, |name| Some(name) == c.name(db).as_ref())

View file

@ -2550,8 +2550,6 @@ fn test() {
[233; 246) 'GLOBAL_STATIC': u32 [233; 246) 'GLOBAL_STATIC': u32
[256; 257) 'w': u32 [256; 257) 'w': u32
[260; 277) 'GLOBAL...IC_MUT': u32 [260; 277) 'GLOBAL...IC_MUT': u32
[118; 120) '99': u32
[161; 163) '99': u32
"### "###
); );
} }
@ -4857,3 +4855,41 @@ fn main() {
"### "###
); );
} }
#[test]
fn infer_builtin_macros_file() {
assert_snapshot!(
infer(r#"
#[rustc_builtin_macro]
macro_rules! file {() => {}}
fn main() {
let x = file!();
}
"#),
@r###"
![0; 2) '""': &str
[64; 88) '{ ...!(); }': ()
[74; 75) 'x': &str
"###
);
}
#[test]
fn infer_builtin_macros_column() {
assert_snapshot!(
infer(r#"
#[rustc_builtin_macro]
macro_rules! column {() => {}}
fn main() {
let x = column!();
}
"#),
@r###"
![0; 2) '13': i32
[66; 92) '{ ...!(); }': ()
[76; 77) 'x': i32
"###
);
}

View file

@ -3,7 +3,7 @@ use std::sync::{Arc, Mutex};
use chalk_ir::{cast::Cast, family::ChalkIr}; use chalk_ir::{cast::Cast, family::ChalkIr};
use log::debug; use log::debug;
use ra_db::salsa; use ra_db::{impl_intern_key, salsa};
use ra_prof::profile; use ra_prof::profile;
use rustc_hash::FxHashSet; use rustc_hash::FxHashSet;
@ -304,6 +304,10 @@ pub enum Impl {
/// Closure types implement the Fn traits synthetically. /// Closure types implement the Fn traits synthetically.
ClosureFnTraitImpl(ClosureFnTraitImplData), ClosureFnTraitImpl(ClosureFnTraitImplData),
} }
/// This exists just for Chalk, because our ImplIds are only unique per module.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct GlobalImplId(salsa::InternId);
impl_intern_key!(GlobalImplId);
/// An associated type value. Usually this comes from a `type` declaration /// An associated type value. Usually this comes from a `type` declaration
/// inside an impl block, but for built-in impls we have to synthesize it. /// inside an impl block, but for built-in impls we have to synthesize it.
@ -315,3 +319,8 @@ pub enum AssocTyValue {
/// The output type of the Fn trait implementation. /// The output type of the Fn trait implementation.
ClosureFnTraitImplOutput(ClosureFnTraitImplData), ClosureFnTraitImplOutput(ClosureFnTraitImplData),
} }
/// This exists just for Chalk, because it needs a unique ID for each associated
/// type value in an impl (even synthetic ones).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct AssocTyValueId(salsa::InternId);
impl_intern_key!(AssocTyValueId);

View file

@ -9,6 +9,7 @@ use chalk_ir::{
}; };
use chalk_rust_ir::{AssociatedTyDatum, AssociatedTyValue, ImplDatum, StructDatum, TraitDatum}; use chalk_rust_ir::{AssociatedTyDatum, AssociatedTyValue, ImplDatum, StructDatum, TraitDatum};
use hir_def::lang_item::LangItemTarget;
use hir_expand::name; use hir_expand::name;
use ra_db::salsa::{InternId, InternKey}; use ra_db::salsa::{InternId, InternKey};
@ -18,7 +19,7 @@ use crate::{
db::HirDatabase, db::HirDatabase,
ty::display::HirDisplay, ty::display::HirDisplay,
ty::{ApplicationTy, GenericPredicate, ProjectionTy, Substs, TraitRef, Ty, TypeCtor, TypeWalk}, ty::{ApplicationTy, GenericPredicate, ProjectionTy, Substs, TraitRef, Ty, TypeCtor, TypeWalk},
Crate, GenericDef, HasBody, ImplBlock, Trait, TypeAlias, Crate, GenericDef, ImplBlock, Trait, TypeAlias,
}; };
/// This represents a trait whose name we could not resolve. /// This represents a trait whose name we could not resolve.
@ -714,7 +715,7 @@ fn closure_fn_trait_impl_datum(
let fn_once_trait = get_fn_trait(db, krate, super::FnTrait::FnOnce)?; let fn_once_trait = get_fn_trait(db, krate, super::FnTrait::FnOnce)?;
fn_once_trait.associated_type_by_name(db, &name::OUTPUT_TYPE)?; fn_once_trait.associated_type_by_name(db, &name::OUTPUT_TYPE)?;
let num_args: u16 = match &data.def.body(db)[data.expr] { let num_args: u16 = match &db.body(data.def.into())[data.expr] {
crate::expr::Expr::Lambda { args, .. } => args.len() as u16, crate::expr::Expr::Lambda { args, .. } => args.len() as u16,
_ => { _ => {
log::warn!("closure for closure type {:?} not found", data); log::warn!("closure for closure type {:?} not found", data);
@ -804,7 +805,7 @@ fn closure_fn_trait_output_assoc_ty_value(
) -> Arc<AssociatedTyValue<ChalkIr>> { ) -> Arc<AssociatedTyValue<ChalkIr>> {
let impl_id = Impl::ClosureFnTraitImpl(data.clone()).to_chalk(db); let impl_id = Impl::ClosureFnTraitImpl(data.clone()).to_chalk(db);
let num_args: u16 = match &data.def.body(db)[data.expr] { let num_args: u16 = match &db.body(data.def.into())[data.expr] {
crate::expr::Expr::Lambda { args, .. } => args.len() as u16, crate::expr::Expr::Lambda { args, .. } => args.len() as u16,
_ => { _ => {
log::warn!("closure for closure type {:?} not found", data); log::warn!("closure for closure type {:?} not found", data);
@ -832,9 +833,9 @@ fn closure_fn_trait_output_assoc_ty_value(
} }
fn get_fn_trait(db: &impl HirDatabase, krate: Crate, fn_trait: super::FnTrait) -> Option<Trait> { fn get_fn_trait(db: &impl HirDatabase, krate: Crate, fn_trait: super::FnTrait) -> Option<Trait> {
let target = db.lang_item(krate, fn_trait.lang_item_name().into())?; let target = db.lang_item(krate.crate_id, fn_trait.lang_item_name().into())?;
match target { match target {
crate::lang_item::LangItemTarget::Trait(t) => Some(t), LangItemTarget::TraitId(t) => Some(t.into()),
_ => None, _ => None,
} }
} }
@ -846,38 +847,38 @@ fn id_to_chalk<T: InternKey>(salsa_id: T) -> chalk_ir::RawId {
chalk_ir::RawId { index: salsa_id.as_intern_id().as_u32() } chalk_ir::RawId { index: salsa_id.as_intern_id().as_u32() }
} }
impl From<chalk_ir::StructId> for crate::ids::TypeCtorId { impl From<chalk_ir::StructId> for crate::ty::TypeCtorId {
fn from(struct_id: chalk_ir::StructId) -> Self { fn from(struct_id: chalk_ir::StructId) -> Self {
id_from_chalk(struct_id.0) id_from_chalk(struct_id.0)
} }
} }
impl From<crate::ids::TypeCtorId> for chalk_ir::StructId { impl From<crate::ty::TypeCtorId> for chalk_ir::StructId {
fn from(type_ctor_id: crate::ids::TypeCtorId) -> Self { fn from(type_ctor_id: crate::ty::TypeCtorId) -> Self {
chalk_ir::StructId(id_to_chalk(type_ctor_id)) chalk_ir::StructId(id_to_chalk(type_ctor_id))
} }
} }
impl From<chalk_ir::ImplId> for crate::ids::GlobalImplId { impl From<chalk_ir::ImplId> for crate::ty::traits::GlobalImplId {
fn from(impl_id: chalk_ir::ImplId) -> Self { fn from(impl_id: chalk_ir::ImplId) -> Self {
id_from_chalk(impl_id.0) id_from_chalk(impl_id.0)
} }
} }
impl From<crate::ids::GlobalImplId> for chalk_ir::ImplId { impl From<crate::ty::traits::GlobalImplId> for chalk_ir::ImplId {
fn from(impl_id: crate::ids::GlobalImplId) -> Self { fn from(impl_id: crate::ty::traits::GlobalImplId) -> Self {
chalk_ir::ImplId(id_to_chalk(impl_id)) chalk_ir::ImplId(id_to_chalk(impl_id))
} }
} }
impl From<chalk_rust_ir::AssociatedTyValueId> for crate::ids::AssocTyValueId { impl From<chalk_rust_ir::AssociatedTyValueId> for crate::ty::traits::AssocTyValueId {
fn from(id: chalk_rust_ir::AssociatedTyValueId) -> Self { fn from(id: chalk_rust_ir::AssociatedTyValueId) -> Self {
id_from_chalk(id.0) id_from_chalk(id.0)
} }
} }
impl From<crate::ids::AssocTyValueId> for chalk_rust_ir::AssociatedTyValueId { impl From<crate::ty::traits::AssocTyValueId> for chalk_rust_ir::AssociatedTyValueId {
fn from(assoc_ty_value_id: crate::ids::AssocTyValueId) -> Self { fn from(assoc_ty_value_id: crate::ty::traits::AssocTyValueId) -> Self {
chalk_rust_ir::AssociatedTyValueId(id_to_chalk(assoc_ty_value_id)) chalk_rust_ir::AssociatedTyValueId(id_to_chalk(assoc_ty_value_id))
} }
} }

View file

@ -1,31 +0,0 @@
//! HIR for type aliases (i.e. the `type` keyword).
use std::sync::Arc;
use hir_def::type_ref::TypeRef;
use hir_expand::name::{AsName, Name};
use ra_syntax::ast::NameOwner;
use crate::{
db::{AstDatabase, DefDatabase},
HasSource, TypeAlias,
};
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct TypeAliasData {
pub(crate) name: Name,
pub(crate) type_ref: Option<TypeRef>,
}
impl TypeAliasData {
pub(crate) fn type_alias_data_query(
db: &(impl DefDatabase + AstDatabase),
typ: TypeAlias,
) -> Arc<TypeAliasData> {
let node = typ.source(db).value;
let name = node.name().map_or_else(Name::missing, |n| n.as_name());
let type_ref = node.type_ref().map(TypeRef::from_ast);
Arc::new(TypeAliasData { name, type_ref })
}
}

View file

@ -2,13 +2,17 @@
use std::sync::Arc; use std::sync::Arc;
use hir_expand::name::{AsName, Name}; use hir_expand::{
use ra_arena::Arena; either::Either,
name::{AsName, Name},
Source,
};
use ra_arena::{map::ArenaMap, Arena};
use ra_syntax::ast::{self, NameOwner, TypeAscriptionOwner}; use ra_syntax::ast::{self, NameOwner, TypeAscriptionOwner};
use crate::{ use crate::{
db::DefDatabase2, type_ref::TypeRef, AstItemDef, EnumId, LocalEnumVariantId, db::DefDatabase, trace::Trace, type_ref::TypeRef, AstItemDef, EnumId, HasChildSource,
LocalStructFieldId, StructOrUnionId, LocalEnumVariantId, LocalStructFieldId, StructOrUnionId, VariantId,
}; };
/// Note that we use `StructData` for unions as well! /// Note that we use `StructData` for unions as well!
@ -30,13 +34,9 @@ pub struct EnumVariantData {
pub variant_data: Arc<VariantData>, pub variant_data: Arc<VariantData>,
} }
/// Fields of an enum variant or struct
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub struct VariantData(VariantDataInner); pub enum VariantData {
Record(Arena<LocalStructFieldId, StructFieldData>),
#[derive(Debug, Clone, PartialEq, Eq)]
enum VariantDataInner {
Struct(Arena<LocalStructFieldId, StructFieldData>),
Tuple(Arena<LocalStructFieldId, StructFieldData>), Tuple(Arena<LocalStructFieldId, StructFieldData>),
Unit, Unit,
} }
@ -49,10 +49,7 @@ pub struct StructFieldData {
} }
impl StructData { impl StructData {
pub(crate) fn struct_data_query( pub(crate) fn struct_data_query(db: &impl DefDatabase, id: StructOrUnionId) -> Arc<StructData> {
db: &impl DefDatabase2,
id: StructOrUnionId,
) -> Arc<StructData> {
let src = id.source(db); let src = id.source(db);
let name = src.value.name().map(|n| n.as_name()); let name = src.value.name().map(|n| n.as_name());
let variant_data = VariantData::new(src.value.kind()); let variant_data = VariantData::new(src.value.kind());
@ -62,20 +59,12 @@ impl StructData {
} }
impl EnumData { impl EnumData {
pub(crate) fn enum_data_query(db: &impl DefDatabase2, e: EnumId) -> Arc<EnumData> { pub(crate) fn enum_data_query(db: &impl DefDatabase, e: EnumId) -> Arc<EnumData> {
let src = e.source(db); let src = e.source(db);
let name = src.value.name().map(|n| n.as_name()); let name = src.value.name().map(|n| n.as_name());
let variants = src let mut trace = Trace::new_for_arena();
.value lower_enum(&mut trace, &src.value);
.variant_list() Arc::new(EnumData { name, variants: trace.into_arena() })
.into_iter()
.flat_map(|it| it.variants())
.map(|var| EnumVariantData {
name: var.name().map(|it| it.as_name()),
variant_data: Arc::new(VariantData::new(var.kind())),
})
.collect();
Arc::new(EnumData { name, variants })
} }
pub(crate) fn variant(&self, name: &Name) -> Option<LocalEnumVariantId> { pub(crate) fn variant(&self, name: &Name) -> Option<LocalEnumVariantId> {
@ -84,39 +73,117 @@ impl EnumData {
} }
} }
impl HasChildSource for EnumId {
type ChildId = LocalEnumVariantId;
type Value = ast::EnumVariant;
fn child_source(&self, db: &impl DefDatabase) -> Source<ArenaMap<Self::ChildId, Self::Value>> {
let src = self.source(db);
let mut trace = Trace::new_for_map();
lower_enum(&mut trace, &src.value);
src.with_value(trace.into_map())
}
}
fn lower_enum(
trace: &mut Trace<LocalEnumVariantId, EnumVariantData, ast::EnumVariant>,
ast: &ast::EnumDef,
) {
for var in ast.variant_list().into_iter().flat_map(|it| it.variants()) {
trace.alloc(
|| var.clone(),
|| EnumVariantData {
name: var.name().map(|it| it.as_name()),
variant_data: Arc::new(VariantData::new(var.kind())),
},
);
}
}
impl VariantData { impl VariantData {
fn new(flavor: ast::StructKind) -> Self { fn new(flavor: ast::StructKind) -> Self {
let inner = match flavor { let mut trace = Trace::new_for_arena();
ast::StructKind::Tuple(fl) => { match lower_struct(&mut trace, &flavor) {
let fields = fl StructKind::Tuple => VariantData::Tuple(trace.into_arena()),
.fields() StructKind::Record => VariantData::Record(trace.into_arena()),
.enumerate() StructKind::Unit => VariantData::Unit,
.map(|(i, fd)| StructFieldData { }
name: Name::new_tuple_field(i),
type_ref: TypeRef::from_ast_opt(fd.type_ref()),
})
.collect();
VariantDataInner::Tuple(fields)
}
ast::StructKind::Named(fl) => {
let fields = fl
.fields()
.map(|fd| StructFieldData {
name: fd.name().map(|n| n.as_name()).unwrap_or_else(Name::missing),
type_ref: TypeRef::from_ast_opt(fd.ascribed_type()),
})
.collect();
VariantDataInner::Struct(fields)
}
ast::StructKind::Unit => VariantDataInner::Unit,
};
VariantData(inner)
} }
pub fn fields(&self) -> Option<&Arena<LocalStructFieldId, StructFieldData>> { pub fn fields(&self) -> &Arena<LocalStructFieldId, StructFieldData> {
match &self.0 { const EMPTY: &Arena<LocalStructFieldId, StructFieldData> = &Arena::new();
VariantDataInner::Struct(fields) | VariantDataInner::Tuple(fields) => Some(fields), match &self {
_ => None, VariantData::Record(fields) | VariantData::Tuple(fields) => fields,
_ => EMPTY,
}
}
pub fn is_unit(&self) -> bool {
match self {
VariantData::Unit => true,
_ => false,
} }
} }
} }
impl HasChildSource for VariantId {
type ChildId = LocalStructFieldId;
type Value = Either<ast::TupleFieldDef, ast::RecordFieldDef>;
fn child_source(&self, db: &impl DefDatabase) -> Source<ArenaMap<Self::ChildId, Self::Value>> {
let src = match self {
VariantId::EnumVariantId(it) => {
// I don't really like the fact that we call into parent source
// here, this might add to more queries then necessary.
let src = it.parent.child_source(db);
src.map(|map| map[it.local_id].kind())
}
VariantId::StructId(it) => it.0.source(db).map(|it| it.kind()),
};
let mut trace = Trace::new_for_map();
lower_struct(&mut trace, &src.value);
src.with_value(trace.into_map())
}
}
enum StructKind {
Tuple,
Record,
Unit,
}
fn lower_struct(
trace: &mut Trace<
LocalStructFieldId,
StructFieldData,
Either<ast::TupleFieldDef, ast::RecordFieldDef>,
>,
ast: &ast::StructKind,
) -> StructKind {
match ast {
ast::StructKind::Tuple(fl) => {
for (i, fd) in fl.fields().enumerate() {
trace.alloc(
|| Either::A(fd.clone()),
|| StructFieldData {
name: Name::new_tuple_field(i),
type_ref: TypeRef::from_ast_opt(fd.type_ref()),
},
);
}
StructKind::Tuple
}
ast::StructKind::Record(fl) => {
for fd in fl.fields() {
trace.alloc(
|| Either::B(fd.clone()),
|| StructFieldData {
name: fd.name().map(|n| n.as_name()).unwrap_or_else(Name::missing),
type_ref: TypeRef::from_ast_opt(fd.ascribed_type()),
},
);
}
StructKind::Record
}
ast::StructKind::Unit => StructKind::Unit,
}
}

View file

@ -1,17 +1,93 @@
//! A higher level attributes based on TokenTree, with also some shortcuts. //! A higher level attributes based on TokenTree, with also some shortcuts.
use std::sync::Arc; use std::{ops, sync::Arc};
use hir_expand::hygiene::Hygiene; use hir_expand::{either::Either, hygiene::Hygiene, AstId, Source};
use mbe::ast_to_token_tree; use mbe::ast_to_token_tree;
use ra_cfg::CfgOptions;
use ra_syntax::{ use ra_syntax::{
ast::{self, AstNode, AttrsOwner}, ast::{self, AstNode, AttrsOwner},
SmolStr, SmolStr,
}; };
use tt::Subtree; use tt::Subtree;
use crate::path::Path; use crate::{
db::DefDatabase, path::Path, AdtId, AstItemDef, AttrDefId, HasChildSource, HasSource, Lookup,
};
#[derive(Default, Debug, Clone, PartialEq, Eq)]
pub struct Attrs {
entries: Option<Arc<[Attr]>>,
}
impl ops::Deref for Attrs {
type Target = [Attr];
fn deref(&self) -> &[Attr] {
match &self.entries {
Some(it) => &*it,
None => &[],
}
}
}
impl Attrs {
pub(crate) fn attrs_query(db: &impl DefDatabase, def: AttrDefId) -> Attrs {
match def {
AttrDefId::ModuleId(module) => {
let def_map = db.crate_def_map(module.krate);
let src = match def_map[module.module_id].declaration_source(db) {
Some(it) => it,
None => return Attrs::default(),
};
Attrs::from_attrs_owner(db, src.as_ref().map(|it| it as &dyn AttrsOwner))
}
AttrDefId::StructFieldId(it) => {
let src = it.parent.child_source(db);
match &src.value[it.local_id] {
Either::A(_tuple) => Attrs::default(),
Either::B(record) => Attrs::from_attrs_owner(db, src.with_value(record)),
}
}
AttrDefId::EnumVariantId(var_id) => {
let src = var_id.parent.child_source(db);
let src = src.as_ref().map(|it| &it[var_id.local_id]);
Attrs::from_attrs_owner(db, src.map(|it| it as &dyn AttrsOwner))
}
AttrDefId::AdtId(it) => match it {
AdtId::StructId(it) => attrs_from_ast(it.0.lookup_intern(db).ast_id, db),
AdtId::EnumId(it) => attrs_from_ast(it.lookup_intern(db).ast_id, db),
AdtId::UnionId(it) => attrs_from_ast(it.0.lookup_intern(db).ast_id, db),
},
AttrDefId::TraitId(it) => attrs_from_ast(it.lookup_intern(db).ast_id, db),
AttrDefId::MacroDefId(it) => attrs_from_ast(it.ast_id, db),
AttrDefId::ImplId(it) => attrs_from_ast(it.lookup_intern(db).ast_id, db),
AttrDefId::ConstId(it) => attrs_from_loc(it.lookup(db), db),
AttrDefId::StaticId(it) => attrs_from_loc(it.lookup(db), db),
AttrDefId::FunctionId(it) => attrs_from_loc(it.lookup(db), db),
AttrDefId::TypeAliasId(it) => attrs_from_loc(it.lookup(db), db),
}
}
fn from_attrs_owner(db: &impl DefDatabase, owner: Source<&dyn AttrsOwner>) -> Attrs {
let hygiene = Hygiene::new(db, owner.file_id);
Attrs::new(owner.value, &hygiene)
}
pub(crate) fn new(owner: &dyn AttrsOwner, hygiene: &Hygiene) -> Attrs {
let mut attrs = owner.attrs().peekable();
let entries = if attrs.peek().is_none() {
// Avoid heap allocation
None
} else {
Some(attrs.flat_map(|ast| Attr::from_src(ast, hygiene)).collect())
};
Attrs { entries }
}
pub fn by_key(&self, key: &'static str) -> AttrQuery<'_> {
AttrQuery { attrs: self, key }
}
}
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub struct Attr { pub struct Attr {
@ -26,7 +102,7 @@ pub enum AttrInput {
} }
impl Attr { impl Attr {
pub(crate) fn from_src(ast: ast::Attr, hygiene: &Hygiene) -> Option<Attr> { fn from_src(ast: ast::Attr, hygiene: &Hygiene) -> Option<Attr> {
let path = Path::from_src(ast.path()?, hygiene)?; let path = Path::from_src(ast.path()?, hygiene)?;
let input = match ast.input() { let input = match ast.input() {
None => None, None => None,
@ -42,43 +118,55 @@ impl Attr {
Some(Attr { path, input }) Some(Attr { path, input })
} }
}
pub fn from_attrs_owner(owner: &dyn AttrsOwner, hygiene: &Hygiene) -> Option<Arc<[Attr]>> { pub struct AttrQuery<'a> {
let mut attrs = owner.attrs().peekable(); attrs: &'a Attrs,
if attrs.peek().is_none() { key: &'static str,
// Avoid heap allocation }
return None;
}
Some(attrs.flat_map(|ast| Attr::from_src(ast, hygiene)).collect())
}
pub fn is_simple_atom(&self, name: &str) -> bool { impl<'a> AttrQuery<'a> {
// FIXME: Avoid cloning pub fn tt_values(self) -> impl Iterator<Item = &'a Subtree> {
self.path.as_ident().map_or(false, |s| s.to_string() == name) self.attrs().filter_map(|attr| match attr.input.as_ref()? {
} AttrInput::TokenTree(it) => Some(it),
// FIXME: handle cfg_attr :-)
pub fn as_cfg(&self) -> Option<&Subtree> {
if !self.is_simple_atom("cfg") {
return None;
}
match &self.input {
Some(AttrInput::TokenTree(subtree)) => Some(subtree),
_ => None, _ => None,
} })
} }
pub fn as_path(&self) -> Option<&SmolStr> { pub fn string_value(self) -> Option<&'a SmolStr> {
if !self.is_simple_atom("path") { self.attrs().find_map(|attr| match attr.input.as_ref()? {
return None; AttrInput::Literal(it) => Some(it),
}
match &self.input {
Some(AttrInput::Literal(it)) => Some(it),
_ => None, _ => None,
} })
} }
pub fn is_cfg_enabled(&self, cfg_options: &CfgOptions) -> Option<bool> { pub fn exists(self) -> bool {
cfg_options.is_cfg_enabled(self.as_cfg()?) self.attrs().next().is_some()
}
fn attrs(self) -> impl Iterator<Item = &'a Attr> {
let key = self.key;
self.attrs
.iter()
.filter(move |attr| attr.path.as_ident().map_or(false, |s| s.to_string() == key))
} }
} }
fn attrs_from_ast<D, N>(src: AstId<N>, db: &D) -> Attrs
where
N: ast::AttrsOwner,
D: DefDatabase,
{
let src = Source::new(src.file_id(), src.to_node(db));
Attrs::from_attrs_owner(db, src.as_ref().map(|it| it as &dyn AttrsOwner))
}
fn attrs_from_loc<T, D>(node: T, db: &D) -> Attrs
where
T: HasSource,
T::Value: ast::AttrsOwner,
D: DefDatabase,
{
let src = node.source(db);
Attrs::from_attrs_owner(db, src.as_ref().map(|it| it as &dyn AttrsOwner))
}

View file

@ -1,4 +1,5 @@
//! FIXME: write short doc here //! Defines `Body`: a lowered representation of bodies of functions, statics and
//! consts.
mod lower; mod lower;
pub mod scope; pub mod scope;
@ -13,14 +14,14 @@ use ra_syntax::{ast, AstNode, AstPtr};
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use crate::{ use crate::{
db::DefDatabase2, db::DefDatabase,
expr::{Expr, ExprId, Pat, PatId}, expr::{Expr, ExprId, Pat, PatId},
nameres::CrateDefMap, nameres::CrateDefMap,
path::Path, path::Path,
AstItemDef, DefWithBodyId, HasModule, HasSource, Lookup, ModuleId, DefWithBodyId, HasModule, HasSource, Lookup, ModuleId,
}; };
pub struct Expander { struct Expander {
crate_def_map: Arc<CrateDefMap>, crate_def_map: Arc<CrateDefMap>,
current_file_id: HirFileId, current_file_id: HirFileId,
hygiene: Hygiene, hygiene: Hygiene,
@ -28,7 +29,7 @@ pub struct Expander {
} }
impl Expander { impl Expander {
pub fn new(db: &impl DefDatabase2, current_file_id: HirFileId, module: ModuleId) -> Expander { fn new(db: &impl DefDatabase, current_file_id: HirFileId, module: ModuleId) -> Expander {
let crate_def_map = db.crate_def_map(module.krate); let crate_def_map = db.crate_def_map(module.krate);
let hygiene = Hygiene::new(db, current_file_id); let hygiene = Hygiene::new(db, current_file_id);
Expander { crate_def_map, current_file_id, hygiene, module } Expander { crate_def_map, current_file_id, hygiene, module }
@ -36,7 +37,7 @@ impl Expander {
fn enter_expand( fn enter_expand(
&mut self, &mut self,
db: &impl DefDatabase2, db: &impl DefDatabase,
macro_call: ast::MacroCall, macro_call: ast::MacroCall,
) -> Option<(Mark, ast::Expr)> { ) -> Option<(Mark, ast::Expr)> {
let ast_id = AstId::new( let ast_id = AstId::new(
@ -67,7 +68,7 @@ impl Expander {
None None
} }
fn exit(&mut self, db: &impl DefDatabase2, mark: Mark) { fn exit(&mut self, db: &impl DefDatabase, mark: Mark) {
self.hygiene = Hygiene::new(db, mark.file_id); self.hygiene = Hygiene::new(db, mark.file_id);
self.current_file_id = mark.file_id; self.current_file_id = mark.file_id;
std::mem::forget(mark); std::mem::forget(mark);
@ -81,8 +82,8 @@ impl Expander {
Path::from_src(path, &self.hygiene) Path::from_src(path, &self.hygiene)
} }
fn resolve_path_as_macro(&self, db: &impl DefDatabase2, path: &Path) -> Option<MacroDefId> { fn resolve_path_as_macro(&self, db: &impl DefDatabase, path: &Path) -> Option<MacroDefId> {
self.crate_def_map.resolve_path(db, self.module.module_id, path).0.get_macros() self.crate_def_map.resolve_path(db, self.module.module_id, path).0.take_macros()
} }
} }
@ -101,17 +102,17 @@ impl Drop for Mark {
/// The body of an item (function, const etc.). /// The body of an item (function, const etc.).
#[derive(Debug, Eq, PartialEq)] #[derive(Debug, Eq, PartialEq)]
pub struct Body { pub struct Body {
exprs: Arena<ExprId, Expr>, pub exprs: Arena<ExprId, Expr>,
pats: Arena<PatId, Pat>, pub pats: Arena<PatId, Pat>,
/// The patterns for the function's parameters. While the parameter types are /// The patterns for the function's parameters. While the parameter types are
/// part of the function signature, the patterns are not (they don't change /// part of the function signature, the patterns are not (they don't change
/// the external type of the function). /// the external type of the function).
/// ///
/// If this `Body` is for the body of a constant, this will just be /// If this `Body` is for the body of a constant, this will just be
/// empty. /// empty.
params: Vec<PatId>, pub params: Vec<PatId>,
/// The `ExprId` of the actual body expression. /// The `ExprId` of the actual body expression.
body_expr: ExprId, pub body_expr: ExprId,
} }
pub type ExprPtr = Either<AstPtr<ast::Expr>, AstPtr<ast::RecordField>>; pub type ExprPtr = Either<AstPtr<ast::Expr>, AstPtr<ast::RecordField>>;
@ -142,7 +143,7 @@ pub struct BodySourceMap {
impl Body { impl Body {
pub(crate) fn body_with_source_map_query( pub(crate) fn body_with_source_map_query(
db: &impl DefDatabase2, db: &impl DefDatabase,
def: DefWithBodyId, def: DefWithBodyId,
) -> (Arc<Body>, Arc<BodySourceMap>) { ) -> (Arc<Body>, Arc<BodySourceMap>) {
let mut params = None; let mut params = None;
@ -160,6 +161,7 @@ impl Body {
(src.file_id, c.module(db), src.value.body()) (src.file_id, c.module(db), src.value.body())
} }
DefWithBodyId::StaticId(s) => { DefWithBodyId::StaticId(s) => {
let s = s.lookup(db);
let src = s.source(db); let src = s.source(db);
(src.file_id, s.module(db), src.value.body()) (src.file_id, s.module(db), src.value.body())
} }
@ -169,34 +171,18 @@ impl Body {
(Arc::new(body), Arc::new(source_map)) (Arc::new(body), Arc::new(source_map))
} }
pub(crate) fn body_query(db: &impl DefDatabase2, def: DefWithBodyId) -> Arc<Body> { pub(crate) fn body_query(db: &impl DefDatabase, def: DefWithBodyId) -> Arc<Body> {
db.body_with_source_map(def).0 db.body_with_source_map(def).0
} }
fn new( fn new(
db: &impl DefDatabase2, db: &impl DefDatabase,
expander: Expander, expander: Expander,
params: Option<ast::ParamList>, params: Option<ast::ParamList>,
body: Option<ast::Expr>, body: Option<ast::Expr>,
) -> (Body, BodySourceMap) { ) -> (Body, BodySourceMap) {
lower::lower(db, expander, params, body) lower::lower(db, expander, params, body)
} }
pub fn params(&self) -> &[PatId] {
&self.params
}
pub fn body_expr(&self) -> ExprId {
self.body_expr
}
pub fn exprs(&self) -> impl Iterator<Item = (ExprId, &Expr)> {
self.exprs.iter()
}
pub fn pats(&self) -> impl Iterator<Item = (PatId, &Pat)> {
self.pats.iter()
}
} }
impl Index<ExprId> for Body { impl Index<ExprId> for Body {

View file

@ -1,4 +1,5 @@
//! FIXME: write short doc here //! Transforms `ast::Expr` into an equivalent `hir_def::expr::Expr`
//! representation.
use hir_expand::{ use hir_expand::{
either::Either, either::Either,
@ -17,7 +18,7 @@ use test_utils::tested_by;
use crate::{ use crate::{
body::{Body, BodySourceMap, Expander, PatPtr}, body::{Body, BodySourceMap, Expander, PatPtr},
builtin_type::{BuiltinFloat, BuiltinInt}, builtin_type::{BuiltinFloat, BuiltinInt},
db::DefDatabase2, db::DefDatabase,
expr::{ expr::{
ArithOp, Array, BinaryOp, BindingAnnotation, CmpOp, Expr, ExprId, Literal, LogicOp, ArithOp, Array, BinaryOp, BindingAnnotation, CmpOp, Expr, ExprId, Literal, LogicOp,
MatchArm, Ordering, Pat, PatId, RecordFieldPat, RecordLitField, Statement, MatchArm, Ordering, Pat, PatId, RecordFieldPat, RecordLitField, Statement,
@ -28,7 +29,7 @@ use crate::{
}; };
pub(super) fn lower( pub(super) fn lower(
db: &impl DefDatabase2, db: &impl DefDatabase,
expander: Expander, expander: Expander,
params: Option<ast::ParamList>, params: Option<ast::ParamList>,
body: Option<ast::Expr>, body: Option<ast::Expr>,
@ -57,7 +58,7 @@ struct ExprCollector<DB> {
impl<'a, DB> ExprCollector<&'a DB> impl<'a, DB> ExprCollector<&'a DB>
where where
DB: DefDatabase2, DB: DefDatabase,
{ {
fn collect( fn collect(
mut self, mut self,

View file

@ -1,4 +1,4 @@
//! FIXME: write short doc here //! Name resolution for expressions.
use std::sync::Arc; use std::sync::Arc;
use hir_expand::name::Name; use hir_expand::name::Name;
@ -7,7 +7,7 @@ use rustc_hash::FxHashMap;
use crate::{ use crate::{
body::Body, body::Body,
db::DefDatabase2, db::DefDatabase,
expr::{Expr, ExprId, Pat, PatId, Statement}, expr::{Expr, ExprId, Pat, PatId, Statement},
DefWithBodyId, DefWithBodyId,
}; };
@ -45,7 +45,7 @@ pub struct ScopeData {
} }
impl ExprScopes { impl ExprScopes {
pub(crate) fn expr_scopes_query(db: &impl DefDatabase2, def: DefWithBodyId) -> Arc<ExprScopes> { pub(crate) fn expr_scopes_query(db: &impl DefDatabase, def: DefWithBodyId) -> Arc<ExprScopes> {
let body = db.body(def); let body = db.body(def);
Arc::new(ExprScopes::new(&*body)) Arc::new(ExprScopes::new(&*body))
} }
@ -54,8 +54,8 @@ impl ExprScopes {
let mut scopes = let mut scopes =
ExprScopes { scopes: Arena::default(), scope_by_expr: FxHashMap::default() }; ExprScopes { scopes: Arena::default(), scope_by_expr: FxHashMap::default() };
let root = scopes.root_scope(); let root = scopes.root_scope();
scopes.add_params_bindings(body, root, body.params()); scopes.add_params_bindings(body, root, &body.params);
compute_expr_scopes(body.body_expr(), body, &mut scopes, root); compute_expr_scopes(body.body_expr, body, &mut scopes, root);
scopes scopes
} }
@ -176,7 +176,7 @@ mod tests {
use ra_syntax::{algo::find_node_at_offset, ast, AstNode}; use ra_syntax::{algo::find_node_at_offset, ast, AstNode};
use test_utils::{assert_eq_text, covers, extract_offset}; use test_utils::{assert_eq_text, covers, extract_offset};
use crate::{db::DefDatabase2, test_db::TestDB, FunctionId, ModuleDefId}; use crate::{db::DefDatabase, test_db::TestDB, FunctionId, ModuleDefId};
fn find_function(db: &TestDB, file_id: FileId) -> FunctionId { fn find_function(db: &TestDB, file_id: FileId) -> FunctionId {
let krate = db.test_crate(); let krate = db.test_crate();

View file

@ -0,0 +1,216 @@
//! Contains basic data about various HIR declarations.
use std::sync::Arc;
use hir_expand::{
name::{self, AsName, Name},
AstId,
};
use ra_syntax::ast::{self, NameOwner, TypeAscriptionOwner};
use crate::{
db::DefDatabase,
type_ref::{Mutability, TypeRef},
AssocItemId, AstItemDef, ConstId, ConstLoc, ContainerId, FunctionId, FunctionLoc, HasSource,
ImplId, Intern, Lookup, StaticId, TraitId, TypeAliasId, TypeAliasLoc,
};
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct FunctionData {
pub name: Name,
pub params: Vec<TypeRef>,
pub ret_type: TypeRef,
/// True if the first param is `self`. This is relevant to decide whether this
/// can be called as a method.
pub has_self_param: bool,
}
impl FunctionData {
pub(crate) fn fn_data_query(db: &impl DefDatabase, func: FunctionId) -> Arc<FunctionData> {
let src = func.lookup(db).source(db);
let name = src.value.name().map(|n| n.as_name()).unwrap_or_else(Name::missing);
let mut params = Vec::new();
let mut has_self_param = false;
if let Some(param_list) = src.value.param_list() {
if let Some(self_param) = param_list.self_param() {
let self_type = if let Some(type_ref) = self_param.ascribed_type() {
TypeRef::from_ast(type_ref)
} else {
let self_type = TypeRef::Path(name::SELF_TYPE.into());
match self_param.kind() {
ast::SelfParamKind::Owned => self_type,
ast::SelfParamKind::Ref => {
TypeRef::Reference(Box::new(self_type), Mutability::Shared)
}
ast::SelfParamKind::MutRef => {
TypeRef::Reference(Box::new(self_type), Mutability::Mut)
}
}
};
params.push(self_type);
has_self_param = true;
}
for param in param_list.params() {
let type_ref = TypeRef::from_ast_opt(param.ascribed_type());
params.push(type_ref);
}
}
let ret_type = if let Some(type_ref) = src.value.ret_type().and_then(|rt| rt.type_ref()) {
TypeRef::from_ast(type_ref)
} else {
TypeRef::unit()
};
let sig = FunctionData { name, params, ret_type, has_self_param };
Arc::new(sig)
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct TypeAliasData {
pub name: Name,
pub type_ref: Option<TypeRef>,
}
impl TypeAliasData {
pub(crate) fn type_alias_data_query(
db: &impl DefDatabase,
typ: TypeAliasId,
) -> Arc<TypeAliasData> {
let node = typ.lookup(db).source(db).value;
let name = node.name().map_or_else(Name::missing, |n| n.as_name());
let type_ref = node.type_ref().map(TypeRef::from_ast);
Arc::new(TypeAliasData { name, type_ref })
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct TraitData {
pub name: Option<Name>,
pub items: Vec<AssocItemId>,
pub auto: bool,
}
impl TraitData {
pub(crate) fn trait_data_query(db: &impl DefDatabase, tr: TraitId) -> Arc<TraitData> {
let src = tr.source(db);
let name = src.value.name().map(|n| n.as_name());
let auto = src.value.is_auto();
let ast_id_map = db.ast_id_map(src.file_id);
let items = if let Some(item_list) = src.value.item_list() {
item_list
.impl_items()
.map(|item_node| match item_node {
ast::ImplItem::FnDef(it) => FunctionLoc {
container: ContainerId::TraitId(tr),
ast_id: AstId::new(src.file_id, ast_id_map.ast_id(&it)),
}
.intern(db)
.into(),
ast::ImplItem::ConstDef(it) => ConstLoc {
container: ContainerId::TraitId(tr),
ast_id: AstId::new(src.file_id, ast_id_map.ast_id(&it)),
}
.intern(db)
.into(),
ast::ImplItem::TypeAliasDef(it) => TypeAliasLoc {
container: ContainerId::TraitId(tr),
ast_id: AstId::new(src.file_id, ast_id_map.ast_id(&it)),
}
.intern(db)
.into(),
})
.collect()
} else {
Vec::new()
};
Arc::new(TraitData { name, items, auto })
}
pub fn associated_types(&self) -> impl Iterator<Item = TypeAliasId> + '_ {
self.items.iter().filter_map(|item| match item {
AssocItemId::TypeAliasId(t) => Some(*t),
_ => None,
})
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ImplData {
pub target_trait: Option<TypeRef>,
pub target_type: TypeRef,
pub items: Vec<AssocItemId>,
pub is_negative: bool,
}
impl ImplData {
pub(crate) fn impl_data_query(db: &impl DefDatabase, id: ImplId) -> Arc<ImplData> {
let src = id.source(db);
let items = db.ast_id_map(src.file_id);
let target_trait = src.value.target_trait().map(TypeRef::from_ast);
let target_type = TypeRef::from_ast_opt(src.value.target_type());
let is_negative = src.value.is_negative();
let items = if let Some(item_list) = src.value.item_list() {
item_list
.impl_items()
.map(|item_node| match item_node {
ast::ImplItem::FnDef(it) => {
let def = FunctionLoc {
container: ContainerId::ImplId(id),
ast_id: AstId::new(src.file_id, items.ast_id(&it)),
}
.intern(db);
def.into()
}
ast::ImplItem::ConstDef(it) => {
let def = ConstLoc {
container: ContainerId::ImplId(id),
ast_id: AstId::new(src.file_id, items.ast_id(&it)),
}
.intern(db);
def.into()
}
ast::ImplItem::TypeAliasDef(it) => {
let def = TypeAliasLoc {
container: ContainerId::ImplId(id),
ast_id: AstId::new(src.file_id, items.ast_id(&it)),
}
.intern(db);
def.into()
}
})
.collect()
} else {
Vec::new()
};
let res = ImplData { target_trait, target_type, items, is_negative };
Arc::new(res)
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ConstData {
pub name: Option<Name>,
pub type_ref: TypeRef,
}
impl ConstData {
pub(crate) fn const_data_query(db: &impl DefDatabase, konst: ConstId) -> Arc<ConstData> {
let node = konst.lookup(db).source(db).value;
Arc::new(ConstData::new(&node))
}
pub(crate) fn static_data_query(db: &impl DefDatabase, konst: StaticId) -> Arc<ConstData> {
let node = konst.lookup(db).source(db).value;
Arc::new(ConstData::new(&node))
}
fn new<N: NameOwner + TypeAscriptionOwner>(node: &N) -> ConstData {
let name = node.name().map(|n| n.as_name());
let type_ref = TypeRef::from_ast_opt(node.ascribed_type());
ConstData { name, type_ref }
}
}

View file

@ -3,43 +3,47 @@ use std::sync::Arc;
use hir_expand::{db::AstDatabase, HirFileId}; use hir_expand::{db::AstDatabase, HirFileId};
use ra_db::{salsa, CrateId, SourceDatabase}; use ra_db::{salsa, CrateId, SourceDatabase};
use ra_syntax::ast; use ra_syntax::{ast, SmolStr};
use crate::{ use crate::{
adt::{EnumData, StructData}, adt::{EnumData, StructData},
attr::Attrs,
body::{scope::ExprScopes, Body, BodySourceMap}, body::{scope::ExprScopes, Body, BodySourceMap},
data::{ConstData, FunctionData, ImplData, TraitData, TypeAliasData},
docs::Documentation,
generics::GenericParams, generics::GenericParams,
impls::ImplData, lang_item::{LangItemTarget, LangItems},
nameres::{ nameres::{
raw::{ImportSourceMap, RawItems}, raw::{ImportSourceMap, RawItems},
CrateDefMap, CrateDefMap,
}, },
traits::TraitData, AttrDefId, ConstId, ConstLoc, DefWithBodyId, EnumId, FunctionId, FunctionLoc, GenericDefId,
DefWithBodyId, EnumId, GenericDefId, ImplId, ItemLoc, StructOrUnionId, TraitId, ImplId, ItemLoc, ModuleId, StaticId, StaticLoc, StructOrUnionId, TraitId, TypeAliasId,
TypeAliasLoc,
}; };
#[salsa::query_group(InternDatabaseStorage)] #[salsa::query_group(InternDatabaseStorage)]
pub trait InternDatabase: SourceDatabase { pub trait InternDatabase: SourceDatabase {
#[salsa::interned] #[salsa::interned]
fn intern_function(&self, loc: crate::FunctionLoc) -> crate::FunctionId; fn intern_function(&self, loc: FunctionLoc) -> FunctionId;
#[salsa::interned] #[salsa::interned]
fn intern_struct_or_union(&self, loc: ItemLoc<ast::StructDef>) -> crate::StructOrUnionId; fn intern_struct_or_union(&self, loc: ItemLoc<ast::StructDef>) -> StructOrUnionId;
#[salsa::interned] #[salsa::interned]
fn intern_enum(&self, loc: ItemLoc<ast::EnumDef>) -> crate::EnumId; fn intern_enum(&self, loc: ItemLoc<ast::EnumDef>) -> EnumId;
#[salsa::interned] #[salsa::interned]
fn intern_const(&self, loc: crate::ConstLoc) -> crate::ConstId; fn intern_const(&self, loc: ConstLoc) -> ConstId;
#[salsa::interned] #[salsa::interned]
fn intern_static(&self, loc: ItemLoc<ast::StaticDef>) -> crate::StaticId; fn intern_static(&self, loc: StaticLoc) -> StaticId;
#[salsa::interned] #[salsa::interned]
fn intern_trait(&self, loc: ItemLoc<ast::TraitDef>) -> crate::TraitId; fn intern_trait(&self, loc: ItemLoc<ast::TraitDef>) -> TraitId;
#[salsa::interned] #[salsa::interned]
fn intern_type_alias(&self, loc: crate::TypeAliasLoc) -> crate::TypeAliasId; fn intern_type_alias(&self, loc: TypeAliasLoc) -> TypeAliasId;
#[salsa::interned] #[salsa::interned]
fn intern_impl(&self, loc: ItemLoc<ast::ImplBlock>) -> crate::ImplId; fn intern_impl(&self, loc: ItemLoc<ast::ImplBlock>) -> ImplId;
} }
#[salsa::query_group(DefDatabase2Storage)] #[salsa::query_group(DefDatabaseStorage)]
pub trait DefDatabase2: InternDatabase + AstDatabase { pub trait DefDatabase: InternDatabase + AstDatabase {
#[salsa::invoke(RawItems::raw_items_with_source_map_query)] #[salsa::invoke(RawItems::raw_items_with_source_map_query)]
fn raw_items_with_source_map( fn raw_items_with_source_map(
&self, &self,
@ -64,6 +68,18 @@ pub trait DefDatabase2: InternDatabase + AstDatabase {
#[salsa::invoke(TraitData::trait_data_query)] #[salsa::invoke(TraitData::trait_data_query)]
fn trait_data(&self, e: TraitId) -> Arc<TraitData>; fn trait_data(&self, e: TraitId) -> Arc<TraitData>;
#[salsa::invoke(TypeAliasData::type_alias_data_query)]
fn type_alias_data(&self, e: TypeAliasId) -> Arc<TypeAliasData>;
#[salsa::invoke(FunctionData::fn_data_query)]
fn function_data(&self, func: FunctionId) -> Arc<FunctionData>;
#[salsa::invoke(ConstData::const_data_query)]
fn const_data(&self, konst: ConstId) -> Arc<ConstData>;
#[salsa::invoke(ConstData::static_data_query)]
fn static_data(&self, konst: StaticId) -> Arc<ConstData>;
#[salsa::invoke(Body::body_with_source_map_query)] #[salsa::invoke(Body::body_with_source_map_query)]
fn body_with_source_map(&self, def: DefWithBodyId) -> (Arc<Body>, Arc<BodySourceMap>); fn body_with_source_map(&self, def: DefWithBodyId) -> (Arc<Body>, Arc<BodySourceMap>);
@ -75,4 +91,21 @@ pub trait DefDatabase2: InternDatabase + AstDatabase {
#[salsa::invoke(GenericParams::generic_params_query)] #[salsa::invoke(GenericParams::generic_params_query)]
fn generic_params(&self, def: GenericDefId) -> Arc<GenericParams>; fn generic_params(&self, def: GenericDefId) -> Arc<GenericParams>;
#[salsa::invoke(Attrs::attrs_query)]
fn attrs(&self, def: AttrDefId) -> Attrs;
#[salsa::invoke(LangItems::module_lang_items_query)]
fn module_lang_items(&self, module: ModuleId) -> Option<Arc<LangItems>>;
#[salsa::invoke(LangItems::crate_lang_items_query)]
fn crate_lang_items(&self, krate: CrateId) -> Arc<LangItems>;
#[salsa::invoke(LangItems::lang_item_query)]
fn lang_item(&self, start_crate: CrateId, item: SmolStr) -> Option<LangItemTarget>;
// FIXME(https://github.com/rust-analyzer/rust-analyzer/issues/2148#issuecomment-550519102)
// Remove this query completely, in favor of `Attrs::docs` method
#[salsa::invoke(Documentation::documentation_query)]
fn documentation(&self, def: AttrDefId) -> Option<Documentation>;
} }

View file

@ -0,0 +1,71 @@
//! Defines hir documentation.
//!
//! This really shouldn't exist, instead, we should deshugar doc comments into attributes, see
//! https://github.com/rust-analyzer/rust-analyzer/issues/2148#issuecomment-550519102
use std::sync::Arc;
use hir_expand::either::Either;
use ra_syntax::ast;
use crate::{db::DefDatabase, AdtId, AstItemDef, AttrDefId, HasChildSource, HasSource, Lookup};
/// Holds documentation
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Documentation(Arc<str>);
impl Into<String> for Documentation {
fn into(self) -> String {
self.as_str().to_owned()
}
}
impl Documentation {
fn new(s: &str) -> Documentation {
Documentation(s.into())
}
pub fn as_str(&self) -> &str {
&*self.0
}
pub(crate) fn documentation_query(
db: &impl DefDatabase,
def: AttrDefId,
) -> Option<Documentation> {
match def {
AttrDefId::ModuleId(module) => {
let def_map = db.crate_def_map(module.krate);
let src = def_map[module.module_id].declaration_source(db)?;
docs_from_ast(&src.value)
}
AttrDefId::StructFieldId(it) => {
let src = it.parent.child_source(db);
match &src.value[it.local_id] {
Either::A(_tuple) => None,
Either::B(record) => docs_from_ast(record),
}
}
AttrDefId::AdtId(it) => match it {
AdtId::StructId(it) => docs_from_ast(&it.0.source(db).value),
AdtId::EnumId(it) => docs_from_ast(&it.source(db).value),
AdtId::UnionId(it) => docs_from_ast(&it.0.source(db).value),
},
AttrDefId::EnumVariantId(it) => {
let src = it.parent.child_source(db);
docs_from_ast(&src.value[it.local_id])
}
AttrDefId::TraitId(it) => docs_from_ast(&it.source(db).value),
AttrDefId::MacroDefId(it) => docs_from_ast(&it.ast_id.to_node(db)),
AttrDefId::ConstId(it) => docs_from_ast(&it.lookup(db).source(db).value),
AttrDefId::StaticId(it) => docs_from_ast(&it.lookup(db).source(db).value),
AttrDefId::FunctionId(it) => docs_from_ast(&it.lookup(db).source(db).value),
AttrDefId::TypeAliasId(it) => docs_from_ast(&it.lookup(db).source(db).value),
AttrDefId::ImplId(_) => None,
}
}
}
pub(crate) fn docs_from_ast(node: &impl ast::DocCommentsOwner) -> Option<Documentation> {
node.doc_comment_text().map(|it| Documentation::new(&it))
}

View file

@ -8,7 +8,7 @@ use hir_expand::name::{self, AsName, Name};
use ra_syntax::ast::{self, NameOwner, TypeBoundsOwner, TypeParamsOwner}; use ra_syntax::ast::{self, NameOwner, TypeBoundsOwner, TypeParamsOwner};
use crate::{ use crate::{
db::DefDatabase2, db::DefDatabase,
type_ref::{TypeBound, TypeRef}, type_ref::{TypeBound, TypeRef},
AdtId, AstItemDef, ContainerId, GenericDefId, HasSource, Lookup, AdtId, AstItemDef, ContainerId, GenericDefId, HasSource, Lookup,
}; };
@ -42,7 +42,7 @@ pub struct WherePredicate {
impl GenericParams { impl GenericParams {
pub(crate) fn generic_params_query( pub(crate) fn generic_params_query(
db: &impl DefDatabase2, db: &impl DefDatabase,
def: GenericDefId, def: GenericDefId,
) -> Arc<GenericParams> { ) -> Arc<GenericParams> {
let parent_generics = parent_generic_def(db, def).map(|it| db.generic_params(it)); let parent_generics = parent_generic_def(db, def).map(|it| db.generic_params(it));
@ -50,7 +50,7 @@ impl GenericParams {
} }
fn new( fn new(
db: &impl DefDatabase2, db: &impl DefDatabase,
def: GenericDefId, def: GenericDefId,
parent_params: Option<Arc<GenericParams>>, parent_params: Option<Arc<GenericParams>>,
) -> GenericParams { ) -> GenericParams {
@ -168,7 +168,7 @@ impl GenericParams {
} }
} }
fn parent_generic_def(db: &impl DefDatabase2, def: GenericDefId) -> Option<GenericDefId> { fn parent_generic_def(db: &impl DefDatabase, def: GenericDefId) -> Option<GenericDefId> {
let container = match def { let container = match def {
GenericDefId::FunctionId(it) => it.lookup(db).container, GenericDefId::FunctionId(it) => it.lookup(db).container,
GenericDefId::TypeAliasId(it) => it.lookup(db).container, GenericDefId::TypeAliasId(it) => it.lookup(db).container,

View file

@ -1,86 +0,0 @@
//! Defines hir-level representation of impls.
//!
//! The handling is similar, but is not quite the same as for other items,
//! because `impl`s don't have names.
use std::sync::Arc;
use hir_expand::AstId;
use ra_syntax::ast;
use crate::{
db::DefDatabase2, type_ref::TypeRef, AssocItemId, AstItemDef, ConstLoc, ContainerId,
FunctionLoc, ImplId, Intern, TypeAliasLoc,
};
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ImplData {
target_trait: Option<TypeRef>,
target_type: TypeRef,
items: Vec<AssocItemId>,
negative: bool,
}
impl ImplData {
pub(crate) fn impl_data_query(db: &impl DefDatabase2, id: ImplId) -> Arc<ImplData> {
let src = id.source(db);
let items = db.ast_id_map(src.file_id);
let target_trait = src.value.target_trait().map(TypeRef::from_ast);
let target_type = TypeRef::from_ast_opt(src.value.target_type());
let negative = src.value.is_negative();
let items = if let Some(item_list) = src.value.item_list() {
item_list
.impl_items()
.map(|item_node| match item_node {
ast::ImplItem::FnDef(it) => {
let def = FunctionLoc {
container: ContainerId::ImplId(id),
ast_id: AstId::new(src.file_id, items.ast_id(&it)),
}
.intern(db);
def.into()
}
ast::ImplItem::ConstDef(it) => {
let def = ConstLoc {
container: ContainerId::ImplId(id),
ast_id: AstId::new(src.file_id, items.ast_id(&it)),
}
.intern(db);
def.into()
}
ast::ImplItem::TypeAliasDef(it) => {
let def = TypeAliasLoc {
container: ContainerId::ImplId(id),
ast_id: AstId::new(src.file_id, items.ast_id(&it)),
}
.intern(db);
def.into()
}
})
.collect()
} else {
Vec::new()
};
let res = ImplData { target_trait, target_type, items, negative };
Arc::new(res)
}
pub fn target_trait(&self) -> Option<&TypeRef> {
self.target_trait.as_ref()
}
pub fn target_type(&self) -> &TypeRef {
&self.target_type
}
pub fn items(&self) -> &[AssocItemId] {
&self.items
}
pub fn is_negative(&self) -> bool {
self.negative
}
}

View file

@ -0,0 +1,121 @@
//! Collects lang items: items marked with `#[lang = "..."]` attribute.
//!
//! This attribute to tell the compiler about semi built-in std library
//! features, such as Fn family of traits.
use std::sync::Arc;
use ra_syntax::SmolStr;
use rustc_hash::FxHashMap;
use crate::{
db::DefDatabase, AdtId, AttrDefId, CrateId, EnumId, FunctionId, ImplId, ModuleDefId, ModuleId,
StaticId, StructId, TraitId,
};
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum LangItemTarget {
EnumId(EnumId),
FunctionId(FunctionId),
ImplBlockId(ImplId),
StaticId(StaticId),
StructId(StructId),
TraitId(TraitId),
}
#[derive(Default, Debug, Clone, PartialEq, Eq)]
pub struct LangItems {
items: FxHashMap<SmolStr, LangItemTarget>,
}
impl LangItems {
    /// Looks up the target registered for the given lang item name.
    // NOTE: the original spelled out `<'a>(&'a self, …) -> Option<&'a …>`;
    // lifetime elision produces the exact same signature, so the explicit
    // lifetime was redundant (clippy: needless_lifetimes).
    pub fn target(&self, item: &str) -> Option<&LangItemTarget> {
        self.items.get(item)
    }

    /// Salsa query. This will look for lang items in a specific crate,
    /// merging the per-module results of `module_lang_items`.
    pub(crate) fn crate_lang_items_query(db: &impl DefDatabase, krate: CrateId) -> Arc<LangItems> {
        let mut lang_items = LangItems::default();

        let crate_def_map = db.crate_def_map(krate);

        crate_def_map
            .modules
            .iter()
            .filter_map(|(module_id, _)| db.module_lang_items(ModuleId { krate, module_id }))
            .for_each(|it| lang_items.items.extend(it.items.iter().map(|(k, v)| (k.clone(), *v))));

        Arc::new(lang_items)
    }

    /// Salsa query. Collects the lang items declared directly in one module.
    /// Returns `None` when the module declares none, so the common case
    /// shares no allocation.
    pub(crate) fn module_lang_items_query(
        db: &impl DefDatabase,
        module: ModuleId,
    ) -> Option<Arc<LangItems>> {
        let mut lang_items = LangItems::default();
        lang_items.collect_lang_items(db, module);

        if lang_items.items.is_empty() {
            None
        } else {
            Some(Arc::new(lang_items))
        }
    }

    /// Salsa query. Look for a lang item, starting from the specified crate and recursively
    /// traversing its dependencies.
    pub(crate) fn lang_item_query(
        db: &impl DefDatabase,
        start_crate: CrateId,
        item: SmolStr,
    ) -> Option<LangItemTarget> {
        let lang_items = db.crate_lang_items(start_crate);
        if let Some(target) = lang_items.items.get(&item) {
            return Some(*target);
        }
        // Not defined in `start_crate` itself: search its dependencies
        // (each `lang_item` call recurses through that crate's deps in turn).
        db.crate_graph()
            .dependencies(start_crate)
            .find_map(|dep| db.lang_item(dep.crate_id, item.clone()))
    }

    /// Walks one module's impls and declarations and records any lang items.
    fn collect_lang_items(&mut self, db: &impl DefDatabase, module: ModuleId) {
        // Look for impl targets
        let def_map = db.crate_def_map(module.krate);
        let module_data = &def_map[module.module_id];
        for &impl_block in module_data.impls.iter() {
            self.collect_lang_item(db, impl_block, LangItemTarget::ImplBlockId)
        }

        for def in module_data.scope.declarations() {
            match def {
                ModuleDefId::TraitId(trait_) => {
                    self.collect_lang_item(db, trait_, LangItemTarget::TraitId)
                }
                ModuleDefId::AdtId(AdtId::EnumId(e)) => {
                    self.collect_lang_item(db, e, LangItemTarget::EnumId)
                }
                ModuleDefId::AdtId(AdtId::StructId(s)) => {
                    self.collect_lang_item(db, s, LangItemTarget::StructId)
                }
                ModuleDefId::FunctionId(f) => {
                    self.collect_lang_item(db, f, LangItemTarget::FunctionId)
                }
                ModuleDefId::StaticId(s) => self.collect_lang_item(db, s, LangItemTarget::StaticId),
                _ => {}
            }
        }
    }

    /// Registers `item` under its `#[lang = "..."]` name, if it carries one.
    /// The first definition seen for a given name wins; later duplicates are
    /// ignored (`or_insert_with`).
    fn collect_lang_item<T>(
        &mut self,
        db: &impl DefDatabase,
        item: T,
        constructor: fn(T) -> LangItemTarget,
    ) where
        T: Into<AttrDefId> + Copy,
    {
        let attrs = db.attrs(item.into());
        if let Some(lang_item_name) = attrs.by_key("lang").string_value() {
            self.items.entry(lang_item_name.clone()).or_insert_with(|| constructor(item));
        }
    }
}

View file

@ -8,121 +8,56 @@
//! actually true. //! actually true.
pub mod db; pub mod db;
pub mod attr; pub mod attr;
pub mod path; pub mod path;
pub mod type_ref; pub mod type_ref;
pub mod builtin_type; pub mod builtin_type;
pub mod adt;
pub mod impls;
pub mod diagnostics; pub mod diagnostics;
pub mod per_ns;
pub mod adt;
pub mod data;
pub mod generics;
pub mod lang_item;
pub mod docs;
pub mod expr; pub mod expr;
pub mod body; pub mod body;
pub mod generics;
pub mod traits;
pub mod resolver; pub mod resolver;
mod trace;
mod nameres;
#[cfg(test)] #[cfg(test)]
mod test_db; mod test_db;
#[cfg(test)] #[cfg(test)]
mod marks; mod marks;
// FIXME: this should be private
pub mod nameres;
use std::hash::{Hash, Hasher}; use std::hash::{Hash, Hasher};
use hir_expand::{ast_id_map::FileAstId, db::AstDatabase, AstId, HirFileId, Source}; use hir_expand::{ast_id_map::FileAstId, db::AstDatabase, AstId, HirFileId, MacroDefId, Source};
use ra_arena::{impl_arena_id, RawId}; use ra_arena::{impl_arena_id, map::ArenaMap, RawId};
use ra_db::{salsa, CrateId, FileId}; use ra_db::{impl_intern_key, salsa, CrateId};
use ra_syntax::{ast, AstNode, SyntaxNode}; use ra_syntax::{ast, AstNode};
use crate::{builtin_type::BuiltinType, db::InternDatabase}; use crate::{builtin_type::BuiltinType, db::InternDatabase};
pub enum ModuleSource { #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
SourceFile(ast::SourceFile), pub struct LocalImportId(RawId);
Module(ast::Module), impl_arena_id!(LocalImportId);
}
impl ModuleSource {
pub fn new(
db: &impl db::DefDatabase2,
file_id: Option<FileId>,
decl_id: Option<AstId<ast::Module>>,
) -> ModuleSource {
match (file_id, decl_id) {
(Some(file_id), _) => {
let source_file = db.parse(file_id).tree();
ModuleSource::SourceFile(source_file)
}
(None, Some(item_id)) => {
let module = item_id.to_node(db);
assert!(module.item_list().is_some(), "expected inline module");
ModuleSource::Module(module)
}
(None, None) => panic!(),
}
}
// FIXME: this methods do not belong here
pub fn from_position(
db: &impl db::DefDatabase2,
position: ra_db::FilePosition,
) -> ModuleSource {
let parse = db.parse(position.file_id);
match &ra_syntax::algo::find_node_at_offset::<ast::Module>(
parse.tree().syntax(),
position.offset,
) {
Some(m) if !m.has_semi() => ModuleSource::Module(m.clone()),
_ => {
let source_file = parse.tree();
ModuleSource::SourceFile(source_file)
}
}
}
pub fn from_child_node(db: &impl db::DefDatabase2, child: Source<&SyntaxNode>) -> ModuleSource {
if let Some(m) =
child.value.ancestors().filter_map(ast::Module::cast).find(|it| !it.has_semi())
{
ModuleSource::Module(m)
} else {
let file_id = child.file_id.original_file(db);
let source_file = db.parse(file_id).tree();
ModuleSource::SourceFile(source_file)
}
}
pub fn from_file_id(db: &impl db::DefDatabase2, file_id: FileId) -> ModuleSource {
let source_file = db.parse(file_id).tree();
ModuleSource::SourceFile(source_file)
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ModuleId { pub struct ModuleId {
pub krate: CrateId, pub krate: CrateId,
pub module_id: CrateModuleId, pub module_id: LocalModuleId,
} }
/// An ID of a module, **local** to a specific crate /// An ID of a module, **local** to a specific crate
// FIXME: rename to `LocalModuleId`. // FIXME: rename to `LocalModuleId`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct CrateModuleId(RawId); pub struct LocalModuleId(RawId);
impl_arena_id!(CrateModuleId); impl_arena_id!(LocalModuleId);
macro_rules! impl_intern_key {
($name:ident) => {
impl salsa::InternKey for $name {
fn from_intern_id(v: salsa::InternId) -> Self {
$name(v)
}
fn as_intern_id(&self) -> salsa::InternId {
self.0
}
}
};
}
#[derive(Debug)] #[derive(Debug)]
pub struct ItemLoc<N: AstNode> { pub struct ItemLoc<N: AstNode> {
@ -162,25 +97,10 @@ impl<'a, DB> LocationCtx<&'a DB> {
} }
} }
impl<'a, DB: AstDatabase + InternDatabase> LocationCtx<&'a DB> {
pub fn to_def<N, DEF>(self, ast: &N) -> DEF
where
N: AstNode,
DEF: AstItemDef<N>,
{
DEF::from_ast(self, ast)
}
}
pub trait AstItemDef<N: AstNode>: salsa::InternKey + Clone { pub trait AstItemDef<N: AstNode>: salsa::InternKey + Clone {
fn intern(db: &impl InternDatabase, loc: ItemLoc<N>) -> Self; fn intern(db: &impl InternDatabase, loc: ItemLoc<N>) -> Self;
fn lookup_intern(self, db: &impl InternDatabase) -> ItemLoc<N>; fn lookup_intern(self, db: &impl InternDatabase) -> ItemLoc<N>;
fn from_ast(ctx: LocationCtx<&(impl AstDatabase + InternDatabase)>, ast: &N) -> Self {
let items = ctx.db.ast_id_map(ctx.file_id);
let item_id = items.ast_id(ast);
Self::from_ast_id(ctx, item_id)
}
fn from_ast_id(ctx: LocationCtx<&impl InternDatabase>, ast_id: FileAstId<N>) -> Self { fn from_ast_id(ctx: LocationCtx<&impl InternDatabase>, ast_id: FileAstId<N>) -> Self {
let loc = ItemLoc { module: ctx.module, ast_id: AstId::new(ctx.file_id, ast_id) }; let loc = ItemLoc { module: ctx.module, ast_id: AstId::new(ctx.file_id, ast_id) };
Self::intern(ctx.db, loc) Self::intern(ctx.db, loc)
@ -208,14 +128,14 @@ pub struct FunctionLoc {
impl Intern for FunctionLoc { impl Intern for FunctionLoc {
type ID = FunctionId; type ID = FunctionId;
fn intern(self, db: &impl db::DefDatabase2) -> FunctionId { fn intern(self, db: &impl db::DefDatabase) -> FunctionId {
db.intern_function(self) db.intern_function(self)
} }
} }
impl Lookup for FunctionId { impl Lookup for FunctionId {
type Data = FunctionLoc; type Data = FunctionLoc;
fn lookup(&self, db: &impl db::DefDatabase2) -> FunctionLoc { fn lookup(&self, db: &impl db::DefDatabase) -> FunctionLoc {
db.lookup_intern_function(*self) db.lookup_intern_function(*self)
} }
} }
@ -271,16 +191,10 @@ pub struct EnumVariantId {
pub struct LocalEnumVariantId(RawId); pub struct LocalEnumVariantId(RawId);
impl_arena_id!(LocalEnumVariantId); impl_arena_id!(LocalEnumVariantId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum VariantId {
EnumVariantId(EnumVariantId),
StructId(StructId),
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct StructFieldId { pub struct StructFieldId {
parent: VariantId, pub parent: VariantId,
local_id: LocalStructFieldId, pub local_id: LocalStructFieldId,
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@ -298,14 +212,14 @@ pub struct ConstLoc {
impl Intern for ConstLoc { impl Intern for ConstLoc {
type ID = ConstId; type ID = ConstId;
fn intern(self, db: &impl db::DefDatabase2) -> ConstId { fn intern(self, db: &impl db::DefDatabase) -> ConstId {
db.intern_const(self) db.intern_const(self)
} }
} }
impl Lookup for ConstId { impl Lookup for ConstId {
type Data = ConstLoc; type Data = ConstLoc;
fn lookup(&self, db: &impl db::DefDatabase2) -> ConstLoc { fn lookup(&self, db: &impl db::DefDatabase) -> ConstLoc {
db.lookup_intern_const(*self) db.lookup_intern_const(*self)
} }
} }
@ -313,12 +227,24 @@ impl Lookup for ConstId {
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct StaticId(salsa::InternId); pub struct StaticId(salsa::InternId);
impl_intern_key!(StaticId); impl_intern_key!(StaticId);
impl AstItemDef<ast::StaticDef> for StaticId {
fn intern(db: &impl InternDatabase, loc: ItemLoc<ast::StaticDef>) -> Self { #[derive(Debug, Clone, PartialEq, Eq, Hash)]
db.intern_static(loc) pub struct StaticLoc {
pub container: ModuleId,
pub ast_id: AstId<ast::StaticDef>,
}
impl Intern for StaticLoc {
type ID = StaticId;
fn intern(self, db: &impl db::DefDatabase) -> StaticId {
db.intern_static(self)
} }
fn lookup_intern(self, db: &impl InternDatabase) -> ItemLoc<ast::StaticDef> { }
db.lookup_intern_static(self)
impl Lookup for StaticId {
type Data = StaticLoc;
fn lookup(&self, db: &impl db::DefDatabase) -> StaticLoc {
db.lookup_intern_static(*self)
} }
} }
@ -346,14 +272,14 @@ pub struct TypeAliasLoc {
impl Intern for TypeAliasLoc { impl Intern for TypeAliasLoc {
type ID = TypeAliasId; type ID = TypeAliasId;
fn intern(self, db: &impl db::DefDatabase2) -> TypeAliasId { fn intern(self, db: &impl db::DefDatabase) -> TypeAliasId {
db.intern_type_alias(self) db.intern_type_alias(self)
} }
} }
impl Lookup for TypeAliasId { impl Lookup for TypeAliasId {
type Data = TypeAliasLoc; type Data = TypeAliasLoc;
fn lookup(&self, db: &impl db::DefDatabase2) -> TypeAliasLoc { fn lookup(&self, db: &impl db::DefDatabase) -> TypeAliasLoc {
db.lookup_intern_type_alias(*self) db.lookup_intern_type_alias(*self)
} }
} }
@ -476,22 +402,58 @@ impl_froms!(
ConstId ConstId
); );
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum AttrDefId {
ModuleId(ModuleId),
StructFieldId(StructFieldId),
AdtId(AdtId),
FunctionId(FunctionId),
EnumVariantId(EnumVariantId),
StaticId(StaticId),
ConstId(ConstId),
TraitId(TraitId),
TypeAliasId(TypeAliasId),
MacroDefId(MacroDefId),
ImplId(ImplId),
}
impl_froms!(
AttrDefId: ModuleId,
StructFieldId,
AdtId(StructId, EnumId, UnionId),
EnumVariantId,
StaticId,
ConstId,
FunctionId,
TraitId,
TypeAliasId,
MacroDefId,
ImplId
);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum VariantId {
EnumVariantId(EnumVariantId),
StructId(StructId),
}
impl_froms!(VariantId: EnumVariantId, StructId);
trait Intern { trait Intern {
type ID; type ID;
fn intern(self, db: &impl db::DefDatabase2) -> Self::ID; fn intern(self, db: &impl db::DefDatabase) -> Self::ID;
} }
pub trait Lookup { pub trait Lookup {
type Data; type Data;
fn lookup(&self, db: &impl db::DefDatabase2) -> Self::Data; fn lookup(&self, db: &impl db::DefDatabase) -> Self::Data;
} }
pub trait HasModule { pub trait HasModule {
fn module(&self, db: &impl db::DefDatabase2) -> ModuleId; fn module(&self, db: &impl db::DefDatabase) -> ModuleId;
} }
impl HasModule for FunctionLoc { impl HasModule for FunctionLoc {
fn module(&self, db: &impl db::DefDatabase2) -> ModuleId { fn module(&self, db: &impl db::DefDatabase) -> ModuleId {
match self.container { match self.container {
ContainerId::ModuleId(it) => it, ContainerId::ModuleId(it) => it,
ContainerId::ImplId(it) => it.module(db), ContainerId::ImplId(it) => it.module(db),
@ -501,7 +463,7 @@ impl HasModule for FunctionLoc {
} }
impl HasModule for TypeAliasLoc { impl HasModule for TypeAliasLoc {
fn module(&self, db: &impl db::DefDatabase2) -> ModuleId { fn module(&self, db: &impl db::DefDatabase) -> ModuleId {
match self.container { match self.container {
ContainerId::ModuleId(it) => it, ContainerId::ModuleId(it) => it,
ContainerId::ImplId(it) => it.module(db), ContainerId::ImplId(it) => it.module(db),
@ -511,7 +473,7 @@ impl HasModule for TypeAliasLoc {
} }
impl HasModule for ConstLoc { impl HasModule for ConstLoc {
fn module(&self, db: &impl db::DefDatabase2) -> ModuleId { fn module(&self, db: &impl db::DefDatabase) -> ModuleId {
match self.container { match self.container {
ContainerId::ModuleId(it) => it, ContainerId::ModuleId(it) => it,
ContainerId::ImplId(it) => it.module(db), ContainerId::ImplId(it) => it.module(db),
@ -520,15 +482,31 @@ impl HasModule for ConstLoc {
} }
} }
impl HasModule for AdtId {
fn module(&self, db: &impl db::DefDatabase) -> ModuleId {
match self {
AdtId::StructId(it) => it.0.module(db),
AdtId::UnionId(it) => it.0.module(db),
AdtId::EnumId(it) => it.module(db),
}
}
}
impl HasModule for StaticLoc {
fn module(&self, _db: &impl db::DefDatabase) -> ModuleId {
self.container
}
}
pub trait HasSource { pub trait HasSource {
type Value; type Value;
fn source(&self, db: &impl db::DefDatabase2) -> Source<Self::Value>; fn source(&self, db: &impl db::DefDatabase) -> Source<Self::Value>;
} }
impl HasSource for FunctionLoc { impl HasSource for FunctionLoc {
type Value = ast::FnDef; type Value = ast::FnDef;
fn source(&self, db: &impl db::DefDatabase2) -> Source<ast::FnDef> { fn source(&self, db: &impl db::DefDatabase) -> Source<ast::FnDef> {
let node = self.ast_id.to_node(db); let node = self.ast_id.to_node(db);
Source::new(self.ast_id.file_id(), node) Source::new(self.ast_id.file_id(), node)
} }
@ -537,7 +515,7 @@ impl HasSource for FunctionLoc {
impl HasSource for TypeAliasLoc { impl HasSource for TypeAliasLoc {
type Value = ast::TypeAliasDef; type Value = ast::TypeAliasDef;
fn source(&self, db: &impl db::DefDatabase2) -> Source<ast::TypeAliasDef> { fn source(&self, db: &impl db::DefDatabase) -> Source<ast::TypeAliasDef> {
let node = self.ast_id.to_node(db); let node = self.ast_id.to_node(db);
Source::new(self.ast_id.file_id(), node) Source::new(self.ast_id.file_id(), node)
} }
@ -546,8 +524,26 @@ impl HasSource for TypeAliasLoc {
impl HasSource for ConstLoc { impl HasSource for ConstLoc {
type Value = ast::ConstDef; type Value = ast::ConstDef;
fn source(&self, db: &impl db::DefDatabase2) -> Source<ast::ConstDef> { fn source(&self, db: &impl db::DefDatabase) -> Source<ast::ConstDef> {
let node = self.ast_id.to_node(db); let node = self.ast_id.to_node(db);
Source::new(self.ast_id.file_id(), node) Source::new(self.ast_id.file_id(), node)
} }
} }
impl HasSource for StaticLoc {
type Value = ast::StaticDef;
fn source(&self, db: &impl db::DefDatabase) -> Source<ast::StaticDef> {
let node = self.ast_id.to_node(db);
Source::new(self.ast_id.file_id(), node)
}
}
pub trait HasChildSource {
type ChildId;
type Value;
fn child_source(
&self,
db: &impl db::DefDatabase,
) -> Source<ArenaMap<Self::ChildId, Self::Value>>;
}

View file

@ -47,8 +47,7 @@
//! path and, upon success, we run macro expansion and "collect module" phase on //! path and, upon success, we run macro expansion and "collect module" phase on
//! the result //! the result
pub mod raw; pub(crate) mod raw;
pub mod per_ns;
mod collector; mod collector;
mod mod_resolution; mod mod_resolution;
mod path_resolution; mod path_resolution;
@ -58,70 +57,63 @@ mod tests;
use std::sync::Arc; use std::sync::Arc;
use hir_expand::{ast_id_map::FileAstId, diagnostics::DiagnosticSink, name::Name, MacroDefId}; use hir_expand::{
ast_id_map::FileAstId, diagnostics::DiagnosticSink, either::Either, name::Name, MacroDefId,
Source,
};
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use ra_arena::Arena; use ra_arena::Arena;
use ra_db::{CrateId, Edition, FileId}; use ra_db::{CrateId, Edition, FileId};
use ra_prof::profile; use ra_prof::profile;
use ra_syntax::ast; use ra_syntax::ast;
use rustc_hash::{FxHashMap, FxHashSet}; use rustc_hash::FxHashMap;
use crate::{ use crate::{
builtin_type::BuiltinType, builtin_type::BuiltinType,
db::DefDatabase2, db::DefDatabase,
nameres::{ nameres::{diagnostics::DefDiagnostic, path_resolution::ResolveMode},
diagnostics::DefDiagnostic, path_resolution::ResolveMode, per_ns::PerNs, raw::ImportId,
},
path::Path, path::Path,
AstId, CrateModuleId, FunctionId, ImplId, ModuleDefId, ModuleId, TraitId, per_ns::PerNs,
AstId, FunctionId, ImplId, LocalImportId, LocalModuleId, ModuleDefId, ModuleId, TraitId,
}; };
/// Contains all top-level defs from a macro-expanded crate /// Contains all top-level defs from a macro-expanded crate
#[derive(Debug, PartialEq, Eq)] #[derive(Debug, PartialEq, Eq)]
pub struct CrateDefMap { pub struct CrateDefMap {
krate: CrateId, pub root: LocalModuleId,
edition: Edition, pub modules: Arena<LocalModuleId, ModuleData>,
pub(crate) krate: CrateId,
/// The prelude module for this crate. This either comes from an import /// The prelude module for this crate. This either comes from an import
/// marked with the `prelude_import` attribute, or (in the normal case) from /// marked with the `prelude_import` attribute, or (in the normal case) from
/// a dependency (`std` or `core`). /// a dependency (`std` or `core`).
prelude: Option<ModuleId>, pub(crate) prelude: Option<ModuleId>,
extern_prelude: FxHashMap<Name, ModuleDefId>, pub(crate) extern_prelude: FxHashMap<Name, ModuleDefId>,
root: CrateModuleId,
modules: Arena<CrateModuleId, ModuleData>,
/// Some macros are not well-behavior, which leads to infinite loop
/// e.g. macro_rules! foo { ($ty:ty) => { foo!($ty); } }
/// We mark it down and skip it in collector
///
/// FIXME:
/// Right now it only handle a poison macro in a single crate,
/// such that if other crate try to call that macro,
/// the whole process will do again until it became poisoned in that crate.
/// We should handle this macro set globally
/// However, do we want to put it as a global variable?
poison_macros: FxHashSet<MacroDefId>,
edition: Edition,
diagnostics: Vec<DefDiagnostic>, diagnostics: Vec<DefDiagnostic>,
} }
impl std::ops::Index<CrateModuleId> for CrateDefMap { impl std::ops::Index<LocalModuleId> for CrateDefMap {
type Output = ModuleData; type Output = ModuleData;
fn index(&self, id: CrateModuleId) -> &ModuleData { fn index(&self, id: LocalModuleId) -> &ModuleData {
&self.modules[id] &self.modules[id]
} }
} }
#[derive(Default, Debug, PartialEq, Eq)] #[derive(Default, Debug, PartialEq, Eq)]
pub struct ModuleData { pub struct ModuleData {
pub parent: Option<CrateModuleId>, pub parent: Option<LocalModuleId>,
pub children: FxHashMap<Name, CrateModuleId>, pub children: FxHashMap<Name, LocalModuleId>,
pub scope: ModuleScope, pub scope: ModuleScope,
// FIXME: these can't be both null, we need a three-state enum here.
/// None for root /// None for root
pub declaration: Option<AstId<ast::Module>>, pub declaration: Option<AstId<ast::Module>>,
/// None for inline modules. /// None for inline modules.
/// ///
/// Note that non-inline modules, by definition, live inside non-macro file. /// Note that non-inline modules, by definition, live inside non-macro file.
pub definition: Option<FileId>, pub definition: Option<FileId>,
pub impls: Vec<ImplId>, pub impls: Vec<ImplId>,
} }
@ -177,7 +169,7 @@ impl ModuleScope {
pub fn macros<'a>(&'a self) -> impl Iterator<Item = (&'a Name, MacroDefId)> + 'a { pub fn macros<'a>(&'a self) -> impl Iterator<Item = (&'a Name, MacroDefId)> + 'a {
self.items self.items
.iter() .iter()
.filter_map(|(name, res)| res.def.get_macros().map(|macro_| (name, macro_))) .filter_map(|(name, res)| res.def.take_macros().map(|macro_| (name, macro_)))
} }
/// Iterate over all legacy textual scoped macros visable at the end of the module /// Iterate over all legacy textual scoped macros visable at the end of the module
@ -207,21 +199,21 @@ pub struct Resolution {
/// None for unresolved /// None for unresolved
pub def: PerNs, pub def: PerNs,
/// ident by which this is imported into local scope. /// ident by which this is imported into local scope.
pub import: Option<ImportId>, pub import: Option<LocalImportId>,
} }
impl CrateDefMap { impl CrateDefMap {
pub(crate) fn crate_def_map_query( pub(crate) fn crate_def_map_query(
// Note that this doesn't have `+ AstDatabase`! // Note that this doesn't have `+ AstDatabase`!
// This gurantess that `CrateDefMap` is stable across reparses. // This gurantess that `CrateDefMap` is stable across reparses.
db: &impl DefDatabase2, db: &impl DefDatabase,
krate: CrateId, krate: CrateId,
) -> Arc<CrateDefMap> { ) -> Arc<CrateDefMap> {
let _p = profile("crate_def_map_query"); let _p = profile("crate_def_map_query");
let def_map = { let def_map = {
let crate_graph = db.crate_graph(); let crate_graph = db.crate_graph();
let edition = crate_graph.edition(krate); let edition = crate_graph.edition(krate);
let mut modules: Arena<CrateModuleId, ModuleData> = Arena::default(); let mut modules: Arena<LocalModuleId, ModuleData> = Arena::default();
let root = modules.alloc(ModuleData::default()); let root = modules.alloc(ModuleData::default());
CrateDefMap { CrateDefMap {
krate, krate,
@ -230,7 +222,6 @@ impl CrateDefMap {
prelude: None, prelude: None,
root, root,
modules, modules,
poison_macros: FxHashSet::default(),
diagnostics: Vec::new(), diagnostics: Vec::new(),
} }
}; };
@ -238,50 +229,53 @@ impl CrateDefMap {
Arc::new(def_map) Arc::new(def_map)
} }
pub fn krate(&self) -> CrateId {
self.krate
}
pub fn root(&self) -> CrateModuleId {
self.root
}
pub fn prelude(&self) -> Option<ModuleId> {
self.prelude
}
pub fn extern_prelude(&self) -> &FxHashMap<Name, ModuleDefId> {
&self.extern_prelude
}
pub fn add_diagnostics( pub fn add_diagnostics(
&self, &self,
db: &impl DefDatabase2, db: &impl DefDatabase,
module: CrateModuleId, module: LocalModuleId,
sink: &mut DiagnosticSink, sink: &mut DiagnosticSink,
) { ) {
self.diagnostics.iter().for_each(|it| it.add_to(db, module, sink)) self.diagnostics.iter().for_each(|it| it.add_to(db, module, sink))
} }
pub fn resolve_path( pub fn modules_for_file(&self, file_id: FileId) -> impl Iterator<Item = LocalModuleId> + '_ {
self.modules
.iter()
.filter(move |(_id, data)| data.definition == Some(file_id))
.map(|(id, _data)| id)
}
pub(crate) fn resolve_path(
&self, &self,
db: &impl DefDatabase2, db: &impl DefDatabase,
original_module: CrateModuleId, original_module: LocalModuleId,
path: &Path, path: &Path,
) -> (PerNs, Option<usize>) { ) -> (PerNs, Option<usize>) {
let res = self.resolve_path_fp_with_macro(db, ResolveMode::Other, original_module, path); let res = self.resolve_path_fp_with_macro(db, ResolveMode::Other, original_module, path);
(res.resolved_def, res.segment_index) (res.resolved_def, res.segment_index)
} }
}
pub fn modules(&self) -> impl Iterator<Item = CrateModuleId> + '_ { impl ModuleData {
self.modules.iter().map(|(id, _data)| id) /// Returns a node which defines this module. That is, a file or a `mod foo {}` with items.
pub fn definition_source(
&self,
db: &impl DefDatabase,
) -> Source<Either<ast::SourceFile, ast::Module>> {
if let Some(file_id) = self.definition {
let sf = db.parse(file_id).tree();
return Source::new(file_id.into(), Either::A(sf));
}
let decl = self.declaration.unwrap();
Source::new(decl.file_id(), Either::B(decl.to_node(db)))
} }
pub fn modules_for_file(&self, file_id: FileId) -> impl Iterator<Item = CrateModuleId> + '_ { /// Returns a node which declares this module, either a `mod foo;` or a `mod foo {}`.
self.modules /// `None` for the crate root.
.iter() pub fn declaration_source(&self, db: &impl DefDatabase) -> Option<Source<ast::Module>> {
.filter(move |(_id, data)| data.definition == Some(file_id)) let decl = self.declaration?;
.map(|(id, _data)| id) let value = decl.to_node(db);
Some(Source { file_id: decl.file_id(), value })
} }
} }
@ -290,12 +284,12 @@ mod diagnostics {
use ra_db::RelativePathBuf; use ra_db::RelativePathBuf;
use ra_syntax::{ast, AstPtr}; use ra_syntax::{ast, AstPtr};
use crate::{db::DefDatabase2, diagnostics::UnresolvedModule, nameres::CrateModuleId, AstId}; use crate::{db::DefDatabase, diagnostics::UnresolvedModule, nameres::LocalModuleId, AstId};
#[derive(Debug, PartialEq, Eq)] #[derive(Debug, PartialEq, Eq)]
pub(super) enum DefDiagnostic { pub(super) enum DefDiagnostic {
UnresolvedModule { UnresolvedModule {
module: CrateModuleId, module: LocalModuleId,
declaration: AstId<ast::Module>, declaration: AstId<ast::Module>,
candidate: RelativePathBuf, candidate: RelativePathBuf,
}, },
@ -304,8 +298,8 @@ mod diagnostics {
impl DefDiagnostic { impl DefDiagnostic {
pub(super) fn add_to( pub(super) fn add_to(
&self, &self,
db: &impl DefDatabase2, db: &impl DefDatabase,
target_module: CrateModuleId, target_module: LocalModuleId,
sink: &mut DiagnosticSink, sink: &mut DiagnosticSink,
) { ) {
match self { match self {

View file

@ -1,4 +1,7 @@
//! FIXME: write short doc here //! The core of the module-level name resolution algorithm.
//!
//! `DefCollector::collect` contains the fixed-point iteration loop which
//! resolves imports and expands macros.
use hir_expand::{ use hir_expand::{
builtin_macro::find_builtin_macro, builtin_macro::find_builtin_macro,
@ -7,24 +10,25 @@ use hir_expand::{
}; };
use ra_cfg::CfgOptions; use ra_cfg::CfgOptions;
use ra_db::{CrateId, FileId}; use ra_db::{CrateId, FileId};
use ra_syntax::{ast, SmolStr}; use ra_syntax::ast;
use rustc_hash::FxHashMap; use rustc_hash::{FxHashMap, FxHashSet};
use test_utils::tested_by; use test_utils::tested_by;
use crate::{ use crate::{
attr::Attr, attr::Attrs,
db::DefDatabase2, db::DefDatabase,
nameres::{ nameres::{
diagnostics::DefDiagnostic, mod_resolution::ModDir, path_resolution::ReachedFixedPoint, diagnostics::DefDiagnostic, mod_resolution::ModDir, path_resolution::ReachedFixedPoint,
per_ns::PerNs, raw, CrateDefMap, ModuleData, Resolution, ResolveMode, raw, CrateDefMap, ModuleData, Resolution, ResolveMode,
}, },
path::{Path, PathKind}, path::{Path, PathKind},
AdtId, AstId, AstItemDef, ConstLoc, ContainerId, CrateModuleId, EnumId, EnumVariantId, per_ns::PerNs,
FunctionLoc, ImplId, Intern, LocationCtx, ModuleDefId, ModuleId, StaticId, StructId, AdtId, AstId, AstItemDef, ConstLoc, ContainerId, EnumId, EnumVariantId, FunctionLoc, ImplId,
Intern, LocalImportId, LocalModuleId, LocationCtx, ModuleDefId, ModuleId, StaticLoc, StructId,
StructOrUnionId, TraitId, TypeAliasLoc, UnionId, StructOrUnionId, TraitId, TypeAliasLoc, UnionId,
}; };
pub(super) fn collect_defs(db: &impl DefDatabase2, mut def_map: CrateDefMap) -> CrateDefMap { pub(super) fn collect_defs(db: &impl DefDatabase, mut def_map: CrateDefMap) -> CrateDefMap {
let crate_graph = db.crate_graph(); let crate_graph = db.crate_graph();
// populate external prelude // populate external prelude
@ -56,6 +60,7 @@ pub(super) fn collect_defs(db: &impl DefDatabase2, mut def_map: CrateDefMap) ->
unexpanded_macros: Vec::new(), unexpanded_macros: Vec::new(),
mod_dirs: FxHashMap::default(), mod_dirs: FxHashMap::default(),
macro_stack_monitor: MacroStackMonitor::default(), macro_stack_monitor: MacroStackMonitor::default(),
poison_macros: FxHashSet::default(),
cfg_options, cfg_options,
}; };
collector.collect(); collector.collect();
@ -94,21 +99,32 @@ impl MacroStackMonitor {
struct DefCollector<'a, DB> { struct DefCollector<'a, DB> {
db: &'a DB, db: &'a DB,
def_map: CrateDefMap, def_map: CrateDefMap,
glob_imports: FxHashMap<CrateModuleId, Vec<(CrateModuleId, raw::ImportId)>>, glob_imports: FxHashMap<LocalModuleId, Vec<(LocalModuleId, LocalImportId)>>,
unresolved_imports: Vec<(CrateModuleId, raw::ImportId, raw::ImportData)>, unresolved_imports: Vec<(LocalModuleId, LocalImportId, raw::ImportData)>,
unexpanded_macros: Vec<(CrateModuleId, AstId<ast::MacroCall>, Path)>, unexpanded_macros: Vec<(LocalModuleId, AstId<ast::MacroCall>, Path)>,
mod_dirs: FxHashMap<CrateModuleId, ModDir>, mod_dirs: FxHashMap<LocalModuleId, ModDir>,
/// Some macro use `$tt:tt which mean we have to handle the macro perfectly /// Some macro use `$tt:tt which mean we have to handle the macro perfectly
/// To prevent stack overflow, we add a deep counter here for prevent that. /// To prevent stack overflow, we add a deep counter here for prevent that.
macro_stack_monitor: MacroStackMonitor, macro_stack_monitor: MacroStackMonitor,
/// Some macros are not well-behavior, which leads to infinite loop
/// e.g. macro_rules! foo { ($ty:ty) => { foo!($ty); } }
/// We mark it down and skip it in collector
///
/// FIXME:
/// Right now it only handle a poison macro in a single crate,
/// such that if other crate try to call that macro,
/// the whole process will do again until it became poisoned in that crate.
/// We should handle this macro set globally
/// However, do we want to put it as a global variable?
poison_macros: FxHashSet<MacroDefId>,
cfg_options: &'a CfgOptions, cfg_options: &'a CfgOptions,
} }
impl<DB> DefCollector<'_, DB> impl<DB> DefCollector<'_, DB>
where where
DB: DefDatabase2, DB: DefDatabase,
{ {
fn collect(&mut self) { fn collect(&mut self) {
let crate_graph = self.db.crate_graph(); let crate_graph = self.db.crate_graph();
@ -173,7 +189,7 @@ where
/// ``` /// ```
fn define_macro( fn define_macro(
&mut self, &mut self,
module_id: CrateModuleId, module_id: LocalModuleId,
name: Name, name: Name,
macro_: MacroDefId, macro_: MacroDefId,
export: bool, export: bool,
@ -200,7 +216,7 @@ where
/// the definition of current module. /// the definition of current module.
/// And also, `macro_use` on a module will import all legacy macros visable inside to /// And also, `macro_use` on a module will import all legacy macros visable inside to
/// current legacy scope, with possible shadowing. /// current legacy scope, with possible shadowing.
fn define_legacy_macro(&mut self, module_id: CrateModuleId, name: Name, macro_: MacroDefId) { fn define_legacy_macro(&mut self, module_id: LocalModuleId, name: Name, macro_: MacroDefId) {
// Always shadowing // Always shadowing
self.def_map.modules[module_id].scope.legacy_macros.insert(name, macro_); self.def_map.modules[module_id].scope.legacy_macros.insert(name, macro_);
} }
@ -208,7 +224,7 @@ where
/// Import macros from `#[macro_use] extern crate`. /// Import macros from `#[macro_use] extern crate`.
fn import_macros_from_extern_crate( fn import_macros_from_extern_crate(
&mut self, &mut self,
current_module_id: CrateModuleId, current_module_id: LocalModuleId,
import: &raw::ImportData, import: &raw::ImportData,
) { ) {
log::debug!( log::debug!(
@ -235,7 +251,7 @@ where
/// Exported macros are just all macros in the root module scope. /// Exported macros are just all macros in the root module scope.
/// Note that it contains not only all `#[macro_export]` macros, but also all aliases /// Note that it contains not only all `#[macro_export]` macros, but also all aliases
/// created by `use` in the root module, ignoring the visibility of `use`. /// created by `use` in the root module, ignoring the visibility of `use`.
fn import_all_macros_exported(&mut self, current_module_id: CrateModuleId, krate: CrateId) { fn import_all_macros_exported(&mut self, current_module_id: LocalModuleId, krate: CrateId) {
let def_map = self.db.crate_def_map(krate); let def_map = self.db.crate_def_map(krate);
for (name, def) in def_map[def_map.root].scope.macros() { for (name, def) in def_map[def_map.root].scope.macros() {
// `macro_use` only bring things into legacy scope. // `macro_use` only bring things into legacy scope.
@ -265,7 +281,7 @@ where
fn resolve_import( fn resolve_import(
&self, &self,
module_id: CrateModuleId, module_id: LocalModuleId,
import: &raw::ImportData, import: &raw::ImportData,
) -> (PerNs, ReachedFixedPoint) { ) -> (PerNs, ReachedFixedPoint) {
log::debug!("resolving import: {:?} ({:?})", import, self.def_map.edition); log::debug!("resolving import: {:?} ({:?})", import, self.def_map.edition);
@ -291,9 +307,9 @@ where
fn record_resolved_import( fn record_resolved_import(
&mut self, &mut self,
module_id: CrateModuleId, module_id: LocalModuleId,
def: PerNs, def: PerNs,
import_id: raw::ImportId, import_id: LocalImportId,
import: &raw::ImportData, import: &raw::ImportData,
) { ) {
if import.is_glob { if import.is_glob {
@ -387,8 +403,8 @@ where
fn update( fn update(
&mut self, &mut self,
module_id: CrateModuleId, module_id: LocalModuleId,
import: Option<raw::ImportId>, import: Option<LocalImportId>,
resolutions: &[(Name, Resolution)], resolutions: &[(Name, Resolution)],
) { ) {
self.update_recursive(module_id, import, resolutions, 0) self.update_recursive(module_id, import, resolutions, 0)
@ -396,8 +412,8 @@ where
fn update_recursive( fn update_recursive(
&mut self, &mut self,
module_id: CrateModuleId, module_id: LocalModuleId,
import: Option<raw::ImportId>, import: Option<LocalImportId>,
resolutions: &[(Name, Resolution)], resolutions: &[(Name, Resolution)],
depth: usize, depth: usize,
) { ) {
@ -463,7 +479,7 @@ where
path, path,
); );
if let Some(def) = resolved_res.resolved_def.get_macros() { if let Some(def) = resolved_res.resolved_def.take_macros() {
let call_id = self.db.intern_macro(MacroCallLoc { def, ast_id: *ast_id }); let call_id = self.db.intern_macro(MacroCallLoc { def, ast_id: *ast_id });
resolved.push((*module_id, call_id, def)); resolved.push((*module_id, call_id, def));
res = ReachedFixedPoint::No; res = ReachedFixedPoint::No;
@ -484,11 +500,11 @@ where
fn collect_macro_expansion( fn collect_macro_expansion(
&mut self, &mut self,
module_id: CrateModuleId, module_id: LocalModuleId,
macro_call_id: MacroCallId, macro_call_id: MacroCallId,
macro_def_id: MacroDefId, macro_def_id: MacroDefId,
) { ) {
if self.def_map.poison_macros.contains(&macro_def_id) { if self.poison_macros.contains(&macro_def_id) {
return; return;
} }
@ -508,7 +524,7 @@ where
.collect(raw_items.items()); .collect(raw_items.items());
} else { } else {
log::error!("Too deep macro expansion: {:?}", macro_call_id); log::error!("Too deep macro expansion: {:?}", macro_call_id);
self.def_map.poison_macros.insert(macro_def_id); self.poison_macros.insert(macro_def_id);
} }
self.macro_stack_monitor.decrease(macro_def_id); self.macro_stack_monitor.decrease(macro_def_id);
@ -522,7 +538,7 @@ where
/// Walks a single module, populating defs, imports and macros /// Walks a single module, populating defs, imports and macros
struct ModCollector<'a, D> { struct ModCollector<'a, D> {
def_collector: D, def_collector: D,
module_id: CrateModuleId, module_id: LocalModuleId,
file_id: HirFileId, file_id: HirFileId,
raw_items: &'a raw::RawItems, raw_items: &'a raw::RawItems,
mod_dir: ModDir, mod_dir: ModDir,
@ -530,7 +546,7 @@ struct ModCollector<'a, D> {
impl<DB> ModCollector<'_, &'_ mut DefCollector<'_, DB>> impl<DB> ModCollector<'_, &'_ mut DefCollector<'_, DB>>
where where
DB: DefDatabase2, DB: DefDatabase,
{ {
fn collect(&mut self, items: &[raw::RawItem]) { fn collect(&mut self, items: &[raw::RawItem]) {
// Note: don't assert that inserted value is fresh: it's simply not true // Note: don't assert that inserted value is fresh: it's simply not true
@ -549,7 +565,7 @@ where
// `#[macro_use] extern crate` is hoisted to imports macros before collecting // `#[macro_use] extern crate` is hoisted to imports macros before collecting
// any other items. // any other items.
for item in items { for item in items {
if self.is_cfg_enabled(item.attrs()) { if self.is_cfg_enabled(&item.attrs) {
if let raw::RawItemKind::Import(import_id) = item.kind { if let raw::RawItemKind::Import(import_id) = item.kind {
let import = self.raw_items[import_id].clone(); let import = self.raw_items[import_id].clone();
if import.is_extern_crate && import.is_macro_use { if import.is_extern_crate && import.is_macro_use {
@ -560,10 +576,10 @@ where
} }
for item in items { for item in items {
if self.is_cfg_enabled(item.attrs()) { if self.is_cfg_enabled(&item.attrs) {
match item.kind { match item.kind {
raw::RawItemKind::Module(m) => { raw::RawItemKind::Module(m) => {
self.collect_module(&self.raw_items[m], item.attrs()) self.collect_module(&self.raw_items[m], &item.attrs)
} }
raw::RawItemKind::Import(import_id) => self raw::RawItemKind::Import(import_id) => self
.def_collector .def_collector
@ -585,9 +601,9 @@ where
} }
} }
fn collect_module(&mut self, module: &raw::ModuleData, attrs: &[Attr]) { fn collect_module(&mut self, module: &raw::ModuleData, attrs: &Attrs) {
let path_attr = self.path_attr(attrs); let path_attr = attrs.by_key("path").string_value();
let is_macro_use = self.is_macro_use(attrs); let is_macro_use = attrs.by_key("macro_use").exists();
match module { match module {
// inline module, just recurse // inline module, just recurse
raw::ModuleData::Definition { name, items, ast_id } => { raw::ModuleData::Definition { name, items, ast_id } => {
@ -647,7 +663,7 @@ where
name: Name, name: Name,
declaration: AstId<ast::Module>, declaration: AstId<ast::Module>,
definition: Option<FileId>, definition: Option<FileId>,
) -> CrateModuleId { ) -> LocalModuleId {
let modules = &mut self.def_collector.def_map.modules; let modules = &mut self.def_collector.def_map.modules;
let res = modules.alloc(ModuleData::default()); let res = modules.alloc(ModuleData::default());
modules[res].parent = Some(self.module_id); modules[res].parent = Some(self.module_id);
@ -702,7 +718,10 @@ where
PerNs::values(def.into()) PerNs::values(def.into())
} }
raw::DefKind::Static(ast_id) => { raw::DefKind::Static(ast_id) => {
PerNs::values(StaticId::from_ast_id(ctx, ast_id).into()) let def = StaticLoc { container: module, ast_id: AstId::new(self.file_id, ast_id) }
.intern(self.def_collector.db);
PerNs::values(def.into())
} }
raw::DefKind::Trait(ast_id) => PerNs::types(TraitId::from_ast_id(ctx, ast_id).into()), raw::DefKind::Trait(ast_id) => PerNs::types(TraitId::from_ast_id(ctx, ast_id).into()),
raw::DefKind::TypeAlias(ast_id) => { raw::DefKind::TypeAlias(ast_id) => {
@ -772,23 +791,19 @@ where
self.def_collector.unexpanded_macros.push((self.module_id, ast_id, path)); self.def_collector.unexpanded_macros.push((self.module_id, ast_id, path));
} }
fn import_all_legacy_macros(&mut self, module_id: CrateModuleId) { fn import_all_legacy_macros(&mut self, module_id: LocalModuleId) {
let macros = self.def_collector.def_map[module_id].scope.legacy_macros.clone(); let macros = self.def_collector.def_map[module_id].scope.legacy_macros.clone();
for (name, macro_) in macros { for (name, macro_) in macros {
self.def_collector.define_legacy_macro(self.module_id, name.clone(), macro_); self.def_collector.define_legacy_macro(self.module_id, name.clone(), macro_);
} }
} }
fn is_cfg_enabled(&self, attrs: &[Attr]) -> bool { fn is_cfg_enabled(&self, attrs: &Attrs) -> bool {
attrs.iter().all(|attr| attr.is_cfg_enabled(&self.def_collector.cfg_options) != Some(false)) // FIXME: handle cfg_attr :-)
} attrs
.by_key("cfg")
fn path_attr<'a>(&self, attrs: &'a [Attr]) -> Option<&'a SmolStr> { .tt_values()
attrs.iter().find_map(|attr| attr.as_path()) .all(|tt| self.def_collector.cfg_options.is_cfg_enabled(tt) != Some(false))
}
fn is_macro_use<'a>(&self, attrs: &'a [Attr]) -> bool {
attrs.iter().any(|attr| attr.is_simple_atom("macro_use"))
} }
} }
@ -802,15 +817,15 @@ mod tests {
use ra_db::{fixture::WithFixture, SourceDatabase}; use ra_db::{fixture::WithFixture, SourceDatabase};
use rustc_hash::FxHashSet; use rustc_hash::FxHashSet;
use crate::{db::DefDatabase2, test_db::TestDB}; use crate::{db::DefDatabase, test_db::TestDB};
use super::*; use super::*;
fn do_collect_defs( fn do_collect_defs(
db: &impl DefDatabase2, db: &impl DefDatabase,
def_map: CrateDefMap, def_map: CrateDefMap,
monitor: MacroStackMonitor, monitor: MacroStackMonitor,
) -> CrateDefMap { ) -> (CrateDefMap, FxHashSet<MacroDefId>) {
let mut collector = DefCollector { let mut collector = DefCollector {
db, db,
def_map, def_map,
@ -819,19 +834,24 @@ mod tests {
unexpanded_macros: Vec::new(), unexpanded_macros: Vec::new(),
mod_dirs: FxHashMap::default(), mod_dirs: FxHashMap::default(),
macro_stack_monitor: monitor, macro_stack_monitor: monitor,
poison_macros: FxHashSet::default(),
cfg_options: &CfgOptions::default(), cfg_options: &CfgOptions::default(),
}; };
collector.collect(); collector.collect();
collector.finish() (collector.def_map, collector.poison_macros)
} }
fn do_limited_resolve(code: &str, limit: u32, poison_limit: u32) -> CrateDefMap { fn do_limited_resolve(
code: &str,
limit: u32,
poison_limit: u32,
) -> (CrateDefMap, FxHashSet<MacroDefId>) {
let (db, _file_id) = TestDB::with_single_file(&code); let (db, _file_id) = TestDB::with_single_file(&code);
let krate = db.test_crate(); let krate = db.test_crate();
let def_map = { let def_map = {
let edition = db.crate_graph().edition(krate); let edition = db.crate_graph().edition(krate);
let mut modules: Arena<CrateModuleId, ModuleData> = Arena::default(); let mut modules: Arena<LocalModuleId, ModuleData> = Arena::default();
let root = modules.alloc(ModuleData::default()); let root = modules.alloc(ModuleData::default());
CrateDefMap { CrateDefMap {
krate, krate,
@ -840,7 +860,6 @@ mod tests {
prelude: None, prelude: None,
root, root,
modules, modules,
poison_macros: FxHashSet::default(),
diagnostics: Vec::new(), diagnostics: Vec::new(),
} }
}; };
@ -870,7 +889,7 @@ foo!(KABOOM);
#[test] #[test]
fn test_macro_expand_poisoned() { fn test_macro_expand_poisoned() {
let def = do_limited_resolve( let (_, poison_macros) = do_limited_resolve(
r#" r#"
macro_rules! foo { macro_rules! foo {
($ty:ty) => { foo!($ty); } ($ty:ty) => { foo!($ty); }
@ -881,12 +900,12 @@ foo!(KABOOM);
16, 16,
); );
assert_eq!(def.poison_macros.len(), 1); assert_eq!(poison_macros.len(), 1);
} }
#[test] #[test]
fn test_macro_expand_normal() { fn test_macro_expand_normal() {
let def = do_limited_resolve( let (_, poison_macros) = do_limited_resolve(
r#" r#"
macro_rules! foo { macro_rules! foo {
($ident:ident) => { struct $ident {} } ($ident:ident) => { struct $ident {} }
@ -897,6 +916,6 @@ foo!(Bar);
16, 16,
); );
assert_eq!(def.poison_macros.len(), 0); assert_eq!(poison_macros.len(), 0);
} }
} }

View file

@ -3,7 +3,7 @@ use hir_expand::name::Name;
use ra_db::{FileId, RelativePathBuf}; use ra_db::{FileId, RelativePathBuf};
use ra_syntax::SmolStr; use ra_syntax::SmolStr;
use crate::{db::DefDatabase2, HirFileId}; use crate::{db::DefDatabase, HirFileId};
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub(super) struct ModDir { pub(super) struct ModDir {
@ -40,7 +40,7 @@ impl ModDir {
pub(super) fn resolve_declaration( pub(super) fn resolve_declaration(
&self, &self,
db: &impl DefDatabase2, db: &impl DefDatabase,
file_id: HirFileId, file_id: HirFileId,
name: &Name, name: &Name,
attr_path: Option<&SmolStr>, attr_path: Option<&SmolStr>,

View file

@ -15,10 +15,11 @@ use ra_db::Edition;
use test_utils::tested_by; use test_utils::tested_by;
use crate::{ use crate::{
db::DefDatabase2, db::DefDatabase,
nameres::{per_ns::PerNs, CrateDefMap}, nameres::CrateDefMap,
path::{Path, PathKind}, path::{Path, PathKind},
AdtId, CrateModuleId, EnumVariantId, ModuleDefId, ModuleId, per_ns::PerNs,
AdtId, EnumVariantId, LocalModuleId, ModuleDefId, ModuleId,
}; };
#[derive(Debug, Clone, Copy, PartialEq, Eq)] #[derive(Debug, Clone, Copy, PartialEq, Eq)]
@ -63,9 +64,9 @@ impl CrateDefMap {
// the result. // the result.
pub(super) fn resolve_path_fp_with_macro( pub(super) fn resolve_path_fp_with_macro(
&self, &self,
db: &impl DefDatabase2, db: &impl DefDatabase,
mode: ResolveMode, mode: ResolveMode,
original_module: CrateModuleId, original_module: LocalModuleId,
path: &Path, path: &Path,
) -> ResolvePathResult { ) -> ResolvePathResult {
let mut segments = path.segments.iter().enumerate(); let mut segments = path.segments.iter().enumerate();
@ -216,8 +217,8 @@ impl CrateDefMap {
fn resolve_name_in_module( fn resolve_name_in_module(
&self, &self,
db: &impl DefDatabase2, db: &impl DefDatabase,
module: CrateModuleId, module: LocalModuleId,
name: &Name, name: &Name,
) -> PerNs { ) -> PerNs {
// Resolve in: // Resolve in:
@ -243,7 +244,7 @@ impl CrateDefMap {
from_crate_root.or(from_extern_prelude) from_crate_root.or(from_extern_prelude)
} }
fn resolve_in_prelude(&self, db: &impl DefDatabase2, name: &Name) -> PerNs { fn resolve_in_prelude(&self, db: &impl DefDatabase, name: &Name) -> PerNs {
if let Some(prelude) = self.prelude { if let Some(prelude) = self.prelude {
let keep; let keep;
let def_map = if prelude.krate == self.krate { let def_map = if prelude.krate == self.krate {

View file

@ -1,4 +1,9 @@
//! FIXME: write short doc here //! Lowers syntax tree of a rust file into a raw representation of containing
//! items, *without* attaching them to a module structure.
//!
//! That is, raw items don't have semantics, just as syntax, but, unlike syntax,
//! they don't change with trivial source code edits, making them a great tool
//! for building salsa recomputation firewalls.
use std::{ops::Index, sync::Arc}; use std::{ops::Index, sync::Arc};
@ -12,11 +17,14 @@ use hir_expand::{
use ra_arena::{impl_arena_id, map::ArenaMap, Arena, RawId}; use ra_arena::{impl_arena_id, map::ArenaMap, Arena, RawId};
use ra_syntax::{ use ra_syntax::{
ast::{self, AttrsOwner, NameOwner}, ast::{self, AttrsOwner, NameOwner},
AstNode, AstPtr, SourceFile, AstNode, AstPtr,
}; };
use test_utils::tested_by; use test_utils::tested_by;
use crate::{attr::Attr, db::DefDatabase2, path::Path, FileAstId, HirFileId, ModuleSource, Source}; use crate::{
attr::Attrs, db::DefDatabase, path::Path, trace::Trace, FileAstId, HirFileId, LocalImportId,
Source,
};
/// `RawItems` is a set of top-level items in a file (except for impls). /// `RawItems` is a set of top-level items in a file (except for impls).
/// ///
@ -25,7 +33,7 @@ use crate::{attr::Attr, db::DefDatabase2, path::Path, FileAstId, HirFileId, Modu
#[derive(Debug, Default, PartialEq, Eq)] #[derive(Debug, Default, PartialEq, Eq)]
pub struct RawItems { pub struct RawItems {
modules: Arena<Module, ModuleData>, modules: Arena<Module, ModuleData>,
imports: Arena<ImportId, ImportData>, imports: Arena<LocalImportId, ImportData>,
defs: Arena<Def, DefData>, defs: Arena<Def, DefData>,
macros: Arena<Macro, MacroData>, macros: Arena<Macro, MacroData>,
impls: Arena<Impl, ImplData>, impls: Arena<Impl, ImplData>,
@ -35,47 +43,33 @@ pub struct RawItems {
#[derive(Debug, Default, PartialEq, Eq)] #[derive(Debug, Default, PartialEq, Eq)]
pub struct ImportSourceMap { pub struct ImportSourceMap {
map: ArenaMap<ImportId, ImportSourcePtr>, map: ArenaMap<LocalImportId, ImportSourcePtr>,
} }
type ImportSourcePtr = Either<AstPtr<ast::UseTree>, AstPtr<ast::ExternCrateItem>>; type ImportSourcePtr = Either<AstPtr<ast::UseTree>, AstPtr<ast::ExternCrateItem>>;
type ImportSource = Either<ast::UseTree, ast::ExternCrateItem>;
fn to_node(ptr: ImportSourcePtr, file: &SourceFile) -> ImportSource {
ptr.map(|ptr| ptr.to_node(file.syntax()), |ptr| ptr.to_node(file.syntax()))
}
impl ImportSourceMap { impl ImportSourceMap {
fn insert(&mut self, import: ImportId, ptr: ImportSourcePtr) { pub fn get(&self, import: LocalImportId) -> ImportSourcePtr {
self.map.insert(import, ptr) self.map[import].clone()
}
pub fn get(&self, source: &ModuleSource, import: ImportId) -> ImportSource {
let file = match source {
ModuleSource::SourceFile(file) => file.clone(),
ModuleSource::Module(m) => m.syntax().ancestors().find_map(SourceFile::cast).unwrap(),
};
to_node(self.map[import], &file)
} }
} }
impl RawItems { impl RawItems {
pub(crate) fn raw_items_query( pub(crate) fn raw_items_query(
db: &(impl DefDatabase2 + AstDatabase), db: &(impl DefDatabase + AstDatabase),
file_id: HirFileId, file_id: HirFileId,
) -> Arc<RawItems> { ) -> Arc<RawItems> {
db.raw_items_with_source_map(file_id).0 db.raw_items_with_source_map(file_id).0
} }
pub(crate) fn raw_items_with_source_map_query( pub(crate) fn raw_items_with_source_map_query(
db: &(impl DefDatabase2 + AstDatabase), db: &(impl DefDatabase + AstDatabase),
file_id: HirFileId, file_id: HirFileId,
) -> (Arc<RawItems>, Arc<ImportSourceMap>) { ) -> (Arc<RawItems>, Arc<ImportSourceMap>) {
let mut collector = RawItemsCollector { let mut collector = RawItemsCollector {
raw_items: RawItems::default(), raw_items: RawItems::default(),
source_ast_id_map: db.ast_id_map(file_id), source_ast_id_map: db.ast_id_map(file_id),
source_map: ImportSourceMap::default(), imports: Trace::new(),
file_id, file_id,
hygiene: Hygiene::new(db, file_id), hygiene: Hygiene::new(db, file_id),
}; };
@ -86,7 +80,11 @@ impl RawItems {
collector.process_module(None, item_list); collector.process_module(None, item_list);
} }
} }
(Arc::new(collector.raw_items), Arc::new(collector.source_map)) let mut raw_items = collector.raw_items;
let (arena, map) = collector.imports.into_arena_and_map();
raw_items.imports = arena;
let source_map = ImportSourceMap { map };
(Arc::new(raw_items), Arc::new(source_map))
} }
pub(super) fn items(&self) -> &[RawItem] { pub(super) fn items(&self) -> &[RawItem] {
@ -101,9 +99,9 @@ impl Index<Module> for RawItems {
} }
} }
impl Index<ImportId> for RawItems { impl Index<LocalImportId> for RawItems {
type Output = ImportData; type Output = ImportData;
fn index(&self, idx: ImportId) -> &ImportData { fn index(&self, idx: LocalImportId) -> &ImportData {
&self.imports[idx] &self.imports[idx]
} }
} }
@ -129,25 +127,16 @@ impl Index<Impl> for RawItems {
} }
} }
// Avoid heap allocation on items without attributes.
type Attrs = Option<Arc<[Attr]>>;
#[derive(Debug, PartialEq, Eq, Clone)] #[derive(Debug, PartialEq, Eq, Clone)]
pub(super) struct RawItem { pub(super) struct RawItem {
attrs: Attrs, pub(super) attrs: Attrs,
pub(super) kind: RawItemKind, pub(super) kind: RawItemKind,
} }
impl RawItem {
pub(super) fn attrs(&self) -> &[Attr] {
self.attrs.as_ref().map_or(&[], |it| &*it)
}
}
#[derive(Debug, PartialEq, Eq, Clone, Copy)] #[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub(super) enum RawItemKind { pub(super) enum RawItemKind {
Module(Module), Module(Module),
Import(ImportId), Import(LocalImportId),
Def(Def), Def(Def),
Macro(Macro), Macro(Macro),
Impl(Impl), Impl(Impl),
@ -163,10 +152,6 @@ pub(super) enum ModuleData {
Definition { name: Name, ast_id: FileAstId<ast::Module>, items: Vec<RawItem> }, Definition { name: Name, ast_id: FileAstId<ast::Module>, items: Vec<RawItem> },
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ImportId(RawId);
impl_arena_id!(ImportId);
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub struct ImportData { pub struct ImportData {
pub(super) path: Path, pub(super) path: Path,
@ -223,8 +208,8 @@ pub(super) struct ImplData {
struct RawItemsCollector { struct RawItemsCollector {
raw_items: RawItems, raw_items: RawItems,
imports: Trace<LocalImportId, ImportData, ImportSourcePtr>,
source_ast_id_map: Arc<AstIdMap>, source_ast_id_map: Arc<AstIdMap>,
source_map: ImportSourceMap,
file_id: HirFileId, file_id: HirFileId,
hygiene: Hygiene, hygiene: Hygiene,
} }
@ -408,8 +393,7 @@ impl RawItemsCollector {
data: ImportData, data: ImportData,
source: ImportSourcePtr, source: ImportSourcePtr,
) { ) {
let import = self.raw_items.imports.alloc(data); let import = self.imports.alloc(|| source, || data);
self.source_map.insert(import, source);
self.push_item(current_module, attrs, RawItemKind::Import(import)) self.push_item(current_module, attrs, RawItemKind::Import(import))
} }
@ -425,6 +409,6 @@ impl RawItemsCollector {
} }
fn parse_attrs(&self, item: &impl ast::AttrsOwner) -> Attrs { fn parse_attrs(&self, item: &impl ast::AttrsOwner) -> Attrs {
Attr::from_attrs_owner(item, &self.hygiene) Attrs::new(item, &self.hygiene)
} }
} }

View file

@ -10,7 +10,7 @@ use insta::assert_snapshot;
use ra_db::{fixture::WithFixture, SourceDatabase}; use ra_db::{fixture::WithFixture, SourceDatabase};
use test_utils::covers; use test_utils::covers;
use crate::{db::DefDatabase2, nameres::*, test_db::TestDB, CrateModuleId}; use crate::{db::DefDatabase, nameres::*, test_db::TestDB, LocalModuleId};
fn def_map(fixtute: &str) -> String { fn def_map(fixtute: &str) -> String {
let dm = compute_crate_def_map(fixtute); let dm = compute_crate_def_map(fixtute);
@ -25,10 +25,10 @@ fn compute_crate_def_map(fixture: &str) -> Arc<CrateDefMap> {
fn render_crate_def_map(map: &CrateDefMap) -> String { fn render_crate_def_map(map: &CrateDefMap) -> String {
let mut buf = String::new(); let mut buf = String::new();
go(&mut buf, map, "\ncrate", map.root()); go(&mut buf, map, "\ncrate", map.root);
return buf.trim().to_string(); return buf.trim().to_string();
fn go(buf: &mut String, map: &CrateDefMap, path: &str, module: CrateModuleId) { fn go(buf: &mut String, map: &CrateDefMap, path: &str, module: LocalModuleId) {
*buf += path; *buf += path;
*buf += "\n"; *buf += "\n";

View file

@ -665,7 +665,7 @@ fn unresolved_module_diagnostics() {
@r###" @r###"
[ [
UnresolvedModule { UnresolvedModule {
module: CrateModuleId( module: LocalModuleId(
0, 0,
), ),
declaration: AstId { declaration: AstId {

View file

@ -1,4 +1,4 @@
//! FIXME: write short doc here //! A desugared representation of paths like `crate::foo` or `<Type as Trait>::bar`.
use std::{iter, sync::Arc}; use std::{iter, sync::Arc};
@ -66,7 +66,7 @@ pub enum PathKind {
impl Path { impl Path {
/// Calls `cb` with all paths, represented by this use item. /// Calls `cb` with all paths, represented by this use item.
pub fn expand_use_item( pub(crate) fn expand_use_item(
item_src: Source<ast::UseItem>, item_src: Source<ast::UseItem>,
hygiene: &Hygiene, hygiene: &Hygiene,
mut cb: impl FnMut(Path, &ast::UseTree, bool, Option<Name>), mut cb: impl FnMut(Path, &ast::UseTree, bool, Option<Name>),
@ -76,7 +76,10 @@ impl Path {
} }
} }
pub fn from_simple_segments(kind: PathKind, segments: impl IntoIterator<Item = Name>) -> Path { pub(crate) fn from_simple_segments(
kind: PathKind,
segments: impl IntoIterator<Item = Name>,
) -> Path {
Path { Path {
kind, kind,
segments: segments segments: segments
@ -94,7 +97,7 @@ impl Path {
/// Converts an `ast::Path` to `Path`. Works with use trees. /// Converts an `ast::Path` to `Path`. Works with use trees.
/// It correctly handles `$crate` based path from macro call. /// It correctly handles `$crate` based path from macro call.
pub fn from_src(mut path: ast::Path, hygiene: &Hygiene) -> Option<Path> { pub(crate) fn from_src(mut path: ast::Path, hygiene: &Hygiene) -> Option<Path> {
let mut kind = PathKind::Plain; let mut kind = PathKind::Plain;
let mut segments = Vec::new(); let mut segments = Vec::new();
loop { loop {
@ -192,7 +195,7 @@ impl Path {
} }
/// Converts an `ast::NameRef` into a single-identifier `Path`. /// Converts an `ast::NameRef` into a single-identifier `Path`.
pub fn from_name_ref(name_ref: &ast::NameRef) -> Path { pub(crate) fn from_name_ref(name_ref: &ast::NameRef) -> Path {
name_ref.as_name().into() name_ref.as_name().into()
} }
@ -227,7 +230,7 @@ impl Path {
} }
impl GenericArgs { impl GenericArgs {
pub fn from_ast(node: ast::TypeArgList) -> Option<GenericArgs> { pub(crate) fn from_ast(node: ast::TypeArgList) -> Option<GenericArgs> {
let mut args = Vec::new(); let mut args = Vec::new();
for type_arg in node.type_args() { for type_arg in node.type_args() {
let type_ref = TypeRef::from_ast_opt(type_arg.type_ref()); let type_ref = TypeRef::from_ast_opt(type_arg.type_ref());

View file

@ -1,4 +1,7 @@
//! FIXME: write short doc here //! In rust, it is possible to have a value, a type and a macro with the same
//! name without conflicts.
//!
//! `PerNs` (per namespace) captures this.
use hir_expand::MacroDefId; use hir_expand::MacroDefId;
@ -44,10 +47,6 @@ impl PerNs {
self.types.is_none() && self.values.is_none() && self.macros.is_none() self.types.is_none() && self.values.is_none() && self.macros.is_none()
} }
pub fn is_all(&self) -> bool {
self.types.is_some() && self.values.is_some() && self.macros.is_some()
}
pub fn take_types(self) -> Option<ModuleDefId> { pub fn take_types(self) -> Option<ModuleDefId> {
self.types self.types
} }
@ -56,14 +55,10 @@ impl PerNs {
self.values self.values
} }
pub fn get_macros(&self) -> Option<MacroDefId> { pub fn take_macros(self) -> Option<MacroDefId> {
self.macros self.macros
} }
pub fn only_macros(&self) -> PerNs {
PerNs { types: None, values: None, macros: self.macros }
}
pub fn or(self, other: PerNs) -> PerNs { pub fn or(self, other: PerNs) -> PerNs {
PerNs { PerNs {
types: self.types.or(other.types), types: self.types.or(other.types),

View file

@ -11,14 +11,15 @@ use rustc_hash::FxHashSet;
use crate::{ use crate::{
body::scope::{ExprScopes, ScopeId}, body::scope::{ExprScopes, ScopeId},
builtin_type::BuiltinType, builtin_type::BuiltinType,
db::DefDatabase2, db::DefDatabase,
expr::{ExprId, PatId}, expr::{ExprId, PatId},
generics::GenericParams, generics::GenericParams,
nameres::{per_ns::PerNs, CrateDefMap}, nameres::CrateDefMap,
path::{Path, PathKind}, path::{Path, PathKind},
AdtId, AstItemDef, ConstId, ContainerId, CrateModuleId, DefWithBodyId, EnumId, EnumVariantId, per_ns::PerNs,
FunctionId, GenericDefId, ImplId, Lookup, ModuleDefId, ModuleId, StaticId, StructId, TraitId, AdtId, AstItemDef, ConstId, ContainerId, DefWithBodyId, EnumId, EnumVariantId, FunctionId,
TypeAliasId, GenericDefId, HasModule, ImplId, LocalModuleId, Lookup, ModuleDefId, ModuleId, StaticId,
StructId, TraitId, TypeAliasId,
}; };
#[derive(Debug, Clone, Default)] #[derive(Debug, Clone, Default)]
@ -28,20 +29,20 @@ pub struct Resolver {
// FIXME how to store these best // FIXME how to store these best
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub(crate) struct ModuleItemMap { struct ModuleItemMap {
crate_def_map: Arc<CrateDefMap>, crate_def_map: Arc<CrateDefMap>,
module_id: CrateModuleId, module_id: LocalModuleId,
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub(crate) struct ExprScope { struct ExprScope {
owner: DefWithBodyId, owner: DefWithBodyId,
expr_scopes: Arc<ExprScopes>, expr_scopes: Arc<ExprScopes>,
scope_id: ScopeId, scope_id: ScopeId,
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub(crate) enum Scope { enum Scope {
/// All the items and imported names of a module /// All the items and imported names of a module
ModuleScope(ModuleItemMap), ModuleScope(ModuleItemMap),
/// Brings the generic parameters of an item into scope /// Brings the generic parameters of an item into scope
@ -87,7 +88,7 @@ pub enum ValueNs {
impl Resolver { impl Resolver {
/// Resolve known trait from std, like `std::futures::Future` /// Resolve known trait from std, like `std::futures::Future`
pub fn resolve_known_trait(&self, db: &impl DefDatabase2, path: &Path) -> Option<TraitId> { pub fn resolve_known_trait(&self, db: &impl DefDatabase, path: &Path) -> Option<TraitId> {
let res = self.resolve_module_path(db, path).take_types()?; let res = self.resolve_module_path(db, path).take_types()?;
match res { match res {
ModuleDefId::TraitId(it) => Some(it), ModuleDefId::TraitId(it) => Some(it),
@ -96,7 +97,7 @@ impl Resolver {
} }
/// Resolve known struct from std, like `std::boxed::Box` /// Resolve known struct from std, like `std::boxed::Box`
pub fn resolve_known_struct(&self, db: &impl DefDatabase2, path: &Path) -> Option<StructId> { pub fn resolve_known_struct(&self, db: &impl DefDatabase, path: &Path) -> Option<StructId> {
let res = self.resolve_module_path(db, path).take_types()?; let res = self.resolve_module_path(db, path).take_types()?;
match res { match res {
ModuleDefId::AdtId(AdtId::StructId(it)) => Some(it), ModuleDefId::AdtId(AdtId::StructId(it)) => Some(it),
@ -105,7 +106,7 @@ impl Resolver {
} }
/// Resolve known enum from std, like `std::result::Result` /// Resolve known enum from std, like `std::result::Result`
pub fn resolve_known_enum(&self, db: &impl DefDatabase2, path: &Path) -> Option<EnumId> { pub fn resolve_known_enum(&self, db: &impl DefDatabase, path: &Path) -> Option<EnumId> {
let res = self.resolve_module_path(db, path).take_types()?; let res = self.resolve_module_path(db, path).take_types()?;
match res { match res {
ModuleDefId::AdtId(AdtId::EnumId(it)) => Some(it), ModuleDefId::AdtId(AdtId::EnumId(it)) => Some(it),
@ -114,7 +115,7 @@ impl Resolver {
} }
/// pub only for source-binder /// pub only for source-binder
pub fn resolve_module_path(&self, db: &impl DefDatabase2, path: &Path) -> PerNs { pub fn resolve_module_path(&self, db: &impl DefDatabase, path: &Path) -> PerNs {
let (item_map, module) = match self.module() { let (item_map, module) = match self.module() {
Some(it) => it, Some(it) => it,
None => return PerNs::none(), None => return PerNs::none(),
@ -128,7 +129,7 @@ impl Resolver {
pub fn resolve_path_in_type_ns( pub fn resolve_path_in_type_ns(
&self, &self,
db: &impl DefDatabase2, db: &impl DefDatabase,
path: &Path, path: &Path,
) -> Option<(TypeNs, Option<usize>)> { ) -> Option<(TypeNs, Option<usize>)> {
if path.is_type_relative() { if path.is_type_relative() {
@ -184,7 +185,7 @@ impl Resolver {
pub fn resolve_path_in_type_ns_fully( pub fn resolve_path_in_type_ns_fully(
&self, &self,
db: &impl DefDatabase2, db: &impl DefDatabase,
path: &Path, path: &Path,
) -> Option<TypeNs> { ) -> Option<TypeNs> {
let (res, unresolved) = self.resolve_path_in_type_ns(db, path)?; let (res, unresolved) = self.resolve_path_in_type_ns(db, path)?;
@ -196,7 +197,7 @@ impl Resolver {
pub fn resolve_path_in_value_ns<'p>( pub fn resolve_path_in_value_ns<'p>(
&self, &self,
db: &impl DefDatabase2, db: &impl DefDatabase,
path: &'p Path, path: &'p Path,
) -> Option<ResolveValueResult> { ) -> Option<ResolveValueResult> {
if path.is_type_relative() { if path.is_type_relative() {
@ -296,7 +297,7 @@ impl Resolver {
pub fn resolve_path_in_value_ns_fully( pub fn resolve_path_in_value_ns_fully(
&self, &self,
db: &impl DefDatabase2, db: &impl DefDatabase,
path: &Path, path: &Path,
) -> Option<ValueNs> { ) -> Option<ValueNs> {
match self.resolve_path_in_value_ns(db, path)? { match self.resolve_path_in_value_ns(db, path)? {
@ -305,22 +306,22 @@ impl Resolver {
} }
} }
pub fn resolve_path_as_macro(&self, db: &impl DefDatabase2, path: &Path) -> Option<MacroDefId> { pub fn resolve_path_as_macro(&self, db: &impl DefDatabase, path: &Path) -> Option<MacroDefId> {
let (item_map, module) = self.module()?; let (item_map, module) = self.module()?;
item_map.resolve_path(db, module, path).0.get_macros() item_map.resolve_path(db, module, path).0.take_macros()
} }
pub fn process_all_names(&self, db: &impl DefDatabase2, f: &mut dyn FnMut(Name, ScopeDef)) { pub fn process_all_names(&self, db: &impl DefDatabase, f: &mut dyn FnMut(Name, ScopeDef)) {
for scope in self.scopes.iter().rev() { for scope in self.scopes.iter().rev() {
scope.process_names(db, f); scope.process_names(db, f);
} }
} }
pub fn traits_in_scope(&self, db: &impl DefDatabase2) -> FxHashSet<TraitId> { pub fn traits_in_scope(&self, db: &impl DefDatabase) -> FxHashSet<TraitId> {
let mut traits = FxHashSet::default(); let mut traits = FxHashSet::default();
for scope in &self.scopes { for scope in &self.scopes {
if let Scope::ModuleScope(m) = scope { if let Scope::ModuleScope(m) = scope {
if let Some(prelude) = m.crate_def_map.prelude() { if let Some(prelude) = m.crate_def_map.prelude {
let prelude_def_map = db.crate_def_map(prelude.krate); let prelude_def_map = db.crate_def_map(prelude.krate);
traits.extend(prelude_def_map[prelude.module_id].scope.traits()); traits.extend(prelude_def_map[prelude.module_id].scope.traits());
} }
@ -330,7 +331,7 @@ impl Resolver {
traits traits
} }
fn module(&self) -> Option<(&CrateDefMap, CrateModuleId)> { fn module(&self) -> Option<(&CrateDefMap, LocalModuleId)> {
self.scopes.iter().rev().find_map(|scope| match scope { self.scopes.iter().rev().find_map(|scope| match scope {
Scope::ModuleScope(m) => Some((&*m.crate_def_map, m.module_id)), Scope::ModuleScope(m) => Some((&*m.crate_def_map, m.module_id)),
@ -339,7 +340,7 @@ impl Resolver {
} }
pub fn krate(&self) -> Option<CrateId> { pub fn krate(&self) -> Option<CrateId> {
self.module().map(|t| t.0.krate()) self.module().map(|t| t.0.krate)
} }
pub fn where_predicates_in_scope<'a>( pub fn where_predicates_in_scope<'a>(
@ -378,7 +379,7 @@ pub enum ScopeDef {
} }
impl Scope { impl Scope {
fn process_names(&self, db: &impl DefDatabase2, f: &mut dyn FnMut(Name, ScopeDef)) { fn process_names(&self, db: &impl DefDatabase, f: &mut dyn FnMut(Name, ScopeDef)) {
match self { match self {
Scope::ModuleScope(m) => { Scope::ModuleScope(m) => {
// FIXME: should we provide `self` here? // FIXME: should we provide `self` here?
@ -394,10 +395,10 @@ impl Scope {
m.crate_def_map[m.module_id].scope.legacy_macros().for_each(|(name, macro_)| { m.crate_def_map[m.module_id].scope.legacy_macros().for_each(|(name, macro_)| {
f(name.clone(), ScopeDef::PerNs(PerNs::macros(macro_))); f(name.clone(), ScopeDef::PerNs(PerNs::macros(macro_)));
}); });
m.crate_def_map.extern_prelude().iter().for_each(|(name, &def)| { m.crate_def_map.extern_prelude.iter().for_each(|(name, &def)| {
f(name.clone(), ScopeDef::PerNs(PerNs::types(def.into()))); f(name.clone(), ScopeDef::PerNs(PerNs::types(def.into())));
}); });
if let Some(prelude) = m.crate_def_map.prelude() { if let Some(prelude) = m.crate_def_map.prelude {
let prelude_def_map = db.crate_def_map(prelude.krate); let prelude_def_map = db.crate_def_map(prelude.krate);
prelude_def_map[prelude.module_id].scope.entries().for_each(|(name, res)| { prelude_def_map[prelude.module_id].scope.entries().for_each(|(name, res)| {
f(name.clone(), ScopeDef::PerNs(res.def)); f(name.clone(), ScopeDef::PerNs(res.def));
@ -425,17 +426,13 @@ impl Scope {
} }
// needs arbitrary_self_types to be a method... or maybe move to the def? // needs arbitrary_self_types to be a method... or maybe move to the def?
pub fn resolver_for_expr( pub fn resolver_for_expr(db: &impl DefDatabase, owner: DefWithBodyId, expr_id: ExprId) -> Resolver {
db: &impl DefDatabase2,
owner: DefWithBodyId,
expr_id: ExprId,
) -> Resolver {
let scopes = db.expr_scopes(owner); let scopes = db.expr_scopes(owner);
resolver_for_scope(db, owner, scopes.scope_for(expr_id)) resolver_for_scope(db, owner, scopes.scope_for(expr_id))
} }
pub fn resolver_for_scope( pub fn resolver_for_scope(
db: &impl DefDatabase2, db: &impl DefDatabase,
owner: DefWithBodyId, owner: DefWithBodyId,
scope_id: Option<ScopeId>, scope_id: Option<ScopeId>,
) -> Resolver { ) -> Resolver {
@ -454,7 +451,7 @@ impl Resolver {
self self
} }
fn push_generic_params_scope(self, db: &impl DefDatabase2, def: GenericDefId) -> Resolver { fn push_generic_params_scope(self, db: &impl DefDatabase, def: GenericDefId) -> Resolver {
let params = db.generic_params(def); let params = db.generic_params(def);
if params.params.is_empty() { if params.params.is_empty() {
self self
@ -470,7 +467,7 @@ impl Resolver {
fn push_module_scope( fn push_module_scope(
self, self,
crate_def_map: Arc<CrateDefMap>, crate_def_map: Arc<CrateDefMap>,
module_id: CrateModuleId, module_id: LocalModuleId,
) -> Resolver { ) -> Resolver {
self.push_scope(Scope::ModuleScope(ModuleItemMap { crate_def_map, module_id })) self.push_scope(Scope::ModuleScope(ModuleItemMap { crate_def_map, module_id }))
} }
@ -487,32 +484,26 @@ impl Resolver {
pub trait HasResolver { pub trait HasResolver {
/// Builds a resolver for type references inside this def. /// Builds a resolver for type references inside this def.
fn resolver(self, db: &impl DefDatabase2) -> Resolver; fn resolver(self, db: &impl DefDatabase) -> Resolver;
} }
impl HasResolver for ModuleId { impl HasResolver for ModuleId {
fn resolver(self, db: &impl DefDatabase2) -> Resolver { fn resolver(self, db: &impl DefDatabase) -> Resolver {
let def_map = db.crate_def_map(self.krate); let def_map = db.crate_def_map(self.krate);
Resolver::default().push_module_scope(def_map, self.module_id) Resolver::default().push_module_scope(def_map, self.module_id)
} }
} }
impl HasResolver for TraitId { impl HasResolver for TraitId {
fn resolver(self, db: &impl DefDatabase2) -> Resolver { fn resolver(self, db: &impl DefDatabase) -> Resolver {
self.module(db).resolver(db).push_generic_params_scope(db, self.into()) self.module(db).resolver(db).push_generic_params_scope(db, self.into())
} }
} }
impl<T: Into<AdtId>> HasResolver for T { impl<T: Into<AdtId>> HasResolver for T {
fn resolver(self, db: &impl DefDatabase2) -> Resolver { fn resolver(self, db: &impl DefDatabase) -> Resolver {
let def = self.into(); let def = self.into();
let module = match def { def.module(db)
AdtId::StructId(it) => it.0.module(db),
AdtId::UnionId(it) => it.0.module(db),
AdtId::EnumId(it) => it.module(db),
};
module
.resolver(db) .resolver(db)
.push_generic_params_scope(db, def.into()) .push_generic_params_scope(db, def.into())
.push_scope(Scope::AdtScope(def)) .push_scope(Scope::AdtScope(def))
@ -520,13 +511,13 @@ impl<T: Into<AdtId>> HasResolver for T {
} }
impl HasResolver for FunctionId { impl HasResolver for FunctionId {
fn resolver(self, db: &impl DefDatabase2) -> Resolver { fn resolver(self, db: &impl DefDatabase) -> Resolver {
self.lookup(db).container.resolver(db).push_generic_params_scope(db, self.into()) self.lookup(db).container.resolver(db).push_generic_params_scope(db, self.into())
} }
} }
impl HasResolver for DefWithBodyId { impl HasResolver for DefWithBodyId {
fn resolver(self, db: &impl DefDatabase2) -> Resolver { fn resolver(self, db: &impl DefDatabase) -> Resolver {
match self { match self {
DefWithBodyId::ConstId(c) => c.resolver(db), DefWithBodyId::ConstId(c) => c.resolver(db),
DefWithBodyId::FunctionId(f) => f.resolver(db), DefWithBodyId::FunctionId(f) => f.resolver(db),
@ -536,25 +527,25 @@ impl HasResolver for DefWithBodyId {
} }
impl HasResolver for ConstId { impl HasResolver for ConstId {
fn resolver(self, db: &impl DefDatabase2) -> Resolver { fn resolver(self, db: &impl DefDatabase) -> Resolver {
self.lookup(db).container.resolver(db) self.lookup(db).container.resolver(db)
} }
} }
impl HasResolver for StaticId { impl HasResolver for StaticId {
fn resolver(self, db: &impl DefDatabase2) -> Resolver { fn resolver(self, db: &impl DefDatabase) -> Resolver {
self.module(db).resolver(db) self.lookup(db).container.resolver(db)
} }
} }
impl HasResolver for TypeAliasId { impl HasResolver for TypeAliasId {
fn resolver(self, db: &impl DefDatabase2) -> Resolver { fn resolver(self, db: &impl DefDatabase) -> Resolver {
self.lookup(db).container.resolver(db).push_generic_params_scope(db, self.into()) self.lookup(db).container.resolver(db).push_generic_params_scope(db, self.into())
} }
} }
impl HasResolver for ContainerId { impl HasResolver for ContainerId {
fn resolver(self, db: &impl DefDatabase2) -> Resolver { fn resolver(self, db: &impl DefDatabase) -> Resolver {
match self { match self {
ContainerId::TraitId(it) => it.resolver(db), ContainerId::TraitId(it) => it.resolver(db),
ContainerId::ImplId(it) => it.resolver(db), ContainerId::ImplId(it) => it.resolver(db),
@ -564,7 +555,7 @@ impl HasResolver for ContainerId {
} }
impl HasResolver for GenericDefId { impl HasResolver for GenericDefId {
fn resolver(self, db: &impl DefDatabase2) -> Resolver { fn resolver(self, db: &impl DefDatabase) -> Resolver {
match self { match self {
GenericDefId::FunctionId(inner) => inner.resolver(db), GenericDefId::FunctionId(inner) => inner.resolver(db),
GenericDefId::AdtId(adt) => adt.resolver(db), GenericDefId::AdtId(adt) => adt.resolver(db),
@ -578,7 +569,7 @@ impl HasResolver for GenericDefId {
} }
impl HasResolver for ImplId { impl HasResolver for ImplId {
fn resolver(self, db: &impl DefDatabase2) -> Resolver { fn resolver(self, db: &impl DefDatabase) -> Resolver {
self.module(db) self.module(db)
.resolver(db) .resolver(db)
.push_generic_params_scope(db, self.into()) .push_generic_params_scope(db, self.into())

View file

@ -12,7 +12,7 @@ use ra_db::{salsa, CrateId, FileId, FileLoader, FileLoaderDelegate, RelativePath
ra_db::SourceDatabaseStorage, ra_db::SourceDatabaseStorage,
hir_expand::db::AstDatabaseStorage, hir_expand::db::AstDatabaseStorage,
crate::db::InternDatabaseStorage, crate::db::InternDatabaseStorage,
crate::db::DefDatabase2Storage crate::db::DefDatabaseStorage
)] )]
#[derive(Debug, Default)] #[derive(Debug, Default)]
pub struct TestDB { pub struct TestDB {

View file

@ -0,0 +1,59 @@
//! Trace is a pretty niche data structure which is used when lowering a CST
//! into HIR.
//!
//! Lowering process calculates two bits of information:
//! * the lowered syntax itself
//! * a mapping between lowered syntax and original syntax
//!
//! Due to the way salsa works, the mapping is usually hot lava, as it contains
//! absolute offsets. The `Trace` structure (inspired, at least in name, by
//! Kotlin's `BindingTrace`) allows use the same code to compute both
//! projections.
use ra_arena::{map::ArenaMap, Arena, ArenaId, RawId};
pub(crate) struct Trace<ID: ArenaId, T, V> {
arena: Option<Arena<ID, T>>,
map: Option<ArenaMap<ID, V>>,
len: u32,
}
impl<ID: ra_arena::ArenaId + Copy, T, V> Trace<ID, T, V> {
pub(crate) fn new() -> Trace<ID, T, V> {
Trace { arena: Some(Arena::default()), map: Some(ArenaMap::default()), len: 0 }
}
pub(crate) fn new_for_arena() -> Trace<ID, T, V> {
Trace { arena: Some(Arena::default()), map: None, len: 0 }
}
pub(crate) fn new_for_map() -> Trace<ID, T, V> {
Trace { arena: None, map: Some(ArenaMap::default()), len: 0 }
}
pub(crate) fn alloc(&mut self, value: impl FnOnce() -> V, data: impl FnOnce() -> T) -> ID {
let id = if let Some(arena) = &mut self.arena {
arena.alloc(data())
} else {
let id = ID::from_raw(RawId::from(self.len));
self.len += 1;
id
};
if let Some(map) = &mut self.map {
map.insert(id, value());
}
id
}
pub(crate) fn into_arena(mut self) -> Arena<ID, T> {
self.arena.take().unwrap()
}
pub(crate) fn into_map(mut self) -> ArenaMap<ID, V> {
self.map.take().unwrap()
}
pub(crate) fn into_arena_and_map(mut self) -> (Arena<ID, T>, ArenaMap<ID, V>) {
(self.arena.take().unwrap(), self.map.take().unwrap())
}
}

View file

@ -1,66 +0,0 @@
//! HIR for trait definitions.
use std::sync::Arc;
use hir_expand::{
name::{AsName, Name},
AstId,
};
use ra_syntax::ast::{self, NameOwner};
use crate::{
db::DefDatabase2, AssocItemId, AstItemDef, ConstLoc, ContainerId, FunctionLoc, Intern, TraitId,
TypeAliasId, TypeAliasLoc,
};
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct TraitData {
pub name: Option<Name>,
pub items: Vec<AssocItemId>,
pub auto: bool,
}
impl TraitData {
pub(crate) fn trait_data_query(db: &impl DefDatabase2, tr: TraitId) -> Arc<TraitData> {
let src = tr.source(db);
let name = src.value.name().map(|n| n.as_name());
let auto = src.value.is_auto();
let ast_id_map = db.ast_id_map(src.file_id);
let items = if let Some(item_list) = src.value.item_list() {
item_list
.impl_items()
.map(|item_node| match item_node {
ast::ImplItem::FnDef(it) => FunctionLoc {
container: ContainerId::TraitId(tr),
ast_id: AstId::new(src.file_id, ast_id_map.ast_id(&it)),
}
.intern(db)
.into(),
ast::ImplItem::ConstDef(it) => ConstLoc {
container: ContainerId::TraitId(tr),
ast_id: AstId::new(src.file_id, ast_id_map.ast_id(&it)),
}
.intern(db)
.into(),
ast::ImplItem::TypeAliasDef(it) => TypeAliasLoc {
container: ContainerId::TraitId(tr),
ast_id: AstId::new(src.file_id, ast_id_map.ast_id(&it)),
}
.intern(db)
.into(),
})
.collect()
} else {
Vec::new()
};
Arc::new(TraitData { name, items, auto })
}
pub fn associated_types(&self) -> impl Iterator<Item = TypeAliasId> + '_ {
self.items.iter().filter_map(|item| match item {
AssocItemId::TypeAliasId(t) => Some(*t),
_ => None,
})
}
}

View file

@ -64,7 +64,7 @@ pub enum TypeBound {
impl TypeRef { impl TypeRef {
/// Converts an `ast::TypeRef` to a `hir::TypeRef`. /// Converts an `ast::TypeRef` to a `hir::TypeRef`.
pub fn from_ast(node: ast::TypeRef) -> Self { pub(crate) fn from_ast(node: ast::TypeRef) -> Self {
match node { match node {
ast::TypeRef::ParenType(inner) => TypeRef::from_ast_opt(inner.type_ref()), ast::TypeRef::ParenType(inner) => TypeRef::from_ast_opt(inner.type_ref()),
ast::TypeRef::TupleType(inner) => { ast::TypeRef::TupleType(inner) => {
@ -113,7 +113,7 @@ impl TypeRef {
} }
} }
pub fn from_ast_opt(node: Option<ast::TypeRef>) -> Self { pub(crate) fn from_ast_opt(node: Option<ast::TypeRef>) -> Self {
if let Some(node) = node { if let Some(node) = node {
TypeRef::from_ast(node) TypeRef::from_ast(node)
} else { } else {
@ -121,7 +121,7 @@ impl TypeRef {
} }
} }
pub fn unit() -> TypeRef { pub(crate) fn unit() -> TypeRef {
TypeRef::Tuple(Vec::new()) TypeRef::Tuple(Vec::new())
} }
} }
@ -135,7 +135,7 @@ pub(crate) fn type_bounds_from_ast(type_bounds_opt: Option<ast::TypeBoundList>)
} }
impl TypeBound { impl TypeBound {
pub fn from_ast(node: ast::TypeBound) -> Self { pub(crate) fn from_ast(node: ast::TypeBound) -> Self {
match node.kind() { match node.kind() {
ast::TypeBoundKind::PathType(path_type) => { ast::TypeBoundKind::PathType(path_type) => {
let path = match path_type.path() { let path = match path_type.path() {

View file

@ -8,35 +8,47 @@ use crate::{
use crate::quote; use crate::quote;
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] macro_rules! register_builtin {
pub enum BuiltinExpander { ( $(($name:ident, $kind: ident) => $expand:ident),* ) => {
Line, #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
} pub enum BuiltinFnLikeExpander {
$($kind),*
impl BuiltinExpander {
pub fn expand(
&self,
db: &dyn AstDatabase,
id: MacroCallId,
tt: &tt::Subtree,
) -> Result<tt::Subtree, mbe::ExpandError> {
match self {
BuiltinExpander::Line => line_expand(db, id, tt),
} }
}
impl BuiltinFnLikeExpander {
pub fn expand(
&self,
db: &dyn AstDatabase,
id: MacroCallId,
tt: &tt::Subtree,
) -> Result<tt::Subtree, mbe::ExpandError> {
let expander = match *self {
$( BuiltinFnLikeExpander::$kind => $expand, )*
};
expander(db, id, tt)
}
}
pub fn find_builtin_macro(
ident: &name::Name,
krate: CrateId,
ast_id: AstId<ast::MacroCall>,
) -> Option<MacroDefId> {
let kind = match ident {
$( id if id == &name::$name => BuiltinFnLikeExpander::$kind, )*
_ => return None,
};
Some(MacroDefId { krate, ast_id, kind: MacroDefKind::BuiltIn(kind) })
}
};
} }
pub fn find_builtin_macro( register_builtin! {
ident: &name::Name, (COLUMN_MACRO, Column) => column_expand,
krate: CrateId, (FILE_MACRO, File) => file_expand,
ast_id: AstId<ast::MacroCall>, (LINE_MACRO, Line) => line_expand,
) -> Option<MacroDefId> { (STRINGIFY_MACRO, Stringify) => stringify_expand
// FIXME: Better registering method
if ident == &name::LINE_MACRO {
Some(MacroDefId { krate, ast_id, kind: MacroDefKind::BuiltIn(BuiltinExpander::Line) })
} else {
None
}
} }
fn to_line_number(db: &dyn AstDatabase, file: HirFileId, pos: TextUnit) -> usize { fn to_line_number(db: &dyn AstDatabase, file: HirFileId, pos: TextUnit) -> usize {
@ -78,3 +90,173 @@ fn line_expand(
Ok(expanded) Ok(expanded)
} }
fn stringify_expand(
db: &dyn AstDatabase,
id: MacroCallId,
_tt: &tt::Subtree,
) -> Result<tt::Subtree, mbe::ExpandError> {
let loc = db.lookup_intern_macro(id);
let macro_call = loc.ast_id.to_node(db);
let macro_content = {
let arg = macro_call.token_tree().ok_or_else(|| mbe::ExpandError::UnexpectedToken)?;
let macro_args = arg.syntax().clone();
let text = macro_args.text();
let without_parens = TextUnit::of_char('(')..text.len() - TextUnit::of_char(')');
text.slice(without_parens).to_string()
};
let expanded = quote! {
#macro_content
};
Ok(expanded)
}
fn to_col_number(db: &dyn AstDatabase, file: HirFileId, pos: TextUnit) -> usize {
// FIXME: Use expansion info
let file_id = file.original_file(db);
let text = db.file_text(file_id);
let mut col_num = 1;
for c in text[..pos.to_usize()].chars().rev() {
if c == '\n' {
break;
}
col_num = col_num + 1;
}
col_num
}
fn column_expand(
db: &dyn AstDatabase,
id: MacroCallId,
_tt: &tt::Subtree,
) -> Result<tt::Subtree, mbe::ExpandError> {
let loc = db.lookup_intern_macro(id);
let macro_call = loc.ast_id.to_node(db);
let _arg = macro_call.token_tree().ok_or_else(|| mbe::ExpandError::UnexpectedToken)?;
let col_start = macro_call.syntax().text_range().start();
let file = id.as_file(MacroFileKind::Expr);
let col_num = to_col_number(db, file, col_start);
let expanded = quote! {
#col_num
};
Ok(expanded)
}
fn file_expand(
db: &dyn AstDatabase,
id: MacroCallId,
_tt: &tt::Subtree,
) -> Result<tt::Subtree, mbe::ExpandError> {
let loc = db.lookup_intern_macro(id);
let macro_call = loc.ast_id.to_node(db);
let _ = macro_call.token_tree().ok_or_else(|| mbe::ExpandError::UnexpectedToken)?;
// FIXME: RA purposefully lacks knowledge of absolute file names
// so just return "".
let file_name = "";
let expanded = quote! {
#file_name
};
Ok(expanded)
}
#[cfg(test)]
mod tests {
use super::*;
use crate::{test_db::TestDB, MacroCallLoc};
use ra_db::{fixture::WithFixture, SourceDatabase};
fn expand_builtin_macro(s: &str, expander: BuiltinFnLikeExpander) -> String {
let (db, file_id) = TestDB::with_single_file(&s);
let parsed = db.parse(file_id);
let macro_calls: Vec<_> =
parsed.syntax_node().descendants().filter_map(|it| ast::MacroCall::cast(it)).collect();
let ast_id_map = db.ast_id_map(file_id.into());
// the first one should be a macro_rules
let def = MacroDefId {
krate: CrateId(0),
ast_id: AstId::new(file_id.into(), ast_id_map.ast_id(&macro_calls[0])),
kind: MacroDefKind::BuiltIn(expander),
};
let loc = MacroCallLoc {
def,
ast_id: AstId::new(file_id.into(), ast_id_map.ast_id(&macro_calls[1])),
};
let id = db.intern_macro(loc);
let parsed = db.parse_or_expand(id.as_file(MacroFileKind::Expr)).unwrap();
parsed.text().to_string()
}
#[test]
fn test_column_expand() {
let expanded = expand_builtin_macro(
r#"
#[rustc_builtin_macro]
macro_rules! column {() => {}}
column!()
"#,
BuiltinFnLikeExpander::Column,
);
assert_eq!(expanded, "9");
}
#[test]
fn test_line_expand() {
let expanded = expand_builtin_macro(
r#"
#[rustc_builtin_macro]
macro_rules! line {() => {}}
line!()
"#,
BuiltinFnLikeExpander::Line,
);
assert_eq!(expanded, "4");
}
#[test]
fn test_stringify_expand() {
let expanded = expand_builtin_macro(
r#"
#[rustc_builtin_macro]
macro_rules! stringify {() => {}}
stringify!(a b c)
"#,
BuiltinFnLikeExpander::Stringify,
);
assert_eq!(expanded, "\"a b c\"");
}
#[test]
fn test_file_expand() {
let expanded = expand_builtin_macro(
r#"
#[rustc_builtin_macro]
macro_rules! file {() => {}}
file!()
"#,
BuiltinFnLikeExpander::File,
);
assert_eq!(expanded, "\"\"");
}
}

View file

@ -9,14 +9,14 @@ use ra_prof::profile;
use ra_syntax::{AstNode, Parse, SyntaxNode}; use ra_syntax::{AstNode, Parse, SyntaxNode};
use crate::{ use crate::{
ast_id_map::AstIdMap, BuiltinExpander, HirFileId, HirFileIdRepr, MacroCallId, MacroCallLoc, ast_id_map::AstIdMap, BuiltinFnLikeExpander, HirFileId, HirFileIdRepr, MacroCallId,
MacroDefId, MacroDefKind, MacroFile, MacroFileKind, MacroCallLoc, MacroDefId, MacroDefKind, MacroFile, MacroFileKind,
}; };
#[derive(Debug, Clone, Eq, PartialEq)] #[derive(Debug, Clone, Eq, PartialEq)]
pub enum TokenExpander { pub enum TokenExpander {
MacroRules(mbe::MacroRules), MacroRules(mbe::MacroRules),
Builtin(BuiltinExpander), Builtin(BuiltinFnLikeExpander),
} }
impl TokenExpander { impl TokenExpander {

View file

@ -24,7 +24,10 @@ use ra_syntax::{
}; };
use crate::ast_id_map::FileAstId; use crate::ast_id_map::FileAstId;
use crate::builtin_macro::BuiltinExpander; use crate::builtin_macro::BuiltinFnLikeExpander;
#[cfg(test)]
mod test_db;
/// Input to the analyzer is a set of files, where each file is identified by /// Input to the analyzer is a set of files, where each file is identified by
/// `FileId` and contains source code. However, another source of source code in /// `FileId` and contains source code. However, another source of source code in
@ -135,7 +138,7 @@ pub struct MacroDefId {
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum MacroDefKind { pub enum MacroDefKind {
Declarative, Declarative,
BuiltIn(BuiltinExpander), BuiltIn(BuiltinFnLikeExpander),
} }
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]

View file

@ -142,4 +142,7 @@ pub const TARGET_TYPE: Name = Name::new_inline_ascii(6, b"Target");
pub const BOX_TYPE: Name = Name::new_inline_ascii(3, b"Box"); pub const BOX_TYPE: Name = Name::new_inline_ascii(3, b"Box");
// Builtin Macros // Builtin Macros
pub const FILE_MACRO: Name = Name::new_inline_ascii(4, b"file");
pub const COLUMN_MACRO: Name = Name::new_inline_ascii(6, b"column");
pub const LINE_MACRO: Name = Name::new_inline_ascii(4, b"line"); pub const LINE_MACRO: Name = Name::new_inline_ascii(4, b"line");
pub const STRINGIFY_MACRO: Name = Name::new_inline_ascii(9, b"stringify");

View file

@ -0,0 +1,50 @@
//! Database used for testing `hir_expand`.
use std::{
panic,
sync::{Arc, Mutex},
};
use ra_db::{salsa, CrateId, FileId, FileLoader, FileLoaderDelegate, RelativePath};
#[salsa::database(
ra_db::SourceDatabaseExtStorage,
ra_db::SourceDatabaseStorage,
crate::db::AstDatabaseStorage
)]
#[derive(Debug, Default)]
pub struct TestDB {
runtime: salsa::Runtime<TestDB>,
events: Mutex<Option<Vec<salsa::Event<TestDB>>>>,
}
impl salsa::Database for TestDB {
fn salsa_runtime(&self) -> &salsa::Runtime<Self> {
&self.runtime
}
fn salsa_event(&self, event: impl Fn() -> salsa::Event<TestDB>) {
let mut events = self.events.lock().unwrap();
if let Some(events) = &mut *events {
events.push(event());
}
}
}
impl panic::RefUnwindSafe for TestDB {}
impl FileLoader for TestDB {
fn file_text(&self, file_id: FileId) -> Arc<String> {
FileLoaderDelegate(self).file_text(file_id)
}
fn resolve_relative_path(
&self,
anchor: FileId,
relative_path: &RelativePath,
) -> Option<FileId> {
FileLoaderDelegate(self).resolve_relative_path(anchor, relative_path)
}
fn relevant_crates(&self, file_id: FileId) -> Arc<Vec<CrateId>> {
FileLoaderDelegate(self).relevant_crates(file_id)
}
}

View file

@ -30,7 +30,7 @@ pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<Cal
let (callable_def, _subst) = analyzer.type_of(db, &expr.expr()?)?.as_callable()?; let (callable_def, _subst) = analyzer.type_of(db, &expr.expr()?)?.as_callable()?;
match callable_def { match callable_def {
hir::CallableDef::Function(it) => { hir::CallableDef::Function(it) => {
(CallInfo::with_fn(db, it), it.data(db).has_self_param()) (CallInfo::with_fn(db, it), it.has_self_param(db))
} }
hir::CallableDef::Struct(it) => (CallInfo::with_struct(db, it)?, false), hir::CallableDef::Struct(it) => (CallInfo::with_struct(db, it)?, false),
hir::CallableDef::EnumVariant(it) => (CallInfo::with_enum_variant(db, it)?, false), hir::CallableDef::EnumVariant(it) => (CallInfo::with_enum_variant(db, it)?, false),
@ -38,7 +38,7 @@ pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<Cal
} }
FnCallNode::MethodCallExpr(expr) => { FnCallNode::MethodCallExpr(expr) => {
let function = analyzer.resolve_method_call(&expr)?; let function = analyzer.resolve_method_call(&expr)?;
(CallInfo::with_fn(db, function), function.data(db).has_self_param()) (CallInfo::with_fn(db, function), function.has_self_param(db))
} }
FnCallNode::MacroCallExpr(expr) => { FnCallNode::MacroCallExpr(expr) => {
let macro_def = analyzer.resolve_macro_call(db, &expr)?; let macro_def = analyzer.resolve_macro_call(db, &expr)?;

View file

@ -313,7 +313,7 @@ impl RootDatabase {
hir::db::RawItemsQuery hir::db::RawItemsQuery
hir::db::CrateDefMapQuery hir::db::CrateDefMapQuery
hir::db::GenericParamsQuery hir::db::GenericParamsQuery
hir::db::FnDataQuery hir::db::FunctionDataQuery
hir::db::TypeAliasDataQuery hir::db::TypeAliasDataQuery
hir::db::ConstDataQuery hir::db::ConstDataQuery
hir::db::StaticDataQuery hir::db::StaticDataQuery
@ -324,7 +324,7 @@ impl RootDatabase {
hir::db::ExprScopesQuery hir::db::ExprScopesQuery
hir::db::InferQuery hir::db::InferQuery
hir::db::TypeForDefQuery hir::db::TypeForDefQuery
hir::db::TypeForFieldQuery hir::db::FieldTypesQuery
hir::db::CallableItemSignatureQuery hir::db::CallableItemSignatureQuery
hir::db::GenericPredicatesQuery hir::db::GenericPredicatesQuery
hir::db::GenericDefaultsQuery hir::db::GenericDefaultsQuery

View file

@ -59,8 +59,7 @@ fn complete_fields(acc: &mut Completions, ctx: &CompletionContext, receiver: Ty)
fn complete_methods(acc: &mut Completions, ctx: &CompletionContext, receiver: Ty) { fn complete_methods(acc: &mut Completions, ctx: &CompletionContext, receiver: Ty) {
let mut seen_methods = FxHashSet::default(); let mut seen_methods = FxHashSet::default();
ctx.analyzer.iterate_method_candidates(ctx.db, receiver, None, |_ty, func| { ctx.analyzer.iterate_method_candidates(ctx.db, receiver, None, |_ty, func| {
let data = func.data(ctx.db); if func.has_self_param(ctx.db) && seen_methods.insert(func.name(ctx.db)) {
if data.has_self_param() && seen_methods.insert(data.name().clone()) {
acc.add_function(ctx, func); acc.add_function(ctx, func);
} }
None::<()> None::<()>

View file

@ -1,6 +1,6 @@
//! FIXME: write short doc here //! FIXME: write short doc here
use hir::{Adt, Either, PathResolution}; use hir::{Adt, Either, HasSource, PathResolution};
use ra_syntax::AstNode; use ra_syntax::AstNode;
use test_utils::tested_by; use test_utils::tested_by;
@ -27,7 +27,7 @@ pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) {
} }
if Some(module) == ctx.module { if Some(module) == ctx.module {
if let Some(import) = import { if let Some(import) = import {
if let Either::A(use_tree) = module.import_source(ctx.db, import) { if let Either::A(use_tree) = import.source(ctx.db).value {
if use_tree.syntax().text_range().contains_inclusive(ctx.offset) { if use_tree.syntax().text_range().contains_inclusive(ctx.offset) {
// for `use self::foo<|>`, don't suggest `foo` as a completion // for `use self::foo<|>`, don't suggest `foo` as a completion
tested_by!(dont_complete_current_use); tested_by!(dont_complete_current_use);
@ -53,8 +53,7 @@ pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) {
ctx.analyzer.iterate_path_candidates(ctx.db, ty.clone(), None, |_ty, item| { ctx.analyzer.iterate_path_candidates(ctx.db, ty.clone(), None, |_ty, item| {
match item { match item {
hir::AssocItem::Function(func) => { hir::AssocItem::Function(func) => {
let data = func.data(ctx.db); if !func.has_self_param(ctx.db) {
if !data.has_self_param() {
acc.add_function(ctx, func); acc.add_function(ctx, func);
} }
} }
@ -80,8 +79,7 @@ pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) {
for item in t.items(ctx.db) { for item in t.items(ctx.db) {
match item { match item {
hir::AssocItem::Function(func) => { hir::AssocItem::Function(func) => {
let data = func.data(ctx.db); if !func.has_self_param(ctx.db) {
if !data.has_self_param() {
acc.add_function(ctx, func); acc.add_function(ctx, func);
} }
} }

View file

@ -1,6 +1,6 @@
//! This modules takes care of rendering various definitions as completion items. //! This modules takes care of rendering various definitions as completion items.
use hir::{db::HirDatabase, Attrs, Docs, HasSource, HirDisplay, ScopeDef, Ty, TypeWalk}; use hir::{db::HirDatabase, Docs, HasAttrs, HasSource, HirDisplay, ScopeDef, Ty, TypeWalk};
use join_to_string::join; use join_to_string::join;
use ra_syntax::ast::NameOwner; use ra_syntax::ast::NameOwner;
use test_utils::tested_by; use test_utils::tested_by;
@ -199,14 +199,17 @@ impl Completions {
name: Option<String>, name: Option<String>,
func: hir::Function, func: hir::Function,
) { ) {
let data = func.data(ctx.db); let func_name = func.name(ctx.db);
let name = name.unwrap_or_else(|| data.name().to_string()); let has_self_param = func.has_self_param(ctx.db);
let params = func.params(ctx.db);
let name = name.unwrap_or_else(|| func_name.to_string());
let ast_node = func.source(ctx.db).value; let ast_node = func.source(ctx.db).value;
let detail = function_label(&ast_node); let detail = function_label(&ast_node);
let mut builder = let mut builder =
CompletionItem::new(CompletionKind::Reference, ctx.source_range(), name.clone()) CompletionItem::new(CompletionKind::Reference, ctx.source_range(), name.clone())
.kind(if data.has_self_param() { .kind(if has_self_param {
CompletionItemKind::Method CompletionItemKind::Method
} else { } else {
CompletionItemKind::Function CompletionItemKind::Function
@ -221,12 +224,11 @@ impl Completions {
&& ctx.db.feature_flags.get("completion.insertion.add-call-parenthesis") && ctx.db.feature_flags.get("completion.insertion.add-call-parenthesis")
{ {
tested_by!(inserts_parens_for_function_calls); tested_by!(inserts_parens_for_function_calls);
let (snippet, label) = let (snippet, label) = if params.is_empty() || has_self_param && params.len() == 1 {
if data.params().is_empty() || data.has_self_param() && data.params().len() == 1 { (format!("{}()$0", func_name), format!("{}()", name))
(format!("{}()$0", data.name()), format!("{}()", name)) } else {
} else { (format!("{}($0)", func_name), format!("{}(…)", name))
(format!("{}($0)", data.name()), format!("{}(…)", name)) };
};
builder = builder.lookup_by(name).label(label).insert_snippet(snippet); builder = builder.lookup_by(name).label(label).insert_snippet(snippet);
} }
@ -285,11 +287,8 @@ impl Completions {
} }
} }
fn is_deprecated(node: impl Attrs, db: &impl HirDatabase) -> bool { fn is_deprecated(node: impl HasAttrs, db: &impl HirDatabase) -> bool {
match node.attrs(db) { node.attrs(db).by_key("deprecated").exists()
None => false,
Some(attrs) => attrs.iter().any(|x| x.is_simple_atom("deprecated")),
}
} }
fn has_non_default_type_params(def: hir::GenericDef, db: &db::RootDatabase) -> bool { fn has_non_default_type_params(def: hir::GenericDef, db: &db::RootDatabase) -> bool {

View file

@ -22,7 +22,6 @@ use crate::{
hir::db::InternDatabaseStorage, hir::db::InternDatabaseStorage,
hir::db::AstDatabaseStorage, hir::db::AstDatabaseStorage,
hir::db::DefDatabaseStorage, hir::db::DefDatabaseStorage,
hir::db::DefDatabase2Storage,
hir::db::HirDatabaseStorage hir::db::HirDatabaseStorage
)] )]
#[derive(Debug)] #[derive(Debug)]

View file

@ -55,7 +55,7 @@ impl FunctionSignature {
pub(crate) fn from_struct(db: &db::RootDatabase, st: hir::Struct) -> Option<Self> { pub(crate) fn from_struct(db: &db::RootDatabase, st: hir::Struct) -> Option<Self> {
let node: ast::StructDef = st.source(db).value; let node: ast::StructDef = st.source(db).value;
match node.kind() { match node.kind() {
ast::StructKind::Named(_) => return None, ast::StructKind::Record(_) => return None,
_ => (), _ => (),
}; };
@ -89,7 +89,7 @@ impl FunctionSignature {
) -> Option<Self> { ) -> Option<Self> {
let node: ast::EnumVariant = variant.source(db).value; let node: ast::EnumVariant = variant.source(db).value;
match node.kind() { match node.kind() {
ast::StructKind::Named(_) | ast::StructKind::Unit => return None, ast::StructKind::Record(_) | ast::StructKind::Unit => return None,
_ => (), _ => (),
}; };

View file

@ -40,16 +40,23 @@ fn expand_macro_recur(
let analyzer = hir::SourceAnalyzer::new(db, source, None); let analyzer = hir::SourceAnalyzer::new(db, source, None);
let expansion = analyzer.expand(db, macro_call)?; let expansion = analyzer.expand(db, macro_call)?;
let macro_file_id = expansion.file_id(); let macro_file_id = expansion.file_id();
let expanded: SyntaxNode = db.parse_or_expand(macro_file_id)?; let mut expanded: SyntaxNode = db.parse_or_expand(macro_file_id)?;
let children = expanded.descendants().filter_map(ast::MacroCall::cast); let children = expanded.descendants().filter_map(ast::MacroCall::cast);
let mut replaces = FxHashMap::default(); let mut replaces = FxHashMap::default();
for child in children.into_iter() { for child in children.into_iter() {
let node = hir::Source::new(macro_file_id, &child); let node = hir::Source::new(macro_file_id, &child);
let new_node = expand_macro_recur(db, source, node)?; if let Some(new_node) = expand_macro_recur(db, source, node) {
// Replace the whole node if it is root
replaces.insert(child.syntax().clone().into(), new_node.into()); // `replace_descendants` will not replace the parent node
// but `SyntaxNode::descendants include itself
if expanded == *child.syntax() {
expanded = new_node;
} else {
replaces.insert(child.syntax().clone().into(), new_node.into());
}
}
} }
Some(replace_descendants(&expanded, &replaces)) Some(replace_descendants(&expanded, &replaces))
@ -217,4 +224,49 @@ fn some_thing() -> u32 {
} }
"###); "###);
} }
#[test]
fn macro_expand_match_ast_inside_let_statement() {
let res = check_expand_macro(
r#"
//- /lib.rs
macro_rules! match_ast {
(match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) };
(match ($node:expr) {}) => {{}};
}
fn main() {
let p = f(|it| {
let res = mat<|>ch_ast! { match c {}};
Some(res)
})?;
}
"#,
);
assert_eq!(res.name, "match_ast");
assert_snapshot!(res.expansion, @r###"{}"###);
}
#[test]
fn macro_expand_inner_macro_fail_to_expand() {
let res = check_expand_macro(
r#"
//- /lib.rs
macro_rules! bar {
(BAD) => {};
}
macro_rules! foo {
() => {bar!()};
}
fn main() {
let res = fo<|>o!();
}
"#,
);
assert_eq!(res.name, "foo");
assert_snapshot!(res.expansion, @r###"bar!()"###);
}
} }

View file

@ -54,7 +54,7 @@ use std::sync::Arc;
use ra_cfg::CfgOptions; use ra_cfg::CfgOptions;
use ra_db::{ use ra_db::{
salsa::{self, ParallelDatabase}, salsa::{self, ParallelDatabase},
CheckCanceled, FileLoader, SourceDatabase, CheckCanceled, Env, FileLoader, SourceDatabase,
}; };
use ra_syntax::{SourceFile, TextRange, TextUnit}; use ra_syntax::{SourceFile, TextRange, TextUnit};
@ -240,7 +240,7 @@ impl Analysis {
// Default to enable test for single file. // Default to enable test for single file.
let mut cfg_options = CfgOptions::default(); let mut cfg_options = CfgOptions::default();
cfg_options.insert_atom("test".into()); cfg_options.insert_atom("test".into());
crate_graph.add_crate_root(file_id, Edition::Edition2018, cfg_options); crate_graph.add_crate_root(file_id, Edition::Edition2018, cfg_options, Env::default());
change.add_file(source_root, file_id, "main.rs".into(), Arc::new(text)); change.add_file(source_root, file_id, "main.rs".into(), Arc::new(text));
change.set_crate_graph(crate_graph); change.set_crate_graph(crate_graph);
host.apply_change(change); host.apply_change(change);

View file

@ -3,7 +3,7 @@
use std::sync::Arc; use std::sync::Arc;
use ra_cfg::CfgOptions; use ra_cfg::CfgOptions;
use ra_db::RelativePathBuf; use ra_db::{Env, RelativePathBuf};
use test_utils::{extract_offset, extract_range, parse_fixture, CURSOR_MARKER}; use test_utils::{extract_offset, extract_range, parse_fixture, CURSOR_MARKER};
use crate::{ use crate::{
@ -96,9 +96,15 @@ impl MockAnalysis {
let file_id = FileId(i as u32 + 1); let file_id = FileId(i as u32 + 1);
let cfg_options = CfgOptions::default(); let cfg_options = CfgOptions::default();
if path == "/lib.rs" || path == "/main.rs" { if path == "/lib.rs" || path == "/main.rs" {
root_crate = Some(crate_graph.add_crate_root(file_id, Edition2018, cfg_options)); root_crate = Some(crate_graph.add_crate_root(
file_id,
Edition2018,
cfg_options,
Env::default(),
));
} else if path.ends_with("/lib.rs") { } else if path.ends_with("/lib.rs") {
let other_crate = crate_graph.add_crate_root(file_id, Edition2018, cfg_options); let other_crate =
crate_graph.add_crate_root(file_id, Edition2018, cfg_options, Env::default());
let crate_name = path.parent().unwrap().file_name().unwrap(); let crate_name = path.parent().unwrap().file_name().unwrap();
if let Some(root_crate) = root_crate { if let Some(root_crate) = root_crate {
crate_graph.add_dep(root_crate, crate_name.into(), other_crate).unwrap(); crate_graph.add_dep(root_crate, crate_name.into(), other_crate).unwrap();

View file

@ -34,12 +34,14 @@ pub(crate) fn crate_for(db: &RootDatabase, file_id: FileId) -> Vec<CrateId> {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use ra_cfg::CfgOptions;
use ra_db::Env;
use crate::{ use crate::{
mock_analysis::{analysis_and_position, MockAnalysis}, mock_analysis::{analysis_and_position, MockAnalysis},
AnalysisChange, CrateGraph, AnalysisChange, CrateGraph,
Edition::Edition2018, Edition::Edition2018,
}; };
use ra_cfg::CfgOptions;
#[test] #[test]
fn test_resolve_parent_module() { fn test_resolve_parent_module() {
@ -87,7 +89,12 @@ mod tests {
assert!(host.analysis().crate_for(mod_file).unwrap().is_empty()); assert!(host.analysis().crate_for(mod_file).unwrap().is_empty());
let mut crate_graph = CrateGraph::default(); let mut crate_graph = CrateGraph::default();
let crate_id = crate_graph.add_crate_root(root_file, Edition2018, CfgOptions::default()); let crate_id = crate_graph.add_crate_root(
root_file,
Edition2018,
CfgOptions::default(),
Env::default(),
);
let mut change = AnalysisChange::new(); let mut change = AnalysisChange::new();
change.set_crate_graph(crate_graph); change.set_crate_graph(crate_graph);
host.apply_change(change); host.apply_change(change);

View file

@ -1,6 +1,6 @@
//! Functions that are used to classify an element from its definition or reference. //! Functions that are used to classify an element from its definition or reference.
use hir::{FromSource, Module, ModuleSource, Path, PathResolution, Source, SourceAnalyzer}; use hir::{FromSource, Module, ModuleSource, PathResolution, Source, SourceAnalyzer};
use ra_prof::profile; use ra_prof::profile;
use ra_syntax::{ast, match_ast, AstNode}; use ra_syntax::{ast, match_ast, AstNode};
use test_utils::tested_by; use test_utils::tested_by;
@ -140,12 +140,8 @@ pub(crate) fn classify_name_ref(
if let Some(record_field) = ast::RecordField::cast(parent.clone()) { if let Some(record_field) = ast::RecordField::cast(parent.clone()) {
tested_by!(goto_definition_works_for_record_fields); tested_by!(goto_definition_works_for_record_fields);
if let Some(record_lit) = record_field.syntax().ancestors().find_map(ast::RecordLit::cast) { if let Some(field_def) = analyzer.resolve_record_field(&record_field) {
let variant_def = analyzer.resolve_record_literal(&record_lit)?; return Some(from_struct_field(db, field_def));
let hir_path = Path::from_name_ref(name_ref.value);
let hir_name = hir_path.as_ident()?;
let field = variant_def.field(db, hir_name)?;
return Some(from_struct_field(db, field));
} }
} }

View file

@ -1,7 +1,6 @@
//! FIXME: write short doc here //! FIXME: write short doc here
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::str::FromStr;
use cargo_metadata::{CargoOpt, MetadataCommand}; use cargo_metadata::{CargoOpt, MetadataCommand};
use ra_arena::{impl_arena_id, Arena, RawId}; use ra_arena::{impl_arena_id, Arena, RawId};
@ -55,11 +54,13 @@ struct TargetData {
name: String, name: String,
root: PathBuf, root: PathBuf,
kind: TargetKind, kind: TargetKind,
is_proc_macro: bool,
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq)] #[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum TargetKind { pub enum TargetKind {
Bin, Bin,
/// Any kind of Cargo lib crate-type (dylib, rlib, proc-macro, ...).
Lib, Lib,
Example, Example,
Test, Test,
@ -75,6 +76,7 @@ impl TargetKind {
"test" => TargetKind::Test, "test" => TargetKind::Test,
"bench" => TargetKind::Bench, "bench" => TargetKind::Bench,
"example" => TargetKind::Example, "example" => TargetKind::Example,
"proc-macro" => TargetKind::Lib,
_ if kind.contains("lib") => TargetKind::Lib, _ if kind.contains("lib") => TargetKind::Lib,
_ => continue, _ => continue,
}; };
@ -124,6 +126,9 @@ impl Target {
pub fn kind(self, ws: &CargoWorkspace) -> TargetKind { pub fn kind(self, ws: &CargoWorkspace) -> TargetKind {
ws.targets[self].kind ws.targets[self].kind
} }
pub fn is_proc_macro(self, ws: &CargoWorkspace) -> bool {
ws.targets[self].is_proc_macro
}
} }
impl CargoWorkspace { impl CargoWorkspace {
@ -143,8 +148,7 @@ impl CargoWorkspace {
for meta_pkg in meta.packages { for meta_pkg in meta.packages {
let cargo_metadata::Package { id, edition, name, manifest_path, .. } = meta_pkg; let cargo_metadata::Package { id, edition, name, manifest_path, .. } = meta_pkg;
let is_member = ws_members.contains(&id); let is_member = ws_members.contains(&id);
let edition = Edition::from_str(&edition) let edition = edition.parse::<Edition>()?;
.map_err(|e| (format!("metadata for package {} failed: {}", &name, e.msg)))?;
let pkg = packages.alloc(PackageData { let pkg = packages.alloc(PackageData {
name, name,
manifest: manifest_path, manifest: manifest_path,
@ -157,11 +161,13 @@ impl CargoWorkspace {
let pkg_data = &mut packages[pkg]; let pkg_data = &mut packages[pkg];
pkg_by_id.insert(id, pkg); pkg_by_id.insert(id, pkg);
for meta_tgt in meta_pkg.targets { for meta_tgt in meta_pkg.targets {
let is_proc_macro = meta_tgt.kind.as_slice() == &["proc-macro"];
let tgt = targets.alloc(TargetData { let tgt = targets.alloc(TargetData {
pkg, pkg,
name: meta_tgt.name, name: meta_tgt.name,
root: meta_tgt.src_path.clone(), root: meta_tgt.src_path.clone(),
kind: TargetKind::new(meta_tgt.kind.as_slice()), kind: TargetKind::new(meta_tgt.kind.as_slice()),
is_proc_macro,
}); });
pkg_data.targets.push(tgt); pkg_data.targets.push(tgt);
} }

View file

@ -13,7 +13,7 @@ use std::{
}; };
use ra_cfg::CfgOptions; use ra_cfg::CfgOptions;
use ra_db::{CrateGraph, CrateId, Edition, FileId}; use ra_db::{CrateGraph, CrateId, Edition, Env, FileId};
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use serde_json::from_reader; use serde_json::from_reader;
@ -146,7 +146,12 @@ impl ProjectWorkspace {
}; };
crates.insert( crates.insert(
crate_id, crate_id,
crate_graph.add_crate_root(file_id, edition, cfg_options), crate_graph.add_crate_root(
file_id,
edition,
cfg_options,
Env::default(),
),
); );
} }
} }
@ -180,8 +185,12 @@ impl ProjectWorkspace {
opts opts
}; };
let crate_id = let crate_id = crate_graph.add_crate_root(
crate_graph.add_crate_root(file_id, Edition::Edition2018, cfg_options); file_id,
Edition::Edition2018,
cfg_options,
Env::default(),
);
sysroot_crates.insert(krate, crate_id); sysroot_crates.insert(krate, crate_id);
names.insert(crate_id, krate.name(&sysroot).to_string()); names.insert(crate_id, krate.name(&sysroot).to_string());
} }
@ -200,7 +209,10 @@ impl ProjectWorkspace {
} }
let libcore = sysroot.core().and_then(|it| sysroot_crates.get(&it).copied()); let libcore = sysroot.core().and_then(|it| sysroot_crates.get(&it).copied());
let liballoc = sysroot.alloc().and_then(|it| sysroot_crates.get(&it).copied());
let libstd = sysroot.std().and_then(|it| sysroot_crates.get(&it).copied()); let libstd = sysroot.std().and_then(|it| sysroot_crates.get(&it).copied());
let libproc_macro =
sysroot.proc_macro().and_then(|it| sysroot_crates.get(&it).copied());
let mut pkg_to_lib_crate = FxHashMap::default(); let mut pkg_to_lib_crate = FxHashMap::default();
let mut pkg_crates = FxHashMap::default(); let mut pkg_crates = FxHashMap::default();
@ -216,13 +228,32 @@ impl ProjectWorkspace {
opts.insert_features(pkg.features(&cargo).iter().map(Into::into)); opts.insert_features(pkg.features(&cargo).iter().map(Into::into));
opts opts
}; };
let crate_id = let crate_id = crate_graph.add_crate_root(
crate_graph.add_crate_root(file_id, edition, cfg_options); file_id,
edition,
cfg_options,
Env::default(),
);
names.insert(crate_id, pkg.name(&cargo).to_string()); names.insert(crate_id, pkg.name(&cargo).to_string());
if tgt.kind(&cargo) == TargetKind::Lib { if tgt.kind(&cargo) == TargetKind::Lib {
lib_tgt = Some(crate_id); lib_tgt = Some(crate_id);
pkg_to_lib_crate.insert(pkg, crate_id); pkg_to_lib_crate.insert(pkg, crate_id);
} }
if tgt.is_proc_macro(&cargo) {
if let Some(proc_macro) = libproc_macro {
if let Err(_) = crate_graph.add_dep(
crate_id,
"proc_macro".into(),
proc_macro,
) {
log::error!(
"cyclic dependency on proc_macro for {}",
pkg.name(&cargo)
)
}
}
}
pkg_crates.entry(pkg).or_insert_with(Vec::new).push(crate_id); pkg_crates.entry(pkg).or_insert_with(Vec::new).push(crate_id);
} }
} }
@ -248,6 +279,11 @@ impl ProjectWorkspace {
log::error!("cyclic dependency on core for {}", pkg.name(&cargo)) log::error!("cyclic dependency on core for {}", pkg.name(&cargo))
} }
} }
if let Some(alloc) = liballoc {
if let Err(_) = crate_graph.add_dep(from, "alloc".into(), alloc) {
log::error!("cyclic dependency on alloc for {}", pkg.name(&cargo))
}
}
if let Some(std) = libstd { if let Some(std) = libstd {
if let Err(_) = crate_graph.add_dep(from, "std".into(), std) { if let Err(_) = crate_graph.add_dep(from, "std".into(), std) {
log::error!("cyclic dependency on std for {}", pkg.name(&cargo)) log::error!("cyclic dependency on std for {}", pkg.name(&cargo))

View file

@ -31,10 +31,18 @@ impl Sysroot {
self.by_name("core") self.by_name("core")
} }
pub fn alloc(&self) -> Option<SysrootCrate> {
self.by_name("alloc")
}
pub fn std(&self) -> Option<SysrootCrate> { pub fn std(&self) -> Option<SysrootCrate> {
self.by_name("std") self.by_name("std")
} }
pub fn proc_macro(&self) -> Option<SysrootCrate> {
self.by_name("proc_macro")
}
pub fn crates<'a>(&'a self) -> impl Iterator<Item = SysrootCrate> + ExactSizeIterator + 'a { pub fn crates<'a>(&'a self) -> impl Iterator<Item = SysrootCrate> + ExactSizeIterator + 'a {
self.crates.iter().map(|(id, _data)| id) self.crates.iter().map(|(id, _data)| id)
} }
@ -70,7 +78,7 @@ impl Sysroot {
} }
} }
if let Some(alloc) = sysroot.by_name("alloc") { if let Some(alloc) = sysroot.by_name("alloc") {
if let Some(core) = sysroot.by_name("core") { if let Some(core) = sysroot.core() {
sysroot.crates[alloc].deps.push(core); sysroot.crates[alloc].deps.push(core);
} }
} }

View file

@ -13,11 +13,21 @@ use crate::{
make::{self, tokens}, make::{self, tokens},
AstNode, TypeBoundsOwner, AstNode, TypeBoundsOwner,
}, },
AstToken, Direction, InsertPosition, SmolStr, SyntaxElement, AstToken, Direction, InsertPosition, SmolStr, SyntaxElement, SyntaxKind,
SyntaxKind::{ATTR, COMMENT, WHITESPACE}, SyntaxKind::{ATTR, COMMENT, WHITESPACE},
SyntaxNode, SyntaxToken, T, SyntaxNode, SyntaxToken, T,
}; };
impl ast::BinExpr {
#[must_use]
pub fn replace_op(&self, op: SyntaxKind) -> Option<ast::BinExpr> {
let op_node: SyntaxElement = self.op_details()?.0.into();
let to_insert: Option<SyntaxElement> = Some(tokens::op(op).into());
let replace_range = RangeInclusive::new(op_node.clone(), op_node);
Some(replace_children(self, replace_range, to_insert.into_iter()))
}
}
impl ast::FnDef { impl ast::FnDef {
#[must_use] #[must_use]
pub fn with_body(&self, body: ast::Block) -> ast::FnDef { pub fn with_body(&self, body: ast::Block) -> ast::FnDef {

View file

@ -127,7 +127,7 @@ pub enum BinOp {
} }
impl ast::BinExpr { impl ast::BinExpr {
fn op_details(&self) -> Option<(SyntaxToken, BinOp)> { pub fn op_details(&self) -> Option<(SyntaxToken, BinOp)> {
self.syntax().children_with_tokens().filter_map(|it| it.into_token()).find_map(|c| { self.syntax().children_with_tokens().filter_map(|it| it.into_token()).find_map(|c| {
let bin_op = match c.kind() { let bin_op = match c.kind() {
T![||] => BinOp::BooleanOr, T![||] => BinOp::BooleanOr,

View file

@ -178,15 +178,15 @@ impl ast::ImplBlock {
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub enum StructKind { pub enum StructKind {
Record(ast::RecordFieldDefList),
Tuple(ast::TupleFieldDefList), Tuple(ast::TupleFieldDefList),
Named(ast::RecordFieldDefList),
Unit, Unit,
} }
impl StructKind { impl StructKind {
fn from_node<N: AstNode>(node: &N) -> StructKind { fn from_node<N: AstNode>(node: &N) -> StructKind {
if let Some(nfdl) = child_opt::<_, ast::RecordFieldDefList>(node) { if let Some(nfdl) = child_opt::<_, ast::RecordFieldDefList>(node) {
StructKind::Named(nfdl) StructKind::Record(nfdl)
} else if let Some(pfl) = child_opt::<_, ast::TupleFieldDefList>(node) { } else if let Some(pfl) = child_opt::<_, ast::TupleFieldDefList>(node) {
StructKind::Tuple(pfl) StructKind::Tuple(pfl)
} else { } else {

View file

@ -173,10 +173,21 @@ fn ast_from_text<N: AstNode>(text: &str) -> N {
} }
pub mod tokens { pub mod tokens {
use crate::{AstNode, Parse, SourceFile, SyntaxKind::*, SyntaxToken, T}; use crate::{AstNode, Parse, SourceFile, SyntaxKind, SyntaxKind::*, SyntaxToken, T};
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
static SOURCE_FILE: Lazy<Parse<SourceFile>> = Lazy::new(|| SourceFile::parse(",\n; ;")); static SOURCE_FILE: Lazy<Parse<SourceFile>> =
Lazy::new(|| SourceFile::parse("const C: () = (1 != 1, 2 == 2)\n;"));
pub fn op(op: SyntaxKind) -> SyntaxToken {
SOURCE_FILE
.tree()
.syntax()
.descendants_with_tokens()
.filter_map(|it| it.into_token())
.find(|it| it.kind() == op)
.unwrap()
}
pub fn comma() -> SyntaxToken { pub fn comma() -> SyntaxToken {
SOURCE_FILE SOURCE_FILE

View file

@ -141,6 +141,11 @@ to load path and require it in `init.el`
## Vim and NeoVim ## Vim and NeoVim
Neovim 0.5 has a built in language server. For a quick start configuration of
rust-analyzer, use [neovim/nvim-lsp](https://github.com/neovim/nvim-lsp#rust_analyzer).
Once `neovim/nvim-lsp` is installed, you can use `call nvim_lsp#setup("rust_analyzer", {})`
or `lua require'nvim_lsp'.rust_analyzer.setup({})` to quickly get set up.
* Install coc.nvim by following the instructions at [coc.nvim] * Install coc.nvim by following the instructions at [coc.nvim]
- You will need nodejs installed. - You will need nodejs installed.
- You may want to include some of the sample vim configurations [from here][coc-vim-conf] - You may want to include some of the sample vim configurations [from here][coc-vim-conf]

View file

@ -329,6 +329,25 @@ fn main() {
} }
``` ```
## `invert_if`
Apply invert_if
This transforms if expressions of the form `if !x {A} else {B}` into `if x {B} else {A}`
This also works with `!=`. This assist can only be applied with the cursor
on `if`.
```rust
// BEFORE
fn main() {
if┃ !y { A } else { B }
}
// AFTER
fn main() {
if y { B } else { A }
}
```
## `make_raw_string` ## `make_raw_string`
Adds `r#` to a plain string literal. Adds `r#` to a plain string literal.

View file

@ -74,7 +74,6 @@ fn no_docs_comments() {
"ra_db", "ra_db",
"ra_hir", "ra_hir",
"ra_hir_expand", "ra_hir_expand",
"ra_hir_def",
"ra_ide_api", "ra_ide_api",
"ra_lsp_server", "ra_lsp_server",
"ra_mbe", "ra_mbe",