2
0
Fork 0
mirror of https://github.com/rust-lang/rust-analyzer synced 2025-02-13 12:43:38 +00:00

Merge branch 'master' of https://github.com/rust-analyzer/rust-analyzer into feature/themes

This commit is contained in:
Seivan Heidari 2019-11-09 16:27:40 +01:00
commit 529b227d42
20 changed files with 406 additions and 190 deletions

1
Cargo.lock generated
View file

@ -1060,6 +1060,7 @@ dependencies = [
"ra_arena 0.1.0",
"ra_db 0.1.0",
"ra_mbe 0.1.0",
"ra_parser 0.1.0",
"ra_prof 0.1.0",
"ra_syntax 0.1.0",
"ra_tt 0.1.0",

View file

@ -12,7 +12,7 @@ use crate::{
impl Struct {
pub(crate) fn variant_data(self, db: &impl DefDatabase) -> Arc<VariantData> {
db.struct_data(self.id).variant_data.clone()
db.struct_data(self.id.into()).variant_data.clone()
}
}

View file

@ -288,7 +288,7 @@ pub struct Struct {
impl Struct {
pub fn module(self, db: &impl DefDatabase) -> Module {
Module { id: self.id.module(db) }
Module { id: self.id.0.module(db) }
}
pub fn krate(self, db: &impl DefDatabase) -> Option<Crate> {
@ -296,11 +296,11 @@ impl Struct {
}
pub fn name(self, db: &impl DefDatabase) -> Option<Name> {
db.struct_data(self.id).name.clone()
db.struct_data(self.id.into()).name.clone()
}
pub fn fields(self, db: &impl HirDatabase) -> Vec<StructField> {
db.struct_data(self.id)
db.struct_data(self.id.into())
.variant_data
.fields()
.into_iter()
@ -310,7 +310,7 @@ impl Struct {
}
pub fn field(self, db: &impl HirDatabase, name: &Name) -> Option<StructField> {
db.struct_data(self.id)
db.struct_data(self.id.into())
.variant_data
.fields()
.into_iter()
@ -346,11 +346,11 @@ pub struct Union {
impl Union {
pub fn name(self, db: &impl DefDatabase) -> Option<Name> {
db.union_data(self.id).name.clone()
db.struct_data(self.id.into()).name.clone()
}
pub fn module(self, db: &impl HirDatabase) -> Module {
Module { id: self.id.module(db) }
Module { id: self.id.0.module(db) }
}
pub fn ty(self, db: &impl HirDatabase) -> Ty {

View file

@ -78,13 +78,13 @@ impl HasSource for StructField {
impl HasSource for Struct {
type Ast = ast::StructDef;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::StructDef> {
self.id.source(db)
self.id.0.source(db)
}
}
impl HasSource for Union {
type Ast = ast::StructDef;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::StructDef> {
self.id.source(db)
self.id.0.source(db)
}
}
impl HasSource for Enum {

View file

@ -67,10 +67,7 @@ impl ExprScopes {
&self.scopes[scope].entries
}
pub(crate) fn scope_chain<'a>(
&'a self,
scope: Option<ScopeId>,
) -> impl Iterator<Item = ScopeId> + 'a {
pub(crate) fn scope_chain(&self, scope: Option<ScopeId>) -> impl Iterator<Item = ScopeId> + '_ {
std::iter::successors(scope, move |&scope| self.scopes[scope].parent)
}

View file

@ -1,5 +1,6 @@
//! FIXME: write short doc here
use hir_def::{StructId, StructOrUnionId, UnionId};
use hir_expand::name::AsName;
use ra_syntax::ast::{self, AstNode, NameOwner};
@ -15,18 +16,19 @@ pub trait FromSource: Sized {
fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self>;
}
// FIXME: these two impls are wrong, `ast::StructDef` might produce either a struct or a union
impl FromSource for Struct {
type Ast = ast::StructDef;
fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self> {
let id = from_source(db, src)?;
Some(Struct { id })
let id: StructOrUnionId = from_source(db, src)?;
Some(Struct { id: StructId(id) })
}
}
impl FromSource for Union {
type Ast = ast::StructDef;
fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self> {
let id = from_source(db, src)?;
Some(Union { id })
let id: StructOrUnionId = from_source(db, src)?;
Some(Union { id: UnionId(id) })
}
}
impl FromSource for Enum {

View file

@ -665,7 +665,7 @@ fn type_for_builtin(def: BuiltinType) -> Ty {
}
fn fn_sig_for_struct_constructor(db: &impl HirDatabase, def: Struct) -> FnSig {
let struct_data = db.struct_data(def.id);
let struct_data = db.struct_data(def.id.into());
let fields = match struct_data.variant_data.fields() {
Some(fields) => fields,
None => panic!("fn_sig_for_struct_constructor called on unit struct"),
@ -681,7 +681,7 @@ fn fn_sig_for_struct_constructor(db: &impl HirDatabase, def: Struct) -> FnSig {
/// Build the type of a tuple struct constructor.
fn type_for_struct_constructor(db: &impl HirDatabase, def: Struct) -> Ty {
let struct_data = db.struct_data(def.id);
let struct_data = db.struct_data(def.id.into());
if struct_data.variant_data.fields().is_none() {
return type_for_adt(db, def); // Unit struct
}

View file

@ -8,7 +8,7 @@ use ra_syntax::ast::{self, NameOwner, TypeAscriptionOwner};
use crate::{
db::DefDatabase2, type_ref::TypeRef, AstItemDef, EnumId, LocalEnumVariantId,
LocalStructFieldId, StructId, UnionId,
LocalStructFieldId, StructOrUnionId,
};
/// Note that we use `StructData` for unions as well!
@ -49,15 +49,11 @@ pub struct StructFieldData {
}
impl StructData {
pub(crate) fn struct_data_query(db: &impl DefDatabase2, struct_: StructId) -> Arc<StructData> {
let src = struct_.source(db);
let name = src.ast.name().map(|n| n.as_name());
let variant_data = VariantData::new(src.ast.kind());
let variant_data = Arc::new(variant_data);
Arc::new(StructData { name, variant_data })
}
pub(crate) fn union_data_query(db: &impl DefDatabase2, struct_: UnionId) -> Arc<StructData> {
let src = struct_.source(db);
pub(crate) fn struct_data_query(
db: &impl DefDatabase2,
id: StructOrUnionId,
) -> Arc<StructData> {
let src = id.source(db);
let name = src.ast.name().map(|n| n.as_name());
let variant_data = VariantData::new(src.ast.kind());
let variant_data = Arc::new(variant_data);

View file

@ -11,7 +11,7 @@ use crate::{
raw::{ImportSourceMap, RawItems},
CrateDefMap,
},
EnumId, StructId, UnionId,
EnumId, StructOrUnionId,
};
#[salsa::query_group(InternDatabaseStorage)]
@ -19,9 +19,8 @@ pub trait InternDatabase: SourceDatabase {
#[salsa::interned]
fn intern_function(&self, loc: crate::ItemLoc<ast::FnDef>) -> crate::FunctionId;
#[salsa::interned]
fn intern_struct(&self, loc: crate::ItemLoc<ast::StructDef>) -> crate::StructId;
#[salsa::interned]
fn intern_union(&self, loc: crate::ItemLoc<ast::StructDef>) -> crate::UnionId;
fn intern_struct_or_union(&self, loc: crate::ItemLoc<ast::StructDef>)
-> crate::StructOrUnionId;
#[salsa::interned]
fn intern_enum(&self, loc: crate::ItemLoc<ast::EnumDef>) -> crate::EnumId;
#[salsa::interned]
@ -49,10 +48,7 @@ pub trait DefDatabase2: InternDatabase + AstDatabase {
fn crate_def_map(&self, krate: CrateId) -> Arc<CrateDefMap>;
#[salsa::invoke(StructData::struct_data_query)]
fn struct_data(&self, s: StructId) -> Arc<StructData>;
#[salsa::invoke(StructData::union_data_query)]
fn union_data(&self, s: UnionId) -> Arc<StructData>;
fn struct_data(&self, id: StructOrUnionId) -> Arc<StructData>;
#[salsa::invoke(EnumData::enum_data_query)]
fn enum_data(&self, e: EnumId) -> Arc<EnumData>;

View file

@ -205,26 +205,30 @@ impl AstItemDef<ast::FnDef> for FunctionId {
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct StructId(salsa::InternId);
impl_intern_key!(StructId);
impl AstItemDef<ast::StructDef> for StructId {
pub struct StructOrUnionId(salsa::InternId);
impl_intern_key!(StructOrUnionId);
impl AstItemDef<ast::StructDef> for StructOrUnionId {
fn intern(db: &impl InternDatabase, loc: ItemLoc<ast::StructDef>) -> Self {
db.intern_struct(loc)
db.intern_struct_or_union(loc)
}
fn lookup_intern(self, db: &impl InternDatabase) -> ItemLoc<ast::StructDef> {
db.lookup_intern_struct(self)
db.lookup_intern_struct_or_union(self)
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct UnionId(salsa::InternId);
impl_intern_key!(UnionId);
impl AstItemDef<ast::StructDef> for UnionId {
fn intern(db: &impl InternDatabase, loc: ItemLoc<ast::StructDef>) -> Self {
db.intern_union(loc)
pub struct StructId(pub StructOrUnionId);
impl From<StructId> for StructOrUnionId {
fn from(id: StructId) -> StructOrUnionId {
id.0
}
fn lookup_intern(self, db: &impl InternDatabase) -> ItemLoc<ast::StructDef> {
db.lookup_intern_union(self)
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct UnionId(pub StructOrUnionId);
impl From<UnionId> for StructOrUnionId {
fn from(id: UnionId) -> StructOrUnionId {
id.0
}
}

View file

@ -19,7 +19,8 @@ use crate::{
},
path::{Path, PathKind},
AdtId, AstId, AstItemDef, ConstId, CrateModuleId, EnumId, EnumVariantId, FunctionId,
LocationCtx, ModuleDefId, ModuleId, StaticId, StructId, TraitId, TypeAliasId, UnionId,
LocationCtx, ModuleDefId, ModuleId, StaticId, StructId, StructOrUnionId, TraitId, TypeAliasId,
UnionId,
};
pub(super) fn collect_defs(db: &impl DefDatabase2, mut def_map: CrateDefMap) -> CrateDefMap {
@ -664,12 +665,14 @@ where
PerNs::values(FunctionId::from_ast_id(ctx, ast_id).into())
}
raw::DefKind::Struct(ast_id) => {
let s = StructId::from_ast_id(ctx, ast_id).into();
let id = StructOrUnionId::from_ast_id(ctx, ast_id).into();
let s = StructId(id).into();
PerNs::both(s, s)
}
raw::DefKind::Union(ast_id) => {
let s = UnionId::from_ast_id(ctx, ast_id).into();
PerNs::both(s, s)
let id = StructOrUnionId::from_ast_id(ctx, ast_id).into();
let u = UnionId(id).into();
PerNs::both(u, u)
}
raw::DefKind::Enum(ast_id) => PerNs::types(EnumId::from_ast_id(ctx, ast_id).into()),
raw::DefKind::Const(ast_id) => PerNs::values(ConstId::from_ast_id(ctx, ast_id).into()),

View file

@ -10,6 +10,7 @@ log = "0.4.5"
ra_arena = { path = "../ra_arena" }
ra_db = { path = "../ra_db" }
ra_syntax = { path = "../ra_syntax" }
ra_parser = { path = "../ra_parser" }
ra_prof = { path = "../ra_prof" }
tt = { path = "../ra_tt", package = "ra_tt" }
mbe = { path = "../ra_mbe", package = "ra_mbe" }

View file

@ -4,6 +4,7 @@ use std::sync::Arc;
use mbe::MacroRules;
use ra_db::{salsa, SourceDatabase};
use ra_parser::FragmentKind;
use ra_prof::profile;
use ra_syntax::{AstNode, Parse, SyntaxNode};
@ -22,9 +23,12 @@ pub trait AstDatabase: SourceDatabase {
#[salsa::interned]
fn intern_macro(&self, macro_call: MacroCallLoc) -> MacroCallId;
fn macro_arg(&self, id: MacroCallId) -> Option<Arc<tt::Subtree>>;
fn macro_def(&self, id: MacroDefId) -> Option<Arc<mbe::MacroRules>>;
fn parse_macro(&self, macro_file: MacroFile) -> Option<Parse<SyntaxNode>>;
fn macro_arg(&self, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>>;
fn macro_def(&self, id: MacroDefId) -> Option<Arc<(mbe::MacroRules, mbe::TokenMap)>>;
fn parse_macro(
&self,
macro_file: MacroFile,
) -> Option<(Parse<SyntaxNode>, Arc<mbe::RevTokenMap>)>;
fn macro_expand(&self, macro_call: MacroCallId) -> Result<Arc<tt::Subtree>, String>;
}
@ -34,10 +38,13 @@ pub(crate) fn ast_id_map(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<AstIdM
Arc::new(map)
}
pub(crate) fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option<Arc<MacroRules>> {
pub(crate) fn macro_def(
db: &dyn AstDatabase,
id: MacroDefId,
) -> Option<Arc<(mbe::MacroRules, mbe::TokenMap)>> {
let macro_call = id.ast_id.to_node(db);
let arg = macro_call.token_tree()?;
let (tt, _) = mbe::ast_to_token_tree(&arg).or_else(|| {
let (tt, tmap) = mbe::ast_to_token_tree(&arg).or_else(|| {
log::warn!("fail on macro_def to token tree: {:#?}", arg);
None
})?;
@ -45,15 +52,18 @@ pub(crate) fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option<Arc<Macr
log::warn!("fail on macro_def parse: {:#?}", tt);
None
})?;
Some(Arc::new(rules))
Some(Arc::new((rules, tmap)))
}
pub(crate) fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<tt::Subtree>> {
pub(crate) fn macro_arg(
db: &dyn AstDatabase,
id: MacroCallId,
) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>> {
let loc = db.lookup_intern_macro(id);
let macro_call = loc.ast_id.to_node(db);
let arg = macro_call.token_tree()?;
let (tt, _) = mbe::ast_to_token_tree(&arg)?;
Some(Arc::new(tt))
let (tt, tmap) = mbe::ast_to_token_tree(&arg)?;
Some(Arc::new((tt, tmap)))
}
pub(crate) fn macro_expand(
@ -64,7 +74,7 @@ pub(crate) fn macro_expand(
let macro_arg = db.macro_arg(id).ok_or("Fail to args in to tt::TokenTree")?;
let macro_rules = db.macro_def(loc.def).ok_or("Fail to find macro definition")?;
let tt = macro_rules.expand(&macro_arg).map_err(|err| format!("{:?}", err))?;
let tt = macro_rules.0.expand(&macro_arg.0).map_err(|err| format!("{:?}", err))?;
// Set a hard limit for the expanded tt
let count = tt.count();
if count > 65536 {
@ -77,7 +87,7 @@ pub(crate) fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Optio
match file_id.0 {
HirFileIdRepr::FileId(file_id) => Some(db.parse(file_id).tree().syntax().clone()),
HirFileIdRepr::MacroFile(macro_file) => {
db.parse_macro(macro_file).map(|it| it.syntax_node())
db.parse_macro(macro_file).map(|(it, _)| it.syntax_node())
}
}
}
@ -85,8 +95,9 @@ pub(crate) fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Optio
pub(crate) fn parse_macro(
db: &dyn AstDatabase,
macro_file: MacroFile,
) -> Option<Parse<SyntaxNode>> {
) -> Option<(Parse<SyntaxNode>, Arc<mbe::RevTokenMap>)> {
let _p = profile("parse_macro_query");
let macro_call_id = macro_file.macro_call_id;
let tt = db
.macro_expand(macro_call_id)
@ -97,8 +108,11 @@ pub(crate) fn parse_macro(
log::warn!("fail on macro_parse: (reason: {})", err,);
})
.ok()?;
match macro_file.macro_file_kind {
MacroFileKind::Items => mbe::token_tree_to_items(&tt).ok().map(Parse::to_syntax),
MacroFileKind::Expr => mbe::token_tree_to_expr(&tt).ok().map(Parse::to_syntax),
}
let fragment_kind = match macro_file.macro_file_kind {
MacroFileKind::Items => FragmentKind::Items,
MacroFileKind::Expr => FragmentKind::Expr,
};
let (parse, rev_token_map) = mbe::token_tree_to_syntax_node(&tt, fragment_kind).ok()?;
Some((parse, Arc::new(rev_token_map)))
}

View file

@ -12,11 +12,12 @@ pub mod hygiene;
pub mod diagnostics;
use std::hash::{Hash, Hasher};
use std::sync::Arc;
use ra_db::{salsa, CrateId, FileId};
use ra_syntax::{
ast::{self, AstNode},
SyntaxNode,
SyntaxNode, TextRange, TextUnit,
};
use crate::ast_id_map::FileAstId;
@ -66,6 +67,30 @@ impl HirFileId {
}
}
}
/// Return expansion information if it is a macro-expansion file
pub fn expansion_info(self, db: &dyn db::AstDatabase) -> Option<ExpansionInfo> {
match self.0 {
HirFileIdRepr::FileId(_) => None,
HirFileIdRepr::MacroFile(macro_file) => {
let loc: MacroCallLoc = db.lookup_intern_macro(macro_file.macro_call_id);
let arg_start = loc.ast_id.to_node(db).token_tree()?.syntax().text_range().start();
let def_start =
loc.def.ast_id.to_node(db).token_tree()?.syntax().text_range().start();
let macro_def = db.macro_def(loc.def)?;
let shift = macro_def.0.shift();
let exp_map = db.parse_macro(macro_file)?.1;
let macro_arg = db.macro_arg(macro_file.macro_call_id)?;
let arg_start = (loc.ast_id.file_id, arg_start);
let def_start = (loc.def.ast_id.file_id, def_start);
Some(ExpansionInfo { arg_start, def_start, macro_arg, macro_def, exp_map, shift })
}
}
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@ -112,6 +137,38 @@ impl MacroCallId {
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
/// ExpansionInfo mainly describes how to map text range between src and expanded macro
pub struct ExpansionInfo {
pub(crate) arg_start: (HirFileId, TextUnit),
pub(crate) def_start: (HirFileId, TextUnit),
pub(crate) shift: u32,
pub(crate) macro_def: Arc<(mbe::MacroRules, mbe::TokenMap)>,
pub(crate) macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>,
pub(crate) exp_map: Arc<mbe::RevTokenMap>,
}
impl ExpansionInfo {
pub fn find_range(&self, from: TextRange) -> Option<(HirFileId, TextRange)> {
let token_id = look_in_rev_map(&self.exp_map, from)?;
let (token_map, (file_id, start_offset), token_id) = if token_id.0 >= self.shift {
(&self.macro_arg.1, self.arg_start, tt::TokenId(token_id.0 - self.shift).into())
} else {
(&self.macro_def.1, self.def_start, token_id)
};
let range = token_map.relative_range_of(token_id)?;
return Some((file_id, range + start_offset));
fn look_in_rev_map(exp_map: &mbe::RevTokenMap, from: TextRange) -> Option<tt::TokenId> {
exp_map.ranges.iter().find(|&it| it.0.is_subrange(&from)).map(|it| it.1)
}
}
}
/// `AstId` points to an AST node in any file.
///
/// It is stable across reparses, and can be used as salsa key/value.

View file

@ -29,6 +29,21 @@ pub struct NavigationTarget {
docs: Option<String>,
}
fn find_range_from_node(
db: &RootDatabase,
src: hir::HirFileId,
node: &SyntaxNode,
) -> (FileId, TextRange) {
let text_range = node.text_range();
let (file_id, text_range) = src
.expansion_info(db)
.and_then(|expansion_info| expansion_info.find_range(text_range))
.unwrap_or((src, text_range));
// FIXME: handle recursive macro generated macro
(file_id.original_file(db), text_range)
}
impl NavigationTarget {
/// When `focus_range` is specified, returns it. otherwise
/// returns `full_range`
@ -72,8 +87,12 @@ impl NavigationTarget {
self.focus_range
}
pub(crate) fn from_bind_pat(file_id: FileId, pat: &ast::BindPat) -> NavigationTarget {
NavigationTarget::from_named(file_id, pat, None, None)
pub(crate) fn from_bind_pat(
db: &RootDatabase,
file_id: FileId,
pat: &ast::BindPat,
) -> NavigationTarget {
NavigationTarget::from_named(db, file_id.into(), pat, None, None)
}
pub(crate) fn from_symbol(db: &RootDatabase, symbol: FileSymbol) -> NavigationTarget {
@ -96,7 +115,7 @@ impl NavigationTarget {
) -> NavigationTarget {
let parse = db.parse(file_id);
let pat = pat.to_node(parse.tree().syntax());
NavigationTarget::from_bind_pat(file_id, &pat)
NavigationTarget::from_bind_pat(db, file_id, &pat)
}
pub(crate) fn from_self_param(
@ -119,31 +138,46 @@ impl NavigationTarget {
pub(crate) fn from_module(db: &RootDatabase, module: hir::Module) -> NavigationTarget {
let src = module.definition_source(db);
let file_id = src.file_id.original_file(db);
let name = module.name(db).map(|it| it.to_string().into()).unwrap_or_default();
match src.ast {
ModuleSource::SourceFile(node) => {
NavigationTarget::from_syntax(file_id, name, None, node.syntax(), None, None)
let (file_id, text_range) = find_range_from_node(db, src.file_id, node.syntax());
NavigationTarget::from_syntax(
file_id,
name,
None,
text_range,
node.syntax(),
None,
None,
)
}
ModuleSource::Module(node) => {
let (file_id, text_range) = find_range_from_node(db, src.file_id, node.syntax());
NavigationTarget::from_syntax(
file_id,
name,
None,
text_range,
node.syntax(),
node.doc_comment_text(),
node.short_label(),
)
}
ModuleSource::Module(node) => NavigationTarget::from_syntax(
file_id,
name,
None,
node.syntax(),
node.doc_comment_text(),
node.short_label(),
),
}
}
pub(crate) fn from_module_to_decl(db: &RootDatabase, module: hir::Module) -> NavigationTarget {
let name = module.name(db).map(|it| it.to_string().into()).unwrap_or_default();
if let Some(src) = module.declaration_source(db) {
let file_id = src.file_id.original_file(db);
let (file_id, text_range) = find_range_from_node(db, src.file_id, src.ast.syntax());
return NavigationTarget::from_syntax(
file_id,
name,
None,
text_range,
src.ast.syntax(),
src.ast.doc_comment_text(),
src.ast.short_label(),
@ -154,13 +188,25 @@ impl NavigationTarget {
pub(crate) fn from_field(db: &RootDatabase, field: hir::StructField) -> NavigationTarget {
let src = field.source(db);
let file_id = src.file_id.original_file(db);
match src.ast {
FieldSource::Named(it) => {
NavigationTarget::from_named(file_id, &it, it.doc_comment_text(), it.short_label())
}
FieldSource::Named(it) => NavigationTarget::from_named(
db,
src.file_id,
&it,
it.doc_comment_text(),
it.short_label(),
),
FieldSource::Pos(it) => {
NavigationTarget::from_syntax(file_id, "".into(), None, it.syntax(), None, None)
let (file_id, text_range) = find_range_from_node(db, src.file_id, it.syntax());
NavigationTarget::from_syntax(
file_id,
"".into(),
None,
text_range,
it.syntax(),
None,
None,
)
}
}
}
@ -172,7 +218,8 @@ impl NavigationTarget {
{
let src = def.source(db);
NavigationTarget::from_named(
src.file_id.original_file(db),
db,
src.file_id,
&src.ast,
src.ast.doc_comment_text(),
src.ast.short_label(),
@ -212,10 +259,13 @@ impl NavigationTarget {
impl_block: hir::ImplBlock,
) -> NavigationTarget {
let src = impl_block.source(db);
let (file_id, text_range) = find_range_from_node(db, src.file_id, src.ast.syntax());
NavigationTarget::from_syntax(
src.file_id.original_file(db),
file_id,
"impl".into(),
None,
text_range,
src.ast.syntax(),
None,
None,
@ -236,12 +286,7 @@ impl NavigationTarget {
pub(crate) fn from_macro_def(db: &RootDatabase, macro_call: hir::MacroDef) -> NavigationTarget {
let src = macro_call.source(db);
log::debug!("nav target {:#?}", src.ast.syntax());
NavigationTarget::from_named(
src.file_id.original_file(db),
&src.ast,
src.ast.doc_comment_text(),
None,
)
NavigationTarget::from_named(db, src.file_id, &src.ast, src.ast.doc_comment_text(), None)
}
#[cfg(test)]
@ -270,21 +315,33 @@ impl NavigationTarget {
/// Allows `NavigationTarget` to be created from a `NameOwner`
pub(crate) fn from_named(
file_id: FileId,
db: &RootDatabase,
file_id: hir::HirFileId,
node: &impl ast::NameOwner,
docs: Option<String>,
description: Option<String>,
) -> NavigationTarget {
//FIXME: use `_` instead of empty string
let name = node.name().map(|it| it.text().clone()).unwrap_or_default();
let focus_range = node.name().map(|it| it.syntax().text_range());
NavigationTarget::from_syntax(file_id, name, focus_range, node.syntax(), docs, description)
let focus_range = node.name().map(|it| find_range_from_node(db, file_id, it.syntax()).1);
let (file_id, full_range) = find_range_from_node(db, file_id, node.syntax());
NavigationTarget::from_syntax(
file_id,
name,
focus_range,
full_range,
node.syntax(),
docs,
description,
)
}
fn from_syntax(
file_id: FileId,
name: SmolStr,
focus_range: Option<TextRange>,
full_range: TextRange,
node: &SyntaxNode,
docs: Option<String>,
description: Option<String>,
@ -293,9 +350,8 @@ impl NavigationTarget {
file_id,
name,
kind: node.kind(),
full_range: node.text_range(),
full_range,
focus_range,
// ptr: Some(LocalSyntaxPtr::new(node)),
container_name: None,
description,
docs,

View file

@ -101,19 +101,20 @@ pub(crate) fn name_definition(
}
}
if let Some(nav) = named_target(file_id, &parent) {
if let Some(nav) = named_target(db, file_id, &parent) {
return Some(vec![nav]);
}
None
}
fn named_target(file_id: FileId, node: &SyntaxNode) -> Option<NavigationTarget> {
fn named_target(db: &RootDatabase, file_id: FileId, node: &SyntaxNode) -> Option<NavigationTarget> {
match_ast! {
match node {
ast::StructDef(it) => {
Some(NavigationTarget::from_named(
file_id,
db,
file_id.into(),
&it,
it.doc_comment_text(),
it.short_label(),
@ -121,7 +122,8 @@ fn named_target(file_id: FileId, node: &SyntaxNode) -> Option<NavigationTarget>
},
ast::EnumDef(it) => {
Some(NavigationTarget::from_named(
file_id,
db,
file_id.into(),
&it,
it.doc_comment_text(),
it.short_label(),
@ -129,7 +131,8 @@ fn named_target(file_id: FileId, node: &SyntaxNode) -> Option<NavigationTarget>
},
ast::EnumVariant(it) => {
Some(NavigationTarget::from_named(
file_id,
db,
file_id.into(),
&it,
it.doc_comment_text(),
it.short_label(),
@ -137,7 +140,8 @@ fn named_target(file_id: FileId, node: &SyntaxNode) -> Option<NavigationTarget>
},
ast::FnDef(it) => {
Some(NavigationTarget::from_named(
file_id,
db,
file_id.into(),
&it,
it.doc_comment_text(),
it.short_label(),
@ -145,7 +149,8 @@ fn named_target(file_id: FileId, node: &SyntaxNode) -> Option<NavigationTarget>
},
ast::TypeAliasDef(it) => {
Some(NavigationTarget::from_named(
file_id,
db,
file_id.into(),
&it,
it.doc_comment_text(),
it.short_label(),
@ -153,7 +158,8 @@ fn named_target(file_id: FileId, node: &SyntaxNode) -> Option<NavigationTarget>
},
ast::ConstDef(it) => {
Some(NavigationTarget::from_named(
file_id,
db,
file_id.into(),
&it,
it.doc_comment_text(),
it.short_label(),
@ -161,7 +167,8 @@ fn named_target(file_id: FileId, node: &SyntaxNode) -> Option<NavigationTarget>
},
ast::StaticDef(it) => {
Some(NavigationTarget::from_named(
file_id,
db,
file_id.into(),
&it,
it.doc_comment_text(),
it.short_label(),
@ -169,7 +176,8 @@ fn named_target(file_id: FileId, node: &SyntaxNode) -> Option<NavigationTarget>
},
ast::TraitDef(it) => {
Some(NavigationTarget::from_named(
file_id,
db,
file_id.into(),
&it,
it.doc_comment_text(),
it.short_label(),
@ -177,7 +185,8 @@ fn named_target(file_id: FileId, node: &SyntaxNode) -> Option<NavigationTarget>
},
ast::RecordFieldDef(it) => {
Some(NavigationTarget::from_named(
file_id,
db,
file_id.into(),
&it,
it.doc_comment_text(),
it.short_label(),
@ -185,7 +194,8 @@ fn named_target(file_id: FileId, node: &SyntaxNode) -> Option<NavigationTarget>
},
ast::Module(it) => {
Some(NavigationTarget::from_named(
file_id,
db,
file_id.into(),
&it,
it.doc_comment_text(),
it.short_label(),
@ -193,7 +203,8 @@ fn named_target(file_id: FileId, node: &SyntaxNode) -> Option<NavigationTarget>
},
ast::MacroCall(it) => {
Some(NavigationTarget::from_named(
file_id,
db,
file_id.into(),
&it,
it.doc_comment_text(),
None,
@ -334,6 +345,46 @@ mod tests {
);
}
#[test]
fn goto_definition_works_for_macro_defined_fn_with_arg() {
check_goto(
"
//- /lib.rs
macro_rules! define_fn {
($name:ident) => (fn $name() {})
}
define_fn!(
foo
)
fn bar() {
<|>foo();
}
",
"foo FN_DEF FileId(1) [80; 83) [80; 83)",
);
}
#[test]
fn goto_definition_works_for_macro_defined_fn_no_arg() {
check_goto(
"
//- /lib.rs
macro_rules! define_fn {
() => (fn foo() {})
}
define_fn!();
fn bar() {
<|>foo();
}
",
"foo FN_DEF FileId(1) [39; 42) [39; 42)",
);
}
#[test]
fn goto_definition_works_for_methods() {
covers!(goto_definition_works_for_methods);

View file

@ -94,10 +94,10 @@ impl FromIterator<TableEntry<FileId, Parse<ast::SourceFile>>> for SyntaxTreeStat
}
}
impl FromIterator<TableEntry<MacroFile, Option<Parse<SyntaxNode>>>> for SyntaxTreeStats {
impl<M> FromIterator<TableEntry<MacroFile, Option<(Parse<SyntaxNode>, M)>>> for SyntaxTreeStats {
fn from_iter<T>(iter: T) -> SyntaxTreeStats
where
T: IntoIterator<Item = TableEntry<MacroFile, Option<Parse<SyntaxNode>>>>,
T: IntoIterator<Item = TableEntry<MacroFile, Option<(Parse<SyntaxNode>, M)>>>,
{
let mut res = SyntaxTreeStats::default();
for entry in iter {

View file

@ -31,8 +31,7 @@ pub enum ExpandError {
}
pub use crate::syntax_bridge::{
ast_to_token_tree, syntax_node_to_token_tree, token_tree_to_expr, token_tree_to_items,
token_tree_to_macro_stmts, token_tree_to_pat, token_tree_to_ty,
ast_to_token_tree, syntax_node_to_token_tree, token_tree_to_syntax_node, RevTokenMap, TokenMap,
};
/// This struct contains AST for a single `macro_rules` definition. What might
@ -118,6 +117,10 @@ impl MacroRules {
shift_subtree(&mut tt, self.shift);
mbe_expander::expand(self, &tt)
}
pub fn shift(&self) -> u32 {
self.shift
}
}
impl Rule {

View file

@ -1,9 +1,6 @@
//! FIXME: write short doc here
use ra_parser::{
FragmentKind::{self, *},
ParseError, TreeSink,
};
use ra_parser::{FragmentKind, ParseError, TreeSink};
use ra_syntax::{
ast, AstNode, AstToken, NodeOrToken, Parse, SmolStr, SyntaxKind, SyntaxKind::*, SyntaxNode,
SyntaxTreeBuilder, TextRange, TextUnit, T,
@ -14,12 +11,18 @@ use crate::subtree_source::SubtreeTokenSource;
use crate::ExpandError;
/// Maps `tt::TokenId` to the relative range of the original token.
#[derive(Default)]
#[derive(Debug, PartialEq, Eq, Default)]
pub struct TokenMap {
/// Maps `tt::TokenId` to the *relative* source range.
tokens: Vec<TextRange>,
}
/// Maps relative range of the expanded syntax node to `tt::TokenId`
#[derive(Debug, PartialEq, Eq, Default)]
pub struct RevTokenMap {
pub ranges: Vec<(TextRange, tt::TokenId)>,
}
/// Convert the syntax tree (what user has written) to a `TokenTree` (what macro
/// will consume).
pub fn ast_to_token_tree(ast: &ast::TokenTree) -> Option<(tt::Subtree, TokenMap)> {
@ -49,10 +52,10 @@ pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> Option<(tt::Subtree, Toke
// * ImplItems(SmallVec<[ast::ImplItem; 1]>)
// * ForeignItems(SmallVec<[ast::ForeignItem; 1]>
fn fragment_to_syntax_node(
pub fn token_tree_to_syntax_node(
tt: &tt::Subtree,
fragment_kind: FragmentKind,
) -> Result<Parse<SyntaxNode>, ExpandError> {
) -> Result<(Parse<SyntaxNode>, RevTokenMap), ExpandError> {
let tmp;
let tokens = match tt {
tt::Subtree { delimiter: tt::Delimiter::None, token_trees } => token_trees.as_slice(),
@ -69,38 +72,8 @@ fn fragment_to_syntax_node(
return Err(ExpandError::ConversionError);
}
//FIXME: would be cool to report errors
let parse = tree_sink.inner.finish();
Ok(parse)
}
/// Parses the token tree (result of macro expansion) to an expression
pub fn token_tree_to_expr(tt: &tt::Subtree) -> Result<Parse<ast::Expr>, ExpandError> {
let parse = fragment_to_syntax_node(tt, Expr)?;
parse.cast().ok_or_else(|| crate::ExpandError::ConversionError)
}
/// Parses the token tree (result of macro expansion) to a Pattern
pub fn token_tree_to_pat(tt: &tt::Subtree) -> Result<Parse<ast::Pat>, ExpandError> {
let parse = fragment_to_syntax_node(tt, Pattern)?;
parse.cast().ok_or_else(|| crate::ExpandError::ConversionError)
}
/// Parses the token tree (result of macro expansion) to a Type
pub fn token_tree_to_ty(tt: &tt::Subtree) -> Result<Parse<ast::TypeRef>, ExpandError> {
let parse = fragment_to_syntax_node(tt, Type)?;
parse.cast().ok_or_else(|| crate::ExpandError::ConversionError)
}
/// Parses the token tree (result of macro expansion) as a sequence of stmts
pub fn token_tree_to_macro_stmts(tt: &tt::Subtree) -> Result<Parse<ast::MacroStmts>, ExpandError> {
let parse = fragment_to_syntax_node(tt, Statements)?;
parse.cast().ok_or_else(|| crate::ExpandError::ConversionError)
}
/// Parses the token tree (result of macro expansion) as a sequence of items
pub fn token_tree_to_items(tt: &tt::Subtree) -> Result<Parse<ast::MacroItems>, ExpandError> {
let parse = fragment_to_syntax_node(tt, Items)?;
parse.cast().ok_or_else(|| crate::ExpandError::ConversionError)
let (parse, range_map) = tree_sink.finish();
Ok((parse, range_map))
}
impl TokenMap {
@ -116,6 +89,12 @@ impl TokenMap {
}
}
impl RevTokenMap {
fn add(&mut self, relative_range: TextRange, token_id: tt::TokenId) {
self.ranges.push((relative_range, token_id.clone()))
}
}
/// Returns the textual content of a doc comment block as a quoted string
/// That is, strips leading `///` (or `/**`, etc)
/// and strips the ending `*/`
@ -262,6 +241,7 @@ struct TtTreeSink<'a> {
cursor: Cursor<'a>,
text_pos: TextUnit,
inner: SyntaxTreeBuilder,
range_map: RevTokenMap,
// Number of roots
// Use for detect ill-form tree which is not single root
@ -276,8 +256,13 @@ impl<'a> TtTreeSink<'a> {
text_pos: 0.into(),
inner: SyntaxTreeBuilder::default(),
roots: smallvec::SmallVec::new(),
range_map: RevTokenMap::default(),
}
}
fn finish(self) -> (Parse<SyntaxNode>, RevTokenMap) {
(self.inner.finish(), self.range_map)
}
}
fn delim_to_str(d: tt::Delimiter, closing: bool) -> SmolStr {
@ -307,6 +292,15 @@ impl<'a> TreeSink for TtTreeSink<'a> {
match self.cursor.token_tree() {
Some(tt::TokenTree::Leaf(leaf)) => {
// Mark the range if needed
if let tt::Leaf::Ident(ident) = leaf {
if kind == IDENT {
let range =
TextRange::offset_len(self.text_pos, TextUnit::of_str(&ident.text));
self.range_map.add(range, ident.id);
}
}
self.cursor = self.cursor.bump();
self.buf += &format!("{}", leaf);
}
@ -337,6 +331,7 @@ impl<'a> TreeSink for TtTreeSink<'a> {
{
if curr.spacing == tt::Spacing::Alone {
self.inner.token(WHITESPACE, " ".into());
self.text_pos += TextUnit::of_char(' ');
}
}
}
@ -423,6 +418,6 @@ mod tests {
"#,
);
let expansion = expand(&rules, "stmts!();");
assert!(token_tree_to_expr(&expansion).is_err());
assert!(token_tree_to_syntax_node(&expansion, FragmentKind::Expr).is_err());
}
}

View file

@ -1,3 +1,4 @@
use ra_parser::FragmentKind;
use ra_syntax::{ast, AstNode, NodeOrToken, WalkEvent};
use test_utils::assert_eq_text;
@ -126,9 +127,9 @@ fn test_expr_order() {
"#,
);
let expanded = expand(&rules, "foo! { 1 + 1}");
let tree = token_tree_to_items(&expanded).unwrap().tree();
let tree = token_tree_to_syntax_node(&expanded, FragmentKind::Items).unwrap().0.syntax_node();
let dump = format!("{:#?}", tree.syntax());
let dump = format!("{:#?}", tree);
assert_eq_text!(
dump.trim(),
r#"MACRO_ITEMS@[0; 15)
@ -383,9 +384,9 @@ fn test_expand_to_item_list() {
",
);
let expansion = expand(&rules, "structs!(Foo, Bar);");
let tree = token_tree_to_items(&expansion).unwrap().tree();
let tree = token_tree_to_syntax_node(&expansion, FragmentKind::Items).unwrap().0.syntax_node();
assert_eq!(
format!("{:#?}", tree.syntax()).trim(),
format!("{:#?}", tree).trim(),
r#"
MACRO_ITEMS@[0; 40)
STRUCT_DEF@[0; 20)
@ -501,10 +502,11 @@ fn test_tt_to_stmts() {
);
let expanded = expand(&rules, "foo!{}");
let stmts = token_tree_to_macro_stmts(&expanded).unwrap().tree();
let stmts =
token_tree_to_syntax_node(&expanded, FragmentKind::Statements).unwrap().0.syntax_node();
assert_eq!(
format!("{:#?}", stmts.syntax()).trim(),
format!("{:#?}", stmts).trim(),
r#"MACRO_STMTS@[0; 15)
LET_STMT@[0; 7)
LET_KW@[0; 3) "let"
@ -754,7 +756,10 @@ fn test_all_items() {
}
"#,
);
assert_expansion(MacroKind::Items, &rules, r#"
assert_expansion(
MacroKind::Items,
&rules,
r#"
foo! {
extern crate a;
mod b;
@ -770,7 +775,9 @@ fn test_all_items() {
extern {}
type T = u8;
}
"#, r#"extern crate a ; mod b ; mod c {} use d ; const E : i32 = 0 ; static F : i32 = 0 ; impl G {} struct H ; enum I {Foo} trait J {} fn h () {} extern {} type T = u8 ;"#);
"#,
r#"extern crate a ; mod b ; mod c {} use d ; const E : i32 = 0 ; static F : i32 = 0 ; impl G {} struct H ; enum I {Foo} trait J {} fn h () {} extern {} type T = u8 ;"#,
);
}
#[test]
@ -946,10 +953,10 @@ fn test_vec() {
);
let expansion = expand(&rules, r#"vec![1u32,2];"#);
let tree = token_tree_to_expr(&expansion).unwrap().tree();
let tree = token_tree_to_syntax_node(&expansion, FragmentKind::Expr).unwrap().0.syntax_node();
assert_eq!(
format!("{:#?}", tree.syntax()).trim(),
format!("{:#?}", tree).trim(),
r#"BLOCK_EXPR@[0; 45)
BLOCK@[0; 45)
L_CURLY@[0; 1) "{"
@ -1088,8 +1095,12 @@ macro_rules! generate_pattern_iterators {
"#,
);
assert_expansion(MacroKind::Items, &rules, r#"generate_pattern_iterators ! ( double ended ; with # [ stable ( feature = "rust1" , since = "1.0.0" ) ] , Split , RSplit , & 'a str );"#,
"fn foo () {}");
assert_expansion(
MacroKind::Items,
&rules,
r#"generate_pattern_iterators ! ( double ended ; with # [ stable ( feature = "rust1" , since = "1.0.0" ) ] , Split , RSplit , & 'a str );"#,
"fn foo () {}",
);
}
#[test]
@ -1171,8 +1182,12 @@ fn test_impl_nonzero_fmt() {
"#,
);
assert_expansion(MacroKind::Items, &rules, r#"impl_nonzero_fmt! { # [stable(feature= "nonzero",since="1.28.0")] (Debug,Display,Binary,Octal,LowerHex,UpperHex) for NonZeroU8}"#,
"fn foo () {}");
assert_expansion(
MacroKind::Items,
&rules,
r#"impl_nonzero_fmt! { # [stable(feature= "nonzero",since="1.28.0")] (Debug,Display,Binary,Octal,LowerHex,UpperHex) for NonZeroU8}"#,
"fn foo () {}",
);
}
#[test]
@ -1189,8 +1204,12 @@ fn test_cfg_if_items() {
"#,
);
assert_expansion(MacroKind::Items, &rules, r#"__cfg_if_items ! { ( rustdoc , ) ; ( ( ) ( # [ cfg ( any ( target_os = "redox" , unix ) ) ] # [ stable ( feature = "rust1" , since = "1.0.0" ) ] pub use sys :: ext as unix ; # [ cfg ( windows ) ] # [ stable ( feature = "rust1" , since = "1.0.0" ) ] pub use sys :: ext as windows ; # [ cfg ( any ( target_os = "linux" , target_os = "l4re" ) ) ] pub mod linux ; ) ) , }"#,
"__cfg_if_items ! {(rustdoc ,) ;}");
assert_expansion(
MacroKind::Items,
&rules,
r#"__cfg_if_items ! { ( rustdoc , ) ; ( ( ) ( # [ cfg ( any ( target_os = "redox" , unix ) ) ] # [ stable ( feature = "rust1" , since = "1.0.0" ) ] pub use sys :: ext as unix ; # [ cfg ( windows ) ] # [ stable ( feature = "rust1" , since = "1.0.0" ) ] pub use sys :: ext as windows ; # [ cfg ( any ( target_os = "linux" , target_os = "l4re" ) ) ] pub mod linux ; ) ) , }"#,
"__cfg_if_items ! {(rustdoc ,) ;}",
);
}
#[test]
@ -1233,10 +1252,13 @@ cfg_if ! {
"#,
"__cfg_if_items ! {() ; ((target_env = \"msvc\") ()) , ((all (target_arch = \"wasm32\" , not (target_os = \"emscripten\"))) ()) , (() (mod libunwind ; pub use libunwind :: * ;)) ,}");
assert_expansion(MacroKind::Items, &rules, r#"
assert_expansion(
MacroKind::Items,
&rules,
r#"
cfg_if ! { @ __apply cfg ( all ( not ( any ( not ( any ( target_os = "solaris" , target_os = "illumos" ) ) ) ) ) ) , }
"#,
""
"",
);
}
@ -1291,10 +1313,13 @@ macro_rules! RIDL {
}"#,
);
let expanded = expand(&rules, r#"
let expanded = expand(
&rules,
r#"
RIDL!{interface ID3D11Asynchronous(ID3D11AsynchronousVtbl): ID3D11DeviceChild(ID3D11DeviceChildVtbl) {
fn GetDataSize(&mut self) -> UINT
}}"#);
}}"#,
);
assert_eq!(expanded.to_string(), "impl ID3D11Asynchronous {pub unsafe fn GetDataSize (& mut self) -> UINT {((* self . lpVtbl) .GetDataSize) (self)}}");
}
@ -1340,7 +1365,8 @@ quick_error ! (SORT [enum Wrapped # [derive (Debug)]] items [
#[test]
fn test_empty_repeat_vars_in_empty_repeat_vars() {
let rules = create_rules(r#"
let rules = create_rules(
r#"
macro_rules! delegate_impl {
([$self_type:ident, $self_wrap:ty, $self_map:ident]
pub trait $name:ident $(: $sup:ident)* $(+ $more_sup:ident)* {
@ -1385,9 +1411,15 @@ macro_rules! delegate_impl {
}
}
}
"#);
"#,
);
assert_expansion(MacroKind::Items, &rules, r#"delegate_impl ! {[G , & 'a mut G , deref] pub trait Data : GraphBase {@ section type type NodeWeight ;}}"#, "impl <> Data for & \'a mut G where G : Data {}");
assert_expansion(
MacroKind::Items,
&rules,
r#"delegate_impl ! {[G , & 'a mut G , deref] pub trait Data : GraphBase {@ section type type NodeWeight ;}}"#,
"impl <> Data for & \'a mut G where G : Data {}",
);
}
pub(crate) fn create_rules(macro_definition: &str) -> MacroRules {
@ -1436,22 +1468,30 @@ pub(crate) fn assert_expansion(
};
let (expanded_tree, expected_tree) = match kind {
MacroKind::Items => {
let expanded_tree = token_tree_to_items(&expanded).unwrap().tree();
let expected_tree = token_tree_to_items(&expected).unwrap().tree();
let expanded_tree =
token_tree_to_syntax_node(&expanded, FragmentKind::Items).unwrap().0.syntax_node();
let expected_tree =
token_tree_to_syntax_node(&expected, FragmentKind::Items).unwrap().0.syntax_node();
(
debug_dump_ignore_spaces(expanded_tree.syntax()).trim().to_string(),
debug_dump_ignore_spaces(expected_tree.syntax()).trim().to_string(),
debug_dump_ignore_spaces(&expanded_tree).trim().to_string(),
debug_dump_ignore_spaces(&expected_tree).trim().to_string(),
)
}
MacroKind::Stmts => {
let expanded_tree = token_tree_to_macro_stmts(&expanded).unwrap().tree();
let expected_tree = token_tree_to_macro_stmts(&expected).unwrap().tree();
let expanded_tree = token_tree_to_syntax_node(&expanded, FragmentKind::Statements)
.unwrap()
.0
.syntax_node();
let expected_tree = token_tree_to_syntax_node(&expected, FragmentKind::Statements)
.unwrap()
.0
.syntax_node();
(
debug_dump_ignore_spaces(expanded_tree.syntax()).trim().to_string(),
debug_dump_ignore_spaces(expected_tree.syntax()).trim().to_string(),
debug_dump_ignore_spaces(&expanded_tree).trim().to_string(),
debug_dump_ignore_spaces(&expected_tree).trim().to_string(),
)
}
};